hexsha
stringlengths 40
40
| size
int64 4
1.05M
| content
stringlengths 4
1.05M
| avg_line_length
float64 1.33
100
| max_line_length
int64 1
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
28b9df927ec936505848845508ec0db3e1bed871 | 2,019 | macro_rules! replace(
// Test-generation macro: expands to a #[test] fn named `$name` that compiles
// `$re`, applies replacement method `$which` (`replace` or `replace_all`)
// with `$replace` on `$search`, and asserts the output equals `$result`.
// `regex!`, `text!`, `t!` and `no_expand!` are harness macros defined
// elsewhere in this test crate (not visible in this excerpt).
($name:ident, $which:ident, $re:expr,
$search:expr, $replace:expr, $result:expr) => (
#[test]
fn $name() {
let re = regex!($re);
assert_eq!(re.$which(text!($search), $replace), text!($result));
}
);
);
// `replace` substitutes only the first match; `replace_all` substitutes every match.
replace!(first, replace, r"\d", "age: 26", t!("Z"), "age: Z6");
replace!(plus, replace, r"\d+", "age: 26", t!("Z"), "age: Z");
replace!(all, replace_all, r"\d", "age: 26", t!("Z"), "age: ZZ");
// In the replacement string, `$N` expands to capture group N and `$$` is a literal `$`.
replace!(groups, replace, r"(\S+)\s+(\S+)", "w1 w2", t!("$2 $1"), "w2 w1");
replace!(double_dollar, replace,
r"(\S+)\s+(\S+)", "w1 w2", t!("$2 $$1"), "w2 $1");
// replace!(adjacent_index, replace,
// r"([^aeiouy])ies$", "skies", t!("$1y"), "sky");
// Named capture groups are referenced as `$name` in the replacement.
replace!(named, replace_all,
r"(?P<first>\S+)\s+(?P<last>\S+)(?P<space>\s*)",
"w1 w2 w3 w4", t!("$last $first$space"), "w2 w1 w4 w3");
replace!(trim, replace_all, "^[ \t]+|[ \t]+$", " \t trim me\t \t",
t!(""), "trim me");
replace!(number_hypen, replace, r"(.)(.)", "ab", t!("$1-$2"), "a-b");
// replace!(number_underscore, replace, r"(.)(.)", "ab", t!("$1_$2"), "a_b");
replace!(simple_expand, replace_all, r"(\w) (\w)", "a b", t!("$2 $1"), "b a");
replace!(literal_dollar1, replace_all,
r"(\w+) (\w+)", "a b", t!("$$1"), "$1");
replace!(literal_dollar2, replace_all,
r"(\w+) (\w+)", "a b", t!("$2 $$c $1"), "b $c a");
// `no_expand!` replacements are taken literally: `$` has no special meaning.
replace!(no_expand1, replace,
r"(\S+)\s+(\S+)", "w1 w2", no_expand!("$2 $1"), "$2 $1");
replace!(no_expand2, replace,
r"(\S+)\s+(\S+)", "w1 w2", no_expand!("$$1"), "$$1");
// See https://github.com/rust-lang/regex/issues/314
replace!(match_at_start_replace_with_empty, replace_all, r"foo", "foobar", t!(""), "bar");
// See https://github.com/rust-lang/regex/issues/393
replace!(single_empty_match, replace, r"^", "bar", t!("foo"), "foobar");
// See https://github.com/rust-lang/regex/issues/399
replace!(capture_longest_possible_name, replace_all, r"(.)", "b", t!("${1}a $1a"), "ba ");
| 44.866667 | 90 | 0.521545 |
2290c699d8b5ee89e62bd24edda4e37ca3fadc36 | 169 | // ANCHOR: here
/// Sanity check: integer addition produces the expected value.
#[test]
fn it_works() {
    let sum = 2 + 2;
    assert_eq!(sum, 4);
}
/// Long-running test, excluded from normal runs via `#[ignore]`;
/// run it explicitly with `cargo test -- --ignored`.
#[test]
#[ignore]
fn expensive_test() {
// code that takes an hour to run
}
// ANCHOR_END: here
| 13 | 37 | 0.591716 |
117f96d277e01dd28de2179fabe23987b64ed1e6 | 9,100 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Parquet data source
use std::string::String;
use std::sync::Arc;
use arrow::datatypes::*;
use crate::datasource::TableProvider;
use crate::error::Result;
use crate::physical_plan::parquet::ParquetExec;
use crate::physical_plan::ExecutionPlan;
/// Table-based representation of a `ParquetFile`.
pub struct ParquetTable {
// Path of the parquet file backing this table, as passed to `try_new`.
path: String,
// Schema discovered from the file at construction time (see `try_new`).
schema: SchemaRef,
}
impl ParquetTable {
    /// Attempt to initialize a new `ParquetTable` from a file path.
    ///
    /// Opens the file once (no projection, batch size 0) purely to discover
    /// its schema; returns an error if the file cannot be opened or parsed.
    pub fn try_new(path: &str) -> Result<Self> {
        let schema = ParquetExec::try_new(path, None, 0)?.schema();
        Ok(Self {
            path: path.to_string(),
            schema,
        })
    }
}
impl TableProvider for ParquetTable {
/// Get the schema for this parquet file.
fn schema(&self) -> SchemaRef {
self.schema.clone()
}
/// Scan the file(s), using the provided projection, and return one BatchIterator per
/// partition.
fn scan(
&self,
projection: &Option<Vec<usize>>,
batch_size: usize,
) -> Result<Arc<dyn ExecutionPlan>> {
Ok(Arc::new(ParquetExec::try_new(
&self.path,
projection.clone(),
batch_size,
)?))
}
}
#[cfg(test)]
// Integration tests against the `alltypes_plain.parquet` fixture shipped with
// the parquet test data; the PARQUET_TEST_DATA environment variable must point
// at the directory containing it.
mod tests {
use super::*;
use arrow::array::{
BinaryArray, BooleanArray, Float32Array, Float64Array, Int32Array,
TimestampNanosecondArray,
};
use arrow::record_batch::RecordBatch;
use std::env;
// Scanning with a small batch size should split the 8-row file into 4 batches.
#[test]
fn read_small_batches() -> Result<()> {
let table = load_table("alltypes_plain.parquet")?;
let projection = None;
let exec = table.scan(&projection, 2)?;
let it = exec.execute(0)?;
let mut it = it.lock().unwrap();
let mut count = 0;
while let Some(batch) = it.next_batch()? {
assert_eq!(11, batch.num_columns());
assert_eq!(2, batch.num_rows());
count += 1;
}
// we should have seen 4 batches of 2 rows
assert_eq!(4, count);
Ok(())
}
// Verifies the inferred Arrow schema (names and types) of the fixture file.
#[test]
fn read_alltypes_plain_parquet() -> Result<()> {
let table = load_table("alltypes_plain.parquet")?;
let x: Vec<String> = table
.schema()
.fields()
.iter()
.map(|f| format!("{}: {:?}", f.name(), f.data_type()))
.collect();
let y = x.join("\n");
assert_eq!(
"id: Int32\n\
bool_col: Boolean\n\
tinyint_col: Int32\n\
smallint_col: Int32\n\
int_col: Int32\n\
bigint_col: Int64\n\
float_col: Float32\n\
double_col: Float64\n\
date_string_col: Binary\n\
string_col: Binary\n\
timestamp_col: Timestamp(Nanosecond, None)",
y
);
let projection = None;
let batch = get_first_batch(table, &projection)?;
assert_eq!(11, batch.num_columns());
assert_eq!(8, batch.num_rows());
Ok(())
}
// Each of the read_<type>_... tests below projects a single column (by index)
// and checks the decoded values of the first batch.
#[test]
fn read_bool_alltypes_plain_parquet() -> Result<()> {
let table = load_table("alltypes_plain.parquet")?;
let projection = Some(vec![1]);
let batch = get_first_batch(table, &projection)?;
assert_eq!(1, batch.num_columns());
assert_eq!(8, batch.num_rows());
let array = batch
.column(0)
.as_any()
.downcast_ref::<BooleanArray>()
.unwrap();
let mut values: Vec<bool> = vec![];
for i in 0..batch.num_rows() {
values.push(array.value(i));
}
assert_eq!(
"[true, false, true, false, true, false, true, false]",
format!("{:?}", values)
);
Ok(())
}
#[test]
fn read_i32_alltypes_plain_parquet() -> Result<()> {
let table = load_table("alltypes_plain.parquet")?;
let projection = Some(vec![0]);
let batch = get_first_batch(table, &projection)?;
assert_eq!(1, batch.num_columns());
assert_eq!(8, batch.num_rows());
let array = batch
.column(0)
.as_any()
.downcast_ref::<Int32Array>()
.unwrap();
let mut values: Vec<i32> = vec![];
for i in 0..batch.num_rows() {
values.push(array.value(i));
}
assert_eq!("[4, 5, 6, 7, 2, 3, 0, 1]", format!("{:?}", values));
Ok(())
}
// INT96 timestamps are decoded as nanosecond-precision Arrow timestamps.
#[test]
fn read_i96_alltypes_plain_parquet() -> Result<()> {
let table = load_table("alltypes_plain.parquet")?;
let projection = Some(vec![10]);
let batch = get_first_batch(table, &projection)?;
assert_eq!(1, batch.num_columns());
assert_eq!(8, batch.num_rows());
let array = batch
.column(0)
.as_any()
.downcast_ref::<TimestampNanosecondArray>()
.unwrap();
let mut values: Vec<i64> = vec![];
for i in 0..batch.num_rows() {
values.push(array.value(i));
}
assert_eq!("[1235865600000000000, 1235865660000000000, 1238544000000000000, 1238544060000000000, 1233446400000000000, 1233446460000000000, 1230768000000000000, 1230768060000000000]", format!("{:?}", values));
Ok(())
}
#[test]
fn read_f32_alltypes_plain_parquet() -> Result<()> {
let table = load_table("alltypes_plain.parquet")?;
let projection = Some(vec![6]);
let batch = get_first_batch(table, &projection)?;
assert_eq!(1, batch.num_columns());
assert_eq!(8, batch.num_rows());
let array = batch
.column(0)
.as_any()
.downcast_ref::<Float32Array>()
.unwrap();
let mut values: Vec<f32> = vec![];
for i in 0..batch.num_rows() {
values.push(array.value(i));
}
assert_eq!(
"[0.0, 1.1, 0.0, 1.1, 0.0, 1.1, 0.0, 1.1]",
format!("{:?}", values)
);
Ok(())
}
#[test]
fn read_f64_alltypes_plain_parquet() -> Result<()> {
let table = load_table("alltypes_plain.parquet")?;
let projection = Some(vec![7]);
let batch = get_first_batch(table, &projection)?;
assert_eq!(1, batch.num_columns());
assert_eq!(8, batch.num_rows());
let array = batch
.column(0)
.as_any()
.downcast_ref::<Float64Array>()
.unwrap();
let mut values: Vec<f64> = vec![];
for i in 0..batch.num_rows() {
values.push(array.value(i));
}
assert_eq!(
"[0.0, 10.1, 0.0, 10.1, 0.0, 10.1, 0.0, 10.1]",
format!("{:?}", values)
);
Ok(())
}
#[test]
fn read_binary_alltypes_plain_parquet() -> Result<()> {
let table = load_table("alltypes_plain.parquet")?;
let projection = Some(vec![9]);
let batch = get_first_batch(table, &projection)?;
assert_eq!(1, batch.num_columns());
assert_eq!(8, batch.num_rows());
let array = batch
.column(0)
.as_any()
.downcast_ref::<BinaryArray>()
.unwrap();
let mut values: Vec<&str> = vec![];
for i in 0..batch.num_rows() {
values.push(std::str::from_utf8(array.value(i)).unwrap());
}
assert_eq!(
"[\"0\", \"1\", \"0\", \"1\", \"0\", \"1\", \"0\", \"1\"]",
format!("{:?}", values)
);
Ok(())
}
// Helper: opens fixture `name` from $PARQUET_TEST_DATA as a boxed TableProvider.
fn load_table(name: &str) -> Result<Box<dyn TableProvider>> {
let testdata =
env::var("PARQUET_TEST_DATA").expect("PARQUET_TEST_DATA not defined");
let filename = format!("{}/{}", testdata, name);
let table = ParquetTable::try_new(&filename)?;
Ok(Box::new(table))
}
// Helper: scans `table` with `projection` and returns the first record batch
// (panics if the scan yields none).
fn get_first_batch(
table: Box<dyn TableProvider>,
projection: &Option<Vec<usize>>,
) -> Result<RecordBatch> {
let exec = table.scan(projection, 1024)?;
let it = exec.execute(0)?;
let mut it = it.lock().expect("failed to lock mutex");
Ok(it
.next_batch()?
.expect("should have received at least one batch"))
}
}
| 29.26045 | 216 | 0.545714 |
ef96676753ca13f177bb91dc33da356d5c5d191d | 3,150 | // Copyright 2020 The Druid Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::io::{ErrorKind, Result};
use std::path::{Path, PathBuf};
use std::{env, fs};
/// Generates the HTML scaffolding for the wasm example: one page per example
/// under `<crate>/html/` plus an `index.html` in the crate root linking to it.
///
/// # Errors
/// Returns an I/O error if `index.html` cannot be written; other filesystem
/// failures (creating `html/`, writing the example page) panic with a
/// descriptive message.
fn main() -> Result<()> {
    // Crate root, provided by cargo when this runs as a build script.
    let crate_dir = PathBuf::from(&env::var("CARGO_MANIFEST_DIR").unwrap());
    // NOTE: the original also computed `src_dir`, `examples_dir` and
    // `parent_dir` but never used them (leftover scaffolding) — removed.
    // Generate example module and the necessary html documents.
    let mut index_html = r#"<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Druid web examples</title>
</head>
<body>
<h1>Druid web examples</h1>
<ul>
"#
    .to_string();
    let example_str = "rust_hdl_pcb_schematic_layout_tool";
    // Add an entry to the index.html file.
    let index_entry = format!(
        "<li><a href=\"./html/{name}.html\">{name}</a></li>",
        name = example_str
    );
    index_html.push_str(&index_entry);
    // Create an html document for each example.
    let html = format!(
        r#"
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Druid web examples - {name}</title>
<style>
html, body, canvas {{
margin: 0px;
padding: 0px;
width: 100%;
height: 100%;
overflow: hidden;
}}
</style>
</head>
<body>
<noscript>This page contains WebAssembly and JavaScript content, please enable JavaScript in your browser.</noscript>
<canvas id="canvas"></canvas>
<script type="module">
import init, {{ {name} }} from '../pkg/rust_hdl_pcb_schematic_layout_tool.js';
async function run() {{
await init();
{name}();
}}
run();
</script>
</body>
</html>"#,
        // `format!` accepts any Display value; the redundant `.to_string()`
        // the original applied here allocated for nothing.
        name = example_str
    );
    // Write out the html file into a designated html directory located in crate root.
    let html_dir = crate_dir.join("html");
    if !html_dir.exists() {
        fs::create_dir(&html_dir)
            .unwrap_or_else(|_| panic!("Failed to create output html directory: {:?}", &html_dir));
    }
    fs::write(html_dir.join(example_str).with_extension("html"), html)
        .unwrap_or_else(|_| panic!("Failed to create {}.html", example_str));
    index_html.push_str("</ul></body></html>");
    // Write out the index.html file
    fs::write(crate_dir.join("index.html"), index_html)?;
    Ok(())
}
| 29.716981 | 125 | 0.593333 |
d5d10e2945e09cb024b26782778d8965bb6f8f9a | 3,300 | #![allow(
dead_code,
non_snake_case,
non_camel_case_types,
non_upper_case_globals
)]
// NOTE(review): bindgen-generated FFI bindings (link names are Itanium-mangled
// C++ symbols). Regenerate from the upstream header rather than hand-editing.
#[allow(non_snake_case, non_camel_case_types, non_upper_case_globals)]
pub mod root {
#[allow(unused_imports)]
use self::super::root;
extern "C" {
#[link_name = "\u{1}_Z9top_levelv"]
pub fn top_level();
}
// Mirrors the C++ namespace `whatever`.
pub mod whatever {
#[allow(unused_imports)]
use self::super::super::root;
pub type whatever_int_t = ::std::os::raw::c_int;
extern "C" {
#[link_name = "\u{1}_ZN8whatever11in_whateverEv"]
pub fn in_whatever();
}
}
// Anonymous/unnameable C++ namespace; bindgen assigns a synthetic module name.
pub mod _bindgen_mod_id_17 {
#[allow(unused_imports)]
use self::super::super::root;
#[repr(C)]
#[derive(Debug, Default, Copy, Clone)]
pub struct A {
pub b: root::whatever::whatever_int_t,
}
// Compile-time layout check: size/alignment/field offsets must match C++.
#[test]
fn bindgen_test_layout_A() {
assert_eq!(
::std::mem::size_of::<A>(),
4usize,
concat!("Size of: ", stringify!(A))
);
assert_eq!(
::std::mem::align_of::<A>(),
4usize,
concat!("Alignment of ", stringify!(A))
);
assert_eq!(
unsafe { &(*(::std::ptr::null::<A>())).b as *const _ as usize },
0usize,
concat!(
"Offset of field: ",
stringify!(A),
"::",
stringify!(b)
)
);
}
}
#[repr(C)]
#[derive(Debug)]
pub struct C<T> {
pub _base: root::_bindgen_mod_id_17::A,
pub m_c: T,
pub m_c_ptr: *mut T,
pub m_c_arr: [T; 10usize],
// Marks ownership of `T` for variance/drop-check purposes.
pub _phantom_0: ::std::marker::PhantomData<::std::cell::UnsafeCell<T>>,
}
impl<T> Default for C<T> {
fn default() -> Self {
unsafe { ::std::mem::zeroed() }
}
}
pub mod w {
#[allow(unused_imports)]
use self::super::super::root;
// Note: distinct from `whatever::whatever_int_t` (unsigned here, signed there).
pub type whatever_int_t = ::std::os::raw::c_uint;
#[repr(C)]
#[derive(Debug)]
pub struct D<T> {
pub m_c: root::C<T>,
pub _phantom_0:
::std::marker::PhantomData<::std::cell::UnsafeCell<T>>,
}
impl<T> Default for D<T> {
fn default() -> Self {
unsafe { ::std::mem::zeroed() }
}
}
extern "C" {
#[link_name = "\u{1}_ZN1w3hehEv"]
pub fn heh() -> root::w::whatever_int_t;
}
extern "C" {
#[link_name = "\u{1}_ZN1w3fooEv"]
pub fn foo() -> root::C<::std::os::raw::c_int>;
}
extern "C" {
#[link_name = "\u{1}_ZN1w4barrEv"]
pub fn barr() -> root::C<f32>;
}
}
pub mod foobar {
#[allow(unused_imports)]
use self::super::super::root;
extern "C" {
#[link_name = "\u{1}_ZN6foobar3fooEv"]
pub fn foo();
}
}
pub mod faraway {
#[allow(unused_imports)]
use self::super::super::root;
extern "C" {
#[link_name = "\u{1}_ZN7faraway3barEv"]
pub fn bar();
}
}
}
| 28.205128 | 80 | 0.452121 |
d52b3ca6df059463062b8af1ff912a940db16a28 | 141 | use std::vec::Vec;
/// Minimal abstraction over a byte-oriented key/value store.
pub trait Storage {
/// Returns the value stored under `key`, or `None` if absent.
fn get(&self, key: &[u8]) -> Option<Vec<u8>>;
/// Stores `value` under `key`; implementations decide overwrite semantics
/// (presumably replace-on-conflict — confirm against implementors).
fn set(&mut self, key: &[u8], value: &[u8]);
}
| 20.142857 | 49 | 0.539007 |
39435da3922421ac22a143344cda31fec0e102fb | 5,266 | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
execution_correctness::ExecutionCorrectness,
local::{LocalClient, LocalService},
process::ProcessService,
remote_service::RemoteService,
serializer::{SerializerClient, SerializerService},
thread::ThreadService,
};
use aptos_config::config::{ExecutionCorrectnessService, NodeConfig};
use aptos_crypto::ed25519::Ed25519PrivateKey;
use aptos_global_constants::EXECUTION_KEY;
use aptos_secure_storage::{CryptoStorage, Storage};
use aptos_vm::AptosVM;
use executor::block_executor::BlockExecutor;
use std::{convert::TryInto, net::SocketAddr, sync::Arc};
use storage_client::StorageClient;
use storage_interface::DbReaderWriter;
/// Resolves the execution private key from the node's secure-storage backend.
///
/// When a test configuration is present, its execution key is first imported
/// into secure storage under `EXECUTION_KEY`. Returns `None` when the node is
/// not configured to sign vote proposals.
///
/// # Panics
/// Panics if the storage backend cannot be initialized, the test config lacks
/// an execution key, or the key import/export fails.
pub fn extract_execution_prikey(config: &NodeConfig) -> Option<Ed25519PrivateKey> {
    let mut storage: Storage = (&config.execution.backend)
        .try_into()
        .expect("Unable to initialize storage");
    // Test configs carry their own execution key; seed secure storage with it.
    if let Some(test_config) = config.test.as_ref() {
        let key = test_config
            .execution_key
            .as_ref()
            .expect("Missing execution key in test config")
            .private_key();
        storage
            .import_private_key(EXECUTION_KEY, key)
            .expect("Unable to insert execution key");
    }
    if !config.execution.sign_vote_proposal {
        return None;
    }
    Some(
        storage
            .export_private_key(EXECUTION_KEY)
            .expect("Missing execution_private_key in secure storage"),
    )
}
// Internal sum type holding whichever service flavor was configured; each
// variant corresponds to one `ExecutionCorrectnessService` config value.
enum ExecutionCorrectnessWrapper {
Local(Arc<LocalService>),
Process(ProcessService),
Serializer(Arc<SerializerService>),
Thread(ThreadService),
}
/// Owns the configured execution-correctness service and hands out client
/// handles to it via [`ExecutionCorrectnessManager::client`].
pub struct ExecutionCorrectnessManager {
internal_execution_correctness: ExecutionCorrectnessWrapper,
}
impl ExecutionCorrectnessManager {
/// Builds the manager variant selected by `config.execution.service`,
/// extracting the execution private key from secure storage when needed.
pub fn new(config: &NodeConfig, local_db: DbReaderWriter) -> Self {
// The process-backed service needs no local key/storage setup, so handle
// it before the key extraction below.
if let ExecutionCorrectnessService::Process(remote_service) = &config.execution.service {
return Self::new_process(
remote_service.server_address,
config.execution.network_timeout_ms,
);
}
let execution_prikey = extract_execution_prikey(config);
let storage_address = config.storage.address;
let timeout_ms = config.storage.timeout_ms;
match &config.execution.service {
ExecutionCorrectnessService::Local => Self::new_local(local_db, execution_prikey),
ExecutionCorrectnessService::Serializer => {
Self::new_serializer(storage_address, execution_prikey, timeout_ms)
}
ExecutionCorrectnessService::Thread => {
Self::new_thread(storage_address, execution_prikey, timeout_ms)
}
// `Process` already returned above; any new config variant must be
// wired up here explicitly.
_ => unreachable!(
"Unimplemented ExecutionCorrectnessService: {:?}",
config.execution.service
),
}
}
/// In-process service executing directly against the given database.
pub fn new_local(db: DbReaderWriter, execution_prikey: Option<Ed25519PrivateKey>) -> Self {
let block_executor = Box::new(BlockExecutor::<AptosVM>::new(db));
Self {
internal_execution_correctness: ExecutionCorrectnessWrapper::Local(Arc::new(
LocalService::new(block_executor, execution_prikey),
)),
}
}
/// Client for an execution-correctness service in a separate process at
/// `server_addr`.
pub fn new_process(server_addr: SocketAddr, network_timeout: u64) -> Self {
let process_service = ProcessService::new(server_addr, network_timeout);
Self {
internal_execution_correctness: ExecutionCorrectnessWrapper::Process(process_service),
}
}
/// `SerializerService`-backed variant over a remote `StorageClient`.
pub fn new_serializer(
storage_address: SocketAddr,
execution_prikey: Option<Ed25519PrivateKey>,
timeout: u64,
) -> Self {
let block_executor = Box::new(BlockExecutor::<AptosVM>::new(DbReaderWriter::new(
StorageClient::new(&storage_address, timeout),
)));
let serializer_service = SerializerService::new(block_executor, execution_prikey);
Self {
internal_execution_correctness: ExecutionCorrectnessWrapper::Serializer(Arc::new(
serializer_service,
)),
}
}
/// `ThreadService`-backed variant using remote storage at `storage_address`.
pub fn new_thread(
storage_address: SocketAddr,
execution_prikey: Option<Ed25519PrivateKey>,
network_timeout: u64,
) -> Self {
let thread = ThreadService::new(storage_address, execution_prikey, network_timeout);
Self {
internal_execution_correctness: ExecutionCorrectnessWrapper::Thread(thread),
}
}
/// Returns a boxed client handle appropriate for the wrapped service variant.
pub fn client(&self) -> Box<dyn ExecutionCorrectness + Send + Sync> {
match &self.internal_execution_correctness {
ExecutionCorrectnessWrapper::Local(local_service) => {
Box::new(LocalClient::new(local_service.clone()))
}
ExecutionCorrectnessWrapper::Process(process) => Box::new(process.client()),
ExecutionCorrectnessWrapper::Serializer(serializer_service) => {
Box::new(SerializerClient::new(serializer_service.clone()))
}
ExecutionCorrectnessWrapper::Thread(thread) => Box::new(thread.client()),
}
}
}
| 37.084507 | 98 | 0.658185 |
489dab4a38712fbfc78a98b542f4e50a6f44976b | 791 | //! Generates unfiltered subpixel RGB output for Fontdue.
use std::fs::File;
use std::io::Write;
// Scratch pad for glyphs: ⅞ g
const CHARACTER: char = 'g';
const SIZE: f32 = 12.0;
// cargo run --example rgb --release
/// Rasterizes `CHARACTER` at `SIZE` px with subpixel (RGB) anti-aliasing and
/// writes the result as a binary PPM (`P6`) image to `rgb.ppm`.
///
/// # Panics
/// Panics if the bundled font fails to parse or the output file cannot be
/// created or written.
pub fn main() {
    // Loading and rasterization
    let font = include_bytes!("../resources/fonts/Roboto-Regular.ttf") as &[u8];
    let settings = fontdue::FontSettings {
        scale: SIZE,
        ..fontdue::FontSettings::default()
    };
    let font = fontdue::Font::from_bytes(font, settings).unwrap();
    let (metrics, bitmap) = font.rasterize_subpixel(CHARACTER, SIZE);
    // Output
    // BUG FIX: `Write::write` may perform a partial write and its Result was
    // being discarded (`let _ =`), silently producing truncated/corrupt files.
    // `write_all` writes the entire buffer and we surface any I/O error.
    let mut o = File::create("rgb.ppm").expect("failed to create rgb.ppm");
    o.write_all(format!("P6\n{} {}\n255\n", metrics.width, metrics.height).as_bytes())
        .expect("failed to write PPM header");
    o.write_all(&bitmap).expect("failed to write PPM pixel data");
}
| 30.423077 | 91 | 0.640961 |
093b824701fa4c9ec50b45bec5fc65aeecd9305d | 46,624 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Handles translation of callees as well as other call-related
//! things. Callees are a superset of normal rust values and sometimes
//! have different representations. In particular, top-level fn items
//! and methods are represented as just a fn ptr and not a full
//! closure.
pub use self::AutorefArg::*;
pub use self::CalleeData::*;
pub use self::CallArgs::*;
use arena::TypedArena;
use back::link;
use session;
use llvm::{self, ValueRef, get_params};
use metadata::csearch;
use middle::def;
use middle::subst;
use middle::subst::{Subst, Substs};
use trans::adt;
use trans::base;
use trans::base::*;
use trans::build::*;
use trans::callee;
use trans::cleanup;
use trans::cleanup::CleanupMethods;
use trans::closure;
use trans::common::{self, Block, Result, NodeIdAndSpan, ExprId, CrateContext,
ExprOrMethodCall, FunctionContext, MethodCallKey};
use trans::consts;
use trans::datum::*;
use trans::debuginfo::{DebugLoc, ToDebugLoc};
use trans::declare;
use trans::expr;
use trans::glue;
use trans::inline;
use trans::foreign;
use trans::intrinsic;
use trans::meth;
use trans::monomorphize;
use trans::type_::Type;
use trans::type_of;
use middle::ty::{self, Ty};
use middle::ty::MethodCall;
use rustc::ast_map;
use syntax::abi as synabi;
use syntax::ast;
use syntax::ptr::P;
/// LLVM values needed to call a trait method: the fn pointer plus a `self`
/// value (presumably the receiver passed as first argument — confirm at the
/// `TraitItem` call sites).
#[derive(Copy, Clone)]
pub struct MethodData {
pub llfn: ValueRef,
pub llself: ValueRef,
}
/// The different representations a callee can take; see the module docs —
/// callees are a superset of normal Rust values.
pub enum CalleeData<'tcx> {
// Constructor for enum variant/tuple-like-struct
// i.e. Some, Ok
NamedTupleConstructor(subst::Substs<'tcx>, ty::Disr),
// Represents a (possibly monomorphized) top-level fn item or method
// item. Note that this is just the fn-ptr and is not a Rust closure
// value (which is a pair).
Fn(/* llfn */ ValueRef),
// A rust-intrinsic call, identified by the intrinsic's node id plus the
// substitutions at the call site.
Intrinsic(ast::NodeId, subst::Substs<'tcx>),
// A trait method resolved to a concrete fn pointer + self value.
TraitItem(MethodData)
}
/// A translated callee: the block in which translation continued after
/// evaluating it, together with its representation.
pub struct Callee<'blk, 'tcx: 'blk> {
pub bcx: Block<'blk, 'tcx>,
pub data: CalleeData<'tcx>,
}
/// Translates the callee of a call expression into a `Callee`, dispatching on
/// whether the expression is a path (fn item, variant constructor, method,
/// intrinsic, ...) or an arbitrary expression evaluating to a callable value.
fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr)
-> Callee<'blk, 'tcx> {
let _icx = push_ctxt("trans_callee");
debug!("callee::trans(expr={:?})", expr);
// pick out special kinds of expressions that can be called:
match expr.node {
ast::ExprPath(..) => {
return trans_def(bcx, bcx.def(expr.id), expr);
}
_ => {}
}
// any other expressions are closures:
return datum_callee(bcx, expr);
// Fallback: evaluate `expr` as a value; only bare-fn-typed values are
// accepted here, anything else is a compiler bug at this stage.
fn datum_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr)
-> Callee<'blk, 'tcx> {
let DatumBlock { bcx, datum, .. } = expr::trans(bcx, expr);
match datum.ty.sty {
ty::TyBareFn(..) => {
let llval = datum.to_llscalarish(bcx);
return Callee {
bcx: bcx,
data: Fn(llval),
};
}
_ => {
bcx.tcx().sess.span_bug(
expr.span,
&format!("type of callee is neither bare-fn nor closure: {}",
datum.ty));
}
}
}
// Wraps an already-translated fn pointer in a `Callee`.
fn fn_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, llfn: ValueRef)
-> Callee<'blk, 'tcx> {
return Callee {
bcx: bcx,
data: Fn(llfn),
};
}
// Handles path expressions by dispatching on what the path resolves to.
fn trans_def<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
def: def::Def,
ref_expr: &ast::Expr)
-> Callee<'blk, 'tcx> {
debug!("trans_def(def={:?}, ref_expr={:?})", def, ref_expr);
let expr_ty = common::node_id_type(bcx, ref_expr.id);
match def {
// A fn item that is actually a tuple-struct constructor.
def::DefFn(did, _) if {
let maybe_def_id = inline::get_local_instance(bcx.ccx(), did);
let maybe_ast_node = maybe_def_id.and_then(|def_id| bcx.tcx().map
.find(def_id.node));
match maybe_ast_node {
Some(ast_map::NodeStructCtor(_)) => true,
_ => false
}
} => {
let substs = common::node_id_substs(bcx.ccx(),
ExprId(ref_expr.id),
bcx.fcx.param_substs);
Callee {
bcx: bcx,
data: NamedTupleConstructor(substs, 0)
}
}
// A fn item whose ABI marks it as a rust intrinsic.
def::DefFn(did, _) if match expr_ty.sty {
ty::TyBareFn(_, ref f) => f.abi == synabi::RustIntrinsic,
_ => false
} => {
let substs = common::node_id_substs(bcx.ccx(),
ExprId(ref_expr.id),
bcx.fcx.param_substs);
let def_id = inline::maybe_instantiate_inline(bcx.ccx(), did);
Callee { bcx: bcx, data: Intrinsic(def_id.node, substs) }
}
// Ordinary fn items and inherent-impl methods become plain fn pointers.
def::DefFn(did, _) | def::DefMethod(did, def::FromImpl(_)) => {
fn_callee(bcx, trans_fn_ref(bcx.ccx(), did, ExprId(ref_expr.id),
bcx.fcx.param_substs).val)
}
// Statically-dispatched trait methods.
def::DefMethod(meth_did, def::FromTrait(trait_did)) => {
fn_callee(bcx, meth::trans_static_method_callee(bcx.ccx(),
meth_did,
trait_did,
ref_expr.id,
bcx.fcx.param_substs).val)
}
def::DefVariant(tid, vid, _) => {
let vinfo = ty::enum_variant_with_id(bcx.tcx(), tid, vid);
let substs = common::node_id_substs(bcx.ccx(),
ExprId(ref_expr.id),
bcx.fcx.param_substs);
// Nullary variants are not callable
assert!(!vinfo.args.is_empty());
Callee {
bcx: bcx,
data: NamedTupleConstructor(substs, vinfo.disr_val)
}
}
def::DefStruct(_) => {
let substs = common::node_id_substs(bcx.ccx(),
ExprId(ref_expr.id),
bcx.fcx.param_substs);
Callee {
bcx: bcx,
data: NamedTupleConstructor(substs, 0)
}
}
// Value-like defs: evaluate as an ordinary expression.
def::DefStatic(..) |
def::DefConst(..) |
def::DefAssociatedConst(..) |
def::DefLocal(..) |
def::DefUpvar(..) => {
datum_callee(bcx, ref_expr)
}
// Everything else can never be called; reaching here is a bug.
def::DefMod(..) | def::DefForeignMod(..) | def::DefTrait(..) |
def::DefTy(..) | def::DefPrimTy(..) | def::DefAssociatedTy(..) |
def::DefUse(..) | def::DefRegion(..) | def::DefLabel(..) |
def::DefTyParam(..) | def::DefSelfTy(..) => {
bcx.tcx().sess.span_bug(
ref_expr.span,
&format!("cannot translate def {:?} \
to a callable thing!", def));
}
}
}
}
/// Translates a reference (with id `ref_id`) to the fn/method with id `def_id` into a function
/// pointer. This may require monomorphization or inlining.
///
/// * `ccx` - crate translation context
/// * `def_id` - def id of the fn or method item being referenced
/// * `node` - id of the referencing expression or method call, used to look
///   up the substitutions at the reference site
/// * `param_substs` - substitutions in force at the reference site, when it
///   sits inside a polymorphic function
pub fn trans_fn_ref<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
def_id: ast::DefId,
node: ExprOrMethodCall,
param_substs: &'tcx subst::Substs<'tcx>)
-> Datum<'tcx, Rvalue> {
let _icx = push_ctxt("trans_fn_ref");
// Resolve the substitutions for this particular reference, then delegate.
let substs = common::node_id_substs(ccx, node, param_substs);
debug!("trans_fn_ref(def_id={:?}, node={:?}, substs={:?})",
def_id,
node,
substs);
trans_fn_ref_with_substs(ccx, def_id, node, param_substs, substs)
}
/// Convenience wrapper: translates the fn/method `def_id` (referenced at
/// `ref_id`, with explicit `substs`) and packages the resulting fn pointer
/// as a `Callee` in block `bcx`.
fn trans_fn_ref_with_substs_to_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
def_id: ast::DefId,
ref_id: ast::NodeId,
substs: subst::Substs<'tcx>)
-> Callee<'blk, 'tcx> {
Callee {
bcx: bcx,
data: Fn(trans_fn_ref_with_substs(bcx.ccx(),
def_id,
ExprId(ref_id),
bcx.fcx.param_substs,
substs).val),
}
}
/// Translates an adapter that implements the `Fn` trait for a fn
/// pointer. This is basically the equivalent of something like:
///
/// ```
/// impl<'a> Fn(&'a int) -> &'a int for fn(&int) -> &int {
///     extern "rust-abi" fn call(&self, args: (&'a int,)) -> &'a int {
///         (*self)(args.0)
///     }
/// }
/// ```
///
/// but for the bare function type given.
///
/// Shims are cached per (normalized) fn type in `ccx.fn_pointer_shims()`.
pub fn trans_fn_pointer_shim<'a, 'tcx>(
ccx: &'a CrateContext<'a, 'tcx>,
closure_kind: ty::ClosureKind,
bare_fn_ty: Ty<'tcx>)
-> ValueRef
{
let _icx = push_ctxt("trans_fn_pointer_shim");
let tcx = ccx.tcx();
// Normalize the type for better caching.
let bare_fn_ty = common::erase_regions(tcx, &bare_fn_ty);
// If this is an impl of `Fn` or `FnMut` trait, the receiver is `&self`.
let is_by_ref = match closure_kind {
ty::FnClosureKind | ty::FnMutClosureKind => true,
ty::FnOnceClosureKind => false,
};
let bare_fn_ty_maybe_ref = if is_by_ref {
ty::mk_imm_rptr(tcx, tcx.mk_region(ty::ReStatic), bare_fn_ty)
} else {
bare_fn_ty
};
// Check if we already trans'd this shim.
match ccx.fn_pointer_shims().borrow().get(&bare_fn_ty_maybe_ref) {
Some(&llval) => { return llval; }
None => { }
}
debug!("trans_fn_pointer_shim(bare_fn_ty={:?})",
bare_fn_ty);
// Construct the "tuply" version of `bare_fn_ty`. It takes two arguments: `self`,
// which is the fn pointer, and `args`, which is the arguments tuple.
// Only safe, Rust-ABI fn pointers can be shimmed; anything else is a bug.
let (opt_def_id, sig) =
match bare_fn_ty.sty {
ty::TyBareFn(opt_def_id,
&ty::BareFnTy { unsafety: ast::Unsafety::Normal,
abi: synabi::Rust,
ref sig }) => {
(opt_def_id, sig)
}
_ => {
tcx.sess.bug(&format!("trans_fn_pointer_shim invoked on invalid type: {}",
bare_fn_ty));
}
};
let sig = ty::erase_late_bound_regions(tcx, sig);
let tuple_input_ty = ty::mk_tup(tcx, sig.inputs.to_vec());
// The shim itself uses the rust-call ABI: (self, args-tuple) -> output.
let tuple_fn_ty = ty::mk_bare_fn(tcx,
opt_def_id,
tcx.mk_bare_fn(ty::BareFnTy {
unsafety: ast::Unsafety::Normal,
abi: synabi::RustCall,
sig: ty::Binder(ty::FnSig {
inputs: vec![bare_fn_ty_maybe_ref,
tuple_input_ty],
output: sig.output,
variadic: false
})}));
debug!("tuple_fn_ty: {:?}", tuple_fn_ty);
// Declare the shim under a freshly mangled internal name.
let function_name = link::mangle_internal_name_by_type_and_seq(ccx, bare_fn_ty,
"fn_pointer_shim");
let llfn = declare::declare_internal_rust_fn(ccx, &function_name[..], tuple_fn_ty);
// Build the shim body: unwrap the fn pointer and forward the tuple's elements.
let empty_substs = tcx.mk_substs(Substs::trans_empty());
let (block_arena, fcx): (TypedArena<_>, FunctionContext);
block_arena = TypedArena::new();
fcx = new_fn_ctxt(ccx,
llfn,
ast::DUMMY_NODE_ID,
false,
sig.output,
empty_substs,
None,
&block_arena);
let mut bcx = init_function(&fcx, false, sig.output);
let llargs = get_params(fcx.llfn);
let self_idx = fcx.arg_offset();
// the first argument (`self`) will be a ptr to the fn pointer
let llfnpointer = if is_by_ref {
Load(bcx, llargs[self_idx])
} else {
llargs[self_idx]
};
assert!(!fcx.needs_ret_allocas);
let dest = fcx.llretslotptr.get().map(|_|
expr::SaveIn(fcx.get_ret_slot(bcx, sig.output, "ret_slot"))
);
// Forward the remaining arguments (the destructured tuple) to the pointee.
bcx = trans_call_inner(bcx,
DebugLoc::None,
bare_fn_ty,
|bcx, _| Callee { bcx: bcx, data: Fn(llfnpointer) },
ArgVals(&llargs[(self_idx + 1)..]),
dest).bcx;
finish_fn(&fcx, bcx, sig.output, DebugLoc::None);
// Cache the shim so subsequent requests for this fn type reuse it.
ccx.fn_pointer_shims().borrow_mut().insert(bare_fn_ty_maybe_ref, llfn);
llfn
}
/// Translates a reference to a fn/method item, monomorphizing and
/// inlining as it goes.
///
/// # Parameters
///
/// - `ccx`: the crate context
/// Translates a reference to a fn or method item into a `Datum` holding the
/// (possibly monomorphized, inlined and bitcast) function pointer.
///
/// - `ccx`: the crate context
/// - `def_id`: def id of the fn or method item being referenced
/// - `node`: node id of the reference to the fn/method, if applicable.
///   This parameter may be zero; but, if so, the resulting value may not
///   have the right type, so it must be cast before being used.
/// - `param_substs`: if the `node` is in a polymorphic function, these
///   are the substitutions required to monomorphize its type
/// - `substs`: values for each of the fn/method's parameters
pub fn trans_fn_ref_with_substs<'a, 'tcx>(
    ccx: &CrateContext<'a, 'tcx>,
    def_id: ast::DefId,
    node: ExprOrMethodCall,
    param_substs: &'tcx subst::Substs<'tcx>,
    substs: subst::Substs<'tcx>)
    -> Datum<'tcx, Rvalue>
{
    let _icx = push_ctxt("trans_fn_ref_with_substs");
    let tcx = ccx.tcx();
    debug!("trans_fn_ref_with_substs(def_id={:?}, node={:?}, \
            param_substs={:?}, substs={:?})",
           def_id,
           node,
           param_substs,
           substs);
    // By this point the substitutions must be fully resolved: no inference
    // variables and no escaping (late-bound) regions are allowed.
    assert!(substs.types.all(|t| !ty::type_needs_infer(*t)));
    assert!(substs.types.all(|t| !ty::type_has_escaping_regions(*t)));
    let substs = substs.erase_regions();
    // Load the info for the appropriate trait if necessary.
    match ty::trait_of_item(tcx, def_id) {
        None => {}
        Some(trait_id) => {
            ty::populate_implementations_for_trait_if_necessary(tcx, trait_id)
        }
    }
    // We need to do a bunch of special handling for default methods.
    // We need to modify the def_id and our substs in order to monomorphize
    // the function.
    let (is_default, def_id, substs) = match ty::provided_source(tcx, def_id) {
        None => {
            // Not a default method: keep def_id/substs as they are.
            (false, def_id, tcx.mk_substs(substs))
        }
        Some(source_id) => {
            // There are two relevant substitutions when compiling
            // default methods. First, there is the substitution for
            // the type parameters of the impl we are using and the
            // method we are calling. This substitution is the substs
            // argument we already have.
            // In order to compile a default method, though, we need
            // to consider another substitution: the substitution for
            // the type parameters on trait; the impl we are using
            // implements the trait at some particular type
            // parameters, and we need to substitute for those first.
            // So, what we need to do is find this substitution and
            // compose it with the one we already have.
            let impl_id = ty::impl_or_trait_item(tcx, def_id).container()
                                                             .id();
            let impl_or_trait_item = ty::impl_or_trait_item(tcx, source_id);
            match impl_or_trait_item {
                ty::MethodTraitItem(method) => {
                    let trait_ref = ty::impl_trait_ref(tcx, impl_id).unwrap();
                    // Compute the first substitution
                    let first_subst =
                        ty::make_substs_for_receiver_types(tcx, &trait_ref, &*method)
                        .erase_regions();
                    // And compose them
                    let new_substs = tcx.mk_substs(first_subst.subst(tcx, &substs));
                    debug!("trans_fn_with_vtables - default method: \
                            substs = {:?}, trait_subst = {:?}, \
                            first_subst = {:?}, new_subst = {:?}",
                           substs, trait_ref.substs,
                           first_subst, new_substs);
                    // Translate the trait's default body (source_id), not the
                    // impl's item, using the composed substitutions.
                    (true, source_id, new_substs)
                }
                _ => {
                    tcx.sess.bug("trans_fn_ref_with_vtables() tried \
                                  to translate a non-method?!")
                }
            }
        }
    };
    // If this is a closure, redirect to it.
    match closure::get_or_create_declaration_if_closure(ccx, def_id, substs) {
        None => {}
        Some(llfn) => return llfn,
    }
    // Check whether this fn has an inlined copy and, if so, redirect
    // def_id to the local id of the inlined copy.
    let def_id = inline::maybe_instantiate_inline(ccx, def_id);
    // We must monomorphise if the fn has type parameters, is a default method,
    // or is a named tuple constructor.
    let must_monomorphise = if !substs.types.is_empty() || is_default {
        true
    } else if def_id.krate == ast::LOCAL_CRATE {
        let map_node = session::expect(
            ccx.sess(),
            tcx.map.find(def_id.node),
            || "local item should be in ast map".to_string());
        match map_node {
            // Tuple-like enum variants with fields act as constructors and
            // must be monomorphised; unit-like variants do not.
            ast_map::NodeVariant(v) => match v.node.kind {
                ast::TupleVariantKind(ref args) => !args.is_empty(),
                _ => false
            },
            ast_map::NodeStructCtor(_) => true,
            _ => false
        }
    } else {
        false
    };
    debug!("trans_fn_ref_with_substs({:?}) must_monomorphise: {}",
           def_id, must_monomorphise);
    // Create a monomorphic version of generic functions
    if must_monomorphise {
        // Should be either intra-crate or inlined.
        assert_eq!(def_id.krate, ast::LOCAL_CRATE);
        // A node id of zero means "no usable reference site" (see the doc
        // comment above), so no ref id is passed to monomorphization.
        let opt_ref_id = match node {
            ExprId(id) => if id != 0 { Some(id) } else { None },
            MethodCallKey(_) => None,
        };
        let (val, fn_ty, must_cast) =
            monomorphize::monomorphic_fn(ccx, def_id, substs, opt_ref_id);
        if must_cast && node != ExprId(0) {
            // Monotype of the REFERENCE to the function (type params
            // are subst'd)
            let ref_ty = match node {
                ExprId(id) => ty::node_id_to_type(tcx, id),
                MethodCallKey(method_call) => {
                    tcx.method_map.borrow().get(&method_call).unwrap().ty
                }
            };
            let ref_ty = monomorphize::apply_param_substs(tcx,
                                                          param_substs,
                                                          &ref_ty);
            let llptrty = type_of::type_of_fn_from_ty(ccx, ref_ty).ptr_to();
            if llptrty != common::val_ty(val) {
                let val = consts::ptrcast(val, llptrty);
                return Datum::new(val, ref_ty, Rvalue::new(ByValue));
            }
        }
        return Datum::new(val, fn_ty, Rvalue::new(ByValue));
    }
    // Type scheme of the function item (may have type params)
    let fn_type_scheme = ty::lookup_item_type(tcx, def_id);
    let fn_type = monomorphize::normalize_associated_type(tcx, &fn_type_scheme.ty);
    // Find the actual function pointer.
    let mut val = {
        if def_id.krate == ast::LOCAL_CRATE {
            // Internal reference.
            get_item_val(ccx, def_id.node)
        } else {
            // External reference.
            trans_external_path(ccx, def_id, fn_type)
        }
    };
    // This is subtle and surprising, but sometimes we have to bitcast
    // the resulting fn pointer.  The reason has to do with external
    // functions.  If you have two crates that both bind the same C
    // library, they may not use precisely the same types: for
    // example, they will probably each declare their own structs,
    // which are distinct types from LLVM's point of view (nominal
    // types).
    //
    // Now, if those two crates are linked into an application, and
    // they contain inlined code, you can wind up with a situation
    // where both of those functions wind up being loaded into this
    // application simultaneously. In that case, the same function
    // (from LLVM's point of view) requires two types. But of course
    // LLVM won't allow one function to have two types.
    //
    // What we currently do, therefore, is declare the function with
    // one of the two types (whichever happens to come first) and then
    // bitcast as needed when the function is referenced to make sure
    // it has the type we expect.
    //
    // This can occur on either a crate-local or crate-external
    // reference. It also occurs when testing libcore and in some
    // other weird situations. Annoying.
    let llty = type_of::type_of_fn_from_ty(ccx, fn_type);
    let llptrty = llty.ptr_to();
    if common::val_ty(val) != llptrty {
        debug!("trans_fn_ref_with_vtables(): casting pointer!");
        val = consts::ptrcast(val, llptrty);
    } else {
        debug!("trans_fn_ref_with_vtables(): not casting pointer!");
    }
    Datum::new(val, fn_type, Rvalue::new(ByValue))
}
// ______________________________________________________________________
// Translating calls
/// Translates a plain (non-method) function call expression.
///
/// `call_expr` is the whole call, `f` the callee expression, `args` the
/// argument list and `dest` the location the result is written into.
/// Returns the block in which translation continues.
pub fn trans_call<'a, 'blk, 'tcx>(in_cx: Block<'blk, 'tcx>,
                                  call_expr: &ast::Expr,
                                  f: &ast::Expr,
                                  args: CallArgs<'a, 'tcx>,
                                  dest: expr::Dest)
                                  -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("trans_call");
    // Evaluation order matches the original argument order: debug location
    // first, then the (adjusted) type of the callee expression.
    let debug_loc = call_expr.debug_loc();
    let callee_ty = common::expr_ty_adjusted(in_cx, f);
    let result = trans_call_inner(in_cx,
                                  debug_loc,
                                  callee_ty,
                                  |cx, _| trans(cx, f),
                                  args,
                                  Some(dest));
    result.bcx
}
/// Translates a method call expression (`rcvr.method(args)`).
///
/// Looks the callee's type up in typeck's method map; for calls through a
/// trait object the bare-fn signature is rewritten via
/// `meth::opaque_method_ty` so the (unknowable) self type stays opaque.
pub fn trans_method_call<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                         call_expr: &ast::Expr,
                                         rcvr: &ast::Expr,
                                         args: CallArgs<'a, 'tcx>,
                                         dest: expr::Dest)
                                         -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("trans_method_call");
    debug!("trans_method_call(call_expr={:?})", call_expr);
    let method_call = MethodCall::expr(call_expr.id);
    let method_ty = match bcx.tcx().method_map.borrow().get(&method_call) {
        Some(method) => match method.origin {
            ty::MethodTraitObject(_) => match method.ty.sty {
                ty::TyBareFn(_, ref fty) => {
                    ty::mk_bare_fn(bcx.tcx(), None, meth::opaque_method_ty(bcx.tcx(), fty))
                }
                _ => method.ty
            },
            _ => method.ty
        },
        // Typeck must have recorded an entry for every method call.
        None => panic!("method not found in trans_method_call")
    };
    trans_call_inner(
        bcx,
        call_expr.debug_loc(),
        // Monomorphize the callee type in the enclosing function's context.
        common::monomorphize_type(bcx, method_ty),
        |cx, arg_cleanup_scope| {
            meth::trans_method_callee(cx, method_call, Some(rcvr), arg_cleanup_scope)
        },
        args,
        Some(dest)).bcx
}
/// Translates a call to a lang item (`did`) with already-translated LLVM
/// argument values. Unlike ordinary calls, `dest` may be `None`, in which
/// case the returned `Result` carries the call's value directly.
pub fn trans_lang_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                   did: ast::DefId,
                                   args: &[ValueRef],
                                   dest: Option<expr::Dest>,
                                   debug_loc: DebugLoc)
                                   -> Result<'blk, 'tcx> {
    let tcx = bcx.tcx();
    // A lang item defined in another crate has its type in that crate's
    // metadata; a local one is looked up through the AST map.
    let fty = if did.krate != ast::LOCAL_CRATE {
        csearch::get_type(tcx, did).ty
    } else {
        ty::node_id_to_type(tcx, did.node)
    };
    callee::trans_call_inner(bcx,
                             debug_loc,
                             fty,
                             |callee_bcx, _| {
                                 // Lang items are never generic here, so an
                                 // empty substitution and a zero node id are
                                 // passed through.
                                 trans_fn_ref_with_substs_to_callee(
                                     callee_bcx,
                                     did,
                                     0,
                                     subst::Substs::trans_empty())
                             },
                             ArgVals(args),
                             dest)
}
/// This behemoth of a function translates function calls. Unfortunately, in order to generate more
/// efficient LLVM output at -O0, it has quite a complex signature (refactoring this into two
/// functions seems like a good idea).
///
/// In particular, for lang items, it is invoked with a dest of None, and in that case the return
/// value contains the result of the fn. The lang item must not return a structural type or else
/// all heck breaks loose.
///
/// For non-lang items, `dest` is always Some, and hence the result is written into memory
/// somewhere. Nonetheless we return the actual return value of the function.
pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
                                           debug_loc: DebugLoc,
                                           callee_ty: Ty<'tcx>,
                                           get_callee: F,
                                           args: CallArgs<'a, 'tcx>,
                                           dest: Option<expr::Dest>)
                                           -> Result<'blk, 'tcx> where
    F: FnOnce(Block<'blk, 'tcx>, cleanup::ScopeId) -> Callee<'blk, 'tcx>,
{
    // Introduce a temporary cleanup scope that will contain cleanups
    // for the arguments while they are being evaluated. The purpose
    // this cleanup is to ensure that, should a panic occur while
    // evaluating argument N, the values for arguments 0...N-1 are all
    // cleaned up. If no panic occurs, the values are handed off to
    // the callee, and hence none of the cleanups in this temporary
    // scope will ever execute.
    let fcx = bcx.fcx;
    let ccx = fcx.ccx;
    let arg_cleanup_scope = fcx.push_custom_cleanup_scope();
    let callee = get_callee(bcx, cleanup::CustomScope(arg_cleanup_scope));
    let mut bcx = callee.bcx;
    // Extract the ABI and (erased-region) return type from the callee's
    // bare-fn signature.
    let (abi, ret_ty) = match callee_ty.sty {
        ty::TyBareFn(_, ref f) => {
            let output = ty::erase_late_bound_regions(bcx.tcx(), &f.sig.output());
            (f.abi, output)
        }
        _ => panic!("expected bare rust fn or closure in trans_call_inner")
    };
    // Intrinsics and named tuple constructors return early below; plain fns
    // and trait-object methods fall through to the generic call path.
    let (llfn, llenv, llself) = match callee.data {
        Fn(llfn) => {
            (llfn, None, None)
        }
        TraitItem(d) => {
            (d.llfn, None, Some(d.llself))
        }
        Intrinsic(node, substs) => {
            assert!(abi == synabi::RustIntrinsic);
            assert!(dest.is_some());
            let call_info = match debug_loc {
                DebugLoc::At(id, span) => NodeIdAndSpan { id: id, span: span },
                DebugLoc::None => {
                    bcx.sess().bug("No call info for intrinsic call?")
                }
            };
            return intrinsic::trans_intrinsic_call(bcx, node, callee_ty,
                                                   arg_cleanup_scope, args,
                                                   dest.unwrap(), substs,
                                                   call_info);
        }
        NamedTupleConstructor(substs, disr) => {
            assert!(dest.is_some());
            fcx.pop_custom_cleanup_scope(arg_cleanup_scope);
            let ctor_ty = callee_ty.subst(bcx.tcx(), &substs);
            return base::trans_named_tuple_constructor(bcx,
                                                       ctor_ty,
                                                       disr,
                                                       args,
                                                       dest.unwrap(),
                                                       debug_loc);
        }
    };
    // Intrinsics should not become actual functions.
    // We trans them in place in `trans_intrinsic_call`
    assert!(abi != synabi::RustIntrinsic);
    let is_rust_fn = abi == synabi::Rust || abi == synabi::RustCall;
    // Generate a location to store the result. If the user does
    // not care about the result, just make a stack slot.
    let opt_llretslot = dest.and_then(|dest| match dest {
        expr::SaveIn(dst) => Some(dst),
        expr::Ignore => {
            let ret_ty = match ret_ty {
                ty::FnConverging(ret_ty) => ret_ty,
                ty::FnDiverging => ty::mk_nil(ccx.tcx())
            };
            if !is_rust_fn ||
              type_of::return_uses_outptr(ccx, ret_ty) ||
              bcx.fcx.type_needs_drop(ret_ty) {
                // Push the out-pointer if we use an out-pointer for this
                // return type, otherwise push "undef".
                if common::type_is_zero_size(ccx, ret_ty) {
                    let llty = type_of::type_of(ccx, ret_ty);
                    Some(common::C_undef(llty.ptr_to()))
                } else {
                    Some(alloc_ty(bcx, ret_ty, "__llret"))
                }
            } else {
                None
            }
        }
    });
    // Start with an undef result; the branches below overwrite it when the
    // value is returned directly rather than through the out-pointer.
    let mut llresult = unsafe {
        llvm::LLVMGetUndef(Type::nil(ccx).ptr_to().to_ref())
    };
    // The code below invokes the function, using either the Rust
    // conventions (if it is a rust fn) or the native conventions
    // (otherwise). The important part is that, when all is said
    // and done, either the return value of the function will have been
    // written in opt_llretslot (if it is Some) or `llresult` will be
    // set appropriately (otherwise).
    if is_rust_fn {
        let mut llargs = Vec::new();
        // If the return goes through an out-pointer, it is the first
        // (hidden) argument.
        if let (ty::FnConverging(ret_ty), Some(mut llretslot)) = (ret_ty, opt_llretslot) {
            if type_of::return_uses_outptr(ccx, ret_ty) {
                let llformal_ret_ty = type_of::type_of(ccx, ret_ty).ptr_to();
                let llret_ty = common::val_ty(llretslot);
                if llformal_ret_ty != llret_ty {
                    // this could happen due to e.g. subtyping
                    debug!("casting actual return type ({}) to match formal ({})",
                           bcx.llty_str(llret_ty), bcx.llty_str(llformal_ret_ty));
                    llretslot = PointerCast(bcx, llretslot, llformal_ret_ty);
                }
                llargs.push(llretslot);
            }
        }
        // Push the environment (or a trait object's self).
        match (llenv, llself) {
            (Some(llenv), None) => llargs.push(llenv),
            (None, Some(llself)) => llargs.push(llself),
            _ => {}
        }
        // Push the arguments.
        bcx = trans_args(bcx,
                         args,
                         callee_ty,
                         &mut llargs,
                         cleanup::CustomScope(arg_cleanup_scope),
                         llself.is_some(),
                         abi);
        fcx.scopes.borrow_mut().last_mut().unwrap().drop_non_lifetime_clean();
        // Invoke the actual rust fn and update bcx/llresult.
        let (llret, b) = base::invoke(bcx,
                                      llfn,
                                      &llargs[..],
                                      callee_ty,
                                      debug_loc);
        bcx = b;
        llresult = llret;
        // If the Rust convention for this type is return via
        // the return value, copy it into llretslot.
        match (opt_llretslot, ret_ty) {
            (Some(llretslot), ty::FnConverging(ret_ty)) => {
                if !type_of::return_uses_outptr(bcx.ccx(), ret_ty) &&
                    !common::type_is_zero_size(bcx.ccx(), ret_ty)
                {
                    store_ty(bcx, llret, llretslot, ret_ty)
                }
            }
            (_, _) => {}
        }
    } else {
        // Lang items are the only case where dest is None, and
        // they are always Rust fns.
        assert!(dest.is_some());
        let mut llargs = Vec::new();
        // Foreign calls need the caller-side argument types for the
        // native-ABI lowering; only expression arguments can supply them.
        let arg_tys = match args {
            ArgExprs(a) => a.iter().map(|x| common::expr_ty_adjusted(bcx, &**x)).collect(),
            _ => panic!("expected arg exprs.")
        };
        bcx = trans_args(bcx,
                         args,
                         callee_ty,
                         &mut llargs,
                         cleanup::CustomScope(arg_cleanup_scope),
                         false,
                         abi);
        fcx.scopes.borrow_mut().last_mut().unwrap().drop_non_lifetime_clean();
        bcx = foreign::trans_native_call(bcx,
                                         callee_ty,
                                         llfn,
                                         opt_llretslot.unwrap(),
                                         &llargs[..],
                                         arg_tys,
                                         debug_loc);
    }
    fcx.pop_and_trans_custom_cleanup_scope(bcx, arg_cleanup_scope);
    // If the caller doesn't care about the result of this fn call,
    // drop the temporary slot we made.
    match (dest, opt_llretslot, ret_ty) {
        (Some(expr::Ignore), Some(llretslot), ty::FnConverging(ret_ty)) => {
            // drop the value if it is not being saved.
            bcx = glue::drop_ty(bcx,
                                llretslot,
                                ret_ty,
                                debug_loc);
            call_lifetime_end(bcx, llretslot);
        }
        _ => {}
    }
    // Calls to diverging functions never return; mark the continuation
    // unreachable.
    if ret_ty == ty::FnDiverging {
        Unreachable(bcx);
    }
    Result::new(bcx, llresult)
}
/// The different forms in which a call's arguments can be supplied to
/// `trans_call_inner`/`trans_args`.
pub enum CallArgs<'a, 'tcx> {
    // Supply value of arguments as a list of expressions that must be
    // translated. This is used in the common case of `foo(bar, qux)`.
    ArgExprs(&'a [P<ast::Expr>]),
    // Supply value of arguments as a list of LLVM value refs; frequently
    // used with lang items and so forth, when the argument is an internal
    // value.
    ArgVals(&'a [ValueRef]),
    // For overloaded operators: `(lhs, Vec(rhs, rhs_id), autoref)`. `lhs`
    // is the left-hand-side and `rhs/rhs_id` is the datum/expr-id of
    // the right-hand-side arguments (if any). `autoref` indicates whether the `rhs`
    // arguments should be auto-referenced
    ArgOverloadedOp(Datum<'tcx, Expr>, Vec<(Datum<'tcx, Expr>, ast::NodeId)>, bool),
    // Supply value of arguments as a list of expressions that must be
    // translated, for overloaded call operators.
    ArgOverloadedCall(Vec<&'a ast::Expr>),
}
/// Translates arguments for a direct call to `call`/`call_mut`/`call_once`
/// (the "rust-call" ABI): `arg_exprs[0]` is the self value and
/// `arg_exprs[1]` is a tuple that is untupled into individual arguments.
fn trans_args_under_call_abi<'blk, 'tcx>(
                             mut bcx: Block<'blk, 'tcx>,
                             arg_exprs: &[P<ast::Expr>],
                             fn_ty: Ty<'tcx>,
                             llargs: &mut Vec<ValueRef>,
                             arg_cleanup_scope: cleanup::ScopeId,
                             ignore_self: bool)
                             -> Block<'blk, 'tcx>
{
    let args =
        ty::erase_late_bound_regions(
            bcx.tcx(), &ty::ty_fn_args(fn_ty));
    // Translate the `self` argument first.
    if !ignore_self {
        let arg_datum = unpack_datum!(bcx, expr::trans(bcx, &*arg_exprs[0]));
        bcx = trans_arg_datum(bcx,
                              args[0],
                              arg_datum,
                              arg_cleanup_scope,
                              DontAutorefArg,
                              llargs);
    }
    // Now untuple the rest of the arguments.
    let tuple_expr = &arg_exprs[1];
    let tuple_type = common::node_id_type(bcx, tuple_expr.id);
    match tuple_type.sty {
        ty::TyTuple(ref field_types) => {
            let tuple_datum = unpack_datum!(bcx,
                                            expr::trans(bcx, &**tuple_expr));
            // The tuple must be an lvalue so that its fields can be
            // projected out one at a time.
            let tuple_lvalue_datum =
                unpack_datum!(bcx,
                              tuple_datum.to_lvalue_datum(bcx,
                                                          "args",
                                                          tuple_expr.id));
            let repr = adt::represent_type(bcx.ccx(), tuple_type);
            let repr_ptr = &*repr;
            // Pass each tuple field as its own argument; field pointers are
            // taken from variant/discriminant 0 (tuples have one variant).
            for (i, field_type) in field_types.iter().enumerate() {
                let arg_datum = tuple_lvalue_datum.get_element(
                    bcx,
                    field_type,
                    |srcval| {
                        adt::trans_field_ptr(bcx, repr_ptr, srcval, 0, i)
                    }).to_expr_datum();
                bcx = trans_arg_datum(bcx,
                                      field_type,
                                      arg_datum,
                                      arg_cleanup_scope,
                                      DontAutorefArg,
                                      llargs);
            }
        }
        _ => {
            bcx.sess().span_bug(tuple_expr.span,
                                "argument to `.call()` wasn't a tuple?!")
        }
    };
    bcx
}
/// Translates arguments for an overloaded call operator (`foo(...)` where
/// `foo` implements one of the `Fn` traits): `arg_exprs[0]` is the callee
/// itself and the remaining expressions map onto the fields of the formal
/// tuple argument (`arg_tys[1]`).
fn trans_overloaded_call_args<'blk, 'tcx>(
                              mut bcx: Block<'blk, 'tcx>,
                              arg_exprs: Vec<&ast::Expr>,
                              fn_ty: Ty<'tcx>,
                              llargs: &mut Vec<ValueRef>,
                              arg_cleanup_scope: cleanup::ScopeId,
                              ignore_self: bool)
                              -> Block<'blk, 'tcx> {
    // Translate the `self` argument first.
    let arg_tys = ty::erase_late_bound_regions(bcx.tcx(), &ty::ty_fn_args(fn_ty));
    if !ignore_self {
        let arg_datum = unpack_datum!(bcx, expr::trans(bcx, arg_exprs[0]));
        bcx = trans_arg_datum(bcx,
                              arg_tys[0],
                              arg_datum,
                              arg_cleanup_scope,
                              DontAutorefArg,
                              llargs);
    }
    // Now untuple the rest of the arguments.
    let tuple_type = arg_tys[1];
    match tuple_type.sty {
        ty::TyTuple(ref field_types) => {
            // `arg_exprs[i + 1]` supplies the value for tuple field `i`.
            for (i, &field_type) in field_types.iter().enumerate() {
                let arg_datum =
                    unpack_datum!(bcx, expr::trans(bcx, arg_exprs[i + 1]));
                bcx = trans_arg_datum(bcx,
                                      field_type,
                                      arg_datum,
                                      arg_cleanup_scope,
                                      DontAutorefArg,
                                      llargs);
            }
        }
        _ => {
            bcx.sess().span_bug(arg_exprs[0].span,
                                "argument to `.call()` wasn't a tuple?!")
        }
    };
    bcx
}
/// Translates a call's arguments into `llargs`, dispatching on the form the
/// arguments were supplied in (`CallArgs`) and on the ABI.
///
/// `ignore_self` skips the first argument (already pushed by the caller for
/// trait-object calls); `arg_cleanup_scope` receives cleanups for arguments
/// that might need dropping if a later argument panics.
pub fn trans_args<'a, 'blk, 'tcx>(cx: Block<'blk, 'tcx>,
                                  args: CallArgs<'a, 'tcx>,
                                  fn_ty: Ty<'tcx>,
                                  llargs: &mut Vec<ValueRef>,
                                  arg_cleanup_scope: cleanup::ScopeId,
                                  ignore_self: bool,
                                  abi: synabi::Abi)
                                  -> Block<'blk, 'tcx> {
    debug!("trans_args(abi={})", abi);
    let _icx = push_ctxt("trans_args");
    let arg_tys = ty::erase_late_bound_regions(cx.tcx(), &ty::ty_fn_args(fn_ty));
    let variadic = ty::fn_is_variadic(fn_ty);
    let mut bcx = cx;
    // First we figure out the caller's view of the types of the arguments.
    // This will be needed if this is a generic call, because the callee has
    // to cast her view of the arguments to the caller's view.
    match args {
        ArgExprs(arg_exprs) => {
            if abi == synabi::RustCall {
                // This is only used for direct calls to the `call`,
                // `call_mut` or `call_once` functions.
                return trans_args_under_call_abi(cx,
                                                 arg_exprs,
                                                 fn_ty,
                                                 llargs,
                                                 arg_cleanup_scope,
                                                 ignore_self)
            }
            let num_formal_args = arg_tys.len();
            for (i, arg_expr) in arg_exprs.iter().enumerate() {
                if i == 0 && ignore_self {
                    continue;
                }
                // Arguments past the formal list are only legal for
                // variadic (C-style) functions; their type comes from the
                // expression itself.
                let arg_ty = if i >= num_formal_args {
                    assert!(variadic);
                    common::expr_ty_adjusted(cx, &**arg_expr)
                } else {
                    arg_tys[i]
                };
                let arg_datum = unpack_datum!(bcx, expr::trans(bcx, &**arg_expr));
                bcx = trans_arg_datum(bcx, arg_ty, arg_datum,
                                      arg_cleanup_scope,
                                      DontAutorefArg,
                                      llargs);
            }
        }
        ArgOverloadedCall(arg_exprs) => {
            return trans_overloaded_call_args(cx,
                                              arg_exprs,
                                              fn_ty,
                                              llargs,
                                              arg_cleanup_scope,
                                              ignore_self)
        }
        ArgOverloadedOp(lhs, rhs, autoref) => {
            assert!(!variadic);
            bcx = trans_arg_datum(bcx, arg_tys[0], lhs,
                                  arg_cleanup_scope,
                                  DontAutorefArg,
                                  llargs);
            assert_eq!(arg_tys.len(), 1 + rhs.len());
            for (rhs, rhs_id) in rhs {
                bcx = trans_arg_datum(bcx, arg_tys[1], rhs,
                                      arg_cleanup_scope,
                                      if autoref { DoAutorefArg(rhs_id) } else { DontAutorefArg },
                                      llargs);
            }
        }
        ArgVals(vs) => {
            // Already-translated LLVM values: append them directly.
            llargs.push_all(vs);
        }
    }
    bcx
}
/// Whether `trans_arg_datum` should pass the argument by implicit reference
/// (used by overloaded operators on the right-hand side).
#[derive(Copy, Clone)]
pub enum AutorefArg {
    // Pass the argument by value (ownership moves to the callee).
    DontAutorefArg,
    // Pass a reference to the argument; the node id identifies the
    // expression being auto-referenced.
    DoAutorefArg(ast::NodeId)
}
/// Lowers a single argument datum into one or more LLVM values pushed onto
/// `llargs`, handling auto-ref, by-ref vs. by-value passing, pointer casts
/// for subtyping mismatches and fat-pointer splitting.
pub fn trans_arg_datum<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                   formal_arg_ty: Ty<'tcx>,
                                   arg_datum: Datum<'tcx, Expr>,
                                   arg_cleanup_scope: cleanup::ScopeId,
                                   autoref_arg: AutorefArg,
                                   llargs: &mut Vec<ValueRef>)
                                   -> Block<'blk, 'tcx> {
    let _icx = push_ctxt("trans_arg_datum");
    let mut bcx = bcx;
    let ccx = bcx.ccx();
    debug!("trans_arg_datum({:?})",
           formal_arg_ty);
    let arg_datum_ty = arg_datum.ty;
    debug!("   arg datum: {}", arg_datum.to_string(bcx.ccx()));
    let mut val;
    // FIXME(#3548) use the adjustments table
    match autoref_arg {
        DoAutorefArg(arg_id) => {
            // We will pass argument by reference
            // We want an lvalue, so that we can pass by reference and
            let arg_datum = unpack_datum!(
                bcx, arg_datum.to_lvalue_datum(bcx, "arg", arg_id));
            val = arg_datum.val;
        }
        // Fat pointers without drop glue can be passed as-is; their two
        // words are split off below.
        DontAutorefArg if common::type_is_fat_ptr(bcx.tcx(), arg_datum_ty) &&
                          !bcx.fcx.type_needs_drop(arg_datum_ty) => {
            val = arg_datum.val
        }
        DontAutorefArg => {
            // Make this an rvalue, since we are going to be
            // passing ownership.
            let arg_datum = unpack_datum!(
                bcx, arg_datum.to_rvalue_datum(bcx, "arg"));
            // Now that arg_datum is owned, get it into the appropriate
            // mode (ref vs value).
            let arg_datum = unpack_datum!(
                bcx, arg_datum.to_appropriate_datum(bcx));
            // Technically, ownership of val passes to the callee.
            // However, we must cleanup should we panic before the
            // callee is actually invoked.
            val = arg_datum.add_clean(bcx.fcx, arg_cleanup_scope);
        }
    }
    if type_of::arg_is_indirect(ccx, formal_arg_ty) && formal_arg_ty != arg_datum_ty {
        // this could happen due to e.g. subtyping
        let llformal_arg_ty = type_of::type_of_explicit_arg(ccx, formal_arg_ty);
        debug!("casting actual type ({}) to match formal ({})",
               bcx.val_to_string(val), bcx.llty_str(llformal_arg_ty));
        debug!("Rust types: {:?}; {:?}", arg_datum_ty,
               formal_arg_ty);
        val = PointerCast(bcx, val, llformal_arg_ty);
    }
    debug!("--- trans_arg_datum passing {}", bcx.val_to_string(val));
    // Fat pointers are passed as two separate arguments: data pointer and
    // length/vtable word.
    if common::type_is_fat_ptr(bcx.tcx(), formal_arg_ty) {
        llargs.push(Load(bcx, expr::get_dataptr(bcx, val)));
        llargs.push(Load(bcx, expr::get_len(bcx, val)));
    } else {
        llargs.push(val);
    }
    bcx
}
| 39.646259 | 99 | 0.491742 |
4bc3442871f5d749dba9e83d1e297ea1b0a0e281 | 664 | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::rc::Rc;
use std::cell::RefCell;
/// Accepts any `Sync` value and discards it; exists only so that `main`
/// below can trigger a missing-`Sync` trait-bound error.
fn bar<T>(_value: T)
where
    T: Sync,
{
}
// Compile-fail test: `Rc<RefCell<_>>` is neither `Send` nor `Sync`, so
// passing it to `bar` (which requires `T: Sync`) must be rejected with the
// error annotated below. (Note: `//~^` points at the previous line, so no
// comment may be inserted between the call and the annotation.)
fn main() {
    let x = Rc::new(RefCell::new(5));
    bar(x);
    //~^ ERROR the trait `core::marker::Sync` is not implemented
}
| 31.619048 | 68 | 0.692771 |
337c620a9b97527fded0dd1f4ed4ed13cb1648be | 8,568 | use super::*;
/// A dense 3-dimensional Conway-cube grid backed by nested `Vec`s.
#[derive(PartialEq, Debug, Clone)]
pub(crate) struct PocketDimensionVec {
    // 3-dimensional state, indexed as [z][y][x]
    states: Vec<Vec<Vec<State>>>,
}
impl From<&Vec<String>> for PocketDimensionVec {
    /// Builds a single-layer (z = 0) dimension from the textual puzzle
    /// input: every character of every row becomes one `State`.
    fn from(grid: &Vec<String>) -> Self {
        let mut layer: Vec<Vec<State>> = Vec::with_capacity(grid.len());
        for row in grid {
            layer.push(row.chars().map(State::from).collect());
        }
        PocketDimensionVec {
            states: vec![layer],
        }
    }
}
impl Display for PocketDimensionVec {
    /// Renders each z-layer under a `z=<signed offset>` heading, where the
    /// offset is relative to the middle layer; trailing whitespace is
    /// trimmed from the final output.
    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
        let half_depth = self.states.len() / 2;
        let mut out = String::new();
        for (z, layer) in self.states.iter().enumerate() {
            out.push_str(&format!("z={}\n", z as isize - half_depth as isize));
            for row in layer {
                for state in row {
                    out.push(state.to_char());
                }
                out.push('\n');
            }
            out.push('\n');
        }
        write!(f, "{}", out.trim())
    }
}
impl PocketDimension<(isize, isize, isize)> for PocketDimensionVec {
    /// Total number of cubes currently in the `Active` state.
    fn active_cube_count(&self) -> usize {
        self.states
            .iter()
            .flatten()
            .flatten()
            .filter(|&s| s == &State::Active)
            .count()
    }
    /// State of the cube at `pos` (z, y, x); panics when out of bounds.
    fn get_state_at(&self, pos: &(isize, isize, isize)) -> State {
        let (z, y, x) = self.range_checked(pos);
        self.states[z][y][x]
    }
    /// True when the cube at `pos` is active; panics when out of bounds.
    fn is_active(&self, pos: &(isize, isize, isize)) -> bool {
        self.get_state_at(pos) == State::Active
    }
    /// Overwrites the state at `pos`; panics when out of bounds.
    fn set_state_at(&mut self, pos: &(isize, isize, isize), state: State) {
        let (z, y, x) = self.range_checked(pos);
        self.states[z][y][x] = state;
    }
    /// Absolute coordinates of every in-bounds neighbor of `pos`.
    fn safe_neighbors_of(&self, pos: &(isize, isize, isize)) -> Vec<(isize, isize, isize)> {
        let &(z, y, x) = pos;
        self.safe_offsets(pos)
            .into_iter()
            .map(|(dz, dy, dx)| (z + dz, y + dy, x + dx))
            .collect()
    }
    /// How many in-bounds neighbors of `pos` are active.
    fn active_neighbor_count_of(&self, pos: &(isize, isize, isize)) -> usize {
        self.safe_neighbors_of(pos)
            .into_iter()
            .filter(|neighbor| self.is_active(neighbor))
            .count()
    }
    /// The relative offsets describing a cube's 3-dimensional neighborhood
    /// (delegated to the shared `offsets_3d` helper).
    fn offsets() -> Vec<(isize, isize, isize)> {
        crate::day17::offsets_3d()
    }
}
impl ExecutableCycle for PocketDimensionVec {
    /// Advances the dimension by one cycle: grows the grid by one cube on
    /// every side, applies the Conway-cube rules to every position, and
    /// trims away all-inactive outer layers afterwards.
    fn execute_cycle(self) -> Self {
        // allocate a new pocket dimension that extends 1 cube further out on all 6 sides
        let mut prev: PocketDimensionVec = PocketDimensionVec::create_new_of_size(
            self.depth() + 2,
            self.height() + 2,
            self.width() + 2,
        );
        // set its states to a copy of the previous pocket dimension;
        // note this copy is offset such that it is in the middle
        for z in 0..self.depth() {
            let z = z as isize;
            for y in 0..self.height() {
                let y = y as isize;
                for x in 0..self.width() {
                    let x = x as isize;
                    let state = self.get_state_at(&(z, y, x));
                    let new_pos = (z + 1, y + 1, x + 1);
                    prev.set_state_at(&new_pos, state);
                }
            }
        }
        // `next` starts out all-inactive and receives the new generation.
        let mut next: PocketDimensionVec = PocketDimensionVec::create_new_of_size(
            self.depth() + 2,
            self.height() + 2,
            self.width() + 2,
        );
        // go through all these locations and determine the new state
        for z in 0..prev.depth() {
            let z = z as isize;
            for y in 0..prev.height() {
                let y = y as isize;
                for x in 0..prev.width() {
                    let x = x as isize;
                    let pos = &(z, y, x);
                    let prev_state = prev.get_state_at(pos);
                    let active_neighbor_count = prev.active_neighbor_count_of(pos);
                    // Conway-cube rules: an active cube stays active with 2
                    // or 3 active neighbors, an inactive cube activates with
                    // exactly 3, everything else becomes inactive.
                    let new_state = match (&prev_state, active_neighbor_count) {
                        (State::Active, 2..=3) | (State::Inactive, 3) => State::Active,
                        (_, _) => State::Inactive,
                    };
                    next.set_state_at(pos, new_state);
                }
            }
        }
        // println!("----prev:----\n{}\n", prev);
        // println!("----next:----\n{}\n", next);
        next.trim()
    }
    /// Removes every z-layer, y-row and x-column that contains no active
    /// cubes, keeping the stored grid as small as possible. The index in
    /// each while-loop only advances when nothing was removed, because
    /// `Vec::remove` shifts the remaining elements down.
    fn trim(mut self) -> Self {
        // trim z: drop whole layers that contain no active cube.
        let mut z = 0;
        while z < self.depth() {
            let no_active_cubes = (0..self.height()).into_iter().all(|y| {
                (0..self.width())
                    .into_iter()
                    .all(|x| !self.is_active(&(z as isize, y as isize, x as isize)))
            });
            if no_active_cubes {
                // println!("For z = {} no cubes are active, removing them", z);
                self.states.remove(z);
            } else {
                z += 1;
            }
        }
        // trim y: a row index is removable only if it is inactive in EVERY
        // layer, so it must be removed from every layer at once.
        let mut y = 0;
        while y < self.height() {
            let all_inactive = (0..self.depth()).into_iter().all(|z| {
                (0..self.width())
                    .into_iter()
                    .all(|x| !self.is_active(&(z as isize, y as isize, x as isize)))
            });
            if all_inactive {
                // println!("For y = {} no cubes are active, removing them", y);
                (0..self.depth()).into_iter().for_each(|z| {
                    self.states[z].remove(y);
                });
            } else {
                y += 1;
            }
        }
        // trim x: likewise, a column must be inactive across all layers and
        // rows before it is removed from every row.
        let mut x = 0;
        while x < self.width() {
            let all_inactive = (0..self.height()).into_iter().all(|y| {
                (0..self.depth())
                    .into_iter()
                    .all(|z| !self.is_active(&(z as isize, y as isize, x as isize)))
            });
            if all_inactive {
                // println!("For x = {} no cubes are active, removing them", x);
                (0..self.depth()).into_iter().for_each(|z| {
                    (0..self.height()).into_iter().for_each(|y| {
                        self.states[z][y].remove(x);
                    })
                });
            } else {
                x += 1;
            }
        }
        // return it
        self
    }
}
impl PocketDimensionVec {
    /// Number of layers along the z axis.
    pub(crate) fn depth(&self) -> usize {
        self.states.len()
    }
    /// Number of rows along the y axis; 0 for an empty dimension.
    pub(crate) fn height(&self) -> usize {
        self.states.first().map_or(0, |layer| layer.len())
    }
    /// Number of columns along the x axis; 0 when there are no rows.
    pub(crate) fn width(&self) -> usize {
        self.states
            .first()
            .and_then(|layer| layer.first())
            .map_or(0, |row| row.len())
    }
    /// The subset of neighbor offsets that stay in bounds when applied to
    /// `pos`. Panics (via the range check) when `pos` itself is out of
    /// bounds.
    pub(crate) fn safe_offsets(&self, pos: &(isize, isize, isize)) -> Vec<(isize, isize, isize)> {
        self.range_checked(pos);
        let &(z, y, x) = pos;
        PocketDimensionVec::offsets()
            .into_iter()
            .filter(|&(dz, dy, dx)| self.is_safe_pos(&(z + dz, y + dy, x + dx)))
            .collect()
    }
    /// Allocates an all-inactive grid of the given size.
    fn create_new_of_size(depth: usize, height: usize, width: usize) -> Self {
        let row = vec![State::Inactive; width];
        let layer = vec![row; height];
        PocketDimensionVec {
            states: vec![layer; depth],
        }
    }
    /// Validates that `pos` lies inside the grid (panicking otherwise) and
    /// converts it into unsigned indices.
    fn range_checked(&self, pos: &(isize, isize, isize)) -> (usize, usize, usize) {
        fn check(num: isize, lo: isize, hi: usize) {
            if !(lo..hi as isize).contains(&num) {
                panic!("{} not in range {}..{}", num, lo, hi);
            }
        }
        check(pos.0, 0, self.depth());
        check(pos.1, 0, self.height());
        check(pos.2, 0, self.width());
        (pos.0 as usize, pos.1 as usize, pos.2 as usize)
    }
    /// True when `pos` lies within all three axis bounds.
    fn is_safe_pos(&self, pos: &(isize, isize, isize)) -> bool {
        (0..self.depth() as isize).contains(&pos.0)
            && (0..self.height() as isize).contains(&pos.1)
            && (0..self.width() as isize).contains(&pos.2)
    }
}
| 34.272 | 98 | 0.470472 |
28f68ac2de037dccadfcee9f33e62ced3987c333 | 14,739 | // This pass converts move out from array by Subslice and
// ConstIndex{.., from_end: true} to ConstIndex move out(s) from the beginning
// of the array. This allows the MIR borrow checker to detect errors and
// drop elaboration to handle arrays without additional work.
//
// Example:
//
// let a = [ box 1,box 2, box 3];
// if b {
// let [_a.., _] = a;
// } else {
// let [.., _b] = a;
// }
//
// mir statement _10 = move _2[:-1]; replaced by:
// StorageLive(_12);
// _12 = move _2[0 of 3];
// StorageLive(_13);
// _13 = move _2[1 of 3];
// _10 = [move _12, move _13]
// StorageDead(_12);
// StorageDead(_13);
//
// and mir statement _11 = move _2[-1 of 1]; replaced by:
// _11 = move _2[2 of 3];
//
// FIXME: integrate this transformation to the mir build
use rustc::ty;
use rustc::ty::TyCtxt;
use rustc::mir::*;
use rustc::mir::visit::{Visitor, PlaceContext, NonUseContext};
use rustc_index::vec::{IndexVec};
use crate::transform::{MirPass, MirSource};
use crate::util::patch::MirPatch;
/// MIR pass that rewrites `Subslice` and `from_end: true` `ConstantIndex`
/// moves out of arrays into from-the-start `ConstantIndex` moves (see the
/// module header for a worked example).
pub struct UniformArrayMoveOut;
impl<'tcx> MirPass<'tcx> for UniformArrayMoveOut {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) {
        // Changes are collected into a patch while visiting (the body cannot
        // be mutated during the walk) and applied afterwards.
        let mut patch = MirPatch::new(body);
        let param_env = tcx.param_env(src.def_id());
        {
            // Inner scope ends the visitor's mutable borrow of `patch`
            // before `patch.apply` needs it.
            let mut visitor = UniformArrayMoveOutVisitor{body, patch: &mut patch, tcx, param_env};
            visitor.visit_body(body);
        }
        patch.apply(body);
    }
}
/// Visitor that finds array move-outs needing rewriting and records the
/// replacement statements in `patch`.
struct UniformArrayMoveOutVisitor<'a, 'tcx> {
    body: &'a Body<'tcx>,
    patch: &'a mut MirPatch<'tcx>,
    tcx: TyCtxt<'tcx>,
    param_env: ty::ParamEnv<'tcx>,
}
impl<'a, 'tcx> Visitor<'tcx> for UniformArrayMoveOutVisitor<'a, 'tcx> {
    fn visit_assign(&mut self,
                    dst_place: &Place<'tcx>,
                    rvalue: &Rvalue<'tcx>,
                    location: Location) {
        // Only `_dst = move _src[...]` assignments are of interest.
        if let Rvalue::Use(Operand::Move(ref src_place)) = rvalue {
            if let &[ref proj_base @ .., elem] = src_place.projection.as_ref() {
                if let ProjectionElem::ConstantIndex{offset: _,
                                                     min_length: _,
                                                     from_end: false} = elem {
                    // Already a from-the-start constant index: no
                    // transformation needed.
                } else {
                    // Rewrite only when the base is an array whose length
                    // can be evaluated to a constant.
                    let place_ty =
                        Place::ty_from(&src_place.base, proj_base, self.body, self.tcx).ty;
                    if let ty::Array(item_ty, const_size) = place_ty.kind {
                        if let Some(size) = const_size.try_eval_usize(self.tcx, self.param_env) {
                            // Offsets are stored as u32 in ConstantIndex.
                            assert!(size <= u32::max_value() as u64,
                                    "uniform array move out doesn't supported
                                     for array bigger then u32");
                            self.uniform(
                                location,
                                dst_place,
                                &src_place.base,
                                &src_place.projection,
                                item_ty,
                                size as u32,
                            );
                        }
                    }
                }
            }
        }
        self.super_assign(dst_place, rvalue, location)
    }
}
impl<'a, 'tcx> UniformArrayMoveOutVisitor<'a, 'tcx> {
    /// Replaces the assignment at `location` according to the projection's
    /// last element: a `Subslice` becomes a sequence of per-element
    /// from-the-start moves plus an array aggregate; a `from_end`
    /// `ConstantIndex` becomes the equivalent from-the-start index.
    fn uniform(&mut self,
                   location: Location,
                   dst_place: &Place<'tcx>,
                   base: &PlaceBase<'tcx>,
                   proj: &[PlaceElem<'tcx>],
                   item_ty: &'tcx ty::TyS<'tcx>,
                   size: u32) {
        if let [proj_base @ .., elem] = proj {
            match elem {
                // uniforms statements like _10 = move _2[:-1];
                ProjectionElem::Subslice{from, to} => {
                    self.patch.make_nop(location);
                    // One temporary per element of the subslice, each moved
                    // out with a from-the-start constant index.
                    let temps : Vec<_> = (*from..(size-*to)).map(|i| {
                        let temp =
                            self.patch.new_temp(item_ty, self.body.source_info(location).span);
                        self.patch.add_statement(location, StatementKind::StorageLive(temp));
                        let mut projection = proj_base.to_vec();
                        projection.push(ProjectionElem::ConstantIndex {
                            offset: i,
                            min_length: size,
                            from_end: false,
                        });
                        self.patch.add_assign(
                            location,
                            Place::from(temp),
                            Rvalue::Use(Operand::Move(Place {
                                base: base.clone(),
                                projection: self.tcx.intern_place_elems(&projection),
                            })),
                        );
                        temp
                    }).collect();
                    // Reassemble the destination as an array aggregate of
                    // the temporaries, then end their storage.
                    self.patch.add_assign(
                        location,
                        dst_place.clone(),
                        Rvalue::Aggregate(
                            box AggregateKind::Array(item_ty),
                            temps.iter().map(
                                |x| Operand::Move(Place::from(*x))
                            ).collect()
                        )
                    );
                    for temp in temps {
                        self.patch.add_statement(location, StatementKind::StorageDead(temp));
                    }
                }
                // uniforms statements like _11 = move _2[-1 of 1];
                ProjectionElem::ConstantIndex{offset, min_length: _, from_end: true} => {
                    self.patch.make_nop(location);
                    let mut projection = proj_base.to_vec();
                    // `offset` counted from the end maps to `size - offset`
                    // counted from the start.
                    projection.push(ProjectionElem::ConstantIndex {
                        offset: size - offset,
                        min_length: size,
                        from_end: false,
                    });
                    self.patch.add_assign(
                        location,
                        dst_place.clone(),
                        Rvalue::Use(Operand::Move(Place {
                            base: base.clone(),
                            projection: self.tcx.intern_place_elems(&projection),
                        })),
                    );
                }
                _ => {}
            }
        }
    }
}
// Restore Subslice move out after analysis
// Example:
//
// next statements:
// StorageLive(_12);
// _12 = move _2[0 of 3];
// StorageLive(_13);
// _13 = move _2[1 of 3];
// _10 = [move _12, move _13]
// StorageDead(_12);
// StorageDead(_13);
//
// replaced by _10 = move _2[:-1];
/// MIR pass that reverses `UniformArrayMoveOut` after analysis: sequences
/// of per-element moves feeding an array aggregate are folded back into a
/// single `Subslice` move (see the comment block above for an example).
pub struct RestoreSubsliceArrayMoveOut<'tcx> {
    tcx: TyCtxt<'tcx>
}
impl<'tcx> MirPass<'tcx> for RestoreSubsliceArrayMoveOut<'tcx> {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) {
        let mut patch = MirPatch::new(body);
        let param_env = tcx.param_env(src.def_id());
        {
            // Collect local-use counts and array-aggregate assignments as
            // restore candidates.
            let mut visitor = RestoreDataCollector {
                locals_use: IndexVec::from_elem(LocalUse::new(), &body.local_decls),
                candidates: vec![],
            };
            visitor.visit_body(body);
            for candidate in &visitor.candidates {
                let statement = &body[candidate.block].statements[candidate.statement_index];
                if let StatementKind::Assign(box(ref dst_place, ref rval)) = statement.kind {
                    if let Rvalue::Aggregate(box AggregateKind::Array(_), ref items) = *rval {
                        // For each aggregate operand, try to resolve the
                        // temporary local back to `(use-info, index, source
                        // place)`; `None` marks an operand that cannot be
                        // folded back.
                        let items : Vec<_> = items.iter().map(|item| {
                            if let Operand::Move(place) = item {
                                if let Some(local) = place.as_local() {
                                    let local_use = &visitor.locals_use[local];
                                    let opt_index_and_place =
                                        Self::try_get_item_source(local_use, body);
                                    // each local should be used twice:
                                    //  in assign and in aggregate statements
                                    if local_use.use_count == 2 && opt_index_and_place.is_some() {
                                        let (index, src_place) = opt_index_and_place.unwrap();
                                        return Some((local_use, index, src_place));
                                    }
                                }
                            }
                            None
                        }).collect();
                        // The source place of the first item stands in for
                        // all of them; check_and_patch verifies they agree.
                        let opt_src_place = items.first().and_then(|x| *x).map(|x| x.2);
                        let opt_size = opt_src_place.and_then(|src_place| {
                            let src_ty =
                                Place::ty_from(src_place.base, src_place.projection, body, tcx).ty;
                            if let ty::Array(_, ref size_o) = src_ty.kind {
                                size_o.try_eval_usize(tcx, param_env)
                            } else {
                                None
                            }
                        });
                        let restore_subslice = RestoreSubsliceArrayMoveOut { tcx };
                        restore_subslice
                            .check_and_patch(*candidate, &items, opt_size, &mut patch, dst_place);
                    }
                }
            }
        }
        patch.apply(body);
    }
}
impl RestoreSubsliceArrayMoveOut<'tcx> {
    /// Constructs the pass for the given type context.
    pub fn new(tcx: TyCtxt<'tcx>) -> Self {
        RestoreSubsliceArrayMoveOut { tcx }
    }
    // Checks that source has size, all locals are inited from same source place and
    // indices is an integer interval. If all checks pass do the replacement.
    // items are Vec<Option<LocalUse, index in source array, source place for init local>>
    fn check_and_patch(&self,
                       candidate: Location,
                       items: &[Option<(&LocalUse, u32, PlaceRef<'_, 'tcx>)>],
                       opt_size: Option<u64>,
                       patch: &mut MirPatch<'tcx>,
                       dst_place: &Place<'tcx>) {
        let opt_src_place = items.first().and_then(|x| *x).map(|x| x.2);
        // All operands must have resolved, and all must read the same array.
        if opt_size.is_some() && items.iter().all(
            |l| l.is_some() && l.unwrap().2 == opt_src_place.unwrap()) {
            let src_place = opt_src_place.unwrap();
            let indices: Vec<_> = items.iter().map(|x| x.unwrap().1).collect();
            // Bail out unless the indices form a contiguous ascending run.
            for i in 1..indices.len() {
                if indices[i - 1] + 1 != indices[i] {
                    return;
                }
            }
            let min = *indices.first().unwrap();
            let max = *indices.last().unwrap();
            // Erase the per-element StorageLive / assign / StorageDead
            // statements; they are replaced by the single subslice move.
            for item in items {
                let locals_use = item.unwrap().0;
                patch.make_nop(locals_use.alive.unwrap());
                patch.make_nop(locals_use.dead.unwrap());
                patch.make_nop(locals_use.first_use.unwrap());
            }
            patch.make_nop(candidate);
            let size = opt_size.unwrap() as u32;
            // `to` counts from the end, hence `size - max - 1`.
            let mut projection = src_place.projection.to_vec();
            projection.push(ProjectionElem::Subslice { from: min, to: size - max - 1 });
            patch.add_assign(
                candidate,
                dst_place.clone(),
                Rvalue::Use(Operand::Move(Place {
                    base: src_place.base.clone(),
                    projection: self.tcx.intern_place_elems(&projection),
                })),
            );
        }
    }
    /// If `local_use.first_use` is an assignment of the form
    /// `_n = move src[offset of len]` (a front-indexed constant move),
    /// returns the offset together with the source place stripped of that
    /// final projection element; otherwise `None`.
    fn try_get_item_source<'a>(local_use: &LocalUse,
                               body: &'a Body<'tcx>) -> Option<(u32, PlaceRef<'a, 'tcx>)> {
        if let Some(location) = local_use.first_use {
            let block = &body[location.block];
            if block.statements.len() > location.statement_index {
                let statement = &block.statements[location.statement_index];
                if let StatementKind::Assign(
                    box(place, Rvalue::Use(Operand::Move(src_place)))
                ) = &statement.kind {
                    if let (Some(_), PlaceRef {
                        base: _,
                        projection: &[.., ProjectionElem::ConstantIndex {
                            offset, min_length: _, from_end: false
                        }],
                    }) = (place.as_local(), src_place.as_ref()) {
                        if let StatementKind::Assign(
                            box(_, Rvalue::Use(Operand::Move(place)))
                        ) = &statement.kind {
                            if let PlaceRef {
                                base,
                                projection: &[ref proj_base @ .., _],
                            } = place.as_ref() {
                                // Drop the trailing ConstantIndex to recover
                                // the base array place.
                                return Some((offset, PlaceRef {
                                    base,
                                    projection: proj_base,
                                }))
                            }
                        }
                    }
                }
            }
        }
        None
    }
}
/// Per-local bookkeeping gathered by `RestoreDataCollector`.
#[derive(Copy, Clone, Debug)]
struct LocalUse {
    // Location of the local's StorageLive statement, if seen.
    alive: Option<Location>,
    // Location of the local's StorageDead statement, if seen.
    dead: Option<Location>,
    // Number of real (non-storage, non-debuginfo) uses of the local.
    use_count: u32,
    // Location of the first real use (expected to be its initializing assign).
    first_use: Option<Location>,
}
impl LocalUse {
pub fn new() -> Self {
LocalUse{alive: None, dead: None, use_count: 0, first_use: None}
}
}
/// MIR visitor that records per-local usage data and collects every
/// array-aggregate assignment as a candidate for subslice restoration.
struct RestoreDataCollector {
    // One `LocalUse` record per local in the body.
    locals_use: IndexVec<Local, LocalUse>,
    // Locations of `_x = [move ..., move ...]` array aggregates.
    candidates: Vec<Location>,
}
impl<'tcx> Visitor<'tcx> for RestoreDataCollector {
    /// Records array-aggregate assignments as restoration candidates.
    fn visit_assign(&mut self,
                    place: &Place<'tcx>,
                    rvalue: &Rvalue<'tcx>,
                    location: Location) {
        if let Rvalue::Aggregate(box AggregateKind::Array(_), _) = *rvalue {
            self.candidates.push(location);
        }
        self.super_assign(place, rvalue, location)
    }
    /// Tracks, per local: storage-live/dead locations, the count of real
    /// uses, and the first real use. Debug-info mentions are ignored so they
    /// don't inflate `use_count`.
    fn visit_local(&mut self,
                   local: &Local,
                   context: PlaceContext,
                   location: Location) {
        let local_use = &mut self.locals_use[*local];
        match context {
            PlaceContext::NonUse(NonUseContext::StorageLive) => local_use.alive = Some(location),
            PlaceContext::NonUse(NonUseContext::StorageDead) => local_use.dead = Some(location),
            PlaceContext::NonUse(NonUseContext::VarDebugInfo) => {}
            _ => {
                local_use.use_count += 1;
                if local_use.first_use.is_none() {
                    local_use.first_use = Some(location);
                }
            }
        }
    }
}
| 39.304 | 99 | 0.461225 |
f5e8d067c5a9160ec3c4166ddb408e87cd54b1c1 | 5,706 | use crate::map::Map;
use rltk::RGB;
use serde::{Deserialize, Serialize};
use specs::error::NoError;
use specs::prelude::*;
use specs::saveload::{ConvertSaveload, Marker};
/*
`Component`. You can replace this with writing code specifying
Specs storage if you prefer, but the macro is much easier!
`ConvertSaveload` adds Serialize and Deserialize, but with extra
conversion for any Entity classes it encounters.
`Clone` is saying
"this structure can be copied in memory from one point to another."
This is necessary for the inner-workings of Serde, and also allows
you to attach .clone() to the end of any reference to a component -
and get another, perfect copy of it. In most cases, clone is really
fast (and occasionally the compiler can make it do nothing at
all!)
*/
/*
When you have a component with no data, the ConvertSaveload macro
doesn't work! Fortunately, these don't require any additional
conversion - so you can fall back to the default Serde syntax. Check Player for
a non-data ("tag") class.
*/
/// Tag component marking the player-controlled entity.
#[derive(Component, Debug, Serialize, Deserialize, Clone)]
pub struct Player {}
/// Tag component marking hostile, AI-driven entities.
#[derive(Component, Debug, Serialize, Deserialize, Clone)]
pub struct Monster {}
/// Display name of an entity.
#[derive(Component, Debug, ConvertSaveload, Clone)]
pub struct Name {
    pub name: String,
}
/// Intent component: this entity wants to melee-attack `target`.
#[derive(Component, Debug, ConvertSaveload, Clone)]
pub struct WantsToMelee {
    pub target: Entity,
}
/// Damage amounts queued against an entity, applied by the damage system.
#[derive(Component, Debug, ConvertSaveload, Clone)]
pub struct SufferDamage {
    pub amount: Vec<i32>,
}
impl SufferDamage {
    /// Queues `amount` points of damage against `victim`: appends to the
    /// victim's existing `SufferDamage` component, or inserts a fresh one
    /// holding just this amount.
    pub fn new_damage(store: &mut WriteStorage<SufferDamage>, victim: Entity, amount: i32) {
        match store.get_mut(victim) {
            Some(suffering) => suffering.amount.push(amount),
            None => {
                let dmg = SufferDamage { amount: vec![amount] };
                store.insert(victim, dmg).expect("Unable to insert damage");
            }
        }
    }
}
/// Something that can be broken; `broken` tracks its current state.
#[derive(Component, Debug, ConvertSaveload, Clone)]
pub struct Destructable {
    pub broken: bool,
}
/// Core combat attributes for anything that fights or can be fought.
#[derive(Component, Debug, ConvertSaveload, Clone)]
pub struct CombatStats {
    pub max_hp: i32,
    pub hp: i32,
    pub defense: i32,
    pub strength: i32,
}
/// Tag: this entity blocks movement through its tile.
#[derive(Component, Debug, Serialize, Deserialize, Clone)]
pub struct BlocksTile {}
/// Map coordinates of an entity.
#[derive(Component, ConvertSaveload, Clone)]
pub struct Position {
    pub x: i32,
    pub y: i32,
}
/// What you see from a specific place.
#[derive(Component, ConvertSaveload, Clone)]
pub struct Viewshed {
    pub visible_tiles: Vec<rltk::Point>,
    pub range: i32,
    // When set, the visibility system recomputes `visible_tiles`.
    pub dirty: bool,
}
/// How to draw an entity on screen.
#[derive(Component, ConvertSaveload, Clone)]
pub struct Renderable {
    pub glyph: u16,
    pub fg: RGB,
    pub bg: RGB,
    // Sort key for draw order — NOTE(review): confirm against the render
    // system whether lower or higher values are drawn on top.
    pub render_order: i32,
}
/// Tag: this entity is an item that can be picked up.
#[derive(Component, Serialize, Deserialize, Clone)]
pub struct Item {}
/// This item is carried in `owner`'s inventory.
#[derive(Component, Debug, ConvertSaveload, Clone)]
pub struct InBackpack {
    pub owner: Entity,
}
/// Intent: `collected_by` wants to pick up `item`.
#[derive(Component, Debug, ConvertSaveload, Clone)]
pub struct WantsToPickupItem {
    pub collected_by: Entity,
    pub item: Entity,
}
/// Intent: the entity wants to drop `item`.
#[derive(Component, Debug, ConvertSaveload, Clone)]
pub struct WantsToDropItem {
    pub item: Entity,
}
/// Intent: the entity wants to use `item`.
#[derive(Component, Debug, ConvertSaveload, Clone)]
pub struct WantsToUseItem {
    pub item: Entity,
    // Map point for targeted (e.g. ranged) items; None for self-use.
    pub target: Option<rltk::Point>,
}
/// Intent: the entity wants to unequip `item`.
#[derive(Component, Debug, ConvertSaveload, Clone)]
pub struct WantsToRemoveEquipment {
    pub item: Entity,
}
/// Tag: the item is destroyed after one use.
#[derive(Component, Debug, Serialize, Deserialize, Clone)]
pub struct Consumable {}
/// Using this item restores `heal_amount` hit points.
#[derive(Component, Debug, ConvertSaveload, Clone)]
pub struct ProvidesHealing {
    pub heal_amount: i32,
}
/// The item can be used at a distance of up to `range` tiles.
#[derive(Component, Debug, ConvertSaveload, Clone)]
pub struct Ranged {
    pub range: i32,
}
/// Using this item deals `damage` to the target.
#[derive(Component, Debug, ConvertSaveload, Clone)]
pub struct InflictsDamage {
    pub damage: i32,
}
/// The item's effect covers tiles within `radius` of the target point.
#[derive(Component, Debug, ConvertSaveload, Clone)]
pub struct AreaOfEffect {
    pub radius: i32,
}
/// Status effect: the entity is confused for `turns` turns.
#[derive(Component, Debug, ConvertSaveload, Clone)]
pub struct Confusion {
    pub turns: i32,
}
/// Body slot an equippable item occupies.
#[derive(PartialEq, Copy, Clone, Serialize, Deserialize)]
pub enum EquipmentSlot {
    Melee,
    Shield,
}
/// The item can be equipped into `slot`.
#[derive(Component, Serialize, Deserialize, Clone)]
pub struct Equippable {
    pub slot: EquipmentSlot,
}
/// The item is currently equipped by `owner` in `slot`.
#[derive(Component, ConvertSaveload, Clone)]
pub struct Equipped {
    pub owner: Entity,
    pub slot: EquipmentSlot,
}
/// Flat melee power bonus granted while equipped.
#[derive(Component, ConvertSaveload, Clone)]
pub struct MeleePowerBonus {
    pub power: i32,
}
/// Flat defense bonus granted while equipped.
#[derive(Component, ConvertSaveload, Clone)]
pub struct DefenseBonus {
    pub defense: i32,
}
/// Visual particle; despawned once `lifetime_ms` elapses.
#[derive(Component, Serialize, Deserialize, Clone)]
pub struct ParticleLifetime {
    pub lifetime_ms: f32,
}
/// Stages of the hunger progression, from best fed to worst.
#[derive(Serialize, Deserialize, Copy, Clone, PartialEq)]
pub enum HungerState {
    WellFed,
    Normal,
    Hungry,
    Starving,
}
/// Tracks an entity's hunger stage and how long until the next transition.
#[derive(Component, Serialize, Deserialize, Clone)]
pub struct HungerClock {
    pub state: HungerState,
    pub duration: i32,
}
/// Tag: consuming this item satisfies hunger.
#[derive(Component, Debug, Serialize, Deserialize, Clone)]
pub struct ProvidesFood {}
/// Tag: using this item reveals the whole map.
#[derive(Component, Debug, Serialize, Deserialize, Clone)]
pub struct MagicMapper {}
/// Tag: the entity is hidden until discovered.
#[derive(Component, Debug, Serialize, Deserialize, Clone)]
pub struct Hidden {}
/// Tag: entering this entity's tile fires its trigger effects.
#[derive(Component, Debug, Serialize, Deserialize, Clone)]
pub struct EntryTrigger {}
/// Tag: set when an entity moved this turn (consumed by trigger systems).
#[derive(Component, Debug, Serialize, Deserialize, Clone)]
pub struct EntityMoved {}
/// Tag: a trigger that fires only once, then is removed.
#[derive(Component, Debug, Serialize, Deserialize, Clone)]
pub struct SingleActivation {}
// "Serialization helper code. We need to implement ConvertSaveload for each type that contains an
// Entity."
// Marker type used with specs' saveload marker machinery to select which
// entities participate in save games.
pub struct SerializeMe;
// Special component that exists to help serialize the game data
#[derive(Component, Serialize, Deserialize, Clone)]
pub struct SerializationHelper {
    pub map: Map,
}
| 24.384615 | 98 | 0.712758 |
22a9a8efe28fa601e46bbdb75df8cfedc0e46e6a | 6,344 | // Copyright 2020 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT
mod auth_cmd;
mod chain_cmd;
mod config;
mod fetch_params_cmd;
mod genesis_cmd;
pub(super) use self::auth_cmd::AuthCommands;
pub(super) use self::chain_cmd::ChainCommands;
pub use self::config::Config;
pub(super) use self::fetch_params_cmd::FetchCommands;
pub(super) use self::genesis_cmd::GenesisCommands;
use jsonrpc_v2::Error as JsonRpcError;
use std::cell::RefCell;
use std::io;
use std::process;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use structopt::StructOpt;
use utils::{read_file_to_string, read_toml};
/// CLI structure generated when interacting with Forest binary
// NOTE: field-level `///` docs are deliberately avoided here — structopt
// turns doc comments into user-visible help text, which would change the
// rendered CLI output.
#[derive(StructOpt)]
#[structopt(
    name = "forest",
    version = "0.0.1",
    about = "Filecoin implementation in Rust. This command will start the daemon process",
    author = "ChainSafe Systems <[email protected]>"
)]
pub struct CLI {
    // Daemon flags are flattened into the top-level argument list.
    #[structopt(flatten)]
    pub daemon_opts: DaemonOpts,
    // Optional subcommand; absent means "run the daemon".
    #[structopt(subcommand)]
    pub cmd: Option<Subcommand>,
}
/// Forest binary subcommands available.
// Plain `//` comments are used below because structopt would surface `///`
// docs as CLI help text.
#[derive(StructOpt)]
#[structopt(setting = structopt::clap::AppSettings::VersionlessSubcommands)]
pub enum Subcommand {
    // Downloads proof parameters; see FetchCommands.
    #[structopt(
        name = "fetch-params",
        about = "Download parameters for generating and verifying proofs for given size"
    )]
    Fetch(FetchCommands),
    // Chain inspection commands; see ChainCommands.
    #[structopt(name = "chain", about = "Interact with Filecoin blockchain")]
    Chain(ChainCommands),
    // RPC token/permission management; see AuthCommands.
    #[structopt(name = "auth", about = "Manage RPC Permissions")]
    Auth(AuthCommands),
    // Genesis tooling; see GenesisCommands.
    #[structopt(name = "genesis", about = "Work with blockchain genesis")]
    Genesis(GenesisCommands),
}
/// Daemon process command line options.
// Each `Option` field means "unset on the command line"; `to_config` layers
// set values over the file/default configuration. `//` comments are used so
// structopt help output is unchanged.
#[derive(StructOpt, Debug)]
pub struct DaemonOpts {
    #[structopt(short, long, help = "A toml file containing relevant configurations")]
    pub config: Option<String>,
    #[structopt(short, long, help = "The genesis CAR file")]
    pub genesis: Option<String>,
    #[structopt(short, long, help = "Allow rpc to be active or not (default = true)")]
    pub rpc: Option<bool>,
    #[structopt(short, long, help = "The port used for communication")]
    pub port: Option<String>,
    #[structopt(short, long, help = "Allow Kademlia (default = true)")]
    pub kademlia: Option<bool>,
    #[structopt(short, long, help = "Allow MDNS (default = true)")]
    pub mdns: Option<bool>,
    // Mutually exclusive with `import_chain`; see `to_config`.
    #[structopt(long, help = "Import a snapshot from a local CAR file or url")]
    pub import_snapshot: Option<String>,
    #[structopt(long, help = "Import a chain from a local CAR file or url")]
    pub import_chain: Option<String>,
    #[structopt(
        long,
        help = "Skips loading CAR file and uses header to index chain.\
        Assumes a pre-loaded database"
    )]
    pub skip_load: bool,
    #[structopt(long, help = "Number of worker sync tasks spawned (default is 1")]
    pub worker_tasks: Option<usize>,
    #[structopt(
        long,
        help = "Number of tipsets requested over chain exchange (default is 200)"
    )]
    pub req_window: Option<i64>,
}
impl DaemonOpts {
    /// Builds the daemon [`Config`] by layering the command line flags on top
    /// of either the TOML file given via `--config` or the default config.
    ///
    /// # Errors
    /// Returns an [`io::Error`] if the config file cannot be read or parsed,
    /// or if `--import-snapshot` and `--import-chain` are both supplied.
    /// (The conflicting-flags case previously `panic!`ed; returning an error
    /// lets the caller report it and exit cleanly.)
    pub fn to_config(&self) -> Result<Config, io::Error> {
        let mut cfg: Config = match &self.config {
            Some(config_file) => {
                // Read from config file
                let toml = read_file_to_string(&*config_file)?;
                // Parse and return the configuration file
                read_toml(&toml)?
            }
            None => Config::default(),
        };
        if let Some(genesis_file) = &self.genesis {
            cfg.genesis_file = Some(genesis_file.to_owned());
        }
        // RPC: flag overrides the file/default value; port only matters when
        // RPC is enabled.
        if self.rpc.unwrap_or(cfg.enable_rpc) {
            cfg.enable_rpc = true;
            cfg.rpc_port = self.port.to_owned().unwrap_or(cfg.rpc_port);
        } else {
            cfg.enable_rpc = false;
        }
        // Snapshot and chain imports both populate `snapshot_path` and only
        // differ in the `snapshot` flag, so they are mutually exclusive.
        if self.import_snapshot.is_some() && self.import_chain.is_some() {
            return Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "Can't set import_snapshot and import_chain at the same time!",
            ));
        }
        if let Some(snapshot_path) = &self.import_snapshot {
            cfg.snapshot_path = Some(snapshot_path.to_owned());
            cfg.snapshot = true;
        }
        if let Some(snapshot_path) = &self.import_chain {
            cfg.snapshot_path = Some(snapshot_path.to_owned());
            cfg.snapshot = false;
        }
        cfg.skip_load = self.skip_load;
        cfg.network.kademlia = self.kademlia.unwrap_or(cfg.network.kademlia);
        cfg.network.mdns = self.mdns.unwrap_or(cfg.network.mdns);
        // Sync tuning (flags defined on DaemonOpts via structopt).
        // TODO add MAX conditions
        if let Some(req_window) = &self.req_window {
            cfg.sync.req_window = req_window.to_owned();
        }
        if let Some(worker_tsk) = &self.worker_tasks {
            cfg.sync.worker_tasks = worker_tsk.to_owned();
        }
        Ok(cfg)
    }
}
/// Blocks the current async task until Ctrl-C is received.
///
/// The first Ctrl-C triggers a graceful shutdown by resolving the awaited
/// oneshot channel; a second Ctrl-C while shutdown is in progress exits the
/// process immediately.
pub(super) async fn block_until_sigint() {
    let (ctrlc_send, ctrlc_oneshot) = futures::channel::oneshot::channel();
    // The handler closure may fire more than once but the sender can only be
    // consumed once, so it is kept in an Option inside a RefCell.
    let ctrlc_send_c = RefCell::new(Some(ctrlc_send));
    // Counts how many times the handler has fired.
    let running = Arc::new(AtomicUsize::new(0));
    ctrlc::set_handler(move || {
        let prev = running.fetch_add(1, Ordering::SeqCst);
        if prev == 0 {
            println!("Got interrupt, shutting down...");
            // Send sig int in channel to blocking task
            if let Some(ctrlc_send) = ctrlc_send_c.try_borrow_mut().unwrap().take() {
                ctrlc_send.send(()).expect("Error sending ctrl-c message");
            }
        } else {
            // Second interrupt: hard exit.
            process::exit(0);
        }
    })
    .expect("Error setting Ctrl-C handler");
    ctrlc_oneshot.await.unwrap();
}
pub(super) fn stringify_rpc_err(e: JsonRpcError) -> String {
match e {
JsonRpcError::Full {
code,
message,
data: _,
} => {
return format!("JSON RPC Error: Code: {} Message: {}", code, message);
}
JsonRpcError::Provided { code, message } => {
return format!("JSON RPC Error: Code: {} Message: {}", code, message);
}
}
}
| 34.478261 | 90 | 0.621217 |
bfad0ea902814a6d476857a2ce79d0d0f9949bad | 603 | //! ランレングス
/// Run-length encodes `s`, returning `(character, run_length)` pairs in
/// order of appearance.
///
/// Returns an empty vector for the empty string. (The previous
/// implementation indexed the first character unconditionally and panicked
/// on `""`.)
pub fn compress(s: &str) -> Vec<(char, usize)> {
    let mut v = vec![];
    let mut chars = s.chars();
    // Seed the current run with the first character, if any.
    let mut buff = match chars.next() {
        Some(c) => c,
        None => return v,
    };
    let mut count = 1;
    for c in chars {
        if c == buff {
            count += 1;
        } else {
            // Run ended: record it and start a new one.
            v.push((buff, count));
            buff = c;
            count = 1;
        }
    }
    v.push((buff, count));
    v
}
#[cfg(test)]
mod tests {
    use super::*;
    // Consecutive identical characters are grouped into (char, count) pairs.
    #[test]
    fn test_run_length() {
        let v = vec![('a', 5usize), ('b', 3), ('c', 1)];
        assert_eq!(compress("aaaaabbbc"), v);
    }
}
2f4196a9dd3b185affc5498bf41e49dce11c2e9a | 921 | /*
* Created on Sat Nov 28 2020
*
* Copyright (c) storycraft. Licensed under the MIT Licence.
*/
use serde::{Deserialize, Serialize};
pub mod crypto;
pub mod session;
pub mod stream;
/// Total head size of a secure packet: header plus 4 extra bytes
/// (presumably a length prefix — TODO confirm against the stream codec).
pub const SECURE_HEAD_SIZE: usize = SECURE_HEAD_SIZE_INNER;
/// Size in bytes of the serialized [`SecureHeader`] (a 16-byte IV).
pub const SECURE_HEADER_SIZE: usize = 16;
/// Header of an encrypted packet, carrying the initialization vector
/// used to encrypt `SecurePacket::data`.
#[derive(Serialize, Deserialize, PartialEq, Debug)]
pub struct SecureHeader {
    pub iv: [u8; 16],
}
/// An encrypted packet: its header plus the (encrypted) payload bytes.
#[derive(Debug)]
pub struct SecurePacket {
    pub header: SecureHeader,
    pub data: Vec<u8>,
}
/// Total head size of a handshake packet: header plus 4 extra bytes
/// (presumably a length prefix — TODO confirm against the stream codec).
pub const SECURE_HANDSHAKE_HEAD_SIZE: usize = SECURE_HANDSHAKE_HEADER_SIZE + 4;
/// Size in bytes of the serialized [`SecureHandshakeHeader`] (two u32 fields).
pub const SECURE_HANDSHAKE_HEADER_SIZE: usize = 8;
/// Header of the handshake message, identifying the key-encryption and
/// payload-encryption algorithms by numeric id.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SecureHandshakeHeader {
    pub key_encrypt_type: u32,
    pub encrypt_type: u32,
}
/// Handshake message: algorithm identifiers plus the encrypted session key.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SecureHandshake {
    pub header: SecureHandshakeHeader,
    pub encrypted_key: Vec<u8>,
}
| 22.463415 | 79 | 0.732899 |
081fa5c49699a4732a3e87ac7ea52ca90767ba29 | 1,946 | #![allow(unused_imports)]
use super::*;
use wasm_bindgen::prelude::*;
// NOTE(review): this extern block appears to be machine-generated wasm-bindgen
// WebIDL output (note the tokenized `# [wasm_bindgen (...)]` attribute
// spacing); the `#[doc = ...]` attributes below carry the rendered docs.
// Prefer regenerating over hand-editing.
#[wasm_bindgen]
extern "C" {
    # [wasm_bindgen (extends = EventTarget , extends = :: js_sys :: Object , js_name = AbortSignal , typescript_type = "AbortSignal")]
    #[derive(Debug, Clone, PartialEq, Eq)]
    #[doc = "The `AbortSignal` class."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `AbortSignal`*"]
    pub type AbortSignal;
    # [wasm_bindgen (structural , method , getter , js_class = "AbortSignal" , js_name = aborted)]
    #[doc = "Getter for the `aborted` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal/aborted)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `AbortSignal`*"]
    pub fn aborted(this: &AbortSignal) -> bool;
    # [wasm_bindgen (structural , method , getter , js_class = "AbortSignal" , js_name = onabort)]
    #[doc = "Getter for the `onabort` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal/onabort)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `AbortSignal`*"]
    pub fn onabort(this: &AbortSignal) -> Option<::js_sys::Function>;
    # [wasm_bindgen (structural , method , setter , js_class = "AbortSignal" , js_name = onabort)]
    #[doc = "Setter for the `onabort` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/AbortSignal/onabort)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `AbortSignal`*"]
    pub fn set_onabort(this: &AbortSignal, value: Option<&::js_sys::Function>);
}
| 54.055556 | 134 | 0.650051 |
ef14afc64f2b39dcbf9b2ff561a21cc62915fa45 | 279 | fn main() {
requestty::questions![Password {
name: "name",
mask: '*',
on_esc: requestty::OnEsc::Terminate,
transform: |_, _, _| Ok(()),
validate: |_, _| Ok(()),
validate_on_key: |_, _| true,
filter: |t, _| t,
}];
}
| 23.25 | 44 | 0.462366 |
0e8d8bb2731b87ce0d46504d5713a95481a36e3c | 41,190 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Module containing functionality to compute array equality.
//! This module uses [ArrayData] and does not
//! depend on dynamic casting of `Array`.
use super::{
Array, ArrayData, BinaryOffsetSizeTrait, BooleanArray, DecimalArray,
FixedSizeBinaryArray, FixedSizeListArray, GenericBinaryArray, GenericListArray,
GenericStringArray, MapArray, NullArray, OffsetSizeTrait, PrimitiveArray,
StringOffsetSizeTrait, StructArray,
};
use crate::{
buffer::Buffer,
datatypes::{ArrowPrimitiveType, DataType, IntervalUnit},
};
use half::f16;
mod boolean;
mod decimal;
mod dictionary;
mod fixed_binary;
mod fixed_list;
mod list;
mod null;
mod primitive;
mod structure;
mod utils;
mod variable_size;
// these methods assume the same type, len and null count.
// For this reason, they are not exposed and are instead used
// to build the generic functions below (`equal_range` and `equal`).
use boolean::boolean_equal;
use decimal::decimal_equal;
use dictionary::dictionary_equal;
use fixed_binary::fixed_binary_equal;
use fixed_list::fixed_list_equal;
use list::list_equal;
use null::null_equal;
use primitive::primitive_equal;
use structure::struct_equal;
use variable_size::variable_sized_equal;
// Logical equality for arrays: every impl below delegates to [`equal`] on the
// underlying `ArrayData`, so two arrays compare equal iff their data types,
// lengths, null bitmaps and values all match.
impl PartialEq for dyn Array {
    fn eq(&self, other: &Self) -> bool {
        equal(self.data(), other.data())
    }
}
// Allows comparing a trait object against any concrete array type.
impl<T: Array> PartialEq<T> for dyn Array {
    fn eq(&self, other: &T) -> bool {
        equal(self.data(), other.data())
    }
}
impl PartialEq for NullArray {
    fn eq(&self, other: &NullArray) -> bool {
        equal(self.data(), other.data())
    }
}
impl<T: ArrowPrimitiveType> PartialEq for PrimitiveArray<T> {
    fn eq(&self, other: &PrimitiveArray<T>) -> bool {
        equal(self.data(), other.data())
    }
}
impl PartialEq for BooleanArray {
    fn eq(&self, other: &BooleanArray) -> bool {
        equal(self.data(), other.data())
    }
}
impl<OffsetSize: StringOffsetSizeTrait> PartialEq for GenericStringArray<OffsetSize> {
    fn eq(&self, other: &Self) -> bool {
        equal(self.data(), other.data())
    }
}
impl<OffsetSize: BinaryOffsetSizeTrait> PartialEq for GenericBinaryArray<OffsetSize> {
    fn eq(&self, other: &Self) -> bool {
        equal(self.data(), other.data())
    }
}
impl PartialEq for FixedSizeBinaryArray {
    fn eq(&self, other: &Self) -> bool {
        equal(self.data(), other.data())
    }
}
impl PartialEq for DecimalArray {
    fn eq(&self, other: &Self) -> bool {
        equal(self.data(), other.data())
    }
}
impl<OffsetSize: OffsetSizeTrait> PartialEq for GenericListArray<OffsetSize> {
    fn eq(&self, other: &Self) -> bool {
        equal(self.data(), other.data())
    }
}
impl PartialEq for MapArray {
    fn eq(&self, other: &Self) -> bool {
        equal(self.data(), other.data())
    }
}
impl PartialEq for FixedSizeListArray {
    fn eq(&self, other: &Self) -> bool {
        equal(self.data(), other.data())
    }
}
impl PartialEq for StructArray {
    fn eq(&self, other: &Self) -> bool {
        equal(self.data(), other.data())
    }
}
/// Compares the values of two [ArrayData] starting at `lhs_start` and `rhs_start` respectively
/// for `len` slots. The null buffers `lhs_nulls` and `rhs_nulls` inherit parent nullability.
///
/// If an array is a child of a struct or list, the array's nulls have to be merged with the parent.
/// This then affects the null count of the array, thus the merged nulls are passed separately
/// as `lhs_nulls` and `rhs_nulls` variables to functions.
/// The nulls are merged with a bitwise AND, and null counts are recomputed where necessary.
///
/// Dispatches on the *physical* layout of `lhs.data_type()` to one of the
/// type-specialized comparators imported at the top of this module; the
/// caller guarantees `lhs` and `rhs` share the same data type.
#[inline]
fn equal_values(
    lhs: &ArrayData,
    rhs: &ArrayData,
    lhs_nulls: Option<&Buffer>,
    rhs_nulls: Option<&Buffer>,
    lhs_start: usize,
    rhs_start: usize,
    len: usize,
) -> bool {
    match lhs.data_type() {
        DataType::Null => null_equal(lhs, rhs, lhs_start, rhs_start, len),
        DataType::Boolean => {
            boolean_equal(lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len)
        }
        // Fixed-width integers and floats compare by their native width.
        DataType::UInt8 => primitive_equal::<u8>(
            lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
        ),
        DataType::UInt16 => primitive_equal::<u16>(
            lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
        ),
        DataType::UInt32 => primitive_equal::<u32>(
            lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
        ),
        DataType::UInt64 => primitive_equal::<u64>(
            lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
        ),
        DataType::Int8 => primitive_equal::<i8>(
            lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
        ),
        DataType::Int16 => primitive_equal::<i16>(
            lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
        ),
        DataType::Int32 => primitive_equal::<i32>(
            lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
        ),
        DataType::Int64 => primitive_equal::<i64>(
            lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
        ),
        DataType::Float32 => primitive_equal::<f32>(
            lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
        ),
        DataType::Float64 => primitive_equal::<f64>(
            lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
        ),
        // Temporal types backed by 32-bit values compare as i32.
        DataType::Date32
        | DataType::Time32(_)
        | DataType::Interval(IntervalUnit::YearMonth) => primitive_equal::<i32>(
            lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
        ),
        // Temporal types backed by 64-bit values compare as i64.
        DataType::Date64
        | DataType::Interval(IntervalUnit::DayTime)
        | DataType::Time64(_)
        | DataType::Timestamp(_, _)
        | DataType::Duration(_) => primitive_equal::<i64>(
            lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
        ),
        // Variable-length types share a layout differing only in offset width.
        DataType::Utf8 | DataType::Binary => variable_sized_equal::<i32>(
            lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
        ),
        DataType::LargeUtf8 | DataType::LargeBinary => variable_sized_equal::<i64>(
            lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
        ),
        DataType::FixedSizeBinary(_) => {
            fixed_binary_equal(lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len)
        }
        DataType::Decimal(_, _) => {
            decimal_equal(lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len)
        }
        DataType::List(_) => {
            list_equal::<i32>(lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len)
        }
        DataType::LargeList(_) => {
            list_equal::<i64>(lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len)
        }
        DataType::FixedSizeList(_, _) => {
            fixed_list_equal(lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len)
        }
        DataType::Struct(_) => {
            struct_equal(lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len)
        }
        DataType::Union(_) => unimplemented!("See ARROW-8576"),
        // Dictionaries dispatch again on the physical key type.
        DataType::Dictionary(data_type, _) => match data_type.as_ref() {
            DataType::Int8 => dictionary_equal::<i8>(
                lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
            ),
            DataType::Int16 => dictionary_equal::<i16>(
                lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
            ),
            DataType::Int32 => dictionary_equal::<i32>(
                lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
            ),
            DataType::Int64 => dictionary_equal::<i64>(
                lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
            ),
            DataType::UInt8 => dictionary_equal::<u8>(
                lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
            ),
            DataType::UInt16 => dictionary_equal::<u16>(
                lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
            ),
            DataType::UInt32 => dictionary_equal::<u32>(
                lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
            ),
            DataType::UInt64 => dictionary_equal::<u64>(
                lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
            ),
            // Dictionary keys are restricted to integer types by construction.
            _ => unreachable!(),
        },
        DataType::Float16 => primitive_equal::<f16>(
            lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len,
        ),
        // Maps share the list layout (entries are a struct child array).
        DataType::Map(_, _) => {
            list_equal::<i32>(lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len)
        }
    }
}
/// Compares `len` slots of `lhs` and `rhs` starting at the given offsets:
/// metadata first, then null bitmaps, then values — each step short-circuits
/// exactly like an `&&` chain.
fn equal_range(
    lhs: &ArrayData,
    rhs: &ArrayData,
    lhs_nulls: Option<&Buffer>,
    rhs_nulls: Option<&Buffer>,
    lhs_start: usize,
    rhs_start: usize,
    len: usize,
) -> bool {
    if !utils::base_equal(lhs, rhs) {
        return false;
    }
    if !utils::equal_nulls(lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len) {
        return false;
    }
    equal_values(lhs, rhs, lhs_nulls, rhs_nulls, lhs_start, rhs_start, len)
}
/// Logically compares two [ArrayData].
/// Two arrays are logically equal if and only if:
/// * their data types are equal
/// * their lengths are equal
/// * their null counts are equal
/// * their null bitmaps are equal
/// * each of their items are equal
/// two items are equal when their in-memory representation is physically equal (i.e. same bit content).
/// The physical comparison depend on the data type.
/// # Panics
/// This function may panic whenever any of the [ArrayData] does not follow the Arrow specification.
/// (e.g. wrong number of buffers, buffer `len` does not correspond to the declared `len`)
pub fn equal(lhs: &ArrayData, rhs: &ArrayData) -> bool {
    let lhs_nulls = lhs.null_buffer();
    let rhs_nulls = rhs.null_buffer();
    // Cheap metadata checks first; bail out before touching any buffers.
    if !utils::base_equal(lhs, rhs) || lhs.null_count() != rhs.null_count() {
        return false;
    }
    utils::equal_nulls(lhs, rhs, lhs_nulls, rhs_nulls, 0, 0, lhs.len())
        && equal_values(lhs, rhs, lhs_nulls, rhs_nulls, 0, 0, lhs.len())
}
#[cfg(test)]
mod tests {
use std::convert::TryFrom;
use std::sync::Arc;
use crate::array::{
array::Array, ArrayDataBuilder, ArrayRef, BinaryOffsetSizeTrait, BooleanArray,
DecimalBuilder, FixedSizeBinaryBuilder, FixedSizeListBuilder, GenericBinaryArray,
Int32Builder, ListBuilder, NullArray, PrimitiveBuilder, StringArray,
StringDictionaryBuilder, StringOffsetSizeTrait, StructArray,
};
use crate::array::{GenericStringArray, Int32Array};
use crate::buffer::Buffer;
use crate::datatypes::{Field, Int16Type, ToByteSlice};
use super::*;
#[test]
fn test_null_equal() {
let a = NullArray::new(12);
let a = a.data();
let b = NullArray::new(12);
let b = b.data();
test_equal(a, b, true);
let b = NullArray::new(10);
let b = b.data();
test_equal(a, b, false);
// Test the case where offset != 0
let a_slice = a.slice(2, 3);
let b_slice = b.slice(1, 3);
test_equal(&a_slice, &b_slice, true);
let a_slice = a.slice(5, 4);
let b_slice = b.slice(3, 3);
test_equal(&a_slice, &b_slice, false);
}
#[test]
fn test_boolean_equal() {
let a = BooleanArray::from(vec![false, false, true]);
let a = a.data();
let b = BooleanArray::from(vec![false, false, true]);
let b = b.data();
test_equal(a, b, true);
let b = BooleanArray::from(vec![false, false, false]);
let b = b.data();
test_equal(a, b, false);
}
#[test]
fn test_boolean_equal_nulls() {
let a = BooleanArray::from(vec![Some(false), None, None, Some(true)]);
let a = a.data();
let b = BooleanArray::from(vec![Some(false), None, None, Some(true)]);
let b = b.data();
test_equal(a, b, true);
let b = BooleanArray::from(vec![None, None, None, Some(true)]);
let b = b.data();
test_equal(a, b, false);
let b = BooleanArray::from(vec![Some(true), None, None, Some(true)]);
let b = b.data();
test_equal(a, b, false);
}
#[test]
fn test_boolean_equal_offset() {
let a = BooleanArray::from(vec![false, true, false, true, false, false, true]);
let a = a.data();
let b =
BooleanArray::from(vec![true, false, false, false, true, false, true, true]);
let b = b.data();
assert!(!equal(a, b));
assert!(!equal(b, a));
let a_slice = a.slice(2, 3);
let b_slice = b.slice(3, 3);
assert!(equal(&a_slice, &b_slice));
assert!(equal(&b_slice, &a_slice));
let a_slice = a.slice(3, 4);
let b_slice = b.slice(4, 4);
assert!(!equal(&a_slice, &b_slice));
assert!(!equal(&b_slice, &a_slice));
// Test the optimization cases where null_count == 0 and starts at 0 and len >= size_of(u8)
// Elements fill in `u8`'s exactly.
let mut vector = vec![false, false, true, true, true, true, true, true];
let a = BooleanArray::from(vector.clone());
let a = a.data();
let b = BooleanArray::from(vector.clone());
let b = b.data();
test_equal(a, b, true);
// Elements fill in `u8`s + suffix bits.
vector.push(true);
let a = BooleanArray::from(vector.clone());
let a = a.data();
let b = BooleanArray::from(vector);
let b = b.data();
test_equal(a, b, true);
}
#[test]
fn test_primitive() {
let cases = vec![
(
vec![Some(1), Some(2), Some(3)],
vec![Some(1), Some(2), Some(3)],
true,
),
(
vec![Some(1), Some(2), Some(3)],
vec![Some(1), Some(2), Some(4)],
false,
),
(
vec![Some(1), Some(2), None],
vec![Some(1), Some(2), None],
true,
),
(
vec![Some(1), None, Some(3)],
vec![Some(1), Some(2), None],
false,
),
(
vec![Some(1), None, None],
vec![Some(1), Some(2), None],
false,
),
];
for (lhs, rhs, expected) in cases {
let lhs = Int32Array::from(lhs);
let lhs = lhs.data();
let rhs = Int32Array::from(rhs);
let rhs = rhs.data();
test_equal(lhs, rhs, expected);
}
}
#[test]
fn test_primitive_slice() {
let cases = vec![
(
vec![Some(1), Some(2), Some(3)],
(0, 1),
vec![Some(1), Some(2), Some(3)],
(0, 1),
true,
),
(
vec![Some(1), Some(2), Some(3)],
(1, 1),
vec![Some(1), Some(2), Some(3)],
(2, 1),
false,
),
(
vec![Some(1), Some(2), None],
(1, 1),
vec![Some(1), None, Some(2)],
(2, 1),
true,
),
(
vec![None, Some(2), None],
(1, 1),
vec![None, None, Some(2)],
(2, 1),
true,
),
(
vec![Some(1), None, Some(2), None, Some(3)],
(2, 2),
vec![None, Some(2), None, Some(3)],
(1, 2),
true,
),
];
for (lhs, slice_lhs, rhs, slice_rhs, expected) in cases {
let lhs = Int32Array::from(lhs);
let lhs = lhs.data();
let lhs = lhs.slice(slice_lhs.0, slice_lhs.1);
let rhs = Int32Array::from(rhs);
let rhs = rhs.data();
let rhs = rhs.slice(slice_rhs.0, slice_rhs.1);
test_equal(&lhs, &rhs, expected);
}
}
    /// Asserts that `equal(lhs, rhs) == expected`, and additionally that
    /// equality is reflexive (each input equals itself) and symmetric
    /// (both argument orders agree). Failure dumps both operands.
    fn test_equal(lhs: &ArrayData, rhs: &ArrayData, expected: bool) {
        // equality is reflexive
        assert!(equal(lhs, lhs), "\n{:?}\n{:?}", lhs, lhs);
        assert!(equal(rhs, rhs), "\n{:?}\n{:?}", rhs, rhs);
        // equality is symmetric
        assert_eq!(equal(lhs, rhs), expected, "\n{:?}\n{:?}", lhs, rhs);
        assert_eq!(equal(rhs, lhs), expected, "\n{:?}\n{:?}", rhs, lhs);
    }
fn binary_cases() -> Vec<(Vec<Option<String>>, Vec<Option<String>>, bool)> {
let base = vec![
Some("hello".to_owned()),
None,
None,
Some("world".to_owned()),
None,
None,
];
let not_base = vec![
Some("hello".to_owned()),
Some("foo".to_owned()),
None,
Some("world".to_owned()),
None,
None,
];
vec![
(
vec![Some("hello".to_owned()), Some("world".to_owned())],
vec![Some("hello".to_owned()), Some("world".to_owned())],
true,
),
(
vec![Some("hello".to_owned()), Some("world".to_owned())],
vec![Some("hello".to_owned()), Some("arrow".to_owned())],
false,
),
(base.clone(), base.clone(), true),
(base, not_base, false),
]
}
fn test_generic_string_equal<OffsetSize: StringOffsetSizeTrait>() {
let cases = binary_cases();
for (lhs, rhs, expected) in cases {
let lhs = lhs.iter().map(|x| x.as_deref()).collect();
let rhs = rhs.iter().map(|x| x.as_deref()).collect();
let lhs = GenericStringArray::<OffsetSize>::from_opt_vec(lhs);
let lhs = lhs.data();
let rhs = GenericStringArray::<OffsetSize>::from_opt_vec(rhs);
let rhs = rhs.data();
test_equal(lhs, rhs, expected);
}
}
#[test]
fn test_string_equal() {
test_generic_string_equal::<i32>()
}
#[test]
fn test_large_string_equal() {
test_generic_string_equal::<i64>()
}
fn test_generic_binary_equal<OffsetSize: BinaryOffsetSizeTrait>() {
let cases = binary_cases();
for (lhs, rhs, expected) in cases {
let lhs = lhs
.iter()
.map(|x| x.as_deref().map(|x| x.as_bytes()))
.collect();
let rhs = rhs
.iter()
.map(|x| x.as_deref().map(|x| x.as_bytes()))
.collect();
let lhs = GenericBinaryArray::<OffsetSize>::from_opt_vec(lhs);
let lhs = lhs.data();
let rhs = GenericBinaryArray::<OffsetSize>::from_opt_vec(rhs);
let rhs = rhs.data();
test_equal(lhs, rhs, expected);
}
}
#[test]
fn test_binary_equal() {
test_generic_binary_equal::<i32>()
}
#[test]
fn test_large_binary_equal() {
test_generic_binary_equal::<i64>()
}
#[test]
fn test_string_offset() {
let a = StringArray::from(vec![Some("a"), None, Some("b")]);
let a = a.data();
let a = a.slice(2, 1);
let b = StringArray::from(vec![Some("b")]);
let b = b.data();
test_equal(&a, b, true);
}
#[test]
fn test_string_offset_larger() {
let a = StringArray::from(vec![Some("a"), None, Some("b"), None, Some("c")]);
let a = a.data();
let b = StringArray::from(vec![None, Some("b"), None, Some("c")]);
let b = b.data();
test_equal(&a.slice(2, 2), &b.slice(0, 2), false);
test_equal(&a.slice(2, 2), &b.slice(1, 2), true);
test_equal(&a.slice(2, 2), &b.slice(2, 2), false);
}
#[test]
fn test_null() {
let a = NullArray::new(2);
let a = a.data();
let b = NullArray::new(2);
let b = b.data();
test_equal(a, b, true);
let b = NullArray::new(1);
let b = b.data();
test_equal(a, b, false);
}
    /// Builds `ArrayData` for a `ListArray<Int32>` from a slice of
    /// optional i32 slices; `None` entries become null lists.
    fn create_list_array<U: AsRef<[i32]>, T: AsRef<[Option<U>]>>(data: T) -> ArrayData {
        let mut builder = ListBuilder::new(Int32Builder::new(10));
        for d in data.as_ref() {
            if let Some(v) = d {
                builder.values().append_slice(v.as_ref()).unwrap();
                builder.append(true).unwrap()
            } else {
                // Null list entry: nothing is appended to the child values.
                builder.append(false).unwrap()
            }
        }
        builder.finish().data().clone()
    }
#[test]
fn test_list_equal() {
let a = create_list_array(&[Some(&[1, 2, 3]), Some(&[4, 5, 6])]);
let b = create_list_array(&[Some(&[1, 2, 3]), Some(&[4, 5, 6])]);
test_equal(&a, &b, true);
let b = create_list_array(&[Some(&[1, 2, 3]), Some(&[4, 5, 7])]);
test_equal(&a, &b, false);
}
// Test the case where null_count > 0
#[test]
fn test_list_null() {
let a =
create_list_array(&[Some(&[1, 2]), None, None, Some(&[3, 4]), None, None]);
let b =
create_list_array(&[Some(&[1, 2]), None, None, Some(&[3, 4]), None, None]);
test_equal(&a, &b, true);
let b = create_list_array(&[
Some(&[1, 2]),
None,
Some(&[5, 6]),
Some(&[3, 4]),
None,
None,
]);
test_equal(&a, &b, false);
let b =
create_list_array(&[Some(&[1, 2]), None, None, Some(&[3, 5]), None, None]);
test_equal(&a, &b, false);
// a list where the nullness of values is determined by the list's bitmap
let c_values = Int32Array::from(vec![1, 2, -1, -2, 3, 4, -3, -4]);
let c = ArrayDataBuilder::new(DataType::List(Box::new(Field::new(
"item",
DataType::Int32,
true,
))))
.len(6)
.add_buffer(Buffer::from(vec![0i32, 2, 3, 4, 6, 7, 8].to_byte_slice()))
.add_child_data(c_values.data().clone())
.null_bit_buffer(Buffer::from(vec![0b00001001]))
.build()
.unwrap();
let d_values = Int32Array::from(vec![
Some(1),
Some(2),
None,
None,
Some(3),
Some(4),
None,
None,
]);
let d = ArrayDataBuilder::new(DataType::List(Box::new(Field::new(
"item",
DataType::Int32,
true,
))))
.len(6)
.add_buffer(Buffer::from(vec![0i32, 2, 3, 4, 6, 7, 8].to_byte_slice()))
.add_child_data(d_values.data().clone())
.null_bit_buffer(Buffer::from(vec![0b00001001]))
.build()
.unwrap();
test_equal(&c, &d, true);
}
// Test the case where offset != 0
#[test]
fn test_list_offsets() {
let a =
create_list_array(&[Some(&[1, 2]), None, None, Some(&[3, 4]), None, None]);
let b =
create_list_array(&[Some(&[1, 2]), None, None, Some(&[3, 5]), None, None]);
let a_slice = a.slice(0, 3);
let b_slice = b.slice(0, 3);
test_equal(&a_slice, &b_slice, true);
let a_slice = a.slice(0, 5);
let b_slice = b.slice(0, 5);
test_equal(&a_slice, &b_slice, false);
let a_slice = a.slice(4, 1);
let b_slice = b.slice(4, 1);
test_equal(&a_slice, &b_slice, true);
}
    /// Builds `ArrayData` for a `FixedSizeBinaryArray` of 5-byte values;
    /// `None` entries become nulls.
    fn create_fixed_size_binary_array<U: AsRef<[u8]>, T: AsRef<[Option<U>]>>(
        data: T,
    ) -> ArrayData {
        let mut builder = FixedSizeBinaryBuilder::new(15, 5);
        for d in data.as_ref() {
            if let Some(v) = d {
                builder.append_value(v.as_ref()).unwrap();
            } else {
                builder.append_null().unwrap();
            }
        }
        builder.finish().data().clone()
    }
#[test]
fn test_fixed_size_binary_equal() {
let a = create_fixed_size_binary_array(&[Some(b"hello"), Some(b"world")]);
let b = create_fixed_size_binary_array(&[Some(b"hello"), Some(b"world")]);
test_equal(&a, &b, true);
let b = create_fixed_size_binary_array(&[Some(b"hello"), Some(b"arrow")]);
test_equal(&a, &b, false);
}
// Test the case where null_count > 0
#[test]
fn test_fixed_size_binary_null() {
let a = create_fixed_size_binary_array(&[Some(b"hello"), None, Some(b"world")]);
let b = create_fixed_size_binary_array(&[Some(b"hello"), None, Some(b"world")]);
test_equal(&a, &b, true);
let b = create_fixed_size_binary_array(&[Some(b"hello"), Some(b"world"), None]);
test_equal(&a, &b, false);
let b = create_fixed_size_binary_array(&[Some(b"hello"), None, Some(b"arrow")]);
test_equal(&a, &b, false);
}
#[test]
fn test_fixed_size_binary_offsets() {
// Test the case where offset != 0
let a = create_fixed_size_binary_array(&[
Some(b"hello"),
None,
None,
Some(b"world"),
None,
None,
]);
let b = create_fixed_size_binary_array(&[
Some(b"hello"),
None,
None,
Some(b"arrow"),
None,
None,
]);
let a_slice = a.slice(0, 3);
let b_slice = b.slice(0, 3);
test_equal(&a_slice, &b_slice, true);
let a_slice = a.slice(0, 5);
let b_slice = b.slice(0, 5);
test_equal(&a_slice, &b_slice, false);
let a_slice = a.slice(4, 1);
let b_slice = b.slice(4, 1);
test_equal(&a_slice, &b_slice, true);
let a_slice = a.slice(3, 1);
let b_slice = b.slice(3, 1);
test_equal(&a_slice, &b_slice, false);
}
fn create_decimal_array(data: &[Option<i128>]) -> ArrayData {
let mut builder = DecimalBuilder::new(20, 23, 6);
for d in data {
if let Some(v) = d {
builder.append_value(*v).unwrap();
} else {
builder.append_null().unwrap();
}
}
builder.finish().data().clone()
}
#[test]
fn test_decimal_equal() {
let a = create_decimal_array(&[Some(8_887_000_000), Some(-8_887_000_000)]);
let b = create_decimal_array(&[Some(8_887_000_000), Some(-8_887_000_000)]);
test_equal(&a, &b, true);
let b = create_decimal_array(&[Some(15_887_000_000), Some(-8_887_000_000)]);
test_equal(&a, &b, false);
}
// Test the case where null_count > 0
#[test]
fn test_decimal_null() {
let a = create_decimal_array(&[Some(8_887_000_000), None, Some(-8_887_000_000)]);
let b = create_decimal_array(&[Some(8_887_000_000), None, Some(-8_887_000_000)]);
test_equal(&a, &b, true);
let b = create_decimal_array(&[Some(8_887_000_000), Some(-8_887_000_000), None]);
test_equal(&a, &b, false);
let b = create_decimal_array(&[Some(15_887_000_000), None, Some(-8_887_000_000)]);
test_equal(&a, &b, false);
}
#[test]
fn test_decimal_offsets() {
// Test the case where offset != 0
let a = create_decimal_array(&[
Some(8_887_000_000),
None,
None,
Some(-8_887_000_000),
None,
None,
]);
let b = create_decimal_array(&[
None,
Some(8_887_000_000),
None,
None,
Some(15_887_000_000),
None,
None,
]);
let a_slice = a.slice(0, 3);
let b_slice = b.slice(1, 3);
test_equal(&a_slice, &b_slice, true);
let a_slice = a.slice(0, 5);
let b_slice = b.slice(1, 5);
test_equal(&a_slice, &b_slice, false);
let a_slice = a.slice(4, 1);
let b_slice = b.slice(5, 1);
test_equal(&a_slice, &b_slice, true);
let a_slice = a.slice(3, 3);
let b_slice = b.slice(4, 3);
test_equal(&a_slice, &b_slice, false);
let a_slice = a.slice(1, 3);
let b_slice = b.slice(2, 3);
test_equal(&a_slice, &b_slice, false);
let b = create_decimal_array(&[
None,
None,
None,
Some(-8_887_000_000),
Some(-3_000),
None,
]);
let a_slice = a.slice(1, 3);
let b_slice = b.slice(1, 3);
test_equal(&a_slice, &b_slice, true);
}
    /// Builds `ArrayData` for a `FixedSizeListArray<Int32>` with list
    /// size 3; `None` entries become null lists.
    /// (The previous comment said "2 value lengths", but the builder is
    /// constructed with a value length of 3.)
    fn create_fixed_size_list_array<U: AsRef<[i32]>, T: AsRef<[Option<U>]>>(
        data: T,
    ) -> ArrayData {
        let mut builder = FixedSizeListBuilder::new(Int32Builder::new(10), 3);
        for d in data.as_ref() {
            if let Some(v) = d {
                builder.values().append_slice(v.as_ref()).unwrap();
                builder.append(true).unwrap()
            } else {
                // A null list still needs placeholder child values so the
                // child array stays aligned with the fixed list size.
                for _ in 0..builder.value_length() {
                    builder.values().append_null().unwrap();
                }
                builder.append(false).unwrap()
            }
        }
        builder.finish().data().clone()
    }
#[test]
fn test_fixed_size_list_equal() {
let a = create_fixed_size_list_array(&[Some(&[1, 2, 3]), Some(&[4, 5, 6])]);
let b = create_fixed_size_list_array(&[Some(&[1, 2, 3]), Some(&[4, 5, 6])]);
test_equal(&a, &b, true);
let b = create_fixed_size_list_array(&[Some(&[1, 2, 3]), Some(&[4, 5, 7])]);
test_equal(&a, &b, false);
}
// Test the case where null_count > 0
#[test]
fn test_fixed_list_null() {
let a = create_fixed_size_list_array(&[
Some(&[1, 2, 3]),
None,
None,
Some(&[4, 5, 6]),
None,
None,
]);
let b = create_fixed_size_list_array(&[
Some(&[1, 2, 3]),
None,
None,
Some(&[4, 5, 6]),
None,
None,
]);
test_equal(&a, &b, true);
let b = create_fixed_size_list_array(&[
Some(&[1, 2, 3]),
None,
Some(&[7, 8, 9]),
Some(&[4, 5, 6]),
None,
None,
]);
test_equal(&a, &b, false);
let b = create_fixed_size_list_array(&[
Some(&[1, 2, 3]),
None,
None,
Some(&[3, 6, 9]),
None,
None,
]);
test_equal(&a, &b, false);
}
#[test]
fn test_fixed_list_offsets() {
// Test the case where offset != 0
let a = create_fixed_size_list_array(&[
Some(&[1, 2, 3]),
None,
None,
Some(&[4, 5, 6]),
None,
None,
]);
let b = create_fixed_size_list_array(&[
Some(&[1, 2, 3]),
None,
None,
Some(&[3, 6, 9]),
None,
None,
]);
let a_slice = a.slice(0, 3);
let b_slice = b.slice(0, 3);
test_equal(&a_slice, &b_slice, true);
let a_slice = a.slice(0, 5);
let b_slice = b.slice(0, 5);
test_equal(&a_slice, &b_slice, false);
let a_slice = a.slice(4, 1);
let b_slice = b.slice(4, 1);
test_equal(&a_slice, &b_slice, true);
}
#[test]
fn test_struct_equal() {
let strings: ArrayRef = Arc::new(StringArray::from(vec![
Some("joe"),
None,
None,
Some("mark"),
Some("doe"),
]));
let ints: ArrayRef = Arc::new(Int32Array::from(vec![
Some(1),
Some(2),
None,
Some(4),
Some(5),
]));
let a =
StructArray::try_from(vec![("f1", strings.clone()), ("f2", ints.clone())])
.unwrap();
let a = a.data();
let b = StructArray::try_from(vec![("f1", strings), ("f2", ints)]).unwrap();
let b = b.data();
test_equal(a, b, true);
}
#[test]
fn test_struct_equal_null() {
let strings: ArrayRef = Arc::new(StringArray::from(vec![
Some("joe"),
None,
None,
Some("mark"),
Some("doe"),
]));
let ints: ArrayRef = Arc::new(Int32Array::from(vec![
Some(1),
Some(2),
None,
Some(4),
Some(5),
]));
let ints_non_null: ArrayRef = Arc::new(Int32Array::from(vec![1, 2, 3, 4, 0]));
let a = ArrayData::builder(DataType::Struct(vec![
Field::new("f1", DataType::Utf8, true),
Field::new("f2", DataType::Int32, true),
]))
.null_bit_buffer(Buffer::from(vec![0b00001011]))
.len(5)
.add_child_data(strings.data_ref().clone())
.add_child_data(ints.data_ref().clone())
.build()
.unwrap();
let a = crate::array::make_array(a);
let b = ArrayData::builder(DataType::Struct(vec![
Field::new("f1", DataType::Utf8, true),
Field::new("f2", DataType::Int32, true),
]))
.null_bit_buffer(Buffer::from(vec![0b00001011]))
.len(5)
.add_child_data(strings.data_ref().clone())
.add_child_data(ints_non_null.data_ref().clone())
.build()
.unwrap();
let b = crate::array::make_array(b);
test_equal(a.data_ref(), b.data_ref(), true);
// test with arrays that are not equal
let c_ints_non_null: ArrayRef = Arc::new(Int32Array::from(vec![1, 2, 3, 0, 4]));
let c = ArrayData::builder(DataType::Struct(vec![
Field::new("f1", DataType::Utf8, true),
Field::new("f2", DataType::Int32, true),
]))
.null_bit_buffer(Buffer::from(vec![0b00001011]))
.len(5)
.add_child_data(strings.data_ref().clone())
.add_child_data(c_ints_non_null.data_ref().clone())
.build()
.unwrap();
let c = crate::array::make_array(c);
test_equal(a.data_ref(), c.data_ref(), false);
// test a nested struct
let a = ArrayData::builder(DataType::Struct(vec![Field::new(
"f3",
a.data_type().clone(),
true,
)]))
.null_bit_buffer(Buffer::from(vec![0b00011110]))
.len(5)
.add_child_data(a.data_ref().clone())
.build()
.unwrap();
let a = crate::array::make_array(a);
// reconstruct b, but with different data where the first struct is null
let strings: ArrayRef = Arc::new(StringArray::from(vec![
Some("joanne"), // difference
None,
None,
Some("mark"),
Some("doe"),
]));
let b = ArrayData::builder(DataType::Struct(vec![
Field::new("f1", DataType::Utf8, true),
Field::new("f2", DataType::Int32, true),
]))
.null_bit_buffer(Buffer::from(vec![0b00001011]))
.len(5)
.add_child_data(strings.data_ref().clone())
.add_child_data(ints_non_null.data_ref().clone())
.build()
.unwrap();
let b = ArrayData::builder(DataType::Struct(vec![Field::new(
"f3",
b.data_type().clone(),
true,
)]))
.null_bit_buffer(Buffer::from(vec![0b00011110]))
.len(5)
.add_child_data(b)
.build()
.unwrap();
let b = crate::array::make_array(b);
test_equal(a.data_ref(), b.data_ref(), true);
}
#[test]
fn test_struct_equal_null_variable_size() {
// the string arrays differ, but where the struct array is null
let strings1: ArrayRef = Arc::new(StringArray::from(vec![
Some("joe"),
None,
None,
Some("mark"),
Some("doel"),
]));
let strings2: ArrayRef = Arc::new(StringArray::from(vec![
Some("joel"),
None,
None,
Some("mark"),
Some("doe"),
]));
let a = ArrayData::builder(DataType::Struct(vec![Field::new(
"f1",
DataType::Utf8,
true,
)]))
.null_bit_buffer(Buffer::from(vec![0b00001010]))
.len(5)
.add_child_data(strings1.data_ref().clone())
.build()
.unwrap();
let a = crate::array::make_array(a);
let b = ArrayData::builder(DataType::Struct(vec![Field::new(
"f1",
DataType::Utf8,
true,
)]))
.null_bit_buffer(Buffer::from(vec![0b00001010]))
.len(5)
.add_child_data(strings2.data_ref().clone())
.build()
.unwrap();
let b = crate::array::make_array(b);
test_equal(a.data_ref(), b.data_ref(), true);
// test with arrays that are not equal
let strings3: ArrayRef = Arc::new(StringArray::from(vec![
Some("mark"),
None,
None,
Some("doe"),
Some("joe"),
]));
let c = ArrayData::builder(DataType::Struct(vec![Field::new(
"f1",
DataType::Utf8,
true,
)]))
.null_bit_buffer(Buffer::from(vec![0b00001011]))
.len(5)
.add_child_data(strings3.data_ref().clone())
.build()
.unwrap();
let c = crate::array::make_array(c);
test_equal(a.data_ref(), c.data_ref(), false);
}
    /// Builds dictionary `ArrayData` with `Int16` keys from the given
    /// dictionary `values` and logical entries `keys` (`None` → null).
    fn create_dictionary_array(values: &[&str], keys: &[Option<&str>]) -> ArrayData {
        let values = StringArray::from(values.to_vec());
        let mut builder = StringDictionaryBuilder::new_with_dictionary(
            PrimitiveBuilder::<Int16Type>::new(3),
            &values,
        )
        .unwrap();
        for key in keys {
            if let Some(v) = key {
                builder.append(v).unwrap();
            } else {
                builder.append_null().unwrap()
            }
        }
        builder.finish().data().clone()
    }
#[test]
fn test_dictionary_equal() {
// (a, b, c), (1, 2, 1, 3) => (a, b, a, c)
let a = create_dictionary_array(
&["a", "b", "c"],
&[Some("a"), Some("b"), Some("a"), Some("c")],
);
// different representation (values and keys are swapped), same result
let b = create_dictionary_array(
&["a", "c", "b"],
&[Some("a"), Some("b"), Some("a"), Some("c")],
);
test_equal(&a, &b, true);
// different len
let b =
create_dictionary_array(&["a", "c", "b"], &[Some("a"), Some("b"), Some("a")]);
test_equal(&a, &b, false);
// different key
let b = create_dictionary_array(
&["a", "c", "b"],
&[Some("a"), Some("b"), Some("a"), Some("a")],
);
test_equal(&a, &b, false);
// different values, same keys
let b = create_dictionary_array(
&["a", "b", "d"],
&[Some("a"), Some("b"), Some("a"), Some("d")],
);
test_equal(&a, &b, false);
}
#[test]
fn test_dictionary_equal_null() {
// (a, b, c), (1, 2, 1, 3) => (a, b, a, c)
let a = create_dictionary_array(
&["a", "b", "c"],
&[Some("a"), None, Some("a"), Some("c")],
);
// equal to self
test_equal(&a, &a, true);
// different representation (values and keys are swapped), same result
let b = create_dictionary_array(
&["a", "c", "b"],
&[Some("a"), None, Some("a"), Some("c")],
);
test_equal(&a, &b, true);
// different null position
let b = create_dictionary_array(
&["a", "c", "b"],
&[Some("a"), Some("b"), Some("a"), None],
);
test_equal(&a, &b, false);
// different key
let b = create_dictionary_array(
&["a", "c", "b"],
&[Some("a"), None, Some("a"), Some("a")],
);
test_equal(&a, &b, false);
// different values, same keys
let b = create_dictionary_array(
&["a", "b", "d"],
&[Some("a"), None, Some("a"), Some("d")],
);
test_equal(&a, &b, false);
}
}
| 31.684615 | 104 | 0.51833 |
01fdd3a4d6c833e7a4fbabbada9d87c0f89ed617 | 23,380 | #[doc = "Register `SECDIAG` reader"]
// Newtype over the generic register reader so SECDIAG-specific field
// accessors can be added; `Deref` exposes the generic reader API unchanged.
pub struct R(crate::R<SECDIAG_SPEC>);
impl core::ops::Deref for R {
    type Target = crate::R<SECDIAG_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl From<crate::R<SECDIAG_SPEC>> for R {
    #[inline(always)]
    fn from(reader: crate::R<SECDIAG_SPEC>) -> Self {
        R(reader)
    }
}
#[doc = "Battery-On-Reset Flag. This bit is set once the back up battery is conneted.\n\nValue on reset: 1"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BORF_A {
#[doc = "0: The event has not occurred."]
NOEVENT = 0,
#[doc = "1: The event has occurred."]
OCCURRED = 1,
}
impl From<BORF_A> for bool {
#[inline(always)]
fn from(variant: BORF_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `BORF` reader - Battery-On-Reset Flag. This bit is set once the back up battery is conneted."]
pub struct BORF_R(crate::FieldReader<bool, BORF_A>);
impl BORF_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
BORF_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BORF_A {
match self.bits {
false => BORF_A::NOEVENT,
true => BORF_A::OCCURRED,
}
}
#[doc = "Checks if the value of the field is `NOEVENT`"]
#[inline(always)]
pub fn is_no_event(&self) -> bool {
**self == BORF_A::NOEVENT
}
#[doc = "Checks if the value of the field is `OCCURRED`"]
#[inline(always)]
pub fn is_occurred(&self) -> bool {
**self == BORF_A::OCCURRED
}
}
impl core::ops::Deref for BORF_R {
type Target = crate::FieldReader<bool, BORF_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Die Shield Flag.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SHIELDF_A {
#[doc = "0: The event has not occurred."]
NOEVENT = 0,
#[doc = "1: The event has occurred."]
OCCURRED = 1,
}
impl From<SHIELDF_A> for bool {
#[inline(always)]
fn from(variant: SHIELDF_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `SHIELDF` reader - Die Shield Flag."]
pub struct SHIELDF_R(crate::FieldReader<bool, SHIELDF_A>);
impl SHIELDF_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SHIELDF_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SHIELDF_A {
match self.bits {
false => SHIELDF_A::NOEVENT,
true => SHIELDF_A::OCCURRED,
}
}
#[doc = "Checks if the value of the field is `NOEVENT`"]
#[inline(always)]
pub fn is_no_event(&self) -> bool {
**self == SHIELDF_A::NOEVENT
}
#[doc = "Checks if the value of the field is `OCCURRED`"]
#[inline(always)]
pub fn is_occurred(&self) -> bool {
**self == SHIELDF_A::OCCURRED
}
}
impl core::ops::Deref for SHIELDF_R {
type Target = crate::FieldReader<bool, SHIELDF_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Low Temperature Detect.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LOTEMP_A {
#[doc = "0: The event has not occurred."]
NOEVENT = 0,
#[doc = "1: The event has occurred."]
OCCURRED = 1,
}
impl From<LOTEMP_A> for bool {
#[inline(always)]
fn from(variant: LOTEMP_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `LOTEMP` reader - Low Temperature Detect."]
pub struct LOTEMP_R(crate::FieldReader<bool, LOTEMP_A>);
impl LOTEMP_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
LOTEMP_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LOTEMP_A {
match self.bits {
false => LOTEMP_A::NOEVENT,
true => LOTEMP_A::OCCURRED,
}
}
#[doc = "Checks if the value of the field is `NOEVENT`"]
#[inline(always)]
pub fn is_no_event(&self) -> bool {
**self == LOTEMP_A::NOEVENT
}
#[doc = "Checks if the value of the field is `OCCURRED`"]
#[inline(always)]
pub fn is_occurred(&self) -> bool {
**self == LOTEMP_A::OCCURRED
}
}
impl core::ops::Deref for LOTEMP_R {
type Target = crate::FieldReader<bool, LOTEMP_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "High Temperature Detect.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum HITEMP_A {
#[doc = "0: The event has not occurred."]
NOEVENT = 0,
#[doc = "1: The event has occurred."]
OCCURRED = 1,
}
impl From<HITEMP_A> for bool {
#[inline(always)]
fn from(variant: HITEMP_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `HITEMP` reader - High Temperature Detect."]
pub struct HITEMP_R(crate::FieldReader<bool, HITEMP_A>);
impl HITEMP_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
HITEMP_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> HITEMP_A {
match self.bits {
false => HITEMP_A::NOEVENT,
true => HITEMP_A::OCCURRED,
}
}
#[doc = "Checks if the value of the field is `NOEVENT`"]
#[inline(always)]
pub fn is_no_event(&self) -> bool {
**self == HITEMP_A::NOEVENT
}
#[doc = "Checks if the value of the field is `OCCURRED`"]
#[inline(always)]
pub fn is_occurred(&self) -> bool {
**self == HITEMP_A::OCCURRED
}
}
impl core::ops::Deref for HITEMP_R {
type Target = crate::FieldReader<bool, HITEMP_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Battery Undervoltage Detect.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BATLO_A {
#[doc = "0: The event has not occurred."]
NOEVENT = 0,
#[doc = "1: The event has occurred."]
OCCURRED = 1,
}
impl From<BATLO_A> for bool {
#[inline(always)]
fn from(variant: BATLO_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `BATLO` reader - Battery Undervoltage Detect."]
pub struct BATLO_R(crate::FieldReader<bool, BATLO_A>);
impl BATLO_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
BATLO_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BATLO_A {
match self.bits {
false => BATLO_A::NOEVENT,
true => BATLO_A::OCCURRED,
}
}
#[doc = "Checks if the value of the field is `NOEVENT`"]
#[inline(always)]
pub fn is_no_event(&self) -> bool {
**self == BATLO_A::NOEVENT
}
#[doc = "Checks if the value of the field is `OCCURRED`"]
#[inline(always)]
pub fn is_occurred(&self) -> bool {
**self == BATLO_A::OCCURRED
}
}
impl core::ops::Deref for BATLO_R {
type Target = crate::FieldReader<bool, BATLO_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Battery Overvoltage Detect.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum BATHI_A {
#[doc = "0: The event has not occurred."]
NOEVENT = 0,
#[doc = "1: The event has occurred."]
OCCURRED = 1,
}
impl From<BATHI_A> for bool {
#[inline(always)]
fn from(variant: BATHI_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `BATHI` reader - Battery Overvoltage Detect."]
pub struct BATHI_R(crate::FieldReader<bool, BATHI_A>);
impl BATHI_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
BATHI_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> BATHI_A {
match self.bits {
false => BATHI_A::NOEVENT,
true => BATHI_A::OCCURRED,
}
}
#[doc = "Checks if the value of the field is `NOEVENT`"]
#[inline(always)]
pub fn is_no_event(&self) -> bool {
**self == BATHI_A::NOEVENT
}
#[doc = "Checks if the value of the field is `OCCURRED`"]
#[inline(always)]
pub fn is_occurred(&self) -> bool {
**self == BATHI_A::OCCURRED
}
}
impl core::ops::Deref for BATHI_R {
type Target = crate::FieldReader<bool, BATHI_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Dynamic Sensor Flag. This bit is set to 1 when any of the EXTSTAT bits are set.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DYNF_A {
#[doc = "0: The event has not occurred."]
NOEVENT = 0,
#[doc = "1: The event has occurred."]
OCCURRED = 1,
}
impl From<DYNF_A> for bool {
#[inline(always)]
fn from(variant: DYNF_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `DYNF` reader - Dynamic Sensor Flag. This bit is set to 1 when any of the EXTSTAT bits are set."]
pub struct DYNF_R(crate::FieldReader<bool, DYNF_A>);
impl DYNF_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
DYNF_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> DYNF_A {
match self.bits {
false => DYNF_A::NOEVENT,
true => DYNF_A::OCCURRED,
}
}
#[doc = "Checks if the value of the field is `NOEVENT`"]
#[inline(always)]
pub fn is_no_event(&self) -> bool {
**self == DYNF_A::NOEVENT
}
#[doc = "Checks if the value of the field is `OCCURRED`"]
#[inline(always)]
pub fn is_occurred(&self) -> bool {
**self == DYNF_A::OCCURRED
}
}
impl core::ops::Deref for DYNF_R {
type Target = crate::FieldReader<bool, DYNF_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "AES Key Transfer. This bit is set to 1 when AES Key has been transferred from the TRNG to the battery backed AES key register. This bit can only be reset by a BOR.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum AESKT_A {
#[doc = "0: Key has not been transferred."]
INCOMPLETE = 0,
#[doc = "1: Key has been transferred."]
COMPLETE = 1,
}
impl From<AESKT_A> for bool {
#[inline(always)]
fn from(variant: AESKT_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `AESKT` reader - AES Key Transfer. This bit is set to 1 when AES Key has been transferred from the TRNG to the battery backed AES key register. This bit can only be reset by a BOR."]
pub struct AESKT_R(crate::FieldReader<bool, AESKT_A>);
impl AESKT_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
AESKT_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> AESKT_A {
match self.bits {
false => AESKT_A::INCOMPLETE,
true => AESKT_A::COMPLETE,
}
}
#[doc = "Checks if the value of the field is `INCOMPLETE`"]
#[inline(always)]
pub fn is_incomplete(&self) -> bool {
**self == AESKT_A::INCOMPLETE
}
#[doc = "Checks if the value of the field is `COMPLETE`"]
#[inline(always)]
pub fn is_complete(&self) -> bool {
**self == AESKT_A::COMPLETE
}
}
impl core::ops::Deref for AESKT_R {
type Target = crate::FieldReader<bool, AESKT_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "External Sensor 0 Detect.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EXTSTAT0_A {
#[doc = "0: The event has not occurred."]
NOEVENT = 0,
#[doc = "1: The event has occurred."]
OCCURRED = 1,
}
impl From<EXTSTAT0_A> for bool {
#[inline(always)]
fn from(variant: EXTSTAT0_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `EXTSTAT0` reader - External Sensor 0 Detect."]
pub struct EXTSTAT0_R(crate::FieldReader<bool, EXTSTAT0_A>);
impl EXTSTAT0_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
EXTSTAT0_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> EXTSTAT0_A {
match self.bits {
false => EXTSTAT0_A::NOEVENT,
true => EXTSTAT0_A::OCCURRED,
}
}
#[doc = "Checks if the value of the field is `NOEVENT`"]
#[inline(always)]
pub fn is_no_event(&self) -> bool {
**self == EXTSTAT0_A::NOEVENT
}
#[doc = "Checks if the value of the field is `OCCURRED`"]
#[inline(always)]
pub fn is_occurred(&self) -> bool {
**self == EXTSTAT0_A::OCCURRED
}
}
impl core::ops::Deref for EXTSTAT0_R {
type Target = crate::FieldReader<bool, EXTSTAT0_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "External Sensor 1 Detect.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EXTSTAT1_A {
    #[doc = "0: The event has not occurred."]
    NOEVENT = 0,
    #[doc = "1: The event has occurred."]
    OCCURRED = 1,
}
// Conversion back to the raw bit value of the field.
impl From<EXTSTAT1_A> for bool {
    #[inline(always)]
    fn from(variant: EXTSTAT1_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `EXTSTAT1` reader - External Sensor 1 Detect."]
pub struct EXTSTAT1_R(crate::FieldReader<bool, EXTSTAT1_A>);
impl EXTSTAT1_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        EXTSTAT1_R(crate::FieldReader::new(bits))
    }
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EXTSTAT1_A {
        match self.bits {
            false => EXTSTAT1_A::NOEVENT,
            true => EXTSTAT1_A::OCCURRED,
        }
    }
    #[doc = "Checks if the value of the field is `NOEVENT`"]
    #[inline(always)]
    pub fn is_no_event(&self) -> bool {
        **self == EXTSTAT1_A::NOEVENT
    }
    #[doc = "Checks if the value of the field is `OCCURRED`"]
    #[inline(always)]
    pub fn is_occurred(&self) -> bool {
        **self == EXTSTAT1_A::OCCURRED
    }
}
// Auto-deref exposes the wrapped `FieldReader`'s methods on the typed reader.
impl core::ops::Deref for EXTSTAT1_R {
    type Target = crate::FieldReader<bool, EXTSTAT1_A>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "External Sensor 2 Detect.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EXTSTAT2_A {
    #[doc = "0: The event has not occurred."]
    NOEVENT = 0,
    #[doc = "1: The event has occurred."]
    OCCURRED = 1,
}
// Conversion back to the raw bit value of the field.
impl From<EXTSTAT2_A> for bool {
    #[inline(always)]
    fn from(variant: EXTSTAT2_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `EXTSTAT2` reader - External Sensor 2 Detect."]
pub struct EXTSTAT2_R(crate::FieldReader<bool, EXTSTAT2_A>);
impl EXTSTAT2_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        EXTSTAT2_R(crate::FieldReader::new(bits))
    }
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EXTSTAT2_A {
        match self.bits {
            false => EXTSTAT2_A::NOEVENT,
            true => EXTSTAT2_A::OCCURRED,
        }
    }
    #[doc = "Checks if the value of the field is `NOEVENT`"]
    #[inline(always)]
    pub fn is_no_event(&self) -> bool {
        **self == EXTSTAT2_A::NOEVENT
    }
    #[doc = "Checks if the value of the field is `OCCURRED`"]
    #[inline(always)]
    pub fn is_occurred(&self) -> bool {
        **self == EXTSTAT2_A::OCCURRED
    }
}
// Auto-deref exposes the wrapped `FieldReader`'s methods on the typed reader.
impl core::ops::Deref for EXTSTAT2_R {
    type Target = crate::FieldReader<bool, EXTSTAT2_A>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "External Sensor 3 Detect.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EXTSTAT3_A {
    #[doc = "0: The event has not occurred."]
    NOEVENT = 0,
    #[doc = "1: The event has occurred."]
    OCCURRED = 1,
}
// Conversion back to the raw bit value of the field.
impl From<EXTSTAT3_A> for bool {
    #[inline(always)]
    fn from(variant: EXTSTAT3_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `EXTSTAT3` reader - External Sensor 3 Detect."]
pub struct EXTSTAT3_R(crate::FieldReader<bool, EXTSTAT3_A>);
impl EXTSTAT3_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        EXTSTAT3_R(crate::FieldReader::new(bits))
    }
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EXTSTAT3_A {
        match self.bits {
            false => EXTSTAT3_A::NOEVENT,
            true => EXTSTAT3_A::OCCURRED,
        }
    }
    #[doc = "Checks if the value of the field is `NOEVENT`"]
    #[inline(always)]
    pub fn is_no_event(&self) -> bool {
        **self == EXTSTAT3_A::NOEVENT
    }
    #[doc = "Checks if the value of the field is `OCCURRED`"]
    #[inline(always)]
    pub fn is_occurred(&self) -> bool {
        **self == EXTSTAT3_A::OCCURRED
    }
}
// Auto-deref exposes the wrapped `FieldReader`'s methods on the typed reader.
impl core::ops::Deref for EXTSTAT3_R {
    type Target = crate::FieldReader<bool, EXTSTAT3_A>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "External Sensor 4 Detect.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EXTSTAT4_A {
    #[doc = "0: The event has not occurred."]
    NOEVENT = 0,
    #[doc = "1: The event has occurred."]
    OCCURRED = 1,
}
// Conversion back to the raw bit value of the field.
impl From<EXTSTAT4_A> for bool {
    #[inline(always)]
    fn from(variant: EXTSTAT4_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `EXTSTAT4` reader - External Sensor 4 Detect."]
pub struct EXTSTAT4_R(crate::FieldReader<bool, EXTSTAT4_A>);
impl EXTSTAT4_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        EXTSTAT4_R(crate::FieldReader::new(bits))
    }
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EXTSTAT4_A {
        match self.bits {
            false => EXTSTAT4_A::NOEVENT,
            true => EXTSTAT4_A::OCCURRED,
        }
    }
    #[doc = "Checks if the value of the field is `NOEVENT`"]
    #[inline(always)]
    pub fn is_no_event(&self) -> bool {
        **self == EXTSTAT4_A::NOEVENT
    }
    #[doc = "Checks if the value of the field is `OCCURRED`"]
    #[inline(always)]
    pub fn is_occurred(&self) -> bool {
        **self == EXTSTAT4_A::OCCURRED
    }
}
// Auto-deref exposes the wrapped `FieldReader`'s methods on the typed reader.
impl core::ops::Deref for EXTSTAT4_R {
    type Target = crate::FieldReader<bool, EXTSTAT4_A>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "External Sensor 5 Detect.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EXTSTAT5_A {
    #[doc = "0: The event has not occurred."]
    NOEVENT = 0,
    #[doc = "1: The event has occurred."]
    OCCURRED = 1,
}
// Conversion back to the raw bit value of the field.
impl From<EXTSTAT5_A> for bool {
    #[inline(always)]
    fn from(variant: EXTSTAT5_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Field `EXTSTAT5` reader - External Sensor 5 Detect."]
pub struct EXTSTAT5_R(crate::FieldReader<bool, EXTSTAT5_A>);
impl EXTSTAT5_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        EXTSTAT5_R(crate::FieldReader::new(bits))
    }
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> EXTSTAT5_A {
        match self.bits {
            false => EXTSTAT5_A::NOEVENT,
            true => EXTSTAT5_A::OCCURRED,
        }
    }
    #[doc = "Checks if the value of the field is `NOEVENT`"]
    #[inline(always)]
    pub fn is_no_event(&self) -> bool {
        **self == EXTSTAT5_A::NOEVENT
    }
    #[doc = "Checks if the value of the field is `OCCURRED`"]
    #[inline(always)]
    pub fn is_occurred(&self) -> bool {
        **self == EXTSTAT5_A::OCCURRED
    }
}
// Auto-deref exposes the wrapped `FieldReader`'s methods on the typed reader.
impl core::ops::Deref for EXTSTAT5_R {
    type Target = crate::FieldReader<bool, EXTSTAT5_A>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// Field accessors for the SECDIAG register value: each method masks and
// shifts a single bit out of the cached `self.bits` word. Bits 1 and 9-15
// have no accessor here — presumably reserved in the SVD; confirm against
// the device datasheet.
impl R {
    #[doc = "Bit 0 - Battery-On-Reset Flag. This bit is set once the back up battery is conneted."]
    #[inline(always)]
    pub fn borf(&self) -> BORF_R {
        BORF_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 2 - Die Shield Flag."]
    #[inline(always)]
    pub fn shieldf(&self) -> SHIELDF_R {
        SHIELDF_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - Low Temperature Detect."]
    #[inline(always)]
    pub fn lotemp(&self) -> LOTEMP_R {
        LOTEMP_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - High Temperature Detect."]
    #[inline(always)]
    pub fn hitemp(&self) -> HITEMP_R {
        HITEMP_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - Battery Undervoltage Detect."]
    #[inline(always)]
    pub fn batlo(&self) -> BATLO_R {
        BATLO_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - Battery Overvoltage Detect."]
    #[inline(always)]
    pub fn bathi(&self) -> BATHI_R {
        BATHI_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - Dynamic Sensor Flag. This bit is set to 1 when any of the EXTSTAT bits are set."]
    #[inline(always)]
    pub fn dynf(&self) -> DYNF_R {
        DYNF_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bit 8 - AES Key Transfer. This bit is set to 1 when AES Key has been transferred from the TRNG to the battery backed AES key register. This bit can only be reset by a BOR."]
    #[inline(always)]
    pub fn aeskt(&self) -> AESKT_R {
        AESKT_R::new(((self.bits >> 8) & 0x01) != 0)
    }
    #[doc = "Bit 16 - External Sensor 0 Detect."]
    #[inline(always)]
    pub fn extstat0(&self) -> EXTSTAT0_R {
        EXTSTAT0_R::new(((self.bits >> 16) & 0x01) != 0)
    }
    #[doc = "Bit 17 - External Sensor 1 Detect."]
    #[inline(always)]
    pub fn extstat1(&self) -> EXTSTAT1_R {
        EXTSTAT1_R::new(((self.bits >> 17) & 0x01) != 0)
    }
    #[doc = "Bit 18 - External Sensor 2 Detect."]
    #[inline(always)]
    pub fn extstat2(&self) -> EXTSTAT2_R {
        EXTSTAT2_R::new(((self.bits >> 18) & 0x01) != 0)
    }
    #[doc = "Bit 19 - External Sensor 3 Detect."]
    #[inline(always)]
    pub fn extstat3(&self) -> EXTSTAT3_R {
        EXTSTAT3_R::new(((self.bits >> 19) & 0x01) != 0)
    }
    #[doc = "Bit 20 - External Sensor 4 Detect."]
    #[inline(always)]
    pub fn extstat4(&self) -> EXTSTAT4_R {
        EXTSTAT4_R::new(((self.bits >> 20) & 0x01) != 0)
    }
    #[doc = "Bit 21 - External Sensor 5 Detect."]
    #[inline(always)]
    pub fn extstat5(&self) -> EXTSTAT5_R {
        EXTSTAT5_R::new(((self.bits >> 21) & 0x01) != 0)
    }
}
#[doc = "Security Diagnostic Register.\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [secdiag](index.html) module"]
pub struct SECDIAG_SPEC;
// SECDIAG is a 32-bit register.
impl crate::RegisterSpec for SECDIAG_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [secdiag::R](R) reader structure"]
impl crate::Readable for SECDIAG_SPEC {
    type Reader = R;
}
#[doc = "`reset()` method sets SECDIAG to value 0x01"]
impl crate::Resettable for SECDIAG_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        // Bit 0 (BORF, Battery-On-Reset Flag) is set out of reset.
        0x01
    }
}
| 30.682415 | 239 | 0.587511 |
64e1ea2eae5285ab340994bf1e7bcd5d53448e1a | 8,417 | #![cfg_attr(test, deny(warnings))]
// While we're getting used to 2018:
#![warn(rust_2018_idioms)]
// Clippy isn't enforced by CI (@alexcrichton isn't a fan).
#![allow(clippy::blacklisted_name)] // frequently used in tests
#![allow(clippy::cognitive_complexity)] // large project
#![allow(clippy::derive_hash_xor_eq)] // there's an intentional incoherence
#![allow(clippy::explicit_into_iter_loop)] // explicit loops are clearer
#![allow(clippy::explicit_iter_loop)] // explicit loops are clearer
#![allow(clippy::identity_op)] // used for vertical alignment
#![allow(clippy::implicit_hasher)] // large project
#![allow(clippy::large_enum_variant)] // large project
#![allow(clippy::new_without_default)] // explicit is maybe clearer
#![allow(clippy::redundant_closure)] // closures can be less verbose
#![allow(clippy::redundant_closure_call)] // closures over try catch blocks
#![allow(clippy::too_many_arguments)] // large project
#![allow(clippy::type_complexity)] // there's an exceptionally complex type
#![allow(clippy::wrong_self_convention)] // perhaps `Rc` should be special-cased in Clippy?
#![warn(clippy::needless_borrow)]
#![warn(clippy::redundant_clone)]
// Unit is now interned, and would probably be better as pass-by-copy, but
// doing so causes a lot of & and * shenanigans that makes the code arguably
// less clear and harder to read.
#![allow(clippy::trivially_copy_pass_by_ref)]
// exhaustively destructuring ensures future fields are handled
#![allow(clippy::unneeded_field_pattern)]
use std::fmt;
use std::io;
use failure::Error;
use log::debug;
use serde::ser;
use crate::core::shell::Verbosity::Verbose;
use crate::core::Shell;
pub use crate::util::errors::Internal;
pub use crate::util::{CargoResult, CliError, CliResult, Config};
pub const CARGO_ENV: &str = "CARGO";
#[macro_use]
mod macros;
pub mod core;
pub mod ops;
pub mod sources;
pub mod util;
/// Git commit metadata baked into the binary by a configure/make build.
pub struct CommitInfo {
    pub short_commit_hash: String,
    pub commit_hash: String,
    pub commit_date: String,
}

/// Build-configuration metadata, present only for configure/make builds.
pub struct CfgInfo {
    // Information about the Git repository we may have been built from.
    pub commit_info: Option<CommitInfo>,
    // The release channel we were built for.
    pub release_channel: String,
}

/// Version of the running `cargo` binary.
pub struct VersionInfo {
    pub major: u8,
    pub minor: u8,
    pub patch: u8,
    pub pre_release: Option<String>,
    // Information that's only available when we were built with
    // configure/make, rather than Cargo itself.
    pub cfg_info: Option<CfgInfo>,
}

impl fmt::Display for VersionInfo {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "cargo {}.{}.{}", self.major, self.minor, self.patch)?;
        if let Some(cfg) = &self.cfg_info {
            // Non-stable builds advertise their channel and pre-release tag.
            if cfg.release_channel != "stable" {
                write!(f, "-{}", cfg.release_channel)?;
                if let Some(pre) = &self.pre_release {
                    write!(f, "{}", pre)?;
                }
            }
            // Builds from a Git checkout also show commit hash and date.
            if let Some(ci) = &cfg.commit_info {
                write!(f, " ({} {})", ci.short_commit_hash, ci.commit_date)?;
            }
        }
        Ok(())
    }
}
/// Serialize `obj` to a JSON string and print it on stdout.
/// Panics if serialization fails.
pub fn print_json<T: ser::Serialize>(obj: &T) {
    println!("{}", serde_json::to_string(obj).unwrap());
}
pub fn exit_with_error(err: CliError, shell: &mut Shell) -> ! {
debug!("exit_with_error; err={:?}", err);
if let Some(ref err) = err.error {
if let Some(clap_err) = err.downcast_ref::<clap::Error>() {
clap_err.exit()
}
}
let CliError {
error,
exit_code,
unknown,
} = err;
// `exit_code` of 0 means non-fatal error (e.g., docopt version info).
let fatal = exit_code != 0;
let hide = unknown && shell.verbosity() != Verbose;
if let Some(error) = error {
if hide {
drop(shell.error("An unknown error occurred"))
} else if fatal {
drop(shell.error(&error))
} else {
println!("{}", error);
}
if !handle_cause(&error, shell) || hide {
drop(writeln!(
shell.err(),
"\nTo learn more, run the command again \
with --verbose."
));
}
}
std::process::exit(exit_code)
}
/// Print a top-level error and its causes to the shell without exiting.
pub fn handle_error(err: &failure::Error, shell: &mut Shell) {
    debug!("handle_error; err={:?}", err);
    drop(shell.error(err));
    handle_cause(err, shell);
}
fn handle_cause(cargo_err: &Error, shell: &mut Shell) -> bool {
fn print(error: &str, shell: &mut Shell) {
drop(writeln!(shell.err(), "\nCaused by:"));
drop(writeln!(shell.err(), " {}", error));
}
fn print_stderror_causes(error: &dyn std::error::Error, shell: &mut Shell) {
let mut cur = std::error::Error::source(error);
while let Some(err) = cur {
print(&err.to_string(), shell);
cur = std::error::Error::source(err);
}
}
let verbose = shell.verbosity();
// The first error has already been printed to the shell.
for err in cargo_err.iter_causes() {
// If we're not in verbose mode then print remaining errors until one
// marked as `Internal` appears.
if verbose != Verbose && err.downcast_ref::<Internal>().is_some() {
return false;
}
print(&err.to_string(), shell);
// Using the `failure` crate currently means that when using
// `iter_causes` we're only iterating over the `failure` causes, but
// this doesn't include the causes from the standard library `Error`
// trait. We don't have a great way of getting an `&dyn Error` from a
// `&dyn Fail`, so we currently just special case a few errors that are
// known to maybe have causes and we try to print them here.
//
// Note that this isn't an exhaustive match since causes for
// `std::error::Error` aren't the most common thing in the world.
if let Some(io) = err.downcast_ref::<io::Error>() {
print_stderror_causes(io, shell);
}
}
true
}
/// Build the `VersionInfo` for this binary from compile-time environment
/// variables: `CFG_*` values when built via configure/make, otherwise
/// Cargo's own package metadata.
pub fn version() -> VersionInfo {
    // Like `option_env!`, but yields `Option<String>` instead of `Option<&str>`.
    macro_rules! option_env_str {
        ($name:expr) => {
            option_env!($name).map(|s| s.to_string())
        };
    }
    // So this is pretty horrible...
    // There are two versions at play here:
    // - version of cargo-the-binary, which you see when you type `cargo --version`
    // - version of cargo-the-library, which you download from crates.io for use
    // in your packages.
    //
    // We want to make the `binary` version the same as the corresponding Rust/rustc release.
    // At the same time, we want to keep the library version at `0.x`, because Cargo as
    // a library is (and probably will always be) unstable.
    //
    // Historically, Cargo used the same version number for both the binary and the library.
    // Specifically, rustc 1.x.z was paired with cargo 0.x+1.w.
    // We continue to use this scheme for the library, but transform it to 1.x.w for the purposes
    // of `cargo --version`.
    let major = 1;
    let minor = env!("CARGO_PKG_VERSION_MINOR").parse::<u8>().unwrap() - 1;
    let patch = env!("CARGO_PKG_VERSION_PATCH").parse::<u8>().unwrap();
    match option_env!("CFG_RELEASE_CHANNEL") {
        // We have environment variables set up from configure/make.
        Some(_) => {
            let commit_info = option_env!("CFG_COMMIT_HASH").map(|s| CommitInfo {
                commit_hash: s.to_string(),
                short_commit_hash: option_env_str!("CFG_SHORT_COMMIT_HASH").unwrap(),
                commit_date: option_env_str!("CFG_COMMIT_DATE").unwrap(),
            });
            VersionInfo {
                major,
                minor,
                patch,
                pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"),
                cfg_info: Some(CfgInfo {
                    release_channel: option_env_str!("CFG_RELEASE_CHANNEL").unwrap(),
                    commit_info,
                }),
            }
        }
        // We are being compiled by Cargo itself.
        None => VersionInfo {
            major,
            minor,
            patch,
            pre_release: option_env_str!("CARGO_PKG_VERSION_PRE"),
            cfg_info: None,
        },
    }
}
| 34.780992 | 97 | 0.609481 |
e22a4954418c639d62140182860ffca0e3d9378f | 4,235 | use std::collections::HashMap;
use proc_macro2::Span;
use crate::ast::*;
/// Results of the static checks run over a parsed grammar.
pub struct GrammarAnalysis<'a> {
    /// All rules indexed by name (first definition wins on duplicates).
    pub rules: HashMap<String, &'a Rule>,
    /// Left-recursion cycles detected in the grammar.
    pub left_recursion: Vec<RecursionError>,
}
/// Run all static analyses over `grammar`: index rules by name and detect
/// left-recursive cycles.
pub fn check<'a>(grammar: &'a Grammar) -> GrammarAnalysis<'a> {
    let mut rules = HashMap::new();
    for rule in grammar.iter_rules() {
        // `or_insert` keeps the first definition when a name is duplicated.
        rules.entry(rule.name.to_string()).or_insert(rule);
    }
    let left_recursion = RecursionVisitor::check(grammar, &rules);
    GrammarAnalysis { rules, left_recursion }
}
/// Depth-first walker over rule bodies that tracks the current chain of
/// rule invocations in order to detect left-recursive cycles.
struct RecursionVisitor<'a> {
    /// Names of the rules currently being walked (the call chain).
    stack: Vec<String>,
    rules: &'a HashMap<String, &'a Rule>,
    /// Left-recursion errors accumulated during the walk.
    errors: Vec<RecursionError>,
}
/// A left-recursive cycle found in the grammar.
pub struct RecursionError {
    /// Location of the rule reference that closes the cycle.
    pub span: Span,
    /// The chain of rule names forming the cycle, ending where it started.
    pub path: Vec<String>,
}
impl RecursionError {
    /// Human-readable description of the cycle, e.g.
    /// `left recursive rules create an infinite loop: a -> b -> a`.
    pub fn msg(&self) -> String {
        let cycle = self.path.join(" -> ");
        format!("left recursive rules create an infinite loop: {}", cycle)
    }
}
#[derive(Clone)]
/// Per-expression summary propagated up by the recursion walk.
struct RuleInfo {
    /// True if the rule is known to match without consuming any input.
    /// This is a conservative heuristic, if unknown, we return false to avoid reporting false-positives
    /// for left recursion.
    nullable: bool,
}
impl<'a> RecursionVisitor<'a> {
    /// Walk every rule in the grammar and collect left-recursion errors.
    fn check(grammar: &'a Grammar, rules: &HashMap<String, &'a Rule>) -> Vec<RecursionError> {
        let mut visitor = RecursionVisitor {
            rules,
            errors: Vec::new(),
            stack: Vec::new(),
        };
        for rule in grammar.iter_rules() {
            visitor.walk_rule(rule);
            // Every walk must pop what it pushed.
            debug_assert!(visitor.stack.is_empty());
        }
        visitor.errors
    }
    /// Walk one rule body with its name pushed onto the call chain.
    fn walk_rule(&mut self, rule: &'a Rule) -> RuleInfo {
        self.stack.push(rule.name.to_string());
        let res = self.walk_expr(&rule.expr);
        self.stack.pop().unwrap();
        res
    }
    /// Walk an expression, reporting a cycle if a rule reference points back
    /// into the current call chain, and returning whether the expression can
    /// succeed without consuming input (see `RuleInfo`).
    fn walk_expr(&mut self, this_expr: &Expr) -> RuleInfo {
        use self::Expr::*;
        match *this_expr {
            RuleExpr(ref rule_ident, _) => {
                let name = rule_ident.to_string();
                // Referencing a rule already on the stack closes a cycle.
                if let Some(loop_start) = self.stack.iter().position(|caller_name| { caller_name == &name}) {
                    let mut recursive_loop = self.stack[loop_start..].to_vec();
                    recursive_loop.push(name);
                    self.errors.push(RecursionError { path: recursive_loop, span: rule_ident.span()});
                    return RuleInfo { nullable: false };
                }
                if let Some(rule) = self.rules.get(&name) {
                    self.walk_rule(rule)
                } else {
                    // Missing rule would have already been reported
                    RuleInfo { nullable: false }
                }
            }
            ActionExpr(ref elems, ..) => {
                // A sequence is nullable only if every element is; the walk
                // stops at the first element that must consume input.
                for elem in elems {
                    if !self.walk_expr(&elem.expr).nullable {
                        return RuleInfo { nullable: false }
                    }
                }
                RuleInfo { nullable: true }
            }
            ChoiceExpr(ref choices) => {
                // A choice is nullable if any alternative is.
                let mut nullable = false;
                for expr in choices {
                    nullable |= self.walk_expr(expr).nullable;
                }
                RuleInfo { nullable }
            }
            OptionalExpr(ref expr) |
            PosAssertExpr(ref expr) |
            NegAssertExpr(ref expr) => {
                // Optionals and assertions can succeed without consuming
                // input; still walk the inner expression for cycles.
                self.walk_expr(expr);
                RuleInfo { nullable: true }
            }
            Repeat(ref expr, ref bounds, _) => {
                // An unbounded repeat may match zero times.
                let nullable = match bounds {
                    BoundedRepeat::None => true,
                    _ => false,
                };
                let res = self.walk_expr(expr);
                RuleInfo { nullable: res.nullable | nullable }
            }
            MatchStrExpr(ref expr) |
            QuietExpr(ref expr) => self.walk_expr(expr),
            PrecedenceExpr{ .. } => { RuleInfo { nullable: false } },
            | LiteralExpr(_)
            | PatternExpr(_)
            | MethodExpr(_, _)
            | FailExpr(_)
            | MarkerExpr(_) => { RuleInfo { nullable: false } }
            PositionExpr => { RuleInfo { nullable: true} }
        }
    }
}
| 29.823944 | 109 | 0.516647 |
16993a4128ae7e951da18b810a4ee1a2ccdb9896 | 957 | use crate::ffi::object::*;
#[cfg(not(PyPy))]
use crate::ffi::pyport::Py_ssize_t;
// C-compatible mirror of CPython's `PyTupleObject` layout.
#[repr(C)]
pub struct PyTupleObject {
    pub ob_base: PyVarObject,
    // Declared with length 1, but items are accessed by pointer offset
    // (see `PyTuple_GET_ITEM`), so this is the C flexible-array idiom;
    // the real length lives in the variable-size object header.
    pub ob_item: [*mut PyObject; 1],
}
// skipped _PyTuple_Resize
// skipped _PyTuple_MaybeUntrack
/// Macro, trading safety for speed
// skipped _PyTuple_CAST
#[inline]
#[cfg(not(PyPy))]
// Reads the size field without any type check: `op` must point to a valid
// tuple object (mirrors CPython's unchecked macro).
pub unsafe fn PyTuple_GET_SIZE(op: *mut PyObject) -> Py_ssize_t {
    Py_SIZE(op)
}
#[inline]
#[cfg(not(PyPy))]
// Unchecked indexing into the tuple's inline item array: `op` must be a
// valid tuple and `i` in bounds (mirrors CPython's unchecked macro).
pub unsafe fn PyTuple_GET_ITEM(op: *mut PyObject, i: Py_ssize_t) -> *mut PyObject {
    *(*(op as *mut PyTupleObject))
        .ob_item
        .as_ptr()
        .offset(i as isize)
}
/// Macro, *only* to be used to fill in brand new tuples
#[inline]
#[cfg(not(PyPy))]
// Stores `v` at index `i` without bounds check and without dropping any
// previous item — per the doc above, only valid on freshly created tuples.
pub unsafe fn PyTuple_SET_ITEM(op: *mut PyObject, i: Py_ssize_t, v: *mut PyObject) {
    *(*(op as *mut PyTupleObject))
        .ob_item
        .as_mut_ptr()
        .offset(i as isize) = v;
}
// skipped _PyTuple_DebugMallocStats
| 21.75 | 84 | 0.649948 |
26958f867745f9802eba37c700fe6ceba0870367 | 1,010 | use competitive::prelude::*;
#[doc(no_inline)]
pub use competitive::{
algebra::LinearOperation,
data_structure::QueueAggregation,
num::{mint_basic::MInt998244353, MInt},
};
#[verify::verify("https://judge.yosupo.jp/problem/queue_operate_all_composite")]
// Solves "Queue Operate All Composite": maintain a queue of linear maps
// (a, b) over MInt998244353 and answer queries by evaluating the folded
// composition at x as a * x + b.
pub fn queue_operate_all_composite(reader: impl Read, mut writer: impl Write) {
    let s = read_all_unchecked(reader);
    let mut scanner = Scanner::new(&s);
    scan!(scanner, q);
    let mut que = QueueAggregation::<LinearOperation<_>>::new();
    for _ in 0..q {
        // Queries: 0 = push (a, b), 1 = pop front, 2 = evaluate at x.
        match scanner.scan::<usize>() {
            0 => {
                scan!(scanner, ab: (MInt998244353, MInt998244353));
                que.push(ab);
            }
            1 => {
                que.pop();
            }
            2 => {
                scan!(scanner, x: MInt998244353);
                // (a, b) is the aggregate over the whole queue.
                let (a, b) = que.fold_all();
                writeln!(writer, "{}", a * x + b).ok();
            }
            _ => panic!("unknown query"),
        }
    }
}
| 30.606061 | 80 | 0.525743 |
6239331982f8754523fffdd29bcceb2f6b4ba927 | 7,288 | use eyre::Result;
use std::io::{ Read, Write };
use std::process;
use clap::Parser;
use std::path::{ Path ,PathBuf };
use serde::{Deserialize, Serialize};
use subprocess::Exec;
use cacache::WriteOpts;
use sha2::{Sha256, Digest};
fn get_default_cache_dir() -> &'static str {
let mut pb = PathBuf::from(home::home_dir().expect("Cannot operate without a home directory"));
pb.push(".clawbang-cache");
let f = pb.to_string_lossy().into_owned();
Box::leak(f.into_boxed_str())
}
// Command-line options (clap derive). `//` comments are used here instead
// of `///` so they do not leak into the generated --help text.
#[derive(Parser)]
#[clap(author, version, about, long_about = None)]
struct Options {
    // -v/--verbose, counted: each occurrence bumps the level.
    #[clap(short, long, parse(from_occurrences))]
    verbose: usize,
    // Build-cache directory; overridable via $CLAWBANG_DIR,
    // defaults to ~/.clawbang-cache.
    #[clap(long, env="CLAWBANG_DIR", default_value=get_default_cache_dir())]
    cache_dir: PathBuf,
    // Script to run; defaults to reading stdin via /dev/fd/0.
    #[clap(default_value="/dev/fd/0")]
    file: PathBuf,
    // Remaining arguments, forwarded verbatim to the compiled binary.
    rest: Vec<String>,
}
// Metadata stored alongside each cache entry (serialized to JSON).
#[derive(Serialize, Deserialize)]
struct CacheEntry {
    output_id: String, // content ref of the output
    // Exit status of the `cargo build` that produced this entry;
    // 0 means the cached binary is usable.
    exit_code: i32,
}
/// A writer that duplicates everything written through it into an
/// in-memory buffer, similar to the Unix `tee` utility.
struct Tee<Inner: std::io::Write> {
    accum: Vec<u8>,
    inner: Inner,
}

impl<Inner: std::io::Write> Tee<Inner> {
    /// Wraps `inner` with an empty capture buffer.
    pub(crate) fn new(inner: Inner) -> Self {
        Self {
            accum: Vec::new(),
            inner,
        }
    }

    /// Consumes the tee, returning the captured bytes and the inner writer.
    pub(crate) fn into_inner(self) -> (Vec<u8>, Inner) {
        (self.accum, self.inner)
    }
}

impl<Inner: std::io::Write> std::io::Write for Tee<Inner> {
    fn write(&mut self, bytes: &[u8]) -> Result<usize, std::io::Error> {
        // Write to the inner sink first and capture only the bytes it
        // actually accepted. The original extended `accum` with the whole
        // slice before writing, so on a short or failed inner write the
        // capture drifted out of sync with what really reached `inner`.
        let written = self.inner.write(bytes)?;
        self.accum.extend_from_slice(&bytes[..written]);
        Ok(written)
    }

    fn flush(&mut self) -> std::io::Result<()> {
        self.inner.flush()
    }
}
/// Entry point: read the script (file or stdin), look it up in the
/// content-addressed cache, build it on a miss, then run the binary with
/// the remaining arguments and propagate its exit status.
fn main() -> Result<()> {
    // positional arguments check comes first: are we reading from a file or stdin?
    let opts = Options::parse();
    let mut file = std::fs::OpenOptions::new().read(true).open(opts.file)?;
    let mut input = Vec::new();
    file.read_to_end(&mut input)?;
    let source = String::from_utf8(input)?;
    let tempdir = tempfile::tempdir()?;
    let mut pb = PathBuf::from(tempdir.as_ref());
    // The cache key is derived from the full script source.
    let cache_key = get_key(&source);
    let metadata = cacache::metadata_sync(opts.cache_dir.as_path(), &cache_key)?;
    if let Some(metadata) = metadata {
        let cache_entry: CacheEntry = serde_json::from_value(metadata.metadata)?;
        if cache_entry.exit_code == 0 {
            // Cache hit with a successful build: copy the binary into the
            // tempdir and make it executable.
            pb.push("bin");
            cacache::copy_sync(opts.cache_dir.as_path(), &cache_key, pb.as_path())?;
            #[cfg(not(target_os = "windows"))]
            {
                use std::os::unix::fs::PermissionsExt;
                std::fs::set_permissions(pb.as_path(), std::fs::Permissions::from_mode(0o755))?;
            }
        } else {
            // Cached failure: replay the stored build log to stderr and
            // exit with the cached status instead of rebuilding.
            let build_output = cacache::read_sync(opts.cache_dir.as_path(), cache_entry.output_id)?;
            std::io::stderr().write_all(&build_output[..])?;
            process::exit(cache_entry.exit_code);
        }
    } else {
        // Cache miss: build the script into the tempdir.
        // NOTE(review): both branches below are identical — the verbose arm
        // presumably was meant to stream cargo's output instead of passing
        // `std::io::sink()`; confirm the intended behavior.
        if opts.verbose < 1 {
            populate_cache(
                &cache_key,
                opts.cache_dir.as_path(),
                pb.as_path(),
                std::io::sink(),
                source.as_str()
            )?;
        } else {
            populate_cache(
                &cache_key,
                opts.cache_dir.as_path(),
                pb.as_path(),
                std::io::sink(),
                source.as_str()
            )?;
        }
        pb.push("target");
        pb.push("release");
        pb.push("bin");
    }
    // Run the binary from the current working directory with the
    // passthrough arguments.
    let mut exec = Exec::cmd(&pb).cwd(std::env::current_dir()?);
    for arg in opts.rest {
        exec = exec.arg(arg);
    }
    // Propagate the child's exit status (signals map to their raw number).
    std::process::exit(match exec.join()? {
        subprocess::ExitStatus::Exited(xs) => xs as i32,
        subprocess::ExitStatus::Signaled(xs) => xs as i32,
        subprocess::ExitStatus::Other(xs) => xs,
        subprocess::ExitStatus::Undetermined => -1,
    });
}
/// Cache key for a script: lowercase hex of the SHA-256 digest of `input`.
fn get_key(input: impl AsRef<str>) -> String {
    let mut hasher = Sha256::new();
    hasher.update(input.as_ref().as_bytes());
    let digest = hasher.finalize();
    hex::encode(&digest[..])
}
/// Build `source` as a Cargo project inside `tempdir`, teeing the build
/// output to `stdout`, then store the build log and (from the build
/// directory) the produced binary in the cache under `cache_key` along with
/// the build's exit code.
fn populate_cache(
    cache_key: &str,
    cache: impl AsRef<Path>,
    tempdir: impl AsRef<Path>,
    stdout: impl Write,
    source: &str
) -> Result<()> {
    let mut pb = PathBuf::from(tempdir.as_ref());
    // Drop a leading shebang line, if any.
    let trimmed = if source.trim().starts_with("#!") {
        source[source.find("\n").unwrap() + 1..].trim()
    } else {
        source.trim()
    };
    // Split optional TOML frontmatter delimited by `+++` lines from the
    // Rust source that follows it.
    let (frontmatter, rust_src) = if trimmed.starts_with("+++\n") {
        let offset = trimmed[4..].find("\n+++\n").ok_or_else(|| eyre::eyre!("Hit EOF before finding end of frontmatter delimeter, \"+++\"."))?;
        (&trimmed[4..offset + 4], &trimmed[offset + 9..])
    } else {
        (&trimmed[0..0], &trimmed[0..])
    };
    // Treat the frontmatter as a partial Cargo.toml and force the package
    // identity so the build output lands at target/release/bin.
    let mut frontmatter: toml::Value = toml::from_str(frontmatter)?;
    let tbl = frontmatter.as_table_mut().ok_or_else(|| eyre::eyre!("Expected frontmatter to contain valid TOML, but the top level is not a table"))?;
    let cargo_toml_pkg = tbl.entry("package").or_insert(toml::Value::Table(toml::map::Map::new())).as_table_mut().unwrap();
    cargo_toml_pkg.insert("name".to_string(), toml::Value::String("bin".to_string()));
    cargo_toml_pkg.insert("version".to_string(), toml::Value::String("0.0.1".to_string()));
    cargo_toml_pkg.insert("edition".to_string(), toml::Value::String("2021".to_string()));
    let cargo_toml = toml::to_string_pretty(&frontmatter)?;
    // Lay out the project: Cargo.toml and src/main.rs.
    pb.push("Cargo.toml")
    ;
    {
        let mut cargo_toml_file = std::fs::OpenOptions::new().write(true).create(true).open(&pb)?;
        cargo_toml_file.write_all(cargo_toml.as_bytes())?;
    }
    pb.pop();
    pb.push("src");
    std::fs::create_dir(&pb)?;
    pb.push("main.rs");
    {
        let mut src_file = std::fs::OpenOptions::new().write(true).create(true).open(&pb)?;
        src_file.write_all(rust_src.as_bytes())?;
    }
    pb.pop();
    pb.pop();
    // Run the release build with stderr merged into stdout so the whole
    // log can be captured.
    let mut popen = Exec::cmd("cargo")
        .arg("--color")
        .arg("always")
        .arg("build")
        .arg("--release")
        .stdout(subprocess::Redirection::Pipe)
        .stderr(subprocess::Redirection::Merge)
        .cwd(&tempdir)
        .popen()?;
    // Tee the build output to the caller while also capturing it.
    let mut out = Tee::new(stdout);
    while popen.poll().is_none() {
        if let Some(mut pstdout) = popen.stdout.as_mut() {
            std::io::copy(&mut pstdout, &mut out)?;
        }
    }
    let exit_code = match popen.exit_status() {
        Some(subprocess::ExitStatus::Exited(xs)) => xs as i32,
        Some(subprocess::ExitStatus::Signaled(xs)) => xs as i32,
        Some(subprocess::ExitStatus::Other(xs)) => xs,
        _ => 1
    };
    // Store the captured build log by content hash, then record it and the
    // exit code as metadata on the main cache entry.
    let (accum, _) = out.into_inner();
    let output_hash = cacache::write_hash_sync(&cache, accum)?;
    let build_metadata = CacheEntry {
        output_id: output_hash.to_string(),
        exit_code
    };
    pb.push("target");
    pb.push("release");
    pb.push("bin");
    // Store the built binary under `cache_key` and commit the entry.
    let mut binary_file = std::fs::OpenOptions::new().read(true).open(&pb)?;
    let mut writer = WriteOpts::new()
        .algorithm(cacache::Algorithm::Sha256)
        .metadata(serde_json::to_value(build_metadata)?)
        .open_sync(&cache, cache_key)?;
    std::io::copy(&mut binary_file, &mut writer)?;
    writer.commit()?;
    Ok(())
}
| 29.626016 | 149 | 0.57876 |
1d04210b0f01e897350a444e2bb798c09cc9936e | 653 | /* automatically generated by rust-bindgen */
#![allow(dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals)]
// bindgen flattens C++ enumerator aliases into consts, since these values
// duplicate existing variants (e.g. COUNT aliases eCSSPropertyAlias_aa = 2)
// and Rust enums cannot repeat discriminants.
pub const nsCSSPropertyID_eCSSProperty_COUNT_unexistingVariantValue:
    nsCSSPropertyID =
    nsCSSPropertyID::eCSSProperty_COUNT_unexistingVariantValue;
pub const nsCSSPropertyID_eCSSProperty_COUNT: nsCSSPropertyID =
    nsCSSPropertyID::eCSSPropertyAlias_aa;
#[repr(u32)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum nsCSSPropertyID {
    eCSSProperty_a = 0,
    eCSSProperty_b = 1,
    eCSSPropertyAlias_aa = 2,
    eCSSPropertyAlias_bb = 3,
    eCSSProperty_COUNT_unexistingVariantValue = 4,
}
e2aad6307d7246084b7586e7065735c815a9339c | 4,607 | use hyper::Client;
use hyper::net::HttpsConnector;
use hyper::status::StatusCode;
use hyper_rustls::TlsClient;
use std::io::Error;
use std::io::Read;
use std::io::Write;
use std::io::stdout;
use std::fs;
use std::fs::File;
use std::string::String;
use std::sync::mpsc::channel;
use std::thread;
use std::path::Path;
use std::process::Command;
use std::process::Stdio;
use url::Url;
const THREADS: usize = 4;
/// Download and assemble an M3U8 stream: fetch the master playlist at
/// `url`, pick the variant at `index`, download its segments, and
/// concatenate them into `name` with ffmpeg.
pub fn m3u8(url: &str, index: usize, name: &str) {
    // Derive the directory part of the playlist URL: join a dummy segment
    // name and strip it again.
    let mut base_url = Url::parse(url).unwrap().join("a").unwrap().into_string();
    base_url.pop();

    let master = m3u8_to_vector(wget_to_string(url));
    let variant_url = format!("{}{}", &base_url, &master[index]);
    let segments = m3u8_to_vector(wget_to_string(&variant_url));

    download(base_url, &segments);
    ffmpeg_concatenate(&segments, name);
}
/// Extract the payload lines (segment/variant URIs) from an M3U8 playlist,
/// skipping blank lines and `#`-prefixed directives/comments.
///
/// Lines are trimmed, so CRLF playlists (common over HTTP) no longer leave
/// a trailing `\r` on the returned names — the old `split("\n")` loop did.
fn m3u8_to_vector(data: String) -> Vec<String> {
    data.lines()
        .map(str::trim)
        .filter(|line| !line.is_empty() && !line.starts_with('#'))
        .map(String::from)
        .collect()
}
/// Fetch `url` over HTTP(S) and return the response body as a `String`.
///
/// Panics if the request cannot be sent or the server responds with a
/// non-200 status (the body is printed first, since it often carries the
/// server's error message).
fn wget_to_string(url: &str) -> String {
    let client = Client::with_connector(HttpsConnector::new(TlsClient::new()));
    let mut res = client.get(url).send().unwrap();
    let mut buffer = String::new();
    // A read error leaves `buffer` holding whatever was received; the
    // status check below decides whether that is acceptable.
    let _ = res.read_to_string(&mut buffer);
    if res.status == StatusCode::Ok {
        buffer
    } else {
        println!("{}", buffer);
        // Pass format arguments to `panic!` directly instead of the old
        // `panic!(format!(..))`, which built an intermediate String.
        panic!("Error in wget_to_string: {}", res.status);
    }
}
/// Download `url` into the file `name`, skipping the request entirely if
/// `name` already exists. The body is streamed to `name.tmp` and renamed
/// into place at the end, so a partially downloaded file never passes the
/// `exists()` check on a later run.
pub fn wget_to_file(url: String, name: String, client: &Client) -> Result<(), Error> {
    if Path::new(&name).exists() {
        // File exists and was already downloaded.
        Ok(())
    } else {
        let tmp_name = format!("{}.tmp",name);
        //http://stackoverflow.com/a/41451006/1687505
        let mut res = client.get(&*url).send().unwrap();
        let mut file = try!(File::create(&tmp_name));
        // Stream in 128 KiB chunks.
        let mut buf = [0; 128 * 1024];
        loop {
            let len = match res.read(&mut buf) {
                Ok(0) => break, //End of file reached.
                Ok(len) => len,
                Err(err) => panic!(format!("Error in wget_to_file: {}",err)),
            };
            try!(file.write_all(&buf[..len]));
        }
        // Close the file handle so we can rename it
        drop(file);
        fs::rename(tmp_name,name).unwrap();
        Ok(())
    }
}
/// Download all segment `files` (relative to `base_url`) using THREADS
/// worker threads, while also writing the ffmpeg concat list
/// ("toffmpeg.txt"). Work is handed out over per-worker channel pairs:
/// a worker sends `true` when ready, the dispatcher replies with
/// `(true, file)` for a job or `(false, "")` to shut the worker down.
fn download(base_url: String, files: &Vec<String>) {
    let mut comm = Vec::new();
    let length = files.len();
    // Write the concat list ffmpeg will consume later.
    let mut file = File::create("toffmpeg.txt").unwrap();
    for i in 0..length {
        let _ = file.write_all(format!("file '{}'\n",files[i]).as_bytes());
    }
    for _ in 0..THREADS {
        let (slave_tx, main_rx) = channel();
        let (main_tx, slave_rx) = channel();
        comm.push((main_tx,main_rx));
        let base = base_url.clone();
        thread::spawn(move || {
            // Each worker gets its own HTTPS client.
            let client = Client::with_connector(HttpsConnector::new(TlsClient::new()));
            // Ask for work
            slave_tx.send(true).unwrap();
            loop {
                let (status, file) = slave_rx.recv().unwrap();
                if status {
                    wget_to_file(
                        format!("{}{}",base,file),
                        file,
                        &client
                    ).unwrap();
                    slave_tx.send(true).unwrap();
                } else {
                    // Shutdown signal from the dispatcher.
                    break;
                }
            }
        });
    }
    // Dispatcher: poll each worker for readiness and hand out files until
    // every worker has been told to stop.
    // NOTE(review): `try_recv` in a tight loop busy-waits on the CPU; a
    // blocking receive over a shared channel would avoid the spin.
    let mut live_threads = THREADS;
    let mut j = 0;
    let empty_string = String::new();
    'outer: loop {
        for &(ref tx,ref rx) in &comm {
            // Try recieve will return an error if the channel is closed or if it is empty - both are useful cases to skip processing
            if !rx.try_recv().is_err() {
                if j < length {
                    // Flush stdout https://github.com/rust-lang/rust/issues/23818
                    print!("\rProcessing {} of {}",j,length);
                    stdout().flush().ok().expect("Could not flush stdout");
                    tx.send((true,files[j].clone())).unwrap();
                    j += 1;
                } else {
                    tx.send((false,empty_string.clone())).unwrap();
                    live_threads -= 1;
                }
                if live_threads == 0 {
                    break 'outer;
                }
            }
        }
    }
    println!("\rDownload complete. ");
}
/// Concatenate the downloaded segments listed in "toffmpeg.txt" into
/// `name` via ffmpeg, stream ffmpeg's stats output to stdout, then delete
/// the segment files and the concat list.
fn ffmpeg_concatenate(files: &Vec<String>,name: &str) {
    // https://github.com/rust-lang/rust/issues/30098
    let child = Command::new("ffmpeg")
        .args(&["-loglevel","panic","-hide_banner","-stats","-f","concat","-i","toffmpeg.txt","-c:v","copy","-c:a","copy","-bsf:a","aac_adtstoasc"])
        .arg(name)
        .stdout(Stdio::piped())
        .spawn()
        .unwrap_or_else(|e| { panic!("failed to execute process: {}", e) });
    let mut out = child.stdout.unwrap();
    let mut read_buf = [0u8; 64];
    while let Ok(size) = out.read(&mut read_buf) {
        if size == 0 {
            break; // EOF
        }
        // Bug fix: only forward the `size` bytes actually read this
        // iteration; writing the whole 64-byte buffer re-emitted stale
        // bytes left over from earlier (or never-filled) reads.
        stdout().write_all(&read_buf[..size]).unwrap();
    }
    // Clean up the segments and the concat list.
    for value in files {
        let _ = fs::remove_file(value);
    }
    let _ = fs::remove_file("toffmpeg.txt");
}
696da8611e9209ef9452b698d101fbe41f955692 | 628 | use std::{fmt, error};
/// Errors produced when parsing an IRC hostmask.
#[derive(Debug)]
pub enum Error {
    InvalidHostmaskString,
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Resolve the message first, then emit it in one write.
        let msg = match *self {
            Error::InvalidHostmaskString => {
                "invalid hostmask string (hostmask should have the following format: [email protected])"
            }
        };
        write!(f, "{}", msg)
    }
}

impl error::Error for Error {
    fn description(&self) -> &str {
        match *self {
            Error::InvalidHostmaskString => "invalid hostmask string",
        }
    }
}
| 23.259259 | 93 | 0.522293 |
pub use chrono;
| 8 | 15 | 0.75 |
//! JIT-style runtime for WebAssembly using Cranelift.
#![deny(missing_docs, trivial_numeric_casts, unused_extern_crates)]
#![warn(unused_import_braces)]
#![cfg_attr(feature = "std", deny(unstable_features))]
#![cfg_attr(
feature = "clippy",
plugin(clippy(conf_file = "../../clippy.toml"))
)]
#![cfg_attr(
feature = "cargo-clippy",
allow(new_without_default, new_without_default_derive)
)]
#![cfg_attr(
feature = "cargo-clippy",
warn(
float_arithmetic,
mut_mut,
nonminimal_bool,
option_map_unwrap_or,
option_map_unwrap_or_else,
print_stdout,
unicode_not_nfc,
use_self
)
)]
#![cfg_attr(not(feature = "std"), no_std)]
#![cfg_attr(not(feature = "std"), feature(alloc))]
extern crate cranelift_codegen;
extern crate cranelift_entity;
extern crate cranelift_wasm;
extern crate memmap;
extern crate region;
extern crate wasmtime_environ;
#[cfg(not(feature = "std"))]
#[macro_use]
extern crate alloc;
mod execute;
mod instance;
mod memory;
pub use execute::{compile_and_link_module, execute};
pub use instance::Instance;
/// Minimal shim of the `std` paths this crate uses when built without the
/// standard library: re-exports the `alloc`/`core` equivalents under `std`.
#[cfg(not(feature = "std"))]
mod std {
    pub use alloc::{string, vec};
    pub use core::*;
    pub use core::{i32, str, u32};
}
| 23.433962 | 67 | 0.681965 |
// error-pattern:cargo-clippy
#![feature(box_syntax)]
#![feature(never_type)]
#![feature(rustc_private)]
#![feature(slice_patterns)]
#![feature(stmt_expr_attributes)]
#![allow(clippy::missing_docs_in_private_items)]
#![recursion_limit = "256"]
#![warn(rust_2018_idioms, trivial_casts, trivial_numeric_casts)]
#![feature(crate_visibility_modifier)]
// FIXME: switch to something more ergonomic here, once available.
// (Currently there is no way to opt into sysroot crates without `extern crate`.)
#[allow(unused_extern_crates)]
extern crate fmt_macros;
#[allow(unused_extern_crates)]
extern crate rustc;
#[allow(unused_extern_crates)]
extern crate rustc_data_structures;
#[allow(unused_extern_crates)]
extern crate rustc_errors;
#[allow(unused_extern_crates)]
extern crate rustc_mir;
#[allow(unused_extern_crates)]
extern crate rustc_plugin;
#[allow(unused_extern_crates)]
extern crate rustc_target;
#[allow(unused_extern_crates)]
extern crate rustc_typeck;
#[allow(unused_extern_crates)]
extern crate syntax;
#[allow(unused_extern_crates)]
extern crate syntax_pos;
use toml;
/// Macro used to declare a Clippy lint.
///
/// Every lint declaration consists of 4 parts:
///
/// 1. The documentation, which is used for the website
/// 2. The `LINT_NAME`. See [lint naming][lint_naming] on lint naming conventions.
/// 3. The `lint_level`, which is a mapping from *one* of our lint groups to `Allow`, `Warn` or
/// `Deny`. The lint level here has nothing to do with what lint groups the lint is a part of.
/// 4. The `description` that contains a short explanation on what's wrong with code where the
/// lint is triggered.
///
/// Currently the categories `style`, `correctness`, `complexity` and `perf` are enabled by default.
/// As said in the README.md of this repository, if the lint level mapping changes, please update
/// README.md.
///
/// # Example
///
/// ```
/// # #![feature(rustc_private)]
/// # #[allow(unused_extern_crates)]
/// # extern crate rustc;
/// # #[macro_use]
/// # use clippy_lints::declare_clippy_lint;
/// use rustc::declare_tool_lint;
///
/// declare_clippy_lint! {
/// /// **What it does:** Checks for ... (describe what the lint matches).
/// ///
/// /// **Why is this bad?** Supply the reason for linting the code.
/// ///
/// /// **Known problems:** None. (Or describe where it could go wrong.)
/// ///
/// /// **Example:**
/// ///
/// /// ```rust
/// /// // Bad
/// /// Insert a short example of code that triggers the lint
/// ///
/// /// // Good
/// /// Insert a short example of improved code that doesn't trigger the lint
/// /// ```
/// pub LINT_NAME,
/// pedantic,
/// "description"
/// }
/// ```
/// [lint_naming]: https://rust-lang.github.io/rfcs/0344-conventions-galore.html#lints
#[macro_export]
macro_rules! declare_clippy_lint {
    // Each arm maps a lint group to its default level (`Warn`, `Deny` or
    // `Allow`). Every arm forwards the captured doc attributes
    // (`$(#[$attr])*`) into `declare_tool_lint!` — dropping them would
    // silently discard the lint documentation described above.
    { $(#[$attr:meta])* pub $name:tt, style, $description:tt } => {
        declare_tool_lint! {
            $(#[$attr])* pub clippy::$name, Warn, $description, report_in_external_macro: true
        }
    };
    { $(#[$attr:meta])* pub $name:tt, correctness, $description:tt } => {
        declare_tool_lint! {
            $(#[$attr])* pub clippy::$name, Deny, $description, report_in_external_macro: true
        }
    };
    { $(#[$attr:meta])* pub $name:tt, complexity, $description:tt } => {
        declare_tool_lint! {
            $(#[$attr])* pub clippy::$name, Warn, $description, report_in_external_macro: true
        }
    };
    { $(#[$attr:meta])* pub $name:tt, perf, $description:tt } => {
        declare_tool_lint! {
            $(#[$attr])* pub clippy::$name, Warn, $description, report_in_external_macro: true
        }
    };
    { $(#[$attr:meta])* pub $name:tt, pedantic, $description:tt } => {
        declare_tool_lint! {
            $(#[$attr])* pub clippy::$name, Allow, $description, report_in_external_macro: true
        }
    };
    { $(#[$attr:meta])* pub $name:tt, restriction, $description:tt } => {
        declare_tool_lint! {
            $(#[$attr])* pub clippy::$name, Allow, $description, report_in_external_macro: true
        }
    };
    { $(#[$attr:meta])* pub $name:tt, cargo, $description:tt } => {
        declare_tool_lint! {
            $(#[$attr])* pub clippy::$name, Allow, $description, report_in_external_macro: true
        }
    };
    { $(#[$attr:meta])* pub $name:tt, nursery, $description:tt } => {
        declare_tool_lint! {
            $(#[$attr])* pub clippy::$name, Allow, $description, report_in_external_macro: true
        }
    };
    { $(#[$attr:meta])* pub $name:tt, internal, $description:tt } => {
        declare_tool_lint! {
            $(#[$attr])* pub clippy::$name, Allow, $description, report_in_external_macro: true
        }
    };
    { $(#[$attr:meta])* pub $name:tt, internal_warn, $description:tt } => {
        declare_tool_lint! {
            $(#[$attr])* pub clippy::$name, Warn, $description, report_in_external_macro: true
        }
    };
}
mod consts;
#[macro_use]
mod utils;
// begin lints modules, do not remove this comment, it’s used in `update_lints`
pub mod approx_const;
pub mod arithmetic;
pub mod assertions_on_constants;
pub mod assign_ops;
pub mod attrs;
pub mod bit_mask;
pub mod blacklisted_name;
pub mod block_in_if_condition;
pub mod booleans;
pub mod bytecount;
pub mod cargo_common_metadata;
pub mod cognitive_complexity;
pub mod collapsible_if;
pub mod const_static_lifetime;
pub mod copies;
pub mod copy_iterator;
pub mod dbg_macro;
pub mod default_trait_access;
pub mod derive;
pub mod doc;
pub mod double_comparison;
pub mod double_parens;
pub mod drop_bounds;
pub mod drop_forget_ref;
pub mod duration_subsec;
pub mod else_if_without_else;
pub mod empty_enum;
pub mod entry;
pub mod enum_clike;
pub mod enum_glob_use;
pub mod enum_variants;
pub mod eq_op;
pub mod erasing_op;
pub mod escape;
pub mod eta_reduction;
pub mod eval_order_dependence;
pub mod excessive_precision;
pub mod explicit_write;
pub mod fallible_impl_from;
pub mod format;
pub mod formatting;
pub mod functions;
pub mod identity_conversion;
pub mod identity_op;
pub mod if_not_else;
pub mod implicit_return;
pub mod indexing_slicing;
pub mod infallible_destructuring_match;
pub mod infinite_iter;
pub mod inherent_impl;
pub mod inline_fn_without_body;
pub mod int_plus_one;
pub mod invalid_ref;
pub mod items_after_statements;
pub mod large_enum_variant;
pub mod len_zero;
pub mod let_if_seq;
pub mod lifetimes;
pub mod literal_representation;
pub mod loops;
pub mod map_clone;
pub mod map_unit_fn;
pub mod matches;
pub mod mem_discriminant;
pub mod mem_forget;
pub mod mem_replace;
pub mod methods;
pub mod minmax;
pub mod misc;
pub mod misc_early;
pub mod missing_const_for_fn;
pub mod missing_doc;
pub mod missing_inline;
pub mod multiple_crate_versions;
pub mod mut_mut;
pub mod mut_reference;
pub mod mutex_atomic;
pub mod needless_bool;
pub mod needless_borrow;
pub mod needless_borrowed_ref;
pub mod needless_continue;
pub mod needless_pass_by_value;
pub mod needless_update;
pub mod neg_cmp_op_on_partial_ord;
pub mod neg_multiply;
pub mod new_without_default;
pub mod no_effect;
pub mod non_copy_const;
pub mod non_expressive_names;
pub mod ok_if_let;
pub mod open_options;
pub mod overflow_check_conditional;
pub mod panic_unimplemented;
pub mod partialeq_ne_impl;
pub mod precedence;
pub mod ptr;
pub mod ptr_offset_with_cast;
pub mod question_mark;
pub mod ranges;
pub mod redundant_clone;
pub mod redundant_field_names;
pub mod redundant_pattern_matching;
pub mod reference;
pub mod regex;
pub mod replace_consts;
pub mod returns;
pub mod serde_api;
pub mod shadow;
pub mod slow_vector_initialization;
pub mod strings;
pub mod suspicious_trait_impl;
pub mod swap;
pub mod temporary_assignment;
pub mod transmute;
pub mod trivially_copy_pass_by_ref;
pub mod types;
pub mod unicode;
pub mod unsafe_removed_from_name;
pub mod unused_io_amount;
pub mod unused_label;
pub mod unwrap;
pub mod use_self;
pub mod vec;
pub mod wildcard_dependencies;
pub mod write;
pub mod zero_div_zero;
// end lints modules, do not remove this comment, it’s used in `update_lints`
pub use crate::utils::conf::Conf;
/// In-crate re-exports of compiler types, so the rest of the crate can
/// refer to them through a single stable path.
mod reexport {
    crate use syntax::ast::Name;
}
/// Register all pre expansion lints
///
/// Pre-expansion lints run before any macro expansion has happened.
///
/// Note that due to the architechture of the compiler, currently `cfg_attr` attributes on crate
/// level (i.e `#![cfg_attr(...)]`) will still be expanded even when using a pre-expansion pass.
///
/// Used in `./src/driver.rs`.
pub fn register_pre_expansion_lints(
    session: &rustc::session::Session,
    store: &mut rustc::lint::LintStore,
    conf: &Conf,
) {
    // Lints from the `write` module (formatting-macro checks).
    store.register_pre_expansion_pass(Some(session), true, false, box write::Pass);
    // Redundant `field: field` struct-literal initializers.
    store.register_pre_expansion_pass(
        Some(session),
        true,
        false,
        box redundant_field_names::RedundantFieldNames,
    );
    // Naming lints; the single-char-binding threshold is configurable.
    store.register_pre_expansion_pass(
        Some(session),
        true,
        false,
        box non_expressive_names::NonExpressiveNames {
            single_char_binding_names_threshold: conf.single_char_binding_names_threshold,
        },
    );
    // `cfg_attr` handling — must run before attributes are expanded away.
    store.register_pre_expansion_pass(Some(session), true, false, box attrs::CfgAttrPass);
    // `dbg!` macro usage — inspected before the macro expands.
    store.register_pre_expansion_pass(Some(session), true, false, box dbg_macro::Pass);
}
/// Loads Clippy's configuration (`clippy.toml`) for this compilation.
///
/// Resolution order: a file explicitly named in the plugin args (must be
/// readable), otherwise a looked-up default location (missing file is OK).
/// All configuration errors are reported as compiler diagnostics but are
/// non-fatal — the default `Conf` is used instead.
#[doc(hidden)]
pub fn read_conf(reg: &rustc_plugin::Registry<'_>) -> Conf {
    match utils::conf::file_from_args(reg.args()) {
        Ok(file_name) => {
            // if the user specified a file, it must exist, otherwise default to `clippy.toml` but
            // do not require the file to exist
            let file_name = if let Some(file_name) = file_name {
                Some(file_name)
            } else {
                match utils::conf::lookup_conf_file() {
                    Ok(path) => path,
                    Err(error) => {
                        // Lookup failure is reported but not fatal: fall
                        // through with `None`, yielding the default conf.
                        reg.sess
                            .struct_err(&format!("error finding Clippy's configuration file: {}", error))
                            .emit();
                        None
                    },
                }
            };
            // Resolve a relative config path against the directory of the
            // crate's root source file (when known).
            let file_name = file_name.map(|file_name| {
                if file_name.is_relative() {
                    reg.sess
                        .local_crate_source_file
                        .as_ref()
                        .and_then(|file| std::path::Path::new(&file).parent().map(std::path::Path::to_path_buf))
                        .unwrap_or_default()
                        .join(file_name)
                } else {
                    file_name
                }
            });
            let (conf, errors) = utils::conf::read(file_name.as_ref().map(std::convert::AsRef::as_ref));
            // all conf errors are non-fatal, we just use the default conf in case of error
            for error in errors {
                reg.sess
                    .struct_err(&format!(
                        "error reading Clippy's configuration file `{}`: {}",
                        file_name.as_ref().and_then(|p| p.to_str()).unwrap_or(""),
                        error
                    ))
                    .emit();
            }
            conf
        },
        Err((err, span)) => {
            // Bad plugin arguments: report at the offending span and fall
            // back to the default configuration (empty TOML parses to it).
            reg.sess
                .struct_span_err(span, err)
                .span_note(span, "Clippy will use default configuration")
                .emit();
            toml::from_str("").expect("we never error on empty config files")
        },
    }
}
/// Register all lints and lint groups with the rustc plugin registry
///
/// Used in `./src/driver.rs`.
#[allow(clippy::too_many_lines)]
#[rustfmt::skip]
pub fn register_plugins(reg: &mut rustc_plugin::Registry<'_>, conf: &Conf) {
let mut store = reg.sess.lint_store.borrow_mut();
// begin deprecated lints, do not remove this comment, it’s used in `update_lints`
store.register_removed(
"should_assert_eq",
"`assert!()` will be more flexible with RFC 2011",
);
store.register_removed(
"extend_from_slice",
"`.extend_from_slice(_)` is a faster way to extend a Vec by a slice",
);
store.register_removed(
"range_step_by_zero",
"`iterator.step_by(0)` panics nowadays",
);
store.register_removed(
"unstable_as_slice",
"`Vec::as_slice` has been stabilized in 1.7",
);
store.register_removed(
"unstable_as_mut_slice",
"`Vec::as_mut_slice` has been stabilized in 1.7",
);
store.register_removed(
"str_to_string",
"using `str::to_string` is common even today and specialization will likely happen soon",
);
store.register_removed(
"string_to_string",
"using `string::to_string` is common even today and specialization will likely happen soon",
);
store.register_removed(
"misaligned_transmute",
"this lint has been split into cast_ptr_alignment and transmute_ptr_to_ptr",
);
store.register_removed(
"assign_ops",
"using compound assignment operators (e.g., `+=`) is harmless",
);
store.register_removed(
"if_let_redundant_pattern_matching",
"this lint has been changed to redundant_pattern_matching",
);
store.register_removed(
"unsafe_vector_initialization",
"the replacement suggested by this lint had substantially different behavior",
);
// end deprecated lints, do not remove this comment, it’s used in `update_lints`
reg.register_late_lint_pass(box serde_api::Serde);
reg.register_early_lint_pass(box utils::internal_lints::Clippy);
reg.register_late_lint_pass(box utils::internal_lints::CompilerLintFunctions::new());
reg.register_early_lint_pass(box utils::internal_lints::DefaultHashTypes::default());
reg.register_late_lint_pass(box utils::internal_lints::LintWithoutLintPass::default());
reg.register_late_lint_pass(box utils::inspector::Pass);
reg.register_late_lint_pass(box utils::author::Pass);
reg.register_late_lint_pass(box types::TypePass);
reg.register_late_lint_pass(box booleans::NonminimalBool);
reg.register_late_lint_pass(box eq_op::EqOp);
reg.register_early_lint_pass(box enum_variants::EnumVariantNames::new(conf.enum_variant_name_threshold));
reg.register_late_lint_pass(box enum_glob_use::EnumGlobUse);
reg.register_late_lint_pass(box enum_clike::UnportableVariant);
reg.register_late_lint_pass(box excessive_precision::ExcessivePrecision);
reg.register_late_lint_pass(box bit_mask::BitMask::new(conf.verbose_bit_mask_threshold));
reg.register_late_lint_pass(box ptr::PointerPass);
reg.register_late_lint_pass(box needless_bool::NeedlessBool);
reg.register_late_lint_pass(box needless_bool::BoolComparison);
reg.register_late_lint_pass(box approx_const::Pass);
reg.register_late_lint_pass(box misc::Pass);
reg.register_early_lint_pass(box precedence::Precedence);
reg.register_early_lint_pass(box needless_continue::NeedlessContinue);
reg.register_late_lint_pass(box eta_reduction::EtaPass);
reg.register_late_lint_pass(box identity_op::IdentityOp);
reg.register_late_lint_pass(box erasing_op::ErasingOp);
reg.register_early_lint_pass(box items_after_statements::ItemsAfterStatements);
reg.register_late_lint_pass(box mut_mut::MutMut);
reg.register_late_lint_pass(box mut_reference::UnnecessaryMutPassed);
reg.register_late_lint_pass(box len_zero::LenZero);
reg.register_late_lint_pass(box attrs::AttrPass);
reg.register_early_lint_pass(box collapsible_if::CollapsibleIf);
reg.register_late_lint_pass(box block_in_if_condition::BlockInIfCondition);
reg.register_late_lint_pass(box unicode::Unicode);
reg.register_late_lint_pass(box strings::StringAdd);
reg.register_early_lint_pass(box returns::ReturnPass);
reg.register_late_lint_pass(box implicit_return::Pass);
reg.register_late_lint_pass(box methods::Pass);
reg.register_late_lint_pass(box map_clone::Pass);
reg.register_late_lint_pass(box shadow::Pass);
reg.register_late_lint_pass(box types::LetPass);
reg.register_late_lint_pass(box types::UnitCmp);
reg.register_late_lint_pass(box loops::Pass);
reg.register_late_lint_pass(box lifetimes::LifetimePass);
reg.register_late_lint_pass(box entry::HashMapLint);
reg.register_late_lint_pass(box ranges::Pass);
reg.register_late_lint_pass(box types::CastPass);
reg.register_late_lint_pass(box types::TypeComplexityPass::new(conf.type_complexity_threshold));
reg.register_late_lint_pass(box matches::MatchPass);
reg.register_late_lint_pass(box minmax::MinMaxPass);
reg.register_late_lint_pass(box open_options::NonSensical);
reg.register_late_lint_pass(box zero_div_zero::Pass);
reg.register_late_lint_pass(box mutex_atomic::MutexAtomic);
reg.register_late_lint_pass(box needless_update::Pass);
reg.register_late_lint_pass(box needless_borrow::NeedlessBorrow::default());
reg.register_late_lint_pass(box needless_borrowed_ref::NeedlessBorrowedRef);
reg.register_late_lint_pass(box no_effect::Pass);
reg.register_late_lint_pass(box temporary_assignment::Pass);
reg.register_late_lint_pass(box transmute::Transmute);
reg.register_late_lint_pass(
box cognitive_complexity::CognitiveComplexity::new(conf.cognitive_complexity_threshold)
);
reg.register_late_lint_pass(box escape::Pass{too_large_for_stack: conf.too_large_for_stack});
reg.register_early_lint_pass(box misc_early::MiscEarly);
reg.register_late_lint_pass(box panic_unimplemented::Pass);
reg.register_late_lint_pass(box strings::StringLitAsBytes);
reg.register_late_lint_pass(box derive::Derive);
reg.register_late_lint_pass(box types::CharLitAsU8);
reg.register_late_lint_pass(box vec::Pass);
reg.register_late_lint_pass(box drop_bounds::Pass);
reg.register_late_lint_pass(box drop_forget_ref::Pass);
reg.register_late_lint_pass(box empty_enum::EmptyEnum);
reg.register_late_lint_pass(box types::AbsurdExtremeComparisons);
reg.register_late_lint_pass(box types::InvalidUpcastComparisons);
reg.register_late_lint_pass(box regex::Pass::default());
reg.register_late_lint_pass(box copies::CopyAndPaste);
reg.register_late_lint_pass(box copy_iterator::CopyIterator);
reg.register_late_lint_pass(box format::Pass);
reg.register_early_lint_pass(box formatting::Formatting);
reg.register_late_lint_pass(box swap::Swap);
reg.register_early_lint_pass(box if_not_else::IfNotElse);
reg.register_early_lint_pass(box else_if_without_else::ElseIfWithoutElse);
reg.register_early_lint_pass(box int_plus_one::IntPlusOne);
reg.register_late_lint_pass(box overflow_check_conditional::OverflowCheckConditional);
reg.register_late_lint_pass(box unused_label::UnusedLabel);
reg.register_late_lint_pass(box new_without_default::NewWithoutDefault::default());
reg.register_late_lint_pass(box blacklisted_name::BlackListedName::new(
conf.blacklisted_names.iter().cloned().collect()
));
reg.register_late_lint_pass(box functions::Functions::new(conf.too_many_arguments_threshold, conf.too_many_lines_threshold));
reg.register_early_lint_pass(box doc::Doc::new(conf.doc_valid_idents.iter().cloned().collect()));
reg.register_late_lint_pass(box neg_multiply::NegMultiply);
reg.register_early_lint_pass(box unsafe_removed_from_name::UnsafeNameRemoval);
reg.register_late_lint_pass(box mem_discriminant::MemDiscriminant);
reg.register_late_lint_pass(box mem_forget::MemForget);
reg.register_late_lint_pass(box mem_replace::MemReplace);
reg.register_late_lint_pass(box arithmetic::Arithmetic::default());
reg.register_late_lint_pass(box assign_ops::AssignOps);
reg.register_late_lint_pass(box let_if_seq::LetIfSeq);
reg.register_late_lint_pass(box eval_order_dependence::EvalOrderDependence);
reg.register_late_lint_pass(box missing_doc::MissingDoc::new());
reg.register_late_lint_pass(box missing_inline::MissingInline);
reg.register_late_lint_pass(box ok_if_let::Pass);
reg.register_late_lint_pass(box redundant_pattern_matching::Pass);
reg.register_late_lint_pass(box partialeq_ne_impl::Pass);
reg.register_early_lint_pass(box reference::Pass);
reg.register_early_lint_pass(box reference::DerefPass);
reg.register_early_lint_pass(box double_parens::DoubleParens);
reg.register_late_lint_pass(box unused_io_amount::UnusedIoAmount);
reg.register_late_lint_pass(box large_enum_variant::LargeEnumVariant::new(conf.enum_variant_size_threshold));
reg.register_late_lint_pass(box explicit_write::Pass);
reg.register_late_lint_pass(box needless_pass_by_value::NeedlessPassByValue);
reg.register_late_lint_pass(box trivially_copy_pass_by_ref::TriviallyCopyPassByRef::new(
conf.trivial_copy_size_limit,
®.sess.target,
));
reg.register_early_lint_pass(box literal_representation::LiteralDigitGrouping);
reg.register_early_lint_pass(box literal_representation::LiteralRepresentation::new(
conf.literal_representation_threshold
));
reg.register_late_lint_pass(box use_self::UseSelf);
reg.register_late_lint_pass(box bytecount::ByteCount);
reg.register_late_lint_pass(box infinite_iter::Pass);
reg.register_late_lint_pass(box inline_fn_without_body::Pass);
reg.register_late_lint_pass(box invalid_ref::InvalidRef);
reg.register_late_lint_pass(box identity_conversion::IdentityConversion::default());
reg.register_late_lint_pass(box types::ImplicitHasher);
reg.register_early_lint_pass(box const_static_lifetime::StaticConst);
reg.register_late_lint_pass(box fallible_impl_from::FallibleImplFrom);
reg.register_late_lint_pass(box replace_consts::ReplaceConsts);
reg.register_late_lint_pass(box types::UnitArg);
reg.register_late_lint_pass(box double_comparison::Pass);
reg.register_late_lint_pass(box question_mark::Pass);
reg.register_late_lint_pass(box suspicious_trait_impl::SuspiciousImpl);
reg.register_early_lint_pass(box cargo_common_metadata::Pass);
reg.register_early_lint_pass(box multiple_crate_versions::Pass);
reg.register_early_lint_pass(box wildcard_dependencies::Pass);
reg.register_late_lint_pass(box map_unit_fn::Pass);
reg.register_late_lint_pass(box infallible_destructuring_match::Pass);
reg.register_late_lint_pass(box inherent_impl::Pass::default());
reg.register_late_lint_pass(box neg_cmp_op_on_partial_ord::NoNegCompOpForPartialOrd);
reg.register_late_lint_pass(box unwrap::Pass);
reg.register_late_lint_pass(box duration_subsec::DurationSubsec);
reg.register_late_lint_pass(box default_trait_access::DefaultTraitAccess);
reg.register_late_lint_pass(box indexing_slicing::IndexingSlicing);
reg.register_late_lint_pass(box non_copy_const::NonCopyConst);
reg.register_late_lint_pass(box ptr_offset_with_cast::Pass);
reg.register_late_lint_pass(box redundant_clone::RedundantClone);
reg.register_late_lint_pass(box slow_vector_initialization::Pass);
reg.register_late_lint_pass(box types::RefToMut);
reg.register_late_lint_pass(box assertions_on_constants::AssertionsOnConstants);
reg.register_late_lint_pass(box missing_const_for_fn::MissingConstForFn);
reg.register_lint_group("clippy::restriction", Some("clippy_restriction"), vec![
arithmetic::FLOAT_ARITHMETIC,
arithmetic::INTEGER_ARITHMETIC,
dbg_macro::DBG_MACRO,
else_if_without_else::ELSE_IF_WITHOUT_ELSE,
implicit_return::IMPLICIT_RETURN,
indexing_slicing::INDEXING_SLICING,
inherent_impl::MULTIPLE_INHERENT_IMPL,
literal_representation::DECIMAL_LITERAL_REPRESENTATION,
matches::WILDCARD_ENUM_MATCH_ARM,
mem_forget::MEM_FORGET,
methods::CLONE_ON_REF_PTR,
methods::GET_UNWRAP,
methods::OPTION_UNWRAP_USED,
methods::RESULT_UNWRAP_USED,
methods::WRONG_PUB_SELF_CONVENTION,
misc::FLOAT_CMP_CONST,
missing_doc::MISSING_DOCS_IN_PRIVATE_ITEMS,
missing_inline::MISSING_INLINE_IN_PUBLIC_ITEMS,
panic_unimplemented::UNIMPLEMENTED,
shadow::SHADOW_REUSE,
shadow::SHADOW_SAME,
strings::STRING_ADD,
write::PRINT_STDOUT,
write::USE_DEBUG,
]);
reg.register_lint_group("clippy::pedantic", Some("clippy_pedantic"), vec![
attrs::INLINE_ALWAYS,
copies::MATCH_SAME_ARMS,
copy_iterator::COPY_ITERATOR,
default_trait_access::DEFAULT_TRAIT_ACCESS,
derive::EXPL_IMPL_CLONE_ON_COPY,
doc::DOC_MARKDOWN,
empty_enum::EMPTY_ENUM,
enum_glob_use::ENUM_GLOB_USE,
enum_variants::MODULE_NAME_REPETITIONS,
enum_variants::PUB_ENUM_VARIANT_NAMES,
functions::TOO_MANY_LINES,
if_not_else::IF_NOT_ELSE,
infinite_iter::MAYBE_INFINITE_ITER,
items_after_statements::ITEMS_AFTER_STATEMENTS,
literal_representation::LARGE_DIGIT_GROUPS,
loops::EXPLICIT_INTO_ITER_LOOP,
loops::EXPLICIT_ITER_LOOP,
matches::SINGLE_MATCH_ELSE,
methods::FILTER_MAP,
methods::MAP_FLATTEN,
methods::OPTION_MAP_UNWRAP_OR,
methods::OPTION_MAP_UNWRAP_OR_ELSE,
methods::RESULT_MAP_UNWRAP_OR_ELSE,
misc::USED_UNDERSCORE_BINDING,
misc_early::UNSEPARATED_LITERAL_SUFFIX,
mut_mut::MUT_MUT,
needless_continue::NEEDLESS_CONTINUE,
needless_pass_by_value::NEEDLESS_PASS_BY_VALUE,
non_expressive_names::SIMILAR_NAMES,
replace_consts::REPLACE_CONSTS,
shadow::SHADOW_UNRELATED,
strings::STRING_ADD_ASSIGN,
types::CAST_POSSIBLE_TRUNCATION,
types::CAST_POSSIBLE_WRAP,
types::CAST_PRECISION_LOSS,
types::CAST_SIGN_LOSS,
types::INVALID_UPCAST_COMPARISONS,
types::LINKEDLIST,
unicode::NON_ASCII_LITERAL,
unicode::UNICODE_NOT_NFC,
use_self::USE_SELF,
]);
reg.register_lint_group("clippy::internal", Some("clippy_internal"), vec![
utils::internal_lints::CLIPPY_LINTS_INTERNAL,
utils::internal_lints::COMPILER_LINT_FUNCTIONS,
utils::internal_lints::DEFAULT_HASH_TYPES,
utils::internal_lints::LINT_WITHOUT_LINT_PASS,
]);
reg.register_lint_group("clippy::all", Some("clippy"), vec![
approx_const::APPROX_CONSTANT,
assertions_on_constants::ASSERTIONS_ON_CONSTANTS,
assign_ops::ASSIGN_OP_PATTERN,
assign_ops::MISREFACTORED_ASSIGN_OP,
attrs::DEPRECATED_CFG_ATTR,
attrs::DEPRECATED_SEMVER,
attrs::UNKNOWN_CLIPPY_LINTS,
attrs::USELESS_ATTRIBUTE,
bit_mask::BAD_BIT_MASK,
bit_mask::INEFFECTIVE_BIT_MASK,
bit_mask::VERBOSE_BIT_MASK,
blacklisted_name::BLACKLISTED_NAME,
block_in_if_condition::BLOCK_IN_IF_CONDITION_EXPR,
block_in_if_condition::BLOCK_IN_IF_CONDITION_STMT,
booleans::LOGIC_BUG,
booleans::NONMINIMAL_BOOL,
bytecount::NAIVE_BYTECOUNT,
cognitive_complexity::COGNITIVE_COMPLEXITY,
collapsible_if::COLLAPSIBLE_IF,
const_static_lifetime::CONST_STATIC_LIFETIME,
copies::IFS_SAME_COND,
copies::IF_SAME_THEN_ELSE,
derive::DERIVE_HASH_XOR_EQ,
double_comparison::DOUBLE_COMPARISONS,
double_parens::DOUBLE_PARENS,
drop_bounds::DROP_BOUNDS,
drop_forget_ref::DROP_COPY,
drop_forget_ref::DROP_REF,
drop_forget_ref::FORGET_COPY,
drop_forget_ref::FORGET_REF,
duration_subsec::DURATION_SUBSEC,
entry::MAP_ENTRY,
enum_clike::ENUM_CLIKE_UNPORTABLE_VARIANT,
enum_variants::ENUM_VARIANT_NAMES,
enum_variants::MODULE_INCEPTION,
eq_op::EQ_OP,
eq_op::OP_REF,
erasing_op::ERASING_OP,
escape::BOXED_LOCAL,
eta_reduction::REDUNDANT_CLOSURE,
eval_order_dependence::DIVERGING_SUB_EXPRESSION,
eval_order_dependence::EVAL_ORDER_DEPENDENCE,
excessive_precision::EXCESSIVE_PRECISION,
explicit_write::EXPLICIT_WRITE,
format::USELESS_FORMAT,
formatting::POSSIBLE_MISSING_COMMA,
formatting::SUSPICIOUS_ASSIGNMENT_FORMATTING,
formatting::SUSPICIOUS_ELSE_FORMATTING,
functions::NOT_UNSAFE_PTR_ARG_DEREF,
functions::TOO_MANY_ARGUMENTS,
identity_conversion::IDENTITY_CONVERSION,
identity_op::IDENTITY_OP,
indexing_slicing::OUT_OF_BOUNDS_INDEXING,
infallible_destructuring_match::INFALLIBLE_DESTRUCTURING_MATCH,
infinite_iter::INFINITE_ITER,
inline_fn_without_body::INLINE_FN_WITHOUT_BODY,
int_plus_one::INT_PLUS_ONE,
invalid_ref::INVALID_REF,
large_enum_variant::LARGE_ENUM_VARIANT,
len_zero::LEN_WITHOUT_IS_EMPTY,
len_zero::LEN_ZERO,
let_if_seq::USELESS_LET_IF_SEQ,
lifetimes::EXTRA_UNUSED_LIFETIMES,
lifetimes::NEEDLESS_LIFETIMES,
literal_representation::INCONSISTENT_DIGIT_GROUPING,
literal_representation::MISTYPED_LITERAL_SUFFIXES,
literal_representation::UNREADABLE_LITERAL,
loops::EMPTY_LOOP,
loops::EXPLICIT_COUNTER_LOOP,
loops::FOR_KV_MAP,
loops::FOR_LOOP_OVER_OPTION,
loops::FOR_LOOP_OVER_RESULT,
loops::ITER_NEXT_LOOP,
loops::MANUAL_MEMCPY,
loops::MUT_RANGE_BOUND,
loops::NEEDLESS_COLLECT,
loops::NEEDLESS_RANGE_LOOP,
loops::NEVER_LOOP,
loops::REVERSE_RANGE_LOOP,
loops::UNUSED_COLLECT,
loops::WHILE_IMMUTABLE_CONDITION,
loops::WHILE_LET_LOOP,
loops::WHILE_LET_ON_ITERATOR,
map_clone::MAP_CLONE,
map_unit_fn::OPTION_MAP_UNIT_FN,
map_unit_fn::RESULT_MAP_UNIT_FN,
matches::MATCH_AS_REF,
matches::MATCH_BOOL,
matches::MATCH_OVERLAPPING_ARM,
matches::MATCH_REF_PATS,
matches::MATCH_WILD_ERR_ARM,
matches::SINGLE_MATCH,
mem_discriminant::MEM_DISCRIMINANT_NON_ENUM,
mem_replace::MEM_REPLACE_OPTION_WITH_NONE,
methods::CHARS_LAST_CMP,
methods::CHARS_NEXT_CMP,
methods::CLONE_DOUBLE_REF,
methods::CLONE_ON_COPY,
methods::EXPECT_FUN_CALL,
methods::FILTER_NEXT,
methods::INTO_ITER_ON_ARRAY,
methods::INTO_ITER_ON_REF,
methods::ITER_CLONED_COLLECT,
methods::ITER_NTH,
methods::ITER_SKIP_NEXT,
methods::NEW_RET_NO_SELF,
methods::OK_EXPECT,
methods::OPTION_MAP_OR_NONE,
methods::OR_FUN_CALL,
methods::SEARCH_IS_SOME,
methods::SHOULD_IMPLEMENT_TRAIT,
methods::SINGLE_CHAR_PATTERN,
methods::STRING_EXTEND_CHARS,
methods::TEMPORARY_CSTRING_AS_PTR,
methods::UNNECESSARY_FILTER_MAP,
methods::UNNECESSARY_FOLD,
methods::USELESS_ASREF,
methods::WRONG_SELF_CONVENTION,
minmax::MIN_MAX,
misc::CMP_NAN,
misc::CMP_OWNED,
misc::FLOAT_CMP,
misc::MODULO_ONE,
misc::REDUNDANT_PATTERN,
misc::SHORT_CIRCUIT_STATEMENT,
misc::TOPLEVEL_REF_ARG,
misc::ZERO_PTR,
misc_early::BUILTIN_TYPE_SHADOW,
misc_early::DOUBLE_NEG,
misc_early::DUPLICATE_UNDERSCORE_ARGUMENT,
misc_early::MIXED_CASE_HEX_LITERALS,
misc_early::REDUNDANT_CLOSURE_CALL,
misc_early::UNNEEDED_FIELD_PATTERN,
misc_early::ZERO_PREFIXED_LITERAL,
mut_reference::UNNECESSARY_MUT_PASSED,
mutex_atomic::MUTEX_ATOMIC,
needless_bool::BOOL_COMPARISON,
needless_bool::NEEDLESS_BOOL,
needless_borrowed_ref::NEEDLESS_BORROWED_REFERENCE,
needless_update::NEEDLESS_UPDATE,
neg_cmp_op_on_partial_ord::NEG_CMP_OP_ON_PARTIAL_ORD,
neg_multiply::NEG_MULTIPLY,
new_without_default::NEW_WITHOUT_DEFAULT,
no_effect::NO_EFFECT,
no_effect::UNNECESSARY_OPERATION,
non_copy_const::BORROW_INTERIOR_MUTABLE_CONST,
non_copy_const::DECLARE_INTERIOR_MUTABLE_CONST,
non_expressive_names::JUST_UNDERSCORES_AND_DIGITS,
non_expressive_names::MANY_SINGLE_CHAR_NAMES,
ok_if_let::IF_LET_SOME_RESULT,
open_options::NONSENSICAL_OPEN_OPTIONS,
overflow_check_conditional::OVERFLOW_CHECK_CONDITIONAL,
panic_unimplemented::PANIC_PARAMS,
partialeq_ne_impl::PARTIALEQ_NE_IMPL,
precedence::PRECEDENCE,
ptr::CMP_NULL,
ptr::MUT_FROM_REF,
ptr::PTR_ARG,
ptr_offset_with_cast::PTR_OFFSET_WITH_CAST,
question_mark::QUESTION_MARK,
ranges::ITERATOR_STEP_BY_ZERO,
ranges::RANGE_MINUS_ONE,
ranges::RANGE_PLUS_ONE,
ranges::RANGE_ZIP_WITH_LEN,
redundant_field_names::REDUNDANT_FIELD_NAMES,
redundant_pattern_matching::REDUNDANT_PATTERN_MATCHING,
reference::DEREF_ADDROF,
reference::REF_IN_DEREF,
regex::INVALID_REGEX,
regex::REGEX_MACRO,
regex::TRIVIAL_REGEX,
returns::LET_AND_RETURN,
returns::NEEDLESS_RETURN,
returns::UNUSED_UNIT,
serde_api::SERDE_API_MISUSE,
slow_vector_initialization::SLOW_VECTOR_INITIALIZATION,
strings::STRING_LIT_AS_BYTES,
suspicious_trait_impl::SUSPICIOUS_ARITHMETIC_IMPL,
suspicious_trait_impl::SUSPICIOUS_OP_ASSIGN_IMPL,
swap::ALMOST_SWAPPED,
swap::MANUAL_SWAP,
temporary_assignment::TEMPORARY_ASSIGNMENT,
transmute::CROSSPOINTER_TRANSMUTE,
transmute::TRANSMUTE_BYTES_TO_STR,
transmute::TRANSMUTE_INT_TO_BOOL,
transmute::TRANSMUTE_INT_TO_CHAR,
transmute::TRANSMUTE_INT_TO_FLOAT,
transmute::TRANSMUTE_PTR_TO_PTR,
transmute::TRANSMUTE_PTR_TO_REF,
transmute::USELESS_TRANSMUTE,
transmute::WRONG_TRANSMUTE,
trivially_copy_pass_by_ref::TRIVIALLY_COPY_PASS_BY_REF,
types::ABSURD_EXTREME_COMPARISONS,
types::BORROWED_BOX,
types::BOX_VEC,
types::CAST_LOSSLESS,
types::CAST_PTR_ALIGNMENT,
types::CAST_REF_TO_MUT,
types::CHAR_LIT_AS_U8,
types::FN_TO_NUMERIC_CAST,
types::FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
types::IMPLICIT_HASHER,
types::LET_UNIT_VALUE,
types::OPTION_OPTION,
types::TYPE_COMPLEXITY,
types::UNIT_ARG,
types::UNIT_CMP,
types::UNNECESSARY_CAST,
types::VEC_BOX,
unicode::ZERO_WIDTH_SPACE,
unsafe_removed_from_name::UNSAFE_REMOVED_FROM_NAME,
unused_io_amount::UNUSED_IO_AMOUNT,
unused_label::UNUSED_LABEL,
vec::USELESS_VEC,
write::PRINTLN_EMPTY_STRING,
write::PRINT_LITERAL,
write::PRINT_WITH_NEWLINE,
write::WRITELN_EMPTY_STRING,
write::WRITE_LITERAL,
write::WRITE_WITH_NEWLINE,
zero_div_zero::ZERO_DIVIDED_BY_ZERO,
]);
reg.register_lint_group("clippy::style", Some("clippy_style"), vec![
assertions_on_constants::ASSERTIONS_ON_CONSTANTS,
assign_ops::ASSIGN_OP_PATTERN,
attrs::UNKNOWN_CLIPPY_LINTS,
bit_mask::VERBOSE_BIT_MASK,
blacklisted_name::BLACKLISTED_NAME,
block_in_if_condition::BLOCK_IN_IF_CONDITION_EXPR,
block_in_if_condition::BLOCK_IN_IF_CONDITION_STMT,
collapsible_if::COLLAPSIBLE_IF,
const_static_lifetime::CONST_STATIC_LIFETIME,
enum_variants::ENUM_VARIANT_NAMES,
enum_variants::MODULE_INCEPTION,
eq_op::OP_REF,
eta_reduction::REDUNDANT_CLOSURE,
excessive_precision::EXCESSIVE_PRECISION,
formatting::SUSPICIOUS_ASSIGNMENT_FORMATTING,
formatting::SUSPICIOUS_ELSE_FORMATTING,
infallible_destructuring_match::INFALLIBLE_DESTRUCTURING_MATCH,
len_zero::LEN_WITHOUT_IS_EMPTY,
len_zero::LEN_ZERO,
let_if_seq::USELESS_LET_IF_SEQ,
literal_representation::INCONSISTENT_DIGIT_GROUPING,
literal_representation::UNREADABLE_LITERAL,
loops::EMPTY_LOOP,
loops::FOR_KV_MAP,
loops::NEEDLESS_RANGE_LOOP,
loops::WHILE_LET_ON_ITERATOR,
map_clone::MAP_CLONE,
matches::MATCH_BOOL,
matches::MATCH_OVERLAPPING_ARM,
matches::MATCH_REF_PATS,
matches::MATCH_WILD_ERR_ARM,
matches::SINGLE_MATCH,
mem_replace::MEM_REPLACE_OPTION_WITH_NONE,
methods::CHARS_LAST_CMP,
methods::INTO_ITER_ON_REF,
methods::ITER_CLONED_COLLECT,
methods::ITER_SKIP_NEXT,
methods::NEW_RET_NO_SELF,
methods::OK_EXPECT,
methods::OPTION_MAP_OR_NONE,
methods::SHOULD_IMPLEMENT_TRAIT,
methods::STRING_EXTEND_CHARS,
methods::UNNECESSARY_FOLD,
methods::WRONG_SELF_CONVENTION,
misc::REDUNDANT_PATTERN,
misc::TOPLEVEL_REF_ARG,
misc::ZERO_PTR,
misc_early::BUILTIN_TYPE_SHADOW,
misc_early::DOUBLE_NEG,
misc_early::DUPLICATE_UNDERSCORE_ARGUMENT,
misc_early::MIXED_CASE_HEX_LITERALS,
misc_early::UNNEEDED_FIELD_PATTERN,
mut_reference::UNNECESSARY_MUT_PASSED,
neg_multiply::NEG_MULTIPLY,
new_without_default::NEW_WITHOUT_DEFAULT,
non_expressive_names::JUST_UNDERSCORES_AND_DIGITS,
non_expressive_names::MANY_SINGLE_CHAR_NAMES,
ok_if_let::IF_LET_SOME_RESULT,
panic_unimplemented::PANIC_PARAMS,
ptr::CMP_NULL,
ptr::PTR_ARG,
question_mark::QUESTION_MARK,
redundant_field_names::REDUNDANT_FIELD_NAMES,
redundant_pattern_matching::REDUNDANT_PATTERN_MATCHING,
regex::REGEX_MACRO,
regex::TRIVIAL_REGEX,
returns::LET_AND_RETURN,
returns::NEEDLESS_RETURN,
returns::UNUSED_UNIT,
strings::STRING_LIT_AS_BYTES,
types::FN_TO_NUMERIC_CAST,
types::FN_TO_NUMERIC_CAST_WITH_TRUNCATION,
types::IMPLICIT_HASHER,
types::LET_UNIT_VALUE,
unsafe_removed_from_name::UNSAFE_REMOVED_FROM_NAME,
write::PRINTLN_EMPTY_STRING,
write::PRINT_LITERAL,
write::PRINT_WITH_NEWLINE,
write::WRITELN_EMPTY_STRING,
write::WRITE_LITERAL,
write::WRITE_WITH_NEWLINE,
]);
reg.register_lint_group("clippy::complexity", Some("clippy_complexity"), vec![
assign_ops::MISREFACTORED_ASSIGN_OP,
attrs::DEPRECATED_CFG_ATTR,
booleans::NONMINIMAL_BOOL,
cognitive_complexity::COGNITIVE_COMPLEXITY,
double_comparison::DOUBLE_COMPARISONS,
double_parens::DOUBLE_PARENS,
duration_subsec::DURATION_SUBSEC,
eval_order_dependence::DIVERGING_SUB_EXPRESSION,
eval_order_dependence::EVAL_ORDER_DEPENDENCE,
explicit_write::EXPLICIT_WRITE,
format::USELESS_FORMAT,
functions::TOO_MANY_ARGUMENTS,
identity_conversion::IDENTITY_CONVERSION,
identity_op::IDENTITY_OP,
int_plus_one::INT_PLUS_ONE,
lifetimes::EXTRA_UNUSED_LIFETIMES,
lifetimes::NEEDLESS_LIFETIMES,
loops::EXPLICIT_COUNTER_LOOP,
loops::MUT_RANGE_BOUND,
loops::WHILE_LET_LOOP,
map_unit_fn::OPTION_MAP_UNIT_FN,
map_unit_fn::RESULT_MAP_UNIT_FN,
matches::MATCH_AS_REF,
methods::CHARS_NEXT_CMP,
methods::CLONE_ON_COPY,
methods::FILTER_NEXT,
methods::SEARCH_IS_SOME,
methods::UNNECESSARY_FILTER_MAP,
methods::USELESS_ASREF,
misc::SHORT_CIRCUIT_STATEMENT,
misc_early::REDUNDANT_CLOSURE_CALL,
misc_early::ZERO_PREFIXED_LITERAL,
needless_bool::BOOL_COMPARISON,
needless_bool::NEEDLESS_BOOL,
needless_borrowed_ref::NEEDLESS_BORROWED_REFERENCE,
needless_update::NEEDLESS_UPDATE,
neg_cmp_op_on_partial_ord::NEG_CMP_OP_ON_PARTIAL_ORD,
no_effect::NO_EFFECT,
no_effect::UNNECESSARY_OPERATION,
overflow_check_conditional::OVERFLOW_CHECK_CONDITIONAL,
partialeq_ne_impl::PARTIALEQ_NE_IMPL,
precedence::PRECEDENCE,
ptr_offset_with_cast::PTR_OFFSET_WITH_CAST,
ranges::RANGE_MINUS_ONE,
ranges::RANGE_PLUS_ONE,
ranges::RANGE_ZIP_WITH_LEN,
reference::DEREF_ADDROF,
reference::REF_IN_DEREF,
swap::MANUAL_SWAP,
temporary_assignment::TEMPORARY_ASSIGNMENT,
transmute::CROSSPOINTER_TRANSMUTE,
transmute::TRANSMUTE_BYTES_TO_STR,
transmute::TRANSMUTE_INT_TO_BOOL,
transmute::TRANSMUTE_INT_TO_CHAR,
transmute::TRANSMUTE_INT_TO_FLOAT,
transmute::TRANSMUTE_PTR_TO_PTR,
transmute::TRANSMUTE_PTR_TO_REF,
transmute::USELESS_TRANSMUTE,
types::BORROWED_BOX,
types::CAST_LOSSLESS,
types::CHAR_LIT_AS_U8,
types::OPTION_OPTION,
types::TYPE_COMPLEXITY,
types::UNIT_ARG,
types::UNNECESSARY_CAST,
types::VEC_BOX,
unused_label::UNUSED_LABEL,
zero_div_zero::ZERO_DIVIDED_BY_ZERO,
]);
reg.register_lint_group("clippy::correctness", Some("clippy_correctness"), vec![
approx_const::APPROX_CONSTANT,
attrs::DEPRECATED_SEMVER,
attrs::USELESS_ATTRIBUTE,
bit_mask::BAD_BIT_MASK,
bit_mask::INEFFECTIVE_BIT_MASK,
booleans::LOGIC_BUG,
copies::IFS_SAME_COND,
copies::IF_SAME_THEN_ELSE,
derive::DERIVE_HASH_XOR_EQ,
drop_bounds::DROP_BOUNDS,
drop_forget_ref::DROP_COPY,
drop_forget_ref::DROP_REF,
drop_forget_ref::FORGET_COPY,
drop_forget_ref::FORGET_REF,
enum_clike::ENUM_CLIKE_UNPORTABLE_VARIANT,
eq_op::EQ_OP,
erasing_op::ERASING_OP,
formatting::POSSIBLE_MISSING_COMMA,
functions::NOT_UNSAFE_PTR_ARG_DEREF,
indexing_slicing::OUT_OF_BOUNDS_INDEXING,
infinite_iter::INFINITE_ITER,
inline_fn_without_body::INLINE_FN_WITHOUT_BODY,
invalid_ref::INVALID_REF,
literal_representation::MISTYPED_LITERAL_SUFFIXES,
loops::FOR_LOOP_OVER_OPTION,
loops::FOR_LOOP_OVER_RESULT,
loops::ITER_NEXT_LOOP,
loops::NEVER_LOOP,
loops::REVERSE_RANGE_LOOP,
loops::WHILE_IMMUTABLE_CONDITION,
mem_discriminant::MEM_DISCRIMINANT_NON_ENUM,
methods::CLONE_DOUBLE_REF,
methods::INTO_ITER_ON_ARRAY,
methods::TEMPORARY_CSTRING_AS_PTR,
minmax::MIN_MAX,
misc::CMP_NAN,
misc::FLOAT_CMP,
misc::MODULO_ONE,
non_copy_const::BORROW_INTERIOR_MUTABLE_CONST,
non_copy_const::DECLARE_INTERIOR_MUTABLE_CONST,
open_options::NONSENSICAL_OPEN_OPTIONS,
ptr::MUT_FROM_REF,
ranges::ITERATOR_STEP_BY_ZERO,
regex::INVALID_REGEX,
serde_api::SERDE_API_MISUSE,
suspicious_trait_impl::SUSPICIOUS_ARITHMETIC_IMPL,
suspicious_trait_impl::SUSPICIOUS_OP_ASSIGN_IMPL,
swap::ALMOST_SWAPPED,
transmute::WRONG_TRANSMUTE,
types::ABSURD_EXTREME_COMPARISONS,
types::CAST_PTR_ALIGNMENT,
types::CAST_REF_TO_MUT,
types::UNIT_CMP,
unicode::ZERO_WIDTH_SPACE,
unused_io_amount::UNUSED_IO_AMOUNT,
]);
reg.register_lint_group("clippy::perf", Some("clippy_perf"), vec![
bytecount::NAIVE_BYTECOUNT,
entry::MAP_ENTRY,
escape::BOXED_LOCAL,
large_enum_variant::LARGE_ENUM_VARIANT,
loops::MANUAL_MEMCPY,
loops::NEEDLESS_COLLECT,
loops::UNUSED_COLLECT,
methods::EXPECT_FUN_CALL,
methods::ITER_NTH,
methods::OR_FUN_CALL,
methods::SINGLE_CHAR_PATTERN,
misc::CMP_OWNED,
mutex_atomic::MUTEX_ATOMIC,
slow_vector_initialization::SLOW_VECTOR_INITIALIZATION,
trivially_copy_pass_by_ref::TRIVIALLY_COPY_PASS_BY_REF,
types::BOX_VEC,
vec::USELESS_VEC,
]);
reg.register_lint_group("clippy::cargo", Some("clippy_cargo"), vec![
cargo_common_metadata::CARGO_COMMON_METADATA,
multiple_crate_versions::MULTIPLE_CRATE_VERSIONS,
wildcard_dependencies::WILDCARD_DEPENDENCIES,
]);
reg.register_lint_group("clippy::nursery", Some("clippy_nursery"), vec![
attrs::EMPTY_LINE_AFTER_OUTER_ATTR,
fallible_impl_from::FALLIBLE_IMPL_FROM,
missing_const_for_fn::MISSING_CONST_FOR_FN,
mutex_atomic::MUTEX_INTEGER,
needless_borrow::NEEDLESS_BORROW,
redundant_clone::REDUNDANT_CLONE,
unwrap::PANICKING_UNWRAP,
unwrap::UNNECESSARY_UNWRAP,
]);
}
/// Register renamed lints.
///
/// Used in `./src/driver.rs`.
pub fn register_renamed(ls: &mut rustc::lint::LintStore) {
    // (old name, new name) pairs; registered in the same order as before.
    let renames = [
        ("clippy::stutter", "clippy::module_name_repetitions"),
        ("clippy::new_without_default_derive", "clippy::new_without_default"),
        ("clippy::cyclomatic_complexity", "clippy::cognitive_complexity"),
    ];
    for &(old_name, new_name) in &renames {
        ls.register_renamed(old_name, new_name);
    }
}
// Only exists to let the dogfood integration test work.
// Don't run clippy as an executable directly; use the cargo-clippy wrapper.
#[allow(dead_code)]
fn main() {
    panic!("Please use the cargo-clippy executable");
}
| 39.503503 | 129 | 0.704786 |
bb112327abf4b02f0075411958ca63a9af2c911b | 9,554 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/// Entry point of thread panic, for details, see std::macros
#[macro_export]
#[allow_internal_unstable]
macro_rules! panic {
    () => (
        // No message: recurse into the one-argument arm with a default text.
        panic!("explicit panic")
    );
    ($msg:expr) => ({
        // Statically allocate the (message, file, line) triple so the cold
        // panic path does not have to construct it at runtime.
        static _MSG_FILE_LINE: (&'static str, &'static str, u32) = ($msg, file!(), line!());
        $crate::panicking::panic(&_MSG_FILE_LINE)
    });
    ($fmt:expr, $($arg:tt)*) => ({
        // The leading _'s are to avoid dead code warnings if this is
        // used inside a dead function. Just `#[allow(dead_code)]` is
        // insufficient, since the user may have
        // `#[forbid(dead_code)]` and which cannot be overridden.
        static _FILE_LINE: (&'static str, u32) = (file!(), line!());
        $crate::panicking::panic_fmt(format_args!($fmt, $($arg)*), &_FILE_LINE)
    });
}
/// Ensure that a boolean expression is `true` at runtime.
///
/// This will invoke the `panic!` macro if the provided expression cannot be
/// evaluated to `true` at runtime.
///
/// # Examples
///
/// ```
/// // the panic message for these assertions is the stringified value of the
/// // expression given.
/// assert!(true);
///
/// fn some_computation() -> bool { true } // a very simple function
///
/// assert!(some_computation());
///
/// // assert with a custom message
/// let x = true;
/// assert!(x, "x wasn't true!");
///
/// let a = 3; let b = 27;
/// assert!(a + b == 30, "a = {}, b = {}", a, b);
/// ```
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
macro_rules! assert {
    ($cond:expr) => (
        // No user message: panic with the stringified condition itself.
        if !$cond {
            panic!(concat!("assertion failed: ", stringify!($cond)))
        }
    );
    ($cond:expr, $($arg:tt)+) => (
        // Custom message: forward the remaining tokens straight to `panic!`.
        if !$cond {
            panic!($($arg)+)
        }
    );
}
/// Asserts that two expressions are equal to each other.
///
/// On panic, this macro will print the values of the expressions with their
/// debug representations.
///
/// # Examples
///
/// ```
/// let a = 3;
/// let b = 1 + 2;
/// assert_eq!(a, b);
/// ```
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
macro_rules! assert_eq {
    ($left:expr , $right:expr) => ({
        // The `match` binds references to both operands, guaranteeing each
        // expression is evaluated exactly once.
        match (&($left), &($right)) {
            (left_val, right_val) => {
                if !(*left_val == *right_val) {
                    panic!("assertion failed: `(left == right)` \
                           (left: `{:?}`, right: `{:?}`)", *left_val, *right_val)
                }
            }
        }
    })
}
/// Ensure that a boolean expression is `true` at runtime.
///
/// This will invoke the `panic!` macro if the provided expression cannot be
/// evaluated to `true` at runtime.
///
/// Unlike `assert!`, `debug_assert!` statements are only enabled in non
/// optimized builds by default. An optimized build will omit all
/// `debug_assert!` statements unless `-C debug-assertions` is passed to the
/// compiler. This makes `debug_assert!` useful for checks that are too
/// expensive to be present in a release build but may be helpful during
/// development.
///
/// # Examples
///
/// ```
/// // the panic message for these assertions is the stringified value of the
/// // expression given.
/// debug_assert!(true);
///
/// fn some_expensive_computation() -> bool { true } // a very simple function
/// debug_assert!(some_expensive_computation());
///
/// // assert with a custom message
/// let x = true;
/// debug_assert!(x, "x wasn't true!");
///
/// let a = 3; let b = 27;
/// debug_assert!(a + b == 30, "a = {}, b = {}", a, b);
/// ```
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
macro_rules! debug_assert {
    // `cfg!(debug_assertions)` is a compile-time constant, so the whole check
    // is compiled out when debug assertions are disabled.
    ($($arg:tt)*) => (if cfg!(debug_assertions) { assert!($($arg)*); })
}
/// Asserts that two expressions are equal to each other, testing equality in
/// both directions.
///
/// On panic, this macro will print the values of the expressions.
///
/// Unlike `assert_eq!`, `debug_assert_eq!` statements are only enabled in non
/// optimized builds by default. An optimized build will omit all
/// `debug_assert_eq!` statements unless `-C debug-assertions` is passed to the
/// compiler. This makes `debug_assert_eq!` useful for checks that are too
/// expensive to be present in a release build but may be helpful during
/// development.
///
/// # Examples
///
/// ```
/// let a = 3;
/// let b = 1 + 2;
/// debug_assert_eq!(a, b);
/// ```
#[macro_export]
// NOTE(review): unlike `debug_assert!` above, this lacks a `#[stable]`
// attribute — confirm whether that is intentional.
macro_rules! debug_assert_eq {
    ($($arg:tt)*) => (if cfg!(debug_assertions) { assert_eq!($($arg)*); })
}
/// Short circuiting evaluation on Err
///
/// `libstd` contains a more general `try!` macro that uses `From<E>`.
#[macro_export]
macro_rules! try {
    ($e:expr) => ({
        // Name the variants explicitly so the expansion is unaffected by
        // whatever `Ok`/`Err` may mean at the call site.
        use $crate::result::Result::{Ok, Err};
        match $e {
            Ok(e) => e,
            // Early-returns the error unchanged (no `From` conversion here).
            Err(e) => return Err(e),
        }
    })
}
/// Use the `format!` syntax to write data into a buffer.
///
/// This macro is typically used with a buffer of `&mut `[`Write`][write].
///
/// See [`std::fmt`][fmt] for more information on format syntax.
///
/// [fmt]: fmt/index.html
/// [write]: io/trait.Write.html
///
/// # Examples
///
/// ```
/// use std::io::Write;
///
/// let mut w = Vec::new();
/// write!(&mut w, "test").unwrap();
/// write!(&mut w, "formatted {}", "arguments").unwrap();
///
/// assert_eq!(w, b"testformatted arguments");
/// ```
#[macro_export]
macro_rules! write {
    // Delegates to the destination's `write_fmt`, forwarding its result.
    ($dst:expr, $($arg:tt)*) => ($dst.write_fmt(format_args!($($arg)*)))
}
/// Use the `format!` syntax to write data into a buffer, appending a newline.
///
/// This macro is typically used with a buffer of `&mut `[`Write`][write].
///
/// See [`std::fmt`][fmt] for more information on format syntax.
///
/// [fmt]: fmt/index.html
/// [write]: io/trait.Write.html
///
/// # Examples
///
/// ```
/// use std::io::Write;
///
/// let mut w = Vec::new();
/// writeln!(&mut w, "test").unwrap();
/// writeln!(&mut w, "formatted {}", "arguments").unwrap();
///
/// assert_eq!(&w[..], "test\nformatted arguments\n".as_bytes());
/// ```
#[macro_export]
#[stable(feature = "rust1", since = "1.0.0")]
macro_rules! writeln {
    ($dst:expr, $fmt:expr) => (
        // `concat!` appends the newline to the format string at compile time.
        write!($dst, concat!($fmt, "\n"))
    );
    ($dst:expr, $fmt:expr, $($arg:tt)*) => (
        write!($dst, concat!($fmt, "\n"), $($arg)*)
    );
}
/// A utility macro for indicating unreachable code.
///
/// This is useful any time that the compiler can't determine that some code is unreachable. For
/// example:
///
/// * Match arms with guard conditions.
/// * Loops that dynamically terminate.
/// * Iterators that dynamically terminate.
///
/// # Panics
///
/// This will always panic.
///
/// # Examples
///
/// Match arms:
///
/// ```
/// fn foo(x: Option<i32>) {
///     match x {
///         Some(n) if n >= 0 => println!("Some(Non-negative)"),
///         Some(n) if n <  0 => println!("Some(Negative)"),
///         Some(_)           => unreachable!(), // compile error if commented out
///         None              => println!("None")
///     }
/// }
/// ```
///
/// Iterators:
///
/// ```
/// fn divide_by_three(x: u32) -> u32 { // one of the poorest implementations of x/3
///     for i in 0.. {
///         if 3*i < i { panic!("u32 overflow"); }
///         if x < 3*i { return i-1; }
///     }
///     unreachable!();
/// }
/// ```
#[macro_export]
#[unstable(feature = "core",
           reason = "relationship with panic is unclear",
           issue = "27701")]
macro_rules! unreachable {
    () => ({
        panic!("internal error: entered unreachable code")
    });
    ($msg:expr) => ({
        // Delegate to the formatting arm so every message shares one prefix.
        unreachable!("{}", $msg)
    });
    ($fmt:expr, $($arg:tt)*) => ({
        panic!(concat!("internal error: entered unreachable code: ", $fmt), $($arg)*)
    });
}
/// A standardized placeholder for marking unfinished code. It panics with the
/// message `"not yet implemented"` when executed.
///
/// This can be useful if you are prototyping and are just looking to have your
/// code typecheck, or if you're implementing a trait that requires multiple
/// methods, and you're only planning on using one of them.
///
/// # Examples
///
/// Here's an example of some in-progress code. We have a trait `Foo`:
///
/// ```
/// trait Foo {
///     fn bar(&self);
///     fn baz(&self);
/// }
/// ```
///
/// We want to implement `Foo` on one of our types, but we also want to work on
/// just `bar()` first. In order for our code to compile, we need to implement
/// `baz()`, so we can use `unimplemented!`:
///
/// ```
/// # trait Foo {
/// #     fn foo(&self);
/// #     fn bar(&self);
/// # }
/// struct MyStruct;
///
/// impl Foo for MyStruct {
///     fn foo(&self) {
///         // implementation goes here
///     }
///
///     fn bar(&self) {
///         // let's not worry about implementing bar() for now
///         unimplemented!();
///     }
/// }
///
/// fn main() {
///     let s = MyStruct;
///     s.foo();
///
///     // we aren't even using bar() yet, so this is fine.
/// }
/// ```
#[macro_export]
#[unstable(feature = "core",
           reason = "relationship with panic is unclear",
           issue = "27701")]
macro_rules! unimplemented {
    // Simply a fixed-message panic; no formatting arms in this era.
    () => (panic!("not yet implemented"))
}
| 28.434524 | 96 | 0.570965 |
e8d71ebeb31bf26a0157b369a93d844ab92ec285 | 2,115 | extern crate milagro_bls;
extern crate ssz;
#[macro_use]
mod macros;
mod keypair;
mod secret_key;
pub use crate::keypair::Keypair;
pub use crate::secret_key::SecretKey;
pub use milagro_bls::{compress_g2, hash_on_g2};
#[cfg(feature = "fake_crypto")]
mod fake_aggregate_public_key;
#[cfg(feature = "fake_crypto")]
mod fake_aggregate_signature;
#[cfg(feature = "fake_crypto")]
mod fake_public_key;
#[cfg(feature = "fake_crypto")]
mod fake_signature;
#[cfg(not(feature = "fake_crypto"))]
mod aggregate_public_key;
#[cfg(not(feature = "fake_crypto"))]
mod aggregate_signature;
#[cfg(not(feature = "fake_crypto"))]
mod public_key;
#[cfg(not(feature = "fake_crypto"))]
mod signature;
#[cfg(feature = "fake_crypto")]
pub use fakes::*;
#[cfg(feature = "fake_crypto")]
mod fakes {
    //! Re-exports the `Fake*` implementations under the real type names so
    //! downstream code is source-compatible with either crypto backend.
    pub use crate::fake_aggregate_public_key::FakeAggregatePublicKey as AggregatePublicKey;
    pub use crate::fake_aggregate_signature::FakeAggregateSignature as AggregateSignature;
    pub use crate::fake_public_key::FakePublicKey as PublicKey;
    pub use crate::fake_signature::FakeSignature as Signature;
}
#[cfg(not(feature = "fake_crypto"))]
pub use reals::*;
#[cfg(not(feature = "fake_crypto"))]
mod reals {
    //! Re-exports the real BLS implementations; mirrors the `fakes` module
    //! above so the public API is identical under both feature settings.
    pub use crate::aggregate_public_key::AggregatePublicKey;
    pub use crate::aggregate_signature::AggregateSignature;
    pub use crate::public_key::PublicKey;
    pub use crate::signature::Signature;
}
// Serialized sizes, in bytes, of the BLS objects exposed by this crate.
pub const BLS_AGG_SIG_BYTE_SIZE: usize = 96;
pub const BLS_SIG_BYTE_SIZE: usize = 96;
pub const BLS_SECRET_KEY_BYTE_SIZE: usize = 48;
pub const BLS_PUBLIC_KEY_BYTE_SIZE: usize = 48;
use hashing::hash;
use ssz::ssz_encode;
/// Returns the withdrawal credentials for a given public key.
/// Returns the withdrawal credentials for a given public key.
///
/// The credentials are the hash of the SSZ-encoded public key with its first
/// byte replaced by `prefix_byte`.
pub fn get_withdrawal_credentials(pubkey: &PublicKey, prefix_byte: u8) -> Vec<u8> {
    let digest = hash(&ssz_encode(pubkey));
    let mut credentials = Vec::with_capacity(digest.len());
    credentials.push(prefix_byte);
    credentials.extend_from_slice(&digest[1..]);
    credentials
}
pub fn bls_verify_aggregate(
pubkey: &AggregatePublicKey,
message: &[u8],
signature: &AggregateSignature,
domain: u64,
) -> bool {
signature.verify(message, domain, pubkey)
}
| 27.828947 | 91 | 0.740426 |
08db3dffbf5ae3e3def5c6bfa352420e7ac45529 | 1,468 | use crate::world::World;
use std::cell::UnsafeCell;
use std::sync::atomic::{AtomicUsize, Ordering};
// A deferred world mutation, executed later with exclusive `World` access.
pub(crate) type Command = Box<dyn FnOnce(&mut World) + Send + 'static>;
// One batch of queued commands (typically owned by a single worker at a time).
pub(crate) type CommandBuffer = Vec<Command>;
// Fixed pool of command buffers handed out through an atomic cursor; see
// `next` and `drain` below.
pub(crate) struct CommandBuffers {
    // Interior mutability: `next` hands each cell out at most once per cycle,
    // so no two callers ever alias the same buffer.
    buffers: Vec<UnsafeCell<CommandBuffer>>,
    // Index of the next unclaimed buffer; reset to 0 by `drain`.
    index: AtomicUsize,
}
// SAFETY(review): sound only because the atomic `index` is strictly
// increasing between `drain` calls, so each `UnsafeCell` is claimed by at
// most one thread — confirm no other code path touches `buffers` directly.
unsafe impl Sync for CommandBuffers {}
impl CommandBuffers {
    /// Create a pool of `buffer_count` empty buffers.
    pub fn new(buffer_count: usize) -> Self {
        let mut buffers = Vec::new();
        buffers.resize_with(buffer_count, || UnsafeCell::new(Vec::new()));
        Self {
            buffers,
            index: AtomicUsize::new(0),
        }
    }
    /// Claim the next unused buffer, or `None` if all have been handed out.
    ///
    /// Each buffer is returned at most once between calls to `drain`, which is
    /// what makes returning `&mut` from `&self` sound here.
    pub fn next(&self) -> Option<&mut CommandBuffer> {
        let mut prev = self.index.load(Ordering::Relaxed);
        while prev < self.buffers.len() {
            // CAS loop: try to advance the cursor past the slot we want.
            // `compare_exchange_weak` may fail spuriously; on failure `prev`
            // is refreshed with the currently stored value and we retry.
            match self.index.compare_exchange_weak(
                prev,
                prev + 1,
                Ordering::Relaxed,
                Ordering::Relaxed,
            ) {
                // SAFETY: the successful CAS claimed index `result`
                // exclusively, so no other caller can reach this cell.
                Ok(result) => unsafe { return Some(&mut *self.buffers[result].get()) },
                Err(next_prev) => prev = next_prev,
            }
        }
        None
    }
    /// Drain every queued command from the buffers used this cycle and reset
    /// the cursor so the (still-allocated) buffers can be reused.
    pub fn drain(&mut self) -> impl Iterator<Item = Command> + '_ {
        let used_buffers = *self.index.get_mut();
        *self.index.get_mut() = 0;
        self.buffers
            .iter_mut()
            .take(used_buffers)
            .flat_map(|buffer| buffer.get_mut().drain(..))
    }
}
| 27.185185 | 87 | 0.553815 |
72b73f70fe8ad1e511e980a540827a699aac0aad | 2,763 | mod cpsr;
mod cpu_mode;
#[allow(clippy::module_inception)]
mod gba;
mod instructions;
mod rom;
mod utils;
pub use cpsr::CPSR;
pub use cpu_mode::CpuMode;
pub use gba::{GBAButton, GBAError, LogLevel, GBA};
pub use rom::Rom;
use alloc::boxed::Box;
/// Wrapper around the GBA emulator struct to prevent dereferencing the Box
/// Since it's too big for the stack
pub struct GBABox {
  // Heap allocation keeps the large emulator state off the stack.
  internal_gba: Box<gba::GBA>,
}
impl GBABox {
  /// Construct a boxed emulator, optionally with a BIOS image and a print callback.
  pub fn new(
    log_level: LogLevel,
    bios_file: core::option::Option<&[u8]>,
    print_fn: Option<fn(&str) -> ()>,
  ) -> Self {
    GBABox { internal_gba: gba::GBA::new(log_level, bios_file, print_fn) }
  }
  /// Refresh the output texture from the current emulator state.
  pub fn update_video_output(&mut self) {
    self.internal_gba.update_video_output()
  }
  /// Raw bytes of the rendered frame.
  pub fn video_output(&self) -> &[u8] {
    &self.internal_gba.output_texture[..]
  }
  /// Raw bytes of the memory-mapped I/O region.
  pub fn io_memory(&self) -> &[u8] {
    &self.internal_gba.io_mem[..]
  }
  /// This registers a button only for the next frame
  pub fn single_input_is_down(&mut self, button: GBAButton) {
    GBA::input(self, button);
  }
  /// Change the emulator's logging verbosity.
  pub fn set_log_level(&mut self, lvl: LogLevel) {
    GBA::set_log_level(self, lvl);
  }
  /// This registers a button as pressed until persistent_input_released is called
  pub fn persistent_input_pressed(&mut self, button: GBAButton) {
    GBA::persistent_input_pressed(self, button);
  }
  /// This unregisters a button as pressed
  pub fn persistent_input_released(&mut self, button: GBAButton) {
    GBA::persistent_input_released(self, button);
  }
  /// Gets the loaded Rom. Returns None if no Rom is loaded
  pub fn loaded_rom(&self) -> Option<&Rom> {
    self.internal_gba.loaded_rom.as_ref()
  }
  /// Copy of the 16 general-purpose registers (field reached via `Deref` to the inner GBA).
  pub fn registers(&self) -> [u32; 16] {
    self.regs
  }
  /// Get the current Program Status Register
  pub fn cpsr(&self) -> CPSR {
    self.cpsr
  }
  /// Video RAM bytes.
  pub fn vram_bytes(&self) -> &[u8] {
    &self.vram[..]
  }
  /// On-chip work RAM bytes.
  pub fn chip_wram_bytes(&self) -> &[u8] {
    &self.internal_gba.wram_chip[..]
  }
  /// On-board work RAM bytes.
  pub fn board_wram_bytes(&self) -> &[u8] {
    &self.internal_gba.wram_board[..]
  }
  /// Palette RAM bytes.
  pub fn palette_bytes(&self) -> &[u8] {
    &self.internal_gba.palette_ram[..]
  }
  /// Object attribute memory bytes.
  pub fn oam_bytes(&self) -> &[u8] {
    &self.internal_gba.oam[..]
  }
  /// BIOS ROM bytes.
  pub fn bios_bytes(&self) -> &[u8] {
    &self.internal_gba.bios_rom[..]
  }
  /// Saved Program Status Register for the current mode, or `None` in
  /// User/Privileged modes (which carry no SPSR).
  pub fn spsr(&self) -> Option<CPSR> {
    let mode = self.cpsr.mode();
    if mode == CpuMode::Privileged || mode == CpuMode::User {
      None
    } else {
      // `spsrs` is indexed by mode; slot 0 corresponds to the first banked mode.
      Some(self.spsrs[self.cpsr.mode().as_usize() - 1])
    }
  }
}
impl core::ops::Deref for GBABox {
  type Target = gba::GBA;

  /// Borrow the boxed emulator state (deref coercion unwraps the `Box`).
  fn deref(&self) -> &Self::Target {
    &self.internal_gba
  }
}
impl core::ops::DerefMut for GBABox {
  /// Mutably borrow the boxed emulator state.
  fn deref_mut(&mut self) -> &mut Self::Target {
    &mut self.internal_gba
  }
}
| 22.834711 | 82 | 0.643503 |
9ce5e2c7ba20a5e0d2ff4e59d787fcb2e883a653 | 19,056 | // Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A double-ended queue implemented as a circular buffer
use std::num;
use std::uint;
use std::vec;
use std::iterator::FromIterator;
static INITIAL_CAPACITY: uint = 8u; // 2^3
// Lower bound applied in `with_capacity` so the ring is never zero-sized.
static MINIMUM_CAPACITY: uint = 2u;
#[allow(missing_doc)]
#[deriving(Clone)]
pub struct Deque<T> {
    priv nelts: uint, // number of live elements
    priv lo: uint, // raw index of the logical front element
    priv elts: ~[Option<T>] // ring storage; `None` marks empty slots
}
// `Container` is the 0.x-era std trait supplying `len`/`is_empty`.
impl<T> Container for Deque<T> {
    /// Return the number of elements in the deque
    fn len(&self) -> uint { self.nelts }
    /// Return true if the deque contains no elements
    fn is_empty(&self) -> bool { self.len() == 0 }
}
impl<T> Mutable for Deque<T> {
    /// Clear the deque, removing all values.
    fn clear(&mut self) {
        // Overwriting every slot with `None` drops the contained values.
        for self.elts.mut_iter().advance |x| { *x = None }
        self.nelts = 0;
        self.lo = 0;
    }
}
impl<T> Deque<T> {
    /// Create an empty Deque
    pub fn new() -> Deque<T> {
        Deque::with_capacity(INITIAL_CAPACITY)
    }
    /// Create an empty Deque with space for at least `n` elements.
    pub fn with_capacity(n: uint) -> Deque<T> {
        Deque{nelts: 0, lo: 0,
              elts: vec::from_fn(num::max(MINIMUM_CAPACITY, n), |_| None)}
    }
    /// Return a reference to the first element in the deque
    ///
    /// Fails if the deque is empty
    // NOTE(review): unlike `peek_back`, this has no explicit empty check; on
    // an empty deque it fails inside `get` with a generic message — consider
    // making the two symmetric.
    pub fn peek_front<'a>(&'a self) -> &'a T { get(self.elts, self.raw_index(0)) }
    /// Return a reference to the last element in the deque
    ///
    /// Fails if the deque is empty
    pub fn peek_back<'a>(&'a self) -> &'a T {
        if self.nelts > 0 {
            get(self.elts, self.raw_index(self.nelts - 1))
        } else {
            fail!("peek_back: empty deque");
        }
    }
    /// Retrieve an element in the deque by index
    ///
    /// Fails if there is no element with the given index
    pub fn get<'a>(&'a self, i: int) -> &'a T {
        // Translate the logical index into ring-buffer coordinates.
        let idx = (self.lo + (i as uint)) % self.elts.len();
        get(self.elts, idx)
    }
    /// Remove and return the first element in the deque
    ///
    /// Fails if the deque is empty
    pub fn pop_front(&mut self) -> T {
        // `swap_unwrap` moves the value out, leaving `None` in the slot.
        let result = self.elts[self.lo].swap_unwrap();
        self.lo = (self.lo + 1u) % self.elts.len();
        self.nelts -= 1u;
        result
    }
    /// Return index in underlying vec for a given logical element index
    fn raw_index(&self, idx: uint) -> uint {
        raw_index(self.lo, self.elts.len(), idx)
    }
    /// Remove and return the last element in the deque
    ///
    /// Fails if the deque is empty
    pub fn pop_back(&mut self) -> T {
        self.nelts -= 1;
        let hi = self.raw_index(self.nelts);
        self.elts[hi].swap_unwrap()
    }
    /// Prepend an element to the deque
    pub fn add_front(&mut self, t: T) {
        // Double the storage when full, then step `lo` back one slot (wrapping).
        if self.nelts == self.elts.len() {
            grow(self.nelts, &mut self.lo, &mut self.elts);
        }
        if self.lo == 0u {
            self.lo = self.elts.len() - 1u;
        } else { self.lo -= 1u; }
        self.elts[self.lo] = Some(t);
        self.nelts += 1u;
    }
    /// Append an element to the deque
    pub fn add_back(&mut self, t: T) {
        if self.nelts == self.elts.len() {
            grow(self.nelts, &mut self.lo, &mut self.elts);
        }
        let hi = self.raw_index(self.nelts);
        self.elts[hi] = Some(t);
        self.nelts += 1u;
    }
    /// Reserve capacity for exactly `n` elements in the given deque,
    /// doing nothing if `self`'s capacity is already equal to or greater
    /// than the requested capacity
    ///
    /// # Arguments
    ///
    /// * n - The number of elements to reserve space for
    pub fn reserve(&mut self, n: uint) {
        self.elts.reserve(n);
    }
    /// Reserve capacity for at least `n` elements in the given deque,
    /// over-allocating in case the caller needs to reserve additional
    /// space.
    ///
    /// Do nothing if `self`'s capacity is already equal to or greater
    /// than the requested capacity.
    ///
    /// # Arguments
    ///
    /// * n - The number of elements to reserve space for
    pub fn reserve_at_least(&mut self, n: uint) {
        self.elts.reserve_at_least(n);
    }
    /// Front-to-back iterator.
    pub fn iter<'a>(&'a self) -> DequeIterator<'a, T> {
        DequeIterator{index: 0, nelts: self.nelts, elts: self.elts, lo: self.lo}
    }
    /// Front-to-back iterator which returns mutable values.
    pub fn mut_iter<'a>(&'a mut self) -> DequeMutIterator<'a, T> {
        DequeMutIterator{index: 0, nelts: self.nelts, elts: self.elts, lo: self.lo}
    }
    /// Back-to-front iterator.
    pub fn rev_iter<'a>(&'a self) -> DequeRevIterator<'a, T> {
        DequeRevIterator{index: self.nelts-1, nelts: self.nelts, elts: self.elts,
                         lo: self.lo}
    }
    /// Back-to-front iterator which returns mutable values.
    pub fn mut_rev_iter<'a>(&'a mut self) -> DequeMutRevIterator<'a, T> {
        DequeMutRevIterator{index: self.nelts-1, nelts: self.nelts, elts: self.elts,
                            lo: self.lo}
    }
}
// Generates an `Iterator` impl over the ring buffer: `$getter` selects shared
// (`get_ref`) vs mutable (`get_mut_ref`) access, and `$step` is the per-item
// index delta (+1 for front-to-back, -1 for back-to-front).
macro_rules! iterator {
    (impl $name:ident -> $elem:ty, $getter:ident, $step:expr) => {
        impl<'self, T> Iterator<$elem> for $name<'self, T> {
            #[inline]
            fn next(&mut self) -> Option<$elem> {
                if self.nelts == 0 {
                    return None;
                }
                // Translate the logical position into a raw slot index.
                let raw_index = raw_index(self.lo, self.elts.len(), self.index);
                self.index += $step;
                self.nelts -= 1;
                Some(self.elts[raw_index]. $getter ())
            }
        }
    }
}
// The four iterator flavours below share one body, generated by `iterator!`
// above; they differ only in mutability and traversal direction.
/// Deque iterator
pub struct DequeIterator<'self, T> {
    priv lo: uint,
    priv nelts: uint,
    priv index: uint,
    priv elts: &'self [Option<T>],
}
iterator!{impl DequeIterator -> &'self T, get_ref, 1}
/// Deque reverse iterator
pub struct DequeRevIterator<'self, T> {
    priv lo: uint,
    priv nelts: uint,
    priv index: uint,
    priv elts: &'self [Option<T>],
}
iterator!{impl DequeRevIterator -> &'self T, get_ref, -1}
/// Deque mutable iterator
pub struct DequeMutIterator<'self, T> {
    priv lo: uint,
    priv nelts: uint,
    priv index: uint,
    priv elts: &'self mut [Option<T>],
}
iterator!{impl DequeMutIterator -> &'self mut T, get_mut_ref, 1}
/// Deque mutable reverse iterator
pub struct DequeMutRevIterator<'self, T> {
    priv lo: uint,
    priv nelts: uint,
    priv index: uint,
    priv elts: &'self mut [Option<T>],
}
iterator!{impl DequeMutRevIterator -> &'self mut T, get_mut_ref, -1}
/// Grow is only called on full elts, so nelts is also len(elts), unlike
/// elsewhere.
fn grow<T>(nelts: uint, loptr: &mut uint, elts: &mut ~[Option<T>]) {
    assert_eq!(nelts, elts.len());
    let lo = *loptr;
    // Double the storage.
    let newlen = nelts * 2;
    elts.reserve(newlen);
    /* fill with None */
    for uint::range(elts.len(), elts.capacity()) |_| {
        elts.push(None);
    }
    /*
      Move the shortest half into the newly reserved area.
      lo ---->|
      nelts ----------->|
      [o o o|o o o o o]
    A [. . .|o o o o o o o o|. . . . .]
    B [o o o|. . . . . . . .|o o o o o]
     */
    assert!(newlen - nelts/2 >= nelts);
    if lo <= (nelts - lo) { // A
        // The prefix (before `lo`) is the shorter half: shift it up.
        for uint::range(0, lo) |i| {
            elts.swap(i, nelts + i);
        }
    } else { // B
        // The suffix (from `lo`) is the shorter half: shift it to the end
        // and move `lo` accordingly so logical order is preserved.
        for uint::range(lo, nelts) |i| {
            elts.swap(i, newlen - nelts + i);
        }
        *loptr += newlen - nelts;
    }
}
/// Borrow the value in slot `i`, failing if the slot is empty.
fn get<'r, T>(elts: &'r [Option<T>], i: uint) -> &'r T {
    match elts[i] {
        Some(ref value) => value,
        None => fail!()
    }
}
/// Return index in underlying vec for a given logical element index
fn raw_index(lo: uint, len: uint, index: uint) -> uint {
    // Wrap `lo + index` back into `[0, len)`. Callers keep `lo < len` and
    // `index <= len`, so the sum can exceed `len` at most once.
    let shifted = lo + index;
    if shifted >= len {
        shifted - len
    } else {
        shifted
    }
}
impl<A: Eq> Eq for Deque<A> {
    // Equal iff lengths match and elements compare equal front-to-back.
    fn eq(&self, other: &Deque<A>) -> bool {
        self.nelts == other.nelts &&
            self.iter().zip(other.iter()).all(|(a, b)| a.eq(b))
    }
    fn ne(&self, other: &Deque<A>) -> bool {
        !self.eq(other)
    }
}
impl<A, T: Iterator<A>> FromIterator<A, T> for Deque<A> {
    // Builds a deque by appending every yielded element at the back,
    // preserving the iterator's order.
    fn from_iterator(iterator: &mut T) -> Deque<A> {
        let mut deq = Deque::new();
        for iterator.advance |elt| {
            deq.add_back(elt);
        }
        deq
    }
}
#[cfg(test)]
mod tests {
use super::*;
use std::cmp::Eq;
use std::kinds::Copy;
use std::{int, uint};
use extra::test;
#[test]
fn test_simple() {
let mut d = Deque::new();
assert_eq!(d.len(), 0u);
d.add_front(17);
d.add_front(42);
d.add_back(137);
assert_eq!(d.len(), 3u);
d.add_back(137);
assert_eq!(d.len(), 4u);
debug!(d.peek_front());
assert_eq!(*d.peek_front(), 42);
debug!(d.peek_back());
assert_eq!(*d.peek_back(), 137);
let mut i: int = d.pop_front();
debug!(i);
assert_eq!(i, 42);
i = d.pop_back();
debug!(i);
assert_eq!(i, 137);
i = d.pop_back();
debug!(i);
assert_eq!(i, 137);
i = d.pop_back();
debug!(i);
assert_eq!(i, 17);
assert_eq!(d.len(), 0u);
d.add_back(3);
assert_eq!(d.len(), 1u);
d.add_front(2);
assert_eq!(d.len(), 2u);
d.add_back(4);
assert_eq!(d.len(), 3u);
d.add_front(1);
assert_eq!(d.len(), 4u);
debug!(d.get(0));
debug!(d.get(1));
debug!(d.get(2));
debug!(d.get(3));
assert_eq!(*d.get(0), 1);
assert_eq!(*d.get(1), 2);
assert_eq!(*d.get(2), 3);
assert_eq!(*d.get(3), 4);
}
    // Same push/pop sequence as `test_simple`, but with managed `@int` boxes
    // to exercise non-Copy-ish element handling.
    #[test]
    fn test_boxes() {
        let a: @int = @5;
        let b: @int = @72;
        let c: @int = @64;
        let d: @int = @175;
        let mut deq = Deque::new();
        assert_eq!(deq.len(), 0);
        deq.add_front(a);
        deq.add_front(b);
        deq.add_back(c);
        assert_eq!(deq.len(), 3);
        deq.add_back(d);
        assert_eq!(deq.len(), 4);
        assert_eq!(*deq.peek_front(), b);
        assert_eq!(*deq.peek_back(), d);
        assert_eq!(deq.pop_front(), b);
        assert_eq!(deq.pop_back(), d);
        assert_eq!(deq.pop_back(), c);
        assert_eq!(deq.pop_back(), a);
        assert_eq!(deq.len(), 0);
        deq.add_back(c);
        assert_eq!(deq.len(), 1);
        deq.add_front(b);
        assert_eq!(deq.len(), 2);
        deq.add_back(d);
        assert_eq!(deq.len(), 3);
        deq.add_front(a);
        assert_eq!(deq.len(), 4);
        assert_eq!(*deq.get(0), a);
        assert_eq!(*deq.get(1), b);
        assert_eq!(*deq.get(2), c);
        assert_eq!(*deq.get(3), d);
    }
    // Generic helper driven by the typed `test_param_*` tests below; mirrors
    // the `test_boxes` sequence for any `Copy + Eq` element type.
    #[cfg(test)]
    fn test_parameterized<T:Copy + Eq>(a: T, b: T, c: T, d: T) {
        let mut deq = Deque::new();
        assert_eq!(deq.len(), 0);
        deq.add_front(copy a);
        deq.add_front(copy b);
        deq.add_back(copy c);
        assert_eq!(deq.len(), 3);
        deq.add_back(copy d);
        assert_eq!(deq.len(), 4);
        assert_eq!(copy *deq.peek_front(), copy b);
        assert_eq!(copy *deq.peek_back(), copy d);
        assert_eq!(deq.pop_front(), copy b);
        assert_eq!(deq.pop_back(), copy d);
        assert_eq!(deq.pop_back(), copy c);
        assert_eq!(deq.pop_back(), copy a);
        assert_eq!(deq.len(), 0);
        deq.add_back(copy c);
        assert_eq!(deq.len(), 1);
        deq.add_front(copy b);
        assert_eq!(deq.len(), 2);
        deq.add_back(copy d);
        assert_eq!(deq.len(), 3);
        deq.add_front(copy a);
        assert_eq!(deq.len(), 4);
        assert_eq!(copy *deq.get(0), copy a);
        assert_eq!(copy *deq.get(1), copy b);
        assert_eq!(copy *deq.get(2), copy c);
        assert_eq!(copy *deq.get(3), copy d);
    }
#[test]
fn test_add_front_grow() {
let mut deq = Deque::new();
for int::range(0, 66) |i| {
deq.add_front(i);
}
assert_eq!(deq.len(), 66);
for int::range(0, 66) |i| {
assert_eq!(*deq.get(i), 65 - i);
}
let mut deq = Deque::new();
for int::range(0, 66) |i| {
deq.add_back(i);
}
for int::range(0, 66) |i| {
assert_eq!(*deq.get(i), i);
}
}
#[bench]
fn bench_new(b: &mut test::BenchHarness) {
do b.iter {
let _ = Deque::new::<u64>();
}
}
#[bench]
fn bench_add_back(b: &mut test::BenchHarness) {
let mut deq = Deque::new();
do b.iter {
deq.add_back(0);
}
}
#[bench]
fn bench_add_front(b: &mut test::BenchHarness) {
let mut deq = Deque::new();
do b.iter {
deq.add_front(0);
}
}
#[bench]
fn bench_grow(b: &mut test::BenchHarness) {
let mut deq = Deque::new();
do b.iter {
for 65.times {
deq.add_front(1);
}
}
}
#[deriving(Eq)]
enum Taggy { One(int), Two(int, int), Three(int, int, int), }
#[deriving(Eq)]
enum Taggypar<T> {
Onepar(int), Twopar(int, int), Threepar(int, int, int),
}
#[deriving(Eq)]
struct RecCy {
x: int,
y: int,
t: Taggy
}
#[test]
fn test_param_int() {
test_parameterized::<int>(5, 72, 64, 175);
}
#[test]
fn test_param_at_int() {
test_parameterized::<@int>(@5, @72, @64, @175);
}
#[test]
fn test_param_taggy() {
test_parameterized::<Taggy>(One(1), Two(1, 2), Three(1, 2, 3), Two(17, 42));
}
#[test]
fn test_param_taggypar() {
test_parameterized::<Taggypar<int>>(Onepar::<int>(1),
Twopar::<int>(1, 2),
Threepar::<int>(1, 2, 3),
Twopar::<int>(17, 42));
}
#[test]
fn test_param_reccy() {
let reccy1 = RecCy { x: 1, y: 2, t: One(1) };
let reccy2 = RecCy { x: 345, y: 2, t: Two(1, 2) };
let reccy3 = RecCy { x: 1, y: 777, t: Three(1, 2, 3) };
let reccy4 = RecCy { x: 19, y: 252, t: Two(17, 42) };
test_parameterized::<RecCy>(reccy1, reccy2, reccy3, reccy4);
}
#[test]
fn test_with_capacity() {
let mut d = Deque::with_capacity(0);
d.add_back(1);
assert_eq!(d.len(), 1);
let mut d = Deque::with_capacity(50);
d.add_back(1);
assert_eq!(d.len(), 1);
}
#[test]
fn test_reserve() {
let mut d = Deque::new();
d.add_back(0u64);
d.reserve(50);
assert_eq!(d.elts.capacity(), 50);
let mut d = Deque::new();
d.add_back(0u32);
d.reserve(50);
assert_eq!(d.elts.capacity(), 50);
}
#[test]
fn test_reserve_at_least() {
let mut d = Deque::new();
d.add_back(0u64);
d.reserve_at_least(50);
assert_eq!(d.elts.capacity(), 64);
let mut d = Deque::new();
d.add_back(0u32);
d.reserve_at_least(50);
assert_eq!(d.elts.capacity(), 64);
}
#[test]
fn test_iter() {
let mut d = Deque::new();
assert_eq!(d.iter().next(), None);
for int::range(0,5) |i| {
d.add_back(i);
}
assert_eq!(d.iter().collect::<~[&int]>(), ~[&0,&1,&2,&3,&4]);
for int::range(6,9) |i| {
d.add_front(i);
}
assert_eq!(d.iter().collect::<~[&int]>(), ~[&8,&7,&6,&0,&1,&2,&3,&4]);
}
#[test]
fn test_rev_iter() {
let mut d = Deque::new();
assert_eq!(d.rev_iter().next(), None);
for int::range(0,5) |i| {
d.add_back(i);
}
assert_eq!(d.rev_iter().collect::<~[&int]>(), ~[&4,&3,&2,&1,&0]);
for int::range(6,9) |i| {
d.add_front(i);
}
assert_eq!(d.rev_iter().collect::<~[&int]>(), ~[&4,&3,&2,&1,&0,&6,&7,&8]);
}
#[test]
fn test_mut_iter() {
let mut d = Deque::new();
assert!(d.mut_iter().next().is_none());
for uint::range(0,3) |i| {
d.add_front(i);
}
for d.mut_iter().enumerate().advance |(i, elt)| {
assert_eq!(*elt, 2 - i);
*elt = i;
}
{
let mut it = d.mut_iter();
assert_eq!(*it.next().unwrap(), 0);
assert_eq!(*it.next().unwrap(), 1);
assert_eq!(*it.next().unwrap(), 2);
assert!(it.next().is_none());
}
}
#[test]
fn test_mut_rev_iter() {
let mut d = Deque::new();
assert!(d.mut_rev_iter().next().is_none());
for uint::range(0,3) |i| {
d.add_front(i);
}
for d.mut_rev_iter().enumerate().advance |(i, elt)| {
assert_eq!(*elt, i);
*elt = i;
}
{
let mut it = d.mut_rev_iter();
assert_eq!(*it.next().unwrap(), 0);
assert_eq!(*it.next().unwrap(), 1);
assert_eq!(*it.next().unwrap(), 2);
assert!(it.next().is_none());
}
}
#[test]
fn test_from_iterator() {
use std::iterator;
let v = ~[1,2,3,4,5,6,7];
let deq: Deque<int> = v.iter().transform(|&x| x).collect();
let u: ~[int] = deq.iter().transform(|&x| x).collect();
assert_eq!(u, v);
let mut seq = iterator::Counter::new(0u, 2).take_(256);
let deq: Deque<uint> = seq.collect();
for deq.iter().enumerate().advance |(i, &x)| {
assert_eq!(2*i, x);
}
assert_eq!(deq.len(), 256);
}
#[test]
fn test_clone() {
let mut d = Deque::new();
d.add_front(17);
d.add_front(42);
d.add_back(137);
d.add_back(137);
assert_eq!(d.len(), 4u);
let mut e = d.clone();
assert_eq!(e.len(), 4u);
while !d.is_empty() {
assert_eq!(d.pop_back(), e.pop_back());
}
assert_eq!(d.len(), 0u);
assert_eq!(e.len(), 0u);
}
#[test]
fn test_eq() {
let mut d = Deque::new();
assert_eq!(&d, &Deque::with_capacity(0));
d.add_front(137);
d.add_front(17);
d.add_front(42);
d.add_back(137);
let mut e = Deque::with_capacity(0);
e.add_back(42);
e.add_back(17);
e.add_back(137);
e.add_back(137);
assert_eq!(&e, &d);
e.pop_back();
e.add_back(0);
assert!(e != d);
e.clear();
assert_eq!(e, Deque::new());
}
}
| 27.818978 | 84 | 0.505982 |
d54654c60868c26d586907659b1b6a3d3ec1ecff | 1,361 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![deny(warnings)]
#![allow(non_camel_case_types)]
#![feature(from_ref)]
#![cfg_attr(stage0, feature(match_default_bindings))]
#![feature(quote)]
#[macro_use] extern crate log;
extern crate syntax;
extern crate syntax_pos;
extern crate rustc_errors as errors;
extern crate rustc_data_structures;
// for "clarity", rename the graphviz crate to dot; graphviz within `borrowck`
// refers to the borrowck-specific graphviz adapter traits.
extern crate graphviz as dot;
#[macro_use]
extern crate rustc;
extern crate rustc_mir;
pub use borrowck::check_crate;
pub use borrowck::build_borrowck_dataflow_data_for_fn;
mod borrowck;
pub mod graphviz;
pub use borrowck::provide;
| 31.651163 | 86 | 0.753857 |
0952cdaf66e4022160add091313b0d168fc2ee92 | 3,761 | use super::{datetime::DateTime, rrule::RRule};
use crate::{parser::build_rruleset, RRuleError, WithError};
use chrono::TimeZone;
use chrono_tz::UTC;
use std::str::FromStr;
#[derive(Debug, Clone)]
pub struct RRuleSet {
    /// Recurrence rules (RRULE) that generate occurrences.
    pub rrule: Vec<RRule>,
    /// Explicitly included occurrence datetimes (RDATE).
    pub rdate: Vec<DateTime>,
    /// Exclusion rules (EXRULE) whose occurrences are removed from the set.
    pub exrule: Vec<RRule>,
    /// Explicitly excluded datetimes (EXDATE).
    pub exdate: Vec<DateTime>,
    /// The start datetime of the recurrence set (DTSTART).
    pub dt_start: DateTime,
}
impl Default for RRuleSet {
fn default() -> Self {
Self {
rrule: vec![],
rdate: vec![],
exrule: vec![],
exdate: vec![],
dt_start: UTC.ymd(1970, 1, 1).and_hms(0, 0, 0), // Unix Epoch
}
}
}
impl RRuleSet {
pub fn rrule(&mut self, rrule: RRule) {
self.rrule.push(rrule);
}
pub fn exrule(&mut self, rrule: RRule) {
self.exrule.push(rrule);
}
pub fn rdate(&mut self, rdate: DateTime) {
self.rdate.push(rdate);
}
pub fn exdate(&mut self, exdate: DateTime) {
self.exdate.push(exdate);
}
/// Returns all the recurrences of the rruleset.
/// Limit must be set in order to prevent infinite loops.
/// The max limit is `65535`. If you need more please use `into_iter` directly.
pub fn all(&self, limit: u16) -> Vec<DateTime> {
self.into_iter().take(limit as usize).collect()
}
/// Returns all the recurrences of the rrule.
/// Limit must be set in order to prevent infinite loops.
/// The max limit is `65535`. If you need more please use `into_iter` directly.
///
/// In case where the iterator ended with an errors the error will be included,
/// otherwise the second value of the return tuple will be `None`.
pub fn all_with_error(&self, limit: u16) -> (Vec<DateTime>, Option<RRuleError>) {
let mut iterator = self.into_iter();
let mut list = vec![];
let mut err = None;
for _i in 0..limit {
let next = iterator.next();
match next {
Some(value) => list.push(value),
None => {
if iterator.has_err() {
err = iterator.get_err();
}
break;
}
}
}
(list, err.cloned())
}
/// Returns the last recurrence before the given datetime instance.
/// The inc keyword defines what happens if dt is a recurrence.
/// With inc == true, if dt itself is a recurrence, it will be returned.
pub fn before(&self, dt: DateTime, inc: bool) -> Option<DateTime> {
self.into_iter()
.take_while(|d| if inc { *d <= dt } else { *d < dt })
.last()
}
/// Returns the last recurrence after the given datetime instance.
/// The inc keyword defines what happens if dt is a recurrence.
/// With inc == true, if dt itself is a recurrence, it will be returned.
pub fn after(&self, dt: DateTime, inc: bool) -> Option<DateTime> {
self.into_iter()
.find(|d| !(if inc { *d <= dt } else { *d < dt }))
}
/// Returns all the recurrences of the rrule between after and before.
/// The inc keyword defines what happens if after and/or before are
/// themselves recurrences. With inc == true, they will be included in the
/// list, if they are found in the recurrence set.
pub fn between(&self, after: DateTime, before: DateTime, inc: bool) -> Vec<DateTime> {
self.into_iter()
.skip_while(|d| if inc { *d <= after } else { *d < after })
.take_while(|d| if inc { *d <= before } else { *d < before })
.collect()
}
}
impl FromStr for RRuleSet {
    type Err = RRuleError;

    /// Parses an rruleset from its iCalendar string representation.
    fn from_str(input: &str) -> Result<Self, Self::Err> {
        build_rruleset(input)
    }
}
| 33.283186 | 90 | 0.571125 |
1ec554a696d7278fe574e7622539a130407da79d | 4,069 | use std::ffi::OsString;
use std::fs::{self, FileType, Metadata};
use std::io::{Error, Result};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use crate::{ClientState, ReadDirSpec};
/// Representation of a file or directory.
///
/// This representation does not wrap a `std::fs::DirEntry`. Instead it copies
/// `file_name`, `file_type`, and optionally `metadata` out of the underlying
/// `std::fs::DirEntry`. This allows it to quickly drop the underlying file
/// descriptor.
#[derive(Debug)]
pub struct DirEntry<C: ClientState> {
    /// Depth of this entry relative to the root directory where the walk
    /// started.
    pub depth: usize,
    /// File name of this entry without leading path component.
    pub file_name: OsString,
    /// File type result for the file/directory that this entry points at.
    pub file_type_result: Result<FileType>,
    /// Metadata result for the file/directory that this entry points at. Defaults
    /// to `None`. Filled in by the walk process when the
    /// [`preload_metadata`](struct.WalkDir.html#method.preload_metadata) option
    /// is set.
    pub metadata_result: Option<Result<Metadata>>,
    /// Field where clients can store state from within the The
    /// [`process_entries`](struct.WalkDirGeneric.html#method.process_entries)
    /// callback. This state will be cloned once for entries that have a
    /// `read_children_path` set.
    pub client_state: C,
    /// Path used by this entry's parent to read this entry.
    pub parent_path: Arc<PathBuf>,
    /// Path that will be used to read child entries. This is automatically set
    /// for directories. The
    /// [`process_entries`](struct.WalkDirGeneric.html#method.process_entries) callback
    /// may set this field to `None` to skip reading the contents of a
    /// particular directory.
    pub read_children_path: Option<Arc<PathBuf>>,
    /// If `read_children_path` is set and resulting `fs::read_dir` generates an error
    /// then that error is stored here.
    pub read_children_error: Option<Error>,
}
impl<C: ClientState> DirEntry<C> {
    /// Assembles an entry from fields already read out of the filesystem.
    /// `read_children_error` always starts out empty.
    pub(crate) fn new(
        depth: usize,
        file_name: OsString,
        file_type_result: Result<FileType>,
        metadata_result: Option<Result<Metadata>>,
        parent_path: Arc<PathBuf>,
        read_children_path: Option<Arc<PathBuf>>,
        client_state: C,
    ) -> DirEntry<C> {
        DirEntry {
            depth,
            file_name,
            file_type_result,
            metadata_result,
            client_state,
            parent_path,
            read_children_path,
            read_children_error: None,
        }
    }

    /// Builds the depth-0 root entry for a walk starting at `path`.
    /// Fails if the path's metadata cannot be read.
    pub(crate) fn new_root_with_path(path: &Path) -> Result<DirEntry<C>> {
        let metadata = fs::metadata(path)?;
        let file_type = metadata.file_type();
        // Only directories get a read path, so the walk can descend into them.
        let read_children_path = if file_type.is_dir() {
            Some(Arc::new(path.to_path_buf()))
        } else {
            None
        };
        // A path with no final component (e.g. "/") is named "/".
        let fallback_name = OsString::from("/");
        let file_name = path.file_name().unwrap_or(&fallback_name).to_owned();
        let parent_path = match path.parent() {
            Some(parent) => Arc::new(parent.to_path_buf()),
            None => Arc::new(PathBuf::new()),
        };
        Ok(DirEntry::new(
            0,
            file_name,
            Ok(file_type),
            Some(Ok(metadata)),
            parent_path,
            read_children_path,
            C::default(),
        ))
    }

    /// Path to the file/directory represented by this entry.
    ///
    /// The path is created by joining `parent_path` with `file_name`.
    pub fn path(&self) -> PathBuf {
        self.parent_path.join(&self.file_name)
    }

    /// Reference to the path of the directory containing this entry.
    pub fn parent_path(&self) -> &Path {
        self.parent_path.as_path()
    }

    /// Spec describing how to read this entry's children, or `None` when the
    /// entry has no `read_children_path` (not a directory, or skipped).
    pub(crate) fn read_children_spec(&self) -> Option<ReadDirSpec<C>> {
        let children_path = self.read_children_path.as_ref()?;
        Some(ReadDirSpec {
            depth: self.depth,
            path: Arc::clone(children_path),
            client_state: self.client_state.clone(),
        })
    }
}
| 36.00885 | 93 | 0.626935 |
116c44afa7e2f8fb1f6bd67563349c3045cd485d | 2,271 | use crate::config::ListenerConfigColor;
use crate::transformation;
use std::sync::{Arc, RwLock};
use rosrust;
use rustros_tf;
/// Subscribes to a `sensor_msgs/LaserScan` topic and caches the most recent
/// scan as 2D points transformed into the static frame.
pub struct LaserListener {
    /// Listener configuration (topic name and display color).
    pub config: ListenerConfigColor,
    /// Latest scan points as (x, y) in the static frame; written by the
    /// subscriber callback, read by consumers.
    pub points: Arc<RwLock<Vec<(f64, f64)>>>,
    // NOTE(review): the underscore fields below appear to be held only to keep
    // the TF listener and the ROS subscription alive — confirm against rosrust
    // drop semantics.
    _tf_listener: Arc<rustros_tf::TfListener>,
    _static_frame: String,
    _subscriber: rosrust::Subscriber,
}
impl LaserListener {
    /// Creates a listener that subscribes to `config.topic` and, for every
    /// incoming scan, stores its points (transformed into `static_frame`)
    /// in `self.points`.
    ///
    /// Scans are dropped when no transform from the scan frame to the static
    /// frame is available at the scan's timestamp.
    pub fn new(
        config: ListenerConfigColor,
        tf_listener: Arc<rustros_tf::TfListener>,
        static_frame: String,
    ) -> LaserListener {
        let scan_points = Arc::new(RwLock::new(Vec::<(f64, f64)>::new()));
        // Clones moved into the subscriber callback.
        let cb_scan_points = scan_points.clone();
        let cb_static_frame = static_frame.clone();
        let cb_listener = tf_listener.clone();
        let laser_sub = rosrust::subscribe(
            &config.topic,
            2,
            move |scan: rosrust_msg::sensor_msgs::LaserScan| {
                // Bind the transform directly instead of checking the Result
                // and then unwrapping it again below.
                let transform = match cb_listener.lookup_transform(
                    &cb_static_frame,
                    &scan.header.frame_id,
                    scan.header.stamp,
                ) {
                    Ok(t) => t,
                    Err(_) => return, // no transform available; skip this scan
                };
                let mut points: Vec<(f64, f64)> = Vec::new();
                for (i, range) in scan.ranges.iter().enumerate() {
                    // Beam angle of the i-th reading.
                    let angle = scan.angle_min + i as f32 * scan.angle_increment;
                    let pt = transformation::transform_relative_pt(
                        &transform.transform,
                        (
                            *range as f64 * angle.cos() as f64,
                            *range as f64 * angle.sin() as f64,
                        ),
                    );
                    // Readings at or below range_min are invalid; skip them.
                    if range > &scan.range_min {
                        points.push(pt);
                    }
                }
                *cb_scan_points.write().unwrap() = points;
            },
        )
        .unwrap();
        LaserListener {
            config,
            points: scan_points,
            // The originals are still owned here, so no extra clones needed.
            _tf_listener: tf_listener,
            _static_frame: static_frame,
            _subscriber: laser_sub,
        }
    }
}
| 32.913043 | 81 | 0.492734 |
f9c838a0fc69701ca551d1eade30c4a06462f7bb | 193,331 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
/// <p>Adds or overwrites one or more tags for the specified resource. Tags are metadata that you
/// can assign to your documents, managed instances, maintenance windows, Parameter Store parameters,
/// and patch baselines. Tags enable you to categorize your resources in different ways, for example,
/// by purpose, owner, or environment. Each tag consists of a key and an optional value, both of
/// which you define. For example, you could define a set of tags for your account's managed
/// instances that helps you track each instance's owner and stack level. For example: Key=Owner and
/// Value=DbAdmin, SysAdmin, or Dev. Or Key=Stack and Value=Production, Pre-Production, or
/// Test.</p>
/// <p>Each resource can have a maximum of 50 tags. </p>
/// <p>We recommend that you devise a set of tag keys that meets your needs for each resource type.
/// Using a consistent set of tag keys makes it easier for you to manage your resources. You can
/// search and filter the resources based on the tags you add. Tags don't have any semantic meaning
/// to and are interpreted strictly as a string of characters. </p>
/// <p>For more information about using tags with EC2 instances, see <a href="https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/Using_Tags.html">Tagging your Amazon EC2 resources</a> in the
/// <i>Amazon EC2 User Guide</i>.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct AddTagsToResource {
    _private: (),
}
impl AddTagsToResource {
    /// Creates a new builder-style object to manufacture [`AddTagsToResourceInput`](crate::input::AddTagsToResourceInput)
    pub fn builder() -> crate::input::add_tags_to_resource_input::Builder {
        crate::input::add_tags_to_resource_input::Builder::default()
    }

    /// Constructs the operation marker; all request state lives in the input builder.
    pub fn new() -> Self {
        let _private = ();
        Self { _private }
    }
}
impl smithy_http::response::ParseStrictResponse for AddTagsToResource {
    type Output = std::result::Result<
        crate::output::AddTagsToResourceOutput,
        crate::error::AddTagsToResourceError,
    >;

    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A 2xx status (or an explicit 200) carries a success payload; any
        // other status is handed to the error deserializer.
        if response.status().is_success() || response.status().as_u16() == 200 {
            crate::operation_deser::parse_add_tags_to_resource_response(response)
        } else {
            crate::operation_deser::parse_add_tags_to_resource_error(response)
        }
    }
}
/// <p>Associates a related resource to a Systems Manager OpsCenter OpsItem. For example, you can
/// associate an Incident Manager incident or analysis with an OpsItem. Incident Manager is a
/// capability of AWS Systems Manager.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct AssociateOpsItemRelatedItem {
    _private: (),
}
impl AssociateOpsItemRelatedItem {
    /// Creates a new builder-style object to manufacture [`AssociateOpsItemRelatedItemInput`](crate::input::AssociateOpsItemRelatedItemInput)
    pub fn builder() -> crate::input::associate_ops_item_related_item_input::Builder {
        crate::input::associate_ops_item_related_item_input::Builder::default()
    }

    /// Constructs the operation marker; all request state lives in the input builder.
    pub fn new() -> Self {
        let _private = ();
        Self { _private }
    }
}
impl smithy_http::response::ParseStrictResponse for AssociateOpsItemRelatedItem {
    type Output = std::result::Result<
        crate::output::AssociateOpsItemRelatedItemOutput,
        crate::error::AssociateOpsItemRelatedItemError,
    >;

    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A 2xx status (or an explicit 200) carries a success payload; any
        // other status is handed to the error deserializer.
        if response.status().is_success() || response.status().as_u16() == 200 {
            crate::operation_deser::parse_associate_ops_item_related_item_response(response)
        } else {
            crate::operation_deser::parse_associate_ops_item_related_item_error(response)
        }
    }
}
/// <p>Attempts to cancel the command specified by the Command ID. There is no guarantee that the
/// command will be terminated and the underlying process stopped.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct CancelCommand {
    _private: (),
}
impl CancelCommand {
    /// Creates a new builder-style object to manufacture [`CancelCommandInput`](crate::input::CancelCommandInput)
    pub fn builder() -> crate::input::cancel_command_input::Builder {
        crate::input::cancel_command_input::Builder::default()
    }

    /// Constructs the operation marker; all request state lives in the input builder.
    pub fn new() -> Self {
        let _private = ();
        Self { _private }
    }
}
impl smithy_http::response::ParseStrictResponse for CancelCommand {
    type Output =
        std::result::Result<crate::output::CancelCommandOutput, crate::error::CancelCommandError>;

    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A 2xx status (or an explicit 200) carries a success payload; any
        // other status is handed to the error deserializer.
        if response.status().is_success() || response.status().as_u16() == 200 {
            crate::operation_deser::parse_cancel_command_response(response)
        } else {
            crate::operation_deser::parse_cancel_command_error(response)
        }
    }
}
/// <p>Stops a maintenance window execution that is already in progress and cancels any tasks in
/// the window that have not already starting running. (Tasks already in progress will continue to
/// completion.)</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct CancelMaintenanceWindowExecution {
    _private: (),
}
impl CancelMaintenanceWindowExecution {
    /// Creates a new builder-style object to manufacture [`CancelMaintenanceWindowExecutionInput`](crate::input::CancelMaintenanceWindowExecutionInput)
    pub fn builder() -> crate::input::cancel_maintenance_window_execution_input::Builder {
        crate::input::cancel_maintenance_window_execution_input::Builder::default()
    }

    /// Constructs the operation marker; all request state lives in the input builder.
    pub fn new() -> Self {
        let _private = ();
        Self { _private }
    }
}
impl smithy_http::response::ParseStrictResponse for CancelMaintenanceWindowExecution {
    type Output = std::result::Result<
        crate::output::CancelMaintenanceWindowExecutionOutput,
        crate::error::CancelMaintenanceWindowExecutionError,
    >;

    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A 2xx status (or an explicit 200) carries a success payload; any
        // other status is handed to the error deserializer.
        if response.status().is_success() || response.status().as_u16() == 200 {
            crate::operation_deser::parse_cancel_maintenance_window_execution_response(response)
        } else {
            crate::operation_deser::parse_cancel_maintenance_window_execution_error(response)
        }
    }
}
/// <p>Generates an activation code and activation ID you can use to register your on-premises
/// server or virtual machine (VM) with Systems Manager. Registering these machines with Systems Manager makes it
/// possible to manage them using Systems Manager capabilities. You use the activation code and ID when
/// installing SSM Agent on machines in your hybrid environment. For more information about
/// requirements for managing on-premises instances and VMs using Systems Manager, see <a href="https://docs.aws.amazon.com/systems-manager/latest/userguide/systems-manager-managedinstances.html">Setting up
/// AWS Systems Manager for hybrid environments</a> in the <i>AWS Systems Manager User Guide</i>. </p>
/// <note>
/// <p>On-premises servers or VMs that are registered with Systems Manager and EC2 instances that you manage
/// with Systems Manager are all called <i>managed instances</i>.</p>
/// </note>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct CreateActivation {
    _private: (),
}
impl CreateActivation {
    /// Creates a new builder-style object to manufacture [`CreateActivationInput`](crate::input::CreateActivationInput)
    pub fn builder() -> crate::input::create_activation_input::Builder {
        crate::input::create_activation_input::Builder::default()
    }

    /// Constructs the operation marker; all request state lives in the input builder.
    pub fn new() -> Self {
        let _private = ();
        Self { _private }
    }
}
impl smithy_http::response::ParseStrictResponse for CreateActivation {
    type Output = std::result::Result<
        crate::output::CreateActivationOutput,
        crate::error::CreateActivationError,
    >;

    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A 2xx status (or an explicit 200) carries a success payload; any
        // other status is handed to the error deserializer.
        if response.status().is_success() || response.status().as_u16() == 200 {
            crate::operation_deser::parse_create_activation_response(response)
        } else {
            crate::operation_deser::parse_create_activation_error(response)
        }
    }
}
/// <p>A State Manager association defines the state that you want to maintain on your instances.
/// For example, an association can specify that anti-virus software must be installed and running on
/// your instances, or that certain ports must be closed. For static targets, the association
/// specifies a schedule for when the configuration is reapplied. For dynamic targets, such as an AWS
/// Resource Group or an AWS Autoscaling Group, State Manager applies the configuration when new
/// instances are added to the group. The association also specifies actions to take when applying
/// the configuration. For example, an association for anti-virus software might run once a day. If
/// the software is not installed, then State Manager installs it. If the software is installed, but
/// the service is not running, then the association might instruct State Manager to start the
/// service. </p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct CreateAssociation {
    _private: (),
}
impl CreateAssociation {
    /// Creates a new builder-style object to manufacture [`CreateAssociationInput`](crate::input::CreateAssociationInput)
    pub fn builder() -> crate::input::create_association_input::Builder {
        crate::input::create_association_input::Builder::default()
    }

    /// Constructs the operation marker; all request state lives in the input builder.
    pub fn new() -> Self {
        let _private = ();
        Self { _private }
    }
}
impl smithy_http::response::ParseStrictResponse for CreateAssociation {
    type Output = std::result::Result<
        crate::output::CreateAssociationOutput,
        crate::error::CreateAssociationError,
    >;

    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A 2xx status (or an explicit 200) carries a success payload; any
        // other status is handed to the error deserializer.
        if response.status().is_success() || response.status().as_u16() == 200 {
            crate::operation_deser::parse_create_association_response(response)
        } else {
            crate::operation_deser::parse_create_association_error(response)
        }
    }
}
/// <p>Associates the specified Systems Manager document with the specified instances or targets.</p>
/// <p>When you associate a document with one or more instances using instance IDs or tags,
/// SSM Agent running on the instance processes the document and configures the instance as
/// specified.</p>
/// <p>If you associate a document with an instance that already has an associated document, the
/// system returns the AssociationAlreadyExists exception.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct CreateAssociationBatch {
    _private: (),
}
impl CreateAssociationBatch {
    /// Creates a new builder-style object to manufacture [`CreateAssociationBatchInput`](crate::input::CreateAssociationBatchInput)
    pub fn builder() -> crate::input::create_association_batch_input::Builder {
        crate::input::create_association_batch_input::Builder::default()
    }

    /// Constructs the operation marker; all request state lives in the input builder.
    pub fn new() -> Self {
        let _private = ();
        Self { _private }
    }
}
impl smithy_http::response::ParseStrictResponse for CreateAssociationBatch {
    type Output = std::result::Result<
        crate::output::CreateAssociationBatchOutput,
        crate::error::CreateAssociationBatchError,
    >;

    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A 2xx status (or an explicit 200) carries a success payload; any
        // other status is handed to the error deserializer.
        if response.status().is_success() || response.status().as_u16() == 200 {
            crate::operation_deser::parse_create_association_batch_response(response)
        } else {
            crate::operation_deser::parse_create_association_batch_error(response)
        }
    }
}
/// <p>Creates a Systems Manager (SSM) document. An SSM document defines the actions that Systems Manager performs on
/// your managed instances. For more information about SSM documents, including information about
/// supported schemas, features, and syntax, see <a href="https://docs.aws.amazon.com/systems-manager/latest/userguide/sysman-ssm-docs.html">AWS Systems Manager Documents</a> in the
/// <i>AWS Systems Manager User Guide</i>.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct CreateDocument {
    _private: (),
}
impl CreateDocument {
    /// Creates a new builder-style object to manufacture [`CreateDocumentInput`](crate::input::CreateDocumentInput)
    pub fn builder() -> crate::input::create_document_input::Builder {
        crate::input::create_document_input::Builder::default()
    }

    /// Constructs the operation marker; all request state lives in the input builder.
    pub fn new() -> Self {
        let _private = ();
        Self { _private }
    }
}
impl smithy_http::response::ParseStrictResponse for CreateDocument {
    type Output =
        std::result::Result<crate::output::CreateDocumentOutput, crate::error::CreateDocumentError>;

    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A 2xx status (or an explicit 200) carries a success payload; any
        // other status is handed to the error deserializer.
        if response.status().is_success() || response.status().as_u16() == 200 {
            crate::operation_deser::parse_create_document_response(response)
        } else {
            crate::operation_deser::parse_create_document_error(response)
        }
    }
}
/// <p>Creates a new maintenance window.</p>
/// <note>
/// <p>The value you specify for <code>Duration</code> determines the specific end time for the
/// maintenance window based on the time it begins. No maintenance window tasks are permitted to
/// start after the resulting endtime minus the number of hours you specify for <code>Cutoff</code>.
/// For example, if the maintenance window starts at 3 PM, the duration is three hours, and the
/// value you specify for <code>Cutoff</code> is one hour, no maintenance window tasks can start
/// after 5 PM.</p>
/// </note>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct CreateMaintenanceWindow {
    _private: (),
}
impl CreateMaintenanceWindow {
    /// Creates a new builder-style object to manufacture [`CreateMaintenanceWindowInput`](crate::input::CreateMaintenanceWindowInput)
    pub fn builder() -> crate::input::create_maintenance_window_input::Builder {
        crate::input::create_maintenance_window_input::Builder::default()
    }

    /// Constructs the operation marker; all request state lives in the input builder.
    pub fn new() -> Self {
        let _private = ();
        Self { _private }
    }
}
impl smithy_http::response::ParseStrictResponse for CreateMaintenanceWindow {
    type Output = std::result::Result<
        crate::output::CreateMaintenanceWindowOutput,
        crate::error::CreateMaintenanceWindowError,
    >;

    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A 2xx status (or an explicit 200) carries a success payload; any
        // other status is handed to the error deserializer.
        if response.status().is_success() || response.status().as_u16() == 200 {
            crate::operation_deser::parse_create_maintenance_window_response(response)
        } else {
            crate::operation_deser::parse_create_maintenance_window_error(response)
        }
    }
}
/// <p>Creates a new OpsItem. You must have permission in AWS Identity and Access Management (IAM)
/// to create a new OpsItem. For more information, see <a href="https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter-getting-started.html">Getting started with
/// OpsCenter</a> in the <i>AWS Systems Manager User Guide</i>.</p>
/// <p>Operations engineers and IT professionals use OpsCenter to view, investigate, and remediate
/// operational issues impacting the performance and health of their AWS resources. For more
/// information, see <a href="https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter.html">AWS Systems Manager OpsCenter</a> in the
/// <i>AWS Systems Manager User Guide</i>. </p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct CreateOpsItem {
    _private: (),
}
impl CreateOpsItem {
    /// Creates a new builder-style object to manufacture [`CreateOpsItemInput`](crate::input::CreateOpsItemInput)
    pub fn builder() -> crate::input::create_ops_item_input::Builder {
        crate::input::create_ops_item_input::Builder::default()
    }

    /// Constructs the operation marker; all request state lives in the input builder.
    pub fn new() -> Self {
        let _private = ();
        Self { _private }
    }
}
impl smithy_http::response::ParseStrictResponse for CreateOpsItem {
    type Output =
        std::result::Result<crate::output::CreateOpsItemOutput, crate::error::CreateOpsItemError>;

    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A 2xx status (or an explicit 200) carries a success payload; any
        // other status is handed to the error deserializer.
        if response.status().is_success() || response.status().as_u16() == 200 {
            crate::operation_deser::parse_create_ops_item_response(response)
        } else {
            crate::operation_deser::parse_create_ops_item_error(response)
        }
    }
}
/// <p>If you create a new application in Application Manager, Systems Manager calls this API action to specify
/// information about the new application, including the application type.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct CreateOpsMetadata {
    _private: (),
}
impl CreateOpsMetadata {
    /// Creates a new builder-style object to manufacture [`CreateOpsMetadataInput`](crate::input::CreateOpsMetadataInput)
    pub fn builder() -> crate::input::create_ops_metadata_input::Builder {
        crate::input::create_ops_metadata_input::Builder::default()
    }

    /// Constructs the operation marker; all request state lives in the input builder.
    pub fn new() -> Self {
        let _private = ();
        Self { _private }
    }
}
impl smithy_http::response::ParseStrictResponse for CreateOpsMetadata {
    type Output = std::result::Result<
        crate::output::CreateOpsMetadataOutput,
        crate::error::CreateOpsMetadataError,
    >;

    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A 2xx status (or an explicit 200) carries a success payload; any
        // other status is handed to the error deserializer.
        if response.status().is_success() || response.status().as_u16() == 200 {
            crate::operation_deser::parse_create_ops_metadata_response(response)
        } else {
            crate::operation_deser::parse_create_ops_metadata_error(response)
        }
    }
}
/// <p>Creates a patch baseline.</p>
/// <note>
/// <p>For information about valid key and value pairs in <code>PatchFilters</code> for each
/// supported operating system type, see <a href="http://docs.aws.amazon.com/systems-manager/latest/APIReference/API_PatchFilter.html">PatchFilter</a>.</p>
/// </note>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct CreatePatchBaseline {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl CreatePatchBaseline {
    /// Creates a new builder-style object to manufacture [`CreatePatchBaselineInput`](crate::input::CreatePatchBaselineInput)
    pub fn builder() -> crate::input::create_patch_baseline_input::Builder {
        crate::input::create_patch_baseline_input::Builder::default()
    }
    /// Creates a new `CreatePatchBaseline` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for CreatePatchBaseline {
    type Output = std::result::Result<
        crate::output::CreatePatchBaselineOutput,
        crate::error::CreatePatchBaselineError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_create_patch_baseline_error(response)
        } else {
            crate::operation_deser::parse_create_patch_baseline_response(response)
        }
    }
}
/// <p>A resource data sync helps you view data from multiple sources in a single location. Systems
/// Manager offers two types of resource data sync: <code>SyncToDestination</code> and
/// <code>SyncFromSource</code>.</p>
/// <p>You can configure Systems Manager Inventory to use the <code>SyncToDestination</code> type to
/// synchronize Inventory data from multiple AWS Regions to a single S3 bucket. For more information,
/// see <a href="https://docs.aws.amazon.com/systems-manager/latest/userguide/sysman-inventory-datasync.html">Configuring Resource Data
/// Sync for Inventory</a> in the <i>AWS Systems Manager User Guide</i>.</p>
/// <p>You can configure Systems Manager Explorer to use the <code>SyncFromSource</code> type to synchronize
/// operational work items (OpsItems) and operational data (OpsData) from multiple AWS Regions to a
/// single S3 bucket. This type can synchronize OpsItems and OpsData from multiple AWS accounts and
/// Regions or <code>EntireOrganization</code> by using AWS Organizations. For more information, see <a href="https://docs.aws.amazon.com/systems-manager/latest/userguide/Explorer-resource-data-sync.html">Setting up Systems Manager Explorer to display data from multiple accounts and Regions</a> in the
/// <i>AWS Systems Manager User Guide</i>.</p>
/// <p>A resource data sync is an asynchronous operation that returns immediately. After a
/// successful initial sync is completed, the system continuously syncs data. To check the status of
/// a sync, use the <a>ListResourceDataSync</a>.</p>
/// <note>
/// <p>By default, data is not encrypted in Amazon S3. We strongly recommend that you enable encryption
/// in Amazon S3 to ensure secure data storage. We also recommend that you secure access to the Amazon S3
/// bucket by creating a restrictive bucket policy. </p>
/// </note>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct CreateResourceDataSync {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl CreateResourceDataSync {
    /// Creates a new builder-style object to manufacture [`CreateResourceDataSyncInput`](crate::input::CreateResourceDataSyncInput)
    pub fn builder() -> crate::input::create_resource_data_sync_input::Builder {
        crate::input::create_resource_data_sync_input::Builder::default()
    }
    /// Creates a new `CreateResourceDataSync` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for CreateResourceDataSync {
    type Output = std::result::Result<
        crate::output::CreateResourceDataSyncOutput,
        crate::error::CreateResourceDataSyncError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_create_resource_data_sync_error(response)
        } else {
            crate::operation_deser::parse_create_resource_data_sync_response(response)
        }
    }
}
/// <p>Deletes an activation. You are not required to delete an activation. If you delete an
/// activation, you can no longer use it to register additional managed instances. Deleting an
/// activation does not de-register managed instances. You must manually de-register managed
/// instances.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DeleteActivation {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DeleteActivation {
    /// Creates a new builder-style object to manufacture [`DeleteActivationInput`](crate::input::DeleteActivationInput)
    pub fn builder() -> crate::input::delete_activation_input::Builder {
        crate::input::delete_activation_input::Builder::default()
    }
    /// Creates a new `DeleteActivation` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DeleteActivation {
    type Output = std::result::Result<
        crate::output::DeleteActivationOutput,
        crate::error::DeleteActivationError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_delete_activation_error(response)
        } else {
            crate::operation_deser::parse_delete_activation_response(response)
        }
    }
}
/// <p>Disassociates the specified Systems Manager document from the specified instance.</p>
/// <p>When you disassociate a document from an instance, it does not change the configuration of
/// the instance. To change the configuration state of an instance after you disassociate a document,
/// you must create a new document with the desired configuration and associate it with the
/// instance.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DeleteAssociation {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DeleteAssociation {
    /// Creates a new builder-style object to manufacture [`DeleteAssociationInput`](crate::input::DeleteAssociationInput)
    pub fn builder() -> crate::input::delete_association_input::Builder {
        crate::input::delete_association_input::Builder::default()
    }
    /// Creates a new `DeleteAssociation` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DeleteAssociation {
    type Output = std::result::Result<
        crate::output::DeleteAssociationOutput,
        crate::error::DeleteAssociationError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_delete_association_error(response)
        } else {
            crate::operation_deser::parse_delete_association_response(response)
        }
    }
}
/// <p>Deletes the Systems Manager document and all instance associations to the document.</p>
/// <p>Before you delete the document, we recommend that you use <a>DeleteAssociation</a> to disassociate all instances that are associated with the document.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DeleteDocument {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DeleteDocument {
    /// Creates a new builder-style object to manufacture [`DeleteDocumentInput`](crate::input::DeleteDocumentInput)
    pub fn builder() -> crate::input::delete_document_input::Builder {
        crate::input::delete_document_input::Builder::default()
    }
    /// Creates a new `DeleteDocument` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DeleteDocument {
    type Output =
        std::result::Result<crate::output::DeleteDocumentOutput, crate::error::DeleteDocumentError>;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_delete_document_error(response)
        } else {
            crate::operation_deser::parse_delete_document_response(response)
        }
    }
}
/// <p>Delete a custom inventory type or the data associated with a custom Inventory type. Deleting
/// a custom inventory type is also referred to as deleting a custom inventory schema.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DeleteInventory {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DeleteInventory {
    /// Creates a new builder-style object to manufacture [`DeleteInventoryInput`](crate::input::DeleteInventoryInput)
    pub fn builder() -> crate::input::delete_inventory_input::Builder {
        crate::input::delete_inventory_input::Builder::default()
    }
    /// Creates a new `DeleteInventory` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DeleteInventory {
    type Output = std::result::Result<
        crate::output::DeleteInventoryOutput,
        crate::error::DeleteInventoryError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_delete_inventory_error(response)
        } else {
            crate::operation_deser::parse_delete_inventory_response(response)
        }
    }
}
/// <p>Deletes a maintenance window.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DeleteMaintenanceWindow {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DeleteMaintenanceWindow {
    /// Creates a new builder-style object to manufacture [`DeleteMaintenanceWindowInput`](crate::input::DeleteMaintenanceWindowInput)
    pub fn builder() -> crate::input::delete_maintenance_window_input::Builder {
        crate::input::delete_maintenance_window_input::Builder::default()
    }
    /// Creates a new `DeleteMaintenanceWindow` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DeleteMaintenanceWindow {
    type Output = std::result::Result<
        crate::output::DeleteMaintenanceWindowOutput,
        crate::error::DeleteMaintenanceWindowError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_delete_maintenance_window_error(response)
        } else {
            crate::operation_deser::parse_delete_maintenance_window_response(response)
        }
    }
}
/// <p>Delete OpsMetadata related to an application.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DeleteOpsMetadata {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DeleteOpsMetadata {
    /// Creates a new builder-style object to manufacture [`DeleteOpsMetadataInput`](crate::input::DeleteOpsMetadataInput)
    pub fn builder() -> crate::input::delete_ops_metadata_input::Builder {
        crate::input::delete_ops_metadata_input::Builder::default()
    }
    /// Creates a new `DeleteOpsMetadata` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DeleteOpsMetadata {
    type Output = std::result::Result<
        crate::output::DeleteOpsMetadataOutput,
        crate::error::DeleteOpsMetadataError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_delete_ops_metadata_error(response)
        } else {
            crate::operation_deser::parse_delete_ops_metadata_response(response)
        }
    }
}
/// <p>Delete a parameter from the system.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DeleteParameter {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DeleteParameter {
    /// Creates a new builder-style object to manufacture [`DeleteParameterInput`](crate::input::DeleteParameterInput)
    pub fn builder() -> crate::input::delete_parameter_input::Builder {
        crate::input::delete_parameter_input::Builder::default()
    }
    /// Creates a new `DeleteParameter` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DeleteParameter {
    type Output = std::result::Result<
        crate::output::DeleteParameterOutput,
        crate::error::DeleteParameterError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_delete_parameter_error(response)
        } else {
            crate::operation_deser::parse_delete_parameter_response(response)
        }
    }
}
/// <p>Delete a list of parameters.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DeleteParameters {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DeleteParameters {
    /// Creates a new builder-style object to manufacture [`DeleteParametersInput`](crate::input::DeleteParametersInput)
    pub fn builder() -> crate::input::delete_parameters_input::Builder {
        crate::input::delete_parameters_input::Builder::default()
    }
    /// Creates a new `DeleteParameters` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DeleteParameters {
    type Output = std::result::Result<
        crate::output::DeleteParametersOutput,
        crate::error::DeleteParametersError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_delete_parameters_error(response)
        } else {
            crate::operation_deser::parse_delete_parameters_response(response)
        }
    }
}
/// <p>Deletes a patch baseline.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DeletePatchBaseline {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DeletePatchBaseline {
    /// Creates a new builder-style object to manufacture [`DeletePatchBaselineInput`](crate::input::DeletePatchBaselineInput)
    pub fn builder() -> crate::input::delete_patch_baseline_input::Builder {
        crate::input::delete_patch_baseline_input::Builder::default()
    }
    /// Creates a new `DeletePatchBaseline` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DeletePatchBaseline {
    type Output = std::result::Result<
        crate::output::DeletePatchBaselineOutput,
        crate::error::DeletePatchBaselineError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_delete_patch_baseline_error(response)
        } else {
            crate::operation_deser::parse_delete_patch_baseline_response(response)
        }
    }
}
/// <p>Deletes a Resource Data Sync configuration. After the configuration is deleted, changes to
/// data on managed instances are no longer synced to or from the target. Deleting a sync
/// configuration does not delete data.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DeleteResourceDataSync {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DeleteResourceDataSync {
    /// Creates a new builder-style object to manufacture [`DeleteResourceDataSyncInput`](crate::input::DeleteResourceDataSyncInput)
    pub fn builder() -> crate::input::delete_resource_data_sync_input::Builder {
        crate::input::delete_resource_data_sync_input::Builder::default()
    }
    /// Creates a new `DeleteResourceDataSync` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DeleteResourceDataSync {
    type Output = std::result::Result<
        crate::output::DeleteResourceDataSyncOutput,
        crate::error::DeleteResourceDataSyncError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_delete_resource_data_sync_error(response)
        } else {
            crate::operation_deser::parse_delete_resource_data_sync_response(response)
        }
    }
}
/// <p>Removes the server or virtual machine from the list of registered servers. You can
/// reregister the instance again at any time. If you don't plan to use Run Command on the server, we
/// suggest uninstalling SSM Agent first.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DeregisterManagedInstance {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DeregisterManagedInstance {
    /// Creates a new builder-style object to manufacture [`DeregisterManagedInstanceInput`](crate::input::DeregisterManagedInstanceInput)
    pub fn builder() -> crate::input::deregister_managed_instance_input::Builder {
        crate::input::deregister_managed_instance_input::Builder::default()
    }
    /// Creates a new `DeregisterManagedInstance` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DeregisterManagedInstance {
    type Output = std::result::Result<
        crate::output::DeregisterManagedInstanceOutput,
        crate::error::DeregisterManagedInstanceError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_deregister_managed_instance_error(response)
        } else {
            crate::operation_deser::parse_deregister_managed_instance_response(response)
        }
    }
}
/// <p>Removes a patch group from a patch baseline.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DeregisterPatchBaselineForPatchGroup {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DeregisterPatchBaselineForPatchGroup {
    /// Creates a new builder-style object to manufacture [`DeregisterPatchBaselineForPatchGroupInput`](crate::input::DeregisterPatchBaselineForPatchGroupInput)
    pub fn builder() -> crate::input::deregister_patch_baseline_for_patch_group_input::Builder {
        crate::input::deregister_patch_baseline_for_patch_group_input::Builder::default()
    }
    /// Creates a new `DeregisterPatchBaselineForPatchGroup` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DeregisterPatchBaselineForPatchGroup {
    type Output = std::result::Result<
        crate::output::DeregisterPatchBaselineForPatchGroupOutput,
        crate::error::DeregisterPatchBaselineForPatchGroupError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_deregister_patch_baseline_for_patch_group_error(response)
        } else {
            crate::operation_deser::parse_deregister_patch_baseline_for_patch_group_response(
                response,
            )
        }
    }
}
/// <p>Removes a target from a maintenance window.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DeregisterTargetFromMaintenanceWindow {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DeregisterTargetFromMaintenanceWindow {
    /// Creates a new builder-style object to manufacture [`DeregisterTargetFromMaintenanceWindowInput`](crate::input::DeregisterTargetFromMaintenanceWindowInput)
    pub fn builder() -> crate::input::deregister_target_from_maintenance_window_input::Builder {
        crate::input::deregister_target_from_maintenance_window_input::Builder::default()
    }
    /// Creates a new `DeregisterTargetFromMaintenanceWindow` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DeregisterTargetFromMaintenanceWindow {
    type Output = std::result::Result<
        crate::output::DeregisterTargetFromMaintenanceWindowOutput,
        crate::error::DeregisterTargetFromMaintenanceWindowError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_deregister_target_from_maintenance_window_error(response)
        } else {
            crate::operation_deser::parse_deregister_target_from_maintenance_window_response(
                response,
            )
        }
    }
}
/// <p>Removes a task from a maintenance window.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DeregisterTaskFromMaintenanceWindow {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DeregisterTaskFromMaintenanceWindow {
    /// Creates a new builder-style object to manufacture [`DeregisterTaskFromMaintenanceWindowInput`](crate::input::DeregisterTaskFromMaintenanceWindowInput)
    pub fn builder() -> crate::input::deregister_task_from_maintenance_window_input::Builder {
        crate::input::deregister_task_from_maintenance_window_input::Builder::default()
    }
    /// Creates a new `DeregisterTaskFromMaintenanceWindow` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DeregisterTaskFromMaintenanceWindow {
    type Output = std::result::Result<
        crate::output::DeregisterTaskFromMaintenanceWindowOutput,
        crate::error::DeregisterTaskFromMaintenanceWindowError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_deregister_task_from_maintenance_window_error(response)
        } else {
            crate::operation_deser::parse_deregister_task_from_maintenance_window_response(response)
        }
    }
}
/// <p>Describes details about the activation, such as the date and time the activation was
/// created, its expiration date, the IAM role assigned to the instances in the activation, and the
/// number of instances registered by using this activation.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeActivations {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DescribeActivations {
    /// Creates a new builder-style object to manufacture [`DescribeActivationsInput`](crate::input::DescribeActivationsInput)
    pub fn builder() -> crate::input::describe_activations_input::Builder {
        crate::input::describe_activations_input::Builder::default()
    }
    /// Creates a new `DescribeActivations` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeActivations {
    type Output = std::result::Result<
        crate::output::DescribeActivationsOutput,
        crate::error::DescribeActivationsError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_activations_error(response)
        } else {
            crate::operation_deser::parse_describe_activations_response(response)
        }
    }
}
/// <p>Describes the association for the specified target or instance. If you created the
/// association by using the <code>Targets</code> parameter, then you must retrieve the association
/// by using the association ID. If you created the association by specifying an instance ID and a
/// Systems Manager document, then you retrieve the association by specifying the document name and the
/// instance ID. </p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeAssociation {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DescribeAssociation {
    /// Creates a new builder-style object to manufacture [`DescribeAssociationInput`](crate::input::DescribeAssociationInput)
    pub fn builder() -> crate::input::describe_association_input::Builder {
        crate::input::describe_association_input::Builder::default()
    }
    /// Creates a new `DescribeAssociation` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeAssociation {
    type Output = std::result::Result<
        crate::output::DescribeAssociationOutput,
        crate::error::DescribeAssociationError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_association_error(response)
        } else {
            crate::operation_deser::parse_describe_association_response(response)
        }
    }
}
/// <p>Use this API action to view all executions for a specific association ID. </p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeAssociationExecutions {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DescribeAssociationExecutions {
    /// Creates a new builder-style object to manufacture [`DescribeAssociationExecutionsInput`](crate::input::DescribeAssociationExecutionsInput)
    pub fn builder() -> crate::input::describe_association_executions_input::Builder {
        crate::input::describe_association_executions_input::Builder::default()
    }
    /// Creates a new `DescribeAssociationExecutions` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeAssociationExecutions {
    type Output = std::result::Result<
        crate::output::DescribeAssociationExecutionsOutput,
        crate::error::DescribeAssociationExecutionsError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_association_executions_error(response)
        } else {
            crate::operation_deser::parse_describe_association_executions_response(response)
        }
    }
}
/// <p>Use this API action to view information about a specific execution of a specific
/// association.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeAssociationExecutionTargets {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DescribeAssociationExecutionTargets {
    /// Creates a new builder-style object to manufacture [`DescribeAssociationExecutionTargetsInput`](crate::input::DescribeAssociationExecutionTargetsInput)
    pub fn builder() -> crate::input::describe_association_execution_targets_input::Builder {
        crate::input::describe_association_execution_targets_input::Builder::default()
    }
    /// Creates a new `DescribeAssociationExecutionTargets` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeAssociationExecutionTargets {
    type Output = std::result::Result<
        crate::output::DescribeAssociationExecutionTargetsOutput,
        crate::error::DescribeAssociationExecutionTargetsError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_association_execution_targets_error(response)
        } else {
            crate::operation_deser::parse_describe_association_execution_targets_response(response)
        }
    }
}
/// <p>Provides details about all active and terminated Automation executions.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeAutomationExecutions {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DescribeAutomationExecutions {
    /// Creates a new builder-style object to manufacture [`DescribeAutomationExecutionsInput`](crate::input::DescribeAutomationExecutionsInput)
    pub fn builder() -> crate::input::describe_automation_executions_input::Builder {
        crate::input::describe_automation_executions_input::Builder::default()
    }
    /// Creates a new `DescribeAutomationExecutions` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeAutomationExecutions {
    type Output = std::result::Result<
        crate::output::DescribeAutomationExecutionsOutput,
        crate::error::DescribeAutomationExecutionsError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_automation_executions_error(response)
        } else {
            crate::operation_deser::parse_describe_automation_executions_response(response)
        }
    }
}
/// <p>Information about all active and terminated step executions in an Automation
/// workflow.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeAutomationStepExecutions {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DescribeAutomationStepExecutions {
    /// Creates a new builder-style object to manufacture [`DescribeAutomationStepExecutionsInput`](crate::input::DescribeAutomationStepExecutionsInput)
    pub fn builder() -> crate::input::describe_automation_step_executions_input::Builder {
        crate::input::describe_automation_step_executions_input::Builder::default()
    }
    /// Creates a new `DescribeAutomationStepExecutions` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeAutomationStepExecutions {
    type Output = std::result::Result<
        crate::output::DescribeAutomationStepExecutionsOutput,
        crate::error::DescribeAutomationStepExecutionsError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_automation_step_executions_error(response)
        } else {
            crate::operation_deser::parse_describe_automation_step_executions_response(response)
        }
    }
}
/// <p>Lists all patches eligible to be included in a patch baseline.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeAvailablePatches {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DescribeAvailablePatches {
    /// Creates a new builder-style object to manufacture [`DescribeAvailablePatchesInput`](crate::input::DescribeAvailablePatchesInput)
    pub fn builder() -> crate::input::describe_available_patches_input::Builder {
        crate::input::describe_available_patches_input::Builder::default()
    }
    /// Creates a new `DescribeAvailablePatches` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeAvailablePatches {
    type Output = std::result::Result<
        crate::output::DescribeAvailablePatchesOutput,
        crate::error::DescribeAvailablePatchesError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_available_patches_error(response)
        } else {
            crate::operation_deser::parse_describe_available_patches_response(response)
        }
    }
}
/// <p>Describes the specified Systems Manager document.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeDocument {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DescribeDocument {
    /// Creates a new builder-style object to manufacture [`DescribeDocumentInput`](crate::input::DescribeDocumentInput)
    pub fn builder() -> crate::input::describe_document_input::Builder {
        crate::input::describe_document_input::Builder::default()
    }
    /// Creates a new `DescribeDocument` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeDocument {
    type Output = std::result::Result<
        crate::output::DescribeDocumentOutput,
        crate::error::DescribeDocumentError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_document_error(response)
        } else {
            crate::operation_deser::parse_describe_document_response(response)
        }
    }
}
/// <p>Describes the permissions for a Systems Manager document. If you created the document, you are the
/// owner. If a document is shared, it can either be shared privately (by specifying a user's AWS
/// account ID) or publicly (<i>All</i>). </p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeDocumentPermission {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DescribeDocumentPermission {
    /// Creates a new builder-style object to manufacture [`DescribeDocumentPermissionInput`](crate::input::DescribeDocumentPermissionInput)
    pub fn builder() -> crate::input::describe_document_permission_input::Builder {
        crate::input::describe_document_permission_input::Builder::default()
    }
    /// Creates a new `DescribeDocumentPermission` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeDocumentPermission {
    type Output = std::result::Result<
        crate::output::DescribeDocumentPermissionOutput,
        crate::error::DescribeDocumentPermissionError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_document_permission_error(response)
        } else {
            crate::operation_deser::parse_describe_document_permission_response(response)
        }
    }
}
/// <p>All associations for the instance(s).</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeEffectiveInstanceAssociations {
    // Zero-sized private field: forces callers to construct via `new`/`builder`.
    _private: (),
}
impl DescribeEffectiveInstanceAssociations {
    /// Creates a new builder-style object to manufacture [`DescribeEffectiveInstanceAssociationsInput`](crate::input::DescribeEffectiveInstanceAssociationsInput)
    pub fn builder() -> crate::input::describe_effective_instance_associations_input::Builder {
        crate::input::describe_effective_instance_associations_input::Builder::default()
    }
    /// Creates a new `DescribeEffectiveInstanceAssociations` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeEffectiveInstanceAssociations {
    type Output = std::result::Result<
        crate::output::DescribeEffectiveInstanceAssociationsOutput,
        crate::error::DescribeEffectiveInstanceAssociationsError,
    >;
    /// Dispatches the fully-buffered HTTP response to the success or error
    /// deserializer based on the status code.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // Any non-2xx status is an error; the redundant `as_u16() != 200`
        // conjunct (implied by `!is_success()`) has been removed.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_effective_instance_associations_error(response)
        } else {
            crate::operation_deser::parse_describe_effective_instance_associations_response(
                response,
            )
        }
    }
}
/// <p>Retrieves the current effective patches (the patch and the approval state) for the specified
/// patch baseline. Note that this API applies only to Windows patch baselines.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeEffectivePatchesForPatchBaseline {
    _private: (),
}
impl DescribeEffectivePatchesForPatchBaseline {
    /// Creates a new builder-style object to manufacture [`DescribeEffectivePatchesForPatchBaselineInput`](crate::input::DescribeEffectivePatchesForPatchBaselineInput)
    pub fn builder() -> crate::input::describe_effective_patches_for_patch_baseline_input::Builder {
        crate::input::describe_effective_patches_for_patch_baseline_input::Builder::default()
    }
    /// Creates a new `DescribeEffectivePatchesForPatchBaseline` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeEffectivePatchesForPatchBaseline {
    type Output = std::result::Result<
        crate::output::DescribeEffectivePatchesForPatchBaselineOutput,
        crate::error::DescribeEffectivePatchesForPatchBaselineError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_effective_patches_for_patch_baseline_error(
                response,
            )
        } else {
            crate::operation_deser::parse_describe_effective_patches_for_patch_baseline_response(
                response,
            )
        }
    }
}
/// <p>The status of the associations for the instance(s).</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeInstanceAssociationsStatus {
    _private: (),
}
impl DescribeInstanceAssociationsStatus {
    /// Creates a new builder-style object to manufacture [`DescribeInstanceAssociationsStatusInput`](crate::input::DescribeInstanceAssociationsStatusInput)
    pub fn builder() -> crate::input::describe_instance_associations_status_input::Builder {
        crate::input::describe_instance_associations_status_input::Builder::default()
    }
    /// Creates a new `DescribeInstanceAssociationsStatus` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeInstanceAssociationsStatus {
    type Output = std::result::Result<
        crate::output::DescribeInstanceAssociationsStatusOutput,
        crate::error::DescribeInstanceAssociationsStatusError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_instance_associations_status_error(response)
        } else {
            crate::operation_deser::parse_describe_instance_associations_status_response(response)
        }
    }
}
/// <p>Describes one or more of your instances, including information about the operating system
/// platform, the version of SSM Agent installed on the instance, instance status, and so on.</p>
/// <p>If you specify one or more instance IDs, it returns information for those instances. If you
/// do not specify instance IDs, it returns information for all your instances. If you specify an
/// instance ID that is not valid or an instance that you do not own, you receive an error.</p>
/// <note>
/// <p>The IamRole field for this API action is the Amazon Identity and Access Management (IAM)
/// role assigned to on-premises instances. This call does not return the IAM role for EC2
/// instances.</p>
/// </note>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeInstanceInformation {
    _private: (),
}
impl DescribeInstanceInformation {
    /// Creates a new builder-style object to manufacture [`DescribeInstanceInformationInput`](crate::input::DescribeInstanceInformationInput)
    pub fn builder() -> crate::input::describe_instance_information_input::Builder {
        crate::input::describe_instance_information_input::Builder::default()
    }
    /// Creates a new `DescribeInstanceInformation` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeInstanceInformation {
    type Output = std::result::Result<
        crate::output::DescribeInstanceInformationOutput,
        crate::error::DescribeInstanceInformationError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_instance_information_error(response)
        } else {
            crate::operation_deser::parse_describe_instance_information_response(response)
        }
    }
}
/// <p>Retrieves information about the patches on the specified instance and their state relative
/// to the patch baseline being used for the instance.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeInstancePatches {
    _private: (),
}
impl DescribeInstancePatches {
    /// Creates a new builder-style object to manufacture [`DescribeInstancePatchesInput`](crate::input::DescribeInstancePatchesInput)
    pub fn builder() -> crate::input::describe_instance_patches_input::Builder {
        crate::input::describe_instance_patches_input::Builder::default()
    }
    /// Creates a new `DescribeInstancePatches` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeInstancePatches {
    type Output = std::result::Result<
        crate::output::DescribeInstancePatchesOutput,
        crate::error::DescribeInstancePatchesError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_instance_patches_error(response)
        } else {
            crate::operation_deser::parse_describe_instance_patches_response(response)
        }
    }
}
/// <p>Retrieves the high-level patch state of one or more instances.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeInstancePatchStates {
    _private: (),
}
impl DescribeInstancePatchStates {
    /// Creates a new builder-style object to manufacture [`DescribeInstancePatchStatesInput`](crate::input::DescribeInstancePatchStatesInput)
    pub fn builder() -> crate::input::describe_instance_patch_states_input::Builder {
        crate::input::describe_instance_patch_states_input::Builder::default()
    }
    /// Creates a new `DescribeInstancePatchStates` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeInstancePatchStates {
    type Output = std::result::Result<
        crate::output::DescribeInstancePatchStatesOutput,
        crate::error::DescribeInstancePatchStatesError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_instance_patch_states_error(response)
        } else {
            crate::operation_deser::parse_describe_instance_patch_states_response(response)
        }
    }
}
/// <p>Retrieves the high-level patch state for the instances in the specified patch group.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeInstancePatchStatesForPatchGroup {
    _private: (),
}
impl DescribeInstancePatchStatesForPatchGroup {
    /// Creates a new builder-style object to manufacture [`DescribeInstancePatchStatesForPatchGroupInput`](crate::input::DescribeInstancePatchStatesForPatchGroupInput)
    pub fn builder() -> crate::input::describe_instance_patch_states_for_patch_group_input::Builder
    {
        crate::input::describe_instance_patch_states_for_patch_group_input::Builder::default()
    }
    /// Creates a new `DescribeInstancePatchStatesForPatchGroup` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeInstancePatchStatesForPatchGroup {
    type Output = std::result::Result<
        crate::output::DescribeInstancePatchStatesForPatchGroupOutput,
        crate::error::DescribeInstancePatchStatesForPatchGroupError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_instance_patch_states_for_patch_group_error(
                response,
            )
        } else {
            crate::operation_deser::parse_describe_instance_patch_states_for_patch_group_response(
                response,
            )
        }
    }
}
/// <p>Describes a specific delete inventory operation.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeInventoryDeletions {
    _private: (),
}
impl DescribeInventoryDeletions {
    /// Creates a new builder-style object to manufacture [`DescribeInventoryDeletionsInput`](crate::input::DescribeInventoryDeletionsInput)
    pub fn builder() -> crate::input::describe_inventory_deletions_input::Builder {
        crate::input::describe_inventory_deletions_input::Builder::default()
    }
    /// Creates a new `DescribeInventoryDeletions` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeInventoryDeletions {
    type Output = std::result::Result<
        crate::output::DescribeInventoryDeletionsOutput,
        crate::error::DescribeInventoryDeletionsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_inventory_deletions_error(response)
        } else {
            crate::operation_deser::parse_describe_inventory_deletions_response(response)
        }
    }
}
/// <p>Lists the executions of a maintenance window. This includes information about when the
/// maintenance window was scheduled to be active, and information about tasks registered and run
/// with the maintenance window.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeMaintenanceWindowExecutions {
    _private: (),
}
impl DescribeMaintenanceWindowExecutions {
    /// Creates a new builder-style object to manufacture [`DescribeMaintenanceWindowExecutionsInput`](crate::input::DescribeMaintenanceWindowExecutionsInput)
    pub fn builder() -> crate::input::describe_maintenance_window_executions_input::Builder {
        crate::input::describe_maintenance_window_executions_input::Builder::default()
    }
    /// Creates a new `DescribeMaintenanceWindowExecutions` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeMaintenanceWindowExecutions {
    type Output = std::result::Result<
        crate::output::DescribeMaintenanceWindowExecutionsOutput,
        crate::error::DescribeMaintenanceWindowExecutionsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_maintenance_window_executions_error(response)
        } else {
            crate::operation_deser::parse_describe_maintenance_window_executions_response(response)
        }
    }
}
/// <p>Retrieves the individual task executions (one per target) for a particular task run as part
/// of a maintenance window execution.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeMaintenanceWindowExecutionTaskInvocations {
    _private: (),
}
impl DescribeMaintenanceWindowExecutionTaskInvocations {
    /// Creates a new builder-style object to manufacture [`DescribeMaintenanceWindowExecutionTaskInvocationsInput`](crate::input::DescribeMaintenanceWindowExecutionTaskInvocationsInput)
    pub fn builder(
    ) -> crate::input::describe_maintenance_window_execution_task_invocations_input::Builder {
        crate::input::describe_maintenance_window_execution_task_invocations_input::Builder::default(
        )
    }
    /// Creates a new `DescribeMaintenanceWindowExecutionTaskInvocations` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse
    for DescribeMaintenanceWindowExecutionTaskInvocations
{
    type Output = std::result::Result<
        crate::output::DescribeMaintenanceWindowExecutionTaskInvocationsOutput,
        crate::error::DescribeMaintenanceWindowExecutionTaskInvocationsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_maintenance_window_execution_task_invocations_error(response)
        } else {
            crate::operation_deser::parse_describe_maintenance_window_execution_task_invocations_response(response)
        }
    }
}
/// <p>For a given maintenance window execution, lists the tasks that were run.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeMaintenanceWindowExecutionTasks {
    _private: (),
}
impl DescribeMaintenanceWindowExecutionTasks {
    /// Creates a new builder-style object to manufacture [`DescribeMaintenanceWindowExecutionTasksInput`](crate::input::DescribeMaintenanceWindowExecutionTasksInput)
    pub fn builder() -> crate::input::describe_maintenance_window_execution_tasks_input::Builder {
        crate::input::describe_maintenance_window_execution_tasks_input::Builder::default()
    }
    /// Creates a new `DescribeMaintenanceWindowExecutionTasks` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeMaintenanceWindowExecutionTasks {
    type Output = std::result::Result<
        crate::output::DescribeMaintenanceWindowExecutionTasksOutput,
        crate::error::DescribeMaintenanceWindowExecutionTasksError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_maintenance_window_execution_tasks_error(
                response,
            )
        } else {
            crate::operation_deser::parse_describe_maintenance_window_execution_tasks_response(
                response,
            )
        }
    }
}
/// <p>Retrieves the maintenance windows in an AWS account.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeMaintenanceWindows {
    _private: (),
}
impl DescribeMaintenanceWindows {
    /// Creates a new builder-style object to manufacture [`DescribeMaintenanceWindowsInput`](crate::input::DescribeMaintenanceWindowsInput)
    pub fn builder() -> crate::input::describe_maintenance_windows_input::Builder {
        crate::input::describe_maintenance_windows_input::Builder::default()
    }
    /// Creates a new `DescribeMaintenanceWindows` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeMaintenanceWindows {
    type Output = std::result::Result<
        crate::output::DescribeMaintenanceWindowsOutput,
        crate::error::DescribeMaintenanceWindowsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_maintenance_windows_error(response)
        } else {
            crate::operation_deser::parse_describe_maintenance_windows_response(response)
        }
    }
}
/// <p>Retrieves information about upcoming executions of a maintenance window.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeMaintenanceWindowSchedule {
    _private: (),
}
impl DescribeMaintenanceWindowSchedule {
    /// Creates a new builder-style object to manufacture [`DescribeMaintenanceWindowScheduleInput`](crate::input::DescribeMaintenanceWindowScheduleInput)
    pub fn builder() -> crate::input::describe_maintenance_window_schedule_input::Builder {
        crate::input::describe_maintenance_window_schedule_input::Builder::default()
    }
    /// Creates a new `DescribeMaintenanceWindowSchedule` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeMaintenanceWindowSchedule {
    type Output = std::result::Result<
        crate::output::DescribeMaintenanceWindowScheduleOutput,
        crate::error::DescribeMaintenanceWindowScheduleError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_maintenance_window_schedule_error(response)
        } else {
            crate::operation_deser::parse_describe_maintenance_window_schedule_response(response)
        }
    }
}
/// <p>Retrieves information about the maintenance window targets or tasks that an instance is
/// associated with.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeMaintenanceWindowsForTarget {
    _private: (),
}
impl DescribeMaintenanceWindowsForTarget {
    /// Creates a new builder-style object to manufacture [`DescribeMaintenanceWindowsForTargetInput`](crate::input::DescribeMaintenanceWindowsForTargetInput)
    pub fn builder() -> crate::input::describe_maintenance_windows_for_target_input::Builder {
        crate::input::describe_maintenance_windows_for_target_input::Builder::default()
    }
    /// Creates a new `DescribeMaintenanceWindowsForTarget` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeMaintenanceWindowsForTarget {
    type Output = std::result::Result<
        crate::output::DescribeMaintenanceWindowsForTargetOutput,
        crate::error::DescribeMaintenanceWindowsForTargetError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_maintenance_windows_for_target_error(response)
        } else {
            crate::operation_deser::parse_describe_maintenance_windows_for_target_response(response)
        }
    }
}
/// <p>Lists the targets registered with the maintenance window.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeMaintenanceWindowTargets {
    _private: (),
}
impl DescribeMaintenanceWindowTargets {
    /// Creates a new builder-style object to manufacture [`DescribeMaintenanceWindowTargetsInput`](crate::input::DescribeMaintenanceWindowTargetsInput)
    pub fn builder() -> crate::input::describe_maintenance_window_targets_input::Builder {
        crate::input::describe_maintenance_window_targets_input::Builder::default()
    }
    /// Creates a new `DescribeMaintenanceWindowTargets` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeMaintenanceWindowTargets {
    type Output = std::result::Result<
        crate::output::DescribeMaintenanceWindowTargetsOutput,
        crate::error::DescribeMaintenanceWindowTargetsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_maintenance_window_targets_error(response)
        } else {
            crate::operation_deser::parse_describe_maintenance_window_targets_response(response)
        }
    }
}
/// <p>Lists the tasks in a maintenance window.</p>
/// <note>
/// <p>For maintenance window tasks without a specified target, you cannot supply values for
/// <code>--max-errors</code> and <code>--max-concurrency</code>. Instead, the system inserts a
/// placeholder value of <code>1</code>, which may be reported in the response to this command.
/// These values do not affect the running of your task and can be ignored.</p>
/// </note>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeMaintenanceWindowTasks {
    _private: (),
}
impl DescribeMaintenanceWindowTasks {
    /// Creates a new builder-style object to manufacture [`DescribeMaintenanceWindowTasksInput`](crate::input::DescribeMaintenanceWindowTasksInput)
    pub fn builder() -> crate::input::describe_maintenance_window_tasks_input::Builder {
        crate::input::describe_maintenance_window_tasks_input::Builder::default()
    }
    /// Creates a new `DescribeMaintenanceWindowTasks` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeMaintenanceWindowTasks {
    type Output = std::result::Result<
        crate::output::DescribeMaintenanceWindowTasksOutput,
        crate::error::DescribeMaintenanceWindowTasksError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_maintenance_window_tasks_error(response)
        } else {
            crate::operation_deser::parse_describe_maintenance_window_tasks_response(response)
        }
    }
}
/// <p>Query a set of OpsItems. You must have permission in AWS Identity and Access Management
/// (IAM) to query a list of OpsItems. For more information, see <a href="https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter-getting-started.html">Getting started with
/// OpsCenter</a> in the <i>AWS Systems Manager User Guide</i>.</p>
/// <p>Operations engineers and IT professionals use OpsCenter to view, investigate, and remediate
/// operational issues impacting the performance and health of their AWS resources. For more
/// information, see <a href="https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter.html">AWS Systems Manager OpsCenter</a> in the
/// <i>AWS Systems Manager User Guide</i>. </p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeOpsItems {
    _private: (),
}
impl DescribeOpsItems {
    /// Creates a new builder-style object to manufacture [`DescribeOpsItemsInput`](crate::input::DescribeOpsItemsInput)
    pub fn builder() -> crate::input::describe_ops_items_input::Builder {
        crate::input::describe_ops_items_input::Builder::default()
    }
    /// Creates a new `DescribeOpsItems` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeOpsItems {
    type Output = std::result::Result<
        crate::output::DescribeOpsItemsOutput,
        crate::error::DescribeOpsItemsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_ops_items_error(response)
        } else {
            crate::operation_deser::parse_describe_ops_items_response(response)
        }
    }
}
/// <p>Get information about a parameter.</p>
/// <note>
/// <p>Request results are returned on a best-effort basis. If you specify <code>MaxResults</code>
/// in the request, the response includes information up to the limit specified. The number of items
/// returned, however, can be between zero and the value of <code>MaxResults</code>. If the service
/// reaches an internal limit while processing the results, it stops the operation and returns the
/// matching values up to that point and a <code>NextToken</code>. You can specify the
/// <code>NextToken</code> in a subsequent call to get the next set of results.</p>
/// </note>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeParameters {
    _private: (),
}
impl DescribeParameters {
    /// Creates a new builder-style object to manufacture [`DescribeParametersInput`](crate::input::DescribeParametersInput)
    pub fn builder() -> crate::input::describe_parameters_input::Builder {
        crate::input::describe_parameters_input::Builder::default()
    }
    /// Creates a new `DescribeParameters` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeParameters {
    type Output = std::result::Result<
        crate::output::DescribeParametersOutput,
        crate::error::DescribeParametersError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_parameters_error(response)
        } else {
            crate::operation_deser::parse_describe_parameters_response(response)
        }
    }
}
/// <p>Lists the patch baselines in your AWS account.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribePatchBaselines {
    _private: (),
}
impl DescribePatchBaselines {
    /// Creates a new builder-style object to manufacture [`DescribePatchBaselinesInput`](crate::input::DescribePatchBaselinesInput)
    pub fn builder() -> crate::input::describe_patch_baselines_input::Builder {
        crate::input::describe_patch_baselines_input::Builder::default()
    }
    /// Creates a new `DescribePatchBaselines` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribePatchBaselines {
    type Output = std::result::Result<
        crate::output::DescribePatchBaselinesOutput,
        crate::error::DescribePatchBaselinesError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_patch_baselines_error(response)
        } else {
            crate::operation_deser::parse_describe_patch_baselines_response(response)
        }
    }
}
/// <p>Lists all patch groups that have been registered with patch baselines.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribePatchGroups {
    _private: (),
}
impl DescribePatchGroups {
    /// Creates a new builder-style object to manufacture [`DescribePatchGroupsInput`](crate::input::DescribePatchGroupsInput)
    pub fn builder() -> crate::input::describe_patch_groups_input::Builder {
        crate::input::describe_patch_groups_input::Builder::default()
    }
    /// Creates a new `DescribePatchGroups` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribePatchGroups {
    type Output = std::result::Result<
        crate::output::DescribePatchGroupsOutput,
        crate::error::DescribePatchGroupsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_patch_groups_error(response)
        } else {
            crate::operation_deser::parse_describe_patch_groups_response(response)
        }
    }
}
/// <p>Returns high-level aggregated patch compliance state for a patch group.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribePatchGroupState {
    _private: (),
}
impl DescribePatchGroupState {
    /// Creates a new builder-style object to manufacture [`DescribePatchGroupStateInput`](crate::input::DescribePatchGroupStateInput)
    pub fn builder() -> crate::input::describe_patch_group_state_input::Builder {
        crate::input::describe_patch_group_state_input::Builder::default()
    }
    /// Creates a new `DescribePatchGroupState` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribePatchGroupState {
    type Output = std::result::Result<
        crate::output::DescribePatchGroupStateOutput,
        crate::error::DescribePatchGroupStateError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_patch_group_state_error(response)
        } else {
            crate::operation_deser::parse_describe_patch_group_state_response(response)
        }
    }
}
/// <p>Lists the properties of available patches organized by product, product family,
/// classification, severity, and other properties of available patches. You can use the reported
/// properties in the filters you specify in requests for actions such as <a>CreatePatchBaseline</a>, <a>UpdatePatchBaseline</a>, <a>DescribeAvailablePatches</a>, and <a>DescribePatchBaselines</a>.</p>
/// <p>The following section lists the properties that can be used in filters for each major
/// operating system type:</p>
/// <dl>
/// <dt>AMAZON_LINUX</dt>
/// <dd>
/// <p>Valid properties: PRODUCT, CLASSIFICATION, SEVERITY</p>
/// </dd>
/// <dt>AMAZON_LINUX_2</dt>
/// <dd>
/// <p>Valid properties: PRODUCT, CLASSIFICATION, SEVERITY</p>
/// </dd>
/// <dt>CENTOS</dt>
/// <dd>
/// <p>Valid properties: PRODUCT, CLASSIFICATION, SEVERITY</p>
/// </dd>
/// <dt>DEBIAN</dt>
/// <dd>
/// <p>Valid properties: PRODUCT, PRIORITY</p>
/// </dd>
/// <dt>MACOS</dt>
/// <dd>
/// <p>Valid properties: PRODUCT, CLASSIFICATION</p>
/// </dd>
/// <dt>ORACLE_LINUX</dt>
/// <dd>
/// <p>Valid properties: PRODUCT, CLASSIFICATION, SEVERITY</p>
/// </dd>
/// <dt>REDHAT_ENTERPRISE_LINUX</dt>
/// <dd>
/// <p>Valid properties: PRODUCT, CLASSIFICATION, SEVERITY</p>
/// </dd>
/// <dt>SUSE</dt>
/// <dd>
/// <p>Valid properties: PRODUCT, CLASSIFICATION, SEVERITY</p>
/// </dd>
/// <dt>UBUNTU</dt>
/// <dd>
/// <p>Valid properties: PRODUCT, PRIORITY</p>
/// </dd>
/// <dt>WINDOWS</dt>
/// <dd>
/// <p>Valid properties: PRODUCT, PRODUCT_FAMILY, CLASSIFICATION, MSRC_SEVERITY</p>
/// </dd>
/// </dl>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribePatchProperties {
    _private: (),
}
impl DescribePatchProperties {
    /// Creates a new builder-style object to manufacture [`DescribePatchPropertiesInput`](crate::input::DescribePatchPropertiesInput)
    pub fn builder() -> crate::input::describe_patch_properties_input::Builder {
        crate::input::describe_patch_properties_input::Builder::default()
    }
    /// Creates a new `DescribePatchProperties` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribePatchProperties {
    type Output = std::result::Result<
        crate::output::DescribePatchPropertiesOutput,
        crate::error::DescribePatchPropertiesError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_patch_properties_error(response)
        } else {
            crate::operation_deser::parse_describe_patch_properties_response(response)
        }
    }
}
/// <p>Retrieves a list of all active sessions (both connected and disconnected) or terminated
/// sessions from the past 30 days.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DescribeSessions {
    _private: (),
}
impl DescribeSessions {
    /// Creates a new builder-style object to manufacture [`DescribeSessionsInput`](crate::input::DescribeSessionsInput)
    pub fn builder() -> crate::input::describe_sessions_input::Builder {
        crate::input::describe_sessions_input::Builder::default()
    }
    /// Creates a new `DescribeSessions` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DescribeSessions {
    type Output = std::result::Result<
        crate::output::DescribeSessionsOutput,
        crate::error::DescribeSessionsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_describe_sessions_error(response)
        } else {
            crate::operation_deser::parse_describe_sessions_response(response)
        }
    }
}
/// <p>Deletes the association between an OpsItem and a related resource. For example, this API
/// action can delete an Incident Manager incident from an OpsItem. Incident Manager is a capability
/// of AWS Systems Manager.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct DisassociateOpsItemRelatedItem {
    _private: (),
}
impl DisassociateOpsItemRelatedItem {
    /// Creates a new builder-style object to manufacture [`DisassociateOpsItemRelatedItemInput`](crate::input::DisassociateOpsItemRelatedItemInput)
    pub fn builder() -> crate::input::disassociate_ops_item_related_item_input::Builder {
        crate::input::disassociate_ops_item_related_item_input::Builder::default()
    }
    /// Creates a new `DisassociateOpsItemRelatedItem` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for DisassociateOpsItemRelatedItem {
    type Output = std::result::Result<
        crate::output::DisassociateOpsItemRelatedItemOutput,
        crate::error::DisassociateOpsItemRelatedItemError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_disassociate_ops_item_related_item_error(response)
        } else {
            crate::operation_deser::parse_disassociate_ops_item_related_item_response(response)
        }
    }
}
/// <p>Get detailed information about a particular Automation execution.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetAutomationExecution {
    _private: (),
}
impl GetAutomationExecution {
    /// Creates a new builder-style object to manufacture [`GetAutomationExecutionInput`](crate::input::GetAutomationExecutionInput)
    pub fn builder() -> crate::input::get_automation_execution_input::Builder {
        crate::input::get_automation_execution_input::Builder::default()
    }
    /// Creates a new `GetAutomationExecution` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for GetAutomationExecution {
    type Output = std::result::Result<
        crate::output::GetAutomationExecutionOutput,
        crate::error::GetAutomationExecutionError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_get_automation_execution_error(response)
        } else {
            crate::operation_deser::parse_get_automation_execution_response(response)
        }
    }
}
/// <p>Gets the state of the AWS Systems Manager Change Calendar at an optional, specified time. If you
/// specify a time, <code>GetCalendarState</code> returns the state of the calendar at a specific
/// time, and returns the next time that the Change Calendar state will transition. If you do not
/// specify a time, <code>GetCalendarState</code> assumes the current time. Change Calendar entries
/// have two possible states: <code>OPEN</code> or <code>CLOSED</code>.</p>
/// <p>If you specify more than one calendar in a request, the command returns the status of
/// <code>OPEN</code> only if all calendars in the request are open. If one or more calendars in the
/// request are closed, the status returned is <code>CLOSED</code>.</p>
/// <p>For more information about Systems Manager Change Calendar, see <a href="https://docs.aws.amazon.com/systems-manager/latest/userguide/systems-manager-change-calendar.html">AWS Systems Manager Change
/// Calendar</a> in the <i>AWS Systems Manager User Guide</i>.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetCalendarState {
    _private: (),
}
impl GetCalendarState {
    /// Creates a new builder-style object to manufacture [`GetCalendarStateInput`](crate::input::GetCalendarStateInput)
    pub fn builder() -> crate::input::get_calendar_state_input::Builder {
        crate::input::get_calendar_state_input::Builder::default()
    }
    /// Creates a new `GetCalendarState` operation marker.
    pub fn new() -> Self {
        Self { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for GetCalendarState {
    type Output = std::result::Result<
        crate::output::GetCalendarStateOutput,
        crate::error::GetCalendarStateError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // 200 is a success status, so the former `&& as_u16() != 200` check was
        // redundant; any non-2xx response goes to the error deserializer.
        if !response.status().is_success() {
            crate::operation_deser::parse_get_calendar_state_error(response)
        } else {
            crate::operation_deser::parse_get_calendar_state_response(response)
        }
    }
}
/// <p>Returns detailed information about command execution for an invocation or plugin.</p>
/// <p>
/// <code>GetCommandInvocation</code> only gives the execution status of a plugin in a document.
/// To get the command execution status on a specific instance, use <a>ListCommandInvocations</a>. To get the command execution status across instances, use
/// <a>ListCommands</a>.</p>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct GetCommandInvocation {
    _private: (),
}
impl GetCommandInvocation {
    /// Returns a new `GetCommandInvocation` operation marker.
    pub fn new() -> Self {
        GetCommandInvocation { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`GetCommandInvocationInput`](crate::input::GetCommandInvocationInput)
    pub fn builder() -> crate::input::get_command_invocation_input::Builder {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for GetCommandInvocation {
    type Output = std::result::Result<
        crate::output::GetCommandInvocationOutput,
        crate::error::GetCommandInvocationError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_get_command_invocation_response(response)
        } else {
            crate::operation_deser::parse_get_command_invocation_error(response)
        }
    }
}
/// <p>Retrieves the Session Manager connection status for an instance to determine whether it is running and
/// ready to receive Session Manager connections.</p>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct GetConnectionStatus {
    _private: (),
}
impl GetConnectionStatus {
    /// Returns a new `GetConnectionStatus` operation marker.
    pub fn new() -> Self {
        GetConnectionStatus { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`GetConnectionStatusInput`](crate::input::GetConnectionStatusInput)
    pub fn builder() -> crate::input::get_connection_status_input::Builder {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for GetConnectionStatus {
    type Output = std::result::Result<
        crate::output::GetConnectionStatusOutput,
        crate::error::GetConnectionStatusError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_get_connection_status_response(response)
        } else {
            crate::operation_deser::parse_get_connection_status_error(response)
        }
    }
}
/// <p>Retrieves the default patch baseline. Note that Systems Manager supports creating multiple default
/// patch baselines. For example, you can create a default patch baseline for each operating
/// system.</p>
/// <p>If you do not specify an operating system value, the default patch baseline for Windows is
/// returned.</p>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct GetDefaultPatchBaseline {
    _private: (),
}
impl GetDefaultPatchBaseline {
    /// Returns a new `GetDefaultPatchBaseline` operation marker.
    pub fn new() -> Self {
        GetDefaultPatchBaseline { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`GetDefaultPatchBaselineInput`](crate::input::GetDefaultPatchBaselineInput)
    pub fn builder() -> crate::input::get_default_patch_baseline_input::Builder {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for GetDefaultPatchBaseline {
    type Output = std::result::Result<
        crate::output::GetDefaultPatchBaselineOutput,
        crate::error::GetDefaultPatchBaselineError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_get_default_patch_baseline_response(response)
        } else {
            crate::operation_deser::parse_get_default_patch_baseline_error(response)
        }
    }
}
/// <p>Retrieves the current snapshot for the patch baseline the instance uses. This API is
/// primarily used by the AWS-RunPatchBaseline Systems Manager document. </p>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct GetDeployablePatchSnapshotForInstance {
    _private: (),
}
impl GetDeployablePatchSnapshotForInstance {
    /// Returns a new `GetDeployablePatchSnapshotForInstance` operation marker.
    pub fn new() -> Self {
        GetDeployablePatchSnapshotForInstance { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`GetDeployablePatchSnapshotForInstanceInput`](crate::input::GetDeployablePatchSnapshotForInstanceInput)
    pub fn builder() -> crate::input::get_deployable_patch_snapshot_for_instance_input::Builder {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for GetDeployablePatchSnapshotForInstance {
    type Output = std::result::Result<
        crate::output::GetDeployablePatchSnapshotForInstanceOutput,
        crate::error::GetDeployablePatchSnapshotForInstanceError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_get_deployable_patch_snapshot_for_instance_response(
                response,
            )
        } else {
            crate::operation_deser::parse_get_deployable_patch_snapshot_for_instance_error(response)
        }
    }
}
/// <p>Gets the contents of the specified Systems Manager document.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetDocument {
_private: (),
}
impl GetDocument {
/// Creates a new builder-style object to manufacture [`GetDocumentInput`](crate::input::GetDocumentInput)
pub fn builder() -> crate::input::get_document_input::Builder {
crate::input::get_document_input::Builder::default()
}
pub fn new() -> Self {
Self { _private: () }
}
}
impl smithy_http::response::ParseStrictResponse for GetDocument {
type Output =
std::result::Result<crate::output::GetDocumentOutput, crate::error::GetDocumentError>;
fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
if !response.status().is_success() && response.status().as_u16() != 200 {
crate::operation_deser::parse_get_document_error(response)
} else {
crate::operation_deser::parse_get_document_response(response)
}
}
}
/// <p>Query inventory information.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetInventory {
_private: (),
}
impl GetInventory {
/// Creates a new builder-style object to manufacture [`GetInventoryInput`](crate::input::GetInventoryInput)
pub fn builder() -> crate::input::get_inventory_input::Builder {
crate::input::get_inventory_input::Builder::default()
}
pub fn new() -> Self {
Self { _private: () }
}
}
impl smithy_http::response::ParseStrictResponse for GetInventory {
type Output =
std::result::Result<crate::output::GetInventoryOutput, crate::error::GetInventoryError>;
fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
if !response.status().is_success() && response.status().as_u16() != 200 {
crate::operation_deser::parse_get_inventory_error(response)
} else {
crate::operation_deser::parse_get_inventory_response(response)
}
}
}
/// <p>Return a list of inventory type names for the account, or return a list of attribute names
/// for a specific Inventory item type.</p>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct GetInventorySchema {
    _private: (),
}
impl GetInventorySchema {
    /// Returns a new `GetInventorySchema` operation marker.
    pub fn new() -> Self {
        GetInventorySchema { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`GetInventorySchemaInput`](crate::input::GetInventorySchemaInput)
    pub fn builder() -> crate::input::get_inventory_schema_input::Builder {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for GetInventorySchema {
    type Output = std::result::Result<
        crate::output::GetInventorySchemaOutput,
        crate::error::GetInventorySchemaError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_get_inventory_schema_response(response)
        } else {
            crate::operation_deser::parse_get_inventory_schema_error(response)
        }
    }
}
/// <p>Retrieves a maintenance window.</p>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct GetMaintenanceWindow {
    _private: (),
}
impl GetMaintenanceWindow {
    /// Returns a new `GetMaintenanceWindow` operation marker.
    pub fn new() -> Self {
        GetMaintenanceWindow { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`GetMaintenanceWindowInput`](crate::input::GetMaintenanceWindowInput)
    pub fn builder() -> crate::input::get_maintenance_window_input::Builder {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for GetMaintenanceWindow {
    type Output = std::result::Result<
        crate::output::GetMaintenanceWindowOutput,
        crate::error::GetMaintenanceWindowError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_get_maintenance_window_response(response)
        } else {
            crate::operation_deser::parse_get_maintenance_window_error(response)
        }
    }
}
/// <p>Retrieves details about a specific a maintenance window execution.</p>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct GetMaintenanceWindowExecution {
    _private: (),
}
impl GetMaintenanceWindowExecution {
    /// Returns a new `GetMaintenanceWindowExecution` operation marker.
    pub fn new() -> Self {
        GetMaintenanceWindowExecution { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`GetMaintenanceWindowExecutionInput`](crate::input::GetMaintenanceWindowExecutionInput)
    pub fn builder() -> crate::input::get_maintenance_window_execution_input::Builder {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for GetMaintenanceWindowExecution {
    type Output = std::result::Result<
        crate::output::GetMaintenanceWindowExecutionOutput,
        crate::error::GetMaintenanceWindowExecutionError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_get_maintenance_window_execution_response(response)
        } else {
            crate::operation_deser::parse_get_maintenance_window_execution_error(response)
        }
    }
}
/// <p>Retrieves the details about a specific task run as part of a maintenance window
/// execution.</p>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct GetMaintenanceWindowExecutionTask {
    _private: (),
}
impl GetMaintenanceWindowExecutionTask {
    /// Returns a new `GetMaintenanceWindowExecutionTask` operation marker.
    pub fn new() -> Self {
        GetMaintenanceWindowExecutionTask { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`GetMaintenanceWindowExecutionTaskInput`](crate::input::GetMaintenanceWindowExecutionTaskInput)
    pub fn builder() -> crate::input::get_maintenance_window_execution_task_input::Builder {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for GetMaintenanceWindowExecutionTask {
    type Output = std::result::Result<
        crate::output::GetMaintenanceWindowExecutionTaskOutput,
        crate::error::GetMaintenanceWindowExecutionTaskError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_get_maintenance_window_execution_task_response(response)
        } else {
            crate::operation_deser::parse_get_maintenance_window_execution_task_error(response)
        }
    }
}
/// <p>Retrieves information about a specific task running on a specific target.</p>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct GetMaintenanceWindowExecutionTaskInvocation {
    _private: (),
}
impl GetMaintenanceWindowExecutionTaskInvocation {
    /// Returns a new `GetMaintenanceWindowExecutionTaskInvocation` operation marker.
    pub fn new() -> Self {
        GetMaintenanceWindowExecutionTaskInvocation { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`GetMaintenanceWindowExecutionTaskInvocationInput`](crate::input::GetMaintenanceWindowExecutionTaskInvocationInput)
    pub fn builder() -> crate::input::get_maintenance_window_execution_task_invocation_input::Builder
    {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for GetMaintenanceWindowExecutionTaskInvocation {
    type Output = std::result::Result<
        crate::output::GetMaintenanceWindowExecutionTaskInvocationOutput,
        crate::error::GetMaintenanceWindowExecutionTaskInvocationError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_get_maintenance_window_execution_task_invocation_response(
                response,
            )
        } else {
            crate::operation_deser::parse_get_maintenance_window_execution_task_invocation_error(
                response,
            )
        }
    }
}
/// <p>Lists the tasks in a maintenance window.</p>
/// <note>
/// <p>For maintenance window tasks without a specified target, you cannot supply values for
/// <code>--max-errors</code> and <code>--max-concurrency</code>. Instead, the system inserts a
/// placeholder value of <code>1</code>, which may be reported in the response to this command.
/// These values do not affect the running of your task and can be ignored.</p>
/// </note>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct GetMaintenanceWindowTask {
    _private: (),
}
impl GetMaintenanceWindowTask {
    /// Returns a new `GetMaintenanceWindowTask` operation marker.
    pub fn new() -> Self {
        GetMaintenanceWindowTask { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`GetMaintenanceWindowTaskInput`](crate::input::GetMaintenanceWindowTaskInput)
    pub fn builder() -> crate::input::get_maintenance_window_task_input::Builder {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for GetMaintenanceWindowTask {
    type Output = std::result::Result<
        crate::output::GetMaintenanceWindowTaskOutput,
        crate::error::GetMaintenanceWindowTaskError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_get_maintenance_window_task_response(response)
        } else {
            crate::operation_deser::parse_get_maintenance_window_task_error(response)
        }
    }
}
/// <p>Get information about an OpsItem by using the ID. You must have permission in AWS Identity
/// and Access Management (IAM) to view information about an OpsItem. For more information, see
/// <a href="https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter-getting-started.html">Getting started with
/// OpsCenter</a> in the <i>AWS Systems Manager User Guide</i>.</p>
/// <p>Operations engineers and IT professionals use OpsCenter to view, investigate, and remediate
/// operational issues impacting the performance and health of their AWS resources. For more
/// information, see <a href="https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter.html">AWS Systems Manager OpsCenter</a> in the
/// <i>AWS Systems Manager User Guide</i>. </p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetOpsItem {
_private: (),
}
impl GetOpsItem {
/// Creates a new builder-style object to manufacture [`GetOpsItemInput`](crate::input::GetOpsItemInput)
pub fn builder() -> crate::input::get_ops_item_input::Builder {
crate::input::get_ops_item_input::Builder::default()
}
pub fn new() -> Self {
Self { _private: () }
}
}
impl smithy_http::response::ParseStrictResponse for GetOpsItem {
type Output =
std::result::Result<crate::output::GetOpsItemOutput, crate::error::GetOpsItemError>;
fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
if !response.status().is_success() && response.status().as_u16() != 200 {
crate::operation_deser::parse_get_ops_item_error(response)
} else {
crate::operation_deser::parse_get_ops_item_response(response)
}
}
}
/// <p>View operational metadata related to an application in Application Manager.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetOpsMetadata {
_private: (),
}
impl GetOpsMetadata {
/// Creates a new builder-style object to manufacture [`GetOpsMetadataInput`](crate::input::GetOpsMetadataInput)
pub fn builder() -> crate::input::get_ops_metadata_input::Builder {
crate::input::get_ops_metadata_input::Builder::default()
}
pub fn new() -> Self {
Self { _private: () }
}
}
impl smithy_http::response::ParseStrictResponse for GetOpsMetadata {
type Output =
std::result::Result<crate::output::GetOpsMetadataOutput, crate::error::GetOpsMetadataError>;
fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
if !response.status().is_success() && response.status().as_u16() != 200 {
crate::operation_deser::parse_get_ops_metadata_error(response)
} else {
crate::operation_deser::parse_get_ops_metadata_response(response)
}
}
}
/// <p>View a summary of OpsItems based on specified filters and aggregators.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetOpsSummary {
_private: (),
}
impl GetOpsSummary {
/// Creates a new builder-style object to manufacture [`GetOpsSummaryInput`](crate::input::GetOpsSummaryInput)
pub fn builder() -> crate::input::get_ops_summary_input::Builder {
crate::input::get_ops_summary_input::Builder::default()
}
pub fn new() -> Self {
Self { _private: () }
}
}
impl smithy_http::response::ParseStrictResponse for GetOpsSummary {
type Output =
std::result::Result<crate::output::GetOpsSummaryOutput, crate::error::GetOpsSummaryError>;
fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
if !response.status().is_success() && response.status().as_u16() != 200 {
crate::operation_deser::parse_get_ops_summary_error(response)
} else {
crate::operation_deser::parse_get_ops_summary_response(response)
}
}
}
/// <p>Get information about a parameter by using the parameter name. Don't confuse this API action
/// with the <a>GetParameters</a> API action.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetParameter {
_private: (),
}
impl GetParameter {
/// Creates a new builder-style object to manufacture [`GetParameterInput`](crate::input::GetParameterInput)
pub fn builder() -> crate::input::get_parameter_input::Builder {
crate::input::get_parameter_input::Builder::default()
}
pub fn new() -> Self {
Self { _private: () }
}
}
impl smithy_http::response::ParseStrictResponse for GetParameter {
type Output =
std::result::Result<crate::output::GetParameterOutput, crate::error::GetParameterError>;
fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
if !response.status().is_success() && response.status().as_u16() != 200 {
crate::operation_deser::parse_get_parameter_error(response)
} else {
crate::operation_deser::parse_get_parameter_response(response)
}
}
}
/// <p>Retrieves the history of all changes to a parameter.</p>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct GetParameterHistory {
    _private: (),
}
impl GetParameterHistory {
    /// Returns a new `GetParameterHistory` operation marker.
    pub fn new() -> Self {
        GetParameterHistory { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`GetParameterHistoryInput`](crate::input::GetParameterHistoryInput)
    pub fn builder() -> crate::input::get_parameter_history_input::Builder {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for GetParameterHistory {
    type Output = std::result::Result<
        crate::output::GetParameterHistoryOutput,
        crate::error::GetParameterHistoryError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_get_parameter_history_response(response)
        } else {
            crate::operation_deser::parse_get_parameter_history_error(response)
        }
    }
}
/// <p>Get details of a parameter. Don't confuse this API action with the <a>GetParameter</a> API action.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct GetParameters {
_private: (),
}
impl GetParameters {
/// Creates a new builder-style object to manufacture [`GetParametersInput`](crate::input::GetParametersInput)
pub fn builder() -> crate::input::get_parameters_input::Builder {
crate::input::get_parameters_input::Builder::default()
}
pub fn new() -> Self {
Self { _private: () }
}
}
impl smithy_http::response::ParseStrictResponse for GetParameters {
type Output =
std::result::Result<crate::output::GetParametersOutput, crate::error::GetParametersError>;
fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
if !response.status().is_success() && response.status().as_u16() != 200 {
crate::operation_deser::parse_get_parameters_error(response)
} else {
crate::operation_deser::parse_get_parameters_response(response)
}
}
}
/// <p>Retrieve information about one or more parameters in a specific hierarchy. </p>
/// <note>
/// <p>Request results are returned on a best-effort basis. If you specify <code>MaxResults</code>
/// in the request, the response includes information up to the limit specified. The number of items
/// returned, however, can be between zero and the value of <code>MaxResults</code>. If the service
/// reaches an internal limit while processing the results, it stops the operation and returns the
/// matching values up to that point and a <code>NextToken</code>. You can specify the
/// <code>NextToken</code> in a subsequent call to get the next set of results.</p>
/// </note>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct GetParametersByPath {
    _private: (),
}
impl GetParametersByPath {
    /// Returns a new `GetParametersByPath` operation marker.
    pub fn new() -> Self {
        GetParametersByPath { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`GetParametersByPathInput`](crate::input::GetParametersByPathInput)
    pub fn builder() -> crate::input::get_parameters_by_path_input::Builder {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for GetParametersByPath {
    type Output = std::result::Result<
        crate::output::GetParametersByPathOutput,
        crate::error::GetParametersByPathError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_get_parameters_by_path_response(response)
        } else {
            crate::operation_deser::parse_get_parameters_by_path_error(response)
        }
    }
}
/// <p>Retrieves information about a patch baseline.</p>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct GetPatchBaseline {
    _private: (),
}
impl GetPatchBaseline {
    /// Returns a new `GetPatchBaseline` operation marker.
    pub fn new() -> Self {
        GetPatchBaseline { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`GetPatchBaselineInput`](crate::input::GetPatchBaselineInput)
    pub fn builder() -> crate::input::get_patch_baseline_input::Builder {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for GetPatchBaseline {
    type Output = std::result::Result<
        crate::output::GetPatchBaselineOutput,
        crate::error::GetPatchBaselineError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_get_patch_baseline_response(response)
        } else {
            crate::operation_deser::parse_get_patch_baseline_error(response)
        }
    }
}
/// <p>Retrieves the patch baseline that should be used for the specified patch group.</p>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct GetPatchBaselineForPatchGroup {
    _private: (),
}
impl GetPatchBaselineForPatchGroup {
    /// Returns a new `GetPatchBaselineForPatchGroup` operation marker.
    pub fn new() -> Self {
        GetPatchBaselineForPatchGroup { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`GetPatchBaselineForPatchGroupInput`](crate::input::GetPatchBaselineForPatchGroupInput)
    pub fn builder() -> crate::input::get_patch_baseline_for_patch_group_input::Builder {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for GetPatchBaselineForPatchGroup {
    type Output = std::result::Result<
        crate::output::GetPatchBaselineForPatchGroupOutput,
        crate::error::GetPatchBaselineForPatchGroupError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_get_patch_baseline_for_patch_group_response(response)
        } else {
            crate::operation_deser::parse_get_patch_baseline_for_patch_group_error(response)
        }
    }
}
/// <p>
/// <code>ServiceSetting</code> is an account-level setting for an AWS service. This setting
/// defines how a user interacts with or uses a service or a feature of a service. For example, if an
/// AWS service charges money to the account based on feature or service usage, then the AWS service
/// team might create a default setting of "false". This means the user can't use this feature unless
/// they change the setting to "true" and intentionally opt in for a paid feature.</p>
/// <p>Services map a <code>SettingId</code> object to a setting value. AWS services teams define
/// the default value for a <code>SettingId</code>. You can't create a new <code>SettingId</code>,
/// but you can overwrite the default value if you have the <code>ssm:UpdateServiceSetting</code>
/// permission for the setting. Use the <a>UpdateServiceSetting</a> API action to change
/// the default setting. Or use the <a>ResetServiceSetting</a> to change the value back to
/// the original value defined by the AWS service team.</p>
/// <p>Query the current service setting for the account. </p>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct GetServiceSetting {
    _private: (),
}
impl GetServiceSetting {
    /// Returns a new `GetServiceSetting` operation marker.
    pub fn new() -> Self {
        GetServiceSetting { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`GetServiceSettingInput`](crate::input::GetServiceSettingInput)
    pub fn builder() -> crate::input::get_service_setting_input::Builder {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for GetServiceSetting {
    type Output = std::result::Result<
        crate::output::GetServiceSettingOutput,
        crate::error::GetServiceSettingError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_get_service_setting_response(response)
        } else {
            crate::operation_deser::parse_get_service_setting_error(response)
        }
    }
}
/// <p>A parameter label is a user-defined alias to help you manage different versions of a
/// parameter. When you modify a parameter, Systems Manager automatically saves a new version and increments
/// the version number by one. A label can help you remember the purpose of a parameter when there
/// are multiple versions. </p>
/// <p>Parameter labels have the following requirements and restrictions.</p>
/// <ul>
/// <li>
/// <p>A version of a parameter can have a maximum of 10 labels.</p>
/// </li>
/// <li>
/// <p>You can't attach the same label to different versions of the same parameter. For example,
/// if version 1 has the label Production, then you can't attach Production to version 2.</p>
/// </li>
/// <li>
/// <p>You can move a label from one version of a parameter to another.</p>
/// </li>
/// <li>
/// <p>You can't create a label when you create a new parameter. You must attach a label to a
/// specific version of a parameter.</p>
/// </li>
/// <li>
/// <p>If you no longer want to use a parameter label, then you can either delete it or move it
/// to a different version of a parameter.</p>
/// </li>
/// <li>
/// <p>A label can have a maximum of 100 characters.</p>
/// </li>
/// <li>
/// <p>Labels can contain letters (case sensitive), numbers, periods (.), hyphens (-), or
/// underscores (_).</p>
/// </li>
/// <li>
/// <p>Labels can't begin with a number, "aws," or "ssm" (not case sensitive). If a label fails
/// to meet these requirements, then the label is not associated with a parameter and the system
/// displays it in the list of InvalidLabels.</p>
/// </li>
/// </ul>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct LabelParameterVersion {
    _private: (),
}
impl LabelParameterVersion {
    /// Returns a new `LabelParameterVersion` operation marker.
    pub fn new() -> Self {
        LabelParameterVersion { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`LabelParameterVersionInput`](crate::input::LabelParameterVersionInput)
    pub fn builder() -> crate::input::label_parameter_version_input::Builder {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for LabelParameterVersion {
    type Output = std::result::Result<
        crate::output::LabelParameterVersionOutput,
        crate::error::LabelParameterVersionError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_label_parameter_version_response(response)
        } else {
            crate::operation_deser::parse_label_parameter_version_error(response)
        }
    }
}
/// <p>Returns all State Manager associations in the current AWS account and Region. You can limit
/// the results to a specific State Manager association document or instance by specifying a
/// filter.</p>
#[derive(std::clone::Clone, std::default::Default, std::fmt::Debug)]
pub struct ListAssociations {
    _private: (),
}
impl ListAssociations {
    /// Returns a new `ListAssociations` operation marker.
    pub fn new() -> Self {
        ListAssociations { _private: () }
    }
    /// Creates a new builder-style object to manufacture [`ListAssociationsInput`](crate::input::ListAssociationsInput)
    pub fn builder() -> crate::input::list_associations_input::Builder {
        Default::default()
    }
}
impl smithy_http::response::ParseStrictResponse for ListAssociations {
    type Output = std::result::Result<
        crate::output::ListAssociationsOutput,
        crate::error::ListAssociationsError,
    >;
    /// Routes the buffered HTTP response to the success or error deserializer.
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Any 2xx status (or a literal 200) counts as success.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_list_associations_response(response)
        } else {
            crate::operation_deser::parse_list_associations_error(response)
        }
    }
}
/// <p>Retrieves all versions of an association for a specific association ID.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListAssociationVersions {
    _private: (),
}
impl ListAssociationVersions {
    /// Creates a new builder-style object to manufacture [`ListAssociationVersionsInput`](crate::input::ListAssociationVersionsInput)
    pub fn builder() -> crate::input::list_association_versions_input::Builder {
        crate::input::list_association_versions_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ListAssociationVersions { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ListAssociationVersions {
    type Output = std::result::Result<
        crate::output::ListAssociationVersionsOutput,
        crate::error::ListAssociationVersionsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_list_association_versions_response(response)
        } else {
            crate::operation_deser::parse_list_association_versions_error(response)
        }
    }
}
/// <p>An invocation is copy of a command sent to a specific instance. A command can apply to one
/// or more instances. A command invocation applies to one instance. For example, if a user runs
/// SendCommand against three instances, then a command invocation is created for each requested
/// instance ID. ListCommandInvocations provide status about command execution.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListCommandInvocations {
    _private: (),
}
impl ListCommandInvocations {
    /// Creates a new builder-style object to manufacture [`ListCommandInvocationsInput`](crate::input::ListCommandInvocationsInput)
    pub fn builder() -> crate::input::list_command_invocations_input::Builder {
        crate::input::list_command_invocations_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ListCommandInvocations { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ListCommandInvocations {
    type Output = std::result::Result<
        crate::output::ListCommandInvocationsOutput,
        crate::error::ListCommandInvocationsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_list_command_invocations_response(response)
        } else {
            crate::operation_deser::parse_list_command_invocations_error(response)
        }
    }
}
/// <p>Lists the commands requested by users of the AWS account.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListCommands {
    _private: (),
}
impl ListCommands {
    /// Creates a new builder-style object to manufacture [`ListCommandsInput`](crate::input::ListCommandsInput)
    pub fn builder() -> crate::input::list_commands_input::Builder {
        crate::input::list_commands_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ListCommands { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ListCommands {
    type Output =
        std::result::Result<crate::output::ListCommandsOutput, crate::error::ListCommandsError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_list_commands_response(response)
        } else {
            crate::operation_deser::parse_list_commands_error(response)
        }
    }
}
/// <p>For a specified resource ID, this API action returns a list of compliance statuses for
/// different resource types. Currently, you can only specify one resource ID per call. List results
/// depend on the criteria specified in the filter.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListComplianceItems {
    _private: (),
}
impl ListComplianceItems {
    /// Creates a new builder-style object to manufacture [`ListComplianceItemsInput`](crate::input::ListComplianceItemsInput)
    pub fn builder() -> crate::input::list_compliance_items_input::Builder {
        crate::input::list_compliance_items_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ListComplianceItems { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ListComplianceItems {
    type Output = std::result::Result<
        crate::output::ListComplianceItemsOutput,
        crate::error::ListComplianceItemsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_list_compliance_items_response(response)
        } else {
            crate::operation_deser::parse_list_compliance_items_error(response)
        }
    }
}
/// <p>Returns a summary count of compliant and non-compliant resources for a compliance type. For
/// example, this call can return State Manager associations, patches, or custom compliance types
/// according to the filter criteria that you specify.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListComplianceSummaries {
    _private: (),
}
impl ListComplianceSummaries {
    /// Creates a new builder-style object to manufacture [`ListComplianceSummariesInput`](crate::input::ListComplianceSummariesInput)
    pub fn builder() -> crate::input::list_compliance_summaries_input::Builder {
        crate::input::list_compliance_summaries_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ListComplianceSummaries { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ListComplianceSummaries {
    type Output = std::result::Result<
        crate::output::ListComplianceSummariesOutput,
        crate::error::ListComplianceSummariesError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_list_compliance_summaries_response(response)
        } else {
            crate::operation_deser::parse_list_compliance_summaries_error(response)
        }
    }
}
/// <p>Information about approval reviews for a version of an SSM document.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListDocumentMetadataHistory {
    _private: (),
}
impl ListDocumentMetadataHistory {
    /// Creates a new builder-style object to manufacture [`ListDocumentMetadataHistoryInput`](crate::input::ListDocumentMetadataHistoryInput)
    pub fn builder() -> crate::input::list_document_metadata_history_input::Builder {
        crate::input::list_document_metadata_history_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ListDocumentMetadataHistory { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ListDocumentMetadataHistory {
    type Output = std::result::Result<
        crate::output::ListDocumentMetadataHistoryOutput,
        crate::error::ListDocumentMetadataHistoryError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_list_document_metadata_history_response(response)
        } else {
            crate::operation_deser::parse_list_document_metadata_history_error(response)
        }
    }
}
/// <p>Returns all Systems Manager (SSM) documents in the current AWS account and Region. You can limit the
/// results of this request by using a filter.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListDocuments {
    _private: (),
}
impl ListDocuments {
    /// Creates a new builder-style object to manufacture [`ListDocumentsInput`](crate::input::ListDocumentsInput)
    pub fn builder() -> crate::input::list_documents_input::Builder {
        crate::input::list_documents_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ListDocuments { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ListDocuments {
    type Output =
        std::result::Result<crate::output::ListDocumentsOutput, crate::error::ListDocumentsError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_list_documents_response(response)
        } else {
            crate::operation_deser::parse_list_documents_error(response)
        }
    }
}
/// <p>List all versions for a document.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListDocumentVersions {
    _private: (),
}
impl ListDocumentVersions {
    /// Creates a new builder-style object to manufacture [`ListDocumentVersionsInput`](crate::input::ListDocumentVersionsInput)
    pub fn builder() -> crate::input::list_document_versions_input::Builder {
        crate::input::list_document_versions_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ListDocumentVersions { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ListDocumentVersions {
    type Output = std::result::Result<
        crate::output::ListDocumentVersionsOutput,
        crate::error::ListDocumentVersionsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_list_document_versions_response(response)
        } else {
            crate::operation_deser::parse_list_document_versions_error(response)
        }
    }
}
/// <p>A list of inventory items returned by the request.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListInventoryEntries {
    _private: (),
}
impl ListInventoryEntries {
    /// Creates a new builder-style object to manufacture [`ListInventoryEntriesInput`](crate::input::ListInventoryEntriesInput)
    pub fn builder() -> crate::input::list_inventory_entries_input::Builder {
        crate::input::list_inventory_entries_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ListInventoryEntries { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ListInventoryEntries {
    type Output = std::result::Result<
        crate::output::ListInventoryEntriesOutput,
        crate::error::ListInventoryEntriesError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_list_inventory_entries_response(response)
        } else {
            crate::operation_deser::parse_list_inventory_entries_error(response)
        }
    }
}
/// <p>Returns a list of all OpsItem events in the current AWS account and Region. You can limit
/// the results to events associated with specific OpsItems by specifying a filter.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListOpsItemEvents {
    _private: (),
}
impl ListOpsItemEvents {
    /// Creates a new builder-style object to manufacture [`ListOpsItemEventsInput`](crate::input::ListOpsItemEventsInput)
    pub fn builder() -> crate::input::list_ops_item_events_input::Builder {
        crate::input::list_ops_item_events_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ListOpsItemEvents { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ListOpsItemEvents {
    type Output = std::result::Result<
        crate::output::ListOpsItemEventsOutput,
        crate::error::ListOpsItemEventsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_list_ops_item_events_response(response)
        } else {
            crate::operation_deser::parse_list_ops_item_events_error(response)
        }
    }
}
/// <p>Lists all related-item resources associated with an OpsItem.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListOpsItemRelatedItems {
    _private: (),
}
impl ListOpsItemRelatedItems {
    /// Creates a new builder-style object to manufacture [`ListOpsItemRelatedItemsInput`](crate::input::ListOpsItemRelatedItemsInput)
    pub fn builder() -> crate::input::list_ops_item_related_items_input::Builder {
        crate::input::list_ops_item_related_items_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ListOpsItemRelatedItems { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ListOpsItemRelatedItems {
    type Output = std::result::Result<
        crate::output::ListOpsItemRelatedItemsOutput,
        crate::error::ListOpsItemRelatedItemsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_list_ops_item_related_items_response(response)
        } else {
            crate::operation_deser::parse_list_ops_item_related_items_error(response)
        }
    }
}
/// <p>Systems Manager calls this API action when displaying all Application Manager OpsMetadata objects or
/// blobs.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListOpsMetadata {
    _private: (),
}
impl ListOpsMetadata {
    /// Creates a new builder-style object to manufacture [`ListOpsMetadataInput`](crate::input::ListOpsMetadataInput)
    pub fn builder() -> crate::input::list_ops_metadata_input::Builder {
        crate::input::list_ops_metadata_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ListOpsMetadata { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ListOpsMetadata {
    type Output = std::result::Result<
        crate::output::ListOpsMetadataOutput,
        crate::error::ListOpsMetadataError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_list_ops_metadata_response(response)
        } else {
            crate::operation_deser::parse_list_ops_metadata_error(response)
        }
    }
}
/// <p>Returns a resource-level summary count. The summary includes information about compliant and
/// non-compliant statuses and detailed compliance-item severity counts, according to the filter
/// criteria you specify.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListResourceComplianceSummaries {
    _private: (),
}
impl ListResourceComplianceSummaries {
    /// Creates a new builder-style object to manufacture [`ListResourceComplianceSummariesInput`](crate::input::ListResourceComplianceSummariesInput)
    pub fn builder() -> crate::input::list_resource_compliance_summaries_input::Builder {
        crate::input::list_resource_compliance_summaries_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ListResourceComplianceSummaries { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ListResourceComplianceSummaries {
    type Output = std::result::Result<
        crate::output::ListResourceComplianceSummariesOutput,
        crate::error::ListResourceComplianceSummariesError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_list_resource_compliance_summaries_response(response)
        } else {
            crate::operation_deser::parse_list_resource_compliance_summaries_error(response)
        }
    }
}
/// <p>Lists your resource data sync configurations. Includes information about the last time a
/// sync attempted to start, the last sync status, and the last time a sync successfully
/// completed.</p>
/// <p>The number of sync configurations might be too large to return using a single call to
/// <code>ListResourceDataSync</code>. You can limit the number of sync configurations returned by
/// using the <code>MaxResults</code> parameter. To determine whether there are more sync
/// configurations to list, check the value of <code>NextToken</code> in the output. If there are
/// more sync configurations to list, you can request them by specifying the <code>NextToken</code>
/// returned in the call to the parameter of a subsequent call. </p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListResourceDataSync {
    _private: (),
}
impl ListResourceDataSync {
    /// Creates a new builder-style object to manufacture [`ListResourceDataSyncInput`](crate::input::ListResourceDataSyncInput)
    pub fn builder() -> crate::input::list_resource_data_sync_input::Builder {
        crate::input::list_resource_data_sync_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ListResourceDataSync { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ListResourceDataSync {
    type Output = std::result::Result<
        crate::output::ListResourceDataSyncOutput,
        crate::error::ListResourceDataSyncError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_list_resource_data_sync_response(response)
        } else {
            crate::operation_deser::parse_list_resource_data_sync_error(response)
        }
    }
}
/// <p>Returns a list of the tags assigned to the specified resource.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ListTagsForResource {
    _private: (),
}
impl ListTagsForResource {
    /// Creates a new builder-style object to manufacture [`ListTagsForResourceInput`](crate::input::ListTagsForResourceInput)
    pub fn builder() -> crate::input::list_tags_for_resource_input::Builder {
        crate::input::list_tags_for_resource_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ListTagsForResource { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ListTagsForResource {
    type Output = std::result::Result<
        crate::output::ListTagsForResourceOutput,
        crate::error::ListTagsForResourceError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_list_tags_for_resource_response(response)
        } else {
            crate::operation_deser::parse_list_tags_for_resource_error(response)
        }
    }
}
/// <p>Shares a Systems Manager document publicly or privately. If you share a document privately, you must
/// specify the AWS user account IDs for those people who can use the document. If you share a
/// document publicly, you must specify <i>All</i> as the account ID.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ModifyDocumentPermission {
    _private: (),
}
impl ModifyDocumentPermission {
    /// Creates a new builder-style object to manufacture [`ModifyDocumentPermissionInput`](crate::input::ModifyDocumentPermissionInput)
    pub fn builder() -> crate::input::modify_document_permission_input::Builder {
        crate::input::modify_document_permission_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ModifyDocumentPermission { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ModifyDocumentPermission {
    type Output = std::result::Result<
        crate::output::ModifyDocumentPermissionOutput,
        crate::error::ModifyDocumentPermissionError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_modify_document_permission_response(response)
        } else {
            crate::operation_deser::parse_modify_document_permission_error(response)
        }
    }
}
/// <p>Registers a compliance type and other compliance details on a designated resource. This
/// action lets you register custom compliance details with a resource. This call overwrites existing
/// compliance information on the resource, so you must provide a full list of compliance items each
/// time that you send the request.</p>
/// <p>ComplianceType can be one of the following:</p>
/// <ul>
/// <li>
/// <p>ExecutionId: The execution ID when the patch, association, or custom compliance item was
/// applied.</p>
/// </li>
/// <li>
/// <p>ExecutionType: Specify patch, association, or Custom:<code>string</code>.</p>
/// </li>
/// <li>
/// <p>ExecutionTime. The time the patch, association, or custom compliance item was applied to
/// the instance.</p>
/// </li>
/// <li>
/// <p>Id: The patch, association, or custom compliance ID.</p>
/// </li>
/// <li>
/// <p>Title: A title.</p>
/// </li>
/// <li>
/// <p>Status: The status of the compliance item. For example, <code>approved</code> for patches,
/// or <code>Failed</code> for associations.</p>
/// </li>
/// <li>
/// <p>Severity: A patch severity. For example, <code>critical</code>.</p>
/// </li>
/// <li>
/// <p>DocumentName: A SSM document name. For example, AWS-RunPatchBaseline.</p>
/// </li>
/// <li>
/// <p>DocumentVersion: An SSM document version number. For example, 4.</p>
/// </li>
/// <li>
/// <p>Classification: A patch classification. For example, <code>security updates</code>.</p>
/// </li>
/// <li>
/// <p>PatchBaselineId: A patch baseline ID.</p>
/// </li>
/// <li>
/// <p>PatchSeverity: A patch severity. For example, <code>Critical</code>.</p>
/// </li>
/// <li>
/// <p>PatchState: A patch state. For example, <code>InstancesWithFailedPatches</code>.</p>
/// </li>
/// <li>
/// <p>PatchGroup: The name of a patch group.</p>
/// </li>
/// <li>
/// <p>InstalledTime: The time the association, patch, or custom compliance item was applied to
/// the resource. Specify the time by using the following format: yyyy-MM-dd'T'HH:mm:ss'Z'</p>
/// </li>
/// </ul>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct PutComplianceItems {
    _private: (),
}
impl PutComplianceItems {
    /// Creates a new builder-style object to manufacture [`PutComplianceItemsInput`](crate::input::PutComplianceItemsInput)
    pub fn builder() -> crate::input::put_compliance_items_input::Builder {
        crate::input::put_compliance_items_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        PutComplianceItems { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for PutComplianceItems {
    type Output = std::result::Result<
        crate::output::PutComplianceItemsOutput,
        crate::error::PutComplianceItemsError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_put_compliance_items_response(response)
        } else {
            crate::operation_deser::parse_put_compliance_items_error(response)
        }
    }
}
/// <p>Bulk update custom inventory items on one more instance. The request adds an inventory item,
/// if it doesn't already exist, or updates an inventory item, if it does exist.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct PutInventory {
    _private: (),
}
impl PutInventory {
    /// Creates a new builder-style object to manufacture [`PutInventoryInput`](crate::input::PutInventoryInput)
    pub fn builder() -> crate::input::put_inventory_input::Builder {
        crate::input::put_inventory_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        PutInventory { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for PutInventory {
    type Output =
        std::result::Result<crate::output::PutInventoryOutput, crate::error::PutInventoryError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_put_inventory_response(response)
        } else {
            crate::operation_deser::parse_put_inventory_error(response)
        }
    }
}
/// <p>Add a parameter to the system.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct PutParameter {
    _private: (),
}
impl PutParameter {
    /// Creates a new builder-style object to manufacture [`PutParameterInput`](crate::input::PutParameterInput)
    pub fn builder() -> crate::input::put_parameter_input::Builder {
        crate::input::put_parameter_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        PutParameter { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for PutParameter {
    type Output =
        std::result::Result<crate::output::PutParameterOutput, crate::error::PutParameterError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_put_parameter_response(response)
        } else {
            crate::operation_deser::parse_put_parameter_error(response)
        }
    }
}
/// <p>Defines the default patch baseline for the relevant operating system.</p>
/// <p>To reset the AWS predefined patch baseline as the default, specify the full patch baseline
/// ARN as the baseline ID value. For example, for CentOS, specify
/// <code>arn:aws:ssm:us-east-2:733109147000:patchbaseline/pb-0574b43a65ea646ed</code> instead of
/// <code>pb-0574b43a65ea646ed</code>.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct RegisterDefaultPatchBaseline {
    _private: (),
}
impl RegisterDefaultPatchBaseline {
    /// Creates a new builder-style object to manufacture [`RegisterDefaultPatchBaselineInput`](crate::input::RegisterDefaultPatchBaselineInput)
    pub fn builder() -> crate::input::register_default_patch_baseline_input::Builder {
        crate::input::register_default_patch_baseline_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        RegisterDefaultPatchBaseline { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for RegisterDefaultPatchBaseline {
    type Output = std::result::Result<
        crate::output::RegisterDefaultPatchBaselineOutput,
        crate::error::RegisterDefaultPatchBaselineError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_register_default_patch_baseline_response(response)
        } else {
            crate::operation_deser::parse_register_default_patch_baseline_error(response)
        }
    }
}
/// <p>Registers a patch baseline for a patch group.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct RegisterPatchBaselineForPatchGroup {
    _private: (),
}
impl RegisterPatchBaselineForPatchGroup {
    /// Creates a new builder-style object to manufacture [`RegisterPatchBaselineForPatchGroupInput`](crate::input::RegisterPatchBaselineForPatchGroupInput)
    pub fn builder() -> crate::input::register_patch_baseline_for_patch_group_input::Builder {
        crate::input::register_patch_baseline_for_patch_group_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        RegisterPatchBaselineForPatchGroup { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for RegisterPatchBaselineForPatchGroup {
    type Output = std::result::Result<
        crate::output::RegisterPatchBaselineForPatchGroupOutput,
        crate::error::RegisterPatchBaselineForPatchGroupError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_register_patch_baseline_for_patch_group_response(response)
        } else {
            crate::operation_deser::parse_register_patch_baseline_for_patch_group_error(response)
        }
    }
}
/// <p>Registers a target with a maintenance window.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct RegisterTargetWithMaintenanceWindow {
    _private: (),
}
impl RegisterTargetWithMaintenanceWindow {
    /// Creates a new builder-style object to manufacture [`RegisterTargetWithMaintenanceWindowInput`](crate::input::RegisterTargetWithMaintenanceWindowInput)
    pub fn builder() -> crate::input::register_target_with_maintenance_window_input::Builder {
        crate::input::register_target_with_maintenance_window_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        RegisterTargetWithMaintenanceWindow { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for RegisterTargetWithMaintenanceWindow {
    type Output = std::result::Result<
        crate::output::RegisterTargetWithMaintenanceWindowOutput,
        crate::error::RegisterTargetWithMaintenanceWindowError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_register_target_with_maintenance_window_response(response)
        } else {
            crate::operation_deser::parse_register_target_with_maintenance_window_error(response)
        }
    }
}
/// <p>Adds a new task to a maintenance window.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct RegisterTaskWithMaintenanceWindow {
    _private: (),
}
impl RegisterTaskWithMaintenanceWindow {
    /// Creates a new builder-style object to manufacture [`RegisterTaskWithMaintenanceWindowInput`](crate::input::RegisterTaskWithMaintenanceWindowInput)
    pub fn builder() -> crate::input::register_task_with_maintenance_window_input::Builder {
        crate::input::register_task_with_maintenance_window_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        RegisterTaskWithMaintenanceWindow { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for RegisterTaskWithMaintenanceWindow {
    type Output = std::result::Result<
        crate::output::RegisterTaskWithMaintenanceWindowOutput,
        crate::error::RegisterTaskWithMaintenanceWindowError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_register_task_with_maintenance_window_response(response)
        } else {
            crate::operation_deser::parse_register_task_with_maintenance_window_error(response)
        }
    }
}
/// <p>Removes tag keys from the specified resource.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct RemoveTagsFromResource {
    _private: (),
}
impl RemoveTagsFromResource {
    /// Creates a new builder-style object to manufacture [`RemoveTagsFromResourceInput`](crate::input::RemoveTagsFromResourceInput)
    pub fn builder() -> crate::input::remove_tags_from_resource_input::Builder {
        crate::input::remove_tags_from_resource_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        RemoveTagsFromResource { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for RemoveTagsFromResource {
    type Output = std::result::Result<
        crate::output::RemoveTagsFromResourceOutput,
        crate::error::RemoveTagsFromResourceError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_remove_tags_from_resource_response(response)
        } else {
            crate::operation_deser::parse_remove_tags_from_resource_error(response)
        }
    }
}
/// <p>
/// <code>ServiceSetting</code> is an account-level setting for an AWS service. This setting
/// defines how a user interacts with or uses a service or a feature of a service. For example, if an
/// AWS service charges money to the account based on feature or service usage, then the AWS service
/// team might create a default setting of "false". This means the user can't use this feature unless
/// they change the setting to "true" and intentionally opt in for a paid feature.</p>
/// <p>Services map a <code>SettingId</code> object to a setting value. AWS services teams define
/// the default value for a <code>SettingId</code>. You can't create a new <code>SettingId</code>,
/// but you can overwrite the default value if you have the <code>ssm:UpdateServiceSetting</code>
/// permission for the setting. Use the <a>GetServiceSetting</a> API action to view the
/// current value. Use the <a>UpdateServiceSetting</a> API action to change the default
/// setting. </p>
/// <p>Reset the service setting for the account to the default value as provisioned by the AWS
/// service team. </p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ResetServiceSetting {
    _private: (),
}
impl ResetServiceSetting {
    /// Creates a new builder-style object to manufacture [`ResetServiceSettingInput`](crate::input::ResetServiceSettingInput)
    pub fn builder() -> crate::input::reset_service_setting_input::Builder {
        crate::input::reset_service_setting_input::Builder::default()
    }
    /// Constructs the operation marker.
    pub fn new() -> Self {
        ResetServiceSetting { _private: () }
    }
}
impl smithy_http::response::ParseStrictResponse for ResetServiceSetting {
    type Output = std::result::Result<
        crate::output::ResetServiceSettingOutput,
        crate::error::ResetServiceSettingError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        let status = response.status();
        // Success-class (or explicit 200) responses carry the modeled output;
        // anything else is deserialized as an operation error.
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_reset_service_setting_response(response)
        } else {
            crate::operation_deser::parse_reset_service_setting_error(response)
        }
    }
}
/// <p>Reconnects a session to an instance after it has been disconnected. Connections can be
/// resumed for disconnected sessions, but not terminated sessions.</p>
/// <note>
/// <p>This command is primarily for use by client machines to automatically reconnect during
/// intermittent network issues. It is not intended for any other use.</p>
/// </note>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct ResumeSession {
    _private: (),
}
impl ResumeSession {
    /// Creates a new builder-style object to manufacture [`ResumeSessionInput`](crate::input::ResumeSessionInput)
    pub fn builder() -> crate::input::resume_session_input::Builder {
        crate::input::resume_session_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for ResumeSession {
    type Output =
        std::result::Result<crate::output::ResumeSessionOutput, crate::error::ResumeSessionError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_resume_session_response(response)
        } else {
            crate::operation_deser::parse_resume_session_error(response)
        }
    }
}
/// <p>Sends a signal to an Automation execution to change the current behavior or status of the
/// execution. </p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct SendAutomationSignal {
    _private: (),
}
impl SendAutomationSignal {
    /// Creates a new builder-style object to manufacture [`SendAutomationSignalInput`](crate::input::SendAutomationSignalInput)
    pub fn builder() -> crate::input::send_automation_signal_input::Builder {
        crate::input::send_automation_signal_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for SendAutomationSignal {
    type Output = std::result::Result<
        crate::output::SendAutomationSignalOutput,
        crate::error::SendAutomationSignalError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_send_automation_signal_response(response)
        } else {
            crate::operation_deser::parse_send_automation_signal_error(response)
        }
    }
}
/// <p>Runs commands on one or more managed instances.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct SendCommand {
    _private: (),
}
impl SendCommand {
    /// Creates a new builder-style object to manufacture [`SendCommandInput`](crate::input::SendCommandInput)
    pub fn builder() -> crate::input::send_command_input::Builder {
        crate::input::send_command_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for SendCommand {
    type Output =
        std::result::Result<crate::output::SendCommandOutput, crate::error::SendCommandError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_send_command_response(response)
        } else {
            crate::operation_deser::parse_send_command_error(response)
        }
    }
}
/// <p>Use this API action to run an association immediately and only one time. This action can be
/// helpful when troubleshooting associations.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct StartAssociationsOnce {
    _private: (),
}
impl StartAssociationsOnce {
    /// Creates a new builder-style object to manufacture [`StartAssociationsOnceInput`](crate::input::StartAssociationsOnceInput)
    pub fn builder() -> crate::input::start_associations_once_input::Builder {
        crate::input::start_associations_once_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for StartAssociationsOnce {
    type Output = std::result::Result<
        crate::output::StartAssociationsOnceOutput,
        crate::error::StartAssociationsOnceError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_start_associations_once_response(response)
        } else {
            crate::operation_deser::parse_start_associations_once_error(response)
        }
    }
}
/// <p>Initiates execution of an Automation document.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct StartAutomationExecution {
    _private: (),
}
impl StartAutomationExecution {
    /// Creates a new builder-style object to manufacture [`StartAutomationExecutionInput`](crate::input::StartAutomationExecutionInput)
    pub fn builder() -> crate::input::start_automation_execution_input::Builder {
        crate::input::start_automation_execution_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for StartAutomationExecution {
    type Output = std::result::Result<
        crate::output::StartAutomationExecutionOutput,
        crate::error::StartAutomationExecutionError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_start_automation_execution_response(response)
        } else {
            crate::operation_deser::parse_start_automation_execution_error(response)
        }
    }
}
/// <p>Creates a change request for Change Manager. The runbooks (Automation documents) specified in the
/// change request run only after all required approvals for the change request have been
/// received.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct StartChangeRequestExecution {
    _private: (),
}
impl StartChangeRequestExecution {
    /// Creates a new builder-style object to manufacture [`StartChangeRequestExecutionInput`](crate::input::StartChangeRequestExecutionInput)
    pub fn builder() -> crate::input::start_change_request_execution_input::Builder {
        crate::input::start_change_request_execution_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for StartChangeRequestExecution {
    type Output = std::result::Result<
        crate::output::StartChangeRequestExecutionOutput,
        crate::error::StartChangeRequestExecutionError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_start_change_request_execution_response(response)
        } else {
            crate::operation_deser::parse_start_change_request_execution_error(response)
        }
    }
}
/// <p>Initiates a connection to a target (for example, an instance) for a Session Manager session. Returns a
/// URL and token that can be used to open a WebSocket connection for sending input and receiving
/// outputs.</p>
/// <note>
/// <p>AWS CLI usage: <code>start-session</code> is an interactive command that requires the Session Manager
/// plugin to be installed on the client machine making the call. For information, see <a href="https://docs.aws.amazon.com/systems-manager/latest/userguide/session-manager-working-with-install-plugin.html">Install
/// the Session Manager plugin for the AWS CLI</a> in the <i>AWS Systems Manager User Guide</i>.</p>
/// <p>AWS Tools for PowerShell usage: Start-SSMSession is not currently supported by AWS Tools
/// for PowerShell on Windows local machines.</p>
/// </note>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct StartSession {
    _private: (),
}
impl StartSession {
    /// Creates a new builder-style object to manufacture [`StartSessionInput`](crate::input::StartSessionInput)
    pub fn builder() -> crate::input::start_session_input::Builder {
        crate::input::start_session_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for StartSession {
    type Output =
        std::result::Result<crate::output::StartSessionOutput, crate::error::StartSessionError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_start_session_response(response)
        } else {
            crate::operation_deser::parse_start_session_error(response)
        }
    }
}
/// <p>Stop an Automation that is currently running.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct StopAutomationExecution {
    _private: (),
}
impl StopAutomationExecution {
    /// Creates a new builder-style object to manufacture [`StopAutomationExecutionInput`](crate::input::StopAutomationExecutionInput)
    pub fn builder() -> crate::input::stop_automation_execution_input::Builder {
        crate::input::stop_automation_execution_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for StopAutomationExecution {
    type Output = std::result::Result<
        crate::output::StopAutomationExecutionOutput,
        crate::error::StopAutomationExecutionError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_stop_automation_execution_response(response)
        } else {
            crate::operation_deser::parse_stop_automation_execution_error(response)
        }
    }
}
/// <p>Permanently ends a session and closes the data connection between the Session Manager client and
/// SSM Agent on the instance. A terminated session cannot be resumed.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct TerminateSession {
    _private: (),
}
impl TerminateSession {
    /// Creates a new builder-style object to manufacture [`TerminateSessionInput`](crate::input::TerminateSessionInput)
    pub fn builder() -> crate::input::terminate_session_input::Builder {
        crate::input::terminate_session_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for TerminateSession {
    type Output = std::result::Result<
        crate::output::TerminateSessionOutput,
        crate::error::TerminateSessionError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_terminate_session_response(response)
        } else {
            crate::operation_deser::parse_terminate_session_error(response)
        }
    }
}
/// <p>Remove a label or labels from a parameter.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UnlabelParameterVersion {
    _private: (),
}
impl UnlabelParameterVersion {
    /// Creates a new builder-style object to manufacture [`UnlabelParameterVersionInput`](crate::input::UnlabelParameterVersionInput)
    pub fn builder() -> crate::input::unlabel_parameter_version_input::Builder {
        crate::input::unlabel_parameter_version_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for UnlabelParameterVersion {
    type Output = std::result::Result<
        crate::output::UnlabelParameterVersionOutput,
        crate::error::UnlabelParameterVersionError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_unlabel_parameter_version_response(response)
        } else {
            crate::operation_deser::parse_unlabel_parameter_version_error(response)
        }
    }
}
/// <p>Updates an association. You can update the association name and version, the document
/// version, schedule, parameters, and Amazon S3 output. </p>
/// <p>In order to call this API action, your IAM user account, group, or role must be configured
/// with permission to call the <a>DescribeAssociation</a> API action. If you don't have
/// permission to call DescribeAssociation, then you receive the following error: <code>An error
/// occurred (AccessDeniedException) when calling the UpdateAssociation operation: User:
/// <user_arn> is not authorized to perform: ssm:DescribeAssociation on resource:
/// <resource_arn></code>
/// </p>
/// <important>
/// <p>When you update an association, the association immediately runs against the specified
/// targets.</p>
/// </important>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UpdateAssociation {
    _private: (),
}
impl UpdateAssociation {
    /// Creates a new builder-style object to manufacture [`UpdateAssociationInput`](crate::input::UpdateAssociationInput)
    pub fn builder() -> crate::input::update_association_input::Builder {
        crate::input::update_association_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for UpdateAssociation {
    type Output = std::result::Result<
        crate::output::UpdateAssociationOutput,
        crate::error::UpdateAssociationError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_update_association_response(response)
        } else {
            crate::operation_deser::parse_update_association_error(response)
        }
    }
}
/// <p>Updates the status of the Systems Manager document associated with the specified instance.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UpdateAssociationStatus {
    _private: (),
}
impl UpdateAssociationStatus {
    /// Creates a new builder-style object to manufacture [`UpdateAssociationStatusInput`](crate::input::UpdateAssociationStatusInput)
    pub fn builder() -> crate::input::update_association_status_input::Builder {
        crate::input::update_association_status_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for UpdateAssociationStatus {
    type Output = std::result::Result<
        crate::output::UpdateAssociationStatusOutput,
        crate::error::UpdateAssociationStatusError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_update_association_status_response(response)
        } else {
            crate::operation_deser::parse_update_association_status_error(response)
        }
    }
}
/// <p>Updates one or more values for an SSM document.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UpdateDocument {
    _private: (),
}
impl UpdateDocument {
    /// Creates a new builder-style object to manufacture [`UpdateDocumentInput`](crate::input::UpdateDocumentInput)
    pub fn builder() -> crate::input::update_document_input::Builder {
        crate::input::update_document_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for UpdateDocument {
    type Output =
        std::result::Result<crate::output::UpdateDocumentOutput, crate::error::UpdateDocumentError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_update_document_response(response)
        } else {
            crate::operation_deser::parse_update_document_error(response)
        }
    }
}
/// <p>Set the default version of a document. </p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UpdateDocumentDefaultVersion {
    _private: (),
}
impl UpdateDocumentDefaultVersion {
    /// Creates a new builder-style object to manufacture [`UpdateDocumentDefaultVersionInput`](crate::input::UpdateDocumentDefaultVersionInput)
    pub fn builder() -> crate::input::update_document_default_version_input::Builder {
        crate::input::update_document_default_version_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for UpdateDocumentDefaultVersion {
    type Output = std::result::Result<
        crate::output::UpdateDocumentDefaultVersionOutput,
        crate::error::UpdateDocumentDefaultVersionError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_update_document_default_version_response(response)
        } else {
            crate::operation_deser::parse_update_document_default_version_error(response)
        }
    }
}
/// <p>Updates information related to approval reviews for a specific version of a document.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UpdateDocumentMetadata {
    _private: (),
}
impl UpdateDocumentMetadata {
    /// Creates a new builder-style object to manufacture [`UpdateDocumentMetadataInput`](crate::input::UpdateDocumentMetadataInput)
    pub fn builder() -> crate::input::update_document_metadata_input::Builder {
        crate::input::update_document_metadata_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for UpdateDocumentMetadata {
    type Output = std::result::Result<
        crate::output::UpdateDocumentMetadataOutput,
        crate::error::UpdateDocumentMetadataError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_update_document_metadata_response(response)
        } else {
            crate::operation_deser::parse_update_document_metadata_error(response)
        }
    }
}
/// <p>Updates an existing maintenance window. Only specified parameters are modified.</p>
/// <note>
/// <p>The value you specify for <code>Duration</code> determines the specific end time for the
/// maintenance window based on the time it begins. No maintenance window tasks are permitted to
/// start after the resulting endtime minus the number of hours you specify for <code>Cutoff</code>.
/// For example, if the maintenance window starts at 3 PM, the duration is three hours, and the
/// value you specify for <code>Cutoff</code> is one hour, no maintenance window tasks can start
/// after 5 PM.</p>
/// </note>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UpdateMaintenanceWindow {
    _private: (),
}
impl UpdateMaintenanceWindow {
    /// Creates a new builder-style object to manufacture [`UpdateMaintenanceWindowInput`](crate::input::UpdateMaintenanceWindowInput)
    pub fn builder() -> crate::input::update_maintenance_window_input::Builder {
        crate::input::update_maintenance_window_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for UpdateMaintenanceWindow {
    type Output = std::result::Result<
        crate::output::UpdateMaintenanceWindowOutput,
        crate::error::UpdateMaintenanceWindowError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_update_maintenance_window_response(response)
        } else {
            crate::operation_deser::parse_update_maintenance_window_error(response)
        }
    }
}
/// <p>Modifies the target of an existing maintenance window. You
/// can change the following:</p>
/// <ul>
/// <li>
/// <p>Name</p>
/// </li>
/// <li>
/// <p>Description</p>
/// </li>
/// <li>
/// <p>Owner</p>
/// </li>
/// <li>
/// <p>IDs for an ID target</p>
/// </li>
/// <li>
/// <p>Tags for a Tag target</p>
/// </li>
/// <li>
/// <p>From any supported tag type to another. The three supported tag types are ID target, Tag
/// target, and resource group. For more information, see <a>Target</a>.</p>
/// </li>
/// </ul>
/// <note>
/// <p>If a parameter is null, then the corresponding field is not modified.</p>
/// </note>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UpdateMaintenanceWindowTarget {
    _private: (),
}
impl UpdateMaintenanceWindowTarget {
    /// Creates a new builder-style object to manufacture [`UpdateMaintenanceWindowTargetInput`](crate::input::UpdateMaintenanceWindowTargetInput)
    pub fn builder() -> crate::input::update_maintenance_window_target_input::Builder {
        crate::input::update_maintenance_window_target_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for UpdateMaintenanceWindowTarget {
    type Output = std::result::Result<
        crate::output::UpdateMaintenanceWindowTargetOutput,
        crate::error::UpdateMaintenanceWindowTargetError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_update_maintenance_window_target_response(response)
        } else {
            crate::operation_deser::parse_update_maintenance_window_target_error(response)
        }
    }
}
/// <p>Modifies a task assigned to a maintenance window. You can't change the task type, but you
/// can change the following values:</p>
/// <ul>
/// <li>
/// <p>TaskARN. For example, you can change a RUN_COMMAND task from AWS-RunPowerShellScript to
/// AWS-RunShellScript.</p>
/// </li>
/// <li>
/// <p>ServiceRoleArn</p>
/// </li>
/// <li>
/// <p>TaskInvocationParameters</p>
/// </li>
/// <li>
/// <p>Priority</p>
/// </li>
/// <li>
/// <p>MaxConcurrency</p>
/// </li>
/// <li>
/// <p>MaxErrors</p>
/// </li>
/// </ul>
/// <note>
/// <p>One or more targets must be specified for maintenance window Run Command-type tasks.
/// Depending on the task, targets are optional for other maintenance window task types (Automation,
/// AWS Lambda, and AWS Step Functions). For more information about running tasks that do not
/// specify targets, see <a href="https://docs.aws.amazon.com/systems-manager/latest/userguide/maintenance-windows-targetless-tasks.html">Registering
/// maintenance window tasks without targets</a> in the
/// <i>AWS Systems Manager User Guide</i>.</p>
/// </note>
/// <p>If the value for a parameter in <code>UpdateMaintenanceWindowTask</code> is null, then the
/// corresponding field is not modified. If you set <code>Replace</code> to true, then all fields
/// required by the <a>RegisterTaskWithMaintenanceWindow</a> action are required for this
/// request. Optional fields that aren't specified are set to null.</p>
/// <important>
/// <p>When you update a maintenance window task that has options specified in
/// <code>TaskInvocationParameters</code>, you must provide again all the
/// <code>TaskInvocationParameters</code> values that you want to retain. The values you do not
/// specify again are removed. For example, suppose that when you registered a Run Command task, you
/// specified <code>TaskInvocationParameters</code> values for <code>Comment</code>,
/// <code>NotificationConfig</code>, and <code>OutputS3BucketName</code>. If you update the
/// maintenance window task and specify only a different <code>OutputS3BucketName</code> value, the
/// values for <code>Comment</code> and <code>NotificationConfig</code> are removed.</p>
/// </important>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UpdateMaintenanceWindowTask {
    _private: (),
}
impl UpdateMaintenanceWindowTask {
    /// Creates a new builder-style object to manufacture [`UpdateMaintenanceWindowTaskInput`](crate::input::UpdateMaintenanceWindowTaskInput)
    pub fn builder() -> crate::input::update_maintenance_window_task_input::Builder {
        crate::input::update_maintenance_window_task_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for UpdateMaintenanceWindowTask {
    type Output = std::result::Result<
        crate::output::UpdateMaintenanceWindowTaskOutput,
        crate::error::UpdateMaintenanceWindowTaskError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_update_maintenance_window_task_response(response)
        } else {
            crate::operation_deser::parse_update_maintenance_window_task_error(response)
        }
    }
}
/// <p>Changes the Amazon Identity and Access Management (IAM) role that is assigned to the
/// on-premises instance or virtual machines (VM). IAM roles are first assigned to these hybrid
/// instances during the activation process. For more information, see <a>CreateActivation</a>.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UpdateManagedInstanceRole {
    _private: (),
}
impl UpdateManagedInstanceRole {
    /// Creates a new builder-style object to manufacture [`UpdateManagedInstanceRoleInput`](crate::input::UpdateManagedInstanceRoleInput)
    pub fn builder() -> crate::input::update_managed_instance_role_input::Builder {
        crate::input::update_managed_instance_role_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for UpdateManagedInstanceRole {
    type Output = std::result::Result<
        crate::output::UpdateManagedInstanceRoleOutput,
        crate::error::UpdateManagedInstanceRoleError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_update_managed_instance_role_response(response)
        } else {
            crate::operation_deser::parse_update_managed_instance_role_error(response)
        }
    }
}
/// <p>Edit or change an OpsItem. You must have permission in AWS Identity and Access Management
/// (IAM) to update an OpsItem. For more information, see <a href="https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter-getting-started.html">Getting started with
/// OpsCenter</a> in the <i>AWS Systems Manager User Guide</i>.</p>
/// <p>Operations engineers and IT professionals use OpsCenter to view, investigate, and remediate
/// operational issues impacting the performance and health of their AWS resources. For more
/// information, see <a href="https://docs.aws.amazon.com/systems-manager/latest/userguide/OpsCenter.html">AWS Systems Manager OpsCenter</a> in the
/// <i>AWS Systems Manager User Guide</i>. </p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UpdateOpsItem {
    _private: (),
}
impl UpdateOpsItem {
    /// Creates a new builder-style object to manufacture [`UpdateOpsItemInput`](crate::input::UpdateOpsItemInput)
    pub fn builder() -> crate::input::update_ops_item_input::Builder {
        crate::input::update_ops_item_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for UpdateOpsItem {
    type Output =
        std::result::Result<crate::output::UpdateOpsItemOutput, crate::error::UpdateOpsItemError>;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_update_ops_item_response(response)
        } else {
            crate::operation_deser::parse_update_ops_item_error(response)
        }
    }
}
/// <p>Systems Manager calls this API action when you edit OpsMetadata in Application Manager.</p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UpdateOpsMetadata {
    _private: (),
}
impl UpdateOpsMetadata {
    /// Creates a new builder-style object to manufacture [`UpdateOpsMetadataInput`](crate::input::UpdateOpsMetadataInput)
    pub fn builder() -> crate::input::update_ops_metadata_input::Builder {
        crate::input::update_ops_metadata_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for UpdateOpsMetadata {
    type Output = std::result::Result<
        crate::output::UpdateOpsMetadataOutput,
        crate::error::UpdateOpsMetadataError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_update_ops_metadata_response(response)
        } else {
            crate::operation_deser::parse_update_ops_metadata_error(response)
        }
    }
}
/// <p>Modifies an existing patch baseline. Fields not specified in the request are left
/// unchanged.</p>
/// <note>
/// <p>For information about valid key and value pairs in <code>PatchFilters</code> for each
/// supported operating system type, see <a href="http://docs.aws.amazon.com/systems-manager/latest/APIReference/API_PatchFilter.html">PatchFilter</a>.</p>
/// </note>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UpdatePatchBaseline {
    _private: (),
}
impl UpdatePatchBaseline {
    /// Creates a new builder-style object to manufacture [`UpdatePatchBaselineInput`](crate::input::UpdatePatchBaselineInput)
    pub fn builder() -> crate::input::update_patch_baseline_input::Builder {
        crate::input::update_patch_baseline_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for UpdatePatchBaseline {
    type Output = std::result::Result<
        crate::output::UpdatePatchBaselineOutput,
        crate::error::UpdatePatchBaselineError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_update_patch_baseline_response(response)
        } else {
            crate::operation_deser::parse_update_patch_baseline_error(response)
        }
    }
}
/// <p>Update a resource data sync. After you create a resource data sync for a Region, you can't
/// change the account options for that sync. For example, if you create a sync in the us-east-2
/// (Ohio) Region and you choose the Include only the current account option, you can't edit that
/// sync later and choose the Include all accounts from my AWS Organizations configuration option. Instead,
/// you must delete the first resource data sync, and create a new one.</p>
/// <note>
/// <p>This API action only supports a resource data sync that was created with a SyncFromSource
/// <code>SyncType</code>.</p>
/// </note>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UpdateResourceDataSync {
    _private: (),
}
impl UpdateResourceDataSync {
    /// Creates a new builder-style object to manufacture [`UpdateResourceDataSyncInput`](crate::input::UpdateResourceDataSyncInput)
    pub fn builder() -> crate::input::update_resource_data_sync_input::Builder {
        crate::input::update_resource_data_sync_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for UpdateResourceDataSync {
    type Output = std::result::Result<
        crate::output::UpdateResourceDataSyncOutput,
        crate::error::UpdateResourceDataSyncError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_update_resource_data_sync_response(response)
        } else {
            crate::operation_deser::parse_update_resource_data_sync_error(response)
        }
    }
}
/// <p>
/// <code>ServiceSetting</code> is an account-level setting for an AWS service. This setting
/// defines how a user interacts with or uses a service or a feature of a service. For example, if an
/// AWS service charges money to the account based on feature or service usage, then the AWS service
/// team might create a default setting of "false". This means the user can't use this feature unless
/// they change the setting to "true" and intentionally opt in for a paid feature.</p>
/// <p>Services map a <code>SettingId</code> object to a setting value. AWS services teams define
/// the default value for a <code>SettingId</code>. You can't create a new <code>SettingId</code>,
/// but you can overwrite the default value if you have the <code>ssm:UpdateServiceSetting</code>
/// permission for the setting. Use the <a>GetServiceSetting</a> API action to view the
/// current value. Or, use the <a>ResetServiceSetting</a> to change the value back to the
/// original value defined by the AWS service team.</p>
/// <p>Update the service setting for the account. </p>
#[derive(std::default::Default, std::clone::Clone, std::fmt::Debug)]
pub struct UpdateServiceSetting {
    _private: (),
}
impl UpdateServiceSetting {
    /// Creates a new builder-style object to manufacture [`UpdateServiceSettingInput`](crate::input::UpdateServiceSettingInput)
    pub fn builder() -> crate::input::update_service_setting_input::Builder {
        crate::input::update_service_setting_input::Builder::default()
    }
    /// Constructs the zero-sized marker value for this operation.
    pub fn new() -> Self {
        Self::default()
    }
}
impl smithy_http::response::ParseStrictResponse for UpdateServiceSetting {
    type Output = std::result::Result<
        crate::output::UpdateServiceSettingOutput,
        crate::error::UpdateServiceSettingError,
    >;
    fn parse(&self, response: &http::Response<bytes::Bytes>) -> Self::Output {
        // A success-class status (or an explicit 200) is a successful reply;
        // every other status is routed to the error deserializer.
        let status = response.status();
        if status.is_success() || status.as_u16() == 200 {
            crate::operation_deser::parse_update_service_setting_response(response)
        } else {
            crate::operation_deser::parse_update_service_setting_error(response)
        }
    }
}
| 45.065501 | 302 | 0.69742 |
acc576f57d74a1d1debb26f24c0c1eaafd86914e | 933 | mod report_tracker;
use std::env;
use std::fs;
fn main() {
let args: Vec<String> = env::args().collect();
if args.len() > 1 {
let filename = &args[1];
println!("File: {}", filename);
let input = fs::read_to_string(filename).expect("Failed to open file");
let lines = input.split("\n");
let mut tracker = report_tracker::build_report_tracker(2020);
for line in lines {
match String::from(line).parse::<i32>(){
Ok(parsed_line) => {
let (match_found, mult) = tracker.add_expense(parsed_line);
if match_found {
println!("Expense: {}", mult);
return;
}
},
Err(e) => println!("Failed to parse {} with {}", line, e)
}
}
} else {
println!("Please specify a file to parse!");
}
}
| 31.1 | 79 | 0.478028 |
69912b4ab19ea82fe0a0193b159ebc5e03fb94db | 2,232 | use std::cmp::PartialEq;
use std::collections::HashMap;
use driver::interner::Ident;
use middle::ir;
use middle::ir::visit::*;
/*
#[derive(Clone, Hash)]
pub enum InstructionRef<'a> {
ControlFlow(&'a ir::ControlFlowInstruction),
Regular(&'a ir::Instruction),
}
impl<'a> PartialEq for InstructionRef<'a> {
fn eq(&self, other: &InstructionRef<'a>) -> bool {
match *self {
InstructionRef::ControlFlow(a) => match *other {
InstructionRef::ControlFlow(b) => a as *const _ == b as *const _,
_ => false
},
InstructionRef::Regular(a) => match *other {
InstructionRef::Regular(b) => a as *const _ == b as *const _,
_ => false
},
}
}
}
#[derive(Clone, Hash)]
pub struct LivenessRange<'a>(Vec<InstructionRef<'a>>);
pub struct Liveness<'a> {
ranges: HashMap<Ident, LivenessRange<'a>>
}
impl<'a> Liveness<'a> {
pub fn is_last(&self, reg: Ident, instr: &InstructionRef) -> bool {
self.ranges.get(®)
.expect(&format!("Register {:?} not registered during liveness analysis", reg))
.0
.last()
.map_or(false, |other| other == instr)
}
}
pub struct LivenessAnalysis<'a>(HashMap<Ident, Liveness<'a>>);
pub struct LivenessAnalyzer<'a> {
current_function: Option<Ident>,
result: LivenessAnalysis<'a>
}
impl<'a> LivenessAnalyzer<'a> {
pub fn run(code: &'a ir::Program) -> LivenessAnalysis<'a> {
LivenessAnalyzer {
current_function: None,
result: LivenessAnalysis(HashMap::new())
}.internal_run(code)
}
fn internal_run(self, code: &ir::Program) -> LivenessAnalysis<'a> {
self.result
}
}
impl<'v> Visitor<'v> for LivenessAnalyzer<'v> {
fn visit_symbol(&mut self, symbol: &'v ir::Symbol) {
if let ir::Symbol::Function { name, .. } = *symbol {
self.current_function = Some(name);
}
walk_symbol(self, symbol);
if self.current_function.is_some() {
self.current_function = None;
}
}
fn visit_block(&mut self, block: &'v ir::Block) {
//
}
}
*/ | 25.363636 | 98 | 0.562276 |
5b47277e7d598f148431085887b0d9784cacc97b | 352 |
use futures::Stream;
use crate::fs::AsyncFile;
pub struct AsyncFileStream {
file: AsyncFile
}
impl AsyncFileStream {
pub fn new(file: AsyncFile) -> Self {
Self {
file
}
}
}
impl Stream for AsyncFileStream {
fn poll_next(self: Pin<&mut Self>, lw: &Waker) -> Poll<Option<Self::Item>> {
}
} | 14.08 | 80 | 0.573864 |
1da477aa701316ad68796c843da782eb2c2aba27 | 452 | mod ram {
#[test]
#[cfg(test)]
fn test_ram() {
use rin_sys::ram::RamInfo;
let mut r = RamInfo::new();
r.fetch();
assert_eq!(r.mem_used != 0, true);
assert_eq!(r.mem_total != 0, true);
assert_eq!(r.percent_free > 100.0 || r.percent_free < 0.0, false);
assert_eq!(r.percent_used > 100.0 || r.percent_used < 0.0, false);
assert_eq!(r.mem_free < r.mem_total, true);
}
}
| 21.52381 | 74 | 0.533186 |
18d0e6fb463a5827595ac76ba0d6255b1294cfa1 | 8,089 | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! Analysis which computes information needed in backends for monomorphization. This
//! computes the distinct type instantiations in the model for structs and inlined functions.
use crate::{
function_target::FunctionTarget,
function_target_pipeline::{FunctionTargetProcessor, FunctionTargetsHolder, FunctionVariant},
stackless_bytecode::{Bytecode, Operation},
verification_analysis,
};
use itertools::Itertools;
use move_model::{
model::{FunId, GlobalEnv, QualifiedId, StructEnv, StructId},
ty::{Type, TypeDisplayContext},
};
use std::{
collections::{BTreeMap, BTreeSet},
fmt,
rc::Rc,
};
/// The environment extension computed by this analysis.
#[derive(Clone, Default, Debug, PartialEq, PartialOrd, Eq)]
pub struct MonoInfo {
pub structs: BTreeMap<QualifiedId<StructId>, BTreeSet<Vec<Type>>>,
pub funs: BTreeMap<QualifiedId<FunId>, BTreeSet<Vec<Type>>>,
}
/// Get the information computed by this analysis.
pub fn get_info(env: &GlobalEnv) -> Rc<MonoInfo> {
env.get_extension::<MonoInfo>()
.unwrap_or_else(|| Rc::new(MonoInfo::default()))
}
pub struct MonoAnalysisProcessor();
impl MonoAnalysisProcessor {
pub fn new() -> Box<Self> {
Box::new(Self())
}
}
impl FunctionTargetProcessor for MonoAnalysisProcessor {
fn run(&self, env: &GlobalEnv, targets: &mut FunctionTargetsHolder) {
let mut analyzer = Analyzer {
env,
targets,
info: MonoInfo::default(),
todo_targets: vec![],
done_targets: BTreeSet::new(),
inst_opt: None,
};
analyzer.analyze();
let Analyzer { info, .. } = analyzer;
env.set_extension(info);
}
fn is_single_run(&self) -> bool {
true
}
fn name(&self) -> String {
"mono_analysis".to_owned()
}
fn dump_result(
&self,
f: &mut fmt::Formatter,
env: &GlobalEnv,
_targets: &FunctionTargetsHolder,
) -> fmt::Result {
writeln!(f, "\n\n==== mono-analysis result ====\n")?;
let info = env
.get_extension::<MonoInfo>()
.expect("monomorphization analysis not run");
let tctx = TypeDisplayContext::WithEnv {
env,
type_param_names: None,
};
let display_inst = |tys: &[Type]| {
tys.iter()
.map(|ty| ty.display(&tctx).to_string())
.join(", ")
};
for (sid, insts) in &info.structs {
let sname = env.get_struct(*sid).get_full_name_str();
writeln!(f, "struct {} = {{", sname)?;
for inst in insts {
writeln!(f, " <{}>", display_inst(inst))?;
}
writeln!(f, "}}")?;
}
for (fid, insts) in &info.funs {
let fname = env.get_function(*fid).get_full_name_str();
writeln!(f, "fun {} = {{", fname)?;
for inst in insts {
writeln!(f, " <{}>", display_inst(inst))?;
}
writeln!(f, "}}")?;
}
Ok(())
}
}
struct Analyzer<'a> {
env: &'a GlobalEnv,
targets: &'a FunctionTargetsHolder,
info: MonoInfo,
todo_targets: Vec<(QualifiedId<FunId>, Vec<Type>)>,
done_targets: BTreeSet<(QualifiedId<FunId>, Vec<Type>)>,
inst_opt: Option<Vec<Type>>,
}
impl<'a> Analyzer<'a> {
fn analyze(&mut self) {
// Analyze top-level, verified functions. Any functions they call will be queued in
// self.todo_targets for later analysis. During this phase, self.inst_opt is None.
for module in self.env.get_modules() {
for fun in module.get_functions() {
for (_, target) in self.targets.get_targets(&fun) {
let info = verification_analysis::get_info(&target);
if info.verified {
self.analyze_target(target);
}
}
}
}
// Now incrementally work included targets until they are done, while self.inst_opt
// contains the specific instantiation.
while !self.todo_targets.is_empty() {
let (fun, inst) = self.todo_targets.pop().unwrap();
self.inst_opt = Some(inst);
self.analyze_target(
self.targets
.get_target(&self.env.get_function(fun), &FunctionVariant::Baseline),
);
let inst = std::mem::take(&mut self.inst_opt).unwrap();
if !inst.is_empty() {
// Insert it into final analysis result if not trivial
self.info.funs.entry(fun).or_default().insert(inst.clone());
}
self.done_targets.insert((fun, inst));
}
}
fn analyze_target(&mut self, target: FunctionTarget<'_>) {
// Analyze function locals and return value types.
for idx in 0..target.get_local_count() {
self.add_type(target.get_local_type(idx));
}
for ty in target.get_return_types().iter() {
self.add_type(ty);
}
// Analyze code.
if !target.func_env.is_native_or_intrinsic() {
for bc in target.get_bytecode() {
self.analyze_bytecode(&target, bc);
}
}
}
fn analyze_bytecode(&mut self, target: &FunctionTarget<'_>, bc: &Bytecode) {
use Bytecode::*;
use Operation::*;
// We only need to analyze function calls, not `pack` or other instructions
// because the types those are using are reflected in locals which are analyzed
// elsewhere.
if let Call(_, _, Function(mid, fid, targs), ..) = bc {
if !target.is_opaque() {
// This call needs to be inlined, with targs instantiated by self.inst_opt.
// Schedule for later processing.
let actuals = if let Some(inst) = &self.inst_opt {
targs.iter().map(|ty| ty.instantiate(inst)).collect_vec()
} else {
targs.to_owned()
};
let fun = mid.qualified(*fid);
// Only if this call has not been processed yet, queue it for future processing.
if !self.done_targets.contains(&(fun, actuals.clone())) {
self.todo_targets.push((mid.qualified(*fid), actuals));
}
}
}
}
// Type Analysis
// =============
fn add_type(&mut self, ty: &Type) {
if let Some(inst) = &self.inst_opt {
let ty = ty.instantiate(inst);
self.add_type_continue(&ty)
} else {
self.add_type_continue(ty)
}
}
fn add_type_continue(&mut self, ty: &Type) {
match ty {
Type::Primitive(_) => {}
Type::Tuple(tys) => self.add_types(tys),
Type::Vector(et) => self.add_type(&*et),
Type::Struct(mid, sid, targs) => {
self.add_struct(self.env.get_module(*mid).into_struct(*sid), targs)
}
Type::Reference(_, rt) => self.add_type(&*rt),
Type::Fun(args, res) => {
self.add_types(args);
self.add_type(&*res);
}
Type::TypeDomain(rd) => self.add_type(&*rd),
Type::ResourceDomain(mid, sid, Some(targs)) => {
self.add_struct(self.env.get_module(*mid).into_struct(*sid), targs)
}
_ => {}
}
}
fn add_types<'b, T: IntoIterator<Item = &'b Type>>(&mut self, tys: T) {
for ty in tys {
self.add_type(ty);
}
}
fn add_struct(&mut self, struct_: StructEnv<'_>, targs: &[Type]) {
if !targs.is_empty() {
self.info
.structs
.entry(struct_.get_qualified_id())
.or_default()
.insert(targs.to_owned());
self.add_types(targs);
}
}
}
| 33.845188 | 96 | 0.543083 |
6958b4f002c2af3b614006d5dad65e0a51470ee4 | 5,156 | use ffi;
#[cfg(feature = "use_glib")]
use glib::translate::*;
use std::cmp::PartialEq;
use std::hash;
#[cfg(any(feature = "v1_16", feature = "dox"))]
use font::font_face::to_optional_string;
#[cfg(any(feature = "v1_16", feature = "dox"))]
use std::ffi::CString;
use enums::{Antialias, HintMetrics, HintStyle, Status, SubpixelOrder};
#[cfg(feature = "use_glib")]
glib_wrapper! {
#[derive(Debug)]
pub struct FontOptions(Boxed<ffi::cairo_font_options_t>);
match fn {
copy => |ptr| {
let ptr = ffi::cairo_font_options_copy(ptr);
let status = ffi::cairo_font_options_status(ptr);
Status::from(status).ensure_valid();
ptr
},
free => |ptr| ffi::cairo_font_options_destroy(ptr),
get_type => || ffi::gobject::cairo_gobject_font_options_get_type(),
}
}
#[cfg(not(feature = "use_glib"))]
#[derive(Debug)]
pub struct FontOptions(*mut ffi::cairo_font_options_t);
impl FontOptions {
pub fn new() -> FontOptions {
let font_options: FontOptions =
unsafe { FontOptions::from_raw_full(ffi::cairo_font_options_create()) };
font_options.ensure_status();
font_options
}
#[cfg(feature = "use_glib")]
pub unsafe fn from_raw_full(ptr: *mut ffi::cairo_font_options_t) -> FontOptions {
from_glib_full(ptr)
}
#[cfg(not(feature = "use_glib"))]
pub unsafe fn from_raw_full(ptr: *mut ffi::cairo_font_options_t) -> FontOptions {
assert!(!ptr.is_null());
FontOptions(ptr)
}
#[cfg(feature = "use_glib")]
pub fn to_raw_none(&self) -> *mut ffi::cairo_font_options_t {
mut_override(self.to_glib_none().0)
}
#[cfg(not(feature = "use_glib"))]
pub fn to_raw_none(&self) -> *mut ffi::cairo_font_options_t {
self.0
}
pub fn ensure_status(&self) {
let status = unsafe { ffi::cairo_font_options_status(self.to_raw_none()) };
Status::from(status).ensure_valid()
}
pub fn merge(&mut self, other: &FontOptions) {
unsafe { ffi::cairo_font_options_merge(self.to_raw_none(), other.to_raw_none()) }
}
pub fn set_antialias(&mut self, antialias: Antialias) {
unsafe { ffi::cairo_font_options_set_antialias(self.to_raw_none(), antialias.into()) }
}
pub fn get_antialias(&self) -> Antialias {
unsafe { Antialias::from(ffi::cairo_font_options_get_antialias(self.to_raw_none())) }
}
pub fn set_subpixel_order(&mut self, order: SubpixelOrder) {
unsafe { ffi::cairo_font_options_set_subpixel_order(self.to_raw_none(), order.into()) }
}
pub fn get_subpixel_order(&self) -> SubpixelOrder {
unsafe {
SubpixelOrder::from(ffi::cairo_font_options_get_subpixel_order(
self.to_raw_none(),
))
}
}
pub fn set_hint_style(&mut self, hint_style: HintStyle) {
unsafe { ffi::cairo_font_options_set_hint_style(self.to_raw_none(), hint_style.into()) }
}
pub fn get_hint_style(&self) -> HintStyle {
unsafe { HintStyle::from(ffi::cairo_font_options_get_hint_style(self.to_raw_none())) }
}
pub fn set_hint_metrics(&mut self, hint_metrics: HintMetrics) {
unsafe { ffi::cairo_font_options_set_hint_metrics(self.to_raw_none(), hint_metrics.into()) }
}
pub fn get_hint_metrics(&self) -> HintMetrics {
unsafe { HintMetrics::from(ffi::cairo_font_options_get_hint_metrics(self.to_raw_none())) }
}
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn get_variations(&self) -> Option<String> {
unsafe { to_optional_string(ffi::cairo_font_options_get_variations(self.to_raw_none())) }
}
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn set_variations<'a, T: Into<Option<&'a str>>>(&self, variations: T) {
unsafe {
let variations = variations.into();
match variations {
Some(ref v) => {
let v = CString::new(*v).unwrap();
ffi::cairo_font_options_set_variations(self.to_raw_none(), v.as_ptr())
}
None => ffi::cairo_font_options_set_variations(self.to_raw_none(), 0 as *const _),
}
}
}
}
impl PartialEq for FontOptions {
fn eq(&self, other: &FontOptions) -> bool {
unsafe { ffi::cairo_font_options_equal(self.to_raw_none(), other.to_raw_none()).as_bool() }
}
}
impl Eq for FontOptions {}
impl hash::Hash for FontOptions {
fn hash<H>(&self, state: &mut H)
where
H: hash::Hasher,
{
unsafe { hash::Hash::hash(&ffi::cairo_font_options_hash(self.to_raw_none()), state) }
}
}
impl Default for FontOptions {
fn default() -> Self {
Self::new()
}
}
#[cfg(not(feature = "use_glib"))]
impl Drop for FontOptions {
fn drop(&mut self) {
unsafe {
ffi::cairo_font_options_destroy(self.to_raw_none());
}
}
}
#[cfg(not(feature = "use_glib"))]
impl Clone for FontOptions {
fn clone(&self) -> FontOptions {
unsafe { FontOptions::from_raw_full(ffi::cairo_font_options_copy(self.to_raw_none())) }
}
}
| 30.874251 | 100 | 0.625485 |
261d8bc7260ca5deab6ab1519d4d9387eebaac87 | 1,034 | #![warn(clippy::single_char_lifetime_names)]
// Lifetimes should only be linted when they're introduced
struct DiagnosticCtx<'a, 'b>
where
'a: 'b,
{
_source: &'a str,
_unit: &'b (),
}
// Only the lifetimes on the `impl`'s generics should be linted
impl<'a, 'b> DiagnosticCtx<'a, 'b> {
fn new(source: &'a str, unit: &'b ()) -> DiagnosticCtx<'a, 'b> {
Self {
_source: source,
_unit: unit,
}
}
}
// No lifetimes should be linted here
impl<'src, 'unit> DiagnosticCtx<'src, 'unit> {
fn new_pass(source: &'src str, unit: &'unit ()) -> DiagnosticCtx<'src, 'unit> {
Self {
_source: source,
_unit: unit,
}
}
}
// Only 'a should be linted here
fn split_once<'a>(base: &'a str, other: &'_ str) -> (&'a str, Option<&'a str>) {
base.split_once(other)
.map(|(left, right)| (left, Some(right)))
.unwrap_or((base, None))
}
fn main() {
let src = "loop {}";
let unit = ();
DiagnosticCtx::new(src, &unit);
}
| 23.5 | 83 | 0.549323 |
716ab512716afa709458e1f7d175b44400d48499 | 20,926 | //! This file was generated automatically by the Snowball to Rust compiler
//! http://snowballstem.org/
#![allow(non_upper_case_globals)]
#![allow(non_snake_case)]
#![allow(unused_variables)]
#![allow(unused_mut)]
use std::prelude::v1::*;
use snowball::SnowballEnv;
use snowball::Among;
static A_0: &'static [Among<Context>; 11] = &[
Among("", -1, 6, None),
Among("\u{00E1}", 0, 1, None),
Among("\u{00E4}", 0, 1, None),
Among("\u{00E9}", 0, 2, None),
Among("\u{00EB}", 0, 2, None),
Among("\u{00ED}", 0, 3, None),
Among("\u{00EF}", 0, 3, None),
Among("\u{00F3}", 0, 4, None),
Among("\u{00F6}", 0, 4, None),
Among("\u{00FA}", 0, 5, None),
Among("\u{00FC}", 0, 5, None),
];
static A_1: &'static [Among<Context>; 3] = &[
Among("", -1, 3, None),
Among("I", 0, 2, None),
Among("Y", 0, 1, None),
];
static A_2: &'static [Among<Context>; 3] = &[
Among("dd", -1, -1, None),
Among("kk", -1, -1, None),
Among("tt", -1, -1, None),
];
static A_3: &'static [Among<Context>; 5] = &[
Among("ene", -1, 2, None),
Among("se", -1, 3, None),
Among("en", -1, 2, None),
Among("heden", 2, 1, None),
Among("s", -1, 3, None),
];
static A_4: &'static [Among<Context>; 6] = &[
Among("end", -1, 1, None),
Among("ig", -1, 2, None),
Among("ing", -1, 1, None),
Among("lijk", -1, 3, None),
Among("baar", -1, 4, None),
Among("bar", -1, 5, None),
];
static A_5: &'static [Among<Context>; 4] = &[
Among("aa", -1, -1, None),
Among("ee", -1, -1, None),
Among("oo", -1, -1, None),
Among("uu", -1, -1, None),
];
static G_v: &'static [u8; 17] = &[17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
static G_v_I: &'static [u8; 20] = &[1, 0, 0, 17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
static G_v_j: &'static [u8; 17] = &[17, 67, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
#[derive(Clone)]
struct Context {
i_p2: usize,
i_p1: usize,
b_e_found: bool,
}
fn r_prelude(env: &mut SnowballEnv, context: &mut Context) -> bool {
let mut among_var;
// (, line 41
// test, line 42
let v_1 = env.cursor;
// repeat, line 42
'replab0: loop{
let v_2 = env.cursor;
'lab1: for _ in 0..1 {
// (, line 42
// [, line 43
env.bra = env.cursor;
// substring, line 43
among_var = env.find_among(A_0, context);
if among_var == 0 {
break 'lab1;
}
// ], line 43
env.ket = env.cursor;
if among_var == 0 {
break 'lab1;
} else if among_var == 1 {
// (, line 45
// <-, line 45
if !env.slice_from("a") {
return false;
}
} else if among_var == 2 {
// (, line 47
// <-, line 47
if !env.slice_from("e") {
return false;
}
} else if among_var == 3 {
// (, line 49
// <-, line 49
if !env.slice_from("i") {
return false;
}
} else if among_var == 4 {
// (, line 51
// <-, line 51
if !env.slice_from("o") {
return false;
}
} else if among_var == 5 {
// (, line 53
// <-, line 53
if !env.slice_from("u") {
return false;
}
} else if among_var == 6 {
// (, line 54
// next, line 54
if env.cursor >= env.limit {
break 'lab1;
}
env.next_char();
}
continue 'replab0;
}
env.cursor = v_2;
break 'replab0;
}
env.cursor = v_1;
// try, line 57
let v_3 = env.cursor;
'lab2: loop {
// (, line 57
// [, line 57
env.bra = env.cursor;
// literal, line 57
if !env.eq_s(&"y") {
env.cursor = v_3;
break 'lab2;
}
// ], line 57
env.ket = env.cursor;
// <-, line 57
if !env.slice_from("Y") {
return false;
}
break 'lab2;
}
// repeat, line 58
'replab3: loop{
let v_4 = env.cursor;
'lab4: for _ in 0..1 {
// goto, line 58
'golab5: loop {
let v_5 = env.cursor;
'lab6: loop {
// (, line 58
if !env.in_grouping(G_v, 97, 232) {
break 'lab6;
}
// [, line 59
env.bra = env.cursor;
// or, line 59
'lab7: loop {
let v_6 = env.cursor;
'lab8: loop {
// (, line 59
// literal, line 59
if !env.eq_s(&"i") {
break 'lab8;
}
// ], line 59
env.ket = env.cursor;
if !env.in_grouping(G_v, 97, 232) {
break 'lab8;
}
// <-, line 59
if !env.slice_from("I") {
return false;
}
break 'lab7;
}
env.cursor = v_6;
// (, line 60
// literal, line 60
if !env.eq_s(&"y") {
break 'lab6;
}
// ], line 60
env.ket = env.cursor;
// <-, line 60
if !env.slice_from("Y") {
return false;
}
break 'lab7;
}
env.cursor = v_5;
break 'golab5;
}
env.cursor = v_5;
if env.cursor >= env.limit {
break 'lab4;
}
env.next_char();
}
continue 'replab3;
}
env.cursor = v_4;
break 'replab3;
}
return true;
}
fn r_mark_regions(env: &mut SnowballEnv, context: &mut Context) -> bool {
// (, line 64
context.i_p1 = env.limit;
context.i_p2 = env.limit;
// gopast, line 69
'golab0: loop {
'lab1: loop {
if !env.in_grouping(G_v, 97, 232) {
break 'lab1;
}
break 'golab0;
}
if env.cursor >= env.limit {
return false;
}
env.next_char();
}
// gopast, line 69
'golab2: loop {
'lab3: loop {
if !env.out_grouping(G_v, 97, 232) {
break 'lab3;
}
break 'golab2;
}
if env.cursor >= env.limit {
return false;
}
env.next_char();
}
// setmark p1, line 69
context.i_p1 = env.cursor;
// try, line 70
'lab4: loop {
// (, line 70
if !(context.i_p1 < 3){
break 'lab4;
}
context.i_p1 = 3;
break 'lab4;
}
// gopast, line 71
'golab5: loop {
'lab6: loop {
if !env.in_grouping(G_v, 97, 232) {
break 'lab6;
}
break 'golab5;
}
if env.cursor >= env.limit {
return false;
}
env.next_char();
}
// gopast, line 71
'golab7: loop {
'lab8: loop {
if !env.out_grouping(G_v, 97, 232) {
break 'lab8;
}
break 'golab7;
}
if env.cursor >= env.limit {
return false;
}
env.next_char();
}
// setmark p2, line 71
context.i_p2 = env.cursor;
return true;
}
fn r_postlude(env: &mut SnowballEnv, context: &mut Context) -> bool {
let mut among_var;
// repeat, line 75
'replab0: loop{
let v_1 = env.cursor;
'lab1: for _ in 0..1 {
// (, line 75
// [, line 77
env.bra = env.cursor;
// substring, line 77
among_var = env.find_among(A_1, context);
if among_var == 0 {
break 'lab1;
}
// ], line 77
env.ket = env.cursor;
if among_var == 0 {
break 'lab1;
} else if among_var == 1 {
// (, line 78
// <-, line 78
if !env.slice_from("y") {
return false;
}
} else if among_var == 2 {
// (, line 79
// <-, line 79
if !env.slice_from("i") {
return false;
}
} else if among_var == 3 {
// (, line 80
// next, line 80
if env.cursor >= env.limit {
break 'lab1;
}
env.next_char();
}
continue 'replab0;
}
env.cursor = v_1;
break 'replab0;
}
return true;
}
fn r_R1(env: &mut SnowballEnv, context: &mut Context) -> bool {
if !(context.i_p1 <= env.cursor){
return false;
}
return true;
}
fn r_R2(env: &mut SnowballEnv, context: &mut Context) -> bool {
if !(context.i_p2 <= env.cursor){
return false;
}
return true;
}
fn r_undouble(env: &mut SnowballEnv, context: &mut Context) -> bool {
// (, line 90
// test, line 91
let v_1 = env.limit - env.cursor;
// among, line 91
if env.find_among_b(A_2, context) == 0 {
return false;
}
env.cursor = env.limit - v_1;
// [, line 91
env.ket = env.cursor;
// next, line 91
if env.cursor <= env.limit_backward {
return false;
}
env.previous_char();
// ], line 91
env.bra = env.cursor;
// delete, line 91
if !env.slice_del() {
return false;
}
return true;
}
fn r_e_ending(env: &mut SnowballEnv, context: &mut Context) -> bool {
// (, line 94
// unset e_found, line 95
context.b_e_found = false;
// [, line 96
env.ket = env.cursor;
// literal, line 96
if !env.eq_s_b(&"e") {
return false;
}
// ], line 96
env.bra = env.cursor;
// call R1, line 96
if !r_R1(env, context) {
return false;
}
// test, line 96
let v_1 = env.limit - env.cursor;
if !env.out_grouping_b(G_v, 97, 232) {
return false;
}
env.cursor = env.limit - v_1;
// delete, line 96
if !env.slice_del() {
return false;
}
// set e_found, line 97
context.b_e_found = true;
// call undouble, line 98
if !r_undouble(env, context) {
return false;
}
return true;
}
fn r_en_ending(env: &mut SnowballEnv, context: &mut Context) -> bool {
// (, line 101
// call R1, line 102
if !r_R1(env, context) {
return false;
}
// and, line 102
let v_1 = env.limit - env.cursor;
if !env.out_grouping_b(G_v, 97, 232) {
return false;
}
env.cursor = env.limit - v_1;
// not, line 102
let v_2 = env.limit - env.cursor;
'lab0: loop {
// literal, line 102
if !env.eq_s_b(&"gem") {
break 'lab0;
}
return false;
}
env.cursor = env.limit - v_2;
// delete, line 102
if !env.slice_del() {
return false;
}
// call undouble, line 103
if !r_undouble(env, context) {
return false;
}
return true;
}
fn r_standard_suffix(env: &mut SnowballEnv, context: &mut Context) -> bool {
let mut among_var;
// (, line 106
// do, line 107
let v_1 = env.limit - env.cursor;
'lab0: loop {
// (, line 107
// [, line 108
env.ket = env.cursor;
// substring, line 108
among_var = env.find_among_b(A_3, context);
if among_var == 0 {
break 'lab0;
}
// ], line 108
env.bra = env.cursor;
if among_var == 0 {
break 'lab0;
} else if among_var == 1 {
// (, line 110
// call R1, line 110
if !r_R1(env, context) {
break 'lab0;
}
// <-, line 110
if !env.slice_from("heid") {
return false;
}
} else if among_var == 2 {
// (, line 113
// call en_ending, line 113
if !r_en_ending(env, context) {
break 'lab0;
}
} else if among_var == 3 {
// (, line 116
// call R1, line 116
if !r_R1(env, context) {
break 'lab0;
}
if !env.out_grouping_b(G_v_j, 97, 232) {
break 'lab0;
}
// delete, line 116
if !env.slice_del() {
return false;
}
}
break 'lab0;
}
env.cursor = env.limit - v_1;
// do, line 120
let v_2 = env.limit - env.cursor;
'lab1: loop {
// call e_ending, line 120
if !r_e_ending(env, context) {
break 'lab1;
}
break 'lab1;
}
env.cursor = env.limit - v_2;
// do, line 122
let v_3 = env.limit - env.cursor;
'lab2: loop {
// (, line 122
// [, line 122
env.ket = env.cursor;
// literal, line 122
if !env.eq_s_b(&"heid") {
break 'lab2;
}
// ], line 122
env.bra = env.cursor;
// call R2, line 122
if !r_R2(env, context) {
break 'lab2;
}
// not, line 122
let v_4 = env.limit - env.cursor;
'lab3: loop {
// literal, line 122
if !env.eq_s_b(&"c") {
break 'lab3;
}
break 'lab2;
}
env.cursor = env.limit - v_4;
// delete, line 122
if !env.slice_del() {
return false;
}
// [, line 123
env.ket = env.cursor;
// literal, line 123
if !env.eq_s_b(&"en") {
break 'lab2;
}
// ], line 123
env.bra = env.cursor;
// call en_ending, line 123
if !r_en_ending(env, context) {
break 'lab2;
}
break 'lab2;
}
env.cursor = env.limit - v_3;
// do, line 126
let v_5 = env.limit - env.cursor;
'lab4: loop {
// (, line 126
// [, line 127
env.ket = env.cursor;
// substring, line 127
among_var = env.find_among_b(A_4, context);
if among_var == 0 {
break 'lab4;
}
// ], line 127
env.bra = env.cursor;
if among_var == 0 {
break 'lab4;
} else if among_var == 1 {
// (, line 129
// call R2, line 129
if !r_R2(env, context) {
break 'lab4;
}
// delete, line 129
if !env.slice_del() {
return false;
}
// or, line 130
'lab5: loop {
let v_6 = env.limit - env.cursor;
'lab6: loop {
// (, line 130
// [, line 130
env.ket = env.cursor;
// literal, line 130
if !env.eq_s_b(&"ig") {
break 'lab6;
}
// ], line 130
env.bra = env.cursor;
// call R2, line 130
if !r_R2(env, context) {
break 'lab6;
}
// not, line 130
let v_7 = env.limit - env.cursor;
'lab7: loop {
// literal, line 130
if !env.eq_s_b(&"e") {
break 'lab7;
}
break 'lab6;
}
env.cursor = env.limit - v_7;
// delete, line 130
if !env.slice_del() {
return false;
}
break 'lab5;
}
env.cursor = env.limit - v_6;
// call undouble, line 130
if !r_undouble(env, context) {
break 'lab4;
}
break 'lab5;
}
} else if among_var == 2 {
// (, line 133
// call R2, line 133
if !r_R2(env, context) {
break 'lab4;
}
// not, line 133
let v_8 = env.limit - env.cursor;
'lab8: loop {
// literal, line 133
if !env.eq_s_b(&"e") {
break 'lab8;
}
break 'lab4;
}
env.cursor = env.limit - v_8;
// delete, line 133
if !env.slice_del() {
return false;
}
} else if among_var == 3 {
// (, line 136
// call R2, line 136
if !r_R2(env, context) {
break 'lab4;
}
// delete, line 136
if !env.slice_del() {
return false;
}
// call e_ending, line 136
if !r_e_ending(env, context) {
break 'lab4;
}
} else if among_var == 4 {
// (, line 139
// call R2, line 139
if !r_R2(env, context) {
break 'lab4;
}
// delete, line 139
if !env.slice_del() {
return false;
}
} else if among_var == 5 {
// (, line 142
// call R2, line 142
if !r_R2(env, context) {
break 'lab4;
}
// Boolean test e_found, line 142
if !context.b_e_found {
break 'lab4;
}
// delete, line 142
if !env.slice_del() {
return false;
}
}
break 'lab4;
}
env.cursor = env.limit - v_5;
// do, line 146
let v_9 = env.limit - env.cursor;
'lab9: loop {
// (, line 146
if !env.out_grouping_b(G_v_I, 73, 232) {
break 'lab9;
}
// test, line 148
let v_10 = env.limit - env.cursor;
// (, line 148
// among, line 149
if env.find_among_b(A_5, context) == 0 {
break 'lab9;
}
if !env.out_grouping_b(G_v, 97, 232) {
break 'lab9;
}
env.cursor = env.limit - v_10;
// [, line 152
env.ket = env.cursor;
// next, line 152
if env.cursor <= env.limit_backward {
break 'lab9;
}
env.previous_char();
// ], line 152
env.bra = env.cursor;
// delete, line 152
if !env.slice_del() {
return false;
}
break 'lab9;
}
env.cursor = env.limit - v_9;
return true;
}
pub fn stem(env: &mut SnowballEnv) -> bool {
let mut context = &mut Context {
i_p2: 0,
i_p1: 0,
b_e_found: false,
};
// (, line 157
// do, line 159
let v_1 = env.cursor;
'lab0: loop {
// call prelude, line 159
if !r_prelude(env, context) {
break 'lab0;
}
break 'lab0;
}
env.cursor = v_1;
// do, line 160
let v_2 = env.cursor;
'lab1: loop {
// call mark_regions, line 160
if !r_mark_regions(env, context) {
break 'lab1;
}
break 'lab1;
}
env.cursor = v_2;
// backwards, line 161
env.limit_backward = env.cursor;
env.cursor = env.limit;
// do, line 162
let v_3 = env.limit - env.cursor;
'lab2: loop {
// call standard_suffix, line 162
if !r_standard_suffix(env, context) {
break 'lab2;
}
break 'lab2;
}
env.cursor = env.limit - v_3;
env.cursor = env.limit_backward;
// do, line 163
let v_4 = env.cursor;
'lab3: loop {
// call postlude, line 163
if !r_postlude(env, context) {
break 'lab3;
}
break 'lab3;
}
env.cursor = v_4;
return true;
}
| 27.106218 | 101 | 0.40605 |
bb27f9f349feb160af2284b968302529830d3c92 | 3,853 | /*
* No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
*
* The version of the OpenAPI document: 20220523
*
* Generated by: https://openapi-generator.tech
*/
use reqwest;
use crate::apis::ResponseContent;
use super::{Error, configuration};
/// struct for typed errors of method [`move_target_list`]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum MoveTargetListError {
DefaultResponse(String),
UnknownValue(serde_json::Value),
}
/// struct for typed errors of method [`move_target_read`]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum MoveTargetReadError {
DefaultResponse(String),
UnknownValue(serde_json::Value),
}
pub async fn move_target_list(configuration: &configuration::Configuration, limit: Option<i32>, offset: Option<i32>) -> Result<String, Error<MoveTargetListError>> {
let local_var_configuration = configuration;
let local_var_client = &local_var_configuration.client;
let local_var_uri_str = format!("{}/api/v2/move-target/", local_var_configuration.base_path);
let mut local_var_req_builder = local_var_client.request(reqwest::Method::GET, local_var_uri_str.as_str());
if let Some(ref local_var_str) = limit {
local_var_req_builder = local_var_req_builder.query(&[("limit", &local_var_str.to_string())]);
}
if let Some(ref local_var_str) = offset {
local_var_req_builder = local_var_req_builder.query(&[("offset", &local_var_str.to_string())]);
}
if let Some(ref local_var_user_agent) = local_var_configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<MoveTargetListError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
pub async fn move_target_read(configuration: &configuration::Configuration, id: i32) -> Result<String, Error<MoveTargetReadError>> {
let local_var_configuration = configuration;
let local_var_client = &local_var_configuration.client;
let local_var_uri_str = format!("{}/api/v2/move-target/{id}/", local_var_configuration.base_path, id=id);
let mut local_var_req_builder = local_var_client.request(reqwest::Method::GET, local_var_uri_str.as_str());
if let Some(ref local_var_user_agent) = local_var_configuration.user_agent {
local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
}
let local_var_req = local_var_req_builder.build()?;
let local_var_resp = local_var_client.execute(local_var_req).await?;
let local_var_status = local_var_resp.status();
let local_var_content = local_var_resp.text().await?;
if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
serde_json::from_str(&local_var_content).map_err(Error::from)
} else {
let local_var_entity: Option<MoveTargetReadError> = serde_json::from_str(&local_var_content).ok();
let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
Err(Error::ResponseError(local_var_error))
}
}
| 41.430108 | 164 | 0.745134 |
03456f8529028a36a8527a3f00cc3a5e8e99548a | 45,867 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub use self::Constructor::*;
use self::Usefulness::*;
use self::WitnessPreference::*;
use middle::const_eval::{compare_const_vals, const_bool, const_float, const_val};
use middle::const_eval::{const_expr_to_pat, eval_const_expr, lookup_const_by_id};
use middle::def::*;
use middle::expr_use_visitor::{ConsumeMode, Delegate, ExprUseVisitor, Init};
use middle::expr_use_visitor::{JustWrite, LoanCause, MutateMode};
use middle::expr_use_visitor::{WriteAndRead};
use middle::expr_use_visitor as euv;
use middle::mem_categorization::cmt;
use middle::pat_util::*;
use middle::ty::*;
use middle::ty;
use std::cmp::Ordering;
use std::fmt;
use std::iter::{range_inclusive, AdditiveIterator, FromIterator, repeat};
use std::num::Float;
use std::slice;
use syntax::ast::{self, DUMMY_NODE_ID, NodeId, Pat};
use syntax::ast_util;
use syntax::codemap::{Span, Spanned, DUMMY_SP};
use syntax::fold::{Folder, noop_fold_pat};
use syntax::print::pprust::pat_to_string;
use syntax::parse::token;
use syntax::ptr::P;
use syntax::visit::{self, Visitor, FnKind};
use util::ppaux::ty_to_string;
use util::nodemap::FnvHashMap;
/// Shared wildcard pattern (`_`) used as a synthetic placeholder when
/// expanding constructors during usefulness checking. `DUMMY_NODE_ID`
/// marks it as not belonging to the user's AST.
pub const DUMMY_WILD_PAT: &'static Pat = &Pat {
    id: DUMMY_NODE_ID,
    node: ast::PatWild(ast::PatWildSingle),
    span: DUMMY_SP
};
struct Matrix<'a>(Vec<Vec<&'a Pat>>);
/// Pretty-printer for matrices of patterns, example:
/// ++++++++++++++++++++++++++
/// + _     + []             +
/// ++++++++++++++++++++++++++
/// + true  + [First]        +
/// ++++++++++++++++++++++++++
/// + true  + [Second(true)] +
/// ++++++++++++++++++++++++++
/// + false + [_]            +
/// ++++++++++++++++++++++++++
/// + _     + [_, _, ..tail] +
/// ++++++++++++++++++++++++++
impl<'a> fmt::Debug for Matrix<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        try!(write!(f, "\n"));

        let &Matrix(ref m) = self;
        // Render every pattern to a string first so column widths can be
        // computed before any output is emitted.
        let pretty_printed_matrix: Vec<Vec<String>> = m.iter().map(|row| {
            row.iter()
               .map(|&pat| pat_to_string(&*pat))
               .collect::<Vec<String>>()
        }).collect();

        let column_count = m.iter().map(|row| row.len()).max().unwrap_or(0);
        // The matrix is expected to be rectangular.
        assert!(m.iter().all(|row| row.len() == column_count));
        // Each column is as wide as its widest cell.
        let column_widths: Vec<uint> = (0..column_count).map(|col| {
            pretty_printed_matrix.iter().map(|row| row[col].len()).max().unwrap_or(0)
        }).collect();

        // "+ cell " contributes 3 extra chars per column, plus a trailing '+'.
        let total_width = column_widths.iter().map(|n| *n).sum() + column_count * 3 + 1;
        let br = repeat('+').take(total_width).collect::<String>();
        try!(write!(f, "{}\n", br));
        for row in pretty_printed_matrix {
            try!(write!(f, "+"));
            for (column, pat_str) in row.into_iter().enumerate() {
                try!(write!(f, " "));
                // Left-pad each cell to its column width.
                try!(write!(f, "{:1$}", pat_str, column_widths[column]));
                try!(write!(f, " +"));
            }
            try!(write!(f, "\n"));
            try!(write!(f, "{}\n", br));
        }
        Ok(())
    }
}
/// Allows collecting an iterator of rows directly into a `Matrix`.
impl<'a> FromIterator<Vec<&'a Pat>> for Matrix<'a> {
    fn from_iter<T: Iterator<Item=Vec<&'a Pat>>>(iterator: T) -> Matrix<'a> {
        Matrix(iterator.collect())
    }
}
/// Shared context threaded through all match-checking passes over a crate.
pub struct MatchCheckCtxt<'a, 'tcx: 'a> {
    pub tcx: &'a ty::ctxt<'tcx>,
    // Parameter environment of the enclosing item; refreshed per function in
    // `check_fn` and consulted e.g. for move-semantics queries.
    pub param_env: ParameterEnvironment<'a, 'tcx>,
}
/// A pattern "constructor": the outermost shape a pattern can require of a
/// value. Usefulness checking splits the pattern matrix along these.
#[derive(Clone, PartialEq)]
pub enum Constructor {
    /// The constructor of all patterns that don't vary by constructor,
    /// e.g. struct patterns and fixed-length arrays.
    Single,
    /// Enum variants.
    Variant(ast::DefId),
    /// Literal values.
    ConstantValue(const_val),
    /// Ranges of literal values (2..5).
    ConstantRange(const_val, const_val),
    /// Array patterns of length n.
    Slice(uint),
    /// Array patterns with a subslice.
    SliceWithSubslice(uint, uint)
}
/// Result of the usefulness computation (`is_useful`).
#[derive(Clone, PartialEq)]
enum Usefulness {
    /// The pattern can match values that no earlier pattern matches.
    Useful,
    /// Useful, plus example pattern(s) witnessing the uncovered values.
    UsefulWithWitness(Vec<P<Pat>>),
    /// The pattern is redundant (unreachable).
    NotUseful
}
/// Whether `is_useful` should build witness patterns (needed for
/// exhaustiveness diagnostics) or skip that work (reachability checks).
#[derive(Copy)]
enum WitnessPreference {
    ConstructWitness,
    LeaveOutWitness
}
/// AST walk entry points; each hook delegates to the matching free
/// `check_*` function below.
impl<'a, 'tcx, 'v> Visitor<'v> for MatchCheckCtxt<'a, 'tcx> {
    fn visit_expr(&mut self, ex: &ast::Expr) {
        check_expr(self, ex);
    }
    fn visit_local(&mut self, l: &ast::Local) {
        check_local(self, l);
    }
    fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v ast::FnDecl,
                b: &'v ast::Block, s: Span, n: NodeId) {
        check_fn(self, fk, fd, b, s, n);
    }
}
/// Runs match checking over the whole crate, then aborts compilation if any
/// errors were reported along the way.
pub fn check_crate(tcx: &ty::ctxt) {
    // An empty parameter environment is used at crate level; `check_fn`
    // swaps in the correct one for each item it descends into.
    let mut cx = MatchCheckCtxt {
        tcx: tcx,
        param_env: ty::empty_parameter_environment(tcx),
    };
    visit::walk_crate(&mut cx, tcx.map.krate());
    tcx.sess.abort_if_errors();
}
/// Checks a single expression; only `match` expressions get real work.
/// For each `match`: validates move bindings and guards, inlines const
/// patterns, then runs reachability and exhaustiveness analysis.
fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) {
    visit::walk_expr(cx, ex);
    match ex.node {
        ast::ExprMatch(ref scrut, ref arms, source) => {
            for arm in arms {
                // First, check legality of move bindings.
                check_legality_of_move_bindings(cx,
                                                arm.guard.is_some(),
                                                &arm.pats[]);

                // Second, if there is a guard on each arm, make sure it isn't
                // assigning or borrowing anything mutably.
                match arm.guard {
                    Some(ref guard) => check_for_mutation_in_guard(cx, &**guard),
                    None => {}
                }
            }

            // Replace references to `const` items with the patterns they
            // expand to, so the analysis sees concrete patterns.
            let mut static_inliner = StaticInliner::new(cx.tcx, None);
            let inlined_arms = arms.iter().map(|arm| {
                (arm.pats.iter().map(|pat| {
                    static_inliner.fold_pat((*pat).clone())
                }).collect(), arm.guard.as_ref().map(|e| &**e))
            }).collect::<Vec<(Vec<P<Pat>>, Option<&ast::Expr>)>>();

            // Bail out early if inlining failed.
            if static_inliner.failed {
                return;
            }

            for pat in inlined_arms
                .iter()
                .flat_map(|&(ref pats, _)| pats.iter()) {
                // Third, check legality of move bindings.
                check_legality_of_bindings_in_at_patterns(cx, &**pat);

                // Fourth, check if there are any references to NaN that we should warn about.
                check_for_static_nan(cx, &**pat);

                // Fifth, check if for any of the patterns that match an enumerated type
                // are bindings with the same name as one of the variants of said type.
                check_for_bindings_named_the_same_as_variants(cx, &**pat);
            }

            // Fourth, check for unreachable arms.
            check_arms(cx, &inlined_arms[], source);

            // Finally, check if the whole match expression is exhaustive.
            // Check for empty enum, because is_useful only works on inhabited types.
            let pat_ty = node_id_to_type(cx.tcx, scrut.id);
            if inlined_arms.is_empty() {
                if !type_is_empty(cx.tcx, pat_ty) {
                    // We know the type is inhabited, so this must be wrong
                    span_err!(cx.tcx.sess, ex.span, E0002,
                              "non-exhaustive patterns: type {} is non-empty",
                              ty_to_string(cx.tcx, pat_ty)
                    );
                }
                // If the type *is* empty, it's vacuously exhaustive
                return;
            }

            // Guarded arms are excluded: a guard may fail at runtime, so
            // they cannot contribute to exhaustiveness.
            let matrix: Matrix = inlined_arms
                .iter()
                .filter(|&&(_, guard)| guard.is_none())
                .flat_map(|arm| arm.0.iter())
                .map(|pat| vec![&**pat])
                .collect();
            check_exhaustive(cx, ex.span, &matrix, source);
        },
        _ => ()
    }
}
/// Returns true iff `expr` const-evaluates to a floating-point NaN.
/// Non-float constant values never count as NaN.
fn is_expr_const_nan(tcx: &ty::ctxt, expr: &ast::Expr) -> bool {
    if let const_float(f) = eval_const_expr(tcx, expr) {
        f.is_nan()
    } else {
        false
    }
}
/// Warns (E0170) when a by-value binding in a pattern has the same name as
/// a nullary variant of the enum being matched — almost always a typo for
/// matching that variant rather than binding a fresh name.
fn check_for_bindings_named_the_same_as_variants(cx: &MatchCheckCtxt, pat: &Pat) {
    ast_util::walk_pat(pat, |p| {
        match p.node {
            // Only plain immutable by-value bindings with no sub-pattern can
            // be confused with a variant path.
            ast::PatIdent(ast::BindByValue(ast::MutImmutable), ident, None) => {
                let pat_ty = ty::pat_ty(cx.tcx, p);
                if let ty::ty_enum(def_id, _) = pat_ty.sty {
                    let def = cx.tcx.def_map.borrow().get(&p.id).cloned();
                    if let Some(DefLocal(_)) = def {
                        // Warn only if some nullary variant shares the name.
                        if ty::enum_variants(cx.tcx, def_id).iter().any(|variant|
                            token::get_name(variant.name) == token::get_name(ident.node.name)
                                && variant.args.len() == 0
                        ) {
                            span_warn!(cx.tcx.sess, p.span, E0170,
                                "pattern binding `{}` is named the same as one \
                                 of the variants of the type `{}`",
                                &token::get_ident(ident.node), ty_to_string(cx.tcx, pat_ty));
                            span_help!(cx.tcx.sess, p.span,
                                "if you meant to match on a variant, \
                                 consider making the path in the pattern qualified: `{}::{}`",
                                ty_to_string(cx.tcx, pat_ty), &token::get_ident(ident.node));
                        }
                    }
                }
            }
            _ => ()
        }
        true
    });
}
/// Warns (E0003) on literal patterns whose constant value is NaN (#6804):
/// `NaN != NaN`, so such a pattern can never match.
fn check_for_static_nan(cx: &MatchCheckCtxt, pat: &Pat) {
    ast_util::walk_pat(pat, |p| {
        if let ast::PatLit(ref expr) = p.node {
            if is_expr_const_nan(cx.tcx, &**expr) {
                span_warn!(cx.tcx.sess, p.span, E0003,
                           "unmatchable NaN in pattern, \
                            use the is_nan method in a guard instead");
            }
        }
        // Keep walking into sub-patterns.
        true
    });
}
// Check for unreachable patterns
//
// Walks the arms top to bottom, accumulating unguarded patterns into `seen`;
// any pattern not useful w.r.t. `seen` is unreachable. The diagnostic
// depends on where the `match` came from (if-let / while-let / for desugaring
// or a user-written match).
fn check_arms(cx: &MatchCheckCtxt,
              arms: &[(Vec<P<Pat>>, Option<&ast::Expr>)],
              source: ast::MatchSource) {
    let mut seen = Matrix(vec![]);
    let mut printed_if_let_err = false;
    for &(ref pats, guard) in arms {
        for pat in pats {
            let v = vec![&**pat];

            match is_useful(cx, &seen, &v[], LeaveOutWitness) {
                NotUseful => {
                    match source {
                        ast::MatchSource::IfLetDesugar { .. } => {
                            if printed_if_let_err {
                                // we already printed an irrefutable if-let pattern error.
                                // We don't want two, that's just confusing.
                            } else {
                                // find the first arm pattern so we can use its span
                                let &(ref first_arm_pats, _) = &arms[0];
                                let first_pat = &first_arm_pats[0];
                                let span = first_pat.span;
                                span_err!(cx.tcx.sess, span, E0162, "irrefutable if-let pattern");
                                printed_if_let_err = true;
                            }
                        },

                        ast::MatchSource::WhileLetDesugar => {
                            // find the first arm pattern so we can use its span
                            let &(ref first_arm_pats, _) = &arms[0];
                            let first_pat = &first_arm_pats[0];
                            let span = first_pat.span;
                            span_err!(cx.tcx.sess, span, E0165, "irrefutable while-let pattern");
                        },

                        ast::MatchSource::ForLoopDesugar => {
                            // this is a bug, because on `match iter.next()` we cover
                            // `Some(<head>)` and `None`. It's impossible to have an unreachable
                            // pattern
                            // (see libsyntax/ext/expand.rs for the full expansion of a for loop)
                            cx.tcx.sess.span_bug(pat.span, "unreachable for-loop pattern")
                        },

                        ast::MatchSource::Normal => {
                            span_err!(cx.tcx.sess, pat.span, E0001, "unreachable pattern")
                        },
                    }
                }
                Useful => (),
                UsefulWithWitness(_) => unreachable!()
            }
            // Guarded patterns may fail at runtime, so they don't shadow
            // later arms and are excluded from `seen`.
            if guard.is_none() {
                let Matrix(mut rows) = seen;
                rows.push(v);
                seen = Matrix(rows);
            }
        }
    }
}
/// Strips `ident @ pattern` layers, returning the innermost pattern that is
/// not an identifier binding with a sub-pattern (possibly `p` itself).
fn raw_pat<'a>(p: &'a Pat) -> &'a Pat {
    let mut current = p;
    while let ast::PatIdent(_, _, Some(ref sub)) = current.node {
        current = &**sub;
    }
    current
}
/// Reports E0004/E0297 if a wildcard is still useful against `matrix`,
/// i.e. the match does not cover every possible value. The witness pattern
/// returned by `is_useful` is shown in the diagnostic.
fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, matrix: &Matrix, source: ast::MatchSource) {
    match is_useful(cx, matrix, &[DUMMY_WILD_PAT], ConstructWitness) {
        UsefulWithWitness(pats) => {
            // At most one witness is expected for a single-column matrix.
            let witness = match &pats[] {
                [ref witness] => &**witness,
                [] => DUMMY_WILD_PAT,
                _ => unreachable!()
            };
            match source {
                ast::MatchSource::ForLoopDesugar => {
                    // `witness` has the form `Some(<head>)`, peel off the `Some`
                    let witness = match witness.node {
                        ast::PatEnum(_, Some(ref pats)) => match &pats[] {
                            [ref pat] => &**pat,
                            _ => unreachable!(),
                        },
                        _ => unreachable!(),
                    };
                    span_err!(cx.tcx.sess, sp, E0297,
                        "refutable pattern in `for` loop binding: \
                         `{}` not covered",
                        pat_to_string(witness));
                },
                _ => {
                    span_err!(cx.tcx.sess, sp, E0004,
                        "non-exhaustive patterns: `{}` not covered",
                        pat_to_string(witness)
                    );
                },
            }
        }
        NotUseful => {
            // This is good, wildcard pattern isn't reachable
        },
        _ => unreachable!()
    }
}
/// Converts a constant value back into a synthetic literal expression for
/// use inside a witness pattern. Only booleans are needed here (witnesses
/// for other constants go through different paths), hence the
/// `unreachable!` for every other `const_val`.
fn const_val_to_expr(value: &const_val) -> P<ast::Expr> {
    let node = if let &const_bool(b) = value {
        ast::LitBool(b)
    } else {
        unreachable!()
    };
    let lit = P(Spanned { node: node, span: DUMMY_SP });
    P(ast::Expr {
        id: 0,
        node: ast::ExprLit(lit),
        span: DUMMY_SP
    })
}
/// AST folder that replaces `const`-item references in patterns with the
/// patterns the constants expand to.
pub struct StaticInliner<'a, 'tcx: 'a> {
    pub tcx: &'a ty::ctxt<'tcx>,
    // Set to true when a referenced const could not be resolved; callers
    // check this and bail out instead of analyzing bogus patterns.
    pub failed: bool,
    // When present, records which original (node, span) pairs were replaced
    // by which substituted nodes, for use by later passes.
    pub renaming_map: Option<&'a mut FnvHashMap<(NodeId, Span), NodeId>>,
}
impl<'a, 'tcx> StaticInliner<'a, 'tcx> {
    /// Creates an inliner in the not-yet-failed state. Pass a
    /// `renaming_map` to additionally record const-expansion renamings.
    pub fn new<'b>(tcx: &'b ty::ctxt<'tcx>,
                   renaming_map: Option<&'b mut FnvHashMap<(NodeId, Span), NodeId>>)
                   -> StaticInliner<'b, 'tcx> {
        StaticInliner {
            tcx: tcx,
            failed: false,
            renaming_map: renaming_map
        }
    }
}
/// ID-visitor callback target that records, for every node id found in an
/// inlined const expression, the id of the pattern node that replaced it.
struct RenamingRecorder<'map> {
    substituted_node_id: NodeId,
    origin_span: Span,
    renaming_map: &'map mut FnvHashMap<(NodeId, Span), NodeId>
}
impl<'map> ast_util::IdVisitingOperation for RenamingRecorder {
    // Called once per node id in the visited const expression; maps the
    // (original id, original span) pair to the substituted pattern's id.
    fn visit_id(&mut self, node_id: NodeId) {
        let key = (node_id, self.origin_span);
        self.renaming_map.insert(key, self.substituted_node_id);
    }
}
impl<'a, 'tcx> Folder for StaticInliner<'a, 'tcx> {
    /// Replaces patterns that resolve to a `const` item with the pattern
    /// derived from that constant's expression; all other patterns are
    /// folded structurally. Sets `self.failed` and emits E0158 when the
    /// constant cannot be looked up.
    fn fold_pat(&mut self, pat: P<Pat>) -> P<Pat> {
        return match pat.node {
            // Only ident and enum-like paths can name a const.
            ast::PatIdent(..) | ast::PatEnum(..) => {
                let def = self.tcx.def_map.borrow().get(&pat.id).cloned();
                match def {
                    Some(DefConst(did)) => match lookup_const_by_id(self.tcx, did) {
                        Some(const_expr) => {
                            const_expr_to_pat(self.tcx, const_expr, pat.span).map(|new_pat| {

                                if let Some(ref mut renaming_map) = self.renaming_map {
                                    // Record any renamings we do here
                                    record_renamings(const_expr, &pat, renaming_map);
                                }

                                new_pat
                            })
                        }
                        None => {
                            self.failed = true;
                            span_err!(self.tcx.sess, pat.span, E0158,
                                "statics cannot be referenced in patterns");
                            pat
                        }
                    },
                    _ => noop_fold_pat(pat, self)
                }
            }
            _ => noop_fold_pat(pat, self)
        };

        // Walks every node id in `const_expr`, mapping it (keyed by the
        // original pattern's span) to the substituted pattern's id.
        fn record_renamings(const_expr: &ast::Expr,
                            substituted_pat: &ast::Pat,
                            renaming_map: &mut FnvHashMap<(NodeId, Span), NodeId>) {
            let mut renaming_recorder = RenamingRecorder {
                substituted_node_id: substituted_pat.id,
                origin_span: substituted_pat.span,
                renaming_map: renaming_map,
            };

            let mut id_visitor = ast_util::IdVisitor {
                operation: &mut renaming_recorder,
                pass_through_items: true,
                visited_outermost: false,
            };

            id_visitor.visit_expr(const_expr);
        }
    }
}
/// Constructs a partial witness for a pattern given a list of
/// patterns expanded by the specialization step.
///
/// When a pattern P is discovered to be useful, this function is used bottom-up
/// to reconstruct a complete witness, e.g. a pattern P' that covers a subset
/// of values, V, where each value in that set is not covered by any previously
/// used patterns and is covered by the pattern P'. Examples:
///
/// left_ty: tuple of 3 elements
/// pats: [10, 20, _]           => (10, 20, _)
///
/// left_ty: struct X { a: (bool, &'static str), b: uint}
/// pats: [(false, "foo"), 42]  => X { a: (false, "foo"), b: 42 }
fn construct_witness(cx: &MatchCheckCtxt, ctor: &Constructor,
                     pats: Vec<&Pat>, left_ty: Ty) -> P<Pat> {
    let pats_len = pats.len();
    let mut pats = pats.into_iter().map(|p| P((*p).clone()));
    // Rebuild the AST node that `ctor` applied to `pats` represents,
    // driven by the type being matched.
    let pat = match left_ty.sty {
        ty::ty_tup(_) => ast::PatTup(pats.collect()),

        ty::ty_enum(cid, _) | ty::ty_struct(cid, _) => {
            let (vid, is_structure) = match ctor {
                &Variant(vid) =>
                    (vid, ty::enum_variant_with_id(cx.tcx, cid, vid).arg_names.is_some()),
                _ =>
                    (cid, !ty::is_tuple_struct(cx.tcx, cid))
            };
            if is_structure {
                let fields = ty::lookup_struct_fields(cx.tcx, vid);
                // Pair fields with their sub-witnesses; wildcard fields are
                // dropped and reintroduced via the `..` (has_more_fields).
                let field_pats: Vec<_> = fields.into_iter()
                    .zip(pats)
                    .filter(|&(_, ref pat)| pat.node != ast::PatWild(ast::PatWildSingle))
                    .map(|(field, pat)| Spanned {
                        span: DUMMY_SP,
                        node: ast::FieldPat {
                            ident: ast::Ident::new(field.name),
                            pat: pat,
                            is_shorthand: false,
                        }
                    }).collect();
                let has_more_fields = field_pats.len() < pats_len;
                ast::PatStruct(def_to_path(cx.tcx, vid), field_pats, has_more_fields)
            } else {
                ast::PatEnum(def_to_path(cx.tcx, vid), Some(pats.collect()))
            }
        }

        ty::ty_rptr(_, ty::mt { ty, mutbl }) => {
            match ty.sty {
                // Fixed-length vector behind a reference.
                ty::ty_vec(_, Some(n)) => match ctor {
                    &Single => {
                        assert_eq!(pats_len, n);
                        ast::PatVec(pats.collect(), None, vec!())
                    },
                    _ => unreachable!()
                },
                // Slice: length comes from the constructor.
                ty::ty_vec(_, None) => match ctor {
                    &Slice(n) => {
                        assert_eq!(pats_len, n);
                        ast::PatVec(pats.collect(), None, vec!())
                    },
                    _ => unreachable!()
                },
                // No useful witness for string patterns.
                ty::ty_str => ast::PatWild(ast::PatWildSingle),

                _ => {
                    assert_eq!(pats_len, 1);
                    ast::PatRegion(pats.nth(0).unwrap(), mutbl)
                }
            }
        }

        ty::ty_vec(_, Some(len)) => {
            assert_eq!(pats_len, len);
            ast::PatVec(pats.collect(), None, vec![])
        }

        _ => {
            match *ctor {
                ConstantValue(ref v) => ast::PatLit(const_val_to_expr(v)),
                _ => ast::PatWild(ast::PatWildSingle),
            }
        }
    };

    P(ast::Pat {
        id: 0,
        node: pat,
        span: DUMMY_SP
    })
}
/// Returns some constructor of `left_ty` that no pattern in the matrix's
/// first column uses, or `None` when every constructor is covered.
fn missing_constructor(cx: &MatchCheckCtxt, &Matrix(ref rows): &Matrix,
                       left_ty: Ty, max_slice_length: uint) -> Option<Constructor> {
    // Gather every constructor mentioned by the first column.
    let mut used_constructors: Vec<Constructor> = vec![];
    for row in rows.iter() {
        used_constructors.extend(
            pat_constructors(cx, row[0], left_ty, max_slice_length).into_iter());
    }
    // The first possible constructor not in that set is the answer.
    all_constructors(cx, left_ty, max_slice_length)
        .into_iter()
        .find(|c| !used_constructors.contains(c))
}
/// This determines the set of all possible constructors of a pattern matching
/// values of type `left_ty`. For vectors, this would normally be an infinite set
/// but is instead bounded by the maximum fixed length of slice patterns in
/// the column of patterns being analyzed.
fn all_constructors(cx: &MatchCheckCtxt, left_ty: Ty,
                    max_slice_length: uint) -> Vec<Constructor> {
    match left_ty.sty {
        // bool has exactly the two literal constructors.
        ty::ty_bool =>
            [true, false].iter().map(|b| ConstantValue(const_bool(*b))).collect(),

        ty::ty_rptr(_, ty::mt { ty, .. }) => match ty.sty {
            // Slices: one constructor per length up to the bound.
            ty::ty_vec(_, None) =>
                range_inclusive(0, max_slice_length).map(|length| Slice(length)).collect(),
            _ => vec!(Single)
        },

        // Enums: one constructor per variant.
        ty::ty_enum(eid, _) =>
            ty::enum_variants(cx.tcx, eid)
                .iter()
                .map(|va| Variant(va.id))
                .collect(),

        // Everything else has a single constructor.
        _ =>
            vec!(Single)
    }
}
// Algorithm from http://moscova.inria.fr/~maranget/papers/warn/index.html
//
// Whether a vector `v` of patterns is 'useful' in relation to a set of such
// vectors `m` is defined as there being a set of inputs that will match `v`
// but not any of the sets in `m`.
//
// This is used both for reachability checking (if a pattern isn't useful in
// relation to preceding patterns, it is not reachable) and exhaustiveness
// checking (if a wildcard pattern is useful in relation to a matrix, the
// matrix isn't exhaustive).

// Note: is_useful doesn't work on empty types, as the paper notes.
// So it assumes that v is non-empty.
fn is_useful(cx: &MatchCheckCtxt,
             matrix: &Matrix,
             v: &[&Pat],
             witness: WitnessPreference)
             -> Usefulness {
    let &Matrix(ref rows) = matrix;
    debug!("{:?}", matrix);
    // Base case: an empty matrix matches nothing, so any v is useful.
    if rows.len() == 0 {
        return match witness {
            ConstructWitness => UsefulWithWitness(vec!()),
            LeaveOutWitness => Useful
        };
    }
    // Base case: zero columns left and the matrix is non-empty => covered.
    if rows[0].len() == 0 {
        return NotUseful;
    }
    // Find a non-synthetic pattern to read the column's type from.
    let real_pat = match rows.iter().find(|r| (*r)[0].id != DUMMY_NODE_ID) {
        Some(r) => raw_pat(r[0]),
        None if v.len() == 0 => return NotUseful,
        None => v[0]
    };
    let left_ty = if real_pat.id == DUMMY_NODE_ID {
        ty::mk_nil(cx.tcx)
    } else {
        ty::pat_ty(cx.tcx, &*real_pat)
    };

    // Bound for the otherwise-infinite set of slice constructors.
    let max_slice_length = rows.iter().filter_map(|row| match row[0].node {
        ast::PatVec(ref before, _, ref after) => Some(before.len() + after.len()),
        _ => None
    }).max().map_or(0, |v| v + 1);

    let constructors = pat_constructors(cx, v[0], left_ty, max_slice_length);
    if constructors.is_empty() {
        // v[0] is a wildcard/binding: split on the matrix's constructors.
        match missing_constructor(cx, matrix, left_ty, max_slice_length) {
            None => {
                // All constructors are present: v is useful iff it is useful
                // after specializing by any one of them.
                all_constructors(cx, left_ty, max_slice_length).into_iter().map(|c| {
                    match is_useful_specialized(cx, matrix, v, c.clone(), left_ty, witness) {
                        UsefulWithWitness(pats) => UsefulWithWitness({
                            let arity = constructor_arity(cx, &c, left_ty);
                            // Reassemble the constructor's sub-witnesses into
                            // one witness pattern, padding with wildcards.
                            let mut result = {
                                let pat_slice = &pats[];
                                let subpats: Vec<_> = (0..arity).map(|i| {
                                    pat_slice.get(i).map_or(DUMMY_WILD_PAT, |p| &**p)
                                }).collect();
                                vec![construct_witness(cx, &c, subpats, left_ty)]
                            };
                            result.extend(pats.into_iter().skip(arity));
                            result
                        }),
                        result => result
                    }
                }).find(|result| result != &NotUseful).unwrap_or(NotUseful)
            },
            Some(constructor) => {
                // Some constructor is missing: default-specialize the matrix
                // (keep only rows whose head is a wildcard/binding).
                let matrix = rows.iter().filter_map(|r| {
                    if pat_is_binding_or_wild(&cx.tcx.def_map, raw_pat(r[0])) {
                        Some(r.tail().to_vec())
                    } else {
                        None
                    }
                }).collect();
                match is_useful(cx, &matrix, v.tail(), witness) {
                    UsefulWithWitness(pats) => {
                        // Witness the missing constructor with all-wildcard
                        // arguments, prepended to the remaining witnesses.
                        let arity = constructor_arity(cx, &constructor, left_ty);
                        let wild_pats: Vec<_> = repeat(DUMMY_WILD_PAT).take(arity).collect();
                        let enum_pat = construct_witness(cx, &constructor, wild_pats, left_ty);
                        let mut new_pats = vec![enum_pat];
                        new_pats.extend(pats.into_iter());
                        UsefulWithWitness(new_pats)
                    },
                    result => result
                }
            }
        }
    } else {
        // v[0] has concrete constructors: useful iff useful under any of them.
        constructors.into_iter().map(|c|
            is_useful_specialized(cx, matrix, v, c.clone(), left_ty, witness)
        ).find(|result| result != &NotUseful).unwrap_or(NotUseful)
    }
}
/// Specializes both the matrix and `v` by `ctor`, then recurses into
/// `is_useful`. If `v` itself cannot be specialized by `ctor`, it cannot be
/// useful under that constructor.
fn is_useful_specialized(cx: &MatchCheckCtxt, &Matrix(ref m): &Matrix,
                         v: &[&Pat], ctor: Constructor, lty: Ty,
                         witness: WitnessPreference) -> Usefulness {
    let arity = constructor_arity(cx, &ctor, lty);
    // Rows that don't match `ctor` drop out of the specialized matrix.
    let specialized_rows: Vec<_> = m.iter().filter_map(|row| {
        specialize(cx, &row[], &ctor, 0, arity)
    }).collect();
    let specialized_matrix = Matrix(specialized_rows);
    match specialize(cx, v, &ctor, 0, arity) {
        None => NotUseful,
        Some(vp) => is_useful(cx, &specialized_matrix, &vp[], witness)
    }
}
/// Determines the constructors that the given pattern can be specialized to.
///
/// In most cases, there's only one constructor that a specific pattern
/// represents, such as a specific enum variant or a specific literal value.
/// Slice patterns, however, can match slices of different lengths. For instance,
/// `[a, b, ..tail]` can match a slice of length 2, 3, 4 and so on.
///
/// On the other hand, a wild pattern and an identifier pattern cannot be
/// specialized in any way.
fn pat_constructors(cx: &MatchCheckCtxt, p: &Pat,
                    left_ty: Ty, max_slice_length: uint) -> Vec<Constructor> {
    let pat = raw_pat(p);
    match pat.node {
        // Const patterns must have been inlined by StaticInliner already.
        ast::PatIdent(..) =>
            match cx.tcx.def_map.borrow().get(&pat.id) {
                Some(&DefConst(..)) =>
                    cx.tcx.sess.span_bug(pat.span, "const pattern should've \
                                                    been rewritten"),
                Some(&DefStruct(_)) => vec!(Single),
                Some(&DefVariant(_, id, _)) => vec!(Variant(id)),
                // A plain binding: matches everything, no constructor.
                _ => vec!()
            },
        ast::PatEnum(..) =>
            match cx.tcx.def_map.borrow().get(&pat.id) {
                Some(&DefConst(..)) =>
                    cx.tcx.sess.span_bug(pat.span, "const pattern should've \
                                                    been rewritten"),
                Some(&DefVariant(_, id, _)) => vec!(Variant(id)),
                _ => vec!(Single)
            },
        ast::PatStruct(..) =>
            match cx.tcx.def_map.borrow().get(&pat.id) {
                Some(&DefConst(..)) =>
                    cx.tcx.sess.span_bug(pat.span, "const pattern should've \
                                                    been rewritten"),
                Some(&DefVariant(_, id, _)) => vec!(Variant(id)),
                _ => vec!(Single)
            },
        ast::PatLit(ref expr) =>
            vec!(ConstantValue(eval_const_expr(cx.tcx, &**expr))),
        ast::PatRange(ref lo, ref hi) =>
            vec!(ConstantRange(eval_const_expr(cx.tcx, &**lo), eval_const_expr(cx.tcx, &**hi))),
        ast::PatVec(ref before, ref slice, ref after) =>
            match left_ty.sty {
                // Fixed-length arrays have the single array constructor.
                ty::ty_vec(_, Some(_)) => vec!(Single),
                // A `..`-pattern matches every length from its minimum up to
                // the column-wide bound; otherwise exactly one length.
                _ => if slice.is_some() {
                    range_inclusive(before.len() + after.len(), max_slice_length)
                        .map(|length| Slice(length))
                        .collect()
                } else {
                    vec!(Slice(before.len() + after.len()))
                }
            },
        ast::PatBox(_) | ast::PatTup(_) | ast::PatRegion(..) =>
            vec!(Single),
        ast::PatWild(_) =>
            vec!(),
        ast::PatMac(_) =>
            cx.tcx.sess.bug("unexpanded macro")
    }
}
/// This computes the arity of a constructor. The arity of a constructor
/// is how many subpattern patterns of that constructor should be expanded to.
///
/// For instance, a tuple pattern (_, 42, Some([])) has the arity of 3.
/// A struct pattern's arity is the number of fields it contains, etc.
pub fn constructor_arity(cx: &MatchCheckCtxt, ctor: &Constructor, ty: Ty) -> uint {
    match ty.sty {
        ty::ty_tup(ref fs) => fs.len(),
        ty::ty_uniq(_) => 1,
        ty::ty_rptr(_, ty::mt { ty, .. }) => match ty.sty {
            ty::ty_vec(_, None) => match *ctor {
                // A slice constructor expands to one pattern per element.
                Slice(length) => length,
                // String-literal constants have no subpatterns.
                ConstantValue(_) => 0,
                _ => unreachable!()
            },
            ty::ty_str => 0,
            _ => 1
        },
        ty::ty_enum(eid, _) => {
            match *ctor {
                Variant(id) => enum_variant_with_id(cx.tcx, eid, id).args.len(),
                _ => unreachable!()
            }
        }
        ty::ty_struct(cid, _) => ty::lookup_struct_fields(cx.tcx, cid).len(),
        ty::ty_vec(_, Some(n)) => n,
        _ => 0
    }
}
/// Reports whether the value range `[from, to]` lies entirely inside the
/// range denoted by `ctor`. Returns `None` when the constant values cannot
/// be compared (mismatched types between arms).
fn range_covered_by_constructor(ctor: &Constructor,
                                from: &const_val, to: &const_val) -> Option<bool> {
    // A single value is the degenerate range [v, v]; `Single` covers all.
    let (c_from, c_to) = match *ctor {
        ConstantValue(ref value)        => (value, value),
        ConstantRange(ref from, ref to) => (from, to),
        Single                          => return Some(true),
        _                               => unreachable!()
    };
    // Both endpoint comparisons must succeed; otherwise propagate None.
    compare_const_vals(c_from, from).and_then(|cmp_from| {
        compare_const_vals(c_to, to).map(|cmp_to| {
            cmp_from != Ordering::Less && cmp_to != Ordering::Greater
        })
    })
}
/// This is the main specialization step. It expands the first pattern in the given row
/// into `arity` patterns based on the constructor. For most patterns, the step is trivial,
/// for instance tuple patterns are flattened and box patterns expand into their inner pattern.
///
/// OTOH, slice patterns with a subslice pattern (..tail) can be expanded into multiple
/// different patterns.
/// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing
/// fields filled with wild patterns.
///
/// Returns `None` when the pattern at `col` cannot match `constructor` at
/// all; otherwise the row with column `col` replaced by its subpatterns.
pub fn specialize<'a>(cx: &MatchCheckCtxt, r: &[&'a Pat],
                      constructor: &Constructor, col: uint, arity: uint) -> Option<Vec<&'a Pat>> {
    let &Pat {
        id: pat_id, ref node, span: pat_span
    } = raw_pat(r[col]);
    let head: Option<Vec<&Pat>> = match *node {
        // Wildcards match any constructor: expand to `arity` wildcards.
        ast::PatWild(_) =>
            Some(repeat(DUMMY_WILD_PAT).take(arity).collect()),

        ast::PatIdent(_, _, _) => {
            let opt_def = cx.tcx.def_map.borrow().get(&pat_id).cloned();
            match opt_def {
                Some(DefConst(..)) =>
                    cx.tcx.sess.span_bug(pat_span, "const pattern should've \
                                                    been rewritten"),
                // A unit variant path: matches iff it is this constructor.
                Some(DefVariant(_, id, _)) => if *constructor == Variant(id) {
                    Some(vec!())
                } else {
                    None
                },
                // A plain binding behaves like a wildcard.
                _ => Some(repeat(DUMMY_WILD_PAT).take(arity).collect())
            }
        }

        ast::PatEnum(_, ref args) => {
            let def = cx.tcx.def_map.borrow()[pat_id].clone();
            match def {
                DefConst(..) =>
                    cx.tcx.sess.span_bug(pat_span, "const pattern should've \
                                                    been rewritten"),
                DefVariant(_, id, _) if *constructor != Variant(id) => None,
                DefVariant(..) | DefStruct(..) => {
                    Some(match args {
                        &Some(ref args) => args.iter().map(|p| &**p).collect(),
                        // `Variant(..)` form: all subpatterns are wildcards.
                        &None => repeat(DUMMY_WILD_PAT).take(arity).collect(),
                    })
                }
                _ => None
            }
        }

        ast::PatStruct(_, ref pattern_fields, _) => {
            // Is this a struct or an enum variant?
            let def = cx.tcx.def_map.borrow()[pat_id].clone();
            let class_id = match def {
                DefConst(..) =>
                    cx.tcx.sess.span_bug(pat_span, "const pattern should've \
                                                    been rewritten"),
                DefVariant(_, variant_id, _) => if *constructor == Variant(variant_id) {
                    Some(variant_id)
                } else {
                    None
                },
                _ => {
                    // Assume this is a struct.
                    match ty::ty_to_def_id(node_id_to_type(cx.tcx, pat_id)) {
                        None => {
                            cx.tcx.sess.span_bug(pat_span,
                                                 "struct pattern wasn't of a \
                                                  type with a def ID?!")
                        }
                        Some(def_id) => Some(def_id),
                    }
                }
            };
            class_id.map(|variant_id| {
                let struct_fields = ty::lookup_struct_fields(cx.tcx, variant_id);
                // Emit one subpattern per declared field, in declaration
                // order; fields missing from the pattern become wildcards.
                let args = struct_fields.iter().map(|sf| {
                    match pattern_fields.iter().find(|f| f.node.ident.name == sf.name) {
                        Some(ref f) => &*f.node.pat,
                        _ => DUMMY_WILD_PAT
                    }
                }).collect();
                args
            })
        }

        ast::PatTup(ref args) =>
            Some(args.iter().map(|p| &**p).collect()),

        ast::PatBox(ref inner) | ast::PatRegion(ref inner, _) =>
            Some(vec![&**inner]),

        ast::PatLit(ref expr) => {
            let expr_value = eval_const_expr(cx.tcx, &**expr);
            match range_covered_by_constructor(constructor, &expr_value, &expr_value) {
                Some(true) => Some(vec![]),
                Some(false) => None,
                None => {
                    span_err!(cx.tcx.sess, pat_span, E0298, "mismatched types between arms");
                    None
                }
            }
        }

        ast::PatRange(ref from, ref to) => {
            let from_value = eval_const_expr(cx.tcx, &**from);
            let to_value = eval_const_expr(cx.tcx, &**to);
            match range_covered_by_constructor(constructor, &from_value, &to_value) {
                Some(true) => Some(vec![]),
                Some(false) => None,
                None => {
                    span_err!(cx.tcx.sess, pat_span, E0299, "mismatched types between arms");
                    None
                }
            }
        }

        ast::PatVec(ref before, ref slice, ref after) => {
            match *constructor {
                // Fixed-length vectors.
                Single => {
                    let mut pats: Vec<&Pat> = before.iter().map(|p| &**p).collect();
                    // The `..` region expands to as many wildcards as needed.
                    pats.extend(repeat(DUMMY_WILD_PAT).take(arity - before.len() - after.len()));
                    pats.extend(after.iter().map(|p| &**p));
                    Some(pats)
                },
                // `[a, .., b]` against a longer slice: pad the middle.
                Slice(length) if before.len() + after.len() <= length && slice.is_some() => {
                    let mut pats: Vec<&Pat> = before.iter().map(|p| &**p).collect();
                    pats.extend(repeat(DUMMY_WILD_PAT).take(arity - before.len() - after.len()));
                    pats.extend(after.iter().map(|p| &**p));
                    Some(pats)
                },
                // Exact-length slice pattern.
                Slice(length) if before.len() + after.len() == length => {
                    let mut pats: Vec<&Pat> = before.iter().map(|p| &**p).collect();
                    pats.extend(after.iter().map(|p| &**p));
                    Some(pats)
                },
                SliceWithSubslice(prefix, suffix)
                    if before.len() == prefix
                        && after.len() == suffix
                        && slice.is_some() => {
                    let mut pats: Vec<&Pat> = before.iter().map(|p| &**p).collect();
                    pats.extend(after.iter().map(|p| &**p));
                    Some(pats)
                }
                _ => None
            }
        }

        ast::PatMac(_) => {
            span_err!(cx.tcx.sess, pat_span, E0300, "unexpanded macro");
            None
        }
    };
    // Splice the expanded subpatterns in place of column `col`.
    head.map(|mut head| {
        head.push_all(&r[..col]);
        head.push_all(&r[col + 1..]);
        head
    })
}
/// Checks `let` (and desugared `for`) bindings: the pattern must be
/// irrefutable (E0005), and move-binding/`@`-pattern legality is enforced.
fn check_local(cx: &mut MatchCheckCtxt, loc: &ast::Local) {
    visit::walk_local(cx, loc);

    // Used only to word the diagnostic.
    let name = match loc.source {
        ast::LocalLet => "local",
        ast::LocalFor => "`for` loop"
    };

    let mut static_inliner = StaticInliner::new(cx.tcx, None);
    is_refutable(cx, &*static_inliner.fold_pat(loc.pat.clone()), |pat| {
        span_err!(cx.tcx.sess, loc.pat.span, E0005,
                  "refutable pattern in {} binding: `{}` not covered",
                  name, pat_to_string(pat)
        );
    });

    // Check legality of move bindings and `@` patterns.
    check_legality_of_move_bindings(cx, false, slice::ref_slice(&loc.pat));
    check_legality_of_bindings_in_at_patterns(cx, &*loc.pat);
}
/// Checks function argument patterns: each must be irrefutable (E0006) and
/// obey move-binding/`@`-pattern rules. Also installs the item's parameter
/// environment into the context for nested checks.
fn check_fn(cx: &mut MatchCheckCtxt,
            kind: FnKind,
            decl: &ast::FnDecl,
            body: &ast::Block,
            sp: Span,
            fn_id: NodeId) {
    match kind {
        // Closures inherit the enclosing item's environment.
        visit::FkFnBlock => {}
        _ => cx.param_env = ParameterEnvironment::for_item(cx.tcx, fn_id),
    }

    visit::walk_fn(cx, kind, decl, body, sp);

    for input in &decl.inputs {
        is_refutable(cx, &*input.pat, |pat| {
            span_err!(cx.tcx.sess, input.pat.span, E0006,
                      "refutable pattern in function argument: `{}` not covered",
                      pat_to_string(pat)
            );
        });
        check_legality_of_move_bindings(cx, false, slice::ref_slice(&input.pat));
        check_legality_of_bindings_in_at_patterns(cx, &*input.pat);
    }
}
/// If `pat` is refutable, invokes `refutable` with a witness pattern it
/// fails to cover and returns `Some` of the result; returns `None` when the
/// pattern is irrefutable. Implemented by asking whether a wildcard is
/// still useful against the single-row matrix `[pat]`.
fn is_refutable<A, F>(cx: &MatchCheckCtxt, pat: &Pat, refutable: F) -> Option<A> where
    F: FnOnce(&Pat) -> A,
{
    let pats = Matrix(vec!(vec!(pat)));
    match is_useful(cx, &pats, &[DUMMY_WILD_PAT], ConstructWitness) {
        UsefulWithWitness(pats) => {
            assert_eq!(pats.len(), 1);
            Some(refutable(&*pats[0]))
        },
        NotUseful => None,
        // ConstructWitness was requested, so plain Useful cannot occur.
        Useful => unreachable!()
    }
}
// Legality of move bindings checking
//
// Emits E0007 (by-move with sub-bindings), E0008 (by-move into a guard) and
// E0009 (mixing by-move and by-ref in one pattern) for every by-value
// binding whose type moves by default.
fn check_legality_of_move_bindings(cx: &MatchCheckCtxt,
                                   has_guard: bool,
                                   pats: &[P<Pat>]) {
    let tcx = cx.tcx;
    let def_map = &tcx.def_map;
    // Remember one by-ref binding span (if any) for the E0009 note.
    let mut by_ref_span = None;
    for pat in pats {
        pat_bindings(def_map, &**pat, |bm, _, span, _path| {
            match bm {
                ast::BindByRef(_) => {
                    by_ref_span = Some(span);
                }
                ast::BindByValue(_) => {
                }
            }
        })
    }

    let check_move = |p: &Pat, sub: Option<&Pat>| {
        // check legality of moving out of the enum

        // x @ Foo(..) is legal, but x @ Foo(y) isn't.
        if sub.map_or(false, |p| pat_contains_bindings(def_map, &*p)) {
            span_err!(cx.tcx.sess, p.span, E0007, "cannot bind by-move with sub-bindings");
        } else if has_guard {
            span_err!(cx.tcx.sess, p.span, E0008, "cannot bind by-move into a pattern guard");
        } else if by_ref_span.is_some() {
            span_err!(cx.tcx.sess, p.span, E0009,
                      "cannot bind by-move and by-ref in the same pattern");
            span_note!(cx.tcx.sess, by_ref_span.unwrap(), "by-ref binding occurs here");
        }
    };

    for pat in pats {
        ast_util::walk_pat(&**pat, |p| {
            if pat_is_binding(def_map, &*p) {
                match p.node {
                    ast::PatIdent(ast::BindByValue(_), _, ref sub) => {
                        // Only types that move by default are a problem;
                        // Copy types may be bound by value freely.
                        let pat_ty = ty::node_id_to_type(tcx, p.id);
                        if ty::type_moves_by_default(&cx.param_env, pat.span, pat_ty) {
                            check_move(p, sub.as_ref().map(|p| &**p));
                        }
                    }
                    ast::PatIdent(ast::BindByRef(_), _, _) => {
                    }
                    _ => {
                        cx.tcx.sess.span_bug(
                            p.span,
                            &format!("binding pattern {} is not an \
                                      identifier: {:?}",
                                     p.id,
                                     p.node)[]);
                    }
                }
            }
            true
        });
    }
}
/// Ensures that a pattern guard doesn't borrow by mutable reference or
/// assign.
///
/// Walks `guard` with an expression-use visitor whose delegate
/// (`MutationChecker`) reports E0301/E0302 on mutable borrows and writes.
fn check_for_mutation_in_guard<'a, 'tcx>(cx: &'a MatchCheckCtxt<'a, 'tcx>,
                                         guard: &ast::Expr) {
    let mut checker = MutationChecker {
        cx: cx,
    };
    let mut visitor = ExprUseVisitor::new(&mut checker,
                                          &checker.cx.param_env);
    visitor.walk_expr(guard);
}
// Expression-use delegate that flags mutable borrows and assignments
// occurring inside a pattern guard (see `check_for_mutation_in_guard`).
struct MutationChecker<'a, 'tcx: 'a> {
    cx: &'a MatchCheckCtxt<'a, 'tcx>,
}
impl<'a, 'tcx> Delegate<'tcx> for MutationChecker<'a, 'tcx> {
    // Matches, consumption, and declarations are all permitted in guards:
    // these callbacks intentionally do nothing.
    fn matched_pat(&mut self, _: &Pat, _: cmt, _: euv::MatchMode) {}
    fn consume(&mut self, _: NodeId, _: Span, _: cmt, _: ConsumeMode) {}
    fn consume_pat(&mut self, _: &Pat, _: cmt, _: ConsumeMode) {}
    // Reject mutable borrows inside the guard; shared and unique-immutable
    // borrows are fine.
    fn borrow(&mut self,
              _: NodeId,
              span: Span,
              _: cmt,
              _: Region,
              kind: BorrowKind,
              _: LoanCause) {
        match kind {
            MutBorrow => {
                span_err!(self.cx.tcx.sess, span, E0301,
                          "cannot mutably borrow in a pattern guard")
            }
            ImmBorrow | UniqueImmBorrow => {}
        }
    }
    fn decl_without_init(&mut self, _: NodeId, _: Span) {}
    // Reject assignments inside the guard; initializing writes are allowed.
    fn mutate(&mut self, _: NodeId, span: Span, _: cmt, mode: MutateMode) {
        match mode {
            JustWrite | WriteAndRead => {
                span_err!(self.cx.tcx.sess, span, E0302, "cannot assign in a pattern guard")
            }
            Init => {}
        }
    }
}
/// Forbids bindings in `@` patterns. This is necessary for memory safety,
/// because of the way rvalues are handled in the borrow check. (See issue
/// #14587.)
///
/// Delegates to `AtBindingPatternVisitor`, which tracks whether the walk is
/// currently inside the sub-pattern of an `ident @ subpattern`.
fn check_legality_of_bindings_in_at_patterns(cx: &MatchCheckCtxt, pat: &Pat) {
    AtBindingPatternVisitor { cx: cx, bindings_allowed: true }.visit_pat(pat);
}
// Pattern visitor backing `check_legality_of_bindings_in_at_patterns`.
struct AtBindingPatternVisitor<'a, 'b:'a, 'tcx:'b> {
    cx: &'a MatchCheckCtxt<'b, 'tcx>,
    // False while walking the sub-pattern of an `ident @ subpattern`,
    // where further bindings are illegal (E0303).
    bindings_allowed: bool
}
impl<'a, 'b, 'tcx, 'v> Visitor<'v> for AtBindingPatternVisitor<'a, 'b, 'tcx> {
    fn visit_pat(&mut self, pat: &Pat) {
        // Any binding encountered while inside an `@` sub-pattern is an error.
        if !self.bindings_allowed && pat_is_binding(&self.cx.tcx.def_map, pat) {
            span_err!(self.cx.tcx.sess, pat.span, E0303,
                      "pattern bindings are not allowed \
                       after an `@`");
        }
        match pat.node {
            // `ident @ subpattern`: forbid bindings for the duration of the
            // sub-pattern walk, then restore the previous state (the flag is
            // saved/restored rather than set, to handle nested `@`s).
            ast::PatIdent(_, _, Some(_)) => {
                let bindings_were_allowed = self.bindings_allowed;
                self.bindings_allowed = false;
                visit::walk_pat(self, pat);
                self.bindings_allowed = bindings_were_allowed;
            }
            _ => visit::walk_pat(self, pat),
        }
    }
}
| 38.543697 | 98 | 0.488194 |
5d69fec58cdc7d592eb51e7eb5ae7b3d24d7aab7 | 13,635 | // TODO: CAUTION: this code is no longer usable
// use crate::ir::{function::*, module::*, opcode::*, types::*, value::*};
use crate::ir::{function::*, module::*, types::*};
use rustc_hash::FxHashMap;
#[derive(Debug, Clone, PartialEq)]
// A runtime value produced by the (currently disabled) IR interpreter.
pub enum ConcreteValue {
    // Absence of a value (e.g. a void function's result).
    Void,
    // 1-bit integer, used for comparison results.
    Int1(bool),
    // 32-bit signed integer.
    Int32(i32),
    // Raw pointer into interpreter-allocated memory, tagged with the
    // pointed-to IR type.
    Mem(*mut u8, Type),
}
// Tree-walking interpreter over a sericum IR module. Fields are
// underscore-prefixed because the evaluation loop is currently disabled
// (see the module-level TODO).
pub struct Interpreter<'a> {
    // The module whose functions would be executed.
    _module: &'a Module,
    // Built-in runtime functions, keyed by their IR-visible names.
    _internal_func: FxHashMap<String, fn(&[ConcreteValue]) -> ConcreteValue>,
}
impl<'a> Interpreter<'a> {
    /// Creates an interpreter for `module` and registers the built-in
    /// runtime functions (currently only `sericum.println.i32`).
    pub fn new(module: &'a Module) -> Self {
        Self {
            _module: module,
            _internal_func: {
                vec![("sericum.println.i32".to_string(), sericum_println_i32 as _)]
                    .into_iter()
                    .collect::<FxHashMap<_, _>>()
            },
        }
    }

    /// Runs the function identified by `_id` with `_args`, returning its
    /// result.
    ///
    /// The original tree-walking evaluation loop predates the current IR
    /// and no longer compiles (see the module-level TODO); it was removed
    /// from this method and remains recoverable from version control.
    /// Calling this therefore always panics.
    pub fn run_function(&mut self, _id: FunctionId, _args: Vec<ConcreteValue>) -> ConcreteValue {
        panic!("Interpreter::run_function is disabled: the interpreter no longer matches the current IR")
    }
}
impl ConcreteValue {
pub fn add(self, v: ConcreteValue) -> Self {
match (self, v) {
(ConcreteValue::Int32(i1), ConcreteValue::Int32(i2)) => ConcreteValue::Int32(i1 + i2),
_ => unimplemented!(),
}
}
pub fn sub(self, v: ConcreteValue) -> Self {
match (self, v) {
(ConcreteValue::Int32(i1), ConcreteValue::Int32(i2)) => ConcreteValue::Int32(i1 - i2),
_ => unimplemented!(),
}
}
pub fn mul(self, v: ConcreteValue) -> Self {
match (self, v) {
(ConcreteValue::Int32(i1), ConcreteValue::Int32(i2)) => ConcreteValue::Int32(i1 * i2),
_ => unimplemented!(),
}
}
pub fn rem(self, v: ConcreteValue) -> Self {
match (self, v) {
(ConcreteValue::Int32(i1), ConcreteValue::Int32(i2)) => ConcreteValue::Int32(i1 % i2),
_ => unimplemented!(),
}
}
pub fn eq(self, v: ConcreteValue) -> Self {
match (self, v) {
(ConcreteValue::Int32(i1), ConcreteValue::Int32(i2)) => ConcreteValue::Int1(i1 == i2),
(ConcreteValue::Int1(i1), ConcreteValue::Int1(i2)) => ConcreteValue::Int1(i1 == i2),
_ => unimplemented!(),
}
}
pub fn le(self, v: ConcreteValue) -> Self {
match (self, v) {
(ConcreteValue::Int32(i1), ConcreteValue::Int32(i2)) => ConcreteValue::Int1(i1 <= i2),
_ => unimplemented!(),
}
}
pub fn lt(self, v: ConcreteValue) -> Self {
match (self, v) {
(ConcreteValue::Int32(i1), ConcreteValue::Int32(i2)) => ConcreteValue::Int1(i1 < i2),
_ => unimplemented!(),
}
}
pub fn i1_as_bool(self) -> Option<bool> {
match self {
ConcreteValue::Int1(b) => Some(b),
_ => None,
}
}
}
/// Built-in `sericum.println.i32`: prints the single `Int32` argument
/// followed by a newline and returns `Void`. Panics (unimplemented) on
/// any other argument kind.
fn sericum_println_i32(args: &[ConcreteValue]) -> ConcreteValue {
    if let ConcreteValue::Int32(i) = args[0] {
        println!("{}", i);
    } else {
        unimplemented!()
    }
    ConcreteValue::Void
}
| 44.704918 | 101 | 0.337367 |
266f7cdde11af0281836105808fcd27b6628c018 | 681 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Single-field record used by this historical compiler test; `int` is the
// pre-1.0 machine-sized signed integer type the test was written against.
struct Rec {
    f: int
}
// Exercises a `ref mut` binding in an irrefutable struct pattern: the match
// borrows `f` mutably out of `*x` and increments it in place.
fn destructure(x: &mut Rec) {
    match *x {
        Rec {f: ref mut f} => *f += 1
    }
}
pub fn main() {
    let mut v = Rec {f: 22};
    destructure(&mut v);
    // `destructure` incremented the field through its `ref mut` binding.
    assert_eq!(v.f, 23);
}
| 26.192308 | 68 | 0.660793 |
919b027232c0c2e85a2975f58839711766ae6360 | 2,921 | //! Helper module to compute a CRC32 checksum
use crc32fast::Hasher;
use std::io;
use std::pin::Pin;
use std::task::{Context, Poll};
use tokio::io::AsyncRead;
/// Reader that validates the CRC32 when it reaches the EOF.
pub struct Crc32Reader<R: AsyncRead + Unpin> {
    // Underlying byte source.
    reader: R,
    // Running CRC32 over every byte read so far.
    hasher: Hasher,
    // Expected checksum, compared against `hasher` at EOF.
    check: u32,
}
impl<R: AsyncRead + Unpin> Crc32Reader<R> {
    /// Get a new Crc32Reader which check the inner reader against checksum.
    pub fn new(reader: R, checksum: u32) -> Crc32Reader<R> {
        Crc32Reader {
            check: checksum,
            hasher: Hasher::new(),
            reader,
        }
    }

    /// True when the CRC32 accumulated so far equals the expected checksum.
    fn check_matches(&self) -> bool {
        // `finalize` consumes the hasher, so work on a clone to keep
        // accumulating on subsequent reads.
        self.hasher.clone().finalize() == self.check
    }

    /// Unwraps the adapter, returning the underlying reader.
    pub fn into_inner(self) -> R {
        self.reader
    }
}
impl<R: AsyncRead + Unpin> AsyncRead for Crc32Reader<R> {
    // Delegates to the inner reader, folding every byte produced into the
    // running CRC32. A zero-byte read into a non-empty buffer signals EOF;
    // at that point the accumulated checksum must equal `check`, otherwise
    // the read fails with an "Invalid checksum" error.
    fn poll_read(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut [u8],
    ) -> Poll<io::Result<usize>> {
        let this = self.get_mut();
        let poll = { Pin::new(&mut this.reader).poll_read(cx, buf) };
        match poll {
            // EOF with a mismatched checksum: surface the corruption. The
            // `!buf.is_empty()` guard keeps zero-sized reads (which also
            // return 0) from triggering validation early.
            Poll::Ready(Ok(0)) if !buf.is_empty() && !this.check_matches() => Poll::Ready(Err(
                io::Error::new(io::ErrorKind::Other, "Invalid checksum"),
            )),
            Poll::Ready(Ok(n)) => {
                // Hash exactly the bytes that were just read.
                this.hasher.update(&buf[0..n]);
                Poll::Ready(Ok(n))
            }
            // Pending or error: pass through unchanged.
            poll => poll,
        }
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use tokio::io::AsyncReadExt;
    // An empty reader with checksum 0 (CRC32 of no data) reads cleanly,
    // while any non-zero expected checksum must fail at EOF.
    #[tokio::test]
    async fn test_empty_reader() {
        let data: &[u8] = b"";
        let mut buf = [0; 1];
        let mut reader = Crc32Reader::new(data, 0);
        assert_eq!(reader.read(&mut buf).await.unwrap(), 0);
        let mut reader = Crc32Reader::new(data, 1);
        assert!(reader
            .read(&mut buf)
            .await
            .unwrap_err()
            .to_string()
            .contains("Invalid checksum"));
    }
    // Reading one byte at a time still accumulates the correct CRC32
    // (0x9be3e0a3 is the checksum of "1234").
    #[tokio::test]
    async fn test_byte_by_byte() {
        let data: &[u8] = b"1234";
        let mut buf = [0; 1];
        let mut reader = Crc32Reader::new(data, 0x9be3e0a3);
        assert_eq!(reader.read(&mut buf).await.unwrap(), 1);
        assert_eq!(reader.read(&mut buf).await.unwrap(), 1);
        assert_eq!(reader.read(&mut buf).await.unwrap(), 1);
        assert_eq!(reader.read(&mut buf).await.unwrap(), 1);
        assert_eq!(reader.read(&mut buf).await.unwrap(), 0);
        // Can keep reading 0 bytes after the end
        assert_eq!(reader.read(&mut buf).await.unwrap(), 0);
    }
    // A zero-length destination buffer returns 0 without tripping the
    // EOF checksum validation.
    #[tokio::test]
    async fn test_zero_read() {
        let data: &[u8] = b"1234";
        let mut buf = [0; 5];
        let mut reader = Crc32Reader::new(data, 0x9be3e0a3);
        assert_eq!(reader.read(&mut buf[..0]).await.unwrap(), 0);
        assert_eq!(reader.read(&mut buf).await.unwrap(), 4);
    }
}
| 28.359223 | 94 | 0.542622 |
f9e1b13b197ecf8c397f818c263a3dc8ebecb4cb | 8,064 | pub mod biblio;
pub mod dfn;
pub mod reference;
use kuchiki::NodeRef;
use std::collections::HashMap;
use crate::config::{self, DFN_SELECTOR};
use crate::html::{self, Attr};
use crate::spec::Spec;
use reference::query::Query;
/// Resolves `<a data-link-type="biblio">` links: strips the conventional
/// `[...]` wrapper from the link text, points the link at the in-document
/// biblio entry, and records the entry as normative or informative
/// according to its `data-biblio-type` attribute.
pub fn process_biblio_links(doc: &mut Spec) {
    for biblio_link_el in html::select(doc.dom(), "a[data-link-type='biblio']") {
        let biblio_type = html::get_attr(&biblio_link_el, "data-biblio-type").unwrap();

        // Strip the brackets only when both are present. Using
        // `strip_prefix`/`strip_suffix` instead of byte-range slicing avoids
        // a panic when the first or last character is multi-byte.
        let mut link_text = html::get_text_content(&biblio_link_el);
        if let Some(stripped) = link_text
            .strip_prefix('[')
            .and_then(|rest| rest.strip_suffix(']'))
        {
            link_text = stripped.to_owned();
        }

        let mut biblio = doc.biblio_manager.get_biblio(&link_text);

        if let Some(ref mut biblio) = biblio {
            // Point the link at the generated biblio section anchor.
            let name = config::generate_name(&link_text);
            html::insert_attr(&biblio_link_el, "href", format!("#biblio-{}", name));

            biblio.link_text = link_text;

            // File the entry under the requested normativity bucket.
            let storage = match biblio_type.as_str() {
                "normative" => &mut doc.normative_biblios,
                "informative" => &mut doc.informative_biblios,
                _ => die!("Unknown biblio type: {}.", biblio_type),
            };
            storage.insert(biblio.link_text.to_owned(), biblio.to_owned());
        }
    }
}
/// Resolves auto-links (`<a>` elements without an `href`, excluding biblio
/// links) against the reference database, records references into other
/// specs, and decorates each link with its resolved `href` and a
/// `ref-for-…` id (deduplicated at the end).
pub fn process_auto_links(doc: &mut Spec) {
    for auto_link_el in html::select(doc.dom(), "a:not([href]):not([data-link-type='biblio'])") {
        let link_type = determine_link_type(&auto_link_el);
        html::insert_attr(&auto_link_el, "data-link-type", &link_type);

        let link_text = html::get_text_content(&auto_link_el);
        // An optional `data-link-for` attribute narrows the lookup to
        // particular "for" values.
        let link_fors = html::get_attr(&auto_link_el, "data-link-for")
            .map(|link_for| config::split_for_vals(&link_for));

        let reference = doc.reference_manager.get_reference(Query {
            link_type: &link_type,
            link_text: &link_text,
            status: None,
            link_fors: &link_fors,
            explicit_for: doc.md.assume_explicit_for,
        });

        if let Some(ref reference_spec) = reference.spec {
            // Record references that resolve into a different spec.
            if let Some(ref doc_spec) = doc.reference_manager.spec {
                if reference_spec.to_lowercase() != doc_spec.to_lowercase() {
                    doc.external_references_used
                        .entry(reference_spec.to_owned())
                        .or_default()
                        .insert(link_text, reference.to_owned());
                }
            }
            // Any spec we link into becomes a normative biblio entry.
            if let Some(biblio) = doc.biblio_manager.get_biblio(&reference_spec) {
                doc.normative_biblios
                    .insert(biblio.link_text.to_owned(), biblio);
            }
        }

        // Decorate auto-link.
        html::insert_attr(&auto_link_el, "href", &reference.url);
        let name = reference.url.rsplitn(2, '#').next().unwrap();
        html::insert_attr(&auto_link_el, "id", format!("ref-for-{}", name));
    }
    // Several links may target the same name, so the generated
    // `ref-for-…` ids need deduplication.
    html::dedup_ids(doc.dom());
}
/// Returns the link's explicit `data-link-type` attribute, falling back to
/// the default type "dfn" when the attribute is absent.
fn determine_link_type(link_el: &NodeRef) -> String {
    html::get_attr(link_el, "data-link-type").unwrap_or_else(|| "dfn".to_owned())
}
// Appends a self-link anchor to every numbered heading that is not itself a
// definition, then builds the dfn panels for all definitions.
pub fn add_self_links(doc: &mut Spec) {
    let dfn_els = html::select(doc.dom(), &DFN_SELECTOR).collect::<Vec<NodeRef>>();
    // Headings before the first numbered section (front matter) get no
    // self-link.
    let mut found_first_numbered_section = false;
    for heading_el in html::select(doc.dom(), "h2, h3, h4, h5, h6") {
        found_first_numbered_section |= html::get_attr(&heading_el, "data-level").is_some();
        // Dfn headings are handled by `add_dfn_panels` instead.
        if dfn_els.contains(&heading_el) || !found_first_numbered_section {
            continue;
        }
        // Append self-link.
        if let Some(id) = html::get_attr(&heading_el, "id") {
            let a_el = html::new_a(
                btreemap! {
                    "class" => "self-link".to_owned(),
                    "href" => format!("#{}", id),
                },
                "",
            );
            heading_el.append(a_el);
        }
    }
    add_dfn_panels(doc, &dfn_els);
}
fn add_dfn_panels(doc: &mut Spec, dfn_els: &[NodeRef]) {
// id => <a> elements with this id
let mut all_link_els: HashMap<String, Vec<NodeRef>> = HashMap::new();
for a_el in html::select(doc.dom(), "a") {
let href = match html::get_attr(&a_el, "href") {
Some(href) => href,
None => continue,
};
if !href.starts_with('#') {
continue;
}
all_link_els
.entry(href[1..].to_owned())
.or_default()
.push(a_el.to_owned());
}
let mut at_least_one_panel = false;
for dfn_el in dfn_els {
let id = match html::get_attr(dfn_el, "id") {
Some(id) => id,
None => continue,
};
// section name => <a> elements
let mut section_els: HashMap<String, Vec<NodeRef>> = HashMap::new();
if let Some(link_els) = all_link_els.get(&id) {
for link_el in link_els {
if let Some(section) = html::get_section(link_el) {
section_els
.entry(section)
.or_default()
.push(link_el.to_owned());
}
}
}
if section_els.is_empty() {
// Insert a self-link.
let a_el = html::new_a(
btreemap! {
"class" => "self-link".to_owned(),
"href" => format!("#{}", id),
},
"",
);
dfn_el.append(a_el);
continue;
}
at_least_one_panel = true;
html::add_class(dfn_el, "dfn-paneled");
let aside_el = html::new_element(
"aside",
btreemap! {
"class" => "dfn-panel",
"data-for" => &id,
},
);
aside_el.append({
let b_el = html::new_element("b", None::<Attr>);
b_el.append(html::new_a(
btreemap! {
"href" => format!("#{}", id)
},
format!("#{}", id),
));
b_el
});
aside_el.append({
let b_el = html::new_element("b", None::<Attr>);
b_el.append(html::new_text("Referenced in:"));
b_el
});
let ul_el = html::new_element("ul", None::<Attr>);
for (section, section_els) in section_els {
let li_el = html::new_element("li", None::<Attr>);
for (i, section_el) in section_els.iter().enumerate() {
let section_el_id = match html::get_attr(§ion_el, "id") {
Some(section_el_id) => section_el_id,
None => {
let id = format!("ref-for-{}", id);
html::insert_attr(§ion_el, "id", &id);
id
}
};
let a_el = match i {
0 => html::new_a(
btreemap! {
"href" => format!("#{}", section_el_id)
},
§ion,
),
_ => html::new_a(
btreemap! {
"href" => format!("#{}", section_el_id)
},
format!("({})", i + 1),
),
};
li_el.append(a_el);
}
ul_el.append(li_el);
}
aside_el.append(ul_el);
doc.body().append(aside_el);
}
if at_least_one_panel {
doc.extra_styles
.insert("dfn-panel", include_str!("../style/dfn-panel.css"));
doc.extra_scripts
.insert("dfn-panel", include_str!("../script/dfn-panel.js"));
}
}
| 31.377432 | 97 | 0.489583 |
08bd79dda4ff0d15bd36c6532d7b0618694a784f | 17,071 | //! Shared setup for nrf52dk boards.
#![no_std]
#[allow(unused_imports)]
use kernel::{create_capability, debug, debug_gpio, debug_verbose, static_init};
use capsules::analog_comparator;
use capsules::virtual_alarm::VirtualMuxAlarm;
use capsules::virtual_spi::MuxSpiMaster;
use kernel::capabilities;
use kernel::common::dynamic_deferred_call::{DynamicDeferredCall, DynamicDeferredCallClientState};
use kernel::component::Component;
use kernel::hil;
use nrf52::gpio::Pin;
use nrf52::rtc::Rtc;
use nrf52::uicr::Regulator0Output;
pub mod nrf52_components;
use nrf52_components::ble::BLEComponent;
use nrf52_components::ieee802154::Ieee802154Component;
// Constants related to the configuration of the 15.4 network stack
// Default short source MAC address used by the 802.15.4 driver.
const SRC_MAC: u16 = 0xf00f;
// Default 802.15.4 PAN identifier.
const PAN_ID: u16 = 0xABCD;
/// Pins for SPI for the flash chip MX25R6435F
#[derive(Debug)]
pub struct SpiMX25R6435FPins {
    // Chip-select pin for the flash chip on the shared SPI bus.
    chip_select: Pin,
    // Active-low write-protect pin.
    write_protect_pin: Pin,
    // Active-low hold pin.
    hold_pin: Pin,
}
impl SpiMX25R6435FPins {
    /// Bundles the three control pins the MX25R6435F driver needs.
    pub fn new(chip_select: Pin, write_protect_pin: Pin, hold_pin: Pin) -> Self {
        Self {
            chip_select,
            write_protect_pin,
            hold_pin,
        }
    }
}
/// Pins for the SPI driver
#[derive(Debug)]
pub struct SpiPins {
    // Master-out/slave-in data pin.
    mosi: Pin,
    // Master-in/slave-out data pin.
    miso: Pin,
    // SPI clock pin.
    clk: Pin,
}
impl SpiPins {
    /// Bundles the three SPI bus pins for the board's SPIM0 peripheral.
    pub fn new(mosi: Pin, miso: Pin, clk: Pin) -> Self {
        Self { mosi, miso, clk }
    }
}
/// Pins for the UART
#[derive(Debug)]
pub struct UartPins {
    // Request-to-send flow-control pin (optional).
    rts: Option<Pin>,
    // Transmit-data pin.
    txd: Pin,
    // Clear-to-send flow-control pin (optional).
    cts: Option<Pin>,
    // Receive-data pin.
    rxd: Pin,
}
impl UartPins {
    /// Bundles the UART pins; `rts`/`cts` are `None` when hardware flow
    /// control is unused.
    pub fn new(rts: Option<Pin>, txd: Pin, cts: Option<Pin>, rxd: Pin) -> Self {
        Self { rts, txd, cts, rxd }
    }
}
// Console transport selection: physical UART pins, or a Segger RTT
// channel over the debug probe.
pub enum UartChannel<'a> {
    Pins(UartPins),
    Rtt(components::segger_rtt::SeggerRttMemoryRefs<'a>),
}
/// Supported drivers by the platform
///
/// Each field is a capsule instance exposed to userspace through
/// `with_driver` (keyed by the capsule's `DRIVER_NUM`).
pub struct Platform {
    ble_radio: &'static capsules::ble_advertising_driver::BLE<
        'static,
        nrf52::ble_radio::Radio,
        VirtualMuxAlarm<'static, Rtc<'static>>,
    >,
    // Present only on boards configured with 802.15.4 support.
    ieee802154_radio: Option<&'static capsules::ieee802154::RadioDriver<'static>>,
    button: &'static capsules::button::Button<'static, nrf52::gpio::GPIOPin>,
    pconsole: &'static capsules::process_console::ProcessConsole<
        'static,
        components::process_console::Capability,
    >,
    console: &'static capsules::console::Console<'static>,
    gpio: &'static capsules::gpio::GPIO<'static, nrf52::gpio::GPIOPin>,
    led: &'static capsules::led::LED<'static, nrf52::gpio::GPIOPin>,
    rng: &'static capsules::rng::RngDriver<'static>,
    temp: &'static capsules::temperature::TemperatureSensor<'static>,
    ipc: kernel::ipc::IPC,
    analog_comparator:
        &'static capsules::analog_comparator::AnalogComparator<'static, nrf52::acomp::Comparator>,
    alarm: &'static capsules::alarm::AlarmDriver<
        'static,
        capsules::virtual_alarm::VirtualMuxAlarm<'static, nrf52::rtc::Rtc<'static>>,
    >,
    // The nRF52dk does not have the flash chip on it, so we make this optional.
    nonvolatile_storage:
        Option<&'static capsules::nonvolatile_storage_driver::NonvolatileStorage<'static>>,
}
impl kernel::Platform for Platform {
    // Maps a userspace driver number to the corresponding capsule and hands
    // it to `f`, or `None` when this board does not provide that driver.
    fn with_driver<F, R>(&self, driver_num: usize, f: F) -> R
    where
        F: FnOnce(Option<&dyn kernel::Driver>) -> R,
    {
        match driver_num {
            capsules::console::DRIVER_NUM => f(Some(self.console)),
            capsules::gpio::DRIVER_NUM => f(Some(self.gpio)),
            capsules::alarm::DRIVER_NUM => f(Some(self.alarm)),
            capsules::led::DRIVER_NUM => f(Some(self.led)),
            capsules::button::DRIVER_NUM => f(Some(self.button)),
            capsules::rng::DRIVER_NUM => f(Some(self.rng)),
            capsules::ble_advertising_driver::DRIVER_NUM => f(Some(self.ble_radio)),
            capsules::ieee802154::DRIVER_NUM => match self.ieee802154_radio {
                Some(radio) => f(Some(radio)),
                None => f(None),
            },
            capsules::temperature::DRIVER_NUM => f(Some(self.temp)),
            capsules::analog_comparator::DRIVER_NUM => f(Some(self.analog_comparator)),
            capsules::nonvolatile_storage_driver::DRIVER_NUM => {
                // NOTE(review): `map_or(None, Some)` looks like a no-op but
                // appears to exist to coerce `&NonvolatileStorage` into
                // `&dyn kernel::Driver` — confirm before simplifying to `.map`.
                f(self.nonvolatile_storage.map_or(None, |nv| Some(nv)))
            }
            kernel::ipc::DRIVER_NUM => f(Some(&self.ipc)),
            _ => f(None),
        }
    }
}
/// Generic function for starting an nrf52dk board.
///
/// Configures UICR (reset pins, regulator output, NFC pin protection),
/// brings up clocks and all capsules, constructs the `Platform`, loads
/// userspace processes, and enters the kernel main loop (never returns
/// normally).
///
/// # Safety
///
/// Writes chip registers and kernel statics directly; callers must ensure
/// it runs once, single-threaded, during board initialization.
#[inline]
pub unsafe fn setup_board<I: nrf52::interrupt_service::InterruptService>(
    board_kernel: &'static kernel::Kernel,
    button_rst_pin: Pin,
    gpio_port: &'static nrf52::gpio::Port,
    gpio: &'static capsules::gpio::GPIO<'static, nrf52::gpio::GPIOPin>,
    debug_pin1_index: Pin,
    debug_pin2_index: Pin,
    debug_pin3_index: Pin,
    led: &'static capsules::led::LED<'static, nrf52::gpio::GPIOPin>,
    uart_channel: UartChannel<'static>,
    spi_pins: &SpiPins,
    mx25r6435f: &Option<SpiMX25R6435FPins>,
    button: &'static capsules::button::Button<'static, nrf52::gpio::GPIOPin>,
    ieee802154: bool,
    app_memory: &mut [u8],
    process_pointers: &'static mut [Option<&'static dyn kernel::procs::ProcessType>],
    app_fault_response: kernel::procs::FaultResponse,
    reg_vout: Regulator0Output,
    nfc_as_gpios: bool,
    chip: &'static nrf52::chip::NRF52<I>,
) {
    // Make non-volatile memory writable and activate the reset button
    let uicr = nrf52::uicr::Uicr::new();
    // Check if we need to erase UICR memory to re-program it
    // This only needs to be done when a bit needs to be flipped from 0 to 1.
    let psel0_reset: u32 = uicr.get_psel0_reset_pin().map_or(0, |pin| pin as u32);
    let psel1_reset: u32 = uicr.get_psel1_reset_pin().map_or(0, |pin| pin as u32);
    let mut erase_uicr = ((!psel0_reset & (button_rst_pin as u32))
        | (!psel1_reset & (button_rst_pin as u32))
        | (!(uicr.get_vout() as u32) & (reg_vout as u32)))
        != 0;
    // Only enabling the NFC pin protection requires an erase.
    if nfc_as_gpios {
        erase_uicr |= !uicr.is_nfc_pins_protection_enabled();
    }
    if erase_uicr {
        nrf52::nvmc::NVMC.erase_uicr();
    }
    nrf52::nvmc::NVMC.configure_writeable();
    while !nrf52::nvmc::NVMC.is_ready() {}
    let mut needs_soft_reset: bool = false;
    // Configure reset pins
    if uicr
        .get_psel0_reset_pin()
        .map_or(true, |pin| pin != button_rst_pin)
    {
        uicr.set_psel0_reset_pin(button_rst_pin);
        while !nrf52::nvmc::NVMC.is_ready() {}
        needs_soft_reset = true;
    }
    if uicr
        .get_psel1_reset_pin()
        .map_or(true, |pin| pin != button_rst_pin)
    {
        uicr.set_psel1_reset_pin(button_rst_pin);
        while !nrf52::nvmc::NVMC.is_ready() {}
        needs_soft_reset = true;
    }
    // Configure voltage regulator output
    if uicr.get_vout() != reg_vout {
        uicr.set_vout(reg_vout);
        while !nrf52::nvmc::NVMC.is_ready() {}
        needs_soft_reset = true;
    }
    // Check if we need to free the NFC pins for GPIO
    if nfc_as_gpios {
        uicr.set_nfc_pins_protection(true);
        while !nrf52::nvmc::NVMC.is_ready() {}
        needs_soft_reset = true;
    }
    // Any modification of UICR needs a soft reset for the changes to be taken into account.
    if needs_soft_reset {
        cortexm4::scb::reset();
    }
    // Create capabilities that the board needs to call certain protected kernel
    // functions.
    let process_management_capability =
        create_capability!(capabilities::ProcessManagementCapability);
    let main_loop_capability = create_capability!(capabilities::MainLoopCapability);
    let memory_allocation_capability = create_capability!(capabilities::MemoryAllocationCapability);
    // Configure kernel debug gpios as early as possible
    kernel::debug::assign_gpios(
        Some(&gpio_port[debug_pin1_index]),
        Some(&gpio_port[debug_pin2_index]),
        Some(&gpio_port[debug_pin3_index]),
    );
    // The RTC backs the alarm mux shared by all timer users below.
    let rtc = &nrf52::rtc::RTC;
    rtc.start();
    let mux_alarm = components::alarm::AlarmMuxComponent::new(rtc)
        .finalize(components::alarm_mux_component_helper!(nrf52::rtc::Rtc));
    let alarm = components::alarm::AlarmDriverComponent::new(board_kernel, mux_alarm)
        .finalize(components::alarm_component_helper!(nrf52::rtc::Rtc));
    // Select the console transport: hardware UART pins or Segger RTT.
    let channel: &dyn kernel::hil::uart::Uart = match uart_channel {
        UartChannel::Pins(uart_pins) => {
            nrf52::uart::UARTE0.initialize(
                nrf52::pinmux::Pinmux::new(uart_pins.txd as u32),
                nrf52::pinmux::Pinmux::new(uart_pins.rxd as u32),
                uart_pins.cts.map(|x| nrf52::pinmux::Pinmux::new(x as u32)),
                uart_pins.rts.map(|x| nrf52::pinmux::Pinmux::new(x as u32)),
            );
            &nrf52::uart::UARTE0
        }
        UartChannel::Rtt(rtt_memory) => {
            let rtt = components::segger_rtt::SeggerRttComponent::new(mux_alarm, rtt_memory)
                .finalize(components::segger_rtt_component_helper!(nrf52::rtc::Rtc));
            rtt
        }
    };
    // Deferred-call plumbing, handed to the UART mux below.
    let dynamic_deferred_call_clients =
        static_init!([DynamicDeferredCallClientState; 2], Default::default());
    let dynamic_deferred_caller = static_init!(
        DynamicDeferredCall,
        DynamicDeferredCall::new(dynamic_deferred_call_clients)
    );
    DynamicDeferredCall::set_global_instance(dynamic_deferred_caller);
    // Create a shared UART channel for the console and for kernel debug.
    let uart_mux =
        components::console::UartMuxComponent::new(channel, 115200, dynamic_deferred_caller)
            .finalize(());
    let pconsole =
        components::process_console::ProcessConsoleComponent::new(board_kernel, uart_mux)
            .finalize(());
    // Setup the console.
    let console = components::console::ConsoleComponent::new(board_kernel, uart_mux).finalize(());
    // Create the debugger object that handles calls to `debug!()`.
    components::debug_writer::DebugWriterComponent::new(uart_mux).finalize(());
    let ble_radio =
        BLEComponent::new(board_kernel, &nrf52::ble_radio::RADIO, mux_alarm).finalize(());
    // The 802.15.4 stack is only instantiated when requested by the board.
    let ieee802154_radio = if ieee802154 {
        let (radio, _) = Ieee802154Component::new(
            board_kernel,
            &nrf52::ieee802154_radio::RADIO,
            PAN_ID,
            SRC_MAC,
        )
        .finalize(());
        Some(radio)
    } else {
        None
    };
    let temp = static_init!(
        capsules::temperature::TemperatureSensor<'static>,
        capsules::temperature::TemperatureSensor::new(
            &nrf52::temperature::TEMP,
            board_kernel.create_grant(&memory_allocation_capability)
        )
    );
    kernel::hil::sensors::TemperatureDriver::set_client(&nrf52::temperature::TEMP, temp);
    let rng = components::rng::RngComponent::new(board_kernel, &nrf52::trng::TRNG).finalize(());
    // SPI
    let mux_spi = static_init!(
        MuxSpiMaster<'static, nrf52::spi::SPIM>,
        MuxSpiMaster::new(&nrf52::spi::SPIM0)
    );
    hil::spi::SpiMaster::set_client(&nrf52::spi::SPIM0, mux_spi);
    hil::spi::SpiMaster::init(&nrf52::spi::SPIM0);
    nrf52::spi::SPIM0.configure(
        nrf52::pinmux::Pinmux::new(spi_pins.mosi as u32),
        nrf52::pinmux::Pinmux::new(spi_pins.miso as u32),
        nrf52::pinmux::Pinmux::new(spi_pins.clk as u32),
    );
    // Optional external flash: build the whole SPI-flash -> paging ->
    // nonvolatile-storage stack only when the board provides the chip's pins.
    let nonvolatile_storage: Option<
        &'static capsules::nonvolatile_storage_driver::NonvolatileStorage<'static>,
    > = if let Some(driver) = mx25r6435f {
        // Create a SPI device for the mx25r6435f flash chip.
        let mx25r6435f_spi = static_init!(
            capsules::virtual_spi::VirtualSpiMasterDevice<'static, nrf52::spi::SPIM>,
            capsules::virtual_spi::VirtualSpiMasterDevice::new(
                mux_spi,
                &gpio_port[driver.chip_select]
            )
        );
        // Create an alarm for this chip.
        let mx25r6435f_virtual_alarm = static_init!(
            VirtualMuxAlarm<'static, nrf52::rtc::Rtc>,
            VirtualMuxAlarm::new(mux_alarm)
        );
        // Setup the actual MX25R6435F driver.
        let mx25r6435f = static_init!(
            capsules::mx25r6435f::MX25R6435F<
                'static,
                capsules::virtual_spi::VirtualSpiMasterDevice<'static, nrf52::spi::SPIM>,
                nrf52::gpio::GPIOPin,
                VirtualMuxAlarm<'static, nrf52::rtc::Rtc>,
            >,
            capsules::mx25r6435f::MX25R6435F::new(
                mx25r6435f_spi,
                mx25r6435f_virtual_alarm,
                &mut capsules::mx25r6435f::TXBUFFER,
                &mut capsules::mx25r6435f::RXBUFFER,
                Some(&gpio_port[driver.write_protect_pin]),
                Some(&gpio_port[driver.hold_pin])
            )
        );
        mx25r6435f_spi.set_client(mx25r6435f);
        hil::time::Alarm::set_client(mx25r6435f_virtual_alarm, mx25r6435f);
        pub static mut FLASH_PAGEBUFFER: capsules::mx25r6435f::Mx25r6435fSector =
            capsules::mx25r6435f::Mx25r6435fSector::new();
        let nv_to_page = static_init!(
            capsules::nonvolatile_to_pages::NonvolatileToPages<
                'static,
                capsules::mx25r6435f::MX25R6435F<
                    'static,
                    capsules::virtual_spi::VirtualSpiMasterDevice<'static, nrf52::spi::SPIM>,
                    nrf52::gpio::GPIOPin,
                    VirtualMuxAlarm<'static, nrf52::rtc::Rtc>,
                >,
            >,
            capsules::nonvolatile_to_pages::NonvolatileToPages::new(
                mx25r6435f,
                &mut FLASH_PAGEBUFFER
            )
        );
        hil::flash::HasClient::set_client(mx25r6435f, nv_to_page);
        let nonvolatile_storage = static_init!(
            capsules::nonvolatile_storage_driver::NonvolatileStorage<'static>,
            capsules::nonvolatile_storage_driver::NonvolatileStorage::new(
                nv_to_page,
                board_kernel.create_grant(&memory_allocation_capability),
                0x60000, // Start address for userspace accessible region
                0x20000, // Length of userspace accessible region
                0,       // Start address of kernel accessible region
                0x60000, // Length of kernel accessible region
                &mut capsules::nonvolatile_storage_driver::BUFFER
            )
        );
        hil::nonvolatile_storage::NonvolatileStorage::set_client(nv_to_page, nonvolatile_storage);
        Some(nonvolatile_storage)
    } else {
        None
    };
    // Initialize AC using AIN5 (P0.29) as VIN+ and VIN- as AIN0 (P0.02)
    // These are hardcoded pin assignments specified in the driver
    let ac_channels = static_init!(
        [&'static nrf52::acomp::Channel; 1],
        [&nrf52::acomp::CHANNEL_AC0,]
    );
    let analog_comparator = static_init!(
        analog_comparator::AnalogComparator<'static, nrf52::acomp::Comparator>,
        analog_comparator::AnalogComparator::new(&nrf52::acomp::ACOMP, ac_channels)
    );
    nrf52::acomp::ACOMP.set_client(analog_comparator);
    // Start all of the clocks. Low power operation will require a better
    // approach than this.
    nrf52::clock::CLOCK.low_stop();
    nrf52::clock::CLOCK.high_stop();
    nrf52::clock::CLOCK.low_set_source(nrf52::clock::LowClockSource::XTAL);
    nrf52::clock::CLOCK.low_start();
    nrf52::clock::CLOCK.high_set_source(nrf52::clock::HighClockSource::XTAL);
    nrf52::clock::CLOCK.high_start();
    while !nrf52::clock::CLOCK.low_started() {}
    while !nrf52::clock::CLOCK.high_started() {}
    let platform = Platform {
        button: button,
        ble_radio: ble_radio,
        ieee802154_radio: ieee802154_radio,
        pconsole: pconsole,
        console: console,
        led: led,
        gpio: gpio,
        rng: rng,
        temp: temp,
        alarm: alarm,
        analog_comparator: analog_comparator,
        nonvolatile_storage: nonvolatile_storage,
        ipc: kernel::ipc::IPC::new(board_kernel, &memory_allocation_capability),
    };
    platform.pconsole.start();
    debug!("Initialization complete. Entering main loop\r");
    debug!("{}", &nrf52::ficr::FICR_INSTANCE);
    extern "C" {
        /// Beginning of the ROM region containing app images.
        static _sapps: u8;
        /// End of the ROM region containing app images.
        ///
        /// This symbol is defined in the linker script.
        static _eapps: u8;
    }
    // Load userspace processes from the app flash region, then run forever.
    kernel::procs::load_processes(
        board_kernel,
        chip,
        core::slice::from_raw_parts(
            &_sapps as *const u8,
            &_eapps as *const u8 as usize - &_sapps as *const u8 as usize,
        ),
        app_memory,
        process_pointers,
        app_fault_response,
        &process_management_capability,
    )
    .unwrap_or_else(|err| {
        debug!("Error loading processes!");
        debug!("{:?}", err);
    });
    board_kernel.kernel_loop(&platform, chip, Some(&platform.ipc), &main_loop_capability);
}
| 36.476496 | 100 | 0.633589 |
d6d91725493bc3ca660c9d4a644271175a44dffe | 788 | #[cxx::bridge(namespace = "aoc")]
mod ffi {
#![allow(clippy::items_after_statements)]
extern "Rust" {
pub fn solve(year: u16, day: u8, part: u8, input: &str) -> Result<String>;
}
}
/// Returns the answer for the specified problem and input.
///
/// # Arguments
///
/// * `year` - The year of the problem, as in 2018 or 2019.
/// * `day` - The day of the problem, from 1 to 25.
/// * `part` - The part of the problem, either 1 or 2.
/// * `input` - The input to the problem.
///
/// # Errors
///
/// Returns `Err` with a textual message if the input was invalid.
fn solve(
    year: u16,
    day: u8,
    part: u8,
    input: &str,
    // NOTE(review): the painter is accepted when the "visualization" feature
    // is enabled but is not forwarded to `advent_of_code::solve` here —
    // presumably visualization is wired up elsewhere; confirm before relying
    // on it.
    #[cfg(feature = "visualization")] painter: PainterRef,
) -> Result<String, String> {
    advent_of_code::solve(year, day, part, input)
}
| 25.419355 | 82 | 0.621827 |
eb5b80ef2c9a208e642f0c90aff4152c6d6d7a42 | 8,707 | use crate::prelude::*;
use crate::dap_codec::{DecoderError, DecoderResult};
use adapter_protocol::*;
use futures::prelude::*;
use std::collections::{hash_map::Entry, HashMap};
use std::io;
use std::pin::Pin;
use std::sync::{Arc, Weak};
use tokio::sync::{broadcast, mpsc, oneshot};
/// A bidirectional transport for DAP traffic: yields decoded incoming frames
/// as a `Stream` and accepts outgoing `ProtocolMessage`s as a `Sink`.
pub trait DAPChannel:
    Stream<Item = Result<DecoderResult, io::Error>> + Sink<ProtocolMessage, Error = io::Error> + Send
{
}

// Blanket impl: any type with the required Stream + Sink shape is a DAPChannel.
impl<T> DAPChannel for T where
    T: Stream<Item = Result<DecoderResult, io::Error>> + Sink<ProtocolMessage, Error = io::Error> + Send
{
}
/// A cloneable handle to a DAP connection that is serviced by the worker
/// future returned from [`DAPSession::new`].
#[derive(Clone)]
pub struct DAPSession {
    // Weak: the broadcast senders are owned by the worker; once the worker
    // ends, `subscribe_*` fails instead of keeping the channel alive.
    requests_sender: Weak<broadcast::Sender<(u32, RequestArguments)>>,
    events_sender: Weak<broadcast::Sender<EventBody>>,
    // Outgoing message queue; the optional oneshot receives the matching response.
    out_sender: mpsc::Sender<(ProtocolMessageType, Option<oneshot::Sender<ResponseResult>>)>,
}
impl DAPSession {
    /// Wraps a raw DAP channel, returning a cloneable session handle plus the
    /// worker future that services the channel. The worker must be polled
    /// (e.g. spawned) or no messages are sent or received.
    pub fn new(channel: Box<dyn DAPChannel>) -> (DAPSession, impl Future<Output = ()> + Send) {
        let mut channel: Pin<Box<dyn DAPChannel>> = channel.into();
        // Broadcast fan-out for incoming requests/events (capacity 100 each).
        let requests_sender = Arc::new(broadcast::channel::<(u32, RequestArguments)>(100).0);
        let events_sender = Arc::new(broadcast::channel::<EventBody>(100).0);
        // Outgoing queue: session handles push here, the worker drains it.
        let (out_sender, mut out_receiver) = mpsc::channel(100);
        // Outstanding requests keyed by sequence number, resolved when the
        // matching response arrives.
        let mut pending_requests: HashMap<u32, oneshot::Sender<ResponseResult>> = HashMap::new();
        // Monotonic sequence number for every message this side originates.
        let mut message_seq = 0;
        let client = DAPSession {
            requests_sender: Arc::downgrade(&requests_sender),
            events_sender: Arc::downgrade(&events_sender),
            out_sender: out_sender,
        };
        let worker = async move {
            loop {
                tokio::select! {
                    // Inbound: decoded frames from the peer.
                    maybe_result = channel.next() => {
                        match maybe_result {
                            Some(Ok(decoder_result)) => {
                                match decoder_result {
                                    Ok(message) => match message.type_ {
                                        ProtocolMessageType::Request(request) => log_errors!(requests_sender.send((message.seq, request))),
                                        ProtocolMessageType::Event(event) => log_errors!(events_sender.send(event)),
                                        // A response resolves the matching pending oneshot.
                                        ProtocolMessageType::Response(response) => match pending_requests.entry(response.request_seq) {
                                            Entry::Vacant(_) => {
                                                error!("Received response without a pending request (request_seq={})", response.request_seq);
                                            }
                                            Entry::Occupied(entry) => {
                                                let sender = entry.remove();
                                                if let Err(_) = sender.send(response.result) {
                                                    error!("Requestor is gone (request_seq={})", response.request_seq);
                                                }
                                            }
                                        },
                                    }
                                    Err(err) => match err {
                                        DecoderError::SerdeError { error, value } => {
                                            // The decoder read a complete frame, but failed to deserialize it
                                            error!("Deserialization error: {}", error);
                                            // Try to extract request seq from the raw JSON value
                                            use serde_json::value::*;
                                            let request_seq = match value {
                                                Value::Object(obj) => {
                                                    match obj.get("seq") {
                                                        Some(Value::Number(seq)) => seq.as_u64(),
                                                        _ => None,
                                                    }
                                                },
                                                _ => None
                                            };
                                            // If we found one, tell the peer its request was malformed
                                            if let Some(request_seq) = request_seq {
                                                message_seq += 1;
                                                let message = ProtocolMessage {
                                                    seq: message_seq,
                                                    type_: ProtocolMessageType::Response(
                                                        Response {
                                                            request_seq: request_seq as u32,
                                                            success: false,
                                                            result: ResponseResult::Error {
                                                                message: "Malformed message".into(),
                                                                command: "".into(),
                                                                show_user: None
                                                            }
                                                        }
                                                    )
                                                };
                                                log_errors!(channel.send(message).await);
                                            }
                                        }
                                    }
                                }
                            },
                            Some(Err(err)) => {
                                error!("Frame decoder error: {}", err);
                                break;
                            },
                            // Stream exhausted: the peer closed the connection.
                            None => {
                                debug!("Client has disconnected");
                                break
                            }
                        }
                    },
                    // Outbound: messages queued by the session handles.
                    Some((message_type, response_sender)) = out_receiver.recv() => {
                        message_seq += 1;
                        let message = ProtocolMessage {
                            seq: message_seq,
                            type_: message_type
                        };
                        // Register the oneshot before sending so the response
                        // can always find it.
                        if let Some(response_sender) = response_sender {
                            pending_requests.insert(message.seq, response_sender);
                        }
                        log_errors!(channel.send(message).await);
                    }
                }
            }
        };
        (client, worker)
    }

    /// Subscribe to incoming requests as `(seq, arguments)` pairs.
    ///
    /// Fails once the worker (and so the broadcast sender) has shut down.
    pub fn subscribe_requests(&self) -> Result<broadcast::Receiver<(u32, RequestArguments)>, Error> {
        match self.requests_sender.upgrade() {
            Some(r) => Ok(r.subscribe()),
            None => Err("Sender is gone".into()),
        }
    }

    /// Subscribe to incoming events; fails once the worker has shut down.
    #[allow(unused)]
    pub fn subscribe_events(&self) -> Result<broadcast::Receiver<EventBody>, Error> {
        match self.events_sender.upgrade() {
            Some(r) => Ok(r.subscribe()),
            None => Err("Sender is gone".into()),
        }
    }

    /// Send a request to the peer and await its response body.
    ///
    /// An error response from the peer is surfaced as `Err` carrying its message.
    pub async fn send_request(&self, request_args: RequestArguments) -> Result<ResponseBody, Error> {
        let (sender, receiver) = oneshot::channel();
        let request = ProtocolMessageType::Request(request_args);
        self.out_sender.send((request, Some(sender))).await?;
        let result = receiver.await?;
        match result {
            ResponseResult::Success {
                body,
            } => Ok(body),
            ResponseResult::Error {
                message,
                ..
            } => Err(message.into()),
        }
    }

    /// Queue a response for delivery, waiting for queue capacity.
    #[allow(unused)]
    pub async fn send_response(&self, response: Response) -> Result<(), Error> {
        self.out_sender.send((ProtocolMessageType::Response(response), None)).await?;
        Ok(())
    }

    /// Queue a response without awaiting; errors if the queue is full or closed.
    pub fn try_send_response(&self, response: Response) -> Result<(), Error> {
        self.out_sender.try_send((ProtocolMessageType::Response(response), None))?;
        Ok(())
    }

    /// Queue an event for delivery, waiting for queue capacity.
    pub async fn send_event(&self, event_body: EventBody) -> Result<(), Error> {
        self.out_sender.send((ProtocolMessageType::Event(event_body), None)).await?;
        Ok(())
    }

    /// Queue an event without awaiting; errors if the queue is full or closed.
    pub fn try_send_event(&self, event_body: EventBody) -> Result<(), Error> {
        self.out_sender.try_send((ProtocolMessageType::Event(event_body), None))?;
        Ok(())
    }
}
| 47.064865 | 141 | 0.409326 |
69d17efa5200d0344eb6bc06889a4214b785d5d8 | 1,880 | use std::{
fmt,
fmt::{Display, Formatter},
};
use crate::{
ast::ast_node::{AstNodeTrait, SpannedNode},
codegen::tree_walker::TreeWalker,
parser::span::Span,
Result,
};
/// A node representing the use of a variable.
#[derive(Hash, Debug, Clone, Eq, PartialOrd, PartialEq)]
pub struct VarNode {
    /// The name of the variable.
    pub name: String,
    /// The span of the string in the original file, if known.
    pub span: Option<Span>,
    /// Is this node referring to a global? Tracked in case a var is used,
    /// then subsequently defined.
    pub global: bool,
    /// Is this variable actually the name of a function? Function pointers create this ambiguity.
    /// The only time this ends up true is in the case of a bareword function var init, e.g.:
    /// `function f = dump;` as `dump` in this case would otherwise be seen as a variable.
    /// The alternate `&name()` syntax is parsed as a function pointer from the start.
    pub function_name: bool,
}
impl VarNode {
    /// Construct a new, non-global node for the variable `name`, with no span.
    pub fn new(name: &str) -> Self {
        Self {
            name: name.to_string(),
            span: None,
            global: false,
            function_name: false,
        }
    }

    /// Mark whether this node refers to a global variable.
    pub fn set_global(&mut self, val: bool) {
        self.global = val;
    }

    /// Mark whether this node is actually a known function name rather than a variable.
    pub fn set_function_name(&mut self, val: bool) {
        self.function_name = val;
    }
}
impl SpannedNode for VarNode {
    // Expose the stored source span (None when the node has no location info).
    fn span(&self) -> Option<Span> {
        self.span
    }
}
impl AstNodeTrait for VarNode {
    // Visitor dispatch: forward to the walker's var handler.
    fn visit(&mut self, tree_walker: &mut impl TreeWalker) -> Result<()> {
        tree_walker.visit_var(self)
    }
}
impl Display for VarNode {
    /// A `VarNode` displays as the bare variable name.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        f.write_str(&self.name)
    }
}
| 26.478873 | 98 | 0.614894 |
ed586635e9c53328bb21eb5e06f0d3491ef1783c | 16,234 | //! This file contains tests relevant to Rustup's handling of updating PATHs.
//! It depends on self-update working, so if absolutely everything here breaks,
//! check those tests as well.
pub mod mock;
// Install with `--default-toolchain none` so the tests exercise only the
// PATH / rc-file handling, without downloading or unpacking toolchain content.
const INIT_NONE: [&str; 4] = ["rustup-init", "-y", "--default-toolchain", "none"];
#[cfg(unix)]
mod unix {
use std::fmt::Display;
use std::fs;
use std::path::PathBuf;
use rustup::utils::raw;
use super::INIT_NONE;
use crate::mock::clitools::{self, expect_err, expect_ok, Scenario};
// A fake shell rc file that looks vaguely like a real script; tests assert
// that rustup appends to it without disturbing the existing content.
const FAKE_RC: &str = r#"
# Sources fruity punch.
. ~/fruit/punch
# Adds apples to PATH.
export PATH="$HOME/apple/bin"
"#;
// The legacy inline PATH export that older rustups wrote directly into rc files.
const DEFAULT_EXPORT: &str = "export PATH=\"$HOME/.cargo/bin:$PATH\"\n";
// Name of the sourced helper script under $CARGO_HOME.
const POSIX_SH: &str = "env";
/// Render a POSIX-compliant `. "<dir>/<sh>"` source line, newline-terminated.
fn source(dir: impl Display, sh: impl Display) -> String {
    format!(". \"{}/{}\"\n", dir, sh)
}
/// Render the non-POSIX `source "<dir>/<sh>"` line, newline-terminated.
///
/// Rustup 1.23 accidentally wrote `source` instead of `.` (which is not
/// POSIX); tests use this to verify that newer rustups revert that line in
/// users' dot files.
fn non_posix_source(dir: impl Display, sh: impl Display) -> String {
    format!("source \"{}/{}\"\n", dir, sh)
}
/// Installing must create `~/.cargo/env` plus shell rc files that source it.
#[test]
fn install_creates_necessary_scripts() {
    clitools::setup(Scenario::Empty, &|config| {
        // Override the test harness so that cargo home looks like
        // $HOME/.cargo by removing CARGO_HOME from the environment,
        // otherwise the literal path will be written to the file.
        let mut cmd = clitools::cmd(config, "rustup-init", &INIT_NONE[1..]);
        let files: Vec<PathBuf> = [".cargo/env", ".profile", ".zshenv"]
            .iter()
            .map(|file| config.homedir.join(file))
            .collect();
        // Sanity: none of the files exist before installation.
        for file in &files {
            assert!(!file.exists());
        }
        cmd.env_remove("CARGO_HOME");
        cmd.env("SHELL", "zsh");
        assert!(cmd.output().unwrap().status.success());
        let mut rcs = files.iter();
        // First file is the env script itself: it must contain the PATH export.
        let env = rcs.next().unwrap();
        let envfile = fs::read_to_string(&env).unwrap();
        let (_, envfile_export) = envfile.split_at(envfile.find("export PATH").unwrap_or(0));
        assert_eq!(&envfile_export[..DEFAULT_EXPORT.len()], DEFAULT_EXPORT);
        // Remaining files are rc files: each just sources the env script.
        for rc in rcs {
            let expected = source("$HOME/.cargo", POSIX_SH);
            let new_profile = fs::read_to_string(&rc).unwrap();
            assert_eq!(new_profile, expected);
        }
    });
}
/// Installing appends the source line to every pre-existing bash rc file.
#[test]
fn install_updates_bash_rcs() {
    clitools::setup(Scenario::Empty, &|config| {
        let rcs: Vec<PathBuf> = [".bashrc", ".bash_profile", ".bash_login", ".profile"]
            .iter()
            .map(|rc| config.homedir.join(rc))
            .collect();
        for rc in &rcs {
            raw::write_file(rc, FAKE_RC).unwrap();
        }
        expect_ok(config, &INIT_NONE);
        // Original content is preserved, with the source line appended.
        let expected = FAKE_RC.to_owned() + &source(config.cargodir.display(), POSIX_SH);
        for rc in &rcs {
            let new_rc = fs::read_to_string(&rc).unwrap();
            assert_eq!(new_rc, expected);
        }
    })
}
/// Installing must not create bash rc files that did not already exist.
#[test]
fn install_does_not_create_bash_rcs() {
    clitools::setup(Scenario::Empty, &|config| {
        let rcs: Vec<PathBuf> = [".bashrc", ".bash_profile", ".bash_login"]
            .iter()
            .map(|rc| config.homedir.join(rc))
            .collect();
        let rcs_before = rcs.iter().map(|rc| rc.exists());
        expect_ok(config, &INIT_NONE);
        // Each file was absent before and must still be absent afterwards.
        for (before, after) in rcs_before.zip(rcs.iter().map(|rc| rc.exists())) {
            assert!(!before);
            assert_eq!(before, after);
        }
    });
}
/// A read-only rc file must make the install fail with an "amend shell" error.
#[test]
fn install_errors_when_rc_cannot_be_updated() {
    clitools::setup(Scenario::Empty, &|config| {
        let rc = config.homedir.join(".profile");
        fs::File::create(&rc).unwrap();
        let mut perms = fs::metadata(&rc).unwrap().permissions();
        perms.set_readonly(true);
        fs::set_permissions(&rc, perms).unwrap();
        expect_err(config, &INIT_NONE, "amend shell");
    });
}
/// With ZDOTDIR set, the zsh rc file inside that directory is updated.
#[test]
fn install_with_zdotdir() {
    clitools::setup(Scenario::Empty, &|config| {
        let zdotdir = tempfile::Builder::new()
            .prefix("zdotdir")
            .tempdir()
            .unwrap();
        let rc = zdotdir.path().join(".zshenv");
        raw::write_file(&rc, FAKE_RC).unwrap();
        let mut cmd = clitools::cmd(config, "rustup-init", &INIT_NONE[1..]);
        cmd.env("SHELL", "zsh");
        cmd.env("ZDOTDIR", zdotdir.path());
        assert!(cmd.output().unwrap().status.success());
        let new_rc = fs::read_to_string(&rc).unwrap();
        let expected = FAKE_RC.to_owned() + &source(config.cargodir.display(), POSIX_SH);
        assert_eq!(new_rc, expected);
    });
}
#[test]
fn install_adds_path_to_rc_just_once() {
clitools::setup(Scenario::Empty, &|config| {
let profile = config.homedir.join(".profile");
raw::write_file(&profile, FAKE_RC).unwrap();
expect_ok(config, &INIT_NONE);
expect_ok(config, &INIT_NONE);
let new_profile = fs::read_to_string(&profile).unwrap();
let expected = FAKE_RC.to_owned() + &source(config.cargodir.display(), POSIX_SH);
assert_eq!(new_profile, expected);
});
}
/// An rc file without a trailing newline still ends up well-formed, and the
/// operation stays idempotent.
#[test]
fn install_adds_path_to_rc_handling_no_newline() {
    clitools::setup(Scenario::Empty, &|config| {
        let profile = config.homedir.join(".profile");
        let fake_rc_modified = FAKE_RC.strip_suffix('\n').expect("Should end in a newline");
        raw::write_file(&profile, fake_rc_modified).unwrap();
        // Run once to add the configuration
        expect_ok(config, &INIT_NONE);
        // Run twice to test that the process is idempotent
        expect_ok(config, &INIT_NONE);
        let new_profile = fs::read_to_string(&profile).unwrap();
        let expected = FAKE_RC.to_owned() + &source(config.cargodir.display(), POSIX_SH);
        assert_eq!(new_profile, expected);
    });
}
/// When one of two same-shell rc files already has the source line, the other
/// still gets it — regardless of which file is processed first.
#[test]
fn install_adds_path_to_multiple_rc_files() {
    clitools::setup(Scenario::Empty, &|config| {
        // Two RC files that are both from the same shell
        let bash_profile = config.homedir.join(".bash_profile");
        let bashrc = config.homedir.join(".bashrc");
        let expected = FAKE_RC.to_owned() + &source(config.cargodir.display(), POSIX_SH);
        // The order that the two files are processed isn't known, so test both orders
        for [path1, path2] in &[[&bash_profile, &bashrc], [&bashrc, &bash_profile]] {
            raw::write_file(path1, &expected).unwrap();
            raw::write_file(path2, FAKE_RC).unwrap();
            expect_ok(config, &INIT_NONE);
            let new1 = fs::read_to_string(&path1).unwrap();
            assert_eq!(new1, expected);
            let new2 = fs::read_to_string(&path2).unwrap();
            assert_eq!(new2, expected);
        }
    });
}
/// Uninstalling restores every rc file to its pre-install content.
#[test]
fn uninstall_removes_source_from_rcs() {
    clitools::setup(Scenario::Empty, &|config| {
        let rcs: Vec<PathBuf> = [
            ".bashrc",
            ".bash_profile",
            ".bash_login",
            ".profile",
            ".zshenv",
        ]
        .iter()
        .map(|rc| config.homedir.join(rc))
        .collect();
        for rc in &rcs {
            raw::write_file(rc, FAKE_RC).unwrap();
        }
        expect_ok(config, &INIT_NONE);
        expect_ok(config, &["rustup", "self", "uninstall", "-y"]);
        for rc in &rcs {
            let new_rc = fs::read_to_string(&rc).unwrap();
            assert_eq!(new_rc, FAKE_RC);
        }
    })
}
/// Installing over rc files written by an old rustup (inline PATH export plus
/// the non-POSIX `source` line) replaces the legacy lines with the POSIX
/// source line, and removes them entirely from zsh profile files.
#[test]
fn install_adds_sources_while_removing_legacy_paths() {
    clitools::setup(Scenario::Empty, &|config| {
        let zdotdir = tempfile::Builder::new()
            .prefix("zdotdir")
            .tempdir()
            .unwrap();
        let rcs: Vec<PathBuf> = [".bash_profile", ".profile"]
            .iter()
            .map(|rc| config.homedir.join(rc))
            .collect();
        let zprofiles = vec![
            config.homedir.join(".zprofile"),
            zdotdir.path().join(".zprofile"),
        ];
        let old_rc =
            FAKE_RC.to_owned() + DEFAULT_EXPORT + &non_posix_source("$HOME/.cargo", POSIX_SH);
        for rc in rcs.iter().chain(zprofiles.iter()) {
            raw::write_file(rc, &old_rc).unwrap();
        }
        let mut cmd = clitools::cmd(config, "rustup-init", &INIT_NONE[1..]);
        cmd.env("SHELL", "zsh");
        cmd.env("ZDOTDIR", zdotdir.path());
        cmd.env_remove("CARGO_HOME");
        assert!(cmd.output().unwrap().status.success());
        // Bash-family rc files get the fixed POSIX source line...
        let fixed_rc = FAKE_RC.to_owned() + &source("$HOME/.cargo", POSIX_SH);
        for rc in &rcs {
            let new_rc = fs::read_to_string(&rc).unwrap();
            assert_eq!(new_rc, fixed_rc);
        }
        // ...while zsh profiles are simply cleaned of the legacy lines.
        for rc in &zprofiles {
            let new_rc = fs::read_to_string(&rc).unwrap();
            assert_eq!(new_rc, FAKE_RC);
        }
    })
}
/// Uninstalling also removes the legacy (pre-1.23-style) lines from rc files.
#[test]
fn uninstall_cleans_up_legacy_paths() {
    clitools::setup(Scenario::Empty, &|config| {
        // Install first, then overwrite.
        expect_ok(config, &INIT_NONE);
        let zdotdir = tempfile::Builder::new()
            .prefix("zdotdir")
            .tempdir()
            .unwrap();
        let mut cmd = clitools::cmd(config, "rustup-init", &INIT_NONE[1..]);
        cmd.env("SHELL", "zsh");
        cmd.env("ZDOTDIR", zdotdir.path());
        cmd.env_remove("CARGO_HOME");
        assert!(cmd.output().unwrap().status.success());
        let mut rcs: Vec<PathBuf> = [".bash_profile", ".profile", ".zprofile"]
            .iter()
            .map(|rc| config.homedir.join(rc))
            .collect();
        rcs.push(zdotdir.path().join(".zprofile"));
        // Simulate the files a legacy rustup would have written.
        let old_rc =
            FAKE_RC.to_owned() + DEFAULT_EXPORT + &non_posix_source("$HOME/.cargo", POSIX_SH);
        for rc in &rcs {
            raw::write_file(rc, &old_rc).unwrap();
        }
        let mut cmd = clitools::cmd(config, "rustup", &["self", "uninstall", "-y"]);
        cmd.env("SHELL", "zsh");
        cmd.env("ZDOTDIR", zdotdir.path());
        cmd.env_remove("CARGO_HOME");
        assert!(cmd.output().unwrap().status.success());
        for rc in &rcs {
            let new_rc = fs::read_to_string(&rc).unwrap();
            // It's not ideal, but it's OK, if we leave whitespace.
            assert_eq!(new_rc, FAKE_RC);
        }
    })
}
// In the default case we want to write the literal "$HOME/.cargo" form into
// rc files, not the fully-expanded cargo home path.
/// With no CARGO_HOME override, rc files reference `$HOME/.cargo/env`, and
/// uninstalling restores the original content exactly.
#[test]
fn when_cargo_home_is_the_default_write_path_specially() {
    clitools::setup(Scenario::Empty, &|config| {
        // Override the test harness so that cargo home looks like
        // $HOME/.cargo by removing CARGO_HOME from the environment,
        // otherwise the literal path will be written to the file.
        let profile = config.homedir.join(".profile");
        raw::write_file(&profile, FAKE_RC).unwrap();
        let mut cmd = clitools::cmd(config, "rustup-init", &INIT_NONE[1..]);
        cmd.env_remove("CARGO_HOME");
        assert!(cmd.output().unwrap().status.success());
        let new_profile = fs::read_to_string(&profile).unwrap();
        let expected = format!("{}. \"$HOME/.cargo/env\"\n", FAKE_RC);
        assert_eq!(new_profile, expected);
        let mut cmd = clitools::cmd(config, "rustup", &["self", "uninstall", "-y"]);
        cmd.env_remove("CARGO_HOME");
        assert!(cmd.output().unwrap().status.success());
        let new_profile = fs::read_to_string(&profile).unwrap();
        assert_eq!(new_profile, FAKE_RC);
    });
}
/// `--no-modify-path` must leave shell profiles untouched (not even created).
#[test]
fn install_doesnt_modify_path_if_passed_no_modify_path() {
    clitools::setup(Scenario::Empty, &|config| {
        let profile = config.homedir.join(".profile");
        expect_ok(
            config,
            &[
                "rustup-init",
                "-y",
                "--no-modify-path",
                "--default-toolchain",
                "none",
            ],
        );
        assert!(!profile.exists());
    });
}
}
#[cfg(windows)]
mod windows {
use rustup::test::{get_path, with_saved_path};
use super::INIT_NONE;
use crate::mock::clitools::{self, expect_ok, Scenario};
#[test]
/// Smoke test for end-to-end code connectivity of the installer path mgmt on windows.
fn install_uninstall_affect_path() {
    clitools::setup(Scenario::Empty, &|config| {
        with_saved_path(&|| {
            let path = format!("{:?}", config.cargodir.join("bin").to_string_lossy());
            expect_ok(config, &INIT_NONE);
            // After install, the registry PATH must contain cargo's bin dir.
            assert!(
                get_path()
                    .unwrap()
                    .unwrap()
                    .to_string()
                    .contains(path.trim_matches('"')),
                "`{}` not in `{}`",
                path,
                get_path().unwrap().unwrap()
            );
            expect_ok(config, &["rustup", "self", "uninstall", "-y"]);
            // After uninstall, the entry must be gone again.
            assert!(!get_path().unwrap().unwrap().to_string().contains(&path));
        })
    });
}
#[test]
/// Smoke test for end-to-end code connectivity of the installer path mgmt on windows.
fn install_uninstall_affect_path_with_non_unicode() {
    use std::ffi::OsString;
    use std::os::windows::ffi::OsStrExt;
    use winreg::enums::{RegType, HKEY_CURRENT_USER, KEY_READ, KEY_WRITE};
    use winreg::{RegKey, RegValue};
    clitools::setup(Scenario::Empty, &|config| {
        with_saved_path(&|| {
            // Set up a non unicode PATH (invalid UTF-16 surrogate sequence)
            let reg_value = RegValue {
                bytes: vec![
                    0x00, 0xD8, // leading surrogate
                    0x01, 0x01, // bogus trailing surrogate
                    0x00, 0x00, // null
                ],
                vtype: RegType::REG_EXPAND_SZ,
            };
            RegKey::predef(HKEY_CURRENT_USER)
                .open_subkey_with_flags("Environment", KEY_READ | KEY_WRITE)
                .unwrap()
                .set_raw_value("PATH", &reg_value)
                .unwrap();
            // compute expected path after installation: cargo bin dir
            // (UTF-16LE bytes), a ';' separator, then the original raw bytes.
            let expected = RegValue {
                bytes: OsString::from(config.cargodir.join("bin"))
                    .encode_wide()
                    .flat_map(|v| vec![v as u8, (v >> 8) as u8])
                    .chain(vec![b';', 0])
                    .chain(reg_value.bytes.iter().copied())
                    .collect(),
                vtype: RegType::REG_EXPAND_SZ,
            };
            expect_ok(config, &INIT_NONE);
            assert_eq!(get_path().unwrap().unwrap(), expected);
            expect_ok(config, &["rustup", "self", "uninstall", "-y"]);
            // Uninstall restores the original (non-unicode) value untouched.
            assert_eq!(get_path().unwrap().unwrap(), reg_value);
        })
    });
}
}
| 37.665893 | 98 | 0.517001 |
9c778c3bd60d16c517450230f2e1cb1b456e001e | 550 | use std::env::args;
/// Returns `true` when removing exactly one character from `base` yields
/// `sub` (the words "funnel").
///
/// Works on characters (not bytes), so multi-byte UTF-8 input is safe.
fn funnels(base: &str, sub: &str) -> bool {
    let base: Vec<char> = base.chars().collect();
    let sub: Vec<char> = sub.chars().collect();
    // A funnel removes exactly one character.
    if base.len() != sub.len() + 1 {
        return false;
    }
    // Advance past the common prefix to the first disagreement.
    let mut i = 0;
    while i < sub.len() && base[i] == sub[i] {
        i += 1;
    }
    // Skip the extra character in `base`; the remaining tails must match.
    // If the words agree on all of `sub`, the extra character is the last
    // one of `base` and the tail comparison is trivially true.
    base[i + 1..] == sub[i..]
}

/// Reads two words from the command line and reports whether the first can
/// be turned into the second by deleting a single character.
///
/// Fixes the original logic, which mutated the base word in place using a
/// byte index with `String::remove` (panicking on multi-byte UTF-8) and
/// missed funnels whose extra character is at the end (e.g. "cart" -> "car").
fn main() {
    let mut params = args().skip(1).take(2);
    let base_word: String = params.next().unwrap();
    let sub_word: String = params.next().unwrap();
    if funnels(&base_word, &sub_word) {
        println!("They funnel");
    } else {
        println!("'{}' cannot be made into '{}'", base_word, sub_word);
    }
}
fe943bf00ecaded94330d92d946648c05a7cc78f | 4,653 | use num::FromPrimitive;
use std::fmt;
bitfield::bitfield! {
    // Status byte reported by the device; bit 3 is not mapped here.
    #[repr(transparent)]
    #[derive(Copy, Clone)]
    pub struct DeviceStatus(u8);
    impl Debug;
    pub connected, _: 0;
    // Bits 2..=1, decoded into DeviceType via its From<u8> impl.
    pub u8, into DeviceType, device_type, _: 2, 1;
    pub charging, _: 4;
    // Bits 7..=5, decoded into BatteryLevel via its From<u8> impl.
    pub u8, into BatteryLevel, battery_level, _: 7, 5;
}
/// Kind of controller encoded in [`DeviceStatus`] bits 1..=2.
#[derive(Debug, Copy, Clone, FromPrimitive)]
pub enum DeviceType {
    ProController = 0,
    // Used when the ringcon is plugged, maybe also for the pokeball?
    MaybeAccessory = 1,
    // Used in one InputReport when the ringcon is plugged, then switch to value 1.
    MaybeInitializingAccessory = 2,
    Joycon = 3,
}
impl From<u8> for DeviceType {
    /// Decode the 2-bit device-type field; panics on an unknown raw value.
    fn from(v: u8) -> Self {
        DeviceType::from_u8(v).unwrap_or_else(|| panic!("unknown device type 0x{:x}", v))
    }
}
/// Battery charge encoded in [`DeviceStatus`] bits 5..=7; ordered so that
/// levels compare naturally (Empty < ... < Full).
#[derive(Debug, Copy, Clone, FromPrimitive, Eq, PartialEq, Ord, PartialOrd)]
pub enum BatteryLevel {
    Empty = 0,
    Critical = 1,
    Low = 2,
    Medium = 3,
    Full = 4,
}
impl From<u8> for BatteryLevel {
    /// Decode the 3-bit battery field; panics on a value outside 0..=4.
    fn from(v: u8) -> Self {
        match BatteryLevel::from_u8(v) {
            Some(level) => level,
            None => panic!("unexpected battery level"),
        }
    }
}
/// Raw button state as three packed bitfield bytes.
#[repr(packed)]
#[derive(Copy, Clone, Default)]
pub struct ButtonsStatus {
    pub right: RightButtons,
    pub middle: MiddleButtons,
    pub left: LeftButtons,
}
impl fmt::Debug for ButtonsStatus {
    // Delegates to the Display impl, wrapped in a "ButtonsStatus(...)" tuple.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("ButtonsStatus")
            .field(&format_args!("{}", self))
            .finish()
    }
}
impl fmt::Display for ButtonsStatus {
    /// Writes every pressed button as a space-prefixed token (e.g. " A ZR HOME");
    /// writes nothing when no button is pressed.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.right.a() {
            write!(f, " A")?;
        }
        if self.right.b() {
            write!(f, " B")?;
        }
        if self.right.x() {
            write!(f, " X")?;
        }
        if self.right.y() {
            write!(f, " Y")?;
        }
        if self.left.up() {
            write!(f, " UP")?;
        }
        if self.left.down() {
            write!(f, " DOWN")?;
        }
        if self.left.left() {
            write!(f, " LEFT")?;
        }
        if self.left.right() {
            write!(f, " RIGHT")?;
        }
        if self.left.l() {
            write!(f, " L")?;
        }
        if self.left.zl() {
            write!(f, " ZL")?;
        }
        if self.right.r() {
            write!(f, " R")?;
        }
        if self.right.zr() {
            write!(f, " ZR")?;
        }
        // Bug fix: this branch previously printed " SR" for the SL buttons.
        if self.left.sl() || self.right.sl() {
            write!(f, " SL")?;
        }
        if self.left.sr() || self.right.sr() {
            write!(f, " SR")?;
        }
        if self.middle.lstick() {
            write!(f, " L3")?;
        }
        if self.middle.rstick() {
            write!(f, " R3")?;
        }
        if self.middle.minus() {
            write!(f, " -")?;
        }
        if self.middle.plus() {
            write!(f, " +")?;
        }
        if self.middle.capture() {
            write!(f, " CAPTURE")?;
        }
        if self.middle.home() {
            write!(f, " HOME")?;
        }
        Ok(())
    }
}
bitfield::bitfield! {
    // Button byte for the right-hand side (read-only accessors).
    #[repr(transparent)]
    #[derive(Copy, Clone, Default)]
    pub struct RightButtons(u8);
    impl Debug;
    pub y, _: 0;
    pub x, _: 1;
    pub b, _: 2;
    pub a, _: 3;
    pub sr, _: 4;
    pub sl, _: 5;
    pub r, _: 6;
    pub zr, _: 7;
}
bitfield::bitfield! {
    // Button byte for the middle cluster (read-only accessors).
    #[repr(transparent)]
    #[derive(Copy, Clone, Default)]
    pub struct MiddleButtons(u8);
    impl Debug;
    pub minus, _: 0;
    pub plus, _: 1;
    pub rstick, _: 2;
    pub lstick, _: 3;
    pub home, _: 4;
    pub capture, _: 5;
    pub _unused, _: 6;
    pub charging_grip, _: 7;
}
bitfield::bitfield! {
    // Button byte for the left-hand side (read-only accessors).
    #[repr(transparent)]
    #[derive(Copy, Clone, Default)]
    pub struct LeftButtons(u8);
    impl Debug;
    pub down, _: 0;
    pub up, _: 1;
    pub right, _: 2;
    pub left, _: 3;
    pub sr, _: 4;
    pub sl, _: 5;
    pub l, _: 6;
    pub zl, _: 7;
}
/// Logical button identifiers.
// NOTE(review): N/S/E/W presumably name the face buttons by compass position
// rather than by label — confirm against the code that consumes this enum.
pub enum Button {
    N,
    S,
    E,
    W,
    L,
    R,
    ZL,
    ZR,
    L3,
    R3,
    UP,
    DOWN,
    LEFT,
    RIGHT,
}
/// Raw analog-stick sample: two 12-bit axes packed into 3 bytes
/// (decoded by `Stick::x` / `Stick::y`).
#[repr(packed)]
#[derive(Copy, Clone)]
pub struct Stick {
    data: [u8; 3],
}
impl Stick {
    /// Horizontal axis: byte 0 plus the low nibble of byte 1 (12 bits).
    pub fn x(self) -> u16 {
        let lo = self.data[0] as u16;
        let hi = (self.data[1] & 0x0f) as u16;
        lo | (hi << 8)
    }

    /// Vertical axis: high nibble of byte 1 plus byte 2 (12 bits).
    pub fn y(self) -> u16 {
        let lo = (self.data[1] >> 4) as u16;
        let hi = self.data[2] as u16;
        lo | (hi << 4)
    }
}
impl fmt::Debug for Stick {
    // Shows the decoded axes, e.g. `Stick(2048, 1900)`, not the raw bytes.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_tuple("Stick")
            .field(&self.x())
            .field(&self.y())
            .finish()
    }
}
| 21.442396 | 83 | 0.478186 |
61258179af4d9a7562ea7cd2be55bcfce1f75323 | 323 | //! Utility functions
use std::io::{self, Read, Write};
/// Prompt the user and block until a single byte is read from stdin.
///
/// # Errors
///
/// Returns any I/O error raised while writing the prompt or reading stdin.
pub fn pause() -> io::Result<()> {
    let mut stdout = io::stdout();
    stdout.write_all(b"Press any key to continue...")?;
    stdout.flush()?;
    let mut byte = [0u8; 1];
    let _ = io::stdin().read(&mut byte)?;
    Ok(())
}
| 20.1875 | 52 | 0.563467 |
// NOTE(review): this register module appears to be machine-generated
// (svd2rust-style); prefer regenerating over hand-editing.
#[doc = r" Value read from the register"]
pub struct R {
    // Raw 32-bit snapshot captured by `read()`.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw 32-bit value accumulated by the field proxies before being written.
    bits: u32,
}
impl super::PESOC_PERI_CLK_CTRL1 {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: both R and W start from the current hardware value.
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, starts from the reset value, not the current one.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
// Single-bit field reader. The same generated pattern repeats for every
// *_ENR struct below.
#[doc = r" Value of the field"]
pub struct SOC_SLPCK_DAC_ENR {
    bits: bool,
}
impl SOC_SLPCK_DAC_ENR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r" Value of the field"]
pub struct SOC_ACTCK_DAC_ENR {
bits: bool,
}
impl SOC_ACTCK_DAC_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct SOC_SLPCK_ADC_ENR {
bits: bool,
}
impl SOC_SLPCK_ADC_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct SOC_ACTCK_ADC_ENR {
bits: bool,
}
impl SOC_ACTCK_ADC_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct SOC_SLPCK_PCM_ENR {
bits: bool,
}
impl SOC_SLPCK_PCM_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct SOC_ACTCK_PCM_ENR {
bits: bool,
}
impl SOC_ACTCK_PCM_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct SOC_SLPCK_I2S_ENR {
bits: bool,
}
impl SOC_SLPCK_I2S_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct SOC_ACTCK_I2S_ENR {
bits: bool,
}
impl SOC_ACTCK_I2S_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct SOC_SLPCK_I2C3_ENR {
bits: bool,
}
impl SOC_SLPCK_I2C3_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct SOC_ACTCK_I2C3_ENR {
bits: bool,
}
impl SOC_ACTCK_I2C3_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct SOC_SLPCK_I2C2_ENR {
bits: bool,
}
impl SOC_SLPCK_I2C2_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct SOC_ACTCK_I2C2_ENR {
bits: bool,
}
impl SOC_ACTCK_I2C2_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct SOC_SLPCK_I2C1_ENR {
bits: bool,
}
impl SOC_SLPCK_I2C1_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct SOC_ACTCK_I2C1_ENR {
bits: bool,
}
impl SOC_ACTCK_I2C1_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct SOC_SLPCK_I2C0_ENR {
bits: bool,
}
impl SOC_SLPCK_I2C0_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct SOC_ACTCK_I2C0_ENR {
bits: bool,
}
impl SOC_ACTCK_I2C0_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Proxy"]
pub struct _SOC_SLPCK_DAC_ENW<'a> {
w: &'a mut W,
}
impl<'a> _SOC_SLPCK_DAC_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 29;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SOC_ACTCK_DAC_ENW<'a> {
w: &'a mut W,
}
impl<'a> _SOC_ACTCK_DAC_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 28;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SOC_SLPCK_ADC_ENW<'a> {
w: &'a mut W,
}
impl<'a> _SOC_SLPCK_ADC_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 25;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SOC_ACTCK_ADC_ENW<'a> {
w: &'a mut W,
}
impl<'a> _SOC_ACTCK_ADC_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 24;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SOC_SLPCK_PCM_ENW<'a> {
w: &'a mut W,
}
impl<'a> _SOC_SLPCK_PCM_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 21;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SOC_ACTCK_PCM_ENW<'a> {
w: &'a mut W,
}
impl<'a> _SOC_ACTCK_PCM_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 20;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SOC_SLPCK_I2S_ENW<'a> {
w: &'a mut W,
}
impl<'a> _SOC_SLPCK_I2S_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 17;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SOC_ACTCK_I2S_ENW<'a> {
w: &'a mut W,
}
impl<'a> _SOC_ACTCK_I2S_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 16;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SOC_SLPCK_I2C3_ENW<'a> {
w: &'a mut W,
}
impl<'a> _SOC_SLPCK_I2C3_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 7;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SOC_ACTCK_I2C3_ENW<'a> {
w: &'a mut W,
}
impl<'a> _SOC_ACTCK_I2C3_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 6;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SOC_SLPCK_I2C2_ENW<'a> {
w: &'a mut W,
}
impl<'a> _SOC_SLPCK_I2C2_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 5;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SOC_ACTCK_I2C2_ENW<'a> {
w: &'a mut W,
}
impl<'a> _SOC_ACTCK_I2C2_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SOC_SLPCK_I2C1_ENW<'a> {
w: &'a mut W,
}
impl<'a> _SOC_SLPCK_I2C1_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 3;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SOC_ACTCK_I2C1_ENW<'a> {
w: &'a mut W,
}
impl<'a> _SOC_ACTCK_I2C1_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 2;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SOC_SLPCK_I2C0_ENW<'a> {
w: &'a mut W,
}
impl<'a> _SOC_SLPCK_I2C0_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SOC_ACTCK_I2C0_ENW<'a> {
w: &'a mut W,
}
impl<'a> _SOC_ACTCK_I2C0_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 29"]
#[inline]
pub fn soc_slpck_dac_en(&self) -> SOC_SLPCK_DAC_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 29;
((self.bits >> OFFSET) & MASK as u32) != 0
};
SOC_SLPCK_DAC_ENR { bits }
}
#[doc = "Bit 28"]
#[inline]
pub fn soc_actck_dac_en(&self) -> SOC_ACTCK_DAC_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 28;
((self.bits >> OFFSET) & MASK as u32) != 0
};
SOC_ACTCK_DAC_ENR { bits }
}
#[doc = "Bit 25"]
#[inline]
pub fn soc_slpck_adc_en(&self) -> SOC_SLPCK_ADC_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 25;
((self.bits >> OFFSET) & MASK as u32) != 0
};
SOC_SLPCK_ADC_ENR { bits }
}
#[doc = "Bit 24"]
#[inline]
pub fn soc_actck_adc_en(&self) -> SOC_ACTCK_ADC_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 24;
((self.bits >> OFFSET) & MASK as u32) != 0
};
SOC_ACTCK_ADC_ENR { bits }
}
#[doc = "Bit 21"]
#[inline]
pub fn soc_slpck_pcm_en(&self) -> SOC_SLPCK_PCM_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 21;
((self.bits >> OFFSET) & MASK as u32) != 0
};
SOC_SLPCK_PCM_ENR { bits }
}
#[doc = "Bit 20"]
#[inline]
pub fn soc_actck_pcm_en(&self) -> SOC_ACTCK_PCM_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 20;
((self.bits >> OFFSET) & MASK as u32) != 0
};
SOC_ACTCK_PCM_ENR { bits }
}
#[doc = "Bit 17"]
#[inline]
pub fn soc_slpck_i2s_en(&self) -> SOC_SLPCK_I2S_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 17;
((self.bits >> OFFSET) & MASK as u32) != 0
};
SOC_SLPCK_I2S_ENR { bits }
}
#[doc = "Bit 16"]
#[inline]
pub fn soc_actck_i2s_en(&self) -> SOC_ACTCK_I2S_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 16;
((self.bits >> OFFSET) & MASK as u32) != 0
};
SOC_ACTCK_I2S_ENR { bits }
}
#[doc = "Bit 7"]
#[inline]
pub fn soc_slpck_i2c3_en(&self) -> SOC_SLPCK_I2C3_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 7;
((self.bits >> OFFSET) & MASK as u32) != 0
};
SOC_SLPCK_I2C3_ENR { bits }
}
#[doc = "Bit 6"]
#[inline]
pub fn soc_actck_i2c3_en(&self) -> SOC_ACTCK_I2C3_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 6;
((self.bits >> OFFSET) & MASK as u32) != 0
};
SOC_ACTCK_I2C3_ENR { bits }
}
#[doc = "Bit 5"]
#[inline]
pub fn soc_slpck_i2c2_en(&self) -> SOC_SLPCK_I2C2_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 5;
((self.bits >> OFFSET) & MASK as u32) != 0
};
SOC_SLPCK_I2C2_ENR { bits }
}
#[doc = "Bit 4"]
#[inline]
pub fn soc_actck_i2c2_en(&self) -> SOC_ACTCK_I2C2_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 4;
((self.bits >> OFFSET) & MASK as u32) != 0
};
SOC_ACTCK_I2C2_ENR { bits }
}
#[doc = "Bit 3"]
#[inline]
pub fn soc_slpck_i2c1_en(&self) -> SOC_SLPCK_I2C1_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 3;
((self.bits >> OFFSET) & MASK as u32) != 0
};
SOC_SLPCK_I2C1_ENR { bits }
}
#[doc = "Bit 2"]
#[inline]
pub fn soc_actck_i2c1_en(&self) -> SOC_ACTCK_I2C1_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 2;
((self.bits >> OFFSET) & MASK as u32) != 0
};
SOC_ACTCK_I2C1_ENR { bits }
}
#[doc = "Bit 1"]
#[inline]
pub fn soc_slpck_i2c0_en(&self) -> SOC_SLPCK_I2C0_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u32) != 0
};
SOC_SLPCK_I2C0_ENR { bits }
}
#[doc = "Bit 0"]
#[inline]
pub fn soc_actck_i2c0_en(&self) -> SOC_ACTCK_I2C0_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) != 0
};
SOC_ACTCK_I2C0_ENR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bit 29"]
#[inline]
pub fn soc_slpck_dac_en(&mut self) -> _SOC_SLPCK_DAC_ENW {
_SOC_SLPCK_DAC_ENW { w: self }
}
#[doc = "Bit 28"]
#[inline]
pub fn soc_actck_dac_en(&mut self) -> _SOC_ACTCK_DAC_ENW {
_SOC_ACTCK_DAC_ENW { w: self }
}
#[doc = "Bit 25"]
#[inline]
pub fn soc_slpck_adc_en(&mut self) -> _SOC_SLPCK_ADC_ENW {
_SOC_SLPCK_ADC_ENW { w: self }
}
#[doc = "Bit 24"]
#[inline]
pub fn soc_actck_adc_en(&mut self) -> _SOC_ACTCK_ADC_ENW {
_SOC_ACTCK_ADC_ENW { w: self }
}
#[doc = "Bit 21"]
#[inline]
pub fn soc_slpck_pcm_en(&mut self) -> _SOC_SLPCK_PCM_ENW {
_SOC_SLPCK_PCM_ENW { w: self }
}
#[doc = "Bit 20"]
#[inline]
pub fn soc_actck_pcm_en(&mut self) -> _SOC_ACTCK_PCM_ENW {
_SOC_ACTCK_PCM_ENW { w: self }
}
#[doc = "Bit 17"]
#[inline]
pub fn soc_slpck_i2s_en(&mut self) -> _SOC_SLPCK_I2S_ENW {
_SOC_SLPCK_I2S_ENW { w: self }
}
#[doc = "Bit 16"]
#[inline]
pub fn soc_actck_i2s_en(&mut self) -> _SOC_ACTCK_I2S_ENW {
_SOC_ACTCK_I2S_ENW { w: self }
}
#[doc = "Bit 7"]
#[inline]
pub fn soc_slpck_i2c3_en(&mut self) -> _SOC_SLPCK_I2C3_ENW {
_SOC_SLPCK_I2C3_ENW { w: self }
}
#[doc = "Bit 6"]
#[inline]
pub fn soc_actck_i2c3_en(&mut self) -> _SOC_ACTCK_I2C3_ENW {
_SOC_ACTCK_I2C3_ENW { w: self }
}
#[doc = "Bit 5"]
#[inline]
pub fn soc_slpck_i2c2_en(&mut self) -> _SOC_SLPCK_I2C2_ENW {
_SOC_SLPCK_I2C2_ENW { w: self }
}
#[doc = "Bit 4"]
#[inline]
pub fn soc_actck_i2c2_en(&mut self) -> _SOC_ACTCK_I2C2_ENW {
_SOC_ACTCK_I2C2_ENW { w: self }
}
#[doc = "Bit 3"]
#[inline]
pub fn soc_slpck_i2c1_en(&mut self) -> _SOC_SLPCK_I2C1_ENW {
_SOC_SLPCK_I2C1_ENW { w: self }
}
#[doc = "Bit 2"]
#[inline]
pub fn soc_actck_i2c1_en(&mut self) -> _SOC_ACTCK_I2C1_ENW {
_SOC_ACTCK_I2C1_ENW { w: self }
}
#[doc = "Bit 1"]
#[inline]
pub fn soc_slpck_i2c0_en(&mut self) -> _SOC_SLPCK_I2C0_ENW {
_SOC_SLPCK_I2C0_ENW { w: self }
}
#[doc = "Bit 0"]
#[inline]
pub fn soc_actck_i2c0_en(&mut self) -> _SOC_ACTCK_I2C0_ENW {
_SOC_ACTCK_I2C0_ENW { w: self }
}
}
| 26.299306 | 64 | 0.519634 |
4b207894330b3f54636822e3dff64f1d0d3c9d25 | 16,846 | /*
This tool is part of the WhiteboxTools geospatial analysis library.
Authors: Dr. John Lindsay
Created: 09/09/2017
Last Modified: 31/05/2021
License: MIT
*/
use whitebox_raster::*;
use crate::tools::*;
use num_cpus;
use std::collections::HashMap;
use std::env;
use std::f64;
use std::io::{Error, ErrorKind};
use std::path;
use std::sync::mpsc;
use std::sync::Arc;
use std::thread;
/// This tool creates a new raster in which the value of each grid cell is determined by an input raster (`--input`) and a
/// collection of user-defined classes. The user must specify the *New* value, the *From* value, and the *To Just Less Than*
/// value of each class triplet of the reclass string. Classes must be mutually exclusive, i.e. non-overlapping. For example:
///
/// > --reclass_vals='0.0;0.0;1.0;1.0;1.0;2.0'
///
/// The above reclass string assigns 0.0 to all grid cells in the input image with values from 0.0-1.0 and an output
/// value of 1.0 from to inputs from 1.0-2.0. Alternatively, if the `--assign_mode` flag is specified, `Reclass` will
/// operate in assign mode, using a reclass string composed of paired values:
///
/// > --reclass_vals='0.0;1.0;1.0;2.0'
///
/// Here, 0.0 is assigned to input grid cell values of 1.0 and 1.0 is output for all input cells with a value of 2.0. Users
/// may add the text strings *min* and *max* in the class definitions to stand in for the raster's minimum and maximum values.
/// For example:
///
/// > --reclass_vals='0.0;min;1.0;1.0;1.0;max'
///
/// Any values in the input raster that do not fall within one of the classes will be assigned its original value in the
/// output raster. NoData values in the input raster will be assigned NoData values in the output raster, unless NoData is
/// used in one of the user-defined reclass ranges (notice that it is valid to enter 'NoData' in these ranges).
///
/// # See Also
/// `ReclassEqualInterval`, `ReclassFromFile`
pub struct Reclass {
name: String,
description: String,
toolbox: String,
parameters: Vec<ToolParameter>,
example_usage: String,
}
impl Reclass {
/// public constructor
pub fn new() -> Reclass {
let name = "Reclass".to_string();
let toolbox = "GIS Analysis".to_string();
let description = "Reclassifies the values in a raster image.".to_string();
let mut parameters = vec![];
parameters.push(ToolParameter {
name: "Input File".to_owned(),
flags: vec!["-i".to_owned(), "--input".to_owned()],
description: "Input raster file.".to_owned(),
parameter_type: ParameterType::ExistingFile(ParameterFileType::Raster),
default_value: None,
optional: false,
});
parameters.push(ToolParameter {
name: "Output File".to_owned(),
flags: vec!["-o".to_owned(), "--output".to_owned()],
description: "Output raster file.".to_owned(),
parameter_type: ParameterType::NewFile(ParameterFileType::Raster),
default_value: None,
optional: false,
});
parameters.push(ToolParameter{
name: "Reclass Values (new value; from value; to less than)".to_owned(),
flags: vec!["--reclass_vals".to_owned()],
description: "Reclassification triplet values (new value; from value; to less than), e.g. '0.0;0.0;1.0;1.0;1.0;2.0'".to_owned(),
parameter_type: ParameterType::String,
default_value: None,
optional: false
});
parameters.push(ToolParameter{
name: "Operate in assign mode? (i.e. Reclass data are pair values rather than triplets)".to_owned(),
flags: vec!["--assign_mode".to_owned()],
description: "Optional Boolean flag indicating whether to operate in assign mode, reclass_vals values are interpreted as new value; old value pairs.".to_owned(),
parameter_type: ParameterType::Boolean,
default_value: None,
optional: true
});
let sep: String = path::MAIN_SEPARATOR.to_string();
let p = format!("{}", env::current_dir().unwrap().display());
let e = format!("{}", env::current_exe().unwrap().display());
let mut short_exe = e
.replace(&p, "")
.replace(".exe", "")
.replace(".", "")
.replace(&sep, "");
if e.contains(".exe") {
short_exe += ".exe";
}
let usage = format!(">>.*{0} -r={1} -v --wd=\"*path*to*data*\" -i='input.tif' -o=output.tif --reclass_vals='0.0;0.0;1.0;1.0;1.0;2.0'
>>.*{0} -r={1} -v --wd=\"*path*to*data*\" -i='input.tif' -o=output.tif --reclass_vals='10;1;20;2;30;3;40;4' --assign_mode ", short_exe, name).replace("*", &sep);
Reclass {
name: name,
description: description,
toolbox: toolbox,
parameters: parameters,
example_usage: usage,
}
}
}
impl WhiteboxTool for Reclass {
fn get_source_file(&self) -> String {
String::from(file!())
}
fn get_tool_name(&self) -> String {
self.name.clone()
}
fn get_tool_description(&self) -> String {
self.description.clone()
}
fn get_tool_parameters(&self) -> String {
match serde_json::to_string(&self.parameters) {
Ok(json_str) => return format!("{{\"parameters\":{}}}", json_str),
Err(err) => return format!("{:?}", err),
}
}
fn get_example_usage(&self) -> String {
self.example_usage.clone()
}
fn get_toolbox(&self) -> String {
self.toolbox.clone()
}
fn run<'a>(
&self,
args: Vec<String>,
working_directory: &'a str,
verbose: bool,
) -> Result<(), Error> {
let mut input_file = String::new();
let mut output_file = String::new();
let mut reclass_str = String::new();
let mut assign_mode = false;
if args.len() == 0 {
return Err(Error::new(
ErrorKind::InvalidInput,
"Tool run with no parameters.",
));
}
for i in 0..args.len() {
let mut arg = args[i].replace("\"", "");
arg = arg.replace("\'", "");
let cmd = arg.split("="); // in case an equals sign was used
let vec = cmd.collect::<Vec<&str>>();
let mut keyval = false;
if vec.len() > 1 {
keyval = true;
}
if vec[0].to_lowercase() == "-i" || vec[0].to_lowercase() == "--input" {
input_file = if keyval {
vec[1].to_string()
} else {
args[i + 1].to_string()
};
} else if vec[0].to_lowercase() == "-o" || vec[0].to_lowercase() == "--output" {
output_file = if keyval {
vec[1].to_string()
} else {
args[i + 1].to_string()
};
} else if vec[0].to_lowercase() == "-reclass_vals"
|| vec[0].to_lowercase() == "--reclass_vals"
{
reclass_str = if keyval {
vec[1].to_string()
} else {
args[i + 1].to_string()
};
} else if vec[0].to_lowercase() == "-assign_mode"
|| vec[0].to_lowercase() == "--assign_mode"
{
if vec.len() == 1 || !vec[1].to_string().to_lowercase().contains("false") {
assign_mode = true;
}
}
}
if verbose {
let tool_name = self.get_tool_name();
let welcome_len = format!("* Welcome to {} *", tool_name).len().max(28);
// 28 = length of the 'Powered by' by statement.
println!("{}", "*".repeat(welcome_len));
println!("* Welcome to {} {}*", tool_name, " ".repeat(welcome_len - 15 - tool_name.len()));
println!("* Powered by WhiteboxTools {}*", " ".repeat(welcome_len - 28));
println!("* www.whiteboxgeo.com {}*", " ".repeat(welcome_len - 23));
println!("{}", "*".repeat(welcome_len));
}
let sep: String = path::MAIN_SEPARATOR.to_string();
let mut progress: usize;
let mut old_progress: usize = 1;
if !input_file.contains(&sep) && !input_file.contains("/") {
input_file = format!("{}{}", working_directory, input_file);
}
if !output_file.contains(&sep) && !output_file.contains("/") {
output_file = format!("{}{}", working_directory, output_file);
}
if verbose {
println!("Reading data...")
};
let input = Arc::new(Raster::new(&input_file, "r")?);
let start = Instant::now();
let rows = input.configs.rows as isize;
let columns = input.configs.columns as isize;
let nodata = input.configs.nodata;
let min_val = input.configs.minimum;
let max_val = input.configs.maximum;
let mut v: Vec<&str> = reclass_str.split(";").collect();
if v.len() < 2 {
// delimiter can be a semicolon, comma, space, or tab.
v = reclass_str.split(",").collect();
if v.len() < 2 {
v = reclass_str.split(" ").collect();
if v.len() < 2 {
v = reclass_str.split("\t").collect();
}
}
}
let reclass_vals: Vec<f64> = v
.iter()
.map(|s| {
if s.to_lowercase().contains("min") {
min_val
} else if s.to_lowercase().contains("max") {
max_val
} else {
s.trim().parse().unwrap()
}
})
.collect();
if reclass_vals.len() % 3 != 0 && !assign_mode {
return Err(Error::new(ErrorKind::InvalidInput,
"The reclass values string must include triplet values (new value; from value; to less than), e.g. '0.0;0.0;1.0;1.0;1.0;2.0'"));
} else if reclass_vals.len() % 2 != 0 && assign_mode {
return Err(Error::new(ErrorKind::InvalidInput,
"The reclass values string must include pair values (new value; old value), e.g. '1;10;2;20;3;30;4;40'"));
}
let num_ranges = match assign_mode {
false => reclass_vals.len() / 3,
true => reclass_vals.len() / 2,
};
let reclass_vals = Arc::new(reclass_vals);
let mut num_procs = num_cpus::get() as isize;
let configs = whitebox_common::configs::get_configs()?;
let max_procs = configs.max_procs;
if max_procs > 0 && max_procs < num_procs {
num_procs = max_procs;
}
let (tx, rx) = mpsc::channel();
let mut output = Raster::initialize_using_file(&output_file, &input);
if !assign_mode {
for tid in 0..num_procs {
let input = input.clone();
let reclass_vals = reclass_vals.clone();
let tx = tx.clone();
thread::spawn(move || {
let mut z: f64;
let mut prev_idx: usize = 0;
for row in (0..rows).filter(|r| r % num_procs == tid) {
let mut data: Vec<f64> = vec![nodata; columns as usize];
for col in 0..columns {
z = input[(row, col)];
if z != nodata {
// This is a shortcut intended to take advantage of the inherent
// spatial autocorrelation in spatial distributions to speed up
// the search for the appropriate range bin.
if z >= reclass_vals[prev_idx * 3 + 1]
&& z < reclass_vals[prev_idx * 3 + 2]
{
z = reclass_vals[prev_idx * 3];
} else {
for a in 0..num_ranges {
if z >= reclass_vals[a * 3 + 1]
&& z < reclass_vals[a * 3 + 2]
{
z = reclass_vals[a * 3];
prev_idx = a;
break;
}
}
}
data[col as usize] = z;
}
}
tx.send((row, data)).unwrap();
}
});
}
for r in 0..rows {
let (row, data) = rx.recv().expect("Error receiving data from thread.");
output.set_row_data(row, data);
if verbose {
progress = (100.0_f64 * r as f64 / (rows - 1) as f64) as usize;
if progress != old_progress {
println!("Progress: {}%", progress);
old_progress = progress;
}
}
}
} else {
// assign_mode
// create a hashmap to hold the assign values
// the key is the old_value and the value is the new_value.
/* Note: Rust doesn't support using HashMaps with floating-point keys because it is unsafe.
NaN != NaN and due to rounding errors sometimes 0.1 ! = 0.1. To deal with this, we apply
a multiplier of 10000 and convert to an i64.
*/
let multiplier = 10000f64;
let mut assign_map = HashMap::new();
for a in 0..num_ranges {
assign_map.insert(
(reclass_vals[a * 2 + 1] * multiplier).round() as i64,
reclass_vals[a * 2],
);
}
let assign_map = Arc::new(assign_map);
for tid in 0..num_procs {
let input = input.clone();
let assign_map = assign_map.clone();
let tx = tx.clone();
thread::spawn(move || {
let mut z: f64;
for row in (0..rows).filter(|r| r % num_procs == tid) {
let mut data: Vec<f64> = vec![nodata; columns as usize];
for col in 0..columns {
z = input[(row, col)];
if z != nodata {
// is z in the hashmap?
if assign_map.contains_key(&((z * multiplier).round() as i64)) {
z = *assign_map
.get(&((z * multiplier).round() as i64))
.unwrap();
}
data[col as usize] = z;
}
}
tx.send((row, data)).unwrap();
}
});
}
for r in 0..rows {
let (row, data) = rx.recv().expect("Error receiving data from thread.");
output.set_row_data(row, data);
if verbose {
progress = (100.0_f64 * r as f64 / (rows - 1) as f64) as usize;
if progress != old_progress {
println!("Progress: {}%", progress);
old_progress = progress;
}
}
}
}
let elapsed_time = get_formatted_elapsed_time(start);
output.add_metadata_entry(format!(
"Created by whitebox_tools\' {} tool",
self.get_tool_name()
));
output.add_metadata_entry(format!("Input file: {}", input_file));
output.add_metadata_entry(format!("Reclass values: {:?}", reclass_vals));
output.add_metadata_entry(format!("Elapsed Time (excluding I/O): {}", elapsed_time));
if verbose {
println!("Saving data...")
};
let _ = match output.write() {
Ok(_) => {
if verbose {
println!("Output file written")
}
}
Err(e) => return Err(e),
};
if verbose {
println!(
"{}",
&format!("Elapsed Time (excluding I/O): {}", elapsed_time)
);
}
Ok(())
}
}
| 39.731132 | 173 | 0.483972 |
fc57b6b8acedfa88ad04291827c9c41e2be745d8 | 65,273 | use crate::cgu_reuse_tracker::CguReuseTracker;
use crate::code_stats::CodeStats;
pub use crate::code_stats::{DataTypeKind, FieldInfo, SizeKind, VariantInfo};
use crate::config::{self, CrateType, OutputType, PrintRequest, SanitizerSet, SwitchWithOptPath};
use crate::filesearch;
use crate::lint::{self, LintId};
use crate::parse::ParseSess;
use crate::search_paths::{PathKind, SearchPath};
pub use rustc_ast::attr::MarkedAttrs;
pub use rustc_ast::Attribute;
use rustc_data_structures::flock;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::jobserver::{self, Client};
use rustc_data_structures::profiling::{duration_to_secs_str, SelfProfiler, SelfProfilerRef};
use rustc_data_structures::sync::{
self, AtomicU64, AtomicUsize, Lock, Lrc, OnceCell, OneThread, Ordering, Ordering::SeqCst,
};
use rustc_errors::annotate_snippet_emitter_writer::AnnotateSnippetEmitterWriter;
use rustc_errors::emitter::{Emitter, EmitterWriter, HumanReadableErrorType};
use rustc_errors::json::JsonEmitter;
use rustc_errors::registry::Registry;
use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, DiagnosticId, ErrorReported};
use rustc_lint_defs::FutureBreakage;
pub use rustc_span::crate_disambiguator::CrateDisambiguator;
use rustc_span::edition::Edition;
use rustc_span::source_map::{FileLoader, MultiSpan, RealFileLoader, SourceMap, Span};
use rustc_span::{sym, SourceFileHashAlgorithm, Symbol};
use rustc_target::asm::InlineAsmArch;
use rustc_target::spec::{CodeModel, PanicStrategy, RelocModel, RelroLevel};
use rustc_target::spec::{SplitDebuginfo, Target, TargetTriple, TlsModel};
use std::cell::{self, RefCell};
use std::env;
use std::fmt;
use std::io::Write;
use std::num::NonZeroU32;
use std::ops::{Div, Mul};
use std::path::PathBuf;
use std::str::FromStr;
use std::sync::Arc;
use std::time::Duration;
pub trait SessionLintStore: sync::Send + sync::Sync {
fn name_to_lint(&self, lint_name: &str) -> LintId;
}
pub struct OptimizationFuel {
/// If `-zfuel=crate=n` is specified, initially set to `n`, otherwise `0`.
remaining: u64,
/// We're rejecting all further optimizations.
out_of_fuel: bool,
}
/// The behavior of the CTFE engine when an error occurs with regards to backtraces.
#[derive(Clone, Copy)]
pub enum CtfeBacktrace {
/// Do nothing special, return the error as usual without a backtrace.
Disabled,
/// Capture a backtrace at the point the error is created and return it in the error
/// (to be printed later if/when the error ever actually gets shown to the user).
Capture,
/// Capture a backtrace at the point the error is created and immediately print it out.
Immediate,
}
/// New-type wrapper around `usize` for representing limits. Ensures that comparisons against
/// limits are consistent throughout the compiler.
#[derive(Clone, Copy, Debug)]
pub struct Limit(pub usize);
impl Limit {
/// Create a new limit from a `usize`.
pub fn new(value: usize) -> Self {
Limit(value)
}
/// Check that `value` is within the limit. Ensures that the same comparisons are used
/// throughout the compiler, as mismatches can cause ICEs, see #72540.
#[inline]
pub fn value_within_limit(&self, value: usize) -> bool {
value <= self.0
}
}
impl fmt::Display for Limit {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl Div<usize> for Limit {
type Output = Limit;
fn div(self, rhs: usize) -> Self::Output {
Limit::new(self.0 / rhs)
}
}
impl Mul<usize> for Limit {
type Output = Limit;
fn mul(self, rhs: usize) -> Self::Output {
Limit::new(self.0 * rhs)
}
}
/// Represents the data associated with a compilation
/// session for a single crate.
pub struct Session {
    /// The target platform being compiled for.
    pub target: Target,
    /// The platform the compiler itself is running on.
    pub host: Target,
    /// The parsed command-line options for this session.
    pub opts: config::Options,
    /// Search path for target libraries of the *host* platform.
    pub host_tlib_path: SearchPath,
    /// `None` if the host and target are the same.
    pub target_tlib_path: Option<SearchPath>,
    /// Parsing state shared across the session, including the source map and
    /// the diagnostic handler (see `Session::diagnostic`).
    pub parse_sess: ParseSess,
    /// Path to the compiler's sysroot, used e.g. to locate target libraries.
    pub sysroot: PathBuf,
    /// The name of the root source file of the crate, in the local file system.
    /// `None` means that there is no source file.
    pub local_crate_source_file: Option<PathBuf>,
    /// The directory the compiler has been executed in plus a flag indicating
    /// if the value stored here has been affected by path remapping.
    pub working_dir: (PathBuf, bool),
    /// Set of `(DiagnosticId, Option<Span>, message)` tuples tracking
    /// (sub)diagnostics that have been set once, but should not be set again,
    /// in order to avoid redundantly verbose output (Issue #24690, #44953).
    pub one_time_diagnostics: Lock<FxHashSet<(DiagnosticMessageId, Option<Span>, String)>>,
    /// The requested crate types; initialized once via `init_crate_types`.
    crate_types: OnceCell<Vec<CrateType>>,
    /// The `crate_disambiguator` is constructed out of all the `-C metadata`
    /// arguments passed to the compiler. Its value together with the crate-name
    /// forms a unique global identifier for the crate. It is used to allow
    /// multiple crates with the same name to coexist. See the
    /// `rustc_codegen_llvm::back::symbol_names` module for more information.
    pub crate_disambiguator: OnceCell<CrateDisambiguator>,
    /// The enabled language features; initialized once via `init_features`.
    features: OnceCell<rustc_feature::Features>,
    /// The lint store; initialized once via `init_lint_store` and consulted
    /// when emitting the future-breakage report.
    lint_store: OnceCell<Lrc<dyn SessionLintStore>>,
    /// The maximum recursion limit for potentially infinitely recursive
    /// operations such as auto-dereference and monomorphization.
    pub recursion_limit: OnceCell<Limit>,
    /// The maximum length of types during monomorphization.
    pub type_length_limit: OnceCell<Limit>,
    /// The maximum blocks a const expression can evaluate.
    pub const_eval_limit: OnceCell<Limit>,
    /// State of the incremental compilation session, if one is active.
    incr_comp_session: OneThread<RefCell<IncrCompSession>>,
    /// Used for incremental compilation tests. Will only be populated if
    /// `-Zquery-dep-graph` is specified.
    pub cgu_reuse_tracker: CguReuseTracker,
    /// Used by `-Z self-profile`.
    pub prof: SelfProfilerRef,
    /// Some measurements that are being gathered during compilation.
    pub perf_stats: PerfStats,
    /// Data about code being compiled, gathered during compilation.
    pub code_stats: CodeStats,
    /// If `-zfuel=crate=n` is specified, `Some(crate)`.
    optimization_fuel_crate: Option<String>,
    /// Tracks fuel info if `-zfuel=crate=n` is specified.
    optimization_fuel: Lock<OptimizationFuel>,
    // The next two are public because the driver needs to read them.
    /// If `-zprint-fuel=crate`, `Some(crate)`.
    pub print_fuel_crate: Option<String>,
    /// Always set to zero and incremented so that we can print fuel expended by a crate.
    pub print_fuel: AtomicU64,
    /// Loaded up early on in the initialization of this `Session` to avoid
    /// false positives about a job server in our environment.
    pub jobserver: Client,
    /// Cap lint level specified by a driver specifically.
    pub driver_lint_caps: FxHashMap<lint::LintId, lint::Level>,
    /// `Span`s of trait methods that weren't found to avoid emitting object safety errors
    pub trait_methods_not_found: Lock<FxHashSet<Span>>,
    /// Mapping from ident span to path span for paths that don't exist as written, but that
    /// exist under `std`. For example, wrote `str::from_utf8` instead of `std::str::from_utf8`.
    pub confused_type_with_std_module: Lock<FxHashMap<Span, Span>>,
    /// Path for libraries that will take preference over libraries shipped by Rust.
    /// Used by windows-gnu targets to prioritize system mingw-w64 libraries.
    pub system_library_path: OneThread<RefCell<Option<Option<PathBuf>>>>,
    /// Tracks the current behavior of the CTFE engine when an error occurs.
    /// Options range from returning the error without a backtrace to returning an error
    /// and immediately printing the backtrace to stderr.
    pub ctfe_backtrace: Lock<CtfeBacktrace>,
    /// This tracks where `-Zunleash-the-miri-inside-of-you` was used to get around a
    /// const check, optionally with the relevant feature gate. We use this to
    /// warn about unleashing, but with a single diagnostic instead of dozens that
    /// drown everything else in noise.
    miri_unleashed_features: Lock<Vec<(Span, Option<Symbol>)>>,
    /// Base directory containing the `src/` for the Rust standard library, and
    /// potentially `rustc` as well, if we can find it. Right now it's always
    /// `$sysroot/lib/rustlib/src/rust` (i.e. the `rustup` `rust-src` component).
    ///
    /// This directory is what the virtual `/rustc/$hash` is translated back to,
    /// if Rust was built with path remapping to `/rustc/$hash` enabled
    /// (the `rust.remap-debuginfo` option in `config.toml`).
    pub real_rust_source_base_dir: Option<PathBuf>,
    /// Architecture to use for interpreting asm!.
    pub asm_arch: Option<InlineAsmArch>,
    /// Set of enabled features for the current target.
    pub target_features: FxHashSet<Symbol>,
    /// Attributes the compiler has marked as known (see `mark_attr_known`).
    known_attrs: Lock<MarkedAttrs>,
    /// Attributes the compiler has marked as used (see `mark_attr_used`).
    used_attrs: Lock<MarkedAttrs>,
    /// `Span`s for `if` conditions that we have suggested turning into `if let`.
    pub if_let_suggestions: Lock<FxHashSet<Span>>,
}
/// Coarse-grained performance counters accumulated during compilation;
/// printed by `Session::print_perf_stats`.
pub struct PerfStats {
    /// The accumulated time spent on computing symbol hashes.
    pub symbol_hash_time: Lock<Duration>,
    /// Total number of values canonicalized queries constructed.
    pub queries_canonicalized: AtomicUsize,
    /// Number of times this query is invoked.
    pub normalize_generic_arg_after_erasing_regions: AtomicUsize,
    /// Number of times this query is invoked.
    pub normalize_projection_ty: AtomicUsize,
}
/// Enum to support dispatch of one-time diagnostics (in `Session.diag_once`).
enum DiagnosticBuilderMethod {
    /// Attach a note with no span.
    Note,
    /// Attach a note at a specific span.
    SpanNote,
    /// Attach a suggestion at a specific span; the payload is the suggested replacement text.
    SpanSuggestion(String), // suggestion
    // Add more variants as needed to support one-time diagnostics.
}
/// Trait implemented by error types. This should not be implemented manually. Instead, use
/// `#[derive(SessionDiagnostic)]` -- see [rustc_macros::SessionDiagnostic].
pub trait SessionDiagnostic<'a> {
    /// Convert `self` into a `DiagnosticBuilder` attached to `sess`.
    /// The returned builder must be used (emitted) by the caller, hence `#[must_use]`.
    #[must_use]
    fn into_diagnostic(self, sess: &'a Session) -> DiagnosticBuilder<'a>;
}
/// Diagnostic message ID, used by `Session.one_time_diagnostics` to avoid
/// emitting the same message more than once.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum DiagnosticMessageId {
    /// An `EXXXX` error code, stored as its integer part.
    ErrorId(u16), // EXXXX error code as integer
    /// Identified by the lint the diagnostic originates from.
    LintId(lint::LintId),
    /// Identified by a stability gate's tracking issue number, if any.
    StabilityId(Option<NonZeroU32>), // issue number
}
impl From<&'static lint::Lint> for DiagnosticMessageId {
    fn from(lint: &'static lint::Lint) -> Self {
        // A lint-sourced diagnostic is identified by its `LintId`.
        Self::LintId(lint::LintId::of(lint))
    }
}
impl Session {
pub fn miri_unleashed_feature(&self, span: Span, feature_gate: Option<Symbol>) {
self.miri_unleashed_features.lock().push((span, feature_gate));
}
    /// Emit one warning summarizing every const check skipped via
    /// `-Zunleash-the-miri-inside-of-you`, and escalate to a hard error if any
    /// skipped check was guarding a feature gate.
    fn check_miri_unleashed_features(&self) {
        let unleashed_features = self.miri_unleashed_features.lock();
        if !unleashed_features.is_empty() {
            let mut must_err = false;
            // Create a diagnostic pointing at where things got unleashed.
            let mut diag = self.struct_warn("skipping const checks");
            for &(span, feature_gate) in unleashed_features.iter() {
                // FIXME: `span_label` doesn't do anything, so we use "help" as a hack.
                if let Some(feature_gate) = feature_gate {
                    diag.span_help(span, &format!("skipping check for `{}` feature", feature_gate));
                    // The unleash flag must *not* be used to just "hack around" feature gates.
                    must_err = true;
                } else {
                    diag.span_help(span, "skipping check that does not even have a feature gate");
                }
            }
            diag.emit();
            // If we should err, make sure we did.
            if must_err && !self.has_errors() {
                // We have skipped a feature gate, and not run into other errors... reject.
                self.err(
                    "`-Zunleash-the-miri-inside-of-you` may not be used to circumvent feature \
                     gates, except when testing error paths in the CTFE engine",
                );
            }
        }
    }
    /// Invoked all the way at the end to finish off diagnostics printing.
    pub fn finish_diagnostics(&self, registry: &Registry) {
        self.check_miri_unleashed_features();
        self.diagnostic().print_error_count(registry);
        self.emit_future_breakage();
    }
    /// If the future-incompat report is enabled, drain all diagnostics tagged as
    /// future-breakage lints, look up each lint's breakage metadata in the lint
    /// store, and emit the aggregated report.
    fn emit_future_breakage(&self) {
        if !self.opts.debugging_opts.emit_future_incompat_report {
            return;
        }
        let diags = self.diagnostic().take_future_breakage_diagnostics();
        if diags.is_empty() {
            return;
        }
        // If any future-breakage lints were registered, this lint store
        // should be available
        let lint_store = self.lint_store.get().expect("`lint_store` not initialized!");
        let diags_and_breakage: Vec<(FutureBreakage, Diagnostic)> = diags
            .into_iter()
            .map(|diag| {
                // Only diagnostics tagged `has_future_breakage` should have been drained above.
                let lint_name = match &diag.code {
                    Some(DiagnosticId::Lint { name, has_future_breakage: true }) => name,
                    _ => panic!("Unexpected code in diagnostic {:?}", diag),
                };
                let lint = lint_store.name_to_lint(&lint_name);
                let future_breakage =
                    lint.lint.future_incompatible.unwrap().future_breakage.unwrap();
                (future_breakage, diag)
            })
            .collect();
        self.parse_sess.span_diagnostic.emit_future_breakage_report(diags_and_breakage);
    }
pub fn local_crate_disambiguator(&self) -> CrateDisambiguator {
self.crate_disambiguator.get().copied().unwrap()
}
pub fn crate_types(&self) -> &[CrateType] {
self.crate_types.get().unwrap().as_slice()
}
pub fn init_crate_types(&self, crate_types: Vec<CrateType>) {
self.crate_types.set(crate_types).expect("`crate_types` was initialized twice")
}
#[inline]
pub fn recursion_limit(&self) -> Limit {
self.recursion_limit.get().copied().unwrap()
}
#[inline]
pub fn type_length_limit(&self) -> Limit {
self.type_length_limit.get().copied().unwrap()
}
pub fn const_eval_limit(&self) -> Limit {
self.const_eval_limit.get().copied().unwrap()
}
pub fn struct_span_warn<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_span_warn(sp, msg)
}
pub fn struct_span_warn_with_code<S: Into<MultiSpan>>(
&self,
sp: S,
msg: &str,
code: DiagnosticId,
) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_span_warn_with_code(sp, msg, code)
}
pub fn struct_warn(&self, msg: &str) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_warn(msg)
}
pub fn struct_span_allow<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_span_allow(sp, msg)
}
pub fn struct_allow(&self, msg: &str) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_allow(msg)
}
pub fn struct_span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_span_err(sp, msg)
}
pub fn struct_span_err_with_code<S: Into<MultiSpan>>(
&self,
sp: S,
msg: &str,
code: DiagnosticId,
) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_span_err_with_code(sp, msg, code)
}
// FIXME: This method should be removed (every error should have an associated error code).
pub fn struct_err(&self, msg: &str) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_err(msg)
}
pub fn struct_err_with_code(&self, msg: &str, code: DiagnosticId) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_err_with_code(msg, code)
}
pub fn struct_span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_span_fatal(sp, msg)
}
pub fn struct_span_fatal_with_code<S: Into<MultiSpan>>(
&self,
sp: S,
msg: &str,
code: DiagnosticId,
) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_span_fatal_with_code(sp, msg, code)
}
pub fn struct_fatal(&self, msg: &str) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_fatal(msg)
}
pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
self.diagnostic().span_fatal(sp, msg).raise()
}
pub fn span_fatal_with_code<S: Into<MultiSpan>>(
&self,
sp: S,
msg: &str,
code: DiagnosticId,
) -> ! {
self.diagnostic().span_fatal_with_code(sp, msg, code).raise()
}
pub fn fatal(&self, msg: &str) -> ! {
self.diagnostic().fatal(msg).raise()
}
pub fn span_err_or_warn<S: Into<MultiSpan>>(&self, is_warning: bool, sp: S, msg: &str) {
if is_warning {
self.span_warn(sp, msg);
} else {
self.span_err(sp, msg);
}
}
pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
self.diagnostic().span_err(sp, msg)
}
pub fn span_err_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: DiagnosticId) {
self.diagnostic().span_err_with_code(sp, &msg, code)
}
pub fn err(&self, msg: &str) {
self.diagnostic().err(msg)
}
pub fn emit_err<'a>(&'a self, err: impl SessionDiagnostic<'a>) {
err.into_diagnostic(self).emit()
}
pub fn err_count(&self) -> usize {
self.diagnostic().err_count()
}
pub fn has_errors(&self) -> bool {
self.diagnostic().has_errors()
}
pub fn has_errors_or_delayed_span_bugs(&self) -> bool {
self.diagnostic().has_errors_or_delayed_span_bugs()
}
pub fn abort_if_errors(&self) {
self.diagnostic().abort_if_errors();
}
pub fn compile_status(&self) -> Result<(), ErrorReported> {
if self.has_errors() {
self.diagnostic().emit_stashed_diagnostics();
Err(ErrorReported)
} else {
Ok(())
}
}
// FIXME(matthewjasper) Remove this method, it should never be needed.
pub fn track_errors<F, T>(&self, f: F) -> Result<T, ErrorReported>
where
F: FnOnce() -> T,
{
let old_count = self.err_count();
let result = f();
let errors = self.err_count() - old_count;
if errors == 0 { Ok(result) } else { Err(ErrorReported) }
}
pub fn span_warn<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
self.diagnostic().span_warn(sp, msg)
}
pub fn span_warn_with_code<S: Into<MultiSpan>>(&self, sp: S, msg: &str, code: DiagnosticId) {
self.diagnostic().span_warn_with_code(sp, msg, code)
}
pub fn warn(&self, msg: &str) {
self.diagnostic().warn(msg)
}
pub fn opt_span_warn<S: Into<MultiSpan>>(&self, opt_sp: Option<S>, msg: &str) {
match opt_sp {
Some(sp) => self.span_warn(sp, msg),
None => self.warn(msg),
}
}
/// Delay a span_bug() call until abort_if_errors()
#[track_caller]
pub fn delay_span_bug<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
self.diagnostic().delay_span_bug(sp, msg)
}
/// Used for code paths of expensive computations that should only take place when
/// warnings or errors are emitted. If no messages are emitted ("good path"), then
/// it's likely a bug.
pub fn delay_good_path_bug(&self, msg: &str) {
if self.opts.debugging_opts.print_type_sizes
|| self.opts.debugging_opts.query_dep_graph
|| self.opts.debugging_opts.dump_mir.is_some()
|| self.opts.debugging_opts.unpretty.is_some()
|| self.opts.output_types.contains_key(&OutputType::Mir)
|| std::env::var_os("RUSTC_LOG").is_some()
{
return;
}
self.diagnostic().delay_good_path_bug(msg)
}
pub fn note_without_error(&self, msg: &str) {
self.diagnostic().note_without_error(msg)
}
pub fn span_note_without_error<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
self.diagnostic().span_note_without_error(sp, msg)
}
pub fn struct_note_without_error(&self, msg: &str) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_note_without_error(msg)
}
pub fn diagnostic(&self) -> &rustc_errors::Handler {
&self.parse_sess.span_diagnostic
}
    /// Analogous to calling methods on the given `DiagnosticBuilder`, but
    /// deduplicates on lint ID, span (if any), and message for this `Session`
    fn diag_once<'a, 'b>(
        &'a self,
        diag_builder: &'b mut DiagnosticBuilder<'a>,
        method: DiagnosticBuilderMethod,
        msg_id: DiagnosticMessageId,
        message: &str,
        span_maybe: Option<Span>,
    ) {
        // `insert` returns `true` only the first time this exact
        // (id, span, message) triple is seen; later duplicates are dropped.
        let id_span_message = (msg_id, span_maybe, message.to_owned());
        let fresh = self.one_time_diagnostics.borrow_mut().insert(id_span_message);
        if fresh {
            match method {
                DiagnosticBuilderMethod::Note => {
                    diag_builder.note(message);
                }
                DiagnosticBuilderMethod::SpanNote => {
                    // The span-taking variants require a span to have been supplied.
                    let span = span_maybe.expect("`span_note` needs a span");
                    diag_builder.span_note(span, message);
                }
                DiagnosticBuilderMethod::SpanSuggestion(suggestion) => {
                    let span = span_maybe.expect("`span_suggestion_*` needs a span");
                    diag_builder.span_suggestion(
                        span,
                        message,
                        suggestion,
                        Applicability::Unspecified,
                    );
                }
            }
        }
    }
pub fn diag_span_note_once<'a, 'b>(
&'a self,
diag_builder: &'b mut DiagnosticBuilder<'a>,
msg_id: DiagnosticMessageId,
span: Span,
message: &str,
) {
self.diag_once(
diag_builder,
DiagnosticBuilderMethod::SpanNote,
msg_id,
message,
Some(span),
);
}
pub fn diag_note_once<'a, 'b>(
&'a self,
diag_builder: &'b mut DiagnosticBuilder<'a>,
msg_id: DiagnosticMessageId,
message: &str,
) {
self.diag_once(diag_builder, DiagnosticBuilderMethod::Note, msg_id, message, None);
}
pub fn diag_span_suggestion_once<'a, 'b>(
&'a self,
diag_builder: &'b mut DiagnosticBuilder<'a>,
msg_id: DiagnosticMessageId,
span: Span,
message: &str,
suggestion: String,
) {
self.diag_once(
diag_builder,
DiagnosticBuilderMethod::SpanSuggestion(suggestion),
msg_id,
message,
Some(span),
);
}
#[inline]
pub fn source_map(&self) -> &SourceMap {
self.parse_sess.source_map()
}
pub fn verbose(&self) -> bool {
self.opts.debugging_opts.verbose
}
pub fn time_passes(&self) -> bool {
self.opts.debugging_opts.time_passes || self.opts.debugging_opts.time
}
pub fn instrument_mcount(&self) -> bool {
self.opts.debugging_opts.instrument_mcount
}
pub fn time_llvm_passes(&self) -> bool {
self.opts.debugging_opts.time_llvm_passes
}
pub fn meta_stats(&self) -> bool {
self.opts.debugging_opts.meta_stats
}
pub fn asm_comments(&self) -> bool {
self.opts.debugging_opts.asm_comments
}
pub fn verify_llvm_ir(&self) -> bool {
self.opts.debugging_opts.verify_llvm_ir || option_env!("RUSTC_VERIFY_LLVM_IR").is_some()
}
pub fn borrowck_stats(&self) -> bool {
self.opts.debugging_opts.borrowck_stats
}
pub fn print_llvm_passes(&self) -> bool {
self.opts.debugging_opts.print_llvm_passes
}
pub fn binary_dep_depinfo(&self) -> bool {
self.opts.debugging_opts.binary_dep_depinfo
}
pub fn mir_opt_level(&self) -> usize {
self.opts
.debugging_opts
.mir_opt_level
.unwrap_or_else(|| if self.opts.optimize != config::OptLevel::No { 2 } else { 1 })
}
/// Gets the features enabled for the current compilation session.
/// DO NOT USE THIS METHOD if there is a TyCtxt available, as it circumvents
/// dependency tracking. Use tcx.features() instead.
#[inline]
pub fn features_untracked(&self) -> &rustc_feature::Features {
self.features.get().unwrap()
}
pub fn init_features(&self, features: rustc_feature::Features) {
match self.features.set(features) {
Ok(()) => {}
Err(_) => panic!("`features` was initialized twice"),
}
}
pub fn init_lint_store(&self, lint_store: Lrc<dyn SessionLintStore>) {
self.lint_store
.set(lint_store)
.map_err(|_| ())
.expect("`lint_store` was initialized twice");
}
    /// Calculates the flavor of LTO to use for this compilation.
    ///
    /// Precedence: a target that requires LTO, then an explicit `-C lto` flag,
    /// then the deprecated `-Z thinlto`, and finally a default derived from the
    /// number of codegen units and the optimization level.
    pub fn lto(&self) -> config::Lto {
        // If our target has codegen requirements ignore the command line
        if self.target.requires_lto {
            return config::Lto::Fat;
        }
        // If the user specified something, return that. If they only said `-C
        // lto` and we've for whatever reason forced off ThinLTO via the CLI,
        // then ensure we can't use a ThinLTO.
        match self.opts.cg.lto {
            config::LtoCli::Unspecified => {
                // The compiler was invoked without the `-Clto` flag. Fall
                // through to the default handling
            }
            config::LtoCli::No => {
                // The user explicitly opted out of any kind of LTO
                return config::Lto::No;
            }
            config::LtoCli::Yes | config::LtoCli::Fat | config::LtoCli::NoParam => {
                // All of these mean fat LTO
                return config::Lto::Fat;
            }
            config::LtoCli::Thin => {
                return if self.opts.cli_forced_thinlto_off {
                    config::Lto::Fat
                } else {
                    config::Lto::Thin
                };
            }
        }
        // Ok at this point the target doesn't require anything and the user
        // hasn't asked for anything. Our next decision is whether or not
        // we enable "auto" ThinLTO where we use multiple codegen units and
        // then do ThinLTO over those codegen units. The logic below will
        // either return `No` or `ThinLocal`.
        // If processing command line options determined that we're incompatible
        // with ThinLTO (e.g., `-C lto --emit llvm-ir`) then return that option.
        if self.opts.cli_forced_thinlto_off {
            return config::Lto::No;
        }
        // If `-Z thinlto` specified process that, but note that this is mostly
        // a deprecated option now that `-C lto=thin` exists.
        if let Some(enabled) = self.opts.debugging_opts.thinlto {
            if enabled {
                return config::Lto::ThinLocal;
            } else {
                return config::Lto::No;
            }
        }
        // If there's only one codegen unit and LTO isn't enabled then there's
        // no need for ThinLTO so just return false.
        if self.codegen_units() == 1 {
            return config::Lto::No;
        }
        // Now we're in "defaults" territory. By default we enable ThinLTO for
        // optimized compiles (anything greater than O0).
        match self.opts.optimize {
            config::OptLevel::No => config::Lto::No,
            _ => config::Lto::ThinLocal,
        }
    }
/// Returns the panic strategy for this compile session. If the user explicitly selected one
/// using '-C panic', use that, otherwise use the panic strategy defined by the target.
pub fn panic_strategy(&self) -> PanicStrategy {
self.opts.cg.panic.unwrap_or(self.target.panic_strategy)
}
pub fn fewer_names(&self) -> bool {
if let Some(fewer_names) = self.opts.debugging_opts.fewer_names {
fewer_names
} else {
let more_names = self.opts.output_types.contains_key(&OutputType::LlvmAssembly)
|| self.opts.output_types.contains_key(&OutputType::Bitcode)
// AddressSanitizer and MemorySanitizer use alloca name when reporting an issue.
|| self.opts.debugging_opts.sanitizer.intersects(SanitizerSet::ADDRESS | SanitizerSet::MEMORY);
!more_names
}
}
pub fn unstable_options(&self) -> bool {
self.opts.debugging_opts.unstable_options
}
pub fn is_nightly_build(&self) -> bool {
self.opts.unstable_features.is_nightly_build()
}
pub fn overflow_checks(&self) -> bool {
self.opts
.cg
.overflow_checks
.or(self.opts.debugging_opts.force_overflow_checks)
.unwrap_or(self.opts.debug_assertions)
}
/// Check whether this compile session and crate type use static crt.
pub fn crt_static(&self, crate_type: Option<CrateType>) -> bool {
if !self.target.crt_static_respected {
// If the target does not opt in to crt-static support, use its default.
return self.target.crt_static_default;
}
let requested_features = self.opts.cg.target_feature.split(',');
let found_negative = requested_features.clone().any(|r| r == "-crt-static");
let found_positive = requested_features.clone().any(|r| r == "+crt-static");
if found_positive || found_negative {
found_positive
} else if crate_type == Some(CrateType::ProcMacro)
|| crate_type == None && self.opts.crate_types.contains(&CrateType::ProcMacro)
{
// FIXME: When crate_type is not available,
// we use compiler options to determine the crate_type.
// We can't check `#![crate_type = "proc-macro"]` here.
false
} else {
self.target.crt_static_default
}
}
pub fn inline_asm_dialect(&self) -> rustc_ast::LlvmAsmDialect {
match self.asm_arch {
Some(InlineAsmArch::X86 | InlineAsmArch::X86_64) => rustc_ast::LlvmAsmDialect::Intel,
_ => rustc_ast::LlvmAsmDialect::Att,
}
}
pub fn relocation_model(&self) -> RelocModel {
self.opts.cg.relocation_model.unwrap_or(self.target.relocation_model)
}
pub fn code_model(&self) -> Option<CodeModel> {
self.opts.cg.code_model.or(self.target.code_model)
}
pub fn tls_model(&self) -> TlsModel {
self.opts.debugging_opts.tls_model.unwrap_or(self.target.tls_model)
}
pub fn is_wasi_reactor(&self) -> bool {
self.target.options.os == "wasi"
&& matches!(
self.opts.debugging_opts.wasi_exec_model,
Some(config::WasiExecModel::Reactor)
)
}
pub fn split_debuginfo(&self) -> SplitDebuginfo {
self.opts.cg.split_debuginfo.unwrap_or(self.target.split_debuginfo)
}
pub fn target_can_use_split_dwarf(&self) -> bool {
!self.target.is_like_windows && !self.target.is_like_osx
}
pub fn must_not_eliminate_frame_pointers(&self) -> bool {
// "mcount" function relies on stack pointer.
// See <https://sourceware.org/binutils/docs/gprof/Implementation.html>.
if self.instrument_mcount() {
true
} else if let Some(x) = self.opts.cg.force_frame_pointers {
x
} else {
!self.target.eliminate_frame_pointer
}
}
pub fn must_emit_unwind_tables(&self) -> bool {
// This is used to control the emission of the `uwtable` attribute on
// LLVM functions.
//
// At the very least, unwind tables are needed when compiling with
// `-C panic=unwind`.
//
// On some targets (including windows), however, exceptions include
// other events such as illegal instructions, segfaults, etc. This means
// that on Windows we end up still needing unwind tables even if the `-C
// panic=abort` flag is passed.
//
// You can also find more info on why Windows needs unwind tables in:
// https://bugzilla.mozilla.org/show_bug.cgi?id=1302078
//
// If a target requires unwind tables, then they must be emitted.
// Otherwise, we can defer to the `-C force-unwind-tables=<yes/no>`
// value, if it is provided, or disable them, if not.
if self.panic_strategy() == PanicStrategy::Unwind {
true
} else if self.target.requires_uwtable {
true
} else {
self.opts.cg.force_unwind_tables.unwrap_or(false)
}
}
/// Returns the symbol name for the registrar function,
/// given the crate `Svh` and the function `DefIndex`.
pub fn generate_plugin_registrar_symbol(&self, disambiguator: CrateDisambiguator) -> String {
format!("__rustc_plugin_registrar_{}__", disambiguator.to_fingerprint().to_hex())
}
pub fn generate_proc_macro_decls_symbol(&self, disambiguator: CrateDisambiguator) -> String {
format!("__rustc_proc_macro_decls_{}__", disambiguator.to_fingerprint().to_hex())
}
pub fn target_filesearch(&self, kind: PathKind) -> filesearch::FileSearch<'_> {
filesearch::FileSearch::new(
&self.sysroot,
self.opts.target_triple.triple(),
&self.opts.search_paths,
// `target_tlib_path == None` means it's the same as `host_tlib_path`.
self.target_tlib_path.as_ref().unwrap_or(&self.host_tlib_path),
kind,
)
}
pub fn host_filesearch(&self, kind: PathKind) -> filesearch::FileSearch<'_> {
filesearch::FileSearch::new(
&self.sysroot,
config::host_triple(),
&self.opts.search_paths,
&self.host_tlib_path,
kind,
)
}
    /// Toggle whether the active incremental session should load the dep graph;
    /// a no-op if no session is active.
    pub fn set_incr_session_load_dep_graph(&self, load: bool) {
        let mut incr_comp_session = self.incr_comp_session.borrow_mut();
        if let IncrCompSession::Active { ref mut load_dep_graph, .. } = *incr_comp_session {
            *load_dep_graph = load;
        }
    }
    /// Whether the active incremental session will load the dep graph
    /// (`false` if no session is active).
    pub fn incr_session_load_dep_graph(&self) -> bool {
        let incr_comp_session = self.incr_comp_session.borrow();
        match *incr_comp_session {
            IncrCompSession::Active { load_dep_graph, .. } => load_dep_graph,
            _ => false,
        }
    }
    /// Transition the session from `NotInitialized` to `Active`;
    /// panics if a session was already initialized.
    pub fn init_incr_comp_session(
        &self,
        session_dir: PathBuf,
        lock_file: flock::Lock,
        load_dep_graph: bool,
    ) {
        let mut incr_comp_session = self.incr_comp_session.borrow_mut();
        if let IncrCompSession::NotInitialized = *incr_comp_session {
        } else {
            panic!("Trying to initialize IncrCompSession `{:?}`", *incr_comp_session)
        }
        *incr_comp_session =
            IncrCompSession::Active { session_directory: session_dir, lock_file, load_dep_graph };
    }
    /// Transition an `Active` session to `Finalized` with its new directory;
    /// panics if no session is active.
    pub fn finalize_incr_comp_session(&self, new_directory_path: PathBuf) {
        let mut incr_comp_session = self.incr_comp_session.borrow_mut();
        if let IncrCompSession::Active { .. } = *incr_comp_session {
        } else {
            panic!("trying to finalize `IncrCompSession` `{:?}`", *incr_comp_session);
        }
        // Note: this will also drop the lock file, thus unlocking the directory.
        *incr_comp_session = IncrCompSession::Finalized { session_directory: new_directory_path };
    }
    /// Transition an `Active` session to `InvalidBecauseOfErrors`; a no-op if
    /// already invalid, and a panic in any other state.
    pub fn mark_incr_comp_session_as_invalid(&self) {
        let mut incr_comp_session = self.incr_comp_session.borrow_mut();
        let session_directory = match *incr_comp_session {
            IncrCompSession::Active { ref session_directory, .. } => session_directory.clone(),
            IncrCompSession::InvalidBecauseOfErrors { .. } => return,
            _ => panic!("trying to invalidate `IncrCompSession` `{:?}`", *incr_comp_session),
        };
        // Note: this will also drop the lock file, thus unlocking the directory.
        *incr_comp_session = IncrCompSession::InvalidBecauseOfErrors { session_directory };
    }
    /// The directory of the current incremental session; panics if the session
    /// was never initialized.
    pub fn incr_comp_session_dir(&self) -> cell::Ref<'_, PathBuf> {
        let incr_comp_session = self.incr_comp_session.borrow();
        cell::Ref::map(incr_comp_session, |incr_comp_session| match *incr_comp_session {
            IncrCompSession::NotInitialized => panic!(
                "trying to get session directory from `IncrCompSession`: {:?}",
                *incr_comp_session,
            ),
            IncrCompSession::Active { ref session_directory, .. }
            | IncrCompSession::Finalized { ref session_directory }
            | IncrCompSession::InvalidBecauseOfErrors { ref session_directory } => {
                session_directory
            }
        })
    }
    /// Like `incr_comp_session_dir`, but `None` when incremental compilation is disabled.
    pub fn incr_comp_session_dir_opt(&self) -> Option<cell::Ref<'_, PathBuf>> {
        self.opts.incremental.as_ref().map(|_| self.incr_comp_session_dir())
    }
pub fn print_perf_stats(&self) {
eprintln!(
"Total time spent computing symbol hashes: {}",
duration_to_secs_str(*self.perf_stats.symbol_hash_time.lock())
);
eprintln!(
"Total queries canonicalized: {}",
self.perf_stats.queries_canonicalized.load(Ordering::Relaxed)
);
eprintln!(
"normalize_generic_arg_after_erasing_regions: {}",
self.perf_stats.normalize_generic_arg_after_erasing_regions.load(Ordering::Relaxed)
);
eprintln!(
"normalize_projection_ty: {}",
self.perf_stats.normalize_projection_ty.load(Ordering::Relaxed)
);
}
    /// We want to know if we're allowed to do an optimization for crate foo from -z fuel=foo=n.
    /// This expends fuel if applicable, and records fuel if applicable.
    pub fn consider_optimizing<T: Fn() -> String>(&self, crate_name: &str, msg: T) -> bool {
        let mut ret = true;
        if let Some(ref c) = self.optimization_fuel_crate {
            if c == crate_name {
                // Fuel accounting is only supported single-threaded.
                assert_eq!(self.threads(), 1);
                let mut fuel = self.optimization_fuel.lock();
                ret = fuel.remaining != 0;
                // Warn exactly once, when the tank first runs dry.
                if fuel.remaining == 0 && !fuel.out_of_fuel {
                    self.warn(&format!("optimization-fuel-exhausted: {}", msg()));
                    fuel.out_of_fuel = true;
                } else if fuel.remaining > 0 {
                    fuel.remaining -= 1;
                }
            }
        }
        if let Some(ref c) = self.print_fuel_crate {
            if c == crate_name {
                // Fuel printing is likewise single-threaded only.
                assert_eq!(self.threads(), 1);
                self.print_fuel.fetch_add(1, SeqCst);
            }
        }
        ret
    }
    /// Returns the number of query threads that should be used for this
    /// compilation.
    pub fn threads(&self) -> usize {
        self.opts.debugging_opts.threads
    }
    /// Returns the number of codegen units that should be used for this
    /// compilation. Precedence: explicit CLI choice, target default,
    /// incremental default (256), then the general default (16).
    pub fn codegen_units(&self) -> usize {
        if let Some(n) = self.opts.cli_forced_codegen_units {
            return n;
        }
        if let Some(n) = self.target.default_codegen_units {
            return n as usize;
        }
        // If incremental compilation is turned on, we default to a high number
        // codegen units in order to reduce the "collateral damage" small
        // changes cause.
        if self.opts.incremental.is_some() {
            return 256;
        }
        // Why is 16 codegen units the default all the time?
        //
        // The main reason for enabling multiple codegen units by default is to
        // leverage the ability for the codegen backend to do codegen and
        // optimization in parallel. This allows us, especially for large crates, to
        // make good use of all available resources on the machine once we've
        // hit that stage of compilation. Large crates especially then often
        // take a long time in codegen/optimization and this helps us amortize that
        // cost.
        //
        // Note that a high number here doesn't mean that we'll be spawning a
        // large number of threads in parallel. The backend of rustc contains
        // global rate limiting through the `jobserver` crate so we'll never
        // overload the system with too much work, but rather we'll only be
        // optimizing when we're otherwise cooperating with other instances of
        // rustc.
        //
        // Rather a high number here means that we should be able to keep a lot
        // of idle cpus busy. By ensuring that no codegen unit takes *too* long
        // to build we'll be guaranteed that all cpus will finish pretty closely
        // to one another and we should make relatively optimal use of system
        // resources
        //
        // Note that the main cost of codegen units is that it prevents LLVM
        // from inlining across codegen units. Users in general don't have a lot
        // of control over how codegen units are split up so it's our job in the
        // compiler to ensure that undue performance isn't lost when using
        // codegen units (aka we can't require everyone to slap `#[inline]` on
        // everything).
        //
        // If we're compiling at `-O0` then the number doesn't really matter too
        // much because performance doesn't matter and inlining is ok to lose.
        // In debug mode we just want to try to guarantee that no cpu is stuck
        // doing work that could otherwise be farmed to others.
        //
        // In release mode, however (O1 and above) performance does indeed
        // matter! To recover the loss in performance due to inlining we'll be
        // enabling ThinLTO by default (the function for which is just below).
        // This will ensure that we recover any inlining wins we otherwise lost
        // through codegen unit partitioning.
        //
        // ---
        //
        // Ok that's a lot of words but the basic tl;dr; is that we want a high
        // number here -- but not too high. Additionally we're "safe" to have it
        // always at the same number at all optimization levels.
        //
        // As a result 16 was chosen here! Mostly because it was a power of 2
        // and most benchmarks agreed it was roughly a local optimum. Not very
        // scientific.
        16
    }
pub fn teach(&self, code: &DiagnosticId) -> bool {
self.opts.debugging_opts.teach && self.diagnostic().must_teach(code)
}
pub fn rust_2015(&self) -> bool {
self.opts.edition == Edition::Edition2015
}
/// Are we allowed to use features from the Rust 2018 edition?
pub fn rust_2018(&self) -> bool {
self.opts.edition >= Edition::Edition2018
}
/// Are we allowed to use features from the Rust 2021 edition?
pub fn rust_2021(&self) -> bool {
self.opts.edition >= Edition::Edition2021
}
    /// Returns the Rust edition this crate is being compiled under.
    pub fn edition(&self) -> Edition {
        self.opts.edition
    }
/// Returns `true` if we cannot skip the PLT for shared library calls.
pub fn needs_plt(&self) -> bool {
// Check if the current target usually needs PLT to be enabled.
// The user can use the command line flag to override it.
let needs_plt = self.target.needs_plt;
let dbg_opts = &self.opts.debugging_opts;
let relro_level = dbg_opts.relro_level.unwrap_or(self.target.relro_level);
// Only enable this optimization by default if full relro is also enabled.
// In this case, lazy binding was already unavailable, so nothing is lost.
// This also ensures `-Wl,-z,now` is supported by the linker.
let full_relro = RelroLevel::Full == relro_level;
// If user didn't explicitly forced us to use / skip the PLT,
// then try to skip it where possible.
dbg_opts.plt.unwrap_or(needs_plt || !full_relro)
}
/// Checks if LLVM lifetime markers should be emitted.
pub fn emit_lifetime_markers(&self) -> bool {
self.opts.optimize != config::OptLevel::No
// AddressSanitizer uses lifetimes to detect use after scope bugs.
// MemorySanitizer uses lifetimes to detect use of uninitialized stack variables.
// HWAddressSanitizer will use lifetimes to detect use after scope bugs in the future.
|| self.opts.debugging_opts.sanitizer.intersects(SanitizerSet::ADDRESS | SanitizerSet::MEMORY | SanitizerSet::HWADDRESS)
}
pub fn link_dead_code(&self) -> bool {
self.opts.cg.link_dead_code.unwrap_or(false)
}
    /// Records `attr` in the session's set of known attributes.
    pub fn mark_attr_known(&self, attr: &Attribute) {
        self.known_attrs.lock().mark(attr)
    }
    /// Returns `true` if `attr` was previously recorded via `mark_attr_known`.
    pub fn is_attr_known(&self, attr: &Attribute) -> bool {
        self.known_attrs.lock().is_marked(attr)
    }
    /// Records `attr` in the session's set of used attributes.
    pub fn mark_attr_used(&self, attr: &Attribute) {
        self.used_attrs.lock().mark(attr)
    }
    /// Returns `true` if `attr` was previously recorded via `mark_attr_used`.
    pub fn is_attr_used(&self, attr: &Attribute) -> bool {
        self.used_attrs.lock().is_marked(attr)
    }
/// Returns `true` if the attribute's path matches the argument. If it
/// matches, then the attribute is marked as used.
///
/// This method should only be used by rustc, other tools can use
/// `Attribute::has_name` instead, because only rustc is supposed to report
/// the `unused_attributes` lint. (`MetaItem` and `NestedMetaItem` are
/// produced by lowering an `Attribute` and don't have identity, so they
/// only have the `has_name` method, and you need to mark the original
/// `Attribute` as used when necessary.)
pub fn check_name(&self, attr: &Attribute, name: Symbol) -> bool {
let matches = attr.has_name(name);
if matches {
self.mark_attr_used(attr);
}
matches
}
pub fn is_proc_macro_attr(&self, attr: &Attribute) -> bool {
[sym::proc_macro, sym::proc_macro_attribute, sym::proc_macro_derive]
.iter()
.any(|kind| self.check_name(attr, *kind))
}
pub fn contains_name(&self, attrs: &[Attribute], name: Symbol) -> bool {
attrs.iter().any(|item| self.check_name(item, name))
}
pub fn find_by_name<'a>(
&'a self,
attrs: &'a [Attribute],
name: Symbol,
) -> Option<&'a Attribute> {
attrs.iter().find(|attr| self.check_name(attr, name))
}
pub fn filter_by_name<'a>(
&'a self,
attrs: &'a [Attribute],
name: Symbol,
) -> impl Iterator<Item = &'a Attribute> {
attrs.iter().filter(move |attr| self.check_name(attr, name))
}
pub fn first_attr_value_str_by_name(
&self,
attrs: &[Attribute],
name: Symbol,
) -> Option<Symbol> {
attrs.iter().find(|at| self.check_name(at, name)).and_then(|at| at.value_str())
}
}
/// Constructs the diagnostic emitter selected by the command-line options.
///
/// Human-readable output goes through either the annotate-snippet renderer or
/// the classic `EmitterWriter`; JSON output goes through `JsonEmitter`. When
/// `emitter_dest` is `Some`, diagnostics are written to that buffer instead of
/// stderr where supported.
fn default_emitter(
    sopts: &config::Options,
    registry: rustc_errors::registry::Registry,
    source_map: Lrc<SourceMap>,
    emitter_dest: Option<Box<dyn Write + Send>>,
) -> Box<dyn Emitter + sync::Send> {
    let macro_backtrace = sopts.debugging_opts.macro_backtrace;
    match (sopts.error_format, emitter_dest) {
        (config::ErrorOutputType::HumanReadable(kind), dst) => {
            let (short, color_config) = kind.unzip();
            if let HumanReadableErrorType::AnnotateSnippet(_) = kind {
                // NOTE(review): the annotate-snippet path ignores `dst` and
                // always writes to stderr — presumably intentional while this
                // emitter is experimental; confirm before relying on `dst`.
                let emitter =
                    AnnotateSnippetEmitterWriter::new(Some(source_map), short, macro_backtrace);
                Box::new(emitter.ui_testing(sopts.debugging_opts.ui_testing))
            } else {
                let emitter = match dst {
                    None => EmitterWriter::stderr(
                        color_config,
                        Some(source_map),
                        short,
                        sopts.debugging_opts.teach,
                        sopts.debugging_opts.terminal_width,
                        macro_backtrace,
                    ),
                    Some(dst) => EmitterWriter::new(
                        dst,
                        Some(source_map),
                        short,
                        false, // no teach messages when writing to a buffer
                        false, // no colors when writing to a buffer
                        None, // no terminal width
                        macro_backtrace,
                    ),
                };
                Box::new(emitter.ui_testing(sopts.debugging_opts.ui_testing))
            }
        }
        // JSON to stderr.
        (config::ErrorOutputType::Json { pretty, json_rendered }, None) => Box::new(
            JsonEmitter::stderr(
                Some(registry),
                source_map,
                pretty,
                json_rendered,
                sopts.debugging_opts.terminal_width,
                macro_backtrace,
            )
            .ui_testing(sopts.debugging_opts.ui_testing),
        ),
        // JSON to the provided buffer.
        (config::ErrorOutputType::Json { pretty, json_rendered }, Some(dst)) => Box::new(
            JsonEmitter::new(
                dst,
                Some(registry),
                source_map,
                pretty,
                json_rendered,
                sopts.debugging_opts.terminal_width,
                macro_backtrace,
            )
            .ui_testing(sopts.debugging_opts.ui_testing),
        ),
    }
}
/// Where the session should send its diagnostics.
pub enum DiagnosticOutput {
    /// Emit diagnostics to stderr (the usual behavior).
    Default,
    /// Write diagnostics to the given buffer instead of stderr.
    Raw(Box<dyn Write + Send>),
}
/// Constructs a fully-initialized `Session` from parsed command-line options.
///
/// This wires together the diagnostic emitter, source map, self-profiler,
/// parse session, sysroot search paths, and optimization-fuel bookkeeping,
/// then runs `validate_commandline_args_with_session_available` for checks
/// that require a complete `Session`.
pub fn build_session(
    sopts: config::Options,
    local_crate_source_file: Option<PathBuf>,
    registry: rustc_errors::registry::Registry,
    diagnostics_output: DiagnosticOutput,
    driver_lint_caps: FxHashMap<lint::LintId, lint::Level>,
    file_loader: Option<Box<dyn FileLoader + Send + Sync + 'static>>,
    target_override: Option<Target>,
) -> Session {
    // FIXME: This is not general enough to make the warning lint completely override
    // normal diagnostic warnings, since the warning lint can also be denied and changed
    // later via the source code.
    let warnings_allow = sopts
        .lint_opts
        .iter()
        .filter(|&&(ref key, _)| *key == "warnings")
        .map(|&(_, ref level)| *level == lint::Allow)
        .last()
        .unwrap_or(false);
    let cap_lints_allow = sopts.lint_cap.map_or(false, |cap| cap == lint::Allow);
    // Warnings may be emitted unless the last `warnings` lint level on the
    // command line, or the lint cap, says `allow`.
    let can_emit_warnings = !(warnings_allow || cap_lints_allow);
    let write_dest = match diagnostics_output {
        DiagnosticOutput::Default => None,
        DiagnosticOutput::Raw(write) => Some(write),
    };
    let target_cfg = config::build_target_config(&sopts, target_override);
    let host_triple = TargetTriple::from_triple(config::host_triple());
    let host = Target::search(&host_triple).unwrap_or_else(|e| {
        early_error(sopts.error_format, &format!("Error loading host specification: {}", e))
    });
    let loader = file_loader.unwrap_or_else(|| Box::new(RealFileLoader));
    // Pick the source-file hash algorithm: SHA-1 on MSVC-like targets,
    // MD5 everywhere else, unless overridden by -Z src-hash-algorithm.
    let hash_kind = sopts.debugging_opts.src_hash_algorithm.unwrap_or_else(|| {
        if target_cfg.is_like_msvc {
            SourceFileHashAlgorithm::Sha1
        } else {
            SourceFileHashAlgorithm::Md5
        }
    });
    let source_map = Lrc::new(SourceMap::with_file_loader_and_hash_kind(
        loader,
        sopts.file_path_mapping(),
        hash_kind,
    ));
    let emitter = default_emitter(&sopts, registry, source_map.clone(), write_dest);
    let span_diagnostic = rustc_errors::Handler::with_emitter_and_flags(
        emitter,
        sopts.debugging_opts.diagnostic_handler_flags(can_emit_warnings),
    );
    // Set up the self-profiler (-Z self-profile). Failure to create the
    // profiler is reported as a warning, not an error.
    let self_profiler = if let SwitchWithOptPath::Enabled(ref d) = sopts.debugging_opts.self_profile
    {
        let directory =
            if let Some(ref directory) = d { directory } else { std::path::Path::new(".") };
        let profiler = SelfProfiler::new(
            directory,
            sopts.crate_name.as_deref(),
            &sopts.debugging_opts.self_profile_events,
        );
        match profiler {
            Ok(profiler) => Some(Arc::new(profiler)),
            Err(e) => {
                early_warn(sopts.error_format, &format!("failed to create profiler: {}", e));
                None
            }
        }
    } else {
        None
    };
    let mut parse_sess = ParseSess::with_span_handler(span_diagnostic, source_map);
    parse_sess.assume_incomplete_release = sopts.debugging_opts.assume_incomplete_release;
    let sysroot = match &sopts.maybe_sysroot {
        Some(sysroot) => sysroot.clone(),
        None => filesearch::get_or_default_sysroot(),
    };
    let host_triple = config::host_triple();
    let target_triple = sopts.target_triple.triple();
    let host_tlib_path = SearchPath::from_sysroot_and_triple(&sysroot, host_triple);
    // A separate target library path is only needed when cross-compiling.
    let target_tlib_path = if host_triple == target_triple {
        None
    } else {
        Some(SearchPath::from_sysroot_and_triple(&sysroot, target_triple))
    };
    let file_path_mapping = sopts.file_path_mapping();
    let local_crate_source_file =
        local_crate_source_file.map(|path| file_path_mapping.map_prefix(path).0);
    let optimization_fuel_crate = sopts.debugging_opts.fuel.as_ref().map(|i| i.0.clone());
    let optimization_fuel = Lock::new(OptimizationFuel {
        remaining: sopts.debugging_opts.fuel.as_ref().map_or(0, |i| i.1),
        out_of_fuel: false,
    });
    let print_fuel_crate = sopts.debugging_opts.print_fuel.clone();
    let print_fuel = AtomicU64::new(0);
    let working_dir = env::current_dir().unwrap_or_else(|e| {
        parse_sess.span_diagnostic.fatal(&format!("Current directory is invalid: {}", e)).raise()
    });
    let working_dir = file_path_mapping.map_prefix(working_dir);
    let cgu_reuse_tracker = if sopts.debugging_opts.query_dep_graph {
        CguReuseTracker::new()
    } else {
        CguReuseTracker::new_disabled()
    };
    let prof = SelfProfilerRef::new(
        self_profiler,
        sopts.debugging_opts.time_passes || sopts.debugging_opts.time,
        sopts.debugging_opts.time_passes,
    );
    // CTFE backtraces are controlled by an environment variable since they are
    // primarily a compiler-debugging aid.
    let ctfe_backtrace = Lock::new(match env::var("RUSTC_CTFE_BACKTRACE") {
        Ok(ref val) if val == "immediate" => CtfeBacktrace::Immediate,
        Ok(ref val) if val != "0" => CtfeBacktrace::Capture,
        _ => CtfeBacktrace::Disabled,
    });
    // Try to find a directory containing the Rust `src`, for more details see
    // the doc comment on the `real_rust_source_base_dir` field.
    let real_rust_source_base_dir = {
        // This is the location used by the `rust-src` `rustup` component.
        let mut candidate = sysroot.join("lib/rustlib/src/rust");
        if let Ok(metadata) = candidate.symlink_metadata() {
            // Replace the symlink rustbuild creates, with its destination.
            // We could try to use `fs::canonicalize` instead, but that might
            // produce unnecessarily verbose path.
            if metadata.file_type().is_symlink() {
                if let Ok(symlink_dest) = std::fs::read_link(&candidate) {
                    candidate = symlink_dest;
                }
            }
        }
        // Only use this directory if it has a file we can expect to always find.
        if candidate.join("library/std/src/lib.rs").is_file() { Some(candidate) } else { None }
    };
    let asm_arch =
        if target_cfg.allow_asm { InlineAsmArch::from_str(&target_cfg.arch).ok() } else { None };
    let sess = Session {
        target: target_cfg,
        host,
        opts: sopts,
        host_tlib_path,
        target_tlib_path,
        parse_sess,
        sysroot,
        local_crate_source_file,
        working_dir,
        one_time_diagnostics: Default::default(),
        crate_types: OnceCell::new(),
        crate_disambiguator: OnceCell::new(),
        features: OnceCell::new(),
        lint_store: OnceCell::new(),
        recursion_limit: OnceCell::new(),
        type_length_limit: OnceCell::new(),
        const_eval_limit: OnceCell::new(),
        incr_comp_session: OneThread::new(RefCell::new(IncrCompSession::NotInitialized)),
        cgu_reuse_tracker,
        prof,
        perf_stats: PerfStats {
            symbol_hash_time: Lock::new(Duration::from_secs(0)),
            queries_canonicalized: AtomicUsize::new(0),
            normalize_generic_arg_after_erasing_regions: AtomicUsize::new(0),
            normalize_projection_ty: AtomicUsize::new(0),
        },
        code_stats: Default::default(),
        optimization_fuel_crate,
        optimization_fuel,
        print_fuel_crate,
        print_fuel,
        jobserver: jobserver::client(),
        driver_lint_caps,
        trait_methods_not_found: Lock::new(Default::default()),
        confused_type_with_std_module: Lock::new(Default::default()),
        system_library_path: OneThread::new(RefCell::new(Default::default())),
        ctfe_backtrace,
        miri_unleashed_features: Lock::new(Default::default()),
        real_rust_source_base_dir,
        asm_arch,
        target_features: FxHashSet::default(),
        known_attrs: Lock::new(MarkedAttrs::new()),
        used_attrs: Lock::new(MarkedAttrs::new()),
        if_let_suggestions: Default::default(),
    };
    validate_commandline_args_with_session_available(&sess);
    sess
}
// If it is useful to have a Session available already for validating a
// commandline argument, you can do so here.
/// Performs command-line validation checks that need a constructed `Session`
/// (target information, panic strategy, etc.). Problems are reported through
/// `sess.err` rather than returned.
fn validate_commandline_args_with_session_available(sess: &Session) {
    // Since we don't know if code in an rlib will be linked to statically or
    // dynamically downstream, rustc generates `__imp_` symbols that help linkers
    // on Windows deal with this lack of knowledge (#27438). Unfortunately,
    // these manually generated symbols confuse LLD when it tries to merge
    // bitcode during ThinLTO. Therefore we disallow dynamic linking on Windows
    // when compiling for LLD ThinLTO. This way we can validly just not generate
    // the `dllimport` attributes and `__imp_` symbols in that case.
    if sess.opts.cg.linker_plugin_lto.enabled()
        && sess.opts.cg.prefer_dynamic
        && sess.target.is_like_windows
    {
        sess.err(
            "Linker plugin based LTO is not supported together with \
                  `-C prefer-dynamic` when targeting Windows-like targets",
        );
    }
    // Make sure that any given profiling data actually exists so LLVM can't
    // decide to silently skip PGO.
    if let Some(ref path) = sess.opts.cg.profile_use {
        if !path.exists() {
            sess.err(&format!(
                "File `{}` passed to `-C profile-use` does not exist.",
                path.display()
            ));
        }
    }
    // Unwind tables cannot be disabled if the target requires them.
    if let Some(include_uwtables) = sess.opts.cg.force_unwind_tables {
        if sess.panic_strategy() == PanicStrategy::Unwind && !include_uwtables {
            sess.err(
                "panic=unwind requires unwind tables, they cannot be disabled \
                     with `-C force-unwind-tables=no`.",
            );
        }
        if sess.target.requires_uwtable && !include_uwtables {
            sess.err(
                "target requires unwind tables, they cannot be disabled with \
                     `-C force-unwind-tables=no`.",
            );
        }
    }
    // PGO does not work reliably with panic=unwind on Windows. Let's make it
    // an error to combine the two for now. It always runs into an assertions
    // if LLVM is built with assertions, but without assertions it sometimes
    // does not crash and will probably generate a corrupted binary.
    // We should only display this error if we're actually going to run PGO.
    // If we're just supposed to print out some data, don't show the error (#61002).
    if sess.opts.cg.profile_generate.enabled()
        && sess.target.is_like_msvc
        && sess.panic_strategy() == PanicStrategy::Unwind
        && sess.opts.prints.iter().all(|&p| p == PrintRequest::NativeStaticLibs)
    {
        sess.err(
            "Profile-guided optimization does not yet work in conjunction \
                  with `-Cpanic=unwind` on Windows when targeting MSVC. \
                  See issue #61002 <https://github.com/rust-lang/rust/issues/61002> \
                  for more information.",
        );
    }
    // Per-sanitizer lists of targets that have been tested and are supported.
    const ASAN_SUPPORTED_TARGETS: &[&str] = &[
        "aarch64-apple-darwin",
        "aarch64-fuchsia",
        "aarch64-unknown-linux-gnu",
        "x86_64-apple-darwin",
        "x86_64-fuchsia",
        "x86_64-unknown-freebsd",
        "x86_64-unknown-linux-gnu",
    ];
    const LSAN_SUPPORTED_TARGETS: &[&str] = &[
        "aarch64-apple-darwin",
        "aarch64-unknown-linux-gnu",
        "x86_64-apple-darwin",
        "x86_64-unknown-linux-gnu",
    ];
    const MSAN_SUPPORTED_TARGETS: &[&str] =
        &["aarch64-unknown-linux-gnu", "x86_64-unknown-freebsd", "x86_64-unknown-linux-gnu"];
    const TSAN_SUPPORTED_TARGETS: &[&str] = &[
        "aarch64-apple-darwin",
        "aarch64-unknown-linux-gnu",
        "x86_64-apple-darwin",
        "x86_64-unknown-freebsd",
        "x86_64-unknown-linux-gnu",
    ];
    const HWASAN_SUPPORTED_TARGETS: &[&str] =
        &["aarch64-linux-android", "aarch64-unknown-linux-gnu"];
    // Sanitizers can only be used on some tested platforms.
    for s in sess.opts.debugging_opts.sanitizer {
        let supported_targets = match s {
            SanitizerSet::ADDRESS => ASAN_SUPPORTED_TARGETS,
            SanitizerSet::LEAK => LSAN_SUPPORTED_TARGETS,
            SanitizerSet::MEMORY => MSAN_SUPPORTED_TARGETS,
            SanitizerSet::THREAD => TSAN_SUPPORTED_TARGETS,
            SanitizerSet::HWADDRESS => HWASAN_SUPPORTED_TARGETS,
            _ => panic!("unrecognized sanitizer {}", s),
        };
        if !supported_targets.contains(&&*sess.opts.target_triple.triple()) {
            sess.err(&format!(
                "`-Zsanitizer={}` only works with targets: {}",
                s,
                supported_targets.join(", ")
            ));
        }
        // Sanitizers are mutually exclusive: any other enabled sanitizer
        // conflicts with the current one.
        let conflicting = sess.opts.debugging_opts.sanitizer - s;
        if !conflicting.is_empty() {
            sess.err(&format!(
                "`-Zsanitizer={}` is incompatible with `-Zsanitizer={}`",
                s, conflicting,
            ));
            // Don't report additional errors.
            break;
        }
    }
}
/// Holds data on the current incremental compilation session, if there is one.
///
/// States advance in one direction: `NotInitialized` -> `Active`, then to
/// either `Finalized` or `InvalidBecauseOfErrors`.
#[derive(Debug)]
pub enum IncrCompSession {
    /// This is the state the session will be in until the incr. comp. dir is
    /// needed.
    NotInitialized,
    /// This is the state during which the session directory is private and can
    /// be modified.
    Active { session_directory: PathBuf, lock_file: flock::Lock, load_dep_graph: bool },
    /// This is the state after the session directory has been finalized. In this
    /// state, the contents of the directory must not be modified any more.
    Finalized { session_directory: PathBuf },
    /// This is an error state that is reached when some compilation error has
    /// occurred. It indicates that the contents of the session directory must
    /// not be used, since they might be invalid.
    InvalidBecauseOfErrors { session_directory: PathBuf },
}
/// Emits a fatal error before a `Session` (and its diagnostic handler) exists,
/// then aborts compilation. Used for failures while processing early
/// command-line state.
pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! {
    let emitter: Box<dyn Emitter + sync::Send> = match output {
        config::ErrorOutputType::Json { pretty, json_rendered } => {
            Box::new(JsonEmitter::basic(pretty, json_rendered, None, false))
        }
        config::ErrorOutputType::HumanReadable(kind) => {
            let (short, color) = kind.unzip();
            Box::new(EmitterWriter::stderr(color, None, short, false, None, false))
        }
    };
    let handler = rustc_errors::Handler::with_emitter(true, None, emitter);
    handler.struct_fatal(msg).emit();
    rustc_errors::FatalError.raise();
}
/// Emits a warning before a `Session` (and its diagnostic handler) exists.
/// Unlike `early_error`, compilation continues afterwards.
pub fn early_warn(output: config::ErrorOutputType, msg: &str) {
    let emitter: Box<dyn Emitter + sync::Send> = match output {
        config::ErrorOutputType::Json { pretty, json_rendered } => {
            Box::new(JsonEmitter::basic(pretty, json_rendered, None, false))
        }
        config::ErrorOutputType::HumanReadable(kind) => {
            let (short, color) = kind.unzip();
            Box::new(EmitterWriter::stderr(color, None, short, false, None, false))
        }
    };
    let handler = rustc_errors::Handler::with_emitter(true, None, emitter);
    handler.struct_warn(msg).emit();
}
| 39.321084 | 128 | 0.622907 |
29107eae4ebba32f0facd84bf6399f1f4abdd56e | 5,285 | #![allow(unused_imports)]
use super::*;
use wasm_bindgen::prelude::*;
// wasm-bindgen imports from JS must be declared in an `extern "C"` block; the
// `#[wasm_bindgen]` attribute rewrites the block, but any other ABI string
// (such as `extern "wasm-bindgen"`) is not accepted.
#[wasm_bindgen]
extern "C" {
    # [wasm_bindgen (extends = :: js_sys :: Object , js_name = CSSRule , typescript_type = "CSSRule")]
    #[derive(Debug, Clone, PartialEq, Eq)]
    #[doc = "The `CssRule` class."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/CSSRule)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub type CssRule;
    # [wasm_bindgen (structural , method , getter , js_class = "CSSRule" , js_name = type)]
    #[doc = "Getter for the `type` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/CSSRule/type)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub fn type_(this: &CssRule) -> u16;
    # [wasm_bindgen (structural , method , getter , js_class = "CSSRule" , js_name = cssText)]
    #[doc = "Getter for the `cssText` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/CSSRule/cssText)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub fn css_text(this: &CssRule) -> String;
    # [wasm_bindgen (structural , method , setter , js_class = "CSSRule" , js_name = cssText)]
    #[doc = "Setter for the `cssText` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/CSSRule/cssText)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub fn set_css_text(this: &CssRule, value: &str);
    # [wasm_bindgen (structural , method , getter , js_class = "CSSRule" , js_name = parentRule)]
    #[doc = "Getter for the `parentRule` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/CSSRule/parentRule)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub fn parent_rule(this: &CssRule) -> Option<CssRule>;
    #[cfg(feature = "CssStyleSheet")]
    # [wasm_bindgen (structural , method , getter , js_class = "CSSRule" , js_name = parentStyleSheet)]
    #[doc = "Getter for the `parentStyleSheet` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/CSSRule/parentStyleSheet)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`, `CssStyleSheet`*"]
    pub fn parent_style_sheet(this: &CssRule) -> Option<CssStyleSheet>;
}
impl CssRule {
    // The numeric values below mirror the CSSOM `CSSRule.type` constants
    // (e.g. STYLE_RULE = 1, CHARSET_RULE = 2, ...). Note the numbering is not
    // contiguous: 9 (MARGIN_RULE) and 13 are not exposed here.
    #[doc = "The `CSSRule.STYLE_RULE` const."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub const STYLE_RULE: u16 = 1u64 as u16;
    #[doc = "The `CSSRule.CHARSET_RULE` const."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub const CHARSET_RULE: u16 = 2u64 as u16;
    #[doc = "The `CSSRule.IMPORT_RULE` const."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub const IMPORT_RULE: u16 = 3u64 as u16;
    #[doc = "The `CSSRule.MEDIA_RULE` const."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub const MEDIA_RULE: u16 = 4u64 as u16;
    #[doc = "The `CSSRule.FONT_FACE_RULE` const."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub const FONT_FACE_RULE: u16 = 5u64 as u16;
    #[doc = "The `CSSRule.PAGE_RULE` const."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub const PAGE_RULE: u16 = 6u64 as u16;
    #[doc = "The `CSSRule.NAMESPACE_RULE` const."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub const NAMESPACE_RULE: u16 = 10u64 as u16;
    #[doc = "The `CSSRule.KEYFRAMES_RULE` const."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub const KEYFRAMES_RULE: u16 = 7u64 as u16;
    #[doc = "The `CSSRule.KEYFRAME_RULE` const."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub const KEYFRAME_RULE: u16 = 8u64 as u16;
    #[doc = "The `CSSRule.COUNTER_STYLE_RULE` const."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub const COUNTER_STYLE_RULE: u16 = 11u64 as u16;
    #[doc = "The `CSSRule.SUPPORTS_RULE` const."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub const SUPPORTS_RULE: u16 = 12u64 as u16;
    #[doc = "The `CSSRule.FONT_FEATURE_VALUES_RULE` const."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `CssRule`*"]
    pub const FONT_FEATURE_VALUES_RULE: u16 = 14u64 as u16;
}
| 52.326733 | 109 | 0.639546 |
ed7cdad662fb607e91d6ded15ebc8d780f1c3872 | 13,385 | use crate::utils::manifest::Manifest;
use anyhow::{anyhow, bail, Context, Result};
use curl::easy::Easy;
use dirs::home_dir;
use flate2::read::GzDecoder;
use serde::{Deserialize, Serialize};
use std::{
collections::HashMap,
fs,
io::Cursor,
path::{Path, PathBuf},
};
use sway_utils::constants;
use tar::Archive;
// A collection of remote dependency related functions
/// A dependency declaration as parsed from a `Forc.toml` manifest.
#[derive(Serialize, Deserialize, Debug)]
#[serde(untagged)]
pub enum Dependency {
    /// In the simple format, only a version is specified, eg.
    /// `package = "<version>"`
    Simple(String),
    /// The simple format is equivalent to a detailed dependency
    /// specifying only a version, eg.
    /// `package = { version = "<version>" }`
    Detailed(DependencyDetails),
}
/// The detailed form of a dependency declaration. All fields are optional;
/// when both are present, `version` takes precedence over `branch` during
/// resolution (see `build_github_repo_api_url`).
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "kebab-case")]
pub struct DependencyDetails {
    /// A version/tag reference to fetch.
    pub(crate) version: Option<String>,
    /// A local filesystem path for path dependencies.
    pub(crate) path: Option<String>,
    /// A git repository URL (GitHub-hosted, per the download helpers below).
    pub(crate) git: Option<String>,
    /// A git branch to fetch when no `version` is given.
    pub(crate) branch: Option<String>,
}
/// Whether dependency resolution is allowed to touch the network.
pub enum OfflineMode {
    /// Offline: only dependencies already installed locally may be used.
    Yes,
    /// Online: missing dependencies may be downloaded.
    No,
}
impl From<bool> for OfflineMode {
    /// Converts a boolean "offline" flag into the corresponding mode.
    fn from(v: bool) -> OfflineMode {
        if v { OfflineMode::Yes } else { OfflineMode::No }
    }
}
/// Deserialized payload of GitHub's "list commits" API: a list of commits.
pub type GitHubAPICommitsResponse = Vec<GithubCommit>;
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct GithubCommit {
    /// Full commit hash as reported by the GitHub API.
    pub sha: String,
}
/// VersionedDependencyDirectory holds the path to the directory where a given
/// GitHub-based dependency is installed and its respective git hash.
#[derive(Debug)]
pub struct VersionedDependencyDirectory {
    /// Truncated git hash parsed from the installation directory name.
    pub hash: String,
    /// Directory where this dependency version is installed.
    pub path: PathBuf,
}
/// Deserialized payload of GitHub's "list releases" API.
pub type GitHubRepoReleases = Vec<TaggedRelease>;
/// A single release entry as returned by the GitHub releases API.
#[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TaggedRelease {
    #[serde(rename = "tag_name")]
    pub tag_name: String,
    #[serde(rename = "target_commitish")]
    pub target_commitish: String,
    pub name: String,
    pub draft: bool,
    pub prerelease: bool,
    #[serde(rename = "created_at")]
    pub created_at: String,
    #[serde(rename = "published_at")]
    pub published_at: String,
}
/// Downloads a non-local dependency that's hosted on GitHub.
/// By default, it stores the dependency in `~/.forc/`.
/// A given dependency `dep` is stored under `~/.forc/dep/default/$owner-$repo-$hash`.
/// If no hash (nor any other type of reference) is provided, Forc
/// will download the default branch at the latest commit.
/// If a branch is specified, it will go in `~/.forc/dep/$branch/$owner-$repo-$hash.
/// If a version is specified, it will go in `~/.forc/dep/$version/$owner-$repo-$hash.
/// Version takes precedence over branch reference.
pub fn download_github_dep(
    dep_name: &str,
    repo_base_url: &str,
    branch: &Option<String>,
    version: &Option<String>,
    offline_mode: OfflineMode,
) -> Result<String> {
    // Use `PathBuf::join` rather than formatting the home directory into a
    // string: this avoids panicking on non-UTF-8 home paths.
    let home_dir = home_dir().ok_or_else(|| anyhow!("Couldn't find home directory (`~/`)"))?;
    // Version tag takes precedence over branch reference; with neither,
    // install under the `default` subdirectory.
    let reference = version.as_deref().or_else(|| branch.as_deref()).unwrap_or("default");
    let out_dir = home_dir
        .join(constants::FORC_DEPENDENCIES_DIRECTORY)
        .join(dep_name)
        .join(reference);
    // Check if dependency is already installed, if so, return its path.
    if out_dir.exists() {
        for entry in fs::read_dir(&out_dir)? {
            let path = entry?.path();
            // If the path to that dependency at that branch/version already
            // exists and there's a directory inside of it,
            // this directory should be the installation path.
            if path.is_dir() {
                return Ok(path.to_str().unwrap().to_string());
            }
        }
    }
    // If offline mode is enabled, don't proceed as it will
    // make use of the network to download the dependency from
    // GitHub.
    // If it's offline mode and the dependency already exists
    // locally, then it would've been returned in the block above.
    if let OfflineMode::Yes = offline_mode {
        return Err(anyhow!(
            "Can't build dependency: dependency {} doesn't exist locally and offline mode is enabled",
            dep_name
        ));
    }
    let github_api_url = build_github_repo_api_url(repo_base_url, branch, version);
    let _ = crate::utils::helpers::println_green(&format!(
        " Downloading {:?} ({:?})",
        dep_name, out_dir
    ));
    download_tarball(&github_api_url, &out_dir)
        .map_err(|e| anyhow!("couldn't download from {}: {}", &github_api_url, e))
}
/// Builds a proper URL that's used to call GitHub's API.
/// The dependency is specified as `https://github.com/:owner/:project`
/// And the API URL must be like `https://api.github.com/repos/:owner/:project/tarball`
/// Adding a `:ref` at the end makes it download a branch/tag based repo.
/// Omitting it makes it download the default branch at latest commit.
pub fn build_github_repo_api_url(
    dependency_url: &str,
    branch: &Option<String>,
    version: &Option<String>,
) -> String {
    // Guard against dependency URLs with a trailing `/`.
    let dependency_url = dependency_url.trim_end_matches('/');
    // The last two path segments are `:owner/:project`.
    let mut pieces = dependency_url.rsplit('/');
    let project_name: &str = pieces.next().unwrap_or(dependency_url);
    let owner_name: &str = pieces.next().unwrap_or(dependency_url);
    // Version tag takes precedence over branch reference; with neither we
    // download the default branch at its latest commit.
    match version.as_deref().or_else(|| branch.as_deref()) {
        Some(reference) => format!(
            "https://api.github.com/repos/{}/{}/tarball/{}",
            owner_name, project_name, reference
        ),
        None => format!(
            "https://api.github.com/repos/{}/{}/tarball",
            owner_name, project_name
        ),
    }
}
/// Downloads the gzipped tarball at `url` into memory, unpacks it under
/// `out_dir`, and returns the path of the directory it extracted.
pub fn download_tarball(url: &str, out_dir: &Path) -> Result<String> {
    // Configure the HTTP transfer.
    let mut handle = Easy::new();
    handle.url(url).context("failed to configure tarball URL")?;
    handle
        .follow_location(true)
        .context("failed to configure follow location")?;
    handle
        .useragent("forc-builder")
        .context("failed to configure User-Agent")?;
    // Download the tarball into `data`; the transfer borrows the buffer, so
    // scope it to release the borrow before unpacking.
    let mut data = Vec::new();
    {
        let mut transfer = handle.transfer();
        transfer
            .write_function(|chunk| {
                data.extend_from_slice(chunk);
                Ok(chunk.len())
            })
            .context("failed to write download data")?;
        transfer.perform().context("failed to download tarball")?;
    }
    // Unpack the tarball.
    let tarball = GzDecoder::new(Cursor::new(data));
    Archive::new(tarball).unpack(out_dir).with_context(|| {
        format!(
            "failed to unpack tarball in directory: {}",
            out_dir.display()
        )
    })?;
    // The archive expands to a single top-level directory; report its path.
    for entry in fs::read_dir(out_dir)? {
        let path = entry?.path();
        if path.is_dir() {
            return Ok(path.to_str().unwrap().to_string());
        }
    }
    Err(anyhow!(
        "couldn't find downloaded dependency in directory: {}",
        out_dir.display(),
    ))
}
/// Replaces the installed copy of a GitHub dependency in `target_directory`
/// with the version/branch requested by `dep`, removing the old copy.
pub fn replace_dep_version(
    target_directory: &Path,
    git: &str,
    dep: &DependencyDetails,
) -> Result<()> {
    // Locate the currently installed version so it can be removed afterwards.
    let current = get_current_dependency_version(target_directory)?;
    // Download the requested version alongside the old one.
    let api_url = build_github_repo_api_url(git, &dep.branch, &dep.version);
    download_tarball(&api_url, target_directory)?;
    // Delete the old version.
    fs::remove_dir_all(&current.path).map_err(|e| {
        anyhow!(
            "failed to remove old version of the dependency ({}): {}",
            git,
            e
        )
    })
}
/// Returns the installation directory and truncated git hash of the (single)
/// dependency version installed under `dep_dir`.
pub fn get_current_dependency_version(dep_dir: &Path) -> Result<VersionedDependencyDirectory> {
    let mut entries =
        fs::read_dir(dep_dir).context(format!("couldn't read directory {}", dep_dir.display()))?;
    let entry = match entries.next() {
        Some(entry) => entry,
        None => bail!("Dependency directory is empty. Run `forc build` to install dependencies."),
    };
    let path = entry?.path();
    if !path.is_dir() {
        // Report the offending entry itself, not its parent directory.
        bail!("{} isn't a directory.", path.display())
    }
    let file_name = path.file_name().unwrap();
    // Dependencies directories are named as "$repo_owner-$repo-$concatenated_hash",
    // so the hash is the segment after the last `-`.
    let hash = file_name
        .to_str()
        .with_context(|| format!("Invalid utf8 in dependency name: {}", path.display()))?
        .split('-')
        .last()
        .with_context(|| format!("Unexpected dependency naming scheme: {}", path.display()))?
        .into();
    Ok(VersionedDependencyDirectory { hash, path })
}
// Returns the _truncated_ (e.g `e6940e4`) latest commit hash of a
// GitHub repository given a branch. If branch is None, the default branch is used.
pub async fn get_latest_commit_sha(
    dependency_url: &str,
    branch: &Option<String>,
) -> Result<String> {
    // Quick protection against `git` dependency URL ending with `/`.
    let dependency_url = dependency_url.trim_end_matches('/');
    // The last two path segments are `:owner/:project`.
    let mut pieces = dependency_url.rsplit('/');
    let project_name: &str = pieces.next().unwrap_or(dependency_url);
    let owner_name: &str = pieces.next().unwrap_or(dependency_url);
    let api_endpoint = match branch {
        Some(b) => {
            format!(
                "https://api.github.com/repos/{}/{}/commits?sha={}&per_page=1",
                owner_name, project_name, b
            )
        }
        None => {
            format!(
                "https://api.github.com/repos/{}/{}/commits?per_page=1",
                owner_name, project_name
            )
        }
    };
    let client = reqwest::Client::builder()
        .user_agent("forc-builder")
        .build()?;
    let resp = client.get(&api_endpoint).send().await?;
    let hash_vec = resp.json::<GitHubAPICommitsResponse>().await?;
    // Guard against an empty commit list (e.g. an empty repository) instead
    // of panicking on an out-of-bounds index.
    let latest = match hash_vec.first() {
        Some(commit) => commit,
        None => bail!(
            "failed to extract hash from GitHub commit history API, response: {:?}",
            hash_vec
        ),
    };
    // `take(7)` because the truncated SHA1 used by GitHub is 7 chars long.
    let truncated_hash: String = latest.sha.chars().take(7).collect();
    if truncated_hash.is_empty() {
        bail!(
            "failed to extract hash from GitHub commit history API, response: {:?}",
            hash_vec
        )
    }
    Ok(truncated_hash)
}
// Helper to get only detailed dependencies (`Dependency::Detailed`).
pub fn get_detailed_dependencies(manifest: &mut Manifest) -> HashMap<String, &DependencyDetails> {
    let mut dependencies: HashMap<String, &DependencyDetails> = HashMap::new();
    if let Some(ref mut deps) = manifest.dependencies {
        for (dep_name, dependency_details) in deps.iter_mut() {
            // `Dependency::Simple` entries carry no details and are skipped.
            if let Dependency::Detailed(dep_details) = dependency_details {
                dependencies.insert(dep_name.to_owned(), dep_details);
            }
        }
    }
    dependencies
}
/// Fetches the release tag names of a GitHub repository.
pub async fn get_github_repo_releases(dependency_url: &str) -> Result<Vec<String>> {
    // Quick protection against `git` dependency URL ending with `/`.
    let dependency_url = dependency_url.trim_end_matches('/');
    // URLs look like `https://github.com/<owner>/<project>`.
    let mut pieces = dependency_url.rsplit('/');
    let project_name: &str = pieces.next().unwrap_or(dependency_url);
    let owner_name: &str = pieces.next().unwrap_or(dependency_url);
    let api_endpoint = format!(
        "https://api.github.com/repos/{}/{}/releases",
        owner_name, project_name
    );
    // Set a User-Agent; the GitHub API expects one.
    let client = reqwest::Client::builder()
        .user_agent("forc-builder")
        .build()?;
    let releases_vec = client
        .get(&api_endpoint)
        .send()
        .await?
        .json::<GitHubRepoReleases>()
        .await?;
    Ok(releases_vec.iter().map(|r| r.tag_name.to_owned()).collect())
}
| 31.420188 | 102 | 0.590586 |
d9a8f44a39b356acaf4f6adca8eb2c4bc1a54fde | 3,336 | use std::{
env, fs,
path::{Path, PathBuf},
};
use fs_extra::dir::{copy, CopyOptions};
use super::Config;
/// Converts a `PathBuf` into a `String`, yielding an empty string when the
/// path is not valid UTF-8.
pub fn path_to_string(path: PathBuf) -> String {
    match path.to_str() {
        Some(s) => s.to_string(),
        None => String::new(),
    }
}
/// Searches every directory listed in `PATH` for a file named `exe_name`
/// and returns the first match, if any.
pub fn find_it<P>(exe_name: P) -> Option<PathBuf>
where
    P: AsRef<Path>,
{
    let paths = env::var_os("PATH")?;
    env::split_paths(&paths).find_map(|dir| {
        let candidate = dir.join(&exe_name);
        candidate.is_file().then(|| candidate)
    })
}
/// Name of the VS Code launcher binary: `code.cmd` on Windows, `code`
/// everywhere else.
pub fn get_bin_or_cmd_name<'a>() -> &'a str {
    match cfg!(target_os = "windows") {
        true => "code.cmd",
        false => "code",
    }
}
/// Returns the user's home directory, or an error message if it cannot be
/// determined.
pub fn get_home_dir() -> Result<PathBuf, String> {
    dirs::home_dir().ok_or_else(|| "Couldn't find user's home folder.".into())
}
pub fn create_or_get_ena_home_folder() -> Result<PathBuf, Box<dyn std::error::Error>> {
let mut home_folder = get_home_dir()?;
home_folder = home_folder.join(".ena-code");
let path_ena_code_folder = Path::new(&home_folder);
if !path_ena_code_folder.is_dir() {
fs::create_dir(&path_ena_code_folder)?;
}
Ok(home_folder)
}
pub fn get_profiles_folder_path() -> PathBuf {
let config = Config::get_config(false);
Path::new(&config.profiles_folder).to_path_buf()
}
/// Full path of a named profile inside the profiles folder.
pub fn get_profile_path(profile_name: &str) -> PathBuf {
    let base = get_profiles_folder_path();
    base.join(profile_name)
}
pub fn check_profile_exists(profile_name: &str) -> bool {
let ena_folder = get_profile_path(profile_name);
let path = Path::new(&ena_folder);
path.is_dir()
}
pub fn create_profile(profile_name: &str, profile_fonte: &str) {
let dir_destino = get_profile_path(profile_name);
let dir_origin = get_profile_path(profile_fonte);
let mut options = CopyOptions::new();
options.skip_exist = true;
options.overwrite = false;
options.copy_inside = true;
if let Err(why) = copy(&dir_origin, &dir_destino, &options) {
println!("Couldn't derive from the profile {}, initializating from a new.\n{{Origin: {:?}, Target: {:?}}}\n\nReason: {}", profile_fonte, dir_origin, dir_destino, why);
}
}
/// Copies `profile_origin` into `profile` when the target does not exist
/// yet, the source does, and the two names differ.
pub fn copy_profile(profile: &str, profile_origin: &str) {
    let should_copy = profile != profile_origin
        && !check_profile_exists(profile)
        && check_profile_exists(profile_origin);
    if should_copy {
        create_profile(profile, profile_origin)
    }
}
/// Strips characters that are unsafe in profile/path names; if nothing
/// survives, falls back to the configured default profile name.
pub fn remove_caracteres(path: &str, config: &Config) -> String {
    const FORBIDDEN: &str = r#"(),".;:'<>/\|?*"#;
    let mut sanitized: String = path.chars().filter(|c| !FORBIDDEN.contains(*c)).collect();
    if sanitized.is_empty() {
        sanitized = config.create_new_profile_from.clone();
    }
    sanitized
}
/// Resolves the `configs` directory for a profile: the default profile's
/// folder when configs are shared, otherwise the profile's own folder.
pub fn config_folder(config: &Config, profile_path: &Path, profiles_base_folder: &Path) -> PathBuf {
    let base = if config.shared_profiles_configs {
        profiles_base_folder.join(&config.create_new_profile_from)
    } else {
        profile_path.to_path_buf()
    };
    base.join("configs")
}
| 28.758621 | 176 | 0.605815 |
56c13660ccba79e1cfbbd352949aa6487289fe01 | 1,278 | use getset::{CopyGetters, Getters};
use crate::{transaction::Entry, ACCOUNTING};
// NOTE(review): `Ord` is derived here (field order: `name`, then `value`)
// while `PartialOrd` is hand-implemented below — ensure the two stay
// consistent, as the `Ord`/`PartialOrd` contract requires they agree.
#[derive(CopyGetters, Debug, Default, Eq, Getters, PartialEq, Ord)]
pub struct Account {
    /// Account name (read-only getter generated by `getset`).
    #[getset(get = "pub")]
    name: String,
    /// Balance, interpreted by `format_value_as_currency` as minor units
    /// (the last two digits are the decimal places).
    #[getset(get_copy = "pub")]
    value: isize,
}
impl Account {
    /// Formats the integer minor-unit `value` (e.g. cents) as a currency
    /// string via the global `ACCOUNTING` formatter.
    pub fn format_value_as_currency(&self) -> String {
        // `value` stores minor units with two decimal places, so dividing
        // by 100 yields the major-unit amount. Converting numerically
        // (instead of slicing a zero-padded string) also handles negative
        // values, which previously produced an unparsable string such as
        // "0.-5" and panicked on `unwrap`. For values within f64's exact
        // integer range this matches the old string-parse result.
        let value = self.value as f64 / 100.0;
        ACCOUNTING.format_money(value)
    }
}
impl PartialOrd for Account {
    /// Delegates to the derived `Ord` so `PartialOrd`, `Ord`, `PartialEq`
    /// and `Eq` all agree (comparing by `name`, then `value`). The previous
    /// hand-written impl compared only `name`, so `partial_cmp` reported
    /// `Equal` for accounts that `==` considered different — violating the
    /// `PartialOrd`/`PartialEq` consistency contract. Ordering by name is
    /// preserved; equal names are now tie-broken by value.
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl From<&[&Entry]> for Account {
fn from(entries: &[&Entry]) -> Self {
entries.iter().fold(Account::default(), |acc, cur| {
let name = if acc.name().is_empty() {
cur.account().to_owned()
} else {
acc.name().to_owned()
};
let value = acc.value() + cur.value();
Self { name, value }
})
}
}
| 26.081633 | 73 | 0.551643 |
e4119871862e0f7d81cdac2cc971ecf245e819b6 | 1,245 | #![allow(unused_imports)]
use super::super::diagram::Diagram;
use super::super::geometry::{Line, Point};
use super::super::site_event as VSE;
use super::super::BvError;
#[test]
fn inverse_test_1() {
    type I1 = i32;
    type F1 = f32;
    let mut site =
        VSE::SiteEvent::<I1, F1>::new_3(Point { x: 10, y: 10 }, Point { x: 50, y: 50 }, 1);
    // A freshly created site is not inverted; `inverse()` toggles the flag.
    assert!(!site.is_inverse());
    let _ = site.inverse();
    assert!(site.is_inverse());
}
#[test]
fn inverse_test_2() {
    type I1 = i32;
    type F1 = f32;
    let mut site =
        VSE::SiteEvent::<I1, F1>::new_3(Point { x: 10, y: 11 }, Point { x: 12, y: 13 }, 1);
    // Helper returning both endpoints as one tuple for compact comparisons.
    let coords = |s: &VSE::SiteEvent<I1, F1>| (s.x0(), s.y0(), s.x1(), s.y1());
    assert!(!site.is_inverse());
    assert_eq!(coords(&site), (10, 11, 12, 13));
    // Inverting swaps the endpoints.
    let _ = site.inverse();
    assert!(site.is_inverse());
    assert_eq!(coords(&site), (12, 13, 10, 11));
    // Inverting again restores the original orientation.
    let _ = site.inverse();
    assert!(!site.is_inverse());
    assert_eq!(coords(&site), (10, 11, 12, 13));
}
| 27.065217 | 91 | 0.593574 |
01a3c03217a177957ec6a64d731f9472820ae9f9 | 5,365 | use crate::db::tests::db_mock;
#[test]
fn seek_none() {
    let mut db = db_mock().build().unwrap();
    let mut tx = db.begin_rw_tx().unwrap();
    // Create an unrelated bucket so the tree is non-empty.
    drop(tx.create_bucket(b"blub").unwrap());
    let cursor = tx.cursor();
    // No bucket named "foo" exists, so the seek succeeds but finds nothing.
    let item = cursor.seek(b"foo").unwrap();
    assert!(item.is_none());
}
#[test]
fn seek_some() {
    let mut db = db_mock().build().unwrap();
    let mut tx = db.begin_rw_tx().unwrap();
    drop(tx.create_bucket(b"foo").unwrap());
    let cursor = tx.cursor();
    // The bucket "foo" was just created, so the seek must find it.
    let item = cursor.seek(b"foo").unwrap();
    assert!(item.is_some());
}
#[test]
fn values_cursor() {
    let mut db = db_mock().build().unwrap();
    let mut tx = db.begin_rw_tx().unwrap();
    {
        let mut bucket = tx.create_bucket(b"bucket").unwrap();
        bucket.put(b"petr", b"rachmaninov".to_vec()).unwrap();
        bucket.put(b"robert", b"plant".to_vec()).unwrap();
        bucket.put(b"ziggy", b"stardust".to_vec()).unwrap();
        // `first()` positions the cursor at the smallest key.
        {
            let cursor = bucket.cursor().unwrap();
            assert_eq!(cursor.first().unwrap().key.unwrap(), b"petr");
        }
        // `next()` advances in ascending key order.
        {
            let cursor = bucket.cursor().unwrap();
            assert_eq!(cursor.first().unwrap().key.unwrap(), b"petr");
            assert_eq!(cursor.next().unwrap().key.unwrap(), b"robert");
        }
        // Full forward scan: first() then next() until None visits every
        // key exactly once.
        {
            let mut key_names = vec![];
            let cursor = bucket.cursor().unwrap();
            {
                let item = cursor.first().unwrap();
                key_names.push(item.key.unwrap().to_vec());
            }
            loop {
                let item = cursor.next().unwrap();
                if item.is_none() {
                    break;
                }
                key_names.push(item.key.unwrap().to_vec());
            }
            assert_eq!(key_names.len(), 3);
            assert!(key_names.contains(&b"petr".to_vec()));
            assert!(key_names.contains(&b"robert".to_vec()));
            assert!(key_names.contains(&b"ziggy".to_vec()));
        }
        // backwards
        // `last()` positions the cursor at the largest key.
        {
            let cursor = bucket.cursor().unwrap();
            assert_eq!(cursor.last().unwrap().key.unwrap(), b"ziggy");
        }
        {
            let cursor = bucket.cursor().unwrap();
            assert_eq!(cursor.last().unwrap().key.unwrap(), b"ziggy");
            assert_eq!(cursor.prev().unwrap().key.unwrap(), b"robert");
        }
        // Full backward scan mirrors the forward one.
        {
            let mut key_names = vec![];
            let cursor = bucket.cursor().unwrap();
            {
                let item = cursor.last().unwrap();
                key_names.push(item.key.unwrap().to_vec());
            }
            loop {
                let item = cursor.prev().unwrap();
                if item.is_none() {
                    break;
                }
                key_names.push(item.key.unwrap().to_vec());
            }
            assert_eq!(key_names.len(), 3);
            assert!(key_names.contains(&b"petr".to_vec()));
            assert!(key_names.contains(&b"robert".to_vec()));
            assert!(key_names.contains(&b"ziggy".to_vec()));
        }
        // Mixed navigation: each step is relative to the current position,
        // and first()/last() re-anchor the cursor.
        {
            let cursor = bucket.cursor().unwrap();
            assert_eq!(cursor.last().unwrap().key.unwrap(), b"ziggy");
            assert_eq!(cursor.prev().unwrap().key.unwrap(), b"robert");
            assert_eq!(cursor.prev().unwrap().key.unwrap(), b"petr");
            assert_eq!(cursor.next().unwrap().key.unwrap(), b"robert");
            assert_eq!(cursor.first().unwrap().key.unwrap(), b"petr");
            assert_eq!(cursor.next().unwrap().key.unwrap(), b"robert");
            assert_eq!(cursor.next().unwrap().key.unwrap(), b"ziggy");
            assert_eq!(cursor.prev().unwrap().key.unwrap(), b"robert");
        }
        // Stepping before the first key keeps returning None.
        {
            let cursor = bucket.cursor().unwrap();
            assert_eq!(cursor.first().unwrap().key.unwrap(), b"petr");
            assert_eq!(cursor.prev().unwrap().key, None);
            assert_eq!(cursor.prev().unwrap().key, None);
        }
        // Stepping past the last key keeps returning None.
        {
            let cursor = bucket.cursor().unwrap();
            assert_eq!(cursor.last().unwrap().key.unwrap(), b"ziggy");
            assert_eq!(cursor.next().unwrap().key, None);
            assert_eq!(cursor.next().unwrap().key, None);
        }
    }
}
#[test]
fn bucket_cursor() {
    let mut db = db_mock().build().unwrap();
    let mut tx = db.begin_rw_tx().unwrap();
    {
        let mut bucket = tx.create_bucket(b"bucket").unwrap();
        bucket.put(b"key", b"value".to_vec()).unwrap();
        bucket.put(b"keys", b"value".to_vec()).unwrap();
    }
    {
        let mut bucket = tx.create_bucket(b"another bucket").unwrap();
        bucket.put(b"key", b"value".to_vec()).unwrap();
        bucket.put(b"keys", b"value".to_vec()).unwrap();
    }
    // A transaction-level cursor iterates bucket names, not the key/value
    // pairs stored inside the buckets.
    {
        let mut bucket_names = vec![];
        let cursor = tx.cursor();
        {
            let item = cursor.first().unwrap();
            bucket_names.push(item.key.unwrap().to_vec());
        }
        loop {
            let item = cursor.next().unwrap();
            if item.is_none() {
                break;
            }
            bucket_names.push(item.key.unwrap().to_vec());
        }
        assert_eq!(bucket_names.len(), 2);
        assert!(bucket_names.contains(&b"bucket".to_vec()));
        assert!(bucket_names.contains(&b"another bucket".to_vec()));
    }
}
| 35.296053 | 71 | 0.507176 |
f80ccb75fb60198e79882a20e67987787e847373 | 1,991 | // Copyright 2020 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT
use address::Address;
use forest_actor::Multimap;
use ipld_amt::Amt;
#[test]
fn basic_add() {
    let store = db::MemoryDB::default();
    let mut map = Multimap::new(&store);
    let addr = Address::new_id(100);
    // A fresh key has no backing array yet.
    assert_eq!(map.get::<u64>(&addr.to_bytes()).unwrap(), None);
    map.add(addr.to_bytes().into(), 8).unwrap();
    // The first added value lands at index 0.
    let arr: Amt<u64, _> = map.get(&addr.to_bytes()).unwrap().unwrap();
    assert_eq!(arr.get(0).unwrap(), Some(&8));
    map.add(addr.to_bytes().into(), 2).unwrap();
    map.add(addr.to_bytes().into(), 78).unwrap();
}
#[test]
fn for_each() {
    let store = db::MemoryDB::default();
    let mut map = Multimap::new(&store);
    let addr = Address::new_id(100);
    assert_eq!(map.get::<u64>(&addr.to_bytes()).unwrap(), None);
    map.add(addr.to_bytes().into(), 8).unwrap();
    map.add(addr.to_bytes().into(), 2).unwrap();
    map.add(addr.to_bytes().into(), 3).unwrap();
    // A value under a different key must not show up in the iteration.
    map.add("Some other string".into(), 7).unwrap();
    let mut collected: Vec<(usize, u64)> = Vec::new();
    map.for_each(&addr.to_bytes(), |i, v| {
        collected.push((i, *v));
        Ok(())
    })
    .unwrap();
    // Values are visited in insertion order with their indices.
    assert_eq!(&collected, &[(0, 8), (1, 2), (2, 3)])
}
#[test]
fn remove_all() {
    let store = db::MemoryDB::default();
    let mut map = Multimap::new(&store);
    let first = Address::new_id(100);
    let second = Address::new_id(101);
    map.add(first.to_bytes().into(), 8).unwrap();
    map.add(first.to_bytes().into(), 88).unwrap();
    map.add(second.to_bytes().into(), 1).unwrap();
    // Sanity check: the second value for `first` landed at index 1.
    let arr: Amt<u64, _> = map.get(&first.to_bytes()).unwrap().unwrap();
    assert_eq!(arr.get(1).unwrap(), Some(&88));
    // Removing one key must not disturb the other.
    map.remove_all(&first.to_bytes()).unwrap();
    assert_eq!(map.get::<u64>(&first.to_bytes()).unwrap(), None);
    assert!(map.get::<u64>(&second.to_bytes()).unwrap().is_some());
    map.remove_all(&second.to_bytes()).unwrap();
    assert_eq!(map.get::<u64>(&second.to_bytes()).unwrap(), None);
}
| 28.855072 | 71 | 0.59116 |
760e44897ce2a8c4fcda69e6e9b52aa05448ed38 | 3,184 | #![allow(unused)]
use core::arch::asm;
// System call numbers following the Linux asm-generic (RISC-V) table.
const SYSCALL_GETCWD: usize = 17;
const SYSCALL_DUP: usize = 23;
const SYSCALL_DUP3: usize = 24;
const SYSCALL_MKDIRAT: usize = 34;
const SYSCALL_UNLINKAT: usize = 35;
const SYSCALL_LINKAT: usize = 37;
const SYSCALL_UMOUNT2: usize = 39;
const SYSCALL_MOUNT: usize = 40;
const SYSCALL_CHDIR: usize = 49;
const SYSCALL_OPENAT: usize = 56;
const SYSCALL_CLOSE: usize = 57;
const SYSCALL_PIPE: usize = 59;
const SYSCALL_GETDENTS64: usize = 61;
const SYSCALL_READ: usize = 63;
const SYSCALL_WRITE: usize = 64;
const SYSCALL_FSTAT: usize = 80;
const SYSCALL_EXIT: usize = 93;
const SYSCALL_NANOSLEEP: usize = 101;
const SYSCALL_SCHED_YIELD: usize = 124;
const SYSCALL_TIMES: usize = 153;
const SYSCALL_UNAME: usize = 160;
const SYSCALL_GET_TIMEOFDAY: usize = 169;
const SYSCALL_GETPID: usize = 172;
// Fixed: `getppid` is 173 in the asm-generic table; this previously
// duplicated `getpid`'s number (172).
const SYSCALL_GETPPID: usize = 173;
const SYSCALL_BRK: usize = 214;
const SYSCALL_MUNMAP: usize = 215;
const SYSCALL_CLONE: usize = 220;
const SYSCALL_EXECVE: usize = 221;
const SYSCALL_MMAP: usize = 222;
const SYSCALL_WAIT4: usize = 260;
/// Performs a raw RISC-V system call via the `ecall` instruction.
///
/// `id` selects the syscall (see the `SYSCALL_*` constants above); the six
/// `args` are placed in registers x10..x15 (a0..a5) and the kernel's return
/// value comes back in x10 (a0), following the RISC-V syscall convention.
fn syscall(id: usize, args: [usize; 6]) -> isize {
    let mut ret: isize;
    unsafe {
        // SAFETY: `ecall` traps into the kernel; only the registers declared
        // below are read or written by this asm block.
        asm!(
            "ecall",
            // a0 carries the first argument in and the return value out.
            inlateout("x10") args[0] => ret,
            in("x11") args[1],
            in("x12") args[2],
            in("x13") args[3],
            in("x14") args[4],
            in("x15") args[5],
            // a7 (x17) holds the syscall number.
            in("x17") id
        );
    }
    ret
}
// pub fn sys_dup(fd: usize) -> isize {
//     syscall(SYSCALL_DUP, [fd, 0, 0])
// }
/// Open the file at `path` with the given `flags` via `openat`.
/// NOTE(review): the raw `&str` pointer is passed through without a
/// terminating NUL, and no `dirfd` argument precedes it; this assumes the
/// target kernel's `openat` accepts exactly this layout — confirm against
/// the kernel's ABI.
pub fn sys_open(path: &str, flags: u32) -> isize {
    let args = [path.as_ptr() as usize, flags as usize, 0, 0, 0, 0];
    syscall(SYSCALL_OPENAT, args)
}
/// Close file descriptor `fd`.
pub fn sys_close(fd: usize) -> isize {
    let args = [fd, 0, 0, 0, 0, 0];
    syscall(SYSCALL_CLOSE, args)
}
/// Create a pipe; the new descriptors are written into `pipe`.
pub fn sys_pipe(pipe: &mut [usize]) -> isize {
    let args = [pipe.as_mut_ptr() as usize, 0, 0, 0, 0, 0];
    syscall(SYSCALL_PIPE, args)
}
/// Read up to `buffer.len()` bytes from `fd` into `buffer`.
pub fn sys_read(fd: usize, buffer: &mut [u8]) -> isize {
    let args = [fd, buffer.as_mut_ptr() as usize, buffer.len(), 0, 0, 0];
    syscall(SYSCALL_READ, args)
}
/// Write the contents of `buffer` to `fd`.
pub fn sys_write(fd: usize, buffer: &[u8]) -> isize {
    let args = [fd, buffer.as_ptr() as usize, buffer.len(), 0, 0, 0];
    syscall(SYSCALL_WRITE, args)
}
/// Terminate the current process with `exit_code`; never returns.
pub fn sys_exit(exit_code: i32) -> ! {
    let args = [exit_code as usize, 0, 0, 0, 0, 0];
    syscall(SYSCALL_EXIT, args);
    panic!("sys_exit never returns!");
}
/// Yield the CPU to another runnable task.
pub fn sys_yield() -> isize {
    syscall(SYSCALL_SCHED_YIELD, [0; 6])
}
// pub fn sys_kill(pid: usize, signal: i32) -> isize {
//     syscall(SYSCALL_KILL, [pid, signal as usize, 0, 0, 0, 0])
// }
/// Query the current time of day (result semantics defined by the kernel).
pub fn sys_get_time() -> isize {
    syscall(SYSCALL_GET_TIMEOFDAY, [0; 6])
}
/// PID of the calling process.
pub fn sys_getpid() -> isize {
    syscall(SYSCALL_GETPID, [0; 6])
}
/// Fork the current process via `clone` with all-zero arguments.
pub fn sys_fork() -> isize {
    syscall(SYSCALL_CLONE, [0; 6])
}
/// Replace the current process image with `path`, passing `args`.
pub fn sys_execve(path: &str, args: &[*const u8]) -> isize {
    let regs = [path.as_ptr() as usize, args.as_ptr() as usize, 0, 0, 0, 0];
    syscall(SYSCALL_EXECVE, regs)
}
/// Wait for child `pid`; the exit status is written through `exit_code`.
pub fn sys_waitpid(pid: isize, exit_code: *mut i32) -> isize {
    let regs = [pid as usize, exit_code as usize, 0, 0, 0, 0];
    syscall(SYSCALL_WAIT4, regs)
}
| 25.886179 | 70 | 0.612123 |
fca424740d01116c2dabc9057327d1b8bcdddeb1 | 52 | mod student;
//mod test;
pub use self::student::*;
| 10.4 | 25 | 0.653846 |
2856e8936774fd2f3bb3edd711847eee297cb3fa | 4,190 | // The From trait is used for value-to-value conversions.
// If From is implemented correctly for a type, the Into trait should work conversely.
// You can read more about it at https://doc.rust-lang.org/std/convert/trait.From.html
#[derive(Debug)]
struct Person {
    // Person's display name, e.g. "Mark".
    name: String,
    // Age in whole years.
    age: usize,
}
// We implement the Default trait to use it as a fallback
// when the provided string is not convertible into a Person object
impl Default for Person {
    /// Fallback person used whenever parsing fails: John, aged 30.
    fn default() -> Person {
        Person {
            name: "John".to_string(),
            age: 30,
        }
    }
}
// Your task is to complete this implementation
// in order for the line `let p = Person::from("Mark,20")` to compile
// Please note that you'll need to parse the age component into a `usize`
// with something like `"4".parse::<usize>()`. The outcome of this needs to
// be handled appropriately.
//
// Steps:
// 1. If the length of the provided string is 0, then return the default of Person
// 2. Split the given string on the commas present in it
// 3. Extract the first element from the split operation and use it as the name
// 4. If the name is empty, then return the default of Person
// 5. Extract the other element from the split operation and parse it into a `usize` as the age
// If while parsing the age, something goes wrong, then return the default of Person
// Otherwise, then return an instantiated Person object with the results
impl From<&str> for Person {
    /// Parses a `"name,age"` string; any malformed input (empty name,
    /// missing comma, unparsable age) falls back to `Person::default()`.
    fn from(s: &str) -> Person {
        let mut fields = s.split(',');
        // `split` always yields at least one item, so only the age field
        // can truly be absent.
        let name = fields.next().unwrap_or("");
        let age = fields.next().unwrap_or("");
        if name.is_empty() {
            return Person::default();
        }
        match age.parse::<usize>() {
            Ok(age) => Person {
                name: name.to_string(),
                age,
            },
            Err(_) => Person::default(),
        }
    }
}
fn main() {
    // Construct via `From` directly, then via the blanket `Into` impl.
    let p1 = Person::from("Mark,20");
    let p2: Person = "Gerald,70".into();
    for person in [&p1, &p2] {
        println!("{:?}", person);
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Every malformed input below must fall back to the default person
    // ("John", 30); only a well-formed "name,age" pair parses through.
    #[test]
    fn test_default() {
        // Test that the default person is 30 year old John
        let dp = Person::default();
        assert_eq!(dp.name, "John");
        assert_eq!(dp.age, 30);
    }
    #[test]
    fn test_bad_convert() {
        // Test that John is returned when bad string is provided
        let p = Person::from("");
        assert_eq!(p.name, "John");
        assert_eq!(p.age, 30);
    }
    #[test]
    fn test_good_convert() {
        // Test that "Mark,20" works
        let p = Person::from("Mark,20");
        assert_eq!(p.name, "Mark");
        assert_eq!(p.age, 20);
    }
    #[test]
    fn test_bad_age() {
        // Test that "Mark.twenty" will return the default person due to an error in parsing age
        let p = Person::from("Mark,twenty");
        assert_eq!(p.name, "John");
        assert_eq!(p.age, 30);
    }
    #[test]
    fn test_missing_comma_and_age() {
        let p: Person = Person::from("Mark");
        assert_eq!(p.name, "John");
        assert_eq!(p.age, 30);
    }
    #[test]
    fn test_missing_age() {
        let p: Person = Person::from("Mark,");
        assert_eq!(p.name, "John");
        assert_eq!(p.age, 30);
    }
    #[test]
    fn test_missing_name() {
        let p: Person = Person::from(",1");
        assert_eq!(p.name, "John");
        assert_eq!(p.age, 30);
    }
    #[test]
    fn test_missing_name_and_age() {
        let p: Person = Person::from(",");
        assert_eq!(p.name, "John");
        assert_eq!(p.age, 30);
    }
    #[test]
    fn test_missing_name_and_invalid_age() {
        let p: Person = Person::from(",one");
        assert_eq!(p.name, "John");
        assert_eq!(p.age, 30);
    }
}
| 28.310811 | 96 | 0.571838 |
4a14f470f53dec396a87c31f8c5991a8d874b0b4 | 2,540 | use crate::tap_suite::TapSuite;
use crate::tap_test::TapTest;
/// Coordinator for constructing `TapSuite` objects using the builder pattern.
///
/// # Examples
///
/// ```
/// use testanything::tap_suite_builder::TapSuiteBuilder;
/// use testanything::tap_test_builder::TapTestBuilder;
///
/// // Make a Vec<TapTest> so we have something
/// let tests = vec![TapTestBuilder::new()
/// .name("Example TAP test")
/// .passed(true)
/// .finalize()];
///
/// let tap_suite_from_builder = TapSuiteBuilder::new()
/// .name("Example TAP test suite")
/// .tests(tests)
/// .finalize();
///
/// ```
#[derive(Debug, Clone, Default)]
pub struct TapSuiteBuilder {
    /// Name of test suite
    // Both fields are `Option` so `finalize` can `take()` them, leaving
    // the builder reusable with defaults afterwards.
    pub name: Option<String>,
    /// Vector of type `Vec<TapTest>` which holds the actual tests
    pub tests: Option<Vec<TapTest>>,
}
impl TapSuiteBuilder {
/// Produce a new builder object
pub fn new() -> TapSuiteBuilder {
TapSuiteBuilder {
name: None,
tests: None,
}
}
/// Set the name
pub fn name<S: Into<String>>(&mut self, s: S) -> &mut TapSuiteBuilder {
self.name = Some(s.into());
self
}
/// Set the tests
pub fn tests(&mut self, test_vec: Vec<TapTest>) -> &mut TapSuiteBuilder {
self.tests = Some(test_vec);
self
}
/// Produce the configured `TapSuite` object. Name defaults to a blank `String` and the tests default to an empty `Vec`.
pub fn finalize(&mut self) -> TapSuite {
TapSuite {
name: self.name.take().unwrap_or_else(|| "".to_string()),
tests: self.tests.take().unwrap_or_else(Vec::new),
}
}
}
#[cfg(test)]
mod test {
    use super::TapSuiteBuilder;
    use crate::tap_suite::TapSuite;
    use crate::tap_test_builder::TapTestBuilder;
    // The builder's output must be equal to a hand-constructed `TapSuite`
    // with the same name and tests.
    #[test]
    fn test_tap_suite_builder() {
        let tests = vec![TapTestBuilder::new()
            .name("Example TAP test")
            .passed(true)
            .finalize()];
        let tap_suite_from_builder = TapSuiteBuilder::new()
            .name("Example TAP test suite")
            .tests(tests)
            .finalize();
        let tap_suite_from_scratch = TapSuite {
            name: "Example TAP test suite".to_string(),
            tests: vec![TapTestBuilder::new()
                .name("Example TAP test")
                .passed(true)
                .finalize()],
        };
        assert_eq!(tap_suite_from_builder, tap_suite_from_scratch);
    }
}
| 28.863636 | 124 | 0.576772 |
03a9f6b10432aedec377d66df5ff3738009b0926 | 53,650 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Code to convert Arrow schemas and DataFusion logical plans to protocol buffer format, allowing
//! DataFusion logical plans to be serialized and transmitted between
//! processes.
use crate::protobuf;
use crate::protobuf::plan_type::PlanTypeEnum::{
FinalLogicalPlan, FinalPhysicalPlan, InitialLogicalPlan, InitialPhysicalPlan,
OptimizedLogicalPlan, OptimizedPhysicalPlan,
};
use crate::protobuf::{
EmptyMessage, OptimizedLogicalPlanType, OptimizedPhysicalPlanType,
};
use datafusion::logical_plan::plan::StringifiedPlan;
use datafusion::logical_plan::PlanType;
use datafusion::{
arrow::datatypes::{
DataType, Field, IntervalUnit, Schema, SchemaRef, TimeUnit, UnionMode,
},
logical_expr::{BuiltInWindowFunction, BuiltinScalarFunction, WindowFunction},
logical_plan::{
window_frames::{WindowFrame, WindowFrameBound, WindowFrameUnits},
Column, DFField, DFSchemaRef, Expr,
},
physical_plan::aggregates::AggregateFunction,
scalar::ScalarValue,
};
/// Errors that can occur while lowering DataFusion plans/expressions to
/// their protobuf representation.
#[derive(Debug)]
pub enum Error {
    /// Catch-all error carrying a free-form description.
    General(String),
    /// A list contained elements of two different types.
    InconsistentListTyping(DataType, DataType),
    /// A list value did not match the list's designated element type.
    InconsistentListDesignated {
        value: ScalarValue,
        designated: DataType,
    },
    /// Value that is invalid as a DataFusion scalar value.
    InvalidScalarValue(ScalarValue),
    /// Type that is invalid as a DataFusion scalar type.
    InvalidScalarType(DataType),
    /// Time unit other than Microsecond/Nanosecond (see `Display` below).
    InvalidTimeUnit(TimeUnit),
    /// Built-in scalar function with no supported encoding.
    UnsupportedScalarFunction(BuiltinScalarFunction),
}
// All context lives in the variants; the default trait methods suffice.
impl std::error::Error for Error {}
impl std::fmt::Display for Error {
    // Human-readable rendering for each error variant.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            Self::General(desc) => write!(f, "General error: {}", desc),
            Self::InconsistentListTyping(type1, type2) => {
                write!(
                    f,
                    "Lists with inconsistent typing; {:?} and {:?} found within list",
                    type1, type2,
                )
            }
            Self::InconsistentListDesignated { value, designated } => {
                write!(
                    f,
                    "Value {:?} was inconsistent with designated type {:?}",
                    value, designated
                )
            }
            Self::InvalidScalarValue(value) => {
                write!(f, "{:?} is invalid as a DataFusion scalar value", value)
            }
            Self::InvalidScalarType(data_type) => {
                write!(f, "{:?} is invalid as a DataFusion scalar type", data_type)
            }
            Self::InvalidTimeUnit(time_unit) => {
                write!(
                    f,
                    "Only TimeUnit::Microsecond and TimeUnit::Nanosecond are valid time units, found: {:?}",
                    time_unit
                )
            }
            Self::UnsupportedScalarFunction(function) => {
                write!(f, "Unsupported scalar function {:?}", function)
            }
        }
    }
}
impl Error {
    // Convenience constructors that clone borrowed inputs into owned
    // error payloads.
    fn inconsistent_list_typing(type1: &DataType, type2: &DataType) -> Self {
        Self::InconsistentListTyping(type1.clone(), type2.clone())
    }
    fn inconsistent_list_designated(value: &ScalarValue, designated: &DataType) -> Self {
        Self::InconsistentListDesignated {
            value: value.clone(),
            designated: designated.clone(),
        }
    }
    fn invalid_scalar_value(value: &ScalarValue) -> Self {
        Self::InvalidScalarValue(value.clone())
    }
    fn invalid_scalar_type(data_type: &DataType) -> Self {
        Self::InvalidScalarType(data_type.clone())
    }
    fn invalid_time_unit(time_unit: &TimeUnit) -> Self {
        Self::InvalidTimeUnit(time_unit.clone())
    }
}
impl From<&Field> for protobuf::Field {
    /// Converts an Arrow field into its protobuf message form.
    fn from(field: &Field) -> Self {
        let arrow_type = Box::new(field.data_type().into());
        Self {
            name: field.name().clone(),
            arrow_type: Some(arrow_type),
            nullable: field.is_nullable(),
            children: Vec::new(),
        }
    }
}
impl From<&DataType> for protobuf::ArrowType {
    // Wraps the detailed `ArrowTypeEnum` conversion in the outer message.
    fn from(val: &DataType) -> Self {
        Self {
            arrow_type_enum: Some(val.into()),
        }
    }
}
impl From<&DataType> for protobuf::arrow_type::ArrowTypeEnum {
    /// Lowers an Arrow `DataType` into the protobuf `ArrowTypeEnum` wire
    /// representation, recursing into nested item/field types.
    fn from(val: &DataType) -> Self {
        match val {
            DataType::Null => Self::None(EmptyMessage {}),
            DataType::Boolean => Self::Bool(EmptyMessage {}),
            DataType::Int8 => Self::Int8(EmptyMessage {}),
            DataType::Int16 => Self::Int16(EmptyMessage {}),
            DataType::Int32 => Self::Int32(EmptyMessage {}),
            DataType::Int64 => Self::Int64(EmptyMessage {}),
            DataType::UInt8 => Self::Uint8(EmptyMessage {}),
            DataType::UInt16 => Self::Uint16(EmptyMessage {}),
            DataType::UInt32 => Self::Uint32(EmptyMessage {}),
            DataType::UInt64 => Self::Uint64(EmptyMessage {}),
            DataType::Float16 => Self::Float16(EmptyMessage {}),
            DataType::Float32 => Self::Float32(EmptyMessage {}),
            DataType::Float64 => Self::Float64(EmptyMessage {}),
            DataType::Timestamp(time_unit, timezone) => {
                // A missing timezone is encoded as an empty string.
                Self::Timestamp(protobuf::Timestamp {
                    time_unit: protobuf::TimeUnit::from(time_unit) as i32,
                    timezone: timezone.to_owned().unwrap_or_default(),
                })
            }
            DataType::Date32 => Self::Date32(EmptyMessage {}),
            DataType::Date64 => Self::Date64(EmptyMessage {}),
            DataType::Time32(time_unit) => {
                Self::Time32(protobuf::TimeUnit::from(time_unit) as i32)
            }
            DataType::Time64(time_unit) => {
                Self::Time64(protobuf::TimeUnit::from(time_unit) as i32)
            }
            DataType::Duration(time_unit) => {
                Self::Duration(protobuf::TimeUnit::from(time_unit) as i32)
            }
            DataType::Interval(interval_unit) => {
                Self::Interval(protobuf::IntervalUnit::from(interval_unit) as i32)
            }
            DataType::Binary => Self::Binary(EmptyMessage {}),
            DataType::FixedSizeBinary(size) => Self::FixedSizeBinary(*size),
            DataType::LargeBinary => Self::LargeBinary(EmptyMessage {}),
            DataType::Utf8 => Self::Utf8(EmptyMessage {}),
            DataType::LargeUtf8 => Self::LargeUtf8(EmptyMessage {}),
            // Nested types recurse through `From<&DataType>` on their
            // item/field types.
            DataType::List(item_type) => Self::List(Box::new(protobuf::List {
                field_type: Some(Box::new(item_type.as_ref().into())),
            })),
            DataType::FixedSizeList(item_type, size) => {
                Self::FixedSizeList(Box::new(protobuf::FixedSizeList {
                    field_type: Some(Box::new(item_type.as_ref().into())),
                    list_size: *size,
                }))
            }
            DataType::LargeList(item_type) => Self::LargeList(Box::new(protobuf::List {
                field_type: Some(Box::new(item_type.as_ref().into())),
            })),
            DataType::Struct(struct_fields) => Self::Struct(protobuf::Struct {
                sub_field_types: struct_fields
                    .iter()
                    .map(|field| field.into())
                    .collect::<Vec<_>>(),
            }),
            DataType::Union(union_types, union_mode) => {
                let union_mode = match union_mode {
                    UnionMode::Sparse => protobuf::UnionMode::Sparse,
                    UnionMode::Dense => protobuf::UnionMode::Dense,
                };
                Self::Union(protobuf::Union {
                    union_types: union_types
                        .iter()
                        .map(|field| field.into())
                        .collect::<Vec<_>>(),
                    union_mode: union_mode.into(),
                })
            }
            DataType::Dictionary(key_type, value_type) => {
                Self::Dictionary(Box::new(protobuf::Dictionary {
                    key: Some(Box::new(key_type.as_ref().into())),
                    value: Some(Box::new(value_type.as_ref().into())),
                }))
            }
            // NOTE(review): Arrow's `Decimal(precision, scale)` is mapped
            // positionally onto fields named `whole`/`fractional` — the
            // naming looks misleading; confirm against the .proto schema.
            DataType::Decimal(whole, fractional) => Self::Decimal(protobuf::Decimal {
                whole: *whole as u64,
                fractional: *fractional as u64,
            }),
            DataType::Map(_, _) => {
                unimplemented!("The Map data type is not yet supported")
            }
        }
    }
}
impl From<Column> for protobuf::Column {
    /// Converts an owned logical-plan column, wrapping an eventual
    /// relation qualifier into its message type.
    fn from(c: Column) -> Self {
        let relation = c
            .relation
            .map(|relation| protobuf::ColumnRelation { relation });
        Self {
            relation,
            name: c.name,
        }
    }
}
impl From<&Column> for protobuf::Column {
    /// Borrowed variant; clones and delegates to the owned conversion.
    fn from(c: &Column) -> Self {
        protobuf::Column::from(c.clone())
    }
}
impl From<&Schema> for protobuf::Schema {
    /// Converts an Arrow schema by converting each field in order.
    fn from(schema: &Schema) -> Self {
        let columns = schema
            .fields()
            .iter()
            .map(protobuf::Field::from)
            .collect();
        Self { columns }
    }
}
impl From<SchemaRef> for protobuf::Schema {
    /// Delegates to the `&Schema` conversion to avoid duplicating the
    /// field-mapping logic.
    fn from(schema: SchemaRef) -> Self {
        schema.as_ref().into()
    }
}
impl From<&DFField> for protobuf::DfField {
    /// Converts a DataFusion field, carrying its optional relation
    /// qualifier along.
    fn from(f: &DFField) -> protobuf::DfField {
        let qualifier = f.qualifier().map(|r| protobuf::ColumnRelation {
            relation: r.to_string(),
        });
        protobuf::DfField {
            field: Some(f.field().into()),
            qualifier,
        }
    }
}
impl From<&DFSchemaRef> for protobuf::DfSchema {
    /// Converts the schema's fields and copies its metadata map.
    fn from(s: &DFSchemaRef) -> protobuf::DfSchema {
        protobuf::DfSchema {
            columns: s.fields().iter().map(protobuf::DfField::from).collect(),
            metadata: s.metadata().clone(),
        }
    }
}
impl From<&StringifiedPlan> for protobuf::StringifiedPlan {
    /// Converts a stringified plan (plan-type tag plus rendered text) into
    /// its protobuf message.
    fn from(stringified_plan: &StringifiedPlan) -> Self {
        // Match on a reference to `plan_type` rather than cloning the
        // whole `StringifiedPlan`, which needlessly copied the rendered
        // plan text just to inspect the tag.
        let plan_type_enum = match &stringified_plan.plan_type {
            PlanType::InitialLogicalPlan => InitialLogicalPlan(EmptyMessage {}),
            PlanType::OptimizedLogicalPlan { optimizer_name } => {
                OptimizedLogicalPlan(OptimizedLogicalPlanType {
                    optimizer_name: optimizer_name.clone(),
                })
            }
            PlanType::FinalLogicalPlan => FinalLogicalPlan(EmptyMessage {}),
            PlanType::InitialPhysicalPlan => InitialPhysicalPlan(EmptyMessage {}),
            PlanType::OptimizedPhysicalPlan { optimizer_name } => {
                OptimizedPhysicalPlan(OptimizedPhysicalPlanType {
                    optimizer_name: optimizer_name.clone(),
                })
            }
            PlanType::FinalPhysicalPlan => FinalPhysicalPlan(EmptyMessage {}),
        };
        Self {
            plan_type: Some(protobuf::PlanType {
                plan_type_enum: Some(plan_type_enum),
            }),
            plan: stringified_plan.plan.to_string(),
        }
    }
}
impl From<&AggregateFunction> for protobuf::AggregateFunction {
    // One-to-one mapping between DataFusion aggregate functions and their
    // protobuf enum values.
    fn from(value: &AggregateFunction) -> Self {
        match value {
            AggregateFunction::Min => Self::Min,
            AggregateFunction::Max => Self::Max,
            AggregateFunction::Sum => Self::Sum,
            AggregateFunction::Avg => Self::Avg,
            AggregateFunction::Count => Self::Count,
            AggregateFunction::ApproxDistinct => Self::ApproxDistinct,
            AggregateFunction::ArrayAgg => Self::ArrayAgg,
            AggregateFunction::Variance => Self::Variance,
            AggregateFunction::VariancePop => Self::VariancePop,
            AggregateFunction::Covariance => Self::Covariance,
            AggregateFunction::CovariancePop => Self::CovariancePop,
            AggregateFunction::Stddev => Self::Stddev,
            AggregateFunction::StddevPop => Self::StddevPop,
            AggregateFunction::Correlation => Self::Correlation,
            AggregateFunction::ApproxPercentileCont => Self::ApproxPercentileCont,
            AggregateFunction::ApproxPercentileContWithWeight => {
                Self::ApproxPercentileContWithWeight
            }
            AggregateFunction::ApproxMedian => Self::ApproxMedian,
        }
    }
}
impl From<&BuiltInWindowFunction> for protobuf::BuiltInWindowFunction {
    /// Maps a built-in window function onto its protobuf enum variant.
    fn from(value: &BuiltInWindowFunction) -> Self {
        match value {
            BuiltInWindowFunction::RowNumber => Self::RowNumber,
            BuiltInWindowFunction::Rank => Self::Rank,
            BuiltInWindowFunction::DenseRank => Self::DenseRank,
            BuiltInWindowFunction::PercentRank => Self::PercentRank,
            BuiltInWindowFunction::CumeDist => Self::CumeDist,
            BuiltInWindowFunction::Ntile => Self::Ntile,
            BuiltInWindowFunction::Lag => Self::Lag,
            BuiltInWindowFunction::Lead => Self::Lead,
            BuiltInWindowFunction::FirstValue => Self::FirstValue,
            BuiltInWindowFunction::LastValue => Self::LastValue,
            BuiltInWindowFunction::NthValue => Self::NthValue,
        }
    }
}
impl From<WindowFrameUnits> for protobuf::WindowFrameUnits {
    /// Maps window-frame units onto the protobuf enum.
    fn from(units: WindowFrameUnits) -> Self {
        match units {
            WindowFrameUnits::Groups => Self::Groups,
            WindowFrameUnits::Range => Self::Range,
            WindowFrameUnits::Rows => Self::Rows,
        }
    }
}
impl From<WindowFrameBound> for protobuf::WindowFrameBound {
    /// Splits a frame bound into its protobuf bound-type tag plus the
    /// optional offset, then assembles the message in one place.
    fn from(bound: WindowFrameBound) -> Self {
        let (bound_type, offset) = match bound {
            WindowFrameBound::CurrentRow => {
                (protobuf::WindowFrameBoundType::CurrentRow, None)
            }
            WindowFrameBound::Preceding(v) => {
                (protobuf::WindowFrameBoundType::Preceding, v)
            }
            WindowFrameBound::Following(v) => {
                (protobuf::WindowFrameBoundType::Following, v)
            }
        };
        Self {
            window_frame_bound_type: bound_type.into(),
            bound_value: offset.map(protobuf::window_frame_bound::BoundValue::Value),
        }
    }
}
impl From<WindowFrame> for protobuf::WindowFrame {
    /// Converts a window-frame definition into its protobuf message form.
    fn from(window: WindowFrame) -> Self {
        let units: protobuf::WindowFrameUnits = window.units.into();
        let start: protobuf::WindowFrameBound = window.start_bound.into();
        let end: protobuf::WindowFrameBound = window.end_bound.into();
        Self {
            window_frame_units: units.into(),
            start_bound: Some(start),
            end_bound: Some(protobuf::window_frame::EndBound::Bound(end)),
        }
    }
}
impl TryFrom<&Expr> for protobuf::LogicalExprNode {
type Error = Error;
fn try_from(expr: &Expr) -> Result<Self, Self::Error> {
use protobuf::logical_expr_node::ExprType;
let expr_node = match expr {
Expr::Column(c) => Self {
expr_type: Some(ExprType::Column(c.into())),
},
Expr::Alias(expr, alias) => {
let alias = Box::new(protobuf::AliasNode {
expr: Some(Box::new(expr.as_ref().try_into()?)),
alias: alias.to_owned(),
});
Self {
expr_type: Some(ExprType::Alias(alias)),
}
}
Expr::Literal(value) => {
let pb_value: protobuf::ScalarValue = value.try_into()?;
Self {
expr_type: Some(ExprType::Literal(pb_value)),
}
}
Expr::BinaryExpr { left, op, right } => {
let binary_expr = Box::new(protobuf::BinaryExprNode {
l: Some(Box::new(left.as_ref().try_into()?)),
r: Some(Box::new(right.as_ref().try_into()?)),
op: format!("{:?}", op),
});
Self {
expr_type: Some(ExprType::BinaryExpr(binary_expr)),
}
}
Expr::WindowFunction {
ref fun,
ref args,
ref partition_by,
ref order_by,
ref window_frame,
} => {
let window_function = match fun {
WindowFunction::AggregateFunction(fun) => {
protobuf::window_expr_node::WindowFunction::AggrFunction(
protobuf::AggregateFunction::from(fun).into(),
)
}
WindowFunction::BuiltInWindowFunction(fun) => {
protobuf::window_expr_node::WindowFunction::BuiltInFunction(
protobuf::BuiltInWindowFunction::from(fun).into(),
)
}
};
let arg_expr: Option<Box<Self>> = if !args.is_empty() {
let arg = &args[0];
Some(Box::new(arg.try_into()?))
} else {
None
};
let partition_by = partition_by
.iter()
.map(|e| e.try_into())
.collect::<Result<Vec<_>, _>>()?;
let order_by = order_by
.iter()
.map(|e| e.try_into())
.collect::<Result<Vec<_>, _>>()?;
let window_frame = window_frame.map(|window_frame| {
protobuf::window_expr_node::WindowFrame::Frame(window_frame.into())
});
let window_expr = Box::new(protobuf::WindowExprNode {
expr: arg_expr,
window_function: Some(window_function),
partition_by,
order_by,
window_frame,
});
Self {
expr_type: Some(ExprType::WindowExpr(window_expr)),
}
}
Expr::AggregateFunction {
ref fun, ref args, ..
} => {
let aggr_function = match fun {
AggregateFunction::ApproxDistinct => {
protobuf::AggregateFunction::ApproxDistinct
}
AggregateFunction::ApproxPercentileCont => {
protobuf::AggregateFunction::ApproxPercentileCont
}
AggregateFunction::ApproxPercentileContWithWeight => {
protobuf::AggregateFunction::ApproxPercentileContWithWeight
}
AggregateFunction::ArrayAgg => protobuf::AggregateFunction::ArrayAgg,
AggregateFunction::Min => protobuf::AggregateFunction::Min,
AggregateFunction::Max => protobuf::AggregateFunction::Max,
AggregateFunction::Sum => protobuf::AggregateFunction::Sum,
AggregateFunction::Avg => protobuf::AggregateFunction::Avg,
AggregateFunction::Count => protobuf::AggregateFunction::Count,
AggregateFunction::Variance => protobuf::AggregateFunction::Variance,
AggregateFunction::VariancePop => {
protobuf::AggregateFunction::VariancePop
}
AggregateFunction::Covariance => {
protobuf::AggregateFunction::Covariance
}
AggregateFunction::CovariancePop => {
protobuf::AggregateFunction::CovariancePop
}
AggregateFunction::Stddev => protobuf::AggregateFunction::Stddev,
AggregateFunction::StddevPop => {
protobuf::AggregateFunction::StddevPop
}
AggregateFunction::Correlation => {
protobuf::AggregateFunction::Correlation
}
AggregateFunction::ApproxMedian => {
protobuf::AggregateFunction::ApproxMedian
}
};
let aggregate_expr = protobuf::AggregateExprNode {
aggr_function: aggr_function.into(),
expr: args
.iter()
.map(|v| v.try_into())
.collect::<Result<Vec<_>, _>>()?,
};
Self {
expr_type: Some(ExprType::AggregateExpr(aggregate_expr)),
}
}
Expr::ScalarVariable(_, _) => unimplemented!(),
Expr::ScalarFunction { ref fun, ref args } => {
let fun: protobuf::ScalarFunction = fun.try_into()?;
let args: Vec<Self> = args
.iter()
.map(|e| e.try_into())
.collect::<Result<Vec<Self>, Error>>()?;
Self {
expr_type: Some(ExprType::ScalarFunction(
protobuf::ScalarFunctionNode {
fun: fun.into(),
args,
},
)),
}
}
Expr::ScalarUDF { fun, args } => Self {
expr_type: Some(ExprType::ScalarUdfExpr(protobuf::ScalarUdfExprNode {
fun_name: fun.name.clone(),
args: args
.iter()
.map(|expr| expr.try_into())
.collect::<Result<Vec<_>, Error>>()?,
})),
},
Expr::AggregateUDF { fun, args } => Self {
expr_type: Some(ExprType::AggregateUdfExpr(
protobuf::AggregateUdfExprNode {
fun_name: fun.name.clone(),
args: args.iter().map(|expr| expr.try_into()).collect::<Result<
Vec<_>,
Error,
>>(
)?,
},
)),
},
Expr::Not(expr) => {
let expr = Box::new(protobuf::Not {
expr: Some(Box::new(expr.as_ref().try_into()?)),
});
Self {
expr_type: Some(ExprType::NotExpr(expr)),
}
}
Expr::IsNull(expr) => {
let expr = Box::new(protobuf::IsNull {
expr: Some(Box::new(expr.as_ref().try_into()?)),
});
Self {
expr_type: Some(ExprType::IsNullExpr(expr)),
}
}
Expr::IsNotNull(expr) => {
let expr = Box::new(protobuf::IsNotNull {
expr: Some(Box::new(expr.as_ref().try_into()?)),
});
Self {
expr_type: Some(ExprType::IsNotNullExpr(expr)),
}
}
Expr::Between {
expr,
negated,
low,
high,
} => {
let expr = Box::new(protobuf::BetweenNode {
expr: Some(Box::new(expr.as_ref().try_into()?)),
negated: *negated,
low: Some(Box::new(low.as_ref().try_into()?)),
high: Some(Box::new(high.as_ref().try_into()?)),
});
Self {
expr_type: Some(ExprType::Between(expr)),
}
}
Expr::Case {
expr,
when_then_expr,
else_expr,
} => {
let when_then_expr = when_then_expr
.iter()
.map(|(w, t)| {
Ok(protobuf::WhenThen {
when_expr: Some(w.as_ref().try_into()?),
then_expr: Some(t.as_ref().try_into()?),
})
})
.collect::<Result<Vec<protobuf::WhenThen>, Error>>()?;
let expr = Box::new(protobuf::CaseNode {
expr: match expr {
Some(e) => Some(Box::new(e.as_ref().try_into()?)),
None => None,
},
when_then_expr,
else_expr: match else_expr {
Some(e) => Some(Box::new(e.as_ref().try_into()?)),
None => None,
},
});
Self {
expr_type: Some(ExprType::Case(expr)),
}
}
Expr::Cast { expr, data_type } => {
let expr = Box::new(protobuf::CastNode {
expr: Some(Box::new(expr.as_ref().try_into()?)),
arrow_type: Some(data_type.into()),
});
Self {
expr_type: Some(ExprType::Cast(expr)),
}
}
Expr::Sort {
expr,
asc,
nulls_first,
} => {
let expr = Box::new(protobuf::SortExprNode {
expr: Some(Box::new(expr.as_ref().try_into()?)),
asc: *asc,
nulls_first: *nulls_first,
});
Self {
expr_type: Some(ExprType::Sort(expr)),
}
}
Expr::Negative(expr) => {
let expr = Box::new(protobuf::NegativeNode {
expr: Some(Box::new(expr.as_ref().try_into()?)),
});
Self {
expr_type: Some(ExprType::Negative(expr)),
}
}
Expr::InList {
expr,
list,
negated,
} => {
let expr = Box::new(protobuf::InListNode {
expr: Some(Box::new(expr.as_ref().try_into()?)),
list: list
.iter()
.map(|expr| expr.try_into())
.collect::<Result<Vec<_>, Error>>()?,
negated: *negated,
});
Self {
expr_type: Some(ExprType::InList(expr)),
}
}
Expr::Wildcard => Self {
expr_type: Some(ExprType::Wildcard(true)),
},
_ => unimplemented!(),
};
Ok(expr_node)
}
}
impl TryFrom<&ScalarValue> for protobuf::ScalarValue {
type Error = Error;
fn try_from(val: &ScalarValue) -> Result<Self, Self::Error> {
use datafusion::scalar;
use protobuf::{scalar_value::Value, PrimitiveScalarType};
let scalar_val = match val {
scalar::ScalarValue::Boolean(val) => {
create_proto_scalar(val, PrimitiveScalarType::Bool, |s| {
Value::BoolValue(*s)
})
}
scalar::ScalarValue::Float32(val) => {
create_proto_scalar(val, PrimitiveScalarType::Float32, |s| {
Value::Float32Value(*s)
})
}
scalar::ScalarValue::Float64(val) => {
create_proto_scalar(val, PrimitiveScalarType::Float64, |s| {
Value::Float64Value(*s)
})
}
scalar::ScalarValue::Int8(val) => {
create_proto_scalar(val, PrimitiveScalarType::Int8, |s| {
Value::Int8Value(*s as i32)
})
}
scalar::ScalarValue::Int16(val) => {
create_proto_scalar(val, PrimitiveScalarType::Int16, |s| {
Value::Int16Value(*s as i32)
})
}
scalar::ScalarValue::Int32(val) => {
create_proto_scalar(val, PrimitiveScalarType::Int32, |s| {
Value::Int32Value(*s)
})
}
scalar::ScalarValue::Int64(val) => {
create_proto_scalar(val, PrimitiveScalarType::Int64, |s| {
Value::Int64Value(*s)
})
}
scalar::ScalarValue::UInt8(val) => {
create_proto_scalar(val, PrimitiveScalarType::Uint8, |s| {
Value::Uint8Value(*s as u32)
})
}
scalar::ScalarValue::UInt16(val) => {
create_proto_scalar(val, PrimitiveScalarType::Uint16, |s| {
Value::Uint16Value(*s as u32)
})
}
scalar::ScalarValue::UInt32(val) => {
create_proto_scalar(val, PrimitiveScalarType::Uint32, |s| {
Value::Uint32Value(*s)
})
}
scalar::ScalarValue::UInt64(val) => {
create_proto_scalar(val, PrimitiveScalarType::Uint64, |s| {
Value::Uint64Value(*s)
})
}
scalar::ScalarValue::Utf8(val) => {
create_proto_scalar(val, PrimitiveScalarType::Utf8, |s| {
Value::Utf8Value(s.to_owned())
})
}
scalar::ScalarValue::LargeUtf8(val) => {
create_proto_scalar(val, PrimitiveScalarType::LargeUtf8, |s| {
Value::LargeUtf8Value(s.to_owned())
})
}
scalar::ScalarValue::List(value, datatype) => {
println!("Current datatype of list: {:?}", datatype);
match value {
Some(values) => {
if values.is_empty() {
protobuf::ScalarValue {
value: Some(protobuf::scalar_value::Value::ListValue(
protobuf::ScalarListValue {
datatype: Some(datatype.as_ref().try_into()?),
values: Vec::new(),
},
)),
}
} else {
let scalar_type = match datatype.as_ref() {
DataType::List(field) => field.as_ref().data_type(),
_ => todo!("Proper error handling"),
};
println!("Current scalar type for list: {:?}", scalar_type);
let type_checked_values: Vec<protobuf::ScalarValue> = values
.iter()
.map(|scalar| match (scalar, scalar_type) {
(
scalar::ScalarValue::List(_, list_type),
DataType::List(field),
) => {
if let DataType::List(list_field) =
list_type.as_ref()
{
let scalar_datatype = field.data_type();
let list_datatype = list_field.data_type();
if std::mem::discriminant(list_datatype)
!= std::mem::discriminant(scalar_datatype)
{
return Err(
Error::inconsistent_list_typing(
list_datatype,
scalar_datatype,
),
);
}
scalar.try_into()
} else {
Err(Error::inconsistent_list_designated(
scalar, datatype,
))
}
}
(
scalar::ScalarValue::Boolean(_),
DataType::Boolean,
) => scalar.try_into(),
(
scalar::ScalarValue::Float32(_),
DataType::Float32,
) => scalar.try_into(),
(
scalar::ScalarValue::Float64(_),
DataType::Float64,
) => scalar.try_into(),
(scalar::ScalarValue::Int8(_), DataType::Int8) => {
scalar.try_into()
}
(scalar::ScalarValue::Int16(_), DataType::Int16) => {
scalar.try_into()
}
(scalar::ScalarValue::Int32(_), DataType::Int32) => {
scalar.try_into()
}
(scalar::ScalarValue::Int64(_), DataType::Int64) => {
scalar.try_into()
}
(scalar::ScalarValue::UInt8(_), DataType::UInt8) => {
scalar.try_into()
}
(
scalar::ScalarValue::UInt16(_),
DataType::UInt16,
) => scalar.try_into(),
(
scalar::ScalarValue::UInt32(_),
DataType::UInt32,
) => scalar.try_into(),
(
scalar::ScalarValue::UInt64(_),
DataType::UInt64,
) => scalar.try_into(),
(scalar::ScalarValue::Utf8(_), DataType::Utf8) => {
scalar.try_into()
}
(
scalar::ScalarValue::LargeUtf8(_),
DataType::LargeUtf8,
) => scalar.try_into(),
_ => Err(Error::inconsistent_list_designated(
scalar, datatype,
)),
})
.collect::<Result<Vec<_>, _>>()?;
protobuf::ScalarValue {
value: Some(protobuf::scalar_value::Value::ListValue(
protobuf::ScalarListValue {
datatype: Some(datatype.as_ref().try_into()?),
values: type_checked_values,
},
)),
}
}
}
None => protobuf::ScalarValue {
value: Some(protobuf::scalar_value::Value::NullListValue(
datatype.as_ref().try_into()?,
)),
},
}
}
datafusion::scalar::ScalarValue::Date32(val) => {
create_proto_scalar(val, PrimitiveScalarType::Date32, |s| {
Value::Date32Value(*s)
})
}
datafusion::scalar::ScalarValue::TimestampMicrosecond(val, tz) => {
create_proto_scalar(val, PrimitiveScalarType::TimeMicrosecond, |s| {
Value::TimestampValue(protobuf::ScalarTimestampValue {
timezone: tz.as_ref().unwrap_or(&"".to_string()).clone(),
value: Some(
protobuf::scalar_timestamp_value::Value::TimeMicrosecondValue(
*s,
),
),
})
})
}
datafusion::scalar::ScalarValue::TimestampNanosecond(val, tz) => {
create_proto_scalar(val, PrimitiveScalarType::TimeNanosecond, |s| {
Value::TimestampValue(protobuf::ScalarTimestampValue {
timezone: tz.as_ref().unwrap_or(&"".to_string()).clone(),
value: Some(
protobuf::scalar_timestamp_value::Value::TimeNanosecondValue(
*s,
),
),
})
})
}
datafusion::scalar::ScalarValue::Decimal128(val, p, s) => match *val {
Some(v) => {
let array = v.to_be_bytes();
let vec_val: Vec<u8> = array.to_vec();
protobuf::ScalarValue {
value: Some(Value::Decimal128Value(protobuf::Decimal128 {
value: vec_val,
p: *p as i64,
s: *s as i64,
})),
}
}
None => protobuf::ScalarValue {
value: Some(protobuf::scalar_value::Value::NullValue(
PrimitiveScalarType::Decimal128 as i32,
)),
},
},
datafusion::scalar::ScalarValue::Date64(val) => {
create_proto_scalar(val, PrimitiveScalarType::Date64, |s| {
Value::Date64Value(*s)
})
}
datafusion::scalar::ScalarValue::TimestampSecond(val, tz) => {
create_proto_scalar(val, PrimitiveScalarType::TimeSecond, |s| {
Value::TimestampValue(protobuf::ScalarTimestampValue {
timezone: tz.as_ref().unwrap_or(&"".to_string()).clone(),
value: Some(
protobuf::scalar_timestamp_value::Value::TimeSecondValue(*s),
),
})
})
}
datafusion::scalar::ScalarValue::TimestampMillisecond(val, tz) => {
create_proto_scalar(val, PrimitiveScalarType::TimeMillisecond, |s| {
Value::TimestampValue(protobuf::ScalarTimestampValue {
timezone: tz.as_ref().unwrap_or(&"".to_string()).clone(),
value: Some(
protobuf::scalar_timestamp_value::Value::TimeMillisecondValue(
*s,
),
),
})
})
}
datafusion::scalar::ScalarValue::IntervalYearMonth(val) => {
create_proto_scalar(val, PrimitiveScalarType::IntervalYearmonth, |s| {
Value::IntervalYearmonthValue(*s)
})
}
datafusion::scalar::ScalarValue::IntervalDayTime(val) => {
create_proto_scalar(val, PrimitiveScalarType::IntervalDaytime, |s| {
Value::IntervalDaytimeValue(*s)
})
}
_ => {
return Err(Error::invalid_scalar_value(val));
}
};
Ok(scalar_val)
}
}
impl TryFrom<&BuiltinScalarFunction> for protobuf::ScalarFunction {
    type Error = Error;
    /// Maps a built-in scalar function onto its protobuf enum variant.
    ///
    /// The match below is exhaustive and never produces an error today; the
    /// fallible signature leaves room for functions without a protobuf
    /// counterpart in the future.
    fn try_from(scalar: &BuiltinScalarFunction) -> Result<Self, Self::Error> {
        let scalar_function = match scalar {
            BuiltinScalarFunction::Sqrt => Self::Sqrt,
            BuiltinScalarFunction::Sin => Self::Sin,
            BuiltinScalarFunction::Cos => Self::Cos,
            BuiltinScalarFunction::Tan => Self::Tan,
            BuiltinScalarFunction::Asin => Self::Asin,
            BuiltinScalarFunction::Acos => Self::Acos,
            BuiltinScalarFunction::Atan => Self::Atan,
            BuiltinScalarFunction::Exp => Self::Exp,
            BuiltinScalarFunction::Log => Self::Log,
            BuiltinScalarFunction::Ln => Self::Ln,
            BuiltinScalarFunction::Log10 => Self::Log10,
            BuiltinScalarFunction::Floor => Self::Floor,
            BuiltinScalarFunction::Ceil => Self::Ceil,
            BuiltinScalarFunction::Round => Self::Round,
            BuiltinScalarFunction::Trunc => Self::Trunc,
            BuiltinScalarFunction::Abs => Self::Abs,
            BuiltinScalarFunction::OctetLength => Self::OctetLength,
            BuiltinScalarFunction::Concat => Self::Concat,
            BuiltinScalarFunction::Lower => Self::Lower,
            BuiltinScalarFunction::Upper => Self::Upper,
            BuiltinScalarFunction::Trim => Self::Trim,
            BuiltinScalarFunction::Ltrim => Self::Ltrim,
            BuiltinScalarFunction::Rtrim => Self::Rtrim,
            BuiltinScalarFunction::ToTimestamp => Self::ToTimestamp,
            BuiltinScalarFunction::Array => Self::Array,
            BuiltinScalarFunction::NullIf => Self::NullIf,
            BuiltinScalarFunction::DatePart => Self::DatePart,
            BuiltinScalarFunction::DateTrunc => Self::DateTrunc,
            // Hash/crypto functions: protobuf variants use CamelCase names
            // (Md5, Sha224, ...) while the datafusion enum keeps acronyms.
            BuiltinScalarFunction::MD5 => Self::Md5,
            BuiltinScalarFunction::SHA224 => Self::Sha224,
            BuiltinScalarFunction::SHA256 => Self::Sha256,
            BuiltinScalarFunction::SHA384 => Self::Sha384,
            BuiltinScalarFunction::SHA512 => Self::Sha512,
            BuiltinScalarFunction::Digest => Self::Digest,
            BuiltinScalarFunction::ToTimestampMillis => Self::ToTimestampMillis,
            BuiltinScalarFunction::Log2 => Self::Log2,
            BuiltinScalarFunction::Signum => Self::Signum,
            BuiltinScalarFunction::Ascii => Self::Ascii,
            BuiltinScalarFunction::BitLength => Self::BitLength,
            BuiltinScalarFunction::Btrim => Self::Btrim,
            BuiltinScalarFunction::CharacterLength => Self::CharacterLength,
            BuiltinScalarFunction::Chr => Self::Chr,
            BuiltinScalarFunction::ConcatWithSeparator => Self::ConcatWithSeparator,
            BuiltinScalarFunction::InitCap => Self::InitCap,
            BuiltinScalarFunction::Left => Self::Left,
            BuiltinScalarFunction::Lpad => Self::Lpad,
            BuiltinScalarFunction::Random => Self::Random,
            BuiltinScalarFunction::RegexpReplace => Self::RegexpReplace,
            BuiltinScalarFunction::Repeat => Self::Repeat,
            BuiltinScalarFunction::Replace => Self::Replace,
            BuiltinScalarFunction::Reverse => Self::Reverse,
            BuiltinScalarFunction::Right => Self::Right,
            BuiltinScalarFunction::Rpad => Self::Rpad,
            BuiltinScalarFunction::SplitPart => Self::SplitPart,
            BuiltinScalarFunction::StartsWith => Self::StartsWith,
            BuiltinScalarFunction::Strpos => Self::Strpos,
            BuiltinScalarFunction::Substr => Self::Substr,
            BuiltinScalarFunction::ToHex => Self::ToHex,
            BuiltinScalarFunction::ToTimestampMicros => Self::ToTimestampMicros,
            BuiltinScalarFunction::ToTimestampSeconds => Self::ToTimestampSeconds,
            BuiltinScalarFunction::Now => Self::Now,
            BuiltinScalarFunction::Translate => Self::Translate,
            BuiltinScalarFunction::RegexpMatch => Self::RegexpMatch,
            BuiltinScalarFunction::Coalesce => Self::Coalesce,
            BuiltinScalarFunction::Power => Self::Power,
        };
        Ok(scalar_function)
    }
}
impl TryFrom<&DataType> for protobuf::ScalarType {
    type Error = Error;
    /// Wraps the `Datatype` conversion in the outer `ScalarType` message.
    fn try_from(value: &DataType) -> Result<Self, Self::Error> {
        protobuf::scalar_type::Datatype::try_from(value).map(|datatype| Self {
            datatype: Some(datatype),
        })
    }
}
impl TryFrom<&DataType> for protobuf::scalar_type::Datatype {
    type Error = Error;
    /// Maps an arrow `DataType` onto the protobuf scalar-type union.
    ///
    /// Plain primitives become `Scalar(...)`; lists are flattened into a
    /// `ScalarListType` carrying the nested field-name path plus the leaf
    /// primitive type. Any other datatype is rejected with
    /// `invalid_scalar_type`.
    fn try_from(val: &DataType) -> Result<Self, Self::Error> {
        use protobuf::PrimitiveScalarType;
        let scalar_value = match val {
            DataType::Boolean => Self::Scalar(PrimitiveScalarType::Bool as i32),
            DataType::Int8 => Self::Scalar(PrimitiveScalarType::Int8 as i32),
            DataType::Int16 => Self::Scalar(PrimitiveScalarType::Int16 as i32),
            DataType::Int32 => Self::Scalar(PrimitiveScalarType::Int32 as i32),
            DataType::Int64 => Self::Scalar(PrimitiveScalarType::Int64 as i32),
            DataType::UInt8 => Self::Scalar(PrimitiveScalarType::Uint8 as i32),
            DataType::UInt16 => Self::Scalar(PrimitiveScalarType::Uint16 as i32),
            DataType::UInt32 => Self::Scalar(PrimitiveScalarType::Uint32 as i32),
            DataType::UInt64 => Self::Scalar(PrimitiveScalarType::Uint64 as i32),
            DataType::Float32 => Self::Scalar(PrimitiveScalarType::Float32 as i32),
            DataType::Float64 => Self::Scalar(PrimitiveScalarType::Float64 as i32),
            DataType::Date32 => Self::Scalar(PrimitiveScalarType::Date32 as i32),
            DataType::Time64(time_unit) => match time_unit {
                TimeUnit::Microsecond => {
                    Self::Scalar(PrimitiveScalarType::TimeMicrosecond as i32)
                }
                TimeUnit::Nanosecond => {
                    Self::Scalar(PrimitiveScalarType::TimeNanosecond as i32)
                }
                _ => {
                    // Only micro-/nanosecond Time64 values are representable.
                    return Err(Error::invalid_time_unit(time_unit));
                }
            },
            DataType::Utf8 => Self::Scalar(PrimitiveScalarType::Utf8 as i32),
            DataType::LargeUtf8 => Self::Scalar(PrimitiveScalarType::LargeUtf8 as i32),
            DataType::List(field_type) => {
                let mut field_names: Vec<String> = Vec::new();
                let mut curr_field = field_type.as_ref();
                field_names.push(curr_field.name().to_owned());
                // For each nested field check nested datatype, since datafusion scalars only
                // support recursive lists with a leaf scalar type
                // any other compound types are errors.
                while let DataType::List(nested_field_type) = curr_field.data_type() {
                    curr_field = nested_field_type.as_ref();
                    field_names.push(curr_field.name().to_owned());
                    if !is_valid_scalar_type_no_list_check(curr_field.data_type()) {
                        return Err(Error::invalid_scalar_type(curr_field.data_type()));
                    }
                }
                // The loop exits without checking the final (non-list) level,
                // so validate the leaf type separately here.
                let deepest_datatype = curr_field.data_type();
                if !is_valid_scalar_type_no_list_check(deepest_datatype) {
                    return Err(Error::invalid_scalar_type(deepest_datatype));
                }
                let pb_deepest_type: PrimitiveScalarType = match deepest_datatype {
                    DataType::Boolean => PrimitiveScalarType::Bool,
                    DataType::Int8 => PrimitiveScalarType::Int8,
                    DataType::Int16 => PrimitiveScalarType::Int16,
                    DataType::Int32 => PrimitiveScalarType::Int32,
                    DataType::Int64 => PrimitiveScalarType::Int64,
                    DataType::UInt8 => PrimitiveScalarType::Uint8,
                    DataType::UInt16 => PrimitiveScalarType::Uint16,
                    DataType::UInt32 => PrimitiveScalarType::Uint32,
                    DataType::UInt64 => PrimitiveScalarType::Uint64,
                    DataType::Float32 => PrimitiveScalarType::Float32,
                    DataType::Float64 => PrimitiveScalarType::Float64,
                    DataType::Date32 => PrimitiveScalarType::Date32,
                    DataType::Time64(time_unit) => match time_unit {
                        TimeUnit::Microsecond => PrimitiveScalarType::TimeMicrosecond,
                        TimeUnit::Nanosecond => PrimitiveScalarType::TimeNanosecond,
                        _ => {
                            return Err(Error::invalid_time_unit(time_unit));
                        }
                    },
                    DataType::Utf8 => PrimitiveScalarType::Utf8,
                    DataType::LargeUtf8 => PrimitiveScalarType::LargeUtf8,
                    _ => {
                        return Err(Error::invalid_scalar_type(val));
                    }
                };
                Self::List(protobuf::ScalarListType {
                    field_names,
                    deepest_type: pb_deepest_type as i32,
                })
            }
            DataType::Null
            | DataType::Float16
            | DataType::Timestamp(_, _)
            | DataType::Date64
            | DataType::Time32(_)
            | DataType::Duration(_)
            | DataType::Interval(_)
            | DataType::Binary
            | DataType::FixedSizeBinary(_)
            | DataType::LargeBinary
            | DataType::FixedSizeList(_, _)
            | DataType::LargeList(_)
            | DataType::Struct(_)
            | DataType::Union(_, _)
            | DataType::Dictionary(_, _)
            | DataType::Map(_, _)
            | DataType::Decimal(_, _) => {
                return Err(Error::invalid_scalar_type(val));
            }
        };
        Ok(scalar_value)
    }
}
impl From<&TimeUnit> for protobuf::TimeUnit {
    /// Maps an arrow time unit onto the protobuf enum.
    fn from(val: &TimeUnit) -> Self {
        match val {
            TimeUnit::Nanosecond => Self::Nanosecond,
            TimeUnit::Microsecond => Self::Microsecond,
            // NOTE: the protobuf variant for milliseconds is named
            // `TimeMillisecond`, unlike its siblings.
            TimeUnit::Millisecond => Self::TimeMillisecond,
            TimeUnit::Second => Self::Second,
        }
    }
}
impl From<&IntervalUnit> for protobuf::IntervalUnit {
    /// Maps an arrow interval unit onto the protobuf enum.
    fn from(interval_unit: &IntervalUnit) -> Self {
        match interval_unit {
            IntervalUnit::MonthDayNano => Self::MonthDayNano,
            IntervalUnit::DayTime => Self::DayTime,
            IntervalUnit::YearMonth => Self::YearMonth,
        }
    }
}
/// Builds a protobuf scalar from an optional value: `Some` goes through
/// `constructor`, `None` becomes a typed null carrying `null_arrow_type`.
fn create_proto_scalar<I, T: FnOnce(&I) -> protobuf::scalar_value::Value>(
    v: &Option<I>,
    null_arrow_type: protobuf::PrimitiveScalarType,
    constructor: T,
) -> protobuf::ScalarValue {
    let value = match v {
        Some(inner) => constructor(inner),
        None => protobuf::scalar_value::Value::NullValue(null_arrow_type as i32),
    };
    protobuf::ScalarValue { value: Some(value) }
}
/// Whether `datatype` can appear in a scalar, without recursing into list
/// element types (nested lists are accepted unconditionally here).
fn is_valid_scalar_type_no_list_check(datatype: &DataType) -> bool {
    matches!(
        datatype,
        DataType::Boolean
            | DataType::Int8
            | DataType::Int16
            | DataType::Int32
            | DataType::Int64
            | DataType::UInt8
            | DataType::UInt16
            | DataType::UInt32
            | DataType::UInt64
            | DataType::Float32
            | DataType::Float64
            | DataType::Utf8
            | DataType::LargeUtf8
            | DataType::Date32
            | DataType::List(_)
            | DataType::Time64(TimeUnit::Microsecond | TimeUnit::Nanosecond)
    )
}
| 42.714968 | 108 | 0.481435 |
6a106075446db0c71ec4db5be7ab0eba3208fddf | 7,896 | #![allow(unused_imports, non_camel_case_types)]
use crate::model::Annotation::Annotation;
use crate::model::CodeableConcept::CodeableConcept;
use crate::model::Extension::Extension;
use serde_json::json;
use serde_json::value::Value;
use std::borrow::Cow;
/// The RiskEvidenceSynthesis resource describes the likelihood of an outcome in a
/// population plus exposure state where the risk estimate is derived from a
/// combination of research studies.
#[derive(Debug)]
pub struct RiskEvidenceSynthesis_CertaintySubcomponent<'a> {
    // Backing FHIR JSON for this element; `Cow` lets the wrapper either
    // borrow from an already-parsed document or own its own copy.
    pub(crate) value: Cow<'a, Value>,
}
impl RiskEvidenceSynthesis_CertaintySubcomponent<'_> {
    /// Wraps a borrowed JSON value without copying it.
    pub fn new(value: &Value) -> RiskEvidenceSynthesis_CertaintySubcomponent {
        RiskEvidenceSynthesis_CertaintySubcomponent {
            value: Cow::Borrowed(value),
        }
    }
    /// Returns an owned clone of the underlying JSON.
    pub fn to_json(&self) -> Value {
        (*self.value).clone()
    }
    /// May be used to represent additional information that is not part of the basic
    /// definition of the element. To make the use of extensions safe and manageable,
    /// there is a strict set of governance applied to the definition and use of
    /// extensions. Though any implementer can define an extension, there is a set of
    /// requirements that SHALL be met as part of the definition of the extension.
    pub fn extension(&self) -> Option<Vec<Extension>> {
        if let Some(Value::Array(val)) = self.value.get("extension") {
            return Some(
                val.into_iter()
                    .map(|e| Extension {
                        value: Cow::Borrowed(e),
                    })
                    .collect::<Vec<_>>(),
            );
        }
        return None;
    }
    /// Unique id for the element within a resource (for internal references). This may
    /// be any string value that does not contain spaces.
    pub fn id(&self) -> Option<&str> {
        if let Some(Value::String(string)) = self.value.get("id") {
            return Some(string);
        }
        return None;
    }
    /// May be used to represent additional information that is not part of the basic
    /// definition of the element and that modifies the understanding of the element in
    /// which it is contained and/or the understanding of the containing element's
    /// descendants. Usually modifier elements provide negation or qualification. To
    /// make the use of extensions safe and manageable, there is a strict set of
    /// governance applied to the definition and use of extensions. Though any
    /// implementer can define an extension, there is a set of requirements that SHALL
    /// be met as part of the definition of the extension. Applications processing a
    /// resource are required to check for modifier extensions. Modifier extensions
    /// SHALL NOT change the meaning of any elements on Resource or DomainResource
    /// (including cannot change the meaning of modifierExtension itself).
    pub fn modifier_extension(&self) -> Option<Vec<Extension>> {
        if let Some(Value::Array(val)) = self.value.get("modifierExtension") {
            return Some(
                val.into_iter()
                    .map(|e| Extension {
                        value: Cow::Borrowed(e),
                    })
                    .collect::<Vec<_>>(),
            );
        }
        return None;
    }
    /// A human-readable string to clarify or explain concepts about the resource.
    pub fn note(&self) -> Option<Vec<Annotation>> {
        if let Some(Value::Array(val)) = self.value.get("note") {
            return Some(
                val.into_iter()
                    .map(|e| Annotation {
                        value: Cow::Borrowed(e),
                    })
                    .collect::<Vec<_>>(),
            );
        }
        return None;
    }
    /// A rating of a subcomponent of rating certainty.
    pub fn rating(&self) -> Option<Vec<CodeableConcept>> {
        if let Some(Value::Array(val)) = self.value.get("rating") {
            return Some(
                val.into_iter()
                    .map(|e| CodeableConcept {
                        value: Cow::Borrowed(e),
                    })
                    .collect::<Vec<_>>(),
            );
        }
        return None;
    }
    /// Type of subcomponent of certainty rating.
    pub fn fhir_type(&self) -> Option<CodeableConcept> {
        if let Some(val) = self.value.get("type") {
            return Some(CodeableConcept {
                value: Cow::Borrowed(val),
            });
        }
        return None;
    }
    /// Returns true when this element and every nested element it contains
    /// are structurally valid.
    pub fn validate(&self) -> bool {
        if let Some(extensions) = self.extension() {
            if !extensions.iter().all(|e| e.validate()) {
                return false;
            }
        }
        // `id` is a plain string with nothing nested to validate.
        if let Some(_val) = self.id() {}
        if let Some(modifiers) = self.modifier_extension() {
            if !modifiers.iter().all(|e| e.validate()) {
                return false;
            }
        }
        if let Some(notes) = self.note() {
            if !notes.iter().all(|e| e.validate()) {
                return false;
            }
        }
        if let Some(ratings) = self.rating() {
            if !ratings.iter().all(|e| e.validate()) {
                return false;
            }
        }
        if let Some(fhir_type) = self.fhir_type() {
            if !fhir_type.validate() {
                return false;
            }
        }
        true
    }
}
/// Builder that accumulates fields into a JSON object before producing an
/// owned `RiskEvidenceSynthesis_CertaintySubcomponent`.
#[derive(Debug)]
pub struct RiskEvidenceSynthesis_CertaintySubcomponentBuilder {
    // JSON object under construction; setters write fields directly into it.
    pub(crate) value: Value,
}
impl RiskEvidenceSynthesis_CertaintySubcomponentBuilder {
    /// Freezes the accumulated JSON into an owned element wrapper.
    pub fn build(&self) -> RiskEvidenceSynthesis_CertaintySubcomponent {
        RiskEvidenceSynthesis_CertaintySubcomponent {
            value: Cow::Owned(self.value.clone()),
        }
    }
    /// Starts a builder seeded from an existing element's JSON.
    pub fn with(
        existing: RiskEvidenceSynthesis_CertaintySubcomponent,
    ) -> RiskEvidenceSynthesis_CertaintySubcomponentBuilder {
        RiskEvidenceSynthesis_CertaintySubcomponentBuilder {
            value: (*existing.value).clone(),
        }
    }
    /// Starts a builder over an empty JSON object.
    pub fn new() -> RiskEvidenceSynthesis_CertaintySubcomponentBuilder {
        // The value is moved in unchanged, so no `mut` binding is needed.
        let __value: Value = json!({});
        RiskEvidenceSynthesis_CertaintySubcomponentBuilder { value: __value }
    }
    /// Sets the `extension` array from the given elements.
    pub fn extension<'a>(
        &'a mut self,
        val: Vec<Extension>,
    ) -> &'a mut RiskEvidenceSynthesis_CertaintySubcomponentBuilder {
        self.value["extension"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
        self
    }
    /// Sets the element's internal `id`.
    pub fn id<'a>(
        &'a mut self,
        val: &str,
    ) -> &'a mut RiskEvidenceSynthesis_CertaintySubcomponentBuilder {
        self.value["id"] = json!(val);
        self
    }
    /// Sets the `modifierExtension` array from the given elements.
    pub fn modifier_extension<'a>(
        &'a mut self,
        val: Vec<Extension>,
    ) -> &'a mut RiskEvidenceSynthesis_CertaintySubcomponentBuilder {
        self.value["modifierExtension"] =
            json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
        self
    }
    /// Sets the `note` array from the given annotations.
    pub fn note<'a>(
        &'a mut self,
        val: Vec<Annotation>,
    ) -> &'a mut RiskEvidenceSynthesis_CertaintySubcomponentBuilder {
        self.value["note"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
        self
    }
    /// Sets the `rating` array from the given concepts.
    pub fn rating<'a>(
        &'a mut self,
        val: Vec<CodeableConcept>,
    ) -> &'a mut RiskEvidenceSynthesis_CertaintySubcomponentBuilder {
        self.value["rating"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
        self
    }
    /// Sets the `type` of this certainty-rating subcomponent.
    pub fn fhir_type<'a>(
        &'a mut self,
        val: CodeableConcept,
    ) -> &'a mut RiskEvidenceSynthesis_CertaintySubcomponentBuilder {
        self.value["type"] = json!(val.value);
        self
    }
}
| 35.25 | 94 | 0.581054 |
9ce8e594560e9d84913700b626f49428b4f4c1ab | 2,149 | #![deny(missing_docs, rust_2018_idioms)]
#![warn(
clippy::all,
missing_copy_implementations,
missing_debug_implementations
)]
//! Rust bindings for Live2D's cubism sdk
mod error;
mod log;
mod mem;
mod moc;
mod model;
pub use crate::{error::*, log::*, moc::*, model::*};
/// Returns the linked library version in a (major, minor, patch) tuple
pub fn version() -> (u8, u8, u16) {
    let raw = unsafe { ffi::csmGetVersion() };
    // Packed layout: byte 3 = major, byte 2 = minor, low 16 bits = patch;
    // the narrowing casts keep only the bits each shift leaves in place.
    let major = (raw >> 24) as u8;
    let minor = (raw >> 16) as u8;
    let patch = raw as u16;
    (major, minor, patch)
}
bitflags::bitflags! {
    /// The constant flags of a [Model](model/struct.Model.html)'s drawable.
    ///
    /// Bit values come directly from the C SDK constants
    /// (`ffi::csmBlendAdditive` and friends), so raw flag bytes from the
    /// native library can be interpreted without translation.
    pub struct ConstantFlags: u8 {
        /// The drawable should be blended additively.
        const BLEND_ADDITIVE = ffi::csmBlendAdditive;
        /// The drawable should be blended multiplicatively.
        const BLEND_MULTIPLICATIVE = ffi::csmBlendMultiplicative;
        /// The drawable is double sided and therefore shouldn't be culled.
        const IS_DOUBLE_SIDED = ffi::csmIsDoubleSided;
        /// Whether the clipping mask is inverted or not.
        const IS_INVERTED_MASK = ffi::csmIsInvertedMask;
    }
}
bitflags::bitflags! {
    /// The dynamic flags of a [Model](model/struct.Model.html)'s drawable.
    ///
    /// Values mirror the `csm*` flag constants exported by the C SDK.
    pub struct DynamicFlags: u8 {
        /// The drawable is visible.
        const IS_VISIBLE = ffi::csmIsVisible;
        /// The drawable's visibility changed since the last update.
        const VISIBILITY_CHANGED = ffi::csmVisibilityDidChange;
        /// The drawable's opacity changed since the last update.
        const OPACITY_CHANGED = ffi::csmOpacityDidChange;
        /// The drawable's drawing order changed since the last update.
        const DRAW_ORDER_CHANGED = ffi::csmDrawOrderDidChange;
        /// The drawable's render order changed since the last update.
        const RENDER_ORDER_CHANGED = ffi::csmRenderOrderDidChange;
        /// The drawable's vertex positions changed since the last update.
        const VERTEX_POSITIONS_CHANGED = ffi::csmVertexPositionsDidChange;
    }
}
| 37.051724 | 76 | 0.67892 |
38f4fcf5fd393e256e156f71fb4b72ef2b6be7d5 | 8,517 | use crate::FunctionKind;
use crate::Globals;
use crate::RcStr;
use crate::Result;
use crate::Value;
use std::path::Path;
/// Command-line entry point: parses process arguments, configures the
/// interpreter `Globals`, then dispatches to the selected sub-command.
///
/// Flags: `-m <module>` runs a module, `-d <module>` prints a module's
/// docs, `-r` forces the REPL, `--` passes everything after it to the
/// script as argv. Remaining bare arguments are source-root paths; the
/// last one (if any) becomes the path to run when no command was chosen.
pub fn climain(mut globals: Globals) {
    let mut source_roots = Vec::new();
    let mut script_args = Vec::new();
    let mut mode = Mode::Normal;
    let mut command = Command::Unspecified;
    add_source_roots_from_env(&mut globals);
    for argstr in std::env::args().skip(1) {
        let arg: &str = &argstr;
        match mode {
            Mode::Normal => match arg {
                "-m" => mode = Mode::SetRunModule,
                "-d" => mode = Mode::SetDocModule,
                "-r" => command = Command::Repl,
                "--" => mode = Mode::ScriptArgs,
                _ => {
                    // Bare arguments must exist on disk; they are collected
                    // as source roots.
                    let path = Path::new(arg);
                    if !path.exists() {
                        eprintln!("Path {:?} does not exist", path);
                        std::process::exit(1);
                    }
                    source_roots.push(argstr);
                }
            },
            Mode::SetRunModule => {
                command = Command::RunModule(argstr);
                mode = Mode::Normal;
            }
            Mode::SetDocModule => {
                command = Command::DocModule(argstr);
                mode = Mode::Normal;
            }
            Mode::ScriptArgs => {
                script_args.push(RcStr::from(argstr));
            }
        }
    }
    // No explicit command: run the last path argument, or fall back to the
    // REPL when none was given.
    if let Command::Unspecified = command {
        command = if let Some(path) = source_roots.pop() {
            Command::RunPath(path)
        } else {
            Command::Repl
        };
    }
    globals.set_argv(script_args);
    for source_root in source_roots {
        globals.add_source_root(source_root);
    }
    match command {
        Command::Unspecified => panic!("Command::Unspecified should be unreachable"),
        Command::Repl => repl(globals),
        Command::DocModule(module) => doc_module(globals, &module.into()),
        Command::RunModule(module) => run_module(globals, &module.into()),
        Command::RunPath(pathstr) => run_path(globals, pathstr),
    }
}
/// Registers the default mtots library source roots derived from the
/// user's home directory: `$HOME/git/{mtots_core,mtots}/lib` on unix-like
/// systems, falling back to `%USERPROFILE%/Documents/GitHub/...` only when
/// `HOME` is entirely absent. Does nothing when neither is usable.
fn add_source_roots_from_env(globals: &mut Globals) {
    // Joins `home` with the given path segments and returns it as a String.
    // Panics on a non-unicode result, matching the original `.unwrap()`.
    fn lib_root(home: &str, segments: &[&str]) -> String {
        let mut path = Path::new(home).to_path_buf();
        for segment in segments {
            path.push(segment);
        }
        path.into_os_string().into_string().unwrap()
    }
    match std::env::var("HOME") {
        Ok(home) => {
            globals.add_source_root(lib_root(&home, &["git", "mtots_core", "lib"]));
            globals.add_source_root(lib_root(&home, &["git", "mtots", "lib"]));
        }
        // Fall back to USERPROFILE only when HOME is genuinely unset; a
        // present-but-non-unicode HOME is ignored, exactly as before.
        Err(std::env::VarError::NotPresent) => {
            if let Ok(home) = std::env::var("USERPROFILE") {
                globals.add_source_root(lib_root(
                    &home,
                    &["Documents", "GitHub", "mtots_core", "lib"],
                ));
                globals.add_source_root(lib_root(&home, &["Documents", "GitHub", "mtots", "lib"]));
            }
        }
        Err(_) => {}
    }
}
/// Argument-parser state: decides how the next CLI argument is interpreted.
enum Mode {
    /// Default: flags and source-root paths.
    Normal,
    /// The next argument is the module name for `-m`.
    SetRunModule,
    /// The next argument is the module name for `-d`.
    SetDocModule,
    /// Everything after `--`: collected as script argv.
    ScriptArgs,
}
/// The sub-command selected by the CLI arguments.
enum Command {
    /// No command chosen yet; resolved to `RunPath` or `Repl` after parsing.
    Unspecified,
    /// Start the interactive REPL.
    Repl,
    /// Print the documentation of the named module (`-d`).
    DocModule(String),
    /// Run the named module (`-m`).
    RunModule(String),
    /// Run a script file or directory path.
    RunPath(String),
}
/// Interactive read-eval-print loop. Accumulates (possibly multi-line)
/// input until `Globals::repl_ready` accepts it, executes the snippet, and
/// prints any non-nil result. Exits when readline yields no input (EOF).
fn repl(mut globals: Globals) {
    // Remember the trace depth so we can unwind back to it after an error.
    let trace_base = globals.trace().len();
    'exit: loop {
        let mut input = globals.readline(">> ").unwrap();
        let mut line = String::new();
        loop {
            match input {
                None => break 'exit,
                Some(part) => {
                    line.push_str(&part);
                }
            }
            if globals.repl_ready(&line) {
                break;
            }
            // Continuation prompt for incomplete input.
            input = globals.readline(".. ").unwrap();
        }
        match globals.exec_repl(&line) {
            Ok(Value::Nil) => {}
            Ok(value) => {
                println!("{}", value);
            }
            Err(error) => {
                let error = error.prepended(globals.trace().clone());
                globals.eprint(&format!("{}", error.format()));
                globals.trace_unwind(trace_base);
            }
        }
        globals.save_line_history().unwrap();
    }
}
/// Loads `module` and pretty-prints its documentation: an underlined
/// header, the module doc text, then every documented member with its
/// signature (functions) or value (short printable constants).
fn doc_module(mut globals: Globals, module: &RcStr) {
    globals.set_main(module.clone());
    let r = globals.load(module).map(|m| m.clone());
    let module = ordie(&mut globals, r);
    let header = format!("Module {}", module.name());
    println!("{}", header);
    // Underline the header to match its printed width.
    for _ in header.chars() {
        print!("=");
    }
    println!("");
    if let Some(doc) = module.doc() {
        println!("\n{}", format_doc(doc, 0));
    }
    println!("Members");
    println!("=======");
    // Sort members for deterministic, alphabetical output.
    let mut pairs: Vec<_> = module.docmap().iter().collect();
    pairs.sort();
    for (field_name, field_doc) in pairs {
        print!("    ");
        // Every documented name must resolve to a value in the module map.
        let value = match module.map().get(field_name) {
            Some(value) => value.borrow().clone(),
            None => panic!(
                "{}: Doc for field {:?} found with no associated value",
                module.name(),
                field_name
            ),
        };
        match value {
            Value::Function(func) => {
                let type_ = match func.kind() {
                    FunctionKind::Normal => "def",
                    FunctionKind::Generator => "def*",
                    FunctionKind::Async => "async def",
                };
                println!("{} {}{}\n", type_, field_name, func.argspec());
            }
            Value::NativeFunction(func) => {
                println!("native def {}{}\n", field_name, func.argspec());
            }
            value if short_printable_value(&value) => {
                println!("{} = {:?}\n", field_name, value);
            }
            _ => println!("{}", field_name),
        }
        println!("{}", format_doc(field_doc, 8));
    }
}
/// True when `value` renders as a short single-line literal, suitable for
/// inline display next to its member name.
fn short_printable_value(value: &Value) -> bool {
    match value {
        Value::Nil | Value::Bool(_) | Value::Number(_) => true,
        Value::String(s) => {
            if s.len() >= 40 {
                return false;
            }
            // Reject control characters, newlines, and tabs.
            !s.chars()
                .any(|c| c.is_control() || c == '\n' || c == '\t')
        }
        _ => false,
    }
}
/// Hard-wraps `doc` to fit an 80-column screen, indenting every output
/// line by `indent` spaces. Blank input lines are preserved (they still
/// receive the indent). Wrapping is by `char` count, which keeps
/// multi-byte UTF-8 sequences intact.
///
/// NOTE(review): assumes `indent < LINE_WIDTH`; a larger indent underflows
/// the chunk size (same as the original) — confirm callers never exceed it.
fn format_doc(doc: &str, indent: usize) -> String {
    const LINE_WIDTH: usize = 80;
    doc.lines()
        .flat_map(|line| {
            // `line` is trimmed once here; the original re-trimmed it below.
            let line = line.trim();
            if line.is_empty() {
                vec!["".to_owned()]
            } else {
                line.chars()
                    .collect::<Vec<_>>()
                    .chunks(LINE_WIDTH - indent)
                    .map(|chars| chars.iter().collect::<String>())
                    .collect::<Vec<_>>()
            }
        })
        .map(|line| format!("{}{}\n", " ".repeat(indent), line))
        .collect::<String>()
}
/// Loads `module` as the program's main module and reports the outcome
/// (including any pending trampoline) through `Globals`.
fn run_module(mut globals: Globals, module: &RcStr) {
    globals.set_main(module.clone());
    let r = globals.load(module).map(|_| ());
    globals.handle_trampoline_and_last_result(r);
}
/// Runs `pathstr`: a directory is added as a source root and its `__main`
/// module is executed; a file is read and executed directly as `__main`.
fn run_path(mut globals: Globals, pathstr: String) {
    let path = Path::new(&pathstr);
    if path.is_dir() {
        globals.add_source_root(pathstr);
        run_module(globals, &"__main".into());
    } else {
        // Panics if the file cannot be read (pre-existing behavior).
        let data = std::fs::read_to_string(path).unwrap();
        globals.set_main("__main".into());
        let r = globals.exec_str("__main", Some(&pathstr), &data);
        globals.handle_trampoline_and_last_result(r);
    }
}
/// Unwraps `r`, or prints the trace-annotated error and exits the process
/// with status 1.
pub fn ordie<T>(globals: &mut Globals, r: Result<T>) -> T {
    r.unwrap_or_else(|error| {
        let error = error.prepended(globals.trace().clone());
        globals.eprint(&format!("{}", error.format()));
        // Diverges, so the closure still satisfies the `T` return type.
        std::process::exit(1)
    })
}
| 31.197802 | 88 | 0.469414 |
75cd2586343e675a4835e1d6ad344975c3492ce2 | 11,020 | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#![cfg(test)]
use {
anyhow::Context as _,
fuchsia_component::{client::AppBuilder, server::ServiceFs},
futures::{FutureExt as _, StreamExt as _, TryStreamExt as _},
net_declare::fidl_ip_v4,
};
/// One dhcpd-cli invocation to run against the server, with the exact
/// stdout/stderr it is expected to produce.
struct Command<'a> {
    // Arguments passed to the dhcpd-cli binary.
    args: Vec<&'a str>,
    expected_stdout: &'a str,
    expected_stderr: &'a str,
}
/// Launches stash, netstack, and dhcpd in a fresh nested environment,
/// applies `parameters` to the DHCP server, then runs each CLI `Command`
/// and asserts its stdout/stderr match exactly.
async fn test_cli_with_config(
    parameters: &mut [fidl_fuchsia_net_dhcp::Parameter],
    commands: Vec<Command<'_>>,
) {
    let mut fs = ServiceFs::new_local();
    let mut netstack_builder =
        AppBuilder::new("fuchsia-pkg://fuchsia.com/dhcpd-cli-tests#meta/netstack-debug.cmx");
    let mut stash_builder =
        AppBuilder::new("fuchsia-pkg://fuchsia.com/dhcpd-cli-tests#meta/stash_secure.cmx");
    let mut dhcpd_builder =
        AppBuilder::new("fuchsia-pkg://fuchsia.com/dhcpd-cli-tests#meta/dhcpd.cmx");
    // Proxy each service the CLI/server needs to the matching test component.
    fs.add_proxy_service_to::<fidl_fuchsia_stash::SecureStoreMarker, _>(
        stash_builder
            .directory_request()
            .expect("failed to get test stash directory request")
            .clone(),
    )
    .add_proxy_service_to::<fidl_fuchsia_posix_socket::ProviderMarker, _>(
        netstack_builder
            .directory_request()
            .expect("failed to get test netstack directory request")
            .clone(),
    )
    .add_proxy_service_to::<fidl_fuchsia_net_dhcp::Server_Marker, _>(
        dhcpd_builder
            .directory_request()
            .expect("failed to get test dhcpd directory request")
            .clone(),
    );
    let env =
        fs.create_salted_nested_environment("test_cli").expect("failed to create environment");
    let fs = fs.for_each_concurrent(None, futures::future::ready);
    futures::pin_mut!(fs);
    let _stash = stash_builder.spawn(env.launcher()).expect("failed to launch test stash");
    let dhcpd = dhcpd_builder.spawn(env.launcher()).expect("failed to launch test dhcpd");
    let _netstack = netstack_builder.spawn(env.launcher()).expect("failed to launch test netstack");
    let dhcp_server = dhcpd
        .connect_to_service::<fidl_fuchsia_net_dhcp::Server_Marker>()
        .expect("failed to connect to DHCP server");
    let dhcp_server_ref = &dhcp_server;
    let test_fut = async {
        // Apply all requested parameters (concurrently) before any command runs.
        let () = futures::stream::iter(parameters.iter_mut())
            .map(Ok)
            .try_for_each_concurrent(None, |parameter| async move {
                dhcp_server_ref
                    .set_parameter(parameter)
                    .await
                    .context("failed to call dhcp/Server.SetParameter")?
                    .map_err(fuchsia_zircon::Status::from_raw)
                    .with_context(|| {
                        format!("dhcp/Server.SetParameter({:?}) returned error", parameter)
                    })
            })
            .await
            .expect("failed to configure DHCP server");
        for Command { args, expected_stdout, expected_stderr } in commands {
            let output =
                AppBuilder::new("fuchsia-pkg://fuchsia.com/dhcpd-cli-tests#meta/dhcpd-cli.cmx")
                    .args(args)
                    .output(env.launcher())
                    .expect("failed to launch dhcpd-cli")
                    .await
                    .expect("failed to collect dhcpd-cli output");
            let stdout = std::str::from_utf8(&output.stdout).expect("failed to get stdout");
            let stderr = std::str::from_utf8(&output.stderr).expect("failed to get stderr");
            assert_eq!(stderr, expected_stderr);
            assert_eq!(stdout, expected_stdout);
        }
    };
    // The service stream should never terminate before the test completes.
    futures::select! {
        () = fs => panic!("request stream terminated"),
        () = test_fut.fuse() => {},
    };
}
/// Runs `commands` against a server left in its default configuration.
async fn test_cli(commands: Vec<Command<'_>>) {
    test_cli_with_config(&mut [], commands).await
}
// The tests below drive the dhcpd CLI against a freshly launched server
// and compare stdout/stderr verbatim against golden output.
#[fuchsia_async::run_singlethreaded(test)]
async fn test_get_option_subnet() {
    // Reading an option that was never set reports NOT_FOUND.
    test_cli(vec![Command {
        args: vec!["get", "option", "subnet-mask"],
        expected_stdout: "",
        expected_stderr: r#"Error: get_option(SubnetMask(SubnetMask { mask: None })) failed
Caused by:
    NOT_FOUND
"#,
    }])
    .await
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_get_parameter_lease() {
    // Parameters have defaults, so a get succeeds without a prior set.
    test_cli(vec![Command {
        args: vec!["get", "parameter", "lease-length"],
        expected_stdout: r#"Lease(
    LeaseLength {
        default: Some(
            86400,
        ),
        max: Some(
            86400,
        ),
    },
)
"#,
        expected_stderr: "",
    }])
    .await
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_set_option_subnet() {
    test_cli(vec![Command {
        args: vec!["set", "option", "subnet-mask", "--mask", "255.255.255.0"],
        expected_stdout: "",
        expected_stderr: "",
    }])
    .await
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_set_parameter_lease() {
    test_cli(vec![Command {
        args: vec!["set", "parameter", "lease-length", "--default", "42"],
        expected_stdout: "",
        expected_stderr: "",
    }])
    .await
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_list_option() {
    // An option set in one command is visible to a later `list option`.
    test_cli(vec![
        Command {
            args: vec!["set", "option", "subnet-mask", "--mask", "255.255.255.0"],
            expected_stdout: "",
            expected_stderr: "",
        },
        Command {
            args: vec!["list", "option"],
            expected_stdout: r#"[
    SubnetMask(
        Ipv4Address {
            addr: [
                255,
                255,
                255,
                0,
            ],
        },
    ),
]
"#,
            expected_stderr: "",
        },
    ])
    .await
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_list_parameter() {
    // `list parameter` dumps the server's full default configuration.
    test_cli(vec![Command {
        args: vec!["list", "parameter"],
        expected_stdout: r#"[
    IpAddrs(
        [],
    ),
    AddressPool(
        AddressPool {
            network_id: Some(
                Ipv4Address {
                    addr: [
                        0,
                        0,
                        0,
                        0,
                    ],
                },
            ),
            broadcast: Some(
                Ipv4Address {
                    addr: [
                        0,
                        0,
                        0,
                        0,
                    ],
                },
            ),
            mask: Some(
                Ipv4Address {
                    addr: [
                        0,
                        0,
                        0,
                        0,
                    ],
                },
            ),
            pool_range_start: Some(
                Ipv4Address {
                    addr: [
                        0,
                        0,
                        0,
                        0,
                    ],
                },
            ),
            pool_range_stop: Some(
                Ipv4Address {
                    addr: [
                        0,
                        0,
                        0,
                        0,
                    ],
                },
            ),
        },
    ),
    Lease(
        LeaseLength {
            default: Some(
                86400,
            ),
            max: Some(
                86400,
            ),
        },
    ),
    PermittedMacs(
        [],
    ),
    StaticallyAssignedAddrs(
        [],
    ),
    ArpProbe(
        false,
    ),
    BoundDeviceNames(
        [],
    ),
]
"#,
        expected_stderr: "",
    }])
    .await
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_reset_option() {
    // `reset option` clears every previously set option.
    test_cli(vec![
        Command {
            args: vec!["set", "option", "subnet-mask", "--mask", "255.255.255.0"],
            expected_stdout: "",
            expected_stderr: "",
        },
        Command {
            args: vec!["list", "option"],
            expected_stdout: r#"[
    SubnetMask(
        Ipv4Address {
            addr: [
                255,
                255,
                255,
                0,
            ],
        },
    ),
]
"#,
            expected_stderr: "",
        },
        Command { args: vec!["reset", "option"], expected_stdout: "", expected_stderr: "" },
        Command { args: vec!["list", "option"], expected_stdout: "[]\n", expected_stderr: "" },
    ])
    .await
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_reset_parameter() {
    // `reset parameter` restores parameters to their defaults.
    test_cli(vec![
        Command {
            args: vec!["set", "parameter", "lease-length", "--default", "42"],
            expected_stdout: "",
            expected_stderr: "",
        },
        Command {
            args: vec!["get", "parameter", "lease-length"],
            expected_stdout: r#"Lease(
    LeaseLength {
        default: Some(
            42,
        ),
        max: Some(
            42,
        ),
    },
)
"#,
            expected_stderr: "",
        },
        Command { args: vec!["reset", "parameter"], expected_stdout: "", expected_stderr: "" },
        Command {
            args: vec!["get", "parameter", "lease-length"],
            expected_stdout: r#"Lease(
    LeaseLength {
        default: Some(
            86400,
        ),
        max: Some(
            86400,
        ),
    },
)
"#,
            expected_stderr: "",
        },
    ])
    .await
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_clear_leases() {
    test_cli(vec![Command { args: vec!["clear-leases"], expected_stdout: "", expected_stderr: "" }])
        .await
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_start_fails() {
    test_cli(vec![Command {
        args: vec!["start"],
        expected_stdout: "",
        // Starting the server fails because the default configuration has an
        // empty address pool.
        expected_stderr: "Error: failed to start server\n\nCaused by:\n    INVALID_ARGS\n",
    }])
    .await
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_start_succeeds() {
    // With a server address and a pool configured, `start` succeeds.
    test_cli_with_config(
        &mut [
            fidl_fuchsia_net_dhcp::Parameter::IpAddrs(vec![fidl_ip_v4!(192.168.0.1)]),
            fidl_fuchsia_net_dhcp::Parameter::AddressPool(fidl_fuchsia_net_dhcp::AddressPool {
                network_id: Some(fidl_ip_v4!(192.168.0.0)),
                broadcast: Some(fidl_ip_v4!(192.168.0.127)),
                mask: Some(fidl_ip_v4!(255.255.255.128)),
                pool_range_start: Some(fidl_ip_v4!(192.168.0.2)),
                pool_range_stop: Some(fidl_ip_v4!(192.168.0.5)),
            }),
        ],
        vec![Command { args: vec!["start"], expected_stdout: "", expected_stderr: "" }],
    )
    .await
}
#[fuchsia_async::run_singlethreaded(test)]
async fn test_stop() {
    test_cli(vec![Command { args: vec!["stop"], expected_stdout: "", expected_stderr: "" }]).await
}
| 27.828283 | 100 | 0.503085 |
721b817bd5ccc9967743474213e418bbfbc03fbc | 1,884 | use crate::common::{jormungandr::ConfigurationBuilder, startup};
use chain_impl_mockchain::{chaintypes::ConsensusType, fee::LinearFee};
use jormungandr_lib::interfaces::{ActiveSlotCoefficient, Mempool};
use jormungandr_testing_utils::testing::{
FragmentGenerator, FragmentSender, FragmentSenderSetup, FragmentVerifier, MemPoolCheck,
};
use std::time::Duration;
/// End-to-end smoke test: starts a single GenesisPraos stake-pool node,
/// generates every fragment type the generator supports, submits them all,
/// and verifies each one lands in a block.
#[test]
pub fn send_all_fragments() {
    let receiver = startup::create_new_account_address();
    let sender = startup::create_new_account_address();
    let (jormungandr, _) = startup::start_stake_pool(
        &[sender.clone()],
        &[receiver.clone()],
        ConfigurationBuilder::new()
            .with_block0_consensus(ConsensusType::GenesisPraos)
            .with_slots_per_epoch(10)
            .with_consensus_genesis_praos_active_slot_coeff(ActiveSlotCoefficient::MAXIMUM)
            .with_slot_duration(3)
            .with_linear_fees(LinearFee::new(1, 1, 1))
            .with_explorer()
            // Oversized mempool so no fragment is evicted during the test.
            .with_mempool(Mempool {
                pool_max_entries: 1_000_000usize.into(),
                log_max_entries: 1_000_000usize.into(),
            }),
    )
    .unwrap();
    let fragment_sender = FragmentSender::new(
        jormungandr.genesis_block_hash(),
        jormungandr.fees(),
        FragmentSenderSetup::resend_3_times(),
    );
    let time_era = jormungandr.time_era();
    let mut fragment_generator = FragmentGenerator::new(
        sender,
        receiver,
        jormungandr.to_remote(),
        jormungandr.explorer(),
        time_era.slots_per_epoch(),
        fragment_sender,
    );
    fragment_generator.prepare();
    let mem_checks: Vec<MemPoolCheck> = fragment_generator.send_all().unwrap();
    let verifier = FragmentVerifier;
    verifier
        .wait_and_verify_all_are_in_block(Duration::from_secs(2), mem_checks, &jormungandr)
        .unwrap();
}
| 33.642857 | 91 | 0.671975 |
8706333c7436495ae973a04b5800a55e61e6aec2 | 523 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
// `Z` is a tuple struct holding a `'static` reference to its own type; this
// run-pass test only checks that such a self-referential definition compiles.
pub struct Z(&'static Z);
pub fn main() {}
| 34.866667 | 68 | 0.728489 |
381d90e74e402ef1a021fe391567f7c0a5f31558 | 1,483 | #[doc = r" Value read from the register"]
pub struct R {
bits: u16,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u16,
}
// svd2rust-style accessors: read / modify / write / reset for CRCDIRB.
impl super::CRCDIRB {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: snapshot the register, let the closure edit a
        // writer seeded with that snapshot, then write the result back.
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike modify(), write() starts from the reset value rather than
        // the current register contents.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u16 {
self.bits
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u16) -> &mut Self {
self.bits = bits;
self
}
}
| 22.815385 | 59 | 0.496291 |
e2aec62fdfa8fd2d044694aa592917f7204ab803 | 4,542 | use std::env::args;
use std::time::Instant;
/// Entry point: times `run` over the input given as the first CLI argument
/// and prints the duration (milliseconds) followed by the answer.
fn main() {
    let start = Instant::now();
    let answer = run(&args().nth(1).expect("Please provide an input"));
    println!("_duration:{}", start.elapsed().as_secs_f64() * 1000.);
    println!("{}", answer);
}
/// AoC 2020 day 19, part 2: parses the rule section, then counts messages
/// matching rule 0 with rules 8/11 made recursive — i.e. messages of the
/// form 42{i} 42{j} 31{j} for some i >= 1, j >= 1.
fn run(input: &str) -> isize {
    let mut lines = input.as_bytes().split(|&b| b == b'\n');
    let mut rules = Vec::new();
    // Rule section ends at the first blank line; the rest are messages.
    while let Some(l) = lines.next() {
        if l.is_empty() {
            break;
        } else {
            parse_rule(l, &mut rules);
        }
    }
    // we want to match 42+ (42 31)+
    let mut count = 0;
    for l in lines {
        let mut i = 1;
        let mut j = 1;
        // Enumerate (i, j) pairs in order of increasing i + j (diagonals);
        // each pair is tried as the synthetic sequence 42{i+j} 31{j}.
        while i + j < l.len() {
            let mut pos = 0;
            let mut eight = vec![42; i];
            eight.append(&mut vec![42; j]);
            eight.append(&mut vec![31; j]);
            let rule = RuleRef {
                opt1: eight,
                opt2: Vec::new(),
            };
            // A match only counts if it consumes the whole message.
            if rule.validate(&rules, l, &mut pos) && pos == l.len() {
                count += 1;
                break;
            }
            if i == 1 {
                i = j + 1;
                j = 1;
            } else {
                i -= 1;
                j += 1;
            }
        }
    }
    count
}
/// Parses one rule line of the form `N: "c"` or `N: a b | c d` and stores
/// it at index N in `rules`, growing the vector with placeholder rules as
/// needed.
fn parse_rule(input: &[u8], rules: &mut Vec<Rule>) {
    let mut pos = 0;
    let num = stoi(input, &mut pos);
    // Skip over the ": " separator.
    pos += 2;
    if input[pos] == b'"' {
        // Literal rule: matches the single quoted byte.
        let c = input[pos + 1];
        if rules.len() <= num as usize {
            rules.resize(num as usize + 1, Rule::default());
        }
        rules[num as usize] = Rule::Char(c);
    } else {
        // Reference rule: one or two '|'-separated sequences of rule numbers.
        let mut opt1 = Vec::new();
        let mut opt2 = Vec::new();
        let mut choice = 1;
        while pos < input.len() {
            if input[pos] == b'|' {
                choice += 1;
                pos += 2;
            }
            if choice == 1 {
                opt1.push(stoi(input, &mut pos));
            } else {
                opt2.push(stoi(input, &mut pos));
            }
            // Skip the space following each number.
            pos += 1;
        }
        let r = RuleRef { opt1, opt2 };
        if rules.len() <= num as usize {
            rules.resize(num as usize + 1, Rule::default());
        }
        rules[num as usize] = Rule::Ref(r);
    }
}
/// Parses a decimal number from `input` starting at `*pos`, advancing
/// `*pos` past every digit consumed. Returns 0 when no digit is present.
fn stoi(input: &[u8], pos: &mut usize) -> u8 {
    let mut value = 0;
    while let Some(&byte) = input.get(*pos) {
        if !byte.is_ascii_digit() {
            break;
        }
        value = value * 10 + (byte - b'0');
        *pos += 1;
    }
    value
}
/// A parsed grammar rule.
#[derive(Debug, Clone)]
enum Rule {
    /// Matches exactly one literal byte.
    Char(u8),
    /// Matches one of (up to) two sequences of other rules.
    Ref(RuleRef),
}
impl Default for Rule {
fn default() -> Self {
Self::Char(0)
}
}
impl Rule {
    /// Tries to match this rule against `input` starting at `*pos`.
    /// `*pos` is advanced past each byte examined — even on a `Char`
    /// mismatch — so callers save and restore the position themselves.
    fn validate(&self, rules: &[Rule], input: &[u8], pos: &mut usize) -> bool {
        match self {
            Self::Char(c) => {
                if input.len() > *pos {
                    *pos += 1;
                    *c == input[*pos - 1]
                } else {
                    false
                }
            }
            Self::Ref(r) => r.validate(rules, input, pos),
        }
    }
}
/// Two alternative sequences of rule indices; an empty `opt2` means the
/// rule has only a single alternative.
#[derive(Debug, Clone)]
struct RuleRef {
    opt1: Vec<u8>,
    opt2: Vec<u8>,
}
impl RuleRef {
    /// Matches `opt1` at `*pos`; if that fails, retries `opt2` from the
    /// original position. `*pos` is only advanced when a branch matches in
    /// full. NOTE(review): the first alternative to match is committed —
    /// there is no backtracking across alternatives; assumed sufficient for
    /// this puzzle's grammar.
    fn validate(&self, rules: &[Rule], input: &[u8], pos: &mut usize) -> bool {
        let mut res = true;
        // Work on a copy of the cursor so a failed branch leaves the
        // caller's position untouched.
        let mut pos_copy = *pos;
        for &r in &self.opt1 {
            if !rules[r as usize].validate(rules, input, &mut pos_copy) {
                res = false;
                break;
            }
        }
        if res {
            *pos = pos_copy;
            return true;
        }
        // opt1 failed: rewind and try opt2 (empty opt2 keeps res == false).
        pos_copy = *pos;
        for &r in &self.opt2 {
            res = true;
            if !rules[r as usize].validate(rules, input, &mut pos_copy) {
                return false;
            }
        }
        *pos = pos_copy;
        res
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Example grammar and message from the AoC 2020 day 19 statement; with
    // recursive rules 8/11 this single message matches.
    #[test]
    fn run_test() {
        assert_eq!(
            run(r#"42: 9 14 | 10 1
9: 14 27 | 1 26
10: 23 14 | 28 1
1: "a"
11: 42 31
5: 1 14 | 15 1
19: 14 1 | 14 14
12: 24 14 | 19 1
16: 15 1 | 14 14
31: 14 17 | 1 13
6: 14 14 | 1 14
2: 1 24 | 14 4
0: 8 11
13: 14 3 | 1 12
15: 1 | 14
17: 14 2 | 1 7
23: 25 1 | 22 14
28: 16 1
4: 1 1
20: 14 14 | 1 15
3: 5 14 | 16 1
27: 1 6 | 14 18
14: "b"
21: 14 1 | 1 14
25: 1 1 | 1 14
22: 14 14
8: 42
26: 14 22 | 1 20
18: 15 15
7: 14 5 | 1 21
24: 14 1
babbbbaabbbbbabbbbbbaabaaabaaa"#),
            1
        )
    }
}
| 22.156098 | 79 | 0.417657 |
dded7b11401e2ceca642e9e74b0d3cad5e601214 | 17,781 | // Copyright 2019 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::util::{Mutex, RwLock};
use std::fmt;
use std::fs::File;
use std::io::Read;
use std::net::{Shutdown, TcpStream};
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use crate::chain;
use crate::conn;
use crate::core::core::hash::{Hash, Hashed};
use crate::core::pow::Difficulty;
use crate::core::ser::Writeable;
use crate::core::{core, global};
use crate::handshake::Handshake;
use crate::msg::{
self, BanReason, GetPeerAddrs, KernelDataRequest, Locator, Msg, Ping, TxHashSetRequest, Type,
};
use crate::protocol::Protocol;
use crate::types::{
Capabilities, ChainAdapter, Error, NetAdapter, P2PConfig, PeerAddr, PeerInfo, ReasonForBan,
TxHashSetRead,
};
use chrono::prelude::{DateTime, Utc};
const MAX_TRACK_SIZE: usize = 30;
const MAX_PEER_MSG_PER_MIN: u64 = 500;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
/// Remind: don't mix up this 'State' with that 'State' in p2p/src/store.rs,
/// which has different 3 states: {Healthy, Banned, Defunct}.
/// For example: 'Disconnected' state here could still be 'Healthy' and could reconnect in next loop.
enum State {
	/// Handshake completed; the connection is considered live.
	Connected,
	/// Peer was marked banned via `set_banned`.
	Banned,
}
pub struct Peer {
	pub info: PeerInfo,
	/// Shared, lock-protected connection state (connected vs banned).
	state: Arc<RwLock<State>>,
	// set of all hashes known to this peer (so no need to send)
	tracking_adapter: TrackingAdapter,
	/// Per-connection sent/received byte and message counters.
	tracker: Arc<conn::Tracker>,
	/// Outbound message handle; mutex-guarded so sends are serialized.
	send_handle: Mutex<conn::ConnHandle>,
	// we need a special lock for stop operation, can't reuse handle mutex for that
	// because it may be locked by different reasons, so we should wait for that, close
	// mutex can be taken only during shutdown, it happens once
	stop_handle: Mutex<conn::StopHandle>,
	// Whether or not we requested a txhashset from this peer
	state_sync_requested: Arc<AtomicBool>,
}
impl fmt::Debug for Peer {
	/// Formats as `Peer(<PeerInfo debug>)`.
	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
		f.write_fmt(format_args!("Peer({:?})", self.info))
	}
}
impl Peer {
	// Only accept and connect can be externally used to build a peer
	fn new(info: PeerInfo, conn: TcpStream, adapter: Arc<dyn NetAdapter>) -> std::io::Result<Peer> {
		let state = Arc::new(RwLock::new(State::Connected));
		let state_sync_requested = Arc::new(AtomicBool::new(false));
		let tracking_adapter = TrackingAdapter::new(adapter);
		let handler = Protocol::new(
			Arc::new(tracking_adapter.clone()),
			info.clone(),
			state_sync_requested.clone(),
		);
		let tracker = Arc::new(conn::Tracker::new());
		// Start connection handling (conn::listen); yields a handle for
		// sending messages and one for stopping the peer later.
		let (sendh, stoph) = conn::listen(conn, info.version, tracker.clone(), handler)?;
		let send_handle = Mutex::new(sendh);
		let stop_handle = Mutex::new(stoph);
		Ok(Peer {
			info,
			state,
			tracking_adapter,
			tracker,
			send_handle,
			stop_handle,
			state_sync_requested,
		})
	}
	/// Accepts an inbound connection: runs the server side of the handshake
	/// and wraps the stream in a `Peer`. The socket is shut down on failure.
	pub fn accept(
		mut conn: TcpStream,
		capab: Capabilities,
		total_difficulty: Difficulty,
		hs: &Handshake,
		adapter: Arc<dyn NetAdapter>,
	) -> Result<Peer, Error> {
		debug!("accept: handshaking from {:?}", conn.peer_addr());
		let info = hs.accept(capab, total_difficulty, &mut conn);
		match info {
			Ok(info) => Ok(Peer::new(info, conn, adapter)?),
			Err(e) => {
				debug!(
					"accept: handshaking from {:?} failed with error: {:?}",
					conn.peer_addr(),
					e
				);
				if let Err(e) = conn.shutdown(Shutdown::Both) {
					debug!("Error shutting down conn: {:?}", e);
				}
				Err(e)
			}
		}
	}
	/// Connects out to a peer: runs the client side of the handshake and
	/// wraps the stream in a `Peer`. The socket is shut down on failure.
	pub fn connect(
		mut conn: TcpStream,
		capab: Capabilities,
		total_difficulty: Difficulty,
		self_addr: PeerAddr,
		hs: &Handshake,
		adapter: Arc<dyn NetAdapter>,
	) -> Result<Peer, Error> {
		debug!("connect: handshaking with {:?}", conn.peer_addr());
		let info = hs.initiate(capab, total_difficulty, self_addr, &mut conn);
		match info {
			Ok(info) => Ok(Peer::new(info, conn, adapter)?),
			Err(e) => {
				debug!(
					"connect: handshaking with {:?} failed with error: {:?}",
					conn.peer_addr(),
					e
				);
				if let Err(e) = conn.shutdown(Shutdown::Both) {
					debug!("Error shutting down conn: {:?}", e);
				}
				Err(e)
			}
		}
	}
	/// Whether `peer_addr` should be refused based on the configured
	/// allow/deny lists: the deny list wins; when an allow list exists,
	/// anything not on it is denied; with neither list, peers are allowed.
	pub fn is_denied(config: &P2PConfig, peer_addr: PeerAddr) -> bool {
		if let Some(ref denied) = config.peers_deny {
			if denied.contains(&peer_addr) {
				debug!(
					"checking peer allowed/denied: {:?} explicitly denied",
					peer_addr
				);
				return true;
			}
		}
		if let Some(ref allowed) = config.peers_allow {
			if allowed.contains(&peer_addr) {
				debug!(
					"checking peer allowed/denied: {:?} explicitly allowed",
					peer_addr
				);
				return false;
			} else {
				debug!(
					"checking peer allowed/denied: {:?} not explicitly allowed, denying",
					peer_addr
				);
				return true;
			}
		}
		// default to allowing peer connection if we do not explicitly allow or deny
		// the peer
		false
	}
	/// Whether this peer is currently connected.
	pub fn is_connected(&self) -> bool {
		State::Connected == *self.state.read()
	}
	/// Whether this peer has been banned.
	pub fn is_banned(&self) -> bool {
		State::Banned == *self.state.read()
	}
	/// Whether this peer is stuck on sync.
	pub fn is_stuck(&self) -> (bool, Difficulty) {
		let peer_live_info = self.info.live_info.read();
		let now = Utc::now().timestamp_millis();
		// Stuck if the peer's stuck detector timestamp has not advanced
		// within global::STUCK_PEER_KICK_TIME of now.
		if now > peer_live_info.stuck_detector.timestamp_millis() + global::STUCK_PEER_KICK_TIME {
			(true, peer_live_info.total_difficulty)
		} else {
			(false, peer_live_info.total_difficulty)
		}
	}
	/// Whether the peer is considered abusive, mostly for spammy nodes
	pub fn is_abusive(&self) -> bool {
		let rec = self.tracker.received_bytes.read();
		let sent = self.tracker.sent_bytes.read();
		rec.count_per_min() > MAX_PEER_MSG_PER_MIN || sent.count_per_min() > MAX_PEER_MSG_PER_MIN
	}
	/// Number of bytes sent to the peer
	pub fn last_min_sent_bytes(&self) -> Option<u64> {
		let sent_bytes = self.tracker.sent_bytes.read();
		Some(sent_bytes.bytes_per_min())
	}
	/// Number of bytes received from the peer
	pub fn last_min_received_bytes(&self) -> Option<u64> {
		let received_bytes = self.tracker.received_bytes.read();
		Some(received_bytes.bytes_per_min())
	}
	/// (sent, received) message counts over the last minute.
	pub fn last_min_message_counts(&self) -> Option<(u64, u64)> {
		let received_bytes = self.tracker.received_bytes.read();
		let sent_bytes = self.tracker.sent_bytes.read();
		Some((sent_bytes.count_per_min(), received_bytes.count_per_min()))
	}
	/// Set this peer status to banned
	pub fn set_banned(&self) {
		*self.state.write() = State::Banned;
	}
	/// Send a msg with given msg_type to our peer via the connection.
	fn send<T: Writeable>(&self, msg: T, msg_type: Type) -> Result<(), Error> {
		// Serialize into a protocol message, then send it while holding the
		// send_handle mutex so writes are not interleaved.
		let msg = Msg::new(msg_type, msg, self.info.version)?;
		self.send_handle.lock().send(msg)
	}
	/// Send a ping to the remote peer, providing our local difficulty and
	/// height
	pub fn send_ping(&self, total_difficulty: Difficulty, height: u64) -> Result<(), Error> {
		let ping_msg = Ping {
			total_difficulty,
			height,
		};
		self.send(ping_msg, msg::Type::Ping)
	}
	/// Send the ban reason before banning
	pub fn send_ban_reason(&self, ban_reason: ReasonForBan) -> Result<(), Error> {
		let ban_reason_msg = BanReason { ban_reason };
		self.send(ban_reason_msg, msg::Type::BanReason).map(|_| ())
	}
	/// Sends the provided block to the remote peer. The request may be dropped
	/// if the remote peer is known to already have the block.
	pub fn send_block(&self, b: &core::Block) -> Result<bool, Error> {
		if !self.tracking_adapter.has_recv(b.hash()) {
			trace!("Send block {} to {}", b.hash(), self.info.addr);
			self.send(b, msg::Type::Block)?;
			Ok(true)
		} else {
			debug!(
				"Suppress block send {} to {} (already seen)",
				b.hash(),
				self.info.addr,
			);
			Ok(false)
		}
	}
	/// Sends the provided compact block unless the peer is already known to
	/// have it. Returns true if the block was actually sent.
	pub fn send_compact_block(&self, b: &core::CompactBlock) -> Result<bool, Error> {
		if !self.tracking_adapter.has_recv(b.hash()) {
			trace!("Send compact block {} to {}", b.hash(), self.info.addr);
			self.send(b, msg::Type::CompactBlock)?;
			Ok(true)
		} else {
			debug!(
				"Suppress compact block send {} to {} (already seen)",
				b.hash(),
				self.info.addr,
			);
			Ok(false)
		}
	}
	/// Sends the provided block header unless the peer is already known to
	/// have it. Returns true if the header was actually sent.
	pub fn send_header(&self, bh: &core::BlockHeader) -> Result<bool, Error> {
		if !self.tracking_adapter.has_recv(bh.hash()) {
			debug!("Send header {} to {}", bh.hash(), self.info.addr);
			self.send(bh, msg::Type::Header)?;
			Ok(true)
		} else {
			debug!(
				"Suppress header send {} to {} (already seen)",
				bh.hash(),
				self.info.addr,
			);
			Ok(false)
		}
	}
	/// Broadcasts a lightweight transaction kernel hash, unless this peer is
	/// already known to have seen it.
	pub fn send_tx_kernel_hash(&self, h: Hash) -> Result<bool, Error> {
		if !self.tracking_adapter.has_recv(h) {
			debug!("Send tx kernel hash {} to {}", h, self.info.addr);
			self.send(h, msg::Type::TransactionKernel)?;
			Ok(true)
		} else {
			debug!(
				"Not sending tx kernel hash {} to {} (already seen)",
				h, self.info.addr
			);
			Ok(false)
		}
	}
	/// Sends the provided transaction to the remote peer. The request may be
	/// dropped if the remote peer is known to already have the transaction.
	/// We support broadcast of lightweight tx kernel hash
	/// so track known txs by kernel hash.
	pub fn send_transaction(&self, tx: &core::Transaction) -> Result<bool, Error> {
		let kernel = &tx.kernels()[0];
		// Prefer the lightweight kernel-hash broadcast when the peer
		// advertises support for it.
		if self
			.info
			.capabilities
			.contains(Capabilities::TX_KERNEL_HASH)
		{
			return self.send_tx_kernel_hash(kernel.hash());
		}
		if !self.tracking_adapter.has_recv(kernel.hash()) {
			debug!("Send full tx {} to {}", tx.hash(), self.info.addr);
			self.send(tx, msg::Type::Transaction)?;
			Ok(true)
		} else {
			debug!(
				"Not sending tx {} to {} (already seen)",
				tx.hash(),
				self.info.addr
			);
			Ok(false)
		}
	}
/// Sends the provided stem transaction to the remote peer.
/// Note: tracking adapter is ignored for stem transactions (while under
/// embargo).
pub fn send_stem_transaction(&self, tx: &core::Transaction) -> Result<(), Error> {
debug!("Send (stem) tx {} to {}", tx.hash(), self.info.addr);
self.send(tx, msg::Type::StemTransaction)
}
/// Sends a request for block headers from the provided block locator
/// (a list of header hashes the requester already knows about).
pub fn send_header_request(&self, locator: Vec<Hash>) -> Result<(), Error> {
    self.send(&Locator { hashes: locator }, msg::Type::GetHeaders)
}
/// Sends a request for a full transaction, identified by its kernel hash
/// (matching the lightweight kernel-hash broadcast scheme).
pub fn send_tx_request(&self, h: Hash) -> Result<(), Error> {
    debug!(
        "Requesting tx (kernel hash) {} from peer {}.",
        h, self.info.addr
    );
    self.send(&h, msg::Type::GetTransaction)
}
/// Sends a request for a specific block by hash
pub fn send_block_request(&self, h: Hash) -> Result<(), Error> {
    debug!("Requesting block {} from peer {}.", h, self.info.addr);
    // Record the request so `block_received` (TrackingAdapter) can report
    // to the chain whether this block was explicitly requested by us.
    self.tracking_adapter.push_req(h);
    self.send(&h, msg::Type::GetBlock)
}
/// Sends a request for a specific compact block by hash
/// (unlike `send_block_request`, compact block requests are not tracked).
pub fn send_compact_block_request(&self, h: Hash) -> Result<(), Error> {
    debug!("Requesting compact block {} from {}", h, self.info.addr);
    self.send(&h, msg::Type::GetCompactBlock)
}
/// Asks the remote peer for more peer addresses matching the given
/// capabilities.
pub fn send_peer_request(&self, capab: Capabilities) -> Result<(), Error> {
    trace!("Asking {} for more peers {:?}", self.info.addr, capab);
    self.send(
        &GetPeerAddrs {
            capabilities: capab,
        },
        msg::Type::GetPeerAddrs,
    )
}
/// Requests the txhashset archive (state) at the given height and block
/// hash from the remote peer.
pub fn send_txhashset_request(&self, height: u64, hash: Hash) -> Result<(), Error> {
    debug!(
        "Asking {} for txhashset archive at {} {}.",
        self.info.addr, height, hash
    );
    // Flag the outstanding state-sync request before sending, so the
    // response handler knows we expect an archive from this peer.
    self.state_sync_requested.store(true, Ordering::Relaxed);
    self.send(
        &TxHashSetRequest { hash, height },
        msg::Type::TxHashSetRequest,
    )
}
/// Asks the remote peer to stream its kernel data to us.
pub fn send_kernel_data_request(&self) -> Result<(), Error> {
    debug!("Asking {} for kernel data.", self.info.addr);
    self.send(&KernelDataRequest {}, msg::Type::KernelDataRequest)
}
/// Stops the peer
pub fn stop(&self) {
    debug!("Stopping peer {:?}", self.info.addr);
    // `try_lock` is non-blocking: if another caller currently holds the
    // stop handle we log and give up rather than deadlock.
    match self.stop_handle.try_lock() {
        Some(handle) => handle.stop(),
        None => error!("can't get stop lock for peer"),
    }
}
/// Waits until the peer's thread exit
pub fn wait(&self) {
    debug!("Waiting for peer {:?} to stop", self.info.addr);
    // Same non-blocking lock strategy as `stop`: bail out with an error
    // log instead of blocking if the handle is contended.
    match self.stop_handle.try_lock() {
        Some(mut handle) => handle.wait(),
        None => error!("can't get stop lock for peer"),
    }
}
}
/// Adapter implementation that forwards everything to an underlying adapter
/// but keeps track of the block and transaction hashes that were requested or
/// received.
#[derive(Clone)]
struct TrackingAdapter {
    // The wrapped adapter that receives all delegated calls.
    adapter: Arc<dyn NetAdapter>,
    // Hashes of entities received from this peer, newest first (bounded).
    known: Arc<RwLock<Vec<Hash>>>,
    // Hashes we explicitly requested from this peer, newest first (bounded).
    requested: Arc<RwLock<Vec<Hash>>>,
}
impl TrackingAdapter {
fn new(adapter: Arc<dyn NetAdapter>) -> TrackingAdapter {
TrackingAdapter {
adapter: adapter,
known: Arc::new(RwLock::new(Vec::with_capacity(MAX_TRACK_SIZE))),
requested: Arc::new(RwLock::new(Vec::with_capacity(MAX_TRACK_SIZE))),
}
}
fn has_recv(&self, hash: Hash) -> bool {
let known = self.known.read();
// may become too slow, an ordered set (by timestamp for eviction) may
// end up being a better choice
known.contains(&hash)
}
fn push_recv(&self, hash: Hash) {
let mut known = self.known.write();
if known.len() > MAX_TRACK_SIZE {
known.truncate(MAX_TRACK_SIZE);
}
if !known.contains(&hash) {
known.insert(0, hash);
}
}
fn has_req(&self, hash: Hash) -> bool {
let requested = self.requested.read();
// may become too slow, an ordered set (by timestamp for eviction) may
// end up being a better choice
requested.contains(&hash)
}
fn push_req(&self, hash: Hash) {
let mut requested = self.requested.write();
if requested.len() > MAX_TRACK_SIZE {
requested.truncate(MAX_TRACK_SIZE);
}
if !requested.contains(&hash) {
requested.insert(0, hash);
}
}
}
/// `ChainAdapter` pass-through: records received hashes where relevant, then
/// delegates every call to the wrapped adapter.
impl ChainAdapter for TrackingAdapter {
    fn total_difficulty(&self) -> Result<Difficulty, chain::Error> {
        self.adapter.total_difficulty()
    }
    fn total_height(&self) -> Result<u64, chain::Error> {
        self.adapter.total_height()
    }
    fn get_transaction(&self, kernel_hash: Hash) -> Option<core::Transaction> {
        self.adapter.get_transaction(kernel_hash)
    }
    fn tx_kernel_received(
        &self,
        kernel_hash: Hash,
        peer_info: &PeerInfo,
    ) -> Result<bool, chain::Error> {
        // Track the kernel hash so we don't echo this tx back to the peer.
        self.push_recv(kernel_hash);
        self.adapter.tx_kernel_received(kernel_hash, peer_info)
    }
    fn transaction_received(
        &self,
        tx: core::Transaction,
        stem: bool,
    ) -> Result<bool, chain::Error> {
        // Do not track the tx hash for stem txs.
        // Otherwise we fail to handle the subsequent fluff or embargo expiration
        // correctly.
        if !stem {
            // NOTE(review): assumes at least one kernel; indexing panics
            // otherwise — confirm upstream validation guarantees this.
            let kernel = &tx.kernels()[0];
            self.push_recv(kernel.hash());
        }
        self.adapter.transaction_received(tx, stem)
    }
    fn block_received(
        &self,
        b: core::Block,
        peer_info: &PeerInfo,
        _was_requested: bool,
    ) -> Result<bool, chain::Error> {
        let bh = b.hash();
        self.push_recv(bh);
        // The caller-provided `_was_requested` flag is deliberately ignored:
        // we substitute our own record of whether *we* requested this block
        // from this particular peer (see `send_block_request`).
        self.adapter.block_received(b, peer_info, self.has_req(bh))
    }
    fn compact_block_received(
        &self,
        cb: core::CompactBlock,
        peer_info: &PeerInfo,
    ) -> Result<bool, chain::Error> {
        // Track so we don't re-send this compact block to the peer.
        self.push_recv(cb.hash());
        self.adapter.compact_block_received(cb, peer_info)
    }
    fn header_received(
        &self,
        bh: core::BlockHeader,
        peer_info: &PeerInfo,
    ) -> Result<bool, chain::Error> {
        // Track so we don't re-send this header to the peer.
        self.push_recv(bh.hash());
        self.adapter.header_received(bh, peer_info)
    }
    fn headers_received(
        &self,
        bh: &[core::BlockHeader],
        peer_info: &PeerInfo,
    ) -> Result<bool, chain::Error> {
        // Bulk header responses are not individually tracked.
        self.adapter.headers_received(bh, peer_info)
    }
    fn locate_headers(&self, locator: &[Hash]) -> Result<Vec<core::BlockHeader>, chain::Error> {
        self.adapter.locate_headers(locator)
    }
    fn get_block(&self, h: Hash) -> Option<core::Block> {
        self.adapter.get_block(h)
    }
    fn kernel_data_read(&self) -> Result<File, chain::Error> {
        self.adapter.kernel_data_read()
    }
    fn kernel_data_write(&self, reader: &mut dyn Read) -> Result<bool, chain::Error> {
        self.adapter.kernel_data_write(reader)
    }
    fn txhashset_read(&self, h: Hash) -> Option<TxHashSetRead> {
        self.adapter.txhashset_read(h)
    }
    fn txhashset_archive_header(&self) -> Result<core::BlockHeader, chain::Error> {
        self.adapter.txhashset_archive_header()
    }
    fn txhashset_receive_ready(&self) -> bool {
        self.adapter.txhashset_receive_ready()
    }
    fn txhashset_write(
        &self,
        h: Hash,
        txhashset_data: File,
        peer_info: &PeerInfo,
    ) -> Result<bool, chain::Error> {
        self.adapter.txhashset_write(h, txhashset_data, peer_info)
    }
    fn txhashset_download_update(
        &self,
        start_time: DateTime<Utc>,
        downloaded_size: u64,
        total_size: u64,
    ) -> bool {
        self.adapter
            .txhashset_download_update(start_time, downloaded_size, total_size)
    }
    fn get_tmp_dir(&self) -> PathBuf {
        self.adapter.get_tmp_dir()
    }
    fn get_tmpfile_pathname(&self, tmpfile_name: String) -> PathBuf {
        self.adapter.get_tmpfile_pathname(tmpfile_name)
    }
}
/// `NetAdapter` pass-through: peer/network callbacks need no hash tracking,
/// so every method simply delegates to the wrapped adapter.
impl NetAdapter for TrackingAdapter {
    fn find_peer_addrs(&self, capab: Capabilities) -> Vec<PeerAddr> {
        self.adapter.find_peer_addrs(capab)
    }
    fn peer_addrs_received(&self, addrs: Vec<PeerAddr>) {
        self.adapter.peer_addrs_received(addrs)
    }
    fn peer_difficulty(&self, addr: PeerAddr, diff: Difficulty, height: u64) {
        self.adapter.peer_difficulty(addr, diff, height)
    }
    fn is_banned(&self, addr: PeerAddr) -> bool {
        self.adapter.is_banned(addr)
    }
}
| 28.358852 | 103 | 0.674653 |
0a624184b494146e80d23dee9360c0ad5a838da9 | 18,900 | use crate::builtins::string::is_trimmable_whitespace;
use boa_gc::{unsafe_empty_trace, Finalize, Trace};
use rustc_hash::FxHashSet;
use std::{
alloc::{alloc, dealloc, handle_alloc_error, Layout},
borrow::Borrow,
cell::Cell,
hash::{Hash, Hasher},
marker::PhantomData,
ops::Deref,
ptr::{copy_nonoverlapping, NonNull},
rc::Rc,
};
/// Strings eligible for interning: `JsString::new` returns a shared,
/// pre-allocated handle for any string appearing here.
///
/// Fixes over the previous revision: "assing" -> "assign" (`Object.assign`),
/// and "toLowerString"/"toUpperString" -> "toLowerCase"/"toUpperCase" (the
/// actual `String.prototype` method names) — the misspelled entries could
/// never match a real property name and so were never interned.
/// Duplicate entries (e.g. "entries", "includes", "slice") are harmless:
/// they collapse when inserted into the `CONSTANTS` hash set.
const CONSTANTS_ARRAY: [&str; 127] = [
    // Empty string
    "",
    // Misc
    ",",
    ":",
    // Generic use
    "name",
    "length",
    "arguments",
    "prototype",
    "constructor",
    // typeof
    "null",
    "undefined",
    "number",
    "string",
    "symbol",
    "bigint",
    "object",
    "function",
    // Property descriptor
    "value",
    "get",
    "set",
    "writable",
    "enumerable",
    "configurable",
    // Object object
    "Object",
    "assign",
    "create",
    "toString",
    "valueOf",
    "is",
    "seal",
    "isSealed",
    "freeze",
    "isFrozen",
    "keys",
    "values",
    "entries",
    // Function object
    "Function",
    "apply",
    "bind",
    "call",
    // Array object
    "Array",
    "from",
    "isArray",
    "of",
    "get [Symbol.species]",
    "copyWithin",
    "entries",
    "every",
    "fill",
    "filter",
    "find",
    "findIndex",
    "flat",
    "flatMap",
    "forEach",
    "includes",
    "indexOf",
    "join",
    "map",
    "reduce",
    "reduceRight",
    "reverse",
    "shift",
    "slice",
    "some",
    "sort",
    "unshift",
    "push",
    "pop",
    // String object
    "String",
    "charAt",
    "charCodeAt",
    "concat",
    "endsWith",
    "includes",
    "indexOf",
    "lastIndexOf",
    "match",
    "matchAll",
    "normalize",
    "padEnd",
    "padStart",
    "repeat",
    "replace",
    "replaceAll",
    "search",
    "slice",
    "split",
    "startsWith",
    "substring",
    "toLowerCase",
    "toUpperCase",
    "trim",
    "trimEnd",
    "trimStart",
    // Number object
    "Number",
    // Boolean object
    "Boolean",
    // RegExp object
    "RegExp",
    "exec",
    "test",
    "flags",
    "index",
    "lastIndex",
    // Symbol object
    "Symbol",
    "for",
    "keyFor",
    "description",
    "[Symbol.toPrimitive]",
    "",
    // Map object
    "Map",
    "clear",
    "delete",
    "get",
    "has",
    "set",
    "size",
    // Set object
    "Set",
    // Reflect object
    "Reflect",
    // Error objects
    "Error",
    "TypeError",
    "RangeError",
    "SyntaxError",
    "ReferenceError",
    "EvalError",
    "URIError",
    "message",
    // Date object
    "Date",
    "toJSON",
];
/// Byte length of the longest entry in `CONSTANTS_ARRAY`, computed at compile
/// time so `JsString::new` can cheaply rule out an interning lookup.
const MAX_CONSTANT_STRING_LENGTH: usize = {
    let mut longest = 0;
    let mut idx = 0;
    // Iterators are not usable in const contexts, hence the manual loop.
    while idx < CONSTANTS_ARRAY.len() {
        let candidate = CONSTANTS_ARRAY[idx].len();
        if candidate > longest {
            longest = candidate;
        }
        idx += 1;
    }
    longest
};
/// Allocates uninitialized memory for `layout`, aborting the process via
/// [`handle_alloc_error`] if the global allocator fails.
///
/// # Safety
///
/// `layout` must have non-zero size, as required by [`alloc`].
unsafe fn try_alloc(layout: Layout) -> *mut u8 {
    let ptr = alloc(layout);
    if ptr.is_null() {
        // Allocation failed: invoke the standard OOM hook (diverges).
        handle_alloc_error(layout);
    }
    ptr
}
thread_local! {
    /// Per-thread interning set holding one pre-built `JsString` for each
    /// entry of `CONSTANTS_ARRAY`; `JsString::new`/`concat` return clones
    /// from this set for short, common strings instead of allocating.
    static CONSTANTS: FxHashSet<JsString> = {
        let mut constants = FxHashSet::default();
        for s in CONSTANTS_ARRAY.iter() {
            // Build directly from `Inner` so construction does not consult
            // the (still-initializing) constants set via `JsString::new`.
            let s = JsString {
                inner: Inner::new(s),
                _marker: PhantomData,
            };
            constants.insert(s);
        }
        constants
    };
}
/// The inner representation of a [`JsString`].
///
/// `#[repr(C)]` fixes the field order so the zero-sized `data` array marks
/// the exact offset where the UTF-8 bytes live within the same allocation
/// (see `Inner::new`, which over-allocates to make room for them).
#[repr(C)]
struct Inner {
    /// The utf8 length, the number of bytes.
    len: usize,
    /// The number of references to the string.
    ///
    /// When this reaches `0` the string is deallocated.
    refcount: Cell<usize>,
    /// An empty array which is used to get the offset of string data.
    data: [u8; 0],
}
impl Inner {
    /// Create a new `Inner` from `&str`.
    ///
    /// Allocates a single buffer holding the `Inner` header followed by a
    /// copy of `s`'s bytes, and returns it with a refcount of 1.
    #[inline]
    fn new(s: &str) -> NonNull<Self> {
        // We get the layout of the `Inner` type and we extend by the size
        // of the string array.
        let inner_layout = Layout::new::<Self>();
        let (layout, offset) = inner_layout
            .extend(Layout::array::<u8>(s.len()).expect("failed to create memory layout"))
            .expect("failed to extend memory layout");
        // SAFETY: `layout` is non-zero-sized (it contains `Inner` itself),
        // the header is written before any other access, and the byte copy
        // stays within the `s.len()` bytes reserved past `offset`.
        let inner = unsafe {
            let inner = try_alloc(layout).cast::<Self>();
            // Write the first part, the Inner.
            inner.write(Self {
                len: s.len(),
                refcount: Cell::new(1),
                data: [0; 0],
            });
            // Get offset into the string data.
            let data = (*inner).data.as_mut_ptr();
            debug_assert!(std::ptr::eq(inner.cast::<u8>().add(offset), data));
            // Copy string data into data offset.
            copy_nonoverlapping(s.as_ptr(), data, s.len());
            inner
        };
        // Safety: We already know it's not null, so this is safe.
        unsafe { NonNull::new_unchecked(inner) }
    }

    /// Concatenate array of strings.
    ///
    /// Same layout scheme as `new`, but the data region is sized for the sum
    /// of all input lengths and filled by copying each string in order.
    #[inline]
    fn concat_array(strings: &[&str]) -> NonNull<Self> {
        let mut total_string_size = 0;
        for string in strings {
            total_string_size += string.len();
        }
        // We get the layout of the `Inner` type and we extend by the size
        // of the string array.
        let inner_layout = Layout::new::<Self>();
        let (layout, offset) = inner_layout
            .extend(Layout::array::<u8>(total_string_size).expect("failed to create memory layout"))
            .expect("failed to extend memory layout");
        // SAFETY: as in `new`; each copy writes a disjoint sub-range of the
        // `total_string_size` bytes reserved past `offset`.
        let inner = unsafe {
            let inner = try_alloc(layout).cast::<Self>();
            // Write the first part, the Inner.
            inner.write(Self {
                len: total_string_size,
                refcount: Cell::new(1),
                data: [0; 0],
            });
            // Get offset into the string data.
            let data = (*inner).data.as_mut_ptr();
            debug_assert!(std::ptr::eq(inner.cast::<u8>().add(offset), data));
            // Copy the two string data into data offset.
            let mut offset = 0;
            for string in strings {
                copy_nonoverlapping(string.as_ptr(), data.add(offset), string.len());
                offset += string.len();
            }
            inner
        };
        // Safety: We already know it's not null, so this is safe.
        unsafe { NonNull::new_unchecked(inner) }
    }

    /// Deallocate inner type with string data.
    ///
    /// # Safety
    ///
    /// `x` must have been produced by `Inner::new`/`Inner::concat_array` and
    /// must not be used after this call (double free / use-after-free
    /// otherwise). The layout is recomputed from the stored `len`, so it
    /// matches the original allocation exactly.
    #[inline]
    unsafe fn dealloc(x: NonNull<Self>) {
        let len = (*x.as_ptr()).len;
        let inner_layout = Layout::new::<Self>();
        let (layout, _offset) = inner_layout
            .extend(Layout::array::<u8>(len).expect("failed to create memory layout"))
            .expect("failed to extend memory layout");
        dealloc(x.as_ptr().cast::<_>(), layout);
    }
}
/// This represents a JavaScript primitive string.
///
/// This is similar to `Rc<str>`. But unlike `Rc<str>` which stores the length
/// on the stack and a pointer to the data (this is also known as fat pointers).
/// The `JsString` length and data is stored on the heap. and just an non-null
/// pointer is kept, so its size is the size of a pointer.
#[derive(Finalize)]
pub struct JsString {
    // Non-null pointer to the shared, refcounted `Inner` allocation.
    inner: NonNull<Inner>,
    // `PhantomData<Rc<str>>` gives this type `Rc`-like auto traits:
    // `JsString` is neither `Send` nor `Sync` (the refcount is a `Cell`).
    _marker: PhantomData<Rc<str>>,
}
impl Default for JsString {
    /// Returns the empty string; `""` is in `CONSTANTS_ARRAY`, so this is a
    /// cheap clone of the interned constant rather than a new allocation.
    #[inline]
    fn default() -> Self {
        Self::new("")
    }
}
impl JsString {
    /// Create an empty string, same as calling default.
    #[inline]
    pub fn empty() -> Self {
        Self::default()
    }

    /// Create a new JavaScript string.
    ///
    /// Short strings found in the thread-local `CONSTANTS` set are returned
    /// as shared interned handles instead of being freshly allocated.
    #[inline]
    pub fn new<S: AsRef<str>>(s: S) -> Self {
        let s = s.as_ref();
        // Only consult the interning set when the length makes a hit possible.
        if s.len() <= MAX_CONSTANT_STRING_LENGTH {
            if let Some(constant) = CONSTANTS.with(|c| c.get(s).cloned()) {
                return constant;
            }
        }
        Self {
            inner: Inner::new(s),
            _marker: PhantomData,
        }
    }

    /// Concatenate two string.
    pub fn concat<T, U>(x: T, y: U) -> Self
    where
        T: AsRef<str>,
        U: AsRef<str>,
    {
        let x = x.as_ref();
        let y = y.as_ref();
        // The concatenation is allocated first; if the result matches an
        // interned constant, the interned handle is returned and the fresh
        // allocation is dropped.
        let this = Self {
            inner: Inner::concat_array(&[x, y]),
            _marker: PhantomData,
        };
        if this.len() <= MAX_CONSTANT_STRING_LENGTH {
            if let Some(constant) = CONSTANTS.with(|c| c.get(&this).cloned()) {
                return constant;
            }
        }
        this
    }

    /// Concatenate array of string.
    pub fn concat_array(strings: &[&str]) -> Self {
        // Same allocate-then-maybe-intern strategy as `concat`.
        let this = Self {
            inner: Inner::concat_array(strings),
            _marker: PhantomData,
        };
        if this.len() <= MAX_CONSTANT_STRING_LENGTH {
            if let Some(constant) = CONSTANTS.with(|c| c.get(&this).cloned()) {
                return constant;
            }
        }
        this
    }

    /// Return the inner representation.
    #[inline]
    fn inner(&self) -> &Inner {
        // SAFETY: `self.inner` stays valid for the lifetime of `self`; it is
        // only deallocated in `Drop` when the last handle goes away.
        unsafe { self.inner.as_ref() }
    }

    /// Return the JavaScript string as a rust `&str`.
    #[inline]
    pub fn as_str(&self) -> &str {
        let inner = self.inner();
        // SAFETY: `data` points at `inner.len` initialized bytes copied from
        // a valid `&str` at construction, so they are guaranteed UTF-8.
        unsafe {
            let slice = std::slice::from_raw_parts(inner.data.as_ptr(), inner.len);
            std::str::from_utf8_unchecked(slice)
        }
    }

    /// Gets the number of `JsString`s which point to this allocation.
    #[inline]
    pub fn refcount(this: &Self) -> usize {
        this.inner().refcount.get()
    }

    /// Returns `true` if the two `JsString`s point to the same allocation (in a vein similar to [`ptr::eq`]).
    ///
    /// [`ptr::eq`]: std::ptr::eq
    #[inline]
    pub fn ptr_eq(x: &Self, y: &Self) -> bool {
        x.inner == y.inner
    }

    /// `6.1.4.1 StringIndexOf ( string, searchValue, fromIndex )`
    ///
    /// Note: Instead of returning an isize with `-1` as the "not found" value,
    /// We make use of the type system and return Option<usize> with None as the "not found" value.
    ///
    /// More information:
    /// - [ECMAScript reference][spec]
    ///
    /// [spec]: https://tc39.es/ecma262/#sec-stringindexof
    pub(crate) fn index_of(&self, search_value: &Self, from_index: usize) -> Option<usize> {
        // NOTE(review): this re-encodes the string to UTF-16 and allocates a
        // candidate `String` for every starting position — O(n·m) with an
        // allocation per step. Encoding both strings once up front would
        // avoid both costs; left as-is to preserve behavior exactly.

        // 1. Assert: Type(string) is String.
        // 2. Assert: Type(searchValue) is String.
        // 3. Assert: fromIndex is a non-negative integer.
        // 4. Let len be the length of string.
        let len = self.encode_utf16().count();
        // 5. If searchValue is the empty String and fromIndex ≤ len, return fromIndex.
        if search_value.is_empty() && from_index <= len {
            return Some(from_index);
        }
        // 6. Let searchLen be the length of searchValue.
        let search_len = search_value.encode_utf16().count();
        // 7. For each integer i starting with fromIndex such that i ≤ len - searchLen, in ascending order, do
        for i in from_index..=len {
            // The isize cast keeps `len - search_len` from underflowing when
            // the needle is longer than the haystack.
            if i as isize > (len as isize - search_len as isize) {
                break;
            }
            // a. Let candidate be the substring of string from i to i + searchLen.
            let candidate = String::from_utf16_lossy(
                &self
                    .encode_utf16()
                    .skip(i)
                    .take(search_len)
                    .collect::<Vec<u16>>(),
            );
            // b. If candidate is the same sequence of code units as searchValue, return i.
            if candidate == search_value.as_str() {
                return Some(i);
            }
        }
        // 8. Return -1.
        None
    }

    /// Converts the string to an `f64` following the JS `ToNumber`-style
    /// rules: empty -> 0, signed infinities, `0b`/`0o`/`0x` radix prefixes,
    /// otherwise decimal parsing (NaN on failure).
    pub(crate) fn string_to_number(&self) -> f64 {
        let string = self.trim_matches(is_trimmable_whitespace);
        match string {
            "" => return 0.0,
            "-Infinity" => return f64::NEG_INFINITY,
            "Infinity" | "+Infinity" => return f64::INFINITY,
            _ => {}
        }
        // `s` is advanced past the two prefix bytes here; the slow path
        // below continues from the third byte.
        let mut s = string.bytes();
        let base = match (s.next(), s.next()) {
            (Some(b'0'), Some(b'b' | b'B')) => Some(2),
            (Some(b'0'), Some(b'o' | b'O')) => Some(8),
            (Some(b'0'), Some(b'x' | b'X')) => Some(16),
            _ => None,
        };
        // Parse numbers that begin with `0b`, `0o` and `0x`.
        if let Some(base) = base {
            // Slicing at byte 2 is safe: the prefix is two ASCII bytes.
            let string = &string[2..];
            if string.is_empty() {
                return f64::NAN;
            }
            // Fast path
            if let Ok(value) = u32::from_str_radix(string, base) {
                return f64::from(value);
            }
            // Slow path
            // (taken when the value overflows `u32` or contains a non-digit;
            // non-digits still yield NaN below)
            let mut value = 0.0;
            for c in s {
                if let Some(digit) = char::from(c).to_digit(base) {
                    value = value * f64::from(base) + f64::from(digit);
                } else {
                    return f64::NAN;
                }
            }
            return value;
        }
        match string {
            // Handle special cases so `fast_float` does not return infinity.
            "inf" | "+inf" | "-inf" => f64::NAN,
            string => fast_float::parse(string).unwrap_or(f64::NAN),
        }
    }
}
// Safety: [`JsString`] does not contain any objects which require trace,
// so an empty trace implementation is sound.
unsafe impl Trace for JsString {
    unsafe_empty_trace!();
}
impl Clone for JsString {
    /// Bumps the reference count and returns a second handle to the same
    /// allocation — O(1), no string data is copied.
    #[inline]
    fn clone(&self) -> Self {
        let inner = self.inner();
        // NOTE(review): unlike `Rc`, this does not guard against refcount
        // overflow; wrap-around would require `usize::MAX` live handles.
        inner.refcount.set(inner.refcount.get() + 1);
        Self {
            inner: self.inner,
            _marker: PhantomData,
        }
    }
}
impl Drop for JsString {
    /// Decrements the refcount, freeing the shared allocation when this was
    /// the last handle.
    #[inline]
    fn drop(&mut self) {
        let inner = self.inner();
        if inner.refcount.get() == 1 {
            // Safety: If refcount is 1 and we call drop, that means this is the last
            // JsString which points to this memory allocation, so deallocating it is safe.
            unsafe {
                Inner::dealloc(self.inner);
            }
        } else {
            inner.refcount.set(inner.refcount.get() - 1);
        }
    }
}
impl std::fmt::Debug for JsString {
    /// Formats like the underlying `str`'s `Debug` (quoted and escaped).
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.as_str().fmt(f)
    }
}
impl std::fmt::Display for JsString {
    /// Formats like the underlying `str`'s `Display` (raw contents).
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.as_str().fmt(f)
    }
}
// All conversions route through `Self::new`, so interning of short constant
// strings applies to every `From` path.
impl From<&str> for JsString {
    #[inline]
    fn from(s: &str) -> Self {
        Self::new(s)
    }
}
impl From<Box<str>> for JsString {
    #[inline]
    fn from(s: Box<str>) -> Self {
        Self::new(s)
    }
}
impl From<String> for JsString {
    #[inline]
    fn from(s: String) -> Self {
        Self::new(s)
    }
}
impl AsRef<str> for JsString {
    #[inline]
    fn as_ref(&self) -> &str {
        self.as_str()
    }
}
// `Borrow<str>` lets `JsString` keys be looked up by `&str` in hash sets
// (used by the `CONSTANTS` interning set). Its contract requires
// `Eq`/`Ord`/`Hash` to agree with `str`'s, which the impls below satisfy.
impl Borrow<str> for JsString {
    #[inline]
    fn borrow(&self) -> &str {
        self.as_str()
    }
}
impl Deref for JsString {
    type Target = str;
    // Deref to `str` exposes the whole `str` API (e.g. `trim_matches`,
    // `encode_utf16`) directly on `JsString`.
    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}
impl PartialEq<Self> for JsString {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        // If they point at the same memory allocation, then they are equal.
        // (Fast path; falls back to byte comparison otherwise.)
        if Self::ptr_eq(self, other) {
            return true;
        }
        self.as_str() == other.as_str()
    }
}
impl Eq for JsString {}
// Hashing delegates to the underlying `str`, keeping `Hash` consistent with
// `PartialEq` above and with the `Borrow<str>` contract.
impl Hash for JsString {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_str().hash(state);
    }
}
impl PartialOrd for JsString {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        self.as_str().partial_cmp(other.as_str())
    }
}
impl Ord for JsString {
    #[inline]
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        // `other` deref-coerces to `&str` here.
        self.as_str().cmp(other)
    }
}
// Symmetric `JsString` <-> `str` comparisons, so either side of `==` may be
// a plain string (used heavily in tests and lookups).
impl PartialEq<str> for JsString {
    #[inline]
    fn eq(&self, other: &str) -> bool {
        self.as_str() == other
    }
}
impl PartialEq<JsString> for str {
    #[inline]
    fn eq(&self, other: &JsString) -> bool {
        self == other.as_str()
    }
}
impl PartialEq<&str> for JsString {
    #[inline]
    fn eq(&self, other: &&str) -> bool {
        self.as_str() == *other
    }
}
impl PartialEq<JsString> for &str {
    #[inline]
    fn eq(&self, other: &JsString) -> bool {
        *self == other.as_str()
    }
}
#[cfg(test)]
mod tests {
    use super::JsString;
    use std::mem::size_of;

    #[test]
    fn empty() {
        // Construction of "" must not panic (served from the interned set).
        let _empty = JsString::new("");
    }

    #[test]
    fn pointer_size() {
        // `JsString` is a single `NonNull` pointer, and the non-null niche
        // makes `Option<JsString>` pointer-sized as well.
        assert_eq!(size_of::<JsString>(), size_of::<*const u8>());
        assert_eq!(size_of::<Option<JsString>>(), size_of::<*const u8>());
    }

    #[test]
    fn refcount() {
        // (The typo in the literal is irrelevant — only counts are checked.)
        let x = JsString::new("Hello wrold");
        assert_eq!(JsString::refcount(&x), 1);
        {
            let y = x.clone();
            assert_eq!(JsString::refcount(&x), 2);
            assert_eq!(JsString::refcount(&y), 2);
            {
                let z = y.clone();
                assert_eq!(JsString::refcount(&x), 3);
                assert_eq!(JsString::refcount(&y), 3);
                assert_eq!(JsString::refcount(&z), 3);
            }
            // `z` dropped: count falls back to 2.
            assert_eq!(JsString::refcount(&x), 2);
            assert_eq!(JsString::refcount(&y), 2);
        }
        // `y` dropped: back to the sole handle.
        assert_eq!(JsString::refcount(&x), 1);
    }

    #[test]
    fn ptr_eq() {
        let x = JsString::new("Hello");
        let y = x.clone();
        // Clones share the allocation...
        assert!(JsString::ptr_eq(&x, &y));
        // ...but a second `new` of a non-interned string allocates fresh.
        let z = JsString::new("Hello");
        assert!(!JsString::ptr_eq(&x, &z));
        assert!(!JsString::ptr_eq(&y, &z));
    }

    #[test]
    fn as_str() {
        let s = "Hello";
        let x = JsString::new(s);
        assert_eq!(x.as_str(), s);
    }

    #[test]
    fn hash() {
        use std::collections::hash_map::DefaultHasher;
        use std::hash::{Hash, Hasher};
        let s = "Hello, world!";
        let x = JsString::new(s);
        assert_eq!(x.as_str(), s);
        // `JsString` must hash identically to the equivalent `&str`
        // (required by the `Borrow<str>` contract).
        let mut hasher = DefaultHasher::new();
        s.hash(&mut hasher);
        let s_hash = hasher.finish();
        let mut hasher = DefaultHasher::new();
        x.hash(&mut hasher);
        let x_hash = hasher.finish();
        assert_eq!(s_hash, x_hash);
    }

    #[test]
    fn concat() {
        let x = JsString::new("hello");
        let y = ", ";
        let z = JsString::new("world");
        let w = String::from("!");
        // Each concat allocates a fresh (refcount 1) string.
        let xy = JsString::concat(x, y);
        assert_eq!(xy, "hello, ");
        assert_eq!(JsString::refcount(&xy), 1);
        let xyz = JsString::concat(xy, z);
        assert_eq!(xyz, "hello, world");
        assert_eq!(JsString::refcount(&xyz), 1);
        let xyzw = JsString::concat(xyz, w);
        assert_eq!(xyzw, "hello, world!");
        assert_eq!(JsString::refcount(&xyzw), 1);
    }
}
| 24.293059 | 110 | 0.514656 |
0320166bcbe9ab8a4241c387cae67171ccf61d73 | 7,035 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Print format variants
use arrow::csv::writer::WriterBuilder;
use arrow::json::{ArrayWriter, LineDelimitedWriter};
use datafusion::arrow::record_batch::RecordBatch;
use datafusion::arrow::util::pretty;
use datafusion::error::{DataFusionError, Result};
use std::fmt;
use std::str::FromStr;
/// Allow records to be printed in different formats
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum PrintFormat {
    /// Comma-separated values.
    Csv,
    /// Tab-separated values.
    Tsv,
    /// Human-readable ASCII table.
    Table,
    /// A single JSON array of row objects.
    Json,
    /// Newline-delimited JSON, one object per line.
    NdJson,
}
/// returns all print formats
pub fn all_print_formats() -> Vec<PrintFormat> {
vec![
PrintFormat::Csv,
PrintFormat::Tsv,
PrintFormat::Table,
PrintFormat::Json,
PrintFormat::NdJson,
]
}
impl FromStr for PrintFormat {
    type Err = ();

    /// Parses a format name case-insensitively; `Err(())` for unknown names.
    fn from_str(s: &str) -> std::result::Result<Self, ()> {
        match s.to_lowercase().as_str() {
            "csv" => Ok(Self::Csv),
            "tsv" => Ok(Self::Tsv),
            "table" => Ok(Self::Table),
            "json" => Ok(Self::Json),
            "ndjson" => Ok(Self::NdJson),
            _ => Err(()),
        }
    }
}
impl fmt::Display for PrintFormat {
    /// Writes the lowercase name of the format — the same spelling that
    /// `FromStr` accepts, so display/parse round-trip.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let name = match self {
            Self::Csv => "csv",
            Self::Tsv => "tsv",
            Self::Table => "table",
            Self::Json => "json",
            Self::NdJson => "ndjson",
        };
        write!(f, "{}", name)
    }
}
/// Serializes `$batches` to a JSON `String` using the given arrow JSON
/// writer type (`ArrayWriter` or `LineDelimitedWriter`). Expands to an
/// expression; may early-return via `?` on write or UTF-8 errors.
macro_rules! batches_to_json {
    ($WRITER: ident, $batches: expr) => {{
        let mut bytes = vec![];
        {
            // Scope the writer so its borrow of `bytes` ends before the
            // buffer is converted into a String.
            let mut writer = $WRITER::new(&mut bytes);
            writer.write_batches($batches)?;
            writer.finish()?;
        }
        String::from_utf8(bytes).map_err(|e| DataFusionError::Execution(e.to_string()))?
    }};
}
/// Renders `batches` as delimiter-separated text (with a header row) and
/// returns the result as a `String`.
fn print_batches_with_sep(batches: &[RecordBatch], delimiter: u8) -> Result<String> {
    let mut buffer = Vec::new();
    {
        // The writer borrows `buffer`; keep it scoped so the borrow ends
        // before we convert the buffer into a String.
        let mut writer = WriterBuilder::new()
            .has_headers(true)
            .with_delimiter(delimiter)
            .build(&mut buffer);
        for batch in batches {
            writer.write(batch)?;
        }
    }
    String::from_utf8(buffer).map_err(|e| DataFusionError::Execution(e.to_string()))
}
impl PrintFormat {
    /// print the batches to stdout using the specified format
    pub fn print_batches(&self, batches: &[RecordBatch]) -> Result<()> {
        match self {
            Self::Csv => println!("{}", print_batches_with_sep(batches, b',')?),
            Self::Tsv => println!("{}", print_batches_with_sep(batches, b'\t')?),
            // `pretty::print_batches` writes to stdout itself, so no
            // surrounding `println!` is needed here.
            Self::Table => pretty::print_batches(batches)?,
            Self::Json => println!("{}", batches_to_json!(ArrayWriter, batches)),
            Self::NdJson => {
                println!("{}", batches_to_json!(LineDelimitedWriter, batches))
            }
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use arrow::array::Int32Array;
    use arrow::datatypes::{DataType, Field, Schema};
    use datafusion::from_slice::FromSlice;
    use std::sync::Arc;

    #[test]
    fn test_from_str() {
        let format = "csv".parse::<PrintFormat>().unwrap();
        assert_eq!(PrintFormat::Csv, format);
        let format = "tsv".parse::<PrintFormat>().unwrap();
        assert_eq!(PrintFormat::Tsv, format);
        let format = "json".parse::<PrintFormat>().unwrap();
        assert_eq!(PrintFormat::Json, format);
        let format = "ndjson".parse::<PrintFormat>().unwrap();
        assert_eq!(PrintFormat::NdJson, format);
        let format = "table".parse::<PrintFormat>().unwrap();
        assert_eq!(PrintFormat::Table, format);
    }

    #[test]
    fn test_to_str() {
        assert_eq!("csv", PrintFormat::Csv.to_string());
        assert_eq!("table", PrintFormat::Table.to_string());
        assert_eq!("tsv", PrintFormat::Tsv.to_string());
        assert_eq!("json", PrintFormat::Json.to_string());
        assert_eq!("ndjson", PrintFormat::NdJson.to_string());
    }

    #[test]
    fn test_from_str_failure() {
        // "pretty" is not a recognized format name.
        assert!("pretty".parse::<PrintFormat>().is_err());
    }

    #[test]
    fn test_print_batches_with_sep() {
        // Empty input renders as an empty string (no header row).
        let batches = vec![];
        assert_eq!("", print_batches_with_sep(&batches, b',').unwrap());
        let schema = Arc::new(Schema::new(vec![
            Field::new("a", DataType::Int32, false),
            Field::new("b", DataType::Int32, false),
            Field::new("c", DataType::Int32, false),
        ]));
        let batch = RecordBatch::try_new(
            schema,
            vec![
                Arc::new(Int32Array::from_slice(&[1, 2, 3])),
                Arc::new(Int32Array::from_slice(&[4, 5, 6])),
                Arc::new(Int32Array::from_slice(&[7, 8, 9])),
            ],
        )
        .unwrap();
        let batches = vec![batch];
        let r = print_batches_with_sep(&batches, b',').unwrap();
        assert_eq!("a,b,c\n1,4,7\n2,5,8\n3,6,9\n", r);
    }

    // NOTE(review): despite the "_empty" suffix, this test covers both the
    // empty-input and populated cases for both JSON writers.
    #[test]
    fn test_print_batches_to_json_empty() -> Result<()> {
        let batches = vec![];
        let r = batches_to_json!(ArrayWriter, &batches);
        assert_eq!("", r);
        let r = batches_to_json!(LineDelimitedWriter, &batches);
        assert_eq!("", r);
        let schema = Arc::new(Schema::new(vec![
            Field::new("a", DataType::Int32, false),
            Field::new("b", DataType::Int32, false),
            Field::new("c", DataType::Int32, false),
        ]));
        let batch = RecordBatch::try_new(
            schema,
            vec![
                Arc::new(Int32Array::from_slice(&[1, 2, 3])),
                Arc::new(Int32Array::from_slice(&[4, 5, 6])),
                Arc::new(Int32Array::from_slice(&[7, 8, 9])),
            ],
        )
        .unwrap();
        let batches = vec![batch];
        let r = batches_to_json!(ArrayWriter, &batches);
        assert_eq!("[{\"a\":1,\"b\":4,\"c\":7},{\"a\":2,\"b\":5,\"c\":8},{\"a\":3,\"b\":6,\"c\":9}]", r);
        let r = batches_to_json!(LineDelimitedWriter, &batches);
        assert_eq!("{\"a\":1,\"b\":4,\"c\":7}\n{\"a\":2,\"b\":5,\"c\":8}\n{\"a\":3,\"b\":6,\"c\":9}\n", r);
        Ok(())
    }
}
| 32.270642 | 107 | 0.560057 |
0e11f638833f5d358a597dc98d6d8b5c9e63090e | 24,651 | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::autorepeater,
crate::input_device::{self, Handled, InputDeviceBinding},
anyhow::{format_err, Error, Result},
async_trait::async_trait,
fidl_fuchsia_input,
fidl_fuchsia_input_report::{InputDeviceProxy, InputReport},
fidl_fuchsia_ui_input3 as fidl_ui_input3,
fidl_fuchsia_ui_input3::KeyEventType,
fuchsia_async as fasync,
fuchsia_syslog::fx_log_err,
fuchsia_zircon as zx,
futures::{channel::mpsc::Sender, SinkExt},
};
/// A [`KeyboardEvent`] represents an input event from a keyboard device.
///
/// The keyboard event contains information about a key event. A key event represents a change in
/// the key state. Clients can expect the following sequence of events for a given key:
///
/// 1. [`KeyEventType::Pressed`]: the key has transitioned to being pressed.
/// 2. [`KeyEventType::Released`]: the key has transitioned to being released.
///
/// No duplicate [`KeyEventType::Pressed`] events will be sent for keys, even if the
/// key is present in a subsequent [`InputReport`]. Clients can assume that
/// a key is pressed for all received input events until the key is present in
/// the [`KeyEventType::Released`] entry of [`keys`].
///
/// Use `new` to create. Use `get_*` methods to read fields. Use `into_with_*`
/// methods to add optional information.
#[derive(Clone, Debug, PartialEq)]
pub struct KeyboardEvent {
    /// The key that changed state in this [KeyboardEvent].
    key: fidl_fuchsia_input::Key,
    /// A description of what happened to `key`.
    event_type: KeyEventType,
    /// The [`fidl_ui_input3::Modifiers`] associated with the pressed keys.
    modifiers: Option<fidl_ui_input3::Modifiers>,
    /// The [`fidl_ui_input3::LockState`] currently computed.
    lock_state: Option<fidl_ui_input3::LockState>,
    /// If set, contains the unique identifier of the keymap to be used when or
    /// if remapping the keypresses.
    keymap: Option<String>,
    /// If set, denotes the meaning of `key` in terms of the key effect.
    /// A `KeyboardEvent` starts off with `key_meaning` unset, and the key
    /// meaning is added in the input pipeline by the appropriate
    /// keymap-aware input handlers.
    key_meaning: Option<fidl_fuchsia_ui_input3::KeyMeaning>,
    /// If this keyboard event has been generated as a result of a repeated
    /// generation of the same key, then this will be a nonzero. A nonzero
    /// value N here means that this is Nth generated autorepeat for this
    /// keyboard event. The counter is reset for each new autorepeat key
    /// span. (Zero means the event came directly from the device.)
    repeat_sequence: u32,
    /// The currently active autorepeater settings.
    /// `None` means "use defaults"; see `get_autorepeat_settings`.
    autorepeat_settings: Option<autorepeater::Settings>,
}
impl KeyboardEvent {
    /// Creates a new KeyboardEvent, with required fields filled out. Use the
    /// `into_with_*` methods to add optional information.
    pub fn new(key: fidl_fuchsia_input::Key, event_type: KeyEventType) -> Self {
        KeyboardEvent {
            key,
            event_type,
            modifiers: None,
            lock_state: None,
            keymap: None,
            key_meaning: None,
            repeat_sequence: 0,
            // Spelled `None` for consistency with the other optional fields
            // (previously `Default::default()`, which is the same value).
            autorepeat_settings: None,
        }
    }

    /// Converts [KeyboardEvent] into the same one, but with the specified settings.
    pub fn into_with_autorepeat_settings(
        self,
        autorepeat_settings: Option<autorepeater::Settings>,
    ) -> Self {
        Self { autorepeat_settings, ..self }
    }

    /// Returns the autorepeat settings, falling back to the defaults when
    /// none were set on this event.
    pub fn get_autorepeat_settings(&self) -> autorepeater::Settings {
        // `unwrap_or_default()` replaces `unwrap_or(Default::default())`:
        // same value, but the default is not constructed eagerly
        // (clippy: or_fun_call) and the intent reads directly.
        self.autorepeat_settings.unwrap_or_default()
    }

    /// Returns the key that changed state in this event.
    pub fn get_key(&self) -> fidl_fuchsia_input::Key {
        self.key
    }

    /// Returns what happened to the key (pressed/released/...).
    pub fn get_event_type(&self) -> KeyEventType {
        self.event_type
    }

    /// Converts [KeyboardEvent] into the same one, but with specified modifiers.
    pub fn into_with_modifiers(self, modifiers: Option<fidl_ui_input3::Modifiers>) -> Self {
        Self { modifiers, ..self }
    }

    /// Returns the currently applicable modifiers.
    pub fn get_modifiers(&self) -> Option<fidl_ui_input3::Modifiers> {
        self.modifiers
    }

    /// Converts [KeyboardEvent] into the same one, but with the specified lock state.
    pub fn into_with_lock_state(self, lock_state: Option<fidl_ui_input3::LockState>) -> Self {
        Self { lock_state, ..self }
    }

    /// Returns the currently applicable lock state.
    pub fn get_lock_state(&self) -> Option<fidl_ui_input3::LockState> {
        self.lock_state
    }

    /// Converts [KeyboardEvent] into the same one, but with the specified keymap
    /// applied.
    pub fn into_with_keymap(self, keymap: Option<String>) -> Self {
        Self { keymap, ..self }
    }

    /// Returns the currently applied keymap (cloned).
    pub fn get_keymap(&self) -> Option<String> {
        self.keymap.clone()
    }

    /// Converts [KeyboardEvent] into the same one, but with the key meaning applied.
    pub fn into_with_key_meaning(
        self,
        key_meaning: Option<fidl_fuchsia_ui_input3::KeyMeaning>,
    ) -> Self {
        Self { key_meaning, ..self }
    }

    /// Returns the currently valid key meaning.
    pub fn get_key_meaning(&self) -> Option<fidl_fuchsia_ui_input3::KeyMeaning> {
        self.key_meaning
    }

    /// Returns the repeat sequence number. If a nonzero number N is returned,
    /// that means this [KeyboardEvent] is the N-th generated autorepeat event.
    /// A zero means this is an event that came from the keyboard driver.
    pub fn get_repeat_sequence(&self) -> u32 {
        self.repeat_sequence
    }

    /// Converts [KeyboardEvent] into the same one, but with the repeat sequence
    /// changed.
    pub fn into_with_repeat_sequence(self, repeat_sequence: u32) -> Self {
        Self { repeat_sequence, ..self }
    }
}
impl KeyboardEvent {
/// Returns true if the two keyboard events are about the same key.
pub fn same_key(this: &KeyboardEvent, that: &KeyboardEvent) -> bool {
this.get_key() == that.get_key()
}
}
/// A [`KeyboardDeviceDescriptor`] contains information about a specific keyboard device.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct KeyboardDeviceDescriptor {
/// All the [`fidl_fuchsia_input::Key`]s available on the keyboard device.
pub keys: Vec<fidl_fuchsia_input::Key>,
}
/// A [`KeyboardBinding`] represents a connection to a keyboard input device.
///
/// The [`KeyboardBinding`] parses and exposes keyboard device descriptor properties (e.g., the
/// available keyboard keys) for the device it is associated with. It also parses [`InputReport`]s
/// from the device, and sends them to the device binding owner over `event_sender`.
pub struct KeyboardBinding {
/// The channel to stream InputEvents to.
event_sender: Sender<input_device::InputEvent>,
/// Holds information about this device.
device_descriptor: KeyboardDeviceDescriptor,
}
#[async_trait]
impl input_device::InputDeviceBinding for KeyboardBinding {
fn input_event_sender(&self) -> Sender<input_device::InputEvent> {
self.event_sender.clone()
}
fn get_device_descriptor(&self) -> input_device::InputDeviceDescriptor {
input_device::InputDeviceDescriptor::Keyboard(self.device_descriptor.clone())
}
}
impl KeyboardBinding {
/// Creates a new [`InputDeviceBinding`] from the `device_proxy`.
///
/// The binding will start listening for input reports immediately and send new InputEvents
/// to the device binding owner over `input_event_sender`.
///
/// # Parameters
/// - `device_proxy`: The proxy to bind the new [`InputDeviceBinding`] to.
/// - `input_event_sender`: The channel to send new InputEvents to.
///
/// # Errors
/// If there was an error binding to the proxy.
pub async fn new(
device_proxy: InputDeviceProxy,
input_event_sender: Sender<input_device::InputEvent>,
) -> Result<Self, Error> {
let device_binding = Self::bind_device(&device_proxy, input_event_sender).await?;
input_device::initialize_report_stream(
device_proxy,
device_binding.get_device_descriptor(),
device_binding.input_event_sender(),
Self::process_reports,
);
Ok(device_binding)
}
/// Converts a vector of keyboard keys to the appropriate [`fidl_ui_input3::Modifiers`] bitflags.
///
/// For example, if `keys` contains `Key::CapsLock`, the bitflags will contain the corresponding
/// flags for `CapsLock`.
///
/// # Parameters
/// - `keys`: The keys to check for modifiers.
///
/// # Returns
/// Returns `None` if there are no modifier keys present.
pub fn to_modifiers(keys: &[&fidl_fuchsia_input::Key]) -> Option<fidl_ui_input3::Modifiers> {
let mut modifiers = fidl_ui_input3::Modifiers::empty();
for key in keys {
let modifier = match key {
fidl_fuchsia_input::Key::CapsLock => Some(fidl_ui_input3::Modifiers::CAPS_LOCK),
fidl_fuchsia_input::Key::NumLock => Some(fidl_ui_input3::Modifiers::NUM_LOCK),
fidl_fuchsia_input::Key::ScrollLock => Some(fidl_ui_input3::Modifiers::SCROLL_LOCK),
_ => None,
};
if let Some(modifier) = modifier {
modifiers.insert(modifier);
};
}
if modifiers.is_empty() {
return None;
}
Some(modifiers)
}
/// Binds the provided input device to a new instance of `Self`.
///
/// # Parameters
/// - `device`: The device to use to initialize the binding.
/// - `input_event_sender`: The channel to send new InputEvents to.
///
/// # Errors
/// If the device descriptor could not be retrieved, or the descriptor could not be parsed
/// correctly.
async fn bind_device(
device: &InputDeviceProxy,
input_event_sender: Sender<input_device::InputEvent>,
) -> Result<Self, Error> {
match device.get_descriptor().await?.keyboard {
Some(fidl_fuchsia_input_report::KeyboardDescriptor {
input: Some(fidl_fuchsia_input_report::KeyboardInputDescriptor { keys3, .. }),
output: _,
..
}) => Ok(KeyboardBinding {
event_sender: input_event_sender,
device_descriptor: KeyboardDeviceDescriptor { keys: keys3.unwrap_or_default() },
}),
device_descriptor => Err(format_err!(
"Keyboard Device Descriptor failed to parse: \n {:?}",
device_descriptor
)),
}
}
/// Parses an [`InputReport`] into one or more [`InputEvent`]s.
///
/// The [`InputEvent`]s are sent to the device binding owner via [`input_event_sender`].
///
/// # Parameters
/// `report`: The incoming [`InputReport`].
/// `previous_report`: The previous [`InputReport`] seen for the same device. This can be
/// used to determine, for example, which keys are no longer present in
/// a keyboard report to generate key released events. If `None`, no
/// previous report was found.
/// `device_descriptor`: The descriptor for the input device generating the input reports.
/// `input_event_sender`: The sender for the device binding's input event stream.
///
/// # Returns
/// An [`InputReport`] which will be passed to the next call to [`process_reports`], as
/// [`previous_report`]. If `None`, the next call's [`previous_report`] will be `None`.
fn process_reports(
report: InputReport,
previous_report: Option<InputReport>,
device_descriptor: &input_device::InputDeviceDescriptor,
input_event_sender: &mut Sender<input_device::InputEvent>,
) -> Option<InputReport> {
// Input devices can have multiple types so ensure `report` is a KeyboardInputReport.
match &report.keyboard {
None => return previous_report,
_ => (),
};
let new_keys = match KeyboardBinding::parse_pressed_keys(&report) {
Some(keys) => keys,
None => {
// It's OK for the report to contain an empty vector of keys, but it's not OK for
// the report to not have the appropriate fields set.
//
// In this case the report is treated as malformed, and the previous report is not
// updated.
fx_log_err!("Failed to parse keyboard keys: {:?}", report);
return previous_report;
}
};
let previous_keys: Vec<fidl_fuchsia_input::Key> = previous_report
.as_ref()
.and_then(|unwrapped_report| KeyboardBinding::parse_pressed_keys(&unwrapped_report))
.unwrap_or_default();
let event_time: zx::Time = input_device::event_time_or_now(report.event_time);
KeyboardBinding::send_key_events(
&new_keys,
&previous_keys,
device_descriptor.clone(),
event_time,
input_event_sender.clone(),
);
Some(report)
}
/// Parses the currently pressed [`fidl_fuchsia_input3::Key`]s from an input report.
///
/// # Parameters
/// - `input_report`: The input report to parse the keyboard keys from.
///
/// # Returns
/// Returns `None` if any of the required input report fields are `None`. If all the
/// required report fields are present, but there are no pressed keys, an empty vector
/// is returned.
fn parse_pressed_keys(input_report: &InputReport) -> Option<Vec<fidl_fuchsia_input::Key>> {
input_report
.keyboard
.as_ref()
.and_then(|unwrapped_keyboard| unwrapped_keyboard.pressed_keys3.as_ref())
.and_then(|unwrapped_keys| Some(unwrapped_keys.iter().cloned().collect()))
}
/// Sends key events to clients based on the new and previously pressed keys.
///
/// # Parameters
/// - `new_keys`: The input3 keys which are currently pressed, as reported by the bound device.
/// - `previous_keys`: The input3 keys which were pressed in the previous input report.
/// - `device_descriptor`: The descriptor for the input device generating the input reports.
/// - `event_time`: The time in nanoseconds when the event was first recorded.
/// - `input_event_sender`: The sender for the device binding's input event stream.
fn send_key_events(
new_keys: &Vec<fidl_fuchsia_input::Key>,
previous_keys: &Vec<fidl_fuchsia_input::Key>,
device_descriptor: input_device::InputDeviceDescriptor,
event_time: zx::Time,
input_event_sender: Sender<input_device::InputEvent>,
) {
// Dispatches all key events individually in a separate task. This is done in a separate
// function so that the lifetime of `new_keys` above could be detached from that of the
// spawned task.
fn dispatch_events(
key_events: Vec<(fidl_fuchsia_input::Key, fidl_fuchsia_ui_input3::KeyEventType)>,
device_descriptor: input_device::InputDeviceDescriptor,
event_time: zx::Time,
mut input_event_sender: Sender<input_device::InputEvent>,
) {
fasync::Task::spawn(async move {
let mut event_time = event_time;
for (key, event_type) in key_events.into_iter() {
match input_event_sender
.send(input_device::InputEvent {
device_event: input_device::InputDeviceEvent::Keyboard(
KeyboardEvent::new(key, event_type),
),
device_descriptor: device_descriptor.clone(),
event_time,
handled: Handled::No,
})
.await
{
Err(error) => {
fx_log_err!(
"Failed to send KeyboardEvent for key: {:?}, event_type: {:?}: {:?}",
&key,
&event_type,
error
);
}
_ => (),
}
// If key events happen to have been reported at the same time,
// we pull them apart artificially. A 1ns increment will likely
// be enough of a difference that it is recognizable but that it
// does not introduce confusion.
event_time = event_time + zx::Duration::from_nanos(1);
}
})
.detach();
}
// Filter out the keys which were present in the previous keyboard report to avoid sending
// multiple `KeyEventType::Pressed` events for a key.
let pressed_keys = new_keys
.iter()
.cloned()
.filter(|key| !previous_keys.contains(key))
.map(|k| (k, fidl_fuchsia_ui_input3::KeyEventType::Pressed));
// Any key which is not present in the new keys, but was present in the previous report
// is considered to be released.
let released_keys = previous_keys
.iter()
.cloned()
.filter(|key| !new_keys.contains(key))
.map(|k| (k, fidl_fuchsia_ui_input3::KeyEventType::Released));
// It is important that key releases are dispatched before key presses,
// so that modifier tracking would work correctly. We collect the result
// into a vector since an iterator is not Send and can not be moved into
// a closure.
let all_keys = released_keys.chain(pressed_keys).collect::<Vec<_>>();
dispatch_events(all_keys, device_descriptor, event_time, input_event_sender);
}
}
#[cfg(test)]
mod tests {
use {
super::*, crate::testing_utilities, fuchsia_async as fasync, fuchsia_zircon as zx,
futures::StreamExt,
};
/// Tests that a key that is present in the new report, but was not present in the previous report
/// is propagated as pressed.
#[fasync::run_singlethreaded(test)]
async fn pressed_key() {
let descriptor = input_device::InputDeviceDescriptor::Keyboard(KeyboardDeviceDescriptor {
keys: vec![fidl_fuchsia_input::Key::A],
});
let (event_time_i64, event_time_u64) = testing_utilities::event_times();
let reports = vec![testing_utilities::create_keyboard_input_report(
vec![fidl_fuchsia_input::Key::A],
event_time_i64,
)];
let expected_events = vec![testing_utilities::create_keyboard_event(
fidl_fuchsia_input::Key::A,
fidl_fuchsia_ui_input3::KeyEventType::Pressed,
None,
event_time_u64,
&descriptor,
/* keymap= */ None,
)];
assert_input_report_sequence_generates_events!(
input_reports: reports,
expected_events: expected_events,
device_descriptor: descriptor,
device_type: KeyboardBinding,
);
}
/// Tests that a key that is not present in the new report, but was present in the previous report
/// is propagated as released.
#[fasync::run_singlethreaded(test)]
async fn released_key() {
let descriptor = input_device::InputDeviceDescriptor::Keyboard(KeyboardDeviceDescriptor {
keys: vec![fidl_fuchsia_input::Key::A],
});
let (event_time_i64, event_time_u64) = testing_utilities::event_times();
let reports = vec![
testing_utilities::create_keyboard_input_report(
vec![fidl_fuchsia_input::Key::A],
event_time_i64,
),
testing_utilities::create_keyboard_input_report(vec![], event_time_i64),
];
let expected_events = vec![
testing_utilities::create_keyboard_event(
fidl_fuchsia_input::Key::A,
fidl_fuchsia_ui_input3::KeyEventType::Pressed,
None,
event_time_u64,
&descriptor,
/* keymap= */ None,
),
testing_utilities::create_keyboard_event(
fidl_fuchsia_input::Key::A,
fidl_fuchsia_ui_input3::KeyEventType::Released,
None,
event_time_u64,
&descriptor,
/* keymap= */ None,
),
];
assert_input_report_sequence_generates_events!(
input_reports: reports,
expected_events: expected_events,
device_descriptor: descriptor.clone(),
device_type: KeyboardBinding,
);
}
/// Tests that a key that is present in multiple consecutive input reports is not propagated
/// as a pressed event more than once.
#[fasync::run_singlethreaded(test)]
async fn multiple_pressed_event_filtering() {
let descriptor = input_device::InputDeviceDescriptor::Keyboard(KeyboardDeviceDescriptor {
keys: vec![fidl_fuchsia_input::Key::A],
});
let (event_time_i64, event_time_u64) = testing_utilities::event_times();
let reports = vec![
testing_utilities::create_keyboard_input_report(
vec![fidl_fuchsia_input::Key::A],
event_time_i64,
),
testing_utilities::create_keyboard_input_report(
vec![fidl_fuchsia_input::Key::A],
event_time_i64,
),
];
let expected_events = vec![testing_utilities::create_keyboard_event(
fidl_fuchsia_input::Key::A,
fidl_fuchsia_ui_input3::KeyEventType::Pressed,
None,
event_time_u64,
&descriptor,
/* keymap= */ None,
)];
assert_input_report_sequence_generates_events!(
input_reports: reports,
expected_events: expected_events,
device_descriptor: descriptor,
device_type: KeyboardBinding,
);
}
/// Tests that both pressed and released keys are sent at once.
#[fasync::run_singlethreaded(test)]
async fn pressed_and_released_keys() {
let descriptor = input_device::InputDeviceDescriptor::Keyboard(KeyboardDeviceDescriptor {
keys: vec![fidl_fuchsia_input::Key::A, fidl_fuchsia_input::Key::B],
});
let (event_time_i64, event_time) = testing_utilities::event_times();
let reports = vec![
testing_utilities::create_keyboard_input_report(
vec![fidl_fuchsia_input::Key::A],
event_time_i64,
),
testing_utilities::create_keyboard_input_report(
vec![fidl_fuchsia_input::Key::B],
event_time_i64,
),
];
let expected_events = vec![
testing_utilities::create_keyboard_event(
fidl_fuchsia_input::Key::A,
fidl_fuchsia_ui_input3::KeyEventType::Pressed,
None,
event_time,
&descriptor,
/* keymap= */ None,
),
testing_utilities::create_keyboard_event(
fidl_fuchsia_input::Key::A,
fidl_fuchsia_ui_input3::KeyEventType::Released,
None,
event_time,
&descriptor,
/* keymap= */ None,
),
testing_utilities::create_keyboard_event(
fidl_fuchsia_input::Key::B,
fidl_fuchsia_ui_input3::KeyEventType::Pressed,
None,
// Simultaneous key events are artificially separated by 1ns
// on purpose.
event_time + zx::Duration::from_nanos(1),
&descriptor,
/* keymap= */ None,
),
];
assert_input_report_sequence_generates_events!(
input_reports: reports,
expected_events: expected_events,
device_descriptor: descriptor,
device_type: KeyboardBinding,
);
}
}
| 39.695652 | 102 | 0.614093 |
dbb88dc8db1a21b219517440f7fcd62a913cabd3 | 14,038 | // Copyright 2018-2021 the Deno authors. All rights reserved. MIT license.
use deno_core::error::invalid_hostname;
use deno_core::error::null_opbuf;
use deno_core::error::AnyError;
use deno_core::futures::stream::SplitSink;
use deno_core::futures::stream::SplitStream;
use deno_core::futures::SinkExt;
use deno_core::futures::StreamExt;
use deno_core::include_js_files;
use deno_core::op_async;
use deno_core::op_sync;
use deno_core::url;
use deno_core::AsyncRefCell;
use deno_core::CancelFuture;
use deno_core::CancelHandle;
use deno_core::Extension;
use deno_core::OpState;
use deno_core::RcRef;
use deno_core::Resource;
use deno_core::ResourceId;
use deno_core::ZeroCopyBuf;
use deno_tls::create_client_config;
use deno_tls::webpki::DNSNameRef;
use http::{Method, Request, Uri};
use serde::Deserialize;
use serde::Serialize;
use std::borrow::Cow;
use std::cell::RefCell;
use std::fmt;
use std::path::PathBuf;
use std::rc::Rc;
use std::sync::Arc;
use tokio::net::TcpStream;
use tokio_rustls::rustls::RootCertStore;
use tokio_rustls::TlsConnector;
use tokio_tungstenite::tungstenite::{
handshake::client::Response, protocol::frame::coding::CloseCode,
protocol::CloseFrame, Message,
};
use tokio_tungstenite::MaybeTlsStream;
use tokio_tungstenite::{client_async, WebSocketStream};
pub use tokio_tungstenite; // Re-export tokio_tungstenite
#[derive(Clone)]
pub struct WsRootStore(pub Option<RootCertStore>);
#[derive(Clone)]
pub struct WsUserAgent(pub String);
pub trait WebSocketPermissions {
fn check_net_url(&mut self, _url: &url::Url) -> Result<(), AnyError>;
}
/// `UnsafelyIgnoreCertificateErrors` is a wrapper struct so it can be placed inside `GothamState`;
/// using type alias for a `Option<Vec<String>>` could work, but there's a high chance
/// that there might be another type alias pointing to a `Option<Vec<String>>`, which
/// would override previously used alias.
pub struct UnsafelyIgnoreCertificateErrors(Option<Vec<String>>);
/// For use with `op_websocket_*` when the user does not want permissions.
pub struct NoWebSocketPermissions;
impl WebSocketPermissions for NoWebSocketPermissions {
fn check_net_url(&mut self, _url: &url::Url) -> Result<(), AnyError> {
Ok(())
}
}
type WsStream = WebSocketStream<MaybeTlsStream<TcpStream>>;
pub enum WebSocketStreamType {
Client {
tx: AsyncRefCell<SplitSink<WsStream, Message>>,
rx: AsyncRefCell<SplitStream<WsStream>>,
},
Server {
tx: AsyncRefCell<
SplitSink<WebSocketStream<hyper::upgrade::Upgraded>, Message>,
>,
rx: AsyncRefCell<SplitStream<WebSocketStream<hyper::upgrade::Upgraded>>>,
},
}
pub struct WsStreamResource {
pub stream: WebSocketStreamType,
// When a `WsStreamResource` resource is closed, all pending 'read' ops are
// canceled, while 'write' ops are allowed to complete. Therefore only
// 'read' futures are attached to this cancel handle.
pub cancel: CancelHandle,
}
impl WsStreamResource {
async fn send(self: &Rc<Self>, message: Message) -> Result<(), AnyError> {
match self.stream {
WebSocketStreamType::Client { .. } => {
let mut tx = RcRef::map(self, |r| match &r.stream {
WebSocketStreamType::Client { tx, .. } => tx,
WebSocketStreamType::Server { .. } => unreachable!(),
})
.borrow_mut()
.await;
tx.send(message).await?;
}
WebSocketStreamType::Server { .. } => {
let mut tx = RcRef::map(self, |r| match &r.stream {
WebSocketStreamType::Client { .. } => unreachable!(),
WebSocketStreamType::Server { tx, .. } => tx,
})
.borrow_mut()
.await;
tx.send(message).await?;
}
}
Ok(())
}
async fn next_message(
self: &Rc<Self>,
cancel: RcRef<CancelHandle>,
) -> Result<
Option<Result<Message, tokio_tungstenite::tungstenite::Error>>,
AnyError,
> {
match &self.stream {
WebSocketStreamType::Client { .. } => {
let mut rx = RcRef::map(self, |r| match &r.stream {
WebSocketStreamType::Client { rx, .. } => rx,
WebSocketStreamType::Server { .. } => unreachable!(),
})
.borrow_mut()
.await;
rx.next().or_cancel(cancel).await.map_err(AnyError::from)
}
WebSocketStreamType::Server { .. } => {
let mut rx = RcRef::map(self, |r| match &r.stream {
WebSocketStreamType::Client { .. } => unreachable!(),
WebSocketStreamType::Server { rx, .. } => rx,
})
.borrow_mut()
.await;
rx.next().or_cancel(cancel).await.map_err(AnyError::from)
}
}
}
}
impl Resource for WsStreamResource {
fn name(&self) -> Cow<str> {
"webSocketStream".into()
}
}
pub struct WsCancelResource(Rc<CancelHandle>);
impl Resource for WsCancelResource {
fn name(&self) -> Cow<str> {
"webSocketCancel".into()
}
fn close(self: Rc<Self>) {
self.0.cancel()
}
}
// This op is needed because creating a WS instance in JavaScript is a sync
// operation and should throw error when permissions are not fulfilled,
// but actual op that connects WS is async.
pub fn op_ws_check_permission_and_cancel_handle<WP>(
state: &mut OpState,
url: String,
cancel_handle: bool,
) -> Result<Option<ResourceId>, AnyError>
where
WP: WebSocketPermissions + 'static,
{
state
.borrow_mut::<WP>()
.check_net_url(&url::Url::parse(&url)?)?;
if cancel_handle {
let rid = state
.resource_table
.add(WsCancelResource(CancelHandle::new_rc()));
Ok(Some(rid))
} else {
Ok(None)
}
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CreateArgs {
url: String,
protocols: String,
cancel_handle: Option<ResourceId>,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct CreateResponse {
rid: ResourceId,
protocol: String,
extensions: String,
}
pub async fn op_ws_create<WP>(
state: Rc<RefCell<OpState>>,
args: CreateArgs,
_: (),
) -> Result<CreateResponse, AnyError>
where
WP: WebSocketPermissions + 'static,
{
{
let mut s = state.borrow_mut();
s.borrow_mut::<WP>()
.check_net_url(&url::Url::parse(&args.url)?)
.expect(
"Permission check should have been done in op_ws_check_permission",
);
}
let unsafely_ignore_certificate_errors = state
.borrow()
.try_borrow::<UnsafelyIgnoreCertificateErrors>()
.and_then(|it| it.0.clone());
let root_cert_store = state.borrow().borrow::<WsRootStore>().0.clone();
let user_agent = state.borrow().borrow::<WsUserAgent>().0.clone();
let uri: Uri = args.url.parse()?;
let mut request = Request::builder().method(Method::GET).uri(&uri);
request = request.header("User-Agent", user_agent);
if !args.protocols.is_empty() {
request = request.header("Sec-WebSocket-Protocol", args.protocols);
}
let request = request.body(())?;
let domain = &uri.host().unwrap().to_string();
let port = &uri.port_u16().unwrap_or(match uri.scheme_str() {
Some("wss") => 443,
Some("ws") => 80,
_ => unreachable!(),
});
let addr = format!("{}:{}", domain, port);
let tcp_socket = TcpStream::connect(addr).await?;
let socket: MaybeTlsStream<TcpStream> = match uri.scheme_str() {
Some("ws") => MaybeTlsStream::Plain(tcp_socket),
Some("wss") => {
let tls_config = create_client_config(
root_cert_store,
vec![],
unsafely_ignore_certificate_errors,
)?;
let tls_connector = TlsConnector::from(Arc::new(tls_config));
let dnsname = DNSNameRef::try_from_ascii_str(domain)
.map_err(|_| invalid_hostname(domain))?;
let tls_socket = tls_connector.connect(dnsname, tcp_socket).await?;
MaybeTlsStream::Rustls(tls_socket)
}
_ => unreachable!(),
};
let client = client_async(request, socket);
let (stream, response): (WsStream, Response) =
if let Some(cancel_rid) = args.cancel_handle {
let r = state
.borrow_mut()
.resource_table
.get::<WsCancelResource>(cancel_rid)?;
client
.or_cancel(r.0.to_owned())
.await
.map_err(|_| DomExceptionAbortError::new("connection was aborted"))?
} else {
client.await
}
.map_err(|err| {
DomExceptionNetworkError::new(&format!(
"failed to connect to WebSocket: {}",
err.to_string()
))
})?;
if let Some(cancel_rid) = args.cancel_handle {
state.borrow_mut().resource_table.close(cancel_rid).ok();
}
let (ws_tx, ws_rx) = stream.split();
let resource = WsStreamResource {
stream: WebSocketStreamType::Client {
rx: AsyncRefCell::new(ws_rx),
tx: AsyncRefCell::new(ws_tx),
},
cancel: Default::default(),
};
let mut state = state.borrow_mut();
let rid = state.resource_table.add(resource);
let protocol = match response.headers().get("Sec-WebSocket-Protocol") {
Some(header) => header.to_str().unwrap(),
None => "",
};
let extensions = response
.headers()
.get_all("Sec-WebSocket-Extensions")
.iter()
.map(|header| header.to_str().unwrap())
.collect::<String>();
Ok(CreateResponse {
rid,
protocol: protocol.to_string(),
extensions,
})
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SendArgs {
rid: ResourceId,
kind: String,
text: Option<String>,
}
pub async fn op_ws_send(
state: Rc<RefCell<OpState>>,
args: SendArgs,
buf: Option<ZeroCopyBuf>,
) -> Result<(), AnyError> {
let msg = match args.kind.as_str() {
"text" => Message::Text(args.text.unwrap()),
"binary" => Message::Binary(buf.ok_or_else(null_opbuf)?.to_vec()),
"pong" => Message::Pong(vec![]),
_ => unreachable!(),
};
let resource = state
.borrow_mut()
.resource_table
.get::<WsStreamResource>(args.rid)?;
resource.send(msg).await?;
Ok(())
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CloseArgs {
rid: ResourceId,
code: Option<u16>,
reason: Option<String>,
}
pub async fn op_ws_close(
state: Rc<RefCell<OpState>>,
args: CloseArgs,
_: (),
) -> Result<(), AnyError> {
let rid = args.rid;
let msg = Message::Close(args.code.map(|c| CloseFrame {
code: CloseCode::from(c),
reason: match args.reason {
Some(reason) => Cow::from(reason),
None => Default::default(),
},
}));
let resource = state
.borrow_mut()
.resource_table
.get::<WsStreamResource>(rid)?;
resource.send(msg).await?;
Ok(())
}
#[derive(Serialize)]
#[serde(tag = "kind", content = "value", rename_all = "camelCase")]
pub enum NextEventResponse {
String(String),
Binary(ZeroCopyBuf),
Close { code: u16, reason: String },
Ping,
Pong,
Error(String),
Closed,
}
pub async fn op_ws_next_event(
state: Rc<RefCell<OpState>>,
rid: ResourceId,
_: (),
) -> Result<NextEventResponse, AnyError> {
let resource = state
.borrow_mut()
.resource_table
.get::<WsStreamResource>(rid)?;
let cancel = RcRef::map(&resource, |r| &r.cancel);
let val = resource.next_message(cancel).await?;
let res = match val {
Some(Ok(Message::Text(text))) => NextEventResponse::String(text),
Some(Ok(Message::Binary(data))) => NextEventResponse::Binary(data.into()),
Some(Ok(Message::Close(Some(frame)))) => NextEventResponse::Close {
code: frame.code.into(),
reason: frame.reason.to_string(),
},
Some(Ok(Message::Close(None))) => NextEventResponse::Close {
code: 1005,
reason: String::new(),
},
Some(Ok(Message::Ping(_))) => NextEventResponse::Ping,
Some(Ok(Message::Pong(_))) => NextEventResponse::Pong,
Some(Err(e)) => NextEventResponse::Error(e.to_string()),
None => {
state.borrow_mut().resource_table.close(rid).unwrap();
NextEventResponse::Closed
}
};
Ok(res)
}
pub fn init<P: WebSocketPermissions + 'static>(
user_agent: String,
root_cert_store: Option<RootCertStore>,
unsafely_ignore_certificate_errors: Option<Vec<String>>,
) -> Extension {
Extension::builder()
.js(include_js_files!(
prefix "deno:ext/websocket",
"01_websocket.js",
"02_websocketstream.js",
))
.ops(vec![
(
"op_ws_check_permission_and_cancel_handle",
op_sync(op_ws_check_permission_and_cancel_handle::<P>),
),
("op_ws_create", op_async(op_ws_create::<P>)),
("op_ws_send", op_async(op_ws_send)),
("op_ws_close", op_async(op_ws_close)),
("op_ws_next_event", op_async(op_ws_next_event)),
])
.state(move |state| {
state.put::<WsUserAgent>(WsUserAgent(user_agent.clone()));
state.put(UnsafelyIgnoreCertificateErrors(
unsafely_ignore_certificate_errors.clone(),
));
state.put::<WsRootStore>(WsRootStore(root_cert_store.clone()));
Ok(())
})
.build()
}
pub fn get_declaration() -> PathBuf {
PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("lib.deno_websocket.d.ts")
}
#[derive(Debug)]
pub struct DomExceptionNetworkError {
pub msg: String,
}
impl DomExceptionNetworkError {
pub fn new(msg: &str) -> Self {
DomExceptionNetworkError {
msg: msg.to_string(),
}
}
}
impl fmt::Display for DomExceptionNetworkError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.pad(&self.msg)
}
}
impl std::error::Error for DomExceptionNetworkError {}
pub fn get_network_error_class_name(e: &AnyError) -> Option<&'static str> {
e.downcast_ref::<DomExceptionNetworkError>()
.map(|_| "DOMExceptionNetworkError")
}
#[derive(Debug)]
pub struct DomExceptionAbortError {
pub msg: String,
}
impl DomExceptionAbortError {
pub fn new(msg: &str) -> Self {
DomExceptionAbortError {
msg: msg.to_string(),
}
}
}
impl fmt::Display for DomExceptionAbortError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.pad(&self.msg)
}
}
impl std::error::Error for DomExceptionAbortError {}
pub fn get_abort_error_class_name(e: &AnyError) -> Option<&'static str> {
e.downcast_ref::<DomExceptionAbortError>()
.map(|_| "DOMExceptionAbortError")
}
| 27.471624 | 99 | 0.656361 |
3899a2057c479b43a54d81924d3c2bd0d9cf788f | 73 | /// ## pow consensus algo
/// - 以太坊的 POW 共识算法
#[cfg(test)]
mod tests {}
| 12.166667 | 25 | 0.575342 |
1de6af06ad2ce66163b949726c5692f80e7a0956 | 4,104 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use datafusion::datasource::file_format::parquet::ParquetFormat;
use datafusion::datasource::file_format::FileFormat;
use datafusion::datasource::object_store::ObjectStoreUrl;
use datafusion::error::Result;
use datafusion::physical_plan::file_format::FileScanConfig;
use datafusion::physical_plan::{collect, ExecutionPlan};
use datafusion::prelude::SessionContext;
use datafusion_data_access::object_store::local::{
local_unpartitioned_file, LocalFileSystem,
};
use datafusion_row::layout::RowType::{Compact, WordAligned};
use datafusion_row::reader::read_as_batch;
use datafusion_row::writer::write_batch_unchecked;
use std::sync::Arc;
#[tokio::test]
async fn test_with_parquet() -> Result<()> {
let session_ctx = SessionContext::new();
let task_ctx = session_ctx.task_ctx();
let projection = Some(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
let exec = get_exec("alltypes_plain.parquet", &projection, None).await?;
let schema = exec.schema().clone();
let batches = collect(exec, task_ctx).await?;
assert_eq!(1, batches.len());
let batch = &batches[0];
let mut vector = vec![0; 20480];
let row_offsets =
{ write_batch_unchecked(&mut vector, 0, batch, 0, schema.clone(), Compact) };
let output_batch = { read_as_batch(&vector, schema, &row_offsets, Compact)? };
assert_eq!(*batch, output_batch);
Ok(())
}
#[tokio::test]
async fn test_with_parquet_word_aligned() -> Result<()> {
let session_ctx = SessionContext::new();
let task_ctx = session_ctx.task_ctx();
let projection = Some(vec![0, 1, 2, 3, 4, 5, 6, 7]);
let exec = get_exec("alltypes_plain.parquet", &projection, None).await?;
let schema = exec.schema().clone();
let batches = collect(exec, task_ctx).await?;
assert_eq!(1, batches.len());
let batch = &batches[0];
let mut vector = vec![0; 20480];
let row_offsets =
{ write_batch_unchecked(&mut vector, 0, batch, 0, schema.clone(), WordAligned) };
let output_batch = { read_as_batch(&vector, schema, &row_offsets, WordAligned)? };
assert_eq!(*batch, output_batch);
Ok(())
}
/// Builds a Parquet `ExecutionPlan` for `file_name` from the DataFusion
/// parquet test-data directory, applying the given column `projection`
/// and row `limit`.
async fn get_exec(
    file_name: &str,
    projection: &Option<Vec<usize>>,
    limit: Option<usize>,
) -> Result<Arc<dyn ExecutionPlan>> {
    let testdata = datafusion::test_util::parquet_test_data();
    let filename = format!("{}/{}", testdata, file_name);
    let meta = local_unpartitioned_file(filename);

    let format = ParquetFormat::default();
    let object_store = Arc::new(LocalFileSystem {}) as _;
    let object_store_url = ObjectStoreUrl::local_filesystem();

    // Infer schema and statistics directly from the file before planning.
    let file_schema = format
        .infer_schema(&object_store, &[meta.clone()])
        .await
        .expect("Schema inference");
    let statistics = format
        .infer_stats(&object_store, file_schema.clone(), &meta)
        .await
        .expect("Stats inference");

    let scan_config = FileScanConfig {
        object_store_url,
        file_schema,
        file_groups: vec![vec![meta.into()]],
        statistics,
        projection: projection.clone(),
        limit,
        table_partition_cols: vec![],
    };
    let exec = format.create_physical_plan(scan_config, &[]).await?;
    Ok(exec)
}
| 36.318584 | 89 | 0.668129 |
dddc823199bf609ea8e9a0dd029e0eaf9c3fdea6 | 575 | //! Exposes a C-ABI for symbolic
extern crate symbolic_common;
extern crate symbolic_demangle;
extern crate symbolic_debuginfo;
extern crate symbolic_symcache;
extern crate symbolic_sourcemap;
extern crate symbolic_proguard;
extern crate symbolic_minidump;
extern crate uuid;
#[macro_use] mod utils;
mod core;
mod common;
mod demangle;
mod debuginfo;
mod symcache;
mod sourcemap;
mod proguard;
mod minidump;
pub use core::*;
pub use common::*;
pub use demangle::*;
pub use debuginfo::*;
pub use symcache::*;
pub use sourcemap::*;
pub use proguard::*;
pub use minidump::*;
| 19.166667 | 32 | 0.770435 |
01865762f6effb800cc8e36106aef0fc1a37aed2 | 2,601 | #![allow(unused)]
/// A simple 2-D point used by the destructuring examples in this module.
struct Point {
    x: i32,
    y: i32,
}
/// Destructures a `Point` into the local bindings `a` and `b`.
///
/// Shows that the binding names in a struct pattern are free to differ
/// from the struct's own field names.
fn destructure_struct() {
    let point = Point { x: 0, y: 7 };
    let Point { x: a, y: b } = point;
    assert_eq!(a, 0);
    assert_eq!(b, 7);
}
// Field-shorthand form: the pattern binds `x` and `y` straight from the
// struct fields of the same name, so no renaming is needed.
fn destructure_struct_v2() {
    let point = Point { x: 0, y: 7 };
    let Point { x, y } = point;
    assert_eq!((x, y), (0, 7));
}
/// Demonstrates matching a struct while destructuring it: the first two
/// arms pin one field to a literal `0` and bind the other, and the final
/// arm binds both fields as a catch-all.
fn match_and_destructure_struct() {
    let p = Point { x: 0, y: 7 };
    match p {
        // Points on the x axis (y == 0).
        Point { x, y: 0 } => println!("On the x axis at {}", x),
        // Points on the y axis (x == 0); `p` takes this arm.
        Point { x: 0, y } => println!("On the y axis at {}", y),
        // Everything else: bind both coordinates.
        Point { x, y } => println!("On neither axis: ({}, {})", x, y),
    }
}
/// Example enum whose variants carry different kinds of payload: none,
/// named fields, a single value, and a three-value tuple.
enum Message {
    Quit,
    Move { x: i32, y: i32 },
    Write(String),
    ChangeColor(i32, i32, i32),
}
// Destructuring enum variants that hold different kinds of values.
fn destructure_enums() {
    let msg = Message::ChangeColor(0, 160, 255);
    match msg {
        // Unit variant: nothing to bind.
        Message::Quit => {
            println!("The Quit variant has no data to destructure.")
        }
        // Struct-like variant: bind its named fields.
        Message::Move { x, y } => {
            println!("Move in the x direction {} and in the y direction {}", x, y);
        }
        // Newtype-style variant: bind the single inner value.
        Message::Write(text) => println!("Text message: {}", text),
        // Tuple variant: bind each element positionally.
        Message::ChangeColor(r, g, b) => println!("Change the color to red {}, green {}, and blue {}", r, g, b),
    }
}
/// Two color encodings, used to demonstrate nested enum destructuring.
enum Color {
    Rgb(i32, i32, i32),
    Hsv(i32, i32, i32),
}
/// Like `Message`, but `ChangeColor` now wraps another enum (`Color`).
enum MessageV2 {
    Quit,
    Move { x: i32, y: i32 },
    Write(String),
    ChangeColor(Color),
}
/// Destructures an enum variant that itself wraps another enum: each
/// pattern reaches through `MessageV2::ChangeColor` into one of `Color`'s
/// variants in a single arm.
fn destructure_nested_structs_and_enums() {
    let msg = MessageV2::ChangeColor(Color::Hsv(0, 160, 255));
    match msg {
        MessageV2::ChangeColor(Color::Rgb(r, g, b)) => {
            println!("Change the color to red {}, green {}, and blue {}", r, g, b)
        }
        // `msg` takes this arm (it holds an Hsv color).
        MessageV2::ChangeColor(Color::Hsv(h, s, v)) => {
            println!("Change the color to hue {}, saturation {}, and value {}", h, s, v)
        }
        // Remaining variants carry nothing this example cares about.
        _ => (),
    }
}
/// Pulls every primitive value out of a tuple that nests another tuple
/// and a struct, all in a single pattern.
fn destructure_structs_and_tuples() {
    // Local to this example; shadows the module-level `Point`.
    struct Point {
        x: i32,
        y: i32,
    }
    let nested = ((3, 10), Point { x: 3, y: -10 });
    let ((feet, inches), Point { x, y }) = nested;
}
/// Public entry point for this example module; currently exercises only
/// the enum-destructuring demo.
pub fn run() {
    destructure_enums();
}
| 25.752475 | 112 | 0.550173 |
1dc28670233932927ff1b55b359ddce4d55d41d0 | 7,850 | //! Asynchronous sinks
//!
//! This crate contains the `Sink` trait which allows values to be sent
//! asynchronously.
#![no_std]
#![deny(missing_docs, missing_debug_implementations)]
#![doc(html_root_url = "https://docs.rs/futures-sink/0.2.0")]
#[cfg(feature = "std")]
extern crate std;
extern crate futures_core;
#[cfg(feature = "std")]
extern crate futures_channel;
// Emits the wrapped items only when the "std" feature is enabled, by
// prefixing each item with `#[cfg(feature = "std")]`.
macro_rules! if_std {
    ($($i:item)*) => ($(
        #[cfg(feature = "std")]
        $i
    )*)
}
use futures_core::{Poll, task};
if_std! {
    mod channel_impls;
    use futures_core::Async;
    use futures_core::never::Never;
    /// `Vec<T>` acts as an unbounded, infallible sink: it is always ready,
    /// `start_send` simply pushes, and flush/close have nothing to do.
    impl<T> Sink for ::std::vec::Vec<T> {
        type SinkItem = T;
        type SinkError = Never;
        fn poll_ready(&mut self, _: &mut task::Context) -> Poll<(), Self::SinkError> {
            Ok(Async::Ready(()))
        }
        fn start_send(&mut self, item: Self::SinkItem) -> Result<(), Self::SinkError> {
            self.push(item);
            Ok(())
        }
        fn poll_flush(&mut self, _: &mut task::Context) -> Poll<(), Self::SinkError> {
            Ok(Async::Ready(()))
        }
        fn poll_close(&mut self, _: &mut task::Context) -> Poll<(), Self::SinkError> {
            Ok(Async::Ready(()))
        }
    }
    /// `VecDeque<T>` mirrors the `Vec<T>` impl, appending with `push_back`.
    impl<T> Sink for ::std::collections::VecDeque<T> {
        type SinkItem = T;
        type SinkError = Never;
        fn poll_ready(&mut self, _: &mut task::Context) -> Poll<(), Self::SinkError> {
            Ok(Async::Ready(()))
        }
        fn start_send(&mut self, item: Self::SinkItem) -> Result<(), Self::SinkError> {
            self.push_back(item);
            Ok(())
        }
        fn poll_flush(&mut self, _: &mut task::Context) -> Poll<(), Self::SinkError> {
            Ok(Async::Ready(()))
        }
        fn poll_close(&mut self, _: &mut task::Context) -> Poll<(), Self::SinkError> {
            Ok(Async::Ready(()))
        }
    }
    /// A boxed sink delegates every method to the sink it owns.
    impl<S: ?Sized + Sink> Sink for ::std::boxed::Box<S> {
        type SinkItem = S::SinkItem;
        type SinkError = S::SinkError;
        fn poll_ready(&mut self, cx: &mut task::Context) -> Poll<(), Self::SinkError> {
            (**self).poll_ready(cx)
        }
        fn start_send(&mut self, item: Self::SinkItem) -> Result<(), Self::SinkError> {
            (**self).start_send(item)
        }
        fn poll_flush(&mut self, cx: &mut task::Context) -> Poll<(), Self::SinkError> {
            (**self).poll_flush(cx)
        }
        fn poll_close(&mut self, cx: &mut task::Context) -> Poll<(), Self::SinkError> {
            (**self).poll_close(cx)
        }
    }
}
/// A `Sink` is a value into which other values can be sent, asynchronously.
///
/// Basic examples of sinks include the sending side of:
///
/// - Channels
/// - Sockets
/// - Pipes
///
/// In addition to such "primitive" sinks, it's typical to layer additional
/// functionality, such as buffering, on top of an existing sink.
///
/// Sending to a sink is "asynchronous" in the sense that the value may not be
/// sent in its entirety immediately. Instead, values are sent in a two-phase
/// way: first by initiating a send, and then by polling for completion. This
/// two-phase setup is analogous to buffered writing in synchronous code, where
/// writes often succeed immediately, but internally are buffered and are
/// *actually* written only upon flushing.
///
/// In addition, the `Sink` may be *full*, in which case it is not even possible
/// to start the sending process.
///
/// As with `Future` and `Stream`, the `Sink` trait is built from a few core
/// required methods, and a host of default methods for working in a
/// higher-level way. The `Sink::send_all` combinator is of particular
/// importance: you can use it to send an entire stream to a sink, which is
/// the simplest way to ultimately consume a stream.
pub trait Sink {
    /// The type of value that the sink accepts.
    type SinkItem;
    /// The type of value produced by the sink when an error occurs.
    type SinkError;
    /// Attempts to prepare the `Sink` to receive a value.
    ///
    /// This method must be called and return `Ok(Async::Ready(()))` prior to
    /// each call to `start_send`.
    ///
    /// This method returns `Async::Ready` once the underlying sink is ready to
    /// receive data. If this method returns `Async::Pending`, the current task
    /// is registered to be notified (via `cx.waker()`) when `poll_ready`
    /// should be called again.
    ///
    /// In most cases, if the sink encounters an error, the sink will
    /// permanently be unable to receive items.
    fn poll_ready(&mut self, cx: &mut task::Context) -> Poll<(), Self::SinkError>;
    /// Begin the process of sending a value to the sink.
    ///
    /// Each call to this function must be preceded by a successful call to
    /// `poll_ready` which returned `Ok(Async::Ready(()))`.
    ///
    /// As the name suggests, this method only *begins* the process of sending
    /// the item. If the sink employs buffering, the item isn't fully processed
    /// until the buffer is fully flushed. Since sinks are designed to work with
    /// asynchronous I/O, the process of actually writing out the data to an
    /// underlying object takes place asynchronously. **You *must* use
    /// `poll_flush` or `poll_close` in order to guarantee completion of a
    /// send**.
    ///
    /// Implementations of `poll_ready` and `start_send` will usually involve
    /// flushing behind the scenes in order to make room for new messages.
    /// It is only necessary to call `poll_flush` if you need to guarantee that
    /// *all* of the items placed into the `Sink` have been sent.
    ///
    /// In most cases, if the sink encounters an error, the sink will
    /// permanently be unable to receive items.
    fn start_send(&mut self, item: Self::SinkItem)
                  -> Result<(), Self::SinkError>;
    /// Flush any remaining output from this sink.
    ///
    /// Returns `Ok(Async::Ready(()))` when no buffered items remain. If this
    /// value is returned then it is guaranteed that all previous values sent
    /// via `start_send` have been flushed.
    ///
    /// Returns `Ok(Async::Pending)` if there is more work left to do, in which
    /// case the current task is scheduled (via `cx.waker()`) to wake up when
    /// `poll_flush` should be called again.
    ///
    /// In most cases, if the sink encounters an error, the sink will
    /// permanently be unable to receive items.
    fn poll_flush(&mut self, cx: &mut task::Context) -> Poll<(), Self::SinkError>;
    /// Flush any remaining output and close this sink, if necessary.
    ///
    /// Returns `Ok(Async::Ready(()))` when no buffered items remain and the sink
    /// has been successfully closed.
    ///
    /// Returns `Ok(Async::Pending)` if there is more work left to do, in which
    /// case the current task is scheduled (via `cx.waker()`) to wake up when
    /// `poll_close` should be called again.
    ///
    /// If this function encounters an error, the sink should be considered to
    /// have failed permanently, and no more `Sink` methods should be called.
    fn poll_close(&mut self, cx: &mut task::Context) -> Poll<(), Self::SinkError>;
}
/// A mutable reference to a sink is itself a sink, forwarding every
/// method to the underlying sink.
impl<'a, S: ?Sized + Sink> Sink for &'a mut S {
    type SinkItem = S::SinkItem;
    type SinkError = S::SinkError;
    fn poll_ready(&mut self, cx: &mut task::Context) -> Poll<(), Self::SinkError> {
        (**self).poll_ready(cx)
    }
    fn start_send(&mut self, item: Self::SinkItem) -> Result<(), Self::SinkError> {
        (**self).start_send(item)
    }
    fn poll_flush(&mut self, cx: &mut task::Context) -> Poll<(), Self::SinkError> {
        (**self).poll_flush(cx)
    }
    fn poll_close(&mut self, cx: &mut task::Context) -> Poll<(), Self::SinkError> {
        (**self).poll_close(cx)
    }
}
| 36.511628 | 87 | 0.615032 |
6adaf96caa72c56b0f01576f7e7731989365460b | 1,262 | // Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use std::{env, path::Path};
use move_prover_test_utils::{baseline_test::verify_or_update_baseline, read_bool_env_var};
use move_stdlib::move_stdlib_files;
use move_unit_test::UnitTestingConfig;
/// File extension of the expected-output ("baseline") files.
const BASELINE_EXTENSION: &str = "exp";
/// Runs the Move unit tests in `path` (compiled together with the stdlib
/// sources) through the stackless VM, captures the report, and compares
/// it against the `.exp` baseline file next to the input.
fn test_runner(path: &Path) -> datatest_stable::Result<()> {
    // Disable ANSI colors so the captured output matches the baselines.
    env::set_var("NO_COLOR", "1");

    let mut source_files = move_stdlib_files();
    source_files.push(path.to_str().unwrap().to_owned());

    let config = UnitTestingConfig {
        instruction_execution_bound: 5000,
        filter: None,
        num_threads: 1,
        source_files,
        check_stackless_vm: true,
        report_storage_on_error: false,
        report_statistics: false,
        list: false,
        verbose: read_bool_env_var("VERBOSE"),
    };

    // Capture the report in an in-memory buffer rather than stdout.
    let plan = config.build_test_plan().unwrap();
    let mut report = vec![];
    config.run_and_report_unit_tests(plan, &mut report)?;
    let output = String::from_utf8(report)?;

    verify_or_update_baseline(&path.with_extension(BASELINE_EXTENSION), &output)?;
    Ok(())
}
// Generate a test harness that feeds every `.move` file under
// tests/concrete_check to `test_runner`.
datatest_stable::harness!(test_runner, "tests/concrete_check", r".*\.move$");
| 31.55 | 90 | 0.694929 |
0186cc403e371aa7e56dd63c3e4a479eb4019e32 | 3,663 | // Copyright (c) 2021, COSIC-KU Leuven, Kasteelpark Arenberg 10, bus 2452, B-3001 Leuven-Heverlee, Belgium.
// Copyright (c) 2021, Cosmian Tech SAS, 53-55 rue La Boétie, Paris, France.
#![warn(clippy::all)]
#![deny(rust_2018_idioms)]
#[macro_use]
extern crate tracing;
use scasm::lexer::Lexical;
use scasm::span::Span;
use std::convert::TryInto;
/// Errors surfaced by this integration test.
#[derive(Debug)]
enum Error {
    /// Filesystem failure while reading test inputs or writing outputs.
    Io(std::io::Error),
    /// An error was already reported through the compiler's diagnostics
    /// (converted from `scasm::ErrorReported`, which carries no detail).
    ErrorReported,
}
impl From<std::io::Error> for Error {
    fn from(err: std::io::Error) -> Self {
        Error::Io(err)
    }
}
impl From<scasm::ErrorReported> for Error {
    fn from(_: scasm::ErrorReported) -> Self {
        Error::ErrorReported
    }
}
/// Round-trips every `.asm` file in `tests/scasm-tests` through the scasm
/// pipeline: parse, re-lex (must agree with lexing the file directly),
/// optimize, emit bytecode, re-parse the bytecode, and finally check that
/// the re-lexed bytecode matches the original program.
#[allow(clippy::cognitive_complexity)]
fn main() -> std::io::Result<()> {
    scasm::init_logger().unwrap();
    for file in std::fs::read_dir("tests/scasm-tests")? {
        let file = file?.path();
        print!("seeing: {} ... ", file.display());
        // This particular test is enormous; allow opting out via env var.
        if file == std::path::Path::new("tests/scasm-tests/scasm-test_math-0.asm")
            && std::env::var("SKIP_HUGE_MATH_TEST").is_ok()
        {
            println!("skipping: {}", file.display());
            continue;
        }
        if let Some(extension) = file.extension() {
            if extension == "asm" {
                // Fixed typo in progress message (was "procssing").
                println!("processing");
                let mut cx = scasm::Compiler::stderr();
                // these are just noise here
                cx.show_warnings = false;
                let mut parsed = cx.parse_asm(&file, std::fs::File::open(&file).unwrap());
                let relexed = parsed.relex(&cx);
                if cx.check_for_errors().is_ok() {
                    // Lexing the file directly must agree with re-lexing the parse tree.
                    let file_id = cx.file_paths.iter().position(|f| *f == file).unwrap();
                    let lexed = cx.lex(file_id.try_into().unwrap());
                    for (lex, relex) in lexed.iter().zip(relexed) {
                        assert_eq!(lex.display(&cx).to_string(), relex.display(&cx).to_string());
                    }
                    info!("optimize");
                    scasm::transforms::apply_default_optimization_pipeline(&cx, &mut parsed, None);
                    info!("generate bytecode");
                    scasm::binary::generate_bytecode(
                        &cx,
                        &lexed,
                        std::fs::File::create(file.with_extension("bc"))?,
                    )?;
                    info!("parse bytecode");
                    let reparsed = cx.parse_bytecode(
                        file.with_extension("bc"),
                        std::fs::File::open(file.with_extension("bc"))?,
                    );
                    info!("relex");
                    let relexed = reparsed.relex(&cx);
                    info!("cmp");
                    for (lex, relex) in lexed.iter().zip(relexed) {
                        trace!("{} \t\t {}", lex.display(&cx), relex.display(&cx));
                        // Blank out the comment span before comparing, so the
                        // comparison ignores comments (the bytecode round-trip
                        // does not appear to preserve them).
                        let lex = Lexical {
                            comment: Span::DUMMY,
                            ..lex.clone()
                        };
                        assert_eq!(lex.instruction, relex.instruction);
                        assert_eq!(lex.args.len(), relex.args.len());
                        for (a, b) in lex.args.iter().zip(relex.args.iter()) {
                            if a.elem != b.elem {
                                panic!("{} != {}", lex.display(&cx), relex.display(&cx));
                            }
                        }
                    }
                }
                let _ = cx.check_for_errors();
            } else {
                println!("skipping");
            }
        }
    }
    Ok(())
}
| 36.267327 | 107 | 0.461097 |
11b0658ff4e1a3a38838fd7bb31f10f264c68bae | 114 |
Na slici su geometrijska tela i geometrijske figure.
Oboj kružić pored @name@.
@center@ @mycanvas(result)@
| 12.666667 | 53 | 0.72807 |
8fc871aa430ed0369f4ff226110b0773cf39f736 | 370 | // Copyright 2020-2021 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
#![allow(clippy::module_inception)]
mod collection;
mod key;
mod pair;
mod reference;
mod type_;
pub use self::collection::KeyCollection;
pub use self::key::PrivateKey;
pub use self::key::PublicKey;
pub use self::pair::KeyPair;
pub use self::reference::KeyRef;
pub use self::type_::KeyType;
| 20.555556 | 40 | 0.748649 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.