hexsha
stringlengths
40
40
size
int64
4
1.05M
content
stringlengths
4
1.05M
avg_line_length
float64
1.33
100
max_line_length
int64
1
1k
alphanum_fraction
float64
0.25
1
e9110c267c5f5c54ab7543bd51707ce6a7105db7
13,843
use futures::{Stream, TryStream}; use http::header::HeaderMap; use mime::Mime; use std::pin::Pin; use std::task::{Context, Poll}; use self::boundary::BoundaryFinder; use self::field::ReadHeaders; use crate::http::errors::ReadError; use crate::http::BodyChunk; #[cfg(test)] #[macro_use] pub mod test_util; mod helpers; pub use self::field::{Field, FieldData, FieldHeaders, NextField, ReadToString}; macro_rules! try_opt ( ($expr:expr) => ( match $expr { Some(val) => val, None => return None, } ) ); macro_rules! ret_err ( ($($args:tt)+) => ( return fmt_err!($($args)+).into(); ) ); // macro_rules! ret_ok( // ($expr:expr) => (return Ok($expr).into()); // ); macro_rules! fmt_err ( ($string:expr) => ( Err($crate::http::errors::ReadError::Parsing($string.into())) ); ($string:expr, $($args:tt)*) => ( Err($crate::http::errors::ReadError::Parsing(format!($string, $($args)*).into())) ); ); mod boundary; mod field; /// The server-side implementation of `multipart/form-data` requests. /// /// After constructing with either the [`::with_body()`](#method.with_body) or /// [`::try_from_request()`](#method.try_from_request), two different workflows for processing the /// request are provided, assuming any `Poll::Pending` and `Ready(Err(_))`/`Ready(Some(Err(_)))` /// results are handled in the typical fashion: /// /// ### High-Level Flow /// /// 1. Await the next field with [`.next_field()`](#method.next_field). /// 2. Read the field data via the `Stream` impl on `Field::data`. /// 3. Repeat until `.next_field()` returns `None`. /// /// ### Low-Level Flow /// /// 1. Poll for the first field boundary with [`.poll_has_next_field()`](#method.poll_has_next_field); /// if it returns `true` proceed to the next step, if `false` the request is at an end. /// /// 2. Poll for the field's headers containing its name, content-type and other info with /// [`.poll_field_headers()`](#method.poll_field_headers). /// /// 3. 
Poll for the field's data chunks with [`.poll_field_chunk()](#method.poll_field_chunk) /// until `None` is returned, then loop back to step 2. /// /// Any data before the first boundary and past the end of the terminating boundary is ignored /// as it is out-of-spec and should not be expected to be left in the underlying stream intact. /// Please open an issue if you have a legitimate use-case for extraneous data in a multipart request. pub struct Multipart<S: TryStream> where S::Error: Into<ReadError>, { inner: PushChunk<BoundaryFinder<S>, S::Ok>, read_hdr: ReadHeaders, } // Q: why can't we just wrap up these bounds into a trait? // A: https://github.com/rust-lang/rust/issues/24616#issuecomment-112065997 // (The workaround mentioned in a later comment doesn't seem to be worth the added complexity) impl<S> Multipart<S> where S: TryStream, S::Ok: BodyChunk, S::Error: Into<ReadError>, { unsafe_pinned!(inner: PushChunk<BoundaryFinder<S>, S::Ok>); unsafe_unpinned!(read_hdr: ReadHeaders); /// Construct a new `Multipart` with the given body reader and boundary. /// /// The boundary should be taken directly from the `Content-Type: multipart/form-data` header /// of the request. This will add the requisite `--` to the boundary as per /// [IETF RFC 7578 section 4.1](https://tools.ietf.org/html/rfc7578#section-4.1). pub fn with_body<B: Into<String>>(stream: S, boundary: B) -> Self { let mut boundary = boundary.into(); boundary.insert_str(0, "--"); // debug!("Boundary: {}", boundary); Multipart { inner: PushChunk::new(BoundaryFinder::new(stream, boundary)), read_hdr: ReadHeaders::default(), } } pub fn try_from_body_headers(body: S, headers: &HeaderMap) -> Result<Self, ReadError> { fn get_boundary(headers: &HeaderMap) -> Option<String> { Some( headers .get(http::header::CONTENT_TYPE)? .to_str() .ok()? .parse::<Mime>() .ok()? .get_param(mime::BOUNDARY)? 
.to_string(), ) } if let Some(boundary) = get_boundary(headers) { return Ok(Self::with_body(body, boundary)); } Err(ReadError::Parsing("parse multipart failed".into())) } pub fn next_field(&mut self) -> NextField<S> where Self: Unpin, { NextField::new(Pin::new(self)) } /// Same as [`.next_field()`](#method.next_field) but with a receiver of `Pin<&mut Self>`. pub fn next_field_pinned(self: Pin<&mut Self>) -> NextField<S> { NextField::new(self) } /// Poll for the next boundary, returning `true` if a field should follow that boundary, /// or `false` if the request is at an end. See above for the overall flow. /// /// If this returns `Ready(Ok(true))`, you may then begin /// [polling for the headers of the next field](#method.poll_field_headers). /// /// If a field was being read, its contents will be discarded. /// /// This is a low-level call and is expected to be supplemented/replaced by a more ergonomic /// API once more design work has taken place. pub fn poll_has_next_field(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<bool, ReadError>> { self.as_mut().inner().stream().consume_boundary(cx) } /// Poll for the headers of the next field, returning the headers or an error otherwise. /// /// Once you have the field headers, you may then begin /// [polling for field chunks](#method.poll_field_chunk). /// /// In addition to bubbling up errors from the underlying stream, this will also return an /// error if: /// * the headers were corrupted, or: /// * did not contain a `Content-Disposition: form-data` header with a `name` parameter, or: /// * the end of stream was reached before the header segment terminator `\r\n\r\n`, or: /// * the buffer for the headers exceeds a preset size. /// /// This is a low-level call and is expected to be supplemented/replaced by a more ergonomic /// API once more design work has taken place.
/// /// ### Note: Calling This Is Not Enforced /// If this step is skipped then [`.poll_field_chunk()`](#method.poll_field_chunk) /// will return chunks of the header segment which may or may not be desirable depending /// on your use-case. /// /// If you do want to inspect the raw field headers, they are separated by one CRLF (`\r\n`) and /// terminated by two CRLFs (`\r\n\r\n`) after which the field chunks follow. pub fn poll_field_headers(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<FieldHeaders, ReadError>> { unsafe { let this = self.as_mut().get_unchecked_mut(); this.read_hdr.read_headers(Pin::new_unchecked(&mut this.inner), cx) } } /// Poll for the next chunk of the current field. /// /// This returns `Ready(Some(Ok(chunk)))` as long as there are chunks in the field, /// yielding `Ready(None)` when the next boundary is reached. /// /// You may then begin the next field with /// [`.poll_has_next_field()`](#method.poll_has_next_field). /// /// This is a low-level call and is expected to be supplemented/replaced by a more ergonomic /// API once more design work has taken place. /// /// ### Note: Call `.poll_field_headers()` First for Correct Data /// If [`.poll_field_headers()`](#method.poll_field_headers) is skipped then this call /// will return chunks of the header segment which may or may not be desirable depending /// on your use-case. /// /// If you do want to inspect the raw field headers, they are separated by one CRLF (`\r\n`) and /// terminated by two CRLFs (`\r\n\r\n`) after which the field chunks follow. pub fn poll_field_chunk(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Result<S::Ok, ReadError>>> { if !self.read_hdr.is_reading_headers() { self.inner().poll_next(cx) } else { Poll::Ready(None) } } } /// Struct wrapping a stream which allows a chunk to be pushed back to it to be yielded next. 
pub(crate) struct PushChunk<S, T> { stream: S, pushed: Option<T>, } impl<S, T> PushChunk<S, T> { unsafe_pinned!(stream: S); unsafe_unpinned!(pushed: Option<T>); pub(crate) fn new(stream: S) -> Self { PushChunk { stream, pushed: None } } } impl<S: TryStream> PushChunk<S, S::Ok> where S::Ok: BodyChunk, S::Error: Into<ReadError>, { fn push_chunk(mut self: Pin<&mut Self>, chunk: S::Ok) { // if let Some(pushed) = self.as_mut().pushed() { // debug_panic!( // "pushing excess chunk: \"{}\" already pushed chunk: \"{}\"", // show_bytes(chunk.as_slice()), // show_bytes(pushed.as_slice()) // ); // } debug_assert!(!chunk.is_empty(), "pushing empty chunk"); *self.as_mut().pushed() = Some(chunk); } } impl<S: TryStream> Stream for PushChunk<S, S::Ok> where S::Error: Into<ReadError>, { type Item = Result<S::Ok, S::Error>; fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> { if let Some(pushed) = self.as_mut().pushed().take() { return Poll::Ready(Some(Ok(pushed))); } self.stream().try_poll_next(cx) } } #[cfg(test)] mod test { use crate::http::multipart::test_util::mock_stream; use crate::http::multipart::FieldHeaders; use super::Multipart; // use std::convert::Infallible; const BOUNDARY: &str = "boundary"; #[test] fn test_empty_body() { let multipart = Multipart::with_body(mock_stream(&[]), BOUNDARY); pin_mut!(multipart); ready_assert_ok_eq!(|cx| multipart.as_mut().poll_has_next_field(cx), false); } #[test] fn test_no_headers() { let multipart = Multipart::with_body(mock_stream(&[b"--boundary", b"\r\n", b"\r\n", b"--boundary--"]), BOUNDARY); pin_mut!(multipart); ready_assert_ok_eq!(|cx| multipart.as_mut().poll_has_next_field(cx), true); until_ready!(|cx| multipart.as_mut().poll_field_headers(cx)).unwrap_err(); ready_assert_ok_eq!(|cx| multipart.as_mut().poll_has_next_field(cx), false); } #[test] fn test_single_field() { let multipart = Multipart::with_body( mock_stream(&[ b"--boundary\r", b"\n", b"Content-Disposition:", b" form-data; name=", 
b"\"foo\"", b"\r\n\r\n", b"field data", b"\r", b"\n--boundary--", ]), BOUNDARY, ); pin_mut!(multipart); ready_assert_ok_eq!(|cx| multipart.as_mut().poll_has_next_field(cx), true); ready_assert_ok_eq!( |cx| multipart.as_mut().poll_field_headers(cx), FieldHeaders { name: "foo".into(), filename: None, content_type: None, ext_headers: Default::default(), } ); ready_assert_some_ok_eq!(|cx| multipart.as_mut().poll_field_chunk(cx), &b"field data"[..]); ready_assert_eq_none!(|cx| multipart.as_mut().poll_field_chunk(cx)); ready_assert_ok_eq!(|cx| multipart.as_mut().poll_has_next_field(cx), false); } #[test] fn test_two_fields() { let multipart = Multipart::with_body( mock_stream(&[ b"--boundary\r", b"\n", b"Content-Disposition:", b" form-data; name=", b"\"foo\"", b"\r\n\r\n", b"field data", b"\r", b"\n--boundary\r\n", b"Content-Disposition: form-data; name=", b"foo-", b"data", b"; filename=", b"\"foo.txt\"", b"\r\n", b"Content-Type: ", b"text/plain; charset", b"=utf-8", b"\r\n", b"\r\n", b"field data--2\r\n--data--field", b"\r\n--boundary--", ]), BOUNDARY, ); pin_mut!(multipart); ready_assert_ok_eq!(|cx| multipart.as_mut().poll_has_next_field(cx), true); ready_assert_ok_eq!( |cx| multipart.as_mut().poll_field_headers(cx), FieldHeaders { name: "foo".into(), filename: None, content_type: None, ext_headers: Default::default(), } ); ready_assert_some_ok_eq!(|cx| multipart.as_mut().poll_field_chunk(cx), &b"field data"[..]); ready_assert_eq_none!(|cx| multipart.as_mut().poll_field_chunk(cx)); ready_assert_ok_eq!(|cx| multipart.as_mut().poll_has_next_field(cx), true); ready_assert_ok_eq!( |cx| multipart.as_mut().poll_field_headers(cx), FieldHeaders { name: "foo-data".into(), filename: Some("foo.txt".into()), content_type: Some(mime::TEXT_PLAIN_UTF_8), ext_headers: Default::default(), } ); ready_assert_some_ok_eq!(|cx| multipart.as_mut().poll_field_chunk(cx), &b"field data--2\r\n--data--field"[..]); ready_assert_eq_none!(|cx| multipart.as_mut().poll_field_chunk(cx)); 
ready_assert_ok_eq!(|cx| multipart.as_mut().poll_has_next_field(cx), false); } }
35.404092
121
0.587517
ddf691747dd0ebc80cf5bf494d49889954b055b7
944
use advent_of_code::utils::challenges::prelude::*; fn parse(input: &PuzzleInput) -> Vec<i32> { input.trim().split(',').map(|s| s.parse().unwrap()).collect() } fn part_one(input: &PuzzleInput, _args: &RawPuzzleArgs) -> Solution { let mut crabs = parse(input); crabs.sort(); let target = crabs[crabs.len() / 2]; let fuel = crabs.iter().fold(0, |acc, crab| ( acc + (target - crab).abs() )); Answer(fuel as u64) } fn part_two(input: &PuzzleInput, _args: &RawPuzzleArgs) -> Solution { let crabs = parse(input); let highest = crabs.iter().max().unwrap(); let candidates = (0..=*highest).map(|i| { crabs.iter().fold(0, |acc, crab| { let distance = (i - crab).abs(); // See: https://en.wikipedia.org/wiki/Triangular_number acc + (distance * (distance + 1)) / 2 }) }); Answer(candidates.min().unwrap() as u64) } solve!(part_one, part_two);
29.5
69
0.579449
0869e993b02f3d62ca215790ede6b3259c84add6
3,115
/* Copyright 2021 Google LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ use crate::{Once, Provider}; /// Wraps a binding so it can be lazily created. /// /// When `Foo` depends on `Bar`, `Bar` is created before `Foo` is created which may not be desirable /// if creating `Bar` is costly but it will only be used much later or only conditionally. By /// wrapping `Bar` as `Lazy<Bar>`, `Bar` will only be created when [`Lazy<bar>.get()`](#method.get) is /// called. /// /// [`Lazy.get()`](#method.get) is cached and the same instance will be returned if called multiple times. 
/// /// If multiple instances of the object is needed, use [`Provider<T>`](Provider) instead /// /// ``` /// # use lockjaw::{epilogue, injectable, module, component, Cl, Lazy}; /// # use std::cell::RefCell; /// # lockjaw::prologue!("src/lib.rs"); /// pub struct Counter { /// counter: i32, /// } /// /// # #[injectable(scope: crate::MyComponent, container: RefCell)] /// # impl Counter { /// # #[inject] /// # pub fn new() -> Self { /// # Self { /// # counter: 0, /// # } /// # } /// # /// # pub fn get(&self) -> i32 { /// # self.counter /// # } ///# /// # pub fn increment(&mut self) -> i32 { /// # self.counter += 1; /// # self.counter /// # } /// # } /// pub struct Foo { /// pub i: i32, /// } /// /// #[injectable] /// impl Foo { /// #[inject] /// pub fn new(counter: &RefCell<Counter>) -> Foo{ /// Foo { /// i: counter.borrow_mut().increment(), /// } /// } /// } /// /// #[component] /// pub trait MyComponent { /// fn foo(&self) -> Lazy<crate::Foo>; /// /// fn counter(&self) -> &RefCell<Counter>; /// } /// /// pub fn main() { /// let component: Box<dyn MyComponent> = <dyn MyComponent>::new(); /// let counter = component.counter(); /// let lazy_foo = component.foo(); /// // Foo not created yet. /// assert_eq!(counter.borrow().get(), 0); /// /// let foo = lazy_foo.get(); /// assert_eq!(counter.borrow().get(), 1); /// /// // Same instance is reused /// let foo2 = lazy_foo.get(); /// assert_eq!(foo.i, foo2.i); /// } /// epilogue!(); /// ``` pub struct Lazy<'a, T> { provider: Provider<'a, T>, value: Once<T>, } impl<'a, T> Lazy<'a, T> { #[doc(hidden)] pub fn new(provider: Provider<'a, T>) -> Self { Lazy { provider, value: Once::new(), } } /// Creates or retrieves a cached instance and returns a reference to it. pub fn get(&'a self) -> &'a T { self.value.get(|| self.provider.get()) } }
27.8125
106
0.569502
f47a051f1cfaac0e04cac2f5182eab4360221777
7,568
extern crate proc_macro; mod function_component; mod tags; mod attribute; mod child; mod children; mod partial_eq; mod use_effect; mod widget; mod widget_attributes; mod widget_builder; mod widget_props; use function_component::WidgetArguments; use partial_eq::impl_dyn_partial_eq; use proc_macro::TokenStream; use proc_macro_error::proc_macro_error; use quote::quote; use syn::{parse_macro_input, parse_quote}; use use_effect::UseEffect; use widget::ConstructedWidget; use crate::widget::Widget; use crate::widget_props::impl_widget_props; /// A top level macro that works the same as [`rsx`] but provides some additional /// context for building the root widget. #[proc_macro] #[proc_macro_error] pub fn render(input: TokenStream) -> TokenStream { let widget = parse_macro_input!(input as Widget); let kayak_core = get_core_crate(); let result = quote! { let mut context = #kayak_core::KayakContextRef::new(context, None); let parent_id: Option<Index> = None; let children: Option<#kayak_core::Children> = None; #widget context.commit(); }; TokenStream::from(result) } /// A proc macro that turns RSX syntax into structure constructors and calls the /// context to create the widgets. #[proc_macro] #[proc_macro_error] pub fn rsx(input: TokenStream) -> TokenStream { let widget = parse_macro_input!(input as Widget); let result = quote! { #widget }; TokenStream::from(result) } /// A proc macro that turns RSX syntax into structure constructors only. #[proc_macro] #[proc_macro_error] pub fn constructor(input: TokenStream) -> TokenStream { let el = parse_macro_input!(input as ConstructedWidget); let widget = el.widget; let result = quote! { #widget }; TokenStream::from(result) } /// This attribute macro is what allows Rust functions to be generated into /// valid widgets structs. /// /// # Examples /// /// ``` /// #[widget] /// fn MyWidget() { /* ... 
*/ } /// ``` #[proc_macro_attribute] #[proc_macro_error] pub fn widget(args: TokenStream, item: TokenStream) -> TokenStream { let mut widget_args = WidgetArguments::default(); if !args.is_empty() { // Parse stuff.. let parsed = args.to_string(); widget_args.focusable = parsed.contains("focusable"); } let f = parse_macro_input!(item as syn::ItemFn); function_component::create_function_widget(f, widget_args) } /// A derive macro for the `WidgetProps` trait #[proc_macro_derive(WidgetProps, attributes(prop_field))] #[proc_macro_error] pub fn derive_widget_props(item: TokenStream) -> TokenStream { impl_widget_props(item) } #[proc_macro_derive(DynPartialEq)] pub fn dyn_partial_eq_macro_derive(input: TokenStream) -> TokenStream { let ast = syn::parse(input).unwrap(); impl_dyn_partial_eq(&ast) } #[proc_macro_attribute] pub fn dyn_partial_eq(_: TokenStream, input: TokenStream) -> TokenStream { let mut input = parse_macro_input!(input as syn::ItemTrait); let name = &input.ident; let bound: syn::TypeParamBound = parse_quote! { DynPartialEq }; input.supertraits.push(bound); (quote! { #input impl core::cmp::PartialEq for Box<dyn #name> { fn eq(&self, other: &Self) -> bool { self.box_eq(other.as_any()) } } }) .into() } /// Register some state data with an initial value. /// /// Once the state is created, this macro returns the current value, a closure for updating the current value, and /// the raw Binding in a tuple. /// /// For more details, check out [React's documentation](https://reactjs.org/docs/hooks-state.html), /// upon which this macro is based. /// /// # Arguments /// /// * `initial_state`: The initial value for the state /// /// returns: (state, set_state, state_binding) /// /// # Examples /// /// ``` /// # use kayak_core::{EventType, OnEvent}; /// # use kayak_render_macros::use_state; /// /// let (count, set_count, ..) = use_state!(0); /// /// let on_event = OnEvent::new(move |_, event| match event.event_type { /// EventType::Click(..) 
=> { /// set_count(foo + 1); /// } /// _ => {} /// }); /// /// rsx! { /// <> /// <Button on_event={Some(on_event)}> /// <Text size={16.0} content={format!("Count: {}", count)}>{}</Text> /// </Button> /// </> /// } /// ``` #[proc_macro] pub fn use_state(initial_state: TokenStream) -> TokenStream { let initial_state = parse_macro_input!(initial_state as syn::Expr); let kayak_core = get_core_crate(); let result = quote! {{ use #kayak_core::{Bound, MutableBound}; let state = context.create_state(#initial_state).unwrap(); let cloned_state = state.clone(); let set_state = move |value| { cloned_state.set(value); }; let state_value = state.get(); (state.get(), set_state, state) }}; TokenStream::from(result) } /// Registers a side-effect callback for a given set of dependencies. /// /// This macro takes on the form: `use_effect!(callback, dependencies)`. The callback is /// the closure that's ran whenever one of the Bindings in the dependencies array is changed. /// /// Dependencies are automatically cloned when added to the dependency array. This allows the /// original bindings to be used within the callback without having to clone them manually first. /// This can be seen in the example below where `count_state` is used within the callback and in /// the dependency array. /// /// For more details, check out [React's documentation](https://reactjs.org/docs/hooks-effect.html), /// upon which this macro is based. /// /// # Arguments /// /// * `callback`: The side-effect closure /// * `dependencies`: The dependency array (in the form `[dep_1, dep_2, ...]`) /// /// returns: () /// /// # Examples /// /// ``` /// # use kayak_core::{EventType, OnEvent}; /// # use kayak_render_macros::{use_effect, use_state}; /// /// let (count, set_count, count_state) = use_state!(0); /// /// use_effect!(move || { /// println!("Count: {}", count_state.get()); /// }, [count_state]); /// /// let on_event = OnEvent::new(move |_, event| match event.event_type { /// EventType::Click(..) 
=> { /// set_count(foo + 1); /// } /// _ => {} /// }); /// /// rsx! { /// <> /// <Button on_event={Some(on_event)}> /// <Text size={16.0} content={format!("Count: {}", count)} /> /// </Button> /// </> /// } /// ``` #[proc_macro] pub fn use_effect(input: TokenStream) -> TokenStream { let effect = parse_macro_input!(input as UseEffect); effect.build() } /// Helper method for getting the core crate /// /// Depending on the usage of the macro, this will become `crate`, `kayak_core`, /// or `kayak_ui::core`. /// /// # Examples /// /// ``` /// fn my_macro() -> proc_macro2::TokenStream { /// let kayak_core = get_core_crate(); /// quote! { /// let foo = #kayak_core::Foo; /// } /// } /// ``` fn get_core_crate() -> proc_macro2::TokenStream { let found_crate = proc_macro_crate::crate_name("kayak_core"); if let Ok(found_crate) = found_crate { match found_crate { proc_macro_crate::FoundCrate::Itself => quote! { crate }, proc_macro_crate::FoundCrate::Name(name) => { let ident = syn::Ident::new(&name, proc_macro2::Span::call_site()); quote!(#ident) } } } else { quote!(kayak_ui::core) } }
28.238806
114
0.632796
db13f4bba7c34e5ebcd5a952d6b9366cef665955
907
use rand::{Rand, Rng}; use serde_derive::{Deserialize, Serialize}; use super::SenderQueueableMessage; /// A `SenderQueue` message. #[derive(Clone, Debug, Deserialize, Serialize)] pub enum Message<M: SenderQueueableMessage> { /// The announcement that this node has reached the given epoch. EpochStarted(M::Epoch), /// A message of the wrapped algorithm. Algo(M), } impl<M> Rand for Message<M> where M: SenderQueueableMessage + Rand, M::Epoch: Rand, { fn rand<R: Rng>(rng: &mut R) -> Self { let message_type = *rng.choose(&["epoch", "algo"]).unwrap(); match message_type { "epoch" => Message::EpochStarted(rng.gen()), "algo" => Message::Algo(rng.gen()), _ => unreachable!(), } } } impl<M: SenderQueueableMessage> From<M> for Message<M> { fn from(message: M) -> Self { Message::Algo(message) } }
25.194444
68
0.610805
082d7cb7edc39c055c33b33df368773c77abd2f3
152,751
#![warn(missing_docs)] #![feature(decl_macro)] #![feature(proc_macro_hygiene)] #![feature(try_trait)] #![recursion_limit = "512"] use config::CONFIG; use crossbeam_channel::Sender; use hcor::config; use hcor::frontend::emojify; use hcor::possess; use hcor::{Category, Key}; use log::*; use possess::{Possessed, Possession}; use regex::Regex; use rocket::request::LenientForm; use rocket::tokio; use rocket::{post, routes, FromForm, State}; use rocket_contrib::json::Json; use rusoto_dynamodb::{AttributeValue, DynamoDb, DynamoDbClient}; use serde_json::{json, Value}; use std::collections::HashMap; use std::convert::TryInto; pub mod banker; pub mod event; pub mod hacksteader; pub mod market; use hacksteader::Hacksteader; pub fn dyn_db() -> DynamoDbClient { DynamoDbClient::new(if *LOCAL_DB { rusoto_core::Region::Custom { name: "local".to_string(), endpoint: "http://localhost:8000".to_string(), } } else { rusoto_core::Region::UsEast1 }) } const FARM_CYCLE_SECS: u64 = 5; const FARM_CYCLE_MILLIS: u64 = FARM_CYCLE_SECS * 1000; const FARM_CYCLES_PER_MIN: u64 = 60 / FARM_CYCLE_SECS; lazy_static::lazy_static! 
{ pub static ref TOKEN: String = std::env::var("TOKEN").unwrap(); pub static ref ID: String = std::env::var("ID").unwrap(); pub static ref APP_ID: String = std::env::var("APP_ID").unwrap(); pub static ref URL: String = std::env::var("URL").unwrap(); pub static ref HACKSTEAD_PRICE: u64 = std::env::var("HACKSTEAD_PRICE").unwrap().parse().unwrap(); pub static ref LOCAL_DB: bool = std::env::var("LOCAL_DB").is_ok(); } pub fn mrkdwn<S: std::string::ToString>(txt: S) -> Value { json!({ "type": "mrkdwn", "text": txt.to_string(), }) } pub fn plain_text<S: std::string::ToString>(txt: S) -> Value { json!({ "type": "plain_text", "text": txt.to_string(), }) } pub fn comment<S: ToString>(txt: S) -> Value { json!({ "type": "context", "elements": [ mrkdwn(txt) ] }) } pub fn filify<S: ToString>(txt: S) -> String { txt.to_string().to_lowercase().replace(" ", "_") } pub async fn dm_blocks( user_id: String, notif_msg: String, blocks: Vec<Value>, ) -> Result<(), String> { let o = json!({ "channel": user_id, "token": *TOKEN, "blocks": blocks, "text": notif_msg }); debug!("{}", serde_json::to_string_pretty(&o).unwrap()); // TODO: use response let client = reqwest::Client::new(); client .post("https://slack.com/api/chat.postMessage") .bearer_auth(&*TOKEN) .json(&o) .send() .await .map_err(|e| format!("couldn't dm {}: {}", user_id, e))?; Ok(()) } async fn gift_dm( giver: &str, new_owner: &str, possession: &Possession, notif_msg: String, count: usize, ) -> Result<(), String> { dm_blocks(new_owner.to_string(), notif_msg, { // TODO: with_capacity optimization let mut blocks = vec![ json!({ "type": "section", "text": mrkdwn(format!( "<@{}> has been so kind as to gift you {} {} _{}_!", giver, match count { 1 => "a".to_string(), other => format!("*{}*", other), }, emojify(&possession.name), possession.nickname() )) }), json!({ "type": "divider" }), ]; let page = PossessionPage { interactivity: Interactivity::Read, credentials: Credentials::Owner, possession: possession.clone(), }; 
blocks.append(&mut page.blocks()); blocks.push(json!({ "type": "divider" })); blocks.push(comment(format!( "Manage all of your possessions like this one at your <slack://app?team=T0266FRGM&id={}&tab=home|hackstead>", *APP_ID, ))); blocks }) .await } /// `push` should be true if this modal is being put on top of an existing one. fn gotchi_block( gotchi: Possessed<possess::Gotchi>, interactivity: Interactivity, credentials: Credentials, push: bool, ) -> Value { json!({ "type": "section", "text": mrkdwn(format!( "_{} ({}, {})_", emojify(&gotchi.name), gotchi.name, match gotchi.inner.hatch_table { None => format!("{} happiness", gotchi.inner.base_happiness), Some(_) => "ready to hatch!".to_string(), } )), "accessory": { "type": "button", "style": "primary", "text": plain_text(&gotchi.inner.nickname), "value": serde_json::to_string(&PossessionPage { possession: gotchi.into_possession(), interactivity, credentials, }).unwrap(), "action_id": match push { true => "push_possession_page", _ => "possession_page", } } }) } fn inventory_occurences(inventory: Vec<Possession>) -> HashMap<String, Vec<Possession>> { let mut o = HashMap::new(); for possession in inventory.into_iter() { o.entry(possession.name.clone()) .or_insert(vec![]) .push(possession) } o } fn inventory_section( inv_occurrences: HashMap<String, Vec<Possession>>, interactivity: Interactivity, credentials: Credentials, push: bool, user_id: String, ) -> Vec<Value> { let mut blocks = vec![]; blocks.push(json!({ "type": "section", "text": mrkdwn("*Inventory*"), })); let mut inv_entries = inv_occurrences.into_iter().collect::<Vec<_>>(); inv_entries.sort_unstable_by_key(|(_, p)| p.last().unwrap().archetype_handle); for (name, possessions) in inv_entries.into_iter() { // this is safe because you have to have at least one // for it to end up here let last = possessions.last().unwrap().clone(); if possessions.len() == 1 { blocks.push(json!({ "type": "section", "text": mrkdwn(format!( "{} _{}_", emojify(&name), name )), 
            // --- tail of an inventory-section builder whose head lies in an
            // earlier chunk of this file. This arm renders a button that opens
            // a single possession's page directly (used when only one item of
            // this kind exists). ---
            "accessory": {
                "type": "button",
                "style": "primary",
                "text": plain_text(&name),
                "value": serde_json::to_string(&PossessionPage {
                    possession: last,
                    interactivity,
                    credentials,
                }).unwrap(),
                // `push` selects whether the modal is opened fresh or pushed
                // onto an existing modal stack.
                "action_id": match push {
                    false => "possession_page",
                    true => "push_possession_page"
                },
            }
        }));
    } else {
        // Several items of the same kind: show a count and a button that opens
        // the paginated overview of that item name instead of a single page.
        blocks.push(json!({
            "type": "section",
            "text": mrkdwn(format!(
                "*{}* {} _{}_",
                possessions.len(),
                emojify(&name),
                name
            )),
            "accessory": {
                "type": "button",
                "style": "primary",
                "text": plain_text(&name),
                "value": serde_json::to_string(&PossessionOverviewPage {
                    source: PossessionOverviewSource::Hacksteader(user_id.clone()),
                    page: 0,
                    item_name: name,
                    interactivity,
                    credentials,
                }).unwrap(),
                "action_id": match push {
                    false => "possession_overview_page",
                    true => "push_possession_overview_page"
                },
            }
        }));
    }
    }

    blocks
}

/// Renders the "Your Hackagotchi" section of a hackstead page: a header,
/// one block per gotchi (capped at 20 to respect Slack block limits),
/// a total-happiness line, and an explanatory footnote.
///
/// `push` is forwarded to `gotchi_block` and controls whether tapping a
/// gotchi opens a new modal or pushes onto the current modal stack.
fn gotchi_section(
    gotchis: Vec<Possessed<possess::Gotchi>>,
    interactivity: Interactivity,
    credentials: Credentials,
    push: bool,
) -> Vec<Value> {
    let mut blocks = vec![];

    blocks.push(json!({
        "type": "section",
        "text": mrkdwn(match gotchis.len() {
            1 => "*Your Hackagotchi*".into(),
            _ => format!("*Your {} Hackagotchi*", gotchis.len())
        }),
    }));

    // Sum happiness over ALL gotchis, even though only the first 20 get blocks.
    let total_happiness = gotchis.iter().map(|g| g.inner.base_happiness).sum::<u64>();

    for g in gotchis.into_iter().take(20) {
        blocks.push(gotchi_block(g, interactivity, credentials, push));
    }

    blocks.push(json!({
        "type": "section",
        "text": mrkdwn(format!("Total happiness: *{}*", total_happiness))
    }));
    blocks.push(comment(
        "The total happiness of all your gotchi is equivalent to the \
        amount of GP you'll get at the next Harvest.",
    ));

    blocks
}

/// Where the items shown on a `PossessionOverviewPage` come from:
/// either a particular user's hackstead inventory, or a market category.
#[derive(serde::Serialize, serde::Deserialize, Clone)]
pub enum PossessionOverviewSource {
    Hacksteader(String),
    Market(Category),
}

/// One page of a paginated list of same-named possessions.
/// Serialized whole into button `value`s so the interaction handler can
/// round-trip the page state.
#[derive(serde::Serialize, serde::Deserialize, Clone)]
pub struct PossessionOverviewPage {
    source: PossessionOverviewSource,
    page: usize,             // zero-based page index
    item_name: String,       // possessions are filtered to exactly this name
    interactivity: Interactivity,
    credentials: Credentials,
}

impl PossessionOverviewPage {
    // Items per page.
    const PAGE_SIZE: usize = 20;

    /// Modal title, truncated to Slack's 24-character title limit
    /// (21 chars + "...").
    fn title(&self) -> String {
        let mut title = self.item_name.clone();
        if title.len() > 24 {
            title.truncate(24 - 3);
            title.push_str("...");
        }
        title
    }

    /// Builds the Slack modal for this page. `method` is the Slack views
    /// API method ("open"/"push"/etc.). Errors bubble up from `blocks()`.
    async fn modal(self, trigger_id: String, method: &'static str) -> Result<Modal, String> {
        Ok(Modal {
            callback_id: self.callback_id(),
            blocks: self.blocks().await?,
            submit: None,
            title: self.title(),
            method: method.to_string(),
            trigger_id,
            private_metadata: String::new(),
        })
    }

    /*fn modal_update(self, trigger_id: String, page_json: String, view_id: String) -> ModalUpdate {
        ModalUpdate {
            callback_id: self.callback_id(),
            blocks: self.blocks(),
            submit: None,
            title: self.name,
            private_metadata: page_json,
            trigger_id,
            view_id,
            hash: None,
        }
    }*/

    /// Callback id, suffixed with the interactivity mode so the handler can
    /// tell read-only and writable views apart.
    fn callback_id(&self) -> String {
        "possession_overview_page_".to_string() + self.interactivity.id()
    }

    /// Fetches the matching possessions (from the hackstead DB or the market),
    /// sorts them, and renders one page of Block Kit JSON with optional
    /// Previous/Next paging buttons.
    async fn blocks(&self) -> Result<Vec<Value>, String> {
        let Self {
            source,
            item_name,
            page,
            credentials,
            interactivity,
        } = self;

        let inventory: Vec<_> = match source {
            PossessionOverviewSource::Hacksteader(hacksteader) => {
                let hs = Hacksteader::from_db(&dyn_db(), hacksteader.clone()).await?;
                let mut inv: Vec<_> = hs
                    .inventory
                    .into_iter()
                    .filter(|i| i.name == *item_name)
                    .collect();
                // Items that are for sale sort first, cheapest first.
                inv.sort_unstable_by(|a, b| match (a.sale.as_ref(), b.sale.as_ref()) {
                    (Some(a), Some(b)) => a.price.cmp(&b.price),
                    (Some(_), None) => std::cmp::Ordering::Less,
                    (None, Some(_)) => std::cmp::Ordering::Greater,
                    (None, None) => std::cmp::Ordering::Equal,
                });
                inv
            }
            // Market source: search errors are logged and rendered as an
            // empty list rather than propagated.
            PossessionOverviewSource::Market(cat) => market::market_search(&dyn_db(), *cat)
                .await
                .map_err(|e| error!("couldn't search market: {}", e))
                .unwrap_or_default()
                .into_iter()
                .filter(|(_, i)| i.name == *item_name)
                .map(|(sale, mut possession)| {
                    possession.sale.replace(sale);
                    possession
                })
                .collect(),
        };
        // caching this so I can use it later after .into_iter() is called
        let inventory_len = inventory.len();
        let first_item = inventory.first().cloned();

        let mut blocks = inventory
            .into_iter()
            .skip(page * Self::PAGE_SIZE)
            .take(Self::PAGE_SIZE)
            .map(|possession| {
                json!({
                    "type": "section",
                    "text": mrkdwn(format!(
                        "{} _{}_{}",
                        emojify(&item_name),
                        item_name,
                        // Annotate sale info differently depending on whether
                        // we're looking at our own stead or the market.
                        if let Some(hcor::market::Sale { price, .. }) = possession.sale {
                            match source {
                                PossessionOverviewSource::Hacksteader(..) => {
                                    format!(" (selling at *{}gp*)", price)
                                },
                                PossessionOverviewSource::Market(..) => {
                                    format!(
                                        " (sold by *<@{}>*)",
                                        possession.steader,
                                    )
                                }
                            }
                        } else {
                            "".to_string()
                        }
                    )),
                    "accessory": {
                        "type": "button",
                        "style": "primary",
                        // Market listings show the price on the button;
                        // everything else shows the item name.
                        "text": plain_text(match (source, possession.sale.as_ref()) {
                            (PossessionOverviewSource::Market(..), Some(s)) => {
                                format!("{}gp", s.price)
                            }
                            _ => item_name.clone(),
                        }),
                        "value": serde_json::to_string(&(
                            possession,
                            interactivity,
                            credentials
                        )).unwrap(),
                        "action_id": "push_possession_page",
                    }
                })
            })
            .collect::<Vec<_>>();

        // Easter-egg nudge when the listed item is a keepsake.
        if let Some(p) = first_item.filter(|p| p.kind.is_keepsake()) {
            blocks.push(comment(format!(
                "hmm, maybe \"*/hgive <@U01581HFAGZ> {} {}*\" is in your future?",
                inventory_len,
                emojify(&p.name)
            )));
        }

        let needs_back_page = *page != 0;
        let needs_next_page = inventory_len > Self::PAGE_SIZE * (page + 1);
        if needs_back_page || needs_next_page {
            blocks.push(json!({
                "type": "actions",
                "elements": ({
                    let mut buttons = vec![];
                    // Both buttons serialize a mutated clone of this page;
                    // serialization happens immediately after each mutation,
                    // so reusing `current_page` for both is sound.
                    let mut current_page = self.clone();

                    if needs_back_page {
                        // NOTE(review): `mut` on this `&mut` rebinding is
                        // unnecessary (same below).
                        let mut back_page = &mut current_page;
                        back_page.page = *page - 1;
                        buttons.push(json!({
                            "type": "button",
                            "text": plain_text("Previous Page"),
                            "style": "primary",
                            "value": serde_json::to_string(back_page).unwrap(),
                            "action_id": "possession_overview_page"
                        }));
                    }
                    if needs_next_page {
                        let mut next_page = &mut current_page;
                        next_page.page = *page + 1;
                        buttons.push(json!({
                            "type": "button",
                            "text": plain_text("Next Page"),
                            "style": "primary",
                            "value": serde_json::to_string(next_page).unwrap(),
                            "action_id": "possession_overview_page"
                        }));
                    }

                    buttons
                })
            }));
        }

        Ok(blocks)
    }
}

/// Full-detail view of one possession. Serialized into button `value`s so the
/// interaction handler can reconstruct it.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct PossessionPage {
    possession: Possession,
    interactivity: Interactivity,
    credentials: Credentials,
}

impl PossessionPage {
    /// Modal title: the possession's nickname, truncated to Slack's
    /// 24-character title limit (21 chars + "...").
    fn title(&self) -> String {
        let mut title = self.possession.nickname().to_string();
        if title.len() > 24 {
            title.truncate(24 - 3);
            title.push_str("...");
        }
        title
    }

    /// Builds a fresh Slack modal for this possession. `method` is the Slack
    /// views API method name. The possession's key is stashed in
    /// `private_metadata` so submit handlers can find it again.
    fn modal(self, trigger_id: String, method: &'static str) -> Modal {
        Modal {
            callback_id: self.callback_id(),
            blocks: self.blocks(),
            submit: self.submit(),
            title: self.title(),
            method: method.to_string(),
            // NOTE(review): could use field-init shorthand `trigger_id`.
            trigger_id: trigger_id,
            private_metadata: serde_json::to_string(&self.possession.key()).unwrap(),
        }
    }

    /// Same as `modal`, but updates an existing view identified by `view_id`.
    fn modal_update(self, trigger_id: String, view_id: String) -> ModalUpdate {
        ModalUpdate {
            callback_id: self.callback_id(),
            blocks: self.blocks(),
            submit: self.submit(),
            title: self.title(),
            private_metadata: serde_json::to_string(&self.possession.key()).unwrap(),
            trigger_id,
            view_id,
            hash: None,
        }
    }

    /// Submit-button label, present only when the item is for sale and the
    /// viewer may interact with the market: owners see "Take off Market",
    /// other hacksteaders see a buy button; everyone else gets no submit.
    fn submit(&self) -> Option<String> {
        if let Some(sale) = self
            .possession
            .sale
            .as_ref()
            .filter(|_| self.interactivity.market(self.credentials))
        {
            match self.credentials {
                Credentials::Owner => return Some("Take off Market".to_string()),
                Credentials::Hacksteader => return Some(format!("Buy for {} gp", sale.price)),
                _ => {}
            }
        }
        None
    }

    /// Callback id, mirroring `submit()`: sale-related ids when the submit
    /// button performs a market action, otherwise a per-interactivity
    /// possession-page id.
    fn callback_id(&self) -> String {
        if self
            .possession
            .sale
            .as_ref()
            .filter(|_| self.interactivity.market(self.credentials))
            .is_some()
        {
            match self.credentials {
                Credentials::Owner => return "sale_removal".to_string(),
                Credentials::Hacksteader => return "sale_complete".to_string(),
                _ => {}
            }
        }
        "possession_page_".to_string() + self.interactivity.id()
    }

    /// Renders the possession's detail blocks: gotchi actions (if any),
    /// a stats section with image, give/sell actions, harvest history, and a
    /// sign-up nudge for unauthenticated market viewers.
    fn blocks(&self) -> Vec<Value> {
        // TODO: with_capacity optimization
        let mut blocks: Vec<Value> = Vec::new();

        let Self {
            possession,
            interactivity,
            credentials,
        } = self;

        // Helper: renders a row of buttons in Write mode, or a read-only
        // notice otherwise. Each `(label, value)` pair becomes a button with
        // action_id "<prefix>_<label lowercased>".
        let actions = |prefix: &str, buttons: &[(&str, Option<Value>)]| -> Value {
            match interactivity {
                Interactivity::Write => json!({
                    "type": "actions",
                    "elements": buttons.iter().map(|(action, value)| {
                        let mut o = json!({
                            "type": "button",
                            "text": plain_text(action),
                            "action_id": format!("{}_{}", prefix, action.to_lowercase()),
                        });
                        if let Some(v) = value {
                            o.as_object_mut().unwrap().insert("value".to_string(), v.clone());
                        }
                        o
                    }).collect::<Vec<_>>()
                }),
                _ => comment("This page is read only."),
            }
        };

        // Gotchi-only actions: rename, plus hatch when a hatch table exists.
        if let Some(g) = possession.kind.gotchi() {
            blocks.push(actions("gotchi", &{
                let mut a = vec![("Nickname", Some(json!(possession.nickname())))];
                if g.hatch_table.is_some() {
                    a.push((
                        "Hatch",
                        Some(json!(serde_json::to_string(&possession.id).unwrap())),
                    ));
                }
                a
            }));
        }

        let mut text_fields = vec![
            ("description", possession.description.clone()),
            ("kind", possession.name.clone()),
            (
                "owner log",
                possession
                    .ownership_log
                    .iter()
                    .map(|o| format!("[{}]<@{}>", o.acquisition, o.id))
                    .collect::<Vec<_>>()
                    .join(" -> ")
                    .to_string(),
            ),
        ];

        if let Some(g) = possession.kind.gotchi() {
            text_fields.push(("base happiness", g.base_happiness.to_string()));
        }

        let text = text_fields
            .iter()
            .map(|(l, r)| format!("*{}:* _{}_", l, r))
            .collect::<Vec<_>>()
            .join("\n");

        blocks.push(json!({
            "type": "section",
            "text": mrkdwn(text),
            "accessory": {
                "type": "image",
                // Image path derives from the possession's category and a
                // filesystem-safe version of its name.
                "image_url": format!(
                    "http://{}/gotchi/img/{}/{}.png",
                    *URL,
                    format!("{:?}", possession.kind.category()).to_lowercase(),
                    filify(&possession.name)
                ),
                "alt_text": "hackagotchi img",
            }
        }));

        blocks.push(actions("possession", &[("Give", None), ("Sell", None)]));

        // Gotchis also get their lifetime harvest total and per-owner history,
        // newest first.
        if let Some(g) = possession.kind.gotchi() {
            blocks.push(comment(format!(
                "*Lifetime GP harvested: {}*",
                g.harvest_log.iter().map(|x| x.harvested).sum::<u64>(),
            )));

            for owner in g.harvest_log.iter().rev() {
                blocks.push(comment(format!(
                    "{}gp harvested for <@{}>",
                    owner.harvested, owner.id
                )));
            }
        }

        // Viewers with no credentials browsing the market are told they need
        // a hackstead to buy.
        if let (Credentials::None, true) = (credentials, interactivity.market(*credentials)) {
            blocks.push(json!({ "type": "divider" }));
            blocks.push(comment(format!(
                "In order to buy this, you have to have a \
                <slack://app?team=T0266FRGM&id={}&tab=home|hackstead>.",
                std::env::var("APP_ID").expect("no app_id env var")
            )));
        }

        blocks
    }
}

/// Loads a user's hackstead from the DB (ignoring load errors — an absent
/// stead renders the sign-up greeting) and refreshes their App Home tab.
async fn update_user_home_tab(user_id: String) -> Result<(), String> {
    update_home_tab(
        Hacksteader::from_db(&dyn_db(), user_id.clone()).await.ok(),
        user_id.clone(),
    )
    .await
}

/// Publishes the App Home view for `user_id` via Slack's `views.publish`.
/// `hs` is `None` for users who haven't hacksteaded yet (they get the
/// explanation/sign-up view instead).
async fn update_home_tab(hs: Option<Hacksteader>, user_id: String) -> Result<(), String> {
    let o = json!({
        "user_id": user_id,
        "view": {
            "type": "home",
            "blocks": hacksteader_greeting_blocks(hs, Interactivity::Write, Credentials::Owner),
        }
    });

    debug!("home screen: {}", serde_json::to_string_pretty(&o).unwrap());

    let client = reqwest::Client::new();
    client
        .post("https://slack.com/api/views.publish")
        .bearer_auth(&*TOKEN)
        .json(&o)
        .send()
        .await
        .map_err(|e| format!("couldn't publish home tab view: {}", e))?;

    Ok(())
}

/// Renders a fixed-width text progress bar of `size` cells, filled in
/// proportion to `progress_ratio` (expected 0.0..=1.0). The bar is wrapped in
/// backticks with U+2062 (invisible times) so Slack preserves the spacing;
/// filled cells are U+2588 (full block).
fn progress_bar(size: usize, progress_ratio: f32) -> String {
    format!(
        "`\u{2062}{}\u{2062}`",
        (0..size)
            .map(|i| {
                if (i as f32 / size as f32) < progress_ratio {
                    '\u{2588}'
                } else {
                    ' '
                }
            })
            .collect::<String>()
    )
}

/// Renders the full hackstead view: profile header, advancement progress,
/// one section per land tile (plant status, yield/craft progress, actions),
/// land-deed redemption, inventory, and gotchi sections.
fn hackstead_blocks(
    hs: Hacksteader,
    interactivity: Interactivity,
    credentials: Credentials,
) -> Vec<Value> {
    use humantime::format_duration;
    use std::time::SystemTime;

    // TODO: with_capacity optimization
    let mut blocks: Vec<Value> = Vec::new();

    let neighbor_bonuses = hs.neighbor_bonuses();
    let Hacksteader {
        profile,
        mut inventory,
        land,
        user_id,
        gotchis,
        ..
    } = hs;

    let inv_occurrences = inventory_occurences(inventory.clone());

    //let bottom_gotchi = gotchis.len() < 5;
    //let bottom_inventory = inv_occurrences.len() < 5;

    let hs_adv = profile.current_advancement();
    let next_hs_adv = profile.next_advancement();
    let hs_adv_sum = profile.advancements_sum();

    // Header: name, current title, level, xp.
    blocks.push(json!({
        "type": "section",
        "text": mrkdwn(format!(
            "*_<@{}>'s {}_* - *{}lvl* - _{}xp_",
            user_id,
            hs_adv.achiever_title,
            profile.advancements.current_position(profile.xp),
            profile.xp,
        )),
    }));
    // NOTE(review): `duration_since(...).unwrap()` panics if `joined` is in
    // the future (clock skew) — confirm this can't happen.
    blocks.push(comment(format!(
        "founded {} ago (roughly)",
        format_duration(SystemTime::now().duration_since(profile.joined).unwrap()),
    )));
    // Progress toward the next hackstead advancement, if any remain.
    if let Some(na) = next_hs_adv {
        blocks.push({
            let (have, need) = (profile.xp - hs_adv_sum.xp, na.xp);
            json!({
                "type": "section",
                "text": mrkdwn(format!(
                    "Next: *{}*\n{} {}xp to go\n_{}_",
                    na.title,
                    progress_bar(50, have as f32 / need as f32),
                    need - have,
                    na.description
                )),
            })
        });
    }
    blocks.push(comment(format!("Last Advancement: \"{}\"", hs_adv.title)));
    blocks.push(comment(format!(
        concat!(
            "The level of your hackstead allows you to redeem ",
            "Land Deeds for up to {} more pieces of land.",
        ),
        hs_adv_sum.land
    )));
    blocks.push(json!({ "type": "divider" }));

    /*if !bottom_inventory {
        let mut actions = vec![];

        /*
        if !bottom_gotchi {
            actions.push(json!({
                "type": "button",
                "text": plain_text("Hackagotchi"),
                "style": "primary",
                "value": serde_json::to_string(&(&user_id, interactivity, credentials, false)).unwrap(),
                "action_id": "gotchi_overview",
            }));
        }*/
        if !bottom_inventory {
            actions.push(json!({
                "type": "button",
                "text": plain_text("Inventory"),
                "style": "primary",
                "value": serde_json::to_string(&(&user_id, interactivity, credentials, false)).unwrap(),
                "action_id": "inventory_overview",
            }));
        }

        blocks.push(json!({
            "type": "actions",
            "elements": actions
        }));
        blocks.push(json!({ "type": "divider" }));
    }*/

    let tiles_owned = land.len();

    // One section (plus progress/action blocks) per land tile.
    for tile in land.into_iter() {
        if let Some(p) = tile.plant.as_ref() {
            let neighbor_bonuses = neighbor_bonuses
                .clone()
                .bonuses_for_plant(tile.id, p.archetype_handle);
            // `sum` includes neighbor bonuses; `unboosted_sum` deliberately
            // excludes them so xp progress reflects the plant alone.
            let sum = p.advancements_sum(neighbor_bonuses.iter());
            let unboosted_sum = p.neighborless_advancements_sum(std::iter::empty());
            let ca = p.current_advancement();

            blocks.push(json!({
                "type": "section",
                "text": mrkdwn({
                    let mut s = String::new();
                    // Name, title, level, xp, plus an emoji per active item effect.
                    s.push_str(&format!(
                        "*{}* - _{}_ - *{}lvl* - {}xp {}\n\n",
                        p.name,
                        ca.achiever_title,
                        p.advancements.current_position(p.xp),
                        p.xp,
                        p
                            .effects
                            .iter()
                            .filter_map(|e| Some(emojify(
                                &CONFIG
                                    .possession_archetypes
                                    .get(e.item_archetype_handle)?
                                    .name
                            )))
                            .collect::<Vec<String>>()
                            .join("")
                    ));
                    if let Some(na) = p.next_advancement() {
                        let (have, need) = (p.xp - unboosted_sum.xp, na.xp);
                        s.push_str(&format!(
                            "Next: *{}*\n{} {}xp to go\n_{}_",
                            na.title,
                            progress_bar(35, have as f32/need as f32),
                            need - have,
                            na.description
                        ));
                    }
                    s
                }),
                "accessory": {
                    "type": "image",
                    "image_url": format!("http://{}/gotchi/img/plant/{}.gif", *URL, filify(&ca.art)),
                    "alt_text": format!("A healthy, growing {}!", p.name),
                }
            }));

            // Yield countdown, shown only when the plant can yield at all.
            if let (false, Some(base_yield_duration)) = (sum.yields.is_empty(), p.base_yield_duration) {
                blocks.push(json!({
                    "type": "section",
                    "text": mrkdwn(format!(
                        "*Yield*\n{} {:.3} minutes to go",
                        progress_bar(30, 1.0 - p.until_yield/base_yield_duration),
                        (p.until_yield / sum.yield_speed_multiplier) / FARM_CYCLES_PER_MIN as f32
                    )),
                    "accessory": {
                        "type": "button",
                        "text": plain_text("Yield Stats"),
                        "value": serde_json::to_string(&(&user_id, tile.id)).unwrap(),
                        "action_id": "yield_stats",
                    }
                }));
            }

            // Crafting: either progress on the current craft, or a count of
            // recipes whose ingredients the inventory currently satisfies.
            if !sum.recipes.is_empty() {
                if let (Some(craft), Some(recipe)) = (p.craft.as_ref(), p.current_recipe()) {
                    blocks.push(json!({
                        "type": "section",
                        "text": mrkdwn(format!(
                            "*Crafting {}*\n{} {:.3} minutes to go",
                            recipe.title(),
                            progress_bar(30, 1.0 - craft.until_finish/recipe.time),
                            (craft.until_finish / sum.crafting_speed_multiplier) / FARM_CYCLES_PER_MIN as f32
                        ))
                    }));
                } else {
                    blocks.push(json!({
                        "type": "section",
                        "text": mrkdwn(format!(
                            "*{}/{}* recipes craftable",
                            sum.recipes.iter().filter(|r| r.satisfies(&inventory)).count(),
                            sum.recipes.len()
                        )),
                        "accessory": {
                            "type": "button",
                            "text": plain_text("Crafting"),
                            "value": serde_json::to_string(&(tile.id, &user_id, 0)).unwrap(),
                            "action_id": "crafting",
                        }
                    }));
                }
            }
        } else {
            blocks.push(json!({
                "type": "section",
                "text": mrkdwn("*Empty Land*\nOpportunity Awaits!"),
                "accessory": {
                    "type": "image",
                    "image_url": format!("http://{}/gotchi/img/icon/dirt.png", *URL),
                    "alt_text": "Land, waiting to be monopolized upon!",
                }
            }));
        }

        // Per-tile action row: item application / levels for planted tiles,
        // seed planting for empty ones.
        match tile.plant {
            Some(p) => {
                let ca = p.current_advancement();
                let mut actions = vec![];

                // Inventory keepsakes whose item application affects this
                // plant species, keyed by possession id.
                let applicables: Vec<String> = inventory
                    .iter()
                    .cloned()
                    .filter_map(|x| {
                        x.kind.keepsake()?.item_application.as_ref().filter(|a| {
                            a.effects.iter().any(|e| e.keep_plants.allows(&p.name))
                        })?;
                        Some(x.id.to_simple().to_string())
                    })
                    .collect();

                if !applicables.is_empty() && interactivity.write() {
                    actions.push(json!({
                        "type": "button",
                        "text": plain_text("Apply Item"),
                        "style": "primary",
                        "value": serde_json::to_string(&(
                            tile.id.to_simple().to_string(),
                            user_id.clone()
                        )).unwrap(),
                        "action_id": "item_apply",
                    }))
                }

                actions.push(json!({
                    "type": "button",
                    "text": plain_text("Levels"),
                    "value": serde_json::to_string(&(p.archetype_handle, p.xp)).unwrap(),
                    "action_id": "levels",
                }));

                blocks.push(json!({
                    "type": "actions",
                    "elements": actions,
                }));

                blocks.push(comment(format!("Last Advancement: \"{}\"", ca.title)));
            }
            None => {
                let seeds: Vec<Possessed<possess::Seed>> = inventory
                    .iter()
                    .cloned()
                    .filter_map(|p| p.try_into().ok())
                    .collect();

                blocks.push(if seeds.is_empty() {
                    comment(":seedlet: No seeds! See if you can buy some on the /hackmarket")
                } else if let Interactivity::Write = interactivity {
                    json!({
                        "type": "actions",
                        "elements": [{
                            "type": "button",
                            "text": plain_text("Plant Seed"),
                            "style": "primary",
                            "value": tile.id.to_simple().to_string(),
                            "action_id": "seed_plant",
                        }],
                    })
                } else {
                    comment(":seedling: No planting seeds for you! This page is read only.")
                });
            }
        }
    }

    // Float land-unlocking keepsakes to the front: deeds whose xp requirement
    // is met sort before ones still gated on xp.
    inventory.sort_unstable_by(|a, b| {
        let l = a
            .kind
            .keepsake()
            .and_then(|k| k.unlocks_land.as_ref())
            .map(|lu| lu.requires_xp);
        let r = b
            .kind
            .keepsake()
            .and_then(|k| k.unlocks_land.as_ref())
            .map(|lu| lu.requires_xp);

        match (l, r) {
            (Some(true), Some(false)) => std::cmp::Ordering::Less,
            (Some(false), Some(true)) => std::cmp::Ordering::Greater,
            _ => std::cmp::Ordering::Equal,
        }
    });

    // Offer to redeem the first usable land deed: xp-gated deeds count only
    // when the advancement cap still allows more tiles.
    if let Some(land_deed) = inventory.iter().find_map(|possession| {
        possession
            .kind
            .keepsake()?
            .unlocks_land
            .as_ref()
            .filter(|cert| {
                if cert.requires_xp {
                    tiles_owned < hs_adv_sum.land.try_into().unwrap()
                } else {
                    true
                }
            })
            .map(|_| possession)
    }) {
        blocks.push(json!({ "type": "divider" }));
        blocks.push(json!({
            "type": "actions",
            "elements": [{
                "type": "button",
                "text": plain_text("Redeem Land Deed"),
                "style": "primary",
                "value": land_deed.id.to_simple().to_string(),
                "action_id": "unlock_land",
            }],
        }));
    }

    //if bottom_inventory {
    blocks.push(json!({ "type": "divider" }));

    if inv_occurrences.is_empty() {
        blocks.push(comment("Your inventory is empty"));
    } else {
        blocks.append(&mut inventory_section(
            inv_occurrences,
            interactivity,
            credentials,
            false,
            user_id.clone(),
        ));
    }
    //}

    //if bottom_gotchi && gotchis.len() > 0 {
    // NOTE(review): `gotchis.len() > 0` could be `!gotchis.is_empty()`.
    if gotchis.len() > 0 {
        blocks.push(json!({ "type": "divider" }));
        blocks.append(&mut gotchi_section(
            gotchis,
            interactivity,
            credentials,
            false,
        ));
    }

    // Read-only snapshots get a footer pointing viewers at their own stead.
    if let Interactivity::Read = interactivity {
        blocks.push(json!({ "type": "divider" }));
        blocks.push(comment(format!(
            "This is a read-only snapshot of <@{}>'s Hackagotchi Hackstead at a specific point in time. \
            You can manage your own Hackagotchi Hackstead in real time at your \
            <slack://app?team=T0266FRGM&id={}&tab=home|hackstead>.",
            &user_id,
            *APP_ID,
        )));
    }

    debug!(
        "{}",
        serde_json::to_string_pretty(&json!( { "blocks": blocks.clone() })).unwrap()
    );

    blocks
}

macro_rules!
hacksteader_opening_blurb {
    // Marketing copy shown to users who haven't hacksteaded yet.
    // A macro (not a const) so `format!` interpolation stays available.
    () => {
        format!(
            "
*Build Your Own Hackstead With Hackagotchi!*
:ear_of_rice: *Grow plants* to *produce and craft* cool items!
:adorpheus: Have a chance to *hatch Hackagotchi* that can help you *earn money* and *boost crops*!
:money_with_wings: *Buy and barter* with other Hack Clubbers on a *real-time market*!
_Hacksteading is *free*!_
Each Hacksteader starts off with a :dirt: *single plot of land* to grow crops \
and a :nest_egg: *Nest Egg* to get started! \
Grow your Hacksteading empire by starting today!
",
        )
    }
}

/// Blocks for non-hacksteaders: the opening blurb plus a confirm-guarded
/// sign-up button (`hackstead_confirm`).
fn hackstead_explanation_blocks() -> Vec<Value> {
    vec![
        json!({
            "type": "section",
            "text": mrkdwn(hacksteader_opening_blurb!()),
        }),
        json!({
            "type": "actions",
            "elements": [{
                "type": "button",
                "action_id": "hackstead_confirm",
                "style": "danger",
                "text": plain_text("Monopolize on Adorableness?"),
                "confirm": {
                    "style": "danger",
                    "title": plain_text("Let's Hackstead, Fred!"),
                    "text": mrkdwn(
                        "(P.S. once you click that button, \
                        expect a direct message from banker on what to do next!)"
                    ),
                    "deny": plain_text("I'm short on GP"),
                    "confirm": plain_text("LET'S HACKSTEAD, FRED!"),
                }
            }]
        }),
    ]
}

/// Returns Slack JSON displaying someone's hackstead if they're
/// registered, if not, this command will greet them with an explanation
/// of what hacksteading is and how they can get a hackstead of their own.
fn hacksteader_greeting_blocks(
    hacksteader: Option<Hacksteader>,
    interactivity: Interactivity,
    creds: Credentials,
) -> Vec<Value> {
    let o = match hacksteader {
        Some(hs) => hackstead_blocks(hs, interactivity, creds),
        None => hackstead_explanation_blocks(),
    };

    debug!("{}", serde_json::to_string_pretty(&o).unwrap());

    o
}

/// Renders the market view for one category: a summary comment of the
/// viewer's share of market value, followed by one section per distinct item
/// name (lowest price + count), separated by dividers.
async fn hackmarket_blocks(cat: Category, viewer: String) -> Vec<Value> {
    use config::ArchetypeHandle;

    // Search errors are logged and rendered as an empty market.
    let sales = market::market_search(&dyn_db(), cat)
        .await
        .map_err(|e| error!("couldn't search market: {}", e))
        .unwrap_or_default();

    let (all_goods_count, all_goods_price) =
        (sales.len(), sales.iter().map(|(s, _)| s.price).sum::<u64>());
    let (your_goods_count, your_goods_price) = sales
        .iter()
        .filter(|(_, p)| p.steader == viewer)
        .map(|(s, _)| s.price)
        .fold((0, 0), |(n, sum), p| (n + 1, sum + p));

    // things for sale, sorted by the type of thing they are.
    let entries: Vec<(String, (u64, usize))> = {
        // name -> (lowest price, listing count, archetype handle for sorting)
        let mut entries: HashMap<String, (u64, usize, ArchetypeHandle)> = Default::default();
        for (sale, p) in sales.into_iter() {
            entries
                .entry(sale.market_name.clone())
                .and_modify(|e| {
                    if sale.price < e.0 {
                        e.0 = sale.price
                    }
                    e.1 += 1;
                })
                .or_insert((sale.price, 1, p.archetype_handle));
        }
        let mut v: Vec<_> = entries.into_iter().collect();
        v.sort_by_key(|&(_, (_, _, ah))| ah);
        v.into_iter()
            .map(|(name, (lowest_price, count, _))| (name, (lowest_price, count)))
            .collect()
    };

    let entry_count = entries.len();

    // NOTE(review): if the market is empty, `all_goods_price` is 0 and the
    // percentage below is NaN — confirm that's acceptable in the rendered text.
    std::iter::once(comment(format!(
        concat!(
            "Your *{}* goods cost *{}gp* in total, ",
            "*{}%* of the market's ",
            "_{}gp_ value across _{}_ items.",
        ),
        your_goods_count,
        your_goods_price,
        your_goods_price as f32 / all_goods_price as f32 * 100.0,
        all_goods_price,
        all_goods_count,
    )))
    .chain(
        entries
            .into_iter()
            .flat_map(|(name, (lowest_price, count))| {
                std::iter::once(json!({
                    "type": "section",
                    "fields": [
                        mrkdwn(format!(
                            "{} _{}_",
                            emojify(&name),
                            name,
                        )),
                    ],
                    "accessory": {
                        "type": "button",
                        "style": "primary",
                        "text": plain_text(format!(
                            "{} for sale starting at {}gp",
                            count,
                            lowest_price
                        )),
                        "action_id": "possession_market_overview_page",
                        "value": serde_json::to_string(&(name, cat)).unwrap(),
                    }
                }))
                .chain(std::iter::once(json!({ "type": "divider" })))
            })
            // Drop the trailing divider after the last entry.
            .take((entry_count * 2).saturating_sub(1)),
    )
    .collect()
}

/// Form payload Slack POSTs for a slash command invocation.
#[derive(FromForm, Debug, Clone)]
struct SlashCommand {
    token: String,
    team_id: String,
    team_domain: String,
    channel_id: String,
    channel_name: String,
    user_id: String,
    user_name: String,
    command: String,
    text: String,
    response_url: String,
    trigger_id: String,
}

/// `/hackmarket [gotchi|g]` slash command: opens the market modal for the
/// requested category (defaulting to Misc).
/// NOTE(review): the `'a` lifetime parameter appears unused.
#[post("/hackmarket", data = "<slash_command>")]
async fn hackmarket<'a>(slash_command: LenientForm<SlashCommand>) -> Result<(), String> {
    debug!("{} | {}", slash_command.command, slash_command.text);

    Modal {
        method: "open".to_string(),
        trigger_id: slash_command.trigger_id.clone(),
        callback_id: "hackstreet_modal".to_string(),
        title: "Hackstreet!".to_string(),
        private_metadata: String::new(),
        blocks: hackmarket_blocks(
            match slash_command.text.as_str() {
                "gotchi" | "g" => Category::Gotchi,
                _ => Category::Misc,
            },
            slash_command.user_id.clone(),
        )
        .await,
        submit: None,
    }
    .launch()
    .await?;

    Ok(())
}

/// Global stats view: tallies every planted tile by plant archetype.
/// (Continues past the end of this chunk.)
pub async fn stateofsteading_blocks() -> Vec<Value> {
    let profiles = hcor::Profile::fetch_all(&dyn_db()).await.unwrap();
    let tiles = hacksteader::Tile::fetch_all(&dyn_db()).await.unwrap();

    struct PlantEntry {
        owner: String,
        seed_from: String,
        level: usize,
    }

    let mut archetype_occurrences: HashMap<config::PlantArchetype, Vec<PlantEntry>> =
        Default::default();

    for tile in tiles.iter() {
        let plant = match &tile.plant {
            Some(plant) => plant,
            None => continue,
        };
        archetype_occurrences
            .entry((**plant).clone())
            .or_default()
            .push(PlantEntry {
                owner: tile.steader.clone(),
                // Fallback user id when a plant has no recorded pedigree.
                // NOTE(review): `unwrap_or` allocates eagerly; `unwrap_or_else`
                // would avoid the allocation on the common path.
                seed_from: plant
                    .pedigree
                    .last()
                    .map(|x| x.id.clone())
                    .unwrap_or("U013STH0TNG".to_string()),
                level: plant.advancements.current_position(plant.xp),
            });
    }

    std::iter::once(json!({
        "type": "section",
        "text": mrkdwn(format!(
            concat!(
                "Total Hacksteaders: *{}*\n",
                "Total Tiles: *{}*\n",
                "{}",
), profiles.len(), tiles.len(), archetype_occurrences .iter() .map(|(plant_archetype, plants)| format!( "*{}* _{}_ plants", plants.len(), plant_archetype.name )) .collect::<Vec<String>>() .join("\n") )), "accessory": { "type": "image", "image_url": format!("http://{}/gotchi/img/icon/seedlet.png", *URL), "alt_text": "happy shiny better hackstead", } })) .chain( archetype_occurrences .iter_mut() .map(|(plant_archetype, plants)| { plants.sort_by_key(|p| p.level); json!({ "type": "section", "text": mrkdwn( plants .iter_mut() .map(|plant| format!( "<@{}> grows a *{}lvl* _{}_ from <@{}>'s seed", plant.owner, plant.level, plant_archetype.name, plant.seed_from )) .collect::<Vec<_>>() .join("\n") ), "accessory": { "type": "image", "image_url": format!( "http://{}/gotchi/img/plant/{}.gif", *URL, filify(&plant_archetype.advancements.base.art) ), "alt_text": "happy shiny plant give u stuffs", } }) }), ) .collect() } #[post("/stateofsteading", data = "<slash_command>")] async fn stateofsteading<'a>(slash_command: LenientForm<SlashCommand>) -> Result<(), String> { Modal { method: "open".to_string(), trigger_id: slash_command.trigger_id.clone(), callback_id: "stateofsteading_modal".to_string(), title: "All the Steaders!".to_string(), private_metadata: String::new(), blocks: stateofsteading_blocks().await, submit: None, } .launch() .await?; Ok(()) } #[post("/hgive", data = "<slash_command>")] async fn hgive<'a>(slash_command: LenientForm<SlashCommand>) -> Json<Value> { use regex::Regex; fn res<S: std::string::ToString>(s: S) -> Json<Value> { debug!("{}", s.to_string()); Json(json!({ "blocks": [{ "type": "section", "text": mrkdwn(s), }], "response_type": "ephemeral", })) } lazy_static::lazy_static!( static ref HGIVE: Regex = Regex::new("^<@([A-z0-9]+)\\|.+>( [0-9]+)? 
:(.+):$").unwrap(); ); debug!("trying /hgive {}", slash_command.text); let c = match HGIVE.captures(&slash_command.text) { Some(c) => c, None => return res("Invalid syntax!"), }; debug!("{:?}", c); let receiver = match c.get(1) { Some(s) => s.as_str().to_string(), None => return res("Couldn't parse receiver?"), }; if &receiver == &slash_command.user_id { return res(format!( concat!("Can't give {}; ", "you can't give stuff to yourself! ",), slash_command.text )); } let amount = c .get(2) .and_then(|x| x.as_str().trim().parse().ok()) .unwrap_or(1); let possession_name = match c.get(3) { Some(a) => a.as_str().replace("_", " ").to_lowercase(), None => return res("Couldn't parse possession name?"), }; let (archetype_handle, possession_archetype) = match CONFIG .possession_archetypes .iter() .enumerate() .find(|(_, x)| x.name.to_lowercase() == possession_name) { Some(ah) => ah, None => return res(format!("no possession by name of {}", possession_name)), }; let user = slash_command.user_id.to_string(); let hs = match Hacksteader::from_db(&dyn_db(), user.clone()).await { Ok(hs) => hs, Err(_) => { return res(format!( concat!( "Can't give {}; ", "you don't have a hackstead! ", "try /hstead to get started!" ), slash_command.text )) } }; let possessions = hs .inventory .into_iter() .filter(|p| p.archetype_handle == archetype_handle) .take(amount) .collect::<Vec<possess::Possession>>(); if possessions.len() != amount { return match possessions.len() { 0 => res(format!("You don't have any {}", possession_name)), more => res(format!( "You only have {} {}, not {}!", more, possession_name, amount )), }; } else if amount == 0 { res("Well, I mean ... that's not really anything but ... 
ok") } else { let notif_msg = format!( "<@{}> gave <@{}> {} {}!", user, receiver, amount, possession_archetype.name ); let res_msg = json!({ "blocks": [ { "type": "section", "text": mrkdwn(format!( "<@{}> shall soon happen across *{}* of <@{}>'s :{}: _{}_!", receiver, amount, user, possession_name.replace(" ", "_"), possession_archetype.name, )), "accessory": { "type": "image", "image_url": format!( "http://{}/gotchi/img/misc/{}.png", *URL, filify(&possession_name) ), "alt_text": "hackagotchi img", } }, comment("TAKE THIS AND DONT TELL MOM") ], "response_type": "in_channel", "text": notif_msg }); tokio::spawn(async move { debug!("I mean this happens?"); let _ = gift_dm( &user, &receiver, possessions.first().unwrap(), notif_msg, amount, ) .await .map_err(|e| error!("{}", e)); for possession in possessions { match Hacksteader::transfer_possession( &dyn_db(), receiver.clone(), possess::Acquisition::Trade, Key::misc(possession.id), ) .await { Err(e) => { let _ = banker::message(e).await.map_err(|e| error!("{}", e)); } Ok(_) => {} } } }); Json(res_msg) } } #[post("/egghatchwhen", data = "<slash_command>")] async fn egghatchwhen<'a>( slash_command: LenientForm<SlashCommand>, to_farming: State<'_, Sender<FarmingInputEvent>>, ) -> Json<Value> { use rand::seq::SliceRandom; let SlashCommand { text, user_id, .. } = (*slash_command).clone(); if text != "" { return Json(json!({ "response_type": "ephemeral", })); } fn res<S: std::string::ToString>(s: S) -> Json<Value> { Json(json!({ "blocks": [{ "type": "section", "text": mrkdwn(s), }], "response_type": "in_channel", })) } let user = user_id.to_string(); let hs = match Hacksteader::from_db(&dyn_db(), user.clone()).await { Ok(hs) => hs, Err(_) => { return res(concat!( "Can't open one of your eggs; ", "you don't have a hackstead! ", "try /hstead to get started!" 
)) } }; let egg_search = hs .gotchis .into_iter() .filter(|p| p.inner.hatch_table.is_some()) .map(|p| p.id) .collect::<Vec<uuid::Uuid>>() .choose(&mut rand::thread_rng()) .map(|id| id.clone()); let egg_id = match egg_search { None => return res("You don't have any eggs to hatch!"), Some(id) => id, }; to_farming .send(FarmingInputEvent::ActivateUser(user.clone())) .unwrap(); to_farming .send(FarmingInputEvent::HatchEgg(egg_id, user.clone())) .unwrap(); res("Selected one of your eggs and hatched it!") } #[post("/hackstead", data = "<slash_command>")] async fn hackstead<'a>(slash_command: LenientForm<SlashCommand>) -> Json<Value> { debug!("{:#?}", slash_command); lazy_static::lazy_static! { static ref HACKSTEAD: Regex = Regex::new( "(<@([A-z0-9]+)|(.+)>)?" ).unwrap(); } let captures = HACKSTEAD.captures(&slash_command.text); debug!("captures: {:#?}", captures); let user = captures .and_then(|c| c.get(2).map(|x| x.as_str())) .unwrap_or(&slash_command.user_id); let hs = Hacksteader::from_db(&dyn_db(), user.to_string()).await; Json(json!({ "blocks": hacksteader_greeting_blocks( hs.ok(), Interactivity::Read, Credentials::None ), "response_type": "ephemeral", })) } #[derive(FromForm, Debug)] struct ActionData { payload: String, } #[derive(serde::Deserialize, Debug)] pub struct Interaction { trigger_id: String, actions: Vec<Action>, user: User, view: Option<View>, } #[derive(serde::Deserialize, Debug)] pub struct View { private_metadata: String, callback_id: String, root_view_id: String, } #[derive(serde::Deserialize, Debug)] pub struct User { id: String, } #[derive(serde::Deserialize, Debug)] pub struct Action { action_id: Option<String>, name: Option<String>, #[serde(default)] value: String, } #[derive(Default)] pub struct Modal { method: String, trigger_id: String, callback_id: String, title: String, private_metadata: String, blocks: Vec<Value>, submit: Option<String>, } impl Modal { async fn launch(self) -> Result<Value, String> { let submit = self.submit.clone(); let 
method = self.method.clone(); let mut o = json!({ "trigger_id": self.trigger_id, "view": self.view() }); if let Some(submit_msg) = submit { o["view"] .as_object_mut() .unwrap() .insert("submit".to_string(), plain_text(submit_msg)); } let client = reqwest::Client::new(); client .post(&format!("https://slack.com/api/views.{}", method)) .bearer_auth(&*TOKEN) .json(&o) .send() .await .map_err(|e| format!("couldn't open modal: {}", e))?; debug!("{}", serde_json::to_string_pretty(&o).unwrap()); Ok(o) } fn view(self) -> Value { json!({ "type": "modal", "private_metadata": self.private_metadata, "callback_id": self.callback_id, "title": plain_text(self.title), "blocks": self.blocks }) } } #[derive(Default)] pub struct ModalUpdate { trigger_id: String, callback_id: String, title: String, private_metadata: String, hash: Option<String>, view_id: String, blocks: Vec<Value>, submit: Option<String>, } impl ModalUpdate { async fn launch(self) -> Result<Value, String> { let mut o = json!({ "trigger_id": self.trigger_id, "view_id": self.view_id, "view": { "type": "modal", "private_metadata": self.private_metadata, "callback_id": self.callback_id, "title": plain_text(self.title), "blocks": self.blocks } }); if let Some(hash) = self.hash { o.as_object_mut() .unwrap() .insert("hash".to_string(), json!(hash)); } if let Some(submit_msg) = self.submit { o["view"] .as_object_mut() .unwrap() .insert("submit".to_string(), plain_text(submit_msg)); } let client = reqwest::Client::new(); client .post("https://slack.com/api/views.update") .bearer_auth(&*TOKEN) .json(&o) .send() .await .map_err(|e| format!("couldn't open modal: {}", e))?; debug!("{}", serde_json::to_string_pretty(&o).unwrap()); Ok(o) } } #[derive(rocket::Responder)] pub enum ActionResponse { Json(Json<Value>), Ok(()), } #[derive(Clone, Copy, serde::Serialize, serde::Deserialize)] enum Interactivity { Read, Write, Buy, } impl Interactivity { fn id(self) -> &'static str { use Interactivity::*; match self { Read => "static", Write 
=> "dynamic", Buy => "market", } } } impl Interactivity { fn market(&self, creds: Credentials) -> bool { match self { Interactivity::Buy => true, Interactivity::Write => creds == Credentials::Owner, _ => false, } } fn write(&self) -> bool { match self { Interactivity::Write => true, _ => false, } } } #[derive(Debug, PartialEq, Clone, Copy, serde::Serialize, serde::Deserialize)] pub enum Credentials { Owner, Hacksteader, None, } #[post("/interact", data = "<action_data>")] async fn action_endpoint( to_farming: State<'_, Sender<FarmingInputEvent>>, action_data: LenientForm<ActionData>, ) -> Result<ActionResponse, String> { debug!("{:?}", action_data); let v = serde_json::from_str::<Value>(&action_data.payload).unwrap(); debug!("action data: {:#?}", v); if let Some("view_submission") = v.get("type").and_then(|t| t.as_str()) { debug!("right type!"); let view = v.get("view").and_then(|view| { let parsed_view = serde_json::from_value::<View>(view.clone()).ok()?; let key_json = &parsed_view.private_metadata; let key: Option<Key> = match serde_json::from_str(&key_json) { Ok(k) => Some(k), Err(e) => { error!("couldn't parse {}: {}", key_json, e); None } }; Some(( parsed_view, key, v.get("trigger_id")?.as_str()?, view.get("state").and_then(|s| s.get("values").cloned())?, serde_json::from_value::<User>(v.get("user")?.clone()).ok()?, )) }); if let Some((view, Some(key), trigger_id, values, user)) = view { debug!("view state values: {:#?}", values); match view.callback_id.as_str() { "sale_removal" => { info!("Revoking sale"); market::take_off_market(&dyn_db(), key).await?; return Ok(ActionResponse::Json(Json(json!({ "response_action": "clear", })))); } "sale_complete" => { info!("Completing sale!"); let possession = hacksteader::get_possession(&dyn_db(), key).await?; if let Some(sale) = possession.sale.as_ref() { banker::invoice( &user.id, sale.price, &format!( "hackmarket purchase buying {} at {}gp :{}:{} from <@{}>", possession.name, sale.price, key.id, key.category as u8, 
possession.steader, ), ) .await?; } return Ok(ActionResponse::Json(Json(json!({ "response_action": "clear", })))); } _ => {} }; if let Some(Value::String(nickname)) = values .get("gotchi_nickname_block") .and_then(|i| i.get("gotchi_nickname_input")) .and_then(|s| s.get("value")) { // update the nickname in the DB let db = dyn_db(); db.update_item(rusoto_dynamodb::UpdateItemInput { table_name: hcor::TABLE_NAME.to_string(), key: key.into_item(), update_expression: Some("SET nickname = :new_name".to_string()), expression_attribute_values: Some( [( ":new_name".to_string(), AttributeValue { s: Some(nickname.clone()), ..Default::default() }, )] .iter() .cloned() .collect(), ), ..Default::default() }) .await .map_err(|e| format!("Couldn't change nickname in database: {}", e))?; // TODO: parse what the above could return let mut possession = hacksteader::get_possession(&db, key).await?; let gotchi = possession .kind .gotchi_mut() .ok_or("can only nickname gotchi".to_string())?; // update the nickname on the Gotchi, gotchi.nickname = nickname.clone(); let page = PossessionPage { credentials: Credentials::Owner, interactivity: Interactivity::Write, possession, }; // update the page in the background with the new gotchi data page.modal_update(trigger_id.to_string(), view.root_view_id) .launch() .await?; // update the home tab to_farming .send(FarmingInputEvent::ActivateUser(user.id.clone())) .unwrap(); // this will close the "enter nickname" modal return Ok(ActionResponse::Ok(())); } else if let Some(price) = values .get("possession_sell_price_block") .and_then(|i| i.get("possession_sell_price_input")) .and_then(|s| s.get("value")) .and_then(|x| x.as_str()) .and_then(|s| s.parse::<u64>().ok()) { let possession = hacksteader::get_possession(&dyn_db(), key).await?; banker::invoice( &user.id, price / 20_u64, &format!( "hackmarket fees for selling {} at {}gp :{}:{}", possession.name, price, possession.id, possession.kind.category() as u8 ), ) .await?; return 
Ok(ActionResponse::Ok(())); } else if let Some(Value::String(new_owner)) = values .get("possession_give_receiver_block") .and_then(|i| i.get("possession_give_receiver_input")) .and_then(|s| s.get("selected_user")) { info!( "giving {} from {} to {}", view.private_metadata, user.id, new_owner ); if user.id == *new_owner { debug!("self giving attempted"); return Ok(ActionResponse::Json(Json(json!({ "response_action": "errors", "errors": { "possession_give_receiver_input": "absolutely not okay", } })))); } // update the owner in the DB Hacksteader::transfer_possession( &dyn_db(), new_owner.clone(), possess::Acquisition::Trade, key, ) .await?; // update the home tab // TODO: make this not read from the database update_user_home_tab(user.id.clone()).await?; let possession = hacksteader::get_possession(&dyn_db(), key).await?; let notif_msg = format!("<@{}> has gifted you a {}!", user.id, possession.nickname()); // DM the new_owner about their new acquisition! gift_dm(&user.id, new_owner, &possession, notif_msg, 1).await?; // close ALL THE MODALS!!! 
return Ok(ActionResponse::Json(Json(json!({ "response_action": "clear", })))); } } else if let Some((view, None, _trigger_id, values, user)) = view { debug!("view state values: {:#?}", values); match view.callback_id.as_str() { "crafting_confirm_modal" => { debug!("crafting confirm modal"); let (tile_id, recipe_archetype_handle): (uuid::Uuid, config::ArchetypeHandle) = serde_json::from_str(&view.private_metadata) .map_err(|e| error!("{}", e)) .unwrap(); to_farming .send(FarmingInputEvent::BeginCraft { tile_id, recipe_archetype_handle, }) .expect("couldn't send to farming"); return Ok(ActionResponse::Json(Json(json!({ "response_action": "clear", })))); } _ => {} }; if let Some((tile_id, seed_id)) = values .get("seed_plant_input") .and_then(|i| i.get("seed_plant_select")) .and_then(|s| s.get("selected_option")) .and_then(|s| s.get("value")) .and_then(|s| s.as_str()) .and_then(|v| serde_json::from_str(v).ok()) { debug!("planting seed!"); let db = dyn_db(); let seed = Hacksteader::take(&db, Key::misc(seed_id)) .await .map_err(|e| { let a = format!("couldn't delete seed: {}", e); error!("{}", a); a })? 
.try_into() .map_err(|e| { let a = format!("seed_id wrong type: {}", e); error!("{}", a); a })?; to_farming .send(FarmingInputEvent::PlantSeed( tile_id, hacksteader::Plant::from_seed(seed), )) .unwrap(); to_farming .send(FarmingInputEvent::ActivateUser(user.id.clone())) .unwrap(); update_user_home_tab(user.id).await.map_err(|e| { let a = format!("{}", e); error!("{}", a); a })?; return Ok(ActionResponse::Ok(())); } if let Some((tile_id, item_id)) = values .get("item_apply_input") .and_then(|i| i.get("item_apply_select")) .and_then(|s| s.get("selected_option")) .and_then(|s| s.get("value")) .and_then(|s| s.as_str()) .and_then(|v| serde_json::from_str(v).ok()) { debug!("applying item!"); to_farming .send(FarmingInputEvent::ApplyItem( ItemApplication { tile: tile_id, item: item_id, }, user.id.clone(), )) .unwrap(); return Ok(ActionResponse::Ok(())); } } } let mut i: Interaction = serde_json::from_str(&action_data.payload).map_err(|e| { let a = format!("bad data: {}", e); error!("{}", a); a })?; debug!("{:#?}", i); let action = i.actions.pop().ok_or_else(|| "no action?".to_string())?; let route = action .action_id .or(action.name.clone()) .ok_or("no action name".to_string())?; let output_json = match route.as_str() { "hackstead_confirm" => { info!("confirming new user!"); if !hacksteader::exists(&dyn_db(), i.user.id.clone()).await { banker::invoice(&i.user.id, *HACKSTEAD_PRICE, "let's hackstead, fred!") .await .map_err(|e| format!("couldn't send Banker invoice DM: {}", e))?; } mrkdwn("Check your DMs from Banker for the hacksteading invoice!") } "possession_sell" => { let page_json = i.view.ok_or("no view!".to_string())?.private_metadata; //let page: PossessionPage = serde_json::from_str(&page_json) // .map_err(|e| dbg!(format!("couldn't parse {}: {}", page_json, e)))?; Modal { method: "push".to_string(), trigger_id: i.trigger_id, callback_id: "possession_sell_modal".to_string(), title: "Sell Item".to_string(), private_metadata: page_json, blocks: vec![ json!({ "type": 
"input", "block_id": "possession_sell_price_block", "label": plain_text("Price (gp)"), "element": { "type": "plain_text_input", "action_id": "possession_sell_price_input", "placeholder": plain_text("Price Item"), "initial_value": "50", } }), json!({ "type": "divider" }), comment("As a form of confirmation, you'll get an invoice to pay before your Item goes up on the market. \ To fund Harvests and to encourage Hacksteaders to keep prices sensible, \ this invoice is 5% of the price of your sale \ rounded down to the nearest GP (meaning that sales below 20gp aren't taxed at all)."), ], submit: Some("Sell!".to_string()), ..Default::default() } .launch() .await? } "possession_give" => { let key_json = i.view.ok_or("no view!".to_string())?.private_metadata; let key: Key = serde_json::from_str(&key_json).map_err(|e| { let a = format!("couldn't parse {}: {}", key_json, e); error!("{}", a); a })?; let possession = hacksteader::get_possession(&dyn_db(), key).await?; Modal { method: "push".to_string(), trigger_id: i.trigger_id, callback_id: "possession_give_modal".to_string(), title: "Give Item".to_string(), blocks: vec![json!({ "type": "input", "block_id": "possession_give_receiver_block", "label": plain_text("Give Item"), "element": { "type": "users_select", "action_id": "possession_give_receiver_input", "placeholder": plain_text("Who Really Gets your Gotchi?"), "initial_user": ({ let s = &CONFIG.special_users; &s.get(key_json.len() % s.len()).unwrap_or(&*ID) }), "confirm": { "title": plain_text("You sure?"), "text": mrkdwn(format!( "Are you sure you want to give away {} _{}_? You might not get them back. :frowning:", emojify(&possession.name), possession.nickname() )), "confirm": plain_text("Give!"), "deny": plain_text("No!"), "style": "danger", } } })], private_metadata: key_json, submit: Some("Trade Away!".to_string()), ..Default::default() } .launch() .await? 
} "unlock_land" => { // id of the item which allowed them to unlock this land let cert_id: uuid::Uuid = uuid::Uuid::parse_str(&action.value).map_err(|e| { let a = format!("couldn't parse land cert id: {}", e); error!("{}", a); a })?; to_farming .send(FarmingInputEvent::RedeemLandCert( cert_id, i.user.id.clone(), )) .unwrap(); json!({}) } "seed_plant" => { let tile_id: uuid::Uuid = uuid::Uuid::parse_str(&action.value).unwrap(); let hs = match Hacksteader::from_db(&dyn_db(), i.user.id.clone()).await { Ok(hs) => hs, Err(e) => { let a = format!("error fetching user for seed plant: {}", e); error!("{}", a); return Err(a); } }; let seeds: Vec<Possessed<possess::Seed>> = hs .inventory .iter() .cloned() .filter_map(|p| p.try_into().ok()) .collect(); Modal { method: "open".to_string(), trigger_id: i.trigger_id, callback_id: "seed_plant_modal".to_string(), title: "Plant a Seed!".to_string(), private_metadata: String::new(), blocks: vec![json!({ "type": "input", "label": plain_text("Seed Select"), "block_id": "seed_plant_input", "element": { "type": "static_select", "placeholder": plain_text("Which seed do ya wanna plant?"), "action_id": "seed_plant_select", // show them each seed they have that grows a given plant "option_groups": CONFIG .plant_archetypes .iter() .filter_map(|pa| { // plant archetype let mut seed_iter = seeds .iter() .filter(|s| s.inner.grows_into == pa.name); let first_seed = seed_iter.next()?; let seed_count = seed_iter.count() + 1; let mut desc = format!( "{} - {}", seed_count, first_seed.description ); desc.truncate(75); if dbg!(desc.len()) == 75 { desc.truncate(71); desc.push_str("...") } Some(json!({ "label": plain_text(&pa.name), "options": [{ "text": plain_text(format!("{} {}", emojify(&first_seed.name), first_seed.name)), "description": plain_text(desc), // this is fucky-wucky because value can only be 75 chars "value": serde_json::to_string(&( &tile_id.to_simple().to_string(), first_seed.id.to_simple().to_string(), )).unwrap(), }] })) }) 
.collect::<Vec<Value>>(), } })], submit: Some("Plant it!".to_string()), } .launch() .await? } "gotchi_overview" => { let (steader, interactivity, credentials, push): ( String, Interactivity, Credentials, bool, ) = serde_json::from_str(&action.value).unwrap(); let hs = Hacksteader::from_db(&dyn_db(), steader).await?; let gotchi_count = hs.gotchis.len(); let blocks = gotchi_section(hs.gotchis, interactivity, credentials, true); Modal { method: if push { "push" } else { "open" }.to_string(), trigger_id: i.trigger_id, callback_id: "gotchi_overview_modal".to_string(), title: match gotchi_count { 1 => "Your Hackagotchi".to_string(), more => format!("Your {} Hackagotchi", more), }, private_metadata: action.value.to_string(), blocks, submit: None, } .launch() .await? } "inventory_overview" => { let (steader, interactivity, credentials, push): ( String, Interactivity, Credentials, bool, ) = serde_json::from_str(&action.value).unwrap(); let hs = Hacksteader::from_db(&dyn_db(), steader.clone()).await?; let inv_count = hs.inventory.len(); let blocks = inventory_section( inventory_occurences(hs.inventory), interactivity, credentials, true, steader.clone(), ); Modal { method: if push { "push" } else { "open" }.to_string(), trigger_id: i.trigger_id, callback_id: "inventory_overview_modal".to_string(), title: match inv_count { 1 => "Your Item".to_string(), more => format!("Your {} Items", more), }, private_metadata: action.value.to_string(), blocks, submit: None, } .launch() .await? } "crafting_confirm" => { let craft_json = &action.value; let (plant_id, recipe_index): (uuid::Uuid, config::ArchetypeHandle) = serde_json::from_str(&craft_json).unwrap(); let hs = Hacksteader::from_db(&dyn_db(), i.user.id.clone()).await?; let all_nb = hs.neighbor_bonuses(); let plant = hs .land .into_iter() .find(|t| t.id == plant_id) .ok_or_else(|| { let e = format!("no tile with id {} for this user {} ", plant_id, i.user.id); error!("{}", e); e })? 
.plant .ok_or_else(|| { let e = format!("can't craft on tile {}; it's not a plant", plant_id); error!("{}", e); e })?; let neighbor_bonuses = all_nb.bonuses_for_plant(plant_id, plant.archetype_handle); let sum = plant.advancements_sum(neighbor_bonuses.iter()); let recipe = plant.get_recipe(recipe_index).ok_or_else(|| { let e = format!( "can't craft unknown recipe: {} on {:?} {}xp", recipe_index, plant.name, plant.xp ); error!("{}", e); e })?; let possible_output = recipe.makes.any(); Modal { method: "push".to_string(), trigger_id: i.trigger_id, callback_id: "crafting_confirm_modal".to_string(), title: "Crafting Confirmation".to_string(), private_metadata: craft_json.to_string(), blocks: vec![json!({ "type": "section", "text": mrkdwn(format!( concat!( "Are you sure you want your plant to ", "spend the next {:.2} minutes crafting {} ", "using\n\n{}\n{}", ), (recipe.time / sum.crafting_speed_multiplier) / FARM_CYCLES_PER_MIN as f32, recipe.makes, recipe .needs .iter() .map(|(n, what)| { format!( "*{}* {} _{}_", n, emojify(&what.name), what.name ) }) .collect::<Vec<_>>() .join("\n"), if recipe.destroys_plant { "WARNING: THIS WILL DESTROY YOUR PLANT" } else { "" } )), "accessory": { "type": "image", "image_url": match possible_output { Some(po) => format!("http://{}/gotchi/img/{}/{}.png", *URL, po.kind.category(), filify(&po.name) ), None => format!("http://{}/gotchi/img/icon/dirt.png", *URL), }, "alt_text": "The thing you'd like to craft", } })], submit: Some("Craft!".to_string()), } .launch() .await? 
} "crafting" | "crafting_next_page" | "crafting_back_page" => { use hcor::config::Recipe; debug!("crafting window"); let (plant_id, steader, page): (uuid::Uuid, String, usize) = serde_json::from_str(&action.value).unwrap(); let hs = Hacksteader::from_db(&dyn_db(), steader.to_string()).await?; let plant = hs .land .iter() .find_map(|tile| tile.plant.as_ref().filter(|_p| tile.id == plant_id)) .ok_or_else(|| { let a = format!("couldn't crafing window: no such plant!"); error!("{}", a); a })?; let all_nb = hs.neighbor_bonuses(); let neighbor_bonuses = all_nb.bonuses_for_plant(plant_id, plant.archetype_handle); const RECIPE_PAGE_SIZE: usize = 12; let unlocked_recipes = plant .advancements_sum(neighbor_bonuses.iter()) .recipes .into_iter() .enumerate() .collect::<Vec<_>>(); let max_recipes = plant.advancements_max_sum(neighbor_bonuses.iter()).recipes; let locked_recipes = max_recipes.iter().skip(unlocked_recipes.len()); let extra_page = max_recipes.len() != unlocked_recipes.len(); let recipe_page_count = { let l = unlocked_recipes.chunks(RECIPE_PAGE_SIZE).count(); if extra_page { l + 1 } else { l } }; let last_page = recipe_page_count == (page + 1); let first_page = page == 0; let mut recipe_pages = unlocked_recipes.chunks(RECIPE_PAGE_SIZE).skip(page); let this_page_unlocked_recipes = recipe_pages.next(); let make_unlocked_recipe_blocks = |(recipe_handle, raw_recipe): (_, Recipe<usize>)| { use hcor::config::Archetype; let possible = raw_recipe.satisfies(&hs.inventory); let recipe = raw_recipe .lookup_handles() .expect("invalid archetype handle"); let mut b = Vec::with_capacity(recipe.needs.len() + 2); let output: Vec<(&'static Archetype, usize)> = recipe.makes.all(); let craft_output_count: usize = output.iter().map(|(_, n)| n).sum(); let (hi, lo) = recipe.xp; let mut head = json!({ "type": "section", "text": mrkdwn(format!( "{} + around {}xp\n_{}_", if craft_output_count <= 1 { format!("_{}_", recipe.title()) } else { output .iter() .map(|(a, n)| format!( "*{}* {} 
_{}_", n, emojify(&a.name), a.name, )) .collect::<Vec<String>>() .join(match recipe.makes { config::RecipeMakes::OneOf(_) => " *or*\n", _ => " *and*\n", }) }, (hi + lo) / 2, recipe.explanation() )), }); if possible && steader == i.user.id { head.as_object_mut().unwrap().insert( "accessory".to_string(), json!({ "type": "button", "style": "primary", "text": plain_text(format!( "Craft {}", emojify(match recipe.makes.any() { Some(i) => &i.name, None => "seedlet" }) )), "value": serde_json::to_string(&( &plant_id, recipe_handle, )).unwrap(), "action_id": "crafting_confirm", }), ); } b.push(head); b.push(comment("*needs:* ".to_string())); for (count, resource) in recipe.needs { b.push(comment(format!( "*{}* {} _{}_", count, emojify(&resource.name), resource.name ))); } b.push(json!({ "type": "divider" })); b }; let mut blocks: Vec<Value> = match this_page_unlocked_recipes { Some(r) => r .iter() .cloned() .flat_map(make_unlocked_recipe_blocks) .collect(), None => locked_recipes .map(|raw_recipe| { comment(format!( "*Level up to unlock:* {}", raw_recipe.clone().lookup_handles().unwrap().title() )) }) .collect(), }; if !last_page || !first_page { let mut elements = vec![]; elements.push(if !first_page { json!({ "style": "primary", "type": "button", "value": serde_json::to_string(&(plant_id, &steader, page - 1)).unwrap(), "text": plain_text("Previous Page"), "action_id": "crafting_back_page", }) } else { json!({ "value": action.value, "type": "button", "text": plain_text("Previous Page"), "action_id": "crafting_back_page", }) }); elements.push(if !last_page { json!({ "style": "primary", "type": "button", "value": serde_json::to_string(&(plant_id, &steader, page + 1)).unwrap(), "text": plain_text("Next Page"), "action_id": "crafting_next_page", }) } else { json!({ "type": "button", "text": plain_text("Next Page"), "value": action.value, "action_id": "crafting_next_page", }) }); blocks.push(json!({ "type": "actions", "elements": elements })); } const MAX_NAME_LEN: usize = 11; 
let title = format!( "{} Crafting {}/{}", if plant.name.len() > MAX_NAME_LEN { let mut p = plant.name.clone(); p.truncate(MAX_NAME_LEN - 3); p.push_str("..."); p } else { plant.name.clone() }, page + 1, recipe_page_count ); match route.as_str() { "crafting" => { Modal { method: "open".to_string(), trigger_id: i.trigger_id, callback_id: "crafting_modal".to_string(), title, private_metadata: String::new(), blocks, submit: None, } .launch() .await? } _ => { ModalUpdate { callback_id: "crafting_modal".to_string(), blocks, submit: None, title, private_metadata: String::new(), trigger_id: i.trigger_id, view_id: i.view.expect("oof no view").root_view_id, hash: None, } .launch() .await? } } } "levels" => { let (ah, xp): (config::ArchetypeHandle, u64) = serde_json::from_str(&action.value).unwrap(); let arch = CONFIG .plant_archetypes .get(ah) .ok_or_else(|| format!("invalid archetype handle: {}", ah))?; let current_position = arch.advancements.current_position(xp); let blocks = arch .advancements .all() .enumerate() .map(|(i, adv)| { let text = format!( "*{}* - {} - {}xp\n_{}_", adv.title, adv.achiever_title, adv.xp, adv.description, ); if i <= current_position { json!({ "type": "section", "text": mrkdwn(text), }) } else { comment(text) } }) .collect(); Modal { method: "open".to_string(), trigger_id: i.trigger_id, callback_id: "levels_modal".to_string(), title: "Levels Overview".to_string(), private_metadata: String::new(), blocks, submit: None, } .launch() .await? 
} "yield_stats" => { use config::PlantAdvancementKind::*; let (user_id, plant_id): (String, uuid::Uuid) = serde_json::from_str(&action.value).unwrap(); let hs = Hacksteader::from_db(&dyn_db(), user_id.to_string()).await?; let plant = hs .land .iter() .find_map(|tile| tile.plant.as_ref().filter(|_p| tile.id == plant_id)) .ok_or_else(|| format!("no such plant!"))?; let all_nb = hs.neighbor_bonuses(); let neighbor_bonuses = all_nb.bonuses_for_plant(plant_id, plant.archetype_handle); let advancements = plant .unlocked_advancements(neighbor_bonuses.iter()) .filter(|a| match &a.kind { Neighbor(..) => false, _ => true, }) .chain(neighbor_bonuses.iter()) .collect::<Vec<_>>(); let sum = plant.advancements_sum(neighbor_bonuses.iter()); let yield_farm_cycles = plant .base_yield_duration .map(|x| x / sum.yield_speed_multiplier); let mut blocks = vec![]; if let Some(yield_farm_cycles) = yield_farm_cycles { blocks.push(json!({ "type": "section", "text": mrkdwn(format!( concat!( "*Yield Speed*\n", "Yields every: *{:.2} minutes*\n", "Total Speedboost: *x{:.3}*", ), yield_farm_cycles / FARM_CYCLES_PER_MIN as f32, sum.yield_speed_multiplier )), })); } for adv in advancements.iter() { match &adv.kind { YieldSpeedMultiplier(s) => { blocks.push(comment(format!("_{}_: *x{}* speed boost", adv.title, s))); } Neighbor(s) => match **s { YieldSpeedMultiplier(s) => { blocks.push(comment(format!( "_{}_: *x{}* speed boost _(from neighbor)_", adv.title, s ))); } _ => {} }, _ => {} } } blocks.push(json!({ "type": "section", "text": mrkdwn(format!( "*Yield Size*\n*x{:.3}* yield size multiplier", sum.yield_size_multiplier, )) })); for adv in advancements.iter() { match &adv.kind { YieldSizeMultiplier(x) => { blocks.push(comment(format!("_{}_: *x{}* size boost", adv.title, x))); } Neighbor(s) => match **s { YieldSizeMultiplier(s) => { blocks.push(comment(format!( "_{}_: *x{}* size boost _(from neighbor)_", adv.title, s ))); } _ => {} }, _ => {} } } blocks.push(json!({ "type": "section", "text": 
mrkdwn("*Yield Items*".to_string()) })); for y in sum.yields.iter() { let arch = match CONFIG.possession_archetypes.get(y.yields) { Some(arch) => arch, None => { error!("unknown arch in yield {}", y.yields); continue; } }; let name = &arch.name; let (lo, hi) = y.amount; blocks.push(comment(format!( "{}between *{}* and *{}* {} _{}_", if y.chance == 1.0 { "".to_string() } else { format!("up to *{:.1}*% chance of ", y.chance * 100.0) }, lo.floor(), hi.ceil(), emojify(name), name, ))); } Modal { method: "open".to_string(), trigger_id: i.trigger_id, callback_id: "yield_stats_modal".to_string(), title: "Yield Stats".to_string(), private_metadata: String::new(), blocks, submit: None, } .launch() .await? } "item_apply" => { let (tile_id, user_id): (uuid::Uuid, String) = serde_json::from_str(&action.value) .map_err(|e| { let a = format!("couldn't parse action value: {}", e); error!("{}", a); a })?; let db = dyn_db(); let Hacksteader { inventory, land, .. } = Hacksteader::from_db(&db, user_id).await.map_err(|e| { error!("{}", e); e })?; let plant = land .into_iter() .find(|t| t.id == tile_id) .and_then(|t| t.plant) .ok_or_else(|| { let e = "Couldn't find such a plant at this user's hackstead".to_string(); error!("{}", e); e })?; Modal { method: "open".to_string(), trigger_id: i.trigger_id, callback_id: "item_apply_modal".to_string(), title: "Item + Plant = :D".to_string(), private_metadata: String::new(), blocks: vec![json!({ "type": "input", "label": plain_text("Item Select"), "block_id": "item_apply_input", "element": { "type": "static_select", "placeholder": plain_text("Which item do you wanna use on this plant?"), "action_id": "item_apply_select", // show them each item they have that can be applied "option_groups": CONFIG .possession_archetypes .iter() .enumerate() .filter(|(_, pa)| { pa .kind .keepsake() .and_then(|i| { i .item_application .as_ref() .map(|item_appl| { item_appl .effects .iter() .any(|e| e.keep_plants.allows(&plant.name)) }) }) .unwrap_or(false) }) 
.filter_map(|(pah, pa)| { // possession archetype handle, possession archetype let item = inventory.iter().find(|i| i.archetype_handle == pah)?; let has_count = inventory.iter().filter(|i| i.archetype_handle == pah).count(); let mut desc = pa .kind .keepsake()? .item_application .as_ref()? .short_description .clone(); desc.truncate(75); if dbg!(desc.len()) == 75 { desc.truncate(71); desc.push_str("...") } Some(json!({ "label": plain_text(format!("{} {}", emojify(&pa.name), pa.name)), "options": [{ "text": plain_text(has_count), "description": plain_text(desc), // this is fucky-wucky because value can only be 75 chars "value": serde_json::to_string(&( &tile_id.to_simple().to_string(), item.id.to_simple().to_string(), )).unwrap(), }] })) }) .collect::<Vec<Value>>() } })], submit: Some("Apply!".to_string()), } .launch() .await? } "gotchi_hatch" => { info!("hatching egg!"); to_farming .send(FarmingInputEvent::HatchEgg( serde_json::from_str(&dbg!(action.value)).unwrap(), i.user.id.clone(), )) .unwrap(); json!({}) } "gotchi_nickname" => { Modal { method: "push".to_string(), trigger_id: i.trigger_id, callback_id: "gotchi_nickname_modal".to_string(), title: "Nickname Gotchi".to_string(), private_metadata: i.view.ok_or("no view!".to_string())?.private_metadata, blocks: vec![json!({ "type": "input", "block_id": "gotchi_nickname_block", "label": plain_text("Nickname Gotchi"), "element": { "type": "plain_text_input", "action_id": "gotchi_nickname_input", "placeholder": plain_text("Nickname Gotchi"), "initial_value": action.value, "min_length": 1, "max_length": 25, } })], submit: Some("Change it!".to_string()), ..Default::default() } .launch() .await? 
} "possession_market_overview_page" => { let page_json = &action.value; let (item_name, cat): (String, Category) = serde_json::from_str(page_json).unwrap(); let page = PossessionOverviewPage { credentials: if hacksteader::exists(&dyn_db(), i.user.id.clone()).await { Credentials::Hacksteader } else { Credentials::None }, page: 0, interactivity: Interactivity::Buy, source: PossessionOverviewSource::Market(cat), item_name, }; page.modal(i.trigger_id, "push").await?.launch().await? } "possession_page" => { let page_json = action.value; let mut page: PossessionPage = serde_json::from_str(&page_json).unwrap(); if page.possession.steader == i.user.id { page.credentials = Credentials::Owner; } page.modal(i.trigger_id, "open").launch().await? } "push_possession_page" => { let page_json = action.value; let mut page: PossessionPage = serde_json::from_str(&page_json).unwrap(); if page.possession.steader == i.user.id { page.credentials = Credentials::Owner; } page.modal(i.trigger_id, "push").launch().await? } "possession_overview_page" => { let page_json = action.value; let page: PossessionOverviewPage = serde_json::from_str(&page_json).unwrap(); page.modal(i.trigger_id, "open").await?.launch().await? } "push_possession_overview_page" => { let page_json = action.value; let page: PossessionOverviewPage = serde_json::from_str(&page_json).unwrap(); page.modal(i.trigger_id, "push").await?.launch().await? 
} _ => mrkdwn("huh?"), }; Ok(ActionResponse::Json(Json(output_json))) } #[rocket::get("/steadercount")] async fn steadercount() -> Result<String, String> { hcor::Profile::fetch_all(&dyn_db()) .await .map(|profiles| profiles.len().to_string()) } pub enum FarmingInputEvent { ActivateUser(String), RedeemLandCert(uuid::Uuid, String), HatchEgg(uuid::Uuid, String), ApplyItem(ItemApplication, String), PlantSeed(uuid::Uuid, hacksteader::Plant), BeginCraft { tile_id: uuid::Uuid, recipe_archetype_handle: usize, }, } pub fn format_yield(items: Vec<Possession>, user: String) -> Vec<Value> { if items.len() < 8 { items .iter() .map(|p| { json!({ "type": "section", "text": mrkdwn(format!( "*<@{}>'s new* {} _{}_!", user, emojify(&p.name), p.name, )), "accessory": { "type": "image", "image_url": format!( "http://{}/gotchi/img/{}/{}.png", *URL, format!( "{:?}", p.kind.category() ).to_lowercase(), filify(&p.name) ), "alt_text": "happy shiny give u stuffs", } }) }) .collect::<Vec<_>>() } else { let mut occurrences: HashMap<_, usize> = Default::default(); for p in &items { *occurrences .entry(( p.name.clone(), format!("{:?}", p.kind.category()).to_lowercase(), )) .or_insert(0) += 1; } occurrences .iter() .map(|((name, category), count)| { json!({ "type": "section", "text": mrkdwn(format!( "_<@{}>'s_ *{}* new {} _{}_!", user, count, emojify(&name), name )), "accessory": { "type": "image", "image_url": format!( "http://{}/gotchi/img/{}/{}.png", *URL, category, filify(&name) ), "alt_text": "happy shiny egg give u stuffs", } }) }) .collect::<Vec<_>>() } } pub struct ItemApplication { tile: uuid::Uuid, item: uuid::Uuid, } fn setup_logger() -> Result<(), fern::InitError> { let base_config = fern::Dispatch::new(); let debug_config = fern::Dispatch::new() .format(|out, msg, record| { out.finish(format_args!( "{}[{}][{}] {}", chrono::Local::now().format("[%y-%m-%d][%H:%M:%S]"), record.target(), record.level(), msg )) }) .level(log::LevelFilter::Error) .level_for("gotchi", 
log::LevelFilter::Debug) .level_for("rocket", log::LevelFilter::Debug) .chain(fern::log_file("debug.log")?); let info_config = fern::Dispatch::new() .format(|out, msg, record| { out.finish(format_args!( "{}[{}][{}] {}", chrono::Local::now().format("[%y-%m-%d[%H:%M:%S]]"), record.target(), record.level(), msg )) }) .level_for("rocket", log::LevelFilter::Error) .level_for("gotchi", log::LevelFilter::Info) .level(log::LevelFilter::Error) .chain(std::io::stdout()) .chain(fern::log_file("output.log")?); base_config.chain(debug_config).chain(info_config).apply()?; Ok(()) } #[rocket::main] async fn main() -> Result<(), Box<dyn std::error::Error + 'static>> { use rocket_contrib::serve::StaticFiles; use std::fs; dotenv::dotenv().ok(); setup_logger()?; if let Ok(_) = fs::read("restart") { fs::remove_file("restart")?; info!("Restarted!"); } info!("starting"); let (tx, rx) = crossbeam_channel::unbounded(); tokio::task::spawn({ use std::time::{Duration, SystemTime}; use tokio::time::interval; let mut interval = interval(Duration::from_millis(FARM_CYCLE_MILLIS)); let mut active_users: HashMap<String, bool> = HashMap::new(); let mut plant_queue: HashMap<uuid::Uuid, hacksteader::Plant> = HashMap::new(); // fix like this let mut craft_queue: HashMap<uuid::Uuid, config::ArchetypeHandle> = HashMap::new(); let mut item_application_queue: HashMap<String, ItemApplication> = HashMap::new(); let mut land_cert_queue: HashMap<String, uuid::Uuid> = HashMap::new(); let mut hatch_egg_queue: HashMap<String, uuid::Uuid> = HashMap::new(); async move { use futures::stream::{self, StreamExt, TryStreamExt}; use hacksteader::{Plant, Tile}; use hcor::Profile; loop { for (_, fresh) in active_users.iter_mut() { *fresh = false; } while let Ok(farming_event) = rx.try_recv() { use FarmingInputEvent::*; match farming_event { ActivateUser(name) => { debug!("activated: {}", name); active_users.insert(name, true); } ApplyItem(application, user_id) => { item_application_queue.insert(user_id, application); } 
PlantSeed(tile_id, plant) => { plant_queue.insert(tile_id, plant); } RedeemLandCert(cert_id, user_id) => { land_cert_queue.insert(user_id, cert_id); } HatchEgg(egg_id, user_id) => { hatch_egg_queue.insert(user_id, egg_id); } BeginCraft { tile_id, recipe_archetype_handle, } => { craft_queue.insert(tile_id, recipe_archetype_handle); } } } interval.tick().await; debug!("update!"); if active_users.is_empty() { info!("nobody on."); continue; } let db = dyn_db(); use rand::Rng; let mut deletions = vec![]; let mut clear_plants = vec![]; let mut possessions = vec![]; let mut new_tiles = vec![]; let mut dms: Vec<(String, Vec<Value>, String)> = Vec::new(); let mut market_logs: Vec<(Vec<Value>, String)> = Vec::new(); let mut hacksteaders: Vec<Hacksteader> = stream::iter(active_users.clone()) .map(|(id, _)| Hacksteader::from_db(&db, id)) .buffer_unordered(50) .collect::<Vec<_>>() .await .into_iter() .filter_map(|hs| match hs { Ok(i) => Some(i), Err(e) => { error!("error reading hacksteader from db: {}", e); None } }) .collect(); // Give away requested land/hatch eggs for hs in hacksteaders.iter_mut() { if let Some((plant, appl, item)) = item_application_queue.remove(&hs.user_id).and_then(|appl| { let i = hs.inventory.iter().find(|i| i.id == appl.item)?; Some(( hs.land .iter_mut() .find(|t| t.id == appl.tile)? 
.plant .as_mut()?, i.kind.keepsake()?.item_application.as_ref()?, i, )) }) { deletions.push(Key::misc(item.id)); for (i, e) in appl.effects.iter().enumerate() { match &e.kind { config::ItemApplicationEffectKind::TurnsPlantInto(name) => { plant.archetype_handle = CONFIG.find_plant_handle(name).expect("invalid handle"); for (i, e) in plant .effects .clone() .iter() .filter_map(|e| { CONFIG.get_item_application_effect( e.item_archetype_handle, e.effect_archetype_handle, ) }) .enumerate() { if !e .keep_plants .lookup_handles() .unwrap() .allows(&plant.archetype_handle) { plant.effects.swap_remove(i); } } } _ => {} } plant.effects.push(hacksteader::Effect { until_finish: e.duration, item_archetype_handle: item.archetype_handle, effect_archetype_handle: i, }); } } if let Some(cert_id) = land_cert_queue.remove(&hs.user_id) { if hs.inventory.iter().any(|p| { let same_id = p.id == cert_id; let actually_land_cert = p .kind .keepsake() .filter(|k| k.unlocks_land.is_some()) .is_some(); same_id && actually_land_cert }) { deletions.push(Key::misc(cert_id)); let new_tile = hacksteader::Tile::new(hs.user_id.clone()); hs.land.push(new_tile.clone()); new_tiles.push(new_tile.clone()); } } if let Some(egg_id) = hatch_egg_queue.remove(&hs.user_id) { debug!("egg hatch requested!"); if let Some((p, hatch_table)) = hs.gotchis.iter().find_map(|g| { Some(g).filter(|g| g.id == egg_id).and_then(|g| { debug!("hatching {:?}", g); Some((g, g.inner.hatch_table.as_ref()?)) }) }) { deletions.push(Key::gotchi(egg_id)); let (spawn_handles, percentile) = config::spawn_with_percentile(hatch_table, &mut rand::thread_rng()); let spawned: Vec<Possession> = spawn_handles .into_iter() .map(|h| { Possession::new( CONFIG.find_possession_handle(&h).unwrap(), possess::Owner::hatcher(hs.user_id.clone()), ) }) .collect(); let mut msg = vec![ json!({ "type": "section", "text": mrkdwn(format!( concat!( "*<@{}> hatched a {}!*\n", "The rarity of this loot puts it in the ", "*{:.2}th* percentile for loot from eggs of 
this type.", ), hs.user_id, p.name, percentile * 100.0 )), "accessory": { "type": "image", "image_url": format!( "http://{}/gotchi/img/{}/{}.png", *URL, format!( "{:?}", p.kind.category() ).to_lowercase(), filify(&p.name) ), "alt_text": "happy shiny egg give u stuffs", } }), comment("WAT I THOUGHT IT WAS ROCK"), json!({ "type": "divider" }), ]; possessions.extend_from_slice(&spawned); msg.append(&mut format_yield(spawned, hs.user_id.clone())); dms.push(( hs.user_id.clone(), msg.clone(), format!("Your {} hatched!", p.name), )); market_logs .push((msg, format!("<@{}> hatched a {}!", hs.user_id, p.name))); } else { warn!("egg hatch ignored; hack attempt?") } } } // Launch requested crafts for hs in hacksteaders.iter_mut() { let nb = hs.neighbor_bonuses(); let Hacksteader { inventory, land, .. } = hs; let mut land_iter = land.iter_mut(); while let Some(Tile { plant: Some(ref mut plant), steader, id, .. }) = land_iter.next() { let config::PlantAdvancementSum { recipes, craft_return_chance, .. } = plant.advancements_sum( nb.clone() .bonuses_for_plant(*id, plant.archetype_handle) .iter(), ); if let Some((recipe_archetype_handle, recipe)) = craft_queue .remove(&id) .filter(|_| plant.craft.is_none()) .and_then(|i| Some((i, recipes.get(i)?))) { let should_take: usize = recipe.needs.iter().map(|(n, _)| n).sum::<usize>(); let used_resources = recipe .needs .clone() .into_iter() .flat_map(|(count, ah)| { inventory .iter() .filter(move |p| p.archetype_handle == ah) .take(count) }) .collect::<Vec<_>>(); if should_take == used_resources.len() { let mut rng = rand::thread_rng(); deletions.append( &mut used_resources .into_iter() .filter(|p| { let keep = rng.gen_range(0.0, 1.0) < craft_return_chance; if keep { debug!("mommy can we keep it? YES? 
YESSS"); dms.push(( steader.clone(), vec![ comment("your craft return bonus just came in quite handy!"), comment(format!( "looks like you get to keep a {} from that craft!", &p.name, )), ], "What's this, a crafting bonus‽".to_string() )); } !keep }) .map(|p| p.key()) .collect() ); debug!("submitting craft"); plant.craft = Some(hacksteader::Craft { until_finish: recipe.time, recipe_archetype_handle, }); } else { dms.push(( steader.clone(), vec![ comment("you don't have enough resources to craft that"), comment("nice try tho"), ], "You sure you have enough to craft that? Check again..." .to_string(), )); } } } } // we'll be frequently looking up profiles by who owns them to award xp. let mut profiles: HashMap<String, Profile> = hacksteaders .iter() .map(|hs| (hs.user_id.clone(), hs.profile.clone())) .collect(); // we only want to update the time on someone's profile once // even though they might have several plants, any of which // might be boosted, so we give them a "plant token" for // each of their plants, and move them forward when they // run out of tokens let mut plant_tokens: HashMap<String, usize> = hacksteaders .iter() .map(|hs| { ( hs.user_id.clone(), hs.land.iter().filter_map(|t| t.plant.as_ref()).count(), ) }) .collect(); // same goes with the neighbor bonuses for each hackstead let neighbor_bonuses: HashMap<String, _> = hacksteaders .iter() .map(|hs| (hs.user_id.clone(), hs.neighbor_bonuses())) .collect(); // we can only farm on tiles with plants, let mut tiles: Vec<(Plant, Tile)> = hacksteaders .into_iter() .flat_map(|hs| hs.land.into_iter()) .filter_map(|mut t| { Some(( t.plant.take().or_else(|| { plant_queue.remove(&t.id).map(|plant| { profiles .get_mut(&t.steader) .expect("tile has no owner") .last_farm = SystemTime::now(); plant }) })?, t, )) }) .collect(); // remove inactive users for ((_, profile), (user, fresh)) in profiles.iter_mut().zip(active_users.clone().into_iter()) { let now = std::time::SystemTime::now(); if fresh { profile.last_active = 
now; } else { const ACTIVE_DURATION_SECS: u64 = 60 * 5; if now .duration_since(profile.last_active) .ok() .filter(|r| r.as_secs() >= ACTIVE_DURATION_SECS) .is_some() { active_users.remove(&user); } } } // game tick loop: // this is where we go through and we increment each xp/craft/yield for (plant, tile) in tiles.iter_mut() { let profile = match profiles.get_mut(&tile.steader) { Some(profile) => profile, None => { error!( concat!( "ignoring 1 active user: ", "couldn't get tile[{}]'s steader[{}]'s profile", ), tile.id, tile.steader ); continue; } }; let neighbor_bonuses = match neighbor_bonuses.get(&tile.steader) { Some(bonuses) => bonuses, None => { error!( concat!( "ignoring 1 active user: ", "couldn't get tile[{}]'s steader[{}]'s neighbor bonuses", ), tile.id, tile.steader ); continue; } }; let neighbor_bonuses = neighbor_bonuses .clone() .bonuses_for_plant(tile.id, plant.archetype_handle); // amount of farm cycles since the last farm, rounded down let elapsed = SystemTime::now() .duration_since(profile.last_farm) .unwrap_or_default() .as_millis() / (FARM_CYCLE_MILLIS as u128); // increment their profile's "last farm" time so we can calculate // an accurate "elapsed" during the next update. if elapsed > 0 { if let Some(tokens) = plant_tokens.get_mut(&profile.id) { *tokens = *tokens - 1; if *tokens == 0 { debug!("all plants finished for {}", profile.id); // we don't want to add the boosted_elapsed here, then your item effects // would have to be "paid for" later (your farm wouldn't work for however // much time the effect gave you). 
profile.last_farm += Duration::from_millis( (FARM_CYCLE_MILLIS as u128 * elapsed) .try_into() .unwrap_or_else(|e| { error!( "too many farm cycle millis * elapsed[{}]: {}", elapsed, e ); 0 }), ); } } } debug!("elapsing {} cycles for {}", elapsed, profile.id); for _ in 0..elapsed { plant.effects = plant .effects .iter_mut() .filter_map(|e| { if let Some(uf) = e.until_finish.as_mut() { // decrement counter, remove if 0 *uf = (*uf - 1.0).max(0.0); if *uf == 0.0 { debug!("removing effect: {:?}", e); return None; } } Some(*e) }) .collect::<Vec<_>>(); // you want to recalculate this every update because it's dependent // on what effects are active, especially the `total_extra_time_ticks`. let plant_sum = plant.advancements_sum(neighbor_bonuses.iter()); let ticks = plant_sum.total_extra_time_ticks + 1; let mut rng = rand::thread_rng(); debug!("triggering {} ticks for {}'s cycle", ticks, profile.id); for _ in 0..ticks { plant.craft = match plant .current_recipe_raw() .and_then(|r| Some((r, plant.craft.take()?))) { Some((recipe, mut craft)) => { if craft.until_finish > plant_sum.crafting_speed_multiplier { craft.until_finish -= plant_sum.crafting_speed_multiplier; Some(craft) } else { let earned_xp = { let (lo, hi) = recipe.xp; rng.gen_range(lo, hi) }; plant.queued_xp_bonus += earned_xp; let mut output: Vec<Possession> = recipe .makes .clone() .output() .into_iter() .map(|ah| { Possession::new( ah, possess::Owner::crafter(tile.steader.clone()), ) }) .collect(); if rng.gen_range(0.0, 1.0) < plant_sum.double_craft_yield_chance { debug!("cloning recipe output! 
{:?}", output); output.append(&mut output.clone()); debug!("after clone: {:?}", output); } possessions.extend_from_slice(&output); let mut msg = vec![ json!({ "type": "section", "text": mrkdwn(format!( concat!( "Your *{}* has finished crafting *{}* for you!\n", "This earned it {} xp!", ), plant.name, recipe.clone().lookup_handles().unwrap().title(), earned_xp, )), "accessory": { "type": "image", "image_url": format!( "http://{}/gotchi/img/plant/{}.gif", *URL, filify(&plant.current_advancement().art) ), "alt_text": "happy shiny plant give u stuffs", } }), comment("YAY FREE STUFFZ 'CEPT LIKE IT'S NOT FREE"), json!({ "type": "divider" }), ]; msg.append(&mut format_yield(output, tile.steader.clone())); dms.push(( tile.steader.clone(), msg, format!( "What's this, a new {}?", recipe.clone().lookup_handles().unwrap().title() ), )); if recipe.destroys_plant { clear_plants.push(tile.id.clone()); } None } } None => None, }; plant.until_yield = match plant.until_yield - plant_sum.yield_speed_multiplier { n if n > 0.0 => n, _ if plant.base_yield_duration.is_some() => { let owner = &tile.steader; let (yielded, xp_bonuses): (Vec<_>, Vec<_>) = config::spawn(&plant_sum.yields, &mut rand::thread_rng()) .map(|(ah, xp)| { ( Possession::new( ah, possess::Owner::farmer(owner.clone()), ), xp, ) }) .unzip(); let earned_xp = xp_bonuses.into_iter().sum::<usize>() as u64; plant.queued_xp_bonus += earned_xp; let mut msg = vec![ json!({ "type": "section", "text": mrkdwn(format!( concat!( "Your *{}* has produced a crop yield for you!\n", "This earned it {} xp!" 
), plant.name, earned_xp, )), "accessory": { "type": "image", "image_url": format!( "http://{}/gotchi/img/plant/{}.gif", *URL, filify(&plant.current_advancement().art) ), "alt_text": "happy shiny plant give u stuffs", } }), comment("FREE STUFF FROM CUTE THING"), json!({ "type": "divider" }), ]; possessions.extend_from_slice(&yielded); msg.append(&mut format_yield(yielded, tile.steader.clone())); dms.push(( tile.steader.clone(), msg, "FREE STUFF FROM CUTE THING".to_string(), )); plant.base_yield_duration.unwrap_or(0.0) } n => n, }; if let Some(advancement) = plant.increase_xp(plant_sum.xp_multiplier) { dms.push((tile.steader.clone(), vec![ json!({ "type": "section", "text": mrkdwn(format!( concat!( ":tada: Your _{}_ is now a *{}*!\n\n", "*{}* Achieved:\n _{}_\n\n", ":stonks: Total XP: *{}xp*", ), plant.name, advancement.achiever_title, advancement.title, advancement.description, advancement.xp )), "accessory": { "type": "image", "image_url": format!("http://{}/gotchi/img/plant/{}.gif", *URL, filify(&advancement.art)), "alt_text": "happy shiny better plant", } }), comment("EXCITING LEVELING UP NOISES"), ], format!( "Your {} is now a {}!", plant.name, advancement.achiever_title ) )) } let profile_sum = profile.advancements.sum(profile.xp, std::iter::empty()); if let Some(advancement) = profile.increase_xp(plant_sum.xp_multiplier) { dms.push((tile.steader.clone(), vec![ json!({ "type": "section", "text": mrkdwn(format!( concat!( ":tada: Your _Hackstead_ is now a *{}*!\n\n", "*{}* Achieved:\n_{}_\n\n", ":stonks: Total XP: *{}xp*\n", ":mountain: Land Available: *{} pieces* _(+{} pieces)_" ), advancement.achiever_title, advancement.title, advancement.description, advancement.xp, profile_sum.land, match advancement.kind { config::HacksteadAdvancementKind::Land { pieces } => pieces, } )), "accessory": { "type": "image", "image_url": format!("http://{}/gotchi/img/icon/seedlet.png", *URL), "alt_text": "happy shiny better hackstead", } }), comment("SUPER EXCITING LEVELING UP 
NOISES"), ], format!("Your Hackstead is now a {}!", advancement.achiever_title) )); } } } } let _ = stream::iter( tiles .into_iter() .map(|(plant, mut tile)| rusoto_dynamodb::WriteRequest { put_request: Some(rusoto_dynamodb::PutRequest { item: { tile.plant = if clear_plants.iter().any(|id| *id == tile.id) { None } else { Some(plant) }; tile.into_av().m.expect("tile attribute should be map") }, }), ..Default::default() }) .chain( new_tiles .into_iter() .map(|t| rusoto_dynamodb::WriteRequest { put_request: Some(rusoto_dynamodb::PutRequest { item: t.into_av().m.expect("tile attribute should be map"), }), ..Default::default() }), ) .chain(profiles.iter().map(|(_, p)| rusoto_dynamodb::WriteRequest { put_request: Some(rusoto_dynamodb::PutRequest { item: p.item() }), ..Default::default() })) .chain(possessions.iter().map(|p| rusoto_dynamodb::WriteRequest { put_request: Some(rusoto_dynamodb::PutRequest { item: p.item() }), ..Default::default() })) .chain( deletions .into_iter() .map(|key| rusoto_dynamodb::WriteRequest { delete_request: Some(rusoto_dynamodb::DeleteRequest { key: key.into_item(), }), ..Default::default() }), ) .collect::<Vec<_>>() .chunks(25) .map(|items| { db.batch_write_item(rusoto_dynamodb::BatchWriteItemInput { request_items: [(hcor::TABLE_NAME.to_string(), items.to_vec())] .iter() .cloned() .collect(), ..Default::default() }) }), ) .map(|x| Ok(x)) .try_for_each_concurrent(None, |r| async move { match r.await { Ok(_) => Ok(()), Err(e) => Err(format!("error updating db after farm cycle: {}", e)), } }) .await .map_err(|e| error!("farm cycle async err: {}", e)); let _ = futures::try_join!( stream::iter(profiles.clone()) .map(|x| Ok(x)) .try_for_each_concurrent(None, |(who, _)| { update_user_home_tab(who) }), stream::iter(dms).map(|x| Ok(x)).try_for_each_concurrent( None, |(who, blocks, craft_type)| { dm_blocks(who.clone(), craft_type.clone(), blocks.to_vec()) } ), stream::iter(market_logs) .map(|x| Ok(x)) .try_for_each_concurrent(None, |(blocks, 
notif_type)| { market::log_blocks(notif_type, blocks) }), ) .map_err(|e| error!("farm cycle async err: {}", e)); if let Ok(_) = fs::read("restart") { std::process::exit(0); } } } }); rocket::ignite() .manage(tx) .mount( "/gotchi", routes![ hackstead, hackmarket, action_endpoint, egghatchwhen, hgive, event::event, stateofsteading, steadercount ], ) .mount("/gotchi/img", StaticFiles::from("./img")) .launch() .await .expect("launch fail"); Ok(()) }
38.235544
134
0.387598
14dd8f4f2fef226c4bf24cac9bb8fa7e81329235
18,276
/* * Copyright (c) Meta Platforms, Inc. and affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ use crate::writer::{Prop, SortedASTList, SortedStringKeyList, StringLiteral, Writer, AST}; use crate::TypegenConfig; use crate::{KEY_DATA, KEY_FRAGMENT_SPREADS, KEY_FRAGMENT_TYPE}; use intern::string_key::{Intern, StringKey}; use itertools::Itertools; use std::fmt::{Result as FmtResult, Write}; pub struct TypeScriptPrinter { result: String, use_import_type_syntax: bool, indentation: usize, should_sort_typegen_items: bool, } impl Write for TypeScriptPrinter { fn write_str(&mut self, s: &str) -> FmtResult { self.result.write_str(s) } } impl Writer for TypeScriptPrinter { fn into_string(self: Box<Self>) -> String { self.result } fn get_runtime_fragment_import(&self) -> &'static str { "FragmentRefs" } fn write(&mut self, ast: &AST) -> FmtResult { match ast { AST::Any => write!(&mut self.result, "any"), AST::String => write!(&mut self.result, "string"), AST::StringLiteral(literal) => self.write_string_literal(**literal), AST::OtherTypename => self.write_other_string(), AST::Number => write!(&mut self.result, "number"), AST::Boolean => write!(&mut self.result, "boolean"), AST::Callable(return_type) => self.write_callable(&*return_type), AST::Identifier(identifier) => write!(&mut self.result, "{}", identifier), AST::RawType(raw) => write!(&mut self.result, "{}", raw), AST::Union(members) => self.write_union(members), AST::ReadOnlyArray(of_type) => self.write_read_only_array(of_type), AST::Nullable(of_type) => self.write_nullable(of_type), AST::NonNullable(of_type) => self.write_non_nullable(of_type), AST::ExactObject(object) => self.write_object(object), AST::InexactObject(object) => self.write_object(object), AST::Local3DPayload(document_name, selections) => { self.write_local_3d_payload(*document_name, selections) } AST::FragmentReference(fragments) => 
self.write_fragment_references(fragments), AST::FragmentReferenceType(fragment) => self.write_fragment_references_type(*fragment), AST::ReturnTypeOfFunctionWithName(function_name) => { self.write_return_type_of_function_with_name(*function_name) } AST::ActorChangePoint(_) => { panic!("ActorChangePoint is not supported yet in Typescript") } AST::ReturnTypeOfMethodCall(object, method_name) => { self.write_return_type_of_method_call(object, *method_name) } } } fn write_local_type(&mut self, name: &str, value: &AST) -> FmtResult { write!(&mut self.result, "type {} = ", name)?; self.write(value)?; writeln!(&mut self.result, ";") } fn write_export_type(&mut self, name: &str, value: &AST) -> FmtResult { write!(&mut self.result, "export type {} = ", name)?; self.write(value)?; writeln!(&mut self.result, ";") } fn write_import_module_default(&mut self, name: &str, from: &str) -> FmtResult { writeln!(&mut self.result, "import {} from \"{}\";", name, from) } fn write_import_type(&mut self, types: &[&str], from: &str) -> FmtResult { writeln!( &mut self.result, "import {}{{ {} }} from \"{}\";", if self.use_import_type_syntax { "type " } else { "" }, types.iter().format(", "), from ) } // In TypeScript, we don't need to import "any" fragment types, these are unused. fn write_any_type_definition(&mut self, _name: &str) -> FmtResult { Ok(()) } // In TypeScript, we don't export & import fragments. We just use the generic FragmentRefs type instead. 
fn write_import_fragment_type(&mut self, _types: &[&str], _from: &str) -> FmtResult { Ok(()) } fn write_export_fragment_type(&mut self, _name: &str) -> FmtResult { Ok(()) } fn write_export_fragment_types( &mut self, _fragment_type_name_1: &str, _fragment_type_name_2: &str, ) -> FmtResult { Ok(()) } } impl TypeScriptPrinter { pub fn new(config: &TypegenConfig, should_sort_typegen_items: bool) -> Self { Self { result: String::new(), indentation: 0, use_import_type_syntax: config.use_import_type_syntax, should_sort_typegen_items, } } fn write_indentation(&mut self) -> FmtResult { self.result.write_str(&" ".repeat(self.indentation)) } fn write_string_literal(&mut self, literal: StringKey) -> FmtResult { write!(&mut self.result, "\"{}\"", literal) } fn write_other_string(&mut self) -> FmtResult { write!(&mut self.result, r#""%other""#) } fn write_union(&mut self, members: &[AST]) -> FmtResult { let mut first = true; for member in members { if first { first = false; } else { write!(&mut self.result, " | ")?; } self.write(member)?; } Ok(()) } fn write_read_only_array(&mut self, of_type: &AST) -> FmtResult { write!(&mut self.result, "ReadonlyArray<")?; self.write(of_type)?; write!(&mut self.result, ">") } fn write_non_nullable(&mut self, of_type: &AST) -> FmtResult { write!(&mut self.result, "NonNullable<")?; self.write(of_type)?; write!(&mut self.result, ">") } fn write_nullable(&mut self, of_type: &AST) -> FmtResult { let null_type = AST::RawType("null".intern()); if let AST::Union(members) = of_type { let mut new_members = Vec::with_capacity(members.len() + 1); new_members.extend_from_slice(members); new_members.push(null_type); self.write_union(&*new_members)?; } else { self.write_union(&*vec![of_type.clone(), null_type])?; } Ok(()) } fn write_object(&mut self, props: &[Prop]) -> FmtResult { if props.is_empty() { write!(&mut self.result, "{{}}")?; return Ok(()); } // Replication of babel printer oddity: objects only containing a spread // are missing a newline. 
if props.len() == 1 { if let Prop::Spread(_) = props[0] { write!(&mut self.result, "{{}}")?; return Ok(()); } } writeln!(&mut self.result, "{{")?; self.indentation += 1; for prop in props { match prop { Prop::Spread(_) => continue, Prop::KeyValuePair(key_value_pair) => { self.write_indentation()?; if let AST::OtherTypename = key_value_pair.value { writeln!( &mut self.result, "// This will never be '%other', but we need some" )?; self.write_indentation()?; writeln!( &mut self.result, "// value in case none of the concrete values match." )?; self.write_indentation()?; } if key_value_pair.read_only { write!(&mut self.result, "readonly ")?; } if key_value_pair.key == *KEY_FRAGMENT_SPREADS || key_value_pair.key == *KEY_FRAGMENT_TYPE || key_value_pair.key == *KEY_DATA { write!(&mut self.result, "\" {}\"", key_value_pair.key)?; } else { write!(&mut self.result, "{}", key_value_pair.key)?; } if key_value_pair.optional { write!(&mut self.result, "?")?; } write!(&mut self.result, ": ")?; self.write(&key_value_pair.value)?; writeln!(&mut self.result, ";")?; } Prop::GetterSetterPair(_) => { panic!( "Getters and setters with different types are not implemented in typescript. 
See https://github.com/microsoft/TypeScript/issues/43662" ); } } } self.indentation -= 1; self.write_indentation()?; write!(&mut self.result, "}}")?; Ok(()) } fn write_local_3d_payload(&mut self, document_name: StringKey, selections: &AST) -> FmtResult { write!(&mut self.result, "Local3DPayload<\"{}\", ", document_name)?; self.write(selections)?; write!(&mut self.result, ">")?; Ok(()) } fn write_fragment_references(&mut self, fragments: &SortedStringKeyList) -> FmtResult { write!(&mut self.result, "FragmentRefs<")?; self.write(&AST::Union(SortedASTList::new( fragments .iter() .map(|key| AST::StringLiteral(StringLiteral(*key))) .collect(), self.should_sort_typegen_items, )))?; write!(&mut self.result, ">") } fn write_fragment_references_type(&mut self, fragment: StringKey) -> FmtResult { self.write(&AST::StringLiteral(StringLiteral(fragment))) } fn write_return_type_of_function_with_name(&mut self, function_name: StringKey) -> FmtResult { write!(&mut self.result, "ReturnType<typeof {}>", function_name) } fn write_return_type_of_method_call( &mut self, object: &AST, method_name: StringKey, ) -> FmtResult { write!(&mut self.result, "ReturnType<")?; self.write(object)?; write!(&mut self.result, "[\"{}\"]>", method_name) } fn write_callable(&mut self, return_type: &AST) -> FmtResult { write!(&mut self.result, "() => ")?; self.write(return_type) } } #[cfg(test)] mod tests { use crate::writer::{ExactObject, InexactObject, KeyValuePairProp, SortedASTList}; use super::*; use intern::string_key::Intern; fn print_type(ast: &AST) -> String { print_type_with_config(ast, &Default::default()) } fn print_type_with_config(ast: &AST, config: &TypegenConfig) -> String { let mut printer = Box::new(TypeScriptPrinter::new(config, true)); printer.write(ast).unwrap(); printer.into_string() } #[test] fn scalar_types() { assert_eq!(print_type(&AST::Any), "any".to_string()); assert_eq!(print_type(&AST::String), "string".to_string()); assert_eq!(print_type(&AST::Number), 
"number".to_string()); } #[test] fn union_type() { assert_eq!( print_type(&AST::Union(SortedASTList::new( vec![AST::String, AST::Number], true ))), "string | number".to_string() ); } #[test] fn read_only_array_type() { assert_eq!( print_type(&AST::ReadOnlyArray(Box::new(AST::String))), "ReadonlyArray<string>".to_string() ); } #[test] fn nullable_type() { assert_eq!( print_type(&AST::Nullable(Box::new(AST::String))), "string | null".to_string() ); assert_eq!( print_type(&AST::Nullable(Box::new(AST::Union(SortedASTList::new( vec![AST::String, AST::Number,], true ))))), "string | number | null" ) } #[test] fn exact_object() { assert_eq!( print_type(&AST::ExactObject(ExactObject::new(Vec::new(), true))), r"{}".to_string() ); assert_eq!( print_type(&AST::ExactObject(ExactObject::new( vec![Prop::KeyValuePair(KeyValuePairProp { key: "single".intern(), optional: false, read_only: false, value: AST::String, })], true ))), r"{ single: string; }" .to_string() ); assert_eq!( print_type(&AST::ExactObject(ExactObject::new( vec![ Prop::KeyValuePair(KeyValuePairProp { key: "foo".intern(), optional: true, read_only: false, value: AST::String, }), Prop::KeyValuePair(KeyValuePairProp { key: "bar".intern(), optional: false, read_only: true, value: AST::Number, }), ], true ))), r"{ readonly bar: number; foo?: string; }" .to_string() ); } #[test] fn nested_object() { assert_eq!( print_type(&AST::ExactObject(ExactObject::new( vec![ Prop::KeyValuePair(KeyValuePairProp { key: "foo".intern(), optional: true, read_only: false, value: AST::ExactObject(ExactObject::new( vec![ Prop::KeyValuePair(KeyValuePairProp { key: "nested_foo".intern(), optional: true, read_only: false, value: AST::String, }), Prop::KeyValuePair(KeyValuePairProp { key: "nested_foo2".intern(), optional: false, read_only: true, value: AST::Number, }), ], true )), }), Prop::KeyValuePair(KeyValuePairProp { key: "bar".intern(), optional: false, read_only: true, value: AST::Number, }), ], true ))), r"{ readonly bar: number; 
foo?: { nested_foo?: string; readonly nested_foo2: number; }; }" .to_string() ); } #[test] fn inexact_object() { assert_eq!( print_type(&AST::InexactObject(InexactObject::new(Vec::new(), true))), "{}".to_string() ); assert_eq!( print_type(&AST::InexactObject(InexactObject::new( vec![Prop::KeyValuePair(KeyValuePairProp { key: "single".intern(), optional: false, read_only: false, value: AST::String, }),], true ))), r"{ single: string; }" .to_string() ); assert_eq!( print_type(&AST::InexactObject(InexactObject::new( vec![ Prop::KeyValuePair(KeyValuePairProp { key: "foo".intern(), optional: false, read_only: false, value: AST::String, }), Prop::KeyValuePair(KeyValuePairProp { key: "bar".intern(), optional: true, read_only: true, value: AST::Number, }) ], true ))), r"{ readonly bar?: number; foo: string; }" .to_string() ); } #[test] fn other_comment() { assert_eq!( print_type(&AST::ExactObject(ExactObject::new( vec![Prop::KeyValuePair(KeyValuePairProp { key: "with_comment".intern(), optional: false, read_only: false, value: AST::OtherTypename, }),], true ))), r#"{ // This will never be '%other', but we need some // value in case none of the concrete values match. 
with_comment: "%other"; }"# .to_string() ); } #[test] fn import_type() { let mut printer = Box::new(TypeScriptPrinter::new(&TypegenConfig::default(), true)); printer.write_import_type(&["A", "B"], "module").unwrap(); assert_eq!(printer.into_string(), "import { A, B } from \"module\";\n"); let mut printer = Box::new(TypeScriptPrinter::new( &TypegenConfig { use_import_type_syntax: true, ..Default::default() }, true, )); printer.write_import_type(&["C"], "./foo").unwrap(); assert_eq!(printer.into_string(), "import type { C } from \"./foo\";\n"); } #[test] fn import_module() { let mut printer = Box::new(TypeScriptPrinter::new(&TypegenConfig::default(), true)); printer.write_import_module_default("A", "module").unwrap(); assert_eq!(printer.into_string(), "import A from \"module\";\n"); } #[test] fn function_return_type() { assert_eq!( print_type(&AST::ReturnTypeOfFunctionWithName("someFunc".intern(),)), "ReturnType<typeof someFunc>".to_string() ); } }
33.289617
158
0.491191
080530e7b93d1714f94f1d532d27b91dddb5b4c7
10,620
use crate::common::{ jormungandr::process::JormungandrProcess, network::{self, params, wallet}, }; use jormungandr_lib::{ interfaces::{ Explorer, PeerRecord, Policy, PreferredListConfig, TopicsOfInterest, TrustedPeer, }, time::Duration, }; use jortestkit::process as process_utils; const CLIENT: &str = "CLIENT"; const SERVER: &str = "SERVER"; pub fn assert_empty_quarantine(node: &JormungandrProcess, info: &str) { let quarantine = node .rest() .p2p_quarantined() .expect("cannot list quarantined peers"); assert!( quarantine.is_empty(), "{}: Peer {} has got non empty quarantine list", info, node.alias() ); } pub fn assert_are_in_quarantine( node: &JormungandrProcess, peers: Vec<&JormungandrProcess>, info: &str, ) { let available_list = node .rest() .p2p_quarantined() .expect("cannot list quarantined peers"); assert_record_is_present(available_list, peers, "quarantine", info) } pub fn assert_record_is_present( peer_list: Vec<PeerRecord>, peers: Vec<&JormungandrProcess>, list_name: &str, info: &str, ) { for peer in peers { assert!( peer_list.iter().any(|x| { println!("{} == {}", x.address, peer.address().to_string()); x.address == peer.address().to_string() }), "{}: Peer {} is not present in {} list", info, peer.alias(), list_name ); } } pub fn assert_record_is_not_present( peer_list: Vec<PeerRecord>, peers: Vec<&JormungandrProcess>, list_name: &str, ) { for peer in peers { assert!( !peer_list .iter() .any(|x| { x.address == peer.address().to_string() }), "Peer {} is present in {} list, while should not", peer.alias(), list_name ); } } pub fn assert_node_stats( node: &JormungandrProcess, peer_available_cnt: usize, peer_quarantined_cnt: usize, peer_total_cnt: usize, peer_unreachable_cnt: usize, info: &str, ) { node.log_stats(); let stats = node .rest() .stats() .expect("cannot get stats") .stats .expect("empty stats"); assert_eq!( peer_available_cnt, stats.peer_available_cnt.clone(), "{}: peer_available_cnt, Node {}", info, node.alias() ); assert_eq!( 
peer_quarantined_cnt, stats.peer_quarantined_cnt, "{}: peer_quarantined_cnt, Node {}", info, node.alias() ); assert_eq!( peer_total_cnt, stats.peer_total_cnt, "{}: peer_total_cnt, Node {}", info, node.alias() ); assert_eq!( peer_unreachable_cnt, stats.peer_unreachable_cnt, "{}: peer_unreachable_cnt, Node {}", info, node.alias() ); } #[test] pub fn node_whitelist_itself() { let mut network_controller = network::builder() .single_trust_direction(CLIENT, SERVER) .initials(vec![ wallet("delegated1").with(1_000_000).delegated_to(CLIENT), wallet("delegated2").with(1_000_000).delegated_to(SERVER), ]) .custom_config(vec![params(CLIENT).explorer(Explorer { enabled: true })]) .build() .unwrap(); let _server = network_controller.spawn_and_wait(SERVER); let client_public_address = network_controller .node_config(CLIENT) .unwrap() .p2p .public_address; let policy = Policy { quarantine_duration: Some(Duration::new(1, 0)), quarantine_whitelist: Some(vec![client_public_address]), }; let client = network_controller .spawn_custom(params(CLIENT).policy(policy)) .unwrap(); client.assert_no_errors_in_log(); } #[test] pub fn node_does_not_quarantine_whitelisted_node() { let mut network_controller = network::builder() .single_trust_direction(CLIENT, SERVER) .initials(vec![ wallet("delegated1").with(1_000_000).delegated_to(CLIENT), wallet("delegated2").with(1_000_000).delegated_to(SERVER), ]) .custom_config(vec![params(CLIENT).explorer(Explorer { enabled: true })]) .build() .unwrap(); let server = network_controller.spawn_and_wait(SERVER); let server_public_address = network_controller .node_config(SERVER) .unwrap() .p2p .public_address; let policy = Policy { quarantine_duration: Some(Duration::new(30, 0)), quarantine_whitelist: Some(vec![server_public_address]), }; let client = network_controller .spawn_custom(params(CLIENT).policy(policy)) .unwrap(); server.shutdown(); process_utils::sleep(10); assert_node_stats(&client, 1, 0, 1, 0, "before spawning server again"); 
assert_empty_quarantine(&client, "before spawning server again"); let _server_after = network_controller.spawn_and_wait(SERVER); assert_node_stats(&client, 1, 0, 1, 0, "after spawning server again"); assert_empty_quarantine(&client, "after spawning server again"); } #[test] pub fn node_put_in_quarantine_nodes_which_are_not_whitelisted() { let mut network_controller = network::builder() .single_trust_direction(CLIENT, SERVER) .initials(vec![ wallet("delegated1").with(1_000_000).delegated_to(CLIENT), wallet("delegated2").with(1_000_000).delegated_to(SERVER), ]) .custom_config(vec![params(CLIENT).explorer(Explorer { enabled: true })]) .build() .unwrap(); let mut server = network_controller.spawn_and_wait(SERVER); let client_public_address = network_controller .node_config(CLIENT) .unwrap() .p2p .public_address; let policy = Policy { quarantine_duration: Some(Duration::new(1, 0)), quarantine_whitelist: Some(vec![client_public_address]), }; let client = network_controller .spawn_custom(params(CLIENT).policy(policy)) .unwrap(); server.shutdown(); process_utils::sleep(10); assert_node_stats(&client, 0, 1, 1, 0, "before spawning server again"); assert_are_in_quarantine(&client, vec![&server], "before spawning server again"); server = network_controller.spawn_and_wait(SERVER); assert_node_stats(&client, 0, 1, 1, 0, "after spawning server again"); assert_are_in_quarantine(&client, vec![&server], "after spawning server again"); process_utils::sleep(10); assert_node_stats( &client, 0, 1, 1, 0, "after spawning server again (10 s. delay)", ); assert_are_in_quarantine( &client, vec![&server], "after spawning server again (10 s. 
delay)", ); } #[test] pub fn node_trust_itself() { let mut network_controller = network::builder() .single_trust_direction(CLIENT, SERVER) .initials(vec![ wallet("delegated1").with(1_000_000).delegated_to(CLIENT), wallet("delegated2").with(1_000_000).delegated_to(SERVER), ]) .custom_config(vec![params(CLIENT).explorer(Explorer { enabled: true })]) .build() .unwrap(); let _server = network_controller.spawn_and_wait(SERVER); let config = network_controller.node_config(CLIENT).unwrap().p2p.clone(); let peer = TrustedPeer { address: config.public_address, id: None, }; network_controller .expect_spawn_failed( params(CLIENT).trusted_peers(vec![peer]), "failed to retrieve the list of bootstrap peers from trusted peer", ) .unwrap(); } #[test] #[ignore] pub fn node_put_itself_in_preffered_layers() { let mut network_controller = network::builder() .single_trust_direction(CLIENT, SERVER) .initials(vec![ wallet("delegated1").with(1_000_000).delegated_to(CLIENT), wallet("delegated2").with(1_000_000).delegated_to(SERVER), ]) .build() .unwrap(); let _server = network_controller.spawn_and_wait(SERVER); let config = network_controller.node_config(CLIENT).unwrap().p2p.clone(); let peer = TrustedPeer { address: config.public_address, id: None, }; let layer = PreferredListConfig { view_max: Default::default(), peers: vec![peer], }; assert!(network_controller .expect_spawn_failed( params(CLIENT).preferred_layer(layer), "topology tells the node to connect to itself" ) .is_ok()); } #[ignore] #[test] pub fn topic_of_interest_influences_node_sync_ability() { let fast_client_alias = "FAST_CLIENT"; let slow_client_alias = "SLOW_CLIENT"; let high_topic_of_interests = TopicsOfInterest { messages: "high".to_owned(), blocks: "high".to_owned(), }; let low_topic_of_interests = TopicsOfInterest { messages: "low".to_owned(), blocks: "low".to_owned(), }; let mut network_controller = network::builder() .star_topology(SERVER, vec![fast_client_alias, slow_client_alias]) .initials(vec![ 
wallet("delegated0").with(1_000_000).delegated_to(SERVER), wallet("delegated1") .with(1_000_000) .delegated_to(fast_client_alias), wallet("delegated2") .with(1_000_000) .delegated_to(slow_client_alias), ]) .custom_config(vec![ params(fast_client_alias).topics_of_interest(high_topic_of_interests), params(slow_client_alias).topics_of_interest(low_topic_of_interests), ]) .build() .unwrap(); let _server = network_controller.spawn_and_wait(SERVER); let fast_client = network_controller.spawn_and_wait(fast_client_alias); let slow_client = network_controller.spawn_and_wait(fast_client_alias); process_utils::sleep(30); let fast_client_block_recv_cnt = fast_client .rest() .stats() .unwrap() .stats .unwrap() .block_recv_cnt; let slow_client_block_recv_cnt = slow_client .rest() .stats() .unwrap() .stats .unwrap() .block_recv_cnt; assert!( fast_client_block_recv_cnt > slow_client_block_recv_cnt, "node with high block topic of interest should have more recieved blocks" ); }
28.395722
89
0.615066
566350c82b5f932687f02b3d27c566963def94fa
4,304
use core::future::Future; use embedded_hal::digital::v2::OutputPin; use embedded_hal_async::delay::DelayUs; use crate::{ error::{Error, Result}, non_blocking::bus::DataBus, }; pub struct EightBitBus< RS: OutputPin, EN: OutputPin, D0: OutputPin, D1: OutputPin, D2: OutputPin, D3: OutputPin, D4: OutputPin, D5: OutputPin, D6: OutputPin, D7: OutputPin, D: DelayUs, > { rs: RS, en: EN, d0: D0, d1: D1, d2: D2, d3: D3, d4: D4, d5: D5, d6: D6, d7: D7, delay: D, } impl< RS: OutputPin, EN: OutputPin, D0: OutputPin, D1: OutputPin, D2: OutputPin, D3: OutputPin, D4: OutputPin, D5: OutputPin, D6: OutputPin, D7: OutputPin, D: DelayUs, > EightBitBus<RS, EN, D0, D1, D2, D3, D4, D5, D6, D7, D> { pub fn from_pins( rs: RS, en: EN, d0: D0, d1: D1, d2: D2, d3: D3, d4: D4, d5: D5, d6: D6, d7: D7, delay: D, ) -> EightBitBus<RS, EN, D0, D1, D2, D3, D4, D5, D6, D7, D> { EightBitBus { rs, en, d0, d1, d2, d3, d4, d5, d6, d7, delay, } } fn set_bus_bits(&mut self, data: u8) -> Result<()> { let db0: bool = (0b0000_0001 & data) != 0; let db1: bool = (0b0000_0010 & data) != 0; let db2: bool = (0b0000_0100 & data) != 0; let db3: bool = (0b0000_1000 & data) != 0; let db4: bool = (0b0001_0000 & data) != 0; let db5: bool = (0b0010_0000 & data) != 0; let db6: bool = (0b0100_0000 & data) != 0; let db7: bool = (0b1000_0000 & data) != 0; if db0 { self.d0.set_high().map_err(|_| Error)?; } else { self.d0.set_low().map_err(|_| Error)?; } if db1 { self.d1.set_high().map_err(|_| Error)?; } else { self.d1.set_low().map_err(|_| Error)?; } if db2 { self.d2.set_high().map_err(|_| Error)?; } else { self.d2.set_low().map_err(|_| Error)?; } if db3 { self.d3.set_high().map_err(|_| Error)?; } else { self.d3.set_low().map_err(|_| Error)?; } if db4 { self.d4.set_high().map_err(|_| Error)?; } else { self.d4.set_low().map_err(|_| Error)?; } if db5 { self.d5.set_high().map_err(|_| Error)?; } else { self.d5.set_low().map_err(|_| Error)?; } if db6 { self.d6.set_high().map_err(|_| Error)?; } else { self.d6.set_low().map_err(|_| 
Error)?; } if db7 { self.d7.set_high().map_err(|_| Error)?; } else { self.d7.set_low().map_err(|_| Error)?; } Ok(()) } /// Future that completes after now + millis pub async fn delay_ms(&mut self, millis: u32) { self.delay.delay_ms(millis).await.unwrap() } } impl< RS: OutputPin + 'static, EN: OutputPin + 'static, D0: OutputPin + 'static, D1: OutputPin + 'static, D2: OutputPin + 'static, D3: OutputPin + 'static, D4: OutputPin + 'static, D5: OutputPin + 'static, D6: OutputPin + 'static, D7: OutputPin + 'static, D: DelayUs, > DataBus for EightBitBus<RS, EN, D0, D1, D2, D3, D4, D5, D6, D7, D> { type WriteFuture<'a> = impl Future<Output = Result<()>> + 'a where D: 'a; fn write<'a>(&'a mut self, byte: u8, data: bool) -> Self::WriteFuture<'a> { async move { if data { self.rs.set_high().map_err(|_| Error)?; } else { self.rs.set_low().map_err(|_| Error)?; } self.set_bus_bits(byte)?; self.en.set_high().map_err(|_| Error)?; self.delay.delay_ms(2).await.unwrap(); self.en.set_low().map_err(|_| Error)?; if data { self.rs.set_low().map_err(|_| Error)?; } Ok(()) } } }
23.779006
79
0.455623
2f82e68f1be0b49b48142d5ac1c08e2314e8c95f
7,419
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License.. #![allow(dead_code)] #![allow(unused_assignments)] extern crate sgx_types; extern crate sgx_urts; use sgx_types::*; use sgx_urts::SgxEnclave; use std::os::unix::io::{IntoRawFd, AsRawFd}; use std::env; use std::net::{TcpListener, TcpStream, SocketAddr}; use std::str; const BUFFER_SIZE: usize = 1024; static ENCLAVE_FILE: &'static str = "enclave.signed.so"; static ENCLAVE_TOKEN: &'static str = "enclave.token"; extern { fn run_server(eid: sgx_enclave_id_t, retval: *mut sgx_status_t, socket_fd: c_int) -> sgx_status_t; fn run_client(eid: sgx_enclave_id_t, retval: *mut sgx_status_t, socket_fd: c_int) -> sgx_status_t; } #[no_mangle] pub extern "C" fn ocall_sgx_init_quote(ret_ti: *mut sgx_target_info_t, ret_gid : *mut sgx_epid_group_id_t) -> sgx_status_t { println!("Entering ocall_sgx_init_quote"); unsafe {sgx_init_quote(ret_ti, ret_gid)} } pub fn lookup_ipv4(host: &str, port: u16) -> SocketAddr { use std::net::ToSocketAddrs; let addrs = (host, port).to_socket_addrs().unwrap(); for addr in addrs { if let SocketAddr::V4(_) = addr { return addr; } } unreachable!("Cannot lookup address"); } #[no_mangle] pub extern "C" fn ocall_get_ias_socket(ret_fd : *mut c_int) -> sgx_status_t { let port = 443; 
let hostname = "api.trustedservices.intel.com"; let addr = lookup_ipv4(hostname, port); let sock = TcpStream::connect(&addr).expect("[-] Connect tls server failed!"); unsafe {*ret_fd = sock.into_raw_fd();} sgx_status_t::SGX_SUCCESS } #[no_mangle] pub extern "C" fn ocall_get_quote (p_sigrl : *const u8, sigrl_len : u32, p_report : *const sgx_report_t, quote_type : sgx_quote_sign_type_t, p_spid : *const sgx_spid_t, p_nonce : *const sgx_quote_nonce_t, p_qe_report : *mut sgx_report_t, p_quote : *mut u8, _maxlen : u32, p_quote_len : *mut u32) -> sgx_status_t { println!("Entering ocall_get_quote"); let mut real_quote_len : u32 = 0; let ret = unsafe { sgx_calc_quote_size(p_sigrl, sigrl_len, &mut real_quote_len as *mut u32) }; if ret != sgx_status_t::SGX_SUCCESS { println!("sgx_calc_quote_size returned {}", ret); return ret; } println!("quote size = {}", real_quote_len); unsafe { *p_quote_len = real_quote_len; } let ret = unsafe { sgx_get_quote(p_report, quote_type, p_spid, p_nonce, p_sigrl, sigrl_len, p_qe_report, p_quote as *mut sgx_quote_t, real_quote_len) }; if ret != sgx_status_t::SGX_SUCCESS { println!("sgx_calc_quote_size returned {}", ret); return ret; } println!("sgx_calc_quote_size returned {}", ret); ret } #[no_mangle] pub extern "C" fn ocall_get_update_info (platform_blob: * const sgx_platform_info_t, enclave_trusted: i32, update_info: * mut sgx_update_info_bit_t) -> sgx_status_t { unsafe{ sgx_report_attestation_status(platform_blob, enclave_trusted, update_info) } } fn init_enclave() -> SgxResult<SgxEnclave> { let mut launch_token: sgx_launch_token_t = [0; 1024]; let mut launch_token_updated: i32 = 0; // call sgx_create_enclave to initialize an enclave instance // Debug Support: set 2nd parameter to 1 let debug = 1; let mut misc_attr = sgx_misc_attribute_t {secs_attr: sgx_attributes_t { flags:0, xfrm:0}, misc_select:0}; SgxEnclave::create(ENCLAVE_FILE, debug, &mut launch_token, &mut launch_token_updated, &mut misc_attr) } enum Mode { Client, Server, } fn main() 
{ let mut mode:Mode = Mode::Server; let mut args: Vec<_> = env::args().collect(); args.remove(0); while !args.is_empty() { match args.remove(0).as_ref() { "--client" => mode = Mode::Client, "--server" => mode = Mode::Server, _ => { panic!("Only --client/server/unlink is accepted"); } } } let enclave = match init_enclave() { Ok(r) => { println!("[+] Init Enclave Successful {}!", r.geteid()); r }, Err(x) => { println!("[-] Init Enclave Failed {}!", x.as_str()); return; }, }; match mode { Mode::Server => { println!("Running as server..."); let listener = TcpListener::bind("0.0.0.0:3443").unwrap(); loop { match listener.accept() { Ok((socket, addr)) => { println!("new client from {:?} {}", addr, socket.as_raw_fd()); let mut retval = sgx_status_t::SGX_SUCCESS; let result = unsafe { run_server(enclave.geteid(), &mut retval, socket.as_raw_fd()) }; match result { sgx_status_t::SGX_SUCCESS => { println!("ECALL success!"); }, _ => { println!("[-] ECALL Enclave Failed {}!", result.as_str()); return; } } } Err(e) => println!("couldn't get client: {:?}", e), } } //loop } Mode::Client => { println!("Running as client..."); let socket = TcpStream::connect("localhost:3443").unwrap(); let mut retval = sgx_status_t::SGX_SUCCESS; let result = unsafe { run_client(enclave.geteid(), &mut retval, socket.as_raw_fd()) }; match result { sgx_status_t::SGX_SUCCESS => { println!("ECALL success!"); }, _ => { println!("[-] ECALL Enclave Failed {}!", result.as_str()); return; } } } } println!("[+] Done!"); enclave.destroy(); }
32.116883
109
0.536326
8a8bbc09d07f8025bd652f7317a0bebdebe0deb8
391
use super::entry::Like; pub fn create(base: String, author: Address, timestamp: u64) -> ZomeApiResult<Address> { let new_like = Like::new(base, author, timestamp); let like_address = hdk::commit_entry(&new_like.entry())?; // TODO: create link to agent here Ok(like_address) } /* pub fn delete(base: String, author: Address, timestamp: u64) -> ZomeApiResult<Address> { } */
27.928571
88
0.685422
eb96191b49506187206ab676fa32f0dea4d3a1e2
14,149
//! Contains helper functions for converting between raw SID and SID string representations. #![allow(non_snake_case)] use libc; use std::ffi::OsStr; use std::iter::once; use std::mem; use std::ops::Drop; use std::os::windows::ffi::OsStrExt; use widestring::WideString; use winapi::shared::minwindef::{BYTE, DWORD, FALSE, HLOCAL, PDWORD}; use winapi::shared::ntdef::{HANDLE, LPCWSTR, LPWSTR, NULL, WCHAR}; use winapi::shared::sddl::{ConvertSidToStringSidW, ConvertStringSidToSidW}; use winapi::shared::winerror::{ERROR_INSUFFICIENT_BUFFER, ERROR_NOT_ALL_ASSIGNED, ERROR_SUCCESS}; #[allow(unused_imports)] use winapi::um::accctrl::{ SE_FILE_OBJECT, SE_KERNEL_OBJECT, SE_OBJECT_TYPE, SE_REGISTRY_KEY, SE_REGISTRY_WOW64_32KEY, SE_SERVICE, }; use winapi::um::aclapi::{ GetNamedSecurityInfoW, GetSecurityInfo, SetNamedSecurityInfoW, SetSecurityInfo, }; use winapi::um::errhandlingapi::{GetLastError, SetLastError}; use winapi::um::handleapi::{CloseHandle, INVALID_HANDLE_VALUE}; use winapi::um::processthreadsapi::{GetCurrentProcess, OpenProcessToken}; use winapi::um::securitybaseapi::{AdjustTokenPrivileges, CopySid, GetLengthSid}; use winapi::um::winbase::{GetUserNameW, LocalFree, LookupAccountNameW, LookupPrivilegeValueW}; use winapi::um::winnt::{ DACL_SECURITY_INFORMATION, GROUP_SECURITY_INFORMATION, LABEL_SECURITY_INFORMATION, OWNER_SECURITY_INFORMATION, PACL, PROTECTED_DACL_SECURITY_INFORMATION, PSECURITY_DESCRIPTOR, PSID, PTOKEN_PRIVILEGES, SACL_SECURITY_INFORMATION, SE_PRIVILEGE_ENABLED, SID_NAME_USE, TOKEN_ADJUST_PRIVILEGES, TOKEN_PRIVILEGES, TOKEN_QUERY, }; /// Converts a raw SID into a SID string representation. /// /// # Arguments /// * `sid` - A pointer to a raw SID buffer. In native Windows, this is would be a `PSID` type. /// /// # Errors /// On error, a Windows error code is returned with the `Err` type. 
pub fn sid_to_string(sid: PSID) -> Result<String, DWORD> { let mut raw_string_sid: LPWSTR = NULL as LPWSTR; if unsafe { ConvertSidToStringSidW(sid, &mut raw_string_sid) } == 0 || raw_string_sid == (NULL as LPWSTR) { return Err(unsafe { GetLastError() }); } let raw_string_sid_len = unsafe { libc::wcslen(raw_string_sid) }; let sid_string = unsafe { WideString::from_ptr(raw_string_sid, raw_string_sid_len) }; unsafe { LocalFree(raw_string_sid as HLOCAL) }; Ok(sid_string.to_string_lossy()) } /// Resolves a system username (either in the format of "user" or "DOMAIN\user") into a raw SID. The raw SID /// is represented by a `Vec<u8>` object. /// /// # Arguments /// * `name` - The user name to be resolved into a raw SID. /// * `system` - An optional string denoting the scope of the user name (such as a machine or domain name). If not required, use `None`. /// /// # Errors /// On error, a Windows error code is returned with the `Err` type. /// /// **Note**: If the error code is 0, `GetLastError()` returned `ERROR_INSUFFICIENT_BUFFER` after invoking `LookupAccountNameW` or /// the `sid_size` is 0. 
pub fn name_to_sid(name: &str, system: Option<&str>) -> Result<Vec<BYTE>, DWORD> { let raw_name: Vec<u16> = OsStr::new(name).encode_wide().chain(once(0)).collect(); let raw_system: Option<Vec<u16>> = system.map(|name| OsStr::new(name).encode_wide().chain(once(0)).collect()); let system_ptr: LPCWSTR = match raw_system { Some(sys_name) => sys_name.as_ptr(), None => NULL as LPCWSTR, }; let mut sid_size: DWORD = 0; let mut sid_type: SID_NAME_USE = 0 as SID_NAME_USE; let mut name_size: DWORD = 0; if unsafe { LookupAccountNameW( system_ptr, raw_name.as_ptr() as LPCWSTR, NULL as PSID, &mut sid_size, NULL as LPWSTR, &mut name_size, &mut sid_type, ) } != 0 { return Err(unsafe { GetLastError() }); } if unsafe { GetLastError() } != ERROR_INSUFFICIENT_BUFFER { return Err(0); } if sid_size == 0 { return Err(0); } let mut sid: Vec<BYTE> = Vec::with_capacity(sid_size as usize); let mut name: Vec<BYTE> = Vec::with_capacity((name_size as usize) * mem::size_of::<WCHAR>()); if unsafe { LookupAccountNameW( system_ptr, raw_name.as_ptr() as LPCWSTR, sid.as_mut_ptr() as PSID, &mut sid_size, name.as_mut_ptr() as LPWSTR, &mut name_size, &mut sid_type, ) } == 0 { return Err(unsafe { GetLastError() }); } unsafe { sid.set_len(sid_size as usize) }; Ok(sid) } /// Converts a string representation of a SID into a raw SID. The returned raw SID is contained in a `Vec<u8>` object. /// /// # Arguments /// * `string_sid` - The SID to converted into raw form as a string. /// /// # Errors /// On error, a Windows error code is wrapped in an `Err` type. 
pub fn string_to_sid(string_sid: &str) -> Result<Vec<BYTE>, DWORD> { let mut sid: PSID = NULL as PSID; let raw_string_sid: Vec<u16> = OsStr::new(string_sid) .encode_wide() .chain(once(0)) .collect(); if unsafe { ConvertStringSidToSidW(raw_string_sid.as_ptr(), &mut sid) } == 0 { return Err(unsafe { GetLastError() }); } let size = unsafe { GetLengthSid(sid) }; let mut sid_buf: Vec<BYTE> = Vec::with_capacity(size as usize); if unsafe { CopySid(size, sid_buf.as_mut_ptr() as PSID, sid) } == 0 { return Err(unsafe { GetLastError() }); } unsafe { sid_buf.set_len(size as usize) }; Ok(sid_buf) } /// Retrieves the user name of the current user. pub fn current_user() -> Option<String> { let mut username_size: DWORD = 0 as DWORD; if unsafe { GetUserNameW(NULL as LPWSTR, &mut username_size) } != 0 { return None; } let mut username: Vec<u16> = Vec::with_capacity(username_size as usize); if unsafe { GetUserNameW(username.as_mut_ptr() as LPWSTR, &mut username_size) } == 0 { return None; } let name = unsafe { WideString::from_ptr(username.as_ptr(), (username_size - 1) as usize) }; Some(name.to_string_lossy()) } fn set_privilege(name: &str, is_enabled: bool) -> Result<bool, DWORD> { let mut tkp = unsafe { mem::zeroed::<TOKEN_PRIVILEGES>() }; let wPrivilegeName: Vec<u16> = OsStr::new(name).encode_wide().chain(once(0)).collect(); if unsafe { LookupPrivilegeValueW( NULL as LPCWSTR, wPrivilegeName.as_ptr(), &mut tkp.Privileges[0].Luid, ) } == 0 { return Err(unsafe { GetLastError() }); } tkp.PrivilegeCount = 1; if is_enabled { tkp.Privileges[0].Attributes = SE_PRIVILEGE_ENABLED; } else { tkp.Privileges[0].Attributes = 0; } let mut hToken: HANDLE = INVALID_HANDLE_VALUE; if unsafe { OpenProcessToken( GetCurrentProcess(), TOKEN_ADJUST_PRIVILEGES | TOKEN_QUERY, &mut hToken, ) } == 0 { return Err(unsafe { GetLastError() }); } let status = unsafe { AdjustTokenPrivileges( hToken, FALSE, &mut tkp, 0, NULL as PTOKEN_PRIVILEGES, NULL as PDWORD, ) }; let code = unsafe { GetLastError() }; unsafe { 
CloseHandle(hToken) }; if code == ERROR_NOT_ALL_ASSIGNED { return Err(code); } if status == 0 { return Err(code); } Ok(is_enabled) } #[derive(Debug)] pub enum SDSource { Path(String), Handle(HANDLE), } #[derive(Debug)] struct SystemPrivilege { name: Option<String>, } impl SystemPrivilege { fn acquire(name: &str) -> Result<SystemPrivilege, DWORD> { set_privilege(name, true).map(|_| SystemPrivilege { name: Some(name.to_owned()), }) } fn release(&mut self) -> bool { let mut status = true; if let Some(ref name) = self.name { status = set_privilege(name, false).is_ok(); } self.name = None; status } } impl Drop for SystemPrivilege { fn drop(&mut self) { self.release(); } } /// This structure manages a Windows `SECURITY_DESCRIPTOR` object. #[derive(Debug)] pub struct SecurityDescriptor { pSecurityDescriptor: PSECURITY_DESCRIPTOR, /// Pointer to the discretionary access control list in the security descriptor pub pDacl: PACL, /// Pointer to the system access control list in the security descriptor pub pSacl: PACL, psidOwner: PSID, psidGroup: PSID, } impl SecurityDescriptor { /// Returns a `SecurityDescriptor` object for the specified object source. /// /// # Arguments /// * `source` - An object handle or a string containing the named object path. /// * `obj_type` - The named object path's type. See [SE_OBJECT_TYPE](https://docs.microsoft.com/en-us/windows/desktop/api/accctrl/ne-accctrl-_se_object_type). /// * `get_sacl` - A boolean specifying whether the returned `ACL` object will be able to enumerate and set /// System ACL entries. 
/// /// # Errors /// On error, a Windows error code is wrapped in an `Err` type pub fn from_source( source: &SDSource, obj_type: SE_OBJECT_TYPE, get_sacl: bool, ) -> Result<SecurityDescriptor, DWORD> { let mut obj = SecurityDescriptor::default(); let mut flags = DACL_SECURITY_INFORMATION | GROUP_SECURITY_INFORMATION | OWNER_SECURITY_INFORMATION; let privilege: Option<SystemPrivilege>; if get_sacl { privilege = SystemPrivilege::acquire("SeSecurityPrivilege").ok(); if privilege.is_none() { return Err(unsafe { GetLastError() }); } flags |= SACL_SECURITY_INFORMATION | LABEL_SECURITY_INFORMATION; } let ret = match *source { SDSource::Handle(handle) => unsafe { GetSecurityInfo( handle, obj_type, flags, &mut obj.psidOwner, &mut obj.psidGroup, &mut obj.pDacl, &mut obj.pSacl, &mut obj.pSecurityDescriptor, ) }, SDSource::Path(ref path) => { let wPath: Vec<u16> = OsStr::new(path).encode_wide().chain(once(0)).collect(); unsafe { GetNamedSecurityInfoW( wPath.as_ptr(), obj_type, flags, &mut obj.psidOwner, &mut obj.psidGroup, &mut obj.pDacl, &mut obj.pSacl, &mut obj.pSecurityDescriptor, ) } } }; if ret != ERROR_SUCCESS { unsafe { SetLastError(ret) }; return Err(ret); } if !get_sacl { obj.pSacl = NULL as PACL; } Ok(obj) } fn default() -> SecurityDescriptor { SecurityDescriptor { pSecurityDescriptor: NULL, pDacl: NULL as PACL, pSacl: NULL as PACL, psidOwner: NULL, psidGroup: NULL, } } /// Commits a provided discretionary and/or system access control list to the specified named object path. /// /// # Arguments /// * `path` - A string containing the named object path. /// * `obj_type` - The named object path's type. See [SE_OBJECT_TYPE](https://docs.microsoft.com/en-us/windows/desktop/api/accctrl/ne-accctrl-_se_object_type). /// * `dacl` - An optional /// * `sacl` - An optional /// /// # Remarks /// This function does not update the `pSacl` or `pDacl` field in the `SecurityDescriptor` object. 
The `ACL` object tends /// to completely reload the `SecurityDescriptor` object after a reload to ensure consistency. /// /// # Errors /// On error, `false` is returned. pub fn apply( &mut self, source: &SDSource, obj_type: SE_OBJECT_TYPE, dacl: Option<PACL>, sacl: Option<PACL>, ) -> bool { let dacl_ptr = dacl.unwrap_or(NULL as PACL); let sacl_ptr = sacl.unwrap_or(NULL as PACL); let mut flags = 0; if dacl_ptr != (NULL as PACL) { flags |= DACL_SECURITY_INFORMATION | PROTECTED_DACL_SECURITY_INFORMATION; } let privilege: Option<SystemPrivilege>; if sacl_ptr != (NULL as PACL) { privilege = SystemPrivilege::acquire("SeSecurityPrivilege").ok(); if privilege.is_none() { return false; } flags |= SACL_SECURITY_INFORMATION | LABEL_SECURITY_INFORMATION; } let ret = match *source { SDSource::Handle(handle) => unsafe { SetSecurityInfo( handle, obj_type, flags, NULL as PSID, NULL as PSID, dacl_ptr, sacl_ptr, ) }, SDSource::Path(ref path) => { let mut wPath: Vec<u16> = OsStr::new(path).encode_wide().chain(once(0)).collect(); unsafe { SetNamedSecurityInfoW( wPath.as_mut_ptr(), obj_type, flags, NULL as PSID, NULL as PSID, dacl_ptr, sacl_ptr, ) } } }; if ret != ERROR_SUCCESS { unsafe { SetLastError(ret) }; return false; } true } } impl Drop for SecurityDescriptor { fn drop(&mut self) { if self.pSecurityDescriptor != NULL { unsafe { LocalFree(self.pSecurityDescriptor) }; } } }
31.096703
163
0.583363
c1fbed0d8bb30b60acf6153c6837e0fe66f38668
6,211
use base64; use byteorder::{BigEndian, ByteOrder}; use crypto; use crypto::aes; use crypto::digest::Digest; use crypto::hmac::Hmac; use crypto::pbkdf2::pbkdf2; use crypto::sha1::Sha1; use protobuf::ProtobufEnum; use serde; use serde_json; use std::fs::File; use std::io::{self, Read, Write}; use std::ops::FnOnce; use std::path::Path; use protocol::authentication::AuthenticationType; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Credentials { pub username: String, #[serde(serialize_with = "serialize_protobuf_enum")] #[serde(deserialize_with = "deserialize_protobuf_enum")] pub auth_type: AuthenticationType, #[serde(serialize_with = "serialize_base64")] #[serde(deserialize_with = "deserialize_base64")] pub auth_data: Vec<u8>, } impl Credentials { pub fn with_password(username: String, password: String) -> Credentials { Credentials { username: username, auth_type: AuthenticationType::AUTHENTICATION_USER_PASS, auth_data: password.into_bytes(), } } pub fn with_blob(username: String, encrypted_blob: &str, device_id: &str) -> Credentials { fn read_u8<R: Read>(stream: &mut R) -> io::Result<u8> { let mut data = [0u8]; try!(stream.read_exact(&mut data)); Ok(data[0]) } fn read_int<R: Read>(stream: &mut R) -> io::Result<u32> { let lo = try!(read_u8(stream)) as u32; if lo & 0x80 == 0 { return Ok(lo); } let hi = try!(read_u8(stream)) as u32; Ok(lo & 0x7f | hi << 7) } fn read_bytes<R: Read>(stream: &mut R) -> io::Result<Vec<u8>> { let length = try!(read_int(stream)); let mut data = vec![0u8; length as usize]; try!(stream.read_exact(&mut data)); Ok(data) } let encrypted_blob = base64::decode(encrypted_blob).unwrap(); let secret = { let mut data = [0u8; 20]; let mut h = crypto::sha1::Sha1::new(); h.input(device_id.as_bytes()); h.result(&mut data); data }; let key = { let mut data = [0u8; 24]; let mut mac = Hmac::new(Sha1::new(), &secret); pbkdf2(&mut mac, username.as_bytes(), 0x100, &mut data[0..20]); let mut hash = Sha1::new(); hash.input(&data[0..20]); hash.result(&mut 
data[0..20]); BigEndian::write_u32(&mut data[20..], 20); data }; let blob = { // Anyone know what this block mode is ? let mut data = vec![0u8; encrypted_blob.len()]; let mut cipher = aes::ecb_decryptor( aes::KeySize::KeySize192, &key, crypto::blockmodes::NoPadding, ); cipher .decrypt( &mut crypto::buffer::RefReadBuffer::new(&encrypted_blob), &mut crypto::buffer::RefWriteBuffer::new(&mut data), true, ) .unwrap(); let l = encrypted_blob.len(); for i in 0..l - 0x10 { data[l - i - 1] ^= data[l - i - 0x11]; } data }; let mut cursor = io::Cursor::new(&blob); read_u8(&mut cursor).unwrap(); read_bytes(&mut cursor).unwrap(); read_u8(&mut cursor).unwrap(); let auth_type = read_int(&mut cursor).unwrap(); let auth_type = AuthenticationType::from_i32(auth_type as i32).unwrap(); read_u8(&mut cursor).unwrap(); let auth_data = read_bytes(&mut cursor).unwrap(); Credentials { username: username, auth_type: auth_type, auth_data: auth_data, } } fn from_reader<R: Read>(mut reader: R) -> Credentials { let mut contents = String::new(); reader.read_to_string(&mut contents).unwrap(); serde_json::from_str(&contents).unwrap() } pub(crate) fn from_file<P: AsRef<Path>>(path: P) -> Option<Credentials> { File::open(path).ok().map(Credentials::from_reader) } fn save_to_writer<W: Write>(&self, writer: &mut W) { let contents = serde_json::to_string(&self.clone()).unwrap(); writer.write_all(contents.as_bytes()).unwrap(); } pub(crate) fn save_to_file<P: AsRef<Path>>(&self, path: P) { let mut file = File::create(path).unwrap(); self.save_to_writer(&mut file) } } fn serialize_protobuf_enum<T, S>(v: &T, ser: S) -> Result<S::Ok, S::Error> where T: ProtobufEnum, S: serde::Serializer, { serde::Serialize::serialize(&v.value(), ser) } fn deserialize_protobuf_enum<T, D>(de: D) -> Result<T, D::Error> where T: ProtobufEnum, D: serde::Deserializer, { let v: i32 = try!(serde::Deserialize::deserialize(de)); T::from_i32(v).ok_or_else(|| serde::de::Error::custom("Invalid enum value")) } fn serialize_base64<T, S>(v: 
&T, ser: S) -> Result<S::Ok, S::Error> where T: AsRef<[u8]>, S: serde::Serializer, { serde::Serialize::serialize(&base64::encode(v.as_ref()), ser) } fn deserialize_base64<D>(de: D) -> Result<Vec<u8>, D::Error> where D: serde::Deserializer, { let v: String = try!(serde::Deserialize::deserialize(de)); base64::decode(&v).map_err(|e| serde::de::Error::custom(e.to_string())) } pub fn get_credentials<F: FnOnce(&String) -> String>( username: Option<String>, password: Option<String>, cached_credentials: Option<Credentials>, prompt: F, ) -> Option<Credentials> { match (username, password, cached_credentials) { (Some(username), Some(password), _) => Some(Credentials::with_password(username, password)), (Some(ref username), _, Some(ref credentials)) if *username == credentials.username => { Some(credentials.clone()) } (Some(username), None, _) => Some(Credentials::with_password( username.clone(), prompt(&username), )), (None, _, Some(credentials)) => Some(credentials), (None, _, None) => None, } }
30.150485
100
0.565609
fce9723bb2ae0f881120eeba279acf1f51b7d7be
3,314
use crate::{ lua_simple_enum::{LuaAlignment, LuaLinkType}, lua_tag::LuaTag, }; use pulldown_cmark::{CodeBlockKind, CowStr, Tag}; #[derive(Debug, Clone, Copy)] pub struct LuaTagModule; impl mlua::UserData for LuaTagModule { fn add_methods<'lua, M: mlua::UserDataMethods<'lua, Self>>(methods: &mut M) { methods.add_method("paragraph", |_, _, ()| -> mlua::Result<LuaTag> { Ok(LuaTag(Tag::Paragraph)) }); methods.add_method("heading", |_, _, n: u32| -> mlua::Result<LuaTag> { Ok(LuaTag(Tag::Heading(n))) }); methods.add_method("indented_code_block", |_, _, ()| -> mlua::Result<LuaTag> { Ok(LuaTag(Tag::CodeBlock(CodeBlockKind::Indented))) }); methods.add_method("fenced_code_block", |_, _, info: String| -> mlua::Result<LuaTag> { Ok(LuaTag(Tag::CodeBlock(CodeBlockKind::Fenced(CowStr::Boxed(info.into_boxed_str()))))) }); methods.add_method("ordered_list", |_, _, start: u64| -> mlua::Result<LuaTag> { Ok(LuaTag(Tag::List(Some(start)))) }); methods.add_method("unordered_list", |_, _, ()| -> mlua::Result<LuaTag> { Ok(LuaTag(Tag::List(None))) }); methods.add_method("footnote_definition", |_, _, def: String| -> mlua::Result<LuaTag> { Ok(LuaTag(Tag::FootnoteDefinition(CowStr::Boxed(def.into_boxed_str())))) }); methods.add_method( "table", |_, _, alignments: Vec<LuaAlignment>| -> mlua::Result<LuaTag> { Ok(LuaTag(Tag::Table(alignments.into_iter().map(|x| x.0).collect()))) }, ); methods.add_method("table_head", |_, _, ()| -> mlua::Result<LuaTag> { Ok(LuaTag(Tag::TableHead)) }); methods.add_method("table_row", |_, _, ()| -> mlua::Result<LuaTag> { Ok(LuaTag(Tag::TableRow)) }); methods.add_method("table_cell", |_, _, ()| -> mlua::Result<LuaTag> { Ok(LuaTag(Tag::TableCell)) }); methods.add_method("emphasis", |_, _, ()| -> mlua::Result<LuaTag> { Ok(LuaTag(Tag::Emphasis)) }); methods .add_method("strong", |_, _, ()| -> mlua::Result<LuaTag> { Ok(LuaTag(Tag::Strong)) }); methods.add_method("strikethrough", |_, _, ()| -> mlua::Result<LuaTag> { Ok(LuaTag(Tag::Strikethrough)) }); methods.add_method( 
"link", |_, _, (link_type, destination, title): (LuaLinkType, String, String)| -> mlua::Result<LuaTag> { Ok(LuaTag(Tag::Link( link_type.0, CowStr::Boxed(destination.into_boxed_str()), CowStr::Boxed(title.into_boxed_str()), ))) }, ); methods.add_method( "image", |_, _, (link_type, destination, title): (LuaLinkType, String, String)| -> mlua::Result<LuaTag> { Ok(LuaTag(Tag::Image( link_type.0, CowStr::Boxed(destination.into_boxed_str()), CowStr::Boxed(title.into_boxed_str()), ))) }, ); } }
33.474747
99
0.513277
91b31b27066a067b7d2a0b902f8b0d264815471d
2,979
extern crate log; use std::{cell::RefCell, collections::VecDeque, rc::Rc}; use naia_socket_shared::{parse_server_url, SocketConfig}; use crate::{ conditioned_packet_receiver::ConditionedPacketReceiver, io::Io, packet_receiver::{PacketReceiver, PacketReceiverTrait}, }; use super::{ addr_cell::AddrCell, packet_receiver::PacketReceiverImpl, packet_sender::PacketSender, webrtc_internal::webrtc_initialize, }; /// A client-side socket which communicates with an underlying unordered & /// unreliable protocol pub struct Socket { config: SocketConfig, io: Option<Io>, } impl Socket { /// Create a new Socket pub fn new(config: &SocketConfig) -> Self { Socket { config: config.clone(), io: None, } } /// Connects to the given server address pub fn connect(&mut self, server_session_url: &str) { if self.io.is_some() { panic!("Socket already listening!"); } let server_url = parse_server_url(server_session_url); let addr_cell = AddrCell::new(); let message_queue = Rc::new(RefCell::new(VecDeque::new())); let data_channel = webrtc_initialize( format!("{}{}", server_url, self.config.rtc_endpoint_path.clone()), message_queue.clone(), addr_cell.clone(), ); let dropped_outgoing_messages = Rc::new(RefCell::new(VecDeque::new())); let packet_sender = PacketSender::new( data_channel.clone(), dropped_outgoing_messages.clone(), addr_cell.clone(), ); let packet_receiver = PacketReceiverImpl::new(message_queue.clone(), addr_cell.clone()); let sender = packet_sender.clone(); let receiver: Box<dyn PacketReceiverTrait> = { let inner_receiver = Box::new(packet_receiver.clone()); if let Some(config) = &self.config.link_condition { Box::new(ConditionedPacketReceiver::new(inner_receiver, config)) } else { inner_receiver } }; self.io = Some(Io { packet_sender: sender, packet_receiver: PacketReceiver::new(receiver), }); } /// Gets a PacketSender which can be used to send packets through the Socket pub fn packet_sender(&self) -> PacketSender { return self .io .as_ref() .expect("Socket is not connected yet! 
Call Socket.connect() before this.") .packet_sender .clone(); } /// Gets a PacketReceiver which can be used to receive packets from the /// Socket pub fn packet_receiver(&self) -> PacketReceiver { return self .io .as_ref() .expect("Socket is not connected yet! Call Socket.connect() before this.") .packet_receiver .clone(); } } unsafe impl Send for Socket {} unsafe impl Sync for Socket {}
30.090909
96
0.605237
dba152a71b756fb700d392ca2dd7b0f1d38f7f34
2,107
extern crate failure; #[macro_use] extern crate failure_derive; use std::io; use std::fmt; use failure::{Backtrace, Fail}; #[derive(Fail, Debug)] #[fail(display = "An error has occurred: {}", inner)] struct WrapError { #[fail(cause)] inner: io::Error, } #[test] fn wrap_error() { let inner = io::Error::from_raw_os_error(98); let err = WrapError { inner }; assert!(err.cause().and_then(|err| err.downcast_ref::<io::Error>()).is_some()); } #[derive(Fail, Debug)] #[fail(display = "An error has occurred: {}", _0)] struct WrapTupleError(#[fail(cause)] io::Error); #[test] fn wrap_tuple_error() { let io_error = io::Error::from_raw_os_error(98); let err: WrapTupleError = WrapTupleError(io_error); assert!(err.cause().and_then(|err| err.downcast_ref::<io::Error>()).is_some()); } #[derive(Fail, Debug)] #[fail(display = "An error has occurred: {}", inner)] struct WrapBacktraceError { #[fail(cause)] inner: io::Error, backtrace: Backtrace, } #[test] fn wrap_backtrace_error() { let inner = io::Error::from_raw_os_error(98); let err: WrapBacktraceError = WrapBacktraceError { inner, backtrace: Backtrace::new() }; assert!(err.cause().and_then(|err| err.downcast_ref::<io::Error>()).is_some()); assert!(err.backtrace().is_some()); } #[derive(Fail, Debug)] enum WrapEnumError { #[fail(display = "An error has occurred: {}", _0)] Io(#[fail(cause)] io::Error), #[fail(display = "An error has occurred: {}", inner)] Fmt { #[fail(cause)] inner: fmt::Error, backtrace: Backtrace, }, } #[test] fn wrap_enum_error() { let io_error = io::Error::from_raw_os_error(98); let err: WrapEnumError = WrapEnumError::Io(io_error); assert!(err.cause().and_then(|err| err.downcast_ref::<io::Error>()).is_some()); assert!(err.backtrace().is_none()); let fmt_error = fmt::Error::default(); let err: WrapEnumError = WrapEnumError::Fmt { inner: fmt_error, backtrace: Backtrace::new() }; assert!(err.cause().and_then(|err| err.downcast_ref::<fmt::Error>()).is_some()); assert!(err.backtrace().is_some()); }
30.1
98
0.650688
26979e5bbfb2e822e4b81005af6b4bec66d1c873
1,582
// Copyright 2022 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::any::Any; use crate::sql::optimizer::ColumnSet; use crate::sql::optimizer::PhysicalProperty; use crate::sql::optimizer::SExpr; use crate::sql::plans::BasePlan; use crate::sql::plans::LogicalPlan; use crate::sql::plans::PhysicalPlan; use crate::sql::plans::PlanType; use crate::sql::IndexType; #[derive(Clone, Debug)] pub struct PhysicalScan { pub table_index: IndexType, pub columns: ColumnSet, } impl BasePlan for PhysicalScan { fn plan_type(&self) -> PlanType { PlanType::PhysicalScan } fn is_physical(&self) -> bool { true } fn is_logical(&self) -> bool { false } fn as_physical(&self) -> Option<&dyn PhysicalPlan> { todo!() } fn as_logical(&self) -> Option<&dyn LogicalPlan> { None } fn as_any(&self) -> &dyn Any { self } } impl PhysicalPlan for PhysicalScan { fn compute_physical_prop(&self, _expression: &SExpr) -> PhysicalProperty { todo!() } }
25.111111
78
0.671934
0308af19808ab44adec81fcbaa17e680cad2d88f
6,652
// This file is part of Substrate. // Copyright (C) 2017-2021 Parity Technologies (UK) Ltd. // SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0 // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. use crate::{chain_spec, service, Cli, Subcommand}; use labor_node_executor::Executor; use labor_node_runtime::{Block, RuntimeApi}; use sc_cli::{Result, SubstrateCli, RuntimeVersion, Role, ChainSpec}; use sc_service::PartialComponents; use crate::service::new_partial; impl SubstrateCli for Cli { fn impl_name() -> String { "Labor Node".into() } fn impl_version() -> String { env!("SUBSTRATE_CLI_IMPL_VERSION").into() } fn description() -> String { env!("CARGO_PKG_DESCRIPTION").into() } fn author() -> String { env!("CARGO_PKG_AUTHORS").into() } fn support_url() -> String { "https://github.com/benjamin-martijn/labor-node/issues/new".into() } fn copyright_start_year() -> i32 { 2021 } fn load_spec(&self, id: &str) -> std::result::Result<Box<dyn sc_service::ChainSpec>, String> { let spec = match id { "" => return Err("Please specify which chain you want to run, e.g. 
--dev or --chain=local".into()), "dev" => Box::new(chain_spec::development_config()), "local" => Box::new(chain_spec::local_testnet_config()), "fir" | "flaming-fir" => Box::new(chain_spec::flaming_fir_config()?), "staging" => Box::new(chain_spec::staging_testnet_config()), path => Box::new(chain_spec::ChainSpec::from_json_file( std::path::PathBuf::from(path), )?), }; Ok(spec) } fn native_runtime_version(_: &Box<dyn ChainSpec>) -> &'static RuntimeVersion { &labor_node_runtime::VERSION } } /// Parse command line arguments into service configuration. pub fn run() -> Result<()> { let cli = Cli::from_args(); match &cli.subcommand { None => { let runner = cli.create_runner(&cli.run)?; runner.run_node_until_exit(|config| async move { match config.role { Role::Light => service::new_light(config), _ => service::new_full(config), }.map_err(sc_cli::Error::Service) }) } Some(Subcommand::Inspect(cmd)) => { let runner = cli.create_runner(cmd)?; runner.sync_run(|config| cmd.run::<Block, RuntimeApi, Executor>(config)) } Some(Subcommand::Benchmark(cmd)) => { if cfg!(feature = "runtime-benchmarks") { let runner = cli.create_runner(cmd)?; runner.sync_run(|config| cmd.run::<Block, Executor>(config)) } else { Err("Benchmarking wasn't enabled when building the node. \ You can enable it with `--features runtime-benchmarks`.".into()) } } Some(Subcommand::Key(cmd)) => cmd.run(&cli), Some(Subcommand::Sign(cmd)) => cmd.run(), Some(Subcommand::Verify(cmd)) => cmd.run(), Some(Subcommand::Vanity(cmd)) => cmd.run(), Some(Subcommand::BuildSpec(cmd)) => { let runner = cli.create_runner(cmd)?; runner.sync_run(|config| cmd.run(config.chain_spec, config.network)) } Some(Subcommand::CheckBlock(cmd)) => { let runner = cli.create_runner(cmd)?; runner.async_run(|config| { let PartialComponents { client, task_manager, import_queue, .. 
} = new_partial(&config)?; Ok((cmd.run(client, import_queue), task_manager)) }) } Some(Subcommand::ExportBlocks(cmd)) => { let runner = cli.create_runner(cmd)?; runner.async_run(|config| { let PartialComponents { client, task_manager, .. } = new_partial(&config)?; Ok((cmd.run(client, config.database), task_manager)) }) } Some(Subcommand::ExportState(cmd)) => { let runner = cli.create_runner(cmd)?; runner.async_run(|config| { let PartialComponents { client, task_manager, .. } = new_partial(&config)?; Ok((cmd.run(client, config.chain_spec), task_manager)) }) } Some(Subcommand::ImportBlocks(cmd)) => { let runner = cli.create_runner(cmd)?; runner.async_run(|config| { let PartialComponents { client, task_manager, import_queue, .. } = new_partial(&config)?; Ok((cmd.run(client, import_queue), task_manager)) }) } Some(Subcommand::PurgeChain(cmd)) => { let runner = cli.create_runner(cmd)?; runner.sync_run(|config| cmd.run(config.database)) } Some(Subcommand::Revert(cmd)) => { let runner = cli.create_runner(cmd)?; runner.async_run(|config| { let PartialComponents { client, task_manager, backend, .. } = new_partial(&config)?; Ok((cmd.run(client, backend), task_manager)) }) } #[cfg(feature = "try-runtime")] Some(Subcommand::TryRuntime(cmd)) => { let runner = cli.create_runner(cmd)?; runner.async_run(|config| { // we don't need any of the components of new_partial, just a runtime, or a task // manager to do `async_run`. let registry = config.prometheus_config.as_ref().map(|cfg| &cfg.registry); let task_manager = sc_service::TaskManager::new( config.task_executor.clone(), registry, ).map_err(|e| sc_cli::Error::Service(sc_service::Error::Prometheus(e)))?; Ok((cmd.run::<Block, Executor>(config), task_manager)) }) } } }
39.360947
115
0.567348
5be57f4ec161f8663928c868c7d251f59c333dcb
7,653
//! Rendering backend for the DOM with hydration support. use std::fmt; use std::hash::{Hash, Hasher}; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; use web_sys::Node; use crate::generic_node::{DomNode, GenericNode, Html}; use crate::reactive::*; use crate::utils::hydrate::web::get_next_element; use crate::utils::hydrate::{hydration_completed, with_hydration_context}; use crate::utils::render::insert; use crate::view::View; use super::dom_node::NodeId; /// Rendering backend for the DOM with hydration support. /// /// _This API requires the following crate features to be activated: /// `experimental-hydrate`, `dom`_ #[derive(Clone)] pub struct HydrateNode { node: DomNode, } impl HydrateNode { pub fn inner_element(&self) -> Node { self.node.inner_element() } pub fn unchecked_into<T: JsCast>(self) -> T { self.node.unchecked_into() } pub(super) fn get_node_id(&self) -> NodeId { self.node.get_node_id() } pub fn from_web_sys(node: Node) -> Self { Self { node: DomNode::from_web_sys(node), } } } impl PartialEq for HydrateNode { fn eq(&self, other: &Self) -> bool { self.node == other.node } } impl Eq for HydrateNode {} impl Hash for HydrateNode { fn hash<H: Hasher>(&self, state: &mut H) { self.get_node_id().hash(state); } } impl AsRef<JsValue> for HydrateNode { fn as_ref(&self) -> &JsValue { self.node.as_ref() } } impl From<HydrateNode> for JsValue { fn from(node: HydrateNode) -> Self { JsValue::from(node.node) } } impl fmt::Debug for HydrateNode { /// Prints outerHtml of [`Node`]. fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.node.fmt(f) } } impl GenericNode for HydrateNode { type EventType = web_sys::Event; const USE_HYDRATION_CONTEXT: bool = true; const CLIENT_SIDE_HYDRATION: bool = true; /// When hydrating, instead of creating a new node, this will attempt to /// hydrate an existing node. 
fn element(tag: &str) -> Self { let el = get_next_element(); if let Some(el) = el { Self { node: DomNode::from_web_sys(el.into()), } } else { Self { node: DomNode::element(tag), } } } /// When hydrating, instead of creating a new node, this will attempt to /// hydrate an existing node. fn text_node(text: &str) -> Self { // TODO Self { node: DomNode::text_node(text), } } fn marker() -> Self { // TODO Self { node: DomNode::marker(), } } fn marker_with_text(text: &str) -> Self { // TODO Self { node: DomNode::marker_with_text(text), } } #[inline] fn set_attribute(&self, name: &str, value: &str) { self.node.set_attribute(name, value); } #[inline] fn remove_attribute(&self, name: &str) { self.node.remove_attribute(name); } #[inline] fn set_class_name(&self, value: &str) { self.node.set_class_name(value); } #[inline] fn add_class(&self, class: &str) { self.node.add_class(class); } #[inline] fn remove_class(&self, class: &str) { self.node.remove_class(class); } #[inline] fn set_property(&self, name: &str, value: &JsValue) { self.node.set_property(name, value); } #[inline] fn remove_property(&self, name: &str) { self.node.remove_property(name); } #[inline] fn append_child(&self, child: &Self) { if hydration_completed() { // Do not append nodes during hydration as that will result in // duplicate text nodes. 
self.node.append_child(&child.node); } } #[inline] fn first_child(&self) -> Option<Self> { self.node.first_child().map(|node| Self { node }) } #[inline] fn insert_child_before( &self, new_node: &Self, reference_node: Option<&Self>, ) { self.node.insert_child_before( &new_node.node, reference_node.map(|node| &node.node), ); } #[inline] fn remove_child(&self, child: &Self) { self.node.remove_child(&child.node); } #[inline] fn replace_child(&self, old: &Self, new: &Self) { self.node.replace_child(&old.node, &new.node); } #[inline] fn insert_sibling_before(&self, child: &Self) { self.node.insert_sibling_before(&child.node); } #[inline] fn parent_node(&self) -> Option<Self> { self.node.parent_node().map(|node| Self { node }) } #[inline] fn next_sibling(&self) -> Option<Self> { self.node.next_sibling().map(|node| Self { node }) } #[inline] fn remove_self(&self) { self.node.remove_self(); } #[inline] fn event<'a>( &self, ctx: ScopeRef<'a>, name: &str, handler: Box<dyn Fn(Self::EventType) + 'a>, ) { self.node.event(ctx, name, handler); } #[inline] fn update_inner_text(&self, text: &str) { self.node.update_inner_text(text); } #[inline] fn dangerously_set_inner_html(&self, html: &str) { self.node.dangerously_set_inner_html(html); } #[inline] fn clone_node(&self) -> Self { Self { node: self.node.clone_node(), } } } impl Html for HydrateNode { const IS_BROWSER: bool = true; } /// Render a [`View`] under a `parent` node by reusing existing nodes (client /// side hydration). Alias for [`hydrate_to`] with `parent` being the `<body>` /// tag. /// /// For rendering without hydration, use [`render`](super::render) instead. 
/// /// _This API requires the following crate features to be activated: /// `experimental-hydrate`, `dom`_ pub fn hydrate(template: impl FnOnce(ScopeRef<'_>) -> View<HydrateNode>) { let window = web_sys::window().unwrap(); let document = window.document().unwrap(); hydrate_to(template, &document.body().unwrap()); } /// Render a [`View`] under a `parent` node by reusing existing nodes (client /// side hydration). For rendering under the `<body>` tag, use [`hydrate_to`] /// instead. /// /// For rendering without hydration, use [`render`](super::render) instead. /// /// _This API requires the following crate features to be activated: /// `experimental-hydrate`, `dom`_ pub fn hydrate_to( view: impl FnOnce(ScopeRef<'_>) -> View<HydrateNode>, parent: &Node, ) { // Do not call the destructor function, effectively leaking the scope. let _ = hydrate_get_scope(view, parent); } /// Render a [`View`] under a `parent` node, in a way that can be cleaned up. /// This function is intended to be used for injecting an ephemeral sycamore /// view into a non-sycamore app (for example, a file upload modal where you /// want to cancel the upload if the modal is closed). /// /// _This API requires the following crate features to be activated: /// `experimental-hydrate`, `dom`_ #[must_use = "please hold onto the ReactiveScope until you want to clean things up, or use render_to() instead"] pub fn hydrate_get_scope<'a>( view: impl FnOnce(ScopeRef<'_>) -> View<HydrateNode> + 'a, parent: &'a Node, ) -> ScopeDisposer<'a> { create_scope(|ctx| { insert( ctx, &HydrateNode::from_web_sys(parent.clone()), with_hydration_context(|| view(ctx)), None, None, false, ); }) }
25.425249
112
0.593493
230587b726fd1e022748df5e76ab48899237be7f
1,846
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Exposes the NonZero lang item which provides optimization hints. use marker::{Sized, MarkerTrait}; use ops::Deref; /// Unsafe trait to indicate what types are usable with the NonZero struct pub unsafe trait Zeroable : MarkerTrait {} unsafe impl<T:?Sized> Zeroable for *const T {} unsafe impl<T:?Sized> Zeroable for *mut T {} unsafe impl Zeroable for isize {} unsafe impl Zeroable for usize {} unsafe impl Zeroable for i8 {} unsafe impl Zeroable for u8 {} unsafe impl Zeroable for i16 {} unsafe impl Zeroable for u16 {} unsafe impl Zeroable for i32 {} unsafe impl Zeroable for u32 {} unsafe impl Zeroable for i64 {} unsafe impl Zeroable for u64 {} /// A wrapper type for raw pointers and integers that will never be /// NULL or 0 that might allow certain optimizations. #[lang="non_zero"] #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Debug, Hash)] #[unstable(feature = "core")] pub struct NonZero<T: Zeroable>(T); impl<T: Zeroable> NonZero<T> { /// Create an instance of NonZero with the provided value. /// You must indeed ensure that the value is actually "non-zero". #[inline(always)] pub unsafe fn new(inner: T) -> NonZero<T> { NonZero(inner) } } impl<T: Zeroable> Deref for NonZero<T> { type Target = T; #[inline] fn deref<'a>(&'a self) -> &'a T { let NonZero(ref inner) = *self; inner } }
32.385965
74
0.700433
79826889688990f92f05210c089882d9dcbd07ef
8,333
use tiny_skia::*; fn draw_line(x0: f32, y0: f32, x1: f32, y1: f32, anti_alias: bool, width: f32, line_cap: LineCap) -> Pixmap { let mut pixmap = Pixmap::new(100, 100).unwrap(); let mut canvas = Canvas::from(pixmap.as_mut()); let mut pb = PathBuilder::new(); pb.move_to(x0, y0); pb.line_to(x1, y1); let path = pb.finish().unwrap(); let mut paint = Paint::default(); paint.set_color_rgba8(50, 127, 150, 200); paint.anti_alias = anti_alias; let mut stroke = Stroke::default(); stroke.width = width; stroke.line_cap = line_cap; canvas.stroke_path(&path, &paint, &stroke); pixmap } #[test] fn hline_05() { let expected = Pixmap::load_png("tests/images/hairline/hline-05.png").unwrap(); assert_eq!(draw_line(10.0, 10.0, 90.0, 10.0, false, 0.5, LineCap::Butt), expected); } #[test] fn hline_05_aa() { let expected = Pixmap::load_png("tests/images/hairline/hline-05-aa.png").unwrap(); assert_eq!(draw_line(10.0, 10.0, 90.0, 10.0, true, 0.5, LineCap::Butt), expected); } #[test] fn hline_05_aa_round() { let expected = Pixmap::load_png("tests/images/hairline/hline-05-aa-round.png").unwrap(); assert_eq!(draw_line(10.0, 10.0, 90.0, 10.0, true, 0.5, LineCap::Round), expected); } #[test] fn vline_05() { let expected = Pixmap::load_png("tests/images/hairline/vline-05.png").unwrap(); assert_eq!(draw_line(10.0, 10.0, 10.0, 90.0, false, 0.5, LineCap::Butt), expected); } #[test] fn vline_05_aa() { let expected = Pixmap::load_png("tests/images/hairline/vline-05-aa.png").unwrap(); assert_eq!(draw_line(10.0, 10.0, 10.0, 90.0, true, 0.5, LineCap::Butt), expected); } #[test] fn vline_05_aa_round() { let expected = Pixmap::load_png("tests/images/hairline/vline-05-aa-round.png").unwrap(); assert_eq!(draw_line(10.0, 10.0, 10.0, 90.0, true, 0.5, LineCap::Round), expected); } #[test] fn horish_05_aa() { let expected = Pixmap::load_png("tests/images/hairline/horish-05-aa.png").unwrap(); assert_eq!(draw_line(10.0, 10.0, 90.0, 70.0, true, 0.5, LineCap::Butt), expected); } #[test] fn vertish_05_aa() { let 
expected = Pixmap::load_png("tests/images/hairline/vertish-05-aa.png").unwrap(); assert_eq!(draw_line(10.0, 10.0, 70.0, 90.0, true, 0.5, LineCap::Butt), expected); } #[test] fn clip_line_05_aa() { let expected = Pixmap::load_png("tests/images/hairline/clip-line-05-aa.png").unwrap(); assert_eq!(draw_line(-10.0, 10.0, 110.0, 70.0, true, 0.5, LineCap::Butt), expected); } #[test] fn clip_line_00() { let expected = Pixmap::load_png("tests/images/hairline/clip-line-00.png").unwrap(); assert_eq!(draw_line(-10.0, 10.0, 110.0, 70.0, false, 0.0, LineCap::Butt), expected); } #[test] fn clip_hline_top_aa() { let expected = Pixmap::load_png("tests/images/hairline/clip-hline-top-aa.png").unwrap(); assert_eq!(draw_line(-1.0, 0.0, 101.0, 0.0, true, 1.0, LineCap::Butt), expected); } #[test] fn clip_hline_bottom_aa() { let expected = Pixmap::load_png("tests/images/hairline/clip-hline-bottom-aa.png").unwrap(); assert_eq!(draw_line(-1.0, 100.0, 101.0, 100.0, true, 1.0, LineCap::Butt), expected); } #[test] fn clip_vline_left_aa() { let expected = Pixmap::load_png("tests/images/hairline/clip-vline-left-aa.png").unwrap(); assert_eq!(draw_line(0.0, -1.0, 0.0, 101.0, true, 1.0, LineCap::Butt), expected); } #[test] fn clip_vline_right_aa() { let expected = Pixmap::load_png("tests/images/hairline/clip-vline-right-aa.png").unwrap(); assert_eq!(draw_line(100.0, -1.0, 100.0, 101.0, true, 1.0, LineCap::Butt), expected); } fn draw_quad(anti_alias: bool, width: f32, line_cap: LineCap) -> Pixmap { let mut pixmap = Pixmap::new(200, 100).unwrap(); let mut canvas = Canvas::from(pixmap.as_mut()); let mut pb = PathBuilder::new(); pb.move_to(25.0, 80.0); pb.quad_to(155.0, 75.0, 175.0, 20.0); let path = pb.finish().unwrap(); let mut paint = Paint::default(); paint.set_color_rgba8(50, 127, 150, 200); paint.anti_alias = anti_alias; let mut stroke = Stroke::default(); stroke.width = width; stroke.line_cap = line_cap; canvas.stroke_path(&path, &paint, &stroke); pixmap } #[test] fn quad_width_05_aa() { let 
expected = Pixmap::load_png("tests/images/hairline/quad-width-05-aa.png").unwrap(); assert_eq!(draw_quad(true, 0.5, LineCap::Butt), expected); } #[test] fn quad_width_05_aa_round() { let expected = Pixmap::load_png("tests/images/hairline/quad-width-05-aa-round.png").unwrap(); assert_eq!(draw_quad(true, 0.5, LineCap::Round), expected); } #[test] fn quad_width_00() { let expected = Pixmap::load_png("tests/images/hairline/quad-width-00.png").unwrap(); assert_eq!(draw_quad(false, 0.0, LineCap::Butt), expected); } fn draw_cubic(points: &[f32; 8], anti_alias: bool, width: f32, line_cap: LineCap) -> Pixmap { let mut pixmap = Pixmap::new(200, 100).unwrap(); let mut canvas = Canvas::from(pixmap.as_mut()); let mut pb = PathBuilder::new(); pb.move_to(points[0], points[1]); pb.cubic_to(points[2], points[3], points[4], points[5], points[6], points[7]); let path = pb.finish().unwrap(); let mut paint = Paint::default(); paint.set_color_rgba8(50, 127, 150, 200); paint.anti_alias = anti_alias; let mut stroke = Stroke::default(); stroke.width = width; stroke.line_cap = line_cap; canvas.stroke_path(&path, &paint, &stroke); pixmap } #[test] fn cubic_width_10_aa() { let expected = Pixmap::load_png("tests/images/hairline/cubic-width-10-aa.png").unwrap(); assert_eq!(draw_cubic(&[25.0, 80.0, 55.0, 25.0, 155.0, 75.0, 175.0, 20.0], true, 1.0, LineCap::Butt), expected); } #[test] fn cubic_width_05_aa() { let expected = Pixmap::load_png("tests/images/hairline/cubic-width-05-aa.png").unwrap(); assert_eq!(draw_cubic(&[25.0, 80.0, 55.0, 25.0, 155.0, 75.0, 175.0, 20.0], true, 0.5, LineCap::Butt), expected); } #[test] fn cubic_width_00_aa() { let expected = Pixmap::load_png("tests/images/hairline/cubic-width-00-aa.png").unwrap(); assert_eq!(draw_cubic(&[25.0, 80.0, 55.0, 25.0, 155.0, 75.0, 175.0, 20.0], true, 0.0, LineCap::Butt), expected); } #[test] fn cubic_width_00() { let expected = Pixmap::load_png("tests/images/hairline/cubic-width-00.png").unwrap(); assert_eq!(draw_cubic(&[25.0, 80.0, 55.0, 
25.0, 155.0, 75.0, 175.0, 20.0], false, 0.0, LineCap::Butt), expected); } #[test] fn cubic_width_05_aa_round() { let expected = Pixmap::load_png("tests/images/hairline/cubic-width-05-aa-round.png").unwrap(); assert_eq!(draw_cubic(&[25.0, 80.0, 55.0, 25.0, 155.0, 75.0, 175.0, 20.0], true, 0.5, LineCap::Round), expected); } #[test] fn cubic_width_00_round() { let expected = Pixmap::load_png("tests/images/hairline/cubic-width-00-round.png").unwrap(); assert_eq!(draw_cubic(&[25.0, 80.0, 55.0, 25.0, 155.0, 75.0, 175.0, 20.0], false, 0.0, LineCap::Round), expected); } #[test] fn chop_cubic_01() { let expected = Pixmap::load_png("tests/images/hairline/chop-cubic-01.png").unwrap(); // This curve will invoke `path_geometry::chop_cubic_at_max_curvature` branch of `hair_cubic`. assert_eq!(draw_cubic(&[57.0, 13.0, 17.0, 15.0, 55.0, 97.0, 89.0, 62.0], true, 0.5, LineCap::Butt), expected); } #[test] fn clip_cubic_05_aa() { let expected = Pixmap::load_png("tests/images/hairline/clip-cubic-05-aa.png").unwrap(); assert_eq!(draw_cubic(&[-25.0, 80.0, 55.0, 25.0, 155.0, 75.0, 175.0, 20.0], true, 0.5, LineCap::Butt), expected); } #[test] fn clip_cubic_00() { let expected = Pixmap::load_png("tests/images/hairline/clip-cubic-00.png").unwrap(); assert_eq!(draw_cubic(&[-25.0, 80.0, 55.0, 25.0, 155.0, 75.0, 175.0, 20.0], false, 0.0, LineCap::Butt), expected); } #[test] fn clipped_circle_aa() { let mut pixmap = Pixmap::new(100, 100).unwrap(); let mut canvas = Canvas::from(pixmap.as_mut()); let mut paint = Paint::default(); paint.set_color_rgba8(50, 127, 150, 200); paint.anti_alias = true; let mut stroke = Stroke::default(); stroke.width = 0.5; let path = PathBuilder::from_circle(50.0, 50.0, 55.0).unwrap(); canvas.stroke_path(&path, &paint, &stroke); let expected = Pixmap::load_png("tests/images/hairline/clipped-circle-aa.png").unwrap(); assert_eq!(pixmap, expected); }
34.576763
118
0.655826
f70f24b9e573cbd6b768be014cc0d48f7daf9a5e
408
extern crate div; use div::class::ConstantString; use div::class::ConstantTag; use div::class::U8Reader; #[test] fn test_constant_string() { let mut u8_reader = U8Reader::new(&[0x00, 0x15]); let constant_string = ConstantString::new(&mut u8_reader); assert_eq!(ConstantTag::String, constant_string.tag); assert_eq!(21, constant_string.string_index); assert_eq!(2, u8_reader.position); }
29.142857
62
0.72549
288017b7ca53ef1d83bc0c512df7e748199bec6e
28,879
//! Primitive traits and types representing basic properties of types. //! //! Rust types can be classified in various useful ways according to //! their intrinsic properties. These classifications are represented //! as traits. #![stable(feature = "rust1", since = "1.0.0")] use crate::cell::UnsafeCell; use crate::cmp; use crate::hash::Hash; use crate::hash::Hasher; /// Types that can be transferred across thread boundaries. /// /// This trait is automatically implemented when the compiler determines it's /// appropriate. /// /// An example of a non-`Send` type is the reference-counting pointer /// [`rc::Rc`][`Rc`]. If two threads attempt to clone [`Rc`]s that point to the same /// reference-counted value, they might try to update the reference count at the /// same time, which is [undefined behavior][ub] because [`Rc`] doesn't use atomic /// operations. Its cousin [`sync::Arc`][arc] does use atomic operations (incurring /// some overhead) and thus is `Send`. /// /// See [the Nomicon](../../nomicon/send-and-sync.html) for more details. /// /// [`Rc`]: ../../std/rc/struct.Rc.html /// [arc]: ../../std/sync/struct.Arc.html /// [ub]: ../../reference/behavior-considered-undefined.html #[stable(feature = "rust1", since = "1.0.0")] #[cfg_attr(not(test), rustc_diagnostic_item = "send_trait")] #[rustc_on_unimplemented( message="`{Self}` cannot be sent between threads safely", label="`{Self}` cannot be sent between threads safely" )] pub unsafe auto trait Send { // empty. } #[stable(feature = "rust1", since = "1.0.0")] impl<T: ?Sized> !Send for *const T { } #[stable(feature = "rust1", since = "1.0.0")] impl<T: ?Sized> !Send for *mut T { } /// Types with a constant size known at compile time. /// /// All type parameters have an implicit bound of `Sized`. The special syntax /// `?Sized` can be used to remove this bound if it's not appropriate. 
/// /// ``` /// # #![allow(dead_code)] /// struct Foo<T>(T); /// struct Bar<T: ?Sized>(T); /// /// // struct FooUse(Foo<[i32]>); // error: Sized is not implemented for [i32] /// struct BarUse(Bar<[i32]>); // OK /// ``` /// /// The one exception is the implicit `Self` type of a trait. A trait does not /// have an implicit `Sized` bound as this is incompatible with [trait object]s /// where, by definition, the trait needs to work with all possible implementors, /// and thus could be any size. /// /// Although Rust will let you bind `Sized` to a trait, you won't /// be able to use it to form a trait object later: /// /// ``` /// # #![allow(unused_variables)] /// trait Foo { } /// trait Bar: Sized { } /// /// struct Impl; /// impl Foo for Impl { } /// impl Bar for Impl { } /// /// let x: &dyn Foo = &Impl; // OK /// // let y: &dyn Bar = &Impl; // error: the trait `Bar` cannot /// // be made into an object /// ``` /// /// [trait object]: ../../book/ch17-02-trait-objects.html #[stable(feature = "rust1", since = "1.0.0")] #[lang = "sized"] #[rustc_on_unimplemented( on(parent_trait="std::path::Path", label="borrow the `Path` instead"), message="the size for values of type `{Self}` cannot be known at compilation time", label="doesn't have a size known at compile-time", note="to learn more, visit <https://doc.rust-lang.org/book/\ ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait>", )] #[fundamental] // for Default, for example, which requires that `[T]: !Default` be evaluatable pub trait Sized { // Empty. } /// Types that can be "unsized" to a dynamically-sized type. /// /// For example, the sized array type `[i8; 2]` implements `Unsize<[i8]>` and /// `Unsize<fmt::Debug>`. /// /// All implementations of `Unsize` are provided automatically by the compiler. 
/// /// `Unsize` is implemented for: /// /// - `[T; N]` is `Unsize<[T]>` /// - `T` is `Unsize<dyn Trait>` when `T: Trait` /// - `Foo<..., T, ...>` is `Unsize<Foo<..., U, ...>>` if: /// - `T: Unsize<U>` /// - Foo is a struct /// - Only the last field of `Foo` has a type involving `T` /// - `T` is not part of the type of any other fields /// - `Bar<T>: Unsize<Bar<U>>`, if the last field of `Foo` has type `Bar<T>` /// /// `Unsize` is used along with [`ops::CoerceUnsized`][coerceunsized] to allow /// "user-defined" containers such as [`rc::Rc`][rc] to contain dynamically-sized /// types. See the [DST coercion RFC][RFC982] and [the nomicon entry on coercion][nomicon-coerce] /// for more details. /// /// [coerceunsized]: ../ops/trait.CoerceUnsized.html /// [rc]: ../../std/rc/struct.Rc.html /// [RFC982]: https://github.com/rust-lang/rfcs/blob/master/text/0982-dst-coercion.md /// [nomicon-coerce]: ../../nomicon/coercions.html #[unstable(feature = "unsize", issue = "27732")] #[lang = "unsize"] pub trait Unsize<T: ?Sized> { // Empty. } /// Required trait for constants used in pattern matches. /// /// Any type that derives `PartialEq` automatically implements this trait, /// *regardless* of whether its type-parameters implement `Eq`. /// /// If a `const` item contains some type that does not implement this trait, /// then that type either (1.) does not implement `PartialEq` (which means the /// constant will not provide that comparison method, which code generation /// assumes is available), or (2.) it implements *its own* version of /// `PartialEq` (which we assume does not conform to a structural-equality /// comparison). /// /// In either of the two scenarios above, we reject usage of such a constant in /// a pattern match. /// /// See also the [structural match RFC][RFC1445], and [issue 63438][] which /// motivated migrating from attribute-based design to this trait. 
/// /// [RFC1445]: https://github.com/rust-lang/rfcs/blob/master/text/1445-restrict-constants-in-patterns.md /// [issue 63438]: https://github.com/rust-lang/rust/issues/63438 #[unstable(feature = "structural_match", issue = "31434")] #[rustc_on_unimplemented(message="the type `{Self}` does not `#[derive(PartialEq)]`")] #[lang = "structural_peq"] pub trait StructuralPartialEq { // Empty. } /// Required trait for constants used in pattern matches. /// /// Any type that derives `Eq` automatically implements this trait, *regardless* /// of whether its type-parameters implement `Eq`. /// /// This is a hack to workaround a limitation in our type-system. /// /// Background: /// /// We want to require that types of consts used in pattern matches /// have the attribute `#[derive(PartialEq, Eq)]`. /// /// In a more ideal world, we could check that requirement by just checking that /// the given type implements both (1.) the `StructuralPartialEq` trait *and* /// (2.) the `Eq` trait. However, you can have ADTs that *do* `derive(PartialEq, Eq)`, /// and be a case that we want the compiler to accept, and yet the constant's /// type fails to implement `Eq`. /// /// Namely, a case like this: /// /// ```rust /// #[derive(PartialEq, Eq)] /// struct Wrap<X>(X); /// fn higher_order(_: &()) { } /// const CFN: Wrap<fn(&())> = Wrap(higher_order); /// fn main() { /// match CFN { /// CFN => {} /// _ => {} /// } /// } /// ``` /// /// (The problem in the above code is that `Wrap<fn(&())>` does not implement /// `PartialEq`, nor `Eq`, because `for<'a> fn(&'a _)` does not implement those /// traits.) /// /// Therefore, we cannot rely on naive check for `StructuralPartialEq` and /// mere `Eq`. /// /// As a hack to work around this, we use two separate traits injected by each /// of the two derives (`#[derive(PartialEq)]` and `#[derive(Eq)]`) and check /// that both of them are present as part of structural-match checking. 
#[unstable(feature = "structural_match", issue = "31434")] #[rustc_on_unimplemented(message="the type `{Self}` does not `#[derive(Eq)]`")] #[lang = "structural_teq"] pub trait StructuralEq { // Empty. } /// Types whose values can be duplicated simply by copying bits. /// /// By default, variable bindings have 'move semantics.' In other /// words: /// /// ``` /// #[derive(Debug)] /// struct Foo; /// /// let x = Foo; /// /// let y = x; /// /// // `x` has moved into `y`, and so cannot be used /// /// // println!("{:?}", x); // error: use of moved value /// ``` /// /// However, if a type implements `Copy`, it instead has 'copy semantics': /// /// ``` /// // We can derive a `Copy` implementation. `Clone` is also required, as it's /// // a supertrait of `Copy`. /// #[derive(Debug, Copy, Clone)] /// struct Foo; /// /// let x = Foo; /// /// let y = x; /// /// // `y` is a copy of `x` /// /// println!("{:?}", x); // A-OK! /// ``` /// /// It's important to note that in these two examples, the only difference is whether you /// are allowed to access `x` after the assignment. Under the hood, both a copy and a move /// can result in bits being copied in memory, although this is sometimes optimized away. /// /// ## How can I implement `Copy`? /// /// There are two ways to implement `Copy` on your type. The simplest is to use `derive`: /// /// ``` /// #[derive(Copy, Clone)] /// struct MyStruct; /// ``` /// /// You can also implement `Copy` and `Clone` manually: /// /// ``` /// struct MyStruct; /// /// impl Copy for MyStruct { } /// /// impl Clone for MyStruct { /// fn clone(&self) -> MyStruct { /// *self /// } /// } /// ``` /// /// There is a small difference between the two: the `derive` strategy will also place a `Copy` /// bound on type parameters, which isn't always desired. /// /// ## What's the difference between `Copy` and `Clone`? /// /// Copies happen implicitly, for example as part of an assignment `y = x`. 
The behavior of /// `Copy` is not overloadable; it is always a simple bit-wise copy. /// /// Cloning is an explicit action, `x.clone()`. The implementation of [`Clone`] can /// provide any type-specific behavior necessary to duplicate values safely. For example, /// the implementation of [`Clone`] for [`String`] needs to copy the pointed-to string /// buffer in the heap. A simple bitwise copy of [`String`] values would merely copy the /// pointer, leading to a double free down the line. For this reason, [`String`] is [`Clone`] /// but not `Copy`. /// /// [`Clone`] is a supertrait of `Copy`, so everything which is `Copy` must also implement /// [`Clone`]. If a type is `Copy` then its [`Clone`] implementation only needs to return `*self` /// (see the example above). /// /// ## When can my type be `Copy`? /// /// A type can implement `Copy` if all of its components implement `Copy`. For example, this /// struct can be `Copy`: /// /// ``` /// # #[allow(dead_code)] /// struct Point { /// x: i32, /// y: i32, /// } /// ``` /// /// A struct can be `Copy`, and [`i32`] is `Copy`, therefore `Point` is eligible to be `Copy`. /// By contrast, consider /// /// ``` /// # #![allow(dead_code)] /// # struct Point; /// struct PointList { /// points: Vec<Point>, /// } /// ``` /// /// The struct `PointList` cannot implement `Copy`, because [`Vec<T>`] is not `Copy`. If we /// attempt to derive a `Copy` implementation, we'll get an error: /// /// ```text /// the trait `Copy` may not be implemented for this type; field `points` does not implement `Copy` /// ``` /// /// ## When *can't* my type be `Copy`? /// /// Some types can't be copied safely. For example, copying `&mut T` would create an aliased /// mutable reference. Copying [`String`] would duplicate responsibility for managing the /// [`String`]'s buffer, leading to a double free. 
/// /// Generalizing the latter case, any type implementing [`Drop`] can't be `Copy`, because it's /// managing some resource besides its own [`size_of::<T>`] bytes. /// /// If you try to implement `Copy` on a struct or enum containing non-`Copy` data, you will get /// the error [E0204]. /// /// [E0204]: ../../error-index.html#E0204 /// /// ## When *should* my type be `Copy`? /// /// Generally speaking, if your type _can_ implement `Copy`, it should. Keep in mind, though, /// that implementing `Copy` is part of the public API of your type. If the type might become /// non-`Copy` in the future, it could be prudent to omit the `Copy` implementation now, to /// avoid a breaking API change. /// /// ## Additional implementors /// /// In addition to the [implementors listed below][impls], /// the following types also implement `Copy`: /// /// * Function item types (i.e., the distinct types defined for each function) /// * Function pointer types (e.g., `fn() -> i32`) /// * Array types, for all sizes, if the item type also implements `Copy` (e.g., `[i32; 123456]`) /// * Tuple types, if each component also implements `Copy` (e.g., `()`, `(i32, bool)`) /// * Closure types, if they capture no value from the environment /// or if all such captured values implement `Copy` themselves. /// Note that variables captured by shared reference always implement `Copy` /// (even if the referent doesn't), /// while variables captured by mutable reference never implement `Copy`. /// /// [`Vec<T>`]: ../../std/vec/struct.Vec.html /// [`String`]: ../../std/string/struct.String.html /// [`Drop`]: ../../std/ops/trait.Drop.html /// [`size_of::<T>`]: ../../std/mem/fn.size_of.html /// [`Clone`]: ../clone/trait.Clone.html /// [`String`]: ../../std/string/struct.String.html /// [`i32`]: ../../std/primitive.i32.html /// [impls]: #implementors #[stable(feature = "rust1", since = "1.0.0")] #[lang = "copy"] pub trait Copy : Clone { // Empty. } /// Derive macro generating an impl of the trait `Copy`. 
#[rustc_builtin_macro] #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] #[allow_internal_unstable(core_intrinsics, derive_clone_copy)] pub macro Copy($item:item) { /* compiler built-in */ } /// Types for which it is safe to share references between threads. /// /// This trait is automatically implemented when the compiler determines /// it's appropriate. /// /// The precise definition is: a type `T` is `Sync` if and only if `&T` is /// [`Send`][send]. In other words, if there is no possibility of /// [undefined behavior][ub] (including data races) when passing /// `&T` references between threads. /// /// As one would expect, primitive types like [`u8`][u8] and [`f64`][f64] /// are all `Sync`, and so are simple aggregate types containing them, /// like tuples, structs and enums. More examples of basic `Sync` /// types include "immutable" types like `&T`, and those with simple /// inherited mutability, such as [`Box<T>`][box], [`Vec<T>`][vec] and /// most other collection types. (Generic parameters need to be `Sync` /// for their container to be `Sync`.) /// /// A somewhat surprising consequence of the definition is that `&mut T` /// is `Sync` (if `T` is `Sync`) even though it seems like that might /// provide unsynchronized mutation. The trick is that a mutable /// reference behind a shared reference (that is, `& &mut T`) /// becomes read-only, as if it were a `& &T`. Hence there is no risk /// of a data race. /// /// Types that are not `Sync` are those that have "interior /// mutability" in a non-thread-safe form, such as [`cell::Cell`][cell] /// and [`cell::RefCell`][refcell]. These types allow for mutation of /// their contents even through an immutable, shared reference. For /// example the `set` method on [`Cell<T>`][cell] takes `&self`, so it requires /// only a shared reference [`&Cell<T>`][cell]. The method performs no /// synchronization, thus [`Cell`][cell] cannot be `Sync`. 
/// /// Another example of a non-`Sync` type is the reference-counting /// pointer [`rc::Rc`][rc]. Given any reference [`&Rc<T>`][rc], you can clone /// a new [`Rc<T>`][rc], modifying the reference counts in a non-atomic way. /// /// For cases when one does need thread-safe interior mutability, /// Rust provides [atomic data types], as well as explicit locking via /// [`sync::Mutex`][mutex] and [`sync::RwLock`][rwlock]. These types /// ensure that any mutation cannot cause data races, hence the types /// are `Sync`. Likewise, [`sync::Arc`][arc] provides a thread-safe /// analogue of [`Rc`][rc]. /// /// Any types with interior mutability must also use the /// [`cell::UnsafeCell`][unsafecell] wrapper around the value(s) which /// can be mutated through a shared reference. Failing to doing this is /// [undefined behavior][ub]. For example, [`transmute`][transmute]-ing /// from `&T` to `&mut T` is invalid. /// /// See [the Nomicon](../../nomicon/send-and-sync.html) for more /// details about `Sync`. 
/// /// [send]: trait.Send.html /// [u8]: ../../std/primitive.u8.html /// [f64]: ../../std/primitive.f64.html /// [box]: ../../std/boxed/struct.Box.html /// [vec]: ../../std/vec/struct.Vec.html /// [cell]: ../cell/struct.Cell.html /// [refcell]: ../cell/struct.RefCell.html /// [rc]: ../../std/rc/struct.Rc.html /// [arc]: ../../std/sync/struct.Arc.html /// [atomic data types]: ../sync/atomic/index.html /// [mutex]: ../../std/sync/struct.Mutex.html /// [rwlock]: ../../std/sync/struct.RwLock.html /// [unsafecell]: ../cell/struct.UnsafeCell.html /// [ub]: ../../reference/behavior-considered-undefined.html /// [transmute]: ../../std/mem/fn.transmute.html #[stable(feature = "rust1", since = "1.0.0")] #[cfg_attr(not(test), rustc_diagnostic_item = "sync_trait")] #[lang = "sync"] #[rustc_on_unimplemented( message="`{Self}` cannot be shared between threads safely", label="`{Self}` cannot be shared between threads safely" )] pub unsafe auto trait Sync { // FIXME(estebank): once support to add notes in `rustc_on_unimplemented` // lands in beta, and it has been extended to check whether a closure is // anywhere in the requirement chain, extend it as such (#48534): // ``` // on( // closure, // note="`{Self}` cannot be shared safely, consider marking the closure `move`" // ), // ``` // Empty } #[stable(feature = "rust1", since = "1.0.0")] impl<T: ?Sized> !Sync for *const T { } #[stable(feature = "rust1", since = "1.0.0")] impl<T: ?Sized> !Sync for *mut T { } macro_rules! 
impls{ ($t: ident) => ( #[stable(feature = "rust1", since = "1.0.0")] impl<T:?Sized> Hash for $t<T> { #[inline] fn hash<H: Hasher>(&self, _: &mut H) { } } #[stable(feature = "rust1", since = "1.0.0")] impl<T:?Sized> cmp::PartialEq for $t<T> { fn eq(&self, _other: &$t<T>) -> bool { true } } #[stable(feature = "rust1", since = "1.0.0")] impl<T:?Sized> cmp::Eq for $t<T> { } #[stable(feature = "rust1", since = "1.0.0")] impl<T:?Sized> cmp::PartialOrd for $t<T> { fn partial_cmp(&self, _other: &$t<T>) -> Option<cmp::Ordering> { Option::Some(cmp::Ordering::Equal) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T:?Sized> cmp::Ord for $t<T> { fn cmp(&self, _other: &$t<T>) -> cmp::Ordering { cmp::Ordering::Equal } } #[stable(feature = "rust1", since = "1.0.0")] impl<T:?Sized> Copy for $t<T> { } #[stable(feature = "rust1", since = "1.0.0")] impl<T:?Sized> Clone for $t<T> { fn clone(&self) -> $t<T> { $t } } #[stable(feature = "rust1", since = "1.0.0")] impl<T:?Sized> Default for $t<T> { fn default() -> $t<T> { $t } } #[unstable(feature = "structural_match", issue = "31434")] impl<T: ?Sized> StructuralPartialEq for $t<T> { } #[unstable(feature = "structural_match", issue = "31434")] impl<T: ?Sized> StructuralEq for $t<T> { } ) } /// Zero-sized type used to mark things that "act like" they own a `T`. /// /// Adding a `PhantomData<T>` field to your type tells the compiler that your /// type acts as though it stores a value of type `T`, even though it doesn't /// really. This information is used when computing certain safety properties. /// /// For a more in-depth explanation of how to use `PhantomData<T>`, please see /// [the Nomicon](../../nomicon/phantom-data.html). /// /// # A ghastly note 👻👻👻 /// /// Though they both have scary names, `PhantomData` and 'phantom types' are /// related, but not identical. A phantom type parameter is simply a type /// parameter which is never used. 
In Rust, this often causes the compiler to /// complain, and the solution is to add a "dummy" use by way of `PhantomData`. /// /// # Examples /// /// ## Unused lifetime parameters /// /// Perhaps the most common use case for `PhantomData` is a struct that has an /// unused lifetime parameter, typically as part of some unsafe code. For /// example, here is a struct `Slice` that has two pointers of type `*const T`, /// presumably pointing into an array somewhere: /// /// ```compile_fail,E0392 /// struct Slice<'a, T> { /// start: *const T, /// end: *const T, /// } /// ``` /// /// The intention is that the underlying data is only valid for the /// lifetime `'a`, so `Slice` should not outlive `'a`. However, this /// intent is not expressed in the code, since there are no uses of /// the lifetime `'a` and hence it is not clear what data it applies /// to. We can correct this by telling the compiler to act *as if* the /// `Slice` struct contained a reference `&'a T`: /// /// ``` /// use std::marker::PhantomData; /// /// # #[allow(dead_code)] /// struct Slice<'a, T: 'a> { /// start: *const T, /// end: *const T, /// phantom: PhantomData<&'a T>, /// } /// ``` /// /// This also in turn requires the annotation `T: 'a`, indicating /// that any references in `T` are valid over the lifetime `'a`. 
/// /// When initializing a `Slice` you simply provide the value /// `PhantomData` for the field `phantom`: /// /// ``` /// # #![allow(dead_code)] /// # use std::marker::PhantomData; /// # struct Slice<'a, T: 'a> { /// # start: *const T, /// # end: *const T, /// # phantom: PhantomData<&'a T>, /// # } /// fn borrow_vec<T>(vec: &Vec<T>) -> Slice<'_, T> { /// let ptr = vec.as_ptr(); /// Slice { /// start: ptr, /// end: unsafe { ptr.add(vec.len()) }, /// phantom: PhantomData, /// } /// } /// ``` /// /// ## Unused type parameters /// /// It sometimes happens that you have unused type parameters which /// indicate what type of data a struct is "tied" to, even though that /// data is not actually found in the struct itself. Here is an /// example where this arises with [FFI]. The foreign interface uses /// handles of type `*mut ()` to refer to Rust values of different /// types. We track the Rust type using a phantom type parameter on /// the struct `ExternalResource` which wraps a handle. /// /// [FFI]: ../../book/ch19-01-unsafe-rust.html#using-extern-functions-to-call-external-code /// /// ``` /// # #![allow(dead_code)] /// # trait ResType { } /// # struct ParamType; /// # mod foreign_lib { /// # pub fn new(_: usize) -> *mut () { 42 as *mut () } /// # pub fn do_stuff(_: *mut (), _: usize) {} /// # } /// # fn convert_params(_: ParamType) -> usize { 42 } /// use std::marker::PhantomData; /// use std::mem; /// /// struct ExternalResource<R> { /// resource_handle: *mut (), /// resource_type: PhantomData<R>, /// } /// /// impl<R: ResType> ExternalResource<R> { /// fn new() -> ExternalResource<R> { /// let size_of_res = mem::size_of::<R>(); /// ExternalResource { /// resource_handle: foreign_lib::new(size_of_res), /// resource_type: PhantomData, /// } /// } /// /// fn do_stuff(&self, param: ParamType) { /// let foreign_params = convert_params(param); /// foreign_lib::do_stuff(self.resource_handle, foreign_params); /// } /// } /// ``` /// /// ## Ownership and the drop check 
/// /// Adding a field of type `PhantomData<T>` indicates that your /// type owns data of type `T`. This in turn implies that when your /// type is dropped, it may drop one or more instances of the type /// `T`. This has bearing on the Rust compiler's [drop check] /// analysis. /// /// If your struct does not in fact *own* the data of type `T`, it is /// better to use a reference type, like `PhantomData<&'a T>` /// (ideally) or `PhantomData<*const T>` (if no lifetime applies), so /// as not to indicate ownership. /// /// [drop check]: ../../nomicon/dropck.html #[lang = "phantom_data"] #[structural_match] #[stable(feature = "rust1", since = "1.0.0")] pub struct PhantomData<T:?Sized>; impls! { PhantomData } mod impls { #[stable(feature = "rust1", since = "1.0.0")] unsafe impl<T: Sync + ?Sized> Send for &T {} #[stable(feature = "rust1", since = "1.0.0")] unsafe impl<T: Send + ?Sized> Send for &mut T {} } /// Compiler-internal trait used to determine whether a type contains /// any `UnsafeCell` internally, but not through an indirection. /// This affects, for example, whether a `static` of that type is /// placed in read-only static memory or writable static memory. #[lang = "freeze"] pub(crate) unsafe auto trait Freeze {} impl<T: ?Sized> !Freeze for UnsafeCell<T> {} unsafe impl<T: ?Sized> Freeze for PhantomData<T> {} unsafe impl<T: ?Sized> Freeze for *const T {} unsafe impl<T: ?Sized> Freeze for *mut T {} unsafe impl<T: ?Sized> Freeze for &T {} unsafe impl<T: ?Sized> Freeze for &mut T {} /// Types that can be safely moved after being pinned. /// /// Since Rust itself has no notion of immovable types, and considers moves /// (e.g., through assignment or [`mem::replace`]) to always be safe, /// this trait cannot prevent types from moving by itself. 
/// /// Instead it is used to prevent moves through the type system, /// by controlling the behavior of pointers `P` wrapped in the [`Pin<P>`] wrapper, /// which "pin" the type in place by not allowing it to be moved out of them. /// See the [`pin module`] documentation for more information on pinning. /// /// Implementing this trait lifts the restrictions of pinning off a type, /// which then allows it to move out with functions such as [`mem::replace`]. /// /// `Unpin` has no consequence at all for non-pinned data. In particular, /// [`mem::replace`] happily moves `!Unpin` data (it works for any `&mut T`, not /// just when `T: Unpin`). However, you cannot use /// [`mem::replace`] on data wrapped inside a [`Pin<P>`] because you cannot get the /// `&mut T` you need for that, and *that* is what makes this system work. /// /// So this, for example, can only be done on types implementing `Unpin`: /// /// ```rust /// use std::mem; /// use std::pin::Pin; /// /// let mut string = "this".to_string(); /// let mut pinned_string = Pin::new(&mut string); /// /// // We need a mutable reference to call `mem::replace`. /// // We can obtain such a reference by (implicitly) invoking `Pin::deref_mut`, /// // but that is only possible because `String` implements `Unpin`. /// mem::replace(&mut *pinned_string, "other".to_string()); /// ``` /// /// This trait is automatically implemented for almost every type. /// /// [`mem::replace`]: ../../std/mem/fn.replace.html /// [`Pin<P>`]: ../pin/struct.Pin.html /// [`pin module`]: ../../std/pin/index.html #[stable(feature = "pin", since = "1.33.0")] #[lang = "unpin"] pub auto trait Unpin {} /// A marker type which does not implement `Unpin`. /// /// If a type contains a `PhantomPinned`, it will not implement `Unpin` by default. 
#[stable(feature = "pin", since = "1.33.0")] #[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] pub struct PhantomPinned; #[stable(feature = "pin", since = "1.33.0")] impl !Unpin for PhantomPinned {} #[stable(feature = "pin", since = "1.33.0")] impl<'a, T: ?Sized + 'a> Unpin for &'a T {} #[stable(feature = "pin", since = "1.33.0")] impl<'a, T: ?Sized + 'a> Unpin for &'a mut T {} #[stable(feature = "pin_raw", since = "1.38.0")] impl<T: ?Sized> Unpin for *const T {} #[stable(feature = "pin_raw", since = "1.38.0")] impl<T: ?Sized> Unpin for *mut T {} /// Implementations of `Copy` for primitive types. /// /// Implementations that cannot be described in Rust /// are implemented in `SelectionContext::copy_clone_conditions()` in librustc. mod copy_impls { use super::Copy; macro_rules! impl_copy { ($($t:ty)*) => { $( #[stable(feature = "rust1", since = "1.0.0")] impl Copy for $t {} )* } } impl_copy! { usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 f32 f64 bool char } #[stable(feature = "never_type", since = "1.41.0")] impl Copy for ! {} #[stable(feature = "rust1", since = "1.0.0")] impl<T: ?Sized> Copy for *const T {} #[stable(feature = "rust1", since = "1.0.0")] impl<T: ?Sized> Copy for *mut T {} // Shared references can be copied, but mutable references *cannot*! #[stable(feature = "rust1", since = "1.0.0")] impl<T: ?Sized> Copy for &T {} }
36.417402
104
0.62613
dd5008f0a97f2b5728c82843bc867e46281a334f
837
extern crate win_event_log; use win_event_log::prelude::*; fn main() { let conditions = vec![ Condition::filter(EventFilter::level(1, Comparison::Equal)), Condition::filter(EventFilter::level(4, Comparison::GreaterThanOrEqual)), ]; let query = QueryList::new() .with_query( Query::new() .item( QueryItem::selector("Application".to_owned()) .system_conditions(Condition::or(conditions)) .build(), ) .query(), ) .build(); match WinEvents::get(query) { Ok(events) => { if let Some(event) = events.into_iter().next() { println!("{}", event); } } Err(e) => println!("Error: {}", e), } }
27
81
0.476703
2805b1617e50f9cec770109c2e3c1bb02997367c
5,371
use std::cell::RefCell; use std::rc::Rc; #[derive(Debug)] pub struct PingchuanParser {} #[derive(Debug, Clone)] #[repr(transparent)] pub struct PingchuanEvent { request_content: String, } #[derive(Debug, Clone)] pub struct PingchuanPacket { pub transaction_id: u64, pub topic_len: u64, pub content_len: u64, pub role: u64, pub order: u64, pub gzip: u64, pub offset: u64, pub topic: String, pub content: Rc<RefCell<Vec<u8>>>, } impl PingchuanPacket { pub fn new() -> PingchuanPacket { PingchuanPacket { transaction_id: 0, topic_len: 0, content_len: 0, role: 0, order: 0, gzip: 0, // crc: String::from(""), offset: 0, topic: String::from(""), content: Rc::new(RefCell::new(Vec::new())), } } } impl PingchuanParser { pub fn parse(&mut self, request_content: String) -> PingchuanEvent { let pingchuan_event = PingchuanEvent::of(request_content); pingchuan_event } pub fn of() -> PingchuanParser { PingchuanParser {} } pub unsafe fn any_as_u8_slice<T: Sized>(p: &T) -> &[u8] { std::slice::from_raw_parts((p as *const T) as *const u8, ::std::mem::size_of::<T>()) } fn add_u64_to_bytes(t: u64, bytes: &mut Vec<u8>) -> &mut Vec<u8> { unsafe { let tmps = std::mem::transmute::<u64, [u8; 8]>(t); for byte in tmps.iter() { bytes.push(*byte); } } bytes } fn from_bytes_to_64(bytes: Vec<u8>) -> u64 { unsafe { let mut tmps: [u8; 8] = [0; 8]; for index in 0..bytes.len() { tmps[index] = bytes[index]; } let tmps = std::mem::transmute::<[u8; 8], u64>(tmps); return tmps; } } pub fn deserialize_from_pingchuan_packet( pingchuan_packet: Rc<RefCell<PingchuanPacket>>, bytes: &mut Vec<u8>, ) -> Option<Rc<RefCell<PingchuanPacket>>> { let magic_bytes = b"pingchuan"; let mut packet = pingchuan_packet.borrow_mut(); // 检验魔数 for index in 0..magic_bytes.len() { if bytes[index] != magic_bytes[index] { return None; } } let (_, tmp) = bytes.split_at(magic_bytes.len()); let split_length = 8; let (transaction_id, tmp) = tmp.split_at(split_length); packet.transaction_id = 
PingchuanParser::from_bytes_to_64(transaction_id.to_vec()); let (topic_len, tmp) = tmp.split_at(split_length); packet.topic_len = PingchuanParser::from_bytes_to_64(topic_len.to_vec()); let (content_len, tmp) = tmp.split_at(split_length); packet.content_len = PingchuanParser::from_bytes_to_64(content_len.to_vec()); let (role, tmp) = tmp.split_at(split_length); packet.role = PingchuanParser::from_bytes_to_64(role.to_vec()); let (order, tmp) = tmp.split_at(split_length); packet.order = PingchuanParser::from_bytes_to_64(order.to_vec()); let (gzip, tmp) = tmp.split_at(split_length); packet.gzip = PingchuanParser::from_bytes_to_64(gzip.to_vec()); // let (crc, tmp) = tmp.split_at(split_length); // packet.crc = String::from_utf8_lossy(&crc).into_owned(); let (offset, tmp) = tmp.split_at(split_length); packet.offset = PingchuanParser::from_bytes_to_64(offset.to_vec()); let (topic, tmp) = tmp.split_at(packet.topic_len as usize); packet.topic = String::from_utf8_lossy(&topic).into_owned(); let (content, _) = tmp.split_at(packet.content_len as usize); packet.content = Rc::new(RefCell::new(content.to_vec())); Some(pingchuan_packet.clone()) } pub fn serialize_to_pingchuan_packet( p: Rc<RefCell<PingchuanPacket>>, bytes: &mut Vec<u8>, ) -> &mut Vec<u8> { let packet = p.borrow(); let magic_bytes = b"pingchuan"; for index in 0..magic_bytes.len() { bytes.push(magic_bytes[index]); } let mut topic_len: u64 = 0; for _ in packet.topic.bytes() { topic_len += 1; } let result = PingchuanParser::add_u64_to_bytes(packet.transaction_id, bytes); let result = PingchuanParser::add_u64_to_bytes(topic_len, result); // content let content_len = packet.content.borrow().len() as u64; let result = PingchuanParser::add_u64_to_bytes(content_len, result); let result = PingchuanParser::add_u64_to_bytes(packet.role, result); let result = PingchuanParser::add_u64_to_bytes(packet.order, result); let result = PingchuanParser::add_u64_to_bytes(packet.gzip, result); // crc // for byte in packet.crc.bytes() { // 
result.push(byte); // } let result = PingchuanParser::add_u64_to_bytes(packet.offset, result); for byte in packet.topic.bytes() { result.push(byte); } // content let tmp = packet.content.borrow(); for byte in tmp.iter() { result.push(*byte); } result } } impl PingchuanEvent { pub fn of(request_content: String) -> PingchuanEvent { PingchuanEvent { request_content } } }
31.409357
92
0.584621
8afe1401ab8b3b91fec729322ef844497fc79de7
17,749
// // Copyright (c) The yang2-rs Core Contributors // // See LICENSE for license details. // //! YANG context. use bitflags::bitflags; use std::collections::HashMap; use std::ffi::CString; use std::os::raw::{c_char, c_void}; use std::os::unix::ffi::OsStrExt; use std::path::Path; use std::slice; use crate::error::{Error, Result}; use crate::iter::{SchemaModules, Set}; use crate::schema::{SchemaModule, SchemaNode}; use crate::utils::*; use libyang2_sys as ffi; /// Context of the YANG schemas. /// /// [Official C documentation] /// /// [Official C documentation]: https://netopeer.liberouter.org/doc/libyang/libyang2/html/howto_context.html #[derive(Debug, PartialEq)] pub struct Context { pub(crate) raw: *mut ffi::ly_ctx, } bitflags! { /// Options to change context behavior. pub struct ContextFlags: u16 { /// All the imported modules of the schema being parsed are implemented. const ALL_IMPLEMENTED = ffi::LY_CTX_ALL_IMPLEMENTED as u16; /// Implement all imported modules "referenced" from an implemented /// module. Normally, leafrefs, augment and deviation targets are /// implemented as specified by YANG 1.1. In addition to this, implement /// any modules of nodes referenced by when and must conditions and by /// any default values. Generally, only if all these modules are /// implemented, the explicitly implemented modules can be properly /// used and instantiated in data. const REF_IMPLEMENTED = ffi::LY_CTX_REF_IMPLEMENTED as u16; /// Do not internally implement ietf-yang-library module. This option /// cannot be changed on existing context. const NO_YANGLIBRARY = ffi::LY_CTX_NO_YANGLIBRARY as u16; /// Do not search for schemas in context's searchdirs neither in current /// working directory. const DISABLE_SEARCHDIRS = ffi::LY_CTX_DISABLE_SEARCHDIRS as u16; /// Do not automatically search for schemas in current working /// directory, which is by default searched automatically (despite not /// recursively). 
const DISABLE_SEARCHDIR_CWD = ffi::LY_CTX_DISABLE_SEARCHDIR_CWD as u16; } } /// Embedded module key containing the module/submodule name and optional /// revision. #[derive(Debug, Eq, Hash, PartialEq)] pub struct EmbeddedModuleKey { mod_name: &'static str, mod_rev: Option<&'static str>, submod_name: Option<&'static str>, submod_rev: Option<&'static str>, } /// A hashmap containing embedded YANG modules. pub type EmbeddedModules = HashMap<EmbeddedModuleKey, &'static str>; // ===== impl Context ===== impl Context { /// Create libyang context. /// /// Context is used to hold all information about schemas. Usually, the /// application is supposed to work with a single context in which /// libyang is holding all schemas (and other internal information) /// according to which the data trees will be processed and validated. pub fn new(options: ContextFlags) -> Result<Context> { let mut context = std::ptr::null_mut(); let ctx_ptr = &mut context; let ret = unsafe { ffi::ly_ctx_new(std::ptr::null(), options.bits, ctx_ptr) }; if ret != ffi::LY_ERR::LY_SUCCESS { // Need to construct error structure by hand. return Err(Error { errcode: ret, msg: None, path: None, apptag: None, }); } Ok(Context { raw: context }) } /// Add the search path into libyang context. pub fn set_searchdir<P: AsRef<Path>>( &mut self, search_dir: P, ) -> Result<()> { let search_dir = CString::new(search_dir.as_ref().as_os_str().as_bytes()).unwrap(); let ret = unsafe { ffi::ly_ctx_set_searchdir(self.raw, search_dir.as_ptr()) }; if ret != ffi::LY_ERR::LY_SUCCESS { return Err(Error::new(self)); } Ok(()) } /// Clean the search path from the libyang context. /// /// To remove the recently added search path(s), use /// Context::unset_searchdir_last(). 
pub fn unset_searchdir<P: AsRef<Path>>( &mut self, search_dir: P, ) -> Result<()> { let search_dir = CString::new(search_dir.as_ref().as_os_str().as_bytes()).unwrap(); let ret = unsafe { ffi::ly_ctx_unset_searchdir(self.raw, search_dir.as_ptr()) }; if ret != ffi::LY_ERR::LY_SUCCESS { return Err(Error::new(self)); } Ok(()) } /// Clean all search paths from the libyang context. pub fn unset_searchdirs(&mut self) -> Result<()> { let ret = unsafe { ffi::ly_ctx_unset_searchdir(self.raw, std::ptr::null()) }; if ret != ffi::LY_ERR::LY_SUCCESS { return Err(Error::new(self)); } Ok(()) } /// Remove the least recently added search path(s) from the libyang context. /// /// To remove a specific search path by its value, use /// Context::unset_searchdir(). pub fn unset_searchdir_last(&mut self, count: u32) -> Result<()> { let ret = unsafe { ffi::ly_ctx_unset_searchdir_last(self.raw, count) }; if ret != ffi::LY_ERR::LY_SUCCESS { return Err(Error::new(self)); } Ok(()) } /// Set hash map containing embedded YANG modules, which are loaded on /// demand. pub fn set_embedded_modules(&mut self, modules: &EmbeddedModules) { unsafe { ffi::ly_ctx_set_module_imp_clb( self.raw, Some(ly_module_import_cb), modules as *const _ as *mut c_void, ) }; } /// Remove all embedded modules from the libyang context. pub fn unset_embedded_modules(&mut self) { unsafe { ffi::ly_ctx_set_module_imp_clb(self.raw, None, std::ptr::null_mut()) }; } /// Get the currently set context's options. pub fn get_options(&self) -> ContextFlags { let options = unsafe { ffi::ly_ctx_get_options(self.raw) }; ContextFlags::from_bits_truncate(options) } /// Set some of the context's options. pub fn set_options(&mut self, options: ContextFlags) -> Result<()> { let ret = unsafe { ffi::ly_ctx_set_options(self.raw, options.bits) }; if ret != ffi::LY_ERR::LY_SUCCESS { return Err(Error::new(self)); } Ok(()) } /// Unset some of the context's options. 
pub fn unset_options(&mut self, options: ContextFlags) -> Result<()> { let ret = unsafe { ffi::ly_ctx_unset_options(self.raw, options.bits) }; if ret != ffi::LY_ERR::LY_SUCCESS { return Err(Error::new(self)); } Ok(()) } /// Get current ID of the modules set. pub fn get_module_set_id(&self) -> u16 { unsafe { ffi::ly_ctx_get_change_count(self.raw) } } /// Get YANG module of the given name and revision. /// /// If the revision is not specified, the schema with no revision is /// returned (if it is present in the context). pub fn get_module( &self, name: &str, revision: Option<&str>, ) -> Option<SchemaModule> { let name = CString::new(name).unwrap(); let revision_cstr; let revision_ptr = match revision { Some(revision) => { revision_cstr = CString::new(revision).unwrap(); revision_cstr.as_ptr() } None => std::ptr::null(), }; let module = unsafe { ffi::ly_ctx_get_module(self.raw, name.as_ptr(), revision_ptr) }; if module.is_null() { return None; } Some(SchemaModule::from_raw(self, module)) } /// Get the latest revision of the YANG module specified by its name. /// /// YANG modules with no revision are supposed to be the oldest one. pub fn get_module_latest(&self, name: &str) -> Option<SchemaModule> { let name = CString::new(name).unwrap(); let module = unsafe { ffi::ly_ctx_get_module_latest(self.raw, name.as_ptr()) }; if module.is_null() { return None; } Some(SchemaModule::from_raw(self, module)) } /// Get the (only) implemented YANG module specified by its name. pub fn get_module_implemented(&self, name: &str) -> Option<SchemaModule> { let name = CString::new(name).unwrap(); let module = unsafe { ffi::ly_ctx_get_module_implemented(self.raw, name.as_ptr()) }; if module.is_null() { return None; } Some(SchemaModule::from_raw(self, module)) } /// YANG module of the given namespace and revision. /// /// If the revision is not specified, the schema with no revision is /// returned (if it is present in the context). 
pub fn get_module_ns( &self, ns: &str, revision: Option<&str>, ) -> Option<SchemaModule> { let ns = CString::new(ns).unwrap(); let revision_cstr; let revision_ptr = match revision { Some(revision) => { revision_cstr = CString::new(revision).unwrap(); revision_cstr.as_ptr() } None => std::ptr::null(), }; let module = unsafe { ffi::ly_ctx_get_module_ns(self.raw, ns.as_ptr(), revision_ptr) }; if module.is_null() { return None; } Some(SchemaModule::from_raw(self, module)) } /// Get the latest revision of the YANG module specified by its namespace. /// /// YANG modules with no revision are supposed to be the oldest one. pub fn get_module_latest_ns(&self, ns: &str) -> Option<SchemaModule> { let ns = CString::new(ns).unwrap(); let module = unsafe { ffi::ly_ctx_get_module_latest_ns(self.raw, ns.as_ptr()) }; if module.is_null() { return None; } Some(SchemaModule::from_raw(self, module)) } /// Get the (only) implemented YANG module specified by its namespace. pub fn get_module_implemented_ns(&self, ns: &str) -> Option<SchemaModule> { let ns = CString::new(ns).unwrap(); let module = unsafe { ffi::ly_ctx_get_module_implemented_ns(self.raw, ns.as_ptr()) }; if module.is_null() { return None; } Some(SchemaModule::from_raw(self, module)) } /// Get list of loaded modules. /// /// Internal modules (loaded during the context creation) can be skipped by /// setting "skip_internal" to true. pub fn modules(&self, skip_internal: bool) -> SchemaModules { SchemaModules::new(&self, skip_internal) } /// Returns an iterator over all data nodes from all modules in the YANG /// context (depth-first search algorithm). pub fn traverse(&self) -> impl Iterator<Item = SchemaNode> { self.modules(false).flat_map(|module| module.traverse()) } /// Reset cached latest revision information of the schemas in the context. /// /// When a (sub)module is imported/included without revision, the latest /// revision is searched. libyang searches for the latest revision in /// searchdir. 
Then it is expected that the content of searchdirs does not /// change. So when it changes, it is necessary to force searching for the /// latest revision in case of loading another module, which what this /// function does. pub fn reset_latests(&mut self) { unsafe { ffi::ly_ctx_reset_latests(self.raw) }; } /// Learn the number of internal modules of the context. Internal modules is /// considered one that was loaded during the context creation. pub fn internal_module_count(&self) -> u32 { unsafe { ffi::ly_ctx_internal_modules_count(self.raw) } } /// Try to find the model in the searchpaths and load it. /// /// The context itself is searched for the requested module first. If /// revision is not specified (the module of the latest revision is /// requested) and there is implemented revision of the requested module /// in the context, this implemented revision is returned despite there /// might be a newer revision. This behavior is caused by the fact that /// it is not possible to have multiple implemented revisions of /// the same module in the context. /// /// If the revision is not specified, the latest revision is loaded. pub fn load_module( &mut self, name: &str, revision: Option<&str>, ) -> Result<SchemaModule> { let name = CString::new(name).unwrap(); let revision_cstr; let revision_ptr = match revision { Some(revision) => { revision_cstr = CString::new(revision).unwrap(); revision_cstr.as_ptr() } None => std::ptr::null(), }; let module = unsafe { ffi::ly_ctx_load_module( self.raw, name.as_ptr(), revision_ptr, std::ptr::null_mut(), ) }; if module.is_null() { return Err(Error::new(self)); } Ok(SchemaModule::from_raw(self, module as *mut _)) } /// Evaluate an xpath expression on schema nodes. 
pub fn find_xpath(&self, path: &str) -> Result<Set<SchemaNode>> { let path = CString::new(path).unwrap(); let mut set = std::ptr::null_mut(); let set_ptr = &mut set; let options = 0u32; let ret = unsafe { ffi::lys_find_xpath( self.raw, std::ptr::null(), path.as_ptr(), options, set_ptr, ) }; if ret != ffi::LY_ERR::LY_SUCCESS { return Err(Error::new(self)); } let rnodes_count = unsafe { (*set).count } as usize; let slice = if rnodes_count == 0 { &[] } else { let rnodes = unsafe { (*set).__bindgen_anon_1.snodes }; unsafe { slice::from_raw_parts(rnodes, rnodes_count) } }; Ok(Set::new(self, slice)) } /// Get a schema node based on the given data path (JSON format). pub fn find_path(&self, path: &str) -> Result<SchemaNode> { let path = CString::new(path).unwrap(); let rnode = unsafe { ffi::lys_find_path(self.raw, std::ptr::null(), path.as_ptr(), 0) }; if rnode.is_null() { return Err(Error::new(self)); } Ok(SchemaNode::from_raw(self, rnode as *mut _)) } } unsafe impl Send for Context {} unsafe impl Sync for Context {} impl Drop for Context { fn drop(&mut self) { unsafe { ffi::ly_ctx_destroy(self.raw, None) }; } } // ===== impl EmbeddedModuleKey ===== impl EmbeddedModuleKey { pub fn new( mod_name: &'static str, mod_rev: Option<&'static str>, submod_name: Option<&'static str>, submod_rev: Option<&'static str>, ) -> EmbeddedModuleKey { EmbeddedModuleKey { mod_name, mod_rev, submod_name, submod_rev, } } } // ===== helper functions ===== fn find_embedded_module<'a>( modules: &'a EmbeddedModules, mod_name: &'a str, mod_rev: Option<&'a str>, submod_name: Option<&'a str>, submod_rev: Option<&'a str>, ) -> Option<(&'a EmbeddedModuleKey, &'a &'a str)> { modules.iter().find(|(key, _)| { // Check module name. if *key.mod_name != *mod_name { return false; } // Check module revision. if let Some(mod_rev) = &mod_rev { if let Some(emod_rev) = &key.mod_rev { if *emod_rev != *mod_rev { return false; } } } // Check submodule name. 
if let Some(submod_name) = &submod_name { if let Some(esubmod_name) = &key.submod_name { if *esubmod_name != *submod_name { return false; } } // Check submodule revision. if let Some(submod_rev) = &submod_rev { if let Some(esubmod_rev) = &key.submod_rev { if *esubmod_rev != *submod_rev { return false; } } } } true }) } unsafe extern "C" fn ly_module_import_cb( mod_name: *const c_char, mod_rev: *const c_char, submod_name: *const c_char, submod_rev: *const c_char, user_data: *mut c_void, format: *mut ffi::LYS_INFORMAT::Type, module_data: *mut *const c_char, _free_module_data: *mut ffi::ly_module_imp_data_free_clb, ) -> ffi::LY_ERR::Type { let modules = &*(user_data as *const EmbeddedModules); let mod_name = char_ptr_to_str(mod_name); let mod_rev = char_ptr_to_opt_str(mod_rev); let submod_name = char_ptr_to_opt_str(submod_name); let submod_rev = char_ptr_to_opt_str(submod_rev); if let Some((_emod_key, emod_data)) = find_embedded_module( modules, mod_name, mod_rev, submod_name, submod_rev, ) { let data = CString::new(*emod_data).unwrap(); *format = ffi::LYS_INFORMAT::LYS_IN_YANG; *module_data = data.as_ptr(); std::mem::forget(data); return ffi::LY_ERR::LY_SUCCESS; } ffi::LY_ERR::LY_ENOTFOUND }
32.153986
108
0.584653
879deb610511c9b91155ba9542130c5b39efd077
701
use crate::sys::syscall::*; pub use core::fmt; #[macro_export] macro_rules! print { ($($arg:tt)*) => { { use core::fmt::Write; let _ = write!(OsPrint(), $($arg)*); } }; } #[macro_export] macro_rules! println { ($fmt:expr) => { print!(concat!($fmt, "\r\n")) }; ($fmt:expr, $($arg:tt)*) => { print!(concat!($fmt, "\r\n"), $($arg)*) }; } #[panic_handler] fn panic(_info: &core::panic::PanicInfo) -> ! { // println!("{}", info); os_exit(); } pub struct OsPrint(); impl fmt::Write for OsPrint { #[inline] fn write_str(&mut self, s: &str) -> core::fmt::Result { os_print(s); Ok(()) } }
17.974359
59
0.475036
0e30352a462dfaabf75614c3f42854b1bb9e647b
6,123
//! A special structure which iterates over the characters in a file. It has //! special functions to ensure smooth traversal through a file or any //! sequence that can be iterated over. //! //! See [`FileStream`]. use std::{ convert::TryFrom, fs::OpenOptions, io::{stdin, BufRead, BufReader, Lines, Read}, iter::{Enumerate, Iterator}, path::Path }; use crate::{ error::{Error, ErrorKind, Result}, token::FileIndex }; /// An iterator that outputs each line in a Kaleidoscope programme one at a /// time. type BufferIterator<'a> = Enumerate<Lines<BufReader<Box<dyn Read + 'a>>>>; /// A file stream which returns a unicode codepoint one at a time. /// This is in contrast to a normal [`std::fs::File`] which can only read /// bytes to an array. /// /// An object of this struct also stores the index of the current character. /// See [`FileIndex`] for implementation details. This index stores the /// current line and character column, and can be retrieved by calling /// [`FileStream::get_index`]. pub struct FileStream<'a> { buffer: BufferIterator<'a>, line: Vec<char>, cursor: usize, index: FileIndex, error: Option<Error>, eof_reached: bool } impl<'a> FileStream<'a> { /// Create a new `FileStream` from an iterator over the lines of a buffered /// reader. pub fn new(buffer: BufferIterator<'a>) -> Self { let mut this = FileStream { buffer, cursor: 0, line: Vec::new(), index: Default::default(), error: None, eof_reached: false }; this.init(); this } /// Check if the file/stream has ended (i.e. whether the stream has /// encountered an EOF character). pub fn eof_reached(&self) -> bool { self.eof_reached } /// Get the character currently being read. pub fn get_unit(&self) -> Option<char> { self.line.get(self.cursor).cloned() } /// Get the index of the current character. pub fn get_index(&self) -> FileIndex { self.index } /// Get a mutable reference to the index of the current character. 
#[warn(unsafe_code)] pub fn get_mut_index(&mut self) -> &mut FileIndex { &mut self.index } /// Get the error currently being stored. pub fn get_err(&self) -> Option<Error> { self.error.clone() } /// Set the current error to [`None`] and return the old error. pub fn silence_err(&mut self) -> Option<Error> { self.error.take() } fn init(&mut self) -> &mut Self { self.next_line(); self } /// Push the next line into the buffer. /// If successful, [`true`] is returned. /// Otherwise, if the file/stream has ended or an error has happened, /// [`false`] is returned. pub fn next_line(&mut self) -> bool { if let Some((line_no, line)) = self.buffer.next() { match line { Ok(l) => { self.line = l.chars().collect(); // Mandatory extra new line character so that // the tokeniser knows that the end of the line has // been reached. // Useful for getting to the end of a comment or statement self.line.push('\n'); self.cursor = 0; self.eof_reached = false; self.index = FileIndex::new(Some(line_no), 0); true }, Err(e) => { self.error = Some(Error::from_err(Box::new(e), ErrorKind::FileIOError)); self.eof_reached = false; false } } } else { self.line.clear(); self.eof_reached = true; false } } /// Read the next character in the stream. /// If there are no more characters or an error has occurred, /// [`None`] is returned. pub fn next_unit(&mut self) -> Option<char> { loop { if self.eof_reached() || self.error.is_some() { break None; } else if self.cursor >= self.line.len() { self.next_line(); } else { let unit = self.get_unit(); self.cursor += 1; self.index += 1; break unit; } } } /// Create a [`FileStream`] from the [`stdin`] stream. /// By [`Default`], [`FileStream`] reads from stdin. pub fn from_stdin() -> Self { let stdin: Box<dyn Read> = Box::new(stdin()); let buffer = BufReader::new(stdin).lines().enumerate(); Self::new(buffer) } /// Create a new [`FileStream`] from a path. 
pub fn from_path(path: &Path) -> Result<Self> { let file: Box<dyn Read> = match OpenOptions::new().read(true).open(path) { Ok(f) => Box::new(f), Err(e) => return Err(Error::from_err(Box::new(e), ErrorKind::FileIOError)) }; let buffer = BufReader::new(file).lines().enumerate(); Ok(Self::new(buffer)) } /// Create a new [`FileStream`] from a slice of bytes. pub fn from_bytes(byte_array: &'a [u8]) -> Self { let read: Box<dyn Read + 'a> = Box::new(byte_array); let buffer = BufReader::new(read).lines().enumerate(); Self::new(buffer) } } impl<'a> Default for FileStream<'a> { fn default() -> Self { Self::from_stdin() } } impl<'a> TryFrom<&Path> for FileStream<'a> { type Error = Error; fn try_from(path: &Path) -> Result<Self> { Self::from_path(path) } } impl<'a> From<&'a [u8]> for FileStream<'a> { fn from(byte_array: &'a [u8]) -> Self { Self::from_bytes(byte_array) } } impl<'a> From<&'a str> for FileStream<'a> { fn from(string: &'a str) -> Self { Self::from_bytes(string.as_bytes()) } } impl<'a> Iterator for FileStream<'a> { type Item = char; fn next(&mut self) -> Option<Self::Item> { self.next_unit() } }
30.462687
92
0.546954
162ad1ce883ee8477144d999ca2da9953069b487
2,569
// Copyright (c) Facebook, Inc. and its affiliates. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::default::Default; use std::fs; use std::path::PathBuf; use anyhow::{bail, Result}; use serde::{Deserialize, Serialize}; use toml; pub const BELOW_DEFAULT_CONF: &str = "/etc/below/below.conf"; const BELOW_DEFAULT_LOG: &str = "/var/log/below"; const BELOW_DEFAULT_STORE: &str = "/var/log/below/store"; #[derive(Serialize, Deserialize, Debug)] // If value is missing during deserialization, use the Default::default() #[serde(default)] pub struct BelowConfig { pub log_dir: PathBuf, pub store_dir: PathBuf, pub cgroup_filter_out: String, } impl Default for BelowConfig { fn default() -> Self { BelowConfig { log_dir: BELOW_DEFAULT_LOG.into(), store_dir: BELOW_DEFAULT_STORE.into(), cgroup_filter_out: String::new(), } } } impl BelowConfig { pub fn load(path: &PathBuf) -> Result<Self> { match path.exists() { true if !path.is_file() => bail!("{} exists and is not a file", path.to_string_lossy()), true => BelowConfig::load_exists(path), false if path.to_string_lossy() == BELOW_DEFAULT_CONF => Ok(Default::default()), false => bail!("No such file or directory: {}", path.to_string_lossy()), } } fn load_exists(path: &PathBuf) -> Result<Self> { let string_config = match fs::read_to_string(path) { Ok(sc) => sc, Err(e) => { bail!( "Failed to read from config file {}: {}", path.to_string_lossy(), e ); } }; match toml::from_str(string_config.as_str()) { Ok(bc) => Ok(bc), Err(e) => { 
bail!( "Failed to parse config file {}: {}\n{}", path.to_string_lossy(), e, string_config ); } } } }
31.716049
100
0.582328
4bfe958d2023b426c52a3dbc23909d293b4e0c40
4,467
//! Utility functions use crate::Config; use ethers_solc::remappings::{Remapping, RemappingError}; use figment::value::Value; use std::{ path::{Path, PathBuf}, str::FromStr, }; /// Loads the config for the current project workspace pub fn load_config() -> Config { load_config_with_root(None) } /// Loads the config for the current project workspace or the provided root path pub fn load_config_with_root(root: Option<PathBuf>) -> Config { if let Some(root) = root { Config::load_with_root(root) } else { Config::load_with_root(find_project_root_path().unwrap()) } .sanitized() } /// Returns the path of the top-level directory of the working git tree. If there is no working /// tree, an error is returned. pub fn find_git_root_path() -> eyre::Result<PathBuf> { let path = std::process::Command::new("git").args(&["rev-parse", "--show-toplevel"]).output()?.stdout; let path = std::str::from_utf8(&path)?.trim_end_matches('\n'); Ok(PathBuf::from(path)) } /// Returns the root path to set for the project root /// /// traverse the dir tree up and look for a `foundry.toml` file starting at the cwd, but only until /// the root dir of the current repo so that /// /// ```text /// -- foundry.toml /// /// -- repo /// |__ .git /// |__sub /// |__ cwd /// ``` /// will still detect `repo` as root pub fn find_project_root_path() -> std::io::Result<PathBuf> { let boundary = find_git_root_path().unwrap_or_else(|_| std::env::current_dir().unwrap()); let cwd = std::env::current_dir()?; let mut cwd = cwd.as_path(); // traverse as long as we're in the current git repo cwd while cwd.starts_with(&boundary) { let file_path = cwd.join(Config::FILE_NAME); if file_path.is_file() { return Ok(cwd.to_path_buf()) } if let Some(parent) = cwd.parent() { cwd = parent; } else { break } } // no foundry.toml found Ok(boundary) } /// Returns all [`Remapping`]s contained in the `remappings` str separated by newlines /// /// # Example /// /// ``` /// use foundry_config::remappings_from_newline; /// let remappings: 
Result<Vec<_>, _> = remappings_from_newline( /// r#" /// file-ds-test/=lib/ds-test/ /// file-other/=lib/other/ /// "#, /// ) /// .collect(); /// ``` pub fn remappings_from_newline( remappings: &str, ) -> impl Iterator<Item = Result<Remapping, RemappingError>> + '_ { remappings.lines().map(|x| x.trim()).filter(|x| !x.is_empty()).map(Remapping::from_str) } /// Returns the remappings from the given var /// /// Returns `None` if the env var is not set, otherwise all Remappings, See /// `remappings_from_newline` pub fn remappings_from_env_var(env_var: &str) -> Option<Result<Vec<Remapping>, RemappingError>> { let val = std::env::var(env_var).ok()?; Some(remappings_from_newline(&val).collect()) } /// Converts the `val` into a `figment::Value::Array` /// /// The values should be separated by commas, surrounding brackets are also supported `[a,b,c]` pub fn to_array_value(val: &str) -> Result<Value, figment::Error> { let value: Value = match Value::from(val) { Value::String(_, val) => val .trim_start_matches('[') .trim_end_matches(']') .split(',') .map(|s| s.to_string()) .collect::<Vec<_>>() .into(), Value::Empty(_, _) => Vec::<Value>::new().into(), val @ Value::Array(_, _) => val, _ => return Err(format!("Invalid value `{val}`, expected an array").into()), }; Ok(value) } /// Returns a list of _unique_ paths to all folders under `root` that contain a `foundry.toml` file /// /// This will also resolve symlinks /// /// # Example /// /// ```no_run /// use foundry_config::utils; /// let dirs = utils::foundry_toml_dirs("./lib"); /// ``` /// /// for following layout this will return /// `["lib/dep1", "lib/dep2"]` /// /// ```text /// lib /// └── dep1 /// │ ├── foundry.toml /// └── dep2 /// ├── foundry.toml /// ``` pub fn foundry_toml_dirs(root: impl AsRef<Path>) -> Vec<PathBuf> { walkdir::WalkDir::new(root) .max_depth(1) .into_iter() .filter_map(Result::ok) .filter(|e| e.file_type().is_dir()) .filter_map(|e| ethers_solc::utils::canonicalize(e.path()).ok()) .filter(|p| 
p.join(Config::FILE_NAME).exists()) .collect() }
30.182432
99
0.606223
fcf11b9dfe10a076e6a236630dda3067fa13daa3
374
// Copyright Kani Contributors // SPDX-License-Identifier: Apache-2.0 OR MIT use std::mem; #[kani::proof] fn main() { let mut var1 = kani::any::<i32>(); let mut var2 = kani::any::<i32>(); let old_var1 = var1; let old_var2 = var2; unsafe { mem::swap(&mut var1, &mut var2); } assert_eq!(var1, old_var2); assert_eq!(var2, old_var1); }
20.777778
45
0.593583
e233ebeb5998999ed1e9b271cf4030ad0b257549
18,663
use anyhow::{ensure, Context, Result}; use asn1::ObjectIdentifier; use lazy_static::lazy_static; use num_bigint::BigUint; use num_traits::One; use crate::padding::Padding; use crate::{ digest::{Digest, DigestOneShot}, math::{inv_mod, mod_exp, rand_prime, Interval}, }; lazy_static! { static ref E3: BigUint = 3u32.into(); } pub trait RsaKey { fn modulus(&self) -> &BigUint; fn pub_exp(&self) -> &BigUint; } pub trait RsaPrivateKey: RsaKey { fn priv_exp(&self) -> &BigUint; } #[derive(Clone)] pub struct RsaKeyImpl { pub modulus: BigUint, pub pub_exp: BigUint, pub priv_exp: Option<BigUint>, } impl RsaKey for RsaKeyImpl { fn modulus(&self) -> &BigUint { &self.modulus } fn pub_exp(&self) -> &BigUint { &self.pub_exp } } impl RsaPrivateKey for RsaKeyImpl { fn priv_exp(&self) -> &BigUint { &(self.priv_exp) .as_ref() .context("Not a private key") .unwrap() } } pub fn gen_rsa(bit_size: u64, pub_exp: &BigUint) -> (impl RsaKey, impl RsaPrivateKey) { let mut p = BigUint::one(); let mut trial = 0; loop { if trial % 10 == 0 { // println!("Generating first prime"); p = rand_prime(bit_size / 2); } trial += 1; // println!("Generating second prime"); let q = rand_prime(bit_size / 2); let totient = (&p - BigUint::one()) * (&q - BigUint::one()); let inverse = inv_mod(pub_exp, &totient); if inverse.is_err() { // println!("Retrying due to bad inverse: {:?}", inverse); continue; } let inverse = inverse.unwrap(); let modulus = &p * &q; let pub_key = RsaKeyImpl { modulus: modulus.clone(), pub_exp: pub_exp.to_owned(), priv_exp: None, }; let priv_key = RsaKeyImpl { modulus: modulus, pub_exp: pub_exp.to_owned(), priv_exp: Some(inverse), }; return (pub_key, priv_key); } } pub fn rsa_public_raw<R: RsaKey + ?Sized>(key: &R, data: &BigUint) -> BigUint { mod_exp(data, key.pub_exp(), key.modulus()) } pub fn rsa_private_raw<R: RsaPrivateKey + ?Sized>(key: &R, data: &BigUint) -> BigUint { mod_exp(data, key.priv_exp(), key.modulus()) } pub fn rsa_pkcs1_15_sign<H, K>(key: &K, data: &[u8]) -> Result<Vec<u8>> 
where K: RsaPrivateKey, H: Digest + DigestOneShot, { let keylength = key.modulus().bits(); let asn1_struct = rsa_pkcs1_15_generate_asn1_struct::<H>(data)?; let padded = Padding::Pkcs1PaddingSigning(keylength).pad(&asn1_struct)?; let signed = rsa_private_raw(key, &BigUint::from_bytes_be(&padded)); Ok(signed.to_bytes_be()) } fn rsa_pkcs1_15_generate_asn1_struct<H>(data: &[u8]) -> Result<Vec<u8>> where H: Digest + DigestOneShot, { let oid = H::oid().context("No OID registered")?; let digest = H::oneshot_digest(&data); Ok(asn1::write(|w| { // DigestInfo ::= SEQUENCE { // digestAlgorithm DigestAlgorithmIdentifier, // digest Digest } w.write_element_with_type::<asn1::Sequence>(&|w| { // AlgorithmIdentifier // DigestAlgorithmIdentifier ::= AlgorithmIdentifier // AlgorithmIdentifier ::= SEQUENCE { // OID // NULL // } w.write_element_with_type::<asn1::Sequence>(&|w| { w.write_element_with_type::<ObjectIdentifier>(oid.to_owned()); w.write_element_with_type::<()>(()); }); w.write_element_with_type::<&[u8]>(&digest); }); })) } fn trim_leading_zeros(data: &[u8]) -> &[u8] { let mut idx = 0; while data[idx] == 0 { idx += 1; } &data[idx..] } pub fn rsa_pkcs1_15_verify<H, K>(key: &K, data: &[u8], signature: &[u8], strict: bool) -> Result<()> where K: RsaKey, H: Digest + DigestOneShot, { let expected_struct = rsa_pkcs1_15_generate_asn1_struct::<H>(data)?; let keylength = key.modulus().bits(); let actual_padded = rsa_public_raw(key, &BigUint::from_bytes_be(signature)).to_bytes_be(); let actual_struct = Padding::Pkcs1PaddingSigning(keylength).unpad(&actual_padded)?; let actual_struct = trim_leading_zeros(&actual_struct); // Generate rather than parse if strict { ensure!(&expected_struct == actual_struct, "Invalid signature"); } else { // This section is horribly stupid, but it is annoying to incorrectly pass ASN.1 and leave a suffix, // so I fake it by just comparing the prefixes. 
ensure!( actual_struct.len() >= expected_struct.len(), "Structure too short" ); let prefix = &actual_struct[..expected_struct.len()]; println!("Expected: {}", hex::encode(&expected_struct)); println!("Actual: {}", hex::encode(&actual_struct)); ensure!(expected_struct == prefix, "Invalid signature"); } Ok(()) } #[allow(non_snake_case)] pub fn bleichenbacher<K, F>(key: &K, c: &BigUint, oracle: F) -> Result<BigUint> where K: RsaKey + ?Sized, F: Fn(&[u8]) -> bool, { // Setup let n = key.modulus(); let k = (n.bits() + 7) / 8; let one = BigUint::one(); let two: BigUint = 2u32.into(); let B = &one << (8 * (k - 2)); let two_B = &two * &B; let three_B = &two_B + &B; // println!( // "n = {}, 2B = {}, 3B = {}, n / 3B = {}", // n.to_str_radix(16), // two_B.to_str_radix(16), // three_B.to_str_radix(16), // n / &three_B // ); // Step 1: Blinding println!("Bleichenbacher step 1"); let mut s = BigUint::one(); let mut M = vec![Interval(two_B.clone(), &three_B - &one)]; let mut i = 1; // loop { // let s_e = rsa_public_raw(key, &s); // let c0 = (c * &s_e) % n; // if oracle(&c0.to_bytes_be()) { // break; // } // s += &one; // } let s_0 = s.clone(); while M.len() != 1 || !M.first().context("No more intervals?!")?.width().is_one() { // println!("S = {}", s); Interval::print_stats(&M); // Step 2 if i == 1 { println!("Bleichenbacher step 2.a"); s = n / &three_B; // println!("Starting value for si = {}", s); // println!("c = {}", c); loop { let s_e = rsa_public_raw(key, &s); let c1 = (c * &s_e) % n; if oracle(&c1.to_bytes_be()) { break; } s += &one; } } else if !M.len().is_one() { println!("Bleichenbacher step 2.b"); loop { s += &one; let s_e = rsa_public_raw(key, &s); let c1 = (c * &s_e) % n; if oracle(&c1.to_bytes_be()) { break; } } // todo!("Not yet implemented"); } else { println!("Bleichenbacher step 2.c"); let m = M.first().unwrap(); let (a, b) = (&m.0, &m.1); let top = (b * &s) - &two_B; // println!("Top: {}", top); let mut r = &two * (top / n); let mut rn = &r * n; 'two_c_loop: 
loop { // println!("r: {}", r); let start = (&two_B + &rn) / b; let end = (&three_B + &rn) / a; s = start; let mut s_e = rsa_public_raw(key, &s); while &s <= &end { // println!("s = {}", s); let c_n = (c * &s_e) % n; if oracle(&c_n.to_bytes_be()) { // println!("Found!"); break 'two_c_loop; } s += &one; s_e = rsa_public_raw(key, &s); //(s_e * &s) % n; } r += &one; rn += n; } // two_c_loop } println!("Bleichenbacher step 3"); // println!("s = {}", s); let s_minus_1 = &s - &one; // Step 3 M = M .iter() .flat_map(|m| { let mut working = vec![]; // println!("Interval: {}", m); let (a, b) = (&m.0, &m.1); // println!("Foo"); // println!("as ? 3B = {} ? {}", a * &s, &three_B); let start = ((a * &s) - &three_B + &one) / n; // println!("a = {}, si = {}", a, s); // println!("Bar"); let end = ((b * &s) - &two_B) / n; let mut r = start; while &r <= &end { let rn = &r * n; // println!("r: {}, rn: {}", r, rn); let lower_candidate = (&two_B + &rn + &s_minus_1) / &s; let upper_candidate = (&three_B - &one + &rn) / &s; let new_lower = a.max(&lower_candidate).clone(); let new_upper = b.min(&upper_candidate).clone(); if new_lower <= new_upper { working.push(Interval(new_lower, new_upper)); } r += &one; } working }) .collect(); // println!("{:?}", M); // Simplify intervals // println!("Baz"); let old_len = M.len(); if old_len != 1 { M = Interval::simplify(M); } println!("Simplified from {} intervals to {}", old_len, M.len()); // Step 4 increment i += 1; } // Step 4 Completion println!("Bleichenbacher step 4 (complete)"); let result = &M.first().context("No intervals?!")?.0; if s_0.is_one() { Ok(result.clone()) } else { let s_inv = inv_mod(&s_0, n)?; Ok((result * s_inv) % n) } } #[cfg(test)] mod tests { use crate::{ digest::Sha1, oracles::{Challenge46Oracle, Challenge47Oracle}, }; use anyhow::Result; use num_bigint::RandBigInt; use num_traits::Zero; use rand::RngCore; use rand_core::OsRng; use super::*; #[test] fn rsa_smoke() { let sizes = [512, 1024, 2048]; for size in sizes.iter() { 
println!("Testing size {}", size); for trial in 0..5 { println!(" Testing key: {}", trial); let (pub_key, priv_key) = gen_rsa(*size, &E3); println!(" Is valid?"); assert_eq!(pub_key.modulus(), priv_key.modulus()); assert!(*size - pub_key.modulus().bits() < 2); assert_eq!(pub_key.pub_exp(), priv_key.pub_exp()); let tmp: &BigUint = &E3; assert_eq!(tmp, pub_key.pub_exp()); let plaintext = crate::math::rand_bigint(pub_key.modulus()); let ciphertext = rsa_public_raw(&pub_key, &plaintext); let decrypted = rsa_private_raw(&priv_key, &ciphertext); assert_eq!(plaintext, decrypted); } } } #[test] fn challenge_40() -> Result<()> { let target_plaintext = OsRng.gen_biguint(300); println!("Gen key 1"); let (pub_0, _) = gen_rsa(512, &E3); println!("Gen key 2"); let (pub_1, _) = gen_rsa(512, &E3); println!("Gen key 3"); let (pub_2, _) = gen_rsa(512, &E3); println!("Let's get cracking!"); let c_0 = rsa_public_raw(&pub_0, &target_plaintext); let c_1 = rsa_public_raw(&pub_1, &target_plaintext); let c_2 = rsa_public_raw(&pub_2, &target_plaintext); // Mount actual attack let n_0 = pub_0.modulus(); let n_1 = pub_1.modulus(); let n_2 = pub_2.modulus(); let n_012 = n_0 * n_1 * n_2; let m_s_0 = &n_012 / n_0; let m_s_1 = &n_012 / n_1; let m_s_2 = &n_012 / n_2; let result = ((&c_0 * &m_s_0 * inv_mod(&m_s_0, &n_0)?) + (&c_1 * &m_s_1 * inv_mod(&m_s_1, &n_1)?) 
+ (&c_2 * &m_s_2 * inv_mod(&m_s_2, &n_2)?)) % n_012; let result = result.nth_root(3); assert_eq!(target_plaintext, result); Ok(()) } #[test] fn challenge_41() -> Result<()> { let (pub_key, priv_key) = gen_rsa(512, &E3); println!("Modulus: {}", pub_key.modulus()); let target_plaintext = OsRng.gen_biguint(300); let victim_ciphertext = rsa_public_raw(&pub_key, &target_plaintext); let s: BigUint = 2u32.into(); let s_e = mod_exp(&s, pub_key.pub_exp(), pub_key.modulus()); let s_inv = inv_mod(&s, pub_key.modulus())?; let tampered = (&victim_ciphertext * &s_e) % pub_key.modulus(); // Oracle call let tampered_plaintext = rsa_private_raw(&priv_key, &tampered); // End oracle let decrypted = (&tampered_plaintext * s_inv) % pub_key.modulus(); assert_eq!(target_plaintext, decrypted); Ok(()) } #[test] fn rsa_sig_smoke() -> Result<()> { let mut msg = [0u8; 32]; OsRng.fill_bytes(&mut msg); let msg = msg; let (pub_key, priv_key) = gen_rsa(1024, &E3); let signature = rsa_pkcs1_15_sign::<Sha1, _>(&priv_key, &msg)?; assert!(rsa_pkcs1_15_verify::<Sha1, _>(&pub_key, &msg, &signature, true).is_ok()); assert!(rsa_pkcs1_15_verify::<Sha1, _>(&pub_key, &msg, &signature, false).is_ok()); let mut bad_msg = msg.clone(); bad_msg[0] ^= 0x14; assert!(rsa_pkcs1_15_verify::<Sha1, _>(&pub_key, &bad_msg, &signature, true).is_err()); assert!(rsa_pkcs1_15_verify::<Sha1, _>(&pub_key, &bad_msg, &signature, false).is_err()); let padded = rsa_public_raw(&pub_key, &BigUint::from_bytes_be(&signature)).to_bytes_be(); // println!("Padded: {}", hex::encode(&padded)); let unpadded = Padding::Pkcs1PaddingSigning(pub_key.modulus().bits()).unpad(&padded)?; // println!("Raw signature: {}", hex::encode(&unpadded)); let mut extended_signature = unpadded.clone(); extended_signature.resize(extended_signature.len() + 2, 0); let padded = Padding::Pkcs1PaddingSigning(pub_key.modulus().bits()) .pad(&extended_signature) .unwrap(); // println!("Padded: {}", hex::encode(&padded)); let signature = rsa_private_raw(&priv_key, 
&BigUint::from_bytes_be(&padded)).to_bytes_be(); assert!(rsa_pkcs1_15_verify::<Sha1, _>(&pub_key, &msg, &signature, true).is_err()); rsa_pkcs1_15_verify::<Sha1, _>(&pub_key, &msg, &signature, false) } #[test] fn challenge_42() -> Result<()> { let mut msg = [0u8; 32]; OsRng.fill_bytes(&mut msg); let msg = msg; println!("Generating key"); let (pub_key, _) = gen_rsa(2048, &E3); let mut asn1_struct = rsa_pkcs1_15_generate_asn1_struct::<Sha1>(&msg)?; let new_len = (2048 / 8) - 12; asn1_struct.resize(new_len, 0xff); let unpadded = Padding::Pkcs1PaddingSigning(pub_key.modulus().bits()).pad(&asn1_struct)?; println!("Calculating root"); let root: BigUint = BigUint::from_bytes_be(&unpadded).nth_root(3); let signature = root.to_bytes_be(); println!("Verifying"); assert!(rsa_pkcs1_15_verify::<Sha1, _>(&pub_key, &msg, &signature, true).is_err()); rsa_pkcs1_15_verify::<Sha1, _>(&pub_key, &msg, &signature, false) } #[test] #[ignore = "slow"] fn challenge_46() -> Result<()> { println!("Creating oracle"); let oracle = Challenge46Oracle::new(); let key = oracle.public_key(); let ciphertext = oracle.ciphertext(); let mut lower = BigUint::zero(); let mut upper = key.modulus().to_owned(); let mut current = BigUint::from_bytes_be(ciphertext); let two: BigUint = 2u32.into(); let two_e = rsa_public_raw(key, &two); println!("Starting loop"); let mut count = 0; while &upper > &lower { current = (current * &two_e) % key.modulus(); let midpoint = (&upper + &lower + BigUint::one()) / &two; if oracle.oracle(&current.to_bytes_be()) { // println!("Odd"); lower = midpoint; } else { // println!("Even"); upper = midpoint; } count += 1; let upper_guess = upper.to_bytes_be(); let lower_guess = lower.to_bytes_be(); println!( "Guess({})\n\t{}\n\t{}", count, hex::encode(&upper_guess), hex::encode(&lower_guess) ); let upper_guess = String::from_utf8_lossy(&upper_guess); let lower_guess = String::from_utf8_lossy(&lower_guess); println!("\t{}\n\t{}", upper_guess, &lower_guess); } // let upper_bytes = 
upper.to_bytes_be(); // println!("Guess hex: {}", hex::encode(&upper_bytes)); // let guess = String::from_utf8(upper_bytes)?; // oracle.assert_guess(&guess); // Don't know why but cannot get the final byte right Ok(()) } #[test] pub fn challenge_47() -> Result<()> { let oracle = Challenge47Oracle::new(256); let ciphertext = oracle.ciphertext(); let ciphertext = BigUint::from_bytes_be(ciphertext); let result = bleichenbacher(oracle.public_key(), &ciphertext, |c| oracle.lax(c))?; let plaintext = result.to_bytes_be(); let plaintext = Padding::Pkcs1PaddingEncryption(256).unpad(&plaintext)?; let plaintext = String::from_utf8_lossy(&plaintext); println!("Plaintext? {}", plaintext); oracle.assert_guess(&plaintext); Ok(()) } #[test] pub fn challenge_48() -> Result<()> { let oracle = Challenge47Oracle::new(768); let ciphertext = oracle.ciphertext(); let ciphertext = BigUint::from_bytes_be(ciphertext); let result = bleichenbacher(oracle.public_key(), &ciphertext, |c| oracle.lax(c))?; let plaintext = result.to_bytes_be(); // println!("Padded: {}", hex::encode(&plaintext)); let plaintext = Padding::Pkcs1PaddingEncryption(768).unpad(&plaintext)?; let plaintext = String::from_utf8_lossy(&plaintext); println!("Plaintext? {}", plaintext); oracle.assert_guess(&plaintext); Ok(()) } }
33.627027
108
0.518673
1134611767485afce6de16cb82790b7941d707d8
7,235
use super::DmlHandler; use crate::namespace_cache::NamespaceCache; use async_trait::async_trait; use data_types2::{DatabaseName, DeletePredicate, KafkaTopicId, QueryPoolId}; use iox_catalog::interface::Catalog; use observability_deps::tracing::*; use std::{fmt::Debug, marker::PhantomData, sync::Arc}; use thiserror::Error; use trace::ctx::SpanContext; /// An error auto-creating the request namespace. #[derive(Debug, Error)] pub enum NamespaceCreationError { /// An error returned from a namespace creation request. #[error("failed to create namespace: {0}")] Create(iox_catalog::interface::Error), } /// A layer to populate the [`Catalog`] with all the namespaces the router /// observes. /// /// Uses a [`NamespaceCache`] to limit issuing create requests to namespaces the /// router has not yet observed a schema for. #[derive(Debug)] pub struct NamespaceAutocreation<C, T> { catalog: Arc<dyn Catalog>, cache: C, topic_id: KafkaTopicId, query_id: QueryPoolId, retention: String, _input: PhantomData<T>, } impl<C, T> NamespaceAutocreation<C, T> { /// Return a new [`NamespaceAutocreation`] layer that ensures a requested /// namespace exists in `catalog`. /// /// If the namespace does not exist, it is created with the specified /// `topic_id`, `query_id` and `retention` policy. /// /// Namespaces are looked up in `cache`, skipping the creation request to /// the catalog if there's a hit. pub fn new( catalog: Arc<dyn Catalog>, cache: C, topic_id: KafkaTopicId, query_id: QueryPoolId, retention: String, ) -> Self { Self { catalog, cache, topic_id, query_id, retention, _input: Default::default(), } } } #[async_trait] impl<C, T> DmlHandler for NamespaceAutocreation<C, T> where C: NamespaceCache, T: Debug + Send + Sync, { type WriteError = NamespaceCreationError; type DeleteError = NamespaceCreationError; // This handler accepts any write input type, returning it to the caller // unmodified. type WriteInput = T; type WriteOutput = T; /// Write `batches` to `namespace`. 
async fn write( &self, namespace: &'_ DatabaseName<'static>, batches: Self::WriteInput, _span_ctx: Option<SpanContext>, ) -> Result<Self::WriteOutput, Self::WriteError> { // If the namespace does not exist in the schema cache (populated by the // schema validator) request an (idempotent) creation. if self.cache.get_schema(namespace).is_none() { trace!(%namespace, "namespace auto-create cache miss"); let mut repos = self.catalog.repositories().await; match repos .namespaces() .create( namespace.as_str(), &self.retention, self.topic_id, self.query_id, ) .await { Ok(_) => { debug!(%namespace, "created namespace"); } Err(iox_catalog::interface::Error::NameExists { .. }) => { // Either the cache has not yet converged to include this // namespace, or another thread raced populating the catalog // and beat this thread to it. debug!(%namespace, "spurious namespace create failed"); } Err(e) => { error!(error=%e, %namespace, "failed to auto-create namespace"); return Err(NamespaceCreationError::Create(e)); } } } Ok(batches) } /// Delete the data specified in `delete`. 
async fn delete( &self, _namespace: &DatabaseName<'static>, _table_name: &str, _predicate: &DeletePredicate, _span_ctx: Option<SpanContext>, ) -> Result<(), Self::DeleteError> { Ok(()) } } #[cfg(test)] mod tests { use super::*; use crate::namespace_cache::MemoryNamespaceCache; use data_types2::{Namespace, NamespaceId, NamespaceSchema}; use iox_catalog::mem::MemCatalog; use std::sync::Arc; #[tokio::test] async fn test_cache_hit() { let ns = DatabaseName::try_from("bananas").unwrap(); // Prep the cache before the test to cause a hit let cache = Arc::new(MemoryNamespaceCache::default()); cache.put_schema( ns.clone(), NamespaceSchema { id: NamespaceId::new(1), kafka_topic_id: KafkaTopicId::new(2), query_pool_id: QueryPoolId::new(3), tables: Default::default(), }, ); let metrics = Arc::new(metric::Registry::new()); let catalog: Arc<dyn Catalog> = Arc::new(MemCatalog::new(metrics)); let creator = NamespaceAutocreation::new( Arc::clone(&catalog), cache, KafkaTopicId::new(42), QueryPoolId::new(42), "inf".to_owned(), ); // Drive the code under test creator .write(&ns, (), None) .await .expect("handler should succeed"); // The cache hit should mean the catalog SHOULD NOT see a create request // for the namespace. let mut repos = catalog.repositories().await; assert!( repos .namespaces() .get_by_name(ns.as_str()) .await .expect("lookup should not error") .is_none(), "expected no request to the catalog" ); } #[tokio::test] async fn test_cache_miss() { let ns = DatabaseName::try_from("bananas").unwrap(); let cache = Arc::new(MemoryNamespaceCache::default()); let metrics = Arc::new(metric::Registry::new()); let catalog: Arc<dyn Catalog> = Arc::new(MemCatalog::new(metrics)); let creator = NamespaceAutocreation::new( Arc::clone(&catalog), cache, KafkaTopicId::new(42), QueryPoolId::new(42), "inf".to_owned(), ); creator .write(&ns, (), None) .await .expect("handler should succeed"); // The cache miss should mean the catalog MUST see a create request for // the namespace. 
let mut repos = catalog.repositories().await; let got = repos .namespaces() .get_by_name(ns.as_str()) .await .expect("lookup should not error") .expect("creation request should be sent to catalog"); assert_eq!( got, Namespace { id: NamespaceId::new(1), name: ns.to_string(), retention_duration: Some("inf".to_owned()), kafka_topic_id: KafkaTopicId::new(42), query_pool_id: QueryPoolId::new(42), max_tables: 10000, max_columns_per_table: 1000, } ); } }
31.320346
84
0.556462
161c4a47262a2b07e8e1a3691c4f6e2e2fc25064
12,495
// Service to verify accounts hashes with other trusted validator nodes. // // Each interval, publish the snapshat hash which is the full accounts state // hash on gossip. Monitor gossip for messages from validators in the --trusted-validators // set and halt the node if a mismatch is detected. use crate::snapshot_packager_service::PendingSnapshotPackage; use rayon::ThreadPool; use solana_gossip::cluster_info::{ClusterInfo, MAX_SNAPSHOT_HASHES}; use solana_runtime::{ accounts_db, snapshot_package::{AccountsPackage, AccountsPackagePre, AccountsPackageReceiver}, snapshot_utils::SnapshotArchiveInfoGetter, }; use solana_sdk::{clock::Slot, hash::Hash, pubkey::Pubkey}; use std::collections::{HashMap, HashSet}; use std::{ sync::{ atomic::{AtomicBool, Ordering}, mpsc::RecvTimeoutError, Arc, }, thread::{self, Builder, JoinHandle}, time::Duration, }; pub struct AccountsHashVerifier { t_accounts_hash_verifier: JoinHandle<()>, } impl AccountsHashVerifier { pub fn new( accounts_package_receiver: AccountsPackageReceiver, pending_snapshot_package: Option<PendingSnapshotPackage>, exit: &Arc<AtomicBool>, cluster_info: &Arc<ClusterInfo>, trusted_validators: Option<HashSet<Pubkey>>, halt_on_trusted_validators_accounts_hash_mismatch: bool, fault_injection_rate_slots: u64, snapshot_interval_slots: u64, ) -> Self { let exit = exit.clone(); let cluster_info = cluster_info.clone(); let t_accounts_hash_verifier = Builder::new() .name("solana-hash-accounts".to_string()) .spawn(move || { let mut hashes = vec![]; let mut thread_pool_storage = None; loop { if exit.load(Ordering::Relaxed) { break; } match accounts_package_receiver.recv_timeout(Duration::from_secs(1)) { Ok(accounts_package) => { if accounts_package.hash_for_testing.is_some() && thread_pool_storage.is_none() { thread_pool_storage = Some(accounts_db::make_min_priority_thread_pool()); } Self::process_accounts_package_pre( accounts_package, &cluster_info, &trusted_validators, halt_on_trusted_validators_accounts_hash_mismatch, 
&pending_snapshot_package, &mut hashes, &exit, fault_injection_rate_slots, snapshot_interval_slots, thread_pool_storage.as_ref(), ); } Err(RecvTimeoutError::Disconnected) => break, Err(RecvTimeoutError::Timeout) => (), } } }) .unwrap(); Self { t_accounts_hash_verifier, } } #[allow(clippy::too_many_arguments)] fn process_accounts_package_pre( accounts_package: AccountsPackagePre, cluster_info: &ClusterInfo, trusted_validators: &Option<HashSet<Pubkey>>, halt_on_trusted_validator_accounts_hash_mismatch: bool, pending_snapshot_package: &Option<PendingSnapshotPackage>, hashes: &mut Vec<(Slot, Hash)>, exit: &Arc<AtomicBool>, fault_injection_rate_slots: u64, snapshot_interval_slots: u64, thread_pool: Option<&ThreadPool>, ) { let accounts_package = solana_runtime::snapshot_utils::process_accounts_package_pre( accounts_package, thread_pool, None, ); Self::process_accounts_package( accounts_package, cluster_info, trusted_validators, halt_on_trusted_validator_accounts_hash_mismatch, pending_snapshot_package, hashes, exit, fault_injection_rate_slots, snapshot_interval_slots, ); } fn process_accounts_package( accounts_package: AccountsPackage, cluster_info: &ClusterInfo, trusted_validators: &Option<HashSet<Pubkey>>, halt_on_trusted_validator_accounts_hash_mismatch: bool, pending_snapshot_package: &Option<PendingSnapshotPackage>, hashes: &mut Vec<(Slot, Hash)>, exit: &Arc<AtomicBool>, fault_injection_rate_slots: u64, snapshot_interval_slots: u64, ) { let hash = *accounts_package.hash(); if fault_injection_rate_slots != 0 && accounts_package.slot() % fault_injection_rate_slots == 0 { // For testing, publish an invalid hash to gossip. 
use rand::{thread_rng, Rng}; use solana_sdk::hash::extend_and_hash; warn!("inserting fault at slot: {}", accounts_package.slot()); let rand = thread_rng().gen_range(0, 10); let hash = extend_and_hash(&hash, &[rand]); hashes.push((accounts_package.slot(), hash)); } else { hashes.push((accounts_package.slot(), hash)); } while hashes.len() > MAX_SNAPSHOT_HASHES { hashes.remove(0); } if halt_on_trusted_validator_accounts_hash_mismatch { let mut slot_to_hash = HashMap::new(); for (slot, hash) in hashes.iter() { slot_to_hash.insert(*slot, *hash); } if Self::should_halt(cluster_info, trusted_validators, &mut slot_to_hash) { exit.store(true, Ordering::Relaxed); } } if accounts_package.block_height % snapshot_interval_slots == 0 { if let Some(pending_snapshot_package) = pending_snapshot_package.as_ref() { *pending_snapshot_package.lock().unwrap() = Some(accounts_package); } } cluster_info.push_accounts_hashes(hashes.clone()); } fn should_halt( cluster_info: &ClusterInfo, trusted_validators: &Option<HashSet<Pubkey>>, slot_to_hash: &mut HashMap<Slot, Hash>, ) -> bool { let mut verified_count = 0; let mut highest_slot = 0; if let Some(trusted_validators) = trusted_validators.as_ref() { for trusted_validator in trusted_validators { let is_conflicting = cluster_info.get_accounts_hash_for_node(trusted_validator, |accounts_hashes| { accounts_hashes.iter().any(|(slot, hash)| { if let Some(reference_hash) = slot_to_hash.get(slot) { if *hash != *reference_hash { error!("Trusted validator {} produced conflicting hashes for slot: {} ({} != {})", trusted_validator, slot, hash, reference_hash, ); true } else { verified_count += 1; false } } else { highest_slot = std::cmp::max(*slot, highest_slot); slot_to_hash.insert(*slot, *hash); false } }) }).unwrap_or(false); if is_conflicting { return true; } } } inc_new_counter_info!("accounts_hash_verifier-hashes_verified", verified_count); datapoint_info!( "accounts_hash_verifier", ("highest_slot_verified", highest_slot, i64), ); false } pub fn 
join(self) -> thread::Result<()> { self.t_accounts_hash_verifier.join() } } #[cfg(test)] mod tests { use super::*; use solana_gossip::{cluster_info::make_accounts_hashes_message, contact_info::ContactInfo}; use solana_runtime::snapshot_utils::{ArchiveFormat, SnapshotVersion}; use solana_sdk::{ hash::hash, signature::{Keypair, Signer}, }; use solana_streamer::socket::SocketAddrSpace; fn new_test_cluster_info(contact_info: ContactInfo) -> ClusterInfo { ClusterInfo::new( contact_info, Arc::new(Keypair::new()), SocketAddrSpace::Unspecified, ) } #[test] fn test_should_halt() { let keypair = Keypair::new(); let contact_info = ContactInfo::new_localhost(&keypair.pubkey(), 0); let cluster_info = new_test_cluster_info(contact_info); let cluster_info = Arc::new(cluster_info); let mut trusted_validators = HashSet::new(); let mut slot_to_hash = HashMap::new(); assert!(!AccountsHashVerifier::should_halt( &cluster_info, &Some(trusted_validators.clone()), &mut slot_to_hash, )); let validator1 = Keypair::new(); let hash1 = hash(&[1]); let hash2 = hash(&[2]); { let message = make_accounts_hashes_message(&validator1, vec![(0, hash1)]).unwrap(); cluster_info.push_message(message); cluster_info.flush_push_queue(); } slot_to_hash.insert(0, hash2); trusted_validators.insert(validator1.pubkey()); assert!(AccountsHashVerifier::should_halt( &cluster_info, &Some(trusted_validators), &mut slot_to_hash, )); } #[test] fn test_max_hashes() { solana_logger::setup(); use std::path::PathBuf; use tempfile::TempDir; let keypair = Keypair::new(); let contact_info = ContactInfo::new_localhost(&keypair.pubkey(), 0); let cluster_info = new_test_cluster_info(contact_info); let cluster_info = Arc::new(cluster_info); let trusted_validators = HashSet::new(); let exit = Arc::new(AtomicBool::new(false)); let mut hashes = vec![]; for i in 0..MAX_SNAPSHOT_HASHES + 1 { let slot = 100 + i as u64; let block_height = 100 + i as u64; let slot_deltas = vec![]; let snapshot_links = TempDir::new().unwrap(); let 
storages = vec![]; let snapshot_archive_path = PathBuf::from("."); let hash = hash(&[i as u8]); let archive_format = ArchiveFormat::TarBzip2; let snapshot_version = SnapshotVersion::default(); let accounts_package = AccountsPackage::new( slot, block_height, slot_deltas, snapshot_links, storages, snapshot_archive_path, hash, archive_format, snapshot_version, ); AccountsHashVerifier::process_accounts_package( accounts_package, &cluster_info, &Some(trusted_validators.clone()), false, &None, &mut hashes, &exit, 0, 100, ); // sleep for 1ms to create a newer timestmap for gossip entry // otherwise the timestamp won't be newer. std::thread::sleep(Duration::from_millis(1)); } cluster_info.flush_push_queue(); let cluster_hashes = cluster_info .get_accounts_hash_for_node(&keypair.pubkey(), |c| c.clone()) .unwrap(); info!("{:?}", cluster_hashes); assert_eq!(hashes.len(), MAX_SNAPSHOT_HASHES); assert_eq!(cluster_hashes.len(), MAX_SNAPSHOT_HASHES); assert_eq!(cluster_hashes[0], (101, hash(&[1]))); assert_eq!( cluster_hashes[MAX_SNAPSHOT_HASHES - 1], ( 100 + MAX_SNAPSHOT_HASHES as u64, hash(&[MAX_SNAPSHOT_HASHES as u8]) ) ); } }
36.967456
114
0.544378
03e9d6a420e3736bc91d560acb79468f29b0cb0d
13,125
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. //! Defines the SORT plan use std::any::Any; use std::pin::Pin; use std::sync::Arc; use std::task::{Context, Poll}; use futures::stream::Stream; use futures::Future; use pin_project_lite::pin_project; pub use arrow::compute::SortOptions; use arrow::compute::{concat, lexsort_to_indices, take, SortColumn, TakeOptions}; use arrow::datatypes::SchemaRef; use arrow::error::Result as ArrowResult; use arrow::record_batch::RecordBatch; use arrow::{array::ArrayRef, error::ArrowError}; use super::{RecordBatchStream, SendableRecordBatchStream}; use crate::error::{DataFusionError, Result}; use crate::physical_plan::expressions::PhysicalSortExpr; use crate::physical_plan::{common, Distribution, ExecutionPlan, Partitioning}; use async_trait::async_trait; /// Sort execution plan #[derive(Debug)] pub struct SortExec { /// Input schema input: Arc<dyn ExecutionPlan>, /// Sort expressions expr: Vec<PhysicalSortExpr>, /// Number of threads to execute input partitions on before combining into a single partition concurrency: usize, } impl SortExec { /// Create a new sort execution plan pub fn try_new( expr: Vec<PhysicalSortExpr>, input: Arc<dyn ExecutionPlan>, concurrency: usize, ) -> Result<Self> { Ok(Self 
{ expr, input, concurrency, }) } /// Input schema pub fn input(&self) -> &Arc<dyn ExecutionPlan> { &self.input } /// Sort expressions pub fn expr(&self) -> &[PhysicalSortExpr] { &self.expr } } #[async_trait] impl ExecutionPlan for SortExec { /// Return a reference to Any that can be used for downcasting fn as_any(&self) -> &dyn Any { self } fn schema(&self) -> SchemaRef { self.input.schema() } fn children(&self) -> Vec<Arc<dyn ExecutionPlan>> { vec![self.input.clone()] } /// Get the output partitioning of this plan fn output_partitioning(&self) -> Partitioning { Partitioning::UnknownPartitioning(1) } fn required_child_distribution(&self) -> Distribution { Distribution::SinglePartition } fn with_new_children( &self, children: Vec<Arc<dyn ExecutionPlan>>, ) -> Result<Arc<dyn ExecutionPlan>> { match children.len() { 1 => Ok(Arc::new(SortExec::try_new( self.expr.clone(), children[0].clone(), self.concurrency, )?)), _ => Err(DataFusionError::Internal( "SortExec wrong number of children".to_string(), )), } } async fn execute(&self, partition: usize) -> Result<SendableRecordBatchStream> { if 0 != partition { return Err(DataFusionError::Internal(format!( "SortExec invalid partition {}", partition ))); } // sort needs to operate on a single partition currently if 1 != self.input.output_partitioning().partition_count() { return Err(DataFusionError::Internal( "SortExec requires a single input partition".to_owned(), )); } let input = self.input.execute(0).await?; Ok(Box::pin(SortStream::new(input, self.expr.clone()))) } } fn sort_batches( batches: &Vec<RecordBatch>, schema: &SchemaRef, expr: &[PhysicalSortExpr], ) -> ArrowResult<Option<RecordBatch>> { if batches.is_empty() { return Ok(None); } // combine all record batches into one for each column let combined_batch = RecordBatch::try_new( schema.clone(), schema .fields() .iter() .enumerate() .map(|(i, _)| { concat( &batches .iter() .map(|batch| batch.column(i).as_ref()) .collect::<Vec<_>>(), ) }) 
.collect::<ArrowResult<Vec<ArrayRef>>>()?, )?; // sort combined record batch let indices = lexsort_to_indices( &expr .iter() .map(|e| e.evaluate_to_sort_column(&combined_batch)) .collect::<Result<Vec<SortColumn>>>() .map_err(DataFusionError::into_arrow_external_error)?, )?; // reorder all rows based on sorted indices let sorted_batch = RecordBatch::try_new( schema.clone(), combined_batch .columns() .iter() .map(|column| { take( column.as_ref(), &indices, // disable bound check overhead since indices are already generated from // the same record batch Some(TakeOptions { check_bounds: false, }), ) }) .collect::<ArrowResult<Vec<ArrayRef>>>()?, ); sorted_batch.map(Some) } pin_project! { struct SortStream { #[pin] output: futures::channel::oneshot::Receiver<ArrowResult<Option<RecordBatch>>>, finished: bool, schema: SchemaRef, } } impl SortStream { fn new(input: SendableRecordBatchStream, expr: Vec<PhysicalSortExpr>) -> Self { let (tx, rx) = futures::channel::oneshot::channel(); let schema = input.schema(); tokio::spawn(async move { let schema = input.schema(); let sorted_batch = common::collect(input) .await .map_err(DataFusionError::into_arrow_external_error) .and_then(move |batches| sort_batches(&batches, &schema, &expr)); tx.send(sorted_batch) }); Self { output: rx, finished: false, schema, } } } impl Stream for SortStream { type Item = ArrowResult<RecordBatch>; fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> { if self.finished { return Poll::Ready(None); } // is the output ready? 
let this = self.project(); let output_poll = this.output.poll(cx); match output_poll { Poll::Ready(result) => { *this.finished = true; // check for error in receiving channel and unwrap actual result let result = match result { Err(e) => Some(Err(ArrowError::ExternalError(Box::new(e)))), // error receiving Ok(result) => result.transpose(), }; Poll::Ready(result) } Poll::Pending => Poll::Pending, } } } impl RecordBatchStream for SortStream { fn schema(&self) -> SchemaRef { self.schema.clone() } } #[cfg(test)] mod tests { use super::*; use crate::physical_plan::expressions::col; use crate::physical_plan::memory::MemoryExec; use crate::physical_plan::merge::MergeExec; use crate::physical_plan::{ collect, csv::{CsvExec, CsvReadOptions}, }; use crate::test; use arrow::array::*; use arrow::datatypes::*; #[tokio::test] async fn test_sort() -> Result<()> { let schema = test::aggr_test_schema(); let partitions = 4; let path = test::create_partitioned_csv("aggregate_test_100.csv", partitions)?; let csv = CsvExec::try_new(&path, CsvReadOptions::new().schema(&schema), None, 1024)?; let sort_exec = Arc::new(SortExec::try_new( vec![ // c1 string column PhysicalSortExpr { expr: col("c1"), options: SortOptions::default(), }, // c2 uin32 column PhysicalSortExpr { expr: col("c2"), options: SortOptions::default(), }, // c7 uin8 column PhysicalSortExpr { expr: col("c7"), options: SortOptions::default(), }, ], Arc::new(MergeExec::new(Arc::new(csv))), 2, )?); let result: Vec<RecordBatch> = collect(sort_exec).await?; assert_eq!(result.len(), 1); let columns = result[0].columns(); let c1 = as_string_array(&columns[0]); assert_eq!(c1.value(0), "a"); assert_eq!(c1.value(c1.len() - 1), "e"); let c2 = as_primitive_array::<UInt32Type>(&columns[1]); assert_eq!(c2.value(0), 1); assert_eq!(c2.value(c2.len() - 1), 5,); let c7 = as_primitive_array::<UInt8Type>(&columns[6]); assert_eq!(c7.value(0), 15); assert_eq!(c7.value(c7.len() - 1), 254,); Ok(()) } #[tokio::test] async fn 
test_lex_sort_by_float() -> Result<()> { let schema = Arc::new(Schema::new(vec![ Field::new("a", DataType::Float32, true), Field::new("b", DataType::Float64, true), ])); // define data. let batch = RecordBatch::try_new( schema.clone(), vec![ Arc::new(Float32Array::from(vec![ Some(f32::NAN), None, None, Some(f32::NAN), Some(1.0_f32), Some(1.0_f32), Some(2.0_f32), Some(3.0_f32), ])), Arc::new(Float64Array::from(vec![ Some(200.0_f64), Some(20.0_f64), Some(10.0_f64), Some(100.0_f64), Some(f64::NAN), None, None, Some(f64::NAN), ])), ], )?; let sort_exec = Arc::new(SortExec::try_new( vec![ PhysicalSortExpr { expr: col("a"), options: SortOptions { descending: true, nulls_first: true, }, }, PhysicalSortExpr { expr: col("b"), options: SortOptions { descending: false, nulls_first: false, }, }, ], Arc::new(MemoryExec::try_new(&vec![vec![batch]], schema, None)?), 2, )?); assert_eq!(DataType::Float32, *sort_exec.schema().field(0).data_type()); assert_eq!(DataType::Float64, *sort_exec.schema().field(1).data_type()); let result: Vec<RecordBatch> = collect(sort_exec).await?; assert_eq!(result.len(), 1); let columns = result[0].columns(); assert_eq!(DataType::Float32, *columns[0].data_type()); assert_eq!(DataType::Float64, *columns[1].data_type()); let a = as_primitive_array::<Float32Type>(&columns[0]); let b = as_primitive_array::<Float64Type>(&columns[1]); // convert result to strings to allow comparing to expected result containing NaN let result: Vec<(Option<String>, Option<String>)> = (0..result[0].num_rows()) .map(|i| { let aval = if a.is_valid(i) { Some(a.value(i).to_string()) } else { None }; let bval = if b.is_valid(i) { Some(b.value(i).to_string()) } else { None }; (aval, bval) }) .collect(); let expected: Vec<(Option<String>, Option<String>)> = vec![ (None, Some("10".to_owned())), (None, Some("20".to_owned())), (Some("NaN".to_owned()), Some("100".to_owned())), (Some("NaN".to_owned()), Some("200".to_owned())), (Some("3".to_owned()), Some("NaN".to_owned())), 
(Some("2".to_owned()), None), (Some("1".to_owned()), Some("NaN".to_owned())), (Some("1".to_owned()), None), ]; assert_eq!(expected, result); Ok(()) } }
30.594406
99
0.521981
e98dd274434b74d979649c52b3a5ebdc5d43880a
1,202
use wavetable::WavHandler; use flexi_logger::{Logger, opt_format}; /// Load a wave file into memory and show some information about the data. fn main () { // Start as "RUST_LOG=info cargo run --example load_wavetable <filename>" // to show log info Logger::with_env_or_str("myprog=debug, mylib=warn") .format(opt_format) .start() .unwrap(); let filename = std::env::args().nth(1).expect("Please give name of wave file to analyze as argument"); let result = WavHandler::read_file(&filename); match result { Ok(wav_file) => { let info = wav_file.get_fmt(); let data = wav_file.get_samples(); println!("{}:", filename); println!("{} bytes with {} bits per sample, resulting in {} samples in {} channels, format {}", wav_file.get_num_bytes(), info.get_bits_per_sample(), data.get_num_samples(), info.get_num_channels(), data.get_type()); println!("{:?}", info); }, Err(()) => println!("Failed to read file {}", filename), } }
36.424242
107
0.540765
b9212f5bdb5b307058161defbc378fda93358246
6,346
use std::fmt; use std::io::Write as _; use std::time::Duration; use anyhow::{anyhow, Context as _}; use serde::Serialize; use structopt::StructOpt; use tokio::time::Instant; use crate::atcoder::AtcoderActor; use crate::cmd::Outcome; use crate::judge::{Judge, StatusKind, TotalStatus}; use crate::model::{AsSamples, ContestId, Problem, ProblemId, Service}; use crate::{Config, Console, Result}; static DEFAULT_TIME_LIMIT_MS: u64 = 60 * 1000; #[derive(StructOpt, Debug, Clone, PartialEq, Eq, Hash)] #[structopt(rename_all = "kebab")] pub struct TestOpt { /// Id of the problem to be tested #[structopt(name = "problem")] problem_id: ProblemId, /// If specified, uses only one sample sample_name: Option<String>, /// Tests using full testcases (only available for AtCoder) #[structopt(name = "full", long)] is_full: bool, /// Outpus one line per one sample #[structopt(long)] one_line: bool, /// Overrides time limit (in millisecs) of the problem #[structopt(long)] time_limit: Option<u64>, } fn testcase_or_sample(is_full: bool) -> &'static str { if is_full { "testcase" } else { "sample" } } impl TestOpt { pub fn run(&self, conf: &Config, cnsl: &mut Console) -> Result<TestOutcome> { let problem = conf.load_problem(&self.problem_id, cnsl)?; let problem_name = problem.name().to_owned(); let (total, compile_elapsed, test_elapsed) = self.compile_and_test(problem, conf, cnsl)?; // build output Ok(TestOutcome { service: Service::new(conf.service_id), contest_id: conf.contest_id.to_owned(), problem_id: self.problem_id.to_owned(), problem_name, total, compile_elapsed, test_elapsed, is_full: self.is_full, }) } async fn compile(&self, conf: &Config) -> Result<Duration> { let started_at = Instant::now(); let mut compile = conf.exec_compile(&self.problem_id)?; let exit_status = compile.status().await?; let elapsed = started_at.elapsed(); if !exit_status.success() { return Err(anyhow!( "Compile command returned non-zero status : {}", exit_status )); } Ok(elapsed) } async fn test( &self, problem: 
Problem, conf: &Config, cnsl: &mut Console, ) -> Result<(TotalStatus, Duration)> { let time_limit = self .time_limit .map(Duration::from_millis) .or_else(|| problem.time_limit()) .unwrap_or_else(|| Duration::from_millis(DEFAULT_TIME_LIMIT_MS)); let compare = problem.compare(); let samples = self.load_samples(problem, conf)?; let n_samples = samples.len(); let max_sample_name_len = samples.max_name_len(); if n_samples == 0 { return Err(anyhow!("Found no samples")); } // test source code with samples let started_at = Instant::now(); let mut statuses = Vec::new(); writeln!(cnsl)?; for (i, sample) in samples.enumerate() { let sample = sample?; let run = conf.exec_run(&self.problem_id)?; write!( cnsl, "[{:>2}/{:>2}] {} {:>l$} ... ", i + 1, n_samples, testcase_or_sample(self.is_full), sample.name(), l = max_sample_name_len, )?; let status = Judge::new(sample, time_limit, compare).test(run).await?; writeln!(cnsl, "{}", status)?; if !self.one_line { status.describe(cnsl)?; } statuses.push(status); } let elapsed = started_at.elapsed(); let total = TotalStatus::new(statuses); Ok((total, elapsed)) } fn load_samples(&self, problem: Problem, conf: &Config) -> Result<Box<dyn AsSamples>> { if self.is_full { let testcases_dir = conf.testcases_abs_dir(problem.id())?; let testcases = AtcoderActor::load_testcases(testcases_dir, &self.sample_name)?; Ok(Box::new(testcases)) } else { Ok(Box::new(problem.take_samples(&self.sample_name))) } } #[tokio::main] async fn compile_and_test( &self, problem: Problem, conf: &Config, cnsl: &mut Console, ) -> Result<(TotalStatus, Duration, Duration)> { let compile_elapsed = self.compile(conf).await.context("Failed to compile")?; let (total, test_elapsed) = self.test(problem, conf, cnsl).await?; Ok((total, compile_elapsed, test_elapsed)) } } #[derive(Serialize, Debug, Clone, PartialEq, Eq, Hash)] pub struct TestOutcome { service: Service, contest_id: ContestId, problem_id: ProblemId, problem_name: String, total: TotalStatus, compile_elapsed: Duration, 
test_elapsed: Duration, is_full: bool, } impl fmt::Display for TestOutcome { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!( f, "{} {} {} {} ({} {}s, compile: {:.2}s, test: {:.2}s)", self.service.id(), self.contest_id, self.problem_id, self.problem_name, self.total.count(), testcase_or_sample(self.is_full), (self.compile_elapsed.as_secs_f32()), (self.test_elapsed.as_secs_f32()), )?; write!(f, "{}", self.total) } } impl Outcome for TestOutcome { fn is_error(&self) -> bool { self.total.kind() != StatusKind::Ac } } #[cfg(test)] mod tests { use tempfile::tempdir; use super::*; use crate::cmd::tests::run_with; #[test] fn run_default() -> anyhow::Result<()> { let test_dir = tempdir()?; let fetch_opt = crate::cmd::FetchOpt::default_test(); run_with(&test_dir, |conf, cnsl| fetch_opt.run(conf, cnsl))?; let opt = TestOpt { problem_id: "c".into(), sample_name: None, is_full: false, one_line: false, time_limit: None, }; run_with(&test_dir, |conf, cnsl| opt.run(conf, cnsl))?; Ok(()) } }
29.793427
97
0.56382
14e3b0a31e10548670dd42ba829b83db5300f2af
5,979
#![cfg(feature = "dnssec")] #![cfg(not(windows))] extern crate futures; extern crate log; extern crate tokio; extern crate tokio_tcp; extern crate trust_dns; extern crate trust_dns_proto; mod server_harness; use std::env; use std::fs::File; use std::io::*; use std::net::*; use std::path::Path; use futures::Future; use tokio::runtime::current_thread::Runtime; use tokio_tcp::TcpStream as TokioTcpStream; use trust_dns::proto::error::ProtoError; use trust_dns::proto::tcp::{TcpClientConnect, TcpClientStream}; use trust_dns::proto::xfer::{ DnsMultiplexer, DnsMultiplexerConnect, DnsMultiplexerSerialResponse, DnsResponse, }; use trust_dns::client::*; use trust_dns::rr::dnssec::*; use server_harness::*; #[cfg(all(not(feature = "dnssec-ring"), feature = "dnssec-openssl"))] fn confg_toml() -> &'static str { "openssl_dnssec.toml" } #[cfg(all(feature = "dnssec-ring", not(feature = "dnssec-openssl")))] fn confg_toml() -> &'static str { "ring_dnssec.toml" } #[cfg(all(feature = "dnssec-ring", feature = "dnssec-openssl"))] fn confg_toml() -> &'static str { "all_supported_dnssec.toml" } fn trust_anchor(public_key_path: &Path, format: KeyFormat, algorithm: Algorithm) -> TrustAnchor { let mut file = File::open(public_key_path).expect("key not found"); let mut buf = Vec::<u8>::new(); file.read_to_end(&mut buf).expect("could not read key"); let key_pair = format .decode_key(&buf, Some("123456"), algorithm) .expect("could not decode key"); let public_key = key_pair.to_public_key().unwrap(); let mut trust_anchor = TrustAnchor::new(); trust_anchor.insert_trust_anchor(&public_key); trust_anchor } fn standard_conn( port: u16, ) -> ( ClientFuture< DnsMultiplexerConnect<TcpClientConnect, TcpClientStream<TokioTcpStream>, Signer>, DnsMultiplexer<TcpClientStream<TokioTcpStream>, Signer>, DnsMultiplexerSerialResponse, >, BasicClientHandle<impl Future<Item = DnsResponse, Error = ProtoError>>, ) { let addr: SocketAddr = ("127.0.0.1", port) .to_socket_addrs() .unwrap() .next() .unwrap(); let (stream, 
sender) = TcpClientStream::new(addr); ClientFuture::new(stream, sender, None) } fn generic_test(config_toml: &str, key_path: &str, key_format: KeyFormat, algorithm: Algorithm) { // use trust_dns::logger; // use log::LogLevel; // logger::TrustDnsLogger::enable_logging(LogLevel::Debug); let server_path = env::var("TDNS_SERVER_SRC_ROOT").unwrap_or_else(|_| ".".to_owned()); let server_path = Path::new(&server_path); named_test_harness(config_toml, |port, _, _| { let mut io_loop = Runtime::new().unwrap(); // verify all records are present let (bg, client) = standard_conn(port); io_loop.spawn(bg); query_all_dnssec_with_rfc6975(&mut io_loop, client, algorithm); let (bg, client) = standard_conn(port); io_loop.spawn(bg); query_all_dnssec_wo_rfc6975(&mut io_loop, client, algorithm); // test that request with Secure client is successful, i.e. validates chain let trust_anchor = trust_anchor(&server_path.join(key_path), key_format, algorithm); let (bg, client) = standard_conn(port); io_loop.spawn(bg); let mut client = SecureClientHandle::with_trust_anchor(client, trust_anchor); query_a(&mut io_loop, &mut client); }); } #[test] #[cfg(feature = "dnssec-openssl")] fn test_rsa_sha256() { generic_test( confg_toml(), "tests/named_test_configs/dnssec/rsa_2048.pem", KeyFormat::Pem, Algorithm::RSASHA256, ); } #[test] #[cfg(feature = "dnssec-openssl")] fn test_rsa_sha512() { generic_test( confg_toml(), "tests/named_test_configs/dnssec/rsa_2048.pem", KeyFormat::Pem, Algorithm::RSASHA512, ); } #[test] #[cfg(feature = "dnssec-openssl")] fn test_ecdsa_p256() { generic_test( confg_toml(), "tests/named_test_configs/dnssec/ecdsa_p256.pem", KeyFormat::Pem, Algorithm::ECDSAP256SHA256, ); } #[test] #[cfg(feature = "dnssec-openssl")] fn test_ecdsa_p384() { generic_test( confg_toml(), "tests/named_test_configs/dnssec/ecdsa_p384.pem", KeyFormat::Pem, Algorithm::ECDSAP384SHA384, ); } #[test] #[cfg(feature = "dnssec-ring")] fn test_ed25519() { generic_test( confg_toml(), 
"tests/named_test_configs/dnssec/ed25519.pk8", KeyFormat::Pkcs8, Algorithm::ED25519, ); } #[test] #[should_panic] fn test_rsa_sha1_fails() { generic_test( confg_toml(), "tests/named_test_configs/dnssec/rsa_2048.pem", KeyFormat::Pem, Algorithm::RSASHA1, ); } #[cfg(feature = "dnssec-openssl")] #[test] fn test_dnssec_restart_with_update_journal() { // TODO: make journal path configurable, it should be in target/tests/... let server_path = env::var("TDNS_SERVER_SRC_ROOT").unwrap_or_else(|_| ".".to_owned()); let server_path = Path::new(&server_path); let journal = server_path.join("tests/named_test_configs/example.com.jrnl"); std::fs::remove_file(&journal).ok(); generic_test( "dnssec_with_update.toml", "tests/named_test_configs/dnssec/rsa_2048.pem", KeyFormat::Pem, Algorithm::RSASHA256, ); // after running the above test, the journal file should exist assert!(journal.exists()); // and all dnssec tests should still pass generic_test( "dnssec_with_update.toml", "tests/named_test_configs/dnssec/rsa_2048.pem", KeyFormat::Pem, Algorithm::RSASHA256, ); // and journal should still exist assert!(journal.exists()); // cleanup... // TODO: fix journal path so that it doesn't leave the dir dirty... this might make windows an option after that std::fs::remove_file(&journal).expect("failed to cleanup after test"); }
28.20283
116
0.660144
de34585d2ef2b5b7c95ef24c6f7e7200413dddf6
3,148
use std::cmp::Ordering; use std::error::Error; use std::fmt::{self, Display}; use std::str::FromStr; use super::{Instruction, Op2, ParseInstructionError}; use crate::{lex, uarch, util}; #[derive(Debug)] pub struct And { op1: uarch, op2: Op2, } impl Display for And { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let label = "and"; let op1 = format!("r{}", self.op1); let op2 = match self.op2 { Op2::Reg(op2) => format!("r{}", op2), Op2::Imm(imm) => format!("{:#06x}", imm), }; write!(f, "{} {}, {}", label, op1, op2) } } impl From<uarch> for And { fn from(word: uarch) -> Self { assert_eq!((word >> 12), 0b0110); Self { op1: (word & 0x0f00) >> 8, op2: match (word & 0x0080) == 0 { true => Op2::Reg(word & 0x000f), false => Op2::Imm(util::sign_extend::<7, { uarch::BITS }>(word & 0x007f)), }, } } } impl From<And> for uarch { fn from(instr: And) -> Self { let mut word: uarch = 0; word |= 0b0110 << 12; word |= (instr.op1 << 8) & 0x0f00; word |= match instr.op2 { Op2::Reg(op2) => op2, Op2::Imm(imm) => 0x0080 | imm, } & 0x00ff; word } } impl FromStr for And { type Err = Box<dyn Error>; fn from_str(s: &str) -> Result<Self, Self::Err> { // Only operate on lowercase strings // (also creates an owned String from &str) let s = s.to_lowercase(); // Split into constituent tokens let tokens = lex::tokenize(&s).ok_or(ParseInstructionError::EmptyStr)?; // Ensure correct number of tokens match tokens.len().cmp(&4) { Ordering::Less => Err(ParseInstructionError::MissingOps), Ordering::Equal => Ok(()), Ordering::Greater => Err(ParseInstructionError::ExtraOps), }?; // Check instruction is correct (tokens[0] == "and") .then(|| ()) .ok_or(ParseInstructionError::BadInstruction)?; // Parse op1 let op1 = lex::parse_reg(&tokens[1])?; // Look for "," separator (tokens[2] == ",") .then(|| ()) .ok_or(ParseInstructionError::ExpectedSep)?; // Parse op2 let op2 = tokens[3].parse()?; // Ensure validity of ops (op1 < 0x10) .then(|| ()) .ok_or(ParseInstructionError::InvalidOp)?; match op2 { 
Op2::Reg(reg) if reg < 0x10 => Ok(()), Op2::Imm(imm) if imm < 0x80 => Ok(()), _ => Err(ParseInstructionError::InvalidOp), }?; // Create Self from parts Ok(Self { op1, op2 }) } } impl Instruction for And {} #[cfg(test)] mod tests { use super::*; #[test] fn sweep() { for mut word in 0x6000..=0x6fff { let instr = And::from(word); if let Op2::Reg(_) = instr.op2 { word &= 0xff8f; } let decoded: uarch = instr.into(); assert_eq!(decoded, word); } } }
28.107143
90
0.494917
69d5ad78999b99db1652e30eb8694647b8b5b7ce
4,972
use std::convert::TryInto; use std::fmt; use std::fmt::Display; use std::fmt::Formatter; use std::io; use std::mem::MaybeUninit; use std::os::raw::c_int; use std::os::unix::process::ExitStatusExt; use std::process; use std::process::Child; use std::thread; use std::time::Duration; use libc::id_t; use libc::CLD_EXITED; use libc::EINTR; use libc::ESRCH; use libc::P_PID; use libc::SIGKILL; use libc::WEXITED; use libc::WNOWAIT; use libc::WSTOPPED; #[derive(Copy, Clone, Debug, Eq, PartialEq)] #[repr(C)] pub(super) struct ExitStatus { value: c_int, terminated: bool, } impl ExitStatus { pub(super) fn success(self) -> bool { !self.terminated && self.value == 0 } fn get_value(self, normal_exit: bool) -> Option<c_int> { Some(self.value).filter(|_| self.terminated != normal_exit) } pub(super) fn code(self) -> Option<c_int> { self.get_value(true) } pub(super) fn signal(self) -> Option<c_int> { self.get_value(false) } } impl Display for ExitStatus { fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { if self.terminated { write!(formatter, "signal: {}", self.value) } else { write!(formatter, "exit code: {}", self.value) } } } impl From<process::ExitStatus> for ExitStatus { fn from(value: process::ExitStatus) -> Self { if let Some(exit_code) = value.code() { Self { value: exit_code, terminated: false, } } else if let Some(signal) = value.signal() { Self { value: signal, terminated: true, } } else { unreachable!() } } } pub(super) fn run_with_timeout<TReturn>( get_result_fn: impl 'static + FnOnce() -> TReturn + Send, time_limit: Duration, ) -> io::Result<Option<TReturn>> where TReturn: 'static + Send, { let (result_sender, result_receiver) = { #[cfg(feature = "crossbeam-channel")] { crossbeam_channel::bounded(0) } #[cfg(not(feature = "crossbeam-channel"))] { use std::sync::mpsc; mpsc::channel() } }; let _ = thread::Builder::new() .spawn(move || result_sender.send(get_result_fn()))?; Ok(result_receiver.recv_timeout(time_limit).ok()) } #[derive(Debug)] pub(super) struct 
Handle(id_t); impl Handle { fn check_syscall(result: c_int) -> io::Result<()> { if result >= 0 { Ok(()) } else { Err(io::Error::last_os_error()) } } pub(super) fn new(process: &Child) -> io::Result<Self> { Ok(Self::inherited(process)) } pub(super) fn inherited(process: &Child) -> Self { Self(process.id()) } pub(super) unsafe fn terminate(&self) -> io::Result<()> { let process_id = self.0.try_into().expect("process identifier is invalid"); let result = Self::check_syscall(libc::kill(process_id, SIGKILL)); if let Err(error) = &result { // This error is usually decoded to [ErrorKind::Other]: // https://github.com/rust-lang/rust/blob/49c68bd53f90e375bfb3cbba8c1c67a9e0adb9c0/src/libstd/sys/unix/mod.rs#L100-L123 if error.raw_os_error() == Some(ESRCH) { return Err(io::Error::new( io::ErrorKind::NotFound, "No such process", )); } } result } pub(super) fn wait_with_timeout( &self, time_limit: Duration, ) -> io::Result<Option<ExitStatus>> { // https://github.com/rust-lang/rust/blob/49c68bd53f90e375bfb3cbba8c1c67a9e0adb9c0/src/libstd/sys/unix/process/process_unix.rs#L432-L441 let process_id = self.0; run_with_timeout( move || loop { let mut process_info = MaybeUninit::uninit(); let result = Self::check_syscall(unsafe { libc::waitid( P_PID, process_id, process_info.as_mut_ptr(), WEXITED | WNOWAIT | WSTOPPED, ) }); match result { Ok(()) => { let process_info = unsafe { process_info.assume_init() }; break Ok(ExitStatus { value: unsafe { process_info.si_status() }, terminated: process_info.si_code != CLD_EXITED, }); } Err(error) => { if error.raw_os_error() != Some(EINTR) { break Err(error); } } } }, time_limit, )? .transpose() } }
27.932584
144
0.514883
fcde21e7196cdbe3cb782184d60f8381d3cf98bc
2,655
//! [Problem 101](https://projecteuler.net/problem=101) solver. #![warn( bad_style, unused, unused_extern_crates, unused_import_braces, unused_qualifications, unused_results )] use num_bigint::BigInt; use num_rational::Ratio; use num_traits::{FromPrimitive, One, Zero}; use polynomial::Polynomial; fn u(n: BigInt) -> BigInt { let mut sum = BigInt::zero(); let mut prod = BigInt::one(); for _ in 0..11 { sum += &prod; prod = &prod * (-&n); } sum } // Lagrange Interpolating with Naville's algorithm fn op(ns: &[(BigInt, BigInt)]) -> Polynomial<BigInt> { let mut poly = Polynomial::new(vec![]); for (i, &(ref xi, ref yi)) in ns.iter().enumerate() { let mut term = Polynomial::new(vec![Ratio::from_integer(yi.clone())]); for (j, &(ref xj, ref _yj)) in ns.iter().enumerate() { if i == j { continue; } term = term * Polynomial::new(vec![ Ratio::new(-xj, xi - xj), Ratio::new(One::one(), xi - xj), ]); } poly = poly + term; } let data = poly.data().iter().map(Ratio::to_integer).collect(); Polynomial::new(data) } fn bop(ns: &[(BigInt, BigInt)]) -> BigInt { op(ns).eval(FromPrimitive::from_usize(ns.len() + 1).unwrap()) } fn u_to_vec(dim: u32, f: fn(BigInt) -> BigInt) -> Vec<(BigInt, BigInt)> { (0..(dim + 1)) .map(|i| { let n: BigInt = FromPrimitive::from_u32(i + 1).unwrap(); (n.clone(), f(n)) }) .collect() } fn solve() -> String { let un = u_to_vec(10, u); (0..10) .map(|i| bop(&un[..i + 1])) .fold(num_traits::zero::<BigInt>(), |acc, elt| acc + elt) .to_string() } common::problem!("37076114526", solve); #[cfg(test)] mod tests { use num_bigint::BigInt; use num_traits::ToPrimitive; #[test] fn op() { fn u(n: BigInt) -> BigInt { &n * &n * &n } let un = super::u_to_vec(3, u); assert_eq!("1", super::op(&un[..1]).pretty("n")); assert_eq!("-6+7*n", super::op(&un[..2]).pretty("n")); assert_eq!("6-11*n+6*n^2", super::op(&un[..3]).pretty("n")); assert_eq!("n^3", super::op(&un).pretty("n")); } #[test] fn bop() { fn u(n: BigInt) -> BigInt { &n * &n * &n } let un = super::u_to_vec(3, u); assert_eq!(1, 
super::bop(&un[..1]).to_i32().unwrap()); assert_eq!(15, super::bop(&un[..2]).to_i32().unwrap()); assert_eq!(58, super::bop(&un[..3]).to_i32().unwrap()); } }
25.776699
78
0.503578
6a89ed0d5ca05d45cfb09fbbf209e64879781140
30,454
use serde_json; use serde_json::Value; use object_cache::ObjectCache; use api::VcxStateType; use issuer_credential::{CredentialOffer, CredentialMessage, PaymentInfo}; use credential_request::CredentialRequest; use messages; use messages::{GeneralMessage, RemoteMessageType, ObjectWithVersion}; use messages::payload::{Payloads, PayloadKinds, Thread}; use messages::get_message; use messages::get_message::MessagePayload; use connection; use settings; use utils::libindy::anoncreds::{libindy_prover_create_credential_req, libindy_prover_store_credential}; use utils::libindy::anoncreds; use utils::libindy::payments::{pay_a_payee, PaymentTxn}; use utils::error; use utils::constants::DEFAULT_SERIALIZE_VERSION; use error::prelude::*; lazy_static! { static ref HANDLE_MAP: ObjectCache<Credential> = Default::default(); } impl Default for Credential { fn default() -> Credential { Credential { source_id: String::new(), state: VcxStateType::VcxStateNone, credential_name: None, credential_request: None, agent_did: None, agent_vk: None, my_did: None, my_vk: None, their_did: None, their_vk: None, credential_offer: None, msg_uid: None, cred_id: None, credential: None, payment_info: None, payment_txn: None, thread: Some(Thread::new()) } } } #[derive(Clone, Serialize, Deserialize, Debug, PartialEq)] pub struct Credential { source_id: String, state: VcxStateType, credential_name: Option<String>, credential_request: Option<CredentialRequest>, credential_offer: Option<CredentialOffer>, msg_uid: Option<String>, // the following 6 are pulled from the connection object agent_did: Option<String>, agent_vk: Option<String>, my_did: Option<String>, my_vk: Option<String>, their_did: Option<String>, their_vk: Option<String>, credential: Option<String>, cred_id: Option<String>, payment_info: Option<PaymentInfo>, payment_txn: Option<PaymentTxn>, thread: Option<Thread> } impl Credential { pub fn build_request(&self, my_did: &str, their_did: &str) -> VcxResult<CredentialRequest> { 
trace!("Credential::build_request >>> my_did: {}, their_did: {}", my_did, their_did); if self.state != VcxStateType::VcxStateRequestReceived { return Err(VcxError::from_msg(VcxErrorKind::NotReady, format!("credential {} has invalid state {} for sending credential request", self.source_id, self.state as u32))); } let prover_did = self.my_did.as_ref().ok_or(VcxError::from(VcxErrorKind::InvalidDid))?; let credential_offer = self.credential_offer.as_ref().ok_or(VcxError::from(VcxErrorKind::InvalidCredential))?; let (cred_def_id, cred_def_json) = anoncreds::get_cred_def_json(&credential_offer.cred_def_id)?; /* debug!("storing credential offer: {}", secret!(&credential_offer)); libindy_prover_store_credential_offer(wallet_h, &credential_offer).map_err(|ec| CredentialError::CommonError(ec))?; */ let (req, req_meta) = libindy_prover_create_credential_req(&prover_did, &credential_offer.libindy_offer, &cred_def_json) .map_err(|err| err.extend("Cannot create credential request"))?; Ok(CredentialRequest { libindy_cred_req: req, libindy_cred_req_meta: req_meta, cred_def_id, tid: String::new(), to_did: String::from(their_did), from_did: String::from(my_did), mid: String::new(), version: String::from("0.1"), msg_ref_id: None, }) } fn send_request(&mut self, connection_handle: u32) -> VcxResult<u32> { trace!("Credential::send_request >>> connection_handle: {}", connection_handle); debug!("sending credential request {} via connection: {}", self.source_id, connection::get_source_id(connection_handle).unwrap_or_default()); self.my_did = Some(connection::get_pw_did(connection_handle)?); self.my_vk = Some(connection::get_pw_verkey(connection_handle)?); self.agent_did = Some(connection::get_agent_did(connection_handle)?); self.agent_vk = Some(connection::get_agent_verkey(connection_handle)?); self.their_did = Some(connection::get_their_pw_did(connection_handle)?); self.their_vk = Some(connection::get_their_pw_verkey(connection_handle)?); debug!("verifier_did: {:?} -- verifier_vk: {:?} 
-- agent_did: {:?} -- agent_vk: {:?} -- remote_vk: {:?}", self.my_did, self.agent_did, self.agent_vk, self.their_vk, self.my_vk); let local_their_did = self.their_did.as_ref().ok_or(VcxError::from(VcxErrorKind::InvalidCredentialHandle))?; let local_their_vk = self.their_vk.as_ref().ok_or(VcxError::from(VcxErrorKind::InvalidCredentialHandle))?; let local_agent_did = self.agent_did.as_ref().ok_or(VcxError::from(VcxErrorKind::InvalidCredentialHandle))?; let local_agent_vk = self.agent_vk.as_ref().ok_or(VcxError::from(VcxErrorKind::InvalidCredentialHandle))?; let local_my_did = self.my_did.as_ref().ok_or(VcxError::from(VcxErrorKind::InvalidCredentialHandle))?; let local_my_vk = self.my_vk.as_ref().ok_or(VcxError::from(VcxErrorKind::InvalidCredentialHandle))?; // if test mode, just get this. let cred_req: CredentialRequest = self.build_request(local_my_did, local_their_did)?; let cred_req_json = serde_json::to_string(&cred_req) .map_err(|err| VcxError::from_msg(VcxErrorKind::InvalidCredential, format!("Cannot serialize CredentialRequest: {}", err)))?; self.credential_request = Some(cred_req); let offer_msg_id = self.credential_offer.as_ref().and_then(|offer| offer.msg_ref_id.clone()) .ok_or(VcxError::from(VcxErrorKind::CreateCredentialRequest))?; if self.payment_info.is_some() { let (payment_txn, _) = self.submit_payment()?; self.payment_txn = Some(payment_txn); } let response = messages::send_message() .to(local_my_did)? .to_vk(local_my_vk)? .msg_type(&RemoteMessageType::CredReq)? .agent_did(local_agent_did)? .agent_vk(local_agent_vk)? .edge_agent_payload(&local_my_vk, &local_their_vk, &cred_req_json, PayloadKinds::CredReq, self.thread.clone())? .ref_msg_id(Some(offer_msg_id.to_string()))? 
.send_secure() .map_err(|err| err.extend(format!("{} could not send proof", self.source_id)))?; self.msg_uid = Some(response.get_msg_uid()?); self.state = VcxStateType::VcxStateOfferSent; return Ok(error::SUCCESS.code_num); } fn _check_msg(&mut self) -> VcxResult<()> { let agent_did = self.agent_did.as_ref().ok_or(VcxError::from(VcxErrorKind::InvalidCredentialHandle))?; let agent_vk = self.agent_vk.as_ref().ok_or(VcxError::from(VcxErrorKind::InvalidCredentialHandle))?; let my_did = self.my_did.as_ref().ok_or(VcxError::from(VcxErrorKind::InvalidCredentialHandle))?; let my_vk = self.my_vk.as_ref().ok_or(VcxError::from(VcxErrorKind::InvalidCredentialHandle))?; let msg_uid = self.msg_uid.as_ref().ok_or(VcxError::from(VcxErrorKind::InvalidCredentialHandle))?; let (_, payload) = get_message::get_ref_msg(msg_uid, my_did, my_vk, agent_did, agent_vk)?; let (credential, thread) = Payloads::decrypt(&my_vk, &payload)?; if let Some(_) = thread { let their_did = self.their_did.as_ref().map(String::as_str).unwrap_or(""); self.thread.as_mut().map(|thread| thread.increment_receiver(&their_did)); } let credential_msg: CredentialMessage = serde_json::from_str(&credential) .map_err(|err| VcxError::from_msg(VcxErrorKind::InvalidCredential, format!("Cannot deserialize CredentialMessage: {}", err)))?; let cred_req: &CredentialRequest = self.credential_request.as_ref() .ok_or(VcxError::from_msg(VcxErrorKind::InvalidCredential, "Cannot find CredentialRequest"))?; let (_, cred_def_json) = anoncreds::get_cred_def_json(&cred_req.cred_def_id) .map_err(|err| err.extend("Cannot get credential definition"))?; self.credential = Some(credential); self.cred_id = Some(libindy_prover_store_credential(None, &cred_req.libindy_cred_req_meta, &credential_msg.libindy_cred, &cred_def_json, match credential_msg.rev_reg_def_json.len() { 0 => None, _ => Some(&credential_msg.rev_reg_def_json), })?); self.state = VcxStateType::VcxStateAccepted; Ok(()) } fn update_state(&mut self) { 
trace!("Credential::update_state >>>"); match self.state { VcxStateType::VcxStateOfferSent => { //Check for messages let _ = self._check_msg(); } VcxStateType::VcxStateAccepted => { //Check for revocation } _ => { // NOOP there is nothing the check for a changed state } } } fn get_state(&self) -> u32 { trace!("Credential::get_state >>>"); self.state as u32 } fn get_credential(&self) -> VcxResult<String> { trace!("Credential::get_credential >>>"); if self.state != VcxStateType::VcxStateAccepted { return Err(VcxError::from(VcxErrorKind::InvalidState)); } let credential = self.credential.as_ref().ok_or(VcxError::from(VcxErrorKind::InvalidState))?; Ok(self.to_cred_string(&credential)) } fn get_credential_offer(&self) -> VcxResult<String> { trace!("Credential::get_credential_offer >>>"); if self.state != VcxStateType::VcxStateRequestReceived { return Err(VcxError::from(VcxErrorKind::InvalidState)); } let credential_offer = self.credential_offer.as_ref().ok_or(VcxError::from(VcxErrorKind::InvalidState))?; let credential_offer_json = serde_json::to_value(credential_offer) .map_err(|err| VcxError::from_msg(VcxErrorKind::InvalidCredential, format!("Cannot deserialize CredentilOffer: {}", err)))?; Ok(self.to_cred_offer_string(credential_offer_json)) } fn get_credential_id(&self) -> String { self.cred_id.as_ref().map(String::as_str).unwrap_or("").to_string() } fn set_payment_info(&self, json: &mut serde_json::Map<String, Value>) { if let Some(ref payment_info) = self.payment_info { json.insert("price".to_string(), Value::String(payment_info.price.to_string())); json.insert("payment_address".to_string(), Value::String(payment_info.payment_addr.to_string())); }; } fn to_cred_string(&self, cred: &str) -> String { let mut json = serde_json::Map::new(); json.insert("credential_id".to_string(), Value::String(self.get_credential_id())); json.insert("credential".to_string(), Value::String(cred.to_string())); self.set_payment_info(&mut json); serde_json::Value::from(json).to_string() 
} fn to_cred_offer_string(&self, cred_offer: Value) -> String { let mut json = serde_json::Map::new(); json.insert("credential_offer".to_string(), cred_offer); self.set_payment_info(&mut json); serde_json::Value::from(json).to_string() } fn set_source_id(&mut self, id: &str) { self.source_id = id.to_string(); } fn get_source_id(&self) -> &String { &self.source_id } fn get_payment_txn(&self) -> VcxResult<PaymentTxn> { trace!("Credential::get_payment_txn >>>"); match (&self.payment_txn, &self.payment_info) { (Some(ref payment_txn), Some(_)) => Ok(payment_txn.clone()), _ => Err(VcxError::from(VcxErrorKind::NoPaymentInformation)) } } fn is_payment_required(&self) -> bool { self.payment_info.is_some() } fn submit_payment(&self) -> VcxResult<(PaymentTxn, String)> { debug!("{} submitting payment for premium credential", self.source_id); match &self.payment_info { &Some(ref pi) => { let address = &pi.get_address(); let price = pi.get_price(); let (payment_txn, receipt) = pay_a_payee(price, address)?; Ok((payment_txn, receipt)) } &None => Err(VcxError::from(VcxErrorKind::NoPaymentInformation)), } } fn get_payment_info(&self) -> VcxResult<Option<PaymentInfo>> { trace!("Credential::get_payment_info >>>"); Ok(self.payment_info.clone()) } fn to_string(&self) -> VcxResult<String> { ObjectWithVersion::new(DEFAULT_SERIALIZE_VERSION, self.to_owned()) .serialize() .map_err(|err| err.extend("Cannot serialize Credential")) } fn from_str(data: &str) -> VcxResult<Credential> { ObjectWithVersion::deserialize(data) .map(|obj: ObjectWithVersion<Credential>| obj.data) .map_err(|err| err.extend("Cannot deserialize Credential")) } } //******************************************** // HANDLE FUNCTIONS //******************************************** fn handle_err(err: VcxError) -> VcxError { if err.kind() == VcxErrorKind::InvalidHandle { VcxError::from(VcxErrorKind::InvalidCredentialHandle) } else { err } } pub fn credential_create_with_offer(source_id: &str, offer: &str) -> VcxResult<u32> { 
trace!("credential_create_with_offer >>> source_id: {}, offer: {}", source_id, secret!(&offer)); let mut new_credential = _credential_create(source_id); let (offer, payment_info) = parse_json_offer(offer)?; new_credential.credential_offer = Some(offer); new_credential.payment_info = payment_info; new_credential.state = VcxStateType::VcxStateRequestReceived; debug!("inserting credential {} into handle map", source_id); HANDLE_MAP.add(new_credential) } fn _credential_create(source_id: &str) -> Credential { let mut new_credential: Credential = Default::default(); new_credential.state = VcxStateType::VcxStateInitialized; new_credential.set_source_id(source_id); new_credential } pub fn update_state(handle: u32) -> VcxResult<u32> { HANDLE_MAP.get_mut(handle, |obj| { debug!("updating state for credential {} with msg_id {:?}", obj.source_id, obj.msg_uid); obj.update_state(); Ok(error::SUCCESS.code_num) }) } pub fn get_credential(handle: u32) -> VcxResult<String> { HANDLE_MAP.get(handle, |obj| { debug!("getting credential {}", obj.get_source_id()); obj.get_credential() }) } pub fn get_payment_txn(handle: u32) -> VcxResult<PaymentTxn> { HANDLE_MAP.get(handle, |obj| { obj.get_payment_txn() }).or(Err(VcxError::from(VcxErrorKind::NoPaymentInformation))) } pub fn get_credential_offer(handle: u32) -> VcxResult<String> { HANDLE_MAP.get(handle, |obj| { debug!("getting credential offer {}", obj.source_id); obj.get_credential_offer() }) } pub fn get_credential_id(handle: u32) -> VcxResult<String> { HANDLE_MAP.get(handle, |obj| { Ok(obj.get_credential_id()) }) } pub fn get_state(handle: u32) -> VcxResult<u32> { HANDLE_MAP.get(handle, |obj| { Ok(obj.get_state()) }).map_err(handle_err) } pub fn send_credential_request(handle: u32, connection_handle: u32) -> VcxResult<u32> { HANDLE_MAP.get_mut(handle, |obj| { obj.send_request(connection_handle) }).map_err(handle_err) } pub fn get_credential_offer_msg(connection_handle: u32, msg_id: &str) -> VcxResult<String> { 
trace!("get_credential_offer_msg >>> connection_handle: {}, msg_id: {}", connection_handle, msg_id); let my_did = connection::get_pw_did(connection_handle)?; let my_vk = connection::get_pw_verkey(connection_handle)?; let agent_did = connection::get_agent_did(connection_handle)?; let agent_vk = connection::get_agent_verkey(connection_handle)?; if settings::test_agency_mode_enabled() { ::utils::httpclient::set_next_u8_response(::utils::constants::NEW_CREDENTIAL_OFFER_RESPONSE.to_vec()); } let message = get_message::get_connection_messages(&my_did, &my_vk, &agent_did, &agent_vk, Some(vec![msg_id.to_string()])) .map_err(|err| err.extend("Cannot get messages"))?; if message[0].msg_type != RemoteMessageType::CredOffer { return Err(VcxError::from_msg(VcxErrorKind::InvalidMessages, "Invalid message type")); } let payload = message.get(0).and_then(|msg| msg.payload.as_ref()) .ok_or(VcxError::from_msg(VcxErrorKind::InvalidMessagePack, "Payload not found"))?; let payload = _set_cred_offer_ref_message(&payload, &my_vk, &message[0].uid)?; serde_json::to_string_pretty(&payload) .map_err(|err| VcxError::from_msg(VcxErrorKind::InvalidMessages, format!("Cannot serialize credential offer: {}", err))) } pub fn get_credential_offer_messages(connection_handle: u32) -> VcxResult<String> { trace!("Credential::get_credential_offer_messages >>> connection_handle: {}", connection_handle); debug!("checking agent for credential offers from connection {}", connection::get_source_id(connection_handle).unwrap_or_default()); let my_did = connection::get_pw_did(connection_handle)?; let my_vk = connection::get_pw_verkey(connection_handle)?; let agent_did = connection::get_agent_did(connection_handle)?; let agent_vk = connection::get_agent_verkey(connection_handle)?; if settings::test_agency_mode_enabled() { ::utils::httpclient::set_next_u8_response(::utils::constants::NEW_CREDENTIAL_OFFER_RESPONSE.to_vec()); } let payload = get_message::get_connection_messages(&my_did, &my_vk, &agent_did, 
&agent_vk, None) .map_err(|err| err.extend("Cannot get messages"))?; let mut messages = Vec::new(); for msg in payload { if msg.msg_type == RemoteMessageType::CredOffer { let payload = msg.payload .ok_or(VcxError::from(VcxErrorKind::InvalidMessages))?; let payload = _set_cred_offer_ref_message(&payload, &my_vk, &msg.uid)?; messages.push(payload); } } serde_json::to_string_pretty(&messages) .or(Err(VcxError::from(VcxErrorKind::InvalidMessages))) } fn _set_cred_offer_ref_message(payload: &MessagePayload, my_vk: &str, msg_id: &str) -> VcxResult<Vec<Value>> { let (offer, thread) = Payloads::decrypt(my_vk, payload)?; let (mut offer, payment_info) = parse_json_offer(&offer)?; offer.msg_ref_id = Some(msg_id.to_owned()); if let Some(tr) = thread { offer.thread_id = tr.thid.clone(); } let mut payload = Vec::new(); payload.push(json!(offer)); if let Some(p) = payment_info { payload.push(json!(p)); } Ok(payload) } pub fn parse_json_offer(offer: &str) -> VcxResult<(CredentialOffer, Option<PaymentInfo>)> { let paid_offer: Value = serde_json::from_str(offer) .map_err(|err| VcxError::from_msg(VcxErrorKind::InvalidJson, format!("Cannot deserialize offer: {}", err)))?; let mut payment: Option<PaymentInfo> = None; let mut offer: Option<CredentialOffer> = None; if let Some(i) = paid_offer.as_array() { for entry in i.iter() { if entry.get("libindy_offer").is_some() { offer = Some(serde_json::from_value(entry.clone()) .map_err(|err| VcxError::from_msg(VcxErrorKind::InvalidJson, format!("Cannot deserialize offer: {}", err)))?); } if entry.get("payment_addr").is_some() { payment = Some(serde_json::from_value(entry.clone()) .map_err(|err| VcxError::from_msg(VcxErrorKind::InvalidJson, format!("Cannot deserialize payment address: {}", err)))?); } } } Ok((offer.ok_or(VcxError::from(VcxErrorKind::InvalidJson))?, payment)) } pub fn release(handle: u32) -> VcxResult<()> { HANDLE_MAP.release(handle).map_err(handle_err) } pub fn release_all() { HANDLE_MAP.drain().ok(); } pub fn 
is_valid_handle(handle: u32) -> bool { HANDLE_MAP.has_handle(handle) } pub fn to_string(handle: u32) -> VcxResult<String> { HANDLE_MAP.get(handle, |obj| { Credential::to_string(&obj) }) } pub fn get_source_id(handle: u32) -> VcxResult<String> { HANDLE_MAP.get(handle, |obj| { Ok(obj.get_source_id().clone()) }).map_err(handle_err) } pub fn from_string(credential_data: &str) -> VcxResult<u32> { let credential: Credential = Credential::from_str(credential_data)?; let new_handle = HANDLE_MAP.add(credential)?; debug!("inserting handle {} into proof table", new_handle); Ok(new_handle) } pub fn is_payment_required(handle: u32) -> VcxResult<bool> { HANDLE_MAP.get(handle, |obj| { Ok(obj.is_payment_required()) }).map_err(handle_err) } pub fn submit_payment(handle: u32) -> VcxResult<(PaymentTxn, String)> { HANDLE_MAP.get_mut(handle, |obj| { obj.submit_payment() }).map_err(handle_err) } pub fn get_payment_information(handle: u32) -> VcxResult<Option<PaymentInfo>> { HANDLE_MAP.get(handle, |obj| { obj.get_payment_info() }).map_err(handle_err) } #[cfg(test)] pub mod tests { extern crate serde_json; use super::*; use utils::httpclient; use api::VcxStateType; use serde_json::Value; pub const BAD_CREDENTIAL_OFFER: &str = r#"{"version": "0.1","to_did": "LtMgSjtFcyPwenK9SHCyb8","from_did": "LtMgSjtFcyPwenK9SHCyb8","claim": {"account_num": ["8BEaoLf8TBmK4BUyX8WWnA"],"name_on_account": ["Alice"]},"schema_seq_no": 48,"issuer_did": "Pd4fnFtRBcMKRVC2go5w3j","claim_name": "Account Certificate","claim_id": "3675417066","msg_ref_id": "ymy5nth"}"#; use utils::constants::{DEFAULT_SERIALIZED_CREDENTIAL, DEFAULT_SERIALIZED_CREDENTIAL_PAYMENT_REQUIRED}; use utils::libindy::payments::build_test_address; pub fn create_credential(offer: &str) -> Credential { let mut credential = _credential_create("source_id"); let (offer, payment_info) = ::credential::parse_json_offer(offer).unwrap(); credential.credential_offer = Some(offer); credential.payment_info = payment_info; credential.state = 
VcxStateType::VcxStateRequestReceived; credential.my_did = Some(settings::get_config_value(settings::CONFIG_INSTITUTION_DID).unwrap()); credential } fn create_credential_with_price(price: u64) -> Credential { let mut cred: Credential = Credential::from_str(DEFAULT_SERIALIZED_CREDENTIAL).unwrap(); cred.payment_info = Some(PaymentInfo { payment_required: "one-time".to_string(), payment_addr: build_test_address("OsdjtGKavZDBuG2xFw2QunVwwGs5IB3j"), price, }); cred } #[test] fn test_credential_defaults() { let credential = Credential::default(); assert_eq!(credential.build_request("test1", "test2").unwrap_err().kind(), VcxErrorKind::NotReady); } #[test] fn test_credential_create_with_offer() { let handle = credential_create_with_offer("test_credential_create_with_offer", ::utils::constants::CREDENTIAL_OFFER_JSON).unwrap(); assert!(handle > 0); } #[test] fn test_credential_create_with_bad_offer() { match credential_create_with_offer("test_credential_create_with_bad_offer", BAD_CREDENTIAL_OFFER) { Ok(_) => panic!("should have failed with bad credential offer"), Err(x) => assert_eq!(x.kind(), VcxErrorKind::InvalidJson) }; } #[test] fn test_credential_serialize_deserialize() { let handle = credential_create_with_offer("test_credential_serialize_deserialize", ::utils::constants::CREDENTIAL_OFFER_JSON).unwrap(); let credential_string = to_string(handle).unwrap(); release(handle).unwrap(); assert_eq!(release(handle).unwrap_err().kind(), VcxErrorKind::InvalidCredentialHandle); let handle = from_string(&credential_string).unwrap(); let cred1: Credential = Credential::from_str(&credential_string).unwrap(); assert_eq!(cred1.get_state(), 3); let cred2: Credential = Credential::from_str(&to_string(handle).unwrap()).unwrap(); assert!(!cred1.is_payment_required()); assert_eq!(cred1, cred2); let handle = from_string(DEFAULT_SERIALIZED_CREDENTIAL_PAYMENT_REQUIRED).unwrap(); let payment_required_credential: Credential = Credential::from_str(&to_string(handle).unwrap()).unwrap(); 
assert!(payment_required_credential.is_payment_required()) } #[test] fn full_credential_test() { init!("true"); let connection_h = connection::tests::build_test_connection(); let offers = get_credential_offer_messages(connection_h).unwrap(); let offers: Value = serde_json::from_str(&offers).unwrap(); let offers = serde_json::to_string(&offers[0]).unwrap(); let c_h = credential_create_with_offer("TEST_CREDENTIAL", &offers).unwrap(); assert_eq!(VcxStateType::VcxStateRequestReceived as u32, get_state(c_h).unwrap()); send_credential_request(c_h, connection_h).unwrap(); assert_eq!(VcxStateType::VcxStateOfferSent as u32, get_state(c_h).unwrap()); assert_eq!(get_credential_id(c_h).unwrap(), ""); httpclient::set_next_u8_response(::utils::constants::CREDENTIAL_RESPONSE.to_vec()); httpclient::set_next_u8_response(::utils::constants::UPDATE_CREDENTIAL_RESPONSE.to_vec()); update_state(c_h).unwrap(); assert_eq!(get_state(c_h).unwrap(), VcxStateType::VcxStateAccepted as u32); assert_eq!(get_credential_id(c_h).unwrap(), "cred_id"); // this is set in test mode assert!(get_credential(c_h).unwrap().len() > 100); let serialized = to_string(c_h).unwrap(); } #[test] fn test_get_credential_offer() { init!("true"); let connection_h = connection::tests::build_test_connection(); let offer = get_credential_offer_messages(connection_h).unwrap(); let o: serde_json::Value = serde_json::from_str(&offer).unwrap(); let credential_offer: CredentialOffer = serde_json::from_str(&o[0][0].to_string()).unwrap(); assert!(offer.len() > 50); } #[test] fn test_pay_for_credential_with_sufficient_funds() { init!("true"); let cred = create_credential_with_price(1); assert!(cred.is_payment_required()); let payment = serde_json::to_string(&cred.submit_payment().unwrap().0).unwrap(); assert!(payment.len() > 50); } #[test] fn test_pay_for_non_premium_credential() { init!("true"); let cred: Credential = Credential::from_str(DEFAULT_SERIALIZED_CREDENTIAL).unwrap(); assert!(cred.payment_info.is_none()); 
assert_eq!(cred.submit_payment().unwrap_err().kind(), VcxErrorKind::NoPaymentInformation); } #[test] fn test_pay_for_credential_with_insufficient_funds() { init!("true"); let cred = create_credential_with_price(10000000000); assert!(cred.submit_payment().is_err()); } #[test] fn test_pay_for_credential_with_handle() { init!("true"); let handle = from_string(DEFAULT_SERIALIZED_CREDENTIAL_PAYMENT_REQUIRED).unwrap(); submit_payment(handle).unwrap(); get_payment_information(handle).unwrap(); let handle2 = from_string(DEFAULT_SERIALIZED_CREDENTIAL).unwrap(); assert!(!is_payment_required(handle2).unwrap()); let invalid_handle = 12345; assert_eq!(is_payment_required(invalid_handle).unwrap_err().kind(), VcxErrorKind::InvalidCredentialHandle); } #[test] fn test_get_credential() { init!("true"); let handle = from_string(::utils::constants::DEFAULT_SERIALIZED_CREDENTIAL).unwrap(); let offer_string = get_credential_offer(handle).unwrap(); let handle = from_string(::utils::constants::FULL_CREDENTIAL_SERIALIZED).unwrap(); let cred_string = get_credential(handle).unwrap(); } #[test] fn test_submit_payment_through_credential_request() { init!("true"); use utils::libindy::payments::get_wallet_token_info; let balance = get_wallet_token_info().unwrap().get_balance(); assert!(balance > 0); let mut cred = create_credential_with_price(5); assert!(cred.send_request(1234).is_err()); let new_balance = get_wallet_token_info().unwrap().get_balance(); assert_eq!(new_balance, balance); } #[test] fn test_get_cred_offer_returns_json_string_with_cred_offer_json_nested() { init!("true"); let handle = from_string(::utils::constants::DEFAULT_SERIALIZED_CREDENTIAL).unwrap(); let offer_string = get_credential_offer(handle).unwrap(); let offer_value: serde_json::Value = serde_json::from_str(&offer_string).unwrap(); let offer_struct: CredentialOffer = serde_json::from_value(offer_value["credential_offer"].clone()).unwrap(); } }
41.098516
365
0.640638
d9fb207f0d7bf5bd7b9d1405fb33ce516fd13d60
7,080
//! First Person View Camera GameObject that can capture a scene //! //! TODO //! use { super::{ Camera, super::{ GameObject, light::Light, } }, crate::{ registration::{ relation::{ Child, Parent, ParentWrapper }, named::Named, id::ID }, scripting::{ Script, executor::Spawner, Scriptable, globals::{ EngineGlobals, Global } }, components::{ triangle_mesh::TriangleMesh, }, graphics::{ Drawable, draw_pass_manager::DrawPassManager, lighting_pass_manager::LightingPassManager, }, event::UserEvent, shaders::vs_draw, }, feo_math::{ linear_algebra::{ vector3::Vector3, matrix4::Matrix4 }, utils::space::Space, rotation::quaternion::Quaternion }, std::{ any::Any, sync::{ Arc, RwLock }, mem }, winit::event::Event, }; #[derive(Scriptable, GameObject, Parent, Child, Named, Drawable)] #[camera] pub struct FpvCamera{ id: ID, name: String, parent: ParentWrapper, main: bool, offset: Option<Vector3<f32>>, // offset should be defined by subspace itself fov: i32, near_plane: f32, far_plane: f32, aspect_ratio: f32, pub subspace: Space, script: Option<Box<Script<Self>>>, children: Vec<Arc<RwLock<dyn GameObject>>>, } impl std::fmt::Debug for FpvCamera { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("FpvCamera") .field("id", &self.id) .field("name", &self.name) .field("parent", &self.parent) .field("main", &self.main) .field("offset", &self.offset) .field("fov", &self.fov) .field("near_plane", &self.near_plane) .field("far_plane", &self.far_plane) .field("aspect_ratio", &self.aspect_ratio) .field("subspace", &self.subspace) .field("script", &self.script) .field("children", &self.children).finish() } } impl Clone for FpvCamera { fn clone(&self) -> Self { let id = self.id.get_system().take(); FpvCamera{ id, name: self.name.clone(), parent: self.parent.clone(), subspace: self.subspace, script: self.script.clone(), children: self.children.clone().into_iter().map(|_child| { // Dangerous todo!(); }).collect::<Vec<Arc<RwLock<dyn GameObject>>>>(), main: 
self.main, offset: self.offset, fov: self.fov, near_plane: self.near_plane, far_plane: self.far_plane, aspect_ratio: self.aspect_ratio, } } } impl PartialEq for FpvCamera{ fn eq(&self, other: &Self) -> bool { self.get_id() == other.get_id() } } impl FpvCamera { #[allow(clippy::too_many_arguments)] pub fn new( name: Option<&str>, main: bool, parent: Option<Arc<RwLock<dyn GameObject>>>, // TODO: automatic not great use parent wrapper position: Option<Vector3<f32>>, rotation: Option<Quaternion<f32>>, scale_factor: Option<Vector3<f32>>, offset: Option<Vector3<f32>>, fov: i32, near_plane: f32, far_plane: f32, aspect_ratio: f32, script: Option<Box<Script<Self>>>, engine_globals: EngineGlobals//&VulkanoEngine ) -> Result<Arc<RwLock<Self>>, &'static str> { // TODO: pass surface let id = engine_globals.id_system.take(); let subspace = Space::new(position, rotation, scale_factor); Ok(Arc::new(RwLock::new( FpvCamera { name: match name { Some(name) => name.to_string(), None => String::from("fpv_camera_") + id.to_string().as_str() }, id, parent: match parent { Some(game_object) => { ParentWrapper::GameObject(game_object) }, None => { ParentWrapper::Scene(engine_globals.scene) } }, main, offset, fov, near_plane, far_plane, aspect_ratio, subspace, script, children: Vec::new() }))) } } impl Camera for FpvCamera { fn as_any(&self) -> &dyn Any { self } fn as_gameobject(&self) -> &dyn GameObject { self } fn cast_gameobject_arc_rwlock(&self, this: Arc<RwLock<dyn Camera>>) -> Arc<RwLock<dyn GameObject>> { let this= Arc::into_raw(this).cast::<RwLock<Self>>(); let this = unsafe { Arc::from_raw(this) }; this as Arc<RwLock<dyn GameObject>> } fn is_main(&self) -> bool { self.main } fn get_z_step (&self, z_buffer_size: usize) -> f32 { ((self.far_plane / self.near_plane) * 0.5).powi(z_buffer_size as i32) } fn build_projection(&self) -> Matrix4<f32> { let half_h = self.near_plane * (self.fov as f32 * 0.5).tan(); let half_w = half_h * self.aspect_ratio; Matrix4::new( [ self.near_plane / half_w, 
0.0, 0.0, 0.0], [ 0.0, self.near_plane / -half_h, /* <- flipped y to account for vulkano axes */ 0.0, 0.0], [ 0.0, 0.0, -(self.near_plane + self.far_plane) / (self.far_plane - self.near_plane), (-2.0 * self.far_plane * self.near_plane) / (self.far_plane - self.near_plane)], [ 0.0, 0.0, -1.0, 0.0] ) } fn build_viewspace(&self) -> Matrix4<f32> { self.get_inversed_subspace().build() } fn create_uniforms(&self) -> vs_draw::ty::Camera { vs_draw::ty::Camera { to_view: self.build_viewspace().transpose().into(), view_to_screen: self.build_projection().transpose().into() } } }
31.052632
222
0.454096
fedca3c7bf192a14070b54f8825bd05afdc0d555
88
//! Module root: declares and re-exposes this component's submodules.
//! NOTE(review): submodule purposes are inferred from their names only —
//! confirm against the individual files.

pub mod bubble;
pub mod canvas;
pub mod edit;
pub mod show;
pub mod svg;
pub mod utils;
12.571429
15
0.727273
edec8e187fd3715ff4527de1ce7668429d080e08
5,875
//! The validate_repo module owns the 'validate-repo' subcommand and provides methods for validating //! a given TUF repository by attempting to load the repository and download its targets. use crate::repo::{error as repo_error, repo_urls}; use crate::Args; use log::{info, trace}; use pubsys_config::InfraConfig; use snafu::{OptionExt, ResultExt}; use std::cmp::min; use std::fs::File; use std::io; use std::path::PathBuf; use std::sync::mpsc; use structopt::{clap, StructOpt}; use tough::{Repository, RepositoryLoader}; use url::Url; /// Validates a set of TUF repositories #[derive(Debug, StructOpt)] #[structopt(setting = clap::AppSettings::DeriveDisplayOrder)] pub(crate) struct ValidateRepoArgs { #[structopt(long)] /// Use this named repo infrastructure from Infra.toml repo: String, #[structopt(long)] /// The architecture of the repo being validated arch: String, #[structopt(long)] /// The variant of the repo being validated variant: String, #[structopt(long, parse(from_os_str))] /// Path to root.json for this repo root_role_path: PathBuf, #[structopt(long)] /// Specifies whether to validate all listed targets by attempting to download them validate_targets: bool, } /// If we are on a machine with a large number of cores, then we limit the number of simultaneous /// downloads to this arbitrarily chosen maximum. const MAX_DOWNLOAD_THREADS: usize = 16; /// Retrieves listed targets and attempts to download them for validation purposes. We use a Rayon /// thread pool instead of tokio for async execution because `reqwest::blocking` creates a tokio /// runtime (and multiple tokio runtimes are not supported). 
fn retrieve_targets(repo: &Repository) -> Result<(), Error> { let targets = &repo.targets().signed.targets; let thread_pool = rayon::ThreadPoolBuilder::new() .num_threads(min(num_cpus::get(), MAX_DOWNLOAD_THREADS)) .build() .context(error::ThreadPoolSnafu)?; // create the channels through which our download results will be passed let (tx, rx) = mpsc::channel(); for target in targets.keys().cloned() { let tx = tx.clone(); let mut reader = repo .read_target(&target) .with_context(|_| repo_error::ReadTargetSnafu { target: target.raw(), })? .with_context(|| error::TargetMissingSnafu { target: target.raw(), })?; info!("Downloading target: {}", target.raw()); thread_pool.spawn(move || { tx.send({ // tough's `Read` implementation validates the target as it's being downloaded io::copy(&mut reader, &mut io::sink()).context(error::TargetDownloadSnafu { target: target.raw(), }) }) // inability to send on this channel is unrecoverable .unwrap(); }); } // close all senders drop(tx); // block and await all downloads let results: Vec<Result<u64, error::Error>> = rx.into_iter().collect(); // check all results and return the first error we see for result in results { result?; } // no errors were found, the targets are validated Ok(()) } fn validate_repo( root_role_path: &PathBuf, metadata_url: Url, targets_url: &Url, validate_targets: bool, ) -> Result<(), Error> { // Load the repository let repo = RepositoryLoader::new( File::open(root_role_path).context(repo_error::FileSnafu { path: root_role_path, })?, metadata_url.clone(), targets_url.clone(), ) .load() .context(repo_error::RepoLoadSnafu { metadata_base_url: metadata_url.clone(), })?; info!("Loaded TUF repo: {}", metadata_url); if validate_targets { // Try retrieving listed targets retrieve_targets(&repo)?; } Ok(()) } /// Common entrypoint from main() pub(crate) fn run(args: &Args, validate_repo_args: &ValidateRepoArgs) -> Result<(), Error> { // If a lock file exists, use that, otherwise use Infra.toml let infra_config = 
InfraConfig::from_path_or_lock(&args.infra_config_path, false) .context(repo_error::ConfigSnafu)?; trace!("Parsed infra config: {:?}", infra_config); let repo_config = infra_config .repo .as_ref() .context(repo_error::MissingConfigSnafu { missing: "repo section", })? .get(&validate_repo_args.repo) .context(repo_error::MissingConfigSnafu { missing: format!("definition for repo {}", &validate_repo_args.repo), })?; let repo_urls = repo_urls( &repo_config, &validate_repo_args.variant, &validate_repo_args.arch, )? .context(repo_error::MissingRepoUrlsSnafu { repo: &validate_repo_args.repo, })?; validate_repo( &validate_repo_args.root_role_path, repo_urls.0, repo_urls.1, validate_repo_args.validate_targets, ) } mod error { use snafu::Snafu; use std::io; #[derive(Debug, Snafu)] #[snafu(visibility(pub(super)))] pub(crate) enum Error { #[snafu(display("Invalid percentage specified: {} is greater than 100", percentage))] InvalidPercentage { percentage: u8 }, #[snafu(context(false), display("{}", source))] Repo { source: crate::repo::Error }, #[snafu(display("Failed to download and write target '{}': {}", target, source))] TargetDownload { target: String, source: io::Error }, #[snafu(display("Missing target: {}", target))] TargetMissing { target: String }, #[snafu(display("Unable to create thread pool: {}", source))] ThreadPool { source: rayon::ThreadPoolBuildError }, } } pub(crate) use error::Error;
32.638889
100
0.633021
dea11dfa0e267ed884e0e02b1943fa2e920c5b1b
34,276
use std::{collections::BTreeMap, str::Split}; use lsp_types::{ notification::{ DidChangeTextDocument, DidChangeWatchedFiles, DidCloseTextDocument, DidDeleteFiles, DidOpenTextDocument, DidSaveTextDocument, Initialized, Notification, }, DeleteFilesParams, Diagnostic, DiagnosticSeverity, DidChangeTextDocumentParams, DidChangeWatchedFilesParams, DidOpenTextDocumentParams, FileChangeType, FileDelete, FileEvent, Position, PublishDiagnosticsParams, Range, TextDocumentItem, Url, VersionedTextDocumentIdentifier, }; use polar_core::{ diagnostic::{Diagnostic as PolarDiagnostic, Range as PolarRange}, polar::Polar, sources::Source, }; use serde_wasm_bindgen::{from_value, to_value}; use wasm_bindgen::prelude::*; #[wasm_bindgen] extern "C" { #[wasm_bindgen(js_namespace = console, js_name = log)] fn console_log(s: &str); } #[cfg(not(test))] fn log(s: &str) { #[allow(unused_unsafe)] unsafe { console_log(&("[pls] ".to_owned() + s)) } } #[cfg(test)] fn log(_: &str) {} type Documents = BTreeMap<Url, TextDocumentItem>; type Diagnostics = BTreeMap<Url, PublishDiagnosticsParams>; #[wasm_bindgen] pub struct PolarLanguageServer { documents: Documents, polar: Polar, send_diagnostics_callback: js_sys::Function, } fn range_from_polar_diagnostic_context(diagnostic: &PolarDiagnostic) -> Range { let context = match diagnostic { PolarDiagnostic::Error(e) => e.context.as_ref(), PolarDiagnostic::Warning(w) => w.context.as_ref(), }; if let Some(PolarRange { start, end }) = context.map(|c| c.range) { let start = Position { line: start.row as _, character: start.column as _, }; let end = Position { line: end.row as _, character: end.column as _, }; Range { start, end } } else { Range::default() } } fn uri_from_polar_diagnostic_context(diagnostic: &PolarDiagnostic) -> Option<Url> { let context = match diagnostic { PolarDiagnostic::Error(e) => e.context.as_ref(), PolarDiagnostic::Warning(w) => w.context.as_ref(), }; if let Some(context) = context { if let Some(filename) = 
context.source.filename.as_ref() { match Url::parse(filename) { Ok(uri) => return Some(uri), Err(err) => { log(&format!( "Url::parse error: {}\n\tFilename: {}\n\tDiagnostic: {}", err, filename, diagnostic )); } } } else { log(&format!( "source missing filename:\n\t{:?}\n\tDiagnostic: {}", context.source, diagnostic )); } } else { log(&format!("missing context:\n\t{:?}", diagnostic)); } None } fn empty_diagnostics_for_doc( (uri, doc): (&Url, &TextDocumentItem), ) -> (Url, PublishDiagnosticsParams) { let params = PublishDiagnosticsParams::new(uri.clone(), vec![], Some(doc.version)); (uri.clone(), params) } /// Public API exposed via WASM. #[wasm_bindgen] impl PolarLanguageServer { #[wasm_bindgen(constructor)] pub fn new(send_diagnostics_callback: &js_sys::Function) -> Self { console_error_panic_hook::set_once(); Self { documents: BTreeMap::new(), polar: Polar::default(), send_diagnostics_callback: send_diagnostics_callback.clone(), } } /// Catch-all handler for notifications sent by the LSP client. /// /// This function receives a notification's `method` and `params` and dispatches to the /// appropriate handler function based on `method`. #[wasm_bindgen(js_class = PolarLanguageServer, js_name = onNotification)] pub fn on_notification(&mut self, method: &str, params: JsValue) { match method { DidOpenTextDocument::METHOD => { let DidOpenTextDocumentParams { text_document } = from_value(params).unwrap(); let diagnostics = self.on_did_open_text_document(text_document); self.send_diagnostics(diagnostics); } DidChangeTextDocument::METHOD => { let params: DidChangeTextDocumentParams = from_value(params).unwrap(); // Ensure we receive full -- not incremental -- updates. 
assert_eq!(params.content_changes.len(), 1); let change = params.content_changes.into_iter().next().unwrap(); assert!(change.range.is_none()); let VersionedTextDocumentIdentifier { uri, version } = params.text_document; let updated_doc = TextDocumentItem::new(uri, "polar".into(), version, change.text); let diagnostics = self.on_did_change_text_document(updated_doc); self.send_diagnostics(diagnostics); } DidChangeWatchedFiles::METHOD => { let DidChangeWatchedFilesParams { changes } = from_value(params).unwrap(); let uris = changes.into_iter().map(|FileEvent { uri, typ }| { assert_eq!(typ, FileChangeType::Deleted); // We only watch for `Deleted` events. uri }); let diagnostics = self.on_did_delete_files(uris.collect()); self.send_diagnostics(diagnostics); } DidDeleteFiles::METHOD => { let DeleteFilesParams { files } = from_value(params).unwrap(); let mut uris = vec![]; for FileDelete { uri } in files { match Url::parse(&uri) { Ok(uri) => uris.push(uri), Err(e) => log(&format!("Failed to parse URI: {}", e)), } } let diagnostics = self.on_did_delete_files(uris); self.send_diagnostics(diagnostics); } // We don't care when a document is saved -- we already have the updated state thanks // to `DidChangeTextDocument`. DidSaveTextDocument::METHOD => (), // We don't care when a document is closed -- we care about all Polar files in a // workspace folder regardless of which ones remain open. DidCloseTextDocument::METHOD => (), // Nothing to do when we receive the `Initialized` notification. Initialized::METHOD => (), _ => log(&format!("on_notification {} {:?}", method, params)), } } } /// Individual LSP notification handlers. impl PolarLanguageServer { fn on_did_open_text_document(&mut self, doc: TextDocumentItem) -> Diagnostics { if let Some(TextDocumentItem { uri, .. 
}) = self.upsert_document(doc) { log(&format!("reopened tracked doc: {}", uri)); } self.reload_kb() } fn on_did_change_text_document(&mut self, doc: TextDocumentItem) -> Diagnostics { let uri = doc.uri.clone(); if self.upsert_document(doc).is_none() { log(&format!("updated untracked doc: {}", uri)); } self.reload_kb() } fn on_did_delete_files(&mut self, uris: Vec<Url>) -> Diagnostics { let mut diagnostics = Diagnostics::new(); for uri in uris { let mut msg = format!("deleting URI: {}", uri); if let Some(removed) = self.remove_document(&uri) { let (_, empty_diagnostics) = empty_diagnostics_for_doc((&uri, &removed)); if diagnostics.insert(uri, empty_diagnostics).is_some() { msg += "\n\tduplicate watched file event"; } } else { msg += "\n\tchecking if URI is dir"; let removed = self.remove_documents_in_dir(&uri); if removed.is_empty() { if uri.as_str().ends_with(".polar") { msg += "\n\tcannot remove untracked doc"; } } else { for (uri, params) in removed { msg += &format!("\n\t\tremoving dir member: {}", uri); if diagnostics.insert(uri, params).is_some() { msg += "\n\t\tduplicate watched file event"; } } } } log(&msg); } diagnostics.append(&mut self.reload_kb()); diagnostics } } /// Helper methods. impl PolarLanguageServer { fn upsert_document(&mut self, doc: TextDocumentItem) -> Option<TextDocumentItem> { self.documents.insert(doc.uri.clone(), doc) } fn remove_document(&mut self, uri: &Url) -> Option<TextDocumentItem> { self.documents.remove(uri) } /// Remove tracked docs inside `dir`. fn remove_documents_in_dir(&mut self, dir: &Url) -> Diagnostics { let (in_dir, not_in_dir): (Documents, Documents) = self.documents.clone().into_iter().partition(|(uri, _)| { // Zip pair of `Option<Split<char>>`s into `Option<(Split<char>, Split<char>)>`. let maybe_segments = dir.path_segments().zip(uri.path_segments()); // Compare paths (`Split<char>`) by zipping them together and comparing pairwise. 
let compare_paths = |(l, r): (Split<_>, Split<_>)| l.zip(r).all(|(l, r)| l == r); // If all path segments match b/w dir & uri, uri is in dir and should be removed. maybe_segments.map_or(false, compare_paths) }); // Replace tracked docs w/ docs that aren't in the removed dir. self.documents = not_in_dir; in_dir.iter().map(empty_diagnostics_for_doc).collect() } fn send_diagnostics(&self, diagnostics: Diagnostics) { let this = &JsValue::null(); for params in diagnostics.into_values() { let params = &to_value(&params).unwrap(); if let Err(e) = self.send_diagnostics_callback.call1(this, params) { log(&format!( "send_diagnostics params:\n\t{:?}\n\tJS error: {:?}", params, e )); } } } fn empty_diagnostics_for_all_documents(&self) -> Diagnostics { self.documents .iter() .map(empty_diagnostics_for_doc) .collect() } fn document_from_polar_diagnostic_context( &self, diagnostic: &PolarDiagnostic, ) -> Option<TextDocumentItem> { uri_from_polar_diagnostic_context(diagnostic).and_then(|uri| { if let Some(document) = self.documents.get(&uri) { Some(document.clone()) } else { let tracked_docs = self.documents.keys().map(ToString::to_string); let tracked_docs = tracked_docs.collect::<Vec<_>>().join(", "); log(&format!( "untracked doc: {}\n\tTracked: {}\n\tDiagnostic: {}", uri, tracked_docs, diagnostic )); None } }) } /// Create one or more `Diagnostic`s from `polar_core::diagnostic::Diagnostic`s, filtering out /// "ignored" diagnostics. fn diagnostics_from_polar_diagnostic( &self, diagnostic: PolarDiagnostic, ) -> Vec<(TextDocumentItem, Diagnostic)> { use polar_core::error::{ErrorKind::Validation, ValidationError::*}; use polar_core::warning::ValidationWarning::UnknownSpecializer; // Ignore diagnostics that depend on app data. match &diagnostic { PolarDiagnostic::Error(e) => match e.kind { Validation(UnregisteredClass { .. }) | Validation(SingletonVariable { .. }) => { return vec![]; } _ => (), }, PolarDiagnostic::Warning(w) if matches!(w.kind, UnknownSpecializer { .. 
}) => { return vec![]; } _ => (), } // NOTE(gj): We stringify the error / warning variant instead of the full `PolarError` / // `PolarWarning` because we don't want source context as part of the error message. let (message, severity) = match &diagnostic { PolarDiagnostic::Error(e) => (e.kind.to_string(), DiagnosticSeverity::Error), PolarDiagnostic::Warning(w) => (w.kind.to_string(), DiagnosticSeverity::Warning), }; // If the diagnostic applies to a single doc, use it; otherwise, default to emitting a // duplicate diagnostic for all docs. let docs = self .document_from_polar_diagnostic_context(&diagnostic) .map_or_else( || self.documents.values().cloned().collect(), |doc| vec![doc], ); docs.into_iter() .map(|doc| { let diagnostic = Diagnostic { range: range_from_polar_diagnostic_context(&diagnostic), severity: Some(severity), source: Some("Polar Language Server".to_owned()), message: message.clone(), ..Default::default() }; (doc, diagnostic) }) .collect() } /// Turn tracked documents into a set of Polar `Source` structs for `Polar::diagnostic_load`. fn documents_to_polar_sources(&self) -> Vec<Source> { self.documents .values() .map(|doc| Source { filename: Some(doc.uri.to_string()), src: doc.text.clone(), }) .collect() } fn load_documents(&self) -> Vec<PolarDiagnostic> { self.polar .diagnostic_load(self.documents_to_polar_sources()) } fn get_diagnostics(&self) -> Diagnostics { self.load_documents() .into_iter() .flat_map(|diagnostic| self.diagnostics_from_polar_diagnostic(diagnostic)) .fold(Diagnostics::new(), |mut acc, (doc, diagnostic)| { let params = acc.entry(doc.uri.clone()).or_insert_with(|| { PublishDiagnosticsParams::new(doc.uri, vec![], Some(doc.version)) }); params.diagnostics.push(diagnostic); acc }) } /// Reloads tracked documents into the `KnowledgeBase`, translates `polar-core` diagnostics /// into `polar-language-server` diagnostics, and returns a set of diagnostics for publishing. 
/// /// NOTE(gj): we republish 'empty' diagnostics for all documents in order to purge stale /// diagnostics. fn reload_kb(&self) -> Diagnostics { self.polar.clear_rules(); let mut diagnostics = self.empty_diagnostics_for_all_documents(); diagnostics.extend(self.get_diagnostics()); diagnostics } } #[cfg(test)] mod tests { use wasm_bindgen_test::*; use super::*; #[track_caller] fn new_pls() -> PolarLanguageServer { let noop = js_sys::Function::new_with_args("_params", ""); let pls = PolarLanguageServer::new(&noop); assert!(pls.reload_kb().is_empty()); pls } #[track_caller] fn polar_uri(path: &str) -> Url { Url::parse(&format!("file:///{}.polar", path)).unwrap() } #[track_caller] fn polar_doc(path: &str, contents: String) -> TextDocumentItem { TextDocumentItem::new(polar_uri(path), "polar".to_owned(), 0, contents) } #[track_caller] fn doc_with_no_errors(path: &str) -> TextDocumentItem { let file_name = path.split('/').last().unwrap(); polar_doc(path, format!("{}();", file_name)) } #[track_caller] fn doc_with_missing_semicolon(path: &str) -> TextDocumentItem { let file_name = path.split('/').last().unwrap(); polar_doc(path, format!("{}()", file_name)) } #[track_caller] fn add_doc_with_no_errors(pls: &mut PolarLanguageServer, path: &str) -> TextDocumentItem { let doc = doc_with_no_errors(path); assert!(pls.upsert_document(doc.clone()).is_none()); doc } #[track_caller] fn add_doc_with_missing_semicolon( pls: &mut PolarLanguageServer, path: &str, ) -> TextDocumentItem { let doc = doc_with_missing_semicolon(path); assert!(pls.upsert_document(doc.clone()).is_none()); doc } #[track_caller] fn update_text(doc: TextDocumentItem, text: &str) -> TextDocumentItem { TextDocumentItem::new(doc.uri, doc.language_id, doc.version + 1, text.into()) } #[track_caller] fn assert_missing_semicolon_error(diagnostics: &Diagnostics, docs: Vec<&TextDocumentItem>) { for doc in docs { let params = diagnostics.get(&doc.uri).unwrap(); assert_eq!(params.uri, doc.uri); 
assert_eq!(params.version.unwrap(), doc.version); assert_eq!(params.diagnostics.len(), 1, "{}", doc.uri.to_string()); let diagnostic = params.diagnostics.get(0).unwrap(); assert_eq!( diagnostic.message, "hit the end of the file unexpectedly. Did you forget a semi-colon" ); } } #[track_caller] fn assert_no_errors(diagnostics: &Diagnostics, docs: Vec<&TextDocumentItem>) { for doc in docs { let params = diagnostics.get(&doc.uri).unwrap(); assert_eq!(params.uri, doc.uri); assert_eq!(params.version.unwrap(), doc.version); assert!(params.diagnostics.is_empty(), "{:?}", params.diagnostics); } } #[track_caller] fn assert_missing_allow_rule_warning(diagnostics: &Diagnostics, docs: Vec<&TextDocumentItem>) { for doc in docs { let params = diagnostics.get(&doc.uri).unwrap(); assert_eq!(params.uri, doc.uri); assert_eq!(params.version.unwrap(), doc.version); assert_eq!(params.diagnostics.len(), 1, "{}", doc.uri.to_string()); let diagnostic = params.diagnostics.get(0).unwrap(); let expected = diagnostic .message .starts_with("Your policy does not contain an allow rule"); assert!(expected, "{}", diagnostic.message); } } #[allow(clippy::many_single_char_names)] #[wasm_bindgen_test] fn test_on_did_open_text_document() { let mut pls = new_pls(); let a = doc_with_no_errors("apple"); let b = doc_with_no_errors("banana"); let c = doc_with_missing_semicolon("canteloupe"); let d = doc_with_missing_semicolon("date"); let e = doc_with_no_errors("elderberry"); // Load a single doc w/ no errors. let diagnostics = pls.on_did_open_text_document(a.clone()); assert_eq!(diagnostics.len(), 1); assert_missing_allow_rule_warning(&diagnostics, vec![&a]); // Load a second doc w/ no errors. let diagnostics = pls.on_did_open_text_document(b.clone()); assert_eq!(diagnostics.len(), 2); assert_missing_allow_rule_warning(&diagnostics, vec![&a, &b]); // Load a third doc w/ errors. 
let diagnostics = pls.on_did_open_text_document(c.clone()); assert_eq!(diagnostics.len(), 3); // No 'missing allow rule' warnings b/c the parse error halts validation before reaching // that check. assert_no_errors(&diagnostics, vec![&a, &b]); assert_missing_semicolon_error(&diagnostics, vec![&c]); // Load a fourth doc w/ errors. let diagnostics = pls.on_did_open_text_document(d.clone()); assert_eq!(diagnostics.len(), 4); assert_no_errors(&diagnostics, vec![&a, &b]); assert_missing_semicolon_error(&diagnostics, vec![&c, &d]); // Load a fifth doc w/ no errors. let diagnostics = pls.on_did_open_text_document(e.clone()); assert_eq!(diagnostics.len(), 5); assert_no_errors(&diagnostics, vec![&a, &b, &e]); assert_missing_semicolon_error(&diagnostics, vec![&c, &d]); } #[wasm_bindgen_test] fn test_on_did_change_text_document() { let mut pls = new_pls(); // 'Change' untracked doc w/ no errors. let a0 = doc_with_no_errors("apple"); let diagnostics0 = pls.on_did_change_text_document(a0.clone()); assert_eq!(diagnostics0.len(), 1); assert_missing_allow_rule_warning(&diagnostics0, vec![&a0]); // Change tracked doc w/o introducing an error. let a1 = update_text(a0, "pie();"); let diagnostics1 = pls.on_did_change_text_document(a1.clone()); assert_eq!(diagnostics1.len(), 1); assert_missing_allow_rule_warning(&diagnostics1, vec![&a1]); // Change tracked doc, introducing an error. let a2 = update_text(a1, "pie()"); let diagnostics2 = pls.on_did_change_text_document(a2.clone()); assert_eq!(diagnostics2.len(), 1); assert_missing_semicolon_error(&diagnostics2, vec![&a2]); // 'Change' untracked doc, introducing a second error. let b3 = doc_with_missing_semicolon("banana"); let diagnostics3 = pls.on_did_change_text_document(b3.clone()); assert_eq!(diagnostics3.len(), 2); assert_missing_semicolon_error(&diagnostics3, vec![&a2, &b3]); // Change tracked doc, fixing an error. 
let a4 = update_text(a2, "pie();"); let diagnostics4 = pls.on_did_change_text_document(a4.clone()); assert_eq!(diagnostics4.len(), 2); // No 'missing allow rule' warnings b/c the parse error halts validation before reaching // that check. assert_no_errors(&diagnostics4, vec![&a4]); assert_missing_semicolon_error(&diagnostics4, vec![&b3]); // Change tracked doc, fixing the last error. let b5 = update_text(b3, "split();"); let diagnostics5 = pls.on_did_change_text_document(b5.clone()); assert_eq!(diagnostics5.len(), 2); assert_missing_allow_rule_warning(&diagnostics5, vec![&a4, &b5]); } #[wasm_bindgen_test] fn test_on_did_delete_files() { let mut pls = new_pls(); // Empty event has no effect. let diagnostics0 = pls.on_did_delete_files(vec![]); assert!(diagnostics0.is_empty()); assert!(pls.documents.is_empty()); // Deleting untracked doc has no effect. let events1 = vec![polar_uri("apple")]; let diagnostics1 = pls.on_did_delete_files(events1); assert!(diagnostics1.is_empty()); assert!(pls.documents.is_empty()); // Deleting tracked doc w/o error. let a2 = add_doc_with_no_errors(&mut pls, "apple"); let events2 = vec![a2.uri.clone()]; let diagnostics2 = pls.on_did_delete_files(events2); assert_eq!(diagnostics2.len(), 1); assert_no_errors(&diagnostics2, vec![&a2]); assert!(pls.documents.is_empty()); // Deleting tracked doc w/ error. let a3 = add_doc_with_missing_semicolon(&mut pls, "apple"); let events3 = vec![a3.uri.clone()]; let diagnostics3 = pls.on_did_delete_files(events3); assert_eq!(diagnostics3.len(), 1); assert_no_errors(&diagnostics3, vec![&a3]); assert!(pls.documents.is_empty()); // Deleting tracked doc w/o error; doc w/o error remains. 
let a4 = add_doc_with_no_errors(&mut pls, "apple"); let b4 = add_doc_with_no_errors(&mut pls, "banana"); let events4 = vec![a4.uri.clone()]; let diagnostics4 = pls.on_did_delete_files(events4); assert_eq!(diagnostics4.len(), 2); assert_no_errors(&diagnostics4, vec![&a4]); assert_missing_allow_rule_warning(&diagnostics4, vec![&b4]); assert!(pls.remove_document(&b4.uri).is_some()); assert!(pls.documents.is_empty()); // Deleting tracked doc w/ error; doc w/o error remains. let a5 = add_doc_with_missing_semicolon(&mut pls, "apple"); let b5 = add_doc_with_no_errors(&mut pls, "banana"); let events5 = vec![a5.uri.clone()]; let diagnostics5 = pls.on_did_delete_files(events5); assert_eq!(diagnostics5.len(), 2); assert_no_errors(&diagnostics4, vec![&a5]); assert_missing_allow_rule_warning(&diagnostics5, vec![&b5]); assert!(pls.remove_document(&b5.uri).is_some()); assert!(pls.documents.is_empty()); // Deleting tracked doc w/o error; doc w/ error remains. let a6 = add_doc_with_no_errors(&mut pls, "apple"); let b6 = add_doc_with_missing_semicolon(&mut pls, "banana"); let events6 = vec![a6.uri.clone()]; let diagnostics6 = pls.on_did_delete_files(events6); assert_eq!(diagnostics6.len(), 2); assert_no_errors(&diagnostics6, vec![&a6]); assert_missing_semicolon_error(&diagnostics6, vec![&b6]); assert!(pls.remove_document(&b6.uri).is_some()); assert!(pls.documents.is_empty()); // Deleting tracked doc w/ error; doc w/ error remains. let a7 = add_doc_with_missing_semicolon(&mut pls, "apple"); let b7 = add_doc_with_missing_semicolon(&mut pls, "banana"); let events7 = vec![a7.uri.clone()]; let diagnostics7 = pls.on_did_delete_files(events7); assert_eq!(diagnostics7.len(), 2); assert_no_errors(&diagnostics7, vec![&a7]); assert_missing_semicolon_error(&diagnostics7, vec![&b7]); assert!(pls.remove_document(&b7.uri).is_some()); assert!(pls.documents.is_empty()); // Deleting multiple docs at once. 
let a8 = add_doc_with_missing_semicolon(&mut pls, "apple"); let b8 = add_doc_with_missing_semicolon(&mut pls, "banana"); let c8 = add_doc_with_missing_semicolon(&mut pls, "canteloupe"); let d8 = add_doc_with_no_errors(&mut pls, "date"); let e8 = add_doc_with_no_errors(&mut pls, "elderberry"); let f8 = add_doc_with_no_errors(&mut pls, "fig"); let events8 = vec![ a8.uri.clone(), b8.uri.clone(), d8.uri.clone(), e8.uri.clone(), ]; let diagnostics8 = pls.on_did_delete_files(events8); assert_eq!(diagnostics8.len(), 6); // No 'missing allow rule' warnings b/c the parse error halts validation before reaching // that check. assert_no_errors(&diagnostics8, vec![&a8, &b8, &d8, &e8, &f8]); assert_missing_semicolon_error(&diagnostics8, vec![&c8]); assert!(pls.remove_document(&c8.uri).is_some()); assert!(pls.remove_document(&f8.uri).is_some()); assert!(pls.documents.is_empty()); // Deleting directories containing Polar files. let a9 = add_doc_with_missing_semicolon(&mut pls, "apple"); let b9 = add_doc_with_no_errors(&mut pls, "a/b/banana"); let ca9a = add_doc_with_no_errors(&mut pls, "a/b/c/ca/calabash"); let ca9b = add_doc_with_no_errors(&mut pls, "a/b/c/ca/canteloupe"); let ch9 = add_doc_with_no_errors(&mut pls, "a/b/c/ch/cherry"); let d9 = add_doc_with_no_errors(&mut pls, "a/b/c/d/date"); let g9a = add_doc_with_no_errors(&mut pls, "a/b/c/d/e/f/g/grape"); let g9b = add_doc_with_no_errors(&mut pls, "a/b/c/d/e/f/g/grapefruit"); // Deleting a deeply nested directory. let d_dir = Url::parse(d9.uri.as_str().strip_suffix("/date.polar").unwrap()).unwrap(); let events9a = vec![d_dir]; assert_eq!(pls.documents.len(), 8); let diagnostics9a = pls.on_did_delete_files(events9a); assert_eq!(diagnostics9a.len(), 8); assert_missing_semicolon_error(&diagnostics9a, vec![&a9]); // No 'missing allow rule' warnings b/c the parse error halts validation before reaching // that check. 
assert_no_errors( &diagnostics9a, vec![&b9, &ca9a, &ca9b, &ch9, &d9, &g9a, &g9b], ); assert_eq!(pls.documents.len(), 5); // Deleting multiple directories at once. let ca_dir = ca9a.uri.as_str().strip_suffix("/calabash.polar"); let ca_dir = Url::parse(ca_dir.unwrap()).unwrap(); let ch_dir = ch9.uri.as_str().strip_suffix("/cherry.polar"); let ch_dir = Url::parse(ch_dir.unwrap()).unwrap(); let events9b = vec![ca_dir, ch_dir]; assert_eq!(pls.documents.len(), 5); let diagnostics9b = pls.on_did_delete_files(events9b); assert_eq!(diagnostics9b.len(), 5); assert_missing_semicolon_error(&diagnostics9b, vec![&a9]); // No 'missing allow rule' warnings b/c the parse error halts validation before reaching // that check. assert_no_errors(&diagnostics9b, vec![&b9, &ca9a, &ca9b, &ch9]); assert_eq!(pls.documents.len(), 2); // Deleting a top-level directory. let a_dir = b9.uri.as_str().strip_suffix("/b/banana.polar"); let a_dir = Url::parse(a_dir.unwrap()).unwrap(); let events9c = vec![a_dir]; assert_eq!(pls.documents.len(), 2); let diagnostics9c = pls.on_did_delete_files(events9c); assert_eq!(diagnostics9c.len(), 2); assert_missing_semicolon_error(&diagnostics9c, vec![&a9]); // No 'missing allow rule' warnings b/c the parse error halts validation before reaching // that check. assert_no_errors(&diagnostics9c, vec![&b9]); assert_eq!(pls.documents.len(), 1); assert!(pls.remove_document(&a9.uri).is_some()); assert!(pls.documents.is_empty()); } #[wasm_bindgen_test] fn test_ignoring_errors_dependent_on_app_data() { let mut pls = new_pls(); let resource_block_unregistered_constant = r#" allow(_, _, _) if has_permission(_, _, _); has_permission(_: Actor, _: String, _: Resource); actor User {} "#; let doc = polar_doc("whatever", resource_block_unregistered_constant.to_owned()); pls.upsert_document(doc.clone()); // `load_documents()` API performs no filtering. 
let polar_diagnostics = pls.load_documents(); assert_eq!(polar_diagnostics.len(), 2, "{:?}", polar_diagnostics); let unknown_specializer = polar_diagnostics.get(0).unwrap(); let expected_message = "Unknown specializer String at line 3, column 41 of file file:///whatever.polar:\n\t003: has_permission(_: Actor, _: String, _: Resource);\n\t ^\n"; assert_eq!(unknown_specializer.to_string(), expected_message); let unregistered_class = polar_diagnostics.get(1).unwrap(); assert!(unregistered_class .to_string() .starts_with("Unregistered class: User")); // `reload_kb()` API filters out diagnostics dependent on app data. let diagnostics = pls.reload_kb(); let params = diagnostics.get(&doc.uri).unwrap(); assert_eq!(params.uri, doc.uri); assert_eq!(params.version.unwrap(), doc.version); assert!(params.diagnostics.is_empty(), "{:?}", params.diagnostics); let rule_type_unregistered_constant = r#" allow(_, _, _); type f(a: A); f(_: B); "#; let doc = polar_doc("whatever", rule_type_unregistered_constant.to_owned()); pls.upsert_document(doc.clone()); // `load_documents()` API performs no filtering. let polar_diagnostics = pls.load_documents(); assert_eq!(polar_diagnostics.len(), 2, "{:?}", polar_diagnostics); let unknown_specializer = polar_diagnostics.get(0).unwrap(); let expected_message = "Unknown specializer B at line 4, column 18 of file file:///whatever.polar:\n\t004: f(_: B);\n\t ^\n"; assert_eq!(unknown_specializer.to_string(), expected_message); let unregistered_constant = polar_diagnostics.get(1).unwrap(); let expected_message = "Unregistered class: A"; assert_eq!(unregistered_constant.to_string(), expected_message); // `reload_kb()` API filters out diagnostics dependent on app data. 
let diagnostics = pls.reload_kb(); let params = diagnostics.get(&doc.uri).unwrap(); assert_eq!(params.uri, doc.uri); assert_eq!(params.version.unwrap(), doc.version); assert!(params.diagnostics.is_empty(), "{:?}", params.diagnostics); let singleton_variable = "allow(a, _, _);".to_owned(); let doc = polar_doc("whatever", singleton_variable); pls.upsert_document(doc.clone()); // `load_documents()` API performs no filtering. let polar_diagnostics = pls.load_documents(); assert_eq!(polar_diagnostics.len(), 1, "{:?}", polar_diagnostics); let singleton_variable = polar_diagnostics.get(0).unwrap(); assert!(singleton_variable .to_string() .starts_with("Singleton variable a is unused or undefined; try renaming to _a or _")); // `reload_kb()` API filters out diagnostics dependent on app data. let diagnostics = pls.reload_kb(); let params = diagnostics.get(&doc.uri).unwrap(); assert_eq!(params.uri, doc.uri); assert_eq!(params.version.unwrap(), doc.version); assert!(params.diagnostics.is_empty(), "{:?}", params.diagnostics); } #[wasm_bindgen_test] fn test_diagnostic_range() { let mut pls = new_pls(); let debug = "debug"; let doc = polar_doc("whatever", debug.to_owned()); pls.upsert_document(doc.clone()); let diagnostics = pls.reload_kb(); let params = diagnostics.get(&doc.uri).unwrap(); assert_eq!(params.uri, doc.uri); assert_eq!(params.version.unwrap(), doc.version); assert_eq!(params.diagnostics.len(), 1); let diagnostic = params.diagnostics.get(0).unwrap(); assert_eq!( diagnostic.message, "debug is a reserved Polar word and cannot be used here" ); assert_eq!(diagnostic.range.start, Position::new(0, 0)); assert_eq!(diagnostic.range.end, Position::new(0, 5)); } }
41.698297
235
0.597357
f719a4b9bb1a88c828f57eb1c5318073f8f28b17
758
#![no_std] use contract::{ contract_api::{account, runtime, system}, unwrap_or_revert::UnwrapOrRevert, }; use types::{ApiError, URef, U512}; enum Arg { TargetPurse = 0, Amount = 1, } #[no_mangle] pub extern "C" fn call() { let target_purse: URef = runtime::get_arg(Arg::TargetPurse as u32) .unwrap_or_revert_with(ApiError::MissingArgument) .unwrap_or_revert_with(ApiError::InvalidArgument); let amount: U512 = runtime::get_arg(Arg::Amount as u32) .unwrap_or_revert_with(ApiError::MissingArgument) .unwrap_or_revert_with(ApiError::InvalidArgument); let source_purse = account::get_main_purse(); system::transfer_from_purse_to_purse(source_purse, target_purse, amount).unwrap_or_revert(); }
28.074074
96
0.711082
e90b2a0b9289e7c295ed368a9eed2eab9e0b5627
8,472
//! HIR for references to types. Paths in these are not yet resolved. They can //! be directly created from an ast::TypeRef, without further queries. use ra_syntax::ast::{self, TypeAscriptionOwner, TypeBoundsOwner}; use crate::{body::LowerCtx, path::Path}; #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] pub enum Mutability { Shared, Mut, } impl Mutability { pub fn from_mutable(mutable: bool) -> Mutability { if mutable { Mutability::Mut } else { Mutability::Shared } } pub fn as_keyword_for_ref(self) -> &'static str { match self { Mutability::Shared => "", Mutability::Mut => "mut ", } } pub fn as_keyword_for_ptr(self) -> &'static str { match self { Mutability::Shared => "const ", Mutability::Mut => "mut ", } } } #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] pub enum Rawness { RawPtr, Ref, } impl Rawness { pub fn from_raw(is_raw: bool) -> Rawness { if is_raw { Rawness::RawPtr } else { Rawness::Ref } } } /// Compare ty::Ty #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub enum TypeRef { Never, Placeholder, Tuple(Vec<TypeRef>), Path(Path), RawPtr(Box<TypeRef>, Mutability), Reference(Box<TypeRef>, Mutability), Array(Box<TypeRef> /*, Expr*/), Slice(Box<TypeRef>), /// A fn pointer. Last element of the vector is the return type. Fn(Vec<TypeRef>, bool /*varargs*/), // For ImplTrait(Vec<TypeBound>), DynTrait(Vec<TypeBound>), Error, } #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub enum TypeBound { Path(Path), // also for<> bounds // also Lifetimes Error, } impl TypeRef { /// Converts an `ast::TypeRef` to a `hir::TypeRef`. pub(crate) fn from_ast(ctx: &LowerCtx, node: ast::TypeRef) -> Self { match node { ast::TypeRef::ParenType(inner) => TypeRef::from_ast_opt(&ctx, inner.type_ref()), ast::TypeRef::TupleType(inner) => { TypeRef::Tuple(inner.fields().map(|it| TypeRef::from_ast(ctx, it)).collect()) } ast::TypeRef::NeverType(..) 
=> TypeRef::Never, ast::TypeRef::PathType(inner) => { // FIXME: Use `Path::from_src` inner .path() .and_then(|it| ctx.lower_path(it)) .map(TypeRef::Path) .unwrap_or(TypeRef::Error) } ast::TypeRef::PointerType(inner) => { let inner_ty = TypeRef::from_ast_opt(&ctx, inner.type_ref()); let mutability = Mutability::from_mutable(inner.mut_token().is_some()); TypeRef::RawPtr(Box::new(inner_ty), mutability) } ast::TypeRef::ArrayType(inner) => { TypeRef::Array(Box::new(TypeRef::from_ast_opt(&ctx, inner.type_ref()))) } ast::TypeRef::SliceType(inner) => { TypeRef::Slice(Box::new(TypeRef::from_ast_opt(&ctx, inner.type_ref()))) } ast::TypeRef::ReferenceType(inner) => { let inner_ty = TypeRef::from_ast_opt(&ctx, inner.type_ref()); let mutability = Mutability::from_mutable(inner.mut_token().is_some()); TypeRef::Reference(Box::new(inner_ty), mutability) } ast::TypeRef::PlaceholderType(_inner) => TypeRef::Placeholder, ast::TypeRef::FnPointerType(inner) => { let ret_ty = inner .ret_type() .and_then(|rt| rt.type_ref()) .map(|it| TypeRef::from_ast(ctx, it)) .unwrap_or_else(|| TypeRef::Tuple(Vec::new())); let mut is_varargs = false; let mut params = if let Some(pl) = inner.param_list() { if let Some(param) = pl.params().last() { is_varargs = param.dotdotdot_token().is_some(); } pl.params() .map(|p| p.ascribed_type()) .map(|it| TypeRef::from_ast_opt(&ctx, it)) .collect() } else { Vec::new() }; params.push(ret_ty); TypeRef::Fn(params, is_varargs) } // for types are close enough for our purposes to the inner type for now... 
ast::TypeRef::ForType(inner) => TypeRef::from_ast_opt(&ctx, inner.type_ref()), ast::TypeRef::ImplTraitType(inner) => { TypeRef::ImplTrait(type_bounds_from_ast(ctx, inner.type_bound_list())) } ast::TypeRef::DynTraitType(inner) => { TypeRef::DynTrait(type_bounds_from_ast(ctx, inner.type_bound_list())) } } } pub(crate) fn from_ast_opt(ctx: &LowerCtx, node: Option<ast::TypeRef>) -> Self { if let Some(node) = node { TypeRef::from_ast(ctx, node) } else { TypeRef::Error } } pub(crate) fn unit() -> TypeRef { TypeRef::Tuple(Vec::new()) } pub fn walk(&self, f: &mut impl FnMut(&TypeRef)) { go(self, f); fn go(type_ref: &TypeRef, f: &mut impl FnMut(&TypeRef)) { f(type_ref); match type_ref { TypeRef::Fn(types, _) | TypeRef::Tuple(types) => { types.iter().for_each(|t| go(t, f)) } TypeRef::RawPtr(type_ref, _) | TypeRef::Reference(type_ref, _) | TypeRef::Array(type_ref) | TypeRef::Slice(type_ref) => go(&type_ref, f), TypeRef::ImplTrait(bounds) | TypeRef::DynTrait(bounds) => { for bound in bounds { match bound { TypeBound::Path(path) => go_path(path, f), TypeBound::Error => (), } } } TypeRef::Path(path) => go_path(path, f), TypeRef::Never | TypeRef::Placeholder | TypeRef::Error => {} }; } fn go_path(path: &Path, f: &mut impl FnMut(&TypeRef)) { if let Some(type_ref) = path.type_anchor() { go(type_ref, f); } for segment in path.segments().iter() { if let Some(args_and_bindings) = segment.args_and_bindings { for arg in &args_and_bindings.args { let crate::path::GenericArg::Type(type_ref) = arg; go(type_ref, f); } for binding in &args_and_bindings.bindings { if let Some(type_ref) = &binding.type_ref { go(type_ref, f); } for bound in &binding.bounds { match bound { TypeBound::Path(path) => go_path(path, f), TypeBound::Error => (), } } } } } } } } pub(crate) fn type_bounds_from_ast( lower_ctx: &LowerCtx, type_bounds_opt: Option<ast::TypeBoundList>, ) -> Vec<TypeBound> { if let Some(type_bounds) = type_bounds_opt { type_bounds.bounds().map(|it| TypeBound::from_ast(lower_ctx, it)).collect() 
} else { vec![] } } impl TypeBound { pub(crate) fn from_ast(ctx: &LowerCtx, node: ast::TypeBound) -> Self { match node.kind() { ast::TypeBoundKind::PathType(path_type) => { let path = match path_type.path() { Some(p) => p, None => return TypeBound::Error, }; let path = match ctx.lower_path(path) { Some(p) => p, None => return TypeBound::Error, }; TypeBound::Path(path) } ast::TypeBoundKind::ForType(_) | ast::TypeBoundKind::Lifetime(_) => TypeBound::Error, } } pub fn as_path(&self) -> Option<&Path> { match self { TypeBound::Path(p) => Some(p), _ => None, } } }
33.888
97
0.48737
dde4bb1835b9114c180b555b0200d9fb82eabb2d
9,310
//! Intermediate representation for C/C++ enumerations. use super::super::codegen::EnumVariation; use super::context::{BindgenContext, TypeId}; use super::item::Item; use super::ty::{Type, TypeKind}; use crate::clang; use crate::ir::annotations::Annotations; use crate::parse::{ClangItemParser, ParseError}; use crate::regex_set::RegexSet; /// An enum representing custom handling that can be given to a variant. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum EnumVariantCustomBehavior { /// This variant will be a module containing constants. ModuleConstify, /// This variant will be constified, that is, forced to generate a constant. Constify, /// This variant will be hidden entirely from the resulting enum. Hide, } /// A C/C++ enumeration. #[derive(Debug)] pub struct Enum { /// The representation used for this enum; it should be an `IntKind` type or /// an alias to one. /// /// It's `None` if the enum is a forward declaration and isn't defined /// anywhere else, see `tests/headers/func_ptr_in_struct.h`. repr: Option<TypeId>, /// The different variants, with explicit values. variants: Vec<EnumVariant>, } impl Enum { /// Construct a new `Enum` with the given representation and variants. pub fn new(repr: Option<TypeId>, variants: Vec<EnumVariant>) -> Self { Enum { repr, variants } } /// Get this enumeration's representation. pub fn repr(&self) -> Option<TypeId> { self.repr } /// Get this enumeration's variants. pub fn variants(&self) -> &[EnumVariant] { &self.variants } /// Construct an enumeration from the given Clang type. 
pub fn from_ty( ty: &clang::Type, ctx: &mut BindgenContext, ) -> Result<Self, ParseError> { use clang_sys::*; debug!("Enum::from_ty {:?}", ty); if ty.kind() != CXType_Enum { return Err(ParseError::Continue); } let declaration = ty.declaration().canonical(); let repr = declaration .enum_type() .and_then(|et| Item::from_ty(&et, declaration, None, ctx).ok()); let mut variants = vec![]; let variant_ty = repr.and_then(|r| ctx.resolve_type(r).safe_canonical_type(ctx)); let is_bool = variant_ty.map_or(false, Type::is_bool); // Assume signedness since the default type by the C standard is an int. let is_signed = variant_ty.map_or(true, |ty| match *ty.kind() { TypeKind::Int(ref int_kind) => int_kind.is_signed(), ref other => { panic!("Since when enums can be non-integers? {:?}", other) } }); let type_name = ty.spelling(); let type_name = if type_name.is_empty() { None } else { Some(type_name) }; let type_name = type_name.as_ref().map(String::as_str); let definition = declaration.definition().unwrap_or(declaration); definition.visit(|cursor| { if cursor.kind() == CXCursor_EnumConstantDecl { let value = if is_bool { cursor.enum_val_boolean().map(EnumVariantValue::Boolean) } else if is_signed { cursor.enum_val_signed().map(EnumVariantValue::Signed) } else { cursor.enum_val_unsigned().map(EnumVariantValue::Unsigned) }; if let Some(val) = value { let name = cursor.spelling(); let annotations = Annotations::new(&cursor); let custom_behavior = ctx .parse_callbacks() .and_then(|callbacks| { callbacks .enum_variant_behavior(type_name, &name, val) }) .or_else(|| { let annotations = annotations.as_ref()?; if annotations.hide() { Some(EnumVariantCustomBehavior::Hide) } else if annotations.constify_enum_variant() { Some(EnumVariantCustomBehavior::Constify) } else { None } }); let name = ctx .parse_callbacks() .and_then(|callbacks| { callbacks.enum_variant_name(type_name, &name, val) }) .or_else(|| { annotations .as_ref()? .use_instead_of()? 
.last() .cloned() }) .unwrap_or(name); let comment = cursor.raw_comment(); variants.push(EnumVariant::new( name, comment, val, custom_behavior, )); } } CXChildVisit_Continue }); Ok(Enum::new(repr, variants)) } fn is_matching_enum( &self, ctx: &BindgenContext, enums: &RegexSet, item: &Item, ) -> bool { let path = item.path_for_whitelisting(ctx); let enum_ty = item.expect_type(); if enums.matches(&path[1..].join("::")) { return true; } // Test the variants if the enum is anonymous. if enum_ty.name().is_some() { return false; } self.variants().iter().any(|v| enums.matches(&v.name())) } /// Returns the final representation of the enum. pub fn computed_enum_variation( &self, ctx: &BindgenContext, item: &Item, ) -> EnumVariation { // ModuleConsts has higher precedence before Rust in order to avoid // problems with overlapping match patterns. if self.is_matching_enum( ctx, &ctx.options().constified_enum_modules, item, ) { EnumVariation::ModuleConsts } else if self.is_matching_enum( ctx, &ctx.options().bitfield_enums, item, ) { EnumVariation::NewType { is_bitfield: true } } else if self.is_matching_enum(ctx, &ctx.options().newtype_enums, item) { EnumVariation::NewType { is_bitfield: false } } else if self.is_matching_enum( ctx, &ctx.options().rustified_enums, item, ) { EnumVariation::Rust { non_exhaustive: false, } } else if self.is_matching_enum( ctx, &ctx.options().rustified_non_exhaustive_enums, item, ) { EnumVariation::Rust { non_exhaustive: true, } } else if self.is_matching_enum( ctx, &ctx.options().constified_enums, item, ) { EnumVariation::Consts } else { ctx.options().default_enum_style } } } /// A single enum variant, to be contained only in an enum. #[derive(Debug)] pub struct EnumVariant { /// The name of the variant. name: String, /// An optional doc comment. comment: Option<String>, /// The integer value of the variant. val: EnumVariantValue, /// The custom behavior this variant may have, if any. 
custom_behavior: Option<EnumVariantCustomBehavior>, } /// A constant value assigned to an enumeration variant. #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum EnumVariantValue { /// A boolean constant. Boolean(bool), /// A signed constant. Signed(i64), /// An unsigned constant. Unsigned(u64), } impl EnumVariant { /// Construct a new enumeration variant from the given parts. pub fn new( name: String, comment: Option<String>, val: EnumVariantValue, custom_behavior: Option<EnumVariantCustomBehavior>, ) -> Self { EnumVariant { name, comment, val, custom_behavior, } } /// Get this variant's name. pub fn name(&self) -> &str { &self.name } /// Get this variant's value. pub fn val(&self) -> EnumVariantValue { self.val } /// Get this variant's documentation. pub fn comment(&self) -> Option<&str> { self.comment.as_ref().map(|s| &**s) } /// Returns whether this variant should be enforced to be a constant by code /// generation. pub fn force_constification(&self) -> bool { self.custom_behavior .map_or(false, |b| b == EnumVariantCustomBehavior::Constify) } /// Returns whether the current variant should be hidden completely from the /// resulting rust enum. pub fn hidden(&self) -> bool { self.custom_behavior .map_or(false, |b| b == EnumVariantCustomBehavior::Hide) } }
31.559322
80
0.52986
4860dd1810c48b56852a5ca044f99f6b3379510a
13,609
use crate::backend::audio::SoundHandle; use crate::character::Character; use crate::display_object::{Bitmap, TDisplayObject}; use crate::font::{Font, FontDescriptor}; use crate::prelude::*; use crate::property_map::PropertyMap; use crate::tag_utils::{SwfMovie, SwfSlice}; use crate::vminterface::AvmType; use crate::{avm1::function::FunctionObject, avm2::Domain as Avm2Domain}; use gc_arena::{Collect, Gc, GcCell, MutationContext}; use std::collections::HashMap; use std::sync::{Arc, Weak}; use swf::{CharacterId, TagCode}; use weak_table::PtrWeakKeyHashMap; /// Boxed error alias. type Error = Box<dyn std::error::Error>; /// The mappings between symbol names and constructors registered /// with `Object.registerClass`. #[derive(Collect)] #[collect(no_drop)] pub struct Avm1ConstructorRegistry<'gc> { symbol_map: GcCell<'gc, PropertyMap<FunctionObject<'gc>>>, is_case_sensitive: bool, } impl<'gc> Avm1ConstructorRegistry<'gc> { pub fn new(is_case_sensitive: bool, gc_context: MutationContext<'gc, '_>) -> Self { Self { symbol_map: GcCell::allocate(gc_context, PropertyMap::new()), is_case_sensitive, } } pub fn get(&self, symbol: &str) -> Option<FunctionObject<'gc>> { self.symbol_map .read() .get(symbol, self.is_case_sensitive) .copied() } pub fn set( &self, symbol: &str, constructor: Option<FunctionObject<'gc>>, gc_context: MutationContext<'gc, '_>, ) { let mut map = self.symbol_map.write(gc_context); if let Some(ctor) = constructor { map.insert(symbol, ctor, self.is_case_sensitive); } else { map.remove(symbol, self.is_case_sensitive); }; } } /// Symbol library for a single given SWF. #[derive(Collect)] #[collect(no_drop)] pub struct MovieLibrary<'gc> { characters: HashMap<CharacterId, Character<'gc>>, export_characters: PropertyMap<Character<'gc>>, jpeg_tables: Option<Vec<u8>>, fonts: HashMap<FontDescriptor, Font<'gc>>, avm_type: AvmType, avm2_domain: Option<Avm2Domain<'gc>>, /// Shared reference to the constructor registry used for this movie. 
/// Should be `None` if this is an AVM2 movie. avm1_constructor_registry: Option<Gc<'gc, Avm1ConstructorRegistry<'gc>>>, } impl<'gc> MovieLibrary<'gc> { pub fn new(avm_type: AvmType) -> Self { MovieLibrary { characters: HashMap::new(), export_characters: PropertyMap::new(), jpeg_tables: None, fonts: HashMap::new(), avm_type, avm2_domain: None, avm1_constructor_registry: None, } } pub fn register_character(&mut self, id: CharacterId, character: Character<'gc>) { // TODO(Herschel): What is the behavior if id already exists? if !self.contains_character(id) { if let Character::Font(font) = character.clone() { self.fonts.insert(font.descriptor(), font); } self.characters.insert(id, character); } else { log::error!("Character ID collision: Tried to register ID {} twice", id); } } /// Registers an export name for a given character ID. /// This character will then be instantiable from AVM1. pub fn register_export( &mut self, id: CharacterId, export_name: &str, ) -> Option<&Character<'gc>> { if let Some(character) = self.characters.get(&id) { self.export_characters .insert(export_name, character.clone(), false); Some(character) } else { log::warn!( "Can't register export {}: Character ID {} doesn't exist", export_name, id, ); None } } pub fn contains_character(&self, id: CharacterId) -> bool { self.characters.contains_key(&id) } pub fn character_by_id(&self, id: CharacterId) -> Option<&Character<'gc>> { self.characters.get(&id) } pub fn character_by_export_name(&self, name: &str) -> Option<&Character<'gc>> { self.export_characters.get(name, false) } pub fn avm1_constructor_registry(&self) -> Option<Gc<'gc, Avm1ConstructorRegistry<'gc>>> { self.avm1_constructor_registry } /// Instantiates the library item with the given character ID into a display object. /// The object must then be post-instantiated before being used. 
pub fn instantiate_by_id( &self, id: CharacterId, gc_context: MutationContext<'gc, '_>, ) -> Result<DisplayObject<'gc>, Box<dyn std::error::Error>> { if let Some(character) = self.characters.get(&id) { self.instantiate_display_object(character, gc_context) } else { log::error!("Tried to instantiate non-registered character ID {}", id); Err("Character id doesn't exist".into()) } } /// Instantiates the library item with the given export name into a display object. /// The object must then be post-instantiated before being used. pub fn instantiate_by_export_name( &self, export_name: &str, gc_context: MutationContext<'gc, '_>, ) -> Result<DisplayObject<'gc>, Box<dyn std::error::Error>> { if let Some(character) = self.export_characters.get(export_name, false) { self.instantiate_display_object(character, gc_context) } else { log::error!( "Tried to instantiate non-registered character {}", export_name ); Err("Character id doesn't exist".into()) } } /// Instantiates the given character into a display object. /// The object must then be post-instantiated before being used. 
fn instantiate_display_object( &self, character: &Character<'gc>, gc_context: MutationContext<'gc, '_>, ) -> Result<DisplayObject<'gc>, Box<dyn std::error::Error>> { match character { Character::Bitmap(bitmap) => Ok(bitmap.instantiate(gc_context)), Character::EditText(edit_text) => Ok(edit_text.instantiate(gc_context)), Character::Graphic(graphic) => Ok(graphic.instantiate(gc_context)), Character::MorphShape(morph_shape) => Ok(morph_shape.instantiate(gc_context)), Character::MovieClip(movie_clip) => Ok(movie_clip.instantiate(gc_context)), Character::Button(button) => Ok(button.instantiate(gc_context)), Character::Text(text) => Ok(text.instantiate(gc_context)), _ => Err("Not a DisplayObject".into()), } } pub fn get_bitmap(&self, id: CharacterId) -> Option<Bitmap<'gc>> { if let Some(&Character::Bitmap(bitmap)) = self.characters.get(&id) { Some(bitmap) } else { None } } pub fn get_font(&self, id: CharacterId) -> Option<Font<'gc>> { if let Some(&Character::Font(font)) = self.characters.get(&id) { Some(font) } else { None } } /// Find a font by it's name and parameters. pub fn get_font_by_name( &self, name: &str, is_bold: bool, is_italic: bool, ) -> Option<Font<'gc>> { let descriptor = FontDescriptor::from_parts(name, is_bold, is_italic); self.fonts.get(&descriptor).copied() } pub fn get_sound(&self, id: CharacterId) -> Option<SoundHandle> { if let Some(Character::Sound(sound)) = self.characters.get(&id) { Some(*sound) } else { None } } pub fn set_jpeg_tables(&mut self, data: Vec<u8>) { if self.jpeg_tables.is_some() { // SWF spec says there should only be one JPEGTables tag. // TODO: What is the behavior when there are multiples? log::warn!("SWF contains multiple JPEGTables tags"); return; } // Some SWFs have a JPEGTables tag with 0 length; ignore these. // (Does this happen when there is only a single DefineBits tag?) 
self.jpeg_tables = if data.is_empty() { None } else { Some(crate::backend::render::remove_invalid_jpeg_data(&data[..]).to_vec()) } } pub fn jpeg_tables(&self) -> Option<&[u8]> { self.jpeg_tables.as_ref().map(|data| &data[..]) } /// Check if the current movie's VM type is compatible with running code on /// a particular VM. If it is not, then this yields an error. pub fn check_avm_type(&mut self, new_type: AvmType) -> Result<(), Error> { if self.avm_type != new_type { return Err(format!( "Blocked attempt to run {:?} code on an {:?} movie.", new_type, self.avm_type ) .into()); } self.avm_type = new_type; Ok(()) } /// Get the VM type of this movie. pub fn avm_type(&self) -> AvmType { self.avm_type } pub fn set_avm2_domain(&mut self, avm2_domain: Avm2Domain<'gc>) { self.avm2_domain = Some(avm2_domain); } /// Get the AVM2 domain this movie runs under. /// /// Note that the presence of an AVM2 domain does *not* indicate that this /// movie provides AVM2 code. For example, a movie may have been loaded by /// AVM2 code into a particular domain, even though it turned out to be /// an AVM1 movie, and thus this domain is unused. pub fn avm2_domain(&self) -> Avm2Domain<'gc> { self.avm2_domain.unwrap() } } /// Symbol library for multiple movies. pub struct Library<'gc> { /// All the movie libraries. movie_libraries: PtrWeakKeyHashMap<Weak<SwfMovie>, MovieLibrary<'gc>>, /// The embedded device font. 
device_font: Option<Font<'gc>>, constructor_registry_case_insensitive: Gc<'gc, Avm1ConstructorRegistry<'gc>>, constructor_registry_case_sensitive: Gc<'gc, Avm1ConstructorRegistry<'gc>>, } unsafe impl<'gc> gc_arena::Collect for Library<'gc> { #[inline] fn trace(&self, cc: gc_arena::CollectionContext) { for (_, val) in self.movie_libraries.iter() { val.trace(cc); } self.device_font.trace(cc); self.constructor_registry_case_insensitive.trace(cc); self.constructor_registry_case_sensitive.trace(cc); } } impl<'gc> Library<'gc> { pub fn empty(gc_context: MutationContext<'gc, '_>) -> Self { Self { movie_libraries: PtrWeakKeyHashMap::new(), device_font: None, constructor_registry_case_insensitive: Gc::allocate( gc_context, Avm1ConstructorRegistry::new(false, gc_context), ), constructor_registry_case_sensitive: Gc::allocate( gc_context, Avm1ConstructorRegistry::new(true, gc_context), ), } } pub fn library_for_movie(&self, movie: Arc<SwfMovie>) -> Option<&MovieLibrary<'gc>> { self.movie_libraries.get(&movie) } pub fn library_for_movie_mut(&mut self, movie: Arc<SwfMovie>) -> &mut MovieLibrary<'gc> { if !self.movie_libraries.contains_key(&movie) { let slice = SwfSlice::from(movie.clone()); let mut reader = slice.read_from(0); let movie_version = movie.header().version; let vm_type = if movie_version > 8 { match reader.read_tag_code_and_length() { Ok((tag_code, _tag_len)) if TagCode::from_u16(tag_code) == Some(TagCode::FileAttributes) => { match reader.read_file_attributes() { Ok(attributes) if attributes.is_action_script_3 => AvmType::Avm2, Ok(_) => AvmType::Avm1, Err(e) => { log::error!("Got {} when reading FileAttributes", e); AvmType::Avm1 } } } // SWF defaults to AVM1 if FileAttributes is not the first tag. 
_ => AvmType::Avm1, } } else { AvmType::Avm1 }; let mut movie_library = MovieLibrary::new(vm_type); if vm_type == AvmType::Avm1 { movie_library.avm1_constructor_registry = Some(self.get_avm1_constructor_registry(movie_version)); } self.movie_libraries.insert(movie.clone(), movie_library); }; self.movie_libraries.get_mut(&movie).unwrap() } /// Returns the device font for use when a font is unavailable. pub fn device_font(&self) -> Option<Font<'gc>> { self.device_font } /// Sets the device font. pub fn set_device_font(&mut self, font: Option<Font<'gc>>) { self.device_font = font; } /// Gets the constructor registry to use for the given SWF version. /// Because SWFs v6 and v7+ use different case-sensitivity rules, Flash /// keeps two separate registries, one case-sensitive, the other not. fn get_avm1_constructor_registry( &mut self, swf_version: u8, ) -> Gc<'gc, Avm1ConstructorRegistry<'gc>> { if swf_version < 7 { self.constructor_registry_case_insensitive } else { self.constructor_registry_case_sensitive } } }
35.256477
94
0.5913
0ac641588a41917487bc06f7c2e92aa42385214f
3,040
pub use super::*; use std::rc::Rc; #[derive(Clone, Debug)] pub enum ExprPat { Const(f64), /// Pattern matching a variable VarPat(String), /// Pattern matching a constant ConstPat(String), /// Pattern matching any expression AnyPat(String), BinaryExpr(BinaryExpr<Self>), UnaryExpr(UnaryExpr<Self>), Parend(Rc<Self>), Braced(Rc<Self>), } impl Grammar for ExprPat {} impl Grammar for Rc<ExprPat> {} impl Expression for ExprPat { #[inline] fn is_const(&self) -> bool { matches!(self, Self::Const(_)) } } // TODO: We can't derive this because `f64` doesn't implement `Eq`. // This should be fixed by moving to a arbitrary-precision numeric type. impl Eq for ExprPat {} impl PartialEq for ExprPat { fn eq(&self, other: &ExprPat) -> bool { use ExprPat::*; match (self, other) { (Const(x), Const(y)) => (x - y).abs() < std::f64::EPSILON, (VarPat(x), VarPat(y)) => x == y, (ConstPat(x), ConstPat(y)) => x == y, (AnyPat(x), AnyPat(y)) => x == y, (BinaryExpr(x), BinaryExpr(y)) => x == y, (UnaryExpr(x), UnaryExpr(y)) => x == y, (Parend(x), Parend(y)) => x == y, (Braced(x), Braced(y)) => x == y, _ => false, } } } // TODO: We can do better than hashing to a string as well, but we'll save that til we have an // arbitrary-precision numeric type. impl core::hash::Hash for ExprPat { fn hash<H: core::hash::Hasher>(&self, state: &mut H) { use ExprPat::*; match self { // TODO: We can do better than hashing to a string as well, but we'll save that til we // have an arbitrary-precision numeric type. 
Const(f) => state.write(f.to_string().as_bytes()), VarPat(v) => v.hash(state), ConstPat(c) => c.hash(state), AnyPat(a) => a.hash(state), BinaryExpr(e) => e.hash(state), UnaryExpr(e) => e.hash(state), e @ Parend(_) => e.to_string().hash(state), e @ Braced(_) => e.to_string().hash(state), } } } impl From<BinaryExpr<Self>> for ExprPat { fn from(binary_expr: BinaryExpr<Self>) -> Self { Self::BinaryExpr(binary_expr) } } impl From<UnaryExpr<Self>> for ExprPat { fn from(unary_expr: UnaryExpr<Self>) -> Self { Self::UnaryExpr(unary_expr) } } impl fmt::Display for ExprPat { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use ExprPat::*; write!( f, "{}", match self { Const(num) => num.to_string(), VarPat(var) | ConstPat(var) | AnyPat(var) => var.to_string(), BinaryExpr(binary_expr) => binary_expr.to_string(), UnaryExpr(unary_expr) => unary_expr.to_string(), Parend(expr) => format!("({})", expr.to_string()), Braced(expr) => format!("[{}]", expr.to_string()), } ) } }
31.020408
98
0.536513
61b9a24f2dbe9581bf670b9e16736bdbc382d087
3,882
extern crate rapier2d as rapier; // For the debug UI. use bevy::prelude::*; use bevy_rapier2d::prelude::*; use bevy::render::pass::ClearColor; use nalgebra::Isometry2; use rapier2d::pipeline::PhysicsPipeline; use ui::DebugUiPlugin; #[path = "../../src_debug_ui/mod.rs"] mod ui; fn main() { App::new() .insert_resource(ClearColor(Color::rgb( 0xF9 as f32 / 255.0, 0xF9 as f32 / 255.0, 0xFF as f32 / 255.0, ))) .insert_resource(Msaa::default()) .add_plugins(DefaultPlugins) .add_plugin(bevy_winit::WinitPlugin::default()) .add_plugin(bevy_wgpu::WgpuPlugin::default()) .add_plugin(RapierPhysicsPlugin::<NoUserData>::default()) .add_plugin(RapierRenderPlugin) .add_plugin(DebugUiPlugin) .add_startup_system(setup_graphics.system()) .add_startup_system(setup_physics.system()) .add_startup_system(enable_physics_profiling.system()) .run(); } fn enable_physics_profiling(mut pipeline: ResMut<PhysicsPipeline>) { pipeline.counters.enable() } fn setup_graphics(mut commands: Commands, mut configuration: ResMut<RapierConfiguration>) { configuration.scale = 10.0; let mut camera = OrthographicCameraBundle::new_2d(); camera.transform = Transform::from_translation(Vec3::new(0.0, 200.0, 0.0)); commands.spawn_bundle(PointLightBundle { transform: Transform::from_translation(Vec3::new(1000.0, 10.0, 2000.0)), point_light: PointLight { intensity: 100_000_000_.0, range: 6000.0, ..Default::default() }, ..Default::default() }); commands.spawn_bundle(camera); } pub fn setup_physics(mut commands: Commands) { /* * Ground */ let ground_size = 25.0; let collider = ColliderBundle { shape: ColliderShape::cuboid(ground_size, 1.0), ..Default::default() }; commands .spawn_bundle(collider) .insert(ColliderDebugRender::default()) .insert(ColliderPositionSync::Discrete); let collider = ColliderBundle { shape: ColliderShape::cuboid(ground_size * 2.0, 1.2), position: Isometry2::new( [ground_size, ground_size * 2.0].into(), std::f32::consts::FRAC_PI_2, ) .into(), ..Default::default() }; commands 
.spawn_bundle(collider) .insert(ColliderDebugRender::default()) .insert(ColliderPositionSync::Discrete); let collider = ColliderBundle { shape: ColliderShape::cuboid(ground_size * 2.0, 1.2), position: Isometry2::new( [-ground_size, ground_size * 2.0].into(), std::f32::consts::FRAC_PI_2, ) .into(), ..Default::default() }; commands .spawn_bundle(collider) .insert(ColliderDebugRender::default()) .insert(ColliderPositionSync::Discrete); /* * Create the cubes */ let num = 20; let rad = 0.5; let shift = rad * 2.0; let centerx = shift * (num as f32) / 2.0; let centery = shift / 2.0; let mut color = 0; for i in 0..num { for j in 0usize..num * 5 { let x = i as f32 * shift - centerx; let y = j as f32 * shift + centery + 2.0; color += 1; // Build the rigid body. let body = RigidBodyBundle { position: [x, y].into(), ..Default::default() }; let collider = ColliderBundle { shape: ColliderShape::cuboid(rad, rad), ..Default::default() }; commands .spawn_bundle(body) .insert_bundle(collider) .insert(ColliderDebugRender::with_id(color)) .insert(ColliderPositionSync::Discrete); } } }
29.409091
91
0.580113
f46bb610c6b3ca233122bbe9a6d9af27ec33f61a
8,291
#[doc = "Register `ANACTRL` reader"] pub struct R(crate::R<ANACTRL_SPEC>); impl core::ops::Deref for R { type Target = crate::R<ANACTRL_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<ANACTRL_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<ANACTRL_SPEC>) -> Self { R(reader) } } #[doc = "Register `ANACTRL` writer"] pub struct W(crate::W<ANACTRL_SPEC>); impl core::ops::Deref for W { type Target = crate::W<ANACTRL_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<ANACTRL_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<ANACTRL_SPEC>) -> Self { W(writer) } } #[doc = "Field `LVI_EN` reader - Vow voltage detector enable bit."] pub struct LVI_EN_R(crate::FieldReader<bool, bool>); impl LVI_EN_R { #[inline(always)] pub(crate) fn new(bits: bool) -> Self { LVI_EN_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for LVI_EN_R { type Target = crate::FieldReader<bool, bool>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `LVI_EN` writer - Vow voltage detector enable bit."] pub struct LVI_EN_W<'a> { w: &'a mut W, } impl<'a> LVI_EN_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u32 & 0x01) << 1); self.w } } #[doc = "Field `PFD_CLK_SEL` reader - For normal USB operation, this bit field must remain at value 2'b00."] pub struct PFD_CLK_SEL_R(crate::FieldReader<u8, u8>); impl PFD_CLK_SEL_R { #[inline(always)] pub(crate) fn new(bits: u8) -> Self { PFD_CLK_SEL_R(crate::FieldReader::new(bits)) } } impl 
core::ops::Deref for PFD_CLK_SEL_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `PFD_CLK_SEL` writer - For normal USB operation, this bit field must remain at value 2'b00."] pub struct PFD_CLK_SEL_W<'a> { w: &'a mut W, } impl<'a> PFD_CLK_SEL_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x03 << 2)) | ((value as u32 & 0x03) << 2); self.w } } #[doc = "Setting this field to 1'b1 will enable the 15kohm pulldown resistors on both USB_DP and USB_DM pins\n\nValue on reset: 1"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum DEV_PULLDOWN_A { #[doc = "0: The 15kohm nominal pulldowns on the USB_DP and USB_DM pinsare disabled in device mode."] VALUE0 = 0, #[doc = "1: The 15kohm nominal pulldowns on the USB_DP and USB_DM pinsare enabled in device mode."] VALUE1 = 1, } impl From<DEV_PULLDOWN_A> for bool { #[inline(always)] fn from(variant: DEV_PULLDOWN_A) -> Self { variant as u8 != 0 } } #[doc = "Field `DEV_PULLDOWN` reader - Setting this field to 1'b1 will enable the 15kohm pulldown resistors on both USB_DP and USB_DM pins"] pub struct DEV_PULLDOWN_R(crate::FieldReader<bool, DEV_PULLDOWN_A>); impl DEV_PULLDOWN_R { #[inline(always)] pub(crate) fn new(bits: bool) -> Self { DEV_PULLDOWN_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> DEV_PULLDOWN_A { match self.bits { false => DEV_PULLDOWN_A::VALUE0, true => DEV_PULLDOWN_A::VALUE1, } } #[doc = "Checks if the value of the field is `VALUE0`"] #[inline(always)] pub fn is_value0(&self) -> bool { **self == DEV_PULLDOWN_A::VALUE0 } #[doc = "Checks if the value of the field is `VALUE1`"] #[inline(always)] pub fn is_value1(&self) -> bool { **self == DEV_PULLDOWN_A::VALUE1 } } impl core::ops::Deref for DEV_PULLDOWN_R { type Target = crate::FieldReader<bool, DEV_PULLDOWN_A>; 
#[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `DEV_PULLDOWN` writer - Setting this field to 1'b1 will enable the 15kohm pulldown resistors on both USB_DP and USB_DM pins"] pub struct DEV_PULLDOWN_W<'a> { w: &'a mut W, } impl<'a> DEV_PULLDOWN_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: DEV_PULLDOWN_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "The 15kohm nominal pulldowns on the USB_DP and USB_DM pinsare disabled in device mode."] #[inline(always)] pub fn value0(self) -> &'a mut W { self.variant(DEV_PULLDOWN_A::VALUE0) } #[doc = "The 15kohm nominal pulldowns on the USB_DP and USB_DM pinsare enabled in device mode."] #[inline(always)] pub fn value1(self) -> &'a mut W { self.variant(DEV_PULLDOWN_A::VALUE1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 10)) | ((value as u32 & 0x01) << 10); self.w } } impl R { #[doc = "Bit 1 - Vow voltage detector enable bit."] #[inline(always)] pub fn lvi_en(&self) -> LVI_EN_R { LVI_EN_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bits 2:3 - For normal USB operation, this bit field must remain at value 2'b00."] #[inline(always)] pub fn pfd_clk_sel(&self) -> PFD_CLK_SEL_R { PFD_CLK_SEL_R::new(((self.bits >> 2) & 0x03) as u8) } #[doc = "Bit 10 - Setting this field to 1'b1 will enable the 15kohm pulldown resistors on both USB_DP and USB_DM pins"] #[inline(always)] pub fn dev_pulldown(&self) -> DEV_PULLDOWN_R { DEV_PULLDOWN_R::new(((self.bits >> 10) & 0x01) != 0) } } impl W { #[doc = "Bit 1 - Vow voltage detector enable bit."] #[inline(always)] pub fn lvi_en(&mut self) -> LVI_EN_W { LVI_EN_W { w: self } } #[doc = "Bits 2:3 - For normal USB 
operation, this bit field must remain at value 2'b00."] #[inline(always)] pub fn pfd_clk_sel(&mut self) -> PFD_CLK_SEL_W { PFD_CLK_SEL_W { w: self } } #[doc = "Bit 10 - Setting this field to 1'b1 will enable the 15kohm pulldown resistors on both USB_DP and USB_DM pins"] #[inline(always)] pub fn dev_pulldown(&mut self) -> DEV_PULLDOWN_W { DEV_PULLDOWN_W { w: self } } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "USB PHY Analog Control Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [anactrl](index.html) module"] pub struct ANACTRL_SPEC; impl crate::RegisterSpec for ANACTRL_SPEC { type Ux = u32; } #[doc = "`read()` method returns [anactrl::R](R) reader structure"] impl crate::Readable for ANACTRL_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [anactrl::W](W) writer structure"] impl crate::Writable for ANACTRL_SPEC { type Writer = W; } #[doc = "`reset()` method sets ANACTRL to value 0x0a00_0402"] impl crate::Resettable for ANACTRL_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0x0a00_0402 } }
33.840816
419
0.610662
0e711673de80b8b83e4ec69d7080d3e95b12cf3c
24,828
use std::convert::TryFrom; use std::os::raw::c_char; use std::marker::PhantomData; use ttf_parser::{ GlyphId, GlyphPosSubTable, ScriptIndex, LanguageIndex, FeatureIndex, FeatureVariationIndex, }; use crate::ffi; use crate::common::f32_bound; use crate::{Tag, Variation}; #[derive(Debug)] struct Blob<'a> { ptr: *mut ffi::hb_blob_t, marker: PhantomData<&'a [u8]>, } impl<'a> Blob<'a> { fn with_bytes(bytes: &'a [u8]) -> Blob<'a> { unsafe { let hb_blob = ffi::hb_blob_create( bytes.as_ptr() as *const _, bytes.len() as u32, ffi::HB_MEMORY_MODE_READONLY, std::ptr::null_mut(), None, ); Blob { ptr: hb_blob, marker: PhantomData, } } } fn as_ptr(&self) -> *mut ffi::hb_blob_t { self.ptr } } impl<'a> Drop for Blob<'a> { fn drop(&mut self) { unsafe { ffi::hb_blob_destroy(self.ptr); } } } /// A wrapper around `hb_face_t`. /// /// Font face is objects represent a single face in a font family. More /// exactly, a font face represents a single face in a binary font file. Font /// faces are typically built from a binary blob and a face index. Font faces /// are used to create fonts. #[derive(Debug)] pub struct Face<'a> { ptr: *mut ffi::hb_face_t, blob: Blob<'a>, ttf: *const ttf_parser::Font<'a>, } impl<'a> Face<'a> { /// Creates a new `Face` from the data. pub fn new(data: &'a [u8], index: u32) -> Option<Face<'a>> { unsafe { let ttf = Box::new(ttf_parser::Font::from_data(data, index)?); let ttf = Box::into_raw(ttf); let blob = Blob::with_bytes(data); Some(Face { ptr: ffi::hb_face_create(blob.as_ptr(), ttf as *const _, index), blob, ttf, }) } } pub(crate) fn as_ptr(&self) -> *mut ffi::hb_face_t { self.ptr } /// Returns face's UPEM. pub fn upem(&self) -> u32 { unsafe { ffi::hb_face_get_upem(self.ptr) } } /// Sets face's UPEM. 
pub fn set_upem(&mut self, upem: u32) { unsafe { ffi::hb_face_set_upem(self.ptr, upem) }; } } impl<'a> Drop for Face<'a> { fn drop(&mut self) { unsafe { Box::from_raw(self.ttf as *mut ttf_parser::Font<'a>); ffi::hb_face_destroy(self.ptr); } } } /// A type representing a single font (i.e. a specific combination of typeface and typesize). #[derive(Debug)] pub struct Font<'a> { ptr: *mut ffi::hb_font_t, face: Face<'a>, } impl<'a> Font<'a> { /// Creates a new font from the specified `Face`. pub fn new(face: Face<'a>) -> Self { unsafe { Font { ptr: ffi::hb_font_create(face.as_ptr(), face.ttf as *const _), face, } } } pub(crate) fn ttf_parser(&self) -> &ttf_parser::Font { unsafe { &*(self.face.ttf as *const ttf_parser::Font) } } pub(crate) fn font_ptr(&self) -> *const ttf_parser::Font { self.face.ttf } pub(crate) fn from_ptr(font: *const ffi::hb_font_t) -> &'static Font<'static> { unsafe { &*(font as *const Font) } } pub(crate) fn as_ptr(&self) -> *mut ffi::hb_font_t { self.ptr } /// Returns the EM scale of the font. pub fn scale(&self) -> (i32, i32) { let mut result = (0i32, 0i32); unsafe { ffi::hb_font_get_scale(self.ptr, &mut result.0, &mut result.1) }; result } /// Sets the EM scale of the font. pub fn set_scale(&mut self, x: i32, y: i32) { unsafe { ffi::hb_font_set_scale(self.ptr, x, y) }; } /// Returns font's PPEM. pub fn ppem(&self) -> (u32, u32) { let mut result = (0u32, 0u32); unsafe { ffi::hb_font_get_ppem(self.ptr, &mut result.0, &mut result.1) }; result } /// Set font's PPEM. pub fn set_ppem(&mut self, x: u32, y: u32) { unsafe { ffi::hb_font_set_ppem(self.ptr, x, y) }; } /// Sets *point size* of the font. /// /// Set to 0 to unset. /// /// There are 72 points in an inch. pub fn set_ptem(&mut self, ptem: f32) { unsafe { ffi::hb_font_set_ptem(self.ptr, ptem) }; } /// Sets a font variations. 
pub fn set_variations(&mut self, variations: &[Variation]) { let ttf = unsafe { &*self.face.ttf }; let coords_len = try_opt!(ttf.variation_axes_count()).get() as usize; let mut coords = vec![0; coords_len]; for variation in variations { if let Some(axis) = ttf.variation_axis(variation.tag) { let mut v = f32_bound(axis.min_value, variation.value, axis.max_value); if v == axis.default_value { v = 0.0; } else if v < axis.default_value { v = (v - axis.default_value) / (axis.default_value - axis.min_value); } else { v = (v - axis.default_value) / (axis.max_value - axis.default_value) } coords[axis.index as usize] = (v * 16384.0).round() as i32; } } let _ = ttf.map_variation_coordinates(&mut coords); unsafe { ffi::hb_font_set_variations( self.ptr, coords.as_ptr() as *mut _, coords.len() as u32, ) } } } impl<'a> Drop for Font<'a> { fn drop(&mut self) { unsafe { ffi::hb_font_destroy(self.ptr); } } } pub(crate) fn ttf_parser_from_raw(ttf_parser_data: *const ffi::rb_ttf_parser_t) -> &'static ttf_parser::Font<'static> { unsafe { &*(ttf_parser_data as *const ttf_parser::Font) } } #[no_mangle] pub extern "C" fn rb_ot_get_nominal_glyph(ttf_parser_data: *const ffi::rb_ttf_parser_t, c: u32, glyph: *mut u32) -> i32 { match ttf_parser_from_raw(ttf_parser_data).glyph_index(char::try_from(c).unwrap()) { Some(g) => unsafe { *glyph = g.0 as u32; 1 } _ => 0, } } #[no_mangle] pub extern "C" fn rb_ot_get_variation_glyph(ttf_parser_data: *const ffi::rb_ttf_parser_t, c: u32, variant: u32, glyph: *mut u32) -> i32 { let font = ttf_parser_from_raw(ttf_parser_data); match font.glyph_variation_index(char::try_from(c).unwrap(), char::try_from(variant).unwrap()) { Some(g) => unsafe { *glyph = g.0 as u32; 1 } _ => 0, } } #[no_mangle] pub extern "C" fn rb_ot_get_glyph_bbox(ttf_parser_data: *const ffi::rb_ttf_parser_t, glyph: u32, extents: *mut ffi::hb_glyph_bbox_t) -> i32 { let font = ttf_parser_from_raw(ttf_parser_data); match font.glyph_bounding_box(GlyphId(u16::try_from(glyph).unwrap())) { 
Some(bbox) => unsafe { (*extents).x_min = bbox.x_min; (*extents).y_min = bbox.y_min; (*extents).x_max = bbox.x_max; (*extents).y_max = bbox.y_max; 1 } _ => 0, } } #[no_mangle] pub extern "C" fn rb_ot_get_glyph_name(ttf_parser_data: *const ffi::rb_ttf_parser_t, glyph: u32, mut raw_name: *mut c_char, len: u32) -> i32 { assert_ne!(len, 0); let font = ttf_parser_from_raw(ttf_parser_data); match font.glyph_name(GlyphId(u16::try_from(glyph).unwrap())) { Some(name) => unsafe { let len = std::cmp::min(name.len(), len as usize - 1); for b in &name.as_bytes()[0..len] { *raw_name = *b as c_char; raw_name = raw_name.offset(1); } *raw_name = b'\0' as c_char; 1 } _ => 0, } } #[no_mangle] pub extern "C" fn rb_ot_layout_has_glyph_classes(ttf_parser_data: *const ffi::rb_ttf_parser_t) -> i32 { ttf_parser_from_raw(ttf_parser_data).has_glyph_classes() as i32 } #[no_mangle] pub extern "C" fn rb_ot_get_glyph_class(ttf_parser_data: *const ffi::rb_ttf_parser_t, glyph: u32) -> u32 { match ttf_parser_from_raw(ttf_parser_data).glyph_class(GlyphId(u16::try_from(glyph).unwrap())) { Some(c) => c as u32, _ => 0, } } #[no_mangle] pub extern "C" fn rb_ot_get_mark_attachment_class(ttf_parser_data: *const ffi::rb_ttf_parser_t, glyph: u32) -> u32 { let font = ttf_parser_from_raw(ttf_parser_data); font.glyph_mark_attachment_class(GlyphId(u16::try_from(glyph).unwrap())).0 as u32 } #[no_mangle] pub extern "C" fn rb_ot_is_mark_glyph(ttf_parser_data: *const ffi::rb_ttf_parser_t, set_index: u32, glyph: u32) -> i32 { let font = ttf_parser_from_raw(ttf_parser_data); font.is_mark_glyph(GlyphId(u16::try_from(glyph).unwrap()), Some(set_index as u16)) as i32 } const GSUB_TABLE_TAG: Tag = Tag::from_bytes(b"GSUB"); const GPOS_TABLE_TAG: Tag = Tag::from_bytes(b"GPOS"); fn has_table(font: &ttf_parser::Font, tag: Tag) -> bool { match tag { GSUB_TABLE_TAG => font.substitution_table().is_some(), GPOS_TABLE_TAG => font.positioning_table().is_some(), _ => false, } } fn with_table<T, F>(font: &ttf_parser::Font, tag: Tag, 
f: F) -> T where F: FnOnce(&dyn GlyphPosSubTable) -> T { match tag { GSUB_TABLE_TAG => f(&font.substitution_table().unwrap()), GPOS_TABLE_TAG => f(&font.positioning_table().unwrap()), _ => unreachable!(), } } #[no_mangle] pub extern "C" fn rb_ot_layout_table_get_script_count( ttf_parser_data: *const ffi::rb_ttf_parser_t, table_tag: Tag, ) -> u32 { let font = ttf_parser_from_raw(ttf_parser_data); if !has_table(font, table_tag) { return 0; } with_table(font, table_tag, |table| { table.scripts().count() as u32 }) } #[no_mangle] pub extern "C" fn rb_ot_layout_table_select_script( ttf_parser_data: *const ffi::rb_ttf_parser_t, table_tag: Tag, script_count: u32, script_tags: *const Tag, script_index: *mut u32, chosen_script: *mut Tag, ) -> ffi::hb_bool_t { let font = ttf_parser_from_raw(ttf_parser_data); let scripts = unsafe { std::slice::from_raw_parts(script_tags as *const _, script_count as usize) }; unsafe { *script_index = 0xFFFF; *chosen_script = Tag(0xFFFF); } if !has_table(font, table_tag) { return 0; } with_table(font, table_tag, |table| { let script_by_tag = |tag| table.scripts().position(|s| s.tag() == tag); for script in scripts { if let Some(idx) = script_by_tag(*script) { unsafe { *script_index = idx as u32; *chosen_script = *script; } return 1; } } // try finding 'DFLT' if let Some(idx) = script_by_tag(Tag::default_script()) { unsafe { *script_index = idx as u32; *chosen_script = Tag::default_script(); } return 0; } // try with 'dflt'; MS site has had typos and many fonts use it now :( if let Some(idx) = script_by_tag(Tag::default_language()) { unsafe { *script_index = idx as u32; *chosen_script = Tag::default_language(); } return 0; } // try with 'latn'; some old fonts put their features there even though // they're really trying to support Thai, for example :( if let Some(idx) = script_by_tag(Tag::from_bytes(b"latn")) { unsafe { *script_index = idx as u32; *chosen_script = Tag::from_bytes(b"latn"); } return 0; } 0 }) } #[no_mangle] pub extern "C" fn 
rb_ot_layout_table_find_feature( ttf_parser_data: *const ffi::rb_ttf_parser_t, table_tag: Tag, feature_tag: Tag, feature_index: *mut u32, ) -> ffi::hb_bool_t { let font = ttf_parser_from_raw(ttf_parser_data); unsafe { *feature_index = 0xFFFF; } if !has_table(font, table_tag) { return 0; } with_table(font, table_tag, |table| { if let Some(idx) = table.features().position(|f| f.tag == feature_tag) { unsafe { *feature_index = idx as u32; }; 1 } else { 0 } }) } #[no_mangle] pub extern "C" fn rb_ot_layout_script_select_language( ttf_parser_data: *const ffi::rb_ttf_parser_t, table_tag: Tag, script_index: u32, language_count: u32, language_tags: *mut Tag, language_index: *mut u32, ) -> ffi::hb_bool_t { let font = ttf_parser_from_raw(ttf_parser_data); let languages = unsafe { std::slice::from_raw_parts(language_tags as *const _, language_count as usize) }; unsafe { *language_index = 0xFFFF; } if !has_table(font, table_tag) { return 0; } with_table(font, table_tag, |table| { let script = try_opt_or!(table.script_at(ScriptIndex(script_index as u16)), 0); for lang in languages { if let Some((idx, _)) = script.language_by_tag(*lang) { unsafe { *language_index = idx.0 as u32; } return 1; } } // try finding 'dflt' if let Some((idx, _)) = script.language_by_tag(Tag::default_language()) { unsafe { *language_index = idx.0 as u32; } return 0; } 0 }) } #[no_mangle] pub extern "C" fn rb_ot_layout_language_get_required_feature( ttf_parser_data: *const ffi::rb_ttf_parser_t, table_tag: Tag, script_index: u32, language_index: u32, feature_index: *mut u32, feature_tag: *mut Tag, ) -> ffi::hb_bool_t { let font = ttf_parser_from_raw(ttf_parser_data); unsafe { *feature_index = 0xFFFF; *feature_tag = Tag(0); } if !has_table(font, table_tag) { return 0; } with_table(font, table_tag, |table| { let script = try_opt_or!(table.script_at(ScriptIndex(script_index as u16)), 0); let lang = try_opt_or!(script.language_at(LanguageIndex(language_index as u16)), 0); if let Some(idx) = 
lang.required_feature_index { if let Some(f) = table.feature_at(idx) { unsafe { *feature_index = idx.0 as u32; *feature_tag = f.tag; } return 1; } } 0 }) } #[no_mangle] pub extern "C" fn rb_ot_layout_language_find_feature( ttf_parser_data: *const ffi::rb_ttf_parser_t, table_tag: Tag, script_index: u32, language_index: u32, feature_tag: Tag, feature_index: *mut u32, ) -> ffi::hb_bool_t { let font = ttf_parser_from_raw(ttf_parser_data); unsafe { *feature_index = 0xFFFF; } if !has_table(font, table_tag) { return 0; } with_table(font, table_tag, |table| { let script = try_opt_or!(table.script_at(ScriptIndex(script_index as u16)), 0); let lang = if language_index != 0xFFFF { try_opt_or!(script.language_at(LanguageIndex(language_index as u16)), 0) } else { try_opt_or!(script.default_language(), 0) }; for idx in lang.feature_indices { if let Some(feature) = table.feature_at(idx) { if feature.tag == feature_tag { unsafe { *feature_index = idx.0 as u32; } return 1; } } } 0 }) } #[no_mangle] pub extern "C" fn rb_ot_layout_table_get_lookup_count( ttf_parser_data: *const ffi::rb_ttf_parser_t, table_tag: Tag, ) -> u32 { let font = ttf_parser_from_raw(ttf_parser_data); if !has_table(font, table_tag) { return 0; } with_table(font, table_tag, |table| { table.lookups().count() as u32 }) } #[no_mangle] pub extern "C" fn rb_ot_layout_table_find_feature_variations( ttf_parser_data: *const ffi::rb_ttf_parser_t, table_tag: Tag, coords: *const i32, num_coords: u32, variations_index: *mut u32, ) -> ffi::hb_bool_t { let font = ttf_parser_from_raw(ttf_parser_data); let coords = unsafe { std::slice::from_raw_parts(coords as *const _, num_coords as usize) }; unsafe { *variations_index = 0xFFFF_FFFF; } if !has_table(font, table_tag) { return 0; } with_table(font, table_tag, |table| { for (i, var) in table.feature_variations().enumerate() { if var.evaluate(coords) { unsafe { *variations_index = i as u32; } return 1; } } 0 }) } #[no_mangle] pub extern "C" fn 
rb_ot_layout_feature_with_variations_get_lookups( ttf_parser_data: *const ffi::rb_ttf_parser_t, table_tag: Tag, feature_index: u32, variations_index: u32, start_offset: u32, lookup_count: *mut u32, mut lookup_indexes: *mut u32, ) -> u32 { let font = ttf_parser_from_raw(ttf_parser_data); unsafe { *lookup_count = 0; } if !has_table(font, table_tag) { return 0; } with_table(font, table_tag, |table| { let feature = if let Some(variation) = table.feature_variation_at(FeatureVariationIndex(variations_index)) { try_opt_or!(variation.substitutions(), 0) .find(|s| s.index() == FeatureIndex(feature_index as u16)) .and_then(|s| s.feature()) } else { table.feature_at(FeatureIndex(feature_index as u16)) }; let mut added = 0; if let Some(feature) = feature { for idx in feature.lookup_indices.into_iter().skip(start_offset as usize).take(lookup_count as usize) { unsafe { *lookup_indexes = idx.0 as u32; lookup_indexes = lookup_indexes.offset(1); added += 1; } } } unsafe { *lookup_count = added; } 0 }) } #[no_mangle] pub extern "C" fn rb_ot_layout_has_substitution( ttf_parser_data: *const ffi::rb_ttf_parser_t, ) -> ffi::hb_bool_t { ttf_parser_from_raw(ttf_parser_data).substitution_table().is_some() as i32 } #[no_mangle] pub extern "C" fn rb_ot_layout_has_positioning( ttf_parser_data: *const ffi::rb_ttf_parser_t, ) -> ffi::hb_bool_t { ttf_parser_from_raw(ttf_parser_data).positioning_table().is_some() as i32 } #[no_mangle] pub extern "C" fn hb_ot_get_var_axis_count(ttf_parser_data: *const ffi::rb_ttf_parser_t) -> u16 { ttf_parser_from_raw(ttf_parser_data).variation_axes_count().map(|n| n.get()).unwrap_or(0) } #[no_mangle] pub extern "C" fn rb_ot_has_vorg_data(ttf_parser_data: *const ffi::rb_ttf_parser_t) -> i32 { ttf_parser_from_raw(ttf_parser_data).glyph_y_origin(GlyphId(0)).is_some() as i32 } #[no_mangle] pub extern "C" fn rb_ot_get_y_origin(ttf_parser_data: *const ffi::rb_ttf_parser_t, glyph: u32) -> i32 { 
ttf_parser_from_raw(ttf_parser_data).glyph_y_origin(GlyphId(u16::try_from(glyph).unwrap())).unwrap_or(0) as i32 } mod metrics { use crate::Tag; pub const HORIZONTAL_ASCENDER: Tag = Tag::from_bytes(b"hasc"); pub const HORIZONTAL_DESCENDER: Tag = Tag::from_bytes(b"hdsc"); pub const HORIZONTAL_LINE_GAP: Tag = Tag::from_bytes(b"hlgp"); pub const VERTICAL_ASCENDER: Tag = Tag::from_bytes(b"vasc"); pub const VERTICAL_DESCENDER: Tag = Tag::from_bytes(b"vdsc"); pub const VERTICAL_LINE_GAP: Tag = Tag::from_bytes(b"vlgp"); } #[no_mangle] pub unsafe extern "C" fn rb_ot_metrics_get_position_common( ttf_parser_data: *const ffi::rb_ttf_parser_t, coords: *const i32, coord_count: u32, scale: i32, tag: u32, position: *mut i32, ) -> i32 { // TODO: Never executed. Add tests. let font = ttf_parser_from_raw(ttf_parser_data); let coords = std::slice::from_raw_parts(coords as *const _, coord_count as usize); let upem = font.units_per_em().unwrap_or(0) as f32; let offset = font.metrics_variation(Tag(tag), coords).unwrap_or(0.0); let rescale = |x: f32| ((x * scale as f32) / upem).round() as i32; match Tag(tag) { metrics::HORIZONTAL_ASCENDER => { *position = rescale((font.ascender() as f32 + offset).abs()); } metrics::HORIZONTAL_DESCENDER => { *position = rescale(-(font.descender() as f32 + offset).abs()); } metrics::HORIZONTAL_LINE_GAP => { *position = rescale(font.line_gap() as f32 + offset); } metrics::VERTICAL_ASCENDER => { let v = font.vertical_ascender().unwrap_or(0); *position = rescale((v as f32 + offset).abs()); } metrics::VERTICAL_DESCENDER => { let v = font.vertical_descender().unwrap_or(0); *position = rescale(-(v as f32 + offset).abs()); } metrics::VERTICAL_LINE_GAP => { let v = font.vertical_line_gap().unwrap_or(0); *position = rescale(v as f32 + offset); } _ => return 0, } 1 } #[no_mangle] pub extern "C" fn rb_font_get_advance(ttf_parser_data: *const ffi::rb_ttf_parser_t, glyph: u32, is_vertical: bool) -> u32 { let font = ttf_parser_from_raw(ttf_parser_data); let glyph = 
GlyphId(u16::try_from(glyph).unwrap()); let pem = font.units_per_em().unwrap_or(1000); if is_vertical { font.glyph_ver_advance(glyph).unwrap_or(pem) as u32 } else { font.glyph_hor_advance(glyph).unwrap_or(pem) as u32 } } #[no_mangle] pub extern "C" fn rb_font_get_advance_var( hb_font: *mut ffi::hb_font_t, ttf_parser_data: *const ffi::rb_ttf_parser_t, hb_glyph: u32, is_vertical: bool, coords: *const i32, coord_count: u32, ) -> u32 { let advance = rb_font_get_advance(ttf_parser_data, hb_glyph, is_vertical); let font = ttf_parser_from_raw(ttf_parser_data); let coords = unsafe { std::slice::from_raw_parts(coords as *const _, coord_count as usize) }; let glyph = GlyphId(u16::try_from(hb_glyph).unwrap()); if coords.is_empty() { return advance; } // TODO: check advance for negative values if !is_vertical && font.has_table(ttf_parser::TableName::HorizontalMetricsVariations) { let offset = font.glyph_hor_advance_variation(glyph, coords).unwrap_or(0.0).round(); return (advance as f32 + offset) as u32; } else if is_vertical && font.has_table(ttf_parser::TableName::VerticalMetricsVariations) { let offset = font.glyph_ver_advance_variation(glyph, coords).unwrap_or(0.0).round(); return (advance as f32 + offset) as u32; } unsafe { ffi::hb_ot_glyf_get_advance_var(hb_font, hb_glyph, is_vertical) } } #[no_mangle] pub extern "C" fn rb_font_get_side_bearing(ttf_parser_data: *const ffi::rb_ttf_parser_t, glyph: u32, is_vertical: bool) -> i32 { let font = ttf_parser_from_raw(ttf_parser_data); let glyph = GlyphId(u16::try_from(glyph).unwrap()); if is_vertical { font.glyph_ver_side_bearing(glyph).unwrap_or(0) as i32 } else { font.glyph_hor_side_bearing(glyph).unwrap_or(0) as i32 } } #[no_mangle] pub extern "C" fn rb_font_get_side_bearing_var( hb_font: *mut ffi::hb_font_t, ttf_parser_data: *const ffi::rb_ttf_parser_t, hb_glyph: u32, is_vertical: bool, coords: *const i32, coord_count: u32, ) -> i32 { let side_bearing = rb_font_get_side_bearing(ttf_parser_data, hb_glyph, is_vertical); let 
font = ttf_parser_from_raw(ttf_parser_data); let coords = unsafe { std::slice::from_raw_parts(coords as *const _, coord_count as usize) }; let glyph = GlyphId(u16::try_from(hb_glyph).unwrap()); if coords.is_empty() { return side_bearing; } if !is_vertical && font.has_table(ttf_parser::TableName::HorizontalMetricsVariations) { let offset = font.glyph_hor_side_bearing_variation(glyph, coords).unwrap_or(0.0).round(); return (side_bearing as f32 + offset) as i32; } else if is_vertical && font.has_table(ttf_parser::TableName::VerticalMetricsVariations) { let offset = font.glyph_ver_side_bearing_variation(glyph, coords).unwrap_or(0.0).round(); return (side_bearing as f32 + offset) as i32; } unsafe { ffi::hb_ot_glyf_get_side_bearing_var(hb_font, hb_glyph, is_vertical) } } #[no_mangle] pub extern "C" fn rb_face_get_glyph_count(ttf_parser_data: *const ffi::rb_ttf_parser_t) -> u32 { ttf_parser_from_raw(ttf_parser_data).number_of_glyphs() as u32 } #[no_mangle] pub extern "C" fn rb_face_get_upem(ttf_parser_data: *const ffi::rb_ttf_parser_t) -> u32 { ttf_parser_from_raw(ttf_parser_data).units_per_em().unwrap_or(1000) as u32 } #[no_mangle] pub extern "C" fn rb_face_index_to_loc_format(ttf_parser_data: *const ffi::rb_ttf_parser_t) -> u32 { ttf_parser_from_raw(ttf_parser_data).index_to_location_format().map(|f| f as u32).unwrap_or(0) }
30.538745
142
0.608265
9cb94c5dda19fdadc5c5dd262e0fbf2e483bafa3
25,195
// Generated from definition io.k8s.api.core.v1.Volume /// Volume represents a named volume in a pod that may be accessed by any container in the pod. #[derive(Clone, Debug, Default, PartialEq)] pub struct Volume { /// AWSElasticBlockStore represents an AWS Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore pub aws_elastic_block_store: Option<crate::v1_10::api::core::v1::AWSElasticBlockStoreVolumeSource>, /// AzureDisk represents an Azure Data Disk mount on the host and bind mount to the pod. pub azure_disk: Option<crate::v1_10::api::core::v1::AzureDiskVolumeSource>, /// AzureFile represents an Azure File Service mount on the host and bind mount to the pod. pub azure_file: Option<crate::v1_10::api::core::v1::AzureFileVolumeSource>, /// CephFS represents a Ceph FS mount on the host that shares a pod's lifetime pub cephfs: Option<crate::v1_10::api::core::v1::CephFSVolumeSource>, /// Cinder represents a cinder volume attached and mounted on kubelets host machine More info: https://releases.k8s.io/HEAD/examples/mysql-cinder-pd/README.md pub cinder: Option<crate::v1_10::api::core::v1::CinderVolumeSource>, /// ConfigMap represents a configMap that should populate this volume pub config_map: Option<crate::v1_10::api::core::v1::ConfigMapVolumeSource>, /// DownwardAPI represents downward API about the pod that should populate this volume pub downward_api: Option<crate::v1_10::api::core::v1::DownwardAPIVolumeSource>, /// EmptyDir represents a temporary directory that shares a pod's lifetime. More info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir pub empty_dir: Option<crate::v1_10::api::core::v1::EmptyDirVolumeSource>, /// FC represents a Fibre Channel resource that is attached to a kubelet's host machine and then exposed to the pod. 
pub fc: Option<crate::v1_10::api::core::v1::FCVolumeSource>, /// FlexVolume represents a generic volume resource that is provisioned/attached using an exec based plugin. pub flex_volume: Option<crate::v1_10::api::core::v1::FlexVolumeSource>, /// Flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running pub flocker: Option<crate::v1_10::api::core::v1::FlockerVolumeSource>, /// GCEPersistentDisk represents a GCE Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk pub gce_persistent_disk: Option<crate::v1_10::api::core::v1::GCEPersistentDiskVolumeSource>, /// GitRepo represents a git repository at a particular revision. pub git_repo: Option<crate::v1_10::api::core::v1::GitRepoVolumeSource>, /// Glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime. More info: https://releases.k8s.io/HEAD/examples/volumes/glusterfs/README.md pub glusterfs: Option<crate::v1_10::api::core::v1::GlusterfsVolumeSource>, /// HostPath represents a pre-existing file or directory on the host machine that is directly exposed to the container. This is generally used for system agents or other privileged things that are allowed to see the host machine. Most containers will NOT need this. More info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath pub host_path: Option<crate::v1_10::api::core::v1::HostPathVolumeSource>, /// ISCSI represents an ISCSI Disk resource that is attached to a kubelet's host machine and then exposed to the pod. More info: https://releases.k8s.io/HEAD/examples/volumes/iscsi/README.md pub iscsi: Option<crate::v1_10::api::core::v1::ISCSIVolumeSource>, /// Volume's name. Must be a DNS_LABEL and unique within the pod. 
More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names pub name: String, /// NFS represents an NFS mount on the host that shares a pod's lifetime More info: https://kubernetes.io/docs/concepts/storage/volumes#nfs pub nfs: Option<crate::v1_10::api::core::v1::NFSVolumeSource>, /// PersistentVolumeClaimVolumeSource represents a reference to a PersistentVolumeClaim in the same namespace. More info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims pub persistent_volume_claim: Option<crate::v1_10::api::core::v1::PersistentVolumeClaimVolumeSource>, /// PhotonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine pub photon_persistent_disk: Option<crate::v1_10::api::core::v1::PhotonPersistentDiskVolumeSource>, /// PortworxVolume represents a portworx volume attached and mounted on kubelets host machine pub portworx_volume: Option<crate::v1_10::api::core::v1::PortworxVolumeSource>, /// Items for all in one resources secrets, configmaps, and downward API pub projected: Option<crate::v1_10::api::core::v1::ProjectedVolumeSource>, /// Quobyte represents a Quobyte mount on the host that shares a pod's lifetime pub quobyte: Option<crate::v1_10::api::core::v1::QuobyteVolumeSource>, /// RBD represents a Rados Block Device mount on the host that shares a pod's lifetime. More info: https://releases.k8s.io/HEAD/examples/volumes/rbd/README.md pub rbd: Option<crate::v1_10::api::core::v1::RBDVolumeSource>, /// ScaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes. pub scale_io: Option<crate::v1_10::api::core::v1::ScaleIOVolumeSource>, /// Secret represents a secret that should populate this volume. More info: https://kubernetes.io/docs/concepts/storage/volumes#secret pub secret: Option<crate::v1_10::api::core::v1::SecretVolumeSource>, /// StorageOS represents a StorageOS volume attached and mounted on Kubernetes nodes. 
pub storageos: Option<crate::v1_10::api::core::v1::StorageOSVolumeSource>, /// VsphereVolume represents a vSphere volume attached and mounted on kubelets host machine pub vsphere_volume: Option<crate::v1_10::api::core::v1::VsphereVirtualDiskVolumeSource>, } impl<'de> serde::Deserialize<'de> for Volume { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> { #[allow(non_camel_case_types)] enum Field { Key_aws_elastic_block_store, Key_azure_disk, Key_azure_file, Key_cephfs, Key_cinder, Key_config_map, Key_downward_api, Key_empty_dir, Key_fc, Key_flex_volume, Key_flocker, Key_gce_persistent_disk, Key_git_repo, Key_glusterfs, Key_host_path, Key_iscsi, Key_name, Key_nfs, Key_persistent_volume_claim, Key_photon_persistent_disk, Key_portworx_volume, Key_projected, Key_quobyte, Key_rbd, Key_scale_io, Key_secret, Key_storageos, Key_vsphere_volume, Other, } impl<'de> serde::Deserialize<'de> for Field { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> { struct Visitor; impl<'de> serde::de::Visitor<'de> for Visitor { type Value = Field; fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "field identifier") } fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error { Ok(match v { "awsElasticBlockStore" => Field::Key_aws_elastic_block_store, "azureDisk" => Field::Key_azure_disk, "azureFile" => Field::Key_azure_file, "cephfs" => Field::Key_cephfs, "cinder" => Field::Key_cinder, "configMap" => Field::Key_config_map, "downwardAPI" => Field::Key_downward_api, "emptyDir" => Field::Key_empty_dir, "fc" => Field::Key_fc, "flexVolume" => Field::Key_flex_volume, "flocker" => Field::Key_flocker, "gcePersistentDisk" => Field::Key_gce_persistent_disk, "gitRepo" => Field::Key_git_repo, "glusterfs" => Field::Key_glusterfs, "hostPath" => Field::Key_host_path, "iscsi" => Field::Key_iscsi, "name" => Field::Key_name, "nfs" => Field::Key_nfs, 
"persistentVolumeClaim" => Field::Key_persistent_volume_claim, "photonPersistentDisk" => Field::Key_photon_persistent_disk, "portworxVolume" => Field::Key_portworx_volume, "projected" => Field::Key_projected, "quobyte" => Field::Key_quobyte, "rbd" => Field::Key_rbd, "scaleIO" => Field::Key_scale_io, "secret" => Field::Key_secret, "storageos" => Field::Key_storageos, "vsphereVolume" => Field::Key_vsphere_volume, _ => Field::Other, }) } } deserializer.deserialize_identifier(Visitor) } } struct Visitor; impl<'de> serde::de::Visitor<'de> for Visitor { type Value = Volume; fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "struct Volume") } fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> { let mut value_aws_elastic_block_store: Option<crate::v1_10::api::core::v1::AWSElasticBlockStoreVolumeSource> = None; let mut value_azure_disk: Option<crate::v1_10::api::core::v1::AzureDiskVolumeSource> = None; let mut value_azure_file: Option<crate::v1_10::api::core::v1::AzureFileVolumeSource> = None; let mut value_cephfs: Option<crate::v1_10::api::core::v1::CephFSVolumeSource> = None; let mut value_cinder: Option<crate::v1_10::api::core::v1::CinderVolumeSource> = None; let mut value_config_map: Option<crate::v1_10::api::core::v1::ConfigMapVolumeSource> = None; let mut value_downward_api: Option<crate::v1_10::api::core::v1::DownwardAPIVolumeSource> = None; let mut value_empty_dir: Option<crate::v1_10::api::core::v1::EmptyDirVolumeSource> = None; let mut value_fc: Option<crate::v1_10::api::core::v1::FCVolumeSource> = None; let mut value_flex_volume: Option<crate::v1_10::api::core::v1::FlexVolumeSource> = None; let mut value_flocker: Option<crate::v1_10::api::core::v1::FlockerVolumeSource> = None; let mut value_gce_persistent_disk: Option<crate::v1_10::api::core::v1::GCEPersistentDiskVolumeSource> = None; let mut value_git_repo: Option<crate::v1_10::api::core::v1::GitRepoVolumeSource> = None; let 
mut value_glusterfs: Option<crate::v1_10::api::core::v1::GlusterfsVolumeSource> = None; let mut value_host_path: Option<crate::v1_10::api::core::v1::HostPathVolumeSource> = None; let mut value_iscsi: Option<crate::v1_10::api::core::v1::ISCSIVolumeSource> = None; let mut value_name: Option<String> = None; let mut value_nfs: Option<crate::v1_10::api::core::v1::NFSVolumeSource> = None; let mut value_persistent_volume_claim: Option<crate::v1_10::api::core::v1::PersistentVolumeClaimVolumeSource> = None; let mut value_photon_persistent_disk: Option<crate::v1_10::api::core::v1::PhotonPersistentDiskVolumeSource> = None; let mut value_portworx_volume: Option<crate::v1_10::api::core::v1::PortworxVolumeSource> = None; let mut value_projected: Option<crate::v1_10::api::core::v1::ProjectedVolumeSource> = None; let mut value_quobyte: Option<crate::v1_10::api::core::v1::QuobyteVolumeSource> = None; let mut value_rbd: Option<crate::v1_10::api::core::v1::RBDVolumeSource> = None; let mut value_scale_io: Option<crate::v1_10::api::core::v1::ScaleIOVolumeSource> = None; let mut value_secret: Option<crate::v1_10::api::core::v1::SecretVolumeSource> = None; let mut value_storageos: Option<crate::v1_10::api::core::v1::StorageOSVolumeSource> = None; let mut value_vsphere_volume: Option<crate::v1_10::api::core::v1::VsphereVirtualDiskVolumeSource> = None; while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? 
{ match key { Field::Key_aws_elastic_block_store => value_aws_elastic_block_store = serde::de::MapAccess::next_value(&mut map)?, Field::Key_azure_disk => value_azure_disk = serde::de::MapAccess::next_value(&mut map)?, Field::Key_azure_file => value_azure_file = serde::de::MapAccess::next_value(&mut map)?, Field::Key_cephfs => value_cephfs = serde::de::MapAccess::next_value(&mut map)?, Field::Key_cinder => value_cinder = serde::de::MapAccess::next_value(&mut map)?, Field::Key_config_map => value_config_map = serde::de::MapAccess::next_value(&mut map)?, Field::Key_downward_api => value_downward_api = serde::de::MapAccess::next_value(&mut map)?, Field::Key_empty_dir => value_empty_dir = serde::de::MapAccess::next_value(&mut map)?, Field::Key_fc => value_fc = serde::de::MapAccess::next_value(&mut map)?, Field::Key_flex_volume => value_flex_volume = serde::de::MapAccess::next_value(&mut map)?, Field::Key_flocker => value_flocker = serde::de::MapAccess::next_value(&mut map)?, Field::Key_gce_persistent_disk => value_gce_persistent_disk = serde::de::MapAccess::next_value(&mut map)?, Field::Key_git_repo => value_git_repo = serde::de::MapAccess::next_value(&mut map)?, Field::Key_glusterfs => value_glusterfs = serde::de::MapAccess::next_value(&mut map)?, Field::Key_host_path => value_host_path = serde::de::MapAccess::next_value(&mut map)?, Field::Key_iscsi => value_iscsi = serde::de::MapAccess::next_value(&mut map)?, Field::Key_name => value_name = Some(serde::de::MapAccess::next_value(&mut map)?), Field::Key_nfs => value_nfs = serde::de::MapAccess::next_value(&mut map)?, Field::Key_persistent_volume_claim => value_persistent_volume_claim = serde::de::MapAccess::next_value(&mut map)?, Field::Key_photon_persistent_disk => value_photon_persistent_disk = serde::de::MapAccess::next_value(&mut map)?, Field::Key_portworx_volume => value_portworx_volume = serde::de::MapAccess::next_value(&mut map)?, Field::Key_projected => value_projected = serde::de::MapAccess::next_value(&mut 
map)?, Field::Key_quobyte => value_quobyte = serde::de::MapAccess::next_value(&mut map)?, Field::Key_rbd => value_rbd = serde::de::MapAccess::next_value(&mut map)?, Field::Key_scale_io => value_scale_io = serde::de::MapAccess::next_value(&mut map)?, Field::Key_secret => value_secret = serde::de::MapAccess::next_value(&mut map)?, Field::Key_storageos => value_storageos = serde::de::MapAccess::next_value(&mut map)?, Field::Key_vsphere_volume => value_vsphere_volume = serde::de::MapAccess::next_value(&mut map)?, Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; }, } } Ok(Volume { aws_elastic_block_store: value_aws_elastic_block_store, azure_disk: value_azure_disk, azure_file: value_azure_file, cephfs: value_cephfs, cinder: value_cinder, config_map: value_config_map, downward_api: value_downward_api, empty_dir: value_empty_dir, fc: value_fc, flex_volume: value_flex_volume, flocker: value_flocker, gce_persistent_disk: value_gce_persistent_disk, git_repo: value_git_repo, glusterfs: value_glusterfs, host_path: value_host_path, iscsi: value_iscsi, name: value_name.ok_or_else(|| serde::de::Error::missing_field("name"))?, nfs: value_nfs, persistent_volume_claim: value_persistent_volume_claim, photon_persistent_disk: value_photon_persistent_disk, portworx_volume: value_portworx_volume, projected: value_projected, quobyte: value_quobyte, rbd: value_rbd, scale_io: value_scale_io, secret: value_secret, storageos: value_storageos, vsphere_volume: value_vsphere_volume, }) } } deserializer.deserialize_struct( "Volume", &[ "awsElasticBlockStore", "azureDisk", "azureFile", "cephfs", "cinder", "configMap", "downwardAPI", "emptyDir", "fc", "flexVolume", "flocker", "gcePersistentDisk", "gitRepo", "glusterfs", "hostPath", "iscsi", "name", "nfs", "persistentVolumeClaim", "photonPersistentDisk", "portworxVolume", "projected", "quobyte", "rbd", "scaleIO", "secret", "storageos", "vsphereVolume", ], Visitor, ) } } impl serde::Serialize for Volume { 
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer { let mut state = serializer.serialize_struct( "Volume", 1 + self.aws_elastic_block_store.as_ref().map_or(0, |_| 1) + self.azure_disk.as_ref().map_or(0, |_| 1) + self.azure_file.as_ref().map_or(0, |_| 1) + self.cephfs.as_ref().map_or(0, |_| 1) + self.cinder.as_ref().map_or(0, |_| 1) + self.config_map.as_ref().map_or(0, |_| 1) + self.downward_api.as_ref().map_or(0, |_| 1) + self.empty_dir.as_ref().map_or(0, |_| 1) + self.fc.as_ref().map_or(0, |_| 1) + self.flex_volume.as_ref().map_or(0, |_| 1) + self.flocker.as_ref().map_or(0, |_| 1) + self.gce_persistent_disk.as_ref().map_or(0, |_| 1) + self.git_repo.as_ref().map_or(0, |_| 1) + self.glusterfs.as_ref().map_or(0, |_| 1) + self.host_path.as_ref().map_or(0, |_| 1) + self.iscsi.as_ref().map_or(0, |_| 1) + self.nfs.as_ref().map_or(0, |_| 1) + self.persistent_volume_claim.as_ref().map_or(0, |_| 1) + self.photon_persistent_disk.as_ref().map_or(0, |_| 1) + self.portworx_volume.as_ref().map_or(0, |_| 1) + self.projected.as_ref().map_or(0, |_| 1) + self.quobyte.as_ref().map_or(0, |_| 1) + self.rbd.as_ref().map_or(0, |_| 1) + self.scale_io.as_ref().map_or(0, |_| 1) + self.secret.as_ref().map_or(0, |_| 1) + self.storageos.as_ref().map_or(0, |_| 1) + self.vsphere_volume.as_ref().map_or(0, |_| 1), )?; if let Some(value) = &self.aws_elastic_block_store { serde::ser::SerializeStruct::serialize_field(&mut state, "awsElasticBlockStore", value)?; } if let Some(value) = &self.azure_disk { serde::ser::SerializeStruct::serialize_field(&mut state, "azureDisk", value)?; } if let Some(value) = &self.azure_file { serde::ser::SerializeStruct::serialize_field(&mut state, "azureFile", value)?; } if let Some(value) = &self.cephfs { serde::ser::SerializeStruct::serialize_field(&mut state, "cephfs", value)?; } if let Some(value) = &self.cinder { serde::ser::SerializeStruct::serialize_field(&mut state, "cinder", value)?; } if let Some(value) = 
&self.config_map { serde::ser::SerializeStruct::serialize_field(&mut state, "configMap", value)?; } if let Some(value) = &self.downward_api { serde::ser::SerializeStruct::serialize_field(&mut state, "downwardAPI", value)?; } if let Some(value) = &self.empty_dir { serde::ser::SerializeStruct::serialize_field(&mut state, "emptyDir", value)?; } if let Some(value) = &self.fc { serde::ser::SerializeStruct::serialize_field(&mut state, "fc", value)?; } if let Some(value) = &self.flex_volume { serde::ser::SerializeStruct::serialize_field(&mut state, "flexVolume", value)?; } if let Some(value) = &self.flocker { serde::ser::SerializeStruct::serialize_field(&mut state, "flocker", value)?; } if let Some(value) = &self.gce_persistent_disk { serde::ser::SerializeStruct::serialize_field(&mut state, "gcePersistentDisk", value)?; } if let Some(value) = &self.git_repo { serde::ser::SerializeStruct::serialize_field(&mut state, "gitRepo", value)?; } if let Some(value) = &self.glusterfs { serde::ser::SerializeStruct::serialize_field(&mut state, "glusterfs", value)?; } if let Some(value) = &self.host_path { serde::ser::SerializeStruct::serialize_field(&mut state, "hostPath", value)?; } if let Some(value) = &self.iscsi { serde::ser::SerializeStruct::serialize_field(&mut state, "iscsi", value)?; } serde::ser::SerializeStruct::serialize_field(&mut state, "name", &self.name)?; if let Some(value) = &self.nfs { serde::ser::SerializeStruct::serialize_field(&mut state, "nfs", value)?; } if let Some(value) = &self.persistent_volume_claim { serde::ser::SerializeStruct::serialize_field(&mut state, "persistentVolumeClaim", value)?; } if let Some(value) = &self.photon_persistent_disk { serde::ser::SerializeStruct::serialize_field(&mut state, "photonPersistentDisk", value)?; } if let Some(value) = &self.portworx_volume { serde::ser::SerializeStruct::serialize_field(&mut state, "portworxVolume", value)?; } if let Some(value) = &self.projected { serde::ser::SerializeStruct::serialize_field(&mut state, 
"projected", value)?; } if let Some(value) = &self.quobyte { serde::ser::SerializeStruct::serialize_field(&mut state, "quobyte", value)?; } if let Some(value) = &self.rbd { serde::ser::SerializeStruct::serialize_field(&mut state, "rbd", value)?; } if let Some(value) = &self.scale_io { serde::ser::SerializeStruct::serialize_field(&mut state, "scaleIO", value)?; } if let Some(value) = &self.secret { serde::ser::SerializeStruct::serialize_field(&mut state, "secret", value)?; } if let Some(value) = &self.storageos { serde::ser::SerializeStruct::serialize_field(&mut state, "storageos", value)?; } if let Some(value) = &self.vsphere_volume { serde::ser::SerializeStruct::serialize_field(&mut state, "vsphereVolume", value)?; } serde::ser::SerializeStruct::end(state) } }
57.654462
341
0.598015
7655097681ba2975e9ad5f4d558609e3ad84413d
28,834
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::any::Any; use std::cell::RefCell; use std::collections::BTreeSet; use std::env; use std::fmt::Debug; use std::fs; use std::hash::Hash; use std::ops::Deref; use std::path::{Path, PathBuf}; use std::process::Command; use compile; use install; use dist; use util::{exe, libdir, add_lib_path}; use {Build, Mode}; use cache::{INTERNER, Interned, Cache}; use check; use flags::Subcommand; use doc; use tool; use native; pub use Compiler; pub struct Builder<'a> { pub build: &'a Build, pub top_stage: u32, pub kind: Kind, cache: Cache, stack: RefCell<Vec<Box<Any>>>, } impl<'a> Deref for Builder<'a> { type Target = Build; fn deref(&self) -> &Self::Target { self.build } } pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash { /// `PathBuf` when directories are created or to return a `Compiler` once /// it's been assembled. type Output: Clone; const DEFAULT: bool = false; /// Run this rule for all hosts without cross compiling. const ONLY_HOSTS: bool = false; /// Run this rule for all targets, but only with the native host. const ONLY_BUILD_TARGETS: bool = false; /// Only run this step with the build triple as host and target. const ONLY_BUILD: bool = false; /// Primary function to execute this rule. Can call `builder.ensure(...)` /// with other steps to run those. fn run(self, builder: &Builder) -> Self::Output; /// When bootstrap is passed a set of paths, this controls whether this rule /// will execute. 
However, it does not get called in a "default" context /// when we are not passed any paths; in that case, make_run is called /// directly. fn should_run(run: ShouldRun) -> ShouldRun; /// Build up a "root" rule, either as a default rule or from a path passed /// to us. /// /// When path is `None`, we are executing in a context where no paths were /// passed. When `./x.py build` is run, for example, this rule could get /// called if it is in the correct list below with a path of `None`. fn make_run(_run: RunConfig) { // It is reasonable to not have an implementation of make_run for rules // who do not want to get called from the root context. This means that // they are likely dependencies (e.g., sysroot creation) or similar, and // as such calling them from ./x.py isn't logical. unimplemented!() } } pub struct RunConfig<'a> { pub builder: &'a Builder<'a>, pub host: Interned<String>, pub target: Interned<String>, pub path: Option<&'a Path>, } struct StepDescription { default: bool, only_hosts: bool, only_build_targets: bool, only_build: bool, should_run: fn(ShouldRun) -> ShouldRun, make_run: fn(RunConfig), } impl StepDescription { fn from<S: Step>() -> StepDescription { StepDescription { default: S::DEFAULT, only_hosts: S::ONLY_HOSTS, only_build_targets: S::ONLY_BUILD_TARGETS, only_build: S::ONLY_BUILD, should_run: S::should_run, make_run: S::make_run, } } fn maybe_run(&self, builder: &Builder, path: Option<&Path>) { let build = builder.build; let hosts = if self.only_build_targets || self.only_build { build.build_triple() } else { &build.hosts }; // Determine the targets participating in this rule. 
let targets = if self.only_hosts { if build.config.run_host_only { &[] } else if self.only_build { build.build_triple() } else { &build.hosts } } else { &build.targets }; for host in hosts { for target in targets { let run = RunConfig { builder, path, host: *host, target: *target, }; (self.make_run)(run); } } } fn run(v: &[StepDescription], builder: &Builder, paths: &[PathBuf]) { let should_runs = v.iter().map(|desc| { (desc.should_run)(ShouldRun::new(builder)) }).collect::<Vec<_>>(); if paths.is_empty() { for (desc, should_run) in v.iter().zip(should_runs) { if desc.default && should_run.is_really_default { desc.maybe_run(builder, None); } } } else { for path in paths { let mut attempted_run = false; for (desc, should_run) in v.iter().zip(&should_runs) { if should_run.run(path) { attempted_run = true; desc.maybe_run(builder, Some(path)); } } if !attempted_run { eprintln!("Warning: no rules matched {}.", path.display()); } } } } } #[derive(Clone)] pub struct ShouldRun<'a> { pub builder: &'a Builder<'a>, // use a BTreeSet to maintain sort order paths: BTreeSet<PathBuf>, // If this is a default rule, this is an additional constraint placed on // it's run. Generally something like compiler docs being enabled. 
is_really_default: bool, } impl<'a> ShouldRun<'a> { fn new(builder: &'a Builder) -> ShouldRun<'a> { ShouldRun { builder, paths: BTreeSet::new(), is_really_default: true, // by default no additional conditions } } pub fn default_condition(mut self, cond: bool) -> Self { self.is_really_default = cond; self } pub fn krate(mut self, name: &str) -> Self { for (_, krate_path) in self.builder.crates(name) { self.paths.insert(PathBuf::from(krate_path)); } self } pub fn path(mut self, path: &str) -> Self { self.paths.insert(PathBuf::from(path)); self } // allows being more explicit about why should_run in Step returns the value passed to it pub fn never(self) -> ShouldRun<'a> { self } fn run(&self, path: &Path) -> bool { self.paths.iter().any(|p| path.ends_with(p)) } } #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum Kind { Build, Test, Bench, Dist, Doc, Install, } impl<'a> Builder<'a> { fn get_step_descriptions(kind: Kind) -> Vec<StepDescription> { macro_rules! describe { ($($rule:ty),+ $(,)*) => {{ vec![$(StepDescription::from::<$rule>()),+] }}; } match kind { Kind::Build => describe!(compile::Std, compile::Test, compile::Rustc, compile::StartupObjects, tool::BuildManifest, tool::Rustbook, tool::ErrorIndex, tool::UnstableBookGen, tool::Tidy, tool::Linkchecker, tool::CargoTest, tool::Compiletest, tool::RemoteTestServer, tool::RemoteTestClient, tool::RustInstaller, tool::Cargo, tool::Rls, tool::Rustdoc, tool::Clippy, native::Llvm, tool::Rustfmt, tool::Miri), Kind::Test => describe!(check::Tidy, check::Bootstrap, check::DefaultCompiletest, check::HostCompiletest, check::Crate, check::CrateLibrustc, check::Rustdoc, check::Linkcheck, check::Cargotest, check::Cargo, check::Rls, check::Docs, check::ErrorIndex, check::Distcheck, check::Rustfmt, check::Miri, check::Clippy, check::RustdocJS), Kind::Bench => describe!(check::Crate, check::CrateLibrustc), Kind::Doc => describe!(doc::UnstableBook, doc::UnstableBookGen, doc::TheBook, doc::Standalone, doc::Std, doc::Test, 
doc::Rustc, doc::ErrorIndex, doc::Nomicon, doc::Reference, doc::Rustdoc, doc::RustByExample, doc::CargoBook), Kind::Dist => describe!(dist::Docs, dist::Mingw, dist::Rustc, dist::DebuggerScripts, dist::Std, dist::Analysis, dist::Src, dist::PlainSourceTarball, dist::Cargo, dist::Rls, dist::Rustfmt, dist::Extended, dist::HashSign, dist::DontDistWithMiriEnabled), Kind::Install => describe!(install::Docs, install::Std, install::Cargo, install::Rls, install::Rustfmt, install::Analysis, install::Src, install::Rustc), } } pub fn get_help(build: &Build, subcommand: &str) -> Option<String> { let kind = match subcommand { "build" => Kind::Build, "doc" => Kind::Doc, "test" => Kind::Test, "bench" => Kind::Bench, "dist" => Kind::Dist, "install" => Kind::Install, _ => return None, }; let builder = Builder { build, top_stage: build.config.stage.unwrap_or(2), kind, cache: Cache::new(), stack: RefCell::new(Vec::new()), }; let builder = &builder; let mut should_run = ShouldRun::new(builder); for desc in Builder::get_step_descriptions(builder.kind) { should_run = (desc.should_run)(should_run); } let mut help = String::from("Available paths:\n"); for path in should_run.paths { help.push_str(format!(" ./x.py {} {}\n", subcommand, path.display()).as_str()); } Some(help) } pub fn run(build: &Build) { let (kind, paths) = match build.config.cmd { Subcommand::Build { ref paths } => (Kind::Build, &paths[..]), Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]), Subcommand::Test { ref paths, .. } => (Kind::Test, &paths[..]), Subcommand::Bench { ref paths, .. } => (Kind::Bench, &paths[..]), Subcommand::Dist { ref paths } => (Kind::Dist, &paths[..]), Subcommand::Install { ref paths } => (Kind::Install, &paths[..]), Subcommand::Clean { .. 
} => panic!(), }; let builder = Builder { build, top_stage: build.config.stage.unwrap_or(2), kind, cache: Cache::new(), stack: RefCell::new(Vec::new()), }; StepDescription::run(&Builder::get_step_descriptions(builder.kind), &builder, paths); } pub fn default_doc(&self, paths: Option<&[PathBuf]>) { let paths = paths.unwrap_or(&[]); StepDescription::run(&Builder::get_step_descriptions(Kind::Doc), self, paths); } /// Obtain a compiler at a given stage and for a given host. Explicitly does /// not take `Compiler` since all `Compiler` instances are meant to be /// obtained through this function, since it ensures that they are valid /// (i.e., built and assembled). pub fn compiler(&self, stage: u32, host: Interned<String>) -> Compiler { self.ensure(compile::Assemble { target_compiler: Compiler { stage, host } }) } pub fn sysroot(&self, compiler: Compiler) -> Interned<PathBuf> { self.ensure(compile::Sysroot { compiler }) } /// Returns the libdir where the standard library and other artifacts are /// found for a compiler's sysroot. pub fn sysroot_libdir( &self, compiler: Compiler, target: Interned<String> ) -> Interned<PathBuf> { #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] struct Libdir { compiler: Compiler, target: Interned<String>, } impl Step for Libdir { type Output = Interned<PathBuf>; fn should_run(run: ShouldRun) -> ShouldRun { run.never() } fn run(self, builder: &Builder) -> Interned<PathBuf> { let compiler = self.compiler; let lib = if compiler.stage >= 1 && builder.build.config.libdir.is_some() { builder.build.config.libdir.clone().unwrap() } else { PathBuf::from("lib") }; let sysroot = builder.sysroot(self.compiler).join(lib) .join("rustlib").join(self.target).join("lib"); let _ = fs::remove_dir_all(&sysroot); t!(fs::create_dir_all(&sysroot)); INTERNER.intern_path(sysroot) } } self.ensure(Libdir { compiler, target }) } /// Returns the compiler's libdir where it stores the dynamic libraries that /// it itself links against. 
/// /// For example this returns `<sysroot>/lib` on Unix and `<sysroot>/bin` on /// Windows. pub fn rustc_libdir(&self, compiler: Compiler) -> PathBuf { if compiler.is_snapshot(self) { self.build.rustc_snapshot_libdir() } else { self.sysroot(compiler).join(libdir(&compiler.host)) } } /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic /// library lookup path. pub fn add_rustc_lib_path(&self, compiler: Compiler, cmd: &mut Command) { // Windows doesn't need dylib path munging because the dlls for the // compiler live next to the compiler and the system will find them // automatically. if cfg!(windows) { return } add_lib_path(vec![self.rustc_libdir(compiler)], cmd); } /// Get a path to the compiler specified. pub fn rustc(&self, compiler: Compiler) -> PathBuf { if compiler.is_snapshot(self) { self.initial_rustc.clone() } else { self.sysroot(compiler).join("bin").join(exe("rustc", &compiler.host)) } } pub fn rustdoc(&self, host: Interned<String>) -> PathBuf { self.ensure(tool::Rustdoc { host }) } pub fn rustdoc_cmd(&self, host: Interned<String>) -> Command { let mut cmd = Command::new(&self.out.join("bootstrap/debug/rustdoc")); let compiler = self.compiler(self.top_stage, host); cmd.env("RUSTC_STAGE", compiler.stage.to_string()) .env("RUSTC_SYSROOT", self.sysroot(compiler)) .env("RUSTDOC_LIBDIR", self.sysroot_libdir(compiler, self.build.build)) .env("CFG_RELEASE_CHANNEL", &self.build.config.channel) .env("RUSTDOC_REAL", self.rustdoc(host)) .env("RUSTDOC_CRATE_VERSION", self.build.rust_version()) .env("RUSTC_BOOTSTRAP", "1"); if let Some(linker) = self.build.linker(host) { cmd.env("RUSTC_TARGET_LINKER", linker); } cmd } /// Prepares an invocation of `cargo` to be run. /// /// This will create a `Command` that represents a pending execution of /// Cargo. 
This cargo will be configured to use `compiler` as the actual /// rustc compiler, its output will be scoped by `mode`'s output directory, /// it will pass the `--target` flag for the specified `target`, and will be /// executing the Cargo command `cmd`. pub fn cargo(&self, compiler: Compiler, mode: Mode, target: Interned<String>, cmd: &str) -> Command { let mut cargo = Command::new(&self.initial_cargo); let out_dir = self.stage_out(compiler, mode); cargo.env("CARGO_TARGET_DIR", out_dir) .arg(cmd) .arg("--target") .arg(target); // If we were invoked from `make` then that's already got a jobserver // set up for us so no need to tell Cargo about jobs all over again. if env::var_os("MAKEFLAGS").is_none() && env::var_os("MFLAGS").is_none() { cargo.arg("-j").arg(self.jobs().to_string()); } // FIXME: Temporary fix for https://github.com/rust-lang/cargo/issues/3005 // Force cargo to output binaries with disambiguating hashes in the name cargo.env("__CARGO_DEFAULT_LIB_METADATA", &self.config.channel); let stage; if compiler.stage == 0 && self.local_rebuild { // Assume the local-rebuild rustc already has stage1 features. stage = 1; } else { stage = compiler.stage; } // Customize the compiler we're running. Specify the compiler to cargo // as our shim and then pass it some various options used to configure // how the actual compiler itself is called. 
// // These variables are primarily all read by // src/bootstrap/bin/{rustc.rs,rustdoc.rs} cargo.env("RUSTBUILD_NATIVE_DIR", self.native_dir(target)) .env("RUSTC", self.out.join("bootstrap/debug/rustc")) .env("RUSTC_REAL", self.rustc(compiler)) .env("RUSTC_STAGE", stage.to_string()) .env("RUSTC_DEBUG_ASSERTIONS", self.config.rust_debug_assertions.to_string()) .env("RUSTC_SYSROOT", self.sysroot(compiler)) .env("RUSTC_LIBDIR", self.rustc_libdir(compiler)) .env("RUSTC_RPATH", self.config.rust_rpath.to_string()) .env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc")) .env("RUSTDOC_REAL", if cmd == "doc" || cmd == "test" { self.rustdoc(compiler.host) } else { PathBuf::from("/path/to/nowhere/rustdoc/not/required") }) .env("TEST_MIRI", self.config.test_miri.to_string()) .env("RUSTC_ERROR_METADATA_DST", self.extended_error_dir()); if let Some(n) = self.config.rust_codegen_units { cargo.env("RUSTC_CODEGEN_UNITS", n.to_string()); } if let Some(host_linker) = self.build.linker(compiler.host) { cargo.env("RUSTC_HOST_LINKER", host_linker); } if let Some(target_linker) = self.build.linker(target) { cargo.env("RUSTC_TARGET_LINKER", target_linker); } if cmd != "build" { cargo.env("RUSTDOC_LIBDIR", self.rustc_libdir(self.compiler(2, self.build.build))); } if mode != Mode::Tool { // Tools don't get debuginfo right now, e.g. cargo and rls don't // get compiled with debuginfo. // Adding debuginfo increases their sizes by a factor of 3-4. cargo.env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string()); cargo.env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string()); cargo.env("RUSTC_FORCE_UNSTABLE", "1"); // Currently the compiler depends on crates from crates.io, and // then other crates can depend on the compiler (e.g. proc-macro // crates). Let's say, for example that rustc itself depends on the // bitflags crate. 
If an external crate then depends on the // bitflags crate as well, we need to make sure they don't // conflict, even if they pick the same version of bitflags. We'll // want to make sure that e.g. a plugin and rustc each get their // own copy of bitflags. // Cargo ensures that this works in general through the -C metadata // flag. This flag will frob the symbols in the binary to make sure // they're different, even though the source code is the exact // same. To solve this problem for the compiler we extend Cargo's // already-passed -C metadata flag with our own. Our rustc.rs // wrapper around the actual rustc will detect -C metadata being // passed and frob it with this extra string we're passing in. cargo.env("RUSTC_METADATA_SUFFIX", "rustc"); } if let Some(x) = self.crt_static(target) { cargo.env("RUSTC_CRT_STATIC", x.to_string()); } // Enable usage of unstable features cargo.env("RUSTC_BOOTSTRAP", "1"); self.add_rust_test_threads(&mut cargo); // Almost all of the crates that we compile as part of the bootstrap may // have a build script, including the standard library. To compile a // build script, however, it itself needs a standard library! This // introduces a bit of a pickle when we're compiling the standard // library itself. // // To work around this we actually end up using the snapshot compiler // (stage0) for compiling build scripts of the standard library itself. // The stage0 compiler is guaranteed to have a libstd available for use. // // For other crates, however, we know that we've already got a standard // library up and running, so we can use the normal compiler to compile // build scripts in that situation. // // If LLVM support is disabled we need to use the snapshot compiler to compile // build scripts, as the new compiler doesnt support executables. 
if mode == Mode::Libstd || !self.build.config.llvm_enabled { cargo.env("RUSTC_SNAPSHOT", &self.initial_rustc) .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir()); } else { cargo.env("RUSTC_SNAPSHOT", self.rustc(compiler)) .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_libdir(compiler)); } // Ignore incremental modes except for stage0, since we're // not guaranteeing correctness across builds if the compiler // is changing under your feet.` if self.config.incremental && compiler.stage == 0 { let incr_dir = self.incremental_dir(compiler); cargo.env("RUSTC_INCREMENTAL", incr_dir); } if let Some(ref on_fail) = self.config.on_fail { cargo.env("RUSTC_ON_FAIL", on_fail); } cargo.env("RUSTC_VERBOSE", format!("{}", self.verbosity)); // Throughout the build Cargo can execute a number of build scripts // compiling C/C++ code and we need to pass compilers, archivers, flags, etc // obtained previously to those build scripts. // Build scripts use either the `cc` crate or `configure/make` so we pass // the options through environment variables that are fetched and understood by both. 
// // FIXME: the guard against msvc shouldn't need to be here if !target.contains("msvc") { let cc = self.cc(target); cargo.env(format!("CC_{}", target), cc) .env("CC", cc); let cflags = self.cflags(target).join(" "); cargo.env(format!("CFLAGS_{}", target), cflags.clone()) .env("CFLAGS", cflags.clone()); if let Some(ar) = self.ar(target) { let ranlib = format!("{} s", ar.display()); cargo.env(format!("AR_{}", target), ar) .env("AR", ar) .env(format!("RANLIB_{}", target), ranlib.clone()) .env("RANLIB", ranlib); } if let Ok(cxx) = self.cxx(target) { cargo.env(format!("CXX_{}", target), cxx) .env("CXX", cxx) .env(format!("CXXFLAGS_{}", target), cflags.clone()) .env("CXXFLAGS", cflags); } } if mode == Mode::Libstd && self.config.extended && compiler.is_final_stage(self) { cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string()); } // For `cargo doc` invocations, make rustdoc print the Rust version into the docs cargo.env("RUSTDOC_CRATE_VERSION", self.build.rust_version()); // Environment variables *required* throughout the build // // FIXME: should update code to not require this env var cargo.env("CFG_COMPILER_HOST_TRIPLE", target); // Set this for all builds to make sure doc builds also get it. cargo.env("CFG_RELEASE_CHANNEL", &self.build.config.channel); // This one's a bit tricky. As of the time of this writing the compiler // links to the `winapi` crate on crates.io. This crate provides raw // bindings to Windows system functions, sort of like libc does for // Unix. This crate also, however, provides "import libraries" for the // MinGW targets. There's an import library per dll in the windows // distribution which is what's linked to. These custom import libraries // are used because the winapi crate can reference Windows functions not // present in the MinGW import libraries. // // For example MinGW may ship libdbghelp.a, but it may not have // references to all the functions in the dbghelp dll. 
Instead the // custom import library for dbghelp in the winapi crates has all this // information. // // Unfortunately for us though the import libraries are linked by // default via `-ldylib=winapi_foo`. That is, they're linked with the // `dylib` type with a `winapi_` prefix (so the winapi ones don't // conflict with the system MinGW ones). This consequently means that // the binaries we ship of things like rustc_trans (aka the rustc_trans // DLL) when linked against *again*, for example with procedural macros // or plugins, will trigger the propagation logic of `-ldylib`, passing // `-lwinapi_foo` to the linker again. This isn't actually available in // our distribution, however, so the link fails. // // To solve this problem we tell winapi to not use its bundled import // libraries. This means that it will link to the system MinGW import // libraries by default, and the `-ldylib=foo` directives will still get // passed to the final linker, but they'll look like `-lfoo` which can // be resolved because MinGW has the import library. The downside is we // don't get newer functions from Windows, but we don't use any of them // anyway. cargo.env("WINAPI_NO_BUNDLED_LIBRARIES", "1"); if self.is_very_verbose() { cargo.arg("-v"); } if self.config.rust_optimize { // FIXME: cargo bench does not accept `--release` if cmd != "bench" { cargo.arg("--release"); } if self.config.rust_codegen_units.is_none() && self.build.is_rust_llvm(compiler.host) { cargo.env("RUSTC_THINLTO", "1"); } } if self.config.locked_deps { cargo.arg("--locked"); } if self.config.vendor || self.is_sudo { cargo.arg("--frozen"); } self.ci_env.force_coloring_in_ci(&mut cargo); cargo } /// Ensure that a given step is built, returning it's output. This will /// cache the step, so it is safe (and good!) to call this as often as /// needed to ensure that all dependencies are built. 
pub fn ensure<S: Step>(&'a self, step: S) -> S::Output { { let mut stack = self.stack.borrow_mut(); for stack_step in stack.iter() { // should skip if stack_step.downcast_ref::<S>().map_or(true, |stack_step| *stack_step != step) { continue; } let mut out = String::new(); out += &format!("\n\nCycle in build detected when adding {:?}\n", step); for el in stack.iter().rev() { out += &format!("\t{:?}\n", el); } panic!(out); } if let Some(out) = self.cache.get(&step) { self.build.verbose(&format!("{}c {:?}", " ".repeat(stack.len()), step)); return out; } self.build.verbose(&format!("{}> {:?}", " ".repeat(stack.len()), step)); stack.push(Box::new(step.clone())); } let out = step.clone().run(self); { let mut stack = self.stack.borrow_mut(); let cur_step = stack.pop().expect("step stack empty"); assert_eq!(cur_step.downcast_ref(), Some(&step)); } self.build.verbose(&format!("{}< {:?}", " ".repeat(self.stack.borrow().len()), step)); self.cache.put(step, out.clone()); out } }
39.881051
98
0.577339
d5fa2805b7544ac8ac7c351e71f848ab2e9b231f
297
use super::{Context, Executable}; use crate::{syntax::ast::node::Spread, Result, Value}; impl Executable for Spread { fn run(&self, interpreter: &mut Context) -> Result<Value> { // TODO: for now we can do nothing but return the value as-is self.val().run(interpreter) } }
29.7
69
0.649832
03aff3c5bd4e0be98f47802fbe7e925f88debe20
11,585
use super::{EntryType, ShrinkBehavior}; use std::collections::BinaryHeap; use std::path::Path; use std::sync::Arc; use std::time::{self, Duration}; use bytes::Bytes; use futures::future; use hashing::{Digest, Fingerprint, EMPTY_DIGEST}; use lmdb::Error::NotFound; use lmdb::{self, Cursor, Transaction}; use sharded_lmdb::{ShardedLmdb, VersionedFingerprint}; use workunit_store::ObservationMetric; #[derive(Debug, Clone)] pub struct ByteStore { inner: Arc<InnerStore>, } #[derive(Debug)] struct InnerStore { // Store directories separately from files because: // 1. They may have different lifetimes. // 2. It's nice to know whether we should be able to parse something as a proto. file_dbs: Result<Arc<ShardedLmdb>, String>, directory_dbs: Result<Arc<ShardedLmdb>, String>, executor: task_executor::Executor, } impl ByteStore { pub fn new<P: AsRef<Path>>( executor: task_executor::Executor, path: P, ) -> Result<ByteStore, String> { Self::new_with_options(executor, path, super::LocalOptions::default()) } pub fn new_with_options<P: AsRef<Path>>( executor: task_executor::Executor, path: P, options: super::LocalOptions, ) -> Result<ByteStore, String> { let root = path.as_ref(); let files_root = root.join("files"); let directories_root = root.join("directories"); Ok(ByteStore { inner: Arc::new(InnerStore { file_dbs: ShardedLmdb::new( files_root, options.files_max_size_bytes, executor.clone(), options.lease_time, options.shard_count, ) .map(Arc::new), directory_dbs: ShardedLmdb::new( directories_root, options.directories_max_size_bytes, executor.clone(), options.lease_time, options.shard_count, ) .map(Arc::new), executor, }), }) } pub fn executor(&self) -> &task_executor::Executor { &self.inner.executor } pub async fn entry_type(&self, fingerprint: Fingerprint) -> Result<Option<EntryType>, String> { if fingerprint == EMPTY_DIGEST.hash { // Technically this is valid as both; choose Directory in case a caller is checking whether // it _can_ be a Directory. 
return Ok(Some(EntryType::Directory)); } // In parallel, check for the given fingerprint in both databases. let d_dbs = self.inner.directory_dbs.clone()?; let is_dir = d_dbs.exists(fingerprint); let f_dbs = self.inner.file_dbs.clone()?; let is_file = f_dbs.exists(fingerprint); // TODO: Could technically use select to return slightly more quickly with the first // affirmative answer, but this is simpler. match future::try_join(is_dir, is_file).await? { (true, _) => Ok(Some(EntryType::Directory)), (_, true) => Ok(Some(EntryType::File)), (false, false) => Ok(None), } } pub async fn lease_all( &self, digests: impl Iterator<Item = (Digest, EntryType)>, ) -> Result<(), String> { // NB: Lease extension happens periodically in the background, so this code needn't be parallel. for (digest, entry_type) in digests { let dbs = match entry_type { EntryType::File => self.inner.file_dbs.clone(), EntryType::Directory => self.inner.directory_dbs.clone(), }; dbs? .lease(digest.hash) .await .map_err(|err| format!("Error leasing digest {:?}: {}", digest, err))?; } Ok(()) } /// /// Attempts to shrink the stored files to be no bigger than target_bytes /// (excluding lmdb overhead). /// /// Returns the size it was shrunk to, which may be larger than target_bytes. /// /// TODO: Use LMDB database statistics when lmdb-rs exposes them. 
/// pub fn shrink( &self, target_bytes: usize, shrink_behavior: ShrinkBehavior, ) -> Result<usize, String> { let mut used_bytes: usize = 0; let mut fingerprints_by_expired_ago = BinaryHeap::new(); self.aged_fingerprints( EntryType::File, &mut used_bytes, &mut fingerprints_by_expired_ago, )?; self.aged_fingerprints( EntryType::Directory, &mut used_bytes, &mut fingerprints_by_expired_ago, )?; while used_bytes > target_bytes { let aged_fingerprint = fingerprints_by_expired_ago .pop() .expect("lmdb corruption detected, sum of size of blobs exceeded stored blobs"); if aged_fingerprint.expired_seconds_ago == 0 { // Ran out of expired blobs - everything remaining is leased and cannot be collected. return Ok(used_bytes); } let lmdbs = match aged_fingerprint.entry_type { EntryType::File => self.inner.file_dbs.clone(), EntryType::Directory => self.inner.directory_dbs.clone(), }; let (env, database, lease_database) = lmdbs.clone()?.get(&aged_fingerprint.fingerprint); { env .begin_rw_txn() .and_then(|mut txn| { let key = VersionedFingerprint::new(aged_fingerprint.fingerprint, ShardedLmdb::SCHEMA_VERSION); txn.del(database, &key, None)?; txn .del(lease_database, &key, None) .or_else(|err| match err { NotFound => Ok(()), err => Err(err), })?; used_bytes -= aged_fingerprint.size_bytes; txn.commit() }) .map_err(|err| format!("Error garbage collecting: {}", err))?; } } if shrink_behavior == ShrinkBehavior::Compact { self.inner.file_dbs.clone()?.compact()?; } Ok(used_bytes) } fn aged_fingerprints( &self, entry_type: EntryType, used_bytes: &mut usize, fingerprints_by_expired_ago: &mut BinaryHeap<AgedFingerprint>, ) -> Result<(), String> { let database = match entry_type { EntryType::File => self.inner.file_dbs.clone(), EntryType::Directory => self.inner.directory_dbs.clone(), }; for &(ref env, ref database, ref lease_database) in &database?.all_lmdbs() { let txn = env .begin_ro_txn() .map_err(|err| format!("Error beginning transaction to garbage collect: {}", err))?; let mut cursor = 
txn .open_ro_cursor(*database) .map_err(|err| format!("Failed to open lmdb read cursor: {}", err))?; for (key, bytes) in cursor.iter() { *used_bytes += bytes.len(); // Random access into the lease_database is slower than iterating, but hopefully garbage // collection is rare enough that we can get away with this, rather than do two passes // here (either to populate leases into pre-populated AgedFingerprints, or to read sizes // when we delete from lmdb to track how much we've freed). let lease_until_unix_timestamp = txn .get(*lease_database, &key) .map(|b| { let mut array = [0_u8; 8]; array.copy_from_slice(b); u64::from_le_bytes(array) }) .unwrap_or_else(|e| match e { NotFound => 0, e => panic!("Error reading lease, probable lmdb corruption: {:?}", e), }); let leased_until = time::UNIX_EPOCH + Duration::from_secs(lease_until_unix_timestamp); let expired_seconds_ago = time::SystemTime::now() .duration_since(leased_until) .map(|t| t.as_secs()) // 0 indicates unleased. .unwrap_or(0); let v = VersionedFingerprint::from_bytes_unsafe(key); let fingerprint = v.get_fingerprint(); fingerprints_by_expired_ago.push(AgedFingerprint { expired_seconds_ago, fingerprint, size_bytes: bytes.len(), entry_type, }); } } Ok(()) } pub async fn remove(&self, entry_type: EntryType, digest: Digest) -> Result<bool, String> { let dbs = match entry_type { EntryType::Directory => self.inner.directory_dbs.clone(), EntryType::File => self.inner.file_dbs.clone(), }; dbs?.remove(digest.hash).await } pub async fn store_bytes( &self, entry_type: EntryType, bytes: Bytes, initial_lease: bool, ) -> Result<Digest, String> { let dbs = match entry_type { EntryType::Directory => self.inner.directory_dbs.clone(), EntryType::File => self.inner.file_dbs.clone(), }; let bytes2 = bytes.clone(); let digest = self .inner .executor .spawn_blocking(move || Digest::of_bytes(&bytes)) .await; dbs?.store_bytes(digest.hash, bytes2, initial_lease).await?; Ok(digest) } /// /// Loads bytes from the underlying LMDB store 
using the given function. Because the database is /// blocking, this accepts a function that views a slice rather than returning a clone of the /// data. The upshot is that the database is able to provide slices directly into shared memory. /// /// The provided function is guaranteed to be called in a context where it is safe to block. /// pub async fn load_bytes_with<T: Send + 'static, F: Fn(&[u8]) -> T + Send + Sync + 'static>( &self, entry_type: EntryType, digest: Digest, f: F, ) -> Result<Option<T>, String> { if digest == EMPTY_DIGEST { // Avoid I/O for this case. This allows some client-provided operations (like merging // snapshots) to work without needing to first store the empty snapshot. // // To maintain the guarantee that the given function is called in a blocking context, we // spawn it as a task. return Ok(Some(self.executor().spawn_blocking(move || f(&[])).await)); } if let Some(workunit_store_handle) = workunit_store::get_workunit_store_handle() { workunit_store_handle.store.record_observation( ObservationMetric::LocalStoreReadBlobSize, digest.size_bytes as u64, ); } let dbs = match entry_type { EntryType::Directory => self.inner.directory_dbs.clone(), EntryType::File => self.inner.file_dbs.clone(), }; dbs?.load_bytes_with(digest.hash, move |bytes| { if bytes.len() == digest.size_bytes { Ok(f(bytes)) } else { Err(format!("Got hash collision reading from store - digest {:?} was requested, but retrieved bytes with that fingerprint had length {}. Congratulations, you may have broken sha256! 
Underlying bytes: {:?}", digest, bytes.len(), bytes)) } }).await } pub fn all_digests(&self, entry_type: EntryType) -> Result<Vec<Digest>, String> { let database = match entry_type { EntryType::File => self.inner.file_dbs.clone(), EntryType::Directory => self.inner.directory_dbs.clone(), }; let mut digests = vec![]; for &(ref env, ref database, ref _lease_database) in &database?.all_lmdbs() { let txn = env .begin_ro_txn() .map_err(|err| format!("Error beginning transaction to garbage collect: {}", err))?; let mut cursor = txn .open_ro_cursor(*database) .map_err(|err| format!("Failed to open lmdb read cursor: {}", err))?; for (key, bytes) in cursor.iter() { let v = VersionedFingerprint::from_bytes_unsafe(key); let fingerprint = v.get_fingerprint(); digests.push(Digest::new(fingerprint, bytes.len())); } } Ok(digests) } } #[derive(Eq, PartialEq, Ord, PartialOrd)] struct AgedFingerprint { // expired_seconds_ago must be the first field for the Ord implementation. expired_seconds_ago: u64, fingerprint: Fingerprint, size_bytes: usize, entry_type: EntryType, }
33.973607
247
0.631852
eba8fdf1031dcaeee83d49491a5d68d82da3a158
4,086
use crate::{data_type, private, sys}; /// This tensor format specifies that the data is laid out in the following order: batch size, /// feature maps, rows, columns. The strides are implicitly defined in such a way that the data /// are contiguous in memory with no padding between images, feature maps, rows, and columns; /// the columns are the inner dimension and the images are the outermost dimension. #[allow(non_camel_case_types)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct NCHW; /// This tensor format specifies that the data is laid out in the following order: batch size, /// rows, columns, feature maps. The strides are implicitly defined in such a way that the data /// are contiguous in memory with no padding between images, rows, columns, and feature maps; the /// feature maps are the inner dimension and the images are the outermost dimension. #[allow(non_camel_case_types)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct NHWC; /// This tensor format specifies that the data is laid out in the following order: batch size, /// feature maps, rows, columns. However, each element of the tensor is a vector of multiple /// feature maps. /// /// This format is only supported with tensor data types [`i8`] and [`u8`]. #[allow(non_camel_case_types)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct NCHWVectC8x4; /// This tensor format specifies that the data is laid out in the following order: batch size, /// feature maps, rows, columns. However, each element of the tensor is a vector of multiple /// feature maps. /// /// This format is only supported with tensor data types [`i8`] and [`u8`]. #[allow(non_camel_case_types)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct NCHWVectC8x32; /// Specifies the raw type for the given tensor memory format. pub trait TensorFormat: private::Sealed { fn into_raw() -> sys::cudnnTensorFormat_t; } macro_rules! 
impl_cudnn_tensor_format { ($safe_type:ident, $raw_type:ident) => { impl private::Sealed for $safe_type {} impl TensorFormat for $safe_type { fn into_raw() -> sys::cudnnTensorFormat_t { sys::cudnnTensorFormat_t::$raw_type } } }; } impl_cudnn_tensor_format!(NCHW, CUDNN_TENSOR_NCHW); impl_cudnn_tensor_format!(NHWC, CUDNN_TENSOR_NHWC); impl_cudnn_tensor_format!(NCHWVectC8x4, CUDNN_TENSOR_NCHW_VECT_C); impl_cudnn_tensor_format!(NCHWVectC8x32, CUDNN_TENSOR_NCHW_VECT_C); /// Specifies the type supported by each tensor format. pub trait SupportedType<T> where Self: TensorFormat, T: data_type::DataType, { fn data_type() -> sys::cudnnDataType_t; } macro_rules! impl_cudnn_supported_type { ($tensor_format:ident, $safe_type:ident, $raw_type:ident) => { impl SupportedType<$safe_type> for $tensor_format { fn data_type() -> sys::cudnnDataType_t { sys::cudnnDataType_t::$raw_type } } }; } /// Data types supported by the NCHW tensor format. impl_cudnn_supported_type!(NCHW, f32, CUDNN_DATA_FLOAT); impl_cudnn_supported_type!(NCHW, f64, CUDNN_DATA_DOUBLE); impl_cudnn_supported_type!(NCHW, i8, CUDNN_DATA_INT8); impl_cudnn_supported_type!(NCHW, u8, CUDNN_DATA_UINT8); impl_cudnn_supported_type!(NCHW, i32, CUDNN_DATA_INT32); impl_cudnn_supported_type!(NCHW, i64, CUDNN_DATA_INT64); /// Data types supported by the NHWC tensor format. impl_cudnn_supported_type!(NHWC, f32, CUDNN_DATA_FLOAT); impl_cudnn_supported_type!(NHWC, f64, CUDNN_DATA_DOUBLE); impl_cudnn_supported_type!(NHWC, i8, CUDNN_DATA_INT8); impl_cudnn_supported_type!(NHWC, u8, CUDNN_DATA_UINT8); impl_cudnn_supported_type!(NHWC, i32, CUDNN_DATA_INT32); impl_cudnn_supported_type!(NHWC, i64, CUDNN_DATA_INT64); /// Data types supported by the NCHWVectC8x4 tensor format. impl_cudnn_supported_type!(NCHWVectC8x4, i8, CUDNN_DATA_INT8x4); impl_cudnn_supported_type!(NCHWVectC8x4, u8, CUDNN_DATA_UINT8x4); /// Data types supported by the NCHWVectC8x32 tensor format. impl_cudnn_supported_type!(NCHWVectC8x32, i8, CUDNN_DATA_INT8x32);
40.86
97
0.746451
ab449a6c24ff7b0f6c1d7157b344a1206fe024c7
2,107
use crate::net::ipv6::ip_utils::IPAddr; use crate::net::ipv6::ipv6::IP6Header; use crate::net::ipv6::ipv6_recv::IP6RecvClient; use crate::net::udp::udp::UDPHeader; use kernel::common::cells::OptionalCell; use kernel::debug; /// The UDP driver implements this client interface trait to receive /// packets passed up the network stack to the UDPReceiver, and then /// distributes them to userland applications from there. /// Kernel apps can also instantiate structs that implement this trait /// in order to receive UDP packets pub trait UDPRecvClient { fn receive( &self, src_addr: IPAddr, dst_addr: IPAddr, src_port: u16, dst_port: u16, payload: &[u8], ); } /// This struct is set as the client of an IP6Receiver, and passes /// received packets up to whatever app layer client assigns itself /// as the UDPRecvClient held by this UDPReciever. pub struct UDPReceiver<'a> { client: OptionalCell<&'a dyn UDPRecvClient>, } impl<'a> UDPReceiver<'a> { pub fn new() -> UDPReceiver<'a> { UDPReceiver { client: OptionalCell::empty(), } } pub fn set_client(&self, client: &'a dyn UDPRecvClient) { self.client.set(client); } } impl<'a> IP6RecvClient for UDPReceiver<'a> { fn receive(&self, ip_header: IP6Header, payload: &[u8]) { match UDPHeader::decode(payload).done() { Some((offset, udp_header)) => { let len = udp_header.get_len() as usize; if len > payload.len() { // TODO: ERROR debug!("[UDP_RECV] Error: UDP length too long"); return; } self.client.map(|client| { client.receive( ip_header.get_src_addr(), ip_header.get_dst_addr(), udp_header.get_src_port(), udp_header.get_dst_port(), &payload[offset..], ); }); } None => {} } } }
31.447761
70
0.560038
186bcdf475e992fce976343e9abf0f73dfd39f23
934
//! Kernel configuration. use crate::PAGE_SIZE; /// Kernel configuration passed by kernel when calls [`crate::primary_init_early()`]. #[repr(C)] #[derive(Debug, Clone)] pub struct KernelConfig { /// boot cmd line pub cmdline: &'static str, /// firmware type pub firmware_type: &'static str, /// UART base address pub uart_base: usize, /// GIC base address pub gic_base: usize, /// phystovirt offset pub phys_to_virt_offset: usize, } pub const PHYS_MEMORY_BASE: usize = 0x4000_0000; pub const UART_SIZE: usize = 0x1000; pub const VIRTIO_BASE: usize = 0x0a00_0000; pub const VIRTIO_SIZE: usize = 0x100; pub const PA_1TB_BITS: usize = 40; pub const PHYS_ADDR_MAX: usize = (1 << PA_1TB_BITS) - 1; pub const PHYS_ADDR_MASK: usize = PHYS_ADDR_MAX & !(PAGE_SIZE - 1); pub const PHYS_MEMORY_END: usize = PHYS_MEMORY_BASE + 100 * 1024 * 1024; pub const USER_TABLE_FLAG: usize = 0xabcd_0000_0000_0000;
32.206897
85
0.713062
ff3ef766769f0f1fead8eec03accb103c440ca6a
6,264
use std::{ collections::HashMap, ops::{BitOr, Shl}, }; use super::{Encodable, Label}; #[derive(Clone, Copy, PartialEq)] pub enum Reg { R0 = 0, R1 = 1, R2 = 2, R3 = 3, R4 = 4, R5 = 5, R6 = 6, R7 = 7, R8 = 8, R9 = 9, R10 = 10, R11 = 11, R12 = 12, R13 = 13, R14 = 14, R15 = 15, SP, LR, PC, } impl Reg { const fn val(self) -> u32 { match self { Reg::SP => Reg::R13.val(), Reg::LR => Reg::R14.val(), Reg::PC => Reg::R15.val(), reg => reg as u32, } } } pub enum AddrMode { DecrAfter = 0, IncrAfter = 1, DecrBefore = 2, IncrBefore = 3, } pub enum AddrMode2 { Offset, PreIndexed, PostIndexed, } pub enum Op { Addi(Reg, Reg, u32), Adrl(Reg, Label), Adri(Reg, i32), Ldm(AddrMode, Reg, bool, Vec<Reg>), Ldri(AddrMode2, Reg, Reg, i16), Ldrl(Reg, Label), Movr(Reg, Reg), Movw(Reg, u32), Subi(Reg, Reg, u32), Stm(AddrMode, Reg, bool, Vec<Reg>), Svc(u32), Placeholder, } impl From<Op> for u32 { fn from(op: Op) -> u32 { match op { Op::Addi(rd, rn, imm) => 0xe2800000 | rn << 16 | rd << 12 | imm, Op::Adri(rn, imm) => { if imm < 0 { Op::Subi(rn, Reg::PC, -imm as u32).into() } else { Op::Addi(rn, Reg::PC, imm as u32).into() } } Op::Ldm(mode, rn, wb, regs) => regs.into_iter().fold( 0xe8100000 | mode << 23 | (wb as u32) << 21 | rn << 16, |acc, rn| acc | 1 << rn, ), Op::Ldri(mode, rt, rn, imm) => { let (index, wback) = match mode { AddrMode2::Offset => (1, 0), AddrMode2::PreIndexed => (1, 1), AddrMode2::PostIndexed => (0, 1), }; 0xe4100000 | index << 24 | if imm < 0 { 0 } else { 1 } << 23 | wback << 21 | rn << 16 | rt << 12 | imm.abs() as u32 } Op::Movr(rd, rm) => 0xe1a00000 | rd << 12 | rm, Op::Movw(rd, imm) => 0xe3000000 | (imm >> 12) << 16 | rd << 12 | ((1 << 12) - 1) & imm, Op::Stm(mode, rn, wb, regs) => regs.into_iter().fold( 0xe8000000 | mode << 23 | (wb as u32) << 21 | rn << 16, |acc, rn| acc | 1 << rn, ), Op::Subi(rd, rn, imm) => 0xe2400000 | rn << 16 | rd << 12 | imm, Op::Svc(imm) => 0xef000000 | imm, _ => 0, } } } impl Encodable<4> for Op { fn enc(self, off: usize, labs: &HashMap<Label, 
usize>) -> [u8; 4] { u32::from(match self { Op::Adrl(rn, label) => Op::Adri(rn, Self::res_lab(label, labs, off)), Op::Ldrl(rt, label) => Op::Ldri( AddrMode2::Offset, rt, Reg::PC, Self::res_lab(label, labs, off) as i16, ), op => op, }) .to_le_bytes() } fn calc_offset(op_offset: i32, label_offset: i32) -> i32 { label_offset - op_offset - 8 } } /// https://documentation-service.arm.com/static/5f8daeb7f86e16515cdb8c4e impl TinyAsm { /// Encoding of ADD (immediate): `ADD <Rd>, <Rn>, #<uimm12>`. pub fn addi(self, rd: Reg, rn: Option<Reg>, imm: u16) -> Self { self.op(Op::Addi(rd, rn.unwrap_or(rd), imm as u32)) } /// Encoding of ADR: `ADR <Rd>, <label>`. pub fn adrl(mut self, rd: Reg, label: Label) -> Self { self.relocs.push((self.buf.len(), Op::Adrl(rd, label))); self.op(Op::Placeholder) } /// Encoding of LDMIA: `LDMIA <Rn>{!}, <registers>`. pub fn ldmia<const T: usize>(self, rn: Reg, wb: bool, regs: [Reg; T]) -> Self { self.op(Op::Ldm(AddrMode::IncrAfter, rn, wb, regs.to_vec())) } /// Encoding of MOV (register): `MOV <Rd>, <Rm>`. pub fn movr(self, rd: Reg, rm: Reg) -> Self { self.op(Op::Movr(rd, rm)) } /// Encoding of MOVW (immediate): `MOVW <Rd>, #<imm16>`. pub fn movw(self, rd: Reg, imm: u16) -> Self { self.op(Op::Movw(rd, imm as u32)) } /// Encoding of LDR (immediate): `LDR <Rt>, [<Rn>{, #+/-<imm12>}]`, `LDR<Rt>, [<Rn>], #+/-<imm12>`, `LDR <Rt>, [<Rn>, #+/-<imm12>]!`. pub fn ldri(self, mode: AddrMode2, rn: Reg, rt: Reg, imm: i16) -> Self { self.op(Op::Ldri(mode, rn, rt, imm)) } /// Encoding of LDR (label): `LDR <Rt>, <label>`. pub fn ldrl(mut self, rn: Reg, label: Label) -> Self { self.relocs.push((self.buf.len(), Op::Ldrl(rn, label))); self.op(Op::Placeholder) } /// Encoding of POP: `POP <registers>`. pub fn pop<const T: usize>(self, regs: [Reg; T]) -> Self { self.ldmia(Reg::SP, true, regs) } /// Encoding of PUSH: `PUSH <registers>`. 
pub fn push<const T: usize>(self, regs: [Reg; T]) -> Self { self.stmdb(Reg::SP, true, regs) } /// Encoding of STMDB: `STMDB <Rn>{!}, <registers>`. pub fn stmdb<const T: usize>(self, rn: Reg, wb: bool, regs: [Reg; T]) -> Self { self.op(Op::Stm(AddrMode::DecrBefore, rn, wb, regs.to_vec())) } /// Encoding of SUB (immediate): `SUB <Rd>, <Rn>, #<uimm12>`. pub fn subi(self, rd: Reg, rn: Option<Reg>, imm: u16) -> Self { self.op(Op::Subi(rd, rn.unwrap_or(rd), imm as u32)) } /// Encoding of SVC: `SVC #<imm24>`. pub fn svc(self, imm: u32) -> Self { self.op(Op::Svc(imm)) } } impl BitOr<Reg> for u32 { type Output = u32; fn bitor(self, rhs: Reg) -> Self::Output { self | rhs.val() } } impl Shl<Reg> for u32 { type Output = u32; fn shl(self, rhs: Reg) -> Self::Output { self << rhs.val() } } impl Shl<u32> for Reg { type Output = u32; fn shl(self, rhs: u32) -> Self::Output { self.val() << rhs as Self::Output } } impl Shl<u32> for AddrMode { type Output = u32; fn shl(self, rhs: u32) -> Self::Output { (self as Self::Output) << rhs } } pub type TinyAsm = super::TinyAsm<Op, 4>;
27.116883
137
0.469668
9b185419e5db1ee443a359aea4c1aa3e8d42c771
25,813
use crate::ast; use crate::collections::HashMap; use crate::error::CompileError; use crate::index_scopes::IndexScopes; use crate::items::Items; use crate::query::{Build, BuildEntry, Function, Indexed, IndexedEntry, InstanceFunction, Query}; use crate::sources::Sources; use crate::traits::Resolve as _; use crate::warning::Warnings; use crate::SourceId; use runestick::{Call, CompileMeta, Hash, Item, Source, Span, Type}; use std::sync::Arc; /// Import to process. pub(crate) struct Import { pub(crate) item: Item, pub(crate) ast: ast::DeclUse, pub(crate) source: Arc<Source>, pub(crate) source_id: usize, } pub(crate) struct Indexer<'a> { loaded: &'a mut HashMap<Item, (SourceId, Span)>, sources: &'a mut Sources, pub(crate) source_id: SourceId, pub(crate) source: Arc<Source>, pub(crate) query: &'a mut Query, pub(crate) warnings: &'a mut Warnings, /// Imports to process. imports: &'a mut Vec<Import>, pub(crate) items: Items, pub(crate) scopes: IndexScopes, /// Set if we are inside of an impl block. impl_items: Vec<Item>, } impl<'a> Indexer<'a> { /// Construct a new indexer. pub(crate) fn new( item: Item, loaded: &'a mut HashMap<Item, (SourceId, Span)>, sources: &'a mut Sources, source_id: SourceId, source: Arc<Source>, query: &'a mut Query, warnings: &'a mut Warnings, imports: &'a mut Vec<Import>, ) -> Self { Self { loaded, sources, source_id, source, query, warnings, imports, items: Items::new(item.into_vec()), scopes: IndexScopes::new(), impl_items: Vec::new(), } } /// Construct the calling convention based on the parameters. fn call(generator: bool, is_async: bool) -> Call { if is_async { if generator { Call::Stream } else { Call::Async } } else if generator { Call::Generator } else { Call::Immediate } } /// Handle a filesystem module. 
pub(crate) fn handle_file_mod(&mut self, decl_mod: &ast::DeclMod) -> Result<(), CompileError> { let span = decl_mod.span(); let name = decl_mod.name.resolve(&*self.source)?; let _guard = self.items.push_name(name); let path = match self.source.path() { Some(path) => path, None => { return Err(CompileError::UnsupportedFileMod { span }); } }; let base = match path.parent() { Some(parent) => parent.join(name), None => { return Err(CompileError::UnsupportedFileMod { span }); } }; let candidates = [ base.join("mod").with_extension("rn"), base.with_extension("rn"), ]; let mut found = None; for path in &candidates[..] { if path.is_file() { found = Some(path); break; } } let path = match found { Some(path) => path, None => { return Err(CompileError::ModNotFound { path: base.to_owned(), span, }); } }; let item = self.items.item(); if let Some(existing) = self.loaded.insert(item.clone(), (self.source_id, span)) { return Err(CompileError::ModAlreadyLoaded { item: item.clone(), span, existing, }); } let source = match Source::from_path(path) { Ok(source) => source, Err(error) => { return Err(CompileError::ModFileError { span, path: path.to_owned(), error, }); } }; self.sources.insert(item, source); Ok(()) } } pub(crate) trait Index<T> { /// Walk the current type with the given item. 
fn index(&mut self, item: &T) -> Result<(), CompileError>; } impl Index<ast::DeclFile> for Indexer<'_> { fn index(&mut self, decl_file: &ast::DeclFile) -> Result<(), CompileError> { for (decl, semi_colon) in &decl_file.decls { if let Some(semi_colon) = semi_colon { if !decl.needs_semi_colon() { self.warnings .uneccessary_semi_colon(self.source_id, semi_colon.span()); } } self.index(decl)?; } Ok(()) } } impl Index<ast::DeclFn> for Indexer<'_> { fn index(&mut self, decl_fn: &ast::DeclFn) -> Result<(), CompileError> { let span = decl_fn.span(); let is_toplevel = self.items.is_empty(); let _guard = self.items.push_name(decl_fn.name.resolve(&*self.source)?); let item = self.items.item(); let guard = self.scopes.push_function(decl_fn.async_.is_some()); for (arg, _) in &decl_fn.args.items { match arg { ast::FnArg::Self_(s) => { let span = s.span(); self.scopes.declare("self", span)?; } ast::FnArg::Ident(ident) => { let span = ident.span(); let ident = ident.resolve(&*self.source)?; self.scopes.declare(ident, span)?; } _ => (), } } self.index(&decl_fn.body)?; let f = guard.into_function(span)?; let call = Self::call(f.generator, f.is_async); let fun = Function { ast: decl_fn.clone(), call, }; if decl_fn.is_instance() { let impl_item = self .impl_items .last() .ok_or_else(|| CompileError::InstanceFunctionOutsideImpl { span })?; let f = InstanceFunction { ast: fun.ast, impl_item: impl_item.clone(), instance_span: span, call: fun.call, }; // NB: all instance functions must be pre-emptively built, // because statically we don't know if they will be used or // not. self.query.queue.push_back(BuildEntry { item: item.clone(), build: Build::InstanceFunction(f), source: self.source.clone(), source_id: self.source_id, }); let meta = CompileMeta::Function { value_type: Type::Hash(Hash::type_hash(&item)), item: item.clone(), }; self.query.unit.borrow_mut().insert_meta(meta)?; } else if is_toplevel { // NB: immediately compile all toplevel functions. 
self.query.queue.push_back(BuildEntry { item: item.clone(), build: Build::Function(fun), source: self.source.clone(), source_id: self.source_id, }); self.query .unit .borrow_mut() .insert_meta(CompileMeta::Function { value_type: Type::Hash(Hash::type_hash(&item)), item, })?; } else { // NB: non toplevel functions can be indexed for later construction. self.query.index( item, IndexedEntry { indexed: Indexed::Function(fun), source: self.source.clone(), source_id: self.source_id, }, span, )?; } Ok(()) } } impl Index<ast::ExprBlock> for Indexer<'_> { fn index(&mut self, expr_block: &ast::ExprBlock) -> Result<(), CompileError> { let span = expr_block.span(); if let Some(..) = &expr_block.async_ { let _guard = self.items.push_async_block(); let guard = self.scopes.push_closure(true); for (expr, _) in &expr_block.exprs { self.index(expr)?; } if let Some(expr) = &expr_block.trailing_expr { self.index(&**expr)?; } let c = guard.into_closure(span)?; let captures = Arc::new(c.captures); let call = Self::call(c.generator, c.is_async); self.query.index_async_block( self.items.item(), expr_block.clone(), captures, call, self.source.clone(), self.source_id, )?; } else { let _guard = self.items.push_block(); let _guard = self.scopes.push_scope(); for (expr, _) in &expr_block.exprs { self.index(expr)?; } if let Some(expr) = &expr_block.trailing_expr { self.index(&**expr)?; } } Ok(()) } } impl Index<ast::ExprLet> for Indexer<'_> { fn index(&mut self, expr_let: &ast::ExprLet) -> Result<(), CompileError> { self.index(&expr_let.pat)?; self.index(&*expr_let.expr)?; Ok(()) } } impl Index<ast::Ident> for Indexer<'_> { fn index(&mut self, ident: &ast::Ident) -> Result<(), CompileError> { let span = ident.span(); let ident = ident.resolve(&*self.source)?; self.scopes.declare(ident, span)?; Ok(()) } } impl Index<ast::Pat> for Indexer<'_> { fn index(&mut self, pat: &ast::Pat) -> Result<(), CompileError> { match pat { ast::Pat::PatPath(pat_path) => { if let Some(ident) = 
pat_path.path.try_as_ident() { self.index(ident)?; } } ast::Pat::PatObject(pat_object) => { self.index(pat_object)?; } ast::Pat::PatVec(pat_vec) => { self.index(pat_vec)?; } ast::Pat::PatTuple(pat_tuple) => { self.index(pat_tuple)?; } ast::Pat::PatByte(..) => (), ast::Pat::PatIgnore(..) => (), ast::Pat::PatNumber(..) => (), ast::Pat::PatString(..) => (), ast::Pat::PatUnit(..) => (), ast::Pat::PatChar(..) => (), } Ok(()) } } impl Index<ast::PatTuple> for Indexer<'_> { fn index(&mut self, pat_tuple: &ast::PatTuple) -> Result<(), CompileError> { for (pat, _) in &pat_tuple.items { self.index(&**pat)?; } Ok(()) } } impl Index<ast::PatObject> for Indexer<'_> { fn index(&mut self, pat_object: &ast::PatObject) -> Result<(), CompileError> { for (field, _) in &pat_object.fields { if let Some((_, pat)) = &field.binding { self.index(pat)?; } else { match &field.key { ast::LitObjectKey::Ident(ident) => { self.index(ident)?; } ast::LitObjectKey::LitStr(..) => (), } } } Ok(()) } } impl Index<ast::PatVec> for Indexer<'_> { fn index(&mut self, pat_vec: &ast::PatVec) -> Result<(), CompileError> { for (pat, _) in &pat_vec.items { self.index(&**pat)?; } Ok(()) } } impl Index<ast::Expr> for Indexer<'_> { fn index(&mut self, expr: &ast::Expr) -> Result<(), CompileError> { match expr { ast::Expr::Self_(..) 
=> { self.scopes.mark_use("self"); } ast::Expr::Path(path) => { self.index(path)?; } ast::Expr::ExprLet(expr_let) => { self.index(expr_let)?; } ast::Expr::ExprBlock(block) => { self.index(block)?; } ast::Expr::ExprGroup(expr) => { self.index(&*expr.expr)?; } ast::Expr::ExprIf(expr_if) => { self.index(expr_if)?; } ast::Expr::ExprBinary(expr_binary) => { self.index(expr_binary)?; } ast::Expr::ExprMatch(expr_if) => { self.index(expr_if)?; } ast::Expr::Decl(decl) => { self.index(decl)?; } ast::Expr::ExprClosure(expr_closure) => { self.index(expr_closure)?; } ast::Expr::ExprWhile(expr_while) => { self.index(expr_while)?; } ast::Expr::ExprLoop(expr_loop) => { self.index(expr_loop)?; } ast::Expr::ExprFor(expr_for) => { self.index(expr_for)?; } ast::Expr::ExprIndexSet(expr_index_set) => { self.index(expr_index_set)?; } ast::Expr::ExprFieldAccess(expr_field_access) => { self.index(expr_field_access)?; } ast::Expr::ExprUnary(expr_unary) => { self.index(expr_unary)?; } ast::Expr::ExprIndexGet(expr_index_get) => { self.index(expr_index_get)?; } ast::Expr::ExprBreak(expr_break) => { self.index(expr_break)?; } ast::Expr::ExprYield(expr_yield) => { self.index(expr_yield)?; } ast::Expr::ExprReturn(expr_return) => { self.index(expr_return)?; } ast::Expr::ExprAwait(expr_await) => { self.index(expr_await)?; } ast::Expr::ExprTry(expr_try) => { self.index(expr_try)?; } ast::Expr::ExprSelect(expr_select) => { self.index(expr_select)?; } // ignored because they have no effect on indexing. ast::Expr::ExprCall(expr_call) => { self.index(expr_call)?; } ast::Expr::LitTemplate(lit_template) => { self.index(lit_template)?; } // NB: literals have nothing to index, they don't export language // items. ast::Expr::LitUnit(..) => (), ast::Expr::LitBool(..) => (), ast::Expr::LitByte(..) => (), ast::Expr::LitChar(..) => (), ast::Expr::LitNumber(..) => (), ast::Expr::LitObject(..) => (), ast::Expr::LitStr(..) => (), ast::Expr::LitByteStr(..) => (), ast::Expr::LitTuple(..) => (), ast::Expr::LitVec(..) 
=> (), // NB: macros have nothing to index, they don't export language // items. ast::Expr::ExprCallMacro(..) => (), } Ok(()) } } impl Index<ast::ExprIf> for Indexer<'_> { fn index(&mut self, expr_if: &ast::ExprIf) -> Result<(), CompileError> { self.index(&expr_if.condition)?; self.index(&*expr_if.block)?; for expr_else_if in &expr_if.expr_else_ifs { self.index(&expr_else_if.condition)?; self.index(&*expr_else_if.block)?; } if let Some(expr_else) = &expr_if.expr_else { self.index(&*expr_else.block)?; } Ok(()) } } impl Index<ast::ExprBinary> for Indexer<'_> { fn index(&mut self, expr_binary: &ast::ExprBinary) -> Result<(), CompileError> { self.index(&*expr_binary.lhs)?; self.index(&*expr_binary.rhs)?; Ok(()) } } impl Index<ast::ExprMatch> for Indexer<'_> { fn index(&mut self, expr_match: &ast::ExprMatch) -> Result<(), CompileError> { self.index(&*expr_match.expr)?; for (branch, _) in &expr_match.branches { if let Some((_, condition)) = &branch.condition { self.index(&**condition)?; } let _guard = self.scopes.push_scope(); self.index(&branch.pat)?; self.index(&*branch.body)?; } Ok(()) } } impl Index<ast::Condition> for Indexer<'_> { fn index(&mut self, condition: &ast::Condition) -> Result<(), CompileError> { match condition { ast::Condition::Expr(expr) => { self.index(&**expr)?; } ast::Condition::ExprLet(expr_let) => { self.index(&**expr_let)?; } } Ok(()) } } impl Index<ast::Decl> for Indexer<'_> { fn index(&mut self, decl: &ast::Decl) -> Result<(), CompileError> { match decl { ast::Decl::DeclUse(import) => { self.imports.push(Import { item: self.items.item(), ast: import.clone(), source: self.source.clone(), source_id: self.source_id, }); } ast::Decl::DeclEnum(decl_enum) => { let _guard = self.items.push_name(decl_enum.name.resolve(&*self.source)?); let span = decl_enum.span(); let enum_item = self.items.item(); self.query.index_enum( enum_item.clone(), self.source.clone(), self.source_id, span, )?; for (variant, body, _) in &decl_enum.variants { let _guard = 
self.items.push_name(variant.resolve(&*self.source)?); let span = variant.span(); self.query.index_variant( self.items.item(), enum_item.clone(), body.clone(), self.source.clone(), self.source_id, span, )?; } } ast::Decl::DeclStruct(decl_struct) => { let _guard = self .items .push_name(decl_struct.ident.resolve(&*self.source)?); self.query.index_struct( self.items.item(), decl_struct.clone(), self.source.clone(), self.source_id, )?; } ast::Decl::DeclFn(decl_fn) => { self.index(decl_fn)?; } ast::Decl::DeclImpl(decl_impl) => { let mut guards = Vec::new(); for ident in decl_impl.path.components() { guards.push(self.items.push_name(ident.resolve(&*self.source)?)); } self.impl_items.push(self.items.item()); for decl_fn in &decl_impl.functions { self.index(decl_fn)?; } self.impl_items.pop(); } ast::Decl::DeclMod(decl_mod) => { if let Some(body) = &decl_mod.body { let name = decl_mod.name.resolve(&*self.source)?; let _guard = self.items.push_name(name); self.index(&*body.file)?; } else { self.handle_file_mod(decl_mod)?; } } } Ok(()) } } impl Index<ast::Path> for Indexer<'_> { fn index(&mut self, path: &ast::Path) -> Result<(), CompileError> { if let Some(ident) = path.try_as_ident() { let ident = ident.resolve(&*self.source)?; self.scopes.mark_use(ident); } Ok(()) } } impl Index<ast::ExprWhile> for Indexer<'_> { fn index(&mut self, expr_while: &ast::ExprWhile) -> Result<(), CompileError> { let _guard = self.scopes.push_scope(); self.index(&expr_while.condition)?; self.index(&*expr_while.body)?; Ok(()) } } impl Index<ast::ExprLoop> for Indexer<'_> { fn index(&mut self, expr_loop: &ast::ExprLoop) -> Result<(), CompileError> { let _guard = self.scopes.push_scope(); self.index(&*expr_loop.body)?; Ok(()) } } impl Index<ast::ExprFor> for Indexer<'_> { fn index(&mut self, expr_for: &ast::ExprFor) -> Result<(), CompileError> { // NB: creating the iterator is evaluated in the parent scope. 
self.index(&*expr_for.iter)?; let _guard = self.scopes.push_scope(); self.index(&expr_for.var)?; self.index(&*expr_for.body)?; Ok(()) } } impl Index<ast::ExprClosure> for Indexer<'_> { fn index(&mut self, expr_closure: &ast::ExprClosure) -> Result<(), CompileError> { let _guard = self.items.push_closure(); let guard = self.scopes.push_closure(expr_closure.async_.is_some()); let span = expr_closure.span(); for (arg, _) in expr_closure.args.as_slice() { match arg { ast::FnArg::Self_(s) => { return Err(CompileError::UnsupportedSelf { span: s.span() }); } ast::FnArg::Ident(ident) => { let ident = ident.resolve(&*self.source)?; self.scopes.declare(ident, span)?; } ast::FnArg::Ignore(..) => (), } } self.index(&*expr_closure.body)?; let c = guard.into_closure(span)?; let captures = Arc::new(c.captures); let call = Self::call(c.generator, c.is_async); self.query.index_closure( self.items.item(), expr_closure.clone(), captures, call, self.source.clone(), self.source_id, )?; Ok(()) } } impl Index<ast::ExprIndexSet> for Indexer<'_> { fn index(&mut self, expr_index_set: &ast::ExprIndexSet) -> Result<(), CompileError> { self.index(&*expr_index_set.value)?; self.index(&*expr_index_set.index)?; self.index(&*expr_index_set.target)?; Ok(()) } } impl Index<ast::ExprFieldAccess> for Indexer<'_> { fn index(&mut self, expr_field_access: &ast::ExprFieldAccess) -> Result<(), CompileError> { self.index(&*expr_field_access.expr)?; Ok(()) } } impl Index<ast::ExprUnary> for Indexer<'_> { fn index(&mut self, expr_unary: &ast::ExprUnary) -> Result<(), CompileError> { self.index(&*expr_unary.expr)?; Ok(()) } } impl Index<ast::ExprIndexGet> for Indexer<'_> { fn index(&mut self, expr_index_get: &ast::ExprIndexGet) -> Result<(), CompileError> { self.index(&*expr_index_get.index)?; self.index(&*expr_index_get.target)?; Ok(()) } } impl Index<ast::ExprBreak> for Indexer<'_> { fn index(&mut self, expr_break: &ast::ExprBreak) -> Result<(), CompileError> { if let Some(expr) = &expr_break.expr { match 
expr { ast::ExprBreakValue::Expr(expr) => { self.index(&**expr)?; } ast::ExprBreakValue::Label(..) => (), } } Ok(()) } } impl Index<ast::ExprYield> for Indexer<'_> { fn index(&mut self, expr_yield: &ast::ExprYield) -> Result<(), CompileError> { let span = expr_yield.span(); self.scopes.mark_yield(span)?; if let Some(expr) = &expr_yield.expr { self.index(&**expr)?; } Ok(()) } } impl Index<ast::ExprReturn> for Indexer<'_> { fn index(&mut self, expr_return: &ast::ExprReturn) -> Result<(), CompileError> { if let Some(expr) = expr_return.expr.as_deref() { self.index(expr)?; } Ok(()) } } impl Index<ast::ExprAwait> for Indexer<'_> { fn index(&mut self, expr_await: &ast::ExprAwait) -> Result<(), CompileError> { let span = expr_await.span(); self.scopes.mark_await(span)?; self.index(&*expr_await.expr)?; Ok(()) } } impl Index<ast::ExprTry> for Indexer<'_> { fn index(&mut self, expr_try: &ast::ExprTry) -> Result<(), CompileError> { self.index(&*expr_try.expr)?; Ok(()) } } impl Index<ast::ExprSelect> for Indexer<'_> { fn index(&mut self, expr_select: &ast::ExprSelect) -> Result<(), CompileError> { self.scopes.mark_await(expr_select.span())?; for (branch, _) in &expr_select.branches { // NB: expression to evaluate future is evaled in parent scope. 
self.index(&*branch.expr)?; let _guard = self.scopes.push_scope(); self.index(&branch.pat)?; self.index(&*branch.body)?; } if let Some((branch, _)) = &expr_select.default_branch { let _guard = self.scopes.push_scope(); self.index(&*branch.body)?; } Ok(()) } } impl Index<ast::ExprCall> for Indexer<'_> { fn index(&mut self, expr_call: &ast::ExprCall) -> Result<(), CompileError> { for (expr, _) in expr_call.args.items.iter() { self.index(expr)?; } self.index(&*expr_call.expr)?; Ok(()) } } impl Index<ast::LitTemplate> for Indexer<'_> { fn index(&mut self, lit_template: &ast::LitTemplate) -> Result<(), CompileError> { let template = lit_template.resolve(&*self.source)?; for c in &template.components { match c { ast::TemplateComponent::Expr(expr) => { self.index(&**expr)?; } ast::TemplateComponent::String(..) => (), } } Ok(()) } }
30.225995
99
0.482121
01c4199aba352de44aafb4eaf12d41efc71bb9bc
1,486
pub struct IconDoDisturbOn { props: crate::Props, } impl yew::Component for IconDoDisturbOn { type Properties = crate::Props; type Message = (); fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self { Self { props } } fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender { true } fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender { false } fn view(&self) -> yew::prelude::Html { yew::prelude::html! { <svg class=self.props.class.unwrap_or("") width=self.props.size.unwrap_or(24).to_string() height=self.props.size.unwrap_or(24).to_string() viewBox="0 0 24 24" fill=self.props.fill.unwrap_or("none") stroke=self.props.color.unwrap_or("currentColor") stroke-width=self.props.stroke_width.unwrap_or(2).to_string() stroke-linecap=self.props.stroke_linecap.unwrap_or("round") stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round") > <svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0V0z" fill="none"/><path d="M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm0 18c-4.41 0-8-3.59-8-8s3.59-8 8-8 8 3.59 8 8-3.59 8-8 8zm-5-9h10v2H7z"/></svg> </svg> } } }
32.304348
289
0.574024
336b9851e49fb6b53243ad464648747c621ba2b3
218
#![feature(proc_macro, generators)] extern crate futures_await as futures; use futures::prelude::*; #[async] fn foo() -> Result<(), ()> { } #[async_stream(item = i32)] fn foos() -> Result<(), ()> { } fn main() {}
13.625
38
0.59633
5dbac4ade8a23ecc3350617319edcac16ad7ae5c
4,863
use console::{style, Term}; use rand::prelude::*; use std::io::{Read, Write}; use std::net::{TcpListener, TcpStream}; use std::str; use std::time::{Duration, SystemTime}; use std::thread::sleep; mod configuration; mod network; pub struct Runtime { canvas_width: u16, canvas_height: u16, config: configuration::Configuration, game_complete: bool, game_won: bool, display: Term, game_state: Vec<char>, } impl Runtime { // pub fn new() -> Runtime { // let mut runtime = Runtime { // canvas_height: 10, // canvas_width: 10, // config: configuration::Configuration { is_host: false }, // game_complete: false, // game_won: false, // display: Term::stdout(), // game_state: vec![], // }; // for n in 0..8 { // runtime.game_state.push(' '); // } // runtime // } pub fn new(terminal: Term, is_host: bool) -> Runtime { let mut runtime = Runtime { canvas_height: 10, canvas_width: 10, config: configuration::Configuration::new(), game_complete: false, game_won: false, display: terminal, game_state: vec![], }; for n in 0..8 { runtime.game_state.push(' '); } runtime.config.is_host = is_host; runtime } pub fn start(&mut self) -> Result<(), Box<dyn std::error::Error>> { // todo self.display.clear_screen()?; let (heigth, width) = self.display.size(); self.display.write_str(&"Loading...")?; sleep(Duration::from_secs(1)); // initialize game session if self.config.is_host { // to do: initialize connection let listener = TcpListener::bind(format!("127.0.0.1:{}", self.config.host_port))?; let responese_message = String::from("Hello from the host."); for stream in listener.incoming() { let mut stream = stream.unwrap(); loop { // let mut receive_response = String::new(); let mut host_buffer = [0; 1024]; stream.read(&mut host_buffer)?; println!("Server received: {}", str::from_utf8(&host_buffer).unwrap()); stream.write(&mut responese_message.as_bytes())?; stream.flush()?; sleep(Duration::from_secs(3)); } } } else { // we are now a client, make is so. 
// listener = TcpListener::bind(format!("127.0.0.1{}", self.config.host_port))?; let mut stream = TcpStream::connect(format!("127.0.0.1:{}", self.config.host_port)) .expect("Couldn't connect to the server!"); let sender = String::from("Hello there Mr. Wilson."); loop { stream.write(sender.as_bytes())?; stream.flush()?; let mut client_buffer = [0; 1024]; stream.read(&mut client_buffer)?; println!("Response: {}", str::from_utf8(&client_buffer).unwrap()); sleep(Duration::from_secs(3)); } } while !self.game_complete { // game loop self.game_complete = true; } self.draw(); Ok(()) } fn draw(&self) { let mut index = 0; self.display.clear_screen().unwrap(); self.display.move_cursor_to(0, 0).unwrap(); for n in 0..5 { let mut display_line = String::new(); if n % 2 == 0 { for p in 0..3 { match p { 0 => display_line.push(self.game_state[index]), 1 => { display_line.push('|'); display_line.push(self.game_state[index]); } 2 => { display_line.push('|'); display_line.push(self.game_state[index]); } _ => println!("nothing here!"), } } } else { display_line.push('-'); display_line.push('-'); display_line.push('-'); display_line.push('-'); display_line.push('-'); } self.display.write_str(&display_line).unwrap(); self.display.move_cursor_to(0, n + 1).unwrap(); index += 1; } // todo: remove sleep(Duration::from_secs(5)); } fn initialize_client(&self) { // this is where i collect the ip information. } }
30.204969
95
0.473576
676d16e997dd9ff7928b63d99c355f94709a3151
2,983
use std::{f64::consts::PI, fs::File, io::Write, path::Path}; use rustic_ray::{ shapes::Shape, shapes::Sphere, Canvas, Color, Intersection, Point, PointLight, Ray, Transformation, }; fn main() { let mut shape = Sphere::new(); shape.material.color = Color::new(1.0, 0.2, 1.0); draw_shape(&shape, "ch06_circle.ppm"); // shrink it along the y axis shape.transform = Transformation::new().scale(1.0, 0.5, 1.0).build(); draw_shape(&shape, "ch06_shrink_y.ppm"); // shrink it along the x axis shape.transform = Transformation::new().scale(0.5, 1.0, 1.0).build(); draw_shape(&shape, "ch06_shrink_x.ppm"); // shrink it and rotate it! shape.transform = Transformation::new() .scale(0.5, 1.0, 1.0) .rotate_z(PI / 4.0) .build(); draw_shape(&shape, "ch06_shrink_rotate.ppm"); // shrink it and skew it! shape.transform = Transformation::new() .scale(0.5, 1.0, 1.0) .shear(1.0, 0.0, 0.0, 0.0, 0.0, 0.0) .build(); draw_shape(&shape, "ch06_shrink_skew.ppm"); } fn draw_shape(shape: &Sphere, file_name: &str) { let light_position = Point::new(-10.0, 10.0, -10.0); let light_color = Color::new(1.0, 1.0, 1.0); let light = PointLight::new(light_position, light_color); let ray_origin = Point::new(0.0, 0.0, -5.0); let wall_z = 10.0; let wall_size = 7.0; let canvas_pixels = 400; let pixel_size = wall_size / canvas_pixels as f64; let half = wall_size / 2.0; let mut canvas = Canvas::new(canvas_pixels, canvas_pixels); for y in 0..canvas_pixels { let world_y = half - pixel_size * y as f64; for x in 0..canvas_pixels { let world_x = -half + pixel_size * x as f64; let position = Point::new(world_x, world_y, wall_z); let r = Ray::new(ray_origin, (position - ray_origin).normalize()); let xs = shape.intersect(r); if xs.is_some() { if let Some(hit) = Intersection::hit(&xs.unwrap()) { let point = r.position(hit.t); let normal = hit.object.normal_at(point, None); let eye = -r.direction; let color = hit .object .material() .lighting(shape, light, point, eye, normal, false); canvas.write_pixel(x, y, color); } } } } 
write_file(file_name, canvas.canvas_to_ppm().as_bytes()) } fn write_file(file_name: &str, ppm: &[u8]) { let path = Path::new(file_name); let display = path.display(); let mut file = match File::create(&path) { Err(why) => panic!("couldn't create {}: {}", display, why), Ok(file) => file, }; match file.write_all(ppm) { Err(why) => panic!("couldn't write to {}: {}", display, why), Ok(_) => println!("successfully wrote to {}", display), }; }
30.438776
87
0.555816
1af153e77c964eb7f3c35831cf7e3f96eef03c25
222
use std::env; fn main() { let target_os = env::var("CARGO_CFG_TARGET_OS"); match target_os.as_ref().map(|x| &**x) { Ok("macos") => println!("cargo:rustc-link-lib=framework=IOKit"), _ => {} } }
22.2
72
0.554054
deb0e4859f3f6a02216398612c6d85dc9e999833
349
#[doc = "Reader of register RD_KEY4_DATA1"] pub type R = crate::R<u32, super::RD_KEY4_DATA1>; #[doc = "Reader of field `KEY4_DATA1`"] pub type KEY4_DATA1_R = crate::R<u32, u32>; impl R { #[doc = "Bits 0:31"] #[inline(always)] pub fn key4_data1(&self) -> KEY4_DATA1_R { KEY4_DATA1_R::new((self.bits & 0xffff_ffff) as u32) } }
29.083333
59
0.633238
ff947d0f071099fa250f7efa52043486293817fd
619
#![no_main] #![no_std] #[allow(non_upper_case_globals)] #[allow(non_camel_case_types)] #[allow(non_snake_case)] use aux5::{entry, prelude::*, Delay, Leds}; /* Include the automatically generated bindings */ include!(concat!(env!("OUT_DIR"), "/bindings.rs")); #[entry] fn main() -> ! { let (mut delay, mut leds): (Delay, Leds) = aux5::init(); loop { for curr in 0..8 { let next = (curr + 1) % 8; let ticks = unsafe { saw() }; leds[next].on(); delay.delay_ms(ticks); leds[curr].off(); delay.delay_ms(ticks); } } }
20.633333
60
0.537964
1eaa1ab956ba7bc0949ee8d92ac94f4f41cbfaa8
1,213
/** * [76] Minimum Window Substring * * Given a string S and a string T, find the minimum window in S which will contain all the characters in T in complexity O(n). * * Example: * * * Input: S = "ADOBECODEBANC", T = "ABC" * Output: "BANC" * * * Note: * * * If there is no such window in S that covers all characters in T, return the empty string "". * If there is such window, you are guaranteed that there will always be only one unique minimum window in S. * * */ pub struct Solution {} // submission codes start here use std::collections::HashMap; impl Solution { pub fn min_window(s: String, t: String) -> String { if t.is_empty() || t.len() > s.len() { return "".to_owned(); } let (mut start, mut end) = (0_usize, 0_usize); let mut result = (0_usize, 0_usize); loop {} s[result.0..result.1].to_owned() } fn count_char(s: String) -> HashMap<char, i32> { let mut res = HashMap::new(); for ch in s.chars().into_iter() { *res.entry(ch).or_insert(0) += 1; } res } } // submission codes end #[cfg(test)] mod tests { use super::*; #[test] fn test_76() {} }
22.462963
127
0.577082
f8ca105358be7eb7cc67760c2b1d9a0881d2a0e8
989
use crate::asset::*;
use crate::attributes::*;
use crate::concept::{AoristConcept, AoristConceptBase, AoristRef, ConceptEnum};
use crate::template::*;
use abi_stable::std_types::ROption;
use aorist_attributes::*;
use aorist_concept::{aorist, Constrainable};
use aorist_paste::paste;
use aorist_primitives::AOption;
use aorist_primitives::{attribute, derived_schema, AString, AVec};
use derivative::Derivative;
#[cfg(feature = "python")]
use pyo3::prelude::*;
use serde::{Deserialize, Serialize};
use std::fmt::Debug;
use uuid::Uuid;

// Schema declared through the `derived_schema!` macro imported from
// `aorist_primitives` above: the macro expands into the full schema type, so
// the invocation below *is* the definition — edit here, not expanded output.
// Combines stats from two PolygonCollectionAsset sources into one record.
derived_schema! {
    name: PolygonCollectionStatsUnionSchema,
    sources:
      - first: PolygonCollectionAsset,
      - second: PolygonCollectionAsset,
    attributes:
      id: KeyStringIdentifier("Polygon Identifier", false),
      name: FreeText("Polygon name", true),
      wkt: WKTString("WKT string", false),
      stats: JSON("JSON string of stats", false)
    fields:
      first_stats_prefix: AString,
      second_stats_prefix: AString
}
30.90625
79
0.718908
226a88ca0962e86594514164390f41e2ed2183ac
3,008
//! ```elixir
//! defmodule Chain do
//!   def counter(next_pid, output) do
//!     output.("spawned")
//!
//!     receive do
//!       n ->
//!         output.("received #{n}")
//!         sent = send next_pid, n + 1
//!         output.("sent #{sent} to #{next_pid}")
//!     end
//!   end
//!
//!   def create_processes(n, output) when is_function(output, 1) do
//!     last =
//!       Enum.reduce(
//!         1..n,
//!         self(),
//!         fn (_, send_to) ->
//!           spawn(Chain, :counter, [send_to, output])
//!         end
//!       )
//!
//!     send(last, 0) # start the count by sending a zero to the last process
//!
//!     receive do # and wait for the result to come back to us
//!       final_answer when is_integer(final_answer) ->
//!         "Result is #{inspect(final_answer)}"
//!         final_answer
//!     end
//!   end
//!
//!   def console(n) do
//!     run(n, &console_output/1)
//!   end
//!
//!   def dom(n) do
//!     run(n, &dom_output/1)
//!   end
//!
//!   def none(n) do
//!     run(n, &none_output/1)
//!   end
//!
//!   def on_submit(event) do
//!     {:ok, event_target} = Lumen.Web.Event.target(event)
//!     {:ok, n_input} = Lumen.Web.HTMLFormElement.element(event_target, "n")
//!     value_string = Lumen.Web.HTMLInputElement.value(n_input)
//!     n = :erlang.binary_to_integer(value_string)
//!     dom(n)
//!   end
//!
//!   # Private Functions
//!
//!   defp console_output(text) do
//!     IO.puts("#{self()} #{text}")
//!   end
//!
//!   defp dom_output(text) do
//!     window = Lumen.Web.Window.window()
//!     document = Lumen.Web.Window.document(window)
//!     {:ok, tr} = Lumen.Web.Document.create_element(document, "tr")
//!
//!     {:ok, pid_text} = Lumen.Web.Document.create_text_node(document, to_string(self()))
//!     {:ok, pid_td} = Lumen.Web.Document.create_element(document, "td")
//!     Lumen.Web.Element.append_child(pid_td, pid_text);
//!     Lumen.Web.Element.append_child(tr, pid_td)
//!
//!     {:ok, text_text} = Lumen.Web.Document.create_text_node(document, text)
//!     {:ok, text_td} = Lumen.Web.Document.create_element(document, "td")
//!     Lumen.Web.Element.append_child(text_td, text_text);
//!     Lumen.Web.Element.append_child(tr, text_td)
//!
//!     {:ok, output} = Lumen.Web.Document.get_element_by_id("output")
//!     Lumen.Web.Element.append_child(output, tr)
//!   end
//!
//!   defp none_output(_text) do
//!     :ok
//!   end
//!
//!   defp run(n, output) when is_function(output, 1) do
//!     {time, value} = :timer.tc(Chain, :create_processes, [n, output])
//!     output.("Chain.run(#{n}) in #{time} microseconds")
//!     {time, value}
//!   end
//! end
//! ```

// One Rust submodule per compiled function; names appear to follow the
// Erlang `name_arity` convention (e.g. `counter_2` = `counter/2`), with
// `pub` for entry points and private for helpers — TODO confirm.
pub mod console_1;
mod console_output_1;
pub mod counter_2;
pub mod create_processes_2;
mod create_processes_reducer_2;
pub mod dom_1;
mod dom_output_1;
pub mod none_1;
mod none_output_1;
pub mod on_submit_1;
mod run_2;

use liblumen_alloc::erts::term::Atom;

/// The atom naming this module on the Erlang side (`Elixir.Chain`).
fn module() -> Atom {
    // unwrap: creating an atom from this fixed, valid string is expected to
    // succeed; failure would indicate an unrecoverable runtime condition.
    Atom::try_from_str("Elixir.Chain").unwrap()
}
28.11215
90
0.586769
bfad05ec22893a53239289516247c41439e06508
2,077
/*
 * Twilio - Api
 *
 * This is the public Twilio REST API.
 *
 * The version of the OpenAPI document: 1.25.0
 * Contact: [email protected]
 * Generated by: https://openapi-generator.tech
 */

// NOTE(review): machine-generated model (see header above) — prefer
// regenerating from the OpenAPI spec over hand-editing.

#[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)]
pub struct ApiV2010AccountMessageMessageFeedback {
    /// The SID of the Account that created the resource
    #[serde(rename = "account_sid", skip_serializing_if = "Option::is_none")]
    pub account_sid: Option<String>,
    /// The RFC 2822 date and time in GMT that the resource was created
    #[serde(rename = "date_created", skip_serializing_if = "Option::is_none")]
    pub date_created: Option<String>,
    /// The RFC 2822 date and time in GMT that the resource was last updated
    #[serde(rename = "date_updated", skip_serializing_if = "Option::is_none")]
    pub date_updated: Option<String>,
    /// The SID of the Message resource for which the feedback was provided
    #[serde(rename = "message_sid", skip_serializing_if = "Option::is_none")]
    pub message_sid: Option<String>,
    /// Whether the feedback has arrived
    #[serde(rename = "outcome", skip_serializing_if = "Option::is_none")]
    pub outcome: Option<Outcome>,
    /// The URI of the resource, relative to `https://api.twilio.com`
    #[serde(rename = "uri", skip_serializing_if = "Option::is_none")]
    pub uri: Option<String>,
}

impl ApiV2010AccountMessageMessageFeedback {
    pub fn new() -> ApiV2010AccountMessageMessageFeedback {
        // All-`None` instance; equivalent to the derived `Default`.
        ApiV2010AccountMessageMessageFeedback {
            account_sid: None,
            date_created: None,
            date_updated: None,
            message_sid: None,
            outcome: None,
            uri: None,
        }
    }
}

/// Whether the feedback has arrived
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Outcome {
    #[serde(rename = "confirmed")]
    Confirmed,
    #[serde(rename = "unconfirmed")]
    Unconfirmed,
}

impl Default for Outcome {
    // Generated default: the first declared variant.
    fn default() -> Outcome {
        Self::Confirmed
    }
}
32.453125
91
0.669234
9caf52e7ffcda713de6b28a127c5882b13316eac
509
use super::MutRaw;
use byteorder::{ByteOrder, LittleEndian};

/// Byte-granular write access on top of the raw mutable view supplied by
/// [`MutRaw`].
pub trait ByteWritable: MutRaw {
    /// Stores `data` at `addr`.
    fn write_byte(&mut self, addr: u32, data: u8) {
        // First byte of the raw slice returned for `addr`.
        self.mut_raw(addr)[0] = data;
    }
}

/// 16-bit write access; values are stored little-endian.
pub trait HalfWordWritable: MutRaw {
    /// Stores `data` at `addr` as two little-endian bytes.
    fn write_halfword(&mut self, addr: u32, data: u16) {
        LittleEndian::write_u16(self.mut_raw(addr), data);
    }
}

/// 32-bit write access; values are stored little-endian.
pub trait WordWritable: MutRaw {
    /// Stores `data` at `addr` as four little-endian bytes.
    fn write_word(&mut self, addr: u32, data: u32) {
        LittleEndian::write_u32(self.mut_raw(addr), data);
    }
}
24.238095
58
0.650295
0a7fe3c371a020b9b88638d9721bc7b61b46c236
3,298
#![allow(unreachable_pub)]
use crate::{runtime::context, task::JoinHandle};
use std::future::Future;

/// Factory which is used to configure the properties of a new task.
///
/// **Note**: This is an [unstable API][unstable]. The public API of this type
/// may break in 1.x releases. See [the documentation on unstable
/// features][unstable] for details.
///
/// Methods can be chained in order to configure it.
///
/// Currently, there is only one configuration option:
///
/// - [`name`], which specifies an associated name for
/// the task
///
/// There are three types of task that can be spawned from a Builder:
/// - [`spawn_local`] for executing futures on the current thread
/// - [`spawn`] for executing [`Send`] futures on the runtime
/// - [`spawn_blocking`] for executing blocking code in the
/// blocking thread pool.
///
/// ## Example
///
/// ```no_run
/// use tokio::net::{TcpListener, TcpStream};
///
/// use std::io;
///
/// async fn process(socket: TcpStream) {
///     // ...
/// # drop(socket);
/// }
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
///     let listener = TcpListener::bind("127.0.0.1:8080").await?;
///
///     loop {
///         let (socket, _) = listener.accept().await?;
///
///         tokio::task::Builder::new()
///             .name("tcp connection handler")
///             .spawn(async move {
///                 // Process each socket concurrently.
///                 process(socket).await
///             });
///     }
/// }
/// ```
/// [unstable API]: crate#unstable-features
/// [`name`]: Builder::name
/// [`spawn_local`]: Builder::spawn_local
/// [`spawn`]: Builder::spawn
/// [`spawn_blocking`]: Builder::spawn_blocking
#[derive(Default, Debug)]
#[cfg_attr(docsrs, doc(cfg(all(tokio_unstable, feature = "tracing"))))]
pub struct Builder<'a> {
    /// Optional task name; forwarded to the spawn internals below.
    name: Option<&'a str>,
}

impl<'a> Builder<'a> {
    /// Creates a new task builder.
    pub fn new() -> Self {
        Self::default()
    }

    /// Assigns a name to the task which will be spawned.
    //
    // Note: takes `&self` and returns a fresh `Builder` holding only the new
    // name (the name is currently the only field).
    pub fn name(&self, name: &'a str) -> Self {
        Self { name: Some(name) }
    }

    /// Spawns a task on the executor.
    ///
    /// See [`task::spawn`](crate::task::spawn) for
    /// more details.
    #[track_caller]
    pub fn spawn<Fut>(self, future: Fut) -> JoinHandle<Fut::Output>
    where
        Fut: Future + Send + 'static,
        Fut::Output: Send + 'static,
    {
        super::spawn::spawn_inner(future, self.name)
    }

    /// Spawns a task on the current thread.
    ///
    /// See [`task::spawn_local`](crate::task::spawn_local)
    /// for more details.
    #[track_caller]
    pub fn spawn_local<Fut>(self, future: Fut) -> JoinHandle<Fut::Output>
    where
        Fut: Future + 'static,
        Fut::Output: 'static,
    {
        super::local::spawn_local_inner(future, self.name)
    }

    /// Spawns blocking code on the blocking threadpool.
    ///
    /// See [`task::spawn_blocking`](crate::task::spawn_blocking)
    /// for more details.
    #[track_caller]
    pub fn spawn_blocking<Function, Output>(self, function: Function) -> JoinHandle<Output>
    where
        Function: FnOnce() -> Output + Send + 'static,
        Output: Send + 'static,
    {
        context::current().spawn_blocking_inner(function, self.name)
    }
}
29.185841
91
0.589145
4bc88105edb6b68e283f0ef431b3357e3038617d
10,947
use super::*; use rand::distributions::Alphanumeric; use rand::Rng; use std::collections::HashMap; use uuid::Uuid; #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] pub enum GenMethod { FullyInformed, VSelf, RandomInformed, Random, } fn convert(bits: Vec<u8>) -> i16 { bits.iter() .fold(0, |result, &bit| (result << 1) ^ bit as i16) } fn generate_number_in_range(bits: Vec<u8>, min: i16, max: i16) -> i16 { let c = convert(bits); //i16 ti u16 for the ranging thing let conv: u16 = match c { c if c > 0 => { let d: u16 = c as u16; d + 32767 } 0 => 32767u16, _ => (c + 32767) as u16, }; //conv div full range mult part range + min ((((conv) as f64) / (32767.0 * 2.0)) * (max - min) as f64 + min as f64) as i16 } fn gen_number(method: GenMethod, bits: Vec<u8>, param: NumDescriptor) -> i16 { match method { GenMethod::FullyInformed => match param { NumDescriptor::Range((s, e)) => generate_number_in_range(bits, s as i16, e as i16), NumDescriptor::List(lst) => { lst[generate_number_in_range(bits, 0, lst.len() as i16 - 1) as usize] as i16 } NumDescriptor::Random => convert(bits), }, GenMethod::VSelf => convert(bits), GenMethod::RandomInformed => { let mut rng = rand::thread_rng(); match param { NumDescriptor::Range((s, e)) => rng.gen_range(s..e) as i16, NumDescriptor::List(lst) => lst[rng.gen_range(0..lst.len())] as i16, NumDescriptor::Random => rng.gen_range(-32768..32767), } } GenMethod::Random => { let mut rng = rand::thread_rng(); rng.gen_range(-32768..32767) } } } fn gen_string(method: GenMethod, bits: Vec<u8>, param: StringDescriptor) -> String { match method { GenMethod::FullyInformed => { match param { StringDescriptor::Uuid(_) => { //future - from values vec //we currently only support v4 in the attacker /* match v{ 1=> Uuid::new_v1(), 3=> Uuid::new_v3(), 4=> Uuid::new_v4(), 5=> Uuid::new_v5(), }*/ Uuid::new_v4().to_string() } StringDescriptor::List(lst) => { lst[generate_number_in_range(bits, 0, (lst.len() - 1) as i16) as usize].clone() } 
StringDescriptor::Random => { let sum1: u8 = bits.iter().sum(); let str1: String = rand::thread_rng() .sample_iter(&Alphanumeric) .take(sum1 as usize) .map(char::from) .collect(); str1 } _ => { let sum1: u8 = bits.iter().sum(); let str1: String = rand::thread_rng() .sample_iter(&Alphanumeric) .take(sum1 as usize) .map(char::from) .collect(); str1 } } } GenMethod::VSelf => { //repetitive for now, will be changed in later versions match param { StringDescriptor::Uuid(_) => { //future - from values vec Uuid::new_v4().to_string() } StringDescriptor::List(lst) => { lst[generate_number_in_range(bits, 0, lst.len() as i16 - 1) as usize].clone() } _ => { let sum1: u8 = bits.iter().sum(); let str1: String = rand::thread_rng() .sample_iter(&Alphanumeric) .take(sum1 as usize) .map(char::from) .collect(); str1 } } } GenMethod::RandomInformed => match param { StringDescriptor::List(lst) => { let mut rng = rand::thread_rng(); lst[rng.gen_range(0..lst.len())].clone() } _ => { let sum1: u8 = bits.iter().sum(); let str1: String = rand::thread_rng() .sample_iter(&Alphanumeric) .take(sum1 as usize) .map(char::from) .collect(); str1 } }, GenMethod::Random => { let mut rng = rand::thread_rng(); let sum1: usize = rng.gen_range(0..24); let str1: String = rng .sample_iter(&Alphanumeric) .take(sum1) .map(char::from) .collect(); str1 } } } pub fn gen_type(bits: Vec<u8>) -> GenMethod { let mut b = bits.chunks(bits.len() / 2); let bits_first = b.next().unwrap().to_vec(); let bits_second = b.next().unwrap().to_vec(); if convert(bits_first.clone()) >= convert(bits_second.clone()) { let mut bb = bits_first.chunks(bits_first.len() / 2); let b_f = bb.next().unwrap().to_vec(); let b_s = bb.next().unwrap().to_vec(); if convert(b_f) >= convert(b_s) { GenMethod::FullyInformed } else { GenMethod::VSelf } } else { let mut bb = bits_second.chunks(bits_first.len() / 2); let b_f = bb.next().unwrap().to_vec(); let b_s = bb.next().unwrap().to_vec(); if convert(b_f) >= convert(b_s) { GenMethod::RandomInformed 
} else { GenMethod::Random } } } #[derive(Debug, Clone, Serialize, Deserialize)] struct Parameter { name: String, value: String, #[serde(skip_serializing)] dm: QuePay, } fn params_to_payload(ep: &str, params: Vec<Parameter>) -> (String, String, String, Vec<Header>) { let mut payload = String::from('{'); let mut query = String::from('?'); let mut path_ext = ep.to_string(); let mut headers = vec![]; for param in params { match param.dm { QuePay::Payload => payload.push_str(&format!("\"{}\":{},", param.name, param.value)), QuePay::Query => query.push_str(&format!("{}={}&", param.name, param.value)), QuePay::Path => { path_ext = path_ext.replace(&format!("{}{}{}", '{', param.name, '}'), &param.value) } QuePay::Headers => { headers.push(Header { name: param.name, value: param.value, }); } _ => (), } } query.pop(); if payload.trim() == "{" { payload = String::new(); } else { payload.pop(); payload.push('}'); } (payload, query, path_ext, headers) } fn get_headers( custom_headers: &[Header], payload_headers: Vec<Header>, auth: &Authorization, ) -> HashMap<String, String> { let mut new: Vec<Header> = payload_headers .iter() .chain(custom_headers) .cloned() .collect(); if let Some(a) = auth.get_header() { new.push(a); } new.iter() .map(|h| (h.name.clone(), h.value.clone())) .collect() } async fn send_payload_request( method: Method, base_url: &str, ep: &str, params: Vec<Parameter>, headers: &[Header], auth: &Authorization, ) -> ReqRes { let client = reqwest::Client::new(); let method1 = reqwest::Method::from_bytes(method.to_string().as_bytes()).unwrap(); let (req_payload, req_query, path, headers1) = params_to_payload(ep, params); let h = get_headers(headers, headers1, auth); let req = client .request(method1, &format!("{}{}{}", base_url, path, req_query)) .body(req_payload.clone()) .headers((&h).try_into().expect("not valid headers")) .build() .unwrap(); let req_headers = req .headers() .iter() .map(|(n, v)| (n.to_string(), format!("{:?}", v))) .collect(); let res = 
client.execute(req).await.unwrap(); ReqRes { req_headers, res_headers: res .headers() .iter() .map(|(n, v)| (n.to_string(), format!("{:?}", v))) .collect(), path, method, status: res.status().as_u16(), req_payload, res_payload: res.text().await.unwrap(), req_query, } } async fn send_attack( base_url: &str, eps: Vec<(Method, String, Vec<Parameter>)>, headers: &[Header], auth: &Authorization, ) -> Vec<ReqRes> { let mut rr = vec![]; for ep in eps { rr.push(send_payload_request(ep.0, base_url, &ep.1, ep.2, headers, auth).await); } rr } pub async fn attack_flow( base_url: &str, genes: &[Gene], headers: &[Header], auth: &Authorization, ) -> (Vec<ReqRes>, Vec<String>) { let mut eps = vec![]; let mut choises = vec![]; for gene in genes { let mut params: Vec<Parameter> = vec![]; for c in gene.chromosomes() { let value = match &c.descriptor { ValueDescriptor::Number((nd, _)) => { let choise = gen_type(c.dna.clone()); choises.push(format!("{:?}", choise.clone())); gen_number(choise, c.dna.clone(), nd.clone()).to_string() } ValueDescriptor::String(d) => { let choise = gen_type(c.dna.clone()); choises.push(format!("{:?}", choise.clone())); gen_string(choise, c.dna.clone(), d.clone()) } ValueDescriptor::Bool => { if c.dna[0] == 1 { choises.push(String::from("bool:true")); String::from("true") } else { choises.push(String::from("bool:true")); String::from("false") } } ValueDescriptor::Unknown => { let choise = gen_type(c.dna.clone()); choises.push(format!("{:?}", choise.clone())); gen_string(choise, c.dna.clone(), StringDescriptor::Random) } }; params.push(Parameter { name: c.param_name.clone(), value, dm: c.delivery_method, }); } eps.push((gene.method, gene.ep.clone(), params)); } (send_attack(base_url, eps, headers, auth).await, choises) }
34.316614
99
0.472001
5b34ed0919001718d9f48b58a86c66a25aa45814
1,061
// FIXME: missing sysroot spans (#53081) // ignore-i586-unknown-linux-gnu // ignore-i586-unknown-linux-musl // ignore-i686-unknown-linux-musl use std::mem; struct Misc<T:?Sized>(T); fn check<T: Iterator, U: ?Sized>() { // suggest a where-clause, if needed mem::size_of::<U>(); //~^ ERROR the size for values of type mem::size_of::<Misc<U>>(); //~^ ERROR the size for values of type // ... even if T occurs as a type parameter <u64 as From<T>>::from; //~^ ERROR `u64: std::convert::From<T>` is not satisfied <u64 as From<<T as Iterator>::Item>>::from; //~^ ERROR `u64: std::convert::From<<T as std::iter::Iterator>::Item>` is not satisfied // ... but not if there are inference variables <Misc<_> as From<T>>::from; //~^ ERROR `Misc<_>: std::convert::From<T>` is not satisfied // ... and also not if the error is not related to the type mem::size_of::<[T]>(); //~^ ERROR the size for values of type mem::size_of::<[&U]>(); //~^ ERROR the size for values of type } fn main() { }
25.878049
91
0.602262
aba16e5d63c1a5823f85cf1ecc96b7bfd30a184a
10,817
use std::collections::HashMap;

use ::value::Value;
use serde_json::value::{RawValue, Value as JsonValue};
use serde_json::{Error, Map};
use vrl::prelude::*;

/// Parses `value`'s bytes as JSON into a VRL `Value`.
fn parse_json(value: Value) -> Resolved {
    let bytes = value.try_bytes()?;
    let value = serde_json::from_slice::<'_, Value>(&bytes)
        .map_err(|e| format!("unable to parse json: {}", e))?;
    Ok(value)
}

// parse_json_with_depth method recursively traverses the value and returns raw JSON-formatted bytes
// after reaching provided depth.
fn parse_json_with_depth(value: Value, max_depth: Value) -> Resolved {
    let bytes = value.try_bytes()?;
    let parsed_depth = validate_depth(max_depth)?;

    let raw_value = serde_json::from_slice::<'_, &RawValue>(&bytes)
        .map_err(|e| format!("unable to read json: {}", e))?;

    let res = parse_layer(raw_value, parsed_depth)
        .map_err(|e| format!("unable to parse json with max depth: {}", e))?;

    Ok(Value::from(res))
}

/// Recursive worker for depth-limited parsing: descends `remaining_depth`
/// levels; anything deeper is kept as its raw JSON text.
fn parse_layer(value: &RawValue, remaining_depth: u8) -> std::result::Result<JsonValue, Error> {
    let raw_value = value.get();

    // RawValue is a JSON object.
    if raw_value.starts_with('{') {
        if remaining_depth == 0 {
            // If max_depth is reached, return the raw representation of the JSON object,
            // e.g., "{\"key\":\"value\"}"
            serde_json::value::to_value(raw_value)
        } else {
            // Parse each value of the object as a raw JSON value recursively with the same method.
            let map: HashMap<String, &RawValue> = serde_json::from_str(raw_value)?;
            let mut res_map: Map<String, JsonValue> = Map::with_capacity(map.len());
            for (k, v) in map {
                res_map.insert(k, parse_layer(v, remaining_depth - 1)?);
            }
            Ok(serde_json::Value::from(res_map))
        }
    // RawValue is a JSON array.
    } else if raw_value.starts_with('[') {
        if remaining_depth == 0 {
            // If max_depth is reached, return the raw representation of the JSON array,
            // e.g., "[\"one\",\"two\",\"three\"]"
            serde_json::value::to_value(raw_value)
        } else {
            // Parse all values of the array as a raw JSON value recursively with the same method.
            let arr: Vec<&RawValue> = serde_json::from_str(raw_value)?;
            let mut res_arr: Vec<JsonValue> = Vec::with_capacity(arr.len());
            for v in arr {
                res_arr.push(parse_layer(v, remaining_depth - 1)?)
            }
            Ok(serde_json::Value::from(res_arr))
        }
    // RawValue is not an object or array, do not need to traverse the doc further.
    // Parse and return the value.
    } else {
        serde_json::from_str(raw_value)
    }
}

/// Validates the user-supplied depth is in `1..=128` and narrows it to `u8`.
fn validate_depth(value: Value) -> std::result::Result<u8, ExpressionError> {
    let res = value.try_integer()?;

    // The lower cap is 1 because it is pointless to use anything lower,
    // because 'data = parse_json!(.message, max_depth: 0)' equals to 'data = .message'.
    //
    // The upper cap is 128 because serde_json has the same recursion limit by default.
    // https://github.com/serde-rs/json/blob/4d57ebeea8d791b8a51c229552d2d480415d00e6/json/src/de.rs#L111
    if !(1..=128).contains(&res) {
        Err(ExpressionError::from(format!(
            "max_depth value should be greater than 0 and less than 128, got {}",
            res
        )))
    } else {
        Ok(res as u8)
    }
}

/// The VRL `parse_json` function: metadata, examples and compilation.
#[derive(Clone, Copy, Debug)]
pub struct ParseJson;

impl Function for ParseJson {
    fn identifier(&self) -> &'static str {
        "parse_json"
    }

    fn summary(&self) -> &'static str {
        "parse a string to a JSON type"
    }

    fn usage(&self) -> &'static str {
        indoc! {r#"
            Parses the provided `value` as JSON.

            Only JSON types are returned. If you need to convert a `string` into a `timestamp`,
            consider the `parse_timestamp` function.
        "#}
    }

    fn parameters(&self) -> &'static [Parameter] {
        &[
            Parameter {
                keyword: "value",
                kind: kind::BYTES,
                required: true,
            },
            Parameter {
                keyword: "max_depth",
                kind: kind::INTEGER,
                required: false,
            },
        ]
    }

    fn examples(&self) -> &'static [Example] {
        &[
            Example {
                title: "object",
                source: r#"parse_json!(s'{ "field": "value" }')"#,
                result: Ok(r#"{ "field": "value" }"#),
            },
            Example {
                title: "array",
                source: r#"parse_json!("[true, 0]")"#,
                result: Ok("[true, 0]"),
            },
            Example {
                title: "string",
                source: r#"parse_json!(s'"hello"')"#,
                result: Ok("hello"),
            },
            Example {
                title: "integer",
                source: r#"parse_json!("42")"#,
                result: Ok("42"),
            },
            Example {
                title: "float",
                source: r#"parse_json!("42.13")"#,
                result: Ok("42.13"),
            },
            Example {
                title: "boolean",
                source: r#"parse_json!("false")"#,
                result: Ok("false"),
            },
            Example {
                title: "invalid value",
                source: r#"parse_json!("{ INVALID }")"#,
                result: Err(
                    r#"function call error for "parse_json" at (0:26): unable to parse json: key must be a string at line 1 column 3"#,
                ),
            },
            Example {
                title: "max_depth",
                source: r#"parse_json!(s'{"first_level":{"second_level":"finish"}}', max_depth: 1)"#,
                result: Ok(r#"{"first_level":"{\"second_level\":\"finish\"}"}"#),
            },
        ]
    }

    fn compile(
        &self,
        _state: (&mut state::LocalEnv, &mut state::ExternalEnv),
        _ctx: &mut FunctionCompileContext,
        mut arguments: ArgumentList,
    ) -> Compiled {
        let value = arguments.required("value");
        let max_depth = arguments.optional("max_depth");

        // Two expression types so the common no-max_depth path skips the
        // depth plumbing entirely.
        match max_depth {
            Some(max_depth) => Ok(Box::new(ParseJsonMaxDepthFn { value, max_depth })),
            None => Ok(Box::new(ParseJsonFn { value })),
        }
    }

    fn call_by_vm(&self, _ctx: &mut Context, args: &mut VmArgumentList) -> Resolved {
        let value = args.required("value");
        let max_depth = args.optional("max_depth");
        if let Some(max_depth) = max_depth {
            parse_json_with_depth(value, max_depth)
        } else {
            parse_json(value)
        }
    }
}

/// Compiled `parse_json(value)` without a depth limit.
#[derive(Debug, Clone)]
struct ParseJsonFn {
    value: Box<dyn Expression>,
}

impl Expression for ParseJsonFn {
    fn resolve(&self, ctx: &mut Context) -> Resolved {
        let value = self.value.resolve(ctx)?;
        parse_json(value)
    }

    fn type_def(&self, _: (&state::LocalEnv, &state::ExternalEnv)) -> TypeDef {
        type_def()
    }
}

/// Compiled `parse_json(value, max_depth)`.
#[derive(Debug, Clone)]
struct ParseJsonMaxDepthFn {
    value: Box<dyn Expression>,
    max_depth: Box<dyn Expression>,
}

impl Expression for ParseJsonMaxDepthFn {
    fn resolve(&self, ctx: &mut Context) -> Resolved {
        let value = self.value.resolve(ctx)?;
        let max_depth = self.max_depth.resolve(ctx)?;
        parse_json_with_depth(value, max_depth)
    }

    fn type_def(&self, _: (&state::LocalEnv, &state::ExternalEnv)) -> TypeDef {
        type_def()
    }
}

/// Every kind a parsed JSON value can take (used for nested collections).
fn inner_kind() -> Kind {
    Kind::null()
        | Kind::bytes()
        | Kind::integer()
        | Kind::float()
        | Kind::boolean()
        | Kind::array(Collection::any())
        | Kind::object(Collection::any())
}

/// Result type of `parse_json`: fallible, any JSON-representable kind.
fn type_def() -> TypeDef {
    TypeDef::bytes()
        .fallible()
        .add_boolean()
        .add_integer()
        .add_float()
        .add_null()
        .add_array(Collection::from_unknown(inner_kind()))
        .add_object(Collection::from_unknown(inner_kind()))
}

#[cfg(test)]
mod tests {
    use super::*;

    test_function![
        parse_json => ParseJson;

        parses {
            args: func_args![ value: r#"{"field": "value"}"# ],
            want: Ok(value!({ field: "value" })),
            tdef: type_def(),
        }

        complex_json {
            args: func_args![ value: r#"{"object": {"string":"value","number":42,"array":["hello","world"],"boolean":false}}"# ],
            want: Ok(value!({ object: {string: "value", number: 42, array: ["hello", "world"], boolean: false} })),
            tdef: type_def(),
        }

        invalid_json_errors {
            args: func_args![ value: r#"{"field": "value"# ],
            want: Err("unable to parse json: EOF while parsing a string at line 1 column 16"),
            tdef: TypeDef::bytes().fallible()
                .add_boolean()
                .add_integer()
                .add_float()
                .add_null()
                .add_array(Collection::from_unknown(inner_kind()))
                .add_object(Collection::from_unknown(inner_kind())),
        }

        max_depth {
            args: func_args![ value: r#"{"top_layer": {"layer_one": "finish", "layer_two": 2}}"#, max_depth: 1],
            want: Ok(value!({ top_layer: r#"{"layer_one": "finish", "layer_two": 2}"# })),
            tdef: type_def(),
        }

        max_depth_array {
            args: func_args![ value: r#"[{"top_layer": {"next_layer": ["finish"]}}]"#, max_depth: 2],
            want: Ok(value!([{ top_layer: r#"{"next_layer": ["finish"]}"# }])),
            tdef: type_def(),
        }

        max_depth_exceeds_layers {
            args: func_args![ value: r#"{"top_layer": {"layer_one": "finish", "layer_two": 2}}"#, max_depth: 10],
            want: Ok(value!({ top_layer: {layer_one: "finish", layer_two: 2} })),
            tdef: type_def(),
        }

        invalid_json_with_max_depth {
            args: func_args![ value: r#"{"field": "value"#, max_depth: 3 ],
            want: Err("unable to read json: EOF while parsing a string at line 1 column 16"),
            tdef: TypeDef::bytes().fallible()
                .add_boolean()
                .add_integer()
                .add_float()
                .add_null()
                .add_array(Collection::from_unknown(inner_kind()))
                .add_object(Collection::from_unknown(inner_kind())),
        }

        invalid_input_max_depth {
            args: func_args![ value: r#"{"top_layer": "finish"}"#, max_depth: 129],
            want: Err("max_depth value should be greater than 0 and less than 128, got 129"),
            tdef: type_def(),
        }
    ];
}
33.489164
135
0.533974
01be8de172c2670d0c6eadcbb3879e921b79590c
44,931
//! Trait Resolution. See the [rustc guide] for more information on how this works. //! //! [rustc guide]: https://rust-lang.github.io/rustc-guide/traits/resolution.html #[allow(dead_code)] pub mod auto_trait; mod chalk_fulfill; mod coherence; pub mod error_reporting; mod engine; mod fulfill; mod project; mod object_safety; mod on_unimplemented; mod select; mod specialize; mod structural_impls; pub mod codegen; mod util; pub mod query; use chalk_engine; use crate::hir; use crate::hir::def_id::DefId; use crate::infer::{InferCtxt, SuppressRegionErrors}; use crate::infer::outlives::env::OutlivesEnvironment; use crate::middle::region; use crate::mir::interpret::ErrorHandled; use rustc_macros::HashStable; use syntax::ast; use syntax_pos::{Span, DUMMY_SP}; use crate::ty::subst::{InternalSubsts, SubstsRef}; use crate::ty::{self, AdtKind, List, Ty, TyCtxt, GenericParamDefKind, ToPredicate}; use crate::ty::error::{ExpectedFound, TypeError}; use crate::ty::fold::{TypeFolder, TypeFoldable, TypeVisitor}; use crate::util::common::ErrorReported; use std::fmt::Debug; use std::rc::Rc; pub use self::SelectionError::*; pub use self::FulfillmentErrorCode::*; pub use self::Vtable::*; pub use self::ObligationCauseCode::*; pub use self::coherence::{add_placeholder_note, orphan_check, overlapping_impls}; pub use self::coherence::{OrphanCheckErr, OverlapResult}; pub use self::fulfill::{FulfillmentContext, PendingPredicateObligation}; pub use self::project::MismatchedProjectionTypes; pub use self::project::{normalize, normalize_projection_type, poly_project_and_unify_type}; pub use self::project::{ProjectionCache, ProjectionCacheSnapshot, Reveal, Normalized}; pub use self::object_safety::ObjectSafetyViolation; pub use self::object_safety::MethodViolationCode; pub use self::on_unimplemented::{OnUnimplementedDirective, OnUnimplementedNote}; pub use self::select::{EvaluationCache, SelectionContext, SelectionCache}; pub use self::select::{EvaluationResult, IntercrateAmbiguityCause, 
OverflowError}; pub use self::specialize::{OverlapError, specialization_graph, translate_substs}; pub use self::specialize::find_associated_item; pub use self::specialize::specialization_graph::FutureCompatOverlapError; pub use self::specialize::specialization_graph::FutureCompatOverlapErrorKind; pub use self::engine::{TraitEngine, TraitEngineExt}; pub use self::util::{elaborate_predicates, elaborate_trait_ref, elaborate_trait_refs}; pub use self::util::{ supertraits, supertrait_def_ids, transitive_bounds, Supertraits, SupertraitDefIds, }; pub use self::util::{expand_trait_aliases, TraitAliasExpander}; pub use self::chalk_fulfill::{ CanonicalGoal as ChalkCanonicalGoal, FulfillmentContext as ChalkFulfillmentContext }; pub use self::ObligationCauseCode::*; pub use self::FulfillmentErrorCode::*; pub use self::SelectionError::*; pub use self::Vtable::*; /// Whether to enable bug compatibility with issue #43355. #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum IntercrateMode { Issue43355, Fixed } /// The mode that trait queries run in. #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum TraitQueryMode { // Standard/un-canonicalized queries get accurate // spans etc. passed in and hence can do reasonable // error reporting on their own. Standard, // Canonicalized queries get dummy spans and hence // must generally propagate errors to // pre-canonicalization callsites. Canonical, } /// An `Obligation` represents some trait reference (e.g., `int: Eq`) for /// which the vtable must be found. The process of finding a vtable is /// called "resolving" the `Obligation`. This process consists of /// either identifying an `impl` (e.g., `impl Eq for int`) that /// provides the required vtable, or else finding a bound that is in /// scope. The eventual result is usually a `Selection` (defined below). #[derive(Clone, PartialEq, Eq, Hash)] pub struct Obligation<'tcx, T> { /// The reason we have to prove this thing. 
pub cause: ObligationCause<'tcx>, /// The environment in which we should prove this thing. pub param_env: ty::ParamEnv<'tcx>, /// The thing we are trying to prove. pub predicate: T, /// If we started proving this as a result of trying to prove /// something else, track the total depth to ensure termination. /// If this goes over a certain threshold, we abort compilation -- /// in such cases, we can not say whether or not the predicate /// holds for certain. Stupid halting problem; such a drag. pub recursion_depth: usize, } pub type PredicateObligation<'tcx> = Obligation<'tcx, ty::Predicate<'tcx>>; pub type TraitObligation<'tcx> = Obligation<'tcx, ty::PolyTraitPredicate<'tcx>>; /// The reason why we incurred this obligation; used for error reporting. #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub struct ObligationCause<'tcx> { pub span: Span, /// The ID of the fn body that triggered this obligation. This is /// used for region obligations to determine the precise /// environment in which the region obligation should be evaluated /// (in particular, closures can add new assumptions). See the /// field `region_obligations` of the `FulfillmentContext` for more /// information. pub body_id: hir::HirId, pub code: ObligationCauseCode<'tcx> } impl<'tcx> ObligationCause<'tcx> { pub fn span<'gcx>(&self, tcx: TyCtxt<'gcx, 'tcx>) -> Span { match self.code { ObligationCauseCode::CompareImplMethodObligation { .. } | ObligationCauseCode::MainFunctionType | ObligationCauseCode::StartFunctionType => { tcx.sess.source_map().def_span(self.span) } ObligationCauseCode::MatchExpressionArm { arm_span, .. } => arm_span, _ => self.span, } } } #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub enum ObligationCauseCode<'tcx> { /// Not well classified or should be obvious from the span. MiscObligation, /// A slice or array is WF only if `T: Sized`. SliceOrArrayElem, /// A tuple is WF only if its middle elements are `Sized`. TupleElem, /// This is the trait reference from the given projection. 
ProjectionWf(ty::ProjectionTy<'tcx>), /// In an impl of trait `X` for type `Y`, type `Y` must /// also implement all supertraits of `X`. ItemObligation(DefId), /// A type like `&'a T` is WF only if `T: 'a`. ReferenceOutlivesReferent(Ty<'tcx>), /// A type like `Box<Foo<'a> + 'b>` is WF only if `'b: 'a`. ObjectTypeBound(Ty<'tcx>, ty::Region<'tcx>), /// Obligation incurred due to an object cast. ObjectCastObligation(/* Object type */ Ty<'tcx>), // Various cases where expressions must be sized/copy/etc: /// L = X implies that L is Sized AssignmentLhsSized, /// (x1, .., xn) must be Sized TupleInitializerSized, /// S { ... } must be Sized StructInitializerSized, /// Type of each variable must be Sized VariableType(ast::NodeId), /// Argument type must be Sized SizedArgumentType, /// Return type must be Sized SizedReturnType, /// Yield type must be Sized SizedYieldType, /// [T,..n] --> T must be Copy RepeatVec, /// Types of fields (other than the last, except for packed structs) in a struct must be sized. FieldSized { adt_kind: AdtKind, last: bool }, /// Constant expressions must be sized. ConstSized, /// static items must have `Sync` type SharedStatic, BuiltinDerivedObligation(DerivedObligationCause<'tcx>), ImplDerivedObligation(DerivedObligationCause<'tcx>), /// error derived when matching traits/impls; see ObligationCause for more details CompareImplMethodObligation { item_name: ast::Name, impl_item_def_id: DefId, trait_item_def_id: DefId, }, /// Checking that this expression can be assigned where it needs to be // FIXME(eddyb) #11161 is the original Expr required? 
ExprAssignable, /// Computing common supertype in the arms of a match expression MatchExpressionArm { arm_span: Span, source: hir::MatchSource, prior_arms: Vec<Span>, last_ty: Ty<'tcx>, discrim_hir_id: hir::HirId, }, /// Computing common supertype in the pattern guard for the arms of a match expression MatchExpressionArmPattern { span: Span, ty: Ty<'tcx> }, /// Computing common supertype in an if expression IfExpression { then: Span, outer: Option<Span>, semicolon: Option<Span>, }, /// Computing common supertype of an if expression with no else counter-part IfExpressionWithNoElse, /// `main` has wrong type MainFunctionType, /// `start` has wrong type StartFunctionType, /// intrinsic has wrong type IntrinsicType, /// method receiver MethodReceiver, /// `return` with no expression ReturnNoExpression, /// `return` with an expression ReturnType(hir::HirId), /// Block implicit return BlockTailExpression(hir::HirId), /// #[feature(trivial_bounds)] is not enabled TrivialBound, } #[derive(Clone, Debug, PartialEq, Eq, Hash)] pub struct DerivedObligationCause<'tcx> { /// The trait reference of the parent obligation that led to the /// current obligation. Note that only trait obligations lead to /// derived obligations, so we just store the trait reference here /// directly. parent_trait_ref: ty::PolyTraitRef<'tcx>, /// The parent trait had this cause. parent_code: Rc<ObligationCauseCode<'tcx>> } pub type Obligations<'tcx, O> = Vec<Obligation<'tcx, O>>; pub type PredicateObligations<'tcx> = Vec<PredicateObligation<'tcx>>; pub type TraitObligations<'tcx> = Vec<TraitObligation<'tcx>>; /// The following types: /// * `WhereClause`, /// * `WellFormed`, /// * `FromEnv`, /// * `DomainGoal`, /// * `Goal`, /// * `Clause`, /// * `Environment`, /// * `InEnvironment`, /// are used for representing the trait system in the form of /// logic programming clauses. They are part of the interface /// for the chalk SLG solver. 
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, HashStable)] pub enum WhereClause<'tcx> { Implemented(ty::TraitPredicate<'tcx>), ProjectionEq(ty::ProjectionPredicate<'tcx>), RegionOutlives(ty::RegionOutlivesPredicate<'tcx>), TypeOutlives(ty::TypeOutlivesPredicate<'tcx>), } #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, HashStable)] pub enum WellFormed<'tcx> { Trait(ty::TraitPredicate<'tcx>), Ty(Ty<'tcx>), } #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, HashStable)] pub enum FromEnv<'tcx> { Trait(ty::TraitPredicate<'tcx>), Ty(Ty<'tcx>), } #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, HashStable)] pub enum DomainGoal<'tcx> { Holds(WhereClause<'tcx>), WellFormed(WellFormed<'tcx>), FromEnv(FromEnv<'tcx>), Normalize(ty::ProjectionPredicate<'tcx>), } pub type PolyDomainGoal<'tcx> = ty::Binder<DomainGoal<'tcx>>; #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, HashStable)] pub enum QuantifierKind { Universal, Existential, } #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, HashStable)] pub enum GoalKind<'tcx> { Implies(Clauses<'tcx>, Goal<'tcx>), And(Goal<'tcx>, Goal<'tcx>), Not(Goal<'tcx>), DomainGoal(DomainGoal<'tcx>), Quantified(QuantifierKind, ty::Binder<Goal<'tcx>>), Subtype(Ty<'tcx>, Ty<'tcx>), CannotProve, } pub type Goal<'tcx> = &'tcx GoalKind<'tcx>; pub type Goals<'tcx> = &'tcx List<Goal<'tcx>>; impl<'tcx> DomainGoal<'tcx> { pub fn into_goal(self) -> GoalKind<'tcx> { GoalKind::DomainGoal(self) } pub fn into_program_clause(self) -> ProgramClause<'tcx> { ProgramClause { goal: self, hypotheses: ty::List::empty(), category: ProgramClauseCategory::Other, } } } impl<'tcx> GoalKind<'tcx> { pub fn from_poly_domain_goal<'gcx>( domain_goal: PolyDomainGoal<'tcx>, tcx: TyCtxt<'gcx, 'tcx>, ) -> GoalKind<'tcx> { match domain_goal.no_bound_vars() { Some(p) => p.into_goal(), None => GoalKind::Quantified( QuantifierKind::Universal, domain_goal.map_bound(|p| tcx.mk_goal(p.into_goal())) ), } } } /// This matches the definition from Page 7 of "A Proof Procedure for the 
Logic of Hereditary /// Harrop Formulas". #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, HashStable)] pub enum Clause<'tcx> { Implies(ProgramClause<'tcx>), ForAll(ty::Binder<ProgramClause<'tcx>>), } impl Clause<'tcx> { pub fn category(self) -> ProgramClauseCategory { match self { Clause::Implies(clause) => clause.category, Clause::ForAll(clause) => clause.skip_binder().category, } } } /// Multiple clauses. pub type Clauses<'tcx> = &'tcx List<Clause<'tcx>>; /// A "program clause" has the form `D :- G1, ..., Gn`. It is saying /// that the domain goal `D` is true if `G1...Gn` are provable. This /// is equivalent to the implication `G1..Gn => D`; we usually write /// it with the reverse implication operator `:-` to emphasize the way /// that programs are actually solved (via backchaining, which starts /// with the goal to solve and proceeds from there). #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, HashStable)] pub struct ProgramClause<'tcx> { /// This goal will be considered true ... pub goal: DomainGoal<'tcx>, /// ... if we can prove these hypotheses (there may be no hypotheses at all): pub hypotheses: Goals<'tcx>, /// Useful for filtering clauses. pub category: ProgramClauseCategory, } #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, HashStable)] pub enum ProgramClauseCategory { ImpliedBound, WellFormed, Other, } /// A set of clauses that we assume to be true. #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, HashStable)] pub struct Environment<'tcx> { pub clauses: Clauses<'tcx>, } impl Environment<'tcx> { pub fn with<G>(self, goal: G) -> InEnvironment<'tcx, G> { InEnvironment { environment: self, goal, } } } /// Something (usually a goal), along with an environment. 
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, HashStable)] pub struct InEnvironment<'tcx, G> { pub environment: Environment<'tcx>, pub goal: G, } pub type Selection<'tcx> = Vtable<'tcx, PredicateObligation<'tcx>>; #[derive(Clone,Debug)] pub enum SelectionError<'tcx> { Unimplemented, OutputTypeParameterMismatch(ty::PolyTraitRef<'tcx>, ty::PolyTraitRef<'tcx>, ty::error::TypeError<'tcx>), TraitNotObjectSafe(DefId), ConstEvalFailure(ErrorHandled), Overflow, } pub struct FulfillmentError<'tcx> { pub obligation: PredicateObligation<'tcx>, pub code: FulfillmentErrorCode<'tcx> } #[derive(Clone)] pub enum FulfillmentErrorCode<'tcx> { CodeSelectionError(SelectionError<'tcx>), CodeProjectionError(MismatchedProjectionTypes<'tcx>), CodeSubtypeError(ExpectedFound<Ty<'tcx>>, TypeError<'tcx>), // always comes from a SubtypePredicate CodeAmbiguity, } /// When performing resolution, it is typically the case that there /// can be one of three outcomes: /// /// - `Ok(Some(r))`: success occurred with result `r` /// - `Ok(None)`: could not definitely determine anything, usually due /// to inconclusive type inference. /// - `Err(e)`: error `e` occurred pub type SelectionResult<'tcx, T> = Result<Option<T>, SelectionError<'tcx>>; /// Given the successful resolution of an obligation, the `Vtable` /// indicates where the vtable comes from. Note that while we call this /// a "vtable", it does not necessarily indicate dynamic dispatch at /// runtime. `Vtable` instances just tell the compiler where to find /// methods, but in generic code those methods are typically statically /// dispatched -- only when an object is constructed is a `Vtable` /// instance reified into an actual vtable. /// /// For example, the vtable may be tied to a specific impl (case A), /// or it may be relative to some bound that is in scope (case B). /// /// ``` /// impl<T:Clone> Clone<T> for Option<T> { ... } // Impl_1 /// impl<T:Clone> Clone<T> for Box<T> { ... } // Impl_2 /// impl Clone for int { ... 
} // Impl_3 /// /// fn foo<T:Clone>(concrete: Option<Box<int>>, /// param: T, /// mixed: Option<T>) { /// /// // Case A: Vtable points at a specific impl. Only possible when /// // type is concretely known. If the impl itself has bounded /// // type parameters, Vtable will carry resolutions for those as well: /// concrete.clone(); // Vtable(Impl_1, [Vtable(Impl_2, [Vtable(Impl_3)])]) /// /// // Case B: Vtable must be provided by caller. This applies when /// // type is a type parameter. /// param.clone(); // VtableParam /// /// // Case C: A mix of cases A and B. /// mixed.clone(); // Vtable(Impl_1, [VtableParam]) /// } /// ``` /// /// ### The type parameter `N` /// /// See explanation on `VtableImplData`. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)] pub enum Vtable<'tcx, N> { /// Vtable identifying a particular impl. VtableImpl(VtableImplData<'tcx, N>), /// Vtable for auto trait implementations. /// This carries the information and nested obligations with regards /// to an auto implementation for a trait `Trait`. The nested obligations /// ensure the trait implementation holds for all the constituent types. VtableAutoImpl(VtableAutoImplData<N>), /// Successful resolution to an obligation provided by the caller /// for some type parameter. The `Vec<N>` represents the /// obligations incurred from normalizing the where-clause (if /// any). VtableParam(Vec<N>), /// Virtual calls through an object. VtableObject(VtableObjectData<'tcx, N>), /// Successful resolution for a builtin trait. VtableBuiltin(VtableBuiltinData<N>), /// Vtable automatically generated for a closure. The `DefId` is the ID /// of the closure expression. This is a `VtableImpl` in spirit, but the /// impl is generated by the compiler and does not appear in the source. VtableClosure(VtableClosureData<'tcx, N>), /// Same as above, but for a function pointer type with the given signature. 
VtableFnPointer(VtableFnPointerData<'tcx, N>), /// Vtable automatically generated for a generator. VtableGenerator(VtableGeneratorData<'tcx, N>), /// Vtable for a trait alias. VtableTraitAlias(VtableTraitAliasData<'tcx, N>), } /// Identifies a particular impl in the source, along with a set of /// substitutions from the impl's type/lifetime parameters. The /// `nested` vector corresponds to the nested obligations attached to /// the impl's type parameters. /// /// The type parameter `N` indicates the type used for "nested /// obligations" that are required by the impl. During type check, this /// is `Obligation`, as one might expect. During codegen, however, this /// is `()`, because codegen only requires a shallow resolution of an /// impl, and nested obligations are satisfied later. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)] pub struct VtableImplData<'tcx, N> { pub impl_def_id: DefId, pub substs: SubstsRef<'tcx>, pub nested: Vec<N> } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)] pub struct VtableGeneratorData<'tcx, N> { pub generator_def_id: DefId, pub substs: ty::GeneratorSubsts<'tcx>, /// Nested obligations. This can be non-empty if the generator /// signature contains associated types. pub nested: Vec<N> } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)] pub struct VtableClosureData<'tcx, N> { pub closure_def_id: DefId, pub substs: ty::ClosureSubsts<'tcx>, /// Nested obligations. This can be non-empty if the closure /// signature contains associated types. pub nested: Vec<N> } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)] pub struct VtableAutoImplData<N> { pub trait_def_id: DefId, pub nested: Vec<N> } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)] pub struct VtableBuiltinData<N> { pub nested: Vec<N> } /// A vtable for some object-safe trait `Foo` automatically derived /// for the object type `Foo`. 
#[derive(PartialEq, Eq, Clone, RustcEncodable, RustcDecodable, HashStable)] pub struct VtableObjectData<'tcx, N> { /// `Foo` upcast to the obligation trait. This will be some supertrait of `Foo`. pub upcast_trait_ref: ty::PolyTraitRef<'tcx>, /// The vtable is formed by concatenating together the method lists of /// the base object trait and all supertraits; this is the start of /// `upcast_trait_ref`'s methods in that vtable. pub vtable_base: usize, pub nested: Vec<N>, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)] pub struct VtableFnPointerData<'tcx, N> { pub fn_ty: Ty<'tcx>, pub nested: Vec<N> } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, HashStable)] pub struct VtableTraitAliasData<'tcx, N> { pub alias_def_id: DefId, pub substs: SubstsRef<'tcx>, pub nested: Vec<N>, } /// Creates predicate obligations from the generic bounds. pub fn predicates_for_generics<'tcx>(cause: ObligationCause<'tcx>, param_env: ty::ParamEnv<'tcx>, generic_bounds: &ty::InstantiatedPredicates<'tcx>) -> PredicateObligations<'tcx> { util::predicates_for_generics(cause, 0, param_env, generic_bounds) } /// Determines whether the type `ty` is known to meet `bound` and /// returns true if so. Returns false if `ty` either does not meet /// `bound` or is not known to meet bound (note that this is /// conservative towards *no impl*, which is the opposite of the /// `evaluate` methods). 
pub fn type_known_to_meet_bound_modulo_regions<'a, 'gcx, 'tcx>( infcx: &InferCtxt<'a, 'gcx, 'tcx>, param_env: ty::ParamEnv<'tcx>, ty: Ty<'tcx>, def_id: DefId, span: Span, ) -> bool { debug!("type_known_to_meet_bound_modulo_regions(ty={:?}, bound={:?})", ty, infcx.tcx.def_path_str(def_id)); let trait_ref = ty::TraitRef { def_id, substs: infcx.tcx.mk_substs_trait(ty, &[]), }; let obligation = Obligation { param_env, cause: ObligationCause::misc(span, hir::DUMMY_HIR_ID), recursion_depth: 0, predicate: trait_ref.to_predicate(), }; let result = infcx.predicate_must_hold_modulo_regions(&obligation); debug!("type_known_to_meet_ty={:?} bound={} => {:?}", ty, infcx.tcx.def_path_str(def_id), result); if result && (ty.has_infer_types() || ty.has_closure_types()) { // Because of inference "guessing", selection can sometimes claim // to succeed while the success requires a guess. To ensure // this function's result remains infallible, we must confirm // that guess. While imperfect, I believe this is sound. // The handling of regions in this area of the code is terrible, // see issue #29149. We should be able to improve on this with // NLL. let mut fulfill_cx = FulfillmentContext::new_ignoring_regions(); // We can use a dummy node-id here because we won't pay any mind // to region obligations that arise (there shouldn't really be any // anyhow). let cause = ObligationCause::misc(span, hir::DUMMY_HIR_ID); fulfill_cx.register_bound(infcx, param_env, ty, def_id, cause); // Note: we only assume something is `Copy` if we can // *definitively* show that it implements `Copy`. Otherwise, // assume it is move; linear is always ok. 
match fulfill_cx.select_all_or_error(infcx) { Ok(()) => { debug!("type_known_to_meet_bound_modulo_regions: ty={:?} bound={} success", ty, infcx.tcx.def_path_str(def_id)); true } Err(e) => { debug!("type_known_to_meet_bound_modulo_regions: ty={:?} bound={} errors={:?}", ty, infcx.tcx.def_path_str(def_id), e); false } } } else { result } } fn do_normalize_predicates<'tcx>( tcx: TyCtxt<'tcx, 'tcx>, region_context: DefId, cause: ObligationCause<'tcx>, elaborated_env: ty::ParamEnv<'tcx>, predicates: Vec<ty::Predicate<'tcx>>, ) -> Result<Vec<ty::Predicate<'tcx>>, ErrorReported> { debug!( "do_normalize_predicates(predicates={:?}, region_context={:?}, cause={:?})", predicates, region_context, cause, ); let span = cause.span; tcx.infer_ctxt().enter(|infcx| { // FIXME. We should really... do something with these region // obligations. But this call just continues the older // behavior (i.e., doesn't cause any new bugs), and it would // take some further refactoring to actually solve them. In // particular, we would have to handle implied bounds // properly, and that code is currently largely confined to // regionck (though I made some efforts to extract it // out). -nmatsakis // // @arielby: In any case, these obligations are checked // by wfcheck anyway, so I'm not sure we have to check // them here too, and we will remove this function when // we move over to lazy normalization *anyway*. let fulfill_cx = FulfillmentContext::new_ignoring_regions(); let predicates = match fully_normalize( &infcx, fulfill_cx, cause, elaborated_env, &predicates, ) { Ok(predicates) => predicates, Err(errors) => { infcx.report_fulfillment_errors(&errors, None, false); return Err(ErrorReported) } }; debug!("do_normalize_predictes: normalized predicates = {:?}", predicates); let region_scope_tree = region::ScopeTree::default(); // We can use the `elaborated_env` here; the region code only // cares about declarations like `'a: 'b`. 
let outlives_env = OutlivesEnvironment::new(elaborated_env); infcx.resolve_regions_and_report_errors( region_context, &region_scope_tree, &outlives_env, SuppressRegionErrors::default(), ); let predicates = match infcx.fully_resolve(&predicates) { Ok(predicates) => predicates, Err(fixup_err) => { // If we encounter a fixup error, it means that some type // variable wound up unconstrained. I actually don't know // if this can happen, and I certainly don't expect it to // happen often, but if it did happen it probably // represents a legitimate failure due to some kind of // unconstrained variable, and it seems better not to ICE, // all things considered. tcx.sess.span_err(span, &fixup_err.to_string()); return Err(ErrorReported) } }; match tcx.lift_to_global(&predicates) { Some(predicates) => Ok(predicates), None => { // FIXME: shouldn't we, you know, actually report an error here? or an ICE? Err(ErrorReported) } } }) } // FIXME: this is gonna need to be removed ... /// Normalizes the parameter environment, reporting errors if they occur. pub fn normalize_param_env_or_error<'tcx>( tcx: TyCtxt<'tcx, 'tcx>, region_context: DefId, unnormalized_env: ty::ParamEnv<'tcx>, cause: ObligationCause<'tcx>, ) -> ty::ParamEnv<'tcx> { // I'm not wild about reporting errors here; I'd prefer to // have the errors get reported at a defined place (e.g., // during typeck). Instead I have all parameter // environments, in effect, going through this function // and hence potentially reporting errors. This ensures of // course that we never forget to normalize (the // alternative seemed like it would involve a lot of // manual invocations of this fn -- and then we'd have to // deal with the errors at each of those sites). // // In any case, in practice, typeck constructs all the // parameter environments once for every fn as it goes, // and errors will get reported then; so after typeck we // can be sure that no errors should occur. 
debug!("normalize_param_env_or_error(region_context={:?}, unnormalized_env={:?}, cause={:?})", region_context, unnormalized_env, cause); let mut predicates: Vec<_> = util::elaborate_predicates(tcx, unnormalized_env.caller_bounds.to_vec()) .collect(); debug!("normalize_param_env_or_error: elaborated-predicates={:?}", predicates); let elaborated_env = ty::ParamEnv::new( tcx.intern_predicates(&predicates), unnormalized_env.reveal, unnormalized_env.def_id ); // HACK: we are trying to normalize the param-env inside *itself*. The problem is that // normalization expects its param-env to be already normalized, which means we have // a circularity. // // The way we handle this is by normalizing the param-env inside an unnormalized version // of the param-env, which means that if the param-env contains unnormalized projections, // we'll have some normalization failures. This is unfortunate. // // Lazy normalization would basically handle this by treating just the // normalizing-a-trait-ref-requires-itself cycles as evaluation failures. // // Inferred outlives bounds can create a lot of `TypeOutlives` predicates for associated // types, so to make the situation less bad, we normalize all the predicates *but* // the `TypeOutlives` predicates first inside the unnormalized parameter environment, and // then we normalize the `TypeOutlives` bounds inside the normalized parameter environment. // // This works fairly well because trait matching does not actually care about param-env // TypeOutlives predicates - these are normally used by regionck. let outlives_predicates: Vec<_> = predicates.drain_filter(|predicate| { match predicate { ty::Predicate::TypeOutlives(..) 
=> true, _ => false } }).collect(); debug!("normalize_param_env_or_error: predicates=(non-outlives={:?}, outlives={:?})", predicates, outlives_predicates); let non_outlives_predicates = match do_normalize_predicates(tcx, region_context, cause.clone(), elaborated_env, predicates) { Ok(predicates) => predicates, // An unnormalized env is better than nothing. Err(ErrorReported) => { debug!("normalize_param_env_or_error: errored resolving non-outlives predicates"); return elaborated_env } }; debug!("normalize_param_env_or_error: non-outlives predicates={:?}", non_outlives_predicates); // Not sure whether it is better to include the unnormalized TypeOutlives predicates // here. I believe they should not matter, because we are ignoring TypeOutlives param-env // predicates here anyway. Keeping them here anyway because it seems safer. let outlives_env: Vec<_> = non_outlives_predicates.iter().chain(&outlives_predicates).cloned().collect(); let outlives_env = ty::ParamEnv::new( tcx.intern_predicates(&outlives_env), unnormalized_env.reveal, None ); let outlives_predicates = match do_normalize_predicates(tcx, region_context, cause, outlives_env, outlives_predicates) { Ok(predicates) => predicates, // An unnormalized env is better than nothing. 
Err(ErrorReported) => { debug!("normalize_param_env_or_error: errored resolving outlives predicates"); return elaborated_env } }; debug!("normalize_param_env_or_error: outlives predicates={:?}", outlives_predicates); let mut predicates = non_outlives_predicates; predicates.extend(outlives_predicates); debug!("normalize_param_env_or_error: final predicates={:?}", predicates); ty::ParamEnv::new( tcx.intern_predicates(&predicates), unnormalized_env.reveal, unnormalized_env.def_id ) } pub fn fully_normalize<'a, 'gcx, 'tcx, T>( infcx: &InferCtxt<'a, 'gcx, 'tcx>, mut fulfill_cx: FulfillmentContext<'tcx>, cause: ObligationCause<'tcx>, param_env: ty::ParamEnv<'tcx>, value: &T) -> Result<T, Vec<FulfillmentError<'tcx>>> where T : TypeFoldable<'tcx> { debug!("fully_normalize_with_fulfillcx(value={:?})", value); let selcx = &mut SelectionContext::new(infcx); let Normalized { value: normalized_value, obligations } = project::normalize(selcx, param_env, cause, value); debug!("fully_normalize: normalized_value={:?} obligations={:?}", normalized_value, obligations); for obligation in obligations { fulfill_cx.register_predicate_obligation(selcx.infcx(), obligation); } debug!("fully_normalize: select_all_or_error start"); fulfill_cx.select_all_or_error(infcx)?; debug!("fully_normalize: select_all_or_error complete"); let resolved_value = infcx.resolve_vars_if_possible(&normalized_value); debug!("fully_normalize: resolved_value={:?}", resolved_value); Ok(resolved_value) } /// Normalizes the predicates and checks whether they hold in an empty /// environment. If this returns false, then either normalize /// encountered an error or one of the predicates did not hold. Used /// when creating vtables to check for unsatisfiable methods. 
fn normalize_and_test_predicates<'tcx>( tcx: TyCtxt<'tcx, 'tcx>, predicates: Vec<ty::Predicate<'tcx>>, ) -> bool { debug!("normalize_and_test_predicates(predicates={:?})", predicates); let result = tcx.infer_ctxt().enter(|infcx| { let param_env = ty::ParamEnv::reveal_all(); let mut selcx = SelectionContext::new(&infcx); let mut fulfill_cx = FulfillmentContext::new(); let cause = ObligationCause::dummy(); let Normalized { value: predicates, obligations } = normalize(&mut selcx, param_env, cause.clone(), &predicates); for obligation in obligations { fulfill_cx.register_predicate_obligation(&infcx, obligation); } for predicate in predicates { let obligation = Obligation::new(cause.clone(), param_env, predicate); fulfill_cx.register_predicate_obligation(&infcx, obligation); } fulfill_cx.select_all_or_error(&infcx).is_ok() }); debug!("normalize_and_test_predicates(predicates={:?}) = {:?}", predicates, result); result } fn substitute_normalize_and_test_predicates<'tcx>( tcx: TyCtxt<'tcx, 'tcx>, key: (DefId, SubstsRef<'tcx>), ) -> bool { debug!("substitute_normalize_and_test_predicates(key={:?})", key); let predicates = tcx.predicates_of(key.0).instantiate(tcx, key.1).predicates; let result = normalize_and_test_predicates(tcx, predicates); debug!("substitute_normalize_and_test_predicates(key={:?}) = {:?}", key, result); result } /// Given a trait `trait_ref`, iterates the vtable entries /// that come from `trait_ref`, including its supertraits. #[inline] // FIXME(#35870): avoid closures being unexported due to `impl Trait`. fn vtable_methods<'tcx>( tcx: TyCtxt<'tcx, 'tcx>, trait_ref: ty::PolyTraitRef<'tcx>, ) -> &'tcx [Option<(DefId, SubstsRef<'tcx>)>] { debug!("vtable_methods({:?})", trait_ref); tcx.arena.alloc_from_iter( supertraits(tcx, trait_ref).flat_map(move |trait_ref| { let trait_methods = tcx.associated_items(trait_ref.def_id()) .filter(|item| item.kind == ty::AssocKind::Method); // Now list each method's DefId and InternalSubsts (for within its trait). 
// If the method can never be called from this object, produce None. trait_methods.map(move |trait_method| { debug!("vtable_methods: trait_method={:?}", trait_method); let def_id = trait_method.def_id; // Some methods cannot be called on an object; skip those. if !tcx.is_vtable_safe_method(trait_ref.def_id(), &trait_method) { debug!("vtable_methods: not vtable safe"); return None; } // the method may have some early-bound lifetimes, add // regions for those let substs = trait_ref.map_bound(|trait_ref| InternalSubsts::for_item(tcx, def_id, |param, _| match param.kind { GenericParamDefKind::Lifetime => tcx.lifetimes.re_erased.into(), GenericParamDefKind::Type { .. } | GenericParamDefKind::Const => { trait_ref.substs[param.index as usize] } } ) ); // the trait type may have higher-ranked lifetimes in it; // so erase them if they appear, so that we get the type // at some particular call site let substs = tcx.normalize_erasing_late_bound_regions( ty::ParamEnv::reveal_all(), &substs ); // It's possible that the method relies on where clauses that // do not hold for this particular set of type parameters. // Note that this method could then never be called, so we // do not want to try and codegen it, in that case (see #23435). 
let predicates = tcx.predicates_of(def_id).instantiate_own(tcx, substs); if !normalize_and_test_predicates(tcx, predicates.predicates) { debug!("vtable_methods: predicates do not hold"); return None; } Some((def_id, substs)) }) }) ) } impl<'tcx, O> Obligation<'tcx, O> { pub fn new(cause: ObligationCause<'tcx>, param_env: ty::ParamEnv<'tcx>, predicate: O) -> Obligation<'tcx, O> { Obligation { cause, param_env, recursion_depth: 0, predicate } } fn with_depth(cause: ObligationCause<'tcx>, recursion_depth: usize, param_env: ty::ParamEnv<'tcx>, predicate: O) -> Obligation<'tcx, O> { Obligation { cause, param_env, recursion_depth, predicate } } pub fn misc(span: Span, body_id: hir::HirId, param_env: ty::ParamEnv<'tcx>, trait_ref: O) -> Obligation<'tcx, O> { Obligation::new(ObligationCause::misc(span, body_id), param_env, trait_ref) } pub fn with<P>(&self, value: P) -> Obligation<'tcx,P> { Obligation { cause: self.cause.clone(), param_env: self.param_env, recursion_depth: self.recursion_depth, predicate: value } } } impl<'tcx> ObligationCause<'tcx> { #[inline] pub fn new(span: Span, body_id: hir::HirId, code: ObligationCauseCode<'tcx>) -> ObligationCause<'tcx> { ObligationCause { span, body_id, code } } pub fn misc(span: Span, body_id: hir::HirId) -> ObligationCause<'tcx> { ObligationCause { span, body_id, code: MiscObligation } } pub fn dummy() -> ObligationCause<'tcx> { ObligationCause { span: DUMMY_SP, body_id: hir::CRATE_HIR_ID, code: MiscObligation } } } impl<'tcx, N> Vtable<'tcx, N> { pub fn nested_obligations(self) -> Vec<N> { match self { VtableImpl(i) => i.nested, VtableParam(n) => n, VtableBuiltin(i) => i.nested, VtableAutoImpl(d) => d.nested, VtableClosure(c) => c.nested, VtableGenerator(c) => c.nested, VtableObject(d) => d.nested, VtableFnPointer(d) => d.nested, VtableTraitAlias(d) => d.nested, } } pub fn map<M, F>(self, f: F) -> Vtable<'tcx, M> where F: FnMut(N) -> M { match self { VtableImpl(i) => VtableImpl(VtableImplData { impl_def_id: i.impl_def_id, 
substs: i.substs, nested: i.nested.into_iter().map(f).collect(), }), VtableParam(n) => VtableParam(n.into_iter().map(f).collect()), VtableBuiltin(i) => VtableBuiltin(VtableBuiltinData { nested: i.nested.into_iter().map(f).collect(), }), VtableObject(o) => VtableObject(VtableObjectData { upcast_trait_ref: o.upcast_trait_ref, vtable_base: o.vtable_base, nested: o.nested.into_iter().map(f).collect(), }), VtableAutoImpl(d) => VtableAutoImpl(VtableAutoImplData { trait_def_id: d.trait_def_id, nested: d.nested.into_iter().map(f).collect(), }), VtableClosure(c) => VtableClosure(VtableClosureData { closure_def_id: c.closure_def_id, substs: c.substs, nested: c.nested.into_iter().map(f).collect(), }), VtableGenerator(c) => VtableGenerator(VtableGeneratorData { generator_def_id: c.generator_def_id, substs: c.substs, nested: c.nested.into_iter().map(f).collect(), }), VtableFnPointer(p) => VtableFnPointer(VtableFnPointerData { fn_ty: p.fn_ty, nested: p.nested.into_iter().map(f).collect(), }), VtableTraitAlias(d) => VtableTraitAlias(VtableTraitAliasData { alias_def_id: d.alias_def_id, substs: d.substs, nested: d.nested.into_iter().map(f).collect(), }), } } } impl<'tcx> FulfillmentError<'tcx> { fn new(obligation: PredicateObligation<'tcx>, code: FulfillmentErrorCode<'tcx>) -> FulfillmentError<'tcx> { FulfillmentError { obligation: obligation, code: code } } } impl<'tcx> TraitObligation<'tcx> { fn self_ty(&self) -> ty::Binder<Ty<'tcx>> { self.predicate.map_bound(|p| p.self_ty()) } } pub fn provide(providers: &mut ty::query::Providers<'_>) { *providers = ty::query::Providers { is_object_safe: object_safety::is_object_safe_provider, specialization_graph_of: specialize::specialization_graph_provider, specializes: specialize::specializes, codegen_fulfill_obligation: codegen::codegen_fulfill_obligation, vtable_methods, substitute_normalize_and_test_predicates, ..*providers }; } pub trait ExClauseFold<'tcx> where Self: chalk_engine::context::Context + Clone, { fn 
fold_ex_clause_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>( ex_clause: &chalk_engine::ExClause<Self>, folder: &mut F, ) -> chalk_engine::ExClause<Self>; fn visit_ex_clause_with<V: TypeVisitor<'tcx>>( ex_clause: &chalk_engine::ExClause<Self>, visitor: &mut V, ) -> bool; } pub trait ChalkContextLift<'tcx> where Self: chalk_engine::context::Context + Clone, { type LiftedExClause: Debug + 'tcx; type LiftedDelayedLiteral: Debug + 'tcx; type LiftedLiteral: Debug + 'tcx; fn lift_ex_clause_to_tcx<'gcx>( ex_clause: &chalk_engine::ExClause<Self>, tcx: TyCtxt<'gcx, 'tcx>, ) -> Option<Self::LiftedExClause>; fn lift_delayed_literal_to_tcx<'gcx>( ex_clause: &chalk_engine::DelayedLiteral<Self>, tcx: TyCtxt<'gcx, 'tcx>, ) -> Option<Self::LiftedDelayedLiteral>; fn lift_literal_to_tcx<'gcx>( ex_clause: &chalk_engine::Literal<Self>, tcx: TyCtxt<'gcx, 'tcx>, ) -> Option<Self::LiftedLiteral>; }
36.768412
99
0.641183
11cd1443f4ff620ec96f0fc427f551b4df2dab84
732
use std::collections::HashMap; use twelf::reexports::serde::{Deserialize, Serialize}; use twelf::{config, Layer}; #[config] #[derive(Debug)] struct Config { list: Vec<String>, labels: HashMap<String, String>, #[serde(flatten)] nested: Nested, } #[derive(Debug, Deserialize, Serialize)] struct Nested { inner: String, } fn main() { std::env::set_var("APP_INNER", "inner value"); std::env::set_var("APP_LIST", "value1,value2"); std::env::set_var("APP_LABELS", "key=value, key2=value2"); let config = Config::with_layers(&[ Layer::Json("./twelf/examples/config.json".into()), Layer::Env(Some(String::from("APP_"))), ]) .unwrap(); println!("config - {:?}", config); }
23.612903
62
0.621585
226b99d96660a19e94e07042e2b6f0608e3331fd
2,999
/* * Copyright (C) 2021 Vaticle * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ use futures::executor; use std::sync::Arc; use std::time::Instant; use typedb_protocol::options::Options; use typedb_protocol::session::Session_Type; use crate::common::Result; use crate::database::Database; use crate::rpc::builder::session::{close_req, open_req}; use crate::rpc::client::RpcClient; use crate::transaction; use crate::transaction::Transaction; #[derive(Copy, Clone, Debug)] pub enum Type { Data = 0, Schema = 1 } impl From<Type> for Session_Type { fn from(session_type: Type) -> Self { match session_type { Type::Data => Session_Type::DATA, Type::Schema => Session_Type::SCHEMA } } } pub struct Session { pub database: Database, pub session_type: Type, pub network_latency_millis: u32, pub(crate) session_id: Vec<u8>, pub(crate) rpc_client: Arc<RpcClient> } impl Session { pub(crate) async fn new(database: &str, session_type: Type, rpc_client: Arc<RpcClient>) -> Result<Self> { let start_time = Instant::now(); let open_req = open_req(database, Session_Type::from(session_type), Options::new()); let res = rpc_client.session_open(open_req).await?; Ok(Session { database: Database::new(String::from(database), Arc::clone(&rpc_client)), session_type, 
network_latency_millis: Session::compute_network_latency(start_time, res.server_duration_millis as u32), session_id: res.session_id, rpc_client }) } pub async fn transaction(&self, transaction_type: transaction::Type) -> Result<Transaction> { Transaction::new(&self.session_id, transaction_type, self.network_latency_millis, &self.rpc_client).await } fn compute_network_latency(start_time: Instant, server_duration_millis: u32) -> u32 { ((Instant::now() - start_time).as_millis() as u32) - server_duration_millis } } impl Drop for Session { #[allow(unused_must_use)] /* we can safely ignore the result of the session_close request */ fn drop(&mut self) { executor::block_on(self.rpc_client.session_close(close_req(self.session_id.clone()))); } }
34.471264
116
0.695232
1d74263596cfe5792ca4dfd62d5e03dfcd7102cd
6,368
use super::CallContext; use super::RibosomeT; use holochain_types::prelude::*; use std::sync::Arc; pub struct HostFnApi<Ribosome: RibosomeT> { ribosome: Arc<Ribosome>, call_context: Arc<CallContext>, } impl<Ribosome: RibosomeT> HostFnApi<Ribosome> { pub fn new(ribosome: Arc<Ribosome>, call_context: Arc<CallContext>) -> Self { Self { ribosome, call_context, } } } macro_rules! host_fn_api_impls { ( $( fn $f:ident ( $input:ty ) -> $output:ty; )* ) => { $( pub(crate) mod $f; )* impl<Ribosome: RibosomeT> HostFnApiT for HostFnApi<Ribosome> { $( fn $f(&self, input: $input) -> Result<$output, HostFnApiError> { $f::$f( self.ribosome.clone(), self.call_context.clone(), input.into() ).map_err(|e| HostFnApiError::RibosomeError(Box::new(e))) } )* } }; } // All host_fn_api_impls below rely on this import use holochain_zome_types as zt; host_fn_api_impls! { // ------------------------------------------------------------------ // These definitions are copy-pasted from // holochain_zome_types::zome_io // TODO: is there a way to unhygienically import this code in both places? // Info about the calling agent. fn agent_info (()) -> zt::info::AgentInfo; // @todo fn app_info (()) -> zt::info::AppInfo; // @todo fn dna_info (()) -> zt::info::DnaInfo; // @todo fn call_info (()) -> zt::info::CallInfo; fn call (zt::call::Call) -> zt::ZomeCallResponse; // Header hash of the DeleteLink element. fn call_remote (zt::call_remote::CallRemote) -> zt::ZomeCallResponse; // @todo List all the local capability claims. fn capability_claims (()) -> (); // @todo List all the local capability grants. fn capability_grants (()) -> (); // @todo Get the capability for the current zome call. fn capability_info (()) -> (); // The EntryDefId determines how a create is handled on the host side. // CapGrant and CapClaim are handled natively. // App entries are referenced by entry defs then SerializedBytes stuffed into an Entry::App. // Returns HeaderHash of the newly created element. 
fn create (zt::entry::EntryWithDefId) -> holo_hash::HeaderHash; fn create_x25519_keypair(()) -> holochain_zome_types::x_salsa20_poly1305::x25519::X25519PubKey; fn x_salsa20_poly1305_encrypt( holochain_zome_types::x_salsa20_poly1305::XSalsa20Poly1305Encrypt ) -> holochain_zome_types::x_salsa20_poly1305::encrypted_data::XSalsa20Poly1305EncryptedData; fn x_salsa20_poly1305_decrypt( holochain_zome_types::x_salsa20_poly1305::XSalsa20Poly1305Decrypt ) -> Option<holochain_zome_types::x_salsa20_poly1305::data::XSalsa20Poly1305Data>; // Sender, Recipient, Data. fn x_25519_x_salsa20_poly1305_encrypt (holochain_zome_types::x_salsa20_poly1305::X25519XSalsa20Poly1305Encrypt) -> holochain_zome_types::x_salsa20_poly1305::encrypted_data::XSalsa20Poly1305EncryptedData; // Recipient, Sender, Encrypted data. fn x_25519_x_salsa20_poly1305_decrypt (holochain_zome_types::x_salsa20_poly1305::X25519XSalsa20Poly1305Decrypt) -> Option<holochain_zome_types::x_salsa20_poly1305::data::XSalsa20Poly1305Data>; // Create a link between two entries. fn create_link (zt::link::CreateLinkInput) -> holo_hash::HeaderHash; // Delete an entry. fn delete (holo_hash::HeaderHash) -> holo_hash::HeaderHash; // Header hash of the CreateLink element. fn delete_link (holo_hash::HeaderHash) -> holo_hash::HeaderHash; // Header hash of the newly committed element. // Emit a Signal::App to subscribers on the interface fn emit_signal (zt::signal::AppSignal) -> (); // The trace host import takes a TraceMsg to output wherever the host wants to display it. // TraceMsg includes line numbers. so the wasm tells the host about it's own code structure. fn trace (zt::trace::TraceMsg) -> (); // Attempt to get a live entry from the cascade. fn get (zt::entry::GetInput) -> Option<zt::element::Element>; fn get_agent_activity (zt::agent_activity::GetAgentActivityInput) -> zt::query::AgentActivity; fn get_details (zt::entry::GetInput) -> Option<zt::metadata::Details>; // Get links by entry hash from the cascade. 
fn get_links (zt::link::GetLinksInput) -> zt::link::Links; fn get_link_details (zt::link::GetLinksInput) -> zt::link::LinkDetails; // Hash an entry on the host. fn hash_entry (zt::entry::Entry) -> holo_hash::EntryHash; // Query the source chain for data. fn query (zt::query::ChainQueryFilter) -> Vec<Element>; // the length of random bytes to create fn random_bytes (u32) -> zt::bytes::Bytes; // Remotely signal many agents without waiting for responses fn remote_signal (zt::signal::RemoteSignal) -> (); // // @todo // fn send (()) -> (); // @todo fn schedule (core::time::Duration) -> (); // @todo fn sleep (core::time::Duration) -> (); // @todo fn version (()) -> zt::version::ZomeApiVersion; // Attempt to have the keystore sign some data // The pubkey in the input needs to be found in the keystore for this to work fn sign (zt::signature::Sign) -> zt::signature::Signature; // Sign a list of datas with an ephemeral, randomly generated keypair. fn sign_ephemeral (zt::signature::SignEphemeral) -> zt::signature::EphemeralSignatures; // Current system time, in the opinion of the host, as a `Duration`. fn sys_time (()) -> core::time::Duration; // Same as but also takes the HeaderHash of the updated element. fn update (zt::entry::UpdateInput) -> holo_hash::HeaderHash; fn verify_signature (zt::signature::VerifySignature) -> bool; // There's nothing to go in or out of a noop. // Used to "defuse" host functions when side effects are not allowed. fn unreachable (()) -> (); // The zome and agent info are constants specific to the current zome and chain. // All the information is provided by core so there is no input value. // These are constant for the lifetime of a zome call. fn zome_info (()) -> zt::info::ZomeInfo; }
36.181818
207
0.65468
f704f726b57972a26fc126eca0f67fe6f8c144e1
300
mod api; mod asset; mod handlers; mod iex; mod portfolio; mod processor; mod slack; mod user; use warp::Filter; #[tokio::main] async fn main() { pretty_env_logger::init(); let api = api::compose_api().with(warp::log("exchange")); warp::serve(api).run(([127, 0, 0, 1], 8000)).await; }
15
61
0.643333
89c6e7238b9541e055673bfade11c6e40cd381c8
13,698
use bincode::serialized_size; use hashbrown::HashMap; use log::*; use rayon::prelude::*; use morgan::connection_info::ContactInfo; use morgan::gossip::*; use morgan::gossip_error_type::CrdsGossipError; use morgan::push_to_gossip::CRDS_GOSSIP_PUSH_MSG_TIMEOUT_MS; use morgan::propagation_value::CrdsValue; use morgan::propagation_value::CrdsValueLabel; use morgan_interface::hash::hash; use morgan_interface::pubkey::Pubkey; use morgan_interface::timing::timestamp; use std::sync::{Arc, Mutex}; type Node = Arc<Mutex<CrdsGossip>>; type Network = HashMap<Pubkey, Node>; fn star_network_create(num: usize) -> Network { let entry = CrdsValue::ContactInfo(ContactInfo::new_localhost(&Pubkey::new_rand(), 0)); let mut network: HashMap<_, _> = (1..num) .map(|_| { let new = CrdsValue::ContactInfo(ContactInfo::new_localhost(&Pubkey::new_rand(), 0)); let id = new.label().pubkey(); let mut node = CrdsGossip::default(); node.crds.insert(new.clone(), 0).unwrap(); node.crds.insert(entry.clone(), 0).unwrap(); node.set_self(&id); (new.label().pubkey(), Arc::new(Mutex::new(node))) }) .collect(); let mut node = CrdsGossip::default(); let id = entry.label().pubkey(); node.crds.insert(entry.clone(), 0).unwrap(); node.set_self(&id); network.insert(id, Arc::new(Mutex::new(node))); network } fn rstar_network_create(num: usize) -> Network { let entry = CrdsValue::ContactInfo(ContactInfo::new_localhost(&Pubkey::new_rand(), 0)); let mut origin = CrdsGossip::default(); let id = entry.label().pubkey(); origin.crds.insert(entry.clone(), 0).unwrap(); origin.set_self(&id); let mut network: HashMap<_, _> = (1..num) .map(|_| { let new = CrdsValue::ContactInfo(ContactInfo::new_localhost(&Pubkey::new_rand(), 0)); let id = new.label().pubkey(); let mut node = CrdsGossip::default(); node.crds.insert(new.clone(), 0).unwrap(); origin.crds.insert(new.clone(), 0).unwrap(); node.set_self(&id); (new.label().pubkey(), Arc::new(Mutex::new(node))) }) .collect(); network.insert(id, Arc::new(Mutex::new(origin))); network 
} fn ring_network_create(num: usize) -> Network { let mut network: HashMap<_, _> = (0..num) .map(|_| { let new = CrdsValue::ContactInfo(ContactInfo::new_localhost(&Pubkey::new_rand(), 0)); let id = new.label().pubkey(); let mut node = CrdsGossip::default(); node.crds.insert(new.clone(), 0).unwrap(); node.set_self(&id); (new.label().pubkey(), Arc::new(Mutex::new(node))) }) .collect(); let keys: Vec<Pubkey> = network.keys().cloned().collect(); for k in 0..keys.len() { let start_info = { let start = &network[&keys[k]]; let start_id = start.lock().unwrap().id.clone(); start .lock() .unwrap() .crds .lookup(&CrdsValueLabel::ContactInfo(start_id)) .unwrap() .clone() }; let end = network.get_mut(&keys[(k + 1) % keys.len()]).unwrap(); end.lock().unwrap().crds.insert(start_info, 0).unwrap(); } network } fn network_simulator_pull_only(network: &mut Network) { let num = network.len(); let (converged, bytes_tx) = network_run_pull(network, 0, num * 2, 0.9); trace!( "network_simulator_pull_{}: converged: {} total_bytes: {}", num, converged, bytes_tx ); assert!(converged >= 0.9); } fn network_simulator(network: &mut Network) { let num = network.len(); // run for a small amount of time let (converged, bytes_tx) = network_run_pull(network, 0, 10, 1.0); trace!("network_simulator_push_{}: converged: {}", num, converged); // make sure there is someone in the active set let network_values: Vec<Node> = network.values().cloned().collect(); network_values.par_iter().for_each(|node| { node.lock() .unwrap() .refresh_push_active_set(&HashMap::new()); }); let mut total_bytes = bytes_tx; for second in 1..num { let start = second * 10; let end = (second + 1) * 10; let now = (start * 100) as u64; // push a message to the network network_values.par_iter().for_each(|locked_node| { let node = &mut locked_node.lock().unwrap(); let mut m = node .crds .lookup(&CrdsValueLabel::ContactInfo(node.id)) .and_then(|v| v.contact_info().cloned()) .unwrap(); m.wallclock = now; 
node.process_push_message(vec![CrdsValue::ContactInfo(m)], now); }); // push for a bit let (queue_size, bytes_tx) = network_run_push(network, start, end); total_bytes += bytes_tx; trace!( "network_simulator_push_{}: queue_size: {} bytes: {}", num, queue_size, bytes_tx ); // pull for a bit let (converged, bytes_tx) = network_run_pull(network, start, end, 1.0); total_bytes += bytes_tx; trace!( "network_simulator_push_{}: converged: {} bytes: {} total_bytes: {}", num, converged, bytes_tx, total_bytes ); if converged > 0.9 { break; } } } fn network_run_push(network: &mut Network, start: usize, end: usize) -> (usize, usize) { let mut bytes: usize = 0; let mut num_msgs: usize = 0; let mut total: usize = 0; let num = network.len(); let mut prunes: usize = 0; let mut delivered: usize = 0; let network_values: Vec<Node> = network.values().cloned().collect(); for t in start..end { let now = t as u64 * 100; let requests: Vec<_> = network_values .par_iter() .map(|node| { node.lock().unwrap().purge(now); node.lock().unwrap().new_push_messages(now) }) .collect(); let transfered: Vec<_> = requests .into_par_iter() .map(|(from, peers, msgs)| { let mut bytes: usize = 0; let mut delivered: usize = 0; let mut num_msgs: usize = 0; let mut prunes: usize = 0; for to in peers { bytes += serialized_size(&msgs).unwrap() as usize; num_msgs += 1; let rsps = network .get(&to) .map(|node| node.lock().unwrap().process_push_message(msgs.clone(), now)) .unwrap(); bytes += serialized_size(&rsps).unwrap() as usize; prunes += rsps.len(); network .get(&from) .map(|node| { let mut node = node.lock().unwrap(); let destination = node.id; let now = timestamp(); node.process_prune_msg(&to, &destination, &rsps, now, now) .unwrap() }) .unwrap(); delivered += rsps.is_empty() as usize; } (bytes, delivered, num_msgs, prunes) }) .collect(); for (b, d, m, p) in transfered { bytes += b; delivered += d; num_msgs += m; prunes += p; } if now % CRDS_GOSSIP_PUSH_MSG_TIMEOUT_MS == 0 && now > 0 { 
network_values.par_iter().for_each(|node| { node.lock() .unwrap() .refresh_push_active_set(&HashMap::new()); }); } total = network_values .par_iter() .map(|v| v.lock().unwrap().push.num_pending()) .sum(); trace!( "network_run_push_{}: now: {} queue: {} bytes: {} num_msgs: {} prunes: {} delivered: {}", num, now, total, bytes, num_msgs, prunes, delivered, ); } (total, bytes) } fn network_run_pull( network: &mut Network, start: usize, end: usize, max_convergance: f64, ) -> (f64, usize) { let mut bytes: usize = 0; let mut msgs: usize = 0; let mut overhead: usize = 0; let mut convergance = 0f64; let num = network.len(); let network_values: Vec<Node> = network.values().cloned().collect(); for t in start..end { let now = t as u64 * 100; let requests: Vec<_> = { network_values .par_iter() .filter_map(|from| { from.lock() .unwrap() .new_pull_request(now, &HashMap::new()) .ok() }) .collect() }; let transfered: Vec<_> = requests .into_par_iter() .map(|(to, request, caller_info)| { let mut bytes: usize = 0; let mut msgs: usize = 0; let mut overhead: usize = 0; let from = caller_info.label().pubkey(); bytes += request.keys.len(); bytes += (request.bits.len() / 8) as usize; bytes += serialized_size(&caller_info).unwrap() as usize; let rsp = network .get(&to) .map(|node| { node.lock() .unwrap() .process_pull_request(caller_info, request, now) }) .unwrap(); bytes += serialized_size(&rsp).unwrap() as usize; msgs += rsp.len(); network.get(&from).map(|node| { node.lock() .unwrap() .mark_pull_request_creation_time(&from, now); overhead += node.lock().unwrap().process_pull_response(&from, rsp, now); }); (bytes, msgs, overhead) }) .collect(); for (b, m, o) in transfered { bytes += b; msgs += m; overhead += o; } let total: usize = network_values .par_iter() .map(|v| v.lock().unwrap().crds.table.len()) .sum(); convergance = total as f64 / ((num * num) as f64); if convergance > max_convergance { break; } trace!( "network_run_pull_{}: now: {} connections: {} convergance: {} bytes: {} msgs: 
{} overhead: {}", num, now, total, convergance, bytes, msgs, overhead ); } (convergance, bytes) } #[test] fn test_star_network_pull_50() { let mut network = star_network_create(50); network_simulator_pull_only(&mut network); } #[test] fn test_star_network_pull_100() { let mut network = star_network_create(100); network_simulator_pull_only(&mut network); } #[test] fn test_star_network_push_star_200() { let mut network = star_network_create(200); network_simulator(&mut network); } #[test] fn test_star_network_push_rstar_200() { let mut network = rstar_network_create(200); network_simulator(&mut network); } #[test] fn test_star_network_push_ring_200() { let mut network = ring_network_create(200); network_simulator(&mut network); } #[test] #[ignore] fn test_star_network_large_pull() { morgan_logger::setup(); let mut network = star_network_create(2000); network_simulator_pull_only(&mut network); } #[test] #[ignore] fn test_rstar_network_large_push() { morgan_logger::setup(); let mut network = rstar_network_create(4000); network_simulator(&mut network); } #[test] #[ignore] fn test_ring_network_large_push() { morgan_logger::setup(); let mut network = ring_network_create(4001); network_simulator(&mut network); } #[test] #[ignore] fn test_star_network_large_push() { morgan_logger::setup(); let mut network = star_network_create(4002); network_simulator(&mut network); } #[test] fn test_prune_errors() { let mut crds_gossip = CrdsGossip::default(); crds_gossip.id = Pubkey::new(&[0; 32]); let id = crds_gossip.id; let ci = ContactInfo::new_localhost(&Pubkey::new(&[1; 32]), 0); let prune_pubkey = Pubkey::new(&[2; 32]); crds_gossip .crds .insert(CrdsValue::ContactInfo(ci.clone()), 0) .unwrap(); crds_gossip.refresh_push_active_set(&HashMap::new()); let now = timestamp(); //incorrect dest let mut res = crds_gossip.process_prune_msg( &ci.id, &Pubkey::new(hash(&[1; 32]).as_ref()), &[prune_pubkey], now, now, ); assert_eq!(res.err(), Some(CrdsGossipError::BadPruneDestination)); //correct 
dest res = crds_gossip.process_prune_msg(&ci.id, &id, &[prune_pubkey], now, now); res.unwrap(); //test timeout let timeout = now + crds_gossip.push.prune_timeout * 2; res = crds_gossip.process_prune_msg(&ci.id, &id, &[prune_pubkey], now, timeout); assert_eq!(res.err(), Some(CrdsGossipError::PruneMessageTimeout)); }
34.417085
111
0.526573
28ab1f54e7322f3713d7e4ceeafc09dc7fbd1124
1,848
pub struct IconConnectWithoutContact { props: crate::Props, } impl yew::Component for IconConnectWithoutContact { type Properties = crate::Props; type Message = (); fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self { Self { props } } fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender { true } fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender { false } fn view(&self) -> yew::prelude::Html { yew::prelude::html! { <svg class=self.props.class.unwrap_or("") width=self.props.size.unwrap_or(24).to_string() height=self.props.size.unwrap_or(24).to_string() viewBox="0 0 24 24" fill=self.props.fill.unwrap_or("none") stroke=self.props.color.unwrap_or("currentColor") stroke-width=self.props.stroke_width.unwrap_or(2).to_string() stroke-linecap=self.props.stroke_linecap.unwrap_or("round") stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round") > <svg xmlns="http://www.w3.org/2000/svg" enable-background="new 0 0 24 24" height="24" viewBox="0 0 24 24" width="24"><rect fill="none" height="24" width="24"/><path d="M11,14H9c0-4.97,4.03-9,9-9v2C14.13,7,11,10.13,11,14z M18,11V9c-2.76,0-5,2.24-5,5h2C15,12.34,16.34,11,18,11z M7,4 c0-1.11-0.89-2-2-2S3,2.89,3,4s0.89,2,2,2S7,5.11,7,4z M11.45,4.5h-2C9.21,5.92,7.99,7,6.5,7h-3C2.67,7,2,7.67,2,8.5V11h6V8.74 C9.86,8.15,11.25,6.51,11.45,4.5z M19,17c1.11,0,2-0.89,2-2s-0.89-2-2-2s-2,0.89-2,2S17.89,17,19,17z M20.5,18h-3 c-1.49,0-2.71-1.08-2.95-2.5h-2c0.2,2.01,1.59,3.65,3.45,4.24V22h6v-2.5C22,18.67,21.33,18,20.5,18z"/></svg> </svg> } } }
40.173913
631
0.594156
76fd0819a39a5f5225e7b7b44feb0dfa1a1f203e
2,193
//! lint on multiple versions of a crate being used use cargo_metadata::{DependencyKind, Metadata, Node, Package, PackageId}; use clippy_utils::diagnostics::span_lint; use if_chain::if_chain; use itertools::Itertools; use rustc_hir::def_id::LOCAL_CRATE; use rustc_lint::LateContext; use rustc_span::source_map::DUMMY_SP; use super::MULTIPLE_CRATE_VERSIONS; pub(super) fn check(cx: &LateContext<'_>, metadata: &Metadata) { let local_name = cx.tcx.crate_name(LOCAL_CRATE); let mut packages = metadata.packages.clone(); packages.sort_by(|a, b| a.name.cmp(&b.name)); if_chain! { if let Some(resolve) = &metadata.resolve; if let Some(local_id) = packages .iter() .find_map(|p| if p.name == local_name.as_str() { Some(&p.id) } else { None }); then { for (name, group) in &packages.iter().group_by(|p| p.name.clone()) { let group: Vec<&Package> = group.collect(); if group.len() <= 1 { continue; } if group.iter().all(|p| is_normal_dep(&resolve.nodes, local_id, &p.id)) { let mut versions: Vec<_> = group.into_iter().map(|p| &p.version).collect(); versions.sort(); let versions = versions.iter().join(", "); span_lint( cx, MULTIPLE_CRATE_VERSIONS, DUMMY_SP, &format!("multiple versions for dependency `{}`: {}", name, versions), ); } } } } } fn is_normal_dep(nodes: &[Node], local_id: &PackageId, dep_id: &PackageId) -> bool { fn depends_on(node: &Node, dep_id: &PackageId) -> bool { node.deps.iter().any(|dep| { dep.pkg == *dep_id && dep .dep_kinds .iter() .any(|info| matches!(info.kind, DependencyKind::Normal)) }) } nodes .iter() .filter(|node| depends_on(node, dep_id)) .any(|node| node.id == *local_id || is_normal_dep(nodes, local_id, &node.id)) }
34.265625
95
0.5285
5d93308acc860290b1d98c75e32173254b9a54da
92
pub(crate) mod async_line_buffer; mod buffer_pool; pub(crate) use buffer_pool::BufferPool;
18.4
39
0.804348
6a4fc6f89180ee339f914127c1f421af80c70257
633
// Copyright (c) Facebook, Inc. and its affiliates. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. pub mod remote_store;
39.5625
75
0.744076
7af3d85aa8bc868d11c49b24a6b7141c5f55158f
5,095
#[doc = "Register `PIDR3` reader"] pub struct R(crate::R<PIDR3_SPEC>); impl core::ops::Deref for R { type Target = crate::R<PIDR3_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<PIDR3_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<PIDR3_SPEC>) -> Self { R(reader) } } #[doc = "Customer Modified. Indicates whether the customer has modified the behavior of the component. In most cases, this field is 0b0000. Customers change this value when they make authorized modifications to this component.\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum CMOD_A { #[doc = "0: Indicates that the customer has not modified this component."] UNMODIFIED = 0, } impl From<CMOD_A> for u8 { #[inline(always)] fn from(variant: CMOD_A) -> Self { variant as _ } } #[doc = "Field `CMOD` reader - Customer Modified. Indicates whether the customer has modified the behavior of the component. In most cases, this field is 0b0000. Customers change this value when they make authorized modifications to this component."] pub struct CMOD_R(crate::FieldReader<u8, CMOD_A>); impl CMOD_R { pub(crate) fn new(bits: u8) -> Self { CMOD_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> Option<CMOD_A> { match self.bits { 0 => Some(CMOD_A::UNMODIFIED), _ => None, } } #[doc = "Checks if the value of the field is `UNMODIFIED`"] #[inline(always)] pub fn is_unmodified(&self) -> bool { **self == CMOD_A::UNMODIFIED } } impl core::ops::Deref for CMOD_R { type Target = crate::FieldReader<u8, CMOD_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Indicates minor errata fixes specific to the revision of the component being used, for example metal fixes after implementation. In most cases, this field is 0b0000. 
Arm recommends that the component designers ensure that a metal fix can change this field if required, for example, by driving it from registers that reset to 0b0000.\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] #[repr(u8)] pub enum REVAND_A { #[doc = "0: Indicates that there are no errata fixes to this component."] NOERRATA = 0, } impl From<REVAND_A> for u8 { #[inline(always)] fn from(variant: REVAND_A) -> Self { variant as _ } } #[doc = "Field `REVAND` reader - Indicates minor errata fixes specific to the revision of the component being used, for example metal fixes after implementation. In most cases, this field is 0b0000. Arm recommends that the component designers ensure that a metal fix can change this field if required, for example, by driving it from registers that reset to 0b0000."] pub struct REVAND_R(crate::FieldReader<u8, REVAND_A>); impl REVAND_R { pub(crate) fn new(bits: u8) -> Self { REVAND_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> Option<REVAND_A> { match self.bits { 0 => Some(REVAND_A::NOERRATA), _ => None, } } #[doc = "Checks if the value of the field is `NOERRATA`"] #[inline(always)] pub fn is_no_errata(&self) -> bool { **self == REVAND_A::NOERRATA } } impl core::ops::Deref for REVAND_R { type Target = crate::FieldReader<u8, REVAND_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl R { #[doc = "Bits 0:3 - Customer Modified. Indicates whether the customer has modified the behavior of the component. In most cases, this field is 0b0000. Customers change this value when they make authorized modifications to this component."] #[inline(always)] pub fn cmod(&self) -> CMOD_R { CMOD_R::new((self.bits & 0x0f) as u8) } #[doc = "Bits 4:7 - Indicates minor errata fixes specific to the revision of the component being used, for example metal fixes after implementation. In most cases, this field is 0b0000. 
Arm recommends that the component designers ensure that a metal fix can change this field if required, for example, by driving it from registers that reset to 0b0000."] #[inline(always)] pub fn revand(&self) -> REVAND_R { REVAND_R::new(((self.bits >> 4) & 0x0f) as u8) } } #[doc = "Peripheral ID3 Register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pidr3](index.html) module"] pub struct PIDR3_SPEC; impl crate::RegisterSpec for PIDR3_SPEC { type Ux = u32; } #[doc = "`read()` method returns [pidr3::R](R) reader structure"] impl crate::Readable for PIDR3_SPEC { type Reader = R; } #[doc = "`reset()` method sets PIDR3 to value 0"] impl crate::Resettable for PIDR3_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
41.08871
367
0.66261
5d98d905c97e2c795695c136740d7c7266ae3181
12,938
//! VFS stands for Virtual File System. //! //! When doing analysis, we don't want to do any IO, we want to keep all source //! code in memory. However, the actual source code is stored on disk, so you //! need to get it into the memory in the first place somehow. VFS is the //! component which does this. //! //! It is also responsible for watching the disk for changes, and for merging //! editor state (modified, unsaved files) with disk state. //! TODO: Some LSP clients support watching the disk, so this crate should //! to support custom watcher events (related to https://github.com/rust-analyzer/rust-analyzer/issues/131) //! //! VFS is based on a concept of roots: a set of directories on the file system //! which are watched for changes. Typically, there will be a root for each //! Cargo package. mod io; use std::{ cmp::Reverse, fmt, fs, mem, path::{Path, PathBuf}, sync::Arc, thread, }; use crossbeam_channel::Receiver; use ra_arena::{impl_arena_id, Arena, RawId, map::ArenaMap}; use relative_path::{Component, RelativePath, RelativePathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; pub use crate::io::TaskResult as VfsTask; use io::{TaskResult, Worker}; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct VfsRoot(pub RawId); impl_arena_id!(VfsRoot); /// Describes the contents of a single source root. 
/// /// `RootConfig` can be thought of as a glob pattern like `src/**.rs` which /// specifies the source root or as a function which takes a `PathBuf` and /// returns `true` iff path belongs to the source root pub(crate) struct RootConfig { root: PathBuf, excluded_dirs: Vec<PathBuf>, } pub(crate) struct Roots { roots: Arena<VfsRoot, Arc<RootConfig>>, } impl std::ops::Deref for Roots { type Target = Arena<VfsRoot, Arc<RootConfig>>; fn deref(&self) -> &Self::Target { &self.roots } } impl RootConfig { fn new(root: PathBuf, excluded_dirs: Vec<PathBuf>) -> RootConfig { RootConfig { root, excluded_dirs } } /// Checks if root contains a path and returns a root-relative path. pub(crate) fn contains(&self, path: &Path) -> Option<RelativePathBuf> { // First, check excluded dirs if self.excluded_dirs.iter().any(|it| path.starts_with(it)) { return None; } let rel_path = path.strip_prefix(&self.root).ok()?; let rel_path = RelativePathBuf::from_path(rel_path).ok()?; // Ignore some common directories. // // FIXME: don't hard-code, specify at source-root creation time using // gitignore for (i, c) in rel_path.components().enumerate() { if let Component::Normal(c) = c { if (i == 0 && c == "target") || c == ".git" || c == "node_modules" { return None; } } } if path.is_file() && rel_path.extension() != Some("rs") { return None; } Some(rel_path) } } impl Roots { pub(crate) fn new(mut paths: Vec<PathBuf>) -> Roots { let mut roots = Arena::default(); // A hack to make nesting work. 
paths.sort_by_key(|it| Reverse(it.as_os_str().len())); paths.dedup(); for (i, path) in paths.iter().enumerate() { let nested_roots = paths[..i] .iter() .filter(|it| it.starts_with(path)) .map(|it| it.clone()) .collect::<Vec<_>>(); let config = Arc::new(RootConfig::new(path.clone(), nested_roots)); roots.alloc(config.clone()); } Roots { roots } } pub(crate) fn find(&self, path: &Path) -> Option<(VfsRoot, RelativePathBuf)> { self.roots.iter().find_map(|(root, data)| data.contains(path).map(|it| (root, it))) } } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct VfsFile(pub RawId); impl_arena_id!(VfsFile); struct VfsFileData { root: VfsRoot, path: RelativePathBuf, is_overlayed: bool, text: Arc<String>, } pub struct Vfs { roots: Arc<Roots>, files: Arena<VfsFile, VfsFileData>, root2files: ArenaMap<VfsRoot, FxHashSet<VfsFile>>, pending_changes: Vec<VfsChange>, worker: Worker, } impl fmt::Debug for Vfs { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("Vfs") .field("n_roots", &self.roots.len()) .field("n_files", &self.files.len()) .field("n_pending_changes", &self.pending_changes.len()) .finish() } } impl Vfs { pub fn new(roots: Vec<PathBuf>) -> (Vfs, Vec<VfsRoot>) { let roots = Arc::new(Roots::new(roots)); let worker = io::Worker::start(Arc::clone(&roots)); let mut root2files = ArenaMap::default(); for (root, config) in roots.iter() { root2files.insert(root, Default::default()); worker.sender().send(io::Task::AddRoot { root, config: Arc::clone(config) }).unwrap(); } let res = Vfs { roots, files: Arena::default(), root2files, worker, pending_changes: Vec::new() }; let vfs_roots = res.roots.iter().map(|(id, _)| id).collect(); (res, vfs_roots) } pub fn root2path(&self, root: VfsRoot) -> PathBuf { self.roots[root].root.clone() } pub fn path2file(&self, path: &Path) -> Option<VfsFile> { if let Some((_root, _path, Some(file))) = self.find_root(path) { return Some(file); } None } pub fn file2path(&self, file: VfsFile) -> PathBuf 
{ let rel_path = &self.files[file].path; let root_path = &self.roots[self.files[file].root].root; rel_path.to_path(root_path) } pub fn file_for_path(&self, path: &Path) -> Option<VfsFile> { if let Some((_root, _path, Some(file))) = self.find_root(path) { return Some(file); } None } pub fn num_roots(&self) -> usize { self.roots.len() } pub fn load(&mut self, path: &Path) -> Option<VfsFile> { if let Some((root, rel_path, file)) = self.find_root(path) { return if let Some(file) = file { Some(file) } else { let text = fs::read_to_string(path).unwrap_or_default(); let text = Arc::new(text); let file = self.add_file(root, rel_path.clone(), Arc::clone(&text), false); let change = VfsChange::AddFile { file, text, root, path: rel_path }; self.pending_changes.push(change); Some(file) }; } None } pub fn task_receiver(&self) -> &Receiver<io::TaskResult> { self.worker.receiver() } pub fn handle_task(&mut self, task: io::TaskResult) { match task { TaskResult::BulkLoadRoot { root, files } => { let mut cur_files = Vec::new(); // While we were scanning the root in the background, a file might have // been open in the editor, so we need to account for that. 
let exising = self.root2files[root] .iter() .map(|&file| (self.files[file].path.clone(), file)) .collect::<FxHashMap<_, _>>(); for (path, text) in files { if let Some(&file) = exising.get(&path) { let text = Arc::clone(&self.files[file].text); cur_files.push((file, path, text)); continue; } let text = Arc::new(text); let file = self.add_file(root, path.clone(), Arc::clone(&text), false); cur_files.push((file, path, text)); } let change = VfsChange::AddRoot { root, files: cur_files }; self.pending_changes.push(change); } TaskResult::SingleFile { root, path, text } => { match (self.find_file(root, &path), text) { (Some(file), None) => { self.do_remove_file(root, path, file, false); } (None, Some(text)) => { self.do_add_file(root, path, text, false); } (Some(file), Some(text)) => { self.do_change_file(file, text, false); } (None, None) => (), } } } } fn do_add_file( &mut self, root: VfsRoot, path: RelativePathBuf, text: String, is_overlay: bool, ) -> Option<VfsFile> { let text = Arc::new(text); let file = self.add_file(root, path.clone(), text.clone(), is_overlay); self.pending_changes.push(VfsChange::AddFile { file, root, path, text }); Some(file) } fn do_change_file(&mut self, file: VfsFile, text: String, is_overlay: bool) { if !is_overlay && self.files[file].is_overlayed { return; } let text = Arc::new(text); self.change_file(file, text.clone(), is_overlay); self.pending_changes.push(VfsChange::ChangeFile { file, text }); } fn do_remove_file( &mut self, root: VfsRoot, path: RelativePathBuf, file: VfsFile, is_overlay: bool, ) { if !is_overlay && self.files[file].is_overlayed { return; } self.remove_file(file); self.pending_changes.push(VfsChange::RemoveFile { root, path, file }); } pub fn add_file_overlay(&mut self, path: &Path, text: String) -> Option<VfsFile> { if let Some((root, rel_path, file)) = self.find_root(path) { if let Some(file) = file { self.do_change_file(file, text, true); Some(file) } else { self.do_add_file(root, rel_path, text, true) } } else { 
None } } pub fn change_file_overlay(&mut self, path: &Path, new_text: String) { if let Some((_root, _path, file)) = self.find_root(path) { let file = file.expect("can't change a file which wasn't added"); self.do_change_file(file, new_text, true); } } pub fn remove_file_overlay(&mut self, path: &Path) -> Option<VfsFile> { if let Some((root, path, file)) = self.find_root(path) { let file = file.expect("can't remove a file which wasn't added"); let full_path = path.to_path(&self.roots[root].root); if let Ok(text) = fs::read_to_string(&full_path) { self.do_change_file(file, text, true); } else { self.do_remove_file(root, path, file, true); } Some(file) } else { None } } pub fn commit_changes(&mut self) -> Vec<VfsChange> { mem::replace(&mut self.pending_changes, Vec::new()) } /// Sutdown the VFS and terminate the background watching thread. pub fn shutdown(self) -> thread::Result<()> { self.worker.shutdown() } fn add_file( &mut self, root: VfsRoot, path: RelativePathBuf, text: Arc<String>, is_overlayed: bool, ) -> VfsFile { let data = VfsFileData { root, path, text, is_overlayed }; let file = self.files.alloc(data); self.root2files.get_mut(root).unwrap().insert(file); file } fn change_file(&mut self, file: VfsFile, new_text: Arc<String>, is_overlayed: bool) { let mut file_data = &mut self.files[file]; file_data.text = new_text; file_data.is_overlayed = is_overlayed; } fn remove_file(&mut self, file: VfsFile) { //FIXME: use arena with removal self.files[file].text = Default::default(); self.files[file].path = Default::default(); let root = self.files[file].root; let removed = self.root2files.get_mut(root).unwrap().remove(&file); assert!(removed); } fn find_root(&self, path: &Path) -> Option<(VfsRoot, RelativePathBuf, Option<VfsFile>)> { let (root, path) = self.roots.find(&path)?; let file = self.find_file(root, &path); Some((root, path, file)) } fn find_file(&self, root: VfsRoot, path: &RelativePath) -> Option<VfsFile> { self.root2files[root].iter().map(|&it| 
it).find(|&file| self.files[file].path == path) } } #[derive(Debug, Clone)] pub enum VfsChange { AddRoot { root: VfsRoot, files: Vec<(VfsFile, RelativePathBuf, Arc<String>)> }, AddFile { root: VfsRoot, file: VfsFile, path: RelativePathBuf, text: Arc<String> }, RemoveFile { root: VfsRoot, file: VfsFile, path: RelativePathBuf }, ChangeFile { file: VfsFile, text: Arc<String> }, }
34.409574
107
0.56137
9cf85324bf2596210b1d2b5dfcaf076f4f6cf6d4
318
//! # A network backend implementation. Either network manager or iwd. //! This depends on the cargo feature flag. Either "networkmanager" or "iwd". #[cfg(feature = "iwd")] mod iwd; #[cfg(feature = "networkmanager")] mod nm; #[cfg(feature = "iwd")] pub use iwd::*; #[cfg(feature = "networkmanager")] pub use nm::*;
22.714286
77
0.669811
9bb10535cc7a770c3b5c7eaaa3b65d15002b8347
368
use std::collections::BTreeMap; use crate::{Context, GqlValue, ResolveFut, ResolverResult}; #[async_trait::async_trait] pub trait CustomDirective: Send + Sync { async fn resolve_field( &self, ctx: &Context<'_>, directive_args: &BTreeMap<String, GqlValue>, resolve_fut: ResolveFut<'_>, ) -> ResolverResult<Option<GqlValue>>; }
26.285714
59
0.668478
d7fe9deb62050d65b0aa18e46262ac290be9ba42
1,659
use tbd_model_wrappers::Wrapper; use tbd_lifecycle::ModelLifeCycle; use tbd_fieldset::*; use tbd_keyed::Keyed; #[derive(Debug, Clone)] pub struct Post { // TODO: These pubs must go pub content: String } #[derive(Clone, Default)] pub struct ContentField(String); impl Field for ContentField { type Model = Post; type Type = String; fn name() -> &'static str { "content" } fn get(model: &Post) -> &String { &model.content } fn get_mut(model: &mut Post) -> &mut String { &mut model.content } } pub struct PostFieldSet { pub content: ContentField } impl FieldSet for PostFieldSet { type Model = Post; } impl ModelLifeCycle for Post { type PrimaryKey = i64; fn created(&mut self, pk: &[u8]) { } } impl<'a> From<&'a rusqlite::Row<'a, 'a>> for Post { fn from(row: &rusqlite::Row) -> Post { Post { content: row.get(1) } } } pub struct KeyedPost(pub Keyed<i64, Post>); impl<'a> From<&'a rusqlite::Row<'a, 'a>> for KeyedPost { fn from(row: &rusqlite::Row) -> KeyedPost { KeyedPost(Keyed::with_key( row.get(0), row.into() )) } } impl ModelLifeCycle for KeyedPost { type PrimaryKey = i64; fn created(&mut self, pk: &[u8]) { self.0.created(pk) } } impl Wrapper for Post { type Wrapping = Post; type Returning = Post; fn wrap(m: Post) -> Post { m } } impl Wrapper for KeyedPost { type Wrapping = Post; type Returning = KeyedPost; fn wrap(m: Post) -> KeyedPost { KeyedPost( Keyed::new(m) ) } }
17.463158
56
0.570223
bf05f38dcfe53f4991ec7b2acfcf0ebc57723406
8,978
/* * Copyright 2022 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use gstreamer::ClockTime; use gstreamer::prelude::*; use crate::catch_bail; use crate::error::{ParseError, ParseResult}; use crate::script::Pattern; pub trait EventAction: Send + Sync { fn exec(&self); } pub struct WindowAction { window: String, action: String, settings: Vec<String>, chan: crossbeam_channel::Sender<String>, } unsafe impl Send for WindowAction {} unsafe impl Sync for WindowAction {} impl EventAction for WindowAction { fn exec(&self) { let mut s = self.window.clone(); s.push_str(" "); s.push_str(self.action.as_str()); for setting in self.settings.iter() { s.push_str(" "); s.push_str(setting.as_str()); } let sent = self.chan.send(s); if let Err(_) = sent { panic!("Couldn't send window action over channel") } } } pub struct PlayAction { pipeline: gstreamer::Pipeline, action: String, } unsafe impl Send for PlayAction {} unsafe impl Sync for PlayAction {} impl EventAction for PlayAction { fn exec(&self) { let pipeline = self.pipeline.clone().dynamic_cast::<gstreamer::Pipeline>().unwrap(); let action = match self.action.as_str() { "start" => { gstreamer::State::Playing }, "pause" => { gstreamer::State::Paused }, "ready" => { gstreamer::State::Ready }, "null" => { gstreamer::State::Null }, a => panic!("Unknown pipeline state: {}", a) }; match pipeline.set_state(action) { Ok(_) => println!("Element {} set to {}", self.pipeline.name(), self.action), Err(err) => println!("PlayAction state 
change error: {:?}", err) } } } pub struct SeekAction { pipeline: gstreamer::Pipeline, rate: f64, time: f64, } unsafe impl Send for SeekAction {} unsafe impl Sync for SeekAction {} impl EventAction for SeekAction { fn exec(&self) { let result = self.pipeline.seek( self.rate, gstreamer::SeekFlags::FLUSH, gstreamer::SeekType::Set, ClockTime::from_nseconds((self.time * 1000000.0) as u64), gstreamer::SeekType::None, ClockTime::from_nseconds(0), ); if let Err(_) = result { println!("Seek event was not handled for pipeline {}", self.pipeline.name()) } } } pub struct SetPropAction { element: gstreamer::Element, prop: String, type_: String, setting: String, } unsafe impl Send for SetPropAction {} unsafe impl Sync for SetPropAction {} impl EventAction for SetPropAction { fn exec(&self) { let status = set_property(self.element.clone(), self.prop.clone(), self.type_.clone(), self.setting.clone()); if let Err(err) = status { panic!("{}",err.annotate("set_property")); } } } pub fn set_property(element: gstreamer::Element, prop: String, type_: String, value: String) -> ParseResult<()> { match type_.as_str() { "int" => { let val = catch_bail!(value.parse::<i32>(), format!("Unable to parse {} as {}", value, type_)); element.set_property(prop.as_str(), val); Ok(()) }, "float" => { let val = catch_bail!(value.parse::<f64>(), format!("Unable to parse {} as {}", value, type_)); element.set_property(prop.as_str(), val); Ok(()) } "string" => { element.set_property(prop.as_str(), value); Ok(()) }, "GstOrientation" => { let direction = match value.as_str() { "0" => gstreamer_video::VideoOrientationMethod::Identity, "1" => gstreamer_video::VideoOrientationMethod::_90r, "2" => gstreamer_video::VideoOrientationMethod::_180, "3" => gstreamer_video::VideoOrientationMethod::_90l, _ => return Err(ParseError::report_string(format!("Unknown GstOrientation index: {}", value))) }; element.set_property(prop.as_str(), direction); Ok(()) } _ => return Err(ParseError::report_string(format!("Unknown 
parameter type: {}", type_))), } } pub fn parse_leading_event<'a, I>(pattern: &mut Pattern, chan: crossbeam_channel::Sender<String>, args: &[&str], cmd_iter: &mut I) -> ParseResult<Vec<Box<dyn EventAction>>> where I: Iterator<Item = &'a str>, { let mut actions = Vec::new(); match args[0] { "terminate" => { actions.push( Box::new(WindowAction { window: "".to_string(), action: "terminate".to_string(), settings: vec![], chan }) as Box<dyn EventAction> ); }, "act" => { match parse_single_event(&pattern, chan.clone(), args) { Ok(t) => actions.push(t), Err(e) => return Err(e.annotate("act")), }; }, "wrap" => { loop { let line = cmd_iter.next().unwrap(); let args: Vec<&str> = line.split_whitespace().collect(); if args[0] == "parw" { break; } match parse_single_event(&pattern, chan.clone(), &args[..]) { Ok(t) => actions.push(t), Err(e) => return Err(e.annotate("wrap")), }; }; }, a => return Err(ParseError::report_string(format!("Unknown condition: {}", a))), } Ok(actions) } fn parse_single_event(pattern: &Pattern, chan: crossbeam_channel::Sender<String>, args: &[&str]) -> ParseResult<Box<dyn EventAction>> { let result = match args[0] { "act" => { let elem = match pattern.pipes.get(args[1]) { Some(e) => e.0.clone(), None => return Err(ParseError::report_string(format!("Unknown pipeline: {}", args[1]))) }; let pipeline = elem.clone().dynamic_cast::<gstreamer::Pipeline>().unwrap(); match args[2] { "prop" => { let mod_elem = match pipeline.by_name(args[3]) { Some(e) => e, None => return Err(ParseError::report_string(format!("Unknown element: {}", args[3]))), }; Box::new(SetPropAction { element: mod_elem, prop: args[4].to_string(), type_:args[5].to_string(), setting: args[6..].join(" "), }) as Box<dyn EventAction> }, "play" => { Box::new(PlayAction { pipeline: elem, action: args[3].to_string(), }) as Box<dyn EventAction> }, "seek" => { let time = match args[3].parse::<f64>() { Ok(f) => f, Err(_) => return Err(ParseError::report_string(format!("Could not parse as float: {}", 
args[3]))), }; let rate = match args[4].parse::<f64>() { Ok(f) => f, Err(_) => return Err(ParseError::report_string(format!("Could not parse as float: {}", args[4]))), }; Box::new(SeekAction { pipeline: elem, rate, time, }) as Box<dyn EventAction> }, "window" => { Box::new(WindowAction { window: args[1].to_string(), action: args[3].to_string(), settings: args[4..].iter().map(|&s| s.into()).collect::<Vec<String>>(), chan: chan.clone(), }) as Box<dyn EventAction> }, a => return Err(ParseError::report_string(format!("Unkown event type: {}", a))), } }, a => return Err(ParseError::report_string(format!("Unknown event header: {}", a))), }; Ok(result) }
35.070313
172
0.508688
567400b9c274bc90216ad01d04201901705e6359
11,262
use ra_syntax::{ ast::{self, HasStringValue}, AstToken, SyntaxKind::{RAW_STRING, STRING}, TextSize, }; use crate::{Assist, AssistCtx, AssistId}; // Assist: make_raw_string // // Adds `r#` to a plain string literal. // // ``` // fn main() { // "Hello,<|> World!"; // } // ``` // -> // ``` // fn main() { // r#"Hello, World!"#; // } // ``` pub(crate) fn make_raw_string(ctx: AssistCtx) -> Option<Assist> { let token = ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?; let value = token.value()?; ctx.add_assist(AssistId("make_raw_string"), "Rewrite as raw string", |edit| { edit.target(token.syntax().text_range()); let max_hash_streak = count_hashes(&value); let mut hashes = String::with_capacity(max_hash_streak + 1); for _ in 0..hashes.capacity() { hashes.push('#'); } edit.replace(token.syntax().text_range(), format!("r{}\"{}\"{}", hashes, value, hashes)); }) } // Assist: make_usual_string // // Turns a raw string into a plain string. // // ``` // fn main() { // r#"Hello,<|> "World!""#; // } // ``` // -> // ``` // fn main() { // "Hello, \"World!\""; // } // ``` pub(crate) fn make_usual_string(ctx: AssistCtx) -> Option<Assist> { let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?; let value = token.value()?; ctx.add_assist(AssistId("make_usual_string"), "Rewrite as regular string", |edit| { edit.target(token.syntax().text_range()); // parse inside string to escape `"` let escaped = value.escape_default().to_string(); edit.replace(token.syntax().text_range(), format!("\"{}\"", escaped)); }) } // Assist: add_hash // // Adds a hash to a raw string literal. 
// NOTE(review): the remainder of this assist module — `add_hash`, `remove_hash`,
// `count_hashes` and its test module — appears whitespace-collapsed by the dataset
// extraction: original newlines (including those *inside* the raw-string test
// fixtures) seem to have been replaced with spaces, so the fixture contents below
// cannot be reconstructed byte-exactly. The lines are therefore kept byte-identical
// rather than reformatted — verify against upstream rust-analyzer before reuse.
//
// `add_hash` inserts a `#` right after the leading `r` and one at the end of the
// raw string token. `remove_hash` strips one `#` pair; it carries an upstream
// FIXME admitting its escaping logic only handles the last hash specially.
// `count_hashes` returns the longest run of `#` immediately following a `"` in
// `s` (exercised concretely by `count_hashes_test` at the end).
// // ``` // fn main() { // r#"Hello,<|> World!"#; // } // ``` // -> // ``` // fn main() { // r##"Hello, World!"##; // } // ``` pub(crate) fn add_hash(ctx: AssistCtx) -> Option<Assist> { let token = ctx.find_token_at_offset(RAW_STRING)?; ctx.add_assist(AssistId("add_hash"), "Add # to raw string", |edit| { edit.target(token.text_range()); edit.insert(token.text_range().start() + TextSize::of('r'), "#"); edit.insert(token.text_range().end(), "#"); }) } // Assist: remove_hash // // Removes a hash from a raw string literal. // // ``` // fn main() { // r#"Hello,<|> World!"#; // } // ``` // -> // ``` // fn main() { // r"Hello, World!"; // } // ``` pub(crate) fn remove_hash(ctx: AssistCtx) -> Option<Assist> { let token = ctx.find_token_at_offset(RAW_STRING)?; let text = token.text().as_str(); if text.starts_with("r\"") { // no hash to remove return None; } ctx.add_assist(AssistId("remove_hash"), "Remove hash from raw string", |edit| { edit.target(token.text_range()); let result = &text[2..text.len() - 1]; let result = if result.starts_with('\"') { // FIXME: this logic is wrong, not only the last has has to handled specially // no more hash, escape let internal_str = &result[1..result.len() - 1]; format!("\"{}\"", internal_str.escape_default().to_string()) } else { result.to_owned() }; edit.replace(token.text_range(), format!("r{}", result)); }) } fn count_hashes(s: &str) -> usize { let mut max_hash_streak = 0usize; for idx in s.match_indices("\"#").map(|(i, _)| i) { let (_, sub) = s.split_at(idx + 1); let nb_hash = sub.chars().take_while(|c| *c == '#').count(); if nb_hash > max_hash_streak { max_hash_streak = nb_hash; } } max_hash_streak } #[cfg(test)] mod test { use super::*; use crate::helpers::{check_assist, check_assist_not_applicable, check_assist_target}; #[test] fn make_raw_string_target() { check_assist_target( make_raw_string, r#" fn f() { let s = <|>"random\nstring"; } "#, r#""random\nstring""#, ); } #[test] fn make_raw_string_works() { check_assist( 
make_raw_string, r#" fn f() { let s = <|>"random\nstring"; } "#, r##" fn f() { let s = <|>r#"random string"#; } "##, ) } #[test] fn make_raw_string_works_inside_macros() { check_assist( make_raw_string, r#" fn f() { format!(<|>"x = {}", 92) } "#, r##" fn f() { format!(<|>r#"x = {}"#, 92) } "##, ) } #[test] fn make_raw_string_hashes_inside_works() { check_assist( make_raw_string, r###" fn f() { let s = <|>"#random##\nstring"; } "###, r####" fn f() { let s = <|>r#"#random## string"#; } "####, ) } #[test] fn make_raw_string_closing_hashes_inside_works() { check_assist( make_raw_string, r###" fn f() { let s = <|>"#random\"##\nstring"; } "###, r####" fn f() { let s = <|>r###"#random"## string"###; } "####, ) } #[test] fn make_raw_string_nothing_to_unescape_works() { check_assist( make_raw_string, r#" fn f() { let s = <|>"random string"; } "#, r##" fn f() { let s = <|>r#"random string"#; } "##, ) } #[test] fn make_raw_string_not_works_on_partial_string() { check_assist_not_applicable( make_raw_string, r#" fn f() { let s = "foo<|> } "#, ) } #[test] fn make_usual_string_not_works_on_partial_string() { check_assist_not_applicable( make_usual_string, r#" fn main() { let s = r#"bar<|> } "#, ) } #[test] fn add_hash_target() { check_assist_target( add_hash, r#" fn f() { let s = <|>r"random string"; } "#, r#"r"random string""#, ); } #[test] fn add_hash_works() { check_assist( add_hash, r#" fn f() { let s = <|>r"random string"; } "#, r##" fn f() { let s = <|>r#"random string"#; } "##, ) } #[test] fn add_more_hash_works() { check_assist( add_hash, r##" fn f() { let s = <|>r#"random"string"#; } "##, r###" fn f() { let s = <|>r##"random"string"##; } "###, ) } #[test] fn add_hash_not_works() { check_assist_not_applicable( add_hash, r#" fn f() { let s = <|>"random string"; } "#, ); } #[test] fn remove_hash_target() { check_assist_target( remove_hash, r##" fn f() { let s = <|>r#"random string"#; } "##, r##"r#"random string"#"##, ); } #[test] fn remove_hash_works() { check_assist( 
remove_hash, r##" fn f() { let s = <|>r#"random string"#; } "##, r#" fn f() { let s = <|>r"random string"; } "#, ) } #[test] fn remove_hash_with_quote_works() { check_assist( remove_hash, r##" fn f() { let s = <|>r#"random"str"ing"#; } "##, r#" fn f() { let s = <|>r"random\"str\"ing"; } "#, ) } #[test] fn remove_more_hash_works() { check_assist( remove_hash, r###" fn f() { let s = <|>r##"random string"##; } "###, r##" fn f() { let s = <|>r#"random string"#; } "##, ) } #[test] fn remove_hash_not_works() { check_assist_not_applicable( remove_hash, r#" fn f() { let s = <|>"random string"; } "#, ); } #[test] fn remove_hash_no_hash_not_works() { check_assist_not_applicable( remove_hash, r#" fn f() { let s = <|>r"random string"; } "#, ); } #[test] fn make_usual_string_target() { check_assist_target( make_usual_string, r##" fn f() { let s = <|>r#"random string"#; } "##, r##"r#"random string"#"##, ); } #[test] fn make_usual_string_works() { check_assist( make_usual_string, r##" fn f() { let s = <|>r#"random string"#; } "##, r#" fn f() { let s = <|>"random string"; } "#, ) } #[test] fn make_usual_string_with_quote_works() { check_assist( make_usual_string, r##" fn f() { let s = <|>r#"random"str"ing"#; } "##, r#" fn f() { let s = <|>"random\"str\"ing"; } "#, ) } #[test] fn make_usual_string_more_hash_works() { check_assist( make_usual_string, r###" fn f() { let s = <|>r##"random string"##; } "###, r##" fn f() { let s = <|>"random string"; } "##, ) } #[test] fn make_usual_string_not_works() { check_assist_not_applicable( make_usual_string, r#" fn f() { let s = <|>"random string"; } "#, ); } #[test] fn count_hashes_test() { assert_eq!(0, count_hashes("abc")); assert_eq!(0, count_hashes("###")); assert_eq!(1, count_hashes("\"#abc")); assert_eq!(0, count_hashes("#abc")); assert_eq!(2, count_hashes("#ab\"##c")); assert_eq!(4, count_hashes("#ab\"##\"####c")); } }
22.479042
97
0.405168
768a06b48637305573da709ec52f22b5906a41c2
94
// NOTE(review): this looks like a compile-fail (trybuild/UI-test) fixture for the
// `sscanf` crate — `scanf!` is handed a `usize` literal and a byte string where a
// `&str` input is presumably expected, so the point of this file appears to be
// that it does NOT compile. Keep the code byte-identical; confirm against the
// crate's UI-test directory before changing anything here.
fn main() { sscanf::scanf!(5usize, "{}", usize); sscanf::scanf!(b"5", "{}", usize); }
18.8
40
0.489362
de58fdeba829d6bcd42260d66e16371d7317d10b
3,055
// In-game developer console for a Bevy (0.5-era API: `AppBuilder`, `.system()`)
// game: wires up console UI, keyboard input, command handling and animation.
mod commands;
pub mod event;
mod input;
mod ui;

use crate::apartment::player::decrease_stats;

use self::commands::should_run_cmd_handler;

use super::states::GameState;

use bevy::prelude::*;
use sysinfo::{System, SystemExt};

/// Bevy plugin bundling every system/resource/event the console needs.
pub struct ConsolePlugin;

impl Plugin for ConsolePlugin {
    // Registers console events, per-state system sets and shared resources.
    // NOTE(review): the `label`/`before`/`after` ordering constraints
    // ("send_console_input", "check_interactables", "build_terminal") are
    // load-bearing — do not reorder registrations without checking them.
    fn build(&self, app: &mut AppBuilder) {
        app.add_event::<event::PrintConsoleEvent>()
            .add_event::<event::EnteredConsoleCommandEvent>()
            // Entering the main game: build the terminal UI once and refresh sysinfo.
            .add_system_set(
                SystemSet::on_enter(GameState::MainGame)
                    .with_system(ui::build_ui.system().label("build_terminal"))
                    .with_system(setup.system()),
            )
            // Opening the console: slide it in and play the open sound.
            .add_system_set(
                SystemSet::on_enter(GameState::ConsoleOpenedState)
                    .with_system(ui::open_console.system())
                    .with_system(input::opening_console_sound.system()),
            )
            // While the console is open: read keys, echo the pending command, render logs.
            .add_system_set(
                SystemSet::on_update(GameState::ConsoleOpenedState)
                    .with_system(
                        input::handle_input_keys
                            .system()
                            .label("send_console_input"),
                    )
                    .with_system(input::update_enter_command.system())
                    .with_system(ui::update_logs_area.system()),
            )
            // Command execution runs only when a command was entered, and strictly
            // before the next batch of input is consumed.
            .add_system_set(
                SystemSet::on_update(GameState::ConsoleOpenedState)
                    .with_run_criteria(should_run_cmd_handler.system())
                    .with_system(commands::commands_handler.system())
                    .before("send_console_input"),
            )
            // Player stats keep ticking down even while the console is open.
            .add_system_set(
                SystemSet::on_update(GameState::ConsoleOpenedState)
                    .with_system(decrease_stats.system()),
            )
            // Animation applies in PostUpdate so it sees this frame's state changes.
            .add_system_to_stage(CoreStage::PostUpdate, ui::apply_animation.system())
            // Closing the console: slide out and play the close sound.
            .add_system_set(
                SystemSet::on_exit(GameState::ConsoleOpenedState)
                    .with_system(ui::close_console.system())
                    .with_system(input::closing_console_sound.system()),
            )
            .insert_resource(ConsoleData::default())
            .insert_resource(ConsoleAnimation {
                moving_speed: 15.0,
                ..Default::default()
            })
            .init_resource::<System>()
            // These run in every state: message delivery and the open-console hotkey.
            .add_system(event::add_message_events_to_console.system())
            .add_system(
                input::trigger_open_console
                    .system()
                    .after("check_interactables"),
            );
    }
}

/// Mutable console state shared between input, command and UI systems.
#[derive(Default)]
pub struct ConsoleData {
    // Command line currently being typed.
    pub enter_command: String,
    // True while the slide-in animation is in progress.
    pub is_opening: bool,
    // True once the console has finished opening.
    pub fully_opened: bool,
    // Scrollback log rendered in the console window.
    pub messages: Vec<String>,
}

/// Parameters for the console's slide animation (times in seconds,
/// positions in UI coordinates — see `ui::apply_animation`).
#[derive(Default)]
pub struct ConsoleAnimation {
    pub start_position: Vec2,
    pub end_position: Vec2,
    pub moving_speed: f64,
    pub time_to_move: f64,
    pub start_time: f64,
}

// Warm up the sysinfo cache once when entering the main game.
fn setup(mut sys: ResMut<System>) {
    sys.refresh_all();
}
33.571429
85
0.564648
506b1d0f2c0ad37be2cb5944ed565e93ac659155
43
// Module root: publicly re-exports the crate's two submodules.
pub mod parts_registry;
pub mod ui_bridge;
14.333333
23
0.813953
165cb39c99c14dd6905a2c16a6663c5f318150eb
1,216
use crate::dns::WhoAmi; use anyhow::Result; use async_trait::async_trait; use std::net::{Ipv4Addr, Ipv6Addr}; use std::str::FromStr; const IPV4_ENDPOINT: &'static str = "https://icanhazip.com"; const IPV6_ENDPOINT: &'static str = "https://ipv6.icanhazip.com"; pub struct ICanHazIp { http_client: reqwest::Client, } impl ICanHazIp { pub fn new() -> Result<Self> { Ok(Self { http_client: reqwest::ClientBuilder::new().build()?, }) } async fn do_get(&self, endpoint: &str) -> Result<String> { Ok(self .http_client .get(endpoint) .send() .await .map_err(|err| { log::error!("Failed to get ip from url: {}, err: {}", endpoint, err); err })? .text() .await?) } } #[async_trait] impl WhoAmi for ICanHazIp { async fn fetch_ipv4_addr(&self) -> Result<Ipv4Addr> { Ok(Ipv4Addr::from_str( &self.do_get(IPV4_ENDPOINT).await?.trim(), )?) } async fn fetch_ipv6_addr(&self) -> Result<Ipv6Addr> { Ok(Ipv6Addr::from_str( &self.do_get(IPV6_ENDPOINT).await?.trim(), )?) } }
24.32
85
0.544408