hexsha
stringlengths
40
40
size
int64
4
1.05M
content
stringlengths
4
1.05M
avg_line_length
float64
1.33
100
max_line_length
int64
1
1k
alphanum_fraction
float64
0.25
1
efa7f5cf632cf57f64afa0d89ef12d262bd0dbbe
1,050
use std::{ collections::{BTreeSet, HashSet}, hash::{BuildHasher, Hash}, }; pub trait DuplicateInsertsLastWinsSet<T> { fn new(size_hint: Option<usize>) -> Self; /// Insert or replace the existing value fn replace(&mut self, value: T); } impl<T, S> DuplicateInsertsLastWinsSet<T> for HashSet<T, S> where T: Eq + Hash, S: BuildHasher + Default, { #[inline] fn new(size_hint: Option<usize>) -> Self { match size_hint { Some(size) => Self::with_capacity_and_hasher(size, S::default()), None => Self::with_hasher(S::default()), } } #[inline] fn replace(&mut self, value: T) { // Hashset already fulfils the contract self.replace(value); } } impl<T> DuplicateInsertsLastWinsSet<T> for BTreeSet<T> where T: Ord, { #[inline] fn new(_size_hint: Option<usize>) -> Self { Self::new() } #[inline] fn replace(&mut self, value: T) { // BTreeSet already fulfils the contract self.replace(value); } }
21.875
77
0.590476
2fb5f4247567ba9505f6c6013674d8ef124b923b
2,561
#![deny(warnings)] use warp::host::Authority; #[tokio::test] async fn exact() { let filter = warp::host::exact("known.com"); // no authority let req = warp::test::request(); assert!(req.filter(&filter).await.unwrap_err().is_not_found()); // specified in URI let req = warp::test::request().path("http://known.com/about-us"); assert!(req.filter(&filter).await.is_ok()); let req = warp::test::request().path("http://unknown.com/about-us"); assert!(req.filter(&filter).await.unwrap_err().is_not_found()); // specified in Host header let req = warp::test::request() .header("host", "known.com") .path("/about-us"); assert!(req.filter(&filter).await.is_ok()); let req = warp::test::request() .header("host", "unknown.com") .path("/about-us"); assert!(req.filter(&filter).await.unwrap_err().is_not_found()); // specified in both - matching let req = warp::test::request() .header("host", "known.com") .path("http://known.com/about-us"); assert!(req.filter(&filter).await.is_ok()); let req = warp::test::request() .header("host", "unknown.com") .path("http://unknown.com/about-us"); assert!(req.filter(&filter).await.unwrap_err().is_not_found()); // specified in both - mismatch let req = warp::test::request() .header("host", "known.com") .path("http://known2.com/about-us"); assert!(req .filter(&filter) .await .unwrap_err() .find::<warp::reject::InvalidHeader>() .is_some()); // bad host header - invalid chars let req = warp::test::request() .header("host", "😭") .path("http://known.com/about-us"); assert!(req .filter(&filter) .await .unwrap_err() .find::<warp::reject::InvalidHeader>() .is_some()); // bad host header - invalid format let req = warp::test::request() .header("host", "hello space.com") .path("http://known.com//about-us"); assert!(req .filter(&filter) .await .unwrap_err() .find::<warp::reject::InvalidHeader>() .is_some()); } #[tokio::test] async fn optional() { let filter = warp::host::optional(); let req = warp::test::request().header("host", "example.com"); assert_eq!( 
req.filter(&filter).await.unwrap(), Some(Authority::from_static("example.com")) ); let req = warp::test::request(); assert_eq!(req.filter(&filter).await.unwrap(), None); }
29.102273
72
0.572042
562d041a4d562583788cf6e41bcd1f9177aa8388
5,711
use amethyst::{ core::math::Vector3, core::transform::Transform, ecs::prelude::{Entity, WriteStorage}, }; use if_chain::if_chain; use super::projectile_trait::ProjectileTrait; use crate::minion::Minion; use utils::coord::Vector2; #[derive(PartialEq)] enum PulsingState { Increase, Decrease, Dieing, //Target lost } pub struct PulsingElectricBall { target: Option<Entity>, damage: i32, detonation_range: f32, speed: f32, delete: bool, fired: bool, normal_scale: Vector3<f32>, pulsing_state: PulsingState, last_direction: Option<Vector2>, } impl PulsingElectricBall { pub fn new(damage: i32, detonation_range: f32, speed: f32, normal_scale: Vector3<f32>) -> Self { PulsingElectricBall { target: None, damage, detonation_range, speed, delete: false, fired: false, normal_scale, pulsing_state: PulsingState::Increase, last_direction: None, } } fn is_in_range(&self, lhs: &Vector3<f32>, rhs: &Vector3<f32>) -> bool { let y_diff = lhs.y - rhs.y; let x_diff = lhs.x - rhs.x; let square_sum = y_diff * y_diff + x_diff * x_diff; square_sum.sqrt() < self.detonation_range } fn pulse(&mut self, scale: &mut Vector3<f32>, elapsed: f32) { self.adjust_scale(scale, elapsed); self.handle_scale_under_overflow(scale); scale.y = scale.x; } fn adjust_scale(&mut self, scale: &mut Vector3<f32>, elapsed: f32) { let diff = { if self.pulsing_state == PulsingState::Increase { self.normal_scale.x * elapsed } else { -self.normal_scale.x * elapsed } }; scale.x += diff; } fn handle_scale_under_overflow(&mut self, scale: &mut Vector3<f32>) { if scale.x > self.normal_scale.x { let overflow = scale.x - self.normal_scale.x; scale.x = self.normal_scale.x - overflow; self.pulsing_state = PulsingState::Decrease; } else if self.pulsing_state == PulsingState::Decrease && scale.x < self.normal_scale.x * 0.8 { let underflow = self.normal_scale.x * 0.8 - scale.x; scale.x = self.normal_scale.x * 0.8 + underflow; self.pulsing_state = PulsingState::Increase; } } fn hit_minion<'a>(&mut self, minions: &mut WriteStorage<'a, 
Minion>, target: Entity) { let minion = minions.get_mut(target); match minion { Some(minion) => minion.hit(self.damage), _ => (), } self.delete = true; } fn update_projectile_translation( &mut self, projectile_transform: &mut Transform, direction: &Vector2, elapsed: f32, ) { let projectile_translation = projectile_transform.translation().clone(); projectile_transform .set_translation_x(projectile_translation.x + direction.x * elapsed * self.speed); projectile_transform .set_translation_y(projectile_translation.y + direction.y * elapsed * self.speed); self.last_direction = Some(direction.clone()); } fn handle_going_beyond_target<'a>( &mut self, projectile_transform: &mut Transform, target_translation: &Vector3<f32>, direction: &Vector2, minions: &mut WriteStorage<'a, Minion>, target: Entity, ) { let mut new_direction = Vector2::new( target_translation.x - projectile_transform.translation().x, target_translation.y - projectile_transform.translation().y, ); new_direction.normalize(); if &new_direction != direction { projectile_transform.set_translation_x(target_translation.x); projectile_transform.set_translation_y(target_translation.y); self.hit_minion(minions, target); } } } impl ProjectileTrait for PulsingElectricBall { fn update<'a>( &mut self, projectile_entity: Entity, minions: &mut WriteStorage<'a, Minion>, transforms: &mut WriteStorage<'a, Transform>, elapsed: f32, ) { self.pulse( transforms.get_mut(projectile_entity).unwrap().scale_mut(), elapsed, ); if !self.fired { return (); } let projectile_transform = transforms.get(projectile_entity).unwrap(); if_chain! 
{ if let Some(target) = self.target; if let Some(target_transform) = transforms.get(target); then { if self.is_in_range(projectile_transform.translation(), target_transform.translation()) { self.hit_minion(minions, target); } else { let target_translation = target_transform.translation().clone(); let projectile_translation = projectile_transform.translation(); let mut direction = Vector2::new(target_translation.x - projectile_translation.x, target_translation.y - projectile_translation.y); direction.normalize(); let projectile_transform_mut = transforms.get_mut(projectile_entity).unwrap(); self.update_projectile_translation(projectile_transform_mut, &direction, elapsed); self.handle_going_beyond_target(projectile_transform_mut, &target_translation, &direction, minions, target); } } else { let projectile_transform = transforms.get_mut(projectile_entity).unwrap(); let direction = self.last_direction.unwrap(); self.update_projectile_translation(projectile_transform, &direction, elapsed); self.pulsing_state = PulsingState::Dieing; if projectile_transform.scale().x < 0.01 { self.delete = true; } } } } fn dead(&self) -> bool { return self.delete; } fn fire(&mut self) { self.fired = true; } fn set_target(&mut self, entity: Entity) { self.target = Some(entity); } }
30.216931
124
0.651199
2fbfd273b02eef11a582aa6ec6b0fba94ee9276a
678
// Copyright lowRISC contributors. // Licensed under the Apache License, Version 2.0, see LICENSE for details. // SPDX-License-Identifier: Apache-2.0 // !! DO NOT EDIT !! // To regenerate this file, run `fuzz/generate_proto_tests.py`. #![no_main] #![allow(non_snake_case)] use libfuzzer_sys::fuzz_target; use manticore::mem::BumpArena; use manticore::protocol::Command; use manticore::protocol::wire::FromWire; use manticore::protocol::cerberus::DeviceId as C; fuzz_target!(|data: &[u8]| { let mut arena = vec![0; data.len()]; let arena = BumpArena::new(&mut arena); let mut data = data; let _ = <C as Command<'_>>::Req::from_wire(&mut data, &arena); });
26.076923
75
0.690265
878f3ce454de674dabd62a4249793854eaef30da
3,417
#[doc = "Register `ERASEPCR1` reader"] pub struct R(crate::R<ERASEPCR1_SPEC>); impl core::ops::Deref for R { type Target = crate::R<ERASEPCR1_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<ERASEPCR1_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<ERASEPCR1_SPEC>) -> Self { R(reader) } } #[doc = "Register `ERASEPCR1` writer"] pub struct W(crate::W<ERASEPCR1_SPEC>); impl core::ops::Deref for W { type Target = crate::W<ERASEPCR1_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<ERASEPCR1_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<ERASEPCR1_SPEC>) -> Self { W(writer) } } #[doc = "Field `ERASEPCR1` reader - Register for erasing a page in Code area. Equivalent to ERASEPAGE."] pub struct ERASEPCR1_R(crate::FieldReader<u32, u32>); impl ERASEPCR1_R { pub(crate) fn new(bits: u32) -> Self { ERASEPCR1_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for ERASEPCR1_R { type Target = crate::FieldReader<u32, u32>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `ERASEPCR1` writer - Register for erasing a page in Code area. Equivalent to ERASEPAGE."] pub struct ERASEPCR1_W<'a> { w: &'a mut W, } impl<'a> ERASEPCR1_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !0xffff_ffff) | (value as u32 & 0xffff_ffff); self.w } } impl R { #[doc = "Bits 0:31 - Register for erasing a page in Code area. Equivalent to ERASEPAGE."] #[inline(always)] pub fn erasepcr1(&self) -> ERASEPCR1_R { ERASEPCR1_R::new((self.bits & 0xffff_ffff) as u32) } } impl W { #[doc = "Bits 0:31 - Register for erasing a page in Code area. 
Equivalent to ERASEPAGE."] #[inline(always)] pub fn erasepcr1(&mut self) -> ERASEPCR1_W { ERASEPCR1_W { w: self } } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "Deprecated register - Register for erasing a page in Code area. Equivalent to ERASEPAGE.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [erasepcr1](index.html) module"] pub struct ERASEPCR1_SPEC; impl crate::RegisterSpec for ERASEPCR1_SPEC { type Ux = u32; } #[doc = "`read()` method returns [erasepcr1::R](R) reader structure"] impl crate::Readable for ERASEPCR1_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [erasepcr1::W](W) writer structure"] impl crate::Writable for ERASEPCR1_SPEC { type Writer = W; } #[doc = "`reset()` method sets ERASEPCR1 to value 0"] impl crate::Resettable for ERASEPCR1_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
33.174757
478
0.628329
29f9546d52ac9583797ca844cecf98b514d72c18
3,157
#[doc = "Register `FC0_REF_KHZ` reader"] pub struct R(crate::R<FC0_REF_KHZ_SPEC>); impl core::ops::Deref for R { type Target = crate::R<FC0_REF_KHZ_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<FC0_REF_KHZ_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<FC0_REF_KHZ_SPEC>) -> Self { R(reader) } } #[doc = "Register `FC0_REF_KHZ` writer"] pub struct W(crate::W<FC0_REF_KHZ_SPEC>); impl core::ops::Deref for W { type Target = crate::W<FC0_REF_KHZ_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<FC0_REF_KHZ_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<FC0_REF_KHZ_SPEC>) -> Self { W(writer) } } #[doc = "Field `FC0_REF_KHZ` reader - "] pub struct FC0_REF_KHZ_R(crate::FieldReader<u32, u32>); impl FC0_REF_KHZ_R { pub(crate) fn new(bits: u32) -> Self { FC0_REF_KHZ_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for FC0_REF_KHZ_R { type Target = crate::FieldReader<u32, u32>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `FC0_REF_KHZ` writer - "] pub struct FC0_REF_KHZ_W<'a> { w: &'a mut W, } impl<'a> FC0_REF_KHZ_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !0x000f_ffff) | (value as u32 & 0x000f_ffff); self.w } } impl R { #[doc = "Bits 0:19"] #[inline(always)] pub fn fc0_ref_khz(&self) -> FC0_REF_KHZ_R { FC0_REF_KHZ_R::new((self.bits & 0x000f_ffff) as u32) } } impl W { #[doc = "Bits 0:19"] #[inline(always)] pub fn fc0_ref_khz(&mut self) -> FC0_REF_KHZ_W { FC0_REF_KHZ_W { w: self } } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "Reference clock frequency in kHz This register you can 
[`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api). For information about available fields see [fc0_ref_khz](index.html) module"] pub struct FC0_REF_KHZ_SPEC; impl crate::RegisterSpec for FC0_REF_KHZ_SPEC { type Ux = u32; } #[doc = "`read()` method returns [fc0_ref_khz::R](R) reader structure"] impl crate::Readable for FC0_REF_KHZ_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [fc0_ref_khz::W](W) writer structure"] impl crate::Writable for FC0_REF_KHZ_SPEC { type Writer = W; } #[doc = "`reset()` method sets FC0_REF_KHZ to value 0"] impl crate::Resettable for FC0_REF_KHZ_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
29.504673
300
0.617358
3af49fe81587a0eeec939a2e01a63953b1c7a9e6
33,517
//! Module builds and stores the module tree and manifest tree. //! Dependency cycles are detected and modules are marked clean //! if no changes were made to them. //! use std::fmt::Debug; use std::path::{Path, PathBuf}; use std::time::SystemTime; use __core::ops::{Deref, DerefMut}; use cranelift::entity::packed_option::ReservedValue; use cranelift::entity::*; use quick_proc::*; use crate::ast::{Ast, Vis}; use crate::lexer::*; use crate::util::sdbm::ID; use crate::util::storage::*; use crate::{ast, lexer}; type Result<T = ()> = std::result::Result<T, Error>; /// Root manifest is always the first manifest, constant removes ambiguity. pub const ROOT_MANIFEST: Manifest = Manifest(0); /// Builtin module is always the first module, constant removes ambiguity. pub const BUILTIN_MODULE: Mod = Mod(0); /// Root module is always the second module, constant removes ambiguity. pub const ROOT_MODULE: Mod = Mod(1); /// Environment variable under which compiler searches already downloaded dependencies. pub const CACHE_VAR: &str = "METAFLOW_CACHE"; /// Source file extension. pub const SOURCE_EXT: &str = "mf"; /// Manifest file extension. pub const MANIFEST_EXT: &str = "mfm"; /// Ctx embeds ast context and module tree. #[derive(Debug, Clone, Default)] pub struct Ctx { ctx: ast::Ctx, seen_manifests: EntitySet<Manifest>, seen_modules: EntitySet<Mod>, clean_modules: EntitySet<Mod>, manifest_lookup: Map<Manifest>, manifests: PoolMap<Manifest, ManifestEnt>, module_lookup: Map<Mod>, modules: PoolMap<Mod, ModEnt>, module_ctxs: SecondaryMap<Mod, ModCtx>, } impl Ctx { /// Loads all modules and manifests into tree. It returns the order /// in which modules should be processed. 
pub fn compute_module_tree(&mut self, root: &str) -> Result<Vec<Mod>> { if self.modules.len() == 0 { self.load_builtin_module(); } let mut path_buffer = PathBuf::new(); self.load_manifests(root, &mut path_buffer)?; let in_code_path = self.manifests[ROOT_MANIFEST].name; let mut frontier = vec![( in_code_path, Token::default(), Option::<(Option<Span>, Mod)>::None, ROOT_MANIFEST, )]; let builtin_span = self.builtin_span("builtin"); // cleared each loop let mut imports = self.temp_vec(); let mut temp_data = ast::Data::default(); let mut saved_data = ast::Data::default(); let mut reloc = ast::Reloc::default(); let mut collector = ast::Collector::default(); // loop eliminates recursion while let Some((in_code_path, token, from, manifest_id)) = frontier.pop() { let module = self.load_module(in_code_path, token, manifest_id, &mut path_buffer)?; let ModCtx { name, source, manifest, .. } = self.module_ctxs[module]; let module_ent = std::mem::take(&mut self.modules[module]); if let Some((nick, parent_module)) = from { let nick_span = nick.unwrap_or(name); let nick = self.hash_span(nick_span); self.module_ctxs[module].used.push(parent_module); self.module_ctxs[parent_module] .deps .push((nick_span, module)); self.import_item( parent_module, nick, Item::new(item::Kind::Mod(module), parent_module, token), )?; } if self.seen_modules.contains(module) { self.modules[module] = module_ent; continue; } let mut ast_state = ast::State::new(source, &self.ctx).map_err(Into::into)?; { let mut data = ast::DataCollector::new(&mut temp_data, &mut saved_data, &mut reloc); ast::Parser::new(&mut ast_state, &mut data, &mut self.ctx, &mut collector) .parse_imports(&mut imports) .map_err(Into::into)?; } for import in imports.drain(..) { let path = self.display(import.path()); let head = Path::new(path) .components() .next() .ok_or_else(|| Error::new(error::Kind::MissingPathStem, import.token()))? 
.as_os_str() .to_str() .unwrap(); let id = ID::new(head); let manifest_ent = &self.manifests[manifest]; // here we see that first segment of path sets manifest let manifest = if id == self.hash_span(manifest_ent.name) { manifest } else { manifest_ent .find_dep(id) .ok_or_else(|| Error::new(error::Kind::ImportNotFound, import.token()))? .clone() }; frontier.push(( import.path(), import.token(), Some((import.nickname(), module)), manifest, )); } self.module_ctxs[module] .deps .push((builtin_span, BUILTIN_MODULE)); self.import_item( module, ID::new("builtin"), Item::new(item::Kind::Mod(BUILTIN_MODULE), module, token), )?; self.module_ctxs[module].ast_state = ast_state; self.seen_modules.insert(module); self.modules[module] = module_ent; } let order = self .create_order(ROOT_MODULE) .map_err(|err| Error::new(error::Kind::CyclicDependency(err), Token::default()))?; Ok(order) } /// Loads the module and returns reference. `in_code_path` should point to /// content of string defining import in 'use' statement. `token` is used for /// error display. `manifest` is the is of manifest of project that contains /// it. `path_buffer` should be empty and will remain empty after call. pub fn load_module( &mut self, in_code_path: Span, token: Token, manifest: Manifest, path_buffer: &mut PathBuf, ) -> Result<Mod> { let manifest_ent = &self.manifests[manifest]; // in case this is dependency or command line argument is not '.' 
path_buffer.push(Path::new(self.display(manifest_ent.base_path))); path_buffer.push(Path::new(self.display(manifest_ent.root_path))); let manifest_name = self.display(manifest_ent.name); path_buffer.push(Path::new(manifest_name)); let root = self.display(in_code_path); let module_path = Path::new(root); // finding module name span let name_len = module_path.file_stem().unwrap().len(); let whole_len = module_path.file_name().unwrap().len(); let len = in_code_path.len(); let name = in_code_path.slice(len - whole_len..len - name_len + whole_len); // now we have to strip first path segment from root span and replace it with real name let module_path = module_path .strip_prefix( module_path .components() .next() .map(|c| c.as_os_str().to_str().unwrap()) .unwrap_or(""), ) .unwrap(); path_buffer.push(module_path); path_buffer.set_extension(SOURCE_EXT); // done, path is constructed let id = ID::new(path_buffer.to_str().unwrap()); let modified = std::fs::metadata(&path_buffer) .map_err(|err| Error::new(error::Kind::FileReadError(path_buffer.clone(), err), token))? 
.modified() .ok(); let content = std::fs::read_to_string(&path_buffer).map_err(|err| { Error::new(error::Kind::FileReadError(path_buffer.clone(), err), token) })?; let source = SourceEnt::new(path_buffer.to_str().unwrap().to_string(), content); let source = self.add_source(source); path_buffer.clear(); // stop if module is clean let saved_module = self.module_lookup.get(id).cloned(); let module = if let Some(module) = saved_module { let module_ent = &mut self.modules[module]; module_ent.id = id; if Some(module_ent.modified) != modified { // if we cant get the modification time juts use unique // time so module gets always refreshed module_ent.modified = modified.unwrap_or(SystemTime::now()); self.clean_modules.remove(module); } module } else { let module = ModEnt { id, ..Default::default() }; let module = self.modules.push(module); let shadow = self.module_lookup.insert(id, module); debug_assert!(shadow.is_none()); module }; self.module_ctxs[module] = ModCtx { name, source, manifest, ..Default::default() }; Ok(module) } /// Loads and builds manifest tree. `base_path` should point to directory with manifest. /// `path_buffer` should be empty and will remain empty after call. 
pub fn load_manifests(&mut self, base_path: &str, path_buffer: &mut PathBuf) -> Result { let cache_root = std::env::var(CACHE_VAR) .map_err(|_| Error::new(error::Kind::MissingCache, Token::default()))?; let id = ID::new(base_path); let manifest_id = if let Some(&manifest) = self.manifest_lookup.get(id) { manifest } else { let module = self.manifests.push(ManifestEnt { id, base_path: self.ctx.builtin_span(base_path), ..ManifestEnt::default() }); self.manifest_lookup.insert(id, module); module }; let mut frontier = vec![(manifest_id, ast::Dep::default())]; let mut saved_data = ast::Data::default(); let mut temp_data = ast::Data::default(); let mut reloc = ast::Reloc::default(); let mut collector = ast::Collector::default(); while let Some((manifest_id, import)) = frontier.pop() { if self.seen_manifests.contains(manifest_id) { continue; } let manifest_base_path = self.display(self.manifests[manifest_id].base_path); path_buffer.clear(); path_buffer.push(Path::new(manifest_base_path)); if !path_buffer.exists() { if import.external() { self.download(import, manifest_base_path)?; } else { return Err(Error::new( error::Kind::MissingDependency(path_buffer.clone()), import.token(), )); } } path_buffer.push(Path::new("project")); path_buffer.set_extension(MANIFEST_EXT); let content = std::fs::read_to_string(&path_buffer).map_err(|err| { Error::new( error::Kind::ManifestReadError(path_buffer.clone(), err), import.token(), ) })?; let source = SourceEnt::new(path_buffer.to_str().unwrap().to_string(), content); let source = self.add_source(source); self.manifests[manifest_id].source = source; let manifest = { let mut state = ast::State::new(source, &self.ctx).map_err(Into::into)?; let mut data = ast::DataCollector::new(&mut saved_data, &mut temp_data, &mut reloc); ast::Parser::new(&mut state, &mut data, self, &mut collector) .parse_manifest() .map_err(Into::into)? 
}; let root_file_span = manifest .find_attr(ID::new("root")) .unwrap_or_else(|| self.builtin_span("main.mf")); let root_file = self.display(root_file_span); let parent_len = Path::new(root_file).parent().unwrap().as_os_str().len(); let name_len = Path::new(root_file) .file_stem() .ok_or_else(|| Error::new(error::Kind::MissingPathStem, import.token()))? .len(); let whole_len = Path::new(root_file).file_name().unwrap().len(); let len = root_file_span.len(); let name = root_file_span.slice(len - whole_len..len - whole_len + name_len); let root_path = root_file_span.slice(0..parent_len); let manifest_ent = &mut self.manifests[manifest_id]; manifest_ent.name = name; manifest_ent.root_path = root_path; for dep in manifest.deps() { path_buffer.clear(); let dep_path = self.display(dep.path()); if dep.external() { path_buffer.push(Path::new(&cache_root)); path_buffer.push(Path::new(dep_path)); path_buffer.push(Path::new(self.display(dep.version()))); } else { path_buffer.push(Path::new(base_path)); path_buffer.push(Path::new(dep_path)); } let id = ID::new(path_buffer.to_str().unwrap()); let manifest = if let Some(&manifest) = self.manifest_lookup.get(id) { manifest } else { let module = self.manifests.push(ManifestEnt { base_path: self.ctx.builtin_span(path_buffer.to_str().unwrap()), ..ManifestEnt::default() }); self.manifest_lookup.insert(id, module); module }; let id = self.hash_span(dep.name()); self.manifests[manifest_id].deps.push((id, manifest)); frontier.push((manifest, dep.clone())); } self.seen_manifests.insert(manifest_id); } let mut stack = vec![]; let mut map = vec![(false, false); self.manifests.len()]; if let Some(cycle) = self.detect_cycles(Manifest::new(0), &mut stack, &mut map, None) { return Err(Error::new( error::Kind::CyclicManifests(cycle), Token::default(), )); } path_buffer.clear(); Ok(()) } /// Downloads the dependency pointed by `dep`. `destination` is /// path to directory where files should be located. 
pub fn download(&self, dep: ast::Dep, destination: &str) -> Result { std::fs::create_dir_all(destination).unwrap(); let link = format!("https://{}", self.display(dep.path())); let code = std::process::Command::new("git") .args(&[ "clone", "--depth", "1", "--branch", self.display(dep.version()), &link, destination, ]) .status() .map_err(|err| Error::new(error::Kind::DownloadError(err), dep.token()))?; if !code.success() { return Err(Error::new(error::Kind::DownloadFailed, dep.token())); } Ok(()) } /// Returns whether accessing item inside `target` with `vis` from `accessor` pub fn can_access(&self, accessor: Mod, target: Mod, vis: Vis) -> bool { matches!( ( accessor == target, self.module_ctxs[accessor].manifest == self.module_ctxs[target].manifest, vis ), (true, ..) | (_, true, Vis::None | Vis::Public) | (.., Vis::Public) ) } /// Computes hash of span content. pub fn hash_span(&self, span: Span) -> ID { ID::new(self.display(span)) } /// Computes hash fo token content. pub fn hash_token(&self, token: Token) -> ID { ID::new(self.display_token(token)) } /// Creates a module order fro given root. It returns the sequence /// of modules creating cycle as error. pub fn create_order(&self, root: Mod) -> std::result::Result<Vec<Mod>, Vec<Mod>> { let mut ordering = Vec::with_capacity(self.modules.len()); let mut stack = Vec::with_capacity(self.modules.len()); let mut lookup = vec![(false, false); self.modules.len()]; if let Some(cycle) = self.detect_cycles(root, &mut stack, &mut lookup, Some(&mut ordering)) { return Err(cycle); } return Ok(ordering); } /// Collects scopes of a module. pub fn collect_scopes(&self, module: Mod, buffer: &mut Vec<Mod>) { let module_ent = &self.module_ctxs[module]; buffer.push(module); buffer.extend(module_ent.deps.iter().map(|dep| dep.1)); } /// Loads a builtin module. Source code is included with macro. 
pub fn load_builtin_module(&mut self) { let content = include_str!("builtin.mf").to_string(); let name = "builtin.mf".to_string(); let source = SourceEnt::new(name, content); let source = self.add_source(source); let module = ModEnt { id: ID::new("builtin"), modified: SystemTime::now(), ..Default::default() }; let module = self.modules.push(module); self.module_ctxs[module].ast_state = ast::State::new(source, &self.ctx).unwrap(); } /// Computes ast of module. If true is returned, parsing was /// interrupted by top level 'break'. pub fn compute_ast<'a>( &'a mut self, module: Mod, buffer: &'a mut ast::DataCollector<'a>, collector: &'a mut ast::Collector, ) -> Result<bool> { ast::Parser::new( &mut self.module_ctxs[module].ast_state, buffer, &mut self.ctx, collector, ) .parse() .map_err(|err| Error::new(error::Kind::AError(err), Token::default())) } pub fn collect_imported_items(&self, module: Mod, buffer: &mut Vec<(ID, Item)>) { for &(.., module) in self.module_ctxs[module].deps.iter() { buffer.extend_from_slice(self.modules[module].owned_items.as_slice()); } } /// Finds item in scope, if collision occurred, or item does not exist, method returns error. pub fn find_item(&self, module: Mod, id: ID, hint: Token) -> Result<Item> { let scope = &self.module_ctxs[module].scope; let item = scope .get(id) .ok_or_else(|| Error::new(error::Kind::ItemNotFound, hint))? .clone(); if item.kind == item::Kind::Collision { let candidates = self.module_ctxs[module] .deps .iter() .filter_map(|&(span, module)| { scope.get(id.add(self.modules[module].id)).map(|_| span) }) .collect::<Vec<_>>(); return Err(Error::new(error::Kind::ItemCollision(candidates), hint)); } Ok(item) } /// Adds item to module, which means it will be inserted both to owned items and to scope. /// Error can originate from [`Self::import_item()`]. 
pub fn add_item(&mut self, module: Mod, id: ID, item: Item) -> Result { self.modules[module].owned_items.push((id, item)); self.import_item(module, id, item) } /// Imports item into the scope. This can trigger moving items behind external module scope /// if two items have same hash. IF collision between two `module`-owned items occurs, method returns /// error. pub fn import_item(&mut self, module: Mod, mut id: ID, item: Item) -> Result { let scope = &mut self.module_ctxs[module].scope; if let Some(&collision) = scope.get(id) { if collision.kind == item::Kind::Collision { if item.module != module { id = id.add(self.modules[item.module].id); } } else if collision.module == module { if item.module == module { return Err(Error::new( error::Kind::Redefinition(collision.hint), item.hint, )); } id = id.add(self.modules[item.module].id); } else { let redirect = id.add(self.modules[collision.module].id); let shadow = scope.insert(redirect, collision); debug_assert!( shadow.is_none(), "seems like unlucky hashing corrupted the scope" ); scope.insert(id, Item::collision()); if item.module != module { id = id.add(self.modules[item.module].id); } } let shadow = scope.insert(id, item); debug_assert!( shadow.is_none() || shadow.unwrap().kind == item::Kind::Collision, "this means that we did not detect collision when compiling module {:?} {}", shadow, token::Display::new(self.sources(), &item.hint), ); } else { scope.insert(id, item); } Ok(()) } pub fn push_item(&mut self, module: Mod, id: ID, item: item::Kind) -> Option<Item> { let item = Item::new(item, module, Token::default()); self.module_ctxs[module].scope.insert(id, item) } pub fn pop_item(&mut self, module: Mod, id: ID, shadow: Option<Item>) { if let Some(shadow) = shadow { self.module_ctxs[module].scope.insert(id, shadow); } else { self.module_ctxs[module].scope.remove(id); } } pub fn module_id(&self, module: Mod) -> ID { self.modules[module].id } pub fn find_item_unchecked(&self, module: Mod, id: ID) -> Option<Item> { 
self.module_ctxs[module].scope.get(id).cloned() } pub fn find_attribute(&self, ast_data: &ast::Data, attributes: Ast, name: &str) -> Option<Ast> { let id = ID::new(name); for &attr in ast_data.sons(attributes) { let attr_id = self.hash_token(ast_data.son_ent(attr, 0).token()); if id == attr_id { return Some(attr); } } None } } crate::impl_entity!(Fun, Global, Local, Ty, Const, Bound); #[derive(Debug, Clone, Copy, Default, RealQuickSer)] pub struct Item { kind: item::Kind, module: Mod, hint: Token, } impl Item { pub fn new(kind: item::Kind, module: Mod, hint: Token) -> Item { Item { kind, module, hint } } pub fn collision() -> Self { Self { kind: item::Kind::Collision, ..Default::default() } } pub fn kind(&self) -> item::Kind { self.kind } } pub mod item { use super::*; /// Kind specifies to what [`Item`] points to. #[derive(Debug, Clone, Copy, PartialEq, Eq, RealQuickSer)] pub enum Kind { /// Item is colliding with another and needs /// to be referred to by module path. Collision, /// Item refers to imported module. Mod(Mod), /// Item refers to type. Ty(Ty), /// Item refers to const. Const(Const), /// Item refers to global. Global(Global), /// Item refers to local value. Local(Local), /// Item refers to function. Fun(Fun), /// Item refers to bound. Bound(Bound), } impl std::fmt::Display for Kind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Kind::Collision => write!(f, "collision"), Kind::Mod(..) => write!(f, "module"), Kind::Ty(..) => write!(f, "type"), Kind::Const(..) => write!(f, "constant"), Kind::Global(..) => write!(f, "global variable"), Kind::Local(..) => write!(f, "local variable"), Kind::Fun(..) => write!(f, "function"), Kind::Bound(..) 
=> write!(f, "bound"), } } } impl Default for Kind { fn default() -> Self { Kind::Collision } } } type ManifestDep = (ID, Manifest); /// #[derive(Debug, Clone, Default, QuickSer)] pub struct ManifestEnt { id: ID, base_path: Span, name: Span, root_path: Span, deps: Vec<ManifestDep>, source: Source, } impl ManifestEnt { /// Finds dependant manifest by hash of its alias. pub fn find_dep(&self, id: ID) -> Option<Manifest> { self.deps.iter().find_map(|dep| { if dep.0 == id { Some(dep.1.clone()) } else { None } }) } } impl TreeStorage<Manifest> for Ctx { fn node_dep(&self, id: Manifest, idx: usize) -> Manifest { self.manifests[id].deps[idx].1 } fn node_len(&self, id: Manifest) -> usize { self.manifests[id].deps.len() } fn len(&self) -> usize { self.manifests.len() } } impl ErrorDisplayState<Error> for Ctx { fn fmt(&self, e: &Error, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match e.kind() { error::Kind::ItemCollision(candidates) => { writeln!( f, "tri specifying module this item comes from, here are all candidates:" )?; for &candidate in candidates { writeln!(f, " {}:: ", self.display(candidate))?; } } error::Kind::ItemNotFound => { writeln!(f, "item not found in current scope")?; } error::Kind::Redefinition(token) => { writeln!( f, "is redefinition of\n{}", token::Display::new(self.sources(), token) )?; } error::Kind::InvalidPathEncoding => { writeln!(f, "invalid path encoding")?; } error::Kind::MissingPathStem => { writeln!(f, "root attribute of the manifest if missing path stem (simply is not pointing to file)")?; } error::Kind::MissingCache => { writeln!(f, "missing dependency cache, the environment variable 'METAFLOW_CACHE' has to be set")?; } error::Kind::ImportNotFound => { writeln!( f, "root of module import not found inside manifest, nor it is root of current project" )?; } error::Kind::FileReadError(path, error) => { writeln!(f, "error reading module '{}', this may be due to invalid project structure, original error: {}", 
path.as_os_str().to_str().unwrap(), error)?; } error::Kind::ManifestReadError(path, error) => { writeln!( f, "error reading manifest '{}', original error: {}", path.as_os_str().to_str().unwrap(), error )?; } error::Kind::AError(error) => { writeln!(f, "{}", ErrorDisplay::new(self.deref(), error))?; } error::Kind::CyclicDependency(cycle) => { writeln!(f, "cyclic module dependency detected:")?; for &id in cycle.iter() { writeln!(f, " {}", self.source(self.module_ctxs[id].source).name())?; } } error::Kind::CyclicManifests(cycle) => { writeln!(f, "cyclic package dependency detected:")?; for &id in cycle.iter() { writeln!(f, " {}", self.display(self.manifests[id].name))?; } } error::Kind::MissingDependency(path) => { writeln!( f, "missing dependency '{}'", path.as_os_str().to_str().unwrap() )?; } error::Kind::DownloadError(error) => { writeln!(f, "error downloading dependency, original error: {}", error)?; } error::Kind::DownloadFailed => { writeln!(f, "failed to download dependency")?; } } Ok(()) } fn sources(&self) -> &lexer::Ctx { self.ctx.sources() } } impl Deref for Ctx { type Target = ast::Ctx; fn deref(&self) -> &Self::Target { &self.ctx } } impl DerefMut for Ctx { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.ctx } } /// Struct contains data that should not be serialized with Module. #[derive(Debug, Clone, Default)] pub struct ModCtx { scope: Map<Item>, name: Span, source: Source, manifest: Manifest, ast_state: ast::State, deps: Vec<(Span, Mod)>, used: Vec<Mod>, } impl TreeStorage<Mod> for Ctx { fn node_dep(&self, id: Mod, idx: usize) -> Mod { self.module_ctxs[id].deps[idx].1 } fn node_len(&self, id: Mod) -> usize { self.module_ctxs[id].deps.len() } fn len(&self) -> usize { self.modules.len() } } crate::impl_entity!(Mod, Manifest); /// #[derive(Debug, Clone, QuickDefault, QuickSer)] pub struct ModEnt { id: ID, #[default(SystemTime::UNIX_EPOCH)] modified: SystemTime, owned_items: Vec<(ID, Item)>, } /// Error create upon module building failure. 
#[derive(Debug)] pub struct Error { kind: error::Kind, token: Token, } impl Error { /// Creates new error. pub fn new(kind: error::Kind, token: Token) -> Self { Self { kind, token } } /// Returns error kind. pub fn kind(&self) -> &error::Kind { &self.kind } } impl Into<Error> for ast::Error { fn into(self) -> Error { Error { kind: error::Kind::AError(self), token: Token::default(), } } } impl DisplayError for Error { fn token(&self) -> Token { self.token } } mod error { use super::*; #[derive(Debug)] pub enum Kind { ItemCollision(Vec<Span>), ItemNotFound, Redefinition(Token), InvalidPathEncoding, MissingPathStem, MissingCache, ImportNotFound, FileReadError(PathBuf, std::io::Error), ManifestReadError(PathBuf, std::io::Error), AError(ast::Error), CyclicDependency(Vec<Mod>), CyclicManifests(Vec<Manifest>), MissingDependency(PathBuf), DownloadError(std::io::Error), DownloadFailed, } } /// Tree storage generalizes tree cycle detection. pub trait TreeStorage<I: EntityRef + 'static + Debug> where Self: Sized, { /// Returns dependency of node at given index. fn node_dep(&self, id: I, idx: usize) -> I; /// Returns number of dependencies of node. fn node_len(&self, id: I) -> usize; /// Returns number of nodes. fn len(&self) -> usize; /// Returns none if no cycles found, otherwise returns sequence /// of nodes creating the cycle. `stack` should be empty, lookup /// has to be as long as the number of nodes. Optionally, ordering /// can be passed to create order in which no children is preceding /// its parents. fn detect_cycles( &self, root: I, stack: &mut Vec<(I, usize)>, lookup: &mut [(bool, bool)], mut ordering: Option<&mut Vec<I>>, ) -> Option<Vec<I>> { debug_assert!(stack.is_empty()); stack.push((root, 0)); while let Some(&(node, index)) = stack.last() { let (seen, in_recurse) = lookup[node.index()]; if in_recurse { return Some( stack .drain(stack.iter().position(|i| i.0 == node).unwrap()..) 
.map(|i| i.0) .collect(), ); } let done = self.node_len(node) == index; if done || seen { if !seen { ordering.as_mut().map(|o| o.push(node)); } lookup[node.index()].0 = true; stack.pop().unwrap(); if stack.len() != 0 { lookup[stack[stack.len() - 1].0.index()].1 = false; } continue; } let len = stack.len(); stack[len - 1].1 += 1; lookup[node.index()] = (false, true); stack.push((self.node_dep(node, index), 0)); } None } } /// Module test. pub fn test() { const PATH: &str = "src/modules/test_project"; let mut context = Ctx::default(); context .compute_module_tree(PATH) .map_err(|e| panic!("{}", ErrorDisplay::new(&context, &e))) .unwrap(); }
33.350249
167
0.525763
21989935bd6de83494c4305ae8bc359a1793e33f
13
fn main(){}
6.5
12
0.461538
d646b735a1466aeae0598f412c4efeb2de84366f
1,893
use super::{BuilderMap, MetaMapBuilder, TileType}; use rltk::RandomNumberGenerator; pub struct RoomCornerRounder {} impl MetaMapBuilder for RoomCornerRounder { fn build_map(&mut self, rng: &mut rltk::RandomNumberGenerator, build_data: &mut BuilderMap) { self.build(rng, build_data); } } impl RoomCornerRounder { pub fn new() -> Box<Self> { Box::new(RoomCornerRounder {}) } fn fill_if_corner(&mut self, x: i32, y: i32, build_data: &mut BuilderMap) { let w = build_data.map.width; let h = build_data.map.height; let idx = build_data.map.xy_idx(x, y); let mut neighbor_walls = 0; if x > 0 && build_data.map.tiles[idx - 1] == TileType::Wall { neighbor_walls += 1; } if y > 0 && build_data.map.tiles[idx - w as usize] == TileType::Wall { neighbor_walls += 1; } if x < w - 2 && build_data.map.tiles[idx + 1] == TileType::Wall { neighbor_walls += 1; } if y < h - 2 && build_data.map.tiles[idx + w as usize] == TileType::Wall { neighbor_walls += 1; } if neighbor_walls == 2 { build_data.map.tiles[idx] = TileType::Wall; } } fn build(&mut self, _rng: &mut RandomNumberGenerator, build_data: &mut BuilderMap) { let rooms = if let Some(rooms_builder) = &build_data.rooms { rooms_builder.clone() } else { panic!("Room Rounding require a builder with room structures"); }; for room in rooms.iter() { self.fill_if_corner(room.x1 + 1, room.y1 + 1, build_data); self.fill_if_corner(room.x2, room.y1 + 1, build_data); self.fill_if_corner(room.x1 + 1, room.y2, build_data); self.fill_if_corner(room.x2, room.y2, build_data); build_data.take_snapshot(); } } }
33.210526
97
0.579503
bb88959759da3694a1943aefbc5846ec7aeb9199
5,992
use ark_ec::ProjectiveCurve; use ark_ff::PrimeField; use ark_std::{marker::PhantomData, vec::Vec, UniformRand}; #[derive(Clone)] pub struct ShachamPublicParameters<G: ProjectiveCurve> { pub u: G, pub v: G, pub w: G, } #[derive(Clone)] pub struct ShachamSecretKey<G: ProjectiveCurve> { pub scalar_x: Vec<G::ScalarField>, pub scalar_y: Vec<G::ScalarField>, pub scalar_z: Vec<G::ScalarField>, } #[derive(Clone)] pub struct ShachamPublicKey<G: ProjectiveCurve> { pub pp: ShachamPublicParameters<G>, pub y: Vec<G>, pub z: Vec<G>, } #[derive(Clone)] pub struct ShachamCiphertext<G: ProjectiveCurve> { pub r1: G, pub r2: G, pub r3: G, pub e: Vec<G>, } pub struct ShachamEncryption<G: ProjectiveCurve> { pub pairing_engine_phantom: PhantomData<G>, } impl<G: ProjectiveCurve> ShachamEncryption<G> { pub fn setup<R: ark_std::rand::Rng>(rng: &mut R) -> ShachamPublicParameters<G> { let u: G = G::rand(rng); let v: G = G::rand(rng); let w: G = G::rand(rng); ShachamPublicParameters::<G> { u, v, w } } pub fn key_generation<R: ark_std::rand::Rng>( pp: &ShachamPublicParameters<G>, len: usize, rng: &mut R, ) -> (ShachamSecretKey<G>, ShachamPublicKey<G>) { let mut scalar_x = Vec::<G::ScalarField>::new(); let mut scalar_y = Vec::<G::ScalarField>::new(); let mut scalar_z = Vec::<G::ScalarField>::new(); for _ in 0..len { scalar_x.push(G::ScalarField::rand(rng)); scalar_y.push(G::ScalarField::rand(rng)); scalar_z.push(G::ScalarField::rand(rng)); } let mut y = Vec::<G>::new(); let mut z = Vec::<G>::new(); for i in 0..len { y.push(pp.u.mul(&scalar_x[i].into_repr()) + pp.w.mul(&scalar_z[i].into_repr())); z.push(pp.v.mul(&scalar_y[i].into_repr()) + pp.w.mul(&scalar_z[i].into_repr())); } let sk = ShachamSecretKey::<G> { scalar_x, scalar_y, scalar_z, }; let pk = ShachamPublicKey::<G> { pp: (*pp).clone(), y, z, }; (sk, pk) } pub fn encrypt<R: ark_std::rand::Rng>( pk: &ShachamPublicKey<G>, plaintext: &Vec<G>, rng: &mut R, ) -> ShachamCiphertext<G> { assert!(plaintext.len() <= pk.y.len()); let len = 
plaintext.len(); let a = G::ScalarField::rand(rng); let b = G::ScalarField::rand(rng); let r1 = pk.pp.u.mul(&a.into_repr()); let r2 = pk.pp.v.mul(&b.into_repr()); let r3 = pk.pp.w.mul(&(a + b).into_repr()); let mut e = Vec::<G>::new(); for i in 0..len { e.push(plaintext[i] + pk.y[i].mul(&a.into_repr()) + pk.z[i].mul(&b.into_repr())); } ShachamCiphertext::<G> { r1, r2, r3, e } } pub fn decrypt(sk: &ShachamSecretKey<G>, ciphertext: &ShachamCiphertext<G>) -> Vec<G> { let mut plaintext = Vec::new(); let len = sk.scalar_x.len(); for i in 0..len { plaintext.push( ciphertext.e[i] - ciphertext.r1.mul(&sk.scalar_x[i].into_repr()) - ciphertext.r2.mul(&sk.scalar_y[i].into_repr()) - ciphertext.r3.mul(&sk.scalar_z[i].into_repr()), ); } plaintext } pub fn rerand<R: ark_std::rand::Rng>( pk: &ShachamPublicKey<G>, ciphertext: &ShachamCiphertext<G>, rng: &mut R, ) -> ShachamCiphertext<G> { let len = ciphertext.e.len(); let a_new = G::ScalarField::rand(rng); let b_new = G::ScalarField::rand(rng); let r1_new = ciphertext.r1 + pk.pp.u.mul(&a_new.into_repr()); let r2_new = ciphertext.r2 + pk.pp.v.mul(&b_new.into_repr()); let r3_new = ciphertext.r3 + pk.pp.w.mul(&(a_new + b_new).into_repr()); let mut e_new = Vec::<G>::new(); for i in 0..len { e_new.push( ciphertext.e[i] + pk.y[i].mul(&a_new.into_repr()) + pk.z[i].mul(&b_new.into_repr()), ); } ShachamCiphertext::<G> { r1: r1_new, r2: r2_new, r3: r3_new, e: e_new, } } } #[cfg(test)] mod test { use crate::shacham_encryption::ShachamEncryption; use ark_bls12_381::G1Projective; use ark_std::UniformRand; #[test] fn test_encrypt_decrypt() { let mut rng = ark_std::test_rng(); let len = 10; let mut pt = Vec::new(); for _ in 0..len { pt.push(G1Projective::rand(&mut rng)); } let pp = ShachamEncryption::<G1Projective>::setup(&mut rng); let (sk, pk) = ShachamEncryption::<G1Projective>::key_generation(&pp, len, &mut rng); let ct = ShachamEncryption::encrypt(&pk, &pt, &mut rng); let pt_recovered = ShachamEncryption::decrypt(&sk, &ct); for i in 0..len { 
assert!( pt[i].eq(&pt_recovered[i]), "Decrypted results do not match the plaintexts." ); } } #[test] fn test_rerandomization() { let mut rng = ark_std::test_rng(); let len = 10; let mut pt = Vec::new(); for _ in 0..len { pt.push(G1Projective::rand(&mut rng)); } let pp = ShachamEncryption::<G1Projective>::setup(&mut rng); let (sk, pk) = ShachamEncryption::<G1Projective>::key_generation(&pp, len, &mut rng); let ct = ShachamEncryption::encrypt(&pk, &pt, &mut rng); let ct_rerand = ShachamEncryption::rerand(&pk, &ct, &mut rng); let pt_recovered = ShachamEncryption::decrypt(&sk, &ct_rerand); for i in 0..len { assert!( pt[i].eq(&pt_recovered[i]), "Decrypted results of rerandomized ciphertexts do not match the plaintexts." ); } } }
28.398104
100
0.534546
ac945c31c20a00315685b29fd21863b28ab8d0b5
2,325
use crate::{Map, Route}; #[derive(Debug)] pub struct NaiveFinder<'a> { map: &'a Map, current_position: (usize, usize), current_route: usize, routes: Vec<Route>, } impl<'a> NaiveFinder<'a> { pub fn new(map: &'a Map) -> NaiveFinder<'a> { NaiveFinder { map, current_position: (0, 0), current_route: 0, routes: Vec::new(), } } pub fn reset(&mut self) { *self = NaiveFinder::new(self.map); } pub fn start(&mut self) -> Route { self.reset(); loop { let mut new_routes = Vec::new(); let mut not_looped = true; for route in self.routes.iter_mut() { not_looped = false; let mut founds = Vec::new(); let mut minimum = usize::MAX; for (x, y) in route.next_steps() { if route.contains(&(x, y)) { continue; }; if let Some(cost) = self.map.get(x, y).copied() { let cost = cost as usize; if cost < minimum { founds = vec![(x, y)]; minimum = cost; continue; } if cost == minimum { founds.push((x, y)); minimum = cost; continue; } } } for found in founds { let mut new_route = route.clone(); new_route.push(found, minimum); new_routes.push(new_route); } } if let Some(end_found) = self.routes.iter().find_map(|x| { if x.last() == Some(&self.map.endpoint()) { Some(x) } else { None } }) { return end_found.clone(); } new_routes.sort_by_key(|x| x.cost / x.path.len()); new_routes.truncate(50); self.routes = new_routes; if not_looped { break; } } Route::new() } }
28.703704
70
0.381935
d5794cea357e0dd8bc1c173b1241e7c02e8f76da
7,253
pub use ethabi; // Re-export pub use hex; // Re-export use ethabi::ethereum_types::{Address, U256}; pub struct TakeLastXBytes(pub usize); /// Represents a data type in solidity /// ```rust /// use eth_encode_packed::SolidityDataType; /// use eth_encode_packed::TakeLastXBytes; /// use eth_encode_packed::ethabi::ethereum_types::{U256, Address}; /// // Uint24 /// SolidityDataType::NumberWithShift(U256::from(3838), TakeLastXBytes(24)); /// // String /// SolidityDataType::String("ipfs-cid-url-very-long"); /// // Bool /// SolidityDataType::Bool(true); /// // Address /// use std::convert::TryInto; /// /// let address = hex::decode("d8b934580fcE35a11B58C6D73aDeE468a2833fa8").unwrap(); /// let address: [u8; 20] = address.try_into().unwrap(); /// SolidityDataType::Address(Address::from(address)); /// ``` pub enum SolidityDataType<'a> { String(&'a str), Address(Address), Bytes(&'a [u8]), Bool(bool), Number(U256), NumberWithShift(U256, TakeLastXBytes), } pub mod abi { use crate::SolidityDataType; /// Pack a single `SolidityDataType` into bytes fn pack<'a>(data_type: &'a SolidityDataType) -> Vec<u8> { let mut res = Vec::new(); match data_type { SolidityDataType::String(s) => { res.extend(s.as_bytes()); } SolidityDataType::Address(a) => { res.extend(a.0); } SolidityDataType::Number(n) => { for b in n.0.iter().rev() { let bytes = b.to_be_bytes(); res.extend(bytes); } } SolidityDataType::Bytes(b) => { res.extend(*b); } SolidityDataType::Bool(b) => { if *b { res.push(1); } else { res.push(0); } } SolidityDataType::NumberWithShift(n, to_take) => { let local_res = n.0.iter().rev().fold(vec![], |mut acc, i| { let bytes = i.to_be_bytes(); acc.extend(bytes); acc }); let to_skip = local_res.len() - (to_take.0 / 8); let local_res = local_res.into_iter().skip(to_skip).collect::<Vec<u8>>(); res.extend(local_res); } }; return res; } /// ```rust /// use eth_encode_packed::hex; /// use eth_encode_packed::SolidityDataType; /// use eth_encode_packed::TakeLastXBytes; /// use 
eth_encode_packed::abi; /// use eth_encode_packed::ethabi::ethereum_types::{Address, U256}; /// use std::convert::TryInto; /// /// let address = hex::decode("d8b934580fcE35a11B58C6D73aDeE468a2833fa8").unwrap(); /// let address: [u8; 20] = address.try_into().unwrap(); /// let input = vec![ /// SolidityDataType::NumberWithShift(U256::from(3838), TakeLastXBytes(24)), /// SolidityDataType::Number(U256::from(4001)), /// SolidityDataType::String("this-is-a-sample-string"), /// SolidityDataType::Address(Address::from(address)), /// SolidityDataType::Number(U256::from(1)), /// ]; /// let (_bytes, hash) = abi::encode_packed(&input); /// let hash = format!("0x{:}", hash); /// let expected = "0x000efe0000000000000000000000000000000000000000000000000000000000000fa1746869732d69732d612d73616d706c652d737472696e67d8b934580fce35a11b58c6d73adee468a2833fa80000000000000000000000000000000000000000000000000000000000000001"; /// assert_eq!(hash, expected); /// ``` pub fn encode_packed(items: &[SolidityDataType]) -> (Vec<u8>, String) { let res = items.iter().fold(Vec::new(), |mut acc, i| { let pack = pack(i); acc.push(pack); acc }); let res = res.join(&[][..]); let hexed = hex::encode(&res); (res, hexed) } } #[cfg(test)] mod tests { use std::convert::TryInto; use super::*; #[test] fn test_normal_use_case() { let address = hex::decode("d8b934580fcE35a11B58C6D73aDeE468a2833fa8").unwrap(); let address: [u8; 20] = address.try_into().unwrap(); let input = vec![ SolidityDataType::NumberWithShift(U256::from(3838), TakeLastXBytes(24)), SolidityDataType::Number(U256::from(4001)), SolidityDataType::String("this-is-a-sample-string"), SolidityDataType::Address(Address::from(address)), SolidityDataType::Number(U256::from(1)), ]; let (_bytes, hash) = abi::encode_packed(&input); let hash = format!("0x{:}", hash); let expected = 
"0x000efe0000000000000000000000000000000000000000000000000000000000000fa1746869732d69732d612d73616d706c652d737472696e67d8b934580fce35a11b58c6d73adee468a2833fa80000000000000000000000000000000000000000000000000000000000000001"; assert_eq!(hash, expected); } #[test] fn test_uint24() { let input = vec![SolidityDataType::NumberWithShift( U256::from(4001), TakeLastXBytes(24), )]; let (_bytes, hash) = abi::encode_packed(&input); let hash = format!("0x{:}", hash); let expected = "0x000fa1"; assert_eq!(hash, expected); } #[test] fn test_uint256() { let input = vec![SolidityDataType::Number(U256::from(3838110))]; let (_bytes, hash) = abi::encode_packed(&input); let hash = format!("0x{:}", hash); let expected = "0x00000000000000000000000000000000000000000000000000000000003a909e"; assert_eq!(hash, expected); } #[test] fn test_string() { let input = vec![SolidityDataType::String("this-is-a-sample-string")]; let (_bytes, hash) = abi::encode_packed(&input); let hash = format!("0x{:}", hash); let expected = "0x746869732d69732d612d73616d706c652d737472696e67"; assert_eq!(hash, expected); } #[test] fn test_address() { let address = hex::decode("d8b934580fcE35a11B58C6D73aDeE468a2833fa8").unwrap(); let address: [u8; 20] = address.try_into().unwrap(); let input = vec![SolidityDataType::Address(Address::from(address))]; let (_bytes, hash) = abi::encode_packed(&input); let hash = format!("0x{:}", hash); let expected = "0xd8b934580fce35a11b58c6d73adee468a2833fa8"; assert_eq!(hash, expected); } #[test] fn test_bool() { let input = vec![SolidityDataType::Bool(false)]; let (_bytes, hash) = abi::encode_packed(&input); let hash = format!("0x{:}", hash); let expected = "0x00"; assert_eq!(hash, expected); } #[test] fn test_normal_bytes() { let bytes = "abababababababababababababababababababababababababababababab"; let bytes = hex::decode(bytes).unwrap(); let bytes: [u8; 30] = bytes.try_into().unwrap(); let input = vec![SolidityDataType::Bytes(&bytes)]; let (_bytes, hash) = 
abi::encode_packed(&input); let hash = format!("0x{:}", hash); let expected = "0xabababababababababababababababababababababababababababababab"; assert_eq!(hash, expected); } }
36.265
248
0.589963
5005996fe360e15d4fdaf73d4775a06c9c213fb2
1,118
pub use crate::object::Py_TPFLAGS_STRING_SUBCLASS as Py_TPFLAGS_BYTES_SUBCLASS; pub use crate::stringobject::PyStringObject as PyBytesObject; pub use crate::stringobject::PyString_AS_STRING as PyBytes_AS_STRING; pub use crate::stringobject::PyString_AsString as PyBytes_AsString; pub use crate::stringobject::PyString_AsStringAndSize as PyBytes_AsStringAndSize; pub use crate::stringobject::PyString_Check as PyBytes_Check; pub use crate::stringobject::PyString_CheckExact as PyBytes_CheckExact; pub use crate::stringobject::PyString_Concat as PyBytes_Concat; pub use crate::stringobject::PyString_ConcatAndDel as PyBytes_ConcatAndDel; pub use crate::stringobject::PyString_Format as PyBytes_Format; pub use crate::stringobject::PyString_FromFormat as PyBytes_FromFormat; pub use crate::stringobject::PyString_FromString as PyBytes_FromString; pub use crate::stringobject::PyString_FromStringAndSize as PyBytes_FromStringAndSize; pub use crate::stringobject::PyString_GET_SIZE as PyBytes_GET_SIZE; pub use crate::stringobject::PyString_Size as PyBytes_Size; pub use crate::stringobject::PyString_Type as PyBytes_Type;
65.764706
85
0.856887
feb37e483569c04e7d11e3ae0a10dfc942833f34
1,907
use librumqttd::{async_locallink::construct_broker, Config}; use std::thread; fn main() { pretty_env_logger::init(); let config: Config = confy::load_path("config/rumqttd.conf").unwrap(); let (mut router, console, servers, builder) = construct_broker(config); thread::spawn(move || { router.start().unwrap(); }); thread::spawn(console); // connect to get a receiver // TODO: Connect with a function which return tx and rx to prevent // doing publishes before connecting // NOTE: Connection buffer should be atleast total number of possible // topics + 3 (request types). If inflight is full with more topics // in tracker, it's possible that router never responnds current // inflight requests. But other pending requests should still be able // to progress let mut rt = tokio::runtime::Builder::new_multi_thread(); rt.enable_all(); rt.build().unwrap().block_on(async { let (mut tx, mut rx) = builder.connect("localclient", 200).await.unwrap(); tx.subscribe(std::iter::once("#")).await.unwrap(); // subscribe and publish in a separate thread let pub_task = tokio::spawn(async move { for _ in 0..10usize { for i in 0..200usize { let topic = format!("hello/{}/world", i); tx.publish(topic, false, vec![0; 1024]).await.unwrap(); } } }); let sub_task = tokio::spawn(async move { let mut count = 0; loop { let message = rx.recv().await.unwrap(); // println!("T = {}, P = {:?}", message.topic, message.payload.len()); count += message.payload.len(); println!("{}", count); } }); servers.await; pub_task.await.unwrap(); sub_task.await.unwrap(); }); }
35.314815
86
0.571054
bfe516dd734059f6967f13d56147a7f9a2b7bec2
1,853
#[doc = "Register `OUTLINK_DSCR_BF1` reader"] pub struct R(crate::R<OUTLINK_DSCR_BF1_SPEC>); impl core::ops::Deref for R { type Target = crate::R<OUTLINK_DSCR_BF1_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<OUTLINK_DSCR_BF1_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<OUTLINK_DSCR_BF1_SPEC>) -> Self { R(reader) } } #[doc = "Field `DMA_OUTLINK_DSCR_BF1` reader - The content of current out descriptor data buffer pointer."] pub struct DMA_OUTLINK_DSCR_BF1_R(crate::FieldReader<u32, u32>); impl DMA_OUTLINK_DSCR_BF1_R { #[inline(always)] pub(crate) fn new(bits: u32) -> Self { DMA_OUTLINK_DSCR_BF1_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for DMA_OUTLINK_DSCR_BF1_R { type Target = crate::FieldReader<u32, u32>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl R { #[doc = "Bits 0:31 - The content of current out descriptor data buffer pointer."] #[inline(always)] pub fn dma_outlink_dscr_bf1(&self) -> DMA_OUTLINK_DSCR_BF1_R { DMA_OUTLINK_DSCR_BF1_R::new(self.bits as u32) } } #[doc = "\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [outlink_dscr_bf1](index.html) module"] pub struct OUTLINK_DSCR_BF1_SPEC; impl crate::RegisterSpec for OUTLINK_DSCR_BF1_SPEC { type Ux = u32; } #[doc = "`read()` method returns [outlink_dscr_bf1::R](R) reader structure"] impl crate::Readable for OUTLINK_DSCR_BF1_SPEC { type Reader = R; } #[doc = "`reset()` method sets OUTLINK_DSCR_BF1 to value 0"] impl crate::Resettable for OUTLINK_DSCR_BF1_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
34.314815
219
0.674582
0e853d14dab3408e451227855917ea49d8ac35dd
3,504
//! Simple colors provides macros for styling text with colors, backgrounds and styles like bold, //! italic and underline. //! //! ![Licenses](https://img.shields.io/crates/l/simple_colors) //! //! <div> //! <img alt="green" src="https://raw.githubusercontent.com/jomy10/simple_colors/master/assets/gif/green.gif" width="400"/> //! <img alt="all" src="https://raw.githubusercontent.com/jomy10/simple_colors/master/assets/gif/all.gif" width="400"/> //! <img alt="bg" src="https://raw.githubusercontent.com/jomy10/simple_colors/master/assets/gif/bg_blue.gif" width="400"/> //! <img alt="bold" src="https://raw.githubusercontent.com/jomy10/simple_colors/master/assets/gif/bold.gif" width="400"/> //! </div> //! //! # Usage //! ```rust //! # use simple_colors::{white, red, printlnc}; //! # fn main() { //! println!("{}", red!("This is red")); //! printlnc!(red!("This is also red")); //! printlnc!(format!("{}, {}.", white!("This is white"), red!("this is red"))) //! # } //! ``` //! //! <img alt="red_output" src="https://raw.githubusercontent.com/jomy10/simple_colors/master/assets/img/red_ex.png" width="500"/> //! //! ```rust //! # use simple_colors::{color, red, Color}; //! # fn main() { //! println!("{}", color!(Color::Red, "This is red")); //! println!("{}", red!("This will be the same color")); //! pritlnc!(bg_red("This text has a red background")); //! # } //! ``` //! //! ```rust //! # use simple_colors::{bold, green}; //! # fn main() { //! println!("{}", bold!(green!("This text is bold and green"))); //! # } //! ``` //! //! <img alt="output" src="https://raw.githubusercontent.com/jomy10/simple_colors/master/assets/img/ex1.png" width="500"/> //! //! # Define your own styles //! You can create your own styles like: //! //! ```rust //! # use simple_colors::{color, Style, Color}; //! # fn main() { //! struct MyCustomStyle; //! impl simple_colors::custom::Style for MyCustomStyle { //! fn get_style_code(&self) -> String { //! 
// This will return a code for bold and light blue text //! format!("{}{}", //! Style::Bold.get_style_code(), //! Color::LightBlue.get_style_code() //! ) //! } //! } //! //! println!("{}", color!(MyCustomStyle, //! "This text is light blue and bold, \ //! but on some terminals it is purple.")) //! # } //! ``` //! //! <img alt="output" src="https://raw.githubusercontent.com/jomy10/simple_colors/master/assets/img/custom_ex1.png" width="500"/> //! //! ```rust //! # use simple_colors::{color, Color, Style}; //! # fn main() { //! enum MyCustomStyles { //! Style1, //! Style2 //! } //! impl simple_colors::custom::Style for MyCustomStyles { //! fn get_style_code(&self) -> String { //! match self { //! // Style1 will be bold and light blue //! MyCustomStyles::Style1 => "\x1b[1m\x1b[94m".to_string(), //! // Style2 will be bold and red //! MyCustomStyles::Style2 => //! format!( //! "{}{}", //! Style::Bold.get_style_code(), //! Color::Red.get_style_code() //! ) //! } //! } //! } //! //! println!("{}", color!(MyCustomStyles::Style2, "Some text that is both bold and red")) //! # } //! ``` //! //! <img alt="output" src="https://raw.githubusercontent.com/jomy10/simple_colors/master/assets/img/custom_ex2.png" width="500"/> //! mod macros; pub use macros::*; pub mod custom; mod enums; pub use enums::*;
34.019417
129
0.575342
2310e41d7e67316c8f7d998237326ee9221944ff
6,009
// WARNING: This file was autogenerated by jni-bindgen. Any changes to this file may be lost!!! #[cfg(any(feature = "all", feature = "android-icu-text-AlphabeticIndex_ImmutableIndex"))] __jni_bindgen! { /// public final class [AlphabeticIndex.ImmutableIndex](https://developer.android.com/reference/android/icu/text/AlphabeticIndex.ImmutableIndex.html) /// /// Required feature: android-icu-text-AlphabeticIndex_ImmutableIndex public final class AlphabeticIndex_ImmutableIndex ("android/icu/text/AlphabeticIndex$ImmutableIndex") extends crate::java::lang::Object, implements crate::java::lang::Iterable { // // Not emitting: Non-public method // /// [ImmutableIndex](https://developer.android.com/reference/android/icu/text/AlphabeticIndex.ImmutableIndex.html#ImmutableIndex()) // fn new<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<__jni_bindgen::Local<'env, crate::android::icu::text::AlphabeticIndex_ImmutableIndex>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // // class.path == "android/icu/text/AlphabeticIndex$ImmutableIndex", java.flags == (empty), .name == "<init>", .descriptor == "()V" // unsafe { // let __jni_args = []; // let (__jni_class, __jni_method) = __jni_env.require_class_method("android/icu/text/AlphabeticIndex$ImmutableIndex\0", "<init>\0", "()V\0"); // __jni_env.new_object_a(__jni_class, __jni_method, __jni_args.as_ptr()) // } // } /// [getBucketCount](https://developer.android.com/reference/android/icu/text/AlphabeticIndex.ImmutableIndex.html#getBucketCount()) pub fn getBucketCount<'env>(&'env self) -> __jni_bindgen::std::result::Result<i32, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/icu/text/AlphabeticIndex$ImmutableIndex", java.flags == PUBLIC, .name == "getBucketCount", .descriptor == "()I" unsafe { let __jni_args = []; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = 
__jni_env.require_class_method("android/icu/text/AlphabeticIndex$ImmutableIndex\0", "getBucketCount\0", "()I\0"); __jni_env.call_int_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [getBucketIndex](https://developer.android.com/reference/android/icu/text/AlphabeticIndex.ImmutableIndex.html#getBucketIndex(java.lang.CharSequence)) /// /// Required features: "java-lang-CharSequence" #[cfg(any(feature = "all", all(feature = "java-lang-CharSequence")))] pub fn getBucketIndex<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::CharSequence>>) -> __jni_bindgen::std::result::Result<i32, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/icu/text/AlphabeticIndex$ImmutableIndex", java.flags == PUBLIC, .name == "getBucketIndex", .descriptor == "(Ljava/lang/CharSequence;)I" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("android/icu/text/AlphabeticIndex$ImmutableIndex\0", "getBucketIndex\0", "(Ljava/lang/CharSequence;)I\0"); __jni_env.call_int_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [getBucket](https://developer.android.com/reference/android/icu/text/AlphabeticIndex.ImmutableIndex.html#getBucket(int)) /// /// Required features: "android-icu-text-AlphabeticIndex_Bucket" #[cfg(any(feature = "all", all(feature = "android-icu-text-AlphabeticIndex_Bucket")))] pub fn getBucket<'env>(&'env self, arg0: i32) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::android::icu::text::AlphabeticIndex_Bucket>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/icu/text/AlphabeticIndex$ImmutableIndex", java.flags == PUBLIC, .name == "getBucket", .descriptor == 
"(I)Landroid/icu/text/AlphabeticIndex$Bucket;" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0)]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("android/icu/text/AlphabeticIndex$ImmutableIndex\0", "getBucket\0", "(I)Landroid/icu/text/AlphabeticIndex$Bucket;\0"); __jni_env.call_object_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [iterator](https://developer.android.com/reference/android/icu/text/AlphabeticIndex.ImmutableIndex.html#iterator()) /// /// Required features: "java-util-Iterator" #[cfg(any(feature = "all", all(feature = "java-util-Iterator")))] pub fn iterator<'env>(&'env self) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::util::Iterator>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/icu/text/AlphabeticIndex$ImmutableIndex", java.flags == PUBLIC, .name == "iterator", .descriptor == "()Ljava/util/Iterator;" unsafe { let __jni_args = []; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("android/icu/text/AlphabeticIndex$ImmutableIndex\0", "iterator\0", "()Ljava/util/Iterator;\0"); __jni_env.call_object_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } } }
79.065789
271
0.662506
ed249518e4be05c29ee5f3ae33a0989547f5aa8f
96,644
// ================================================================= // // * WARNING * // // This file is generated! // // Changes made to this file will be overwritten. If changes are // required to the generated code, the service_crategen project // must be updated to generate the changes. // // ================================================================= use std::error::Error; use std::fmt; use std::io; #[allow(warnings)] use futures::future; use futures::Future; use rusoto_core::region; use rusoto_core::request::{BufferedHttpResponse, DispatchSignedRequest}; use rusoto_core::{Client, RusotoFuture}; use rusoto_core::credential::{CredentialsError, ProvideAwsCredentials}; use rusoto_core::request::HttpDispatchError; use rusoto_core::signature::SignedRequest; use serde_json; use serde_json::from_slice; use serde_json::Value as SerdeJsonValue; /// <p>Contains information about a backup of an AWS CloudHSM cluster.</p> #[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[cfg_attr(test, derive(Serialize))] pub struct Backup { /// <p>The identifier (ID) of the backup.</p> #[serde(rename = "BackupId")] pub backup_id: String, /// <p>The state of the backup.</p> #[serde(rename = "BackupState")] #[serde(skip_serializing_if = "Option::is_none")] pub backup_state: Option<String>, /// <p>The identifier (ID) of the cluster that was backed up.</p> #[serde(rename = "ClusterId")] #[serde(skip_serializing_if = "Option::is_none")] pub cluster_id: Option<String>, #[serde(rename = "CopyTimestamp")] #[serde(skip_serializing_if = "Option::is_none")] pub copy_timestamp: Option<f64>, /// <p>The date and time when the backup was created.</p> #[serde(rename = "CreateTimestamp")] #[serde(skip_serializing_if = "Option::is_none")] pub create_timestamp: Option<f64>, #[serde(rename = "SourceBackup")] #[serde(skip_serializing_if = "Option::is_none")] pub source_backup: Option<String>, #[serde(rename = "SourceCluster")] #[serde(skip_serializing_if = "Option::is_none")] pub 
source_cluster: Option<String>, #[serde(rename = "SourceRegion")] #[serde(skip_serializing_if = "Option::is_none")] pub source_region: Option<String>, } /// <p>Contains one or more certificates or a certificate signing request (CSR).</p> #[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[cfg_attr(test, derive(Serialize))] pub struct Certificates { /// <p>The HSM hardware certificate issued (signed) by AWS CloudHSM.</p> #[serde(rename = "AwsHardwareCertificate")] #[serde(skip_serializing_if = "Option::is_none")] pub aws_hardware_certificate: Option<String>, /// <p>The cluster certificate issued (signed) by the issuing certificate authority (CA) of the cluster's owner.</p> #[serde(rename = "ClusterCertificate")] #[serde(skip_serializing_if = "Option::is_none")] pub cluster_certificate: Option<String>, /// <p>The cluster's certificate signing request (CSR). The CSR exists only when the cluster's state is <code>UNINITIALIZED</code>.</p> #[serde(rename = "ClusterCsr")] #[serde(skip_serializing_if = "Option::is_none")] pub cluster_csr: Option<String>, /// <p>The HSM certificate issued (signed) by the HSM hardware.</p> #[serde(rename = "HsmCertificate")] #[serde(skip_serializing_if = "Option::is_none")] pub hsm_certificate: Option<String>, /// <p>The HSM hardware certificate issued (signed) by the hardware manufacturer.</p> #[serde(rename = "ManufacturerHardwareCertificate")] #[serde(skip_serializing_if = "Option::is_none")] pub manufacturer_hardware_certificate: Option<String>, } /// <p>Contains information about an AWS CloudHSM cluster.</p> #[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[cfg_attr(test, derive(Serialize))] pub struct Cluster { /// <p>The cluster's backup policy.</p> #[serde(rename = "BackupPolicy")] #[serde(skip_serializing_if = "Option::is_none")] pub backup_policy: Option<String>, /// <p>Contains one or more certificates or a certificate signing request (CSR).</p> #[serde(rename = "Certificates")] #[serde(skip_serializing_if = 
"Option::is_none")] pub certificates: Option<Certificates>, /// <p>The cluster's identifier (ID).</p> #[serde(rename = "ClusterId")] #[serde(skip_serializing_if = "Option::is_none")] pub cluster_id: Option<String>, /// <p>The date and time when the cluster was created.</p> #[serde(rename = "CreateTimestamp")] #[serde(skip_serializing_if = "Option::is_none")] pub create_timestamp: Option<f64>, /// <p>The type of HSM that the cluster contains.</p> #[serde(rename = "HsmType")] #[serde(skip_serializing_if = "Option::is_none")] pub hsm_type: Option<String>, /// <p>Contains information about the HSMs in the cluster.</p> #[serde(rename = "Hsms")] #[serde(skip_serializing_if = "Option::is_none")] pub hsms: Option<Vec<Hsm>>, /// <p>The default password for the cluster's Pre-Crypto Officer (PRECO) user.</p> #[serde(rename = "PreCoPassword")] #[serde(skip_serializing_if = "Option::is_none")] pub pre_co_password: Option<String>, /// <p>The identifier (ID) of the cluster's security group.</p> #[serde(rename = "SecurityGroup")] #[serde(skip_serializing_if = "Option::is_none")] pub security_group: Option<String>, /// <p>The identifier (ID) of the backup used to create the cluster. 
This value exists only when the cluster was created from a backup.</p> #[serde(rename = "SourceBackupId")] #[serde(skip_serializing_if = "Option::is_none")] pub source_backup_id: Option<String>, /// <p>The cluster's state.</p> #[serde(rename = "State")] #[serde(skip_serializing_if = "Option::is_none")] pub state: Option<String>, /// <p>A description of the cluster's state.</p> #[serde(rename = "StateMessage")] #[serde(skip_serializing_if = "Option::is_none")] pub state_message: Option<String>, /// <p>A map of the cluster's subnets and their corresponding Availability Zones.</p> #[serde(rename = "SubnetMapping")] #[serde(skip_serializing_if = "Option::is_none")] pub subnet_mapping: Option<::std::collections::HashMap<String, String>>, /// <p>The identifier (ID) of the virtual private cloud (VPC) that contains the cluster.</p> #[serde(rename = "VpcId")] #[serde(skip_serializing_if = "Option::is_none")] pub vpc_id: Option<String>, } #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct CopyBackupToRegionRequest { #[serde(rename = "BackupId")] pub backup_id: String, #[serde(rename = "DestinationRegion")] pub destination_region: String, } #[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[cfg_attr(test, derive(Serialize))] pub struct CopyBackupToRegionResponse { #[serde(rename = "DestinationBackup")] #[serde(skip_serializing_if = "Option::is_none")] pub destination_backup: Option<DestinationBackup>, } #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct CreateClusterRequest { /// <p>The type of HSM to use in the cluster. Currently the only allowed value is <code>hsm1.medium</code>.</p> #[serde(rename = "HsmType")] pub hsm_type: String, /// <p>The identifier (ID) of the cluster backup to restore. Use this value to restore the cluster from a backup instead of creating a new cluster. 
To find the backup ID, use <a>DescribeBackups</a>.</p> #[serde(rename = "SourceBackupId")] #[serde(skip_serializing_if = "Option::is_none")] pub source_backup_id: Option<String>, /// <p><p>The identifiers (IDs) of the subnets where you are creating the cluster. You must specify at least one subnet. If you specify multiple subnets, they must meet the following criteria:</p> <ul> <li> <p>All subnets must be in the same virtual private cloud (VPC).</p> </li> <li> <p>You can specify only one subnet per Availability Zone.</p> </li> </ul></p> #[serde(rename = "SubnetIds")] pub subnet_ids: Vec<String>, } #[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[cfg_attr(test, derive(Serialize))] pub struct CreateClusterResponse { /// <p>Information about the cluster that was created.</p> #[serde(rename = "Cluster")] #[serde(skip_serializing_if = "Option::is_none")] pub cluster: Option<Cluster>, } #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct CreateHsmRequest { /// <p>The Availability Zone where you are creating the HSM. To find the cluster's Availability Zones, use <a>DescribeClusters</a>.</p> #[serde(rename = "AvailabilityZone")] pub availability_zone: String, /// <p>The identifier (ID) of the HSM's cluster. To find the cluster ID, use <a>DescribeClusters</a>.</p> #[serde(rename = "ClusterId")] pub cluster_id: String, /// <p>The HSM's IP address. If you specify an IP address, use an available address from the subnet that maps to the Availability Zone where you are creating the HSM. 
If you don't specify an IP address, one is chosen for you from that subnet.</p> #[serde(rename = "IpAddress")] #[serde(skip_serializing_if = "Option::is_none")] pub ip_address: Option<String>, } #[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[cfg_attr(test, derive(Serialize))] pub struct CreateHsmResponse { /// <p>Information about the HSM that was created.</p> #[serde(rename = "Hsm")] #[serde(skip_serializing_if = "Option::is_none")] pub hsm: Option<Hsm>, } #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct DeleteClusterRequest { /// <p>The identifier (ID) of the cluster that you are deleting. To find the cluster ID, use <a>DescribeClusters</a>.</p> #[serde(rename = "ClusterId")] pub cluster_id: String, } #[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[cfg_attr(test, derive(Serialize))] pub struct DeleteClusterResponse { /// <p>Information about the cluster that was deleted.</p> #[serde(rename = "Cluster")] #[serde(skip_serializing_if = "Option::is_none")] pub cluster: Option<Cluster>, } #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct DeleteHsmRequest { /// <p>The identifier (ID) of the cluster that contains the HSM that you are deleting.</p> #[serde(rename = "ClusterId")] pub cluster_id: String, /// <p>The identifier (ID) of the elastic network interface (ENI) of the HSM that you are deleting.</p> #[serde(rename = "EniId")] #[serde(skip_serializing_if = "Option::is_none")] pub eni_id: Option<String>, /// <p>The IP address of the elastic network interface (ENI) of the HSM that you are deleting.</p> #[serde(rename = "EniIp")] #[serde(skip_serializing_if = "Option::is_none")] pub eni_ip: Option<String>, /// <p>The identifier (ID) of the HSM that you are deleting.</p> #[serde(rename = "HsmId")] #[serde(skip_serializing_if = "Option::is_none")] pub hsm_id: Option<String>, } #[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[cfg_attr(test, derive(Serialize))] pub struct DeleteHsmResponse { /// <p>The 
identifier (ID) of the HSM that was deleted.</p> #[serde(rename = "HsmId")] #[serde(skip_serializing_if = "Option::is_none")] pub hsm_id: Option<String>, } #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct DescribeBackupsRequest { /// <p>One or more filters to limit the items returned in the response.</p> <p>Use the <code>backupIds</code> filter to return only the specified backups. Specify backups by their backup identifier (ID).</p> <p>Use the <code>clusterIds</code> filter to return only the backups for the specified clusters. Specify clusters by their cluster identifier (ID).</p> <p>Use the <code>states</code> filter to return only backups that match the specified state.</p> #[serde(rename = "Filters")] #[serde(skip_serializing_if = "Option::is_none")] pub filters: Option<::std::collections::HashMap<String, Vec<String>>>, /// <p>The maximum number of backups to return in the response. When there are more backups than the number you specify, the response contains a <code>NextToken</code> value.</p> #[serde(rename = "MaxResults")] #[serde(skip_serializing_if = "Option::is_none")] pub max_results: Option<i64>, /// <p>The <code>NextToken</code> value that you received in the previous response. Use this value to get more backups.</p> #[serde(rename = "NextToken")] #[serde(skip_serializing_if = "Option::is_none")] pub next_token: Option<String>, #[serde(rename = "SortAscending")] #[serde(skip_serializing_if = "Option::is_none")] pub sort_ascending: Option<bool>, } #[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[cfg_attr(test, derive(Serialize))] pub struct DescribeBackupsResponse { /// <p>A list of backups.</p> #[serde(rename = "Backups")] #[serde(skip_serializing_if = "Option::is_none")] pub backups: Option<Vec<Backup>>, /// <p>An opaque string that indicates that the response contains only a subset of backups. 
Use this value in a subsequent <code>DescribeBackups</code> request to get more backups.</p> #[serde(rename = "NextToken")] #[serde(skip_serializing_if = "Option::is_none")] pub next_token: Option<String>, } #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct DescribeClustersRequest { /// <p>One or more filters to limit the items returned in the response.</p> <p>Use the <code>clusterIds</code> filter to return only the specified clusters. Specify clusters by their cluster identifier (ID).</p> <p>Use the <code>vpcIds</code> filter to return only the clusters in the specified virtual private clouds (VPCs). Specify VPCs by their VPC identifier (ID).</p> <p>Use the <code>states</code> filter to return only clusters that match the specified state.</p> #[serde(rename = "Filters")] #[serde(skip_serializing_if = "Option::is_none")] pub filters: Option<::std::collections::HashMap<String, Vec<String>>>, /// <p>The maximum number of clusters to return in the response. When there are more clusters than the number you specify, the response contains a <code>NextToken</code> value.</p> #[serde(rename = "MaxResults")] #[serde(skip_serializing_if = "Option::is_none")] pub max_results: Option<i64>, /// <p>The <code>NextToken</code> value that you received in the previous response. Use this value to get more clusters.</p> #[serde(rename = "NextToken")] #[serde(skip_serializing_if = "Option::is_none")] pub next_token: Option<String>, } #[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[cfg_attr(test, derive(Serialize))] pub struct DescribeClustersResponse { /// <p>A list of clusters.</p> #[serde(rename = "Clusters")] #[serde(skip_serializing_if = "Option::is_none")] pub clusters: Option<Vec<Cluster>>, /// <p>An opaque string that indicates that the response contains only a subset of clusters. 
Use this value in a subsequent <code>DescribeClusters</code> request to get more clusters.</p> #[serde(rename = "NextToken")] #[serde(skip_serializing_if = "Option::is_none")] pub next_token: Option<String>, } #[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[cfg_attr(test, derive(Serialize))] pub struct DestinationBackup { #[serde(rename = "CreateTimestamp")] #[serde(skip_serializing_if = "Option::is_none")] pub create_timestamp: Option<f64>, #[serde(rename = "SourceBackup")] #[serde(skip_serializing_if = "Option::is_none")] pub source_backup: Option<String>, #[serde(rename = "SourceCluster")] #[serde(skip_serializing_if = "Option::is_none")] pub source_cluster: Option<String>, #[serde(rename = "SourceRegion")] #[serde(skip_serializing_if = "Option::is_none")] pub source_region: Option<String>, } /// <p>Contains information about a hardware security module (HSM) in an AWS CloudHSM cluster.</p> #[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[cfg_attr(test, derive(Serialize))] pub struct Hsm { /// <p>The Availability Zone that contains the HSM.</p> #[serde(rename = "AvailabilityZone")] #[serde(skip_serializing_if = "Option::is_none")] pub availability_zone: Option<String>, /// <p>The identifier (ID) of the cluster that contains the HSM.</p> #[serde(rename = "ClusterId")] #[serde(skip_serializing_if = "Option::is_none")] pub cluster_id: Option<String>, /// <p>The identifier (ID) of the HSM's elastic network interface (ENI).</p> #[serde(rename = "EniId")] #[serde(skip_serializing_if = "Option::is_none")] pub eni_id: Option<String>, /// <p>The IP address of the HSM's elastic network interface (ENI).</p> #[serde(rename = "EniIp")] #[serde(skip_serializing_if = "Option::is_none")] pub eni_ip: Option<String>, /// <p>The HSM's identifier (ID).</p> #[serde(rename = "HsmId")] pub hsm_id: String, /// <p>The HSM's state.</p> #[serde(rename = "State")] #[serde(skip_serializing_if = "Option::is_none")] pub state: Option<String>, /// <p>A description of the 
HSM's state.</p> #[serde(rename = "StateMessage")] #[serde(skip_serializing_if = "Option::is_none")] pub state_message: Option<String>, /// <p>The subnet that contains the HSM's elastic network interface (ENI).</p> #[serde(rename = "SubnetId")] #[serde(skip_serializing_if = "Option::is_none")] pub subnet_id: Option<String>, } #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct InitializeClusterRequest { /// <p>The identifier (ID) of the cluster that you are claiming. To find the cluster ID, use <a>DescribeClusters</a>.</p> #[serde(rename = "ClusterId")] pub cluster_id: String, /// <p>The cluster certificate issued (signed) by your issuing certificate authority (CA). The certificate must be in PEM format and can contain a maximum of 5000 characters.</p> #[serde(rename = "SignedCert")] pub signed_cert: String, /// <p>The issuing certificate of the issuing certificate authority (CA) that issued (signed) the cluster certificate. This can be a root (self-signed) certificate or a certificate chain that begins with the certificate that issued the cluster certificate and ends with a root certificate. The certificate or certificate chain must be in PEM format and can contain a maximum of 5000 characters.</p> #[serde(rename = "TrustAnchor")] pub trust_anchor: String, } #[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[cfg_attr(test, derive(Serialize))] pub struct InitializeClusterResponse { /// <p>The cluster's state.</p> #[serde(rename = "State")] #[serde(skip_serializing_if = "Option::is_none")] pub state: Option<String>, /// <p>A description of the cluster's state.</p> #[serde(rename = "StateMessage")] #[serde(skip_serializing_if = "Option::is_none")] pub state_message: Option<String>, } #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct ListTagsRequest { /// <p>The maximum number of tags to return in the response. 
When there are more tags than the number you specify, the response contains a <code>NextToken</code> value.</p> #[serde(rename = "MaxResults")] #[serde(skip_serializing_if = "Option::is_none")] pub max_results: Option<i64>, /// <p>The <code>NextToken</code> value that you received in the previous response. Use this value to get more tags.</p> #[serde(rename = "NextToken")] #[serde(skip_serializing_if = "Option::is_none")] pub next_token: Option<String>, /// <p>The cluster identifier (ID) for the cluster whose tags you are getting. To find the cluster ID, use <a>DescribeClusters</a>.</p> #[serde(rename = "ResourceId")] pub resource_id: String, } #[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[cfg_attr(test, derive(Serialize))] pub struct ListTagsResponse { /// <p>An opaque string that indicates that the response contains only a subset of tags. Use this value in a subsequent <code>ListTags</code> request to get more tags.</p> #[serde(rename = "NextToken")] #[serde(skip_serializing_if = "Option::is_none")] pub next_token: Option<String>, /// <p>A list of tags.</p> #[serde(rename = "TagList")] pub tag_list: Vec<Tag>, } /// <p>Contains a tag. A tag is a key-value pair.</p> #[derive(Default, Debug, Clone, PartialEq, Serialize, Deserialize)] pub struct Tag { /// <p>The key of the tag.</p> #[serde(rename = "Key")] pub key: String, /// <p>The value of the tag.</p> #[serde(rename = "Value")] pub value: String, } #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct TagResourceRequest { /// <p>The cluster identifier (ID) for the cluster that you are tagging. 
To find the cluster ID, use <a>DescribeClusters</a>.</p> #[serde(rename = "ResourceId")] pub resource_id: String, /// <p>A list of one or more tags.</p> #[serde(rename = "TagList")] pub tag_list: Vec<Tag>, } #[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[cfg_attr(test, derive(Serialize))] pub struct TagResourceResponse {} #[derive(Default, Debug, Clone, PartialEq, Serialize)] pub struct UntagResourceRequest { /// <p>The cluster identifier (ID) for the cluster whose tags you are removing. To find the cluster ID, use <a>DescribeClusters</a>.</p> #[serde(rename = "ResourceId")] pub resource_id: String, /// <p>A list of one or more tag keys for the tags that you are removing. Specify only the tag keys, not the tag values.</p> #[serde(rename = "TagKeyList")] pub tag_key_list: Vec<String>, } #[derive(Default, Debug, Clone, PartialEq, Deserialize)] #[cfg_attr(test, derive(Serialize))] pub struct UntagResourceResponse {} /// Errors returned by CopyBackupToRegion #[derive(Debug, PartialEq)] pub enum CopyBackupToRegionError { /// <p>The request was rejected because the requester does not have permission to perform the requested operation.</p> CloudHsmAccessDenied(String), /// <p>The request was rejected because of an AWS CloudHSM internal failure. The request can be retried.</p> CloudHsmInternalFailure(String), /// <p>The request was rejected because it is not a valid request.</p> CloudHsmInvalidRequest(String), /// <p>The request was rejected because it refers to a resource that cannot be found.</p> CloudHsmResourceNotFound(String), /// <p>The request was rejected because an error occurred.</p> CloudHsmService(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An error occurred parsing the response payload. 
ParseError(String), /// An unknown error occurred. The raw HTTP response is provided. Unknown(BufferedHttpResponse), } impl CopyBackupToRegionError { pub fn from_response(res: BufferedHttpResponse) -> CopyBackupToRegionError { if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(""); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "CloudHsmAccessDeniedException" => { return CopyBackupToRegionError::CloudHsmAccessDenied(String::from( error_message, )); } "CloudHsmInternalFailureException" => { return CopyBackupToRegionError::CloudHsmInternalFailure(String::from( error_message, )); } "CloudHsmInvalidRequestException" => { return CopyBackupToRegionError::CloudHsmInvalidRequest(String::from( error_message, )); } "CloudHsmResourceNotFoundException" => { return CopyBackupToRegionError::CloudHsmResourceNotFound(String::from( error_message, )); } "CloudHsmServiceException" => { return CopyBackupToRegionError::CloudHsmService(String::from(error_message)); } "ValidationException" => { return CopyBackupToRegionError::Validation(error_message.to_string()); } _ => {} } } return CopyBackupToRegionError::Unknown(res); } } impl From<serde_json::error::Error> for CopyBackupToRegionError { fn from(err: serde_json::error::Error) -> CopyBackupToRegionError { CopyBackupToRegionError::ParseError(err.description().to_string()) } } impl From<CredentialsError> for CopyBackupToRegionError { fn from(err: CredentialsError) -> CopyBackupToRegionError { CopyBackupToRegionError::Credentials(err) } } impl From<HttpDispatchError> for CopyBackupToRegionError { fn from(err: HttpDispatchError) -> CopyBackupToRegionError { CopyBackupToRegionError::HttpDispatch(err) } } impl From<io::Error> for CopyBackupToRegionError { fn from(err: 
io::Error) -> CopyBackupToRegionError { CopyBackupToRegionError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for CopyBackupToRegionError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for CopyBackupToRegionError { fn description(&self) -> &str { match *self { CopyBackupToRegionError::CloudHsmAccessDenied(ref cause) => cause, CopyBackupToRegionError::CloudHsmInternalFailure(ref cause) => cause, CopyBackupToRegionError::CloudHsmInvalidRequest(ref cause) => cause, CopyBackupToRegionError::CloudHsmResourceNotFound(ref cause) => cause, CopyBackupToRegionError::CloudHsmService(ref cause) => cause, CopyBackupToRegionError::Validation(ref cause) => cause, CopyBackupToRegionError::Credentials(ref err) => err.description(), CopyBackupToRegionError::HttpDispatch(ref dispatch_error) => { dispatch_error.description() } CopyBackupToRegionError::ParseError(ref cause) => cause, CopyBackupToRegionError::Unknown(_) => "unknown error", } } } /// Errors returned by CreateCluster #[derive(Debug, PartialEq)] pub enum CreateClusterError { /// <p>The request was rejected because the requester does not have permission to perform the requested operation.</p> CloudHsmAccessDenied(String), /// <p>The request was rejected because of an AWS CloudHSM internal failure. The request can be retried.</p> CloudHsmInternalFailure(String), /// <p>The request was rejected because it is not a valid request.</p> CloudHsmInvalidRequest(String), /// <p>The request was rejected because it refers to a resource that cannot be found.</p> CloudHsmResourceNotFound(String), /// <p>The request was rejected because an error occurred.</p> CloudHsmService(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. 
Validation(String), /// An error occurred parsing the response payload. ParseError(String), /// An unknown error occurred. The raw HTTP response is provided. Unknown(BufferedHttpResponse), } impl CreateClusterError { pub fn from_response(res: BufferedHttpResponse) -> CreateClusterError { if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(""); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "CloudHsmAccessDeniedException" => { return CreateClusterError::CloudHsmAccessDenied(String::from(error_message)); } "CloudHsmInternalFailureException" => { return CreateClusterError::CloudHsmInternalFailure(String::from(error_message)); } "CloudHsmInvalidRequestException" => { return CreateClusterError::CloudHsmInvalidRequest(String::from(error_message)); } "CloudHsmResourceNotFoundException" => { return CreateClusterError::CloudHsmResourceNotFound(String::from(error_message)); } "CloudHsmServiceException" => { return CreateClusterError::CloudHsmService(String::from(error_message)); } "ValidationException" => { return CreateClusterError::Validation(error_message.to_string()); } _ => {} } } return CreateClusterError::Unknown(res); } } impl From<serde_json::error::Error> for CreateClusterError { fn from(err: serde_json::error::Error) -> CreateClusterError { CreateClusterError::ParseError(err.description().to_string()) } } impl From<CredentialsError> for CreateClusterError { fn from(err: CredentialsError) -> CreateClusterError { CreateClusterError::Credentials(err) } } impl From<HttpDispatchError> for CreateClusterError { fn from(err: HttpDispatchError) -> CreateClusterError { CreateClusterError::HttpDispatch(err) } } impl From<io::Error> for CreateClusterError { fn from(err: io::Error) -> CreateClusterError { 
CreateClusterError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for CreateClusterError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for CreateClusterError { fn description(&self) -> &str { match *self { CreateClusterError::CloudHsmAccessDenied(ref cause) => cause, CreateClusterError::CloudHsmInternalFailure(ref cause) => cause, CreateClusterError::CloudHsmInvalidRequest(ref cause) => cause, CreateClusterError::CloudHsmResourceNotFound(ref cause) => cause, CreateClusterError::CloudHsmService(ref cause) => cause, CreateClusterError::Validation(ref cause) => cause, CreateClusterError::Credentials(ref err) => err.description(), CreateClusterError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), CreateClusterError::ParseError(ref cause) => cause, CreateClusterError::Unknown(_) => "unknown error", } } } /// Errors returned by CreateHsm #[derive(Debug, PartialEq)] pub enum CreateHsmError { /// <p>The request was rejected because the requester does not have permission to perform the requested operation.</p> CloudHsmAccessDenied(String), /// <p>The request was rejected because of an AWS CloudHSM internal failure. The request can be retried.</p> CloudHsmInternalFailure(String), /// <p>The request was rejected because it is not a valid request.</p> CloudHsmInvalidRequest(String), /// <p>The request was rejected because it refers to a resource that cannot be found.</p> CloudHsmResourceNotFound(String), /// <p>The request was rejected because an error occurred.</p> CloudHsmService(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An error occurred parsing the response payload. ParseError(String), /// An unknown error occurred. The raw HTTP response is provided. 
Unknown(BufferedHttpResponse), } impl CreateHsmError { pub fn from_response(res: BufferedHttpResponse) -> CreateHsmError { if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(""); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "CloudHsmAccessDeniedException" => { return CreateHsmError::CloudHsmAccessDenied(String::from(error_message)); } "CloudHsmInternalFailureException" => { return CreateHsmError::CloudHsmInternalFailure(String::from(error_message)); } "CloudHsmInvalidRequestException" => { return CreateHsmError::CloudHsmInvalidRequest(String::from(error_message)); } "CloudHsmResourceNotFoundException" => { return CreateHsmError::CloudHsmResourceNotFound(String::from(error_message)); } "CloudHsmServiceException" => { return CreateHsmError::CloudHsmService(String::from(error_message)); } "ValidationException" => { return CreateHsmError::Validation(error_message.to_string()); } _ => {} } } return CreateHsmError::Unknown(res); } } impl From<serde_json::error::Error> for CreateHsmError { fn from(err: serde_json::error::Error) -> CreateHsmError { CreateHsmError::ParseError(err.description().to_string()) } } impl From<CredentialsError> for CreateHsmError { fn from(err: CredentialsError) -> CreateHsmError { CreateHsmError::Credentials(err) } } impl From<HttpDispatchError> for CreateHsmError { fn from(err: HttpDispatchError) -> CreateHsmError { CreateHsmError::HttpDispatch(err) } } impl From<io::Error> for CreateHsmError { fn from(err: io::Error) -> CreateHsmError { CreateHsmError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for CreateHsmError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for CreateHsmError { fn 
description(&self) -> &str { match *self { CreateHsmError::CloudHsmAccessDenied(ref cause) => cause, CreateHsmError::CloudHsmInternalFailure(ref cause) => cause, CreateHsmError::CloudHsmInvalidRequest(ref cause) => cause, CreateHsmError::CloudHsmResourceNotFound(ref cause) => cause, CreateHsmError::CloudHsmService(ref cause) => cause, CreateHsmError::Validation(ref cause) => cause, CreateHsmError::Credentials(ref err) => err.description(), CreateHsmError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), CreateHsmError::ParseError(ref cause) => cause, CreateHsmError::Unknown(_) => "unknown error", } } } /// Errors returned by DeleteCluster #[derive(Debug, PartialEq)] pub enum DeleteClusterError { /// <p>The request was rejected because the requester does not have permission to perform the requested operation.</p> CloudHsmAccessDenied(String), /// <p>The request was rejected because of an AWS CloudHSM internal failure. The request can be retried.</p> CloudHsmInternalFailure(String), /// <p>The request was rejected because it is not a valid request.</p> CloudHsmInvalidRequest(String), /// <p>The request was rejected because it refers to a resource that cannot be found.</p> CloudHsmResourceNotFound(String), /// <p>The request was rejected because an error occurred.</p> CloudHsmService(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An error occurred parsing the response payload. ParseError(String), /// An unknown error occurred. The raw HTTP response is provided. 
Unknown(BufferedHttpResponse), } impl DeleteClusterError { pub fn from_response(res: BufferedHttpResponse) -> DeleteClusterError { if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(""); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "CloudHsmAccessDeniedException" => { return DeleteClusterError::CloudHsmAccessDenied(String::from(error_message)); } "CloudHsmInternalFailureException" => { return DeleteClusterError::CloudHsmInternalFailure(String::from(error_message)); } "CloudHsmInvalidRequestException" => { return DeleteClusterError::CloudHsmInvalidRequest(String::from(error_message)); } "CloudHsmResourceNotFoundException" => { return DeleteClusterError::CloudHsmResourceNotFound(String::from(error_message)); } "CloudHsmServiceException" => { return DeleteClusterError::CloudHsmService(String::from(error_message)); } "ValidationException" => { return DeleteClusterError::Validation(error_message.to_string()); } _ => {} } } return DeleteClusterError::Unknown(res); } } impl From<serde_json::error::Error> for DeleteClusterError { fn from(err: serde_json::error::Error) -> DeleteClusterError { DeleteClusterError::ParseError(err.description().to_string()) } } impl From<CredentialsError> for DeleteClusterError { fn from(err: CredentialsError) -> DeleteClusterError { DeleteClusterError::Credentials(err) } } impl From<HttpDispatchError> for DeleteClusterError { fn from(err: HttpDispatchError) -> DeleteClusterError { DeleteClusterError::HttpDispatch(err) } } impl From<io::Error> for DeleteClusterError { fn from(err: io::Error) -> DeleteClusterError { DeleteClusterError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for DeleteClusterError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 
write!(f, "{}", self.description()) } } impl Error for DeleteClusterError { fn description(&self) -> &str { match *self { DeleteClusterError::CloudHsmAccessDenied(ref cause) => cause, DeleteClusterError::CloudHsmInternalFailure(ref cause) => cause, DeleteClusterError::CloudHsmInvalidRequest(ref cause) => cause, DeleteClusterError::CloudHsmResourceNotFound(ref cause) => cause, DeleteClusterError::CloudHsmService(ref cause) => cause, DeleteClusterError::Validation(ref cause) => cause, DeleteClusterError::Credentials(ref err) => err.description(), DeleteClusterError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DeleteClusterError::ParseError(ref cause) => cause, DeleteClusterError::Unknown(_) => "unknown error", } } } /// Errors returned by DeleteHsm #[derive(Debug, PartialEq)] pub enum DeleteHsmError { /// <p>The request was rejected because the requester does not have permission to perform the requested operation.</p> CloudHsmAccessDenied(String), /// <p>The request was rejected because of an AWS CloudHSM internal failure. The request can be retried.</p> CloudHsmInternalFailure(String), /// <p>The request was rejected because it is not a valid request.</p> CloudHsmInvalidRequest(String), /// <p>The request was rejected because it refers to a resource that cannot be found.</p> CloudHsmResourceNotFound(String), /// <p>The request was rejected because an error occurred.</p> CloudHsmService(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An error occurred parsing the response payload. ParseError(String), /// An unknown error occurred. The raw HTTP response is provided. 
Unknown(BufferedHttpResponse), } impl DeleteHsmError { pub fn from_response(res: BufferedHttpResponse) -> DeleteHsmError { if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(""); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "CloudHsmAccessDeniedException" => { return DeleteHsmError::CloudHsmAccessDenied(String::from(error_message)); } "CloudHsmInternalFailureException" => { return DeleteHsmError::CloudHsmInternalFailure(String::from(error_message)); } "CloudHsmInvalidRequestException" => { return DeleteHsmError::CloudHsmInvalidRequest(String::from(error_message)); } "CloudHsmResourceNotFoundException" => { return DeleteHsmError::CloudHsmResourceNotFound(String::from(error_message)); } "CloudHsmServiceException" => { return DeleteHsmError::CloudHsmService(String::from(error_message)); } "ValidationException" => { return DeleteHsmError::Validation(error_message.to_string()); } _ => {} } } return DeleteHsmError::Unknown(res); } } impl From<serde_json::error::Error> for DeleteHsmError { fn from(err: serde_json::error::Error) -> DeleteHsmError { DeleteHsmError::ParseError(err.description().to_string()) } } impl From<CredentialsError> for DeleteHsmError { fn from(err: CredentialsError) -> DeleteHsmError { DeleteHsmError::Credentials(err) } } impl From<HttpDispatchError> for DeleteHsmError { fn from(err: HttpDispatchError) -> DeleteHsmError { DeleteHsmError::HttpDispatch(err) } } impl From<io::Error> for DeleteHsmError { fn from(err: io::Error) -> DeleteHsmError { DeleteHsmError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for DeleteHsmError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DeleteHsmError { fn 
description(&self) -> &str { match *self { DeleteHsmError::CloudHsmAccessDenied(ref cause) => cause, DeleteHsmError::CloudHsmInternalFailure(ref cause) => cause, DeleteHsmError::CloudHsmInvalidRequest(ref cause) => cause, DeleteHsmError::CloudHsmResourceNotFound(ref cause) => cause, DeleteHsmError::CloudHsmService(ref cause) => cause, DeleteHsmError::Validation(ref cause) => cause, DeleteHsmError::Credentials(ref err) => err.description(), DeleteHsmError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DeleteHsmError::ParseError(ref cause) => cause, DeleteHsmError::Unknown(_) => "unknown error", } } } /// Errors returned by DescribeBackups #[derive(Debug, PartialEq)] pub enum DescribeBackupsError { /// <p>The request was rejected because the requester does not have permission to perform the requested operation.</p> CloudHsmAccessDenied(String), /// <p>The request was rejected because of an AWS CloudHSM internal failure. The request can be retried.</p> CloudHsmInternalFailure(String), /// <p>The request was rejected because it is not a valid request.</p> CloudHsmInvalidRequest(String), /// <p>The request was rejected because it refers to a resource that cannot be found.</p> CloudHsmResourceNotFound(String), /// <p>The request was rejected because an error occurred.</p> CloudHsmService(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An error occurred parsing the response payload. ParseError(String), /// An unknown error occurred. The raw HTTP response is provided. 
Unknown(BufferedHttpResponse), } impl DescribeBackupsError { pub fn from_response(res: BufferedHttpResponse) -> DescribeBackupsError { if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(""); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "CloudHsmAccessDeniedException" => { return DescribeBackupsError::CloudHsmAccessDenied(String::from(error_message)); } "CloudHsmInternalFailureException" => { return DescribeBackupsError::CloudHsmInternalFailure(String::from( error_message, )); } "CloudHsmInvalidRequestException" => { return DescribeBackupsError::CloudHsmInvalidRequest(String::from(error_message)); } "CloudHsmResourceNotFoundException" => { return DescribeBackupsError::CloudHsmResourceNotFound(String::from( error_message, )); } "CloudHsmServiceException" => { return DescribeBackupsError::CloudHsmService(String::from(error_message)); } "ValidationException" => { return DescribeBackupsError::Validation(error_message.to_string()); } _ => {} } } return DescribeBackupsError::Unknown(res); } } impl From<serde_json::error::Error> for DescribeBackupsError { fn from(err: serde_json::error::Error) -> DescribeBackupsError { DescribeBackupsError::ParseError(err.description().to_string()) } } impl From<CredentialsError> for DescribeBackupsError { fn from(err: CredentialsError) -> DescribeBackupsError { DescribeBackupsError::Credentials(err) } } impl From<HttpDispatchError> for DescribeBackupsError { fn from(err: HttpDispatchError) -> DescribeBackupsError { DescribeBackupsError::HttpDispatch(err) } } impl From<io::Error> for DescribeBackupsError { fn from(err: io::Error) -> DescribeBackupsError { DescribeBackupsError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for DescribeBackupsError { fn 
fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for DescribeBackupsError { fn description(&self) -> &str { match *self { DescribeBackupsError::CloudHsmAccessDenied(ref cause) => cause, DescribeBackupsError::CloudHsmInternalFailure(ref cause) => cause, DescribeBackupsError::CloudHsmInvalidRequest(ref cause) => cause, DescribeBackupsError::CloudHsmResourceNotFound(ref cause) => cause, DescribeBackupsError::CloudHsmService(ref cause) => cause, DescribeBackupsError::Validation(ref cause) => cause, DescribeBackupsError::Credentials(ref err) => err.description(), DescribeBackupsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DescribeBackupsError::ParseError(ref cause) => cause, DescribeBackupsError::Unknown(_) => "unknown error", } } } /// Errors returned by DescribeClusters #[derive(Debug, PartialEq)] pub enum DescribeClustersError { /// <p>The request was rejected because the requester does not have permission to perform the requested operation.</p> CloudHsmAccessDenied(String), /// <p>The request was rejected because of an AWS CloudHSM internal failure. The request can be retried.</p> CloudHsmInternalFailure(String), /// <p>The request was rejected because it is not a valid request.</p> CloudHsmInvalidRequest(String), /// <p>The request was rejected because an error occurred.</p> CloudHsmService(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An error occurred parsing the response payload. ParseError(String), /// An unknown error occurred. The raw HTTP response is provided. 
Unknown(BufferedHttpResponse), } impl DescribeClustersError { pub fn from_response(res: BufferedHttpResponse) -> DescribeClustersError { if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(""); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "CloudHsmAccessDeniedException" => { return DescribeClustersError::CloudHsmAccessDenied(String::from(error_message)); } "CloudHsmInternalFailureException" => { return DescribeClustersError::CloudHsmInternalFailure(String::from( error_message, )); } "CloudHsmInvalidRequestException" => { return DescribeClustersError::CloudHsmInvalidRequest(String::from( error_message, )); } "CloudHsmServiceException" => { return DescribeClustersError::CloudHsmService(String::from(error_message)); } "ValidationException" => { return DescribeClustersError::Validation(error_message.to_string()); } _ => {} } } return DescribeClustersError::Unknown(res); } } impl From<serde_json::error::Error> for DescribeClustersError { fn from(err: serde_json::error::Error) -> DescribeClustersError { DescribeClustersError::ParseError(err.description().to_string()) } } impl From<CredentialsError> for DescribeClustersError { fn from(err: CredentialsError) -> DescribeClustersError { DescribeClustersError::Credentials(err) } } impl From<HttpDispatchError> for DescribeClustersError { fn from(err: HttpDispatchError) -> DescribeClustersError { DescribeClustersError::HttpDispatch(err) } } impl From<io::Error> for DescribeClustersError { fn from(err: io::Error) -> DescribeClustersError { DescribeClustersError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for DescribeClustersError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for 
DescribeClustersError { fn description(&self) -> &str { match *self { DescribeClustersError::CloudHsmAccessDenied(ref cause) => cause, DescribeClustersError::CloudHsmInternalFailure(ref cause) => cause, DescribeClustersError::CloudHsmInvalidRequest(ref cause) => cause, DescribeClustersError::CloudHsmService(ref cause) => cause, DescribeClustersError::Validation(ref cause) => cause, DescribeClustersError::Credentials(ref err) => err.description(), DescribeClustersError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), DescribeClustersError::ParseError(ref cause) => cause, DescribeClustersError::Unknown(_) => "unknown error", } } } /// Errors returned by InitializeCluster #[derive(Debug, PartialEq)] pub enum InitializeClusterError { /// <p>The request was rejected because the requester does not have permission to perform the requested operation.</p> CloudHsmAccessDenied(String), /// <p>The request was rejected because of an AWS CloudHSM internal failure. The request can be retried.</p> CloudHsmInternalFailure(String), /// <p>The request was rejected because it is not a valid request.</p> CloudHsmInvalidRequest(String), /// <p>The request was rejected because it refers to a resource that cannot be found.</p> CloudHsmResourceNotFound(String), /// <p>The request was rejected because an error occurred.</p> CloudHsmService(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An error occurred parsing the response payload. ParseError(String), /// An unknown error occurred. The raw HTTP response is provided. 
Unknown(BufferedHttpResponse), } impl InitializeClusterError { pub fn from_response(res: BufferedHttpResponse) -> InitializeClusterError { if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(""); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "CloudHsmAccessDeniedException" => { return InitializeClusterError::CloudHsmAccessDenied(String::from(error_message)); } "CloudHsmInternalFailureException" => { return InitializeClusterError::CloudHsmInternalFailure(String::from( error_message, )); } "CloudHsmInvalidRequestException" => { return InitializeClusterError::CloudHsmInvalidRequest(String::from( error_message, )); } "CloudHsmResourceNotFoundException" => { return InitializeClusterError::CloudHsmResourceNotFound(String::from( error_message, )); } "CloudHsmServiceException" => { return InitializeClusterError::CloudHsmService(String::from(error_message)); } "ValidationException" => { return InitializeClusterError::Validation(error_message.to_string()); } _ => {} } } return InitializeClusterError::Unknown(res); } } impl From<serde_json::error::Error> for InitializeClusterError { fn from(err: serde_json::error::Error) -> InitializeClusterError { InitializeClusterError::ParseError(err.description().to_string()) } } impl From<CredentialsError> for InitializeClusterError { fn from(err: CredentialsError) -> InitializeClusterError { InitializeClusterError::Credentials(err) } } impl From<HttpDispatchError> for InitializeClusterError { fn from(err: HttpDispatchError) -> InitializeClusterError { InitializeClusterError::HttpDispatch(err) } } impl From<io::Error> for InitializeClusterError { fn from(err: io::Error) -> InitializeClusterError { InitializeClusterError::HttpDispatch(HttpDispatchError::from(err)) } } impl 
fmt::Display for InitializeClusterError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for InitializeClusterError { fn description(&self) -> &str { match *self { InitializeClusterError::CloudHsmAccessDenied(ref cause) => cause, InitializeClusterError::CloudHsmInternalFailure(ref cause) => cause, InitializeClusterError::CloudHsmInvalidRequest(ref cause) => cause, InitializeClusterError::CloudHsmResourceNotFound(ref cause) => cause, InitializeClusterError::CloudHsmService(ref cause) => cause, InitializeClusterError::Validation(ref cause) => cause, InitializeClusterError::Credentials(ref err) => err.description(), InitializeClusterError::HttpDispatch(ref dispatch_error) => { dispatch_error.description() } InitializeClusterError::ParseError(ref cause) => cause, InitializeClusterError::Unknown(_) => "unknown error", } } } /// Errors returned by ListTags #[derive(Debug, PartialEq)] pub enum ListTagsError { /// <p>The request was rejected because the requester does not have permission to perform the requested operation.</p> CloudHsmAccessDenied(String), /// <p>The request was rejected because of an AWS CloudHSM internal failure. The request can be retried.</p> CloudHsmInternalFailure(String), /// <p>The request was rejected because it is not a valid request.</p> CloudHsmInvalidRequest(String), /// <p>The request was rejected because it refers to a resource that cannot be found.</p> CloudHsmResourceNotFound(String), /// <p>The request was rejected because an error occurred.</p> CloudHsmService(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An error occurred parsing the response payload. ParseError(String), /// An unknown error occurred. The raw HTTP response is provided. 
Unknown(BufferedHttpResponse), } impl ListTagsError { pub fn from_response(res: BufferedHttpResponse) -> ListTagsError { if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(""); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "CloudHsmAccessDeniedException" => { return ListTagsError::CloudHsmAccessDenied(String::from(error_message)); } "CloudHsmInternalFailureException" => { return ListTagsError::CloudHsmInternalFailure(String::from(error_message)); } "CloudHsmInvalidRequestException" => { return ListTagsError::CloudHsmInvalidRequest(String::from(error_message)); } "CloudHsmResourceNotFoundException" => { return ListTagsError::CloudHsmResourceNotFound(String::from(error_message)); } "CloudHsmServiceException" => { return ListTagsError::CloudHsmService(String::from(error_message)); } "ValidationException" => { return ListTagsError::Validation(error_message.to_string()); } _ => {} } } return ListTagsError::Unknown(res); } } impl From<serde_json::error::Error> for ListTagsError { fn from(err: serde_json::error::Error) -> ListTagsError { ListTagsError::ParseError(err.description().to_string()) } } impl From<CredentialsError> for ListTagsError { fn from(err: CredentialsError) -> ListTagsError { ListTagsError::Credentials(err) } } impl From<HttpDispatchError> for ListTagsError { fn from(err: HttpDispatchError) -> ListTagsError { ListTagsError::HttpDispatch(err) } } impl From<io::Error> for ListTagsError { fn from(err: io::Error) -> ListTagsError { ListTagsError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for ListTagsError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl Error for ListTagsError { fn description(&self) -> &str { match 
*self { ListTagsError::CloudHsmAccessDenied(ref cause) => cause, ListTagsError::CloudHsmInternalFailure(ref cause) => cause, ListTagsError::CloudHsmInvalidRequest(ref cause) => cause, ListTagsError::CloudHsmResourceNotFound(ref cause) => cause, ListTagsError::CloudHsmService(ref cause) => cause, ListTagsError::Validation(ref cause) => cause, ListTagsError::Credentials(ref err) => err.description(), ListTagsError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), ListTagsError::ParseError(ref cause) => cause, ListTagsError::Unknown(_) => "unknown error", } } } /// Errors returned by TagResource #[derive(Debug, PartialEq)] pub enum TagResourceError { /// <p>The request was rejected because the requester does not have permission to perform the requested operation.</p> CloudHsmAccessDenied(String), /// <p>The request was rejected because of an AWS CloudHSM internal failure. The request can be retried.</p> CloudHsmInternalFailure(String), /// <p>The request was rejected because it is not a valid request.</p> CloudHsmInvalidRequest(String), /// <p>The request was rejected because it refers to a resource that cannot be found.</p> CloudHsmResourceNotFound(String), /// <p>The request was rejected because an error occurred.</p> CloudHsmService(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An error occurred parsing the response payload. ParseError(String), /// An unknown error occurred. The raw HTTP response is provided. 
Unknown(BufferedHttpResponse), } impl TagResourceError { pub fn from_response(res: BufferedHttpResponse) -> TagResourceError { if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(""); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "CloudHsmAccessDeniedException" => { return TagResourceError::CloudHsmAccessDenied(String::from(error_message)); } "CloudHsmInternalFailureException" => { return TagResourceError::CloudHsmInternalFailure(String::from(error_message)); } "CloudHsmInvalidRequestException" => { return TagResourceError::CloudHsmInvalidRequest(String::from(error_message)); } "CloudHsmResourceNotFoundException" => { return TagResourceError::CloudHsmResourceNotFound(String::from(error_message)); } "CloudHsmServiceException" => { return TagResourceError::CloudHsmService(String::from(error_message)); } "ValidationException" => { return TagResourceError::Validation(error_message.to_string()); } _ => {} } } return TagResourceError::Unknown(res); } } impl From<serde_json::error::Error> for TagResourceError { fn from(err: serde_json::error::Error) -> TagResourceError { TagResourceError::ParseError(err.description().to_string()) } } impl From<CredentialsError> for TagResourceError { fn from(err: CredentialsError) -> TagResourceError { TagResourceError::Credentials(err) } } impl From<HttpDispatchError> for TagResourceError { fn from(err: HttpDispatchError) -> TagResourceError { TagResourceError::HttpDispatch(err) } } impl From<io::Error> for TagResourceError { fn from(err: io::Error) -> TagResourceError { TagResourceError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for TagResourceError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.description()) } } impl 
Error for TagResourceError { fn description(&self) -> &str { match *self { TagResourceError::CloudHsmAccessDenied(ref cause) => cause, TagResourceError::CloudHsmInternalFailure(ref cause) => cause, TagResourceError::CloudHsmInvalidRequest(ref cause) => cause, TagResourceError::CloudHsmResourceNotFound(ref cause) => cause, TagResourceError::CloudHsmService(ref cause) => cause, TagResourceError::Validation(ref cause) => cause, TagResourceError::Credentials(ref err) => err.description(), TagResourceError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), TagResourceError::ParseError(ref cause) => cause, TagResourceError::Unknown(_) => "unknown error", } } } /// Errors returned by UntagResource #[derive(Debug, PartialEq)] pub enum UntagResourceError { /// <p>The request was rejected because the requester does not have permission to perform the requested operation.</p> CloudHsmAccessDenied(String), /// <p>The request was rejected because of an AWS CloudHSM internal failure. The request can be retried.</p> CloudHsmInternalFailure(String), /// <p>The request was rejected because it is not a valid request.</p> CloudHsmInvalidRequest(String), /// <p>The request was rejected because it refers to a resource that cannot be found.</p> CloudHsmResourceNotFound(String), /// <p>The request was rejected because an error occurred.</p> CloudHsmService(String), /// An error occurred dispatching the HTTP request HttpDispatch(HttpDispatchError), /// An error was encountered with AWS credentials. Credentials(CredentialsError), /// A validation error occurred. Details from AWS are provided. Validation(String), /// An error occurred parsing the response payload. ParseError(String), /// An unknown error occurred. The raw HTTP response is provided. 
Unknown(BufferedHttpResponse), } impl UntagResourceError { pub fn from_response(res: BufferedHttpResponse) -> UntagResourceError { if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) { let raw_error_type = json .get("__type") .and_then(|e| e.as_str()) .unwrap_or("Unknown"); let error_message = json.get("message").and_then(|m| m.as_str()).unwrap_or(""); let pieces: Vec<&str> = raw_error_type.split("#").collect(); let error_type = pieces.last().expect("Expected error type"); match *error_type { "CloudHsmAccessDeniedException" => { return UntagResourceError::CloudHsmAccessDenied(String::from(error_message)); } "CloudHsmInternalFailureException" => { return UntagResourceError::CloudHsmInternalFailure(String::from(error_message)); } "CloudHsmInvalidRequestException" => { return UntagResourceError::CloudHsmInvalidRequest(String::from(error_message)); } "CloudHsmResourceNotFoundException" => { return UntagResourceError::CloudHsmResourceNotFound(String::from(error_message)); } "CloudHsmServiceException" => { return UntagResourceError::CloudHsmService(String::from(error_message)); } "ValidationException" => { return UntagResourceError::Validation(error_message.to_string()); } _ => {} } } return UntagResourceError::Unknown(res); } } impl From<serde_json::error::Error> for UntagResourceError { fn from(err: serde_json::error::Error) -> UntagResourceError { UntagResourceError::ParseError(err.description().to_string()) } } impl From<CredentialsError> for UntagResourceError { fn from(err: CredentialsError) -> UntagResourceError { UntagResourceError::Credentials(err) } } impl From<HttpDispatchError> for UntagResourceError { fn from(err: HttpDispatchError) -> UntagResourceError { UntagResourceError::HttpDispatch(err) } } impl From<io::Error> for UntagResourceError { fn from(err: io::Error) -> UntagResourceError { UntagResourceError::HttpDispatch(HttpDispatchError::from(err)) } } impl fmt::Display for UntagResourceError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 
write!(f, "{}", self.description()) } } impl Error for UntagResourceError { fn description(&self) -> &str { match *self { UntagResourceError::CloudHsmAccessDenied(ref cause) => cause, UntagResourceError::CloudHsmInternalFailure(ref cause) => cause, UntagResourceError::CloudHsmInvalidRequest(ref cause) => cause, UntagResourceError::CloudHsmResourceNotFound(ref cause) => cause, UntagResourceError::CloudHsmService(ref cause) => cause, UntagResourceError::Validation(ref cause) => cause, UntagResourceError::Credentials(ref err) => err.description(), UntagResourceError::HttpDispatch(ref dispatch_error) => dispatch_error.description(), UntagResourceError::ParseError(ref cause) => cause, UntagResourceError::Unknown(_) => "unknown error", } } } /// Trait representing the capabilities of the CloudHSM V2 API. CloudHSM V2 clients implement this trait. pub trait CloudHsmv2 { fn copy_backup_to_region( &self, input: CopyBackupToRegionRequest, ) -> RusotoFuture<CopyBackupToRegionResponse, CopyBackupToRegionError>; /// <p>Creates a new AWS CloudHSM cluster.</p> fn create_cluster( &self, input: CreateClusterRequest, ) -> RusotoFuture<CreateClusterResponse, CreateClusterError>; /// <p>Creates a new hardware security module (HSM) in the specified AWS CloudHSM cluster.</p> fn create_hsm( &self, input: CreateHsmRequest, ) -> RusotoFuture<CreateHsmResponse, CreateHsmError>; /// <p>Deletes the specified AWS CloudHSM cluster. Before you can delete a cluster, you must delete all HSMs in the cluster. To see if the cluster contains any HSMs, use <a>DescribeClusters</a>. To delete an HSM, use <a>DeleteHsm</a>.</p> fn delete_cluster( &self, input: DeleteClusterRequest, ) -> RusotoFuture<DeleteClusterResponse, DeleteClusterError>; /// <p>Deletes the specified HSM. To specify an HSM, you can use its identifier (ID), the IP address of the HSM's elastic network interface (ENI), or the ID of the HSM's ENI. You need to specify only one of these values. 
To find these values, use <a>DescribeClusters</a>.</p> fn delete_hsm( &self, input: DeleteHsmRequest, ) -> RusotoFuture<DeleteHsmResponse, DeleteHsmError>; /// <p>Gets information about backups of AWS CloudHSM clusters.</p> <p>This is a paginated operation, which means that each response might contain only a subset of all the backups. When the response contains only a subset of backups, it includes a <code>NextToken</code> value. Use this value in a subsequent <code>DescribeBackups</code> request to get more backups. When you receive a response with no <code>NextToken</code> (or an empty or null value), that means there are no more backups to get.</p> fn describe_backups( &self, input: DescribeBackupsRequest, ) -> RusotoFuture<DescribeBackupsResponse, DescribeBackupsError>; /// <p>Gets information about AWS CloudHSM clusters.</p> <p>This is a paginated operation, which means that each response might contain only a subset of all the clusters. When the response contains only a subset of clusters, it includes a <code>NextToken</code> value. Use this value in a subsequent <code>DescribeClusters</code> request to get more clusters. When you receive a response with no <code>NextToken</code> (or an empty or null value), that means there are no more clusters to get.</p> fn describe_clusters( &self, input: DescribeClustersRequest, ) -> RusotoFuture<DescribeClustersResponse, DescribeClustersError>; /// <p>Claims an AWS CloudHSM cluster by submitting the cluster certificate issued by your issuing certificate authority (CA) and the CA's root certificate. Before you can claim a cluster, you must sign the cluster's certificate signing request (CSR) with your issuing CA. 
To get the cluster's CSR, use <a>DescribeClusters</a>.</p> fn initialize_cluster( &self, input: InitializeClusterRequest, ) -> RusotoFuture<InitializeClusterResponse, InitializeClusterError>; /// <p>Gets a list of tags for the specified AWS CloudHSM cluster.</p> <p>This is a paginated operation, which means that each response might contain only a subset of all the tags. When the response contains only a subset of tags, it includes a <code>NextToken</code> value. Use this value in a subsequent <code>ListTags</code> request to get more tags. When you receive a response with no <code>NextToken</code> (or an empty or null value), that means there are no more tags to get.</p> fn list_tags(&self, input: ListTagsRequest) -> RusotoFuture<ListTagsResponse, ListTagsError>; /// <p>Adds or overwrites one or more tags for the specified AWS CloudHSM cluster.</p> fn tag_resource( &self, input: TagResourceRequest, ) -> RusotoFuture<TagResourceResponse, TagResourceError>; /// <p>Removes the specified tag or tags from the specified AWS CloudHSM cluster.</p> fn untag_resource( &self, input: UntagResourceRequest, ) -> RusotoFuture<UntagResourceResponse, UntagResourceError>; } /// A client for the CloudHSM V2 API. #[derive(Clone)] pub struct CloudHsmv2Client { client: Client, region: region::Region, } impl CloudHsmv2Client { /// Creates a client backed by the default tokio event loop. /// /// The client will use the default credentials provider and tls client. 
pub fn new(region: region::Region) -> CloudHsmv2Client { CloudHsmv2Client { client: Client::shared(), region: region, } } pub fn new_with<P, D>( request_dispatcher: D, credentials_provider: P, region: region::Region, ) -> CloudHsmv2Client where P: ProvideAwsCredentials + Send + Sync + 'static, P::Future: Send, D: DispatchSignedRequest + Send + Sync + 'static, D::Future: Send, { CloudHsmv2Client { client: Client::new_with(credentials_provider, request_dispatcher), region: region, } } } impl CloudHsmv2 for CloudHsmv2Client { fn copy_backup_to_region( &self, input: CopyBackupToRegionRequest, ) -> RusotoFuture<CopyBackupToRegionResponse, CopyBackupToRegionError> { let mut request = SignedRequest::new("POST", "cloudhsm", &self.region, "/"); request.set_endpoint_prefix("cloudhsmv2".to_string()); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "BaldrApiService.CopyBackupToRegion"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); self.client.sign_and_dispatch(request, |response| { if response.status.is_success() { Box::new(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<CopyBackupToRegionResponse>( String::from_utf8_lossy(body.as_ref()).as_ref(), ) .unwrap() })) } else { Box::new( response .buffer() .from_err() .and_then(|response| Err(CopyBackupToRegionError::from_response(response))), ) } }) } /// <p>Creates a new AWS CloudHSM cluster.</p> fn create_cluster( &self, input: CreateClusterRequest, ) -> RusotoFuture<CreateClusterResponse, CreateClusterError> { let mut request = SignedRequest::new("POST", "cloudhsm", &self.region, "/"); request.set_endpoint_prefix("cloudhsmv2".to_string()); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "BaldrApiService.CreateCluster"); let encoded = 
serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); self.client.sign_and_dispatch(request, |response| { if response.status.is_success() { Box::new(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<CreateClusterResponse>( String::from_utf8_lossy(body.as_ref()).as_ref(), ) .unwrap() })) } else { Box::new( response .buffer() .from_err() .and_then(|response| Err(CreateClusterError::from_response(response))), ) } }) } /// <p>Creates a new hardware security module (HSM) in the specified AWS CloudHSM cluster.</p> fn create_hsm( &self, input: CreateHsmRequest, ) -> RusotoFuture<CreateHsmResponse, CreateHsmError> { let mut request = SignedRequest::new("POST", "cloudhsm", &self.region, "/"); request.set_endpoint_prefix("cloudhsmv2".to_string()); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "BaldrApiService.CreateHsm"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); self.client.sign_and_dispatch(request, |response| { if response.status.is_success() { Box::new(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<CreateHsmResponse>( String::from_utf8_lossy(body.as_ref()).as_ref(), ) .unwrap() })) } else { Box::new( response .buffer() .from_err() .and_then(|response| Err(CreateHsmError::from_response(response))), ) } }) } /// <p>Deletes the specified AWS CloudHSM cluster. Before you can delete a cluster, you must delete all HSMs in the cluster. To see if the cluster contains any HSMs, use <a>DescribeClusters</a>. 
To delete an HSM, use <a>DeleteHsm</a>.</p> fn delete_cluster( &self, input: DeleteClusterRequest, ) -> RusotoFuture<DeleteClusterResponse, DeleteClusterError> { let mut request = SignedRequest::new("POST", "cloudhsm", &self.region, "/"); request.set_endpoint_prefix("cloudhsmv2".to_string()); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "BaldrApiService.DeleteCluster"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); self.client.sign_and_dispatch(request, |response| { if response.status.is_success() { Box::new(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<DeleteClusterResponse>( String::from_utf8_lossy(body.as_ref()).as_ref(), ) .unwrap() })) } else { Box::new( response .buffer() .from_err() .and_then(|response| Err(DeleteClusterError::from_response(response))), ) } }) } /// <p>Deletes the specified HSM. To specify an HSM, you can use its identifier (ID), the IP address of the HSM's elastic network interface (ENI), or the ID of the HSM's ENI. You need to specify only one of these values. 
To find these values, use <a>DescribeClusters</a>.</p> fn delete_hsm( &self, input: DeleteHsmRequest, ) -> RusotoFuture<DeleteHsmResponse, DeleteHsmError> { let mut request = SignedRequest::new("POST", "cloudhsm", &self.region, "/"); request.set_endpoint_prefix("cloudhsmv2".to_string()); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "BaldrApiService.DeleteHsm"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); self.client.sign_and_dispatch(request, |response| { if response.status.is_success() { Box::new(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<DeleteHsmResponse>( String::from_utf8_lossy(body.as_ref()).as_ref(), ) .unwrap() })) } else { Box::new( response .buffer() .from_err() .and_then(|response| Err(DeleteHsmError::from_response(response))), ) } }) } /// <p>Gets information about backups of AWS CloudHSM clusters.</p> <p>This is a paginated operation, which means that each response might contain only a subset of all the backups. When the response contains only a subset of backups, it includes a <code>NextToken</code> value. Use this value in a subsequent <code>DescribeBackups</code> request to get more backups. 
When you receive a response with no <code>NextToken</code> (or an empty or null value), that means there are no more backups to get.</p> fn describe_backups( &self, input: DescribeBackupsRequest, ) -> RusotoFuture<DescribeBackupsResponse, DescribeBackupsError> { let mut request = SignedRequest::new("POST", "cloudhsm", &self.region, "/"); request.set_endpoint_prefix("cloudhsmv2".to_string()); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "BaldrApiService.DescribeBackups"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); self.client.sign_and_dispatch(request, |response| { if response.status.is_success() { Box::new(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<DescribeBackupsResponse>( String::from_utf8_lossy(body.as_ref()).as_ref(), ) .unwrap() })) } else { Box::new( response .buffer() .from_err() .and_then(|response| Err(DescribeBackupsError::from_response(response))), ) } }) } /// <p>Gets information about AWS CloudHSM clusters.</p> <p>This is a paginated operation, which means that each response might contain only a subset of all the clusters. When the response contains only a subset of clusters, it includes a <code>NextToken</code> value. Use this value in a subsequent <code>DescribeClusters</code> request to get more clusters. 
When you receive a response with no <code>NextToken</code> (or an empty or null value), that means there are no more clusters to get.</p> fn describe_clusters( &self, input: DescribeClustersRequest, ) -> RusotoFuture<DescribeClustersResponse, DescribeClustersError> { let mut request = SignedRequest::new("POST", "cloudhsm", &self.region, "/"); request.set_endpoint_prefix("cloudhsmv2".to_string()); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "BaldrApiService.DescribeClusters"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); self.client.sign_and_dispatch(request, |response| { if response.status.is_success() { Box::new(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<DescribeClustersResponse>( String::from_utf8_lossy(body.as_ref()).as_ref(), ) .unwrap() })) } else { Box::new( response .buffer() .from_err() .and_then(|response| Err(DescribeClustersError::from_response(response))), ) } }) } /// <p>Claims an AWS CloudHSM cluster by submitting the cluster certificate issued by your issuing certificate authority (CA) and the CA's root certificate. Before you can claim a cluster, you must sign the cluster's certificate signing request (CSR) with your issuing CA. 
To get the cluster's CSR, use <a>DescribeClusters</a>.</p> fn initialize_cluster( &self, input: InitializeClusterRequest, ) -> RusotoFuture<InitializeClusterResponse, InitializeClusterError> { let mut request = SignedRequest::new("POST", "cloudhsm", &self.region, "/"); request.set_endpoint_prefix("cloudhsmv2".to_string()); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "BaldrApiService.InitializeCluster"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); self.client.sign_and_dispatch(request, |response| { if response.status.is_success() { Box::new(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<InitializeClusterResponse>( String::from_utf8_lossy(body.as_ref()).as_ref(), ) .unwrap() })) } else { Box::new( response .buffer() .from_err() .and_then(|response| Err(InitializeClusterError::from_response(response))), ) } }) } /// <p>Gets a list of tags for the specified AWS CloudHSM cluster.</p> <p>This is a paginated operation, which means that each response might contain only a subset of all the tags. When the response contains only a subset of tags, it includes a <code>NextToken</code> value. Use this value in a subsequent <code>ListTags</code> request to get more tags. 
When you receive a response with no <code>NextToken</code> (or an empty or null value), that means there are no more tags to get.</p> fn list_tags(&self, input: ListTagsRequest) -> RusotoFuture<ListTagsResponse, ListTagsError> { let mut request = SignedRequest::new("POST", "cloudhsm", &self.region, "/"); request.set_endpoint_prefix("cloudhsmv2".to_string()); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "BaldrApiService.ListTags"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); self.client.sign_and_dispatch(request, |response| { if response.status.is_success() { Box::new(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<ListTagsResponse>( String::from_utf8_lossy(body.as_ref()).as_ref(), ) .unwrap() })) } else { Box::new( response .buffer() .from_err() .and_then(|response| Err(ListTagsError::from_response(response))), ) } }) } /// <p>Adds or overwrites one or more tags for the specified AWS CloudHSM cluster.</p> fn tag_resource( &self, input: TagResourceRequest, ) -> RusotoFuture<TagResourceResponse, TagResourceError> { let mut request = SignedRequest::new("POST", "cloudhsm", &self.region, "/"); request.set_endpoint_prefix("cloudhsmv2".to_string()); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "BaldrApiService.TagResource"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); self.client.sign_and_dispatch(request, |response| { if response.status.is_success() { Box::new(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<TagResourceResponse>( String::from_utf8_lossy(body.as_ref()).as_ref(), ) .unwrap() })) } else { Box::new( 
response .buffer() .from_err() .and_then(|response| Err(TagResourceError::from_response(response))), ) } }) } /// <p>Removes the specified tag or tags from the specified AWS CloudHSM cluster.</p> fn untag_resource( &self, input: UntagResourceRequest, ) -> RusotoFuture<UntagResourceResponse, UntagResourceError> { let mut request = SignedRequest::new("POST", "cloudhsm", &self.region, "/"); request.set_endpoint_prefix("cloudhsmv2".to_string()); request.set_content_type("application/x-amz-json-1.1".to_owned()); request.add_header("x-amz-target", "BaldrApiService.UntagResource"); let encoded = serde_json::to_string(&input).unwrap(); request.set_payload(Some(encoded.into_bytes())); self.client.sign_and_dispatch(request, |response| { if response.status.is_success() { Box::new(response.buffer().from_err().map(|response| { let mut body = response.body; if body.is_empty() || body == b"null" { body = b"{}".to_vec(); } serde_json::from_str::<UntagResourceResponse>( String::from_utf8_lossy(body.as_ref()).as_ref(), ) .unwrap() })) } else { Box::new( response .buffer() .from_err() .and_then(|response| Err(UntagResourceError::from_response(response))), ) } }) } } #[cfg(test)] mod protocol_tests {}
45.330206
507
0.632455
ed775dd79be7734033b95b30ffda658450418ef2
4,183
// Generated from definition io.k8s.api.apps.v1.StatefulSetUpdateStrategy /// StatefulSetUpdateStrategy indicates the strategy that the StatefulSet controller will use to perform updates. It includes any additional parameters necessary to perform the update for the indicated strategy. #[derive(Clone, Debug, Default, PartialEq)] pub struct StatefulSetUpdateStrategy { /// RollingUpdate is used to communicate parameters when Type is RollingUpdateStatefulSetStrategyType. pub rolling_update: Option<crate::v1_10::api::apps::v1::RollingUpdateStatefulSetStrategy>, /// Type indicates the type of the StatefulSetUpdateStrategy. Default is RollingUpdate. pub type_: Option<String>, } impl<'de> serde::Deserialize<'de> for StatefulSetUpdateStrategy { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> { #[allow(non_camel_case_types)] enum Field { Key_rolling_update, Key_type_, Other, } impl<'de> serde::Deserialize<'de> for Field { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> { struct Visitor; impl<'de> serde::de::Visitor<'de> for Visitor { type Value = Field; fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "field identifier") } fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error { Ok(match v { "rollingUpdate" => Field::Key_rolling_update, "type" => Field::Key_type_, _ => Field::Other, }) } } deserializer.deserialize_identifier(Visitor) } } struct Visitor; impl<'de> serde::de::Visitor<'de> for Visitor { type Value = StatefulSetUpdateStrategy; fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "struct StatefulSetUpdateStrategy") } fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> { let mut value_rolling_update: Option<crate::v1_10::api::apps::v1::RollingUpdateStatefulSetStrategy> = None; let mut value_type_: Option<String> = None; while 
let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? { match key { Field::Key_rolling_update => value_rolling_update = serde::de::MapAccess::next_value(&mut map)?, Field::Key_type_ => value_type_ = serde::de::MapAccess::next_value(&mut map)?, Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; }, } } Ok(StatefulSetUpdateStrategy { rolling_update: value_rolling_update, type_: value_type_, }) } } deserializer.deserialize_struct( "StatefulSetUpdateStrategy", &[ "rollingUpdate", "type", ], Visitor, ) } } impl serde::Serialize for StatefulSetUpdateStrategy { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer { let mut state = serializer.serialize_struct( "StatefulSetUpdateStrategy", self.rolling_update.as_ref().map_or(0, |_| 1) + self.type_.as_ref().map_or(0, |_| 1), )?; if let Some(value) = &self.rolling_update { serde::ser::SerializeStruct::serialize_field(&mut state, "rollingUpdate", value)?; } if let Some(value) = &self.type_ { serde::ser::SerializeStruct::serialize_field(&mut state, "type", value)?; } serde::ser::SerializeStruct::end(state) } }
41.415842
211
0.558212
7955397892b7726fc1d1891b26231a03e140c447
7,693
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Experimental extensions to `std` for Windows. //! //! For now, this module is limited to extracting handles, file //! descriptors, and sockets, but its functionality will grow over //! time. #![stable(feature = "rust1", since = "1.0.0")] #[unstable(feature = "io_ext", reason = "organization may change slightly and the primitives \ provided may be tweaked")] pub mod io { use fs; use libc; use net; use sys_common::AsInner; #[allow(deprecated)] use old_io; /// Raw HANDLEs. pub type Handle = libc::HANDLE; /// Raw SOCKETs. pub type Socket = libc::SOCKET; /// Extract raw handles. pub trait AsRawHandle { /// Extract the raw handle, without taking any ownership. fn as_raw_handle(&self) -> Handle; } #[allow(deprecated)] impl AsRawHandle for old_io::fs::File { fn as_raw_handle(&self) -> Handle { self.as_inner().handle() } } impl AsRawHandle for fs::File { fn as_raw_handle(&self) -> Handle { self.as_inner().handle().raw() } } #[allow(deprecated)] impl AsRawHandle for old_io::pipe::PipeStream { fn as_raw_handle(&self) -> Handle { self.as_inner().handle() } } #[allow(deprecated)] impl AsRawHandle for old_io::net::pipe::UnixStream { fn as_raw_handle(&self) -> Handle { self.as_inner().handle() } } #[allow(deprecated)] impl AsRawHandle for old_io::net::pipe::UnixListener { fn as_raw_handle(&self) -> Handle { self.as_inner().handle() } } #[allow(deprecated)] impl AsRawHandle for old_io::net::pipe::UnixAcceptor { fn as_raw_handle(&self) -> Handle { self.as_inner().handle() } } /// Extract raw sockets. 
pub trait AsRawSocket { fn as_raw_socket(&self) -> Socket; } #[allow(deprecated)] impl AsRawSocket for old_io::net::tcp::TcpStream { fn as_raw_socket(&self) -> Socket { self.as_inner().fd() } } #[allow(deprecated)] impl AsRawSocket for old_io::net::tcp::TcpListener { fn as_raw_socket(&self) -> Socket { self.as_inner().socket() } } #[allow(deprecated)] impl AsRawSocket for old_io::net::tcp::TcpAcceptor { fn as_raw_socket(&self) -> Socket { self.as_inner().socket() } } #[allow(deprecated)] impl AsRawSocket for old_io::net::udp::UdpSocket { fn as_raw_socket(&self) -> Socket { self.as_inner().fd() } } impl AsRawSocket for net::TcpStream { fn as_raw_socket(&self) -> Socket { *self.as_inner().socket().as_inner() } } impl AsRawSocket for net::TcpListener { fn as_raw_socket(&self) -> Socket { *self.as_inner().socket().as_inner() } } impl AsRawSocket for net::UdpSocket { fn as_raw_socket(&self) -> Socket { *self.as_inner().socket().as_inner() } } } /// Windows-specific extensions to the primitives in the `std::ffi` module. #[stable(feature = "rust1", since = "1.0.0")] pub mod ffi { use ffi::{OsString, OsStr}; use sys::os_str::Buf; use sys_common::wtf8::Wtf8Buf; use sys_common::{FromInner, AsInner}; pub use sys_common::wtf8::EncodeWide; /// Windows-specific extensions to `OsString`. #[stable(feature = "rust1", since = "1.0.0")] pub trait OsStringExt { /// Create an `OsString` from a potentially ill-formed UTF-16 slice of /// 16-bit code units. /// /// This is lossless: calling `.encode_wide()` on the resulting string /// will always return the original code units. #[stable(feature = "rust1", since = "1.0.0")] fn from_wide(wide: &[u16]) -> Self; } #[stable(feature = "rust1", since = "1.0.0")] impl OsStringExt for OsString { fn from_wide(wide: &[u16]) -> OsString { FromInner::from_inner(Buf { inner: Wtf8Buf::from_wide(wide) }) } } /// Windows-specific extensions to `OsStr`. 
#[stable(feature = "rust1", since = "1.0.0")] pub trait OsStrExt { /// Re-encode an `OsStr` as a wide character sequence, /// i.e. potentially ill-formed UTF-16. /// /// This is lossless. Note that the encoding does not include a final /// null. #[stable(feature = "rust1", since = "1.0.0")] fn encode_wide(&self) -> EncodeWide; } #[stable(feature = "rust1", since = "1.0.0")] impl OsStrExt for OsStr { fn encode_wide(&self) -> EncodeWide { self.as_inner().inner.encode_wide() } } } /// Windows-specific extensions for the primitives in `std::fs` #[unstable(feature = "fs_ext", reason = "may require more thought/methods")] pub mod fs { use fs::OpenOptions; use sys_common::AsInnerMut; /// Windows-specific extensions to `OpenOptions` pub trait OpenOptionsExt { /// Override the `dwDesiredAccess` argument to the call to `CreateFile` /// with the specified value. fn desired_access(&mut self, access: i32) -> &mut Self; /// Override the `dwCreationDisposition` argument to the call to /// `CreateFile` with the specified value. /// /// This will override any values of the standard `create` flags, for /// example. fn creation_disposition(&mut self, val: i32) -> &mut Self; /// Override the `dwFlagsAndAttributes` argument to the call to /// `CreateFile` with the specified value. /// /// This will override any values of the standard flags on the /// `OpenOptions` structure. fn flags_and_attributes(&mut self, val: i32) -> &mut Self; /// Override the `dwShareMode` argument to the call to `CreateFile` with /// the specified value. /// /// This will override any values of the standard flags on the /// `OpenOptions` structure. 
fn share_mode(&mut self, val: i32) -> &mut Self; } impl OpenOptionsExt for OpenOptions { fn desired_access(&mut self, access: i32) -> &mut OpenOptions { self.as_inner_mut().desired_access(access); self } fn creation_disposition(&mut self, access: i32) -> &mut OpenOptions { self.as_inner_mut().creation_disposition(access); self } fn flags_and_attributes(&mut self, access: i32) -> &mut OpenOptions { self.as_inner_mut().flags_and_attributes(access); self } fn share_mode(&mut self, access: i32) -> &mut OpenOptions { self.as_inner_mut().share_mode(access); self } } } /// A prelude for conveniently writing platform-specific code. /// /// Includes all extension traits, and some important type definitions. #[stable(feature = "rust1", since = "1.0.0")] pub mod prelude { #[doc(no_inline)] pub use super::io::{Socket, Handle, AsRawSocket, AsRawHandle}; #[doc(no_inline)] #[stable(feature = "rust1", since = "1.0.0")] pub use super::ffi::{OsStrExt, OsStringExt}; #[doc(no_inline)] pub use super::fs::OpenOptionsExt; }
32.188285
82
0.603016
7205ce3722054fee48e3badc8281b4410be2b194
26,933
use baseview::{Event, EventStatus, Window, WindowHandler, WindowScalePolicy}; use futures::StreamExt; use iced_futures::futures; use iced_futures::futures::channel::mpsc; use iced_graphics::Viewport; use iced_native::{event::Status, Cache, UserInterface}; use iced_native::{Debug, Executor, Runtime, Size}; use mpsc::SendError; use raw_window_handle::{HasRawWindowHandle, RawWindowHandle}; use std::cell::RefCell; use std::mem::ManuallyDrop; use std::pin::Pin; use std::rc::Rc; use crate::application::State; use crate::{proxy::Proxy, Application, Compositor, Renderer, Settings}; pub(crate) enum RuntimeEvent<Message: 'static + Send> { Baseview((baseview::Event, bool)), UserEvent(Message), MainEventsCleared, UpdateSwapChain, OnFrame, WillClose, } /// This struct creates subscriptions for common window events. #[allow(missing_debug_implementations)] pub struct WindowSubs<Message: Clone> { /// The message to send right before each rendering frame. pub on_frame: Option<Message>, /// The message to send when the window is about to close. pub on_window_will_close: Option<Message>, } impl<Message: Clone> Default for WindowSubs<Message> { fn default() -> Self { WindowSubs { on_frame: None, on_window_will_close: None, } } } #[derive(Debug, Clone, Copy)] enum WindowQueueMessage { CloseWindow, } /// Used to request things from the `baseview` window. #[allow(missing_debug_implementations)] pub struct WindowQueue { tx: mpsc::UnboundedSender<WindowQueueMessage>, } impl WindowQueue { fn new() -> (Self, mpsc::UnboundedReceiver<WindowQueueMessage>) { let (tx, rx) = mpsc::unbounded(); (Self { tx }, rx) } /// Quit the current application and close the window. pub fn close_window(&mut self) -> Result<(), SendError> { self.tx.start_send(WindowQueueMessage::CloseWindow) } } /// Use this to send custom events to the iced window. /// /// Please note this channel is ***not*** realtime-safe and should never be /// be used to send events from the audio thread. 
Use a realtime-safe ring /// buffer instead. #[allow(missing_debug_implementations)] pub struct WindowHandle<Message: 'static + Send> { bv_handle: baseview::WindowHandle, tx: mpsc::UnboundedSender<RuntimeEvent<Message>>, } impl<Message: 'static + Send> WindowHandle<Message> { pub(crate) fn new( bv_handle: baseview::WindowHandle, tx: mpsc::UnboundedSender<RuntimeEvent<Message>>, ) -> Self { Self { bv_handle, tx } } /// Send a custom `baseview::Event` to the window. /// /// Please note this channel is ***not*** realtime-safe and should never be /// be used to send events from the audio thread. Use a realtime-safe ring /// buffer instead. pub fn send_baseview_event( &mut self, event: baseview::Event, ) -> Result<(), SendError> { self.tx.start_send(RuntimeEvent::Baseview((event, false))) } /// Send a custom message to the window. /// /// Please note this channel is ***not*** realtime-safe and should never be /// used to send events from the audio thread. Use a realtime-safe ring /// buffer instead. pub fn send_message(&mut self, msg: Message) -> Result<(), SendError> { self.tx.start_send(RuntimeEvent::UserEvent(msg)) } /// Signal the window to close. pub fn close_window(&mut self) { self.bv_handle.close(); } /// Returns `true` if the window is still open, and `false` if the window /// was closed/dropped. 
pub fn is_open(&self) -> bool { self.bv_handle.is_open() } } unsafe impl<Message: 'static + Send> HasRawWindowHandle for WindowHandle<Message> { fn raw_window_handle(&self) -> RawWindowHandle { self.bv_handle.raw_window_handle() } } /// Handles an iced_baseview application #[allow(missing_debug_implementations)] pub struct IcedWindow<A: Application + 'static + Send> { sender: mpsc::UnboundedSender<RuntimeEvent<A::Message>>, instance: Pin<Box<dyn futures::Future<Output = ()>>>, runtime_context: futures::task::Context<'static>, runtime_rx: mpsc::UnboundedReceiver<A::Message>, window_queue_rx: mpsc::UnboundedReceiver<WindowQueueMessage>, event_status: Rc<RefCell<EventStatus>>, } impl<A: Application + 'static + Send> IcedWindow<A> { fn new( window: &mut baseview::Window<'_>, flags: A::Flags, scale_policy: WindowScalePolicy, logical_width: f64, logical_height: f64, sender: mpsc::UnboundedSender<RuntimeEvent<A::Message>>, receiver: mpsc::UnboundedReceiver<RuntimeEvent<A::Message>>, ) -> IcedWindow<A> { use futures::task; #[cfg(feature = "wgpu")] use iced_graphics::window::Compositor as IGCompositor; #[cfg(feature = "glow")] #[cfg(not(feature = "wgpu"))] use iced_graphics::window::GLCompositor as IGCompositor; let mut debug = Debug::new(); debug.startup_started(); let (runtime_tx, runtime_rx) = mpsc::unbounded::<A::Message>(); let mut runtime = { let proxy = Proxy::new(runtime_tx); let executor = <A::Executor as Executor>::new().unwrap(); Runtime::new(executor, proxy) }; let (application, init_command) = { let flags = flags; runtime.enter(|| A::new(flags)) }; let mut window_subs = WindowSubs::default(); let subscription = application.subscription(&mut window_subs); runtime.spawn(init_command); runtime.track(subscription); // Assume scale for now until there is an event with a new one. 
let scale = match scale_policy { WindowScalePolicy::ScaleFactor(scale) => scale, WindowScalePolicy::SystemScaleFactor => 1.0, }; let physical_size = Size::new( (logical_width * scale) as u32, (logical_height * scale) as u32, ); let viewport = Viewport::with_physical_size(physical_size, scale); let renderer_settings = A::renderer_settings(); #[cfg(feature = "wgpu")] let (mut compositor, renderer) = <Compositor as IGCompositor>::new(renderer_settings).unwrap(); #[cfg(feature = "glow")] #[cfg(not(feature = "wgpu"))] let (context, compositor, renderer) = { let context = raw_gl_context::GlContext::create(window, renderer_settings.0) .unwrap(); context.make_current(); #[allow(unsafe_code)] let (compositor, renderer) = unsafe { <Compositor as IGCompositor>::new(renderer_settings.1, |s| { context.get_proc_address(s) }) .unwrap() }; context.make_not_current(); (context, compositor, renderer) }; #[cfg(feature = "wgpu")] let surface = compositor.create_surface(window); let state = State::new(&application, viewport, scale_policy); let event_status = Rc::new(RefCell::new(EventStatus::Ignored)); let (window_queue, window_queue_rx) = WindowQueue::new(); #[cfg(feature = "wgpu")] let instance = Box::pin(run_instance( application, compositor, renderer, runtime, debug, receiver, window_queue, surface, state, window_subs, event_status.clone(), )); #[cfg(feature = "glow")] #[cfg(not(feature = "wgpu"))] let instance = Box::pin(run_instance( application, compositor, renderer, runtime, debug, receiver, window_queue, context, state, window_subs, event_status.clone(), )); let runtime_context = task::Context::from_waker(task::noop_waker_ref()); Self { sender, instance, runtime_context, runtime_rx, window_queue_rx, event_status, } } /// Open a new child window. /// /// * `parent` - The parent window. /// * `settings` - The settings of the window. 
pub fn open_parented<P>( parent: &P, settings: Settings<A::Flags>, ) -> WindowHandle<A::Message> where P: HasRawWindowHandle, { let scale_policy = settings.window.scale; let logical_width = settings.window.size.width as f64; let logical_height = settings.window.size.height as f64; let flags = settings.flags; let (sender, receiver) = mpsc::unbounded(); let sender_clone = sender.clone(); let bv_handle = Window::open_parented( parent, settings.window, move |window: &mut baseview::Window<'_>| -> IcedWindow<A> { IcedWindow::new( window, flags, scale_policy, logical_width, logical_height, sender_clone, receiver, ) }, ); WindowHandle::new(bv_handle, sender) } /// Open a new window as if it had a parent window. /// /// * `settings` - The settings of the window. pub fn open_as_if_parented( settings: Settings<A::Flags>, ) -> WindowHandle<A::Message> { let scale_policy = settings.window.scale; let logical_width = settings.window.size.width as f64; let logical_height = settings.window.size.height as f64; let flags = settings.flags; let (sender, receiver) = mpsc::unbounded(); let sender_clone = sender.clone(); let bv_handle = Window::open_as_if_parented( settings.window, move |window: &mut baseview::Window<'_>| -> IcedWindow<A> { IcedWindow::new( window, flags, scale_policy, logical_width, logical_height, sender_clone, receiver, ) }, ); WindowHandle::new(bv_handle, sender) } /// Open a new window that blocks the current thread until the window is destroyed. /// /// * `settings` - The settings of the window. 
pub fn open_blocking(settings: Settings<A::Flags>) { let scale_policy = settings.window.scale; let logical_width = settings.window.size.width as f64; let logical_height = settings.window.size.height as f64; let flags = settings.flags; let (sender, receiver) = mpsc::unbounded(); Window::open_blocking( settings.window, move |window: &mut baseview::Window<'_>| -> IcedWindow<A> { IcedWindow::new( window, flags, scale_policy, logical_width, logical_height, sender, receiver, ) }, ); } } impl<A: Application + 'static + Send> WindowHandler for IcedWindow<A> { fn on_frame(&mut self, window: &mut Window<'_>) { // Send event to render the frame. self.sender .start_send(RuntimeEvent::UpdateSwapChain) .expect("Send event"); // Flush all messages. This will block until the instance is finished. let _ = self.instance.as_mut().poll(&mut self.runtime_context); // Poll subscriptions and send the corresponding messages. while let Ok(Some(message)) = self.runtime_rx.try_next() { self.sender .start_send(RuntimeEvent::UserEvent(message)) .expect("Send event"); } // Send the event to the instance. self.sender .start_send(RuntimeEvent::MainEventsCleared) .expect("Send event"); // Send event to render the frame. self.sender .start_send(RuntimeEvent::OnFrame) .expect("Send event"); // Flush all messages. This will block until the instance is finished. let _ = self.instance.as_mut().poll(&mut self.runtime_context); while let Ok(Some(msg)) = self.window_queue_rx.try_next() { match msg { WindowQueueMessage::CloseWindow => { window.close(); } } } } fn on_event( &mut self, window: &mut Window<'_>, event: Event, ) -> EventStatus { let status = if requests_exit(&event) { self.sender .start_send(RuntimeEvent::WillClose) .expect("Send event"); // Flush all messages so the application receives the close event. This will block until the instance is finished. let _ = self.instance.as_mut().poll(&mut self.runtime_context); EventStatus::Ignored } else { // Send the event to the instance. 
self.sender .start_send(RuntimeEvent::Baseview((event, true))) .expect("Send event"); // Flush all messages so the application receives the event. This will block until the instance is finished. let _ = self.instance.as_mut().poll(&mut self.runtime_context); // TODO: make this Copy *self.event_status.borrow() }; while let Ok(Some(msg)) = self.window_queue_rx.try_next() { match msg { WindowQueueMessage::CloseWindow => { window.close(); } } } status } } // This may appear to be asynchronous, but it is actually a blocking future on the same thread. // This is a necessary workaround for the issue described here: // https://github.com/hecrj/iced/pull/597 #[allow(clippy::too_many_arguments)] async fn run_instance<A, E>( mut application: A, mut compositor: Compositor, mut renderer: Renderer, mut runtime: Runtime<E, Proxy<A::Message>, A::Message>, mut debug: Debug, mut receiver: mpsc::UnboundedReceiver<RuntimeEvent<A::Message>>, mut window_queue: WindowQueue, #[rustfmt::skip] #[cfg(feature = "wgpu")] surface: <Compositor as iced_graphics::window::Compositor>::Surface, #[rustfmt::skip] #[cfg(feature = "glow")] #[cfg(not(feature = "wgpu"))] gl_context: raw_gl_context::GlContext, mut state: State<A>, mut window_subs: WindowSubs<A::Message>, event_status: Rc<RefCell<EventStatus>>, ) where A: Application + 'static + Send, E: Executor + 'static, { #[cfg(feature = "wgpu")] use iced_graphics::window::Compositor as IGCompositor; #[cfg(feature = "glow")] #[cfg(not(feature = "wgpu"))] use iced_graphics::window::GLCompositor as IGCompositor; //let clipboard = Clipboard::new(window); // TODO: clipboard let mut viewport_version = state.viewport_version(); #[cfg(feature = "wgpu")] let mut swap_chain = { let physical_size = state.physical_size(); compositor.create_swap_chain( &surface, physical_size.width, physical_size.height, ) }; let mut user_interface = ManuallyDrop::new(build_user_interface( &mut application, Cache::default(), &mut renderer, state.logical_size(), &mut debug, )); let 
mut primitive = user_interface.draw(&mut renderer, state.cursor_position()); let mut mouse_interaction = iced_native::mouse::Interaction::default(); let mut events = Vec::new(); let mut messages = Vec::new(); let mut redraw_requested = true; let mut did_process_event = false; let mut modifiers = iced_core::keyboard::Modifiers { shift: false, control: false, alt: false, logo: false, }; debug.startup_finished(); let mut clipboard = iced_native::clipboard::Null; // TODO: clipboard while let Some(event) = receiver.next().await { match event { RuntimeEvent::Baseview((event, do_send_status)) => { state.update(&event, &mut debug); crate::conversion::baseview_to_iced_events( event, &mut events, &mut modifiers, ); if events.is_empty() { if do_send_status { *event_status.borrow_mut() = EventStatus::Ignored; } continue; } debug.event_processing_started(); let statuses = user_interface.update( &events, state.cursor_position(), &renderer, &mut clipboard, // TODO: clipboard &mut messages, ); if do_send_status { let mut final_status = EventStatus::Ignored; for status in &statuses { if let Status::Captured = status { final_status = EventStatus::Captured; break; } } *event_status.borrow_mut() = final_status; } debug.event_processing_finished(); for event in events.drain(..).zip(statuses.into_iter()) { runtime.broadcast(event); } did_process_event = true; } RuntimeEvent::MainEventsCleared => { if let Some(message) = &window_subs.on_frame { messages.push(message.clone()); } if !did_process_event && events.is_empty() && messages.is_empty() { continue; } did_process_event = false; if !events.is_empty() { debug.event_processing_started(); let statuses = user_interface.update( &events, state.cursor_position(), &renderer, &mut clipboard, // TODO: clipboard &mut messages, ); debug.event_processing_finished(); for event in events.drain(..).zip(statuses.into_iter()) { runtime.broadcast(event); } } if !messages.is_empty() { let cache = ManuallyDrop::into_inner(user_interface).into_cache(); // 
Update application update( &mut application, &mut runtime, &mut debug, &mut messages, &mut window_subs, &mut window_queue, ); // Update window state.synchronize(&application); user_interface = ManuallyDrop::new(build_user_interface( &mut application, cache, &mut renderer, state.logical_size(), &mut debug, )); } debug.draw_started(); primitive = user_interface.draw(&mut renderer, state.cursor_position()); debug.draw_finished(); redraw_requested = true; } RuntimeEvent::UserEvent(message) => { messages.push(message); } RuntimeEvent::UpdateSwapChain => { let current_viewport_version = state.viewport_version(); if viewport_version != current_viewport_version { let physical_size = state.physical_size(); #[cfg(feature = "wgpu")] { swap_chain = compositor.create_swap_chain( &surface, physical_size.width, physical_size.height, ); } #[cfg(feature = "glow")] #[cfg(not(feature = "wgpu"))] { gl_context.make_current(); compositor.resize_viewport(physical_size); gl_context.make_not_current(); } let logical_size = state.logical_size(); debug.layout_started(); user_interface = ManuallyDrop::new( ManuallyDrop::into_inner(user_interface) .relayout(logical_size, &mut renderer), ); debug.layout_finished(); debug.draw_started(); primitive = user_interface .draw(&mut renderer, state.cursor_position()); debug.draw_finished(); viewport_version = current_viewport_version; } } RuntimeEvent::OnFrame => { if redraw_requested { debug.render_started(); #[cfg(feature = "wgpu")] let new_mouse_interaction = compositor.draw( &mut renderer, &mut swap_chain, state.viewport(), state.background_color(), &primitive, &debug.overlay(), ); #[cfg(feature = "glow")] #[cfg(not(feature = "wgpu"))] let new_mouse_interaction = { gl_context.make_current(); let new_mouse_interaction = compositor.draw( &mut renderer, state.viewport(), state.background_color(), &primitive, &debug.overlay(), ); gl_context.swap_buffers(); gl_context.make_not_current(); new_mouse_interaction }; debug.render_finished(); if 
new_mouse_interaction != mouse_interaction { // TODO: set window cursor icon /* window.set_cursor_icon(conversion::mouse_interaction( new_mouse_interaction, )); */ mouse_interaction = new_mouse_interaction; } redraw_requested = false; // TODO: Handle animations! // Maybe we can use `ControlFlow::WaitUntil` for this. } } RuntimeEvent::WillClose => { if let Some(message) = &window_subs.on_window_will_close { // Send message to user before exiting the loop. messages.push(message.clone()); let cache = ManuallyDrop::into_inner(user_interface).into_cache(); // Update application update( &mut application, &mut runtime, &mut debug, &mut messages, &mut window_subs, &mut window_queue, ); // Update window state.synchronize(&application); user_interface = ManuallyDrop::new(build_user_interface( &mut application, cache, &mut renderer, state.logical_size(), &mut debug, )); } break; } } } receiver.close(); // Manually drop the user interface drop(ManuallyDrop::into_inner(user_interface)); } /// Returns true if the provided event should cause an [`Application`] to /// exit. pub fn requests_exit(event: &baseview::Event) -> bool { match event { baseview::Event::Window(baseview::WindowEvent::WillClose) => true, #[cfg(target_os = "macos")] baseview::Event::Keyboard(event) => { if event.code == keyboard_types::Code::KeyQ && event.modifiers == keyboard_types::Modifiers::META && event.state == keyboard_types::KeyState::Down { return true; } false } _ => false, } } /// Builds a [`UserInterface`] for the provided [`Application`], logging /// [`struct@Debug`] information accordingly. 
pub fn build_user_interface<'a, A: Application + 'static + Send>( application: &'a mut A, cache: Cache, renderer: &mut Renderer, size: Size, debug: &mut Debug, ) -> UserInterface<'a, A::Message, Renderer> { debug.view_started(); let view = application.view(); debug.view_finished(); debug.layout_started(); let user_interface = UserInterface::build(view, size, cache, renderer); debug.layout_finished(); user_interface } /// Updates an [`Application`] by feeding it the provided messages, spawning any /// resulting [`Command`], and tracking its [`Subscription`]. pub fn update<A: Application, E: Executor>( application: &mut A, runtime: &mut Runtime<E, Proxy<A::Message>, A::Message>, debug: &mut Debug, messages: &mut Vec<A::Message>, window_subs: &mut WindowSubs<A::Message>, window_queue: &mut WindowQueue, ) { for message in messages.drain(..) { debug.log_message(&message); debug.update_started(); let command = runtime.enter(|| application.update(window_queue, message)); debug.update_finished(); runtime.spawn(command); } let subscription = application.subscription(window_subs); runtime.track(subscription); }
31.911137
126
0.529351
fc5130fa50b8439b3788a7c9eb55fdf3e04debc3
6,588
use std::convert::TryInto; use crate::datatype::{Datatype, Endianness}; use crate::view::View; pub type Color = [u8; 4]; fn rgba_from_color(color: colorgrad::Color) -> Color { [ (color.r * 255.0) as u8, (color.g * 255.0) as u8, (color.b * 255.0) as u8, 255, ] } pub trait Style { fn init(&mut self, _view: &View) {} fn color_at_index(&mut self, view: &View, view_index: isize) -> Color; } pub struct Colorful; impl Style for Colorful { fn color_at_index(&mut self, view: &View, view_index: isize) -> Color { if let Some(b) = view.byte_at(view_index) { [b, b.overflowing_mul(2).0, b.overflowing_mul(4).0, 255] } else { [0, 0, 0, 0] } } } pub struct Grayscale; impl Style for Grayscale { fn color_at_index(&mut self, view: &View, view_index: isize) -> Color { if let Some(b) = view.byte_at(view_index) { [b, b, b, 255] } else { [0, 0, 0, 0] } } } pub struct Category; impl Style for Category { fn color_at_index(&mut self, view: &View, view_index: isize) -> Color { if let Some(b) = view.byte_at(view_index) { if b == 0x00 { [0, 0, 0, 255] } else if b == 0xFF { [255, 255, 255, 255] } else if b.is_ascii_alphanumeric() { [60, 178, 255, 255] } else if b.is_ascii_punctuation() { [0, 129, 213, 255] } else if b.is_ascii_whitespace() { [162, 218, 255, 255] } else if b.is_ascii() { [60, 255, 137, 255] } else { [249, 53, 94, 255] } } else { [0, 0, 0, 0] } } } pub struct ColorGradient { byte_color: [Color; 256], } impl ColorGradient { pub fn new(gradient: colorgrad::Gradient) -> Self { let mut byte_color = [[0, 0, 0, 0]; 256]; for (byte, color) in byte_color.iter_mut().enumerate() { let gradient_color = gradient.at((byte as f64) / 255.0f64); *color = rgba_from_color(gradient_color); } ColorGradient { byte_color } } } impl Style for ColorGradient { fn color_at_index(&mut self, view: &View, view_index: isize) -> Color { if let Some(b) = view.byte_at(view_index) { self.byte_color[b as usize] } else { [0, 0, 0, 0] } } } pub struct DatatypeStyle { datatype: Datatype, endianness: Endianness, 
colors: Vec<Color>, range: (f32, f32), } impl DatatypeStyle { pub fn new(datatype: Datatype, endianness: Endianness, range: (f32, f32)) -> Self { let num_colors = 1024; let mut colors = Vec::new(); colors.reserve(num_colors); let gradient = colorgrad::plasma(); for i in 0..num_colors { colors.push(rgba_from_color( gradient.at((i as f64) / (num_colors as f64)), )); } DatatypeStyle { datatype, endianness, colors, range, } } pub fn color_from_float(&self, t: f32) -> Color { let num_colors = self.colors.len(); let index = ((t - f32::EPSILON) * num_colors as f32) as isize; index .try_into() .ok() .and_then(|i: usize| self.colors.get(i)) .copied() .unwrap_or([0, 0, 0, 0]) } } impl Style for DatatypeStyle { fn color_at_index(&mut self, view: &View, view_index: isize) -> Color { view.slice_at(view_index, self.datatype.size()) .and_then(|slice| self.datatype.read_as_float_from(slice, self.endianness)) .map(|t| { let (min, max) = self.range; self.color_from_float((t - min) / (max - min)) }) .unwrap_or([0, 0, 0, 0]) } } pub struct RGBA; impl Style for RGBA { fn color_at_index(&mut self, view: &View, view_index: isize) -> Color { if let Some(int) = view.be_u32_at(view_index) { int.to_be_bytes() } else { [0, 0, 0, 0] } } } pub struct ABGR; impl Style for ABGR { fn color_at_index(&mut self, view: &View, view_index: isize) -> Color { if let Some(int) = view.be_u32_at(view_index) { int.to_le_bytes() } else { [0, 0, 0, 0] } } } pub struct RGB; impl Style for RGB { fn color_at_index(&mut self, view: &View, view_index: isize) -> Color { if let Some([r, g, b]) = view.rgb_at(view_index) { [r, g, b, 255] } else { [0, 0, 0, 0] } } } pub struct BGR; impl Style for BGR { fn color_at_index(&mut self, view: &View, view_index: isize) -> Color { if let Some([b, g, r]) = view.rgb_at(view_index) { [r, g, b, 255] } else { [0, 0, 0, 0] } } } pub struct Entropy { window_size: usize, window_size_f64: f64, counts: [i32; 256], /// Cache the gradient color byte_color: [Color; 256], } impl Entropy { pub 
fn with_window_size(window_size: usize) -> Entropy { let gradient = colorgrad::magma(); let mut byte_color = [[0, 0, 0, 0]; 256]; for (byte, color) in byte_color.iter_mut().enumerate() { let gradient_color = gradient.at((byte as f64) / 255.0f64); *color = rgba_from_color(gradient_color); } Entropy { window_size, window_size_f64: window_size as f64, counts: [0; 256], byte_color, } } } impl Style for Entropy { fn init(&mut self, _: &View) {} fn color_at_index(&mut self, view: &View, view_index: isize) -> Color { if let Some(bytes) = view.slice_at(view_index, self.window_size) { self.counts.fill(0); for byte in bytes.iter() { self.counts[*byte as usize] += 1; } let mut entropy = 0.0f64; for count in self.counts { if count > 0 { let p = (count as f64) / self.window_size_f64; entropy -= p * p.log2(); } } entropy *= 1.0f64 / 8.0f64; let discretized_entropy: usize = ((entropy * self.byte_color.len() as f64) as usize) .clamp(0, self.byte_color.len() - 1); self.byte_color[discretized_entropy] } else { [0, 0, 0, 0] } } }
26.142857
96
0.512599
9c0ef55b3d83acf4f4e329ef426d4963b9b0c80e
5,785
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use hair::*; use repr::*; use std::fmt::{Debug, Formatter, Error}; use std::hash::{Hash, Hasher}; use std::rc::Rc; use self::rustc::middle::def_id::DefId; use self::rustc::middle::infer::InferCtxt; use self::rustc::middle::region::CodeExtent; use self::rustc::middle::subst::{self, Subst, Substs}; use self::rustc::middle::ty::{self, Ty}; use self::rustc_front::hir; use self::syntax::ast; use self::syntax::codemap::Span; use self::syntax::parse::token::{self, special_idents, InternedString}; extern crate rustc; extern crate rustc_front; extern crate syntax; #[derive(Copy, Clone)] pub struct Cx<'a,'tcx:'a> { pub tcx: &'a ty::ctxt<'tcx>, pub infcx: &'a InferCtxt<'a,'tcx>, } impl<'a,'tcx> Cx<'a,'tcx> { pub fn new(infcx: &'a InferCtxt<'a,'tcx>) -> Cx<'a,'tcx> { Cx { tcx: infcx.tcx, infcx: infcx } } } pub use self::pattern::PatNode; impl<'a,'tcx:'a> Hair for Cx<'a, 'tcx> { type VarId = ast::NodeId; type DefId = DefId; type AdtDef = ty::AdtDef<'tcx>; type Name = ast::Name; type InternedString = InternedString; type Bytes = Rc<Vec<u8>>; type Span = Span; type Projection = ty::ProjectionTy<'tcx>; type Substs = &'tcx subst::Substs<'tcx>; type ClosureSubsts = &'tcx ty::ClosureSubsts<'tcx>; type Ty = Ty<'tcx>; type Region = ty::Region; type CodeExtent = CodeExtent; type Pattern = PatNode<'tcx>; type Expr = &'tcx hir::Expr; type Stmt = &'tcx hir::Stmt; type Block = &'tcx hir::Block; type InlineAsm = &'tcx hir::InlineAsm; fn unit_ty(&mut self) -> Ty<'tcx> { self.tcx.mk_nil() } fn usize_ty(&mut self) -> Ty<'tcx> { 
self.tcx.types.usize } fn bool_ty(&mut self) -> Ty<'tcx> { self.tcx.types.bool } fn partial_eq(&mut self, ty: Ty<'tcx>) -> ItemRef<Self> { let eq_def_id = self.tcx.lang_items.eq_trait().unwrap(); self.cmp_method_ref(eq_def_id, "eq", ty) } fn partial_le(&mut self, ty: Ty<'tcx>) -> ItemRef<Self> { let ord_def_id = self.tcx.lang_items.ord_trait().unwrap(); self.cmp_method_ref(ord_def_id, "le", ty) } fn num_variants(&mut self, adt_def: ty::AdtDef<'tcx>) -> usize { adt_def.variants.len() } fn fields(&mut self, adt_def: ty::AdtDef<'tcx>, variant_index: usize) -> Vec<Field<Self>> { adt_def.variants[variant_index] .fields .iter() .enumerate() .map(|(index, field)| { if field.name == special_idents::unnamed_field.name { Field::Indexed(index) } else { Field::Named(field.name) } }) .collect() } fn needs_drop(&mut self, ty: Ty<'tcx>, span: Self::Span) -> bool { if self.infcx.type_moves_by_default(ty, span) { // FIXME(#21859) we should do an add'l check here to determine if // any dtor will execute, but the relevant fn // (`type_needs_drop`) is currently factored into // `librustc_trans`, so we can't easily do so. true } else { // if type implements Copy, cannot require drop false } } fn span_bug(&mut self, span: Self::Span, message: &str) -> ! { self.tcx.sess.span_bug(span, message) } } impl<'a,'tcx:'a> Cx<'a,'tcx> { fn cmp_method_ref(&mut self, trait_def_id: DefId, method_name: &str, arg_ty: Ty<'tcx>) -> ItemRef<Cx<'a,'tcx>> { let method_name = token::intern(method_name); let substs = Substs::new_trait(vec![arg_ty], vec![], arg_ty); for trait_item in self.tcx.trait_items(trait_def_id).iter() { match *trait_item { ty::ImplOrTraitItem::MethodTraitItem(ref method) => { if method.name == method_name { let method_ty = self.tcx.lookup_item_type(method.def_id); let method_ty = method_ty.ty.subst(self.tcx, &substs); return ItemRef { ty: method_ty, def_id: method.def_id, substs: self.tcx.mk_substs(substs), }; } } ty::ImplOrTraitItem::ConstTraitItem(..) 
| ty::ImplOrTraitItem::TypeTraitItem(..) => { } } } self.tcx.sess.bug( &format!("found no method `{}` in `{:?}`", method_name, trait_def_id)); } } // We only need this impl so that we do deriving for things that are // defined relative to the `Hair` trait. See `Hair` trait for more // details. impl<'a,'tcx> PartialEq for Cx<'a,'tcx> { fn eq(&self, _: &Cx<'a,'tcx>) -> bool { panic!("Cx should never ACTUALLY be compared for equality") } } impl<'a,'tcx> Eq for Cx<'a,'tcx> { } impl<'a,'tcx> Hash for Cx<'a,'tcx> { fn hash<H: Hasher>(&self, _: &mut H) { panic!("Cx should never ACTUALLY be hashed") } } impl<'a,'tcx> Debug for Cx<'a,'tcx> { fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> { write!(fmt, "Tcx") } } mod block; mod expr; mod pattern; mod to_ref;
31.440217
95
0.561798
0e1df78495703552a92c3ce660132a3f1aab10d1
5,644
// Generated from definition io.k8s.api.networking.v1.NetworkPolicyPeer /// NetworkPolicyPeer describes a peer to allow traffic from. Only certain combinations of fields are allowed #[derive(Clone, Debug, Default, PartialEq)] pub struct NetworkPolicyPeer { /// IPBlock defines policy on a particular IPBlock. If this field is set then neither of the other fields can be. pub ip_block: Option<crate::v1_11::api::networking::v1::IPBlock>, /// Selects Namespaces using cluster-scoped labels. This field follows standard label selector semantics; if present but empty, it selects all namespaces. /// /// If PodSelector is also set, then the NetworkPolicyPeer as a whole selects the Pods matching PodSelector in the Namespaces selected by NamespaceSelector. Otherwise it selects all Pods in the Namespaces selected by NamespaceSelector. pub namespace_selector: Option<crate::v1_11::apimachinery::pkg::apis::meta::v1::LabelSelector>, /// This is a label selector which selects Pods. This field follows standard label selector semantics; if present but empty, it selects all pods. /// /// If NamespaceSelector is also set, then the NetworkPolicyPeer as a whole selects the Pods matching PodSelector in the Namespaces selected by NamespaceSelector. Otherwise it selects the Pods matching PodSelector in the policy's own Namespace. 
pub pod_selector: Option<crate::v1_11::apimachinery::pkg::apis::meta::v1::LabelSelector>, } impl<'de> serde::Deserialize<'de> for NetworkPolicyPeer { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> { #[allow(non_camel_case_types)] enum Field { Key_ip_block, Key_namespace_selector, Key_pod_selector, Other, } impl<'de> serde::Deserialize<'de> for Field { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> { struct Visitor; impl<'de> serde::de::Visitor<'de> for Visitor { type Value = Field; fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "field identifier") } fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error { Ok(match v { "ipBlock" => Field::Key_ip_block, "namespaceSelector" => Field::Key_namespace_selector, "podSelector" => Field::Key_pod_selector, _ => Field::Other, }) } } deserializer.deserialize_identifier(Visitor) } } struct Visitor; impl<'de> serde::de::Visitor<'de> for Visitor { type Value = NetworkPolicyPeer; fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "struct NetworkPolicyPeer") } fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> { let mut value_ip_block: Option<crate::v1_11::api::networking::v1::IPBlock> = None; let mut value_namespace_selector: Option<crate::v1_11::apimachinery::pkg::apis::meta::v1::LabelSelector> = None; let mut value_pod_selector: Option<crate::v1_11::apimachinery::pkg::apis::meta::v1::LabelSelector> = None; while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? 
{ match key { Field::Key_ip_block => value_ip_block = serde::de::MapAccess::next_value(&mut map)?, Field::Key_namespace_selector => value_namespace_selector = serde::de::MapAccess::next_value(&mut map)?, Field::Key_pod_selector => value_pod_selector = serde::de::MapAccess::next_value(&mut map)?, Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; }, } } Ok(NetworkPolicyPeer { ip_block: value_ip_block, namespace_selector: value_namespace_selector, pod_selector: value_pod_selector, }) } } deserializer.deserialize_struct( "NetworkPolicyPeer", &[ "ipBlock", "namespaceSelector", "podSelector", ], Visitor, ) } } impl serde::Serialize for NetworkPolicyPeer { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer { let mut state = serializer.serialize_struct( "NetworkPolicyPeer", self.ip_block.as_ref().map_or(0, |_| 1) + self.namespace_selector.as_ref().map_or(0, |_| 1) + self.pod_selector.as_ref().map_or(0, |_| 1), )?; if let Some(value) = &self.ip_block { serde::ser::SerializeStruct::serialize_field(&mut state, "ipBlock", value)?; } if let Some(value) = &self.namespace_selector { serde::ser::SerializeStruct::serialize_field(&mut state, "namespaceSelector", value)?; } if let Some(value) = &self.pod_selector { serde::ser::SerializeStruct::serialize_field(&mut state, "podSelector", value)?; } serde::ser::SerializeStruct::end(state) } }
47.830508
248
0.588058
752a199ab8faae2a5a8029abd53cdcc617135a43
4,721
//! The asynchronous implementation of a websocket server.
use bytes::BytesMut;
use futures::{Future, Stream};
use server::upgrade::async::{IntoWs, Upgrade};
use server::InvalidConnection;
use server::{NoTlsAcceptor, WsServer};
use std::io;
use std::net::SocketAddr;
use std::net::ToSocketAddrs;
use tokio_core::net::{TcpListener, TcpStream};
pub use tokio_core::reactor::Handle;

#[cfg(any(feature = "async-ssl"))]
use native_tls::TlsAcceptor;
#[cfg(any(feature = "async-ssl"))]
use tokio_tls::{TlsAcceptor as TlsAcceptorExt, TlsStream};

/// The asynchronous specialization of a websocket server.
/// Use this struct to create asynchronous servers.
pub type Server<S> = WsServer<S, TcpListener>;

/// A stream of websocket connections and addresses the server generates.
///
/// Each item of the stream is the address of the incoming connection and an `Upgrade`
/// struct which lets the user decide whether to turn the connection into a websocket
/// connection or reject it.
pub type Incoming<S> =
	Box<Stream<Item = (Upgrade<S>, SocketAddr), Error = InvalidConnection<S, BytesMut>> + Send>;

/// Asynchronous methods for creating an async server and accepting incoming connections.
impl WsServer<NoTlsAcceptor, TcpListener> {
	/// Bind a websocket server to an address.
	/// Creating a websocket server can be done immediately so this does not
	/// return a `Future` but a simple `Result`.
	pub fn bind<A: ToSocketAddrs>(addr: A, handle: &Handle) -> io::Result<Self> {
		// Bind with the blocking std listener first, then hand it to the
		// tokio-core reactor so accepts become asynchronous.
		let tcp = ::std::net::TcpListener::bind(addr)?;
		let address = tcp.local_addr()?;
		Ok(Server {
			listener: TcpListener::from_listener(tcp, &address, handle)?,
			ssl_acceptor: NoTlsAcceptor,
		})
	}

	/// Turns the server into a stream of connection objects.
	///
	/// Each item of the stream is the address of the incoming connection and an `Upgrade`
	/// struct which lets the user decide whether to turn the connection into a websocket
	/// connection or reject it.
	///
	/// See the [`examples/async-server.rs`]
	/// (https://github.com/cyderize/rust-websocket/blob/master/examples/async-server.rs)
	/// example for a good echo server example.
	pub fn incoming(self) -> Incoming<TcpStream> {
		let future = self
			.listener
			.incoming()
			// A failed TCP accept has no usable stream to hand back, so the
			// `InvalidConnection` carries only the error.
			.map_err(|e| InvalidConnection {
				stream: None,
				parsed: None,
				buffer: None,
				error: e.into(),
			})
			// Attempt the HTTP -> websocket upgrade; on failure the raw
			// stream, parsed request and read buffer are returned to the
			// caller so the connection can still be salvaged or inspected.
			.and_then(|(stream, a)| {
				stream
					.into_ws()
					.map_err(|(stream, req, buf, err)| InvalidConnection {
						stream: Some(stream),
						parsed: req,
						buffer: Some(buf),
						error: err,
					})
					.map(move |u| (u, a))
			});
		Box::new(future)
	}
}

/// Asynchronous methods for creating an async SSL server and accepting incoming connections.
#[cfg(any(feature = "async-ssl"))]
impl WsServer<TlsAcceptor, TcpListener> {
	/// Bind an SSL websocket server to an address.
	/// Creating a websocket server can be done immediately so this does not
	/// return a `Future` but a simple `Result`.
	///
	/// Since this is an SSL server one needs to provide a `TlsAcceptor` that contains
	/// the server's SSL information.
	pub fn bind_secure<A: ToSocketAddrs>(
		addr: A,
		acceptor: TlsAcceptor,
		handle: &Handle,
	) -> io::Result<Self> {
		let tcp = ::std::net::TcpListener::bind(addr)?;
		let address = tcp.local_addr()?;
		Ok(Server {
			listener: TcpListener::from_listener(tcp, &address, handle)?,
			ssl_acceptor: acceptor,
		})
	}

	/// Turns the server into a stream of connection objects.
	///
	/// Each item of the stream is the address of the incoming connection and an `Upgrade`
	/// struct which lets the user decide whether to turn the connection into a websocket
	/// connection or reject it.
	///
	/// See the [`examples/async-server.rs`]
	/// (https://github.com/cyderize/rust-websocket/blob/master/examples/async-server.rs)
	/// example for a good echo server example.
	pub fn incoming(self) -> Incoming<TlsStream<TcpStream>> {
		let acceptor = TlsAcceptorExt::from(self.ssl_acceptor);
		let future = self
			.listener
			.incoming()
			.map_err(|e| InvalidConnection {
				stream: None,
				parsed: None,
				buffer: None,
				error: e.into(),
			})
			// Complete the TLS handshake before attempting the websocket
			// upgrade; a handshake failure yields no recoverable stream.
			.and_then(move |(stream, a)| {
				acceptor
					.accept(stream)
					.map_err(|e| {
						InvalidConnection {
							stream: None,
							parsed: None,
							buffer: None,
							// TODO: better error types
							error: io::Error::new(io::ErrorKind::Other, e).into(),
						}
					})
					.map(move |s| (s, a))
			})
			// Same upgrade step as the non-TLS server, but over the
			// now-encrypted stream.
			.and_then(|(stream, a)| {
				stream
					.into_ws()
					.map_err(|(stream, req, buf, err)| InvalidConnection {
						stream: Some(stream),
						parsed: req,
						buffer: Some(buf),
						error: err,
					})
					.map(move |u| (u, a))
			});
		Box::new(future)
	}
}
31.898649
93
0.670832
509a81689b2f49f407aba7984a19dd85ae113072
143
// Top-level module layout for the crate.
pub mod pieces;
pub mod board;
pub mod moves;

#[cfg(test)]
mod tests {
    /// Smoke test confirming the test harness is wired up correctly.
    #[test]
    fn it_works() {
        let result = 2 + 2;
        assert_eq!(result, 4);
    }
}
11.916667
29
0.524476
feeeba448e8f1cf5766822a95cbf02ce07c428c7
3,208
// Entry point for the command service: parses CLI arguments, selects an
// output plugin (Sleigh / Postgres / Druid / VictoriaMetrics) and wires it
// to an input transport (Kafka or gRPC) via a message router.
use command_service::communication::MessageRouter;
use command_service::input::{Error, KafkaInput};
use command_service::output::{
    DruidOutputPlugin, OutputArgs, OutputPlugin, PostgresOutputPlugin, SleighOutputPlugin,
    VictoriaMetricsOutputPlugin,
};
use command_service::report::{FullReportSenderBase, ReportSender, ReportServiceConfig};
use command_service::{args::Args, input::GRPCInput, input::InputConfig};
use log::trace;
use rpc::command_service::command_service_server::CommandServiceServer;
use std::net::{Ipv4Addr, SocketAddrV4};
use structopt::StructOpt;
use tonic::transport::Server;
use utils::metrics;

/// Parse configuration, start the metrics endpoint, then dispatch to
/// `start_services` with the output plugin chosen on the command line.
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    env_logger::init();
    let args: Args = Args::from_args();

    trace!("Environment: {:?}", args);

    // Metrics server runs in the background for the life of the process.
    metrics::serve();

    let input_config = args.input_config()?;

    // Each arm constructs its plugin (most are async and fallible) and then
    // blocks on the service loop until it terminates or errors.
    match args.output_config {
        OutputArgs::Sleigh(sleigh_config) => {
            start_services(
                input_config,
                args.report_config,
                SleighOutputPlugin::new(sleigh_config).await?,
            )
            .await
        }
        OutputArgs::Postgres(postgres_config) => {
            start_services(
                input_config,
                args.report_config,
                PostgresOutputPlugin::new(postgres_config).await?,
            )
            .await
        }
        OutputArgs::Druid(druid_config) => {
            start_services(
                input_config,
                args.report_config,
                DruidOutputPlugin::new(druid_config).await?,
            )
            .await
        }
        OutputArgs::VictoriaMetrics(victoria_metrics_config) => {
            start_services(
                input_config,
                args.report_config,
                VictoriaMetricsOutputPlugin::new(victoria_metrics_config)?,
            )
            .await
        }
    }?;

    Ok(())
}

/// Build the (optional) report sender, route messages from the configured
/// input transport into `output`, and run until the transport shuts down.
///
/// Reporting is all-or-nothing: both `topic` and `broker` must be given to
/// enable it, and neither to disable it; a half-configured pair panics at
/// startup rather than running with silently-broken reporting.
async fn start_services(
    input_config: InputConfig,
    report_config: ReportServiceConfig,
    output: impl OutputPlugin,
) -> Result<(), Error> {
    let report_service = match (report_config.topic, report_config.broker) {
        (Some(topic), Some(broker)) => ReportSender::Full(
            FullReportSenderBase::new(broker, topic, output.name().to_string())
                .await
                .map_err(Error::FailedToInitializeReporting)?,
        ),
        (None, None) => ReportSender::Disabled,
        _ => panic!("Must provide both topic and brokers for reporting service to enable it"),
    };

    let message_router = MessageRouter::new(report_service, output);

    match input_config {
        InputConfig::Kafka(input_config) => {
            // Kafka consumer loop; `listen` only returns on shutdown/error.
            KafkaInput::new(input_config, message_router)
                .await?
                .listen()
                .await?
        }
        InputConfig::GRpc(input_config) => {
            // Serve the gRPC CommandService on all interfaces (0.0.0.0).
            let input = GRPCInput::new(message_router);
            let addr = SocketAddrV4::new(Ipv4Addr::new(0, 0, 0, 0), input_config.grpc_port);
            Server::builder()
                .add_service(CommandServiceServer::new(input))
                .serve(addr.into())
                .await?;
        }
    }

    Ok(())
}
31.762376
94
0.591646
5d480bb57da2c3f1163b2d85e266d4c10c3c4bed
548
#![allow(non_camel_case_types)] #![allow(non_upper_case_globals)] #![allow(non_snake_case)] use crate::kernel_metadata::siginfo_str_repr; use fmt::{Display, Formatter}; use std::{fmt, fmt::Debug}; include!(concat!(env!("OUT_DIR"), "/signal_bindings_generated.rs")); impl Debug for siginfo_t { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.write_str(&siginfo_str_repr(self)) } } impl Display for siginfo_t { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.write_str(&siginfo_str_repr(self)) } }
24.909091
68
0.669708
0ac31fbd0f885982d47aedab0ed4f6713983e2a9
13,342
//! Data structures and methods for dealing with armatures. //! //! @see https://docs.blender.org/manual/en/dev/modeling/armature/introduction.html - Armature Introduction #[macro_use] extern crate serde_derive; use std::collections::HashMap; use crate::serde::serialize_hashmap_deterministic; pub use self::action::*; pub use self::bone::*; pub use self::coordinate_system::*; pub use self::export::*; pub use self::interpolate::*; use std::borrow::Borrow; use std::hash::Hash; mod action; mod bone; mod convert; mod coordinate_system; mod export; mod interpolate; mod serde; #[cfg(test)] mod test_util; /// Something went wrong in the Blender child process that was trying to parse your armature data. #[derive(Debug, thiserror::Error)] pub enum BlenderError { /// Errors in Blender are written to stderr. We capture the stderr from the `blender` child /// process that we spawned when attempting to export armature from a `.blend` file. #[error( "There was an issue while exporting armature: Blender stderr output: {}", _0 )] Stderr(String), } /// All of the data about a Blender armature that we've exported from Blender. /// A BlenderArmature should have all of the data that you need to implement skeletal /// animation. /// /// If you have other needs, such as a way to know the model space position of any bone at any /// time so that you can, say, render a baseball in on top of your hand bone.. Open an issue. 
/// (I plan to support this specific example in the future) #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Default)] // TODO: BlenderArmature<T: Bone> for DQ and matrix pub struct BlenderArmature { name: String, #[serde(serialize_with = "serialize_hashmap_deterministic")] joint_indices: HashMap<String, u8>, bone_child_to_parent: HashMap<u8, u8>, inverse_bind_poses: Vec<Bone>, #[serde(serialize_with = "serialize_hashmap_deterministic")] bone_space_actions: HashMap<String, Action>, #[serde(serialize_with = "serialize_hashmap_deterministic")] bone_groups: HashMap<String, Vec<u8>>, #[serde(default)] coordinate_system: CoordinateSystem, } impl BlenderArmature { /// The name of the armature pub fn name(&self) -> &String { &self.name } /// Set the name of the armature. /// /// # Example /// /// ``` /// # use blender_armature::BlenderArmature; /// let mut armature = BlenderArmature::default(); /// armature.set_name("Some Name".to_string()); /// /// assert_eq!(armature.name(), "Some Name"); /// ``` pub fn set_name(&mut self, name: String) { self.name = name; } /// Blender [bone groups] /// /// Maps bone group name to a vector of the bones indices that are in that bone group. 
/// /// ```rust /// # use blender_armature::{Action, BlenderArmature, FrameOffset, SampleDesc, JointIndicesRef}; /// # use std::time::Duration; /// /// let armature = create_blender_armature(); /// /// let joint_indices = armature.bone_groups().get("My bone group").unwrap(); /// /// let sample_desc = SampleDesc { /// frame_offset: FrameOffset::new_with_elapsed_time_and_frames_per_second( /// Duration::from_secs(2), /// 24, /// ), /// should_loop: false /// }; /// /// let _bones = armature.interpolate_bones( /// "SomeAction", /// JointIndicesRef::Some(joint_indices), /// sample_desc /// ); /// /// # fn create_blender_armature() -> BlenderArmature { /// # let mut b = BlenderArmature::default(); /// # b.insert_bone_space_action("SomeAction".to_string(), Action::new()); /// # b.create_bone_group("My bone group".to_string(), vec![]); /// # b /// # } /// ``` /// /// [bone groups]: https://docs.blender.org/manual/en/latest/animation/armatures/properties/bone_groups.html pub fn bone_groups(&self) -> &HashMap<String, Vec<u8>> { &self.bone_groups } /// Create a new bone group pub fn create_bone_group(&mut self, name: String, joint_indices: Vec<u8>) { self.bone_groups.insert(name, joint_indices); } /// Get a bone's index into the various Vec<Bone> data structures that hold bone data. /// /// # Example /// /// ``` /// use blender_armature::BlenderArmature; /// let mut armature = BlenderArmature::default(); /// /// armature.insert_joint_index("Spine".to_string(), 0); /// /// assert_eq!(armature.joint_indices().len(), 1); /// ``` pub fn joint_indices(&self) -> &HashMap<String, u8> { &self.joint_indices } /// Set a bone's index into the various Vec<Bone> data structures that hold bone data. 
/// /// # Example /// /// ``` /// use blender_armature::BlenderArmature; /// let mut armature = BlenderArmature::default(); /// /// armature.insert_joint_index("Spine".to_string(), 0); /// armature.insert_joint_index("UpperArm".to_string(), 2); /// /// assert_eq!(armature.joint_indices().len(), 2); /// ``` pub fn insert_joint_index(&mut self, joint_name: String, joint_idx: u8) { self.joint_indices.insert(joint_name, joint_idx); } /// Every bone's inverse bind pose. /// /// # From Blender /// When exporting from Blender these include the armature's world space matrix. /// /// So, effectively these are `(armature_world_space_matrix * bone_bind_pose).inverse()` pub fn inverse_bind_poses(&self) -> &Vec<Bone> { &self.inverse_bind_poses } /// Set the inverse bind poses. pub fn set_inverse_bind_poses(&mut self, poses: Vec<Bone>) { self.inverse_bind_poses = poses; } /// All of the actions defined on the armature, keyed by action name. /// /// FIXME: Rename to `bone_local_space_actions` pub fn bone_space_actions(&self) -> &HashMap<String, Action> { &self.bone_space_actions } /// Insert an action into the map of actions. pub fn insert_bone_space_action(&mut self, name: String, action: Action) { self.bone_space_actions.insert(name, action); } /// Remove an action from the map. pub fn remove_bone_space_action<Q>(&mut self, name: &Q) -> Option<Action> where String: Borrow<Q>, Q: Hash + Eq, { self.bone_space_actions.remove(name) } /// A map of a bone chil to its parent /// /// If a bone is not stored in this map then it does not have a parent. 
pub fn bone_child_to_parent(&self) -> &HashMap<u8, u8> { &self.bone_child_to_parent } /// # Example /// /// ``` /// # use blender_armature::BlenderArmature; /// let mut armature = BlenderArmature::default(); /// /// let child_idx = 4; /// let parent_idx = 2; /// /// armature.insert_joint_index("UpperArm".to_string(), parent_idx); /// armature.insert_joint_index("Lower Arm".to_string(), child_idx); /// /// armature.insert_child_to_parent(child_idx, parent_idx); /// ``` pub fn insert_child_to_parent(&mut self, child: u8, parent: u8) { self.bone_child_to_parent.insert(child, parent); } } /// The pose bones at an individual keyframe time #[derive(Debug, Serialize, Deserialize, PartialEq)] #[cfg_attr(test, derive(Default, Clone))] pub struct Keyframe { frame: u16, bones: Vec<Bone>, } impl Keyframe { #[allow(missing_docs)] pub fn new(frame: u16, bones: Vec<Bone>) -> Self { Keyframe { frame, bones } } /// All of the bones for this keyframe. pub fn bones(&self) -> &Vec<Bone> { &self.bones } /// All of the bones for this keyframe. pub fn bones_mut(&mut self) -> &mut Vec<Bone> { &mut self.bones } /// The frame number pub fn frame(&self) -> u16 { self.frame } } // TODO: These methods can be abstracted into calling a method that takes a callback impl BlenderArmature { /// Tranpose all of the bone matrices in our armature's action keyframes. /// Blender uses row major matrices, but OpenGL uses column major matrices so you'll /// usually want to transpose your matrices before using them. pub fn transpose_actions(&mut self) { for (_name, action) in self.bone_space_actions.iter_mut() { for (_bone_idx, keyframes) in action.keyframes_mut().iter_mut() { for bone in keyframes.iter_mut() { bone.bone_mut().transpose(); } } } for bone in self.inverse_bind_poses.iter_mut() { bone.transpose(); } } } impl BlenderArmature { /// Convert your action matrices into dual quaternions so that you can implement /// dual quaternion linear blending. 
pub fn matrices_to_dual_quats(&mut self) { for (_, keyframes) in self.bone_space_actions.iter_mut() { for (bone_idx, keyframes) in keyframes.keyframes_mut().iter_mut() { for bone_keyframe in keyframes.iter_mut() { bone_keyframe .set_bone(BlenderArmature::matrix_to_dual_quat(&bone_keyframe.bone())); } } } for bone in self.inverse_bind_poses.iter_mut() { *bone = BlenderArmature::matrix_to_dual_quat(bone); } } } impl Bone { fn transpose(&mut self) { match self { Bone::Matrix(ref mut matrix) => { matrix.transpose_mut(); } Bone::DualQuat(_) => unimplemented!(), }; } // DELETE ME fn multiply(&mut self, rhs: Bone) { match self { Bone::Matrix(lhs_matrix) => match rhs { Bone::Matrix(rhs_matrix) => { // *self = Bone::Matrix(rhs_matrix * *lhs_matrix) } Bone::DualQuat(_) => {} }, Bone::DualQuat(_) => {} }; } } // DELETE ME impl BlenderArmature { /// Iterate over all of the action bones and apply and multiply in the inverse bind pose. /// /// TODO: another function to apply bind shape matrix? Most armatures seem to export an identity /// bind shape matrix but that might not be the same for every armature. /// /// TODO: Do not mutate the matrices and instead just return the new values and let the caller /// handle caching them? Would mean less moving parts in our data structures and you always /// know exactly what you are getting. Right now you have no way actions of knowing whether or /// not actions have their bind poses pre-multiplied in. 
pub fn apply_inverse_bind_poses(&mut self) { for (_name, action) in self.bone_space_actions.iter_mut() { for (bone_idx, keyframe) in action.keyframes_mut().iter_mut() { for (index, bone) in keyframe.iter_mut().enumerate() { bone.bone_mut() .multiply(self.inverse_bind_poses[*bone_idx as usize]); } } } } } #[cfg(test)] mod tests { use super::*; use crate::interpolate::tests::dq_to_bone; use crate::test_util::action_with_keyframes; use nalgebra::Matrix4; #[test] fn convert_actions_to_dual_quats() { let mut keyframes = vec![]; keyframes.push(BoneKeyframe::new( 1, Bone::Matrix(Matrix4::from_column_slice(&[ 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, ])), )); let mut start_armature = BlenderArmature { bone_space_actions: action_with_keyframes(keyframes), ..BlenderArmature::default() }; start_armature.matrices_to_dual_quats(); let mut new_keyframes = vec![]; new_keyframes.push(BoneKeyframe::new( 1, dq_to_bone([1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]), )); let expected_armature = BlenderArmature { bone_space_actions: action_with_keyframes(new_keyframes), ..start_armature.clone() }; assert_eq!(start_armature, expected_armature); } // TODO: Function to return these start_actions that we keep using #[test] fn transpose_actions() { let keyframes = vec![BoneKeyframe::new( 1, Bone::Matrix(Matrix4::from_column_slice(&[ 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 5.0, 1.0, ])), )]; let mut start_armature = BlenderArmature { bone_space_actions: action_with_keyframes(keyframes), ..BlenderArmature::default() }; start_armature.transpose_actions(); let new_keyframes = vec![BoneKeyframe::new( 1, Bone::Matrix(Matrix4::from_column_slice(&[ 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 5.0, 0.0, 0.0, 0.0, 1.0, ])), )]; let expected_armature = BlenderArmature { bone_space_actions: action_with_keyframes(new_keyframes), ..start_armature.clone() }; assert_eq!(start_armature, expected_armature); } }
32.305085
112
0.603882
1ca6e64042d956ff3f701531ed6b28d91daa4ced
45,580
#![allow(clippy::len_zero)] #![allow(clippy::many_single_char_names)] #[allow(unused_imports)] use log::{debug, error, info, trace, warn}; #[cfg(feature = "dx12")] use gfx_backend_dx12 as back; #[cfg(feature = "metal")] use gfx_backend_metal as back; #[cfg(feature = "vulkan")] use gfx_backend_vulkan as back; use arrayvec::ArrayVec; use core::{ marker::PhantomData, mem::{size_of, ManuallyDrop}, ops::Deref, }; use gfx_hal::{ adapter::{Adapter, MemoryTypeId, PhysicalDevice}, buffer::{IndexBufferView, Usage as BufferUsage}, command::{ClearColor, ClearValue, CommandBuffer, MultiShot, Primary}, device::Device, format::{Aspects, ChannelType, Format, Swizzle}, image::{Extent, Layout, SubresourceRange, Usage, ViewKind}, memory::{Properties, Requirements}, pass::{Attachment, AttachmentLoadOp, AttachmentOps, AttachmentStoreOp, Subpass, SubpassDesc}, pool::{CommandPool, CommandPoolCreateFlags}, pso::{ AttributeDesc, BakedStates, BasePipeline, BlendDesc, BlendOp, BlendState, ColorBlendDesc, ColorMask, DepthStencilDesc, DepthTest, DescriptorSetLayoutBinding, ElemOffset, ElemStride, Element, EntryPoint, Face, Factor, FrontFace, GraphicsPipelineDesc, GraphicsShaderSet, InputAssemblerDesc, LogicOp, PipelineCreationFlags, PipelineStage, PolygonMode, Rasterizer, Rect, ShaderStageFlags, Specialization, StencilTest, VertexBufferDesc, Viewport, }, queue::{ capability::{Capability, Supports, Transfer}, family::QueueGroup, CommandQueue, Submission, }, window::{Backbuffer, Extent2D, FrameSync, PresentMode, Swapchain, SwapchainConfig}, Backend, DescriptorPool, Gpu, Graphics, IndexType, Instance, Primitive, QueueFamily, Surface, }; use std::time::Instant; use winit::{ dpi::LogicalSize, CreationError, Event, EventsLoop, Window, WindowBuilder, WindowEvent, }; pub const WINDOW_NAME: &str = "Textures"; pub const VERTEX_SOURCE: &str = "#version 450 layout (location = 0) in vec2 position; layout (location = 1) in vec3 color; layout (location = 2) in vec2 vert_uv; layout (location = 0) out 
gl_PerVertex { vec4 gl_Position; }; layout (location = 1) out vec3 frag_color; layout (location = 2) out vec2 frag_uv; void main() { gl_Position = vec4(position, 0.0, 1.0); frag_color = color; frag_uv = vert_uv; }"; pub const FRAGMENT_SOURCE: &str = "#version 450 layout (push_constant) uniform PushConsts { float time; } push; layout(set = 0, binding = 0) uniform texture2D tex; layout(set = 0, binding = 1) uniform sampler samp; layout (location = 1) in vec3 frag_color; layout (location = 2) in vec2 frag_uv; layout (location = 0) out vec4 color; void main() { float time01 = -0.9 * abs(sin(push.time * 0.7)) + 0.9; vec4 tex_color = texture(sampler2D(tex, samp), frag_uv); color = mix(tex_color, vec4(frag_color, 1.0), time01); }"; pub static CREATURE_BYTES: &[u8] = include_bytes!("creature.png"); #[derive(Debug, Clone, Copy)] pub struct Quad { pub x: f32, pub y: f32, pub w: f32, pub h: f32, } impl Quad { pub fn vertex_attributes(self) -> [f32; 4 * (2 + 3 + 2)] { let x = self.x; let y = self.y; let w = self.w; let h = self.h; #[cfg_attr(rustfmt, rustfmt_skip)] [ // X Y R G B U V x , y+h, 1.0, 0.0, 0.0, /* red */ 0.0, 1.0, /* bottom left */ x , y , 0.0, 1.0, 0.0, /* green */ 0.0, 0.0, /* top left */ x+w, y , 0.0, 0.0, 1.0, /* blue */ 1.0, 0.0, /* bottom right */ x+w, y+h, 1.0, 0.0, 1.0, /* magenta */ 1.0, 1.0, /* top right */ ] } } pub struct BufferBundle<B: Backend, D: Device<B>> { pub buffer: ManuallyDrop<B::Buffer>, pub requirements: Requirements, pub memory: ManuallyDrop<B::Memory>, pub phantom: PhantomData<D>, } impl<B: Backend, D: Device<B>> BufferBundle<B, D> { pub fn new( adapter: &Adapter<B>, device: &D, size: usize, usage: BufferUsage, ) -> Result<Self, &'static str> { unsafe { let mut buffer = device .create_buffer(size as u64, usage) .map_err(|_| "Couldn't create a buffer!")?; let requirements = device.get_buffer_requirements(&buffer); let memory_type_id = adapter .physical_device .memory_properties() .memory_types .iter() .enumerate() .find(|&(id, 
memory_type)| { requirements.type_mask & (1 << id) != 0 && memory_type.properties.contains(Properties::CPU_VISIBLE) }) .map(|(id, _)| MemoryTypeId(id)) .ok_or("Couldn't find a memory type to support the buffer!")?; let memory = device .allocate_memory(memory_type_id, requirements.size) .map_err(|_| "Couldn't allocate buffer memory!")?; device .bind_buffer_memory(&memory, 0, &mut buffer) .map_err(|_| "Couldn't bind the buffer memory!")?; Ok(Self { buffer: ManuallyDrop::new(buffer), requirements, memory: ManuallyDrop::new(memory), phantom: PhantomData, }) } } pub unsafe fn manually_drop(&self, device: &D) { use core::ptr::read; device.destroy_buffer(ManuallyDrop::into_inner(read(&self.buffer))); device.free_memory(ManuallyDrop::into_inner(read(&self.memory))); } } pub struct LoadedImage<B: Backend, D: Device<B>> { pub image: ManuallyDrop<B::Image>, pub requirements: Requirements, pub memory: ManuallyDrop<B::Memory>, pub image_view: ManuallyDrop<B::ImageView>, pub sampler: ManuallyDrop<B::Sampler>, pub phantom: PhantomData<D>, } impl<B: Backend, D: Device<B>> LoadedImage<B, D> { pub fn new<C: Capability + Supports<Transfer>>( adapter: &Adapter<B>, device: &D, command_pool: &mut CommandPool<B, C>, command_queue: &mut CommandQueue<B, C>, img: image::RgbaImage, ) -> Result<Self, &'static str> { unsafe { // 0. First we compute some memory related values. let pixel_size = size_of::<image::Rgba<u8>>(); let row_size = pixel_size * (img.width() as usize); let limits = adapter.physical_device.limits(); let row_alignment_mask = limits.min_buffer_copy_pitch_alignment as u32 - 1; let row_pitch = ((row_size as u32 + row_alignment_mask) & !row_alignment_mask) as usize; debug_assert!(row_pitch as usize >= row_size); // 1. make a staging buffer with enough memory for the image, and a // transfer_src usage let required_bytes = row_pitch * img.height() as usize; let staging_bundle = BufferBundle::new(&adapter, device, required_bytes, BufferUsage::TRANSFER_SRC)?; // 2. 
use mapping writer to put the image data into that buffer let mut writer = device .acquire_mapping_writer::<u8>(&staging_bundle.memory, 0..staging_bundle.requirements.size) .map_err(|_| "Couldn't acquire a mapping writer to the staging buffer!")?; for y in 0..img.height() as usize { let row = &(*img)[y * row_size..(y + 1) * row_size]; let dest_base = y * row_pitch; writer[dest_base..dest_base + row.len()].copy_from_slice(row); } device .release_mapping_writer(writer) .map_err(|_| "Couldn't release the mapping writer to the staging buffer!")?; // 3. Make an image with transfer_dst and SAMPLED usage let mut the_image = device .create_image( gfx_hal::image::Kind::D2(img.width(), img.height(), 1, 1), 1, Format::Rgba8Srgb, gfx_hal::image::Tiling::Optimal, gfx_hal::image::Usage::TRANSFER_DST | gfx_hal::image::Usage::SAMPLED, gfx_hal::image::ViewCapabilities::empty(), ) .map_err(|_| "Couldn't create the image!")?; // 4. allocate memory for the image and bind it let requirements = device.get_image_requirements(&the_image); let memory_type_id = adapter .physical_device .memory_properties() .memory_types .iter() .enumerate() .find(|&(id, memory_type)| { // BIG NOTE: THIS IS DEVICE LOCAL NOT CPU VISIBLE requirements.type_mask & (1 << id) != 0 && memory_type.properties.contains(Properties::DEVICE_LOCAL) }) .map(|(id, _)| MemoryTypeId(id)) .ok_or("Couldn't find a memory type to support the image!")?; let memory = device .allocate_memory(memory_type_id, requirements.size) .map_err(|_| "Couldn't allocate image memory!")?; device .bind_image_memory(&memory, 0, &mut the_image) .map_err(|_| "Couldn't bind the image memory!")?; // 5. 
create image view and sampler let image_view = device .create_image_view( &the_image, gfx_hal::image::ViewKind::D2, Format::Rgba8Srgb, gfx_hal::format::Swizzle::NO, SubresourceRange { aspects: Aspects::COLOR, levels: 0..1, layers: 0..1, }, ) .map_err(|_| "Couldn't create the image view!")?; let sampler = device .create_sampler(gfx_hal::image::SamplerInfo::new( gfx_hal::image::Filter::Nearest, gfx_hal::image::WrapMode::Tile, )) .map_err(|_| "Couldn't create the sampler!")?; // 6. create a command buffer let mut cmd_buffer = command_pool.acquire_command_buffer::<gfx_hal::command::OneShot>(); cmd_buffer.begin(); // 7. Use a pipeline barrier to transition the image from empty/undefined // to TRANSFER_WRITE/TransferDstOptimal let image_barrier = gfx_hal::memory::Barrier::Image { states: (gfx_hal::image::Access::empty(), Layout::Undefined) ..( gfx_hal::image::Access::TRANSFER_WRITE, Layout::TransferDstOptimal, ), target: &the_image, families: None, range: SubresourceRange { aspects: Aspects::COLOR, levels: 0..1, layers: 0..1, }, }; cmd_buffer.pipeline_barrier( PipelineStage::TOP_OF_PIPE..PipelineStage::TRANSFER, gfx_hal::memory::Dependencies::empty(), &[image_barrier], ); // 8. perform copy from staging buffer to image cmd_buffer.copy_buffer_to_image( &staging_bundle.buffer, &the_image, Layout::TransferDstOptimal, &[gfx_hal::command::BufferImageCopy { buffer_offset: 0, buffer_width: (row_pitch / pixel_size) as u32, buffer_height: img.height(), image_layers: gfx_hal::image::SubresourceLayers { aspects: Aspects::COLOR, level: 0, layers: 0..1, }, image_offset: gfx_hal::image::Offset { x: 0, y: 0, z: 0 }, image_extent: gfx_hal::image::Extent { width: img.width(), height: img.height(), depth: 1, }, }], ); // 9. 
use pipeline barrier to transition the image to SHADER_READ access/ // ShaderReadOnlyOptimal layout let image_barrier = gfx_hal::memory::Barrier::Image { states: ( gfx_hal::image::Access::TRANSFER_WRITE, Layout::TransferDstOptimal, ) ..( gfx_hal::image::Access::SHADER_READ, Layout::ShaderReadOnlyOptimal, ), target: &the_image, families: None, range: SubresourceRange { aspects: Aspects::COLOR, levels: 0..1, layers: 0..1, }, }; cmd_buffer.pipeline_barrier( PipelineStage::TRANSFER..PipelineStage::FRAGMENT_SHADER, gfx_hal::memory::Dependencies::empty(), &[image_barrier], ); // 10. Submit the cmd buffer to queue and wait for it cmd_buffer.finish(); let upload_fence = device .create_fence(false) .map_err(|_| "Couldn't create an upload fence!")?; command_queue.submit_nosemaphores(Some(&cmd_buffer), Some(&upload_fence)); device .wait_for_fence(&upload_fence, core::u64::MAX) .map_err(|_| "Couldn't wait for the fence!")?; device.destroy_fence(upload_fence); // 11. Destroy the staging bundle and one shot buffer now that we're done staging_bundle.manually_drop(device); command_pool.free(Some(cmd_buffer)); Ok(Self { image: ManuallyDrop::new(the_image), requirements, memory: ManuallyDrop::new(memory), image_view: ManuallyDrop::new(image_view), sampler: ManuallyDrop::new(sampler), phantom: PhantomData, }) } } pub unsafe fn manually_drop(&self, device: &D) { use core::ptr::read; device.destroy_sampler(ManuallyDrop::into_inner(read(&self.sampler))); device.destroy_image_view(ManuallyDrop::into_inner(read(&self.image_view))); device.destroy_image(ManuallyDrop::into_inner(read(&self.image))); device.free_memory(ManuallyDrop::into_inner(read(&self.memory))); } } pub struct HalState { creation_instant: Instant, vertices: BufferBundle<back::Backend, back::Device>, indexes: BufferBundle<back::Backend, back::Device>, texture: LoadedImage<back::Backend, back::Device>, descriptor_set_layouts: Vec<<back::Backend as Backend>::DescriptorSetLayout>, descriptor_pool: ManuallyDrop<<back::Backend 
as Backend>::DescriptorPool>, descriptor_set: ManuallyDrop<<back::Backend as Backend>::DescriptorSet>, pipeline_layout: ManuallyDrop<<back::Backend as Backend>::PipelineLayout>, graphics_pipeline: ManuallyDrop<<back::Backend as Backend>::GraphicsPipeline>, current_frame: usize, frames_in_flight: usize, in_flight_fences: Vec<<back::Backend as Backend>::Fence>, render_finished_semaphores: Vec<<back::Backend as Backend>::Semaphore>, image_available_semaphores: Vec<<back::Backend as Backend>::Semaphore>, command_buffers: Vec<CommandBuffer<back::Backend, Graphics, MultiShot, Primary>>, command_pool: ManuallyDrop<CommandPool<back::Backend, Graphics>>, framebuffers: Vec<<back::Backend as Backend>::Framebuffer>, image_views: Vec<(<back::Backend as Backend>::ImageView)>, render_pass: ManuallyDrop<<back::Backend as Backend>::RenderPass>, render_area: Rect, queue_group: QueueGroup<back::Backend, Graphics>, swapchain: ManuallyDrop<<back::Backend as Backend>::Swapchain>, device: ManuallyDrop<back::Device>, _adapter: Adapter<back::Backend>, _surface: <back::Backend as Backend>::Surface, _instance: ManuallyDrop<back::Instance>, } impl HalState { /// Creates a new, fully initialized HalState. 
pub fn new(window: &Window) -> Result<Self, &'static str> { // Create An Instance let instance = back::Instance::create(WINDOW_NAME, 1); // Create A Surface let mut surface = instance.create_surface(window); // Select An Adapter let adapter = instance .enumerate_adapters() .into_iter() .find(|a| { a.queue_families .iter() .any(|qf| qf.supports_graphics() && surface.supports_queue_family(qf)) }) .ok_or("Couldn't find a graphical Adapter!")?; // Open A Device and take out a QueueGroup let (mut device, mut queue_group) = { let queue_family = adapter .queue_families .iter() .find(|qf| qf.supports_graphics() && surface.supports_queue_family(qf)) .ok_or("Couldn't find a QueueFamily with graphics!")?; let Gpu { device, mut queues } = unsafe { adapter .physical_device .open(&[(&queue_family, &[1.0; 1])]) .map_err(|_| "Couldn't open the PhysicalDevice!")? }; let queue_group = queues .take::<Graphics>(queue_family.id()) .ok_or("Couldn't take ownership of the QueueGroup!")?; if queue_group.queues.len() > 0 { Ok(()) } else { Err("The QueueGroup did not have any CommandQueues available!") }?; (device, queue_group) }; // Create A Swapchain, this is extra long let (swapchain, extent, backbuffer, format, frames_in_flight) = { let (caps, preferred_formats, present_modes, composite_alphas) = surface.compatibility(&adapter.physical_device); info!("{:?}", caps); info!("Preferred Formats: {:?}", preferred_formats); info!("Present Modes: {:?}", present_modes); info!("Composite Alphas: {:?}", composite_alphas); // let present_mode = { use gfx_hal::window::PresentMode::*; [Mailbox, Fifo, Relaxed, Immediate] .iter() .cloned() .find(|pm| present_modes.contains(pm)) .ok_or("No PresentMode values specified!")? }; let composite_alpha = { use gfx_hal::window::CompositeAlpha::*; [Opaque, Inherit, PreMultiplied, PostMultiplied] .iter() .cloned() .find(|ca| composite_alphas.contains(ca)) .ok_or("No CompositeAlpha values specified!")? 
}; let format = match preferred_formats { None => Format::Rgba8Srgb, Some(formats) => match formats .iter() .find(|format| format.base_format().1 == ChannelType::Srgb) .cloned() { Some(srgb_format) => srgb_format, None => formats .get(0) .cloned() .ok_or("Preferred format list was empty!")?, }, }; let extent = { let window_client_area = window .get_inner_size() .ok_or("Window doesn't exist!")? .to_physical(window.get_hidpi_factor()); Extent2D { width: caps.extents.end.width.min(window_client_area.width as u32), height: caps .extents .end .height .min(window_client_area.height as u32), } }; let image_count = if present_mode == PresentMode::Mailbox { (caps.image_count.end - 1).min(3) } else { (caps.image_count.end - 1).min(2) }; let image_layers = 1; let image_usage = if caps.usage.contains(Usage::COLOR_ATTACHMENT) { Usage::COLOR_ATTACHMENT } else { Err("The Surface isn't capable of supporting color!")? }; let swapchain_config = SwapchainConfig { present_mode, composite_alpha, format, extent, image_count, image_layers, image_usage, }; info!("{:?}", swapchain_config); // let (swapchain, backbuffer) = unsafe { device .create_swapchain(&mut surface, swapchain_config, None) .map_err(|_| "Failed to create the swapchain!")? 
}; (swapchain, extent, backbuffer, format, image_count as usize) }; // Create Our Sync Primitives let (image_available_semaphores, render_finished_semaphores, in_flight_fences) = { let mut image_available_semaphores: Vec<<back::Backend as Backend>::Semaphore> = vec![]; let mut render_finished_semaphores: Vec<<back::Backend as Backend>::Semaphore> = vec![]; let mut in_flight_fences: Vec<<back::Backend as Backend>::Fence> = vec![]; for _ in 0..frames_in_flight { in_flight_fences.push( device .create_fence(true) .map_err(|_| "Could not create a fence!")?, ); image_available_semaphores.push( device .create_semaphore() .map_err(|_| "Could not create a semaphore!")?, ); render_finished_semaphores.push( device .create_semaphore() .map_err(|_| "Could not create a semaphore!")?, ); } ( image_available_semaphores, render_finished_semaphores, in_flight_fences, ) }; // Define A RenderPass let render_pass = { let color_attachment = Attachment { format: Some(format), samples: 1, ops: AttachmentOps { load: AttachmentLoadOp::Clear, store: AttachmentStoreOp::Store, }, stencil_ops: AttachmentOps::DONT_CARE, layouts: Layout::Undefined..Layout::Present, }; let subpass = SubpassDesc { colors: &[(0, Layout::ColorAttachmentOptimal)], depth_stencil: None, inputs: &[], resolves: &[], preserves: &[], }; unsafe { device .create_render_pass(&[color_attachment], &[subpass], &[]) .map_err(|_| "Couldn't create a render pass!")? 
} }; // Create The ImageViews let image_views: Vec<_> = match backbuffer { Backbuffer::Images(images) => images .into_iter() .map(|image| unsafe { device .create_image_view( &image, ViewKind::D2, format, Swizzle::NO, SubresourceRange { aspects: Aspects::COLOR, levels: 0..1, layers: 0..1, }, ) .map_err(|_| "Couldn't create the image_view for the image!") }) .collect::<Result<Vec<_>, &str>>()?, Backbuffer::Framebuffer(_) => unimplemented!("Can't handle framebuffer backbuffer!"), }; // Create Our FrameBuffers let framebuffers: Vec<<back::Backend as Backend>::Framebuffer> = { image_views .iter() .map(|image_view| unsafe { device .create_framebuffer( &render_pass, vec![image_view], Extent { width: extent.width as u32, height: extent.height as u32, depth: 1, }, ) .map_err(|_| "Failed to create a framebuffer!") }) .collect::<Result<Vec<_>, &str>>()? }; // Create Our CommandPool let mut command_pool = unsafe { device .create_command_pool_typed(&queue_group, CommandPoolCreateFlags::RESET_INDIVIDUAL) .map_err(|_| "Could not create the raw command pool!")? }; // Create Our CommandBuffers let command_buffers: Vec<_> = framebuffers .iter() .map(|_| command_pool.acquire_command_buffer()) .collect(); // Build our pipeline and vertex buffer let (descriptor_set_layouts, descriptor_pool, descriptor_set, pipeline_layout, gfx_pipeline) = Self::create_pipeline(&mut device, extent, &render_pass)?; const F32_XY_RGB_UV_QUAD: usize = size_of::<f32>() * (2 + 3 + 2) * 4; let vertices = BufferBundle::new(&adapter, &device, F32_XY_RGB_UV_QUAD, BufferUsage::VERTEX)?; const U16_QUAD_INDICES: usize = size_of::<u16>() * 2 * 3; let indexes = BufferBundle::new(&adapter, &device, U16_QUAD_INDICES, BufferUsage::INDEX)?; // Write the index data just once. 
unsafe { let mut data_target = device .acquire_mapping_writer(&indexes.memory, 0..indexes.requirements.size) .map_err(|_| "Failed to acquire an index buffer mapping writer!")?; const INDEX_DATA: &[u16] = &[0, 1, 2, 2, 3, 0]; data_target[..INDEX_DATA.len()].copy_from_slice(&INDEX_DATA); device .release_mapping_writer(data_target) .map_err(|_| "Couldn't release the index buffer mapping writer!")?; } // 4. You create the actual descriptors which you want to write into the // allocated descriptor set (in this case an image and a sampler) let texture = LoadedImage::new( &adapter, &device, &mut command_pool, &mut queue_group.queues[0], image::load_from_memory(CREATURE_BYTES) .expect("Binary corrupted!") .to_rgba(), )?; // 5. You write the descriptors into the descriptor set using // write_descriptor_sets which you pass a set of DescriptorSetWrites // which each write in one or more descriptors to the set unsafe { device.write_descriptor_sets(vec![ gfx_hal::pso::DescriptorSetWrite { set: &descriptor_set, binding: 0, array_offset: 0, descriptors: Some(gfx_hal::pso::Descriptor::Image( texture.image_view.deref(), Layout::Undefined, )), }, gfx_hal::pso::DescriptorSetWrite { set: &descriptor_set, binding: 1, array_offset: 0, descriptors: Some(gfx_hal::pso::Descriptor::Sampler(texture.sampler.deref())), }, ]); } Ok(Self { creation_instant: Instant::now(), vertices, indexes, texture, descriptor_pool: ManuallyDrop::new(descriptor_pool), descriptor_set: ManuallyDrop::new(descriptor_set), _instance: ManuallyDrop::new(instance), _surface: surface, _adapter: adapter, device: ManuallyDrop::new(device), queue_group, swapchain: ManuallyDrop::new(swapchain), render_area: extent.to_extent().rect(), render_pass: ManuallyDrop::new(render_pass), image_views, framebuffers, command_pool: ManuallyDrop::new(command_pool), command_buffers, image_available_semaphores, render_finished_semaphores, in_flight_fences, frames_in_flight, current_frame: 0, descriptor_set_layouts, pipeline_layout: 
ManuallyDrop::new(pipeline_layout), graphics_pipeline: ManuallyDrop::new(gfx_pipeline), }) } #[allow(clippy::type_complexity)] fn create_pipeline( device: &mut back::Device, extent: Extent2D, render_pass: &<back::Backend as Backend>::RenderPass, ) -> Result< ( Vec<<back::Backend as Backend>::DescriptorSetLayout>, <back::Backend as Backend>::DescriptorPool, <back::Backend as Backend>::DescriptorSet, <back::Backend as Backend>::PipelineLayout, <back::Backend as Backend>::GraphicsPipeline, ), &'static str, > { let mut compiler = shaderc::Compiler::new().ok_or("shaderc not found!")?; let vertex_compile_artifact = compiler .compile_into_spirv( VERTEX_SOURCE, shaderc::ShaderKind::Vertex, "vertex.vert", "main", None, ) .map_err(|e| { error!("{}", e); "Couldn't compile vertex shader!" })?; let fragment_compile_artifact = compiler .compile_into_spirv( FRAGMENT_SOURCE, shaderc::ShaderKind::Fragment, "fragment.frag", "main", None, ) .map_err(|e| { error!("{}", e); "Couldn't compile fragment shader!" })?; let vertex_shader_module = unsafe { device .create_shader_module(vertex_compile_artifact.as_binary_u8()) .map_err(|_| "Couldn't make the vertex module")? }; let fragment_shader_module = unsafe { device .create_shader_module(fragment_compile_artifact.as_binary_u8()) .map_err(|_| "Couldn't make the fragment module")? 
}; let (descriptor_set_layouts, descriptor_pool, descriptor_set, layout, gfx_pipeline) = { let (vs_entry, fs_entry) = ( EntryPoint { entry: "main", module: &vertex_shader_module, specialization: Specialization { constants: &[], data: &[], }, }, EntryPoint { entry: "main", module: &fragment_shader_module, specialization: Specialization { constants: &[], data: &[], }, }, ); let shaders = GraphicsShaderSet { vertex: vs_entry, hull: None, domain: None, geometry: None, fragment: Some(fs_entry), }; let input_assembler = InputAssemblerDesc::new(Primitive::TriangleList); let vertex_buffers: Vec<VertexBufferDesc> = vec![VertexBufferDesc { binding: 0, stride: (size_of::<f32>() * (2 + 3 + 2)) as ElemStride, rate: 0, }]; let position_attribute = AttributeDesc { location: 0, binding: 0, element: Element { format: Format::Rg32Float, offset: 0, }, }; let color_attribute = AttributeDesc { location: 1, binding: 0, element: Element { format: Format::Rgb32Float, offset: (size_of::<f32>() * 2) as ElemOffset, }, }; let uv_attribute = AttributeDesc { location: 2, binding: 0, element: Element { format: Format::Rg32Float, offset: (size_of::<f32>() * 5) as ElemOffset, }, }; let attributes: Vec<AttributeDesc> = vec![position_attribute, color_attribute, uv_attribute]; let rasterizer = Rasterizer { depth_clamping: false, polygon_mode: PolygonMode::Fill, cull_face: Face::NONE, front_face: FrontFace::Clockwise, depth_bias: None, conservative: false, }; let depth_stencil = DepthStencilDesc { depth: DepthTest::Off, depth_bounds: false, stencil: StencilTest::Off, }; let blender = { let blend_state = BlendState::On { color: BlendOp::Add { src: Factor::One, dst: Factor::Zero, }, alpha: BlendOp::Add { src: Factor::One, dst: Factor::Zero, }, }; BlendDesc { logic_op: Some(LogicOp::Copy), targets: vec![ColorBlendDesc(ColorMask::ALL, blend_state)], } }; let baked_states = BakedStates { viewport: Some(Viewport { rect: extent.to_extent().rect(), depth: (0.0..1.0), }), scissor: 
Some(extent.to_extent().rect()), blend_color: None, depth_bounds: None, }; // 1. you make a DescriptorSetLayout which is the layout of one descriptor // set let descriptor_set_layouts: Vec<<back::Backend as Backend>::DescriptorSetLayout> = vec![unsafe { device .create_descriptor_set_layout( &[ DescriptorSetLayoutBinding { binding: 0, ty: gfx_hal::pso::DescriptorType::SampledImage, count: 1, stage_flags: ShaderStageFlags::FRAGMENT, immutable_samplers: false, }, DescriptorSetLayoutBinding { binding: 1, ty: gfx_hal::pso::DescriptorType::Sampler, count: 1, stage_flags: ShaderStageFlags::FRAGMENT, immutable_samplers: false, }, ], &[], ) .map_err(|_| "Couldn't make a DescriptorSetLayout")? }]; // 2. you create a descriptor pool, and when making that descriptor pool // you specify how many sets you want to be able to allocate from the // pool, as well as the maximum number of each kind of descriptor you // want to be able to allocate from that pool, total, for all sets. let mut descriptor_pool = unsafe { device .create_descriptor_pool( 1, // sets &[ gfx_hal::pso::DescriptorRangeDesc { ty: gfx_hal::pso::DescriptorType::SampledImage, count: 1, }, gfx_hal::pso::DescriptorRangeDesc { ty: gfx_hal::pso::DescriptorType::Sampler, count: 1, }, ], ) .map_err(|_| "Couldn't create a descriptor pool!")? }; // 3. you allocate said descriptor set from the pool you made earlier let descriptor_set = unsafe { descriptor_pool .allocate_set(&descriptor_set_layouts[0]) .map_err(|_| "Couldn't make a Descriptor Set!")? }; let push_constants = vec![(ShaderStageFlags::FRAGMENT, 0..1)]; let layout = unsafe { device .create_pipeline_layout(&descriptor_set_layouts, push_constants) .map_err(|_| "Couldn't create a pipeline layout")? 
}; let gfx_pipeline = { let desc = GraphicsPipelineDesc { shaders, rasterizer, vertex_buffers, attributes, input_assembler, blender, depth_stencil, multisampling: None, baked_states, layout: &layout, subpass: Subpass { index: 0, main_pass: render_pass, }, flags: PipelineCreationFlags::empty(), parent: BasePipeline::None, }; unsafe { device.create_graphics_pipeline(&desc, None).map_err(|e| { error!("{}", e); "Couldn't create a graphics pipeline!" })? } }; ( descriptor_set_layouts, descriptor_pool, descriptor_set, layout, gfx_pipeline, ) }; unsafe { device.destroy_shader_module(vertex_shader_module); device.destroy_shader_module(fragment_shader_module); } Ok(( descriptor_set_layouts, descriptor_pool, descriptor_set, layout, gfx_pipeline, )) } /// Draw a frame that's just cleared to the color specified. pub fn draw_clear_frame(&mut self, color: [f32; 4]) -> Result<(), &'static str> { // SETUP FOR THIS FRAME let flight_fence = &self.in_flight_fences[self.current_frame]; let image_available = &self.image_available_semaphores[self.current_frame]; let render_finished = &self.render_finished_semaphores[self.current_frame]; // Advance the frame _before_ we start using the `?` operator self.current_frame = (self.current_frame + 1) % self.frames_in_flight; let (i_u32, i_usize) = unsafe { self .device .wait_for_fence(flight_fence, core::u64::MAX) .map_err(|_| "Failed to wait on the fence!")?; self .device .reset_fence(flight_fence) .map_err(|_| "Couldn't reset the fence!")?; let image_index = self .swapchain .acquire_image(core::u64::MAX, FrameSync::Semaphore(image_available)) .map_err(|_| "Couldn't acquire an image from the swapchain!")?; (image_index, image_index as usize) }; // RECORD COMMANDS unsafe { let buffer = &mut self.command_buffers[i_usize]; let clear_values = [ClearValue::Color(ClearColor::Float(color))]; buffer.begin(false); buffer.begin_render_pass_inline( &self.render_pass, &self.framebuffers[i_usize], self.render_area, clear_values.iter(), ); buffer.finish(); 
} // SUBMISSION AND PRESENT let command_buffers = &self.command_buffers[i_usize..=i_usize]; let wait_semaphores: ArrayVec<[_; 1]> = [(image_available, PipelineStage::COLOR_ATTACHMENT_OUTPUT)].into(); let signal_semaphores: ArrayVec<[_; 1]> = [render_finished].into(); // yes, you have to write it twice like this. yes, it's silly. let present_wait_semaphores: ArrayVec<[_; 1]> = [render_finished].into(); let submission = Submission { command_buffers, wait_semaphores, signal_semaphores, }; let the_command_queue = &mut self.queue_group.queues[0]; unsafe { the_command_queue.submit(submission, Some(flight_fence)); self .swapchain .present(the_command_queue, i_u32, present_wait_semaphores) .map_err(|_| "Failed to present into the swapchain!") } } pub fn draw_quad_frame(&mut self, quad: Quad) -> Result<(), &'static str> { // SETUP FOR THIS FRAME let flight_fence = &self.in_flight_fences[self.current_frame]; let image_available = &self.image_available_semaphores[self.current_frame]; let render_finished = &self.render_finished_semaphores[self.current_frame]; // Advance the frame _before_ we start using the `?` operator self.current_frame = (self.current_frame + 1) % self.frames_in_flight; let (i_u32, i_usize) = unsafe { self .device .wait_for_fence(flight_fence, core::u64::MAX) .map_err(|_| "Failed to wait on the fence!")?; self .device .reset_fence(flight_fence) .map_err(|_| "Couldn't reset the fence!")?; let image_index = self .swapchain .acquire_image(core::u64::MAX, FrameSync::Semaphore(image_available)) .map_err(|_| "Couldn't acquire an image from the swapchain!")?; (image_index, image_index as usize) }; // WRITE THE QUAD DATA unsafe { let mut data_target = self .device .acquire_mapping_writer(&self.vertices.memory, 0..self.vertices.requirements.size) .map_err(|_| "Failed to acquire a vertex buffer mapping writer!")?; let data = quad.vertex_attributes(); data_target[..data.len()].copy_from_slice(&data); self .device .release_mapping_writer(data_target) .map_err(|_| 
"Couldn't release the VB mapping writer!")?; } // DETERMINE THE TIME DATA let duration = Instant::now().duration_since(self.creation_instant); let time_f32 = duration.as_secs() as f32 + duration.subsec_nanos() as f32 * 1e-9; // RECORD COMMANDS unsafe { let buffer = &mut self.command_buffers[i_usize]; const QUAD_CLEAR: [ClearValue; 1] = [ClearValue::Color(ClearColor::Float([0.1, 0.2, 0.3, 1.0]))]; buffer.begin(false); { let mut encoder = buffer.begin_render_pass_inline( &self.render_pass, &self.framebuffers[i_usize], self.render_area, QUAD_CLEAR.iter(), ); encoder.bind_graphics_pipeline(&self.graphics_pipeline); let vertex_buffers: ArrayVec<[_; 1]> = [(self.vertices.buffer.deref(), 0)].into(); encoder.bind_vertex_buffers(0, vertex_buffers); encoder.bind_index_buffer(IndexBufferView { buffer: &self.indexes.buffer, offset: 0, index_type: IndexType::U16, }); // 6. You actually bind the descriptor set in the command buffer before // the draw call using bind_graphics_descriptor_sets encoder.bind_graphics_descriptor_sets( &self.pipeline_layout, 0, Some(self.descriptor_set.deref()), &[], ); encoder.push_graphics_constants( &self.pipeline_layout, ShaderStageFlags::FRAGMENT, 0, &[time_f32.to_bits()], ); encoder.draw_indexed(0..6, 0, 0..1); } buffer.finish(); } // SUBMISSION AND PRESENT let command_buffers = &self.command_buffers[i_usize..=i_usize]; let wait_semaphores: ArrayVec<[_; 1]> = [(image_available, PipelineStage::COLOR_ATTACHMENT_OUTPUT)].into(); let signal_semaphores: ArrayVec<[_; 1]> = [render_finished].into(); // yes, you have to write it twice like this. yes, it's silly. 
let present_wait_semaphores: ArrayVec<[_; 1]> = [render_finished].into(); let submission = Submission { command_buffers, wait_semaphores, signal_semaphores, }; let the_command_queue = &mut self.queue_group.queues[0]; unsafe { the_command_queue.submit(submission, Some(flight_fence)); self .swapchain .present(the_command_queue, i_u32, present_wait_semaphores) .map_err(|_| "Failed to present into the swapchain!") } } } impl core::ops::Drop for HalState { /// We have to clean up "leaf" elements before "root" elements. Basically, we /// clean up in reverse of the order that we created things. fn drop(&mut self) { let _ = self.device.wait_idle(); unsafe { for descriptor_set_layout in self.descriptor_set_layouts.drain(..) { self .device .destroy_descriptor_set_layout(descriptor_set_layout) } for fence in self.in_flight_fences.drain(..) { self.device.destroy_fence(fence) } for semaphore in self.render_finished_semaphores.drain(..) { self.device.destroy_semaphore(semaphore) } for semaphore in self.image_available_semaphores.drain(..) { self.device.destroy_semaphore(semaphore) } for framebuffer in self.framebuffers.drain(..) { self.device.destroy_framebuffer(framebuffer); } for image_view in self.image_views.drain(..) 
{ self.device.destroy_image_view(image_view); } // LAST RESORT STYLE CODE, NOT TO BE IMITATED LIGHTLY self.vertices.manually_drop(self.device.deref()); self.indexes.manually_drop(self.device.deref()); self.texture.manually_drop(self.device.deref()); use core::ptr::read; // this implicitly frees all descriptor sets from this pool self .device .destroy_descriptor_pool(ManuallyDrop::into_inner(read(&self.descriptor_pool))); self .device .destroy_pipeline_layout(ManuallyDrop::into_inner(read(&self.pipeline_layout))); self .device .destroy_graphics_pipeline(ManuallyDrop::into_inner(read(&self.graphics_pipeline))); self .device .destroy_command_pool(ManuallyDrop::into_inner(read(&self.command_pool)).into_raw()); self .device .destroy_render_pass(ManuallyDrop::into_inner(read(&self.render_pass))); self .device .destroy_swapchain(ManuallyDrop::into_inner(read(&self.swapchain))); ManuallyDrop::drop(&mut self.device); ManuallyDrop::drop(&mut self._instance); } } } #[derive(Debug)] pub struct WinitState { pub events_loop: EventsLoop, pub window: Window, } impl WinitState { /// Constructs a new `EventsLoop` and `Window` pair. /// /// The specified title and size are used, other elements are default. /// ## Failure /// It's possible for the window creation to fail. This is unlikely. pub fn new<T: Into<String>>(title: T, size: LogicalSize) -> Result<Self, CreationError> { let events_loop = EventsLoop::new(); let output = WindowBuilder::new() .with_title(title) .with_dimensions(size) .build(&events_loop); output.map(|window| Self { events_loop, window, }) } } impl Default for WinitState { /// Makes an 800x600 window with the `WINDOW_NAME` value as the title. /// ## Panics /// If a `CreationError` occurs. 
fn default() -> Self { Self::new( WINDOW_NAME, LogicalSize { width: 800.0, height: 600.0, }, ) .expect("Could not create a window!") } } #[derive(Debug, Clone, Default)] pub struct UserInput { pub end_requested: bool, pub new_frame_size: Option<(f64, f64)>, pub new_mouse_position: Option<(f64, f64)>, } impl UserInput { pub fn poll_events_loop(events_loop: &mut EventsLoop) -> Self { let mut output = UserInput::default(); events_loop.poll_events(|event| match event { Event::WindowEvent { event: WindowEvent::CloseRequested, .. } => output.end_requested = true, Event::WindowEvent { event: WindowEvent::Resized(logical), .. } => { output.new_frame_size = Some((logical.width, logical.height)); } Event::WindowEvent { event: WindowEvent::CursorMoved { position, .. }, .. } => { output.new_mouse_position = Some((position.x, position.y)); } _ => (), }); output } } #[derive(Debug, Clone, Copy, Default)] pub struct LocalState { pub frame_width: f64, pub frame_height: f64, pub mouse_x: f64, pub mouse_y: f64, } impl LocalState { pub fn update_from_input(&mut self, input: UserInput) { if let Some(frame_size) = input.new_frame_size { self.frame_width = frame_size.0; self.frame_height = frame_size.1; } if let Some(position) = input.new_mouse_position { self.mouse_x = position.0; self.mouse_y = position.1; } } } fn do_the_render(hal_state: &mut HalState, local_state: &LocalState) -> Result<(), &'static str> { let x1 = 100.0; let y1 = 100.0; let x2 = local_state.mouse_x as f32; let y2 = local_state.mouse_y as f32; let quad = Quad { x: (x1 / local_state.frame_width as f32) * 2.0 - 1.0, y: (y1 / local_state.frame_height as f32) * 2.0 - 1.0, w: ((x2 - x1) / local_state.frame_width as f32) * 2.0, h: ((y2 - y1) / local_state.frame_height as f32) * 2.0, }; hal_state.draw_quad_frame(quad) } fn main() { simple_logger::init().unwrap(); let mut winit_state = WinitState::default(); let mut hal_state = match HalState::new(&winit_state.window) { Ok(state) => state, Err(e) => panic!(e), }; let 
(frame_width, frame_height) = winit_state .window .get_inner_size() .map(|logical| logical.into()) .unwrap_or((0.0, 0.0)); let mut local_state = LocalState { frame_width, frame_height, mouse_x: 0.0, mouse_y: 0.0, }; loop { let inputs = UserInput::poll_events_loop(&mut winit_state.events_loop); if inputs.end_requested { break; } if inputs.new_frame_size.is_some() { debug!("Window changed size, restarting HalState..."); drop(hal_state); hal_state = match HalState::new(&winit_state.window) { Ok(state) => state, Err(e) => panic!(e), }; } local_state.update_from_input(inputs); if let Err(e) = do_the_render(&mut hal_state, &local_state) { error!("Rendering Error: {:?}", e); debug!("Auto-restarting HalState..."); drop(hal_state); hal_state = match HalState::new(&winit_state.window) { Ok(state) => state, Err(e) => panic!(e), }; } } }
33.343087
99
0.608161
bf841ee2b7600f88deb8b41c0a970f0bccc8e433
50,980
//! See `CompletionContext` structure. use std::iter; use base_db::SourceDatabaseExt; use hir::{ HasAttrs, Local, Name, PathResolution, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo, }; use ide_db::{ active_parameter::ActiveParameter, base_db::{FilePosition, SourceDatabase}, famous_defs::FamousDefs, RootDatabase, }; use rustc_hash::FxHashSet; use syntax::{ algo::{find_node_at_offset, non_trivia_sibling}, ast::{self, AttrKind, HasName, NameOrNameRef}, match_ast, AstNode, NodeOrToken, SyntaxKind::{self, *}, SyntaxNode, SyntaxToken, TextRange, TextSize, T, }; use text_edit::Indel; use crate::{ patterns::{ determine_location, determine_prev_sibling, for_is_prev2, is_in_loop_body, previous_token, ImmediateLocation, ImmediatePrevSibling, }, CompletionConfig, }; const COMPLETION_MARKER: &str = "intellijRulezz"; #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub(crate) enum PatternRefutability { Refutable, Irrefutable, } pub(crate) enum Visible { Yes, Editable, No, } #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub(super) enum PathKind { Expr, Type, Attr { kind: AttrKind, annotated_item_kind: Option<SyntaxKind> }, Derive, Mac, Pat, Vis { has_in_token: bool }, Use, } #[derive(Debug)] pub(crate) struct PathCompletionCtx { /// If this is a call with () already there has_call_parens: bool, /// Whether this path stars with a `::`. pub(super) is_absolute_path: bool, /// The qualifier of the current path if it exists. pub(super) qualifier: Option<PathQualifierCtx>, pub(super) kind: Option<PathKind>, /// Whether the path segment has type args or not. pub(super) has_type_args: bool, /// `true` if we are a statement or a last expr in the block. 
pub(super) can_be_stmt: bool, pub(super) in_loop_body: bool, } #[derive(Debug)] pub(crate) struct PathQualifierCtx { pub(crate) path: ast::Path, pub(crate) resolution: Option<PathResolution>, /// Whether this path consists solely of `super` segments pub(crate) is_super_chain: bool, /// Whether the qualifier comes from a use tree parent or not pub(crate) use_tree_parent: bool, } #[derive(Debug)] pub(super) struct PatternContext { pub(super) refutability: PatternRefutability, pub(super) param_ctx: Option<(ast::ParamList, ast::Param, ParamKind)>, pub(super) has_type_ascription: bool, pub(super) parent_pat: Option<ast::Pat>, pub(super) ref_token: Option<SyntaxToken>, pub(super) mut_token: Option<SyntaxToken>, } #[derive(Debug)] pub(super) enum LifetimeContext { LifetimeParam { is_decl: bool, param: ast::LifetimeParam }, Lifetime, LabelRef, LabelDef, } #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) enum ParamKind { Function(ast::Fn), Closure(ast::ClosureExpr), } /// `CompletionContext` is created early during completion to figure out, where /// exactly is the cursor, syntax-wise. #[derive(Debug)] pub(crate) struct CompletionContext<'a> { pub(super) sema: Semantics<'a, RootDatabase>, pub(super) scope: SemanticsScope<'a>, pub(super) db: &'a RootDatabase, pub(super) config: &'a CompletionConfig, pub(super) position: FilePosition, /// The token before the cursor, in the original file. pub(super) original_token: SyntaxToken, /// The token before the cursor, in the macro-expanded file. pub(super) token: SyntaxToken, /// The crate of the current file. pub(super) krate: Option<hir::Crate>, /// The module of the `scope`. pub(super) module: Option<hir::Module>, pub(super) expected_name: Option<NameOrNameRef>, pub(super) expected_type: Option<Type>, /// The parent function of the cursor position if it exists. pub(super) function_def: Option<ast::Fn>, /// The parent impl of the cursor position if it exists. 
pub(super) impl_def: Option<ast::Impl>, /// The NameLike under the cursor in the original file if it exists. pub(super) name_syntax: Option<ast::NameLike>, pub(super) incomplete_let: bool, pub(super) completion_location: Option<ImmediateLocation>, pub(super) prev_sibling: Option<ImmediatePrevSibling>, pub(super) fake_attribute_under_caret: Option<ast::Attr>, pub(super) previous_token: Option<SyntaxToken>, pub(super) lifetime_ctx: Option<LifetimeContext>, pub(super) pattern_ctx: Option<PatternContext>, pub(super) path_context: Option<PathCompletionCtx>, pub(super) existing_derives: FxHashSet<hir::Macro>, pub(super) locals: Vec<(Name, Local)>, } impl<'a> CompletionContext<'a> { /// The range of the identifier that is being completed. pub(crate) fn source_range(&self) -> TextRange { // check kind of macro-expanded token, but use range of original token let kind = self.token.kind(); match kind { CHAR => { // assume we are completing a lifetime but the user has only typed the ' cov_mark::hit!(completes_if_lifetime_without_idents); TextRange::at(self.original_token.text_range().start(), TextSize::from(1)) } IDENT | LIFETIME_IDENT | UNDERSCORE => self.original_token.text_range(), _ if kind.is_keyword() => self.original_token.text_range(), _ => TextRange::empty(self.position.offset), } } pub(crate) fn previous_token_is(&self, kind: SyntaxKind) -> bool { self.previous_token.as_ref().map_or(false, |tok| tok.kind() == kind) } pub(crate) fn famous_defs(&self) -> FamousDefs { FamousDefs(&self.sema, self.krate) } pub(crate) fn dot_receiver(&self) -> Option<&ast::Expr> { match &self.completion_location { Some( ImmediateLocation::MethodCall { receiver, .. } | ImmediateLocation::FieldAccess { receiver, .. }, ) => receiver.as_ref(), _ => None, } } pub(crate) fn has_dot_receiver(&self) -> bool { matches!( &self.completion_location, Some(ImmediateLocation::FieldAccess { receiver, .. } | ImmediateLocation::MethodCall { receiver,.. 
}) if receiver.is_some() ) } pub(crate) fn expects_assoc_item(&self) -> bool { matches!(self.completion_location, Some(ImmediateLocation::Trait | ImmediateLocation::Impl)) } pub(crate) fn expects_variant(&self) -> bool { matches!(self.completion_location, Some(ImmediateLocation::Variant)) } pub(crate) fn expects_non_trait_assoc_item(&self) -> bool { matches!(self.completion_location, Some(ImmediateLocation::Impl)) } pub(crate) fn expects_item(&self) -> bool { matches!(self.completion_location, Some(ImmediateLocation::ItemList)) } pub(crate) fn expects_generic_arg(&self) -> bool { matches!(self.completion_location, Some(ImmediateLocation::GenericArgList(_))) } pub(crate) fn has_block_expr_parent(&self) -> bool { matches!(self.completion_location, Some(ImmediateLocation::StmtList)) } pub(crate) fn expects_ident_ref_expr(&self) -> bool { matches!(self.completion_location, Some(ImmediateLocation::RefExpr)) } pub(crate) fn expect_field(&self) -> bool { matches!( self.completion_location, Some(ImmediateLocation::RecordField | ImmediateLocation::TupleField) ) } pub(crate) fn has_impl_or_trait_prev_sibling(&self) -> bool { matches!( self.prev_sibling, Some(ImmediatePrevSibling::ImplDefType | ImmediatePrevSibling::TraitDefName) ) } pub(crate) fn has_impl_prev_sibling(&self) -> bool { matches!(self.prev_sibling, Some(ImmediatePrevSibling::ImplDefType)) } pub(crate) fn has_visibility_prev_sibling(&self) -> bool { matches!(self.prev_sibling, Some(ImmediatePrevSibling::Visibility)) } pub(crate) fn after_if(&self) -> bool { matches!(self.prev_sibling, Some(ImmediatePrevSibling::IfExpr)) } pub(crate) fn is_path_disallowed(&self) -> bool { self.previous_token_is(T![unsafe]) || matches!( self.prev_sibling, Some(ImmediatePrevSibling::Attribute | ImmediatePrevSibling::Visibility) ) || matches!( self.completion_location, Some( ImmediateLocation::ModDeclaration(_) | ImmediateLocation::RecordPat(_) | ImmediateLocation::RecordExpr(_) | ImmediateLocation::Rename ) ) } pub(crate) fn 
expects_expression(&self) -> bool { matches!(self.path_context, Some(PathCompletionCtx { kind: Some(PathKind::Expr), .. })) } pub(crate) fn expects_type(&self) -> bool { matches!(self.path_context, Some(PathCompletionCtx { kind: Some(PathKind::Type), .. })) } pub(crate) fn path_is_call(&self) -> bool { self.path_context.as_ref().map_or(false, |it| it.has_call_parens) } pub(crate) fn is_non_trivial_path(&self) -> bool { matches!( self.path_context, Some( PathCompletionCtx { is_absolute_path: true, .. } | PathCompletionCtx { qualifier: Some(_), .. } ) ) } pub(crate) fn path_qual(&self) -> Option<&ast::Path> { self.path_context.as_ref().and_then(|it| it.qualifier.as_ref().map(|it| &it.path)) } pub(crate) fn path_kind(&self) -> Option<PathKind> { self.path_context.as_ref().and_then(|it| it.kind) } /// Checks if an item is visible and not `doc(hidden)` at the completion site. pub(crate) fn is_visible<I>(&self, item: &I) -> Visible where I: hir::HasVisibility + hir::HasAttrs + hir::HasCrate + Copy, { self.is_visible_impl(&item.visibility(self.db), &item.attrs(self.db), item.krate(self.db)) } pub(crate) fn is_scope_def_hidden(&self, scope_def: ScopeDef) -> bool { if let (Some(attrs), Some(krate)) = (scope_def.attrs(self.db), scope_def.krate(self.db)) { return self.is_doc_hidden(&attrs, krate); } false } /// Check if an item is `#[doc(hidden)]`. pub(crate) fn is_item_hidden(&self, item: &hir::ItemInNs) -> bool { let attrs = item.attrs(self.db); let krate = item.krate(self.db); match (attrs, krate) { (Some(attrs), Some(krate)) => self.is_doc_hidden(&attrs, krate), _ => false, } } pub(crate) fn is_immediately_after_macro_bang(&self) -> bool { self.token.kind() == BANG && self.token.parent().map_or(false, |it| it.kind() == MACRO_CALL) } /// Whether the given trait is an operator trait or not. 
pub(crate) fn is_ops_trait(&self, trait_: hir::Trait) -> bool { match trait_.attrs(self.db).lang() { Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang.as_str()), None => false, } } /// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items. pub(crate) fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) { let _p = profile::span("CompletionContext::process_all_names"); self.scope.process_all_names(&mut |name, def| { if self.is_scope_def_hidden(def) { return; } f(name, def); }) } fn is_visible_impl( &self, vis: &hir::Visibility, attrs: &hir::Attrs, defining_crate: hir::Crate, ) -> Visible { let module = match self.module { Some(it) => it, None => return Visible::No, }; if !vis.is_visible_from(self.db, module.into()) { if !self.config.enable_private_editable { return Visible::No; } // If the definition location is editable, also show private items let root_file = defining_crate.root_file(self.db); let source_root_id = self.db.file_source_root(root_file); let is_editable = !self.db.source_root(source_root_id).is_library; return if is_editable { Visible::Editable } else { Visible::No }; } if self.is_doc_hidden(attrs, defining_crate) { Visible::No } else { Visible::Yes } } fn is_doc_hidden(&self, attrs: &hir::Attrs, defining_crate: hir::Crate) -> bool { let krate = match self.krate { Some(it) => it, None => return true, }; if krate != defining_crate && attrs.has_doc_hidden() { // `doc(hidden)` items are only completed within the defining crate. return true; } false } } // CompletionContext construction impl<'a> CompletionContext<'a> { pub(super) fn new( db: &'a RootDatabase, position @ FilePosition { file_id, offset }: FilePosition, config: &'a CompletionConfig, ) -> Option<CompletionContext<'a>> { let _p = profile::span("CompletionContext::new"); let sema = Semantics::new(db); let original_file = sema.parse(file_id); // Insert a fake ident to get a valid parse tree. 
We will use this file // to determine context, though the original_file will be used for // actual completion. let file_with_fake_ident = { let parse = db.parse(file_id); let edit = Indel::insert(offset, COMPLETION_MARKER.to_string()); parse.reparse(&edit).tree() }; let fake_ident_token = file_with_fake_ident.syntax().token_at_offset(offset).right_biased()?; let original_token = original_file.syntax().token_at_offset(offset).left_biased()?; let token = sema.descend_into_macros_single(original_token.clone()); let scope = sema.scope_at_offset(&token.parent()?, offset); let krate = scope.krate(); let module = scope.module(); let mut locals = vec![]; scope.process_all_names(&mut |name, scope| { if let ScopeDef::Local(local) = scope { locals.push((name, local)); } }); let mut ctx = CompletionContext { sema, scope, db, config, position, original_token, token, krate, module, expected_name: None, expected_type: None, function_def: None, impl_def: None, name_syntax: None, lifetime_ctx: None, pattern_ctx: None, completion_location: None, prev_sibling: None, fake_attribute_under_caret: None, previous_token: None, path_context: None, locals, incomplete_let: false, existing_derives: Default::default(), }; ctx.expand_and_fill( original_file.syntax().clone(), file_with_fake_ident.syntax().clone(), offset, fake_ident_token, ); Some(ctx) } /// Do the attribute expansion at the current cursor position for both original file and fake file /// as long as possible. As soon as one of the two expansions fail we stop to stay in sync. 
fn expand_and_fill( &mut self, mut original_file: SyntaxNode, mut speculative_file: SyntaxNode, mut offset: TextSize, mut fake_ident_token: SyntaxToken, ) { let _p = profile::span("CompletionContext::expand_and_fill"); let mut derive_ctx = None; 'expansion: loop { let parent_item = |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast); let ancestor_items = iter::successors( Option::zip( find_node_at_offset::<ast::Item>(&original_file, offset), find_node_at_offset::<ast::Item>(&speculative_file, offset), ), |(a, b)| parent_item(a).zip(parent_item(b)), ); for (actual_item, item_with_fake_ident) in ancestor_items { match ( self.sema.expand_attr_macro(&actual_item), self.sema.speculative_expand_attr_macro( &actual_item, &item_with_fake_ident, fake_ident_token.clone(), ), ) { // maybe parent items have attributes (None, None) => (), // successful expansions (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => { let new_offset = fake_mapped_token.text_range().start(); if new_offset > actual_expansion.text_range().end() { break 'expansion; } original_file = actual_expansion; speculative_file = fake_expansion; fake_ident_token = fake_mapped_token; offset = new_offset; continue 'expansion; } // exactly one expansion failed, inconsistent state so stop expanding completely _ => break 'expansion, } } let orig_tt = match find_node_at_offset::<ast::TokenTree>(&original_file, offset) { Some(it) => it, None => break, }; let spec_tt = match find_node_at_offset::<ast::TokenTree>(&speculative_file, offset) { Some(it) => it, None => break, }; // Expand pseudo-derive expansion if let (Some(orig_attr), Some(spec_attr)) = ( orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()), spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()), ) { match ( self.sema.expand_derive_as_pseudo_attr_macro(&orig_attr), self.sema.speculative_expand_derive_as_pseudo_attr_macro( &orig_attr, &spec_attr, 
fake_ident_token.clone(), ), ) { // Clearly not a derive macro (None, None) => (), // successful expansions (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => { let new_offset = fake_mapped_token.text_range().start(); derive_ctx = Some((actual_expansion, fake_expansion, new_offset)); break 'expansion; } // exactly one expansion failed, inconsistent state so stop expanding completely _ => break 'expansion, } } // Expand fn-like macro calls if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = ( orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast), spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast), ) { let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text()); let mac_call_path1 = macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text()); if mac_call_path0 != mac_call_path1 { break; } let speculative_args = match macro_call_with_fake_ident.token_tree() { Some(tt) => tt, None => break, }; match ( self.sema.expand(&actual_macro_call), self.sema.speculative_expand( &actual_macro_call, &speculative_args, fake_ident_token.clone(), ), ) { // successful expansions (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => { let new_offset = fake_mapped_token.text_range().start(); if new_offset > actual_expansion.text_range().end() { break; } original_file = actual_expansion; speculative_file = fake_expansion; fake_ident_token = fake_mapped_token; offset = new_offset; continue; } _ => break, } } break; } self.fill(&original_file, speculative_file, offset, derive_ctx); } fn expected_type_and_name(&self) -> (Option<Type>, Option<NameOrNameRef>) { let mut node = match self.token.parent() { Some(it) => it, None => return (None, None), }; loop { break match_ast! 
{ match node { ast::LetStmt(it) => { cov_mark::hit!(expected_type_let_with_leading_char); cov_mark::hit!(expected_type_let_without_leading_char); let ty = it.pat() .and_then(|pat| self.sema.type_of_pat(&pat)) .or_else(|| it.initializer().and_then(|it| self.sema.type_of_expr(&it))) .map(TypeInfo::original); let name = match it.pat() { Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name), Some(_) | None => None, }; (ty, name) }, ast::LetExpr(it) => { cov_mark::hit!(expected_type_if_let_without_leading_char); let ty = it.pat() .and_then(|pat| self.sema.type_of_pat(&pat)) .or_else(|| it.expr().and_then(|it| self.sema.type_of_expr(&it))) .map(TypeInfo::original); (ty, None) }, ast::ArgList(_) => { cov_mark::hit!(expected_type_fn_param); ActiveParameter::at_token( &self.sema, self.token.clone(), ).map(|ap| { let name = ap.ident().map(NameOrNameRef::Name); let ty = if has_ref(&self.token) { cov_mark::hit!(expected_type_fn_param_ref); ap.ty.remove_ref() } else { Some(ap.ty) }; (ty, name) }) .unwrap_or((None, None)) }, ast::RecordExprFieldList(it) => { // wouldn't try {} be nice... (|| { if self.token.kind() == T![..] || self.token.prev_token().map(|t| t.kind()) == Some(T![..]) { cov_mark::hit!(expected_type_struct_func_update); let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?; let ty = self.sema.type_of_expr(&record_expr.into())?; Some(( Some(ty.original), None )) } else { cov_mark::hit!(expected_type_struct_field_without_leading_char); let expr_field = self.token.prev_sibling_or_token()? 
.into_node() .and_then(ast::RecordExprField::cast)?; let (_, _, ty) = self.sema.resolve_record_field(&expr_field)?; Some(( Some(ty), expr_field.field_name().map(NameOrNameRef::NameRef), )) } })().unwrap_or((None, None)) }, ast::RecordExprField(it) => { if let Some(expr) = it.expr() { cov_mark::hit!(expected_type_struct_field_with_leading_char); ( self.sema.type_of_expr(&expr).map(TypeInfo::original), it.field_name().map(NameOrNameRef::NameRef), ) } else { cov_mark::hit!(expected_type_struct_field_followed_by_comma); let ty = self.sema.resolve_record_field(&it) .map(|(_, _, ty)| ty); ( ty, it.field_name().map(NameOrNameRef::NameRef), ) } }, ast::MatchExpr(it) => { cov_mark::hit!(expected_type_match_arm_without_leading_char); let ty = it.expr().and_then(|e| self.sema.type_of_expr(&e)).map(TypeInfo::original); (ty, None) }, ast::IfExpr(it) => { let ty = it.condition() .and_then(|e| self.sema.type_of_expr(&e)) .map(TypeInfo::original); (ty, None) }, ast::IdentPat(it) => { cov_mark::hit!(expected_type_if_let_with_leading_char); cov_mark::hit!(expected_type_match_arm_with_leading_char); let ty = self.sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original); (ty, None) }, ast::Fn(it) => { cov_mark::hit!(expected_type_fn_ret_with_leading_char); cov_mark::hit!(expected_type_fn_ret_without_leading_char); let def = self.sema.to_def(&it); (def.map(|def| def.ret_type(self.db)), None) }, ast::ClosureExpr(it) => { let ty = self.sema.type_of_expr(&it.into()); ty.and_then(|ty| ty.original.as_callable(self.db)) .map(|c| (Some(c.return_type()), None)) .unwrap_or((None, None)) }, ast::ParamList(_) => (None, None), ast::Stmt(_) => (None, None), ast::Item(_) => (None, None), _ => { match node.parent() { Some(n) => { node = n; continue; }, None => (None, None), } }, } }; } } fn fill( &mut self, original_file: &SyntaxNode, file_with_fake_ident: SyntaxNode, offset: TextSize, derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize)>, ) { let fake_ident_token = 
file_with_fake_ident.token_at_offset(offset).right_biased().unwrap(); let syntax_element = NodeOrToken::Token(fake_ident_token); if for_is_prev2(syntax_element.clone()) { // for pat $0 // there is nothing to complete here except `in` keyword // don't bother populating the context // FIXME: the completion calculations should end up good enough // such that this special case becomes unnecessary return; } self.previous_token = previous_token(syntax_element.clone()); self.fake_attribute_under_caret = syntax_element.ancestors().find_map(ast::Attr::cast); self.incomplete_let = syntax_element.ancestors().take(6).find_map(ast::LetStmt::cast).map_or(false, |it| { it.syntax().text_range().end() == syntax_element.text_range().end() }); (self.expected_type, self.expected_name) = self.expected_type_and_name(); // Overwrite the path kind for derives if let Some((original_file, file_with_fake_ident, offset)) = derive_ctx { let attr = self .sema .token_ancestors_with_macros(self.token.clone()) .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) .find_map(ast::Attr::cast); if let Some(attr) = &attr { self.existing_derives = self.sema.resolve_derive_macro(attr).into_iter().flatten().flatten().collect(); } if let Some(ast::NameLike::NameRef(name_ref)) = find_node_at_offset(&file_with_fake_ident, offset) { self.name_syntax = find_node_at_offset(&original_file, name_ref.syntax().text_range().start()); if let Some((path_ctx, _)) = Self::classify_name_ref(&self.sema, &original_file, name_ref) { self.path_context = Some(PathCompletionCtx { kind: Some(PathKind::Derive), ..path_ctx }); } } return; } let name_like = match find_node_at_offset(&file_with_fake_ident, offset) { Some(it) => it, None => return, }; self.completion_location = determine_location(&self.sema, original_file, offset, &name_like); self.prev_sibling = determine_prev_sibling(&name_like); self.name_syntax = find_node_at_offset(original_file, name_like.syntax().text_range().start()); self.impl_def = self .sema 
.token_ancestors_with_macros(self.token.clone()) .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) .find_map(ast::Impl::cast); self.function_def = self .sema .token_ancestors_with_macros(self.token.clone()) .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) .find_map(ast::Fn::cast); match name_like { ast::NameLike::Lifetime(lifetime) => { self.lifetime_ctx = Self::classify_lifetime(&self.sema, original_file, lifetime); } ast::NameLike::NameRef(name_ref) => { if let Some((path_ctx, pat_ctx)) = Self::classify_name_ref(&self.sema, original_file, name_ref) { self.path_context = Some(path_ctx); self.pattern_ctx = pat_ctx; } } ast::NameLike::Name(name) => { self.pattern_ctx = Self::classify_name(&self.sema, original_file, name); } } } fn classify_lifetime( _sema: &Semantics<RootDatabase>, _original_file: &SyntaxNode, lifetime: ast::Lifetime, ) -> Option<LifetimeContext> { let parent = lifetime.syntax().parent()?; if parent.kind() == ERROR { return None; } Some(match_ast! 
{ match parent { ast::LifetimeParam(param) => LifetimeContext::LifetimeParam { is_decl: param.lifetime().as_ref() == Some(&lifetime), param }, ast::BreakExpr(_) => LifetimeContext::LabelRef, ast::ContinueExpr(_) => LifetimeContext::LabelRef, ast::Label(_) => LifetimeContext::LabelDef, _ => LifetimeContext::Lifetime, } }) } fn classify_name( _sema: &Semantics<RootDatabase>, original_file: &SyntaxNode, name: ast::Name, ) -> Option<PatternContext> { let bind_pat = name.syntax().parent().and_then(ast::IdentPat::cast)?; let is_name_in_field_pat = bind_pat .syntax() .parent() .and_then(ast::RecordPatField::cast) .map_or(false, |pat_field| pat_field.name_ref().is_none()); if is_name_in_field_pat { return None; } Some(pattern_context_for(original_file, bind_pat.into())) } fn classify_name_ref( sema: &Semantics<RootDatabase>, original_file: &SyntaxNode, name_ref: ast::NameRef, ) -> Option<(PathCompletionCtx, Option<PatternContext>)> { let parent = name_ref.syntax().parent()?; let segment = ast::PathSegment::cast(parent)?; let path = segment.parent_path(); let mut path_ctx = PathCompletionCtx { has_call_parens: false, is_absolute_path: false, qualifier: None, has_type_args: false, can_be_stmt: false, in_loop_body: false, kind: None, }; let mut pat_ctx = None; path_ctx.in_loop_body = is_in_loop_body(name_ref.syntax()); path_ctx.kind = path.syntax().ancestors().find_map(|it| { // using Option<Option<PathKind>> as extra controlflow let kind = match_ast! 
{ match it { ast::PathType(_) => Some(PathKind::Type), ast::PathExpr(it) => { path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind())); Some(PathKind::Expr) }, ast::TupleStructPat(it) => { path_ctx.has_call_parens = true; pat_ctx = Some(pattern_context_for(original_file, it.into())); Some(PathKind::Pat) }, ast::RecordPat(it) => { pat_ctx = Some(pattern_context_for(original_file, it.into())); Some(PathKind::Pat) }, ast::PathPat(it) => { pat_ctx = Some(pattern_context_for(original_file, it.into())); Some(PathKind::Pat) }, ast::MacroCall(it) => it.excl_token().and(Some(PathKind::Mac)), ast::Meta(meta) => (|| { let attr = meta.parent_attr()?; let kind = attr.kind(); let attached = attr.syntax().parent()?; let is_trailing_outer_attr = kind != AttrKind::Inner && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next).is_none(); let annotated_item_kind = if is_trailing_outer_attr { None } else { Some(attached.kind()) }; Some(PathKind::Attr { kind, annotated_item_kind, }) })(), ast::Visibility(it) => Some(PathKind::Vis { has_in_token: it.in_token().is_some() }), ast::UseTree(_) => Some(PathKind::Use), _ => return None, } }; Some(kind) }).flatten(); path_ctx.has_type_args = segment.generic_arg_list().is_some(); if let Some((path, use_tree_parent)) = path_or_use_tree_qualifier(&path) { if !use_tree_parent { path_ctx.is_absolute_path = path.top_path().segment().map_or(false, |it| it.coloncolon_token().is_some()); } let path = path .segment() .and_then(|it| find_node_in_file(original_file, &it)) .map(|it| it.parent_path()); path_ctx.qualifier = path.map(|path| { let res = sema.resolve_path(&path); let is_super_chain = iter::successors(Some(path.clone()), |p| p.qualifier()) .all(|p| p.segment().and_then(|s| s.super_token()).is_some()); PathQualifierCtx { path, resolution: res, is_super_chain, use_tree_parent } }); return Some((path_ctx, pat_ctx)); } if let Some(segment) = path.segment() { if 
segment.coloncolon_token().is_some() { path_ctx.is_absolute_path = true; return Some((path_ctx, pat_ctx)); } } // Find either enclosing expr statement (thing with `;`) or a // block. If block, check that we are the last expr. path_ctx.can_be_stmt = name_ref .syntax() .ancestors() .find_map(|node| { if let Some(stmt) = ast::ExprStmt::cast(node.clone()) { return Some(stmt.syntax().text_range() == name_ref.syntax().text_range()); } if let Some(stmt_list) = ast::StmtList::cast(node) { return Some( stmt_list.tail_expr().map(|e| e.syntax().text_range()) == Some(name_ref.syntax().text_range()), ); } None }) .unwrap_or(false); Some((path_ctx, pat_ctx)) } } fn pattern_context_for(original_file: &SyntaxNode, pat: ast::Pat) -> PatternContext { let mut is_param = None; let (refutability, has_type_ascription) = pat .syntax() .ancestors() .skip_while(|it| ast::Pat::can_cast(it.kind())) .next() .map_or((PatternRefutability::Irrefutable, false), |node| { let refutability = match_ast! { match node { ast::LetStmt(let_) => return (PatternRefutability::Irrefutable, let_.ty().is_some()), ast::Param(param) => { let has_type_ascription = param.ty().is_some(); is_param = (|| { let fake_param_list = param.syntax().parent().and_then(ast::ParamList::cast)?; let param_list = find_node_in_file_compensated(original_file, &fake_param_list)?; let param_list_owner = param_list.syntax().parent()?; let kind = match_ast! 
{ match param_list_owner { ast::ClosureExpr(closure) => ParamKind::Closure(closure), ast::Fn(fn_) => ParamKind::Function(fn_), _ => return None, } }; Some((param_list, param, kind)) })(); return (PatternRefutability::Irrefutable, has_type_ascription) }, ast::MatchArm(_) => PatternRefutability::Refutable, ast::LetExpr(_) => PatternRefutability::Refutable, ast::ForExpr(_) => PatternRefutability::Irrefutable, _ => PatternRefutability::Irrefutable, } }; (refutability, false) }); let (ref_token, mut_token) = match &pat { ast::Pat::IdentPat(it) => (it.ref_token(), it.mut_token()), _ => (None, None), }; PatternContext { refutability, param_ctx: is_param, has_type_ascription, parent_pat: pat.syntax().parent().and_then(ast::Pat::cast), mut_token, ref_token, } } fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> { let syntax_range = syntax.text_range(); let range = node.syntax().text_range(); let intersection = range.intersect(syntax_range)?; syntax.covering_element(intersection).ancestors().find_map(N::cast) } /// Compensates for the offset introduced by the fake ident /// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead. 
fn find_node_in_file_compensated<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> { let syntax_range = syntax.text_range(); let range = node.syntax().text_range(); let end = range.end().checked_sub(TextSize::try_from(COMPLETION_MARKER.len()).ok()?)?; if end < range.start() { return None; } let range = TextRange::new(range.start(), end); // our inserted ident could cause `range` to be go outside of the original syntax, so cap it let intersection = range.intersect(syntax_range)?; syntax.covering_element(intersection).ancestors().find_map(N::cast) } fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> { if let Some(qual) = path.qualifier() { return Some((qual, false)); } let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?; let use_tree = use_tree_list.syntax().parent().and_then(ast::UseTree::cast)?; Some((use_tree.path()?, true)) } fn has_ref(token: &SyntaxToken) -> bool { let mut token = token.clone(); for skip in [IDENT, WHITESPACE, T![mut]] { if token.kind() == skip { token = match token.prev_token() { Some(it) => it, None => return false, } } } token.kind() == T![&] } const OP_TRAIT_LANG_NAMES: &[&str] = &[ "add_assign", "add", "bitand_assign", "bitand", "bitor_assign", "bitor", "bitxor_assign", "bitxor", "deref_mut", "deref", "div_assign", "div", "eq", "fn_mut", "fn_once", "fn", "index_mut", "index", "mul_assign", "mul", "neg", "not", "partial_ord", "rem_assign", "rem", "shl_assign", "shl", "shr_assign", "shr", "sub", ]; #[cfg(test)] mod tests { use expect_test::{expect, Expect}; use hir::HirDisplay; use crate::tests::{position, TEST_CONFIG}; use super::CompletionContext; fn check_expected_type_and_name(ra_fixture: &str, expect: Expect) { let (db, pos) = position(ra_fixture); let config = TEST_CONFIG; let completion_context = CompletionContext::new(&db, pos, &config).unwrap(); let ty = completion_context .expected_type .map(|t| t.display_test(&db).to_string()) .unwrap_or("?".to_owned()); let name = 
completion_context .expected_name .map_or_else(|| "?".to_owned(), |name| name.to_string()); expect.assert_eq(&format!("ty: {}, name: {}", ty, name)); } #[test] fn expected_type_let_without_leading_char() { cov_mark::check!(expected_type_let_without_leading_char); check_expected_type_and_name( r#" fn foo() { let x: u32 = $0; } "#, expect![[r#"ty: u32, name: x"#]], ); } #[test] fn expected_type_let_with_leading_char() { cov_mark::check!(expected_type_let_with_leading_char); check_expected_type_and_name( r#" fn foo() { let x: u32 = c$0; } "#, expect![[r#"ty: u32, name: x"#]], ); } #[test] fn expected_type_let_pat() { check_expected_type_and_name( r#" fn foo() { let x$0 = 0u32; } "#, expect![[r#"ty: u32, name: ?"#]], ); check_expected_type_and_name( r#" fn foo() { let $0 = 0u32; } "#, expect![[r#"ty: u32, name: ?"#]], ); } #[test] fn expected_type_fn_param() { cov_mark::check!(expected_type_fn_param); check_expected_type_and_name( r#" fn foo() { bar($0); } fn bar(x: u32) {} "#, expect![[r#"ty: u32, name: x"#]], ); check_expected_type_and_name( r#" fn foo() { bar(c$0); } fn bar(x: u32) {} "#, expect![[r#"ty: u32, name: x"#]], ); } #[test] fn expected_type_fn_param_ref() { cov_mark::check!(expected_type_fn_param_ref); check_expected_type_and_name( r#" fn foo() { bar(&$0); } fn bar(x: &u32) {} "#, expect![[r#"ty: u32, name: x"#]], ); check_expected_type_and_name( r#" fn foo() { bar(&mut $0); } fn bar(x: &mut u32) {} "#, expect![[r#"ty: u32, name: x"#]], ); check_expected_type_and_name( r#" fn foo() { bar(& c$0); } fn bar(x: &u32) {} "#, expect![[r#"ty: u32, name: x"#]], ); check_expected_type_and_name( r#" fn foo() { bar(&mut c$0); } fn bar(x: &mut u32) {} "#, expect![[r#"ty: u32, name: x"#]], ); check_expected_type_and_name( r#" fn foo() { bar(&c$0); } fn bar(x: &u32) {} "#, expect![[r#"ty: u32, name: x"#]], ); } #[test] fn expected_type_struct_field_without_leading_char() { cov_mark::check!(expected_type_struct_field_without_leading_char); check_expected_type_and_name( 
r#" struct Foo { a: u32 } fn foo() { Foo { a: $0 }; } "#, expect![[r#"ty: u32, name: a"#]], ) } #[test] fn expected_type_struct_field_followed_by_comma() { cov_mark::check!(expected_type_struct_field_followed_by_comma); check_expected_type_and_name( r#" struct Foo { a: u32 } fn foo() { Foo { a: $0, }; } "#, expect![[r#"ty: u32, name: a"#]], ) } #[test] fn expected_type_generic_struct_field() { check_expected_type_and_name( r#" struct Foo<T> { a: T } fn foo() -> Foo<u32> { Foo { a: $0 } } "#, expect![[r#"ty: u32, name: a"#]], ) } #[test] fn expected_type_struct_field_with_leading_char() { cov_mark::check!(expected_type_struct_field_with_leading_char); check_expected_type_and_name( r#" struct Foo { a: u32 } fn foo() { Foo { a: c$0 }; } "#, expect![[r#"ty: u32, name: a"#]], ); } #[test] fn expected_type_match_arm_without_leading_char() { cov_mark::check!(expected_type_match_arm_without_leading_char); check_expected_type_and_name( r#" enum E { X } fn foo() { match E::X { $0 } } "#, expect![[r#"ty: E, name: ?"#]], ); } #[test] fn expected_type_match_arm_with_leading_char() { cov_mark::check!(expected_type_match_arm_with_leading_char); check_expected_type_and_name( r#" enum E { X } fn foo() { match E::X { c$0 } } "#, expect![[r#"ty: E, name: ?"#]], ); } #[test] fn expected_type_if_let_without_leading_char() { cov_mark::check!(expected_type_if_let_without_leading_char); check_expected_type_and_name( r#" enum Foo { Bar, Baz, Quux } fn foo() { let f = Foo::Quux; if let $0 = f { } } "#, expect![[r#"ty: Foo, name: ?"#]], ) } #[test] fn expected_type_if_let_with_leading_char() { cov_mark::check!(expected_type_if_let_with_leading_char); check_expected_type_and_name( r#" enum Foo { Bar, Baz, Quux } fn foo() { let f = Foo::Quux; if let c$0 = f { } } "#, expect![[r#"ty: Foo, name: ?"#]], ) } #[test] fn expected_type_fn_ret_without_leading_char() { cov_mark::check!(expected_type_fn_ret_without_leading_char); check_expected_type_and_name( r#" fn foo() -> u32 { $0 } "#, 
expect![[r#"ty: u32, name: ?"#]], ) } #[test] fn expected_type_fn_ret_with_leading_char() { cov_mark::check!(expected_type_fn_ret_with_leading_char); check_expected_type_and_name( r#" fn foo() -> u32 { c$0 } "#, expect![[r#"ty: u32, name: ?"#]], ) } #[test] fn expected_type_fn_ret_fn_ref_fully_typed() { check_expected_type_and_name( r#" fn foo() -> u32 { foo$0 } "#, expect![[r#"ty: u32, name: ?"#]], ) } #[test] fn expected_type_closure_param_return() { // FIXME: make this work with `|| $0` check_expected_type_and_name( r#" //- minicore: fn fn foo() { bar(|| a$0); } fn bar(f: impl FnOnce() -> u32) {} "#, expect![[r#"ty: u32, name: ?"#]], ); } #[test] fn expected_type_generic_function() { check_expected_type_and_name( r#" fn foo() { bar::<u32>($0); } fn bar<T>(t: T) {} "#, expect![[r#"ty: u32, name: t"#]], ); } #[test] fn expected_type_generic_method() { check_expected_type_and_name( r#" fn foo() { S(1u32).bar($0); } struct S<T>(T); impl<T> S<T> { fn bar(self, t: T) {} } "#, expect![[r#"ty: u32, name: t"#]], ); } #[test] fn expected_type_functional_update() { cov_mark::check!(expected_type_struct_func_update); check_expected_type_and_name( r#" struct Foo { field: u32 } fn foo() { Foo { ..$0 } } "#, expect![[r#"ty: Foo, name: ?"#]], ); } #[test] fn expected_type_param_pat() { check_expected_type_and_name( r#" struct Foo { field: u32 } fn foo(a$0: Foo) {} "#, expect![[r#"ty: Foo, name: ?"#]], ); check_expected_type_and_name( r#" struct Foo { field: u32 } fn foo($0: Foo) {} "#, // FIXME make this work, currently fails due to pattern recovery eating the `:` expect![[r#"ty: ?, name: ?"#]], ); } }
34.72752
127
0.521106
14ecf27813d3109bb775b037c81465aa9fc99ba3
1,746
//! The Rust parser and macro expander. //! //! # Note //! //! This API is completely unstable and subject to change. #![doc( html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/", test(attr(deny(warnings))) )] #![feature(box_syntax)] #![feature(box_patterns)] #![cfg_attr(bootstrap, feature(const_fn_unsize))] #![feature(const_fn_transmute)] #![feature(const_panic)] #![feature(crate_visibility_modifier)] #![feature(iter_zip)] #![feature(label_break_value)] #![feature(nll)] #![feature(min_specialization)] #![feature(trusted_step)] #![recursion_limit = "256"] #[macro_use] extern crate rustc_macros; #[macro_export] macro_rules! unwrap_or { ($opt:expr, $default:expr) => { match $opt { Some(x) => x, None => $default, } }; } pub mod util { pub mod classify; pub mod comments; pub mod literal; pub mod parser; } pub mod ast; pub mod ast_like; pub mod attr; pub mod entry; pub mod expand; pub mod mut_visit; pub mod node_id; pub mod ptr; pub mod token; pub mod tokenstream; pub mod visit; pub use self::ast::*; pub use self::ast_like::AstLike; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; /// Requirements for a `StableHashingContext` to be used in this crate. /// This is a hack to allow using the `HashStable_Generic` derive macro /// instead of implementing everything in `rustc_middle`. pub trait HashStableContext: rustc_span::HashStableContext { fn hash_attr(&mut self, _: &ast::Attribute, hasher: &mut StableHasher); } impl<AstCtx: crate::HashStableContext> HashStable<AstCtx> for ast::Attribute { fn hash_stable(&self, hcx: &mut AstCtx, hasher: &mut StableHasher) { hcx.hash_attr(self, hasher) } }
23.917808
78
0.690722
f7750a7462e6bc3f9d6c530adb3a7f8d893ff102
2,706
use timely::order::TotalOrder; use timely::dataflow::*; use timely::dataflow::operators::probe::Handle as ProbeHandle; use differential_dataflow::operators::*; use differential_dataflow::operators::arrange::ArrangeBySelf; use differential_dataflow::difference::DiffPair; use differential_dataflow::lattice::Lattice; use {Arrangements, Experiment, Collections}; use ::types::create_date; // -- $ID$ // -- TPC-H/TPC-R Promotion Effect Query (Q14) // -- Functional Query Definition // -- Approved February 1998 // :x // :o // select // 100.00 * sum(case // when p_type like 'PROMO%' // then l_extendedprice * (1 - l_discount) // else 0 // end) / sum(l_extendedprice * (1 - l_discount)) as promo_revenue // from // lineitem, // part // where // l_partkey = p_partkey // and l_shipdate >= date ':1' // and l_shipdate < date ':1' + interval '1' month; // :n -1 fn starts_with(source: &[u8], query: &[u8]) -> bool { source.len() >= query.len() && &source[..query.len()] == query } pub fn query<G: Scope>(collections: &mut Collections<G>, probe: &mut ProbeHandle<G::Timestamp>) where G::Timestamp: Lattice+TotalOrder+Ord { let lineitems = collections .lineitems() .explode(|l| if create_date(1995,9,1) <= l.ship_date && l.ship_date < create_date(1995,10,1) { Some((l.part_key, (l.extended_price * (100 - l.discount) / 100) as isize )) } else { None } ) .arrange_by_self(); collections .parts() .explode(|p| Some((p.part_key, DiffPair::new(1, if starts_with(&p.typ.as_bytes(), b"PROMO") { 1 } else { 0 })))) .arrange_by_self() .join_core(&lineitems, |&_part_key, _, _| Some(())) .count_total() // .inspect(|x| println!("{:?}", x)) .probe_with(probe); } pub fn query_arranged<G: Scope<Timestamp=usize>>( scope: &mut G, probe: &mut ProbeHandle<usize>, experiment: &mut Experiment, arrangements: &mut Arrangements, ) where G::Timestamp: Lattice+TotalOrder+Ord { let arrangements = arrangements.in_scope(scope, experiment); experiment .lineitem(scope) .explode(|l| if create_date(1995,9,1) <= l.ship_date && 
l.ship_date < create_date(1995,10,1) { Some((l.part_key, (l.extended_price * (100 - l.discount) / 100) as isize )) } else { None } ) .arrange_by_self() .join_core(&arrangements.part, |_pk,&(),p| Some(DiffPair::new(1, if starts_with(&p.typ.as_bytes(), b"PROMO") { 1 } else { 0 }))) .explode(|dp| Some(((),dp))) .count_total() .probe_with(probe); }
31.465116
136
0.596083
6ab6888e16b1b2b0a3fe176a858022d72f09770e
29,701
// Copyright 2019 The Grin Developers // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use crate::api::TLSConfig; use crate::config::GRIN_WALLET_DIR; use crate::util::file::get_first_line; use crate::util::{to_hex, Mutex, ZeroingString}; /// Argument parsing and error handling for wallet commands use clap::ArgMatches; use failure::Fail; use grin_wallet_config::{TorConfig, WalletConfig}; use grin_wallet_controller::command; use grin_wallet_controller::{Error, ErrorKind}; use grin_wallet_impls::tor::config::is_tor_address; use grin_wallet_impls::{DefaultLCProvider, DefaultWalletImpl}; use grin_wallet_impls::{PathToSlate, SlateGetter as _}; use grin_wallet_libwallet::Slate; use grin_wallet_libwallet::{ address, IssueInvoiceTxArgs, NodeClient, WalletInst, WalletLCProvider, }; use grin_wallet_util::grin_core as core; use grin_wallet_util::grin_core::core::amount_to_hr_string; use grin_wallet_util::grin_core::global; use grin_wallet_util::grin_keychain as keychain; use linefeed::terminal::Signal; use linefeed::{Interface, ReadResult}; use rpassword; use std::env; use std::path::{Path, PathBuf}; use std::sync::Arc; // define what to do on argument error macro_rules! 
arg_parse { ( $r:expr ) => { match $r { Ok(res) => res, Err(e) => { return Err(ErrorKind::ArgumentError(format!("{}", e)).into()); } } }; } /// Simple error definition, just so we can return errors from all commands /// and let the caller figure out what to do #[derive(Clone, Eq, PartialEq, Debug, Fail)] pub enum ParseError { #[fail(display = "Invalid Arguments: {}", _0)] ArgumentError(String), #[fail(display = "Parsing IO error: {}", _0)] IOError(String), #[fail(display = "User Cancelled")] CancelledError, } impl From<std::io::Error> for ParseError { fn from(e: std::io::Error) -> ParseError { ParseError::IOError(format!("{}", e)) } } fn prompt_password_stdout(prompt: &str) -> ZeroingString { ZeroingString::from(rpassword::prompt_password_stdout(prompt).unwrap()) } pub fn prompt_password(password: &Option<ZeroingString>) -> ZeroingString { match password { None => prompt_password_stdout("Password: "), Some(p) => p.clone(), } } fn getenv(key: &str) -> Option<String> { // Accessing an env var let ret = match env::var(key) { Ok(val) => Some(val), Err(_) => None, }; return ret; } fn getpassword() -> Option<String> { getenv("MWC_PASSWORD") } fn prompt_password_confirm() -> ZeroingString { let env_password = getpassword(); if env_password.is_some() { ZeroingString::from(env_password.unwrap()) } else { let mut first = ZeroingString::from("first"); let mut second = ZeroingString::from("second"); while first != second { first = prompt_password_stdout("Password: "); second = prompt_password_stdout("Confirm Password: "); } first } } fn getrecoveryphrase() -> Option<String> { getenv("MWC_RECOVERY_PHRASE") } fn prompt_recovery_phrase<L, C, K>( wallet: Arc<Mutex<Box<dyn WalletInst<'static, L, C, K>>>>, ) -> Result<ZeroingString, ParseError> where DefaultWalletImpl<'static, C>: WalletInst<'static, L, C, K>, L: WalletLCProvider<'static, C, K>, C: NodeClient + 'static, K: keychain::Keychain + 'static, { let interface = Arc::new(Interface::new("recover")?); let mut phrase = 
ZeroingString::from(""); interface.set_report_signal(Signal::Interrupt, true); interface.set_prompt("phrase> ")?; loop { println!("Please enter your recovery phrase:"); let env_recovery_phrase = getrecoveryphrase(); if env_recovery_phrase.is_some() { phrase = ZeroingString::from(env_recovery_phrase.unwrap()); break; } let res = interface.read_line()?; match res { ReadResult::Eof => break, ReadResult::Signal(sig) => { if sig == Signal::Interrupt { interface.cancel_read_line()?; return Err(ParseError::CancelledError); } } ReadResult::Input(line) => { let mut w_lock = wallet.lock(); let p = w_lock.lc_provider().unwrap(); if p.validate_mnemonic(ZeroingString::from(line.clone())) .is_ok() { phrase = ZeroingString::from(line); break; } else { println!(); println!("Recovery word phrase is invalid."); println!(); interface.set_buffer(&line)?; } } } } Ok(phrase) } fn prompt_pay_invoice(slate: &Slate, method: &str, dest: &str) -> Result<bool, ParseError> { let interface = Arc::new(Interface::new("pay")?); let amount = amount_to_hr_string(slate.amount, false); interface.set_report_signal(Signal::Interrupt, true); interface.set_prompt( "To proceed, type the exact amount of the invoice as displayed above (or Q/q to quit) > ", )?; println!(); println!( "This command will pay the amount specified in the invoice using your wallet's funds." 
); println!("After you confirm, the following will occur: "); println!(); println!( "* {} of your wallet funds will be added to the transaction to pay this invoice.", amount ); if method == "http" { println!("* The resulting transaction will IMMEDIATELY be sent to the wallet listening at: '{}'.", dest); } else { println!("* The resulting transaction will be saved to the file '{}', which you can manually send back to the invoice creator.", dest); } println!(); println!("The invoice slate's participant info is:"); for m in slate.participant_messages().messages { println!("{}", m); } println!("Please review the above information carefully before proceeding"); println!(); loop { let res = interface.read_line()?; match res { ReadResult::Eof => return Ok(false), ReadResult::Signal(sig) => { if sig == Signal::Interrupt { interface.cancel_read_line()?; return Err(ParseError::CancelledError); } } ReadResult::Input(line) => { match line.trim() { "Q" | "q" => return Err(ParseError::CancelledError), result => { if result == amount { return Ok(true); } else { println!("Please enter exact amount of the invoice as shown above or Q to quit"); println!(); } } } } } } } // instantiate wallet (needed by most functions) pub fn inst_wallet<L, C, K>( config: WalletConfig, node_client: C, ) -> Result<Arc<Mutex<Box<dyn WalletInst<'static, L, C, K>>>>, ParseError> where DefaultWalletImpl<'static, C>: WalletInst<'static, L, C, K>, L: WalletLCProvider<'static, C, K>, C: NodeClient + 'static, K: keychain::Keychain + 'static, { let mut wallet = Box::new( DefaultWalletImpl::<'static, C>::new(config.get_max_reorg_len(), node_client.clone()) .unwrap(), ) as Box<dyn WalletInst<'static, L, C, K>>; let lc = wallet.lc_provider().unwrap(); let _ = lc.set_top_level_directory(&config.data_file_dir); Ok(Arc::new(Mutex::new(wallet))) } // parses a required value, or throws error with message otherwise fn parse_required<'a>(args: &'a ArgMatches, name: &str) -> Result<&'a str, ParseError> { let arg = 
args.value_of(name); match arg { Some(ar) => Ok(ar), None => { let msg = format!("Value for argument '{}' is required in this context", name,); Err(ParseError::ArgumentError(msg)) } } } // parses a number, or throws error with message otherwise fn parse_u64(arg: &str, name: &str) -> Result<u64, ParseError> { let val = arg.parse::<u64>(); match val { Ok(v) => Ok(v), Err(e) => { let msg = format!("Could not parse {} as a whole number. e={}", name, e); Err(ParseError::ArgumentError(msg)) } } } // As above, but optional fn parse_u64_or_none(arg: Option<&str>) -> Option<u64> { let val = match arg { Some(a) => a.parse::<u64>(), None => return None, }; match val { Ok(v) => Some(v), Err(_) => None, } } pub fn parse_global_args( config: &WalletConfig, args: &ArgMatches, ) -> Result<command::GlobalArgs, ParseError> { let account = parse_required(args, "account")?; let mut show_spent = false; if args.is_present("show_spent") { show_spent = true; } let api_secret = get_first_line(config.api_secret_path.clone()); let node_api_secret = get_first_line(config.node_api_secret_path.clone()); let password = match args.value_of("pass") { None => None, Some(p) => Some(ZeroingString::from(p)), }; let tls_conf = match config.tls_certificate_file.clone() { None => None, Some(file) => { let key = match config.tls_certificate_key.clone() { Some(k) => k, None => { let msg = format!("Private key for certificate is not set"); return Err(ParseError::ArgumentError(msg)); } }; Some(TLSConfig::new(file, key)) } }; let chain_type = match config.chain_type.clone() { None => { let param_ref = global::CHAIN_TYPE.read(); param_ref.clone() } Some(c) => c, }; Ok(command::GlobalArgs { account: account.to_owned(), show_spent: show_spent, chain_type: chain_type, api_secret: api_secret, node_api_secret: node_api_secret, password: password, tls_conf: tls_conf, }) } pub fn parse_init_args<L, C, K>( wallet: Arc<Mutex<Box<dyn WalletInst<'static, L, C, K>>>>, config: &WalletConfig, g_args: &command::GlobalArgs, 
args: &ArgMatches, ) -> Result<command::InitArgs, ParseError> where DefaultWalletImpl<'static, C>: WalletInst<'static, L, C, K>, L: WalletLCProvider<'static, C, K>, C: NodeClient + 'static, K: keychain::Keychain + 'static, { let list_length = match args.is_present("short_wordlist") { false => 32, true => 16, }; let recovery_phrase = match args.is_present("recover") { true => Some(prompt_recovery_phrase(wallet)?), false => None, }; if recovery_phrase.is_some() { println!("Please provide a new password for the recovered wallet"); } else { println!("Please enter a password for your new wallet"); } let password = match g_args.password.clone() { Some(p) => p, None => prompt_password_confirm(), }; Ok(command::InitArgs { list_length: list_length, password: password, config: config.clone(), recovery_phrase: recovery_phrase, restore: false, }) } pub fn parse_recover_args( g_args: &command::GlobalArgs, ) -> Result<command::RecoverArgs, ParseError> where { let passphrase = prompt_password(&g_args.password); Ok(command::RecoverArgs { passphrase: passphrase, }) } pub fn parse_listen_args( config: &mut WalletConfig, tor_config: &mut TorConfig, args: &ArgMatches, ) -> Result<command::ListenArgs, ParseError> { if let Some(port) = args.value_of("port") { config.api_listen_port = port.parse().unwrap(); } let method = parse_required(args, "method")?; if args.is_present("no_tor") { tor_config.use_tor_listener = false; } Ok(command::ListenArgs { method: method.to_owned(), }) } pub fn parse_owner_api_args( config: &mut WalletConfig, args: &ArgMatches, ) -> Result<(), ParseError> { if let Some(port) = args.value_of("port") { config.owner_api_listen_port = Some(port.parse().unwrap()); } if args.is_present("run_foreign") { config.owner_api_include_foreign = Some(true); } Ok(()) } pub fn parse_account_args(account_args: &ArgMatches) -> Result<command::AccountArgs, ParseError> { let create = match account_args.value_of("create") { None => None, Some(s) => Some(s.to_owned()), }; 
Ok(command::AccountArgs { create: create }) } pub fn parse_send_args(args: &ArgMatches) -> Result<command::SendArgs, ParseError> { // amount let amount = parse_required(args, "amount")?; let amount = core::core::amount_from_hr_string(amount); let amount = match amount { Ok(a) => a, Err(e) => { let msg = format!( "Could not parse amount as a number with optional decimal point. e={}", e ); return Err(ParseError::ArgumentError(msg)); } }; // message let message = match args.is_present("message") { true => Some(args.value_of("message").unwrap().to_owned()), false => None, }; // minimum_confirmations let min_c = parse_required(args, "minimum_confirmations")?; let min_c = parse_u64(min_c, "minimum_confirmations")?; // selection_strategy let selection_strategy = parse_required(args, "selection_strategy")?; // estimate_selection_strategies let estimate_selection_strategies = args.is_present("estimate_selection_strategies"); // method let method = parse_required(args, "method")?; // dest let dest = { if method == "self" { match args.value_of("dest") { Some(d) => d, None => "default", } } else { if !estimate_selection_strategies { parse_required(args, "dest")? } else { "" } } }; if !estimate_selection_strategies && method == "http" && !dest.starts_with("http://") && !dest.starts_with("https://") && is_tor_address(&dest).is_err() { let msg = format!( "HTTP Destination should start with http://: or https://: {}", dest, ); return Err(ParseError::ArgumentError(msg)); } // change_outputs let change_outputs = parse_required(args, "change_outputs")?; let change_outputs = parse_u64(change_outputs, "change_outputs")? 
as usize; // fluff let fluff = args.is_present("fluff"); // ttl_blocks let ttl_blocks = parse_u64_or_none(args.value_of("ttl_blocks")); // max_outputs let max_outputs = 500; // target slate version to create/send let target_slate_version = { match args.is_present("slate_version") { true => { let v = parse_required(args, "slate_version")?; Some(parse_u64(v, "slate_version")? as u16) } false => None, } }; let payment_proof_address = { match args.is_present("request_payment_proof") { true => { // if the destination address is a TOR address, we don't need the address // separately match address::pubkey_from_onion_v3(&dest) { Ok(k) => Some(to_hex(k.to_bytes().to_vec())), Err(_) => Some(parse_required(args, "proof_address")?.to_owned()), } } false => None, } }; Ok(command::SendArgs { amount: amount, message: message, minimum_confirmations: min_c, selection_strategy: selection_strategy.to_owned(), estimate_selection_strategies, method: method.to_owned(), dest: dest.to_owned(), change_outputs: change_outputs, fluff: fluff, max_outputs: max_outputs, payment_proof_address, ttl_blocks, target_slate_version: target_slate_version, }) } pub fn parse_receive_args(receive_args: &ArgMatches) -> Result<command::ReceiveArgs, ParseError> { // message let message = match receive_args.is_present("message") { true => Some(receive_args.value_of("message").unwrap().to_owned()), false => None, }; // input let tx_file = parse_required(receive_args, "input")?; // validate input if !Path::new(&tx_file).is_file() { let msg = format!("File {} not found.", &tx_file); return Err(ParseError::ArgumentError(msg)); } Ok(command::ReceiveArgs { input: tx_file.to_owned(), message: message, }) } pub fn parse_finalize_args(args: &ArgMatches) -> Result<command::FinalizeArgs, ParseError> { let fluff = args.is_present("fluff"); let nopost = args.is_present("nopost"); let tx_file = parse_required(args, "input")?; if !Path::new(&tx_file).is_file() { let msg = format!("File {} not found.", tx_file); return 
Err(ParseError::ArgumentError(msg)); } let dest_file = match args.is_present("dest") { true => Some(args.value_of("dest").unwrap().to_owned()), false => None, }; Ok(command::FinalizeArgs { input: tx_file.to_owned(), fluff: fluff, nopost: nopost, dest: dest_file.to_owned(), }) } pub fn parse_issue_invoice_args( args: &ArgMatches, ) -> Result<command::IssueInvoiceArgs, ParseError> { let amount = parse_required(args, "amount")?; let amount = core::core::amount_from_hr_string(amount); let amount = match amount { Ok(a) => a, Err(e) => { let msg = format!( "Could not parse amount as a number with optional decimal point. e={}", e ); return Err(ParseError::ArgumentError(msg)); } }; // message let message = match args.is_present("message") { true => Some(args.value_of("message").unwrap().to_owned()), false => None, }; // target slate version to create let target_slate_version = { match args.is_present("slate_version") { true => { let v = parse_required(args, "slate_version")?; Some(parse_u64(v, "slate_version")? 
as u16) } false => None, } }; // dest (output file) let dest = parse_required(args, "dest")?; Ok(command::IssueInvoiceArgs { dest: dest.into(), issue_args: IssueInvoiceTxArgs { dest_acct_name: None, address: Some(String::from(dest)), amount, message, target_slate_version, }, }) } pub fn parse_process_invoice_args( args: &ArgMatches, prompt: bool, ) -> Result<command::ProcessInvoiceArgs, ParseError> { // TODO: display and prompt for confirmation of what we're doing // message let message = match args.is_present("message") { true => Some(args.value_of("message").unwrap().to_owned()), false => None, }; // minimum_confirmations let min_c = parse_required(args, "minimum_confirmations")?; let min_c = parse_u64(min_c, "minimum_confirmations")?; // selection_strategy let selection_strategy = parse_required(args, "selection_strategy")?; // estimate_selection_strategies let estimate_selection_strategies = args.is_present("estimate_selection_strategies"); // method let method = parse_required(args, "method")?; // dest let dest = { if method == "self" { match args.value_of("dest") { Some(d) => d, None => "default", } } else { if !estimate_selection_strategies { parse_required(args, "dest")? 
} else { "" } } }; if !estimate_selection_strategies && method == "http" && !dest.starts_with("http://") && !dest.starts_with("https://") { let msg = format!( "HTTP Destination should start with http://: or https://: {}", dest, ); return Err(ParseError::ArgumentError(msg)); } // ttl_blocks let ttl_blocks = parse_u64_or_none(args.value_of("ttl_blocks")); // max_outputs let max_outputs = 500; // file input only let tx_file = parse_required(args, "input")?; if prompt { // Now we need to prompt the user whether they want to do this, // which requires reading the slate let slate = match PathToSlate((&tx_file).into()).get_tx() { Ok(s) => s, Err(e) => return Err(ParseError::ArgumentError(format!("{}", e))), }; prompt_pay_invoice(&slate, method, dest)?; } Ok(command::ProcessInvoiceArgs { message: message, minimum_confirmations: min_c, selection_strategy: selection_strategy.to_owned(), estimate_selection_strategies, method: method.to_owned(), dest: dest.to_owned(), max_outputs: max_outputs, input: tx_file.to_owned(), ttl_blocks, }) } pub fn parse_info_args(args: &ArgMatches) -> Result<command::InfoArgs, ParseError> { // minimum_confirmations let mc = parse_required(args, "minimum_confirmations")?; let mc = parse_u64(mc, "minimum_confirmations")?; Ok(command::InfoArgs { minimum_confirmations: mc, }) } pub fn parse_check_args(args: &ArgMatches) -> Result<command::CheckArgs, ParseError> { let delete_unconfirmed = args.is_present("delete_unconfirmed"); let start_height = parse_u64_or_none(args.value_of("start_height")); Ok(command::CheckArgs { start_height: start_height, delete_unconfirmed: delete_unconfirmed, }) } pub fn parse_txs_args(args: &ArgMatches) -> Result<command::TxsArgs, ParseError> { let tx_id = match args.value_of("id") { None => None, Some(tx) => Some(parse_u64(tx, "id")? as u32), }; let tx_slate_id = match args.value_of("txid") { None => None, Some(tx) => match tx.parse() { Ok(t) => Some(t), Err(e) => { let msg = format!("Could not parse txid parameter. 
e={}", e); return Err(ParseError::ArgumentError(msg)); } }, }; if tx_id.is_some() && tx_slate_id.is_some() { let msg = format!("At most one of 'id' (-i) or 'txid' (-t) may be provided."); return Err(ParseError::ArgumentError(msg)); } Ok(command::TxsArgs { id: tx_id, tx_slate_id: tx_slate_id, }) } pub fn parse_post_args(args: &ArgMatches) -> Result<command::PostArgs, ParseError> { let tx_file = parse_required(args, "input")?; let fluff = args.is_present("fluff"); Ok(command::PostArgs { input: tx_file.to_owned(), fluff: fluff, }) } pub fn parse_submit_args(args: &ArgMatches) -> Result<command::SubmitArgs, ParseError> { // input let tx_file = parse_required(args, "input")?; // validate input if !Path::new(&tx_file).is_file() { let msg = format!("File {} not found.", &tx_file); return Err(ParseError::ArgumentError(msg)); } // check fluff flag let fluff = args.is_present("fluff"); Ok(command::SubmitArgs { input: tx_file.to_owned(), fluff: fluff, }) } pub fn parse_repost_args(args: &ArgMatches) -> Result<command::RepostArgs, ParseError> { let tx_id = match args.value_of("id") { None => None, Some(tx) => Some(parse_u64(tx, "id")? as u32), }; let fluff = args.is_present("fluff"); let dump_file = match args.value_of("dumpfile") { None => None, Some(d) => Some(d.to_owned()), }; Ok(command::RepostArgs { id: tx_id.unwrap(), dump_file: dump_file, fluff: fluff, }) } pub fn parse_cancel_args(args: &ArgMatches) -> Result<command::CancelArgs, ParseError> { let mut tx_id_string = ""; let tx_id = match args.value_of("id") { None => None, Some(tx) => Some(parse_u64(tx, "id")? as u32), }; let tx_slate_id = match args.value_of("txid") { None => None, Some(tx) => match tx.parse() { Ok(t) => { tx_id_string = tx; Some(t) } Err(e) => { let msg = format!("Could not parse txid parameter. 
e={}", e); return Err(ParseError::ArgumentError(msg)); } }, }; if (tx_id.is_none() && tx_slate_id.is_none()) || (tx_id.is_some() && tx_slate_id.is_some()) { let msg = format!("'id' (-i) or 'txid' (-t) argument is required."); return Err(ParseError::ArgumentError(msg)); } Ok(command::CancelArgs { tx_id: tx_id, tx_slate_id: tx_slate_id, tx_id_string: tx_id_string.to_owned(), }) } pub fn wallet_command<C, F>( wallet_args: &ArgMatches, mut wallet_config: WalletConfig, tor_config: Option<TorConfig>, mut node_client: C, test_mode: bool, wallet_inst_cb: F, ) -> Result<String, Error> where C: NodeClient + 'static + Clone, F: FnOnce( Arc< Mutex< Box< dyn WalletInst< 'static, DefaultLCProvider<'static, C, keychain::ExtKeychain>, C, keychain::ExtKeychain, >, >, >, >, ), { if let Some(t) = wallet_config.chain_type.clone() { core::global::set_mining_mode(t); } if wallet_args.is_present("external") { wallet_config.api_listen_interface = "0.0.0.0".to_string(); } if let Some(dir) = wallet_args.value_of("top_level_dir") { wallet_config.data_file_dir = dir.to_string().clone(); } if let Some(sa) = wallet_args.value_of("api_server_address") { wallet_config.check_node_api_http_addr = sa.to_string().clone(); } let global_wallet_args = arg_parse!(parse_global_args(&wallet_config, &wallet_args)); node_client.set_node_url(&wallet_config.check_node_api_http_addr); node_client.set_node_api_secret(global_wallet_args.node_api_secret.clone()); // legacy hack to avoid the need for changes in existing mwc-wallet.toml files // remove `wallet_data` from end of path as // new lifecycle provider assumes mwc_wallet.toml is in root of data directory let mut top_level_wallet_dir = PathBuf::from(wallet_config.clone().data_file_dir); if top_level_wallet_dir.ends_with(GRIN_WALLET_DIR) { top_level_wallet_dir.pop(); wallet_config.data_file_dir = top_level_wallet_dir.to_str().unwrap().into(); } // for backwards compatibility: If tor config doesn't exist in the file, assume // the top level directory for data 
let tor_config = match tor_config { Some(tc) => tc, None => { let mut tc = TorConfig::default(); tc.send_config_dir = wallet_config.data_file_dir.clone(); tc } }; // Instantiate wallet (doesn't open the wallet) let wallet = inst_wallet::<DefaultLCProvider<C, keychain::ExtKeychain>, C, keychain::ExtKeychain>( wallet_config.clone(), node_client, ) .unwrap_or_else(|e| { println!("Error: {}", e); std::process::exit(1); }); { let mut wallet_lock = wallet.lock(); let lc = wallet_lock.lc_provider().unwrap(); let _ = lc.set_top_level_directory(&wallet_config.data_file_dir); } // provide wallet instance back to the caller (handy for testing with // local wallet proxy, etc) wallet_inst_cb(wallet.clone()); // don't open wallet for certain lifecycle commands let mut open_wallet = true; match wallet_args.subcommand() { ("init", Some(_)) => open_wallet = false, ("recover", _) => open_wallet = false, ("owner_api", _) => { // If wallet exists, open it. Otherwise, that's fine too. let mut wallet_lock = wallet.lock(); let lc = wallet_lock.lc_provider().unwrap(); open_wallet = lc.wallet_exists(None, wallet_config.wallet_data_dir.as_deref())?; } _ => {} } let keychain_mask = match open_wallet { true => { let mut wallet_lock = wallet.lock(); let lc = wallet_lock.lc_provider().unwrap(); let mask = lc.open_wallet( None, prompt_password(&global_wallet_args.password), false, false, wallet_config.wallet_data_dir.as_deref(), )?; if let Some(account) = wallet_args.value_of("account") { let wallet_inst = lc.wallet_inst()?; wallet_inst.set_parent_key_id_by_name(account)?; } mask } false => None, }; let km = (&keychain_mask).as_ref(); let res = match wallet_args.subcommand() { ("init", Some(args)) => { let a = arg_parse!(parse_init_args( wallet.clone(), &wallet_config, &global_wallet_args, &args )); command::init( wallet, &global_wallet_args, a, wallet_config.wallet_data_dir.as_deref(), ) } ("recover", Some(_)) => { let a = arg_parse!(parse_recover_args(&global_wallet_args,)); 
command::recover(wallet, a, wallet_config.wallet_data_dir.as_deref()) } ("listen", Some(args)) => { let mut c = wallet_config.clone(); let mut t = tor_config.clone(); let a = arg_parse!(parse_listen_args(&mut c, &mut t, &args)); command::listen( wallet, Arc::new(Mutex::new(keychain_mask)), &c, &t, &a, &global_wallet_args.clone(), ) } ("owner_api", Some(args)) => { let mut c = wallet_config.clone(); let mut g = global_wallet_args.clone(); g.tls_conf = None; arg_parse!(parse_owner_api_args(&mut c, &args)); command::owner_api(wallet, keychain_mask, &c, &tor_config, &g) } ("web", Some(_)) => command::owner_api( wallet, keychain_mask, &wallet_config, &tor_config, &global_wallet_args, ), ("account", Some(args)) => { let a = arg_parse!(parse_account_args(&args)); command::account(wallet, km, a) } ("send", Some(args)) => { let a = arg_parse!(parse_send_args(&args)); command::send( wallet, km, Some(tor_config), a, wallet_config.dark_background_color_scheme.unwrap_or(true), ) } ("receive", Some(args)) => { let a = arg_parse!(parse_receive_args(&args)); command::receive(wallet, km, &global_wallet_args, a) } ("finalize", Some(args)) => { let a = arg_parse!(parse_finalize_args(&args)); command::finalize(wallet, km, a, false) } ("finalize_invoice", Some(args)) => { let a = arg_parse!(parse_finalize_args(&args)); command::finalize(wallet, km, a, true) } ("invoice", Some(args)) => { let a = arg_parse!(parse_issue_invoice_args(&args)); command::issue_invoice_tx(wallet, km, a) } ("pay", Some(args)) => { let a = arg_parse!(parse_process_invoice_args(&args, !test_mode)); command::process_invoice( wallet, km, Some(tor_config), a, wallet_config.dark_background_color_scheme.unwrap_or(true), ) } ("info", Some(args)) => { let a = arg_parse!(parse_info_args(&args)); command::info( wallet, km, &global_wallet_args, a, wallet_config.dark_background_color_scheme.unwrap_or(true), ) } ("outputs", Some(_)) => command::outputs( wallet, km, &global_wallet_args, 
wallet_config.dark_background_color_scheme.unwrap_or(true), ), ("txs", Some(args)) => { let a = arg_parse!(parse_txs_args(&args)); command::txs( wallet, km, &global_wallet_args, a, wallet_config.dark_background_color_scheme.unwrap_or(true), ) } ("post", Some(args)) => { let a = arg_parse!(parse_post_args(&args)); command::post(wallet, km, a) } // Submit is a synonim for 'post'. Since MWC intoduce it ealier, let's keep it ("submit", Some(args)) => { let a = arg_parse!(parse_submit_args(&args)); command::submit(wallet, km, a) } ("repost", Some(args)) => { let a = arg_parse!(parse_repost_args(&args)); command::repost(wallet, km, a) } ("cancel", Some(args)) => { let a = arg_parse!(parse_cancel_args(&args)); command::cancel(wallet, km, a) } ("address", Some(_)) => command::address(wallet, &global_wallet_args, km), ("scan", Some(args)) => { let a = arg_parse!(parse_check_args(&args)); command::scan(wallet, km, a) } ("dump-wallet-data", Some(_)) => command::dump_wallet_data(wallet, km), _ => { let msg = format!("Unknown wallet command, use 'mwc help wallet' for details"); return Err(ErrorKind::ArgumentError(msg).into()); } }; if let Err(e) = res { Err(e) } else { Ok(wallet_args.subcommand().0.to_owned()) } }
27.000909
137
0.673041
3368be048ca9fbbfd665403bfbe8139c163514b4
1,571
//! 如果想要尝试自己实现动态分配器,使用此文件替换 heap.rs //! //! 具体分配算法需要在 algorithm::allocator 里面实现, //! 这里将其中的 VectorAllocator 接入 GlobalAlloc,作为全局分配器 use super::config::KERNEL_HEAP_SIZE; use algorithm::{VectorAllocator, VectorAllocatorImpl}; use core::cell::UnsafeCell; /// 进行动态内存分配所用的堆空间 /// /// 大小为 [`KERNEL_HEAP_SIZE`] /// 这段空间编译后会被放在操作系统执行程序的 bss 段 static mut HEAP_SPACE: [u8; KERNEL_HEAP_SIZE] = [0; KERNEL_HEAP_SIZE]; #[global_allocator] static HEAP: Heap = Heap(UnsafeCell::new(None)); /// Heap 将分配器封装并放在 static 中。它不安全,但在这个问题中不考虑安全性 struct Heap(UnsafeCell<Option<VectorAllocatorImpl>>); /// 利用 VectorAllocator 的接口实现全局分配器的 GlobalAlloc trait unsafe impl alloc::alloc::GlobalAlloc for Heap { unsafe fn alloc(&self, layout: core::alloc::Layout) -> *mut u8 { let offset = (*self.0.get()) .as_mut() .unwrap() .alloc(layout.size(), layout.align()) .expect("Heap overflow"); &mut HEAP_SPACE[offset] as *mut u8 } unsafe fn dealloc(&self, ptr: *mut u8, layout: core::alloc::Layout) { let offset = ptr as usize - &HEAP_SPACE as *const _ as usize; (*self.0.get()) .as_mut() .unwrap() .dealloc(offset, layout.size(), layout.align()); } } unsafe impl Sync for Heap {} /// 初始化操作系统运行时堆空间 pub fn init() { // 告诉分配器使用这一段预留的空间作为堆 unsafe { (*HEAP.0.get()).replace(VectorAllocatorImpl::new(KERNEL_HEAP_SIZE)); } } /// 空间分配错误的回调,直接 panic 退出 #[alloc_error_handler] fn alloc_error_handler(_: alloc::alloc::Layout) -> ! { panic!("alloc error") }
28.563636
76
0.649905
1ef3a4dd536c923ce849c8cf0a9fcb371b58c512
5,263
//! Evolution, ways that different Pokemon within an evolution family are
//! related.

use std::convert::TryFrom;

use serde::Deserialize;
use serde::Serialize;

use crate::api::Endpoint;
use crate::model::item::Item;
use crate::model::location::Location;
use crate::model::mov::Move;
use crate::model::resource::NamedResource;
use crate::model::resource::Resource;
use crate::model::species::Species;
use crate::model::text::Localized;
use crate::model::ty::Type;

/// A family of Pokemon related by evolution.
///
/// This structure forms a tree rooted at the "base" stage for this Pokemon.
/// For example, Pikachu's family is rooted at Pichu.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Family {
  /// This family's numeric ID.
  pub id: u32,
  /// The item needed to breed the base stage Pokemon, if necessary.
  pub baby_trigger_item: Option<Resource<Item>>,
  /// The base stage for this family.
  // The upstream API calls this field "link"; renamed here for clarity.
  #[serde(rename = "link")]
  pub base_stage: Stage,
}

/// A stage within an evolution [`Family`].
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Stage {
  /// Whether this is a baby Pokemon stage.
  pub is_baby: bool,
  /// The species at this stage.
  pub species: Resource<Species>,
  /// Conditions that can move the previous stage to this one.
  ///
  /// There may be more than one condition; for example, Milotic can evolve from
  /// Feebas either by holding a Prism Scale, or by having maxed-out Beauty.
  /// (Trivia: both of these work in all games since Generation V!)
  #[serde(rename = "evolution_details")]
  pub conditions: Vec<Condition>,
  /// Stages that this species can evolve into.
  // Recursive: the tree bottoms out at stages whose `evolves_to` is empty.
  pub evolves_to: Vec<Stage>,
}

/// A set of conditions that must all hold for a particular [`Stage`] to be
/// reached.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Condition {
  /// The event that triggers the evolution (such as a level-up).
  pub trigger: Resource<Trigger>,
  /// An item that can be used to directly trigger evolution.
  pub item: Option<Resource<Item>>,
  /// The gender this Pokemon must be during the trigger.
  // NOTE(review): presumably this is the upstream API's gender id — confirm
  // the encoding before interpreting the value.
  pub gender: Option<u32>,
  /// An item that must be held during the trigger.
  pub held_item: Option<Resource<Item>>,
  /// A location evolution must be triggered at.
  pub location: Option<Resource<Location>>,
  /// Whether it must be raining during the trigger.
  pub needs_overworld_rain: bool,
  /// The minimum level during the trigger.
  pub min_level: Option<u32>,
  /// The minimum happiness level during the trigger.
  pub min_happiness: Option<u32>,
  /// The minimum Beauty level during the trigger.
  pub min_beauty: Option<u32>,
  /// The minimum affection level during the trigger.
  pub min_affection: Option<u32>,
  /// A species that must be present in the party during the trigger.
  pub party_species: Option<Resource<Species>>,
  /// A Pokemon type that must be present in the party during the trigger.
  pub party_type: Option<NamedResource<Type>>,
  /// A move that must be known during the trigger.
  pub known_move: Option<Resource<Move>>,
  /// A type of move that must be known during the trigger.
  pub known_move_type: Option<NamedResource<Type>>,
  /// A relation between Attack and Defense required during the trigger.
  // NOTE(review): unlike the other constraints this field is not an Option;
  // verify that chains which do not use this constraint still deserialize
  // (the upstream value may be null for them).
  #[serde(rename = "relative_physical_stats")]
  pub relative_stats: RelativeStats,
  /// The time of day it must be during the trigger.
  // TODO: newtype
  pub time_of_day: String,
  /// Which species this Pokemon must be traded for during the trigger.
  pub trade_species: Option<Resource<Species>>,
  /// Whether the physical game must be held upside-down during the trigger.
  pub turn_upside_down: bool,
}

/// A requirement on the relative values of a Pokemon's Attack and Defense
/// statistics.
// On the wire this is an i8: 1 when Attack > Defense, -1 when Defense >
// Attack, 0 when equal (see the From/TryFrom impls below); the serde
// `into`/`try_from` attributes round-trip through that representation.
#[derive(
  Copy, Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Hash, Serialize, Deserialize,
)]
#[allow(missing_docs)]
#[serde(into = "i8")]
#[serde(try_from = "i8")]
pub enum RelativeStats {
  AttackGreater,
  DefenceGreater,
  Equal,
}

impl From<RelativeStats> for i8 {
  fn from(r: RelativeStats) -> Self {
    match r {
      RelativeStats::AttackGreater => 1,
      RelativeStats::DefenceGreater => -1,
      RelativeStats::Equal => 0,
    }
  }
}

// Error type for the fallible i8 -> RelativeStats conversion; required so the
// serde `try_from` attribute has a Display-able error to report.
#[doc(hidden)]
#[derive(Debug, thiserror::Error)]
#[error("value must be in range -1..=1")]
pub struct RelativeStatsFromError;

impl TryFrom<i8> for RelativeStats {
  type Error = RelativeStatsFromError;
  fn try_from(x: i8) -> Result<Self, Self::Error> {
    match x {
      1 => Ok(Self::AttackGreater),
      -1 => Ok(Self::DefenceGreater),
      0 => Ok(Self::Equal),
      _ => Err(RelativeStatsFromError),
    }
  }
}

impl Endpoint for Family {
  // REST resource name this model is fetched from.
  const NAME: &'static str = "evolution-chain";
}

/// An event that can trigger evolution.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Trigger {
  /// This trigger's numeric ID.
  pub id: u32,
  /// This trigger's API name.
  pub name: String,
  /// The name of this trigger in various languages.
  #[serde(rename = "names")]
  pub localized_names: Localized,
  /// Pokemon species that result from this trigger.
  #[serde(rename = "pokemon_species")]
  pub results: Vec<Resource<Species>>,
}

impl Endpoint for Trigger {
  // REST resource name this model is fetched from.
  const NAME: &'static str = "evolution-trigger";
}
30.074286
80
0.700171
4ad548cdf1ccca20dd068dcdbab591cb6d941160
1,043
use std::{borrow::Cow, str::FromStr};

/// Used for sorting languages.
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub enum Sort {
    /// Order by the count of blank lines.
    Blanks,
    /// Order by the count of comment lines.
    Comments,
    /// Order by the count of code lines.
    Code,
    /// Order by the count of files.
    Files,
    /// Order by the total count of lines.
    Lines,
}

impl FromStr for Sort {
    type Err = String;

    /// Parses a sort criterion case-insensitively; an unrecognised name
    /// yields an error message naming the rejected (lowercased) option.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let lowered = s.to_lowercase();
        let sort = match lowered.as_str() {
            "blanks" => Sort::Blanks,
            "comments" => Sort::Comments,
            "code" => Sort::Code,
            "files" => Sort::Files,
            "lines" => Sort::Lines,
            unknown => return Err(format!("Unsupported sorting option: {}", unknown)),
        };
        Ok(sort)
    }
}

impl<'a> From<Sort> for Cow<'a, Sort> {
    /// Wraps an owned `Sort` in a `Cow`.
    fn from(value: Sort) -> Self {
        Cow::Owned(value)
    }
}

impl<'a> From<&'a Sort> for Cow<'a, Sort> {
    /// Wraps a borrowed `Sort` in a `Cow` without copying.
    fn from(value: &'a Sort) -> Self {
        Cow::Borrowed(value)
    }
}
23.704545
74
0.534995
b94d6ba87dfff6c3729fb88b5083dce03f5103cd
1,688
use super::Error; use crate::kamp::argv::KeyValue; use std::fmt::Write; const KAKOUNE_INIT: &str = r#" define-command -hidden -override kamp-init %{ declare-option -hidden str kamp_out declare-option -hidden str kamp_err evaluate-commands %sh{ kamp_out="${TMPDIR:-/tmp/}${kak_session}-kamp.out" kamp_err="${TMPDIR:-/tmp/}${kak_session}-kamp.err" mkfifo "$kamp_out" "$kamp_err" echo "set-option global kamp_out '$kamp_out'" echo "set-option global kamp_err '$kamp_err'" } } define-command -hidden -override kamp-end %{ nop %sh{ rm -f "$kak_opt_kamp_out" "$kak_opt_kamp_err" } } hook global KakBegin .* kamp-init hook global KakEnd .* kamp-end"#; pub(crate) fn init(export: Vec<KeyValue>, alias: bool) -> Result<String, Error> { let user_exports = export.into_iter().fold(String::new(), |mut buf, next| { buf.push_str("export "); buf.push_str(&next.key); buf.push_str("='"); buf.push_str(&next.value); buf.push_str("'\n"); (0..8).for_each(|_| buf.push(' ')); buf }); let mut buf = String::new(); #[rustfmt::skip] writeln!(&mut buf, r#"define-command -override kamp-connect -params 1.. -command-completion %{{ %arg{{1}} sh -c %{{ {}export KAKOUNE_SESSION="$1" export KAKOUNE_CLIENT="$2" shift 3 [ $# = 0 ] && set "$SHELL" "$@" }} -- %val{{session}} %val{{client}} %arg{{@}} }} -docstring 'run Kakoune command in connected context'"#, user_exports)?; writeln!(&mut buf, "{}", KAKOUNE_INIT)?; if alias { writeln!(&mut buf, "alias global connect kamp-connect")?; } Ok(buf) }
28.610169
99
0.592417
79a32b0fa1ec7825202d414a49cfaa5bd129998d
27,010
// Copyright 2020 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use { anyhow::Error, bt_a2dp::{codec::MediaCodecConfig, media_task::*}, bt_a2dp_metrics as metrics, bt_avdtp::{self as avdtp, MediaStream}, fidl::endpoints::create_request_stream, fidl_fuchsia_media as media, fidl_fuchsia_media_sessions2 as sessions2, fuchsia_async as fasync, fuchsia_bluetooth::{inspect::DataStreamInspect, types::PeerId}, fuchsia_cobalt::CobaltSender, fuchsia_trace as trace, futures::{ channel::oneshot, future::{BoxFuture, Future, Shared}, lock::Mutex, select, FutureExt, StreamExt, TryFutureExt, }, log::{info, trace, warn}, std::sync::Arc, thiserror::Error, }; use crate::avrcp_relay::AvrcpRelay; use crate::player; #[derive(Clone)] pub struct SinkTaskBuilder { cobalt_sender: CobaltSender, publisher: sessions2::PublisherProxy, audio_consumer_factory: media::SessionAudioConsumerFactoryProxy, domain: String, } impl SinkTaskBuilder { pub fn new( cobalt_sender: CobaltSender, publisher: sessions2::PublisherProxy, audio_consumer_factory: media::SessionAudioConsumerFactoryProxy, domain: String, ) -> Self { Self { cobalt_sender, publisher, audio_consumer_factory, domain } } } impl MediaTaskBuilder for SinkTaskBuilder { fn configure( &self, peer_id: &PeerId, codec_config: &MediaCodecConfig, data_stream_inspect: DataStreamInspect, ) -> BoxFuture<'static, Result<Box<dyn MediaTaskRunner>, MediaTaskError>> { let builder = self.clone(); let peer_id = peer_id.clone(); let codec_config = codec_config.clone(); Box::pin(async move { Ok::<Box<dyn MediaTaskRunner>, _>(Box::new(ConfiguredSinkTask::new( codec_config, builder, data_stream_inspect, peer_id, ))) }) } } struct ConfiguredSinkTask { /// Configuration providing the format of encoded audio requested. codec_config: MediaCodecConfig, /// The ID of the peer that this is configured for. 
peer_id: PeerId, /// A clone of the Builder at the time this was configured. builder: SinkTaskBuilder, /// Data Stream inspect object for tracking total bytes / current transfer speed. stream_inspect: Arc<Mutex<DataStreamInspect>>, /// Future that will return the Session ID for Media, if we have started the session. session_id_fut: Option<Shared<oneshot::Receiver<u64>>>, /// Session Task (AVRCP relay) if it is started. _session_task: Option<fasync::Task<()>>, } impl ConfiguredSinkTask { fn new( codec_config: MediaCodecConfig, builder: SinkTaskBuilder, stream_inspect: DataStreamInspect, peer_id: PeerId, ) -> Self { Self { codec_config, builder, peer_id, stream_inspect: Arc::new(Mutex::new(stream_inspect)), session_id_fut: None, _session_task: None, } } fn establish_session(&mut self) -> impl Future<Output = u64> { if self.session_id_fut.is_none() { // Need to start the session task and send the result. let (sender, recv) = futures::channel::oneshot::channel(); self.session_id_fut = Some(recv.shared()); let peer_id = self.peer_id.clone(); let builder = self.builder.clone(); let session_fut = async move { let (player_client, player_requests) = match create_request_stream() { Ok((client, requests)) => (client, requests), Err(e) => { warn!("{}: Couldn't create player FIDL client: {:?}", peer_id, e); return; } }; let registration = sessions2::PlayerRegistration { domain: Some(builder.domain), ..sessions2::PlayerRegistration::EMPTY }; match builder.publisher.publish(player_client, registration).await { Ok(session_id) => { info!("{}: Published session {}", peer_id, session_id); // If the receiver has hung up, this task will be dropped. let _ = sender.send(session_id); // We ignore AVRCP relay errors, they are logged. 
if let Ok(relay_task) = AvrcpRelay::start(peer_id, player_requests) { relay_task.await; } } Err(e) => warn!("{}: Couldn't publish session: {:?}", peer_id, e), }; }; self._session_task = Some(fasync::Task::local(session_fut)); } self.session_id_fut .as_ref() .cloned() .expect("just set this") .map_ok_or_else(|_e| 0, |id| id) } } impl MediaTaskRunner for ConfiguredSinkTask { fn start(&mut self, stream: MediaStream) -> Result<Box<dyn MediaTask>, MediaTaskError> { let codec_config = self.codec_config.clone(); let audio_factory = self.builder.audio_consumer_factory.clone(); let stream_inspect = self.stream_inspect.clone(); let session_id_fut = self.establish_session(); let media_player_fut = async move { let session_id = session_id_fut.await; media_stream_task( stream, Box::new(move || { player::Player::new(session_id, codec_config.clone(), audio_factory.clone()) }), stream_inspect, ) .await }; let _ = self.stream_inspect.try_lock().map(|mut l| l.start()); let codec_type = self.codec_config.codec_type().clone(); let cobalt_sender = self.builder.cobalt_sender.clone(); let task = RunningSinkTask::start(media_player_fut, cobalt_sender, codec_type); Ok(Box::new(task)) } fn reconfigure(&mut self, codec_config: &MediaCodecConfig) -> Result<(), MediaTaskError> { self.codec_config = codec_config.clone(); Ok(()) } } #[derive(Error, Debug)] enum StreamingError { /// The media stream ended. #[error("Media stream ended")] MediaStreamEnd, /// The media stream returned an error. The error is provided. #[error("Media stream error: {:?}", _0)] MediaStreamError(avdtp::Error), /// The Media Player closed unexpectedly. #[error("Player closed unexpectedlky")] PlayerClosed, } /// Sink task which is running a given media_task future, and will send it's result to multiple /// interested parties. /// Reports the streaming metrics to Cobalt when streaming has completed. 
struct RunningSinkTask { media_task: Option<fasync::Task<()>>, result_fut: Shared<fasync::Task<Result<(), MediaTaskError>>>, } impl RunningSinkTask { fn start( media_task: impl Future<Output = Result<(), MediaTaskError>> + Send + 'static, cobalt_sender: CobaltSender, codec_type: avdtp::MediaCodecType, ) -> Self { let (sender, receiver) = oneshot::channel(); let wrapped_media_task = fasync::Task::spawn(async move { let result = media_task.await; let _ = sender.send(result); }); let recv_task = fasync::Task::spawn(async move { // Receives the result of the media task, or Canceled, from the stop() dropping it receiver.await.unwrap_or(Ok(())) }); let result_fut = recv_task.shared(); let cobalt_result = result_fut.clone(); fasync::Task::spawn(async move { let start_time = fasync::Time::now(); trace::instant!("bt-a2dp", "Media:Start", trace::Scope::Thread); let _ = cobalt_result.await; trace::instant!("bt-a2dp", "Media:Stop", trace::Scope::Thread); let end_time = fasync::Time::now(); report_stream_metrics( cobalt_sender, &codec_type, (end_time - start_time).into_seconds(), ); }) .detach(); Self { media_task: Some(wrapped_media_task), result_fut } } } impl MediaTask for RunningSinkTask { fn finished(&mut self) -> BoxFuture<'static, Result<(), MediaTaskError>> { self.result_fut.clone().boxed() } fn stop(&mut self) -> Result<(), MediaTaskError> { if let Some(_task) = self.media_task.take() { info!("Media Task stopped via stop signal"); } // Either there was already a result, or we just send Ok(()) by dropping the sender. 
self.result().unwrap_or(Ok(())) } } /// Wrapper function for media streaming that handles creation of the Player and the media stream /// metrics reporting async fn media_stream_task( mut stream: (impl futures::Stream<Item = avdtp::Result<Vec<u8>>> + std::marker::Unpin), player_gen: Box<dyn Fn() -> Result<player::Player, Error> + Send>, inspect: Arc<Mutex<DataStreamInspect>>, ) -> Result<(), MediaTaskError> { loop { let mut player = player_gen() .map_err(|e| MediaTaskError::Other(format!("Can't setup player: {:?}", e)))?; // Get the first status from the player to confirm it is setup. if let player::PlayerEvent::Closed = player.next_event().await { return Err(MediaTaskError::Other(format!("Player failed during startup"))); } match decode_media_stream(&mut stream, player, inspect.clone()).await { StreamingError::PlayerClosed => info!("Player closed, rebuilding.."), e => { return Err(MediaTaskError::Other(format!( "Unrecoverable streaming error: {:?}", e ))); } }; } } /// Decodes a media stream by starting a Player and transferring media stream packets from AVDTP /// to the player. Restarts the player on player errors. /// Ends when signaled from `end_signal`, or when the media transport stream is closed. async fn decode_media_stream( stream: &mut (impl futures::Stream<Item = avdtp::Result<Vec<u8>>> + std::marker::Unpin), mut player: player::Player, inspect: Arc<Mutex<DataStreamInspect>>, ) -> StreamingError { let mut packet_count: u64 = 0; let _ = inspect.try_lock().map(|mut l| l.start()); loop { select! 
{ stream_packet = stream.next().fuse() => { let pkt = match stream_packet { None => return StreamingError::MediaStreamEnd, Some(Err(e)) => return StreamingError::MediaStreamError(e), Some(Ok(packet)) => packet, }; packet_count += 1; // link incoming and outgoing flows togther with shared duration event trace::duration!("bt-a2dp", "Profile packet received"); trace::flow_end!("bluetooth", "ProfilePacket", packet_count); let _ = inspect.try_lock().map(|mut l| { l.record_transferred(pkt.len(), fasync::Time::now()); }); if let Err(e) = player.push_payload(&pkt.as_slice()).await { info!("can't push packet: {:?}", e); } }, player_event = player.next_event().fuse() => { match player_event { player::PlayerEvent::Closed => return StreamingError::PlayerClosed, player::PlayerEvent::Status(s) => { trace!("PlayerEvent Status happened: {:?}", s); }, } }, } } } fn report_stream_metrics( mut cobalt_sender: CobaltSender, codec_type: &avdtp::MediaCodecType, duration_seconds: i64, ) { let codec = match codec_type { &avdtp::MediaCodecType::AUDIO_SBC => { metrics::A2dpStreamDurationInSecondsMetricDimensionCodec::Sbc } &avdtp::MediaCodecType::AUDIO_AAC => { metrics::A2dpStreamDurationInSecondsMetricDimensionCodec::Aac } _ => metrics::A2dpStreamDurationInSecondsMetricDimensionCodec::Unknown, }; cobalt_sender.log_elapsed_time( metrics::A2DP_STREAM_DURATION_IN_SECONDS_METRIC_ID, codec as u32, duration_seconds, ); } #[cfg(all(test, feature = "test_encoding"))] mod tests { use super::*; use { fidl::endpoints::create_proxy_and_stream, fidl_fuchsia_cobalt::{CobaltEvent, EventPayload}, fidl_fuchsia_media::{ AudioConsumerRequest, AudioConsumerStatus, SessionAudioConsumerFactoryMarker, StreamSinkRequest, }, fidl_fuchsia_media_sessions2::{PublisherMarker, PublisherRequest}, fuchsia_bluetooth::types::Channel, fuchsia_inspect as inspect, fuchsia_inspect_derive::WithInspect, fuchsia_zircon::DurationNum, futures::{channel::mpsc, pin_mut, task::Poll, StreamExt}, std::sync::RwLock, }; use 
crate::tests::fake_cobalt_sender; #[test] fn sink_task_works_without_session() { let mut exec = fasync::Executor::new().expect("executor should build"); let (send, _recv) = fake_cobalt_sender(); let (proxy, mut session_requests) = fidl::endpoints::create_proxy_and_stream::<PublisherMarker>().unwrap(); let (audio_consumer_factory_proxy, mut audio_factory_requests) = create_proxy_and_stream::<SessionAudioConsumerFactoryMarker>() .expect("proxy pair creation"); let builder = SinkTaskBuilder::new(send, proxy, audio_consumer_factory_proxy, "Tests".to_string()); let sbc_config = MediaCodecConfig::min_sbc(); let configured_fut = builder.configure(&PeerId(1), &sbc_config, DataStreamInspect::default()); pin_mut!(configured_fut); let mut configured_task = exec.run_singlethreaded(&mut configured_fut).expect("ok configure"); // Should't start session until we start a stream. assert!(exec.run_until_stalled(&mut session_requests.next()).is_pending()); let (local, _remote) = Channel::create(); let local = Arc::new(RwLock::new(local)); let stream = MediaStream::new(Arc::new(parking_lot::Mutex::new(true)), Arc::downgrade(&local)); let mut running_task = configured_task.start(stream).expect("media task should start"); // Should try to publish the session now. match exec.run_until_stalled(&mut session_requests.next()) { Poll::Ready(Some(Ok(PublisherRequest::Publish { responder, .. }))) => { drop(responder); } x => panic!("Expected a publisher request, got {:?}", x), }; drop(session_requests); let finished_fut = running_task.finished(); pin_mut!(finished_fut); // Shouldn't end the running media task assert!(exec.run_until_stalled(&mut finished_fut).is_pending()); // Should try to start the player match exec.run_until_stalled(&mut audio_factory_requests.next()) { Poll::Ready(Some(Ok( media::SessionAudioConsumerFactoryRequest::CreateAudioConsumer { .. 
}, ))) => {} x => panic!("Expected a audio consumer request, got {:?}", x), }; } #[test] fn dropped_task_reports_metrics() { let mut exec = fasync::Executor::new().expect("executor should build"); let (send, mut recv) = fake_cobalt_sender(); let (proxy, mut session_requests) = fidl::endpoints::create_proxy_and_stream::<PublisherMarker>().unwrap(); let (audio_consumer_factory_proxy, _audio_factory_requests) = create_proxy_and_stream::<SessionAudioConsumerFactoryMarker>() .expect("proxy pair creation"); let builder = SinkTaskBuilder::new(send, proxy, audio_consumer_factory_proxy, "Tests".to_string()); let sbc_config = MediaCodecConfig::min_sbc(); let configured_fut = builder.configure(&PeerId(1), &sbc_config, DataStreamInspect::default()); pin_mut!(configured_fut); let mut configured_task = exec.run_singlethreaded(&mut configured_fut).expect("ok configure"); // Should't start session until we start a stream. assert!(exec.run_until_stalled(&mut session_requests.next()).is_pending()); let (local, _remote) = Channel::create(); let local = Arc::new(RwLock::new(local)); let stream = MediaStream::new(Arc::new(parking_lot::Mutex::new(true)), Arc::downgrade(&local)); let mut running_task = configured_task.start(stream).expect("media task should start"); running_task.stop().expect("task to stop with okay"); drop(running_task); // Should receive a metrics report. match exec.run_singlethreaded(&mut recv.next()) { Some(CobaltEvent { metric_id: metrics::A2DP_STREAM_DURATION_IN_SECONDS_METRIC_ID, .. 
}) => {} x => panic!("Expected A2DP Duration CobaltEvent, got {:?}", x), } } fn setup_media_stream_test( ) -> (fasync::Executor, MediaCodecConfig, Arc<Mutex<DataStreamInspect>>) { let exec = fasync::Executor::new().expect("executor should build"); let sbc_config = MediaCodecConfig::min_sbc(); let inspect = Arc::new(Mutex::new(DataStreamInspect::default())); (exec, sbc_config, inspect) } #[test] /// Test that cobalt metrics are sent after stream ends fn test_cobalt_metrics() { let (send, mut recv) = fake_cobalt_sender(); const TEST_DURATION: i64 = 1; report_stream_metrics(send, &avdtp::MediaCodecType::AUDIO_AAC, TEST_DURATION); let event = recv.try_next().expect("no stream error").expect("event present"); assert_eq!( event, CobaltEvent { metric_id: metrics::A2DP_STREAM_DURATION_IN_SECONDS_METRIC_ID, event_codes: vec![ metrics::A2dpStreamDurationInSecondsMetricDimensionCodec::Aac as u32 ], component: None, payload: EventPayload::ElapsedMicros(TEST_DURATION), } ); } #[test] fn decode_media_stream_empty() { let (mut exec, sbc_config, inspect) = setup_media_stream_test(); let (player, _sink_requests, _consumer_requests, _vmo) = player::tests::setup_player(&mut exec, sbc_config); let mut empty_stream = futures::stream::empty(); let decode_fut = decode_media_stream(&mut empty_stream, player, inspect); pin_mut!(decode_fut); match exec.run_until_stalled(&mut decode_fut) { Poll::Ready(StreamingError::MediaStreamEnd) => {} x => panic!("Expected decoding to end when media stream ended, got {:?}", x), }; } #[test] fn decode_media_stream_error() { let (mut exec, sbc_config, inspect) = setup_media_stream_test(); let (player, _sink_requests, _consumer_requests, _vmo) = player::tests::setup_player(&mut exec, sbc_config); let mut error_stream = futures::stream::poll_fn(|_| -> Poll<Option<avdtp::Result<Vec<u8>>>> { Poll::Ready(Some(Err(avdtp::Error::PeerDisconnected))) }); let decode_fut = decode_media_stream(&mut error_stream, player, inspect); pin_mut!(decode_fut); match 
exec.run_until_stalled(&mut decode_fut) { Poll::Ready(StreamingError::MediaStreamError(avdtp::Error::PeerDisconnected)) => {} x => panic!("Expected decoding to end with included error, got {:?}", x), }; } #[test] fn decode_media_player_closed() { let (mut exec, sbc_config, inspect) = setup_media_stream_test(); let (player, mut sink_requests, mut consumer_requests, _vmo) = player::tests::setup_player(&mut exec, sbc_config); let mut pending_stream = futures::stream::pending(); let decode_fut = decode_media_stream(&mut pending_stream, player, inspect); pin_mut!(decode_fut); match exec.run_until_stalled(&mut decode_fut) { Poll::Pending => {} x => panic!("Expected pending immediately after with no input but got {:?}", x), }; let responder = match exec.run_until_stalled(&mut consumer_requests.select_next_some()) { Poll::Ready(Ok(AudioConsumerRequest::WatchStatus { responder, .. })) => responder, x => panic!("Expected a watch status request from the player setup, but got {:?}", x), }; drop(responder); drop(consumer_requests); loop { match exec.run_until_stalled(&mut sink_requests.select_next_some()) { Poll::Pending => {} x => info!("Got sink request: {:?}", x), }; match exec.run_until_stalled(&mut decode_fut) { Poll::Ready(StreamingError::PlayerClosed) => break, Poll::Pending => {} x => panic!("Expected decoding to end when player closed, got {:?}", x), }; } } #[test] fn decode_media_stream_stats() { let mut exec = fasync::Executor::new_with_fake_time().expect("executor should build"); let sbc_config = MediaCodecConfig::min_sbc(); let (player, mut sink_requests, _consumer_requests, _vmo) = player::tests::setup_player(&mut exec, sbc_config); let inspector = inspect::component::inspector(); let root = inspector.root(); let d = DataStreamInspect::default().with_inspect(root, "stream").expect("attach to tree"); let inspect = Arc::new(Mutex::new(d)); exec.set_fake_time(fasync::Time::from_nanos(5_678900000)); let (mut media_sender, mut media_receiver) = mpsc::channel(1); let 
decode_fut = decode_media_stream(&mut media_receiver, player, inspect); pin_mut!(decode_fut); assert!(exec.run_until_stalled(&mut decode_fut).is_pending()); fuchsia_inspect::assert_inspect_tree!(inspector, root: { stream: { start_time: 5_678900000i64, total_bytes: 0 as u64, bytes_per_second_current: 0 as u64, }}); // raw rtp header with sequence number of 1 followed by 1 sbc frame let raw = vec![ 128, 96, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0x9c, 0xb1, 0x20, 0x3b, 0x80, 0x00, 0x00, 0x11, 0x7f, 0xfa, 0xab, 0xef, 0x7f, 0xfa, 0xab, 0xef, 0x80, 0x4a, 0xab, 0xaf, 0x80, 0xf2, 0xab, 0xcf, 0x83, 0x8a, 0xac, 0x32, 0x8a, 0x78, 0x8a, 0x53, 0x90, 0xdc, 0xad, 0x49, 0x96, 0xba, 0xaa, 0xe6, 0x9c, 0xa2, 0xab, 0xac, 0xa2, 0x72, 0xa9, 0x2d, 0xa8, 0x9a, 0xab, 0x75, 0xae, 0x82, 0xad, 0x49, 0xb4, 0x6a, 0xad, 0xb1, 0xba, 0x52, 0xa9, 0xa8, 0xc0, 0x32, 0xad, 0x11, 0xc6, 0x5a, 0xab, 0x3a, ]; let sbc_packet_size = 85u64; media_sender.try_send(Ok(raw.clone())).expect("should be able to send into stream"); exec.set_fake_time(fasync::Time::after(1.seconds())); assert!(exec.run_until_stalled(&mut decode_fut).is_pending()); // We should have updated the rx stats. fuchsia_inspect::assert_inspect_tree!(inspector, root: { stream: { start_time: 5_678900000i64, total_bytes: sbc_packet_size, bytes_per_second_current: sbc_packet_size, }}); // Should get a packet send to the sink eventually as player gets around to it loop { assert!(exec.run_until_stalled(&mut decode_fut).is_pending()); match exec.run_until_stalled(&mut sink_requests.select_next_some()) { Poll::Ready(Ok(StreamSinkRequest::SendPacket { .. })) => break, Poll::Pending => {} x => panic!("Expected to receive a packet from sending data.. 
got {:?}", x), }; } } #[test] fn media_stream_task_reopens_player() { let mut exec = fasync::Executor::new_with_fake_time().expect("executor should build"); let (audio_consumer_factory_proxy, mut audio_consumer_factory_request_stream) = create_proxy_and_stream::<SessionAudioConsumerFactoryMarker>() .expect("proxy pair creation"); let sbc_config = MediaCodecConfig::min_sbc(); let inspect = Arc::new(Mutex::new(DataStreamInspect::default())); let pending_stream = futures::stream::pending(); let codec_type = sbc_config.codec_type().clone(); let session_id = 1; let media_stream_fut = media_stream_task( pending_stream, Box::new(move || { player::Player::new( session_id, sbc_config.clone(), audio_consumer_factory_proxy.clone(), ) }), inspect, ); pin_mut!(media_stream_fut); assert!(exec.run_until_stalled(&mut media_stream_fut).is_pending()); let (_sink_request_stream, mut audio_consumer_request_stream, _sink_vmo) = player::tests::expect_player_setup( &mut exec, &mut audio_consumer_factory_request_stream, codec_type.clone(), session_id, ); player::tests::respond_event_status( &mut exec, &mut audio_consumer_request_stream, AudioConsumerStatus { min_lead_time: Some(50), max_lead_time: Some(500), error: None, presentation_timeline: None, ..AudioConsumerStatus::EMPTY }, ); drop(audio_consumer_request_stream); assert!(exec.run_until_stalled(&mut media_stream_fut).is_pending()); // Should set up the player again after it closes. let (_sink_request_stream, audio_consumer_request_stream, _sink_vmo) = player::tests::expect_player_setup( &mut exec, &mut audio_consumer_factory_request_stream, codec_type, session_id, ); // This time we don't respond to the event status, so the player failed immediately after // trying to be rebuilt and we end. drop(audio_consumer_request_stream); assert!(exec.run_until_stalled(&mut media_stream_fut).is_ready()); } }
38.975469
99
0.598482
e911dc0409711605080beb05495e12d669ec0cb2
1,923
#![allow(dead_code, unused_imports)] /// /// This lib will mimic the way CS50 `push` lib works. /// The purpose of this libe is to: /// 1. Allow to have a Canvas with Virtual Width/Height, which will act as render target /// that can then be scaled to the Actual Window size to look like a retro game. /// use ggez::graphics::{self, Canvas, Color, FilterMode}; use ggez::nalgebra::Vector2; use ggez::{conf, Context, GameResult}; pub struct Push { canvas: Canvas, scale: Vector2<f32>, } impl Push { pub fn new( ctx: &mut Context, virtual_width: f32, virtual_height: f32, screen_width: f32, screen_height: f32, ) -> GameResult<Self> { let dpi_factor = graphics::window(ctx).get_hidpi_factor() as f32; let canvas = Canvas::new( ctx, virtual_width as u16 * dpi_factor as u16, virtual_height as u16 * dpi_factor as u16, conf::NumSamples::One, // As we don't need any anti-aliasing, for retro game looks )?; // Upscaling should be done with integral values. // Need to test this theory as a separate task, using canvas Ok(Push { canvas, scale: Vector2::new( screen_width / (virtual_width * dpi_factor), screen_height / (virtual_height * dpi_factor), ), }) } pub fn start(&mut self, ctx: &mut Context) -> GameResult { self.canvas.set_filter(FilterMode::Nearest); graphics::set_canvas(ctx, Some(&self.canvas)); graphics::clear(ctx, graphics::Color::from_rgba(0, 0, 0, 0)); Ok(()) } pub fn end(&mut self, ctx: &mut Context) -> GameResult { graphics::set_canvas(ctx, None); graphics::draw( ctx, &self.canvas, graphics::DrawParam::default().scale(self.scale), )?; Ok(()) } }
30.046875
94
0.583463
5697ace69236b05dfc2eb95a2f386ae2c8e5b1ce
3,013
#![cfg(feature = "full")]

use tokio::time::{self, sleep, Duration};
// `self as stream` is required: the tests below refer to the module as
// `stream::iter(..)`, which otherwise has no binding in scope.
use tokio_stream::{self as stream, StreamExt};
use tokio_test::*;

use futures::StreamExt as _;

/// Sleeps 200ms for even indices (so they exceed the timeout under test),
/// then yields the index unchanged.
async fn maybe_sleep(idx: i32) -> i32 {
    if idx % 2 == 0 {
        sleep(ms(200)).await;
    }
    idx
}

fn ms(n: u64) -> Duration {
    Duration::from_millis(n)
}

#[tokio::test]
async fn basic_usage() {
    time::pause();

    // Items 2 and 4 time out. If we run the stream until it completes,
    // we end up with the following items:
    //
    // [Ok(1), Err(Elapsed), Ok(2), Ok(3), Err(Elapsed), Ok(4)]

    let stream = stream::iter(1..=4).then(maybe_sleep).timeout(ms(100));
    let mut stream = task::spawn(stream);

    // First item completes immediately
    assert_ready_eq!(stream.poll_next(), Some(Ok(1)));

    // Second item is delayed 200ms, times out after 100ms
    assert_pending!(stream.poll_next());

    time::advance(ms(150)).await;
    let v = assert_ready!(stream.poll_next());
    assert!(v.unwrap().is_err());

    assert_pending!(stream.poll_next());

    time::advance(ms(100)).await;
    assert_ready_eq!(stream.poll_next(), Some(Ok(2)));

    // Third item is ready immediately
    assert_ready_eq!(stream.poll_next(), Some(Ok(3)));

    // Fourth item is delayed 200ms, times out after 100ms
    assert_pending!(stream.poll_next());

    time::advance(ms(60)).await;
    assert_pending!(stream.poll_next()); // nothing ready yet

    time::advance(ms(60)).await;
    let v = assert_ready!(stream.poll_next());
    assert!(v.unwrap().is_err()); // timeout!

    time::advance(ms(120)).await;
    assert_ready_eq!(stream.poll_next(), Some(Ok(4)));

    // Done.
    assert_ready_eq!(stream.poll_next(), None);
}

#[tokio::test]
async fn return_elapsed_errors_only_once() {
    time::pause();

    let stream = stream::iter(1..=3).then(maybe_sleep).timeout(ms(50));
    let mut stream = task::spawn(stream);

    // First item completes immediately
    assert_ready_eq!(stream.poll_next(), Some(Ok(1)));

    // Second item is delayed 200ms, times out after 50ms. Only one `Elapsed`
    // error is returned.
    assert_pending!(stream.poll_next());

    time::advance(ms(51)).await;
    let v = assert_ready!(stream.poll_next());
    assert!(v.unwrap().is_err()); // timeout!

    // deadline elapses again, but no error is returned
    time::advance(ms(50)).await;
    assert_pending!(stream.poll_next());

    time::advance(ms(100)).await;
    assert_ready_eq!(stream.poll_next(), Some(Ok(2)));
    assert_ready_eq!(stream.poll_next(), Some(Ok(3)));

    // Done
    assert_ready_eq!(stream.poll_next(), None);
}

#[tokio::test]
async fn no_timeouts() {
    let stream = stream::iter(vec![1, 3, 5])
        .then(maybe_sleep)
        .timeout(ms(100));

    let mut stream = task::spawn(stream);

    assert_ready_eq!(stream.poll_next(), Some(Ok(1)));
    assert_ready_eq!(stream.poll_next(), Some(Ok(3)));
    assert_ready_eq!(stream.poll_next(), Some(Ok(5)));
    assert_ready_eq!(stream.poll_next(), None);
}
27.390909
77
0.63923
29f84291bb7aba68c86227ee121dd8237c860a52
3,356
// Copyright (c) 2018-2021 Rafael Villar Burke <[email protected]> // Distributed under the MIT License // (See acoompanying LICENSE file or a copy at http://opensource.org/licenses/MIT) use std::{fmt::Debug, ops::Deref}; use super::bvh::{Bounded, Intersectable}; use super::ray::Ray; use crate::{point, Point3}; /// Axis aligned bounding box definida por puntos extremos #[derive(Copy, Clone, PartialEq)] pub struct AABB { pub min: Point3, pub max: Point3, } impl Debug for AABB { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let min = self.min; let max = self.max; write!( f, "AABB (min: point![{}, {}, {}], max: point![{}, {}, {}])", min.x, min.y, min.z, max.x, max.y, max.z ) } } impl AABB { /// Constructor pub fn new(min: Point3, max: Point3) -> Self { Self { min, max } } /// Punto medio de la AABB pub fn center(self) -> Point3 { nalgebra::center(&self.max, &self.min) } /// Calcula AABB que incluye a este y otro elemento pub fn join(self, other: Self) -> Self { let minx: f32 = self.min.x.min(other.min.x); let miny: f32 = self.min.y.min(other.min.y); let minz: f32 = self.min.z.min(other.min.z); let maxx: f32 = self.max.x.max(other.max.x); let maxy: f32 = self.max.y.max(other.max.y); let maxz: f32 = self.max.z.max(other.max.z); Self { min: point![minx, miny, minz], max: point![maxx, maxy, maxz], } } } impl Default for AABB { fn default() -> Self { Self { min: point![f32::INFINITY, f32::INFINITY, f32::INFINITY], max: point![f32::NEG_INFINITY, f32::NEG_INFINITY, f32::NEG_INFINITY], } } } impl Bounded for AABB { fn aabb(&self) -> AABB { *self } } impl<T: Bounded, U: Deref<Target = [T]>> Bounded for U { fn aabb(&self) -> AABB { self.iter() .fold(AABB::default(), |res, elem| res.join(elem.aabb())) } } impl Intersectable for AABB { /// Detecta si existe intersección de AABB y rayo usando el algoritmo de Cyrus-Beck /// https://gdbooks.gitbooks.io/3dcollisions/content/Chapter3/raycast_aabb.html /// NaN es siempre distinto, de modo que las comparaciones 
con NaN son correctas /// Las AABB deben tener ancho > 0 en todas las dimensiones fn intersects(&self, ray: &Ray) -> Option<f32> { let idx = 1.0 / ray.dir.x; let idy = 1.0 / ray.dir.y; let idz = 1.0 / ray.dir.z; let t1 = (self.min.x - ray.origin.x) * idx; let t2 = (self.max.x - ray.origin.x) * idx; let t3 = (self.min.y - ray.origin.y) * idy; let t4 = (self.max.y - ray.origin.y) * idy; let t5 = (self.min.z - ray.origin.z) * idz; let t6 = (self.max.z - ray.origin.z) * idz; let tmin = t1.min(t2).max(t3.min(t4)).max(t5.min(t6)); let tmax = t1.max(t2).min(t3.max(t4)).min(t5.max(t6)); // Si tmax < 0 la línea interseca pero el AABB está detrás if tmax < 0.0 { // t = tmax; return None; } // Si tmin > tmax el rayo no corta AABB if tmin > tmax { // t = tmax; return None; } // t = tmin; Some(tmin) } }
29.699115
87
0.547676
aca68a72c4db904fc071e288304c4864134221a1
12,475
use std::cmp::max; use std::ops::{Add, Deref}; use crate::index::{EdgeIndex, IndexType, VertexIndex}; use crate::infra::CompactIndexMap; use crate::marker::{Direction, EdgeType}; pub trait VertexRef<V> { fn index(&self) -> VertexIndex; fn data(&self) -> &V; } pub trait EdgeRef<E, Ty: EdgeType> { fn index(&self) -> EdgeIndex; fn data(&self) -> &E; fn src(&self) -> VertexIndex; fn dst(&self) -> VertexIndex; fn is_directed(&self) -> bool { Ty::is_directed() } } pub trait HyperEdgeRef<E, Ty: EdgeType> { fn index(&self) -> EdgeIndex; fn data(&self) -> &E; fn vertices(&self) -> &[VertexIndex]; fn is_directed(&self) -> bool { Ty::is_directed() } } enum WeakRefData<'a, T> { Borrowed(&'a T), Owned(T), } pub struct WeakRef<'a, T> { data: WeakRefData<'a, T>, } impl<'a, T> WeakRef<'a, T> { pub fn borrowed(borrowed: &'a T) -> Self { Self { data: WeakRefData::Borrowed(borrowed), } } pub fn owned(owned: T) -> Self { Self { data: WeakRefData::Owned(owned), } } } impl<T> Deref for WeakRef<'_, T> { type Target = T; fn deref(&self) -> &Self::Target { match self.data { WeakRefData::Borrowed(data) => data, WeakRefData::Owned(ref data) => data, } } } impl<T> AsRef<T> for WeakRef<'_, T> { fn as_ref(&self) -> &T { match self.data { WeakRefData::Borrowed(data) => data, WeakRefData::Owned(ref data) => data, } } } pub trait Vertices<V> { type VertexRef<'a, T: 'a>: VertexRef<T>; type VertexIndicesIter<'a>: Iterator<Item = VertexIndex> where Self: 'a; type VerticesIter<'a, T: 'a>: Iterator<Item = Self::VertexRef<'a, T>> where Self: 'a; fn vertex_count(&self) -> usize; fn vertex_bound(&self) -> usize; fn vertex(&self, index: VertexIndex) -> Option<&V>; fn vertex_indices(&self) -> Self::VertexIndicesIter<'_>; fn vertices(&self) -> Self::VerticesIter<'_, V>; fn contains_vertex(&self, index: VertexIndex) -> bool { self.vertex(index).is_some() } fn vertex_index_map(&self) -> CompactIndexMap<VertexIndex> { // Should be overridden to use `isomorphic` whenever possible. 
CompactIndexMap::new(self.vertex_indices()) } } pub trait VerticesMut<V>: Vertices<V> { fn vertex_mut(&mut self, index: VertexIndex) -> Option<&mut V>; fn add_vertex(&mut self, vertex: V) -> VertexIndex; fn remove_vertex(&mut self, index: VertexIndex) -> Option<V>; fn replace_vertex(&mut self, index: VertexIndex, vertex: V) -> V; fn clear(&mut self) { // Should be overridden by an efficient implementation whenever // possible. let vertices = self.vertex_indices().collect::<Vec<_>>(); for vertex in vertices { self.remove_vertex(vertex); } } } pub trait VerticesWeak<V> { type VertexIndex = VertexIndex; fn vertex_count_hint(&self) -> Option<usize>; fn vertex_bound_hint(&self) -> Option<usize>; fn vertex_weak(&self, index: Self::VertexIndex) -> Option<WeakRef<'_, V>>; } pub trait Edges<E, Ty: EdgeType> { type EdgeRef<'a, T: 'a>: EdgeRef<T, Ty>; type EdgeIndicesIter<'a>: Iterator<Item = EdgeIndex> where Self: 'a; type EdgesIter<'a, T: 'a>: Iterator<Item = Self::EdgeRef<'a, T>> where Self: 'a; fn edge_count(&self) -> usize; fn edge_bound(&self) -> usize; fn edge(&self, index: EdgeIndex) -> Option<&E>; fn endpoints(&self, index: EdgeIndex) -> Option<(VertexIndex, VertexIndex)>; fn edge_index(&self, src: VertexIndex, dst: VertexIndex) -> Option<EdgeIndex>; fn edge_indices(&self) -> Self::EdgeIndicesIter<'_>; fn edges(&self) -> Self::EdgesIter<'_, E>; fn contains_edge(&self, index: EdgeIndex) -> bool { self.edge(index).is_some() } fn edge_index_map(&self) -> CompactIndexMap<EdgeIndex> { // Should be overridden to use `isomorphic` whenever possible. 
CompactIndexMap::new(self.edge_indices()) } fn is_directed(&self) -> bool { Ty::is_directed() } } pub trait EdgesMut<E, Ty: EdgeType>: Edges<E, Ty> { fn edge_mut(&mut self, index: EdgeIndex) -> Option<&mut E>; fn add_edge(&mut self, src: VertexIndex, dst: VertexIndex, edge: E) -> EdgeIndex; fn remove_edge(&mut self, index: EdgeIndex) -> Option<E>; fn replace_edge(&mut self, index: EdgeIndex, edge: E) -> E; fn clear_edges(&mut self) { // Should be overridden by an efficient implementation whenever // possible. let edges = self.edge_indices().collect::<Vec<_>>(); for edge in edges { self.remove_edge(edge); } } } pub trait EdgesWeak<E, Ty: EdgeType> { type VertexIndex = VertexIndex; type EdgeIndex = EdgeIndex; fn edge_count_hint(&self) -> Option<usize>; fn edge_bound_hint(&self) -> Option<usize>; fn edge_weak(&self, index: Self::EdgeIndex) -> Option<WeakRef<'_, E>>; fn endpoints_weak( &self, index: Self::EdgeIndex, ) -> Option<(Self::VertexIndex, Self::VertexIndex)>; fn edge_index_weak( &self, src: Self::VertexIndex, dst: Self::VertexIndex, ) -> Option<Self::EdgeIndex>; fn contains_edge_weak(&self, index: Self::EdgeIndex) -> bool { self.edge_weak(index).is_some() } fn is_directed_weak(&self) -> bool { Ty::is_directed() } } pub trait MultiEdges<E, Ty: EdgeType>: Edges<E, Ty> { type MultiEdgeIndicesIter<'a>: Iterator<Item = EdgeIndex> where Self: 'a; fn multi_edge_index( &self, src: VertexIndex, dst: VertexIndex, ) -> Self::MultiEdgeIndicesIter<'_>; } pub trait HyperEdges<E, Ty: EdgeType> { fn edge_count(&self) -> usize; fn edge(&self, index: EdgeIndex) -> Option<&E>; fn edge_index(&self, vertices: &[VertexIndex]) -> Option<EdgeIndex>; fn contains_edge(&self, index: EdgeIndex) -> bool { self.edge(index).is_some() } } pub trait NeighborRef { fn index(&self) -> VertexIndex; fn edge(&self) -> EdgeIndex; fn src(&self) -> VertexIndex; fn dir(&self) -> Direction; } pub trait Neighbors { type NeighborRef<'a>: NeighborRef; type NeighborsIter<'a>: Iterator<Item = 
Self::NeighborRef<'a>> where Self: 'a; fn neighbors(&self, src: VertexIndex) -> Self::NeighborsIter<'_>; fn neighbors_directed(&self, src: VertexIndex, dir: Direction) -> Self::NeighborsIter<'_>; fn degree(&self, index: VertexIndex) -> usize { self.neighbors(index).count() } fn degree_directed(&self, index: VertexIndex, dir: Direction) -> usize { self.neighbors_directed(index, dir).count() } } pub trait IntoEdge<E, Ty: EdgeType> { fn unpack(self) -> (VertexIndex, VertexIndex, E); } pub trait Create<V, E, Ty: EdgeType>: VerticesMut<V> + EdgesMut<E, Ty> + Default { fn with_capacity(vertex_count: usize, edge_count: usize) -> Self; } pub trait ExtendWithEdges<T, V, E, Ty: EdgeType> where T: IntoEdge<E, Ty>, V: Default, Self: Create<V, E, Ty>, { fn extend_with_edges<I>(&mut self, iter: I) where I: IntoIterator<Item = T>; fn from_edges<I>(iter: I) -> Self where I: IntoIterator<Item = T>, { let iter = iter.into_iter(); let edge_count = iter.size_hint().1.unwrap_or(32); let vertex_count = max(edge_count / 4, 2); let mut graph = Self::with_capacity(vertex_count, edge_count); graph.extend_with_edges(iter); graph } } impl<T, V, E, Ty: EdgeType, G> ExtendWithEdges<T, V, E, Ty> for G where T: IntoEdge<E, Ty>, V: Default, G: Create<V, E, Ty>, { fn extend_with_edges<I>(&mut self, iter: I) where I: IntoIterator<Item = T>, { for edge in iter { let (src, dst, edge) = edge.unpack(); let vertex_bound = max(src, dst).to_usize(); while self.vertex_count() <= vertex_bound { self.add_vertex(V::default()); } self.add_edge(src, dst, edge); } } } pub trait ExtendWithVertices<V, E, Ty: EdgeType> where Self: Create<V, E, Ty>, { fn extend_with_vertices<I>(&mut self, iter: I) where I: IntoIterator<Item = V>; fn from_vertices<I>(iter: I) -> Self where I: IntoIterator<Item = V>, { let iter = iter.into_iter(); let vertex_count = iter.size_hint().1.unwrap_or(32); let mut graph = Self::with_capacity(vertex_count, 0); graph.extend_with_vertices(iter); graph } } impl<V, E, Ty: EdgeType, G> 
ExtendWithVertices<V, E, Ty> for G where G: Create<V, E, Ty>, { fn extend_with_vertices<I>(&mut self, iter: I) where I: IntoIterator<Item = V>, { for vertex in iter { self.add_vertex(vertex); } } } pub trait StableIndices {} pub trait Guarantee { fn is_loop_free() -> bool { false } fn has_paths_only() -> bool { false } fn has_trees_only() -> bool { // Paths are also trees by definition. Self::has_paths_only() } fn has_bipartite_only() -> bool { // Paths and trees are bipartite by definition. Self::has_paths_only() || Self::has_trees_only() } fn is_connected<Ty: EdgeType>() -> bool { false } } pub trait Constrained<G> { type Error; fn check(graph: &G) -> Result<(), Self::Error>; fn constrain(graph: G) -> Result<Self, Self::Error> where Self: Sized; } pub trait Weight: Ord + Add<Self, Output = Self> + Clone + Sized { fn zero() -> Self; fn inf() -> Self; fn is_unsigned() -> bool; } mod imp { use super::*; impl<'a, V> VertexRef<V> for (VertexIndex, &'a V) { fn index(&self) -> VertexIndex { self.0 } fn data(&self) -> &V { &self.1 } } impl<'a, E, Ty: EdgeType> EdgeRef<E, Ty> for (EdgeIndex, &'a E, VertexIndex, VertexIndex) { fn index(&self) -> EdgeIndex { self.0 } fn data(&self) -> &E { &self.1 } fn src(&self) -> VertexIndex { self.2 } fn dst(&self) -> VertexIndex { self.3 } } impl NeighborRef for (VertexIndex, EdgeIndex, VertexIndex, Direction) { fn index(&self) -> VertexIndex { self.0 } fn edge(&self) -> EdgeIndex { self.1 } fn src(&self) -> VertexIndex { self.2 } fn dir(&self) -> Direction { self.3 } } impl<E, Ty: EdgeType, I: Into<VertexIndex>> IntoEdge<E, Ty> for (I, I, E) { fn unpack(self) -> (VertexIndex, VertexIndex, E) { (self.0.into(), self.1.into(), self.2) } } impl<E: Clone, Ty: EdgeType, I: Into<VertexIndex> + Clone> IntoEdge<E, Ty> for &(I, I, E) { fn unpack(self) -> (VertexIndex, VertexIndex, E) { (self.0.clone().into(), self.1.clone().into(), self.2.clone()) } } impl<E: Default, Ty: EdgeType, I: Into<VertexIndex>> IntoEdge<E, Ty> for (I, I) { fn 
unpack(self) -> (VertexIndex, VertexIndex, E) { (self.0.into(), self.1.into(), E::default()) } } impl<E: Default, Ty: EdgeType, I: Into<VertexIndex> + Clone> IntoEdge<E, Ty> for &(I, I) { fn unpack(self) -> (VertexIndex, VertexIndex, E) { (self.0.clone().into(), self.1.clone().into(), E::default()) } } macro_rules! impl_num_weight { ($ty:ty, $is_unsigned:expr) => { impl Weight for $ty { fn zero() -> Self { 0 } fn inf() -> Self { <$ty>::MAX } fn is_unsigned() -> bool { $is_unsigned } } }; } impl_num_weight!(i8, false); impl_num_weight!(i16, false); impl_num_weight!(i32, false); impl_num_weight!(i64, false); impl_num_weight!(u8, true); impl_num_weight!(u16, true); impl_num_weight!(u32, true); impl_num_weight!(u64, true); impl_num_weight!(isize, false); impl_num_weight!(usize, true); }
25.774793
95
0.566253
8af922704888a2c9f59f901ab49cd15c644b91fb
50,313
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. #[derive(Debug)] pub(crate) struct Handle< C = aws_smithy_client::erase::DynConnector, M = crate::middleware::DefaultMiddleware, R = aws_smithy_client::retry::Standard, > { client: aws_smithy_client::Client<C, M, R>, conf: crate::Config, } /// Client for AWS IoT Events Data /// /// Client for invoking operations on AWS IoT Events Data. Each operation on AWS IoT Events Data is a method on this /// this struct. `.send()` MUST be invoked on the generated operations to dispatch the request to the service. /// /// # Examples /// **Constructing a client and invoking an operation** /// ```rust,no_run /// # async fn docs() { /// // create a shared configuration. This can be used & shared between multiple service clients. /// let shared_config = aws_config::load_from_env().await; /// let client = aws_sdk_ioteventsdata::Client::new(&shared_config); /// // invoke an operation /// /* let rsp = client /// .<operation_name>(). 
/// .<param>("some value") /// .send().await; */ /// # } /// ``` /// **Constructing a client with custom configuration** /// ```rust,no_run /// use aws_config::RetryConfig; /// # async fn docs() { /// let shared_config = aws_config::load_from_env().await; /// let config = aws_sdk_ioteventsdata::config::Builder::from(&shared_config) /// .retry_config(RetryConfig::disabled()) /// .build(); /// let client = aws_sdk_ioteventsdata::Client::from_conf(config); /// # } #[derive(std::fmt::Debug)] pub struct Client< C = aws_smithy_client::erase::DynConnector, M = crate::middleware::DefaultMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<Handle<C, M, R>>, } impl<C, M, R> std::clone::Clone for Client<C, M, R> { fn clone(&self) -> Self { Self { handle: self.handle.clone(), } } } #[doc(inline)] pub use aws_smithy_client::Builder; impl<C, M, R> From<aws_smithy_client::Client<C, M, R>> for Client<C, M, R> { fn from(client: aws_smithy_client::Client<C, M, R>) -> Self { Self::with_config(client, crate::Config::builder().build()) } } impl<C, M, R> Client<C, M, R> { /// Creates a client with the given service configuration. pub fn with_config(client: aws_smithy_client::Client<C, M, R>, conf: crate::Config) -> Self { Self { handle: std::sync::Arc::new(Handle { client, conf }), } } /// Returns the client's configuration. pub fn conf(&self) -> &crate::Config { &self.handle.conf } } impl<C, M, R> Client<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Constructs a fluent builder for the `BatchAcknowledgeAlarm` operation. /// /// See [`BatchAcknowledgeAlarm`](crate::client::fluent_builders::BatchAcknowledgeAlarm) for more information about the /// operation and its arguments. 
pub fn batch_acknowledge_alarm(&self) -> fluent_builders::BatchAcknowledgeAlarm<C, M, R> { fluent_builders::BatchAcknowledgeAlarm::new(self.handle.clone()) } /// Constructs a fluent builder for the `BatchDisableAlarm` operation. /// /// See [`BatchDisableAlarm`](crate::client::fluent_builders::BatchDisableAlarm) for more information about the /// operation and its arguments. pub fn batch_disable_alarm(&self) -> fluent_builders::BatchDisableAlarm<C, M, R> { fluent_builders::BatchDisableAlarm::new(self.handle.clone()) } /// Constructs a fluent builder for the `BatchEnableAlarm` operation. /// /// See [`BatchEnableAlarm`](crate::client::fluent_builders::BatchEnableAlarm) for more information about the /// operation and its arguments. pub fn batch_enable_alarm(&self) -> fluent_builders::BatchEnableAlarm<C, M, R> { fluent_builders::BatchEnableAlarm::new(self.handle.clone()) } /// Constructs a fluent builder for the `BatchPutMessage` operation. /// /// See [`BatchPutMessage`](crate::client::fluent_builders::BatchPutMessage) for more information about the /// operation and its arguments. pub fn batch_put_message(&self) -> fluent_builders::BatchPutMessage<C, M, R> { fluent_builders::BatchPutMessage::new(self.handle.clone()) } /// Constructs a fluent builder for the `BatchResetAlarm` operation. /// /// See [`BatchResetAlarm`](crate::client::fluent_builders::BatchResetAlarm) for more information about the /// operation and its arguments. pub fn batch_reset_alarm(&self) -> fluent_builders::BatchResetAlarm<C, M, R> { fluent_builders::BatchResetAlarm::new(self.handle.clone()) } /// Constructs a fluent builder for the `BatchSnoozeAlarm` operation. /// /// See [`BatchSnoozeAlarm`](crate::client::fluent_builders::BatchSnoozeAlarm) for more information about the /// operation and its arguments. 
pub fn batch_snooze_alarm(&self) -> fluent_builders::BatchSnoozeAlarm<C, M, R> { fluent_builders::BatchSnoozeAlarm::new(self.handle.clone()) } /// Constructs a fluent builder for the `BatchUpdateDetector` operation. /// /// See [`BatchUpdateDetector`](crate::client::fluent_builders::BatchUpdateDetector) for more information about the /// operation and its arguments. pub fn batch_update_detector(&self) -> fluent_builders::BatchUpdateDetector<C, M, R> { fluent_builders::BatchUpdateDetector::new(self.handle.clone()) } /// Constructs a fluent builder for the `DescribeAlarm` operation. /// /// See [`DescribeAlarm`](crate::client::fluent_builders::DescribeAlarm) for more information about the /// operation and its arguments. pub fn describe_alarm(&self) -> fluent_builders::DescribeAlarm<C, M, R> { fluent_builders::DescribeAlarm::new(self.handle.clone()) } /// Constructs a fluent builder for the `DescribeDetector` operation. /// /// See [`DescribeDetector`](crate::client::fluent_builders::DescribeDetector) for more information about the /// operation and its arguments. pub fn describe_detector(&self) -> fluent_builders::DescribeDetector<C, M, R> { fluent_builders::DescribeDetector::new(self.handle.clone()) } /// Constructs a fluent builder for the `ListAlarms` operation. /// /// See [`ListAlarms`](crate::client::fluent_builders::ListAlarms) for more information about the /// operation and its arguments. pub fn list_alarms(&self) -> fluent_builders::ListAlarms<C, M, R> { fluent_builders::ListAlarms::new(self.handle.clone()) } /// Constructs a fluent builder for the `ListDetectors` operation. /// /// See [`ListDetectors`](crate::client::fluent_builders::ListDetectors) for more information about the /// operation and its arguments. pub fn list_detectors(&self) -> fluent_builders::ListDetectors<C, M, R> { fluent_builders::ListDetectors::new(self.handle.clone()) } } pub mod fluent_builders { //! //! Utilities to ergonomically construct a request to the service. //! //! 
Fluent builders are created through the [`Client`](crate::client::Client) by calling //! one if its operation methods. After parameters are set using the builder methods, //! the `send` method can be called to initiate the request. //! /// Fluent builder constructing a request to `BatchAcknowledgeAlarm`. /// /// <p>Acknowledges one or more alarms. The alarms change to the <code>ACKNOWLEDGED</code> state /// after you acknowledge them.</p> #[derive(std::fmt::Debug)] pub struct BatchAcknowledgeAlarm< C = aws_smithy_client::erase::DynConnector, M = crate::middleware::DefaultMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::batch_acknowledge_alarm_input::Builder, } impl<C, M, R> BatchAcknowledgeAlarm<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `BatchAcknowledgeAlarm`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::BatchAcknowledgeAlarmOutput, aws_smithy_http::result::SdkError<crate::error::BatchAcknowledgeAlarmError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::BatchAcknowledgeAlarmInputOperationOutputAlias, crate::output::BatchAcknowledgeAlarmOutput, crate::error::BatchAcknowledgeAlarmError, crate::input::BatchAcknowledgeAlarmInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// Appends an item to `acknowledgeActionRequests`. /// /// To override the contents of this collection use [`set_acknowledge_action_requests`](Self::set_acknowledge_action_requests). /// /// <p>The list of acknowledge action requests. You can specify up to 10 requests per operation.</p> pub fn acknowledge_action_requests( mut self, inp: impl Into<crate::model::AcknowledgeAlarmActionRequest>, ) -> Self { self.inner = self.inner.acknowledge_action_requests(inp); self } /// <p>The list of acknowledge action requests. You can specify up to 10 requests per operation.</p> pub fn set_acknowledge_action_requests( mut self, input: std::option::Option<std::vec::Vec<crate::model::AcknowledgeAlarmActionRequest>>, ) -> Self { self.inner = self.inner.set_acknowledge_action_requests(input); self } } /// Fluent builder constructing a request to `BatchDisableAlarm`. /// /// <p>Disables one or more alarms. 
The alarms change to the <code>DISABLED</code> state after /// you disable them.</p> #[derive(std::fmt::Debug)] pub struct BatchDisableAlarm< C = aws_smithy_client::erase::DynConnector, M = crate::middleware::DefaultMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::batch_disable_alarm_input::Builder, } impl<C, M, R> BatchDisableAlarm<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `BatchDisableAlarm`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. pub async fn send( self, ) -> std::result::Result< crate::output::BatchDisableAlarmOutput, aws_smithy_http::result::SdkError<crate::error::BatchDisableAlarmError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::BatchDisableAlarmInputOperationOutputAlias, crate::output::BatchDisableAlarmOutput, crate::error::BatchDisableAlarmError, crate::input::BatchDisableAlarmInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// Appends an item to `disableActionRequests`. 
/// /// To override the contents of this collection use [`set_disable_action_requests`](Self::set_disable_action_requests). /// /// <p>The list of disable action requests. You can specify up to 10 requests per operation.</p> pub fn disable_action_requests( mut self, inp: impl Into<crate::model::DisableAlarmActionRequest>, ) -> Self { self.inner = self.inner.disable_action_requests(inp); self } /// <p>The list of disable action requests. You can specify up to 10 requests per operation.</p> pub fn set_disable_action_requests( mut self, input: std::option::Option<std::vec::Vec<crate::model::DisableAlarmActionRequest>>, ) -> Self { self.inner = self.inner.set_disable_action_requests(input); self } } /// Fluent builder constructing a request to `BatchEnableAlarm`. /// /// <p>Enables one or more alarms. The alarms change to the <code>NORMAL</code> state after you /// enable them.</p> #[derive(std::fmt::Debug)] pub struct BatchEnableAlarm< C = aws_smithy_client::erase::DynConnector, M = crate::middleware::DefaultMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::batch_enable_alarm_input::Builder, } impl<C, M, R> BatchEnableAlarm<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `BatchEnableAlarm`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::BatchEnableAlarmOutput, aws_smithy_http::result::SdkError<crate::error::BatchEnableAlarmError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::BatchEnableAlarmInputOperationOutputAlias, crate::output::BatchEnableAlarmOutput, crate::error::BatchEnableAlarmError, crate::input::BatchEnableAlarmInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// Appends an item to `enableActionRequests`. /// /// To override the contents of this collection use [`set_enable_action_requests`](Self::set_enable_action_requests). /// /// <p>The list of enable action requests. You can specify up to 10 requests per operation.</p> pub fn enable_action_requests( mut self, inp: impl Into<crate::model::EnableAlarmActionRequest>, ) -> Self { self.inner = self.inner.enable_action_requests(inp); self } /// <p>The list of enable action requests. You can specify up to 10 requests per operation.</p> pub fn set_enable_action_requests( mut self, input: std::option::Option<std::vec::Vec<crate::model::EnableAlarmActionRequest>>, ) -> Self { self.inner = self.inner.set_enable_action_requests(input); self } } /// Fluent builder constructing a request to `BatchPutMessage`. /// /// <p>Sends a set of messages to the AWS IoT Events system. Each message payload is transformed into /// the input you specify (<code>"inputName"</code>) and ingested into any detectors that monitor /// that input. If multiple messages are sent, the order in which the messages are processed isn't /// guaranteed. 
To guarantee ordering, you must send messages one at a time and wait for a /// successful response.</p> #[derive(std::fmt::Debug)] pub struct BatchPutMessage< C = aws_smithy_client::erase::DynConnector, M = crate::middleware::DefaultMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::batch_put_message_input::Builder, } impl<C, M, R> BatchPutMessage<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `BatchPutMessage`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. pub async fn send( self, ) -> std::result::Result< crate::output::BatchPutMessageOutput, aws_smithy_http::result::SdkError<crate::error::BatchPutMessageError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::BatchPutMessageInputOperationOutputAlias, crate::output::BatchPutMessageOutput, crate::error::BatchPutMessageError, crate::input::BatchPutMessageInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// Appends an item to `messages`. /// /// To override the contents of this collection use [`set_messages`](Self::set_messages). 
/// /// <p>The list of messages to send. Each message has the following format: <code>'{ "messageId": /// "string", "inputName": "string", "payload": "string"}'</code> /// </p> pub fn messages(mut self, inp: impl Into<crate::model::Message>) -> Self { self.inner = self.inner.messages(inp); self } /// <p>The list of messages to send. Each message has the following format: <code>'{ "messageId": /// "string", "inputName": "string", "payload": "string"}'</code> /// </p> pub fn set_messages( mut self, input: std::option::Option<std::vec::Vec<crate::model::Message>>, ) -> Self { self.inner = self.inner.set_messages(input); self } } /// Fluent builder constructing a request to `BatchResetAlarm`. /// /// <p>Resets one or more alarms. The alarms return to the <code>NORMAL</code> state after you /// reset them.</p> #[derive(std::fmt::Debug)] pub struct BatchResetAlarm< C = aws_smithy_client::erase::DynConnector, M = crate::middleware::DefaultMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::batch_reset_alarm_input::Builder, } impl<C, M, R> BatchResetAlarm<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `BatchResetAlarm`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::BatchResetAlarmOutput, aws_smithy_http::result::SdkError<crate::error::BatchResetAlarmError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::BatchResetAlarmInputOperationOutputAlias, crate::output::BatchResetAlarmOutput, crate::error::BatchResetAlarmError, crate::input::BatchResetAlarmInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// Appends an item to `resetActionRequests`. /// /// To override the contents of this collection use [`set_reset_action_requests`](Self::set_reset_action_requests). /// /// <p>The list of reset action requests. You can specify up to 10 requests per operation.</p> pub fn reset_action_requests( mut self, inp: impl Into<crate::model::ResetAlarmActionRequest>, ) -> Self { self.inner = self.inner.reset_action_requests(inp); self } /// <p>The list of reset action requests. You can specify up to 10 requests per operation.</p> pub fn set_reset_action_requests( mut self, input: std::option::Option<std::vec::Vec<crate::model::ResetAlarmActionRequest>>, ) -> Self { self.inner = self.inner.set_reset_action_requests(input); self } } /// Fluent builder constructing a request to `BatchSnoozeAlarm`. /// /// <p>Changes one or more alarms to the snooze mode. 
The alarms change to the /// <code>SNOOZE_DISABLED</code> state after you set them to the snooze mode.</p> #[derive(std::fmt::Debug)] pub struct BatchSnoozeAlarm< C = aws_smithy_client::erase::DynConnector, M = crate::middleware::DefaultMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::batch_snooze_alarm_input::Builder, } impl<C, M, R> BatchSnoozeAlarm<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `BatchSnoozeAlarm`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. pub async fn send( self, ) -> std::result::Result< crate::output::BatchSnoozeAlarmOutput, aws_smithy_http::result::SdkError<crate::error::BatchSnoozeAlarmError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::BatchSnoozeAlarmInputOperationOutputAlias, crate::output::BatchSnoozeAlarmOutput, crate::error::BatchSnoozeAlarmError, crate::input::BatchSnoozeAlarmInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// Appends an item to `snoozeActionRequests`. 
/// /// To override the contents of this collection use [`set_snooze_action_requests`](Self::set_snooze_action_requests). /// /// <p>The list of snooze action requests. You can specify up to 10 requests per operation.</p> pub fn snooze_action_requests( mut self, inp: impl Into<crate::model::SnoozeAlarmActionRequest>, ) -> Self { self.inner = self.inner.snooze_action_requests(inp); self } /// <p>The list of snooze action requests. You can specify up to 10 requests per operation.</p> pub fn set_snooze_action_requests( mut self, input: std::option::Option<std::vec::Vec<crate::model::SnoozeAlarmActionRequest>>, ) -> Self { self.inner = self.inner.set_snooze_action_requests(input); self } } /// Fluent builder constructing a request to `BatchUpdateDetector`. /// /// <p>Updates the state, variable values, and timer settings of one or more detectors /// (instances) of a specified detector model.</p> #[derive(std::fmt::Debug)] pub struct BatchUpdateDetector< C = aws_smithy_client::erase::DynConnector, M = crate::middleware::DefaultMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::batch_update_detector_input::Builder, } impl<C, M, R> BatchUpdateDetector<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `BatchUpdateDetector`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::BatchUpdateDetectorOutput, aws_smithy_http::result::SdkError<crate::error::BatchUpdateDetectorError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::BatchUpdateDetectorInputOperationOutputAlias, crate::output::BatchUpdateDetectorOutput, crate::error::BatchUpdateDetectorError, crate::input::BatchUpdateDetectorInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// Appends an item to `detectors`. /// /// To override the contents of this collection use [`set_detectors`](Self::set_detectors). /// /// <p>The list of detectors (instances) to update, along with the values to update.</p> pub fn detectors(mut self, inp: impl Into<crate::model::UpdateDetectorRequest>) -> Self { self.inner = self.inner.detectors(inp); self } /// <p>The list of detectors (instances) to update, along with the values to update.</p> pub fn set_detectors( mut self, input: std::option::Option<std::vec::Vec<crate::model::UpdateDetectorRequest>>, ) -> Self { self.inner = self.inner.set_detectors(input); self } } /// Fluent builder constructing a request to `DescribeAlarm`. /// /// <p>Retrieves information about an alarm.</p> #[derive(std::fmt::Debug)] pub struct DescribeAlarm< C = aws_smithy_client::erase::DynConnector, M = crate::middleware::DefaultMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::describe_alarm_input::Builder, } impl<C, M, R> DescribeAlarm<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `DescribeAlarm`. 
pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. pub async fn send( self, ) -> std::result::Result< crate::output::DescribeAlarmOutput, aws_smithy_http::result::SdkError<crate::error::DescribeAlarmError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::DescribeAlarmInputOperationOutputAlias, crate::output::DescribeAlarmOutput, crate::error::DescribeAlarmError, crate::input::DescribeAlarmInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The name of the alarm model.</p> pub fn alarm_model_name(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.alarm_model_name(inp); self } /// <p>The name of the alarm model.</p> pub fn set_alarm_model_name( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_alarm_model_name(input); self } /// <p>The value of the key used as a filter to select only the alarms associated with the /// <a href="https://docs.aws.amazon.com/iotevents/latest/apireference/API_CreateAlarmModel.html#iotevents-CreateAlarmModel-request-key">key</a>.</p> pub fn key_value(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.key_value(inp); self } /// <p>The value of the key used as a filter to select only 
the alarms associated with the /// <a href="https://docs.aws.amazon.com/iotevents/latest/apireference/API_CreateAlarmModel.html#iotevents-CreateAlarmModel-request-key">key</a>.</p> pub fn set_key_value(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_key_value(input); self } } /// Fluent builder constructing a request to `DescribeDetector`. /// /// <p>Returns information about the specified detector (instance).</p> #[derive(std::fmt::Debug)] pub struct DescribeDetector< C = aws_smithy_client::erase::DynConnector, M = crate::middleware::DefaultMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::describe_detector_input::Builder, } impl<C, M, R> DescribeDetector<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `DescribeDetector`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::DescribeDetectorOutput, aws_smithy_http::result::SdkError<crate::error::DescribeDetectorError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::DescribeDetectorInputOperationOutputAlias, crate::output::DescribeDetectorOutput, crate::error::DescribeDetectorError, crate::input::DescribeDetectorInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The name of the detector model whose detectors (instances) you want information /// about.</p> pub fn detector_model_name(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.detector_model_name(inp); self } /// <p>The name of the detector model whose detectors (instances) you want information /// about.</p> pub fn set_detector_model_name( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_detector_model_name(input); self } /// <p>A filter used to limit results to detectors (instances) created because of the given key /// ID.</p> pub fn key_value(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.key_value(inp); self } /// <p>A filter used to limit results to detectors (instances) created because of the given key /// ID.</p> pub fn set_key_value(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_key_value(input); self } } /// Fluent builder constructing a request to `ListAlarms`. /// /// <p>Lists one or more alarms. 
The operation returns only the metadata associated with each /// alarm.</p> #[derive(std::fmt::Debug)] pub struct ListAlarms< C = aws_smithy_client::erase::DynConnector, M = crate::middleware::DefaultMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::list_alarms_input::Builder, } impl<C, M, R> ListAlarms<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `ListAlarms`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::ListAlarmsOutput, aws_smithy_http::result::SdkError<crate::error::ListAlarmsError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::ListAlarmsInputOperationOutputAlias, crate::output::ListAlarmsOutput, crate::error::ListAlarmsError, crate::input::ListAlarmsInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The name of the alarm model.</p> pub fn alarm_model_name(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.alarm_model_name(inp); self } /// <p>The name of the alarm model.</p> pub fn set_alarm_model_name( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_alarm_model_name(input); self } /// <p>The token that you can use to return the next set of results.</p> pub fn next_token(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.next_token(inp); self } /// <p>The token that you can use to return the next set of results.</p> pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_next_token(input); self } /// <p>The maximum number of results to be returned per request.</p> pub fn max_results(mut self, inp: i32) -> Self { self.inner = self.inner.max_results(inp); self } /// <p>The maximum number of results to be returned per request.</p> pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self { self.inner = self.inner.set_max_results(input); self } } /// Fluent builder constructing a request to `ListDetectors`. 
/// /// <p>Lists detectors (the instances of a detector model).</p> #[derive(std::fmt::Debug)] pub struct ListDetectors< C = aws_smithy_client::erase::DynConnector, M = crate::middleware::DefaultMiddleware, R = aws_smithy_client::retry::Standard, > { handle: std::sync::Arc<super::Handle<C, M, R>>, inner: crate::input::list_detectors_input::Builder, } impl<C, M, R> ListDetectors<C, M, R> where C: aws_smithy_client::bounds::SmithyConnector, M: aws_smithy_client::bounds::SmithyMiddleware<C>, R: aws_smithy_client::retry::NewRequestPolicy, { /// Creates a new `ListDetectors`. pub(crate) fn new(handle: std::sync::Arc<super::Handle<C, M, R>>) -> Self { Self { handle, inner: Default::default(), } } /// Sends the request and returns the response. /// /// If an error occurs, an `SdkError` will be returned with additional details that /// can be matched against. /// /// By default, any retryable failures will be retried twice. Retry behavior /// is configurable with the [RetryConfig](aws_smithy_types::retry::RetryConfig), which can be /// set when configuring the client. 
pub async fn send( self, ) -> std::result::Result< crate::output::ListDetectorsOutput, aws_smithy_http::result::SdkError<crate::error::ListDetectorsError>, > where R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy< crate::input::ListDetectorsInputOperationOutputAlias, crate::output::ListDetectorsOutput, crate::error::ListDetectorsError, crate::input::ListDetectorsInputOperationRetryAlias, >, { let input = self.inner.build().map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; let op = input .make_operation(&self.handle.conf) .await .map_err(|err| { aws_smithy_http::result::SdkError::ConstructionFailure(err.into()) })?; self.handle.client.call(op).await } /// <p>The name of the detector model whose detectors (instances) are listed.</p> pub fn detector_model_name(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.detector_model_name(inp); self } /// <p>The name of the detector model whose detectors (instances) are listed.</p> pub fn set_detector_model_name( mut self, input: std::option::Option<std::string::String>, ) -> Self { self.inner = self.inner.set_detector_model_name(input); self } /// <p>A filter that limits results to those detectors (instances) in the given state.</p> pub fn state_name(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.state_name(inp); self } /// <p>A filter that limits results to those detectors (instances) in the given state.</p> pub fn set_state_name(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = self.inner.set_state_name(input); self } /// <p>The token that you can use to return the next set of results.</p> pub fn next_token(mut self, inp: impl Into<std::string::String>) -> Self { self.inner = self.inner.next_token(inp); self } /// <p>The token that you can use to return the next set of results.</p> pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self { self.inner = 
self.inner.set_next_token(input); self } /// <p>The maximum number of results to be returned per request.</p> pub fn max_results(mut self, inp: i32) -> Self { self.inner = self.inner.max_results(inp); self } /// <p>The maximum number of results to be returned per request.</p> pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self { self.inner = self.inner.set_max_results(input); self } } } impl<C> Client<C, crate::middleware::DefaultMiddleware, aws_smithy_client::retry::Standard> { /// Creates a client with the given service config and connector override. pub fn from_conf_conn(conf: crate::Config, conn: C) -> Self { let retry_config = conf.retry_config.as_ref().cloned().unwrap_or_default(); let timeout_config = conf.timeout_config.as_ref().cloned().unwrap_or_default(); let sleep_impl = conf.sleep_impl.clone(); let mut builder = aws_smithy_client::Builder::new() .connector(conn) .middleware(crate::middleware::DefaultMiddleware::new()); builder.set_retry_config(retry_config.into()); builder.set_timeout_config(timeout_config); if let Some(sleep_impl) = sleep_impl { builder.set_sleep_impl(Some(sleep_impl)); } let client = builder.build(); Self { handle: std::sync::Arc::new(Handle { client, conf }), } } } impl Client< aws_smithy_client::erase::DynConnector, crate::middleware::DefaultMiddleware, aws_smithy_client::retry::Standard, > { /// Creates a new client from a shared config. #[cfg(any(feature = "rustls", feature = "native-tls"))] pub fn new(config: &aws_types::config::Config) -> Self { Self::from_conf(config.into()) } /// Creates a new client from the service [`Config`](crate::Config). 
#[cfg(any(feature = "rustls", feature = "native-tls"))] pub fn from_conf(conf: crate::Config) -> Self { let retry_config = conf.retry_config.as_ref().cloned().unwrap_or_default(); let timeout_config = conf.timeout_config.as_ref().cloned().unwrap_or_default(); let sleep_impl = conf.sleep_impl.clone(); let mut builder = aws_smithy_client::Builder::dyn_https() .middleware(crate::middleware::DefaultMiddleware::new()); builder.set_retry_config(retry_config.into()); builder.set_timeout_config(timeout_config); // the builder maintains a try-state. To avoid suppressing the warning when sleep is unset, // only set it if we actually have a sleep impl. if let Some(sleep_impl) = sleep_impl { builder.set_sleep_impl(Some(sleep_impl)); } let client = builder.build(); Self { handle: std::sync::Arc::new(Handle { client, conf }), } } }
43.187124
157
0.597082
48cb07bd0952e2278f82b26ed3849d1f294d8f11
5,755
/// Trait used to limit data types for images pub trait ImageDataType: Clone + From<u8> + Copy {} // Types supported for the image data are u8, u16, f32, and f64 impl ImageDataType for u8 {} impl ImageDataType for u16 {} impl ImageDataType for f32 {} impl ImageDataType for f64 {} /// Basic representation of an image. pub struct Image<T> where T: ImageDataType { /// Number of columns in the image cols: u32, /// Number of rows in the image rows: u32, /// The color depth of the image depth: u8, /// The color type of the image color_type: ColorType, /// Vector of image data. Images are stored as a vector in memory to be as /// flexible as possible. /// /// The vector is organised as a flat version of a matrix. To access color /// channel c for the pixel at (x, y) (with indexing starting at zero being /// the top left pixel), you would access element /// data[depth * (y * cols + x) + c]. data: Vec<T> } #[derive(Debug, PartialEq)] pub enum ColorType { Mono, RGB, RGBA } impl std::fmt::Display for ColorType { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { ColorType::Mono => write!(f, "Monochromatic"), ColorType::RGB => write!(f, "RGB"), ColorType::RGBA => write!(f, "RGBA") } } } impl<T> Image<T> where T: ImageDataType { pub fn new(cols: u32, rows: u32, color_type: ColorType) -> Image<T> { match color_type { ColorType::Mono => Image::<T> { cols: cols, rows: rows, depth: 1 as u8, color_type: color_type, data: vec![T::from(0 as u8); (cols * rows) as usize] }, ColorType::RGB => Image::<T> { cols: cols, rows: rows, depth: 3 as u8, color_type: color_type, data: vec![T::from(0 as u8); (cols * rows * 3) as usize] }, ColorType::RGBA => Image::<T> { cols: cols, rows: rows, depth: 4 as u8, color_type: color_type, data: vec![T::from(0 as u8); (cols * rows * 4) as usize] } } } /// Get the data for a particular pixel /// /// # Arguments /// *`col` - The zero-based column index for the pixel /// *`row` - The zero-based row index for the pixel /// /// # 
Returns /// Vector of channels for the indexed pixel, or error if pixel is out of /// bounds for the image. A vector is returned here to allow multi-channel /// data retrieval. For single channel (mono) using `get_pixel_mono` may be /// simpler. pub fn get_pixel_data(&self, col: u32, row: u32) -> Result<Vec<T>, String> { if (col < self.cols) && (row < self.rows) { let idx_bot = (self.depth as u32 * (col * self.cols + row)) as usize; let idx_top = idx_bot + self.depth as usize; let pix_data: Vec<T> = self.data[idx_bot..idx_top].to_vec(); Ok(pix_data) } else { Err(format!( "Out of bounds: Cannot get pixel data for ({}, {}) since the \ image is only {}x{}.", col, row, self.cols, self.rows)) } } /// Get monochromatic pixel information for the image pub fn get_pixel_mono(&self, col: u32, row: u32) -> Result<T, String> { if self.color_type != ColorType::Mono { return Err(format!( "Cannot get monochromatic data for an image with a {} color \ type.", self.color_type)); } if (col < self.cols) && (row < self.rows) { let idx = (col * self.cols + row) as usize; Ok(self.data[idx]) } else { Err(format!( "Out of bounds: Cannot get pixel data for ({}, {}) since the \ image is only {}x{}.", col, row, self.cols, self.rows)) } } pub fn set_pixel_data(&mut self, col: u32, row: u32, val: Vec<T>) -> Result<(), String> { if (col < self.cols) && (row < self.rows) { let idx_bot = (self.depth as u32 * (col * self.cols + row)) as usize; for i in 0..(self.depth as usize) { self.data[i + idx_bot] = val[i]; } Ok(()) } else { Err(format!( "Out of bounds: Cannot get pixel data for ({}, {}) since the \ image is only {}x{}.", col, row, self.cols, self.rows)) } } } #[cfg(test)] mod tests { use crate::image::*; #[test] fn image_init() { let img_mono: Image<u8> = Image::new(2, 2, ColorType::Mono); println!( "Created a new image with:\n {} columns, {} rows, {} channels", img_mono.cols, img_mono.rows, img_mono.depth); println!(" Data: {:?}", img_mono.data); let img_rgb: Image<u8> = Image::new(2, 2, 
ColorType::RGB); println!( "Created a new image with:\n {} columns, {} rows, {} channels", img_rgb.cols, img_rgb.rows, img_rgb.depth); println!(" Data: {:?}", img_rgb.data); println!("{:?}", img_mono.get_pixel_data(1, 0).unwrap()); println!("{:?}", img_rgb.get_pixel_data(1, 0).unwrap()); println!("{}", img_mono.get_pixel_mono(1, 0).unwrap()); println!("{}", img_rgb.get_pixel_mono(1, 0).unwrap()); } }
30.611702
93
0.504431
e5c93006c43d49edd5b18fff439690dc0422b6cb
257,958
// Copyright (c) Facebook, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use decl_provider::DeclProvider; use ffi::{Maybe::Just, Slice, Str}; use hhbc_by_ref_ast_class_expr::ClassExpr; use hhbc_by_ref_ast_constant_folder as ast_constant_folder; use hhbc_by_ref_emit_adata as emit_adata; use hhbc_by_ref_emit_fatal as emit_fatal; use hhbc_by_ref_emit_pos::{emit_pos, emit_pos_then}; use hhbc_by_ref_emit_symbol_refs as emit_symbol_refs; use hhbc_by_ref_emit_type_constant as emit_type_constant; use hhbc_by_ref_env::{emitter::Emitter, Env, Flags as EnvFlags}; use hhbc_by_ref_hhbc_ast::*; use hhbc_by_ref_hhbc_id::{class, r#const, function, method, prop, Id}; use hhbc_by_ref_hhbc_string_utils as string_utils; use hhbc_by_ref_instruction_sequence::{ instr, unrecoverable, Error::{self, Unrecoverable}, InstrSeq, Result, }; use hhbc_by_ref_label::Label; use hhbc_by_ref_local::Local; use hhbc_by_ref_options::{CompilerFlags, HhvmFlags, LangFlags, Options}; use hhbc_by_ref_runtime::TypedValue; use hhbc_by_ref_scope::scope; use hhbc_by_ref_symbol_refs_state::IncludePath; use itertools::Either; use lazy_static::lazy_static; use naming_special_names_rust::{ emitter_special_functions, fb, pseudo_consts, pseudo_functions, special_functions, special_idents, superglobals, typehints, user_attributes, }; use oxidized::{ aast, aast_defs, aast_visitor::{visit, visit_mut, AstParams, Node, NodeMut, Visitor, VisitorMut}, ast, ast_defs, local_id, pos::Pos, }; use regex::Regex; use hash::HashSet; use indexmap::IndexSet; use std::{ collections::BTreeMap, convert::TryInto, iter, result::Result as StdResult, str::FromStr, }; #[derive(Debug)] pub struct EmitJmpResult<'arena> { // generated instruction sequence pub instrs: InstrSeq<'arena>, // does instruction sequence fall through is_fallthrough: bool, // was label associated with emit operation used is_label_used: bool, } #[derive(Clone, 
Copy, Debug, Eq, PartialEq)] pub enum LValOp { Set, SetOp(EqOp), IncDec(IncdecOp), Unset, } impl LValOp { fn is_incdec(&self) -> bool { if let Self::IncDec(_) = self { return true; }; false } } pub fn is_local_this<'a, 'arena>(env: &Env<'a, 'arena>, lid: &local_id::LocalId) -> bool { local_id::get_name(lid) == special_idents::THIS && env.scope.has_this() && !env.scope.is_toplevel() } mod inout_locals { use crate::*; use hash::HashMap; use oxidized::{aast_defs::Lid, aast_visitor, aast_visitor::Node, ast, ast_defs}; use std::marker::PhantomData; pub(super) struct AliasInfo { first_inout: isize, last_write: isize, num_uses: usize, } impl Default for AliasInfo { fn default() -> Self { AliasInfo { first_inout: std::isize::MAX, last_write: std::isize::MIN, num_uses: 0, } } } impl AliasInfo { pub(super) fn add_inout(&mut self, i: isize) { if i < self.first_inout { self.first_inout = i; } } pub(super) fn add_write(&mut self, i: isize) { if i > self.last_write { self.last_write = i; } } pub(super) fn add_use(&mut self) { self.num_uses += 1 } pub(super) fn in_range(&self, i: isize) -> bool { i > self.first_inout || i <= self.last_write } pub(super) fn has_single_ref(&self) -> bool { self.num_uses < 2 } } pub(super) type AliasInfoMap<'ast> = HashMap<&'ast str, AliasInfo>; pub(super) fn new_alias_info_map<'ast>() -> AliasInfoMap<'ast> { HashMap::default() } fn add_write<'ast>(name: &'ast str, i: usize, map: &mut AliasInfoMap<'ast>) { map.entry(name.as_ref()).or_default().add_write(i as isize); } fn add_inout<'ast>(name: &'ast str, i: usize, map: &mut AliasInfoMap<'ast>) { map.entry(name.as_ref()).or_default().add_inout(i as isize); } fn add_use<'ast>(name: &'ast str, map: &mut AliasInfoMap<'ast>) { map.entry(name.as_ref()).or_default().add_use(); } // determines if value of a local 'name' that appear in parameter 'i' // should be saved to local because it might be overwritten later pub(super) fn should_save_local_value(name: &str, i: usize, aliases: &AliasInfoMap) -> bool { 
aliases .get(name) .map_or(false, |alias| alias.in_range(i as isize)) } pub(super) fn should_move_local_value<'arena>( local: &Local<'arena>, aliases: &AliasInfoMap, ) -> bool { match local { Local::Named(name) => aliases .get(name.as_str()) .map_or(true, |alias| alias.has_single_ref()), Local::Unnamed(_) => false, } } pub(super) fn collect_written_variables<'ast, 'arena>( env: &Env<'ast, 'arena>, args: &'ast [ast::Expr], ) -> AliasInfoMap<'ast> { let mut acc = HashMap::default(); args.iter() .enumerate() .for_each(|(i, arg)| handle_arg(env, true, i, arg, &mut acc)); acc } fn handle_arg<'ast, 'arena>( env: &Env<'ast, 'arena>, is_top: bool, i: usize, arg: &'ast ast::Expr, acc: &mut AliasInfoMap<'ast>, ) { use ast::{Expr, Expr_}; let Expr(_, _, e) = arg; // inout $v if let Some((ast_defs::ParamKind::Pinout, Expr(_, _, Expr_::Lvar(lid)))) = e.as_callconv() { let Lid(_, lid) = &**lid; if !is_local_this(env, &lid) { add_use(&lid.1, acc); return if is_top { add_inout(lid.1.as_str(), i, acc); } else { add_write(lid.1.as_str(), i, acc); }; } } // $v if let Some(Lid(_, (_, id))) = e.as_lvar() { return add_use(id.as_str(), acc); } // dive into argument value aast_visitor::visit( &mut Visitor(PhantomData), &mut Ctx { state: acc, env, i }, arg, ) .unwrap(); } struct Visitor<'r, 'arena>(PhantomData<(&'arena (), &'r ())>); pub struct Ctx<'r, 'ast: 'r, 'arena: 'r> { state: &'r mut AliasInfoMap<'ast>, env: &'r Env<'ast, 'arena>, i: usize, } impl<'r, 'ast: 'r, 'arena: 'r> aast_visitor::Visitor<'ast> for Visitor<'r, 'arena> { type P = aast_visitor::AstParams<Ctx<'r, 'ast, 'arena>, ()>; fn object(&mut self) -> &mut dyn aast_visitor::Visitor<'ast, P = Self::P> { self } fn visit_expr_( &mut self, c: &mut Ctx<'r, 'ast, 'arena>, p: &'ast ast::Expr_, ) -> std::result::Result<(), ()> { // f(inout $v) or f(&$v) if let ast::Expr_::Call(expr) = p { let (_, _, args, uarg) = &**expr; args.iter() .for_each(|arg| handle_arg(&c.env, false, c.i, arg, &mut c.state)); if let Some(arg) = uarg.as_ref() 
{ handle_arg(&c.env, false, c.i, arg, &mut c.state) } Ok(()) } else { p.recurse(c, self.object())?; Ok(match p { // lhs op= _ ast::Expr_::Binop(expr) => { let (bop, left, _) = &**expr; if let ast_defs::Bop::Eq(_) = bop { collect_lvars_hs(c, left) } } // $i++ or $i-- ast::Expr_::Unop(expr) => { let (uop, e) = &**expr; match uop { ast_defs::Uop::Uincr | ast_defs::Uop::Udecr => collect_lvars_hs(c, e), _ => {} } } // $v ast::Expr_::Lvar(expr) => { let Lid(_, (_, id)) = &**expr; add_use(id, &mut c.state); } _ => {} }) } } } // impl<'ast, 'a, 'arena> aast_visitor::Visitor<'ast> for Visitor<'a, 'arena> // collect lvars on the left hand side of '=' operator fn collect_lvars_hs<'r, 'ast, 'arena>(ctx: &mut Ctx<'r, 'ast, 'arena>, expr: &'ast ast::Expr) { let ast::Expr(_, _, e) = expr; match &*e { ast::Expr_::Lvar(lid) => { let Lid(_, lid) = &**lid; if !is_local_this(&ctx.env, &lid) { add_use(lid.1.as_str(), &mut ctx.state); add_write(lid.1.as_str(), ctx.i, &mut ctx.state); } } ast::Expr_::List(exprs) => exprs.iter().for_each(|expr| collect_lvars_hs(ctx, expr)), _ => {} } } } //mod inout_locals pub fn get_type_structure_for_hint<'arena, 'decl, D: DeclProvider<'decl>>( alloc: &'arena bumpalo::Bump, e: &mut Emitter<'arena, 'decl, D>, tparams: &[&str], targ_map: &IndexSet<&str>, hint: &aast::Hint, ) -> std::result::Result<InstrSeq<'arena>, hhbc_by_ref_instruction_sequence::Error> { let targ_map: BTreeMap<&str, i64> = targ_map .iter() .enumerate() .map(|(i, n)| (*n, i as i64)) .collect(); let tv = emit_type_constant::hint_to_type_constant( alloc, e.options(), tparams, &targ_map, &hint, false, false, )?; let i = Str::from(emit_adata::get_array_identifier(alloc, e, &tv)); Ok(instr::lit_const(alloc, InstructLitConst::Dict(i))) } pub struct Setrange { pub op: SetrangeOp, pub size: usize, pub vec: bool, } /// kind of value stored in local #[derive(Debug, Clone, Copy)] pub enum StoredValueKind { Local, Expr, } /// represents sequence of instructions interleaved with temp locals. 
/// <(i, None) :: rest> - is emitted i :: <rest> (commonly used for final instructions in sequence) /// <(i, Some(l, local_kind)) :: rest> is emitted as /// /// i /// .try { /// setl/popl l; depending on local_kind /// <rest> /// } .catch { /// unset l /// throw /// } /// unsetl l type InstrSeqWithLocals<'arena> = Vec<(InstrSeq<'arena>, Option<(Local<'arena>, StoredValueKind)>)>; /// result of emit_array_get enum ArrayGetInstr<'arena> { /// regular $a[..] that does not need to spill anything Regular(InstrSeq<'arena>), /// subscript expression used as inout argument that need to spill intermediate values: Inout { /// instruction sequence with locals to load value load: InstrSeqWithLocals<'arena>, /// instruction to set value back (can use locals defined in load part) store: InstrSeq<'arena>, }, } struct ArrayGetBaseData<'arena, T> { base_instrs: T, cls_instrs: InstrSeq<'arena>, setup_instrs: InstrSeq<'arena>, base_stack_size: StackIndex, cls_stack_size: StackIndex, } /// result of emit_base enum ArrayGetBase<'arena> { /// regular <base> part in <base>[..] 
that does not need to spill anything Regular(ArrayGetBaseData<'arena, InstrSeq<'arena>>), /// base of subscript expression used as inout argument that need to spill /// intermediate values Inout { /// instructions to load base part load: ArrayGetBaseData<'arena, InstrSeqWithLocals<'arena>>, /// instruction to load base part for setting inout argument back store: InstrSeq<'arena>, }, } pub fn emit_expr<'a, 'arena, 'decl, D: DeclProvider<'decl>>( emitter: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, expression: &ast::Expr, ) -> Result<InstrSeq<'arena>> { use aast_defs::Lid; use ast::Expr_; let alloc = env.arena; let ast::Expr(_, pos, expr) = expression; match expr { Expr_::Float(_) | Expr_::EnumClassLabel(_) | Expr_::String(_) | Expr_::Int(_) | Expr_::Null | Expr_::False | Expr_::True => { let v = ast_constant_folder::expr_to_typed_value(alloc, emitter, expression) .map_err(|_| unrecoverable("expr_to_typed_value failed"))?; Ok(emit_pos_then(alloc, pos, instr::typedvalue(alloc, v))) } Expr_::PrefixedString(e) => emit_expr(emitter, env, &e.1), Expr_::Lvar(e) => { let Lid(pos, _) = &**e; Ok(InstrSeq::gather( alloc, vec![ emit_pos(alloc, pos), emit_local(emitter, env, BareThisOp::Notice, e)?, ], )) } Expr_::ClassConst(e) => emit_class_const(emitter, env, pos, &e.0, &e.1), Expr_::Unop(e) => emit_unop(emitter, env, pos, e), Expr_::Binop(_) => emit_binop(emitter, env, pos, expression), Expr_::Pipe(e) => emit_pipe(emitter, env, e), Expr_::Is(is_expr) => { let (e, h) = &**is_expr; let is = emit_is(emitter, env, pos, h)?; Ok(InstrSeq::gather( alloc, vec![emit_expr(emitter, env, e)?, is], )) } Expr_::As(e) => emit_as(emitter, env, pos, e), Expr_::Cast(e) => emit_cast(emitter, env, pos, &(e.0).1, &e.1), Expr_::Eif(e) => emit_conditional_expr(emitter, env, pos, &e.0, &e.1, &e.2), Expr_::ArrayGet(e) => { let (base_expr, opt_elem_expr) = &**e; Ok(emit_array_get( emitter, env, pos, None, QueryOp::CGet, base_expr, opt_elem_expr.as_ref(), false, false, )? 
.0) } Expr_::ObjGet(e) => { Ok(emit_obj_get(emitter, env, pos, QueryOp::CGet, &e.0, &e.1, &e.2, false)?.0) } Expr_::Call(c) => emit_call_expr(emitter, env, pos, None, c), Expr_::New(e) => emit_new(emitter, env, pos, e, false), Expr_::FunctionPointer(fp) => emit_function_pointer(emitter, env, pos, &fp.0, &fp.1), Expr_::Record(e) => emit_record(emitter, env, pos, e), Expr_::Darray(e) => Ok(emit_pos_then( alloc, pos, emit_collection(emitter, env, expression, &mk_afkvalues(&e.1), None)?, )), Expr_::Varray(e) => Ok(emit_pos_then( alloc, pos, emit_collection(emitter, env, expression, &mk_afvalues(&e.1), None)?, )), Expr_::Collection(e) => emit_named_collection_str(emitter, env, expression, e), Expr_::ValCollection(e) => { let (kind, _, es) = &**e; let fields = mk_afvalues(es); let collection_typ = match kind { aast_defs::VcKind::Vector => CollectionType::Vector, aast_defs::VcKind::ImmVector => CollectionType::ImmVector, aast_defs::VcKind::Set => CollectionType::Set, aast_defs::VcKind::ImmSet => CollectionType::ImmSet, _ => return emit_collection(emitter, env, expression, &fields, None), }; emit_named_collection(emitter, env, pos, expression, &fields, collection_typ) } Expr_::Pair(e) => { let (_, e1, e2) = (**e).to_owned(); let fields = mk_afvalues(&[e1, e2]); emit_named_collection(emitter, env, pos, expression, &fields, CollectionType::Pair) } Expr_::KeyValCollection(e) => { let (kind, _, fields) = &**e; let fields = mk_afkvalues( &fields .to_owned() .into_iter() .map(|ast::Field(e1, e2)| (e1, e2)) .collect::<Vec<_>>() .as_slice(), ); let collection_typ = match kind { aast_defs::KvcKind::Map => CollectionType::Map, aast_defs::KvcKind::ImmMap => CollectionType::ImmMap, _ => return emit_collection(emitter, env, expression, &fields, None), }; emit_named_collection(emitter, env, pos, expression, &fields, collection_typ) } Expr_::Clone(e) => Ok(emit_pos_then(alloc, pos, emit_clone(emitter, env, e)?)), Expr_::Shape(e) => Ok(emit_pos_then( alloc, pos, emit_shape(emitter, env, 
expression, e)?, )), Expr_::Await(e) => emit_await(emitter, env, pos, e), // TODO: emit readonly expressions Expr_::ReadonlyExpr(e) => emit_expr(emitter, env, e), Expr_::Yield(e) => emit_yield(emitter, env, pos, e), Expr_::Efun(e) => Ok(emit_pos_then( alloc, pos, emit_lambda(emitter, env, &e.0, &e.1)?, )), Expr_::ClassGet(e) => emit_class_get(emitter, env, QueryOp::CGet, &e.0, &e.1), Expr_::String2(es) => emit_string2(emitter, env, pos, es), Expr_::Id(e) => Ok(emit_pos_then(alloc, pos, emit_id(emitter, env, e)?)), Expr_::Xml(_) => Err(unrecoverable( "emit_xhp: syntax should have been converted during rewriting", )), Expr_::Callconv(_) => Err(unrecoverable( "emit_callconv: This should have been caught at emit_arg", )), Expr_::Import(e) => emit_import(emitter, env, pos, &e.0, &e.1), Expr_::Omitted => Ok(instr::empty(alloc)), Expr_::Lfun(_) => Err(unrecoverable( "expected Lfun to be converted to Efun during closure conversion emit_expr", )), Expr_::List(_) => Err(emit_fatal::raise_fatal_parse( pos, "list() can only be used as an lvar. 
Did you mean to use tuple()?", )), Expr_::Tuple(e) => Ok(emit_pos_then( alloc, pos, emit_collection(emitter, env, expression, &mk_afvalues(&e), None)?, )), Expr_::This | Expr_::Lplaceholder(_) | Expr_::Dollardollar(_) => { unimplemented!("TODO(hrust) Codegen after naming pass on AAST") } Expr_::ExpressionTree(et) => emit_expr(emitter, env, &et.runtime_expr), Expr_::ETSplice(_) => Err(unrecoverable( "expression trees: splice should be erased during rewriting", )), Expr_::FunId(_) | Expr_::MethodId(_) | Expr_::MethodCaller(_) | Expr_::SmethodId(_) | Expr_::Hole(_) => { unimplemented!("TODO(hrust)") } } } fn emit_exprs<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, exprs: &[ast::Expr], ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; if exprs.is_empty() { Ok(instr::empty(alloc)) } else { Ok(InstrSeq::gather( alloc, exprs .iter() .map(|expr| emit_expr(e, env, expr)) .collect::<Result<Vec<_>>>()?, )) } } fn emit_id<'a, 'arena, 'decl, D: DeclProvider<'decl>>( emitter: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, id: &ast::Sid, ) -> Result<InstrSeq<'arena>> { use pseudo_consts::*; use InstructLitConst::*; let alloc = env.arena; let ast_defs::Id(p, s) = id; let res = match s.as_str() { G__FILE__ => instr::lit_const(alloc, File), G__DIR__ => instr::lit_const(alloc, Dir), G__METHOD__ => instr::lit_const(alloc, Method), G__FUNCTION_CREDENTIAL__ => instr::lit_const(alloc, FuncCred), G__CLASS__ => InstrSeq::gather(alloc, vec![instr::self_(alloc), instr::classname(alloc)]), G__COMPILER_FRONTEND__ => instr::string(alloc, "hackc"), G__LINE__ => instr::int( alloc, p.info_pos_extended().1.try_into().map_err(|_| { emit_fatal::raise_fatal_parse(p, "error converting end of line from usize to isize") })?, ), G__NAMESPACE__ => instr::string(alloc, env.namespace.name.as_ref().map_or("", |s| &s[..])), EXIT | DIE => return emit_exit(emitter, env, None), _ => { // panic!("TODO: uncomment after D19350786 lands") // let 
cid: ConstId = r#const::ConstType::from_ast_name(&s); let cid: ConstId = (alloc, string_utils::strip_global_ns(&s)).into(); emit_symbol_refs::add_constant(alloc, emitter, cid.clone()); return Ok(emit_pos_then(alloc, p, instr::lit_const(alloc, CnsE(cid)))); } }; Ok(res) } fn emit_exit<'a, 'arena, 'decl, D: DeclProvider<'decl>>( emitter: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, expr_opt: Option<&ast::Expr>, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; Ok(InstrSeq::gather( alloc, vec![ expr_opt.map_or_else(|| Ok(instr::int(alloc, 0)), |e| emit_expr(emitter, env, e))?, instr::exit(alloc), ], )) } fn emit_yield<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, af: &ast::Afield, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; Ok(match af { ast::Afield::AFvalue(v) => InstrSeq::gather( alloc, vec![ emit_expr(e, env, v)?, emit_pos(alloc, pos), instr::yield_(alloc), ], ), ast::Afield::AFkvalue(k, v) => InstrSeq::gather( alloc, vec![ emit_expr(e, env, k)?, emit_expr(e, env, v)?, emit_pos(alloc, pos), instr::yieldk(alloc), ], ), }) } fn parse_include<'arena>(alloc: &'arena bumpalo::Bump, e: &ast::Expr) -> IncludePath<'arena> { fn strip_backslash(s: &mut String) { if s.starts_with('/') { *s = s[1..].into() } } fn split_var_lit(e: &ast::Expr) -> (String, String) { match &e.2 { ast::Expr_::Binop(x) if x.0.is_dot() => { let (v, l) = split_var_lit(&x.2); if v.is_empty() { let (var, lit) = split_var_lit(&x.1); (var, format!("{}{}", lit, l)) } else { (v, String::new()) } } ast::Expr_::String(lit) => (String::new(), lit.to_string()), _ => (text_of_expr(e), String::new()), } } let (mut var, mut lit) = split_var_lit(e); if var == pseudo_consts::G__DIR__ { var = String::new(); strip_backslash(&mut lit); } if var.is_empty() { if std::path::Path::new(lit.as_str()).is_relative() { IncludePath::SearchPathRelative(Str::new_str(alloc, lit)) } else { IncludePath::Absolute(Str::new_str(alloc, lit)) } } 
else { strip_backslash(&mut lit); IncludePath::IncludeRootRelative(Str::new_str(alloc, var), Str::new_str(alloc, lit)) } } fn text_of_expr(e: &ast::Expr) -> String { match &e.2 { ast::Expr_::String(s) => format!("\'{}\'", s), ast::Expr_::Id(id) => id.1.to_string(), ast::Expr_::Lvar(lid) => local_id::get_name(&lid.1).to_string(), ast::Expr_::ArrayGet(x) => match ((x.0).2.as_lvar(), x.1.as_ref()) { (Some(ast::Lid(_, id)), Some(e_)) => { format!("{}[{}]", local_id::get_name(&id), text_of_expr(e_)) } _ => "unknown".into(), }, _ => "unknown".into(), } } fn text_of_class_id(cid: &ast::ClassId) -> String { match &cid.2 { ast::ClassId_::CIparent => "parent".into(), ast::ClassId_::CIself => "self".into(), ast::ClassId_::CIstatic => "static".into(), ast::ClassId_::CIexpr(e) => text_of_expr(e), ast::ClassId_::CI(ast_defs::Id(_, id)) => id.into(), } } fn text_of_prop(prop: &ast::ClassGetExpr) -> String { match prop { ast::ClassGetExpr::CGstring((_, s)) => s.into(), ast::ClassGetExpr::CGexpr(e) => text_of_expr(e), } } fn emit_import<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, flavor: &ast::ImportFlavor, expr: &ast::Expr, ) -> Result<InstrSeq<'arena>> { use ast::ImportFlavor; let alloc = env.arena; let inc = parse_include(alloc, expr); emit_symbol_refs::add_include(alloc, e, inc.clone()); let (expr_instrs, import_op_instr) = match flavor { ImportFlavor::Include => (emit_expr(e, env, expr)?, instr::incl(alloc)), ImportFlavor::Require => (emit_expr(e, env, expr)?, instr::req(alloc)), ImportFlavor::IncludeOnce => (emit_expr(e, env, expr)?, instr::inclonce(alloc)), ImportFlavor::RequireOnce => { match inc.into_doc_root_relative(alloc, e.options().hhvm.include_roots.get()) { IncludePath::DocRootRelative(path) => { let expr = ast::Expr((), pos.clone(), ast::Expr_::String(path.as_str().into())); (emit_expr(e, env, &expr)?, instr::reqdoc(alloc)) } _ => (emit_expr(e, env, expr)?, instr::reqonce(alloc)), } } }; 
Ok(InstrSeq::gather( alloc, vec![expr_instrs, emit_pos(alloc, pos), import_op_instr], )) } fn emit_string2<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, es: &[ast::Expr], ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; if es.is_empty() { Err(unrecoverable("String2 with zero araguments is impossible")) } else if es.len() == 1 { Ok(InstrSeq::gather( alloc, vec![ emit_expr(e, env, &es[0])?, emit_pos(alloc, pos), instr::cast_string(alloc), ], )) } else { Ok(InstrSeq::gather( alloc, vec![ emit_two_exprs(e, env, &es[0].1, &es[0], &es[1])?, emit_pos(alloc, pos), instr::concat(alloc), InstrSeq::gather( alloc, (&es[2..]) .iter() .map(|expr| { Ok(InstrSeq::gather( alloc, vec![ emit_expr(e, env, expr)?, emit_pos(alloc, pos), instr::concat(alloc), ], )) }) .collect::<Result<_>>()?, ), ], )) } } fn emit_clone<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, expr: &ast::Expr, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; Ok(InstrSeq::gather( alloc, vec![emit_expr(e, env, expr)?, instr::clone(alloc)], )) } fn emit_lambda<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, fndef: &ast::Fun_, ids: &[aast_defs::Lid], ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; // Closure conversion puts the class number used for CreateCl in the "name" // of the function definition let fndef_name = &(fndef.name).1; let cls_num = fndef_name .parse::<isize>() .map_err(|err| Unrecoverable(err.to_string()))?; let explicit_use = e.emit_global_state().explicit_use_set.contains(fndef_name); let is_in_lambda = env.scope.is_in_lambda(); Ok(InstrSeq::gather( alloc, vec![ InstrSeq::gather( alloc, ids.iter() .map(|ast::Lid(pos, id)| { match string_utils::reified::is_captured_generic(local_id::get_name(id)) { Some((is_fun, i)) => { if is_in_lambda { Ok(instr::cgetl( alloc, Local::Named(Str::new_str( alloc, 
string_utils::reified::reified_generic_captured_name( is_fun, i as usize, ) .as_str(), )), )) } else { emit_reified_generic_instrs( alloc, &Pos::make_none(), is_fun, i as usize, ) } } None => Ok({ let lid = get_local(e, env, pos, local_id::get_name(id))?; if explicit_use { instr::cgetl(alloc, lid) } else { instr::cugetl(alloc, lid) } }), } }) .collect::<Result<Vec<_>>>()?, ), instr::createcl(alloc, ids.len(), cls_num), ], )) } pub fn emit_await<'a, 'arena, 'decl, D: DeclProvider<'decl>>( emitter: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, expr: &ast::Expr, ) -> Result<InstrSeq<'arena>> { let ast::Expr(_, _, e) = expr; let alloc = env.arena; let cant_inline_gen_functions = !emitter .options() .hhvm .flags .contains(HhvmFlags::JIT_ENABLE_RENAME_FUNCTION); match e.as_call() { Some((ast::Expr(_, _, ast::Expr_::Id(id)), _, args, None)) if (cant_inline_gen_functions && args.len() == 1 && string_utils::strip_global_ns(&(*id.1)) == "gena") => { inline_gena_call(emitter, env, &args[0]) } _ => { let after_await = emitter.label_gen_mut().next_regular(); let instrs = match e { ast::Expr_::Call(c) => { emit_call_expr(emitter, env, pos, Some(after_await.clone()), &*c)? 
} _ => emit_expr(emitter, env, expr)?, }; Ok(InstrSeq::gather( alloc, vec![ instrs, emit_pos(alloc, pos), instr::dup(alloc), instr::istypec(alloc, IstypeOp::OpNull), instr::jmpnz(alloc, after_await.clone()), instr::await_(alloc), instr::label(alloc, after_await), ], )) } } } fn inline_gena_call<'a, 'arena, 'decl, D: DeclProvider<'decl>>( emitter: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, arg: &ast::Expr, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let load_arr = emit_expr(emitter, env, arg)?; let async_eager_label = emitter.label_gen_mut().next_regular(); scope::with_unnamed_local(alloc, emitter, |alloc, e, arr_local| { let before = InstrSeq::gather( alloc, vec![ load_arr, instr::cast_dict(alloc), instr::popl(alloc, arr_local), ], ); let inner = InstrSeq::gather( alloc, vec![ instr::nulluninit(alloc), instr::nulluninit(alloc), instr::cgetl(alloc, arr_local), instr::fcallclsmethodd( alloc, FcallArgs::new( FcallFlags::default(), 1, Slice::empty(), Slice::empty(), Some(async_eager_label), 1, None, ), method::from_raw_string(alloc, "fromDict"), class::from_raw_string(alloc, "HH\\AwaitAllWaitHandle"), ), instr::await_(alloc), instr::label(alloc, async_eager_label), instr::popc(alloc), emit_iter( alloc, e, instr::cgetl(alloc, arr_local), |alloc, val_local, key_local| { InstrSeq::gather( alloc, vec![ instr::cgetl(alloc, val_local), instr::whresult(alloc), instr::basel(alloc, arr_local, MemberOpMode::Define), instr::setm(alloc, 0, MemberKey::EL(key_local, ReadOnlyOp::Any)), instr::popc(alloc), ], ) }, )?, ], ); let after = instr::pushl(alloc, arr_local); Ok((before, inner, after)) }) } fn emit_iter< 'arena, 'decl, D: DeclProvider<'decl>, F: FnOnce(&'arena bumpalo::Bump, Local<'arena>, Local<'arena>) -> InstrSeq<'arena>, >( alloc: &'arena bumpalo::Bump, e: &mut Emitter<'arena, 'decl, D>, collection: InstrSeq<'arena>, f: F, ) -> Result<InstrSeq<'arena>> { scope::with_unnamed_locals_and_iterators(alloc, e, |alloc, e| { let iter_id = 
e.iterator_mut().get(); let val_id = e.local_gen_mut().get_unnamed(); let key_id = e.local_gen_mut().get_unnamed(); let loop_end = e.label_gen_mut().next_regular(); let loop_next = e.label_gen_mut().next_regular(); let iter_args = IterArgs { iter_id, key_id: Just(key_id), val_id, }; let iter_init = InstrSeq::gather( alloc, vec![ collection, instr::iterinit(alloc, iter_args.clone(), loop_end), ], ); let iterate = InstrSeq::gather( alloc, vec![ instr::label(alloc, loop_next), f(alloc, val_id, key_id), instr::iternext(alloc, iter_args, loop_next), ], ); let iter_done = InstrSeq::gather( alloc, vec![ instr::unsetl(alloc, val_id), instr::unsetl(alloc, key_id), instr::label(alloc, loop_end), ], ); Ok((iter_init, iterate, iter_done)) }) } fn emit_shape<'a, 'arena, 'decl, D: DeclProvider<'decl>>( emitter: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, expr: &ast::Expr, fl: &[(ast_defs::ShapeFieldName, ast::Expr)], ) -> Result<InstrSeq<'arena>> { fn extract_shape_field_name_pstring<'a, 'arena>( env: &Env<'a, 'arena>, pos: &Pos, field: &ast_defs::ShapeFieldName, ) -> Result<ast::Expr_> { use ast_defs::ShapeFieldName as SF; Ok(match field { SF::SFlitInt(s) => ast::Expr_::mk_int(s.1.clone()), SF::SFlitStr(s) => ast::Expr_::mk_string(s.1.clone()), SF::SFclassConst(id, p) => { if is_reified_tparam(env, true, &id.1).is_some() || is_reified_tparam(env, false, &id.1).is_some() { return Err(emit_fatal::raise_fatal_parse( &id.0, "Reified generics cannot be used in shape keys", )); } else { ast::Expr_::mk_class_const( ast::ClassId((), pos.clone(), ast::ClassId_::CI(id.clone())), p.clone(), ) } } }) } let pos = &expr.1; // TODO(hrust): avoid clone let fl = fl .iter() .map(|(f, e)| { Ok(( ast::Expr( (), pos.clone(), extract_shape_field_name_pstring(env, pos, f)?, ), e.clone(), )) }) .collect::<Result<Vec<_>>>()?; emit_expr( emitter, env, &ast::Expr((), pos.clone(), ast::Expr_::mk_darray(None, fl)), ) } fn emit_vec_collection<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut 
Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, fields: &[ast::Afield], ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; match ast_constant_folder::vec_to_typed_value(alloc, e, fields) { Ok(tv) => emit_static_collection::<D>(env, None, pos, tv), Err(_) => emit_value_only_collection(e, env, pos, fields, InstructLitConst::NewVec), } } fn emit_named_collection<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, expr: &ast::Expr, fields: &[ast::Afield], collection_type: CollectionType, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let emit_vector_like = |e: &mut Emitter<'arena, 'decl, D>, collection_type| { Ok(if fields.is_empty() { emit_pos_then(alloc, pos, instr::newcol(alloc, collection_type)) } else { InstrSeq::gather( alloc, vec![ emit_vec_collection(e, env, pos, fields)?, instr::colfromarray(alloc, collection_type), ], ) }) }; let emit_map_or_set = |e: &mut Emitter<'arena, 'decl, D>, collection_type| { if fields.is_empty() { Ok(emit_pos_then( alloc, pos, instr::newcol(alloc, collection_type), )) } else { emit_collection(e, env, expr, fields, Some(collection_type)) } }; use CollectionType as C; match collection_type { C::Dict | C::Vec | C::Keyset => { let instr = emit_collection(e, env, expr, fields, None)?; Ok(emit_pos_then(alloc, pos, instr)) } C::Vector | C::ImmVector => emit_vector_like(e, collection_type), C::Map | C::ImmMap | C::Set | C::ImmSet => emit_map_or_set(e, collection_type), C::Pair => Ok(InstrSeq::gather( alloc, vec![ InstrSeq::gather( alloc, fields .iter() .map(|f| match f { ast::Afield::AFvalue(v) => emit_expr(e, env, v), _ => Err(unrecoverable("impossible Pair argument")), }) .collect::<Result<_>>()?, ), instr::new_pair(alloc), ], )), _ => Err(unrecoverable("Unexpected named collection type")), } } fn emit_named_collection_str<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, expr: &ast::Expr, 
(ast_defs::Id(pos, name), _, fields): &( ast::Sid, Option<ast::CollectionTarg>, Vec<ast::Afield>, ), ) -> Result<InstrSeq<'arena>> { let name = string_utils::strip_ns(name); let name = string_utils::types::fix_casing(name); use CollectionType::*; let ctype = match name { "dict" => Dict, "vec" => Vec, "keyset" => Keyset, "Vector" => Vector, "ImmVector" => ImmVector, "Map" => Map, "ImmMap" => ImmMap, "Set" => Set, "ImmSet" => ImmSet, "Pair" => Pair, _ => { return Err(unrecoverable(format!( "collection: {} does not exist", name ))); } }; emit_named_collection(e, env, pos, expr, fields, ctype) } fn mk_afkvalues(es: &[(ast::Expr, ast::Expr)]) -> Vec<ast::Afield> { es.to_owned() .into_iter() .map(|(e1, e2)| ast::Afield::mk_afkvalue(e1, e2)) .collect() } fn mk_afvalues(es: &[ast::Expr]) -> Vec<ast::Afield> { es.to_owned() .into_iter() .map(ast::Afield::mk_afvalue) .collect() } fn emit_collection<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, expr: &ast::Expr, fields: &[ast::Afield], transform_to_collection: Option<CollectionType>, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let pos = &expr.1; match ast_constant_folder::expr_to_typed_value_( alloc, e, expr, true, /*allow_map*/ false, /*force_class_const*/ ) { Ok(tv) => emit_static_collection::<D>(env, transform_to_collection, pos, tv), Err(_) => emit_dynamic_collection(e, env, expr, fields), } } fn emit_static_collection<'a, 'arena, 'decl, D: DeclProvider<'decl>>( env: &Env<'a, 'arena>, transform_to_collection: Option<CollectionType>, pos: &Pos, tv: TypedValue<'arena>, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let transform_instr = match transform_to_collection { Some(collection_type) => instr::colfromarray(alloc, collection_type), _ => instr::empty(alloc), }; Ok(InstrSeq::gather( alloc, vec![ emit_pos(alloc, pos), instr::typedvalue(alloc, tv), transform_instr, ], )) } fn expr_and_new<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut 
Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, instr_to_add_new: InstrSeq<'arena>, instr_to_add: InstrSeq<'arena>, field: &ast::Afield, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; match field { ast::Afield::AFvalue(v) => Ok(InstrSeq::gather( alloc, vec![ emit_expr(e, env, v)?, emit_pos(alloc, pos), instr_to_add_new, ], )), ast::Afield::AFkvalue(k, v) => Ok(InstrSeq::gather( alloc, vec![emit_two_exprs(e, env, &k.1, k, v)?, instr_to_add], )), } } fn emit_keyvalue_collection<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, fields: &[ast::Afield], ctype: CollectionType, constructor: InstructLitConst<'arena>, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let (transform_instr, add_elem_instr) = match ctype { CollectionType::Dict | CollectionType::Array => { (instr::empty(alloc), instr::add_new_elemc(alloc)) } _ => ( instr::colfromarray(alloc, ctype), InstrSeq::gather(alloc, vec![instr::dup(alloc), instr::add_elemc(alloc)]), ), }; let emitted_pos = emit_pos(alloc, pos); Ok(InstrSeq::gather( alloc, vec![ InstrSeq::<'arena>::clone(alloc, &emitted_pos), instr::lit_const(alloc, constructor), fields .iter() .map(|f| { expr_and_new( e, env, pos, InstrSeq::<'arena>::clone(alloc, &add_elem_instr), instr::add_elemc(alloc), f, ) }) .collect::<Result<_>>() .map(|s| InstrSeq::gather(alloc, s))?, emitted_pos, transform_instr, ], )) } fn non_numeric(s: &str) -> bool { // Note(hrust): OCaml Int64.of_string and float_of_string ignore underscores let s = s.replace("_", ""); lazy_static! 
{ static ref HEX: Regex = Regex::new(r"(?P<sign>^-?)0[xX](?P<digits>.*)").unwrap(); static ref OCTAL: Regex = Regex::new(r"(?P<sign>^-?)0[oO](?P<digits>.*)").unwrap(); static ref BINARY: Regex = Regex::new(r"(?P<sign>^-?)0[bB](?P<digits>.*)").unwrap(); static ref FLOAT: Regex = Regex::new(r"(?P<int>\d*)\.(?P<dec>[0-9--0]*)(?P<zeros>0*)").unwrap(); static ref NEG_FLOAT: Regex = Regex::new(r"(?P<int>-\d*)\.(?P<dec>[0-9--0]*)(?P<zeros>0*)").unwrap(); static ref HEX_RADIX: u32 = 16; static ref OCTAL_RADIX: u32 = 8; static ref BINARY_RADIX: u32 = 2; } fn int_from_str(s: &str) -> std::result::Result<i64, ()> { // Note(hrust): OCaml Int64.of_string reads decimal, hexadecimal, octal, and binary (if HEX.is_match(s) { u64::from_str_radix(&HEX.replace(s, "${sign}${digits}"), *HEX_RADIX).map(|x| x as i64) } else if OCTAL.is_match(s) { u64::from_str_radix(&OCTAL.replace(s, "${sign}${digits}"), *OCTAL_RADIX) .map(|x| x as i64) } else if BINARY.is_match(s) { u64::from_str_radix(&BINARY.replace(s, "${sign}${digits}"), *BINARY_RADIX) .map(|x| x as i64) } else { i64::from_str(&s) }) .map_err(|_| ()) } fn float_from_str_radix(s: &str, radix: u32) -> std::result::Result<f64, ()> { let i = i64::from_str_radix(&s.replace(".", ""), radix).map_err(|_| ())?; Ok(match s.matches('.').count() { 0 => i as f64, 1 => { let pow = s.split('.').last().unwrap().len(); (i as f64) / f64::from(radix).powi(pow as i32) } _ => return Err(()), }) } fn out_of_bounds(s: &str) -> bool { // compare strings instead of floats to avoid rounding imprecision if FLOAT.is_match(s) { FLOAT.replace(s, "${int}.${dec}").trim_end_matches('.') > i64::MAX.to_string().as_str() } else if NEG_FLOAT.is_match(s) { NEG_FLOAT.replace(s, "${int}.${dec}").trim_end_matches('.') > i64::MIN.to_string().as_str() } else { false } } fn float_from_str(s: &str) -> std::result::Result<f64, ()> { // Note(hrust): OCaml float_of_string ignores leading whitespace, reads decimal and hexadecimal let s = s.trim_start(); if HEX.is_match(s) { 
float_from_str_radix(&HEX.replace(s, "${sign}${digits}"), *HEX_RADIX) } else { let out_of_bounds = |f: f64| out_of_bounds(s) && (f > i64::MAX as f64 || f < i64::MIN as f64); let validate_float = |f: f64| { if out_of_bounds(f) || f.is_infinite() || f.is_nan() { Err(()) } else { Ok(f) } }; f64::from_str(s).map_err(|_| ()).and_then(validate_float) } } int_from_str(&s).is_err() && float_from_str(&s).is_err() } fn is_struct_init<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, fields: &[ast::Afield], allow_numerics: bool, ) -> Result<bool> { let alloc = env.arena; let mut are_all_keys_non_numeric_strings = true; let mut uniq_keys = HashSet::<bstr::BString>::default(); for f in fields.iter() { if let ast::Afield::AFkvalue(key, _) = f { // TODO(hrust): if key is String, don't clone and call fold_expr let mut key = key.clone(); ast_constant_folder::fold_expr(&mut key, alloc, e) .map_err(|e| unrecoverable(format!("{}", e)))?; if let ast::Expr(_, _, ast::Expr_::String(s)) = key { are_all_keys_non_numeric_strings = are_all_keys_non_numeric_strings && non_numeric( // FIXME: This is not safe--string literals are binary strings. // There's no guarantee that they're valid UTF-8. 
unsafe { std::str::from_utf8_unchecked(s.as_slice()) }, ); uniq_keys.insert(s); } else { are_all_keys_non_numeric_strings = false; } continue; } are_all_keys_non_numeric_strings = false; } let num_keys = fields.len(); let limit = *(e.options().max_array_elem_size_on_the_stack.get()) as usize; Ok((allow_numerics || are_all_keys_non_numeric_strings) && uniq_keys.len() == num_keys && num_keys <= limit && num_keys != 0) } fn emit_struct_array< 'a, 'arena, 'decl, D: DeclProvider<'decl>, C: FnOnce( &'arena bumpalo::Bump, &mut Emitter<'arena, 'decl, D>, &'arena [&'arena str], ) -> Result<InstrSeq<'arena>>, >( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, fields: &[ast::Afield], ctor: C, ) -> Result<InstrSeq<'arena>> { use ast::{Expr as E, Expr_ as E_}; let alloc = env.arena; let (keys, value_instrs): (Vec<String>, _) = fields .iter() .map(|f| match f { ast::Afield::AFkvalue(k, v) => match k { E(_, _, E_::String(s)) => Ok(( // FIXME: This is not safe--string literals are binary strings. // There's no guarantee that they're valid UTF-8. unsafe { String::from_utf8_unchecked(s.clone().into()) }, emit_expr(e, env, v)?, )), _ => { let mut k = k.clone(); ast_constant_folder::fold_expr(&mut k, alloc, e) .map_err(|e| unrecoverable(format!("{}", e)))?; match k { E(_, _, E_::String(s)) => Ok(( // FIXME: This is not safe--string literals are binary strings. // There's no guarantee that they're valid UTF-8. unsafe { String::from_utf8_unchecked(s.into()) }, emit_expr(e, env, v)?, )), _ => Err(unrecoverable("Key must be a string")), } } }, _ => Err(unrecoverable("impossible")), }) .collect::<Result<Vec<(String, InstrSeq<'arena>)>>>()? 
.into_iter() .unzip(); let keys_ = bumpalo::collections::Vec::from_iter_in( keys.into_iter() .map(|x| bumpalo::collections::String::from_str_in(x.as_str(), alloc).into_bump_str()), alloc, ) .into_bump_slice(); Ok(InstrSeq::gather( alloc, vec![ InstrSeq::gather(alloc, value_instrs), emit_pos(alloc, pos), ctor(alloc, e, keys_)?, ], )) } fn emit_dynamic_collection<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, expr: &ast::Expr, fields: &[ast::Afield], ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let pos = &expr.1; let count = fields.len(); let emit_dict = |e: &mut Emitter<'arena, 'decl, D>| { if is_struct_init(e, env, fields, true)? { emit_struct_array(e, env, pos, fields, |alloc, _, x| { Ok(instr::newstructdict(alloc, x)) }) } else { let ctor = InstructLitConst::NewDictArray(count as isize); emit_keyvalue_collection(e, env, pos, fields, CollectionType::Dict, ctor) } }; let emit_collection_helper = |e: &mut Emitter<'arena, 'decl, D>, ctype| { if is_struct_init(e, env, fields, true)? 
{ Ok(InstrSeq::gather( alloc, vec![ emit_struct_array(e, env, pos, fields, |alloc, _, x| { Ok(instr::newstructdict(alloc, x)) })?, emit_pos(alloc, pos), instr::colfromarray(alloc, ctype), ], )) } else { let ctor = InstructLitConst::NewDictArray(count as isize); emit_keyvalue_collection(e, env, pos, fields, ctype, ctor) } }; use ast::Expr_ as E_; match &expr.2 { E_::ValCollection(v) if v.0 == ast::VcKind::Vec => { emit_value_only_collection(e, env, pos, fields, InstructLitConst::NewVec) } E_::Collection(v) if (v.0).1 == "vec" => { emit_value_only_collection(e, env, pos, fields, InstructLitConst::NewVec) } E_::Tuple(_) => emit_value_only_collection(e, env, pos, fields, InstructLitConst::NewVec), E_::ValCollection(v) if v.0 == ast::VcKind::Keyset => { emit_value_only_collection(e, env, pos, fields, InstructLitConst::NewKeysetArray) } E_::Collection(v) if (v.0).1 == "keyset" => { emit_value_only_collection(e, env, pos, fields, InstructLitConst::NewKeysetArray) } E_::Collection(v) if (v.0).1 == "dict" => emit_dict(e), E_::KeyValCollection(v) if v.0 == ast::KvcKind::Dict => emit_dict(e), E_::Collection(v) if string_utils::strip_ns(&(v.0).1) == "Set" => { emit_collection_helper(e, CollectionType::Set) } E_::ValCollection(v) if v.0 == ast::VcKind::Set => { emit_collection_helper(e, CollectionType::Set) } E_::Collection(v) if string_utils::strip_ns(&(v.0).1) == "ImmSet" => { emit_collection_helper(e, CollectionType::ImmSet) } E_::ValCollection(v) if v.0 == ast::VcKind::ImmSet => { emit_collection_helper(e, CollectionType::ImmSet) } E_::Collection(v) if string_utils::strip_ns(&(v.0).1) == "Map" => { emit_collection_helper(e, CollectionType::Map) } E_::KeyValCollection(v) if v.0 == ast::KvcKind::Map => { emit_collection_helper(e, CollectionType::Map) } E_::Collection(v) if string_utils::strip_ns(&(v.0).1) == "ImmMap" => { emit_collection_helper(e, CollectionType::ImmMap) } E_::KeyValCollection(v) if v.0 == ast::KvcKind::ImmMap => { emit_collection_helper(e, 
CollectionType::ImmMap) } E_::Varray(_) => { let instrs = emit_value_only_collection(e, env, pos, fields, InstructLitConst::NewVec); Ok(instrs?) } E_::Darray(_) => { if is_struct_init(e, env, fields, false /* allow_numerics */)? { let instrs = emit_struct_array(e, env, pos, fields, |alloc, _, arg| { let instr = instr::newstructdict(alloc, arg); Ok(emit_pos_then(alloc, pos, instr)) }); Ok(instrs?) } else { let constr = InstructLitConst::NewDictArray(count as isize); let instrs = emit_keyvalue_collection(e, env, pos, fields, CollectionType::Array, constr); Ok(instrs?) } } _ => Err(unrecoverable("plain PHP arrays cannot be constructed")), } } fn emit_value_only_collection< 'a, 'arena, 'decl, D: DeclProvider<'decl>, F: FnOnce(isize) -> InstructLitConst<'arena>, >( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, fields: &[ast::Afield], constructor: F, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let limit = *(e.options().max_array_elem_size_on_the_stack.get()) as usize; let inline = | alloc: &'arena bumpalo::Bump, e: &mut Emitter<'arena, 'decl, D>, exprs: &[ast::Afield], | -> Result<InstrSeq<'arena>> { let mut instrs = vec![]; for expr in exprs.iter() { instrs.push(emit_expr(e, env, expr.value())?) 
} Ok(InstrSeq::gather( alloc, vec![ InstrSeq::gather(alloc, instrs), emit_pos(alloc, pos), instr::lit_const(alloc, constructor(exprs.len() as isize)), ], )) }; let outofline = | alloc: &'arena bumpalo::Bump, e: &mut Emitter<'arena, 'decl, D>, exprs: &[ast::Afield], | -> Result<InstrSeq<'arena>> { let mut instrs = vec![]; for expr in exprs.iter() { instrs.push(emit_expr(e, env, expr.value())?); instrs.push(instr::add_new_elemc(alloc)); } Ok(InstrSeq::gather(alloc, instrs)) }; let (x1, x2) = fields.split_at(std::cmp::min(fields.len(), limit)); Ok(match (x1, x2) { ([], []) => instr::empty(alloc), (_, []) => inline(alloc, e, x1)?, _ => { let outofline_instrs = outofline(alloc, e, x2)?; let inline_instrs = inline(alloc, e, x1)?; InstrSeq::gather(alloc, vec![inline_instrs, outofline_instrs]) } }) } fn emit_record<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, (cid, es): &(ast::Sid, Vec<(ast::Expr, ast::Expr)>), ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let es = mk_afkvalues(es); let id = class::ClassType::from_ast_name_and_mangle(alloc, &cid.1); emit_symbol_refs::add_class(alloc, e, id); emit_struct_array(e, env, pos, &es, |alloc, _, keys| { Ok(instr::new_record(alloc, id, keys)) }) } fn emit_call_isset_expr<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, outer_pos: &Pos, expr: &ast::Expr, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let pos = &expr.1; if let Some((base_expr, opt_elem_expr)) = expr.2.as_array_get() { return Ok(emit_array_get( e, env, pos, None, QueryOp::Isset, base_expr, opt_elem_expr.as_ref(), false, false, )? 
.0); } if let Some((cid, id, _)) = expr.2.as_class_get() { return emit_class_get(e, env, QueryOp::Isset, cid, id); } if let Some((expr_, prop, nullflavor, _)) = expr.2.as_obj_get() { return Ok(emit_obj_get(e, env, pos, QueryOp::Isset, expr_, prop, nullflavor, false)?.0); } if let Some(lid) = expr.2.as_lvar() { let name = local_id::get_name(&lid.1); return Ok(if superglobals::is_any_global(&name) { InstrSeq::gather( alloc, vec![ emit_pos(alloc, outer_pos), instr::string(alloc, string_utils::locals::strip_dollar(&name)), emit_pos(alloc, outer_pos), instr::issetg(alloc), ], ) } else if is_local_this(env, &lid.1) && !env.flags.contains(hhbc_by_ref_env::Flags::NEEDS_LOCAL_THIS) { InstrSeq::gather( alloc, vec![ emit_pos(alloc, outer_pos), emit_local(e, env, BareThisOp::NoNotice, lid)?, emit_pos(alloc, outer_pos), instr::istypec(alloc, IstypeOp::OpNull), instr::not(alloc), ], ) } else { emit_pos_then( alloc, outer_pos, instr::issetl(alloc, get_local(e, env, &lid.0, name)?), ) }); } Ok(InstrSeq::gather( alloc, vec![ emit_expr(e, env, expr)?, instr::istypec(alloc, IstypeOp::OpNull), instr::not(alloc), ], )) } fn emit_call_isset_exprs<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, exprs: &[ast::Expr], ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; match exprs { [] => Err(emit_fatal::raise_fatal_parse( pos, "Cannot use isset() without any arguments", )), [expr] => emit_call_isset_expr(e, env, pos, expr), _ => { let its_done = e.label_gen_mut().next_regular(); Ok(InstrSeq::gather( alloc, vec![ InstrSeq::gather( alloc, exprs .iter() .enumerate() .map(|(i, expr)| { Ok(InstrSeq::gather( alloc, vec![ emit_call_isset_expr(e, env, pos, expr)?, if i < exprs.len() - 1 { InstrSeq::gather( alloc, vec![ instr::dup(alloc), instr::jmpz(alloc, its_done), instr::popc(alloc), ], ) } else { instr::empty(alloc) }, ], )) }) .collect::<Result<Vec<_>>>()?, ), instr::label(alloc, its_done), ], )) } } } fn 
// (continuation: this completes the `fn` keyword emitted at the end of the
// previous line.)
//
// Emits HH\tag_provenance_here(...): every argument is evaluated, and with
// more than one argument the top stack value is popped afterwards — so the
// builtin behaves as the identity on one of its arguments.
// NOTE(review): which argument survives depends on emit_exprs' stack order;
// looks like the first argument is the result — confirm against HHVM.
emit_tag_provenance_here<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    pos: &Pos,
    es: &[ast::Expr],
) -> Result<InstrSeq<'arena>> {
    let alloc = env.arena;
    // A single argument leaves exactly one value on the stack; with more
    // arguments the extra value must be discarded.
    let pop = if es.len() == 1 {
        instr::empty(alloc)
    } else {
        instr::popc(alloc)
    };
    Ok(InstrSeq::gather(
        alloc,
        vec![emit_exprs(e, env, es)?, emit_pos(alloc, pos), pop],
    ))
}

/// Emits a call to HH\array_mark_legacy / HH\array_unmark_legacy.
///
/// `legacy` selects between the ArrayMarkLegacy and ArrayUnmarkLegacy
/// bytecodes. A single-argument call gets an implicit `false` pushed for the
/// instruction's second operand (presumably a "recursive" flag — TODO confirm
/// against the HHVM builtin's signature).
fn emit_array_mark_legacy<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    pos: &Pos,
    es: &[ast::Expr],
    legacy: bool,
) -> Result<InstrSeq<'arena>> {
    let alloc = env.arena;
    // One explicit argument: supply the missing second operand as `false`.
    let default = if es.len() == 1 {
        instr::false_(alloc)
    } else {
        instr::empty(alloc)
    };
    let mark = if legacy {
        instr::instr(alloc, Instruct::IMisc(InstructMisc::ArrayMarkLegacy))
    } else {
        instr::instr(alloc, Instruct::IMisc(InstructMisc::ArrayUnmarkLegacy))
    };
    Ok(InstrSeq::gather(
        alloc,
        vec![emit_exprs(e, env, es)?, emit_pos(alloc, pos), default, mark],
    ))
}

/// Emits a call to idx(...) via the dedicated Idx bytecode.
///
/// A two-argument idx($c, $k) gets an implicit `null` default pushed so the
/// Idx instruction always sees three operands; idx($c, $k, $d) already
/// supplies all three.
fn emit_idx<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    pos: &Pos,
    es: &[ast::Expr],
) -> Result<InstrSeq<'arena>> {
    let alloc = env.arena;
    let default = if es.len() == 2 {
        instr::null(alloc)
    } else {
        instr::empty(alloc)
    };
    Ok(InstrSeq::gather(
        alloc,
        vec![
            emit_exprs(e, env, es)?,
            emit_pos(alloc, pos),
            default,
            instr::idx(alloc),
        ],
    ))
}

/// Emits a call expression.
///
/// If the callee is a plain identifier it is recorded in the unit's symbol
/// refs, and `emit_special_function` gets first crack at producing an
/// intrinsic/optimized form; when it declines (`Ok(None)`) — or the callee is
/// not an identifier — the generic `emit_call_default` path is used.
fn emit_call<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    pos: &Pos,
    expr: &ast::Expr,
    targs: &[ast::Targ],
    args: &[ast::Expr],
    uarg: Option<&ast::Expr>,
    async_eager_label: Option<Label>,
) -> Result<InstrSeq<'arena>> {
    let alloc = env.arena;
    if let Some(ast_defs::Id(_, s)) = expr.as_id() {
        let fid = function::FunctionType::<'arena>::from_ast_name(alloc, s);
        emit_symbol_refs::add_function(alloc, e, fid);
    }
    let fcall_args = get_fcall_args(
        e,
        alloc,
        args,
        uarg,
        async_eager_label,
        env.call_context.clone(),
        false,
    );
    match expr.2.as_id() {
        // Non-identifier callee: no special-function shortcut is possible.
        None => emit_call_default(e,
// (continuation: remaining arguments of the `None =>` emit_call_default call
// started on the previous line.)
env, pos, expr, targs, args, uarg, fcall_args),
        Some(ast_defs::Id(_, id)) => {
            let fq = function::FunctionType::<'arena>::from_ast_name(alloc, id);
            let lower_fq_name = fq.to_raw_string();
            // Special functions may decline (Ok(None)); fall back to the
            // generic call path in that case.
            emit_special_function(e, env, pos, args, uarg, lower_fq_name)
                .transpose()
                .unwrap_or_else(|| {
                    emit_call_default(e, env, pos, expr, targs, args, uarg, fcall_args)
                })
        }
    }
}

/// Generic call emission.
///
/// Emits, in order: one NullUninit placeholder per extra (inout) return
/// slot, the callee setup ("lhs"), the arguments, the optional unpacked
/// argument, the FCall itself, and finally the inout write-back setters.
/// Runs inside `scope::with_unnamed_locals` so any unnamed locals allocated
/// for inout handling are scoped and released.
fn emit_call_default<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    pos: &Pos,
    expr: &ast::Expr,
    targs: &[ast::Targ],
    args: &[ast::Expr],
    uarg: Option<&ast::Expr>,
    fcall_args: FcallArgs<'arena>,
) -> Result<InstrSeq<'arena>> {
    let alloc = env.arena;
    scope::with_unnamed_locals(alloc, e, |alloc, em| {
        let FcallArgs(_, _, num_ret, _, _, _, _) = &fcall_args;
        // num_ret counts the ordinary result plus one slot per inout
        // argument; only the extra slots need NullUninit placeholders.
        let num_uninit = num_ret - 1;
        let (lhs, fcall) = emit_call_lhs_and_fcall(em, env, expr, fcall_args, targs)?;
        let (args, inout_setters) = emit_args_inout_setters(em, env, args)?;
        let uargs = match uarg {
            Some(uarg) => emit_expr(em, env, uarg)?,
            None => instr::empty(alloc),
        };
        // with_unnamed_locals takes (before, body, after) — no extra
        // setup/teardown is needed here.
        Ok((
            instr::empty(alloc),
            InstrSeq::gather(
                alloc,
                vec![
                    InstrSeq::gather(
                        alloc,
                        iter::repeat_with(|| instr::nulluninit(alloc))
                            .take(num_uninit)
                            .collect::<Vec<_>>(),
                    ),
                    lhs,
                    args,
                    uargs,
                    emit_pos(alloc, pos),
                    fcall,
                    inout_setters,
                ],
            ),
            instr::empty(alloc),
        ))
    })
}

/// Emits code producing the runtime representation of the reified type
/// arguments `targs`.
///
/// Fast paths: when (outside a lambda) the targs are exactly the enclosing
/// function's reified type parameters, the pre-materialized generics local
/// is read; when they are exactly the enclosing class's, the generics
/// property on $this is read. Otherwise each type argument is emitted and
/// the results are packed into a vec.
pub fn emit_reified_targs<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    pos: &Pos,
    targs: &[&ast::Hint],
) -> Result<InstrSeq<'arena>> {
    let alloc = env.arena;
    let current_fun_tparams = env.scope.get_fun_tparams();
    let current_cls_tparams = env.scope.get_class_tparams();
    let is_in_lambda = env.scope.is_in_lambda();
    let is_soft =
        |ual: &Vec<ast::UserAttribute>| ual.iter().any(|ua| user_attributes::is_soft(&ua.name.1));
    // True when `targs` mirrors `tparams` exactly: same arity, and each targ
    // is a bare (no nested generics) applied name equal to the corresponding
    // tparam's name (further conditions continue on the next line).
    let same_as_targs = |tparams: &[ast::Tparam]| {
        tparams.len() == targs.len()
            && tparams.iter().zip(targs).all(|(tp, ta)| {
                ta.1.as_happly().map_or(false, |(id, hs)| {
                    id.1 == tp.name.1
                        && hs.is_empty()
// (continuation: last two conditions of the `same_as_targs` closure — the
// matching tparam must itself be reified and not marked <<__Soft>>.)
&& !is_soft(&tp.user_attributes)
                        && tp.reified.is_reified()
                })
            })
    };
    Ok(if !is_in_lambda && same_as_targs(&current_fun_tparams) {
        // Function-level fast path: the reified generics are already
        // materialized in the dedicated generics local; just read it.
        instr::cgetl(
            alloc,
            Local::Named(Str::new_str(
                alloc,
                string_utils::reified::GENERICS_LOCAL_NAME,
            )),
        )
    } else if !is_in_lambda && same_as_targs(&current_cls_tparams[..]) {
        // Class-level fast path: read the reified-generics property off
        // $this (CheckThis guards that $this is available).
        InstrSeq::gather(
            alloc,
            vec![
                instr::checkthis(alloc),
                instr::baseh(alloc),
                instr::querym(
                    alloc,
                    0,
                    QueryOp::CGet,
                    MemberKey::PT(
                        prop::from_raw_string(alloc, string_utils::reified::PROP_NAME),
                        ReadOnlyOp::Any,
                    ),
                ),
            ],
        )
    } else {
        // General case: emit each reified type argument, then pack the
        // results into a vec of length targs.len().
        InstrSeq::gather(
            alloc,
            vec![
                InstrSeq::gather(
                    alloc,
                    targs
                        .iter()
                        .map(|h| Ok(emit_reified_arg(e, env, pos, false, h)?.0))
                        .collect::<Result<Vec<_>>>()?,
                ),
                instr::new_vec_array(alloc, targs.len() as isize),
            ],
        )
    })
}

/// Names of the current scope's type parameters that are erased
/// (i.e. not reified at runtime).
fn get_erased_tparams<'a, 'arena>(env: &'a Env<'a, 'arena>) -> Vec<&'a str> {
    env.scope
        .get_tparams()
        .iter()
        .filter_map(|tparam| match tparam.reified {
            ast::ReifyKind::Erased => Some(tparam.name.1.as_str()),
            _ => None,
        })
        .collect()
}

/// True if any hint is something other than a bare erased type parameter —
/// i.e. the hint list carries generics information that matters at runtime.
pub fn has_non_tparam_generics(env: &Env, hints: &[ast::Hint]) -> bool {
    let erased_tparams = get_erased_tparams(env);
    hints.iter().any(|hint| {
        hint.1
            .as_happly()
            .map_or(true, |(id, _)| !erased_tparams.contains(&id.1.as_str()))
    })
}

/// Same as `has_non_tparam_generics`, but over explicit call-site type
/// arguments (`Targ`s) rather than plain hints.
fn has_non_tparam_generics_targs(env: &Env, targs: &[ast::Targ]) -> bool {
    let erased_tparams = get_erased_tparams(env);
    targs.iter().any(|targ| {
        (targ.1)
            .1
            .as_happly()
            .map_or(true, |(id, _)| !erased_tparams.contains(&id.1.as_str()))
    })
}

/// Maps the AST's object-get null flavor (`->` vs `?->`) onto the bytecode's
/// ObjNullFlavor operand.
fn from_ast_null_flavor(nullflavor: ast::OgNullFlavor) -> ObjNullFlavor {
    match nullflavor {
        ast::OgNullFlavor::OGNullsafe => ObjNullFlavor::NullSafe,
        ast::OgNullFlavor::OGNullthrows => ObjNullFlavor::NullThrows,
    }
}

/// Emits the receiver of a method call: a literal `$this` uses the dedicated
/// This instruction; anything else is emitted as an ordinary expression.
fn emit_object_expr<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    expr: &ast::Expr,
) -> Result<InstrSeq<'arena>> {
    let alloc = env.arena;
    match &expr.2 {
        ast::Expr_::Lvar(x) if is_local_this(env, &x.1) => Ok(instr::this(alloc)),
        _ => emit_expr(e,
env, expr), } } fn emit_call_lhs_and_fcall<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, expr: &ast::Expr, mut fcall_args: FcallArgs<'arena>, targs: &[ast::Targ], ) -> Result<(InstrSeq<'arena>, InstrSeq<'arena>)> { let ast::Expr(_, pos, expr_) = expr; use ast::{Expr as E, Expr_ as E_}; let alloc = env.arena; let emit_generics = |e: &mut Emitter<'arena, 'decl, D>, env, fcall_args: &mut FcallArgs<'arena>| { let does_not_have_non_tparam_generics = !has_non_tparam_generics_targs(env, targs); if does_not_have_non_tparam_generics { Ok(instr::empty(alloc)) } else { fcall_args.0 |= FcallFlags::HAS_GENERICS; emit_reified_targs( e, env, pos, targs .iter() .map(|targ| &targ.1) .collect::<Vec<_>>() .as_slice(), ) } }; let emit_fcall_func = | e: &mut Emitter<'arena, 'decl, D>, env, expr: &ast::Expr, fcall_args: FcallArgs<'arena>, | -> Result<(InstrSeq<'arena>, InstrSeq<'arena>)> { let tmp = e.local_gen_mut().get_unnamed(); Ok(( InstrSeq::gather( alloc, vec![ instr::nulluninit(alloc), instr::nulluninit(alloc), emit_expr(e, env, expr)?, instr::popl(alloc, tmp), ], ), InstrSeq::gather( alloc, vec![ instr::pushl(alloc, tmp), instr::fcallfunc(alloc, fcall_args), ], ), )) }; match expr_ { E_::ObjGet(o) => { if o.as_ref().3 { // Case ($x->foo)(...). let expr = E( (), pos.clone(), E_::ObjGet(Box::new((o.0.clone(), o.1.clone(), o.2.clone(), false))), ); emit_fcall_func(e, env, &expr, fcall_args) } else { // Case $x->foo(...). 
let emit_id = | e: &mut Emitter<'arena, 'decl, D>, obj, id, null_flavor: &ast::OgNullFlavor, mut fcall_args, | { let name: method::MethodType<'arena> = (alloc, string_utils::strip_global_ns(id)).into(); let obj = emit_object_expr(e, env, obj)?; let generics = emit_generics(e, env, &mut fcall_args)?; let null_flavor = from_ast_null_flavor(*null_flavor); Ok(( InstrSeq::gather(alloc, vec![obj, instr::nulluninit(alloc)]), InstrSeq::gather( alloc, vec![ generics, instr::fcallobjmethodd(alloc, fcall_args, name, null_flavor), ], ), )) }; match o.as_ref() { (obj, E(_, _, E_::String(id)), null_flavor, _) => { emit_id( e, obj, // FIXME: This is not safe--string literals are binary strings. // There's no guarantee that they're valid UTF-8. unsafe { std::str::from_utf8_unchecked(id.as_slice()) }, null_flavor, fcall_args, ) } (E(_, pos, E_::New(new_exp)), E(_, _, E_::Id(id)), null_flavor, _) if fcall_args.1 == 0 => { let cexpr = ClassExpr::class_id_to_class_expr( e, false, false, &env.scope, &new_exp.0, ); match &cexpr { ClassExpr::Id(ast_defs::Id(_, name)) if string_utils::strip_global_ns(name) == "ReflectionClass" => { let fid = match string_utils::strip_global_ns(&id.1) { "isAbstract" => { Some("__SystemLib\\reflection_class_is_abstract") } "isInterface" => { Some("__SystemLib\\reflection_class_is_interface") } "isFinal" => Some("__SystemLib\\reflection_class_is_final"), "getName" => Some("__SystemLib\\reflection_class_get_name"), _ => None, }; match fid { None => { emit_id(e, &o.as_ref().0, &id.1, null_flavor, fcall_args) } Some(fid) => { let fcall_args = FcallArgs::new( FcallFlags::default(), 1, Slice::empty(), Slice::empty(), None, 1, None, ); let newobj_instrs = emit_new(e, env, pos, &new_exp, true); Ok(( InstrSeq::gather( alloc, vec![ instr::nulluninit(alloc), instr::nulluninit(alloc), newobj_instrs?, ], ), InstrSeq::gather( alloc, vec![instr::fcallfuncd( alloc, fcall_args, function::FunctionType::<'arena>::from_ast_name( alloc, fid, ), )], ), )) } } } _ => emit_id(e, 
&o.as_ref().0, &id.1, null_flavor, fcall_args), } } (obj, E(_, _, E_::Id(id)), null_flavor, _) => { emit_id(e, obj, &id.1, null_flavor, fcall_args) } (obj, method_expr, null_flavor, _) => { let obj = emit_object_expr(e, env, obj)?; let tmp = e.local_gen_mut().get_unnamed(); let null_flavor = from_ast_null_flavor(*null_flavor); Ok(( InstrSeq::gather( alloc, vec![ obj, instr::nulluninit(alloc), emit_expr(e, env, method_expr)?, instr::popl(alloc, tmp), ], ), InstrSeq::gather( alloc, vec![ instr::pushl(alloc, tmp), instr::fcallobjmethod(alloc, fcall_args, null_flavor), ], ), )) } } } } E_::ClassConst(cls_const) => { let (cid, (_, id)) = &**cls_const; let mut cexpr = ClassExpr::class_id_to_class_expr(e, false, false, &env.scope, cid); if let ClassExpr::Id(ast_defs::Id(_, name)) = &cexpr { if let Some(reified_var_cexpr) = get_reified_var_cexpr::<D>(env, pos, &name)? { cexpr = reified_var_cexpr; } } let method_id: method::MethodType = (alloc, string_utils::strip_global_ns(&id)).into(); Ok(match cexpr { // Statically known ClassExpr::Id(ast_defs::Id(_, cname)) => { let cid = class::ClassType::<'arena>::from_ast_name_and_mangle(alloc, &cname); emit_symbol_refs::add_class(alloc, e, cid.clone()); let generics = emit_generics(e, env, &mut fcall_args)?; ( InstrSeq::gather( alloc, vec![instr::nulluninit(alloc), instr::nulluninit(alloc)], ), InstrSeq::gather( alloc, vec![ generics, instr::fcallclsmethodd(alloc, fcall_args, method_id, cid), ], ), ) } ClassExpr::Special(clsref) => { let generics = emit_generics(e, env, &mut fcall_args)?; ( InstrSeq::gather( alloc, vec![instr::nulluninit(alloc), instr::nulluninit(alloc)], ), InstrSeq::gather( alloc, vec![ generics, instr::fcallclsmethodsd(alloc, fcall_args, clsref, method_id), ], ), ) } ClassExpr::Expr(expr) => { let generics = emit_generics(e, env, &mut fcall_args)?; ( InstrSeq::gather( alloc, vec![instr::nulluninit(alloc), instr::nulluninit(alloc)], ), InstrSeq::gather( alloc, vec![ generics, instr::string(alloc, 
method_id.to_raw_string()), emit_expr(e, env, &expr)?, instr::classgetc(alloc), instr::fcallclsmethod( alloc, IsLogAsDynamicCallOp::DontLogAsDynamicCall, fcall_args, ), ], ), ) } ClassExpr::Reified(instrs) => { let tmp = e.local_gen_mut().get_unnamed(); ( InstrSeq::gather( alloc, vec![ instr::nulluninit(alloc), instr::nulluninit(alloc), instrs, instr::popl(alloc, tmp), ], ), InstrSeq::gather( alloc, vec![ instr::string(alloc, method_id.to_raw_string()), instr::pushl(alloc, tmp), instr::classgetc(alloc), instr::fcallclsmethod( alloc, IsLogAsDynamicCallOp::LogAsDynamicCall, fcall_args, ), ], ), ) } }) } E_::ClassGet(c) => { if c.as_ref().2 { // Case (Foo::$bar)(...). let expr = E( (), pos.clone(), E_::ClassGet(Box::new((c.0.clone(), c.1.clone(), false))), ); emit_fcall_func(e, env, &expr, fcall_args) } else { // Case Foo::bar(...). let (cid, cls_get_expr, _) = &**c; let mut cexpr = ClassExpr::class_id_to_class_expr(e, false, false, &env.scope, cid); if let ClassExpr::Id(ast_defs::Id(_, name)) = &cexpr { if let Some(reified_var_cexpr) = get_reified_var_cexpr::<D>(env, pos, &name)? 
{ cexpr = reified_var_cexpr; } } let emit_meth_name = |e: &mut Emitter<'arena, 'decl, D>| match &cls_get_expr { ast::ClassGetExpr::CGstring((pos, id)) => Ok(emit_pos_then( alloc, pos, instr::cgetl(alloc, Local::Named(Str::new_str(alloc, id.as_str()))), )), ast::ClassGetExpr::CGexpr(expr) => emit_expr(e, env, expr), }; Ok(match cexpr { ClassExpr::Id(cid) => { let tmp = e.local_gen_mut().get_unnamed(); ( InstrSeq::gather( alloc, vec![ instr::nulluninit(alloc), instr::nulluninit(alloc), emit_meth_name(e)?, instr::popl(alloc, tmp), ], ), InstrSeq::gather( alloc, vec![ instr::pushl(alloc, tmp), emit_known_class_id(alloc, e, &cid), instr::fcallclsmethod( alloc, IsLogAsDynamicCallOp::LogAsDynamicCall, fcall_args, ), ], ), ) } ClassExpr::Special(clsref) => { let tmp = e.local_gen_mut().get_unnamed(); ( InstrSeq::gather( alloc, vec![ instr::nulluninit(alloc), instr::nulluninit(alloc), emit_meth_name(e)?, instr::popl(alloc, tmp), ], ), InstrSeq::gather( alloc, vec![ instr::pushl(alloc, tmp), instr::fcallclsmethods(alloc, fcall_args, clsref), ], ), ) } ClassExpr::Expr(expr) => { let cls = e.local_gen_mut().get_unnamed(); let meth = e.local_gen_mut().get_unnamed(); ( InstrSeq::gather( alloc, vec![ instr::nulluninit(alloc), instr::nulluninit(alloc), emit_expr(e, env, &expr)?, instr::popl(alloc, cls), emit_meth_name(e)?, instr::popl(alloc, meth), ], ), InstrSeq::gather( alloc, vec![ instr::pushl(alloc, meth), instr::pushl(alloc, cls), instr::classgetc(alloc), instr::fcallclsmethod( alloc, IsLogAsDynamicCallOp::LogAsDynamicCall, fcall_args, ), ], ), ) } ClassExpr::Reified(instrs) => { let cls = e.local_gen_mut().get_unnamed(); let meth = e.local_gen_mut().get_unnamed(); ( InstrSeq::gather( alloc, vec![ instr::nulluninit(alloc), instr::nulluninit(alloc), instrs, instr::popl(alloc, cls), emit_meth_name(e)?, instr::popl(alloc, meth), ], ), InstrSeq::gather( alloc, vec![ instr::pushl(alloc, meth), instr::pushl(alloc, cls), instr::classgetc(alloc), instr::fcallclsmethod( alloc, 
IsLogAsDynamicCallOp::LogAsDynamicCall, fcall_args, ), ], ), ) } }) } } E_::Id(id) => { let FcallArgs(flags, num_args, _, _, _, _, _) = fcall_args; let fq_id = match string_utils::strip_global_ns(&id.1) { "min" if num_args == 2 && !flags.contains(FcallFlags::HAS_UNPACK) => { function::FunctionType::<'arena>::from_ast_name(alloc, "__SystemLib\\min2") } "max" if num_args == 2 && !flags.contains(FcallFlags::HAS_UNPACK) => { function::FunctionType::<'arena>::from_ast_name(alloc, "__SystemLib\\max2") } _ => (alloc, string_utils::strip_global_ns(&id.1)).into(), }; let generics = emit_generics(e, env, &mut fcall_args)?; Ok(( InstrSeq::gather( alloc, vec![instr::nulluninit(alloc), instr::nulluninit(alloc)], ), InstrSeq::gather( alloc, vec![generics, instr::fcallfuncd(alloc, fcall_args, fq_id)], ), )) } E_::String(s) => { // TODO(hrust) should be able to accept `let fq_id = function::from_raw_string(s);` let fq_id = (alloc, s.to_string().as_str()).into(); let generics = emit_generics(e, env, &mut fcall_args)?; Ok(( InstrSeq::gather( alloc, vec![instr::nulluninit(alloc), instr::nulluninit(alloc)], ), InstrSeq::gather( alloc, vec![generics, instr::fcallfuncd(alloc, fcall_args, fq_id)], ), )) } _ => emit_fcall_func(e, env, expr, fcall_args), } } fn get_reified_var_cexpr<'a, 'arena, 'decl, D: DeclProvider<'decl>>( env: &Env<'a, 'arena>, pos: &Pos, name: &str, ) -> Result<Option<ClassExpr<'arena>>> { let alloc = env.arena; Ok(emit_reified_type_opt::<D>(env, pos, name)?.map(|instrs| { ClassExpr::Reified(InstrSeq::gather( alloc, vec![ instrs, instr::basec(alloc, 0, MemberOpMode::Warn), instr::querym( alloc, 1, QueryOp::CGet, MemberKey::ET(Str::from("classname"), ReadOnlyOp::Any), ), ], )) })) } fn emit_args_inout_setters<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, args: &[ast::Expr], ) -> Result<(InstrSeq<'arena>, InstrSeq<'arena>)> { let alloc = env.arena; let aliases = if has_inout_arg(args) { 
inout_locals::collect_written_variables(env, args) } else { inout_locals::new_alias_info_map() }; fn emit_arg_and_inout_setter<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, i: usize, arg: &ast::Expr, aliases: &inout_locals::AliasInfoMap, ) -> Result<(InstrSeq<'arena>, InstrSeq<'arena>)> { use ast::Expr_ as E_; let alloc = env.arena; match &arg.2 { E_::Callconv(cc) if (cc.0).is_pinout() => { match &(cc.1).2 { // inout $var E_::Lvar(l) => { let local = get_local(e, env, &l.0, local_id::get_name(&l.1))?; let move_instrs = if !env.flags.contains(hhbc_by_ref_env::Flags::IN_TRY) && inout_locals::should_move_local_value(&local, aliases) { InstrSeq::gather( alloc, vec![instr::null(alloc), instr::popl(alloc, local)], ) } else { instr::empty(alloc) }; Ok(( InstrSeq::gather(alloc, vec![instr::cgetl(alloc, local), move_instrs]), instr::popl(alloc, local), )) } // inout $arr[...][...] E_::ArrayGet(ag) => { let array_get_result = emit_array_get_( e, env, &(cc.1).1, None, QueryOp::InOut, &ag.0, ag.1.as_ref(), false, false, Some((i, aliases)), )? .0; Ok(match array_get_result { ArrayGetInstr::Regular(instrs) => { let setter_base = emit_array_get( e, env, &(cc.1).1, Some(MemberOpMode::Define), QueryOp::InOut, &ag.0, ag.1.as_ref(), true, false, )? 
.0; let (mk, warninstr) = get_elem_member_key(e, env, 0, ag.1.as_ref(), false)?; let setter = InstrSeq::gather( alloc, vec![ warninstr, setter_base, instr::setm(alloc, 0, mk), instr::popc(alloc), ], ); (instrs, setter) } ArrayGetInstr::Inout { load, store } => { let (mut ld, mut st) = (vec![], vec![store]); for (instr, local_kind_opt) in load.into_iter() { match local_kind_opt { None => ld.push(instr), Some((l, kind)) => { let unset = instr::unsetl(alloc, l); let set = match kind { StoredValueKind::Expr => instr::setl(alloc, l), _ => instr::popl(alloc, l), }; ld.push(instr); ld.push(set); st.push(unset); } } } (InstrSeq::gather(alloc, ld), InstrSeq::gather(alloc, st)) } }) } _ => Err(unrecoverable( "emit_arg_and_inout_setter: Unexpected inout expression type", )), } } _ => Ok((emit_expr(e, env, arg)?, instr::empty(alloc))), } } let (instr_args, instr_setters): (Vec<InstrSeq>, Vec<InstrSeq>) = args .iter() .enumerate() .map(|(i, arg)| emit_arg_and_inout_setter(e, env, i, arg, &aliases)) .collect::<Result<Vec<_>>>()? 
// (continuation: unzip the per-argument (argument-emission, inout-setter)
// pairs collected on the previous line.)
.into_iter()
        .unzip();
    let instr_args = InstrSeq::gather(alloc, instr_args);
    let instr_setters = InstrSeq::gather(alloc, instr_setters);
    if has_inout_arg(args) {
        // After the call, stash the call's result in an unnamed local, run
        // the setters (which consume the extra inout return values and write
        // them back), then push the result again.
        let retval = e.local_gen_mut().get_unnamed();
        Ok((
            instr_args,
            InstrSeq::gather(
                alloc,
                vec![
                    instr::popl(alloc, retval),
                    instr_setters,
                    instr::pushl(alloc, retval),
                ],
            ),
        ))
    } else {
        Ok((instr_args, instr::empty(alloc)))
    }
}

/// Builds the FcallArgs metadata for a call site.
///
/// Computes the argument count, the number of return values (1 plus one per
/// inout argument), the unpack / lock-while-unwinding flags, per-argument
/// inout and readonly bitmaps, and copies the optional call-context string
/// into the arena. The readonly bitmap is only populated when the
/// ENABLE_READONLY_IN_EMITTER option is set; otherwise every slot is false.
fn get_fcall_args<'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    alloc: &'arena bumpalo::Bump,
    args: &[ast::Expr],
    uarg: Option<&ast::Expr>,
    async_eager_label: Option<Label>,
    context: Option<String>,
    lock_while_unwinding: bool,
) -> FcallArgs<'arena> {
    let num_args = args.len();
    // Each inout argument adds one return slot on top of the call's
    // ordinary single result.
    let num_rets = 1 + args.iter().filter(|x| is_inout_arg(*x)).count();
    let mut flags = FcallFlags::default();
    flags.set(FcallFlags::HAS_UNPACK, uarg.is_some());
    flags.set(FcallFlags::LOCK_WHILE_UNWINDING, lock_while_unwinding);
    let is_readonly_arg = if e
        .options()
        .hhvm
        .flags
        .contains(HhvmFlags::ENABLE_READONLY_IN_EMITTER)
    {
        |expr| is_readonly_expr(expr)
    } else {
        |_expr| false
    };
    FcallArgs::new(
        flags,
        num_rets,
        Slice::fill_iter(alloc, args.iter().map(is_inout_arg)),
        Slice::fill_iter(alloc, args.iter().map(is_readonly_arg)),
        async_eager_label,
        num_args,
        context
            .map(|s| bumpalo::collections::String::from_str_in(s.as_str(), alloc).into_bump_str()),
    )
}

/// True if the argument expression uses the `inout` calling convention.
fn is_inout_arg(e: &ast::Expr) -> bool {
    e.2.as_callconv().map_or(false, |cc| cc.0.is_pinout())
}

/// True if the expression is wrapped in a `readonly` expression.
fn is_readonly_expr(e: &ast::Expr) -> bool {
    match &e.2 {
        ast::Expr_::ReadonlyExpr(_) => true,
        _ => false,
    }
}

/// True if any argument uses the `inout` calling convention.
fn has_inout_arg(es: &[ast::Expr]) -> bool {
    es.iter().any(is_inout_arg)
}

/// Tries to emit an intrinsic / special form for a call to `lower_fq_name`.
///
/// Returns Ok(None) when the name is not special; the caller then falls back
/// to the generic call-emission path.
fn emit_special_function<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    pos: &Pos,
    args: &[ast::Expr],
    uarg: Option<&ast::Expr>,
    lower_fq_name: &str,
) -> Result<Option<InstrSeq<'arena>>> {
    use ast::{Expr as E, Expr_ as E_};
    let alloc = env.arena;
    // Total argument count, including the unpacked (`...$args`) argument.
    let nargs = args.len() + uarg.map_or(0, |_| 1);
    let
fun_and_clsmeth_disabled = e .options() .hhvm .hack_lang .flags .contains(LangFlags::DISALLOW_FUN_AND_CLS_METH_PSEUDO_FUNCS); match (lower_fq_name, args) { (id, _) if id == special_functions::ECHO => Ok(Some(InstrSeq::gather( alloc, args.iter() .enumerate() .map(|(i, arg)| { Ok(InstrSeq::gather( alloc, vec![ emit_expr(e, env, arg)?, emit_pos(alloc, pos), instr::print(alloc), if i == nargs - 1 { instr::empty(alloc) } else { instr::popc(alloc) }, ], )) }) .collect::<Result<_>>()?, ))), ("unsafe_cast", &[]) => Ok(Some(instr::null(alloc))), ("unsafe_cast", args) => Ok(Some(emit_expr(e, env, &args[0])?)), ("HH\\Readonly\\as_mut", args) if args.len() == 1 => Ok(Some(emit_expr(e, env, &args[0])?)), ("HH\\Readonly\\as_mut", _) => Err(emit_fatal::raise_fatal_runtime( pos, format!( "HH\\Readonly\\as_mut() expects exactly 1 parameter, {} given", nargs.to_string() ), )), ("HH\\invariant", args) if args.len() >= 2 => { let l = e.label_gen_mut().next_regular(); let expr_id = ast::Expr( (), pos.clone(), ast::Expr_::mk_id(ast_defs::Id( pos.clone(), "\\hh\\invariant_violation".into(), )), ); let call = ast::Expr( (), pos.clone(), ast::Expr_::mk_call(expr_id, vec![], args[1..].to_owned(), uarg.cloned()), ); let ignored_expr = emit_ignored_expr(e, env, &Pos::make_none(), &call)?; Ok(Some(InstrSeq::gather( alloc, vec![ emit_expr(e, env, &args[0])?, instr::jmpnz(alloc, l), ignored_expr, emit_fatal::emit_fatal_runtime(alloc, pos, "invariant_violation"), instr::label(alloc, l), instr::null(alloc), ], ))) } ("HH\\sequence", &[]) => Ok(Some(instr::null(alloc))), ("HH\\sequence", args) => { let es = args .iter() .map(|arg| emit_expr(e, env, arg)) .collect::<Result<Vec<_>>>()?; // This is horrible but we can't use `intersperse` because // `InstrSeq` doesn't implement `Clone`. 
let mut iss = Vec::new(); let len = es.len(); for (count, e) in es.into_iter().enumerate() { iss.push(e); if count != len - 1 { iss.push(instr::popc(alloc)); } } Ok(Some(InstrSeq::gather(alloc, iss))) } ("class_exists", &[ref arg1, ..]) | ("trait_exists", &[ref arg1, ..]) | ("interface_exists", &[ref arg1, ..]) if nargs == 1 || nargs == 2 => { let class_kind = match lower_fq_name { "class_exists" => ClassishKind::Class, "interface_exists" => ClassishKind::Interface, "trait_exists" => ClassishKind::Trait, _ => return Err(unrecoverable("emit_special_function: class_kind")), }; Ok(Some(InstrSeq::gather( alloc, vec![ emit_expr(e, env, arg1)?, instr::cast_string(alloc), if nargs == 1 { instr::true_(alloc) } else { InstrSeq::gather( alloc, vec![emit_expr(e, env, &args[1])?, instr::cast_bool(alloc)], ) }, instr::oodeclexists(alloc, class_kind), ], ))) } ("exit", _) | ("die", _) if nargs == 0 || nargs == 1 => { Ok(Some(emit_exit(e, env, args.first())?)) } ("HH\\fun", _) => { if fun_and_clsmeth_disabled { match args { [ast::Expr(_, _, ast::Expr_::String(func_name))] => { Err(emit_fatal::raise_fatal_parse( pos, format!( "`fun()` is disabled; switch to first-class references like `{}<>`", func_name ), )) } _ => Err(emit_fatal::raise_fatal_runtime( pos, "Constant string expected in fun()", )), } } else if nargs != 1 { Err(emit_fatal::raise_fatal_runtime( pos, format!( "fun() expects exactly 1 parameter, {} given", nargs.to_string() ), )) } else { match args { [ast::Expr(_, _, ast::Expr_::String(func_name))] => { Ok(Some(emit_hh_fun( e, env, pos, &[], // FIXME: This is not safe--string literals are binary strings. // There's no guarantee that they're valid UTF-8. 
unsafe { std::str::from_utf8_unchecked(func_name.as_slice()) }, )?)) } _ => Err(emit_fatal::raise_fatal_runtime( pos, "Constant string expected in fun()", )), } } } ("__systemlib\\meth_caller", _) => { // used by meth_caller() to directly emit func ptr if nargs != 1 { return Err(emit_fatal::raise_fatal_runtime( pos, format!( "fun() expects exactly 1 parameter, {} given", nargs.to_string() ), )); } match args { [E(_, _, E_::String(ref func_name))] => Ok(Some(instr::resolve_meth_caller( alloc, ( alloc, string_utils::strip_global_ns( // FIXME: This is not safe--string literals are binary strings. // There's no guarantee that they're valid UTF-8. unsafe { std::str::from_utf8_unchecked(func_name.as_slice()) }, ), ) .into(), ))), _ => Err(emit_fatal::raise_fatal_runtime( pos, "Constant string expected in fun()", )), } } ("__systemlib\\__debugger_is_uninit", _) => { if nargs != 1 { Err(emit_fatal::raise_fatal_runtime( pos, format!( "__debugger_is_uninit() expects exactly 1 parameter {} given", nargs ), )) } else { match args { [E(_, _, E_::Lvar(id))] => Ok(Some(instr::isunsetl( alloc, get_local(e, env, pos, id.name())?, ))), _ => Err(emit_fatal::raise_fatal_runtime( pos, "Local variable expected in __debugger_is_uninit()", )), } } } ("__SystemLib\\get_enum_member_by_label", _) if e.systemlib() => { let local = match args { [E(_, _, E_::Lvar(id))] => get_local(e, env, pos, id.name()), _ => Err(emit_fatal::raise_fatal_runtime( pos, "Argument must be the label argument", )), }?; Ok(Some(InstrSeq::gather( alloc, vec![instr::lateboundcls(alloc), instr::clscnsl(alloc, local)], ))) } ("HH\\inst_meth", _) => match args { [obj_expr, method_name] => Ok(Some(emit_inst_meth(e, env, obj_expr, method_name)?)), _ => Err(emit_fatal::raise_fatal_runtime( pos, format!( "inst_meth() expects exactly 2 parameters, {} given", nargs.to_string() ), )), }, ("HH\\class_meth", _) if fun_and_clsmeth_disabled => Err(emit_fatal::raise_fatal_parse( pos, "`class_meth()` is disabled; switch to 
first-class references like `C::bar<>`", )), ("HH\\class_meth", &[ref cls, ref meth, ..]) if nargs == 2 => { if meth.2.is_string() { if cls.2.is_string() || cls .2 .as_class_const() .map_or(false, |(_, (_, id))| string_utils::is_class(id)) || cls .2 .as_id() .map_or(false, |ast_defs::Id(_, id)| id == pseudo_consts::G__CLASS__) { return Ok(Some(emit_class_meth(e, env, cls, meth)?)); } } Err(emit_fatal::raise_fatal_runtime( pos, concat!( "class_meth() expects a literal class name or ::class constant, ", "followed by a constant string that refers to a static method ", "on that class" ), )) } ("HH\\class_meth", _) => Err(emit_fatal::raise_fatal_runtime( pos, format!( "class_meth() expects exactly 2 parameters, {} given", nargs.to_string() ), )), ("HH\\global_set", _) => match *args { [ref gkey, ref gvalue] => Ok(Some(InstrSeq::gather( alloc, vec![ emit_expr(e, env, gkey)?, emit_expr(e, env, gvalue)?, emit_pos(alloc, pos), instr::setg(alloc), instr::popc(alloc), instr::null(alloc), ], ))), _ => Err(emit_fatal::raise_fatal_runtime( pos, format!( "global_set() expects exactly 2 parameters, {} given", nargs.to_string() ), )), }, ("HH\\global_unset", _) => match *args { [ref gkey] => Ok(Some(InstrSeq::gather( alloc, vec![ emit_expr(e, env, gkey)?, emit_pos(alloc, pos), instr::unsetg(alloc), instr::null(alloc), ], ))), _ => Err(emit_fatal::raise_fatal_runtime( pos, format!( "global_unset() expects exactly 1 parameter, {} given", nargs.to_string() ), )), }, ("__hhvm_internal_whresult", &[E(_, _, E_::Lvar(ref param))]) if e.systemlib() => { Ok(Some(InstrSeq::gather( alloc, vec![ instr::cgetl( alloc, Local::Named(Str::new_str(alloc, local_id::get_name(&param.1))), ), instr::whresult(alloc), ], ))) } ("HH\\array_mark_legacy", _) if args.len() == 1 || args.len() == 2 => { Ok(Some(emit_array_mark_legacy(e, env, pos, args, true)?)) } ("HH\\array_unmark_legacy", _) if args.len() == 1 || args.len() == 2 => { Ok(Some(emit_array_mark_legacy(e, env, pos, args, false)?)) } 
("HH\\tag_provenance_here", _) if args.len() == 1 || args.len() == 2 => { Ok(Some(emit_tag_provenance_here(e, env, pos, args)?)) } _ => Ok( match (args, istype_op(lower_fq_name), is_isexp_op(lower_fq_name)) { (&[ref arg_expr], _, Some(ref h)) => { let is_expr = emit_is(e, env, pos, &h)?; Some(InstrSeq::gather( alloc, vec![emit_expr(e, env, &arg_expr)?, is_expr], )) } (&[E(_, _, E_::Lvar(ref arg_id))], Some(i), _) if superglobals::is_any_global(arg_id.name()) => { Some(InstrSeq::gather( alloc, vec![ emit_local(e, env, BareThisOp::NoNotice, &arg_id)?, emit_pos(alloc, pos), instr::istypec(alloc, i), ], )) } (&[E(_, _, E_::Lvar(ref arg_id))], Some(i), _) if !is_local_this(env, &arg_id.1) => { Some(instr::istypel( alloc, get_local(e, env, &arg_id.0, &(arg_id.1).1)?, i, )) } (&[ref arg_expr], Some(i), _) => Some(InstrSeq::gather( alloc, vec![ emit_expr(e, env, &arg_expr)?, emit_pos(alloc, pos), instr::istypec(alloc, i), ], )), _ => match get_call_builtin_func_info(lower_fq_name) { Some((nargs, i)) if nargs == args.len() => Some(InstrSeq::gather( alloc, vec![ emit_exprs(e, env, args)?, emit_pos(alloc, pos), instr::instr(alloc, i), ], )), _ => None, }, }, ), } } fn emit_inst_meth<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, obj_expr: &ast::Expr, method_name: &ast::Expr, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let instrs = InstrSeq::gather( alloc, vec![ emit_expr(e, env, obj_expr)?, emit_expr(e, env, method_name)?, instr::new_vec_array(alloc, 2), ], ); Ok(instrs) } fn emit_class_meth<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, cls: &ast::Expr, meth: &ast::Expr, ) -> Result<InstrSeq<'arena>> { use ast::Expr_ as E_; let alloc = env.arena; if e.options() .hhvm .flags .contains(HhvmFlags::EMIT_CLS_METH_POINTERS) { let method_id = match &meth.2 { E_::String(method_name) => (alloc, method_name.to_string().as_str()).into(), _ => return 
            Err(unrecoverable("emit_class_meth: unhandled method")),
        };
        // `C::class` constant receiver: resolve via the known class id.
        if let Some((cid, (_, id))) = cls.2.as_class_const() {
            if string_utils::is_class(id) {
                return emit_class_meth_native(e, env, &cls.1, cid, method_id, &[]);
            }
        }
        // `__CLASS__` receiver: resolve against the current class (Self_).
        if let Some(ast_defs::Id(_, s)) = cls.2.as_id() {
            if s == pseudo_consts::G__CLASS__ {
                return Ok(instr::resolveclsmethods(
                    alloc,
                    SpecialClsRef::Self_,
                    method_id,
                ));
            }
        }
        // Literal class-name string receiver: resolve directly by name.
        if let Some(class_name) = cls.2.as_string() {
            return Ok(instr::resolveclsmethodd(
                alloc,
                (
                    alloc,
                    string_utils::strip_global_ns(
                        // FIXME: This is not safe--string literals are binary strings.
                        // There's no guarantee that they're valid UTF-8.
                        unsafe { std::str::from_utf8_unchecked(class_name.as_slice()) },
                    ),
                )
                    .into(),
                method_id,
            ));
        }
        Err(unrecoverable("emit_class_meth: unhandled method"))
    } else {
        // Cls-meth pointers disabled: fall back to a legacy vec[class, meth] pair.
        let instrs = InstrSeq::gather(
            alloc,
            vec![
                emit_expr(e, env, cls)?,
                emit_expr(e, env, meth)?,
                instr::new_vec_array(alloc, 2),
            ],
        );
        Ok(instrs)
    }
}

/// Emit a native cls-meth pointer (`ResolveClsMethod*` family) for a
/// class-id receiver, handling reified generics on both the class and the
/// method type arguments.
fn emit_class_meth_native<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    pos: &Pos,
    cid: &ast::ClassId,
    method_id: MethodId<'arena>,
    targs: &[ast::Targ],
) -> Result<InstrSeq<'arena>> {
    let alloc = env.arena;
    let mut cexpr = ClassExpr::class_id_to_class_expr(e, false, true, &env.scope, cid);
    // If the name is actually a reified type parameter, switch to the
    // reified-class-expression form.
    if let ClassExpr::Id(ast_defs::Id(_, name)) = &cexpr {
        if let Some(reified_var_cexpr) = get_reified_var_cexpr::<D>(env, pos, &name)? {
            cexpr = reified_var_cexpr;
        }
    }
    let has_generics = has_non_tparam_generics_targs(env, targs);
    let mut emit_generics = || -> Result<InstrSeq<'arena>> {
        emit_reified_targs(
            e,
            env,
            pos,
            &targs.iter().map(|targ| &targ.1).collect::<Vec<_>>(),
        )
    };
    // Pick the resolve instruction variant: D = direct by name, S = special
    // class ref, plain = class on stack; the R-prefixed forms also consume
    // reified generics.
    Ok(match cexpr {
        ClassExpr::Id(ast_defs::Id(_, name)) if !has_generics => instr::resolveclsmethodd(
            alloc,
            class::ClassType::<'arena>::from_ast_name_and_mangle(alloc, &name),
            method_id,
        ),
        ClassExpr::Id(ast_defs::Id(_, name)) => InstrSeq::gather(
            alloc,
            vec![
                emit_generics()?,
                instr::resolverclsmethodd(
                    alloc,
                    class::ClassType::<'arena>::from_ast_name_and_mangle(alloc, &name),
                    method_id,
                ),
            ],
        ),
        ClassExpr::Special(clsref) if !has_generics => {
            instr::resolveclsmethods(alloc, clsref, method_id)
        }
        ClassExpr::Special(clsref) => InstrSeq::gather(
            alloc,
            vec![
                emit_generics()?,
                instr::resolverclsmethods(alloc, clsref, method_id),
            ],
        ),
        ClassExpr::Reified(instrs) if !has_generics => InstrSeq::gather(
            alloc,
            vec![
                instrs,
                instr::classgetc(alloc),
                instr::resolveclsmethod(alloc, method_id),
            ],
        ),
        ClassExpr::Reified(instrs) => InstrSeq::gather(
            alloc,
            vec![
                instrs,
                instr::classgetc(alloc),
                emit_generics()?,
                instr::resolverclsmethod(alloc, method_id),
            ],
        ),
        ClassExpr::Expr(_) => {
            return Err(unrecoverable(
                "emit_class_meth_native: ClassExpr::Expr should be impossible",
            ));
        }
    })
}

/// Map a builtin function name to (arity, instruction) when it can be
/// emitted as a single opcode instead of a call.
fn get_call_builtin_func_info<'arena>(id: impl AsRef<str>) -> Option<(usize, Instruct<'arena>)> {
    use {Instruct::*, InstructGet::*, InstructIsset::*, InstructMisc::*, InstructOperator::*};
    match id.as_ref() {
        "array_key_exists" => Some((2, IMisc(AKExists))),
        "hphp_array_idx" => Some((3, IMisc(ArrayIdx))),
        "intval" => Some((1, IOp(CastInt))),
        "boolval" => Some((1, IOp(CastBool))),
        "strval" => Some((1, IOp(CastString))),
        "floatval" | "doubleval" => Some((1, IOp(CastDouble))),
        "HH\\vec" => Some((1, IOp(CastVec))),
        "HH\\keyset" => Some((1, IOp(CastKeyset))),
        "HH\\dict" => Some((1, IOp(CastDict))),
        // varray/darray are emitted as vec/dict casts.
        "HH\\varray" => Some((1, IOp(CastVec))),
        "HH\\darray" => Some((1, IOp(CastDict))),
        // TODO: enforce that this returns readonly
        "HH\\global_readonly_get" => Some((1, IGet(CGetG))),
        "HH\\global_get" => Some((1, IGet(CGetG))),
        "HH\\global_isset" => Some((1, IIsset(IssetG))),
        _ => None,
    }
}

/// Emit a first-class function pointer (`foo<>` or `C::bar<>`).
fn emit_function_pointer<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    pos: &Pos,
    fpid: &ast::FunctionPtrId,
    targs: &[ast::Targ],
) -> Result<InstrSeq<'arena>> {
    let alloc = env.arena;
    let instrs = match fpid {
        // This is a function name. Equivalent to HH\fun('str')
        ast::FunctionPtrId::FPId(id) => emit_hh_fun(e, env, pos, targs, id.name())?,
        // class_meth
        ast::FunctionPtrId::FPClassConst(cid, method_id) => {
            // TODO(hrust) should accept `let method_id = method::MethodType::from_ast_name(&(cc.1).1);`
            let method_id: method::MethodType<'arena> =
                (alloc, string_utils::strip_global_ns(&method_id.1)).into();
            emit_class_meth_native(e, env, pos, cid, method_id, targs)?
        }
    };
    Ok(emit_pos_then(alloc, pos, instrs))
}

/// Emit `HH\fun('name')`: a ResolveFunc, or ResolveRFunc when reified
/// generics must accompany the pointer.
fn emit_hh_fun<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    pos: &Pos,
    targs: &[ast::Targ],
    fname: &str,
) -> Result<InstrSeq<'arena>> {
    let alloc = env.arena;
    let fname = string_utils::strip_global_ns(fname);
    if has_non_tparam_generics_targs(env, targs) {
        let generics = emit_reified_targs(
            e,
            env,
            pos,
            targs
                .iter()
                .map(|targ| &targ.1)
                .collect::<Vec<_>>()
                .as_slice(),
        )?;
        Ok(InstrSeq::gather(
            alloc,
            vec![generics, instr::resolve_rfunc(alloc, (alloc, fname).into())],
        ))
    } else {
        Ok(instr::resolve_func(alloc, (alloc, fname).into()))
    }
}

/// Emit an `is` type-test against hint `h`. When the type structure is
/// fully static it can be resolved at emit time; otherwise the runtime
/// type-structure instructions are used.
fn emit_is<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    pos: &Pos,
    h: &ast::Hint,
) -> Result<InstrSeq<'arena>> {
    let alloc = env.arena;
    let (ts_instrs, is_static) = emit_reified_arg(e, env, pos, true, h)?;
    Ok(if is_static {
        match &*h.1 {
            // `is this` gets its own late-bound-class instruction.
            aast_defs::Hint_::Happly(ast_defs::Id(_, id), hs)
                if hs.is_empty() && string_utils::strip_hh_ns(&id) ==
    &Env<'a, 'arena>,
) -> bool {
    // (continuation of has_reified_types — the signature begins before this chunk)
    // True if any type parameter in the enclosing scope is (fully) reified.
    for param in env.scope.get_tparams() {
        match param.reified {
            oxidized::ast::ReifyKind::Reified => {
                return true;
            }
            _ => {}
        }
    }
    false
}

/// Emit a call expression, intercepting a handful of emitter intrinsics
/// (`__hhas_adata`, `isset`, `idx`, `eval`, frame-metadata, `exit`/`die`,
/// `__systemlib_reified_generics`) before falling back to `emit_call`.
fn emit_call_expr<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    pos: &Pos,
    async_eager_label: Option<Label>,
    (expr, targs, args, uarg): &(ast::Expr, Vec<ast::Targ>, Vec<ast::Expr>, Option<ast::Expr>),
) -> Result<InstrSeq<'arena>> {
    let alloc = env.arena;
    let jit_enable_rename_function = e
        .options()
        .hhvm
        .flags
        .contains(HhvmFlags::JIT_ENABLE_RENAME_FUNCTION);
    use {ast::Expr as E, ast::Expr_ as E_};
    match (&expr.2, &args[..], uarg) {
        (E_::Id(id), [E(_, _, E_::String(data))], None)
            if id.1 == special_functions::HHAS_ADATA =>
        {
            // FIXME: This is not safe--string literals are binary strings.
            // There's no guarantee that they're valid UTF-8.
            let v = TypedValue::mk_hhas_adata(
                alloc.alloc_str(unsafe { std::str::from_utf8_unchecked(data.as_ref()) }),
            );
            Ok(emit_pos_then(alloc, pos, instr::typedvalue(alloc, v)))
        }
        (E_::Id(id), _, None) if id.1 == pseudo_functions::ISSET => {
            emit_call_isset_exprs(e, env, pos, args)
        }
        // `idx` is only special when rename_function is off (renaming could
        // rebind it) and its arity is 2 or 3.
        (E_::Id(id), args, None)
            if id.1 == fb::IDX
                && !jit_enable_rename_function
                && (args.len() == 2 || args.len() == 3) =>
        {
            emit_idx(e, env, pos, args)
        }
        (E_::Id(id), [arg1], None) if id.1 == emitter_special_functions::EVAL => {
            emit_eval(e, env, pos, arg1)
        }
        // Stores the metadata expression into the reserved $86metadata local.
        (E_::Id(id), [arg1], None) if id.1 == emitter_special_functions::SET_FRAME_METADATA => {
            Ok(InstrSeq::gather(
                alloc,
                vec![
                    emit_expr(e, env, arg1)?,
                    emit_pos(alloc, pos),
                    instr::popl(alloc, Local::Named(Slice::new("$86metadata".as_bytes()))),
                    instr::null(alloc),
                ],
            ))
        }
        (E_::Id(id), [], None)
            if id.1 == pseudo_functions::EXIT || id.1 == pseudo_functions::DIE =>
        {
            let exit = emit_exit(e, env, None)?;
            Ok(emit_pos_then(alloc, pos, exit))
        }
        (E_::Id(id), [arg1], None)
            if id.1 == pseudo_functions::EXIT || id.1 == pseudo_functions::DIE =>
        {
            let exit = emit_exit(e, env, Some(arg1))?;
            Ok(emit_pos_then(alloc, pos, exit))
        }
        (E_::Id(id), [], _)
            if id.1 == emitter_special_functions::SYSTEMLIB_REIFIED_GENERICS
                && e.systemlib()
                && has_reified_types(env) =>
        {
            // Rewrite __systemlib_reified_generics() to $0ReifiedGenerics,
            // but only in systemlib functions that take a reified generic.
            let lvar = E::new(
                (),
                pos.clone(),
                E_::Lvar(Box::new(ast::Lid(
                    pos.clone(),
                    local_id::make_unscoped(string_utils::reified::GENERICS_LOCAL_NAME),
                ))),
            );
            emit_expr(e, env, &lvar)
        }
        (_, _, _) => {
            let instrs = emit_call(
                e,
                env,
                pos,
                expr,
                targs,
                args,
                uarg.as_ref(),
                async_eager_label,
            )?;
            Ok(emit_pos_then(alloc, pos, instrs))
        }
    }
}

/// Emit the member-op sequence that reads reified generic `index` out of
/// either the function's reified-generics local (`is_fun`) or the current
/// object's reified property.
pub fn emit_reified_generic_instrs<'arena>(
    alloc: &'arena bumpalo::Bump,
    pos: &Pos,
    is_fun: bool,
    index: usize,
) -> Result<InstrSeq<'arena>> {
    let base = if is_fun {
        instr::basel(
            alloc,
            Local::Named(Str::new_str(
                alloc,
                string_utils::reified::GENERICS_LOCAL_NAME,
            )),
            MemberOpMode::Warn,
        )
    } else {
        InstrSeq::gather(
            alloc,
            vec![
                instr::checkthis(alloc),
                instr::baseh(alloc),
                instr::dim_warn_pt(
                    alloc,
                    prop::from_raw_string(alloc, string_utils::reified::PROP_NAME),
                    ReadOnlyOp::Any,
                ),
            ],
        )
    };
    Ok(emit_pos_then(
        alloc,
        pos,
        InstrSeq::gather(
            alloc,
            vec![
                base,
                instr::querym(
                    alloc,
                    0,
                    QueryOp::CGet,
                    MemberKey::EI(index.try_into().unwrap(), ReadOnlyOp::Any),
                ),
            ],
        ),
    ))
}

/// Emit the reified type named `name`, failing with a runtime fatal when
/// the name is not a reified parameter in scope.
fn emit_reified_type<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    env: &Env<'a, 'arena>,
    pos: &Pos,
    name: &str,
) -> Result<InstrSeq<'arena>> {
    emit_reified_type_opt::<D>(env, pos, name)?
        .ok_or_else(|| emit_fatal::raise_fatal_runtime(&Pos::make_none(), "Invalid reified param"))
}

/// Like `emit_reified_type` but returns `Ok(None)` when `name` is not a
/// reified type parameter (of the function or the enclosing class).
fn emit_reified_type_opt<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    env: &Env<'a, 'arena>,
    pos: &Pos,
    name: &str,
) -> Result<Option<InstrSeq<'arena>>> {
    let alloc = env.arena;
    let is_in_lambda = env.scope.is_in_lambda();
    // Inside a lambda the generic is read from a captured local instead of
    // the usual reified-generics storage.
    let cget_instr = |is_fun, i| {
        instr::cgetl(
            env.arena,
            Local::Named(Str::new_str(
                alloc,
                string_utils::reified::reified_generic_captured_name(is_fun, i).as_str(),
            )),
        )
    };
    let check = |is_soft| -> Result<()> {
        if is_soft {
            Err(emit_fatal::raise_fatal_parse(
                pos,
                format!(
                    "{} is annotated to be a soft reified generic, it cannot be used until the __Soft annotation is removed",
                    name
                ),
            ))
        } else {
            Ok(())
        }
    };
    let emit = |(i, is_soft), is_fun| {
        check(is_soft)?;
        Ok(Some(if is_in_lambda {
            cget_instr(is_fun, i)
        } else {
            emit_reified_generic_instrs(alloc, pos, is_fun, i)?
        }))
    };
    // Try function-level tparams first, then class-level ones.
    match is_reified_tparam(env, true, name) {
        Some((i, is_soft)) => emit((i, is_soft), true),
        None => match is_reified_tparam(env, false, name) {
            Some((i, is_soft)) => emit((i, is_soft), false),
            None => Ok(None),
        },
    }
}

/// Push a known class (by name) onto the stack and record it in the
/// emitted unit's symbol refs.
fn emit_known_class_id<'arena, 'decl, D: DeclProvider<'decl>>(
    alloc: &'arena bumpalo::Bump,
    e: &mut Emitter<'arena, 'decl, D>,
    id: &ast_defs::Id,
) -> InstrSeq<'arena> {
    let cid = class::ClassType::from_ast_name(alloc, &id.1);
    let cid_string = instr::string(alloc, cid.to_raw_string());
    emit_symbol_refs::add_class(alloc, e, cid);
    InstrSeq::gather(alloc, vec![cid_string, instr::classgetc(alloc)])
}

/// Emit instructions that leave a class reference on the stack for any
/// `ClassExpr` variant (special refs, known ids, dynamic exprs, reified).
fn emit_load_class_ref<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    pos: &Pos,
    cexpr: ClassExpr<'arena>,
) -> Result<InstrSeq<'arena>> {
    let alloc = env.arena;
    let instrs = match cexpr {
        ClassExpr::Special(SpecialClsRef::Self_) => instr::self_(alloc),
        ClassExpr::Special(SpecialClsRef::Static) => instr::lateboundcls(alloc),
        ClassExpr::Special(SpecialClsRef::Parent) => instr::parent(alloc),
        ClassExpr::Id(id) => emit_known_class_id(alloc, e, &id),
        ClassExpr::Expr(expr) => InstrSeq::gather(
            alloc,
            vec![
                emit_pos(alloc, pos),
                emit_expr(e, env, &expr)?,
                instr::classgetc(alloc),
            ],
        ),
        ClassExpr::Reified(instrs) => InstrSeq::gather(
            alloc,
            vec![emit_pos(alloc, pos), instrs, instr::classgetc(alloc)],
        ),
    };
    Ok(emit_pos_then(alloc, pos, instrs))
}

/// Emit a `new` expression: resolve the class id (handling `self`/`parent`
/// resolution legality, reified generics and generic targs), choose the
/// right NewObj* instruction, then call the constructor and lock the object.
fn emit_new<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    pos: &Pos,
    (cid, targs, args, uarg, _): &(
        ast::ClassId,
        Vec<ast::Targ>,
        Vec<ast::Expr>,
        Option<ast::Expr>,
        (),
    ),
    is_reflection_class_builtin: bool,
) -> Result<InstrSeq<'arena>> {
    let alloc = env.arena;
    if has_inout_arg(args) {
        return Err(unrecoverable("Unexpected inout arg in new expr"));
    }
    // `self`/`parent` may only be resolved to a concrete class when no
    // reified/erased-generic subtleties make the resolution unsound.
    let resolve_self = match &cid.2.as_ciexpr() {
        Some(ci_expr) => match ci_expr.as_id() {
            Some(ast_defs::Id(_, n)) if string_utils::is_self(n) => env
                .scope
                .get_class_tparams()
                .iter()
                .all(|tp| tp.reified.is_erased()),
            Some(ast_defs::Id(_, n)) if string_utils::is_parent(n) => {
                env.scope
                    .get_class()
                    .map_or(true, |cls| match cls.get_extends() {
                        [h, ..] => {
                            h.1.as_happly()
                                .map_or(true, |(_, l)| !has_non_tparam_generics(env, l))
                        }
                        _ => true,
                    })
            }
            _ => true,
        },
        _ => true,
    };
    use HasGenericsOp as H;
    let cexpr = ClassExpr::class_id_to_class_expr(e, false, resolve_self, &env.scope, cid);
    let (cexpr, has_generics) = match &cexpr {
        ClassExpr::Id(ast_defs::Id(_, name)) => match emit_reified_type_opt::<D>(env, pos, name)?
        {
            // (interior of emit_new — matching on whether the class id is a
            // reified generic and whether explicit targs are present)
            Some(instrs) => {
                if targs.is_empty() {
                    (ClassExpr::Reified(instrs), H::MaybeGenerics)
                } else {
                    return Err(emit_fatal::raise_fatal_parse(
                        pos,
                        "Cannot have higher kinded reified generics",
                    ));
                }
            }
            None if !has_non_tparam_generics_targs(env, targs) => (cexpr, H::NoGenerics),
            None => (cexpr, H::HasGenerics),
        },
        _ => (cexpr, H::NoGenerics),
    };
    if is_reflection_class_builtin {
        // Reflection builtins: only evaluate the arguments (for effects);
        // no object is actually constructed here.
        scope::with_unnamed_locals(alloc, e, |alloc, e| {
            let (instr_args, _) = emit_args_inout_setters(e, env, args)?;
            let instr_uargs = match uarg {
                None => instr::empty(alloc),
                Some(uarg) => emit_expr(e, env, uarg)?,
            };
            Ok((
                instr::empty(alloc),
                InstrSeq::gather(alloc, vec![instr_args, instr_uargs]),
                instr::empty(alloc),
            ))
        })
    } else {
        let newobj_instrs = match cexpr {
            // Known class name: NewObjD, or NewObjRD with reified targs.
            ClassExpr::Id(ast_defs::Id(_, cname)) => {
                let id = class::ClassType::<'arena>::from_ast_name_and_mangle(alloc, &cname);
                emit_symbol_refs::add_class(alloc, e, id);
                match has_generics {
                    H::NoGenerics => InstrSeq::gather(
                        alloc,
                        vec![emit_pos(alloc, pos), instr::newobjd(alloc, id)],
                    ),
                    H::HasGenerics => InstrSeq::gather(
                        alloc,
                        vec![
                            emit_pos(alloc, pos),
                            emit_reified_targs(
                                e,
                                env,
                                pos,
                                &targs.iter().map(|t| &t.1).collect::<Vec<_>>(),
                            )?,
                            instr::newobjrd(alloc, id),
                        ],
                    ),
                    H::MaybeGenerics => {
                        return Err(unrecoverable(
                            "Internal error: This case should have been transformed",
                        ));
                    }
                }
            }
            ClassExpr::Special(cls_ref) => InstrSeq::gather(
                alloc,
                vec![emit_pos(alloc, pos), instr::newobjs(alloc, cls_ref)],
            ),
            ClassExpr::Reified(instrs) if has_generics == H::MaybeGenerics => InstrSeq::gather(
                alloc,
                vec![instrs, instr::classgetts(alloc), instr::newobjr(alloc)],
            ),
            _ => InstrSeq::gather(
                alloc,
                vec![
                    emit_load_class_ref(e, env, pos, cexpr)?,
                    instr::newobj(alloc),
                ],
            ),
        };
        // NewObj*; Dup; NullUninit; <args>; FCallCtor; PopC; LockObj —
        // the duplicated object ref is what remains as the expression value.
        scope::with_unnamed_locals(alloc, e, |alloc, e| {
            let (instr_args, _) = emit_args_inout_setters(e, env, args)?;
            let instr_uargs = match uarg {
                None => instr::empty(alloc),
                Some(uarg) => emit_expr(e, env, uarg)?,
            };
            Ok((
                instr::empty(alloc),
                InstrSeq::gather(
                    alloc,
                    vec![
                        newobj_instrs,
                        instr::dup(alloc),
                        instr::nulluninit(alloc),
                        instr_args,
                        instr_uargs,
                        emit_pos(alloc, pos),
                        instr::fcallctor(
                            alloc,
                            get_fcall_args(
                                e,
                                alloc,
                                args,
                                uarg.as_ref(),
                                None,
                                env.call_context.clone(),
                                true,
                            ),
                        ),
                        instr::popc(alloc),
                        instr::lockobj(alloc),
                    ],
                ),
                instr::empty(alloc),
            ))
        })
    }
}

/// Emit a property read (`$obj->prop` / `$obj?->prop`). Returns the
/// instruction sequence plus, for null-coalesce assignment, the number of
/// stack values QueryM deliberately left unpopped for the later write.
fn emit_obj_get<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    pos: &Pos,
    query_op: QueryOp,
    expr: &ast::Expr,
    prop: &ast::Expr,
    nullflavor: &ast_defs::OgNullFlavor,
    null_coalesce_assignment: bool,
) -> Result<(InstrSeq<'arena>, Option<NumParams>)> {
    let alloc = env.arena;
    // `$this?->x` is a parse-time fatal.
    if let Some(ast::Lid(pos, id)) = expr.2.as_lvar() {
        if local_id::get_name(&id) == special_idents::THIS
            && nullflavor.eq(&ast_defs::OgNullFlavor::OGNullsafe)
        {
            return Err(emit_fatal::raise_fatal_parse(
                pos,
                "?-> is not allowed with $this",
            ));
        }
    }
    // XHP attribute access (`->:attr`) lowers to a getAttribute call.
    if let Some(ast_defs::Id(_, s)) = prop.2.as_id() {
        if string_utils::is_xhp(s) {
            return Ok((emit_xhp_obj_get(e, env, pos, &expr, s, nullflavor)?, None));
        }
    }
    let mode = if null_coalesce_assignment {
        MemberOpMode::Warn
    } else {
        get_querym_op_mode(&query_op)
    };
    // First call only measures the property's stack contribution; the real
    // member key is computed below once the class stack size is known.
    let prop_stack_size = emit_prop_expr(e, env, nullflavor, 0, prop, null_coalesce_assignment)?.2;
    let (
        base_expr_instrs_begin,
        base_expr_instrs_end,
        base_setup_instrs,
        base_stack_size,
        cls_stack_size,
    ) = emit_base(
        e,
        env,
        expr,
        mode,
        true,
        BareThisOp::Notice,
        null_coalesce_assignment,
        prop_stack_size,
        0,
    )?;
    let (mk, prop_instrs, _) = emit_prop_expr(
        e,
        env,
        nullflavor,
        cls_stack_size,
        prop,
        null_coalesce_assignment,
    )?;
    let total_stack_size = (prop_stack_size + base_stack_size + cls_stack_size) as usize;
    let num_params = if null_coalesce_assignment {
        0
    } else {
        total_stack_size
    };
    let final_instr = instr::querym(alloc, num_params, query_op, mk);
    // Don't pop elems/props from the stack during the lookup for null
    // coalesce assignment in case we do a write later.
        // (tail of emit_array_get — forwards to emit_array_get_ and rejects
        // the inout form, which plain reads never produce)
        null_coalesce_assignment,
        None,
    )?;
    match result {
        (ArrayGetInstr::Regular(i), querym_n_unpopped) => Ok((i, querym_n_unpopped)),
        (ArrayGetInstr::Inout { .. }, _) => Err(unrecoverable("unexpected inout")),
    }
}

/// Core of subscript-read emission (`$a[$k]`). Produces either a regular
/// instruction sequence or, when `inout_param_info` demands temp locals, an
/// `Inout { load, store }` pair of load steps and a write-back sequence.
fn emit_array_get_<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    outer_pos: &Pos,
    mode: Option<MemberOpMode>,
    query_op: QueryOp,
    base_expr: &ast::Expr,
    elem: Option<&ast::Expr>,
    no_final: bool,
    null_coalesce_assignment: bool,
    inout_param_info: Option<(usize, &inout_locals::AliasInfoMap)>,
) -> Result<(ArrayGetInstr<'arena>, Option<usize>)> {
    use ast::Expr as E;
    let alloc = env.arena;
    match (base_expr, elem) {
        // `$a[]` (missing index) is only legal for writes/appends.
        (E(_, pos, _), None)
            if !env
                .flags
                .contains(hhbc_by_ref_env::Flags::ALLOWS_ARRAY_APPEND) =>
        {
            Err(emit_fatal::raise_fatal_runtime(
                pos,
                "Can't use [] for reading",
            ))
        }
        _ => {
            let local_temp_kind = get_local_temp_kind(env, false, inout_param_info, elem);
            let mode = if null_coalesce_assignment {
                MemberOpMode::Warn
            } else {
                mode.unwrap_or_else(|| get_querym_op_mode(&query_op))
            };
            let (elem_instrs, elem_stack_size) =
                emit_elem(e, env, elem, local_temp_kind, null_coalesce_assignment)?;
            let base_result = emit_base_(
                e,
                env,
                base_expr,
                mode,
                false,
                match query_op {
                    // `isset($a[$k])` must not notice on a bare $this base.
                    QueryOp::Isset => BareThisOp::NoNotice,
                    _ => BareThisOp::Notice,
                },
                null_coalesce_assignment,
                elem_stack_size,
                0,
                inout_param_info,
            )?;
            let cls_stack_size = match &base_result {
                ArrayGetBase::Regular(base) => base.cls_stack_size,
                ArrayGetBase::Inout { load, .. } => load.cls_stack_size,
            };
            let (memberkey, warninstr) =
                get_elem_member_key(e, env, cls_stack_size, elem, null_coalesce_assignment)?;
            let mut querym_n_unpopped = None;
            // Builds the final QueryM; for null-coalesce assignment the
            // operands stay on the stack (reported via querym_n_unpopped).
            let mut make_final =
                |total_stack_size: StackIndex, memberkey: MemberKey<'arena>| -> InstrSeq {
                    if no_final {
                        instr::empty(alloc)
                    } else if null_coalesce_assignment {
                        querym_n_unpopped = Some(total_stack_size as usize);
                        instr::querym(alloc, 0, query_op, memberkey)
                    } else {
                        instr::querym(alloc, total_stack_size as usize, query_op, memberkey)
                    }
                };
            // Four cases below: whether the base and/or the index expression
            // need to be saved into unnamed temp locals for inout handling.
            let instr = match (base_result, local_temp_kind) {
                (ArrayGetBase::Regular(base), None) =>
                // neither base nor expression needs to store anything
                {
                    ArrayGetInstr::Regular(InstrSeq::gather(
                        alloc,
                        vec![
                            warninstr,
                            base.base_instrs,
                            elem_instrs,
                            base.cls_instrs,
                            emit_pos(alloc, outer_pos),
                            base.setup_instrs,
                            make_final(
                                base.base_stack_size + base.cls_stack_size + elem_stack_size,
                                memberkey,
                            ),
                        ],
                    ))
                }
                (ArrayGetBase::Regular(base), Some(local_kind)) => {
                    // base does not need temp locals but index expression does
                    let local = e.local_gen_mut().get_unnamed();
                    let load = vec![
                        // load base and indexer, value of indexer will be saved in local
                        (
                            InstrSeq::gather(
                                alloc,
                                vec![InstrSeq::clone(alloc, &base.base_instrs), elem_instrs],
                            ),
                            Some((local, local_kind)),
                        ),
                        // finish loading the value
                        (
                            InstrSeq::gather(
                                alloc,
                                vec![
                                    warninstr,
                                    base.base_instrs,
                                    emit_pos(alloc, outer_pos),
                                    base.setup_instrs,
                                    make_final(
                                        base.base_stack_size
                                            + base.cls_stack_size
                                            + elem_stack_size,
                                        memberkey,
                                    ),
                                ],
                            ),
                            None,
                        ),
                    ];
                    let store = InstrSeq::gather(
                        alloc,
                        vec![
                            emit_store_for_simple_base(
                                e,
                                env,
                                outer_pos,
                                elem_stack_size,
                                base_expr,
                                local,
                                false,
                            )?,
                            instr::popc(alloc),
                        ],
                    );
                    ArrayGetInstr::Inout { load, store }
                }
                (
                    ArrayGetBase::Inout {
                        load:
                            ArrayGetBaseData {
                                mut base_instrs,
                                cls_instrs,
                                setup_instrs,
                                base_stack_size,
                                cls_stack_size,
                            },
                        store,
                    },
                    None,
                ) => {
                    // base needs temp locals, indexer - does not,
                    // simply concat two instruction sequences
                    base_instrs.push((
                        InstrSeq::gather(
                            alloc,
                            vec![
                                warninstr,
                                elem_instrs,
                                cls_instrs,
                                emit_pos(alloc, outer_pos),
                                setup_instrs,
                                make_final(
                                    base_stack_size + cls_stack_size + elem_stack_size,
                                    memberkey.clone(),
                                ),
                            ],
                        ),
                        None,
                    ));
                    let store = InstrSeq::gather(
                        alloc,
                        vec![store, instr::setm(alloc, 0, memberkey), instr::popc(alloc)],
                    );
                    ArrayGetInstr::Inout {
                        load: base_instrs,
                        store,
                    }
                }
                (
                    ArrayGetBase::Inout {
                        load:
                            ArrayGetBaseData {
                                mut base_instrs,
                                cls_instrs,
                                setup_instrs,
                                base_stack_size,
                                cls_stack_size,
                            },
                        store,
                    },
                    Some(local_kind),
                ) => {
                    // both base and index need temp locals,
                    // create local for index value
                    let local = e.local_gen_mut().get_unnamed();
                    base_instrs.push((elem_instrs, Some((local, local_kind))));
                    base_instrs.push((
                        InstrSeq::gather(
                            alloc,
                            vec![
                                warninstr,
                                cls_instrs,
                                emit_pos(alloc, outer_pos),
                                setup_instrs,
                                make_final(
                                    base_stack_size + cls_stack_size + elem_stack_size,
                                    memberkey,
                                ),
                            ],
                        ),
                        None,
                    ));
                    let store = InstrSeq::gather(
                        alloc,
                        vec![
                            store,
                            instr::setm(alloc, 0, MemberKey::EL(local, ReadOnlyOp::Any)),
                            instr::popc(alloc),
                        ],
                    );
                    ArrayGetInstr::Inout {
                        load: base_instrs,
                        store,
                    }
                }
            };
            Ok((instr, querym_n_unpopped))
        }
    }
}

/// True for `<cid>::class` accesses where `<cid>` is NOT self/parent/static
/// (those special names need runtime resolution and are handled elsewhere).
fn is_special_class_constant_accessed_with_class_id(cname: &ast::ClassId_, id: &str) -> bool {
    let is_self_parent_or_static = match cname {
        ast::ClassId_::CIexpr(ast::Expr(_, _, ast::Expr_::Id(id))) => {
            string_utils::is_self(&id.1)
                || string_utils::is_parent(&id.1)
                || string_utils::is_static(&id.1)
        }
        _ => false,
    };
    string_utils::is_class(id) && !is_self_parent_or_static
}

/// Emit the index expression of a subscript, returning the instructions and
/// the number of stack slots they leave behind. Indices that can be encoded
/// directly into the member key (int/string literals, plain locals, `::class`
/// constants) emit nothing here.
fn emit_elem<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    elem: Option<&ast::Expr>,
    local_temp_kind: Option<StoredValueKind>,
    null_coalesce_assignment: bool,
) -> Result<(InstrSeq<'arena>, StackIndex)> {
    let alloc = env.arena;
    Ok(match elem {
        None => (instr::empty(alloc), 0),
        Some(expr) if expr.2.is_int() || expr.2.is_string() => (instr::empty(alloc), 0),
        Some(expr) => match &expr.2 {
            ast::Expr_::Lvar(x) if !is_local_this(env, &x.1) => {
                if local_temp_kind.is_some() {
                    // Quiet read: the local is being snapshotted into a temp
                    // for inout handling.
                    (
                        instr::cgetquietl(
                            alloc,
                            get_local(e, env, &x.0, local_id::get_name(&x.1))?,
                        ),
                        0,
                    )
                } else if null_coalesce_assignment {
                    (
                        instr::cgetl(alloc, get_local(e, env, &x.0, local_id::get_name(&x.1))?),
                        1,
                    )
                } else {
                    (instr::empty(alloc), 0)
                }
            }
            ast::Expr_::ClassConst(x)
                if is_special_class_constant_accessed_with_class_id(&(x.0).2, &(x.1).1) =>
            {
                (instr::empty(alloc), 0)
            }
            _ => (emit_expr(e, env, expr)?, 1),
        },
    })
}

/// Compute the MemberKey for a subscript index, plus any warning
/// instructions. (Definition continues past this chunk.)
fn get_elem_member_key<'a, 'arena, 'decl, D: DeclProvider<'decl>>(
    e: &mut Emitter<'arena, 'decl, D>,
    env: &Env<'a, 'arena>,
    stack_index: StackIndex,
    elem: Option<&ast::Expr>,
    null_coalesce_assignment: bool,
) -> Result<(MemberKey<'arena>, InstrSeq<'arena>)> {
    use ast::ClassId_ as CI_;
    use ast::Expr as E;
    use ast::Expr_ as E_;
    let alloc = env.arena;
    match elem {
        // ELement missing (so it's array append)
        None => Ok((MemberKey::W, instr::empty(alloc))),
        Some(elem_expr) => match &elem_expr.2 {
            // Special case for local
            E_::Lvar(x) if !is_local_this(env, &x.1) => Ok((
                {
                    if null_coalesce_assignment {
                        MemberKey::EC(stack_index, ReadOnlyOp::Any)
                    } else {
                        MemberKey::EL(
                            get_local(e, env, &x.0, local_id::get_name(&x.1))?,
                            ReadOnlyOp::Any,
                        )
                    }
                },
                instr::empty(alloc),
            )),
            // Special case for literal integer
            E_::Int(s) => match ast_constant_folder::expr_to_typed_value(alloc, e, elem_expr) {
                Ok(TypedValue::Int(i)) => {
                    Ok((MemberKey::EI(i, ReadOnlyOp::Any), instr::empty(alloc)))
                }
                _ => Err(Unrecoverable(format!("{} is not a valid integer index", s))),
            },
            // Special case for literal string
            E_::String(s) => {
                // FIXME: This is not safe--string literals are binary strings.
                // There's no guarantee that they're valid UTF-8.
let s = unsafe { std::str::from_utf8_unchecked(s.as_slice()) }; let s = bumpalo::collections::String::from_str_in(s, alloc).into_bump_str(); Ok(( MemberKey::ET(Str::from(s), ReadOnlyOp::Any), instr::empty(alloc), )) } // Special case for class name E_::ClassConst(x) if is_special_class_constant_accessed_with_class_id(&(x.0).2, &(x.1).1) => { let cname = match (&(x.0).2, env.scope.get_class()) { (CI_::CIself, Some(cd)) => string_utils::strip_global_ns(cd.get_name_str()), (CI_::CIexpr(E(_, _, E_::Id(id))), _) => string_utils::strip_global_ns(&id.1), (CI_::CI(id), _) => string_utils::strip_global_ns(&id.1), _ => { return Err(Unrecoverable( "Unreachable due to is_special_class_constant_accessed_with_class_id" .into(), )); } }; let fq_id = class::ClassType::<'arena>::from_ast_name(alloc, &cname).to_raw_string(); if e.options().emit_class_pointers() > 0 { Ok(( MemberKey::ET(Str::from(fq_id), ReadOnlyOp::Any), instr::raise_class_string_conversion_warning(alloc), )) } else { Ok(( MemberKey::ET(Str::from(fq_id), ReadOnlyOp::Any), instr::empty(alloc), )) } } _ => { // General case Ok(( MemberKey::EC(stack_index, ReadOnlyOp::Any), instr::empty(alloc), )) } }, } } fn emit_store_for_simple_base<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, elem_stack_size: isize, base: &ast::Expr, local: Local<'arena>, is_base: bool, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let (base_expr_instrs_begin, base_expr_instrs_end, base_setup_instrs, _, _) = emit_base( e, env, base, MemberOpMode::Define, false, BareThisOp::Notice, false, elem_stack_size, 0, )?; let memberkey = MemberKey::EL(local, ReadOnlyOp::Any); Ok(InstrSeq::gather( alloc, vec![ base_expr_instrs_begin, base_expr_instrs_end, emit_pos(alloc, pos), base_setup_instrs, if is_base { instr::dim(alloc, MemberOpMode::Define, memberkey) } else { instr::setm(alloc, 0, memberkey) }, ], )) } fn get_querym_op_mode(query_op: &QueryOp) -> MemberOpMode { match query_op 
{ QueryOp::InOut => MemberOpMode::InOut, QueryOp::CGet => MemberOpMode::Warn, _ => MemberOpMode::ModeNone, } } fn emit_class_get<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, query_op: QueryOp, cid: &ast::ClassId, prop: &ast::ClassGetExpr, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let cexpr = ClassExpr::class_id_to_class_expr(e, false, false, &env.scope, cid); Ok(InstrSeq::gather( alloc, vec![ InstrSeq::from((alloc, emit_class_expr(e, env, cexpr, prop)?)), match query_op { QueryOp::CGet => instr::cgets(alloc, ReadOnlyOp::Any), QueryOp::Isset => instr::issets(alloc), QueryOp::CGetQuiet => { return Err(Unrecoverable("emit_class_get: CGetQuiet".into())); } QueryOp::InOut => return Err(Unrecoverable("emit_class_get: InOut".into())), }, ], )) } fn emit_conditional_expr<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, etest: &ast::Expr, etrue: &Option<ast::Expr>, efalse: &ast::Expr, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; Ok(match etrue.as_ref() { Some(etrue) => { let false_label = e.label_gen_mut().next_regular(); let end_label = e.label_gen_mut().next_regular(); let r = emit_jmpz(e, env, etest, false_label)?; // only emit false branch if false_label is used let false_branch = if r.is_label_used { InstrSeq::gather( alloc, vec![instr::label(alloc, false_label), emit_expr(e, env, efalse)?], ) } else { instr::empty(alloc) }; // only emit true branch if there is fallthrough from condition let true_branch = if r.is_fallthrough { InstrSeq::gather( alloc, vec![ emit_expr(e, env, etrue)?, emit_pos(alloc, pos), instr::jmp(alloc, end_label), ], ) } else { instr::empty(alloc) }; InstrSeq::gather( alloc, vec![ r.instrs, true_branch, false_branch, // end_label is used to jump out of true branch so they should be emitted together if r.is_fallthrough { instr::label(alloc, end_label) } else { instr::empty(alloc) }, ], ) } None => { let 
end_label = e.label_gen_mut().next_regular(); let efalse_instr = emit_expr(e, env, efalse)?; let etest_instr = emit_expr(e, env, etest)?; InstrSeq::gather( alloc, vec![ etest_instr, instr::dup(alloc), instr::jmpnz(alloc, end_label), instr::popc(alloc), efalse_instr, instr::label(alloc, end_label), ], ) } }) } fn emit_local<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, notice: BareThisOp, lid: &aast_defs::Lid, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let ast::Lid(pos, id) = lid; let id_name = local_id::get_name(id); if superglobals::is_superglobal(id_name) { Ok(InstrSeq::gather( alloc, vec![ instr::string(alloc, string_utils::locals::strip_dollar(id_name)), emit_pos(alloc, pos), instr::cgetg(alloc), ], )) } else { let local = get_local(e, env, pos, id_name)?; Ok( if is_local_this(env, id) && !env.flags.contains(EnvFlags::NEEDS_LOCAL_THIS) { emit_pos_then(alloc, pos, instr::barethis(alloc, notice)) } else { instr::cgetl(alloc, local) }, ) } } fn emit_class_const<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, cid: &ast::ClassId, id: &ast_defs::Pstring, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let mut cexpr = ClassExpr::class_id_to_class_expr(e, false, true, &env.scope, cid); if let ClassExpr::Id(ast_defs::Id(_, name)) = &cexpr { if let Some(reified_var_cexpr) = get_reified_var_cexpr::<D>(env, pos, &name)? 
{ cexpr = reified_var_cexpr; } } match cexpr { ClassExpr::Id(ast_defs::Id(pos, name)) => { let cid = class::ClassType::from_ast_name_and_mangle(alloc, &name); let cname = cid.to_raw_string(); Ok(if string_utils::is_class(&id.1) { if e.options().emit_class_pointers() == 1 { emit_pos_then(alloc, &pos, instr::resolveclass(alloc, cid)) } else if e.options().emit_class_pointers() == 2 { emit_pos_then(alloc, &pos, instr::lazyclass(alloc, cid)) } else { emit_pos_then(alloc, &pos, instr::string(alloc, cname)) } } else { emit_symbol_refs::add_class(alloc, e, cid.clone()); // TODO(hrust) enabel `let const_id = r#const::ConstType::from_ast_name(&id.1);`, // `from_ast_name` should be able to accpet Cow<str> let const_id: r#const::ConstType = (alloc, string_utils::strip_global_ns(&id.1)).into(); emit_pos_then(alloc, &pos, instr::clscnsd(alloc, const_id, cid)) }) } _ => { let load_const = if string_utils::is_class(&id.1) { if e.options().emit_class_pointers() == 2 { instr::lazyclassfromclass(alloc) } else { instr::classname(alloc) } } else { // TODO(hrust) enabel `let const_id = r#const::ConstType::from_ast_name(&id.1);`, // `from_ast_name` should be able to accpet Cow<str> let const_id: r#const::ConstType = (alloc, string_utils::strip_global_ns(&id.1)).into(); instr::clscns(alloc, const_id) }; if string_utils::is_class(&id.1) && e.options().emit_class_pointers() == 1 { emit_load_class_ref(e, env, pos, cexpr) } else { Ok(InstrSeq::gather( alloc, vec![emit_load_class_ref(e, env, pos, cexpr)?, load_const], )) } } } } fn emit_unop<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, (uop, expr): &(ast_defs::Uop, ast::Expr), ) -> Result<InstrSeq<'arena>> { use ast_defs::Uop as U; let alloc = env.arena; match uop { U::Utild | U::Unot => Ok(InstrSeq::gather( alloc, vec![ emit_expr(e, env, expr)?, emit_pos_then(alloc, pos, from_unop(alloc, e.options(), uop)?), ], )), U::Uplus | U::Uminus => Ok(InstrSeq::gather( alloc, vec![ 
emit_pos(alloc, pos), instr::int(alloc, 0), emit_expr(e, env, expr)?, emit_pos_then(alloc, pos, from_unop(alloc, e.options(), uop)?), ], )), U::Uincr | U::Udecr | U::Upincr | U::Updecr => emit_lval_op( e, env, pos, LValOp::IncDec(unop_to_incdec_op(e.options(), uop)?), expr, None, false, ), U::Usilence => e.local_scope(|e| { let temp_local = e.local_gen_mut().get_unnamed(); Ok(InstrSeq::gather( alloc, vec![ emit_pos(alloc, pos), instr::silence_start(alloc, temp_local), { let try_instrs = emit_expr(e, env, expr)?; let catch_instrs = InstrSeq::gather( alloc, vec![emit_pos(alloc, pos), instr::silence_end(alloc, temp_local)], ); InstrSeq::create_try_catch( alloc, e.label_gen_mut(), None, false, /* skip_throw */ try_instrs, catch_instrs, ) }, emit_pos(alloc, pos), instr::silence_end(alloc, temp_local), ], )) }), } } fn unop_to_incdec_op(opts: &Options, op: &ast_defs::Uop) -> Result<IncdecOp> { let if_check_or = |op1, op2| Ok(if opts.check_int_overflow() { op1 } else { op2 }); use {ast_defs::Uop as U, IncdecOp as I}; match op { U::Uincr => if_check_or(I::PreIncO, I::PreInc), U::Udecr => if_check_or(I::PreDecO, I::PreDec), U::Upincr => if_check_or(I::PostIncO, I::PostInc), U::Updecr => if_check_or(I::PostDecO, I::PostDec), _ => Err(Unrecoverable("invalid incdec op".into())), } } fn from_unop<'arena>( alloc: &'arena bumpalo::Bump, opts: &Options, op: &ast_defs::Uop, ) -> Result<InstrSeq<'arena>> { use ast_defs::Uop as U; Ok(match op { U::Utild => instr::bitnot(alloc), U::Unot => instr::not(alloc), U::Uplus => { if opts.check_int_overflow() { instr::addo(alloc) } else { instr::add(alloc) } } U::Uminus => { if opts.check_int_overflow() { instr::subo(alloc) } else { instr::sub(alloc) } } _ => { return Err(Unrecoverable( "this unary operation cannot be translated".into(), )); } }) } fn binop_to_eqop(opts: &Options, op: &ast_defs::Bop) -> Option<EqOp> { use {ast_defs::Bop as B, EqOp::*}; match op { B::Plus => Some(if opts.check_int_overflow() { PlusEqualO } else { PlusEqual }), 
B::Minus => Some(if opts.check_int_overflow() { MinusEqualO } else { MinusEqual }), B::Star => Some(if opts.check_int_overflow() { MulEqualO } else { MulEqual }), B::Slash => Some(DivEqual), B::Starstar => Some(PowEqual), B::Amp => Some(AndEqual), B::Bar => Some(OrEqual), B::Xor => Some(XorEqual), B::Ltlt => Some(SlEqual), B::Gtgt => Some(SrEqual), B::Percent => Some(ModEqual), B::Dot => Some(ConcatEqual), _ => None, } } #[allow(clippy::needless_lifetimes)] fn optimize_null_checks<'arena, 'decl, D: DeclProvider<'decl>>( e: &Emitter<'arena, 'decl, D>, ) -> bool { e.options() .hack_compiler_flags .contains(CompilerFlags::OPTIMIZE_NULL_CHECKS) } fn from_binop<'arena>( alloc: &'arena bumpalo::Bump, opts: &Options, op: &ast_defs::Bop, ) -> Result<InstrSeq<'arena>> { use ast_defs::Bop as B; Ok(match op { B::Plus => { if opts.check_int_overflow() { instr::addo(alloc) } else { instr::add(alloc) } } B::Minus => { if opts.check_int_overflow() { instr::subo(alloc) } else { instr::sub(alloc) } } B::Star => { if opts.check_int_overflow() { instr::mulo(alloc) } else { instr::mul(alloc) } } B::Slash => instr::div(alloc), B::Eqeq => instr::eq(alloc), B::Eqeqeq => instr::same(alloc), B::Starstar => instr::pow(alloc), B::Diff => instr::neq(alloc), B::Diff2 => instr::nsame(alloc), B::Lt => instr::lt(alloc), B::Lte => instr::lte(alloc), B::Gt => instr::gt(alloc), B::Gte => instr::gte(alloc), B::Dot => instr::concat(alloc), B::Amp => instr::bitand(alloc), B::Bar => instr::bitor(alloc), B::Ltlt => instr::shl(alloc), B::Gtgt => instr::shr(alloc), B::Cmp => instr::cmp(alloc), B::Percent => instr::mod_(alloc), B::Xor => instr::bitxor(alloc), B::Eq(_) => return Err(Unrecoverable("assignment is emitted differently".into())), B::QuestionQuestion => { return Err(Unrecoverable( "null coalescence is emitted differently".into(), )); } B::Barbar | B::Ampamp => { return Err(Unrecoverable( "short-circuiting operator cannot be generated as a simple binop".into(), )); } }) } fn emit_first_expr<'a, 
'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, expr: &ast::Expr, ) -> Result<(InstrSeq<'arena>, bool)> { let alloc = env.arena; Ok(match &expr.2 { ast::Expr_::Lvar(l) if !((is_local_this(env, &l.1) && !env.flags.contains(EnvFlags::NEEDS_LOCAL_THIS)) || superglobals::is_any_global(local_id::get_name(&l.1))) => { ( instr::cgetl2(alloc, get_local(e, env, &l.0, local_id::get_name(&l.1))?), true, ) } _ => (emit_expr(e, env, expr)?, false), }) } pub fn emit_two_exprs<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, outer_pos: &Pos, e1: &ast::Expr, e2: &ast::Expr, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let (instrs1, is_under_top) = emit_first_expr(e, env, e1)?; let instrs2 = emit_expr(e, env, e2)?; let instrs2_is_var = e2.2.is_lvar(); Ok(InstrSeq::gather( alloc, if is_under_top { if instrs2_is_var { vec![emit_pos(alloc, outer_pos), instrs2, instrs1] } else { vec![instrs2, emit_pos(alloc, outer_pos), instrs1] } } else if instrs2_is_var { vec![instrs1, emit_pos(alloc, outer_pos), instrs2] } else { vec![instrs1, instrs2, emit_pos(alloc, outer_pos)] }, )) } fn emit_quiet_expr<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, expr: &ast::Expr, null_coalesce_assignment: bool, ) -> Result<(InstrSeq<'arena>, Option<NumParams>)> { let alloc = env.arena; match &expr.2 { ast::Expr_::Lvar(lid) if !is_local_this(env, &lid.1) => Ok(( instr::cgetquietl(alloc, get_local(e, env, pos, local_id::get_name(&lid.1))?), None, )), ast::Expr_::ArrayGet(x) => emit_array_get( e, env, pos, None, QueryOp::CGetQuiet, &x.0, x.1.as_ref(), false, null_coalesce_assignment, ), ast::Expr_::ObjGet(x) => { if x.as_ref().3 { Ok((emit_expr(e, env, expr)?, None)) } else { emit_obj_get( e, env, pos, QueryOp::CGetQuiet, &x.0, &x.1, &x.2, null_coalesce_assignment, ) } } _ => Ok((emit_expr(e, env, expr)?, None)), } } fn 
emit_null_coalesce_assignment<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, e1: &ast::Expr, e2: &ast::Expr, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let end_label = e.label_gen_mut().next_regular(); let do_set_label = e.label_gen_mut().next_regular(); let l_nonnull = e.local_gen_mut().get_unnamed(); let (quiet_instr, querym_n_unpopped) = emit_quiet_expr(e, env, pos, e1, true)?; let emit_popc_n = |n_unpopped| match n_unpopped { Some(n) => InstrSeq::gather( alloc, iter::repeat_with(|| instr::popc(alloc)) .take(n) .collect::<Vec<_>>(), ), None => instr::empty(alloc), }; Ok(InstrSeq::gather( alloc, vec![ quiet_instr, instr::dup(alloc), instr::istypec(alloc, IstypeOp::OpNull), instr::jmpnz(alloc, do_set_label), instr::popl(alloc, l_nonnull), emit_popc_n(querym_n_unpopped), instr::pushl(alloc, l_nonnull), instr::jmp(alloc, end_label), instr::label(alloc, do_set_label), instr::popc(alloc), emit_lval_op(e, env, pos, LValOp::Set, e1, Some(e2), true)?, instr::label(alloc, end_label), ], )) } fn emit_short_circuit_op<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, expr: &ast::Expr, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let its_true = e.label_gen_mut().next_regular(); let its_done = e.label_gen_mut().next_regular(); let jmp_instrs = emit_jmpnz(e, env, expr, its_true)?; Ok(if jmp_instrs.is_fallthrough { InstrSeq::gather( alloc, vec![ jmp_instrs.instrs, emit_pos(alloc, pos), instr::false_(alloc), instr::jmp(alloc, its_done), if jmp_instrs.is_label_used { InstrSeq::gather( alloc, vec![ instr::label(alloc, its_true), emit_pos(alloc, pos), instr::true_(alloc), ], ) } else { instr::empty(alloc) }, instr::label(alloc, its_done), ], ) } else { InstrSeq::gather( alloc, vec![ jmp_instrs.instrs, if jmp_instrs.is_label_used { InstrSeq::gather( alloc, vec![ instr::label(alloc, its_true), emit_pos(alloc, pos), instr::true_(alloc), 
], ) } else { instr::empty(alloc) }, ], ) }) } fn emit_binop<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, expr: &ast::Expr, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let (op, e1, e2) = expr.2.as_binop().unwrap(); use ast_defs::Bop as B; match op { B::Ampamp | B::Barbar => emit_short_circuit_op(e, env, pos, &expr), B::Eq(None) => emit_lval_op(e, env, pos, LValOp::Set, e1, Some(e2), false), B::Eq(Some(eop)) if eop.is_question_question() => { emit_null_coalesce_assignment(e, env, pos, e1, e2) } B::Eq(Some(eop)) => match binop_to_eqop(e.options(), eop) { None => Err(Unrecoverable("illegal eq op".into())), Some(op) => emit_lval_op(e, env, pos, LValOp::SetOp(op), e1, Some(e2), false), }, B::QuestionQuestion => { let end_label = e.label_gen_mut().next_regular(); let rhs = emit_expr(e, env, e2)?; Ok(InstrSeq::gather( alloc, vec![ emit_quiet_expr(e, env, pos, e1, false)?.0, instr::dup(alloc), instr::istypec(alloc, IstypeOp::OpNull), instr::not(alloc), instr::jmpnz(alloc, end_label), instr::popc(alloc), rhs, instr::label(alloc, end_label), ], )) } _ => { let default = |e: &mut Emitter<'arena, 'decl, D>| { Ok(InstrSeq::gather( alloc, vec![ emit_two_exprs(e, env, pos, e1, e2)?, from_binop(alloc, e.options(), op)?, ], )) }; if optimize_null_checks(e) { match op { B::Eqeqeq if e2.2.is_null() => emit_is_null(e, env, e1), B::Eqeqeq if e1.2.is_null() => emit_is_null(e, env, e2), B::Diff2 if e2.2.is_null() => Ok(InstrSeq::gather( alloc, vec![emit_is_null(e, env, e1)?, instr::not(alloc)], )), B::Diff2 if e1.2.is_null() => Ok(InstrSeq::gather( alloc, vec![emit_is_null(e, env, e2)?, instr::not(alloc)], )), _ => default(e), } } else { default(e) } } } } fn emit_pipe<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, (_, e1, e2): &(aast_defs::Lid, ast::Expr, ast::Expr), ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let lhs_instrs = emit_expr(e, env, 
e1)?; scope::with_unnamed_local(alloc, e, |alloc, e, local| { // TODO(hrust) avoid cloning env let mut pipe_env = env.clone(); pipe_env.with_pipe_var(local); let rhs_instrs = emit_expr(e, &pipe_env, e2)?; Ok(( InstrSeq::gather(alloc, vec![lhs_instrs, instr::popl(alloc, local)]), rhs_instrs, instr::unsetl(alloc, local), )) }) } fn emit_as<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, (expr, h, is_nullable): &(ast::Expr, aast_defs::Hint, bool), ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; e.local_scope(|e| { let arg_local = e.local_gen_mut().get_unnamed(); let type_struct_local = e.local_gen_mut().get_unnamed(); let (ts_instrs, is_static) = emit_reified_arg(e, env, pos, true, h)?; let then_label = e.label_gen_mut().next_regular(); let done_label = e.label_gen_mut().next_regular(); let main_block = |ts_instrs, resolve| { InstrSeq::gather( alloc, vec![ ts_instrs, instr::setl(alloc, type_struct_local), match resolve { TypestructResolveOp::Resolve => instr::is_type_structc_resolve(alloc), TypestructResolveOp::DontResolve => { instr::is_type_structc_dontresolve(alloc) } }, instr::jmpnz(alloc, then_label), if *is_nullable { InstrSeq::gather( alloc, vec![instr::null(alloc), instr::jmp(alloc, done_label)], ) } else { InstrSeq::gather( alloc, vec![ instr::pushl(alloc, arg_local), instr::pushl(alloc, type_struct_local), instr::throwastypestructexception(alloc), ], ) }, ], ) }; let i2 = if is_static { main_block( get_type_structure_for_hint(alloc, e, &[], &IndexSet::new(), h)?, TypestructResolveOp::Resolve, ) } else { main_block(ts_instrs, TypestructResolveOp::DontResolve) }; let i1 = emit_expr(e, env, expr)?; Ok(InstrSeq::gather( alloc, vec![ i1, instr::setl(alloc, arg_local), i2, instr::label(alloc, then_label), instr::pushl(alloc, arg_local), instr::unsetl(alloc, type_struct_local), instr::label(alloc, done_label), ], )) }) } fn emit_cast<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut 
Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, hint: &aast_defs::Hint_, expr: &ast::Expr, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; use aast_defs::Hint_ as H_; let op = match hint { H_::Happly(ast_defs::Id(_, id), hints) if hints.is_empty() => { let id = string_utils::strip_ns(id); match string_utils::strip_hh_ns(&id).as_ref() { typehints::INT => instr::cast_int(alloc), typehints::BOOL => instr::cast_bool(alloc), typehints::STRING => instr::cast_string(alloc), typehints::FLOAT => instr::cast_double(alloc), _ => { return Err(emit_fatal::raise_fatal_parse( pos, format!("Invalid cast type: {}", id), )); } } } _ => return Err(emit_fatal::raise_fatal_parse(pos, "Invalid cast type")), }; Ok(InstrSeq::gather( alloc, vec![emit_expr(e, env, expr)?, emit_pos(alloc, pos), op], )) } pub fn emit_unset_expr<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, expr: &ast::Expr, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; emit_lval_op_nonlist( e, env, &expr.1, LValOp::Unset, expr, instr::empty(alloc), 0, false, ) } pub fn emit_set_range_expr<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &mut Env<'a, 'arena>, pos: &Pos, name: &str, kind: Setrange, args: &[&ast::Expr], ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; let raise_fatal = |msg: &str| { Err(emit_fatal::raise_fatal_parse( pos, format!("{} {}", name, msg), )) }; let (base, offset, src, args) = if args.len() >= 3 { (&args[0], &args[1], &args[2], &args[3..]) } else { return raise_fatal("expects at least 3 arguments"); }; let count_instrs = match (args, kind.vec) { ([c], true) => emit_expr(e, env, c)?, ([], _) => instr::int(alloc, -1), (_, false) => return raise_fatal("expects no more than 3 arguments"), (_, true) => return raise_fatal("expects no more than 4 arguments"), }; let (base_expr, cls_expr, base_setup, base_stack, cls_stack) = emit_base( e, env, base, MemberOpMode::Define, false, /* 
is_object */ BareThisOp::Notice, false, /*null_coalesce_assignment*/ 3, /* base_offset */ 3, /* rhs_stack_size */ )?; Ok(InstrSeq::gather( alloc, vec![ base_expr, cls_expr, emit_expr(e, env, offset)?, emit_expr(e, env, src)?, count_instrs, base_setup, instr::instr( alloc, Instruct::IFinal(InstructFinal::SetRangeM( (base_stack + cls_stack) .try_into() .expect("StackIndex overflow"), kind.size.try_into().expect("Setrange size overflow"), kind.op, )), ), ], )) } pub fn is_reified_tparam<'a, 'arena>( env: &Env<'a, 'arena>, is_fun: bool, name: &str, ) -> Option<(usize, bool)> { let is = |tparams: &[ast::Tparam]| { let is_soft = |ual: &Vec<ast::UserAttribute>| { ual.iter().any(|ua| user_attributes::is_soft(&ua.name.1)) }; use ast::ReifyKind::*; tparams.iter().enumerate().find_map(|(i, tp)| { if (tp.reified == Reified || tp.reified == SoftReified) && tp.name.1 == name { Some((i, is_soft(&tp.user_attributes))) } else { None } }) }; if is_fun { is(env.scope.get_fun_tparams()) } else { is(&env.scope.get_class_tparams()[..]) } } /// Emit code for a base expression `expr` that forms part of /// an element access `expr[elem]` or field access `expr->fld`. /// The instructions are divided into three sections: /// 1. base and element/property expression instructions: /// push non-trivial base and key values on the stack /// 2. class instructions: emitted when the base is a static property access. /// A sequence of instructions that pushes the property and the class on the /// stack to be consumed by a BaseSC. (Foo::$bar) /// 3. base selector instructions: a sequence of Base/Dim instructions that /// actually constructs the base address from "member keys" that are inlined /// in the instructions, or pulled from the key values that /// were pushed on the stack in section 1. /// 4. (constructed by the caller) a final accessor e.g. QueryM or setter /// e.g. 
SetOpM instruction that has the final key inlined in the /// instruction, or pulled from the key values that were pushed on the /// stack in section 1. /// /// The function returns a 5-tuple: /// (base_instrs, cls_instrs, base_setup_instrs, base_stack_size, cls_stack_size) /// where base_instrs is section 1 above, cls_instrs is section 2, base_setup_instrs /// is section 3, stack_size is the number of values pushed on the stack by /// section 1, and cls_stack_size is the number of values pushed on the stack by /// section 2. /// /// For example, the r-value expression $arr[3][$ix+2] /// will compile to /// # Section 1, pushing the value of $ix+2 on the stack /// Int 2 /// CGetL2 $ix /// AddO /// # Section 2, constructing the base address of $arr[3] /// BaseL $arr Warn /// Dim Warn EI:3 /// # Section 3, indexing the array using the value at stack position 0 (EC:0) /// QueryM 1 CGet EC:0 /// fn emit_base<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, expr: &ast::Expr, mode: MemberOpMode, is_object: bool, notice: BareThisOp, null_coalesce_assignment: bool, base_offset: StackIndex, rhs_stack_size: StackIndex, ) -> Result<( InstrSeq<'arena>, InstrSeq<'arena>, InstrSeq<'arena>, StackIndex, StackIndex, )> { let result = emit_base_( e, env, expr, mode, is_object, notice, null_coalesce_assignment, base_offset, rhs_stack_size, None, )?; match result { ArrayGetBase::Regular(i) => Ok(( i.base_instrs, i.cls_instrs, i.setup_instrs, i.base_stack_size as isize, i.cls_stack_size as isize, )), ArrayGetBase::Inout { .. 
} => Err(unrecoverable("unexpected input")), } } fn is_trivial(env: &Env, is_base: bool, expr: &ast::Expr) -> bool { use ast::Expr_ as E_; match &expr.2 { E_::Int(_) | E_::String(_) => true, E_::Lvar(x) => !is_local_this(env, &x.1) || env.flags.contains(EnvFlags::NEEDS_LOCAL_THIS), E_::ArrayGet(_) if !is_base => false, E_::ArrayGet(x) => { is_trivial(env, is_base, &x.0) && (x.1) .as_ref() .map_or(true, |e| is_trivial(env, is_base, &e)) } _ => false, } } fn get_local_temp_kind<'a, 'arena>( env: &Env<'a, 'arena>, is_base: bool, inout_param_info: Option<(usize, &inout_locals::AliasInfoMap)>, expr: Option<&ast::Expr>, ) -> Option<StoredValueKind> { match (expr, inout_param_info) { (_, None) => None, (Some(ast::Expr(_, _, ast::Expr_::Lvar(id))), Some((i, aliases))) if inout_locals::should_save_local_value(id.name(), i, aliases) => { Some(StoredValueKind::Local) } (Some(e), _) => { if is_trivial(env, is_base, e) { None } else { Some(StoredValueKind::Expr) } } (None, _) => None, } } fn emit_base_<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, expr: &ast::Expr, mode: MemberOpMode, is_object: bool, notice: BareThisOp, null_coalesce_assignment: bool, base_offset: StackIndex, rhs_stack_size: StackIndex, inout_param_info: Option<(usize, &inout_locals::AliasInfoMap)>, ) -> Result<ArrayGetBase<'arena>> { let alloc = env.arena; let pos = &expr.1; let expr_ = &expr.2; let base_mode = if mode == MemberOpMode::InOut { MemberOpMode::Warn } else { mode }; let local_temp_kind = get_local_temp_kind(env, true, inout_param_info, Some(expr)); let emit_default = | e: &mut Emitter<'arena, 'decl, D>, base_instrs, cls_instrs, setup_instrs, base_stack_size, cls_stack_size, | { match local_temp_kind { Some(local_temp) => { let local = e.local_gen_mut().get_unnamed(); ArrayGetBase::Inout { load: ArrayGetBaseData { base_instrs: vec![(base_instrs, Some((local, local_temp)))], cls_instrs, setup_instrs, base_stack_size, cls_stack_size, }, store: 
instr::basel(alloc, local, MemberOpMode::Define), } } _ => ArrayGetBase::Regular(ArrayGetBaseData { base_instrs, cls_instrs, setup_instrs, base_stack_size, cls_stack_size, }), } }; let emit_expr_default = |e: &mut Emitter<'arena, 'decl, D>, env, expr: &ast::Expr| -> Result<ArrayGetBase> { let base_expr_instrs = emit_expr(e, env, expr)?; Ok(emit_default( e, base_expr_instrs, instr::empty(alloc), emit_pos_then(alloc, pos, instr::basec(alloc, base_offset, base_mode)), 1, 0, )) }; use ast::Expr_ as E_; match expr_ { E_::Lvar(x) if superglobals::is_superglobal(&(x.1).1) => { let base_instrs = emit_pos_then( alloc, &x.0, instr::string(alloc, string_utils::locals::strip_dollar(&(x.1).1)), ); Ok(emit_default( e, base_instrs, instr::empty(alloc), instr::basegc(alloc, base_offset, base_mode), 1, 0, )) } E_::Lvar(x) if is_object && (x.1).1 == special_idents::THIS => { let base_instrs = emit_pos_then(alloc, &x.0, instr::checkthis(alloc)); Ok(emit_default( e, base_instrs, instr::empty(alloc), instr::baseh(alloc), 0, 0, )) } E_::Lvar(x) if !is_local_this(env, &x.1) || env.flags.contains(EnvFlags::NEEDS_LOCAL_THIS) => { let v = get_local(e, env, &x.0, &(x.1).1)?; let base_instr = if local_temp_kind.is_some() { instr::cgetquietl(alloc, v) } else { instr::empty(alloc) }; Ok(emit_default( e, base_instr, instr::empty(alloc), instr::basel(alloc, v, base_mode), 0, 0, )) } E_::Lvar(lid) => { let local = emit_local(e, env, notice, lid)?; Ok(emit_default( e, local, instr::empty(alloc), instr::basec(alloc, base_offset, base_mode), 1, 0, )) } E_::ArrayGet(x) => match (&(x.0).1, x.1.as_ref()) { // $a[] can not be used as the base of an array get unless as an lval (_, None) if !env .flags .contains(hhbc_by_ref_env::Flags::ALLOWS_ARRAY_APPEND) => { Err(emit_fatal::raise_fatal_runtime( pos, "Can't use [] for reading", )) } // base is in turn array_get - do a specific handling for inout params // if necessary (_, opt_elem_expr) => { let base_expr = &x.0; let local_temp_kind = 
get_local_temp_kind(env, false, inout_param_info, opt_elem_expr); let (elem_instrs, elem_stack_size) = emit_elem( e, env, opt_elem_expr, local_temp_kind, null_coalesce_assignment, )?; let base_result = emit_base_( e, env, base_expr, mode, false, notice, null_coalesce_assignment, base_offset + elem_stack_size, rhs_stack_size, inout_param_info, )?; let cls_stack_size = match &base_result { ArrayGetBase::Regular(base) => base.cls_stack_size, ArrayGetBase::Inout { load, .. } => load.cls_stack_size, }; let (mk, warninstr) = get_elem_member_key( e, env, base_offset + cls_stack_size, opt_elem_expr, null_coalesce_assignment, )?; let make_setup_instrs = |base_setup_instrs: InstrSeq<'arena>| { InstrSeq::gather( alloc, vec![warninstr, base_setup_instrs, instr::dim(alloc, mode, mk)], ) }; Ok(match (base_result, local_temp_kind) { // both base and index don't use temps - fallback to default handler (ArrayGetBase::Regular(base), None) => emit_default( e, InstrSeq::gather(alloc, vec![base.base_instrs, elem_instrs]), base.cls_instrs, make_setup_instrs(base.setup_instrs), base.base_stack_size + elem_stack_size, base.cls_stack_size, ), // base does not need temps but index does (ArrayGetBase::Regular(base), Some(local_temp)) => { let local = e.local_gen_mut().get_unnamed(); let base_instrs = InstrSeq::gather(alloc, vec![base.base_instrs, elem_instrs]); ArrayGetBase::Inout { load: ArrayGetBaseData { // store result of instr_begin to temp base_instrs: vec![(base_instrs, Some((local, local_temp)))], cls_instrs: base.cls_instrs, setup_instrs: make_setup_instrs(base.setup_instrs), base_stack_size: base.base_stack_size + elem_stack_size, cls_stack_size: base.cls_stack_size, }, store: emit_store_for_simple_base( e, env, pos, elem_stack_size, base_expr, local, true, )?, } } // base needs temps, index - does not ( ArrayGetBase::Inout { load: ArrayGetBaseData { mut base_instrs, cls_instrs, setup_instrs, base_stack_size, cls_stack_size, }, store, }, None, ) => { base_instrs.push((elem_instrs, 
None)); ArrayGetBase::Inout { load: ArrayGetBaseData { base_instrs, cls_instrs, setup_instrs: make_setup_instrs(setup_instrs), base_stack_size: base_stack_size + elem_stack_size, cls_stack_size, }, store: InstrSeq::gather( alloc, vec![store, instr::dim(alloc, MemberOpMode::Define, mk)], ), } } // both base and index needs locals ( ArrayGetBase::Inout { load: ArrayGetBaseData { mut base_instrs, cls_instrs, setup_instrs, base_stack_size, cls_stack_size, }, store, }, Some(local_kind), ) => { let local = e.local_gen_mut().get_unnamed(); base_instrs.push((elem_instrs, Some((local, local_kind)))); ArrayGetBase::Inout { load: ArrayGetBaseData { base_instrs, cls_instrs, setup_instrs: make_setup_instrs(setup_instrs), base_stack_size: base_stack_size + elem_stack_size, cls_stack_size, }, store: InstrSeq::gather( alloc, vec![ store, instr::dim( alloc, MemberOpMode::Define, MemberKey::EL(local, ReadOnlyOp::Any), ), ], ), } } }) } }, E_::ObjGet(x) => { if x.as_ref().3 { emit_expr_default(e, env, expr) } else { let (base_expr, prop_expr, null_flavor, _) = &**x; Ok(match prop_expr.2.as_id() { Some(ast_defs::Id(_, s)) if string_utils::is_xhp(&s) => { let base_instrs = emit_xhp_obj_get(e, env, pos, base_expr, &s, null_flavor)?; emit_default( e, base_instrs, instr::empty(alloc), instr::basec(alloc, base_offset, base_mode), 1, 0, ) } _ => { let prop_stack_size = emit_prop_expr( e, env, null_flavor, 0, prop_expr, null_coalesce_assignment, )? 
.2; let ( base_expr_instrs_begin, base_expr_instrs_end, base_setup_instrs, base_stack_size, cls_stack_size, ) = emit_base( e, env, base_expr, mode, true, BareThisOp::Notice, null_coalesce_assignment, base_offset + prop_stack_size, rhs_stack_size, )?; let (mk, prop_instrs, _) = emit_prop_expr( e, env, null_flavor, base_offset + cls_stack_size, prop_expr, null_coalesce_assignment, )?; let total_stack_size = prop_stack_size + base_stack_size; let final_instr = instr::dim(alloc, mode, mk); emit_default( e, InstrSeq::gather(alloc, vec![base_expr_instrs_begin, prop_instrs]), base_expr_instrs_end, InstrSeq::gather(alloc, vec![base_setup_instrs, final_instr]), total_stack_size, cls_stack_size, ) } }) } } E_::ClassGet(x) => { if x.2 { emit_expr_default(e, env, expr) } else { let (cid, prop, _) = &**x; let cexpr = ClassExpr::class_id_to_class_expr(e, false, false, &env.scope, cid); let (cexpr_begin, cexpr_end) = emit_class_expr(e, env, cexpr, prop)?; Ok(emit_default( e, cexpr_begin, cexpr_end, instr::basesc( alloc, base_offset + 1, rhs_stack_size, base_mode, ReadOnlyOp::Any, ), 1, 1, )) } } _ => emit_expr_default(e, env, expr), } } pub fn emit_ignored_exprs<'a, 'arena, 'decl, D: DeclProvider<'decl>>( emitter: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, exprs: &[ast::Expr], ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; exprs .iter() .map(|e| emit_ignored_expr(emitter, env, pos, e)) .collect::<Result<Vec<_>>>() .map(|x| InstrSeq::gather(alloc, x)) } // TODO(hrust): change pos from &Pos to Option<&Pos>, since Pos::make_none() still allocate mem. 
pub fn emit_ignored_expr<'a, 'arena, 'decl, D: DeclProvider<'decl>>( emitter: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, expr: &ast::Expr, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; Ok(InstrSeq::gather( alloc, vec![ emit_expr(emitter, env, expr)?, emit_pos_then(alloc, pos, instr::popc(alloc)), ], )) } pub fn emit_lval_op<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, op: LValOp, expr1: &ast::Expr, expr2: Option<&ast::Expr>, null_coalesce_assignment: bool, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; match (op, &expr1.2, expr2) { (LValOp::Set, ast::Expr_::List(l), Some(expr2)) => { let instr_rhs = emit_expr(e, env, expr2)?; let has_elements = l.iter().any(|e| !e.2.is_omitted()); if !has_elements { Ok(instr_rhs) } else { scope::with_unnamed_local(alloc, e, |alloc, e, local| { let loc = if can_use_as_rhs_in_list_assignment(&expr2.2)? { Some(&local) } else { None }; let (instr_lhs, instr_assign) = emit_lval_op_list(e, env, pos, loc, &[], expr1, false)?; Ok(( InstrSeq::gather( alloc, vec![instr_lhs, instr_rhs, instr::popl(alloc, local)], ), instr_assign, instr::pushl(alloc, local), )) }) } } _ => e.local_scope(|e| { let (rhs_instrs, rhs_stack_size) = match expr2 { None => (instr::empty(alloc), 0), Some(ast::Expr(_, _, ast::Expr_::Yield(af))) => { let temp = e.local_gen_mut().get_unnamed(); ( InstrSeq::gather( alloc, vec![ emit_yield(e, env, pos, af)?, instr::setl(alloc, temp), instr::popc(alloc), instr::pushl(alloc, temp), ], ), 1, ) } Some(expr) => (emit_expr(e, env, expr)?, 1), }; emit_lval_op_nonlist( e, env, pos, op, expr1, rhs_instrs, rhs_stack_size, null_coalesce_assignment, ) }), } } fn can_use_as_rhs_in_list_assignment(expr: &ast::Expr_) -> Result<bool> { use aast::Expr_ as E_; Ok(match expr { E_::Call(c) if ((c.0).2) .as_id() .map_or(false, |id| id.1 == special_functions::ECHO) => { false } E_::ObjGet(o) if !o.as_ref().3 => true, E_::ClassGet(c) if 
!c.as_ref().2 => true, E_::Lvar(_) | E_::ArrayGet(_) | E_::Call(_) | E_::FunctionPointer(_) | E_::New(_) | E_::Record(_) | E_::Yield(_) | E_::Cast(_) | E_::Eif(_) | E_::Tuple(_) | E_::Varray(_) | E_::Darray(_) | E_::Collection(_) | E_::Clone(_) | E_::Unop(_) | E_::As(_) | E_::Await(_) | E_::ReadonlyExpr(_) | E_::ClassConst(_) => true, E_::Pipe(p) => can_use_as_rhs_in_list_assignment(&(p.2).2)?, E_::Binop(b) => { if let ast_defs::Bop::Eq(None) = &b.0 { if (b.1).2.is_list() { return can_use_as_rhs_in_list_assignment(&(b.2).2); } } b.0.is_plus() || b.0.is_question_question() || b.0.is_any_eq() } _ => false, }) } // Given a local $local and a list of integer array indices i_1, ..., i_n, // generate code to extract the value of $local[i_n]...[i_1]: // BaseL $local Warn // Dim Warn EI:i_n ... // Dim Warn EI:i_2 // QueryM 0 CGet EI:i_1 fn emit_array_get_fixed<'arena, 'decl, D: DeclProvider<'decl>>( alloc: &'arena bumpalo::Bump, last_usage: bool, local: Local<'arena>, indices: &[isize], ) -> InstrSeq<'arena> { let (base, stack_count) = if last_usage { ( InstrSeq::gather( alloc, vec![ instr::pushl(alloc, local), instr::basec(alloc, 0, MemberOpMode::Warn), ], ), 1, ) } else { (instr::basel(alloc, local, MemberOpMode::Warn), 0) }; let indices = InstrSeq::gather( alloc, indices .iter() .enumerate() .rev() .map(|(i, ix)| { let mk = MemberKey::EI(*ix as i64, ReadOnlyOp::Any); if i == 0 { instr::querym(alloc, stack_count, QueryOp::CGet, mk) } else { instr::dim(alloc, MemberOpMode::Warn, mk) } }) .collect(), ); InstrSeq::gather(alloc, vec![base, indices]) } // Generate code for each lvalue assignment in a list destructuring expression. // Lvalues are assigned right-to-left, regardless of the nesting structure. So // list($a, list($b, $c)) = $d // and list(list($a, $b), $c) = $d // will both assign to $c, $b and $a in that order. // Returns a pair of instructions: // 1. initialization part of the left hand side // 2. 
assignment // this is necessary to handle cases like: // list($a[$f()]) = b(); // here f() should be invoked before b() pub fn emit_lval_op_list<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, outer_pos: &Pos, local: Option<&Local<'arena>>, indices: &[isize], expr: &ast::Expr, last_usage: bool, ) -> Result<(InstrSeq<'arena>, InstrSeq<'arena>)> { use ast::Expr_ as E_; use hhbc_by_ref_options::Php7Flags; let alloc = env.arena; let is_ltr = e.options().php7_flags.contains(Php7Flags::LTR_ASSIGN); match &expr.2 { E_::List(exprs) => { let last_non_omitted = if last_usage { // last usage of the local will happen when processing last non-omitted // element in the list - find it if is_ltr { exprs.iter().rposition(|v| !v.2.is_omitted()) } else { // in right-to-left case result list will be reversed // so we need to find first non-omitted expression exprs.iter().rev().rposition(|v| !v.2.is_omitted()) } } else { None }; let (lhs_instrs, set_instrs): (Vec<InstrSeq<'arena>>, Vec<InstrSeq<'arena>>) = exprs .iter() .enumerate() .map(|(i, expr)| { let mut new_indices = vec![i as isize]; new_indices.extend_from_slice(indices); emit_lval_op_list( e, env, outer_pos, local, &new_indices[..], expr, last_non_omitted.map_or(false, |j| j == i), ) }) .collect::<Result<Vec<_>>>()? 
.into_iter() .unzip(); Ok(( InstrSeq::gather(alloc, lhs_instrs), InstrSeq::gather( alloc, if !is_ltr { set_instrs.into_iter().rev().collect() } else { set_instrs }, ), )) } E_::Omitted => Ok((instr::empty(alloc), instr::empty(alloc))), _ => { // Generate code to access the element from the array let access_instrs = match (local, indices) { (Some(loc), [_, ..]) => { emit_array_get_fixed::<D>(alloc, last_usage, loc.to_owned(), indices) } (Some(loc), []) => { if last_usage { instr::pushl(alloc, loc.to_owned()) } else { instr::cgetl(alloc, loc.to_owned()) } } (None, _) => instr::null(alloc), }; // Generate code to assign to the lvalue *) // Return pair: side effects to initialize lhs + assignment let (lhs_instrs, rhs_instrs, set_op) = emit_lval_op_nonlist_steps( e, env, outer_pos, LValOp::Set, expr, access_instrs, 1, false, )?; Ok(if is_ltr { ( instr::empty(alloc), InstrSeq::gather( alloc, vec![lhs_instrs, rhs_instrs, set_op, instr::popc(alloc)], ), ) } else { ( lhs_instrs, InstrSeq::gather( alloc, vec![instr::empty(alloc), rhs_instrs, set_op, instr::popc(alloc)], ), ) }) } } } pub fn emit_lval_op_nonlist<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, outer_pos: &Pos, op: LValOp, expr: &ast::Expr, rhs_instrs: InstrSeq<'arena>, rhs_stack_size: isize, null_coalesce_assignment: bool, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; emit_lval_op_nonlist_steps( e, env, outer_pos, op, expr, rhs_instrs, rhs_stack_size, null_coalesce_assignment, ) .map(|(lhs, rhs, setop)| InstrSeq::gather(alloc, vec![lhs, rhs, setop])) } pub fn emit_final_global_op<'arena, 'decl, D: DeclProvider<'decl>>( alloc: &'arena bumpalo::Bump, pos: &Pos, op: LValOp, ) -> InstrSeq<'arena> { use LValOp as L; match op { L::Set => emit_pos_then(alloc, pos, instr::setg(alloc)), L::SetOp(op) => instr::setopg(alloc, op), L::IncDec(op) => instr::incdecg(alloc, op), L::Unset => emit_pos_then(alloc, pos, instr::unsetg(alloc)), } } pub fn 
emit_final_local_op<'arena, 'decl, D: DeclProvider<'decl>>( alloc: &'arena bumpalo::Bump, pos: &Pos, op: LValOp, lid: Local<'arena>, ) -> InstrSeq<'arena> { use LValOp as L; emit_pos_then( alloc, pos, match op { L::Set => instr::setl(alloc, lid), L::SetOp(op) => instr::setopl(alloc, lid, op), L::IncDec(op) => instr::incdecl(alloc, lid, op), L::Unset => instr::unsetl(alloc, lid), }, ) } fn emit_final_member_op<'arena, 'decl, D: DeclProvider<'decl>>( alloc: &'arena bumpalo::Bump, stack_size: usize, op: LValOp, mk: MemberKey<'arena>, ) -> InstrSeq<'arena> { use LValOp as L; match op { L::Set => instr::setm(alloc, stack_size, mk), L::SetOp(op) => instr::setopm(alloc, stack_size, op, mk), L::IncDec(op) => instr::incdecm(alloc, stack_size, op, mk), L::Unset => instr::unsetm(alloc, stack_size, mk), } } fn emit_final_static_op<'arena, 'decl, D: DeclProvider<'decl>>( alloc: &'arena bumpalo::Bump, cid: &ast::ClassId, prop: &ast::ClassGetExpr, op: LValOp, ) -> Result<InstrSeq<'arena>> { use LValOp as L; Ok(match op { L::Set => instr::sets(alloc, ReadOnlyOp::Any), L::SetOp(op) => instr::setops(alloc, op), L::IncDec(op) => instr::incdecs(alloc, op), L::Unset => { let pos = match prop { ast::ClassGetExpr::CGstring((pos, _)) | ast::ClassGetExpr::CGexpr(ast::Expr(_, pos, _)) => pos, }; let cid = text_of_class_id(cid); let id = text_of_prop(prop); emit_fatal::emit_fatal_runtime( alloc, pos, format!( "Attempt to unset static property {}::{}", string_utils::strip_ns(&cid), id, ), ) } }) } pub fn emit_lval_op_nonlist_steps<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, outer_pos: &Pos, op: LValOp, expr: &ast::Expr, rhs_instrs: InstrSeq<'arena>, rhs_stack_size: isize, null_coalesce_assignment: bool, ) -> Result<(InstrSeq<'arena>, InstrSeq<'arena>, InstrSeq<'arena>)> { let f = |alloc: &'arena bumpalo::Bump, env: &mut Env<'a, 'arena>| { use ast::Expr_ as E_; let pos = &expr.1; Ok(match &expr.2 { E_::Lvar(v) if 
superglobals::is_any_global(local_id::get_name(&v.1)) => ( emit_pos_then( alloc, &v.0, instr::string(alloc, string_utils::lstrip(local_id::get_name(&v.1), "$")), ), rhs_instrs, emit_final_global_op::<D>(alloc, outer_pos, op), ), E_::Lvar(v) if is_local_this(env, &v.1) && op.is_incdec() => ( emit_local(e, env, BareThisOp::Notice, v)?, rhs_instrs, instr::empty(alloc), ), E_::Lvar(v) if !is_local_this(env, &v.1) || op == LValOp::Unset => { (instr::empty(alloc), rhs_instrs, { let lid = get_local(e, env, &v.0, &(v.1).1)?; emit_final_local_op::<D>(alloc, outer_pos, op, lid) }) } E_::ArrayGet(x) => match (&(x.0).1, x.1.as_ref()) { (_, None) if !env .flags .contains(hhbc_by_ref_env::Flags::ALLOWS_ARRAY_APPEND) => { return Err(emit_fatal::raise_fatal_runtime( pos, "Can't use [] for reading", )); } (_, opt_elem_expr) => { let mode = match op { LValOp::Unset => MemberOpMode::Unset, _ => MemberOpMode::Define, }; let (elem_instrs, elem_stack_size) = emit_elem(e, env, opt_elem_expr, None, null_coalesce_assignment)?; let base_offset = elem_stack_size + rhs_stack_size; let ( base_expr_instrs_begin, base_expr_instrs_end, base_setup_instrs, base_stack_size, cls_stack_size, ) = emit_base( e, env, &x.0, mode, false, BareThisOp::Notice, null_coalesce_assignment, base_offset, rhs_stack_size, )?; let (mk, warninstr) = get_elem_member_key( e, env, rhs_stack_size + cls_stack_size, opt_elem_expr, null_coalesce_assignment, )?; let total_stack_size = elem_stack_size + base_stack_size + cls_stack_size; let final_instr = emit_pos_then( alloc, pos, emit_final_member_op::<D>(alloc, total_stack_size as usize, op, mk), ); ( // Don't emit instructions for elems as these were not popped from // the stack by the final member op during the lookup of a null // coalesce assignment. 
if null_coalesce_assignment { instr::empty(alloc) } else { InstrSeq::gather( alloc, vec![base_expr_instrs_begin, elem_instrs, base_expr_instrs_end], ) }, rhs_instrs, InstrSeq::gather( alloc, vec![ emit_pos(alloc, pos), warninstr, base_setup_instrs, final_instr, ], ), ) } }, E_::ObjGet(x) if !x.as_ref().3 => { let (e1, e2, nullflavor, _) = &**x; if nullflavor.eq(&ast_defs::OgNullFlavor::OGNullsafe) { return Err(emit_fatal::raise_fatal_parse( pos, "?-> is not allowed in write context", )); } let mode = match op { LValOp::Unset => MemberOpMode::Unset, _ => MemberOpMode::Define, }; let prop_stack_size = emit_prop_expr(e, env, nullflavor, 0, e2, null_coalesce_assignment)?.2; let base_offset = prop_stack_size + rhs_stack_size; let ( base_expr_instrs_begin, base_expr_instrs_end, base_setup_instrs, base_stack_size, cls_stack_size, ) = emit_base( e, env, e1, mode, true, BareThisOp::Notice, null_coalesce_assignment, base_offset, rhs_stack_size, )?; let (mk, prop_instrs, _) = emit_prop_expr( e, env, nullflavor, rhs_stack_size + cls_stack_size, e2, null_coalesce_assignment, )?; let total_stack_size = prop_stack_size + base_stack_size + cls_stack_size; let final_instr = emit_pos_then( alloc, pos, emit_final_member_op::<D>(alloc, total_stack_size as usize, op, mk), ); ( // Don't emit instructions for props as these were not popped from // the stack by the final member op during the lookup of a null // coalesce assignment. 
if null_coalesce_assignment { instr::empty(alloc) } else { InstrSeq::gather( alloc, vec![base_expr_instrs_begin, prop_instrs, base_expr_instrs_end], ) }, rhs_instrs, InstrSeq::gather(alloc, vec![base_setup_instrs, final_instr]), ) } E_::ClassGet(x) if !x.as_ref().2 => { let (cid, prop, _) = &**x; let cexpr = ClassExpr::class_id_to_class_expr(e, false, false, &env.scope, cid); let final_instr_ = emit_final_static_op::<D>(alloc, cid, prop, op)?; let final_instr = emit_pos_then(alloc, pos, final_instr_); ( InstrSeq::from((alloc, emit_class_expr(e, env, cexpr, prop)?)), rhs_instrs, final_instr, ) } E_::Unop(uop) => ( instr::empty(alloc), rhs_instrs, InstrSeq::gather( alloc, vec![ emit_lval_op_nonlist( e, env, pos, op, &uop.1, instr::empty(alloc), rhs_stack_size, false, )?, from_unop(alloc, e.options(), &uop.0)?, ], ), ), _ => { return Err(emit_fatal::raise_fatal_parse( pos, "Can't use return value in write context", )); } }) }; // TODO(shiqicao): remove clone! let alloc = env.arena; let mut env = env.clone(); match op { LValOp::Set | LValOp::SetOp(_) | LValOp::IncDec(_) => { env.with_allows_array_append(alloc, f) } _ => f(alloc, &mut env), } } fn emit_class_expr<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, cexpr: ClassExpr<'arena>, prop: &ast::ClassGetExpr, ) -> Result<(InstrSeq<'arena>, InstrSeq<'arena>)> { let load_prop = |alloc: &'arena bumpalo::Bump, e: &mut Emitter<'arena, 'decl, D>| match prop { ast::ClassGetExpr::CGstring((pos, id)) => Ok(emit_pos_then( alloc, pos, instr::string(alloc, string_utils::locals::strip_dollar(id)), )), ast::ClassGetExpr::CGexpr(expr) => emit_expr(e, env, expr), }; let alloc = env.arena; Ok(match &cexpr { ClassExpr::Expr(expr) if expr.2.is_call() || expr.2.is_binop() || expr.2.is_class_get() || expr .2 .as_lvar() .map_or(false, |ast::Lid(_, id)| local_id::get_name(id) == "$this") => { let cexpr_local = emit_expr(e, env, expr)?; ( instr::empty(alloc), InstrSeq::gather( alloc, 
vec![ cexpr_local, scope::stash_top_in_unnamed_local(alloc, e, load_prop)?, instr::classgetc(alloc), ], ), ) } _ => { let pos = match prop { ast::ClassGetExpr::CGstring((pos, _)) | ast::ClassGetExpr::CGexpr(ast::Expr(_, pos, _)) => pos, }; ( load_prop(alloc, e)?, emit_load_class_ref(e, env, pos, cexpr)?, ) } }) } pub fn fixup_type_arg<'a, 'b, 'arena>( env: &Env<'b, 'arena>, isas: bool, hint: &'a ast::Hint, ) -> Result<impl AsRef<ast::Hint> + 'a> { struct Checker<'s> { erased_tparams: &'s [&'s str], isas: bool, } impl<'ast, 's> Visitor<'ast> for Checker<'s> { type P = AstParams<(), Option<Error>>; fn object(&mut self) -> &mut dyn Visitor<'ast, P = Self::P> { self } fn visit_hint_fun( &mut self, c: &mut (), hf: &ast::HintFun, ) -> StdResult<(), Option<Error>> { hf.param_tys.accept(c, self.object())?; hf.return_ty.accept(c, self.object()) } fn visit_hint(&mut self, c: &mut (), h: &ast::Hint) -> StdResult<(), Option<Error>> { use ast::{Hint_ as H_, Id}; match h.1.as_ref() { H_::Happly(Id(_, id), _) if self.erased_tparams.contains(&id.as_str()) && self.isas => { return Err(Some(emit_fatal::raise_fatal_parse( &h.0, "Erased generics are not allowed in is/as expressions", ))); } H_::Haccess(_, _) => return Ok(()), _ => {} } h.recurse(c, self.object()) } fn visit_hint_(&mut self, c: &mut (), h: &ast::Hint_) -> StdResult<(), Option<Error>> { use ast::{Hint_ as H_, Id}; match h { H_::Happly(Id(_, id), _) if self.erased_tparams.contains(&id.as_str()) => Err(None), _ => h.recurse(c, self.object()), } } } struct Updater<'s> { erased_tparams: &'s [&'s str], } impl<'ast, 's> VisitorMut<'ast> for Updater<'s> { type P = AstParams<(), ()>; fn object(&mut self) -> &mut dyn VisitorMut<'ast, P = Self::P> { self } fn visit_hint_fun(&mut self, c: &mut (), hf: &mut ast::HintFun) -> StdResult<(), ()> { <Vec<ast::Hint> as NodeMut<Self::P>>::accept(&mut hf.param_tys, c, self.object())?; <ast::Hint as NodeMut<Self::P>>::accept(&mut hf.return_ty, c, self.object()) } fn visit_hint_(&mut self, c: 
&mut (), h: &mut ast::Hint_) -> StdResult<(), ()> { use ast::{Hint_ as H_, Id}; match h { H_::Happly(Id(_, id), _) if self.erased_tparams.contains(&id.as_str()) => { Ok(*id = "_".into()) } _ => h.recurse(c, self.object()), } } } let erased_tparams = get_erased_tparams(env); let erased_tparams = erased_tparams.as_slice(); let mut checker = Checker { erased_tparams, isas, }; match visit(&mut checker, &mut (), hint) { Ok(()) => Ok(Either::Left(hint)), Err(Some(error)) => Err(error), Err(None) => { let mut updater = Updater { erased_tparams }; let mut hint = hint.clone(); visit_mut(&mut updater, &mut (), &mut hint).unwrap(); Ok(Either::Right(hint)) } } } pub fn emit_reified_arg<'b, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'b, 'arena>, pos: &Pos, isas: bool, hint: &ast::Hint, ) -> Result<(InstrSeq<'arena>, bool)> { struct Collector<'ast, 'a> { current_tags: &'a HashSet<&'a str>, acc: IndexSet<&'ast str>, } impl<'ast, 'a> Collector<'ast, 'a> { fn add_name(&mut self, name: &'ast str) { if self.current_tags.contains(name) && !self.acc.contains(name) { self.acc.insert(name); } } } impl<'ast, 'a> Visitor<'ast> for Collector<'ast, 'a> { type P = AstParams<(), ()>; fn object(&mut self) -> &mut dyn Visitor<'ast, P = Self::P> { self } fn visit_hint_(&mut self, c: &mut (), h_: &'ast ast::Hint_) -> StdResult<(), ()> { use ast::{Hint_ as H_, Id}; match h_ { H_::Haccess(_, sids) => Ok(sids.iter().for_each(|Id(_, name)| self.add_name(name))), H_::Habstr(name, h) | H_::Happly(Id(_, name), h) => { self.add_name(name); h.accept(c, self.object()) } _ => h_.recurse(c, self.object()), } } } let hint = fixup_type_arg(env, isas, hint)?; let hint = hint.as_ref(); fn f<'a>(mut acc: HashSet<&'a str>, tparam: &'a ast::Tparam) -> HashSet<&'a str> { if tparam.reified != ast::ReifyKind::Erased { acc.insert(&tparam.name.1); } acc } let current_tags = env .scope .get_fun_tparams() .iter() .fold(HashSet::<&str>::default(), |acc, t| f(acc, &*t)); let 
class_tparams = env.scope.get_class_tparams(); let current_tags = class_tparams .iter() .fold(current_tags, |acc, t| f(acc, &*t)); let mut collector = Collector { current_tags: &current_tags, acc: IndexSet::new(), }; visit(&mut collector, &mut (), hint).unwrap(); match hint.1.as_ref() { ast::Hint_::Happly(ast::Id(_, name), hs) if hs.is_empty() && current_tags.contains(name.as_str()) => { Ok((emit_reified_type::<D>(env, pos, name)?, false)) } _ => { let alloc = env.arena; let ts = get_type_structure_for_hint(alloc, e, &[], &collector.acc, hint)?; let ts_list = if collector.acc.is_empty() { ts } else { let values = collector .acc .iter() .map(|v| emit_reified_type::<D>(env, pos, v)) .collect::<Result<Vec<_>>>()?; InstrSeq::gather(alloc, vec![InstrSeq::gather(alloc, values), ts]) }; Ok(( InstrSeq::gather( alloc, vec![ ts_list, instr::combine_and_resolve_type_struct( alloc, (collector.acc.len() + 1) as isize, ), ], ), collector.acc.is_empty(), )) } } } pub fn get_local<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, pos: &Pos, s: &str, ) -> std::result::Result<Local<'arena>, hhbc_by_ref_instruction_sequence::Error> { let alloc: &'arena bumpalo::Bump = env.arena; if s == special_idents::DOLLAR_DOLLAR { match &env.pipe_var { None => Err(emit_fatal::raise_fatal_runtime( pos, "Pipe variables must occur only in the RHS of pipe expressions", )), Some(var) => Ok(*var), } } else if special_idents::is_tmp_var(s) { Ok(*e.local_gen().get_unnamed_for_tempname(s)) } else { Ok(Local::Named(Str::new_str(alloc, s))) } } pub fn emit_is_null<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, expr: &ast::Expr, ) -> Result<InstrSeq<'arena>> { let alloc = env.arena; if let Some(ast::Lid(pos, id)) = expr.2.as_lvar() { if !is_local_this(env, id) { return Ok(instr::istypel( alloc, get_local(e, env, pos, local_id::get_name(id))?, IstypeOp::OpNull, )); } } Ok(InstrSeq::gather( alloc, vec![ 
emit_expr(e, env, expr)?, instr::istypec(alloc, IstypeOp::OpNull), ], )) } pub fn emit_jmpnz<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, expr: &ast::Expr, label: Label, ) -> Result<EmitJmpResult<'arena>> { let alloc: &'arena bumpalo::Bump = env.arena; let ast::Expr(_, pos, expr_) = expr; let opt = optimize_null_checks(e); Ok( match ast_constant_folder::expr_to_typed_value(alloc, e, expr) { Ok(tv) => { if Into::<bool>::into(tv) { EmitJmpResult { instrs: emit_pos_then(alloc, pos, instr::jmp(alloc, label)), is_fallthrough: false, is_label_used: true, } } else { EmitJmpResult { instrs: emit_pos_then(alloc, pos, instr::empty(alloc)), is_fallthrough: true, is_label_used: false, } } } Err(_) => { use {ast::Expr_ as E, ast_defs::Uop as U}; match expr_ { E::Unop(uo) if uo.0 == U::Unot => emit_jmpz(e, env, &uo.1, label)?, E::Binop(bo) if bo.0.is_barbar() => { let r1 = emit_jmpnz(e, env, &bo.1, label)?; if r1.is_fallthrough { let r2 = emit_jmpnz(e, env, &bo.2, label)?; EmitJmpResult { instrs: emit_pos_then( alloc, pos, InstrSeq::gather(alloc, vec![r1.instrs, r2.instrs]), ), is_fallthrough: r2.is_fallthrough, is_label_used: r1.is_label_used || r2.is_label_used, } } else { r1 } } E::Binop(bo) if bo.0.is_ampamp() => { let skip_label = e.label_gen_mut().next_regular(); let r1 = emit_jmpz(e, env, &bo.1, skip_label)?; if !r1.is_fallthrough { EmitJmpResult { instrs: emit_pos_then( alloc, pos, InstrSeq::gather( alloc, if r1.is_label_used { vec![r1.instrs, instr::label(alloc, skip_label)] } else { vec![r1.instrs] }, ), ), is_fallthrough: r1.is_label_used, is_label_used: false, } } else { let r2 = emit_jmpnz(e, env, &bo.2, label)?; EmitJmpResult { instrs: emit_pos_then( alloc, pos, InstrSeq::gather( alloc, if r1.is_label_used { vec![ r1.instrs, r2.instrs, instr::label(alloc, skip_label), ] } else { vec![r1.instrs, r2.instrs] }, ), ), is_fallthrough: r2.is_fallthrough || r1.is_label_used, is_label_used: r2.is_label_used, } } } 
E::Binop(bo) if bo.0.is_eqeqeq() && ((bo.1).2.is_null() || (bo.2).2.is_null()) && opt => { let is_null = emit_is_null(e, env, if (bo.1).2.is_null() { &bo.2 } else { &bo.1 })?; EmitJmpResult { instrs: emit_pos_then( alloc, pos, InstrSeq::gather(alloc, vec![is_null, instr::jmpnz(alloc, label)]), ), is_fallthrough: true, is_label_used: true, } } E::Binop(bo) if bo.0.is_diff2() && ((bo.1).2.is_null() || (bo.2).2.is_null()) && opt => { let is_null = emit_is_null(e, env, if (bo.1).2.is_null() { &bo.2 } else { &bo.1 })?; EmitJmpResult { instrs: emit_pos_then( alloc, pos, InstrSeq::gather(alloc, vec![is_null, instr::jmpz(alloc, label)]), ), is_fallthrough: true, is_label_used: true, } } _ => { let instr = emit_expr(e, env, expr)?; EmitJmpResult { instrs: emit_pos_then( alloc, pos, InstrSeq::gather(alloc, vec![instr, instr::jmpnz(alloc, label)]), ), is_fallthrough: true, is_label_used: true, } } } } }, ) } pub fn emit_jmpz<'a, 'arena, 'decl, D: DeclProvider<'decl>>( e: &mut Emitter<'arena, 'decl, D>, env: &Env<'a, 'arena>, expr: &ast::Expr, label: Label, ) -> std::result::Result<EmitJmpResult<'arena>, hhbc_by_ref_instruction_sequence::Error> { let alloc: &'arena bumpalo::Bump = env.arena; let ast::Expr(_, pos, expr_) = expr; let opt = optimize_null_checks(e); Ok( match ast_constant_folder::expr_to_typed_value(alloc, e, expr) { Ok(v) => { let b: bool = v.into(); if b { EmitJmpResult { instrs: emit_pos_then(alloc, pos, instr::empty(alloc)), is_fallthrough: true, is_label_used: false, } } else { EmitJmpResult { instrs: emit_pos_then(alloc, pos, instr::jmp(alloc, label)), is_fallthrough: false, is_label_used: true, } } } Err(_) => { use {ast::Expr_ as E, ast_defs::Uop as U}; match expr_ { E::Unop(uo) if uo.0 == U::Unot => emit_jmpnz(e, env, &uo.1, label)?, E::Binop(bo) if bo.0.is_barbar() => { let skip_label = e.label_gen_mut().next_regular(); let r1 = emit_jmpnz(e, env, &bo.1, skip_label)?; if !r1.is_fallthrough { EmitJmpResult { instrs: emit_pos_then( alloc, pos, 
InstrSeq::gather( alloc, if r1.is_label_used { vec![r1.instrs, instr::label(alloc, skip_label)] } else { vec![r1.instrs] }, ), ), is_fallthrough: r1.is_label_used, is_label_used: false, } } else { let r2 = emit_jmpz(e, env, &bo.2, label)?; EmitJmpResult { instrs: emit_pos_then( alloc, pos, InstrSeq::gather( alloc, if r1.is_label_used { vec![ r1.instrs, r2.instrs, instr::label(alloc, skip_label), ] } else { vec![r1.instrs, r2.instrs] }, ), ), is_fallthrough: r2.is_fallthrough || r1.is_label_used, is_label_used: r2.is_label_used, } } } E::Binop(bo) if bo.0.is_ampamp() => { let r1 = emit_jmpz(e, env, &bo.1, label)?; if r1.is_fallthrough { let r2 = emit_jmpz(e, env, &bo.2, label)?; EmitJmpResult { instrs: emit_pos_then( alloc, pos, InstrSeq::gather(alloc, vec![r1.instrs, r2.instrs]), ), is_fallthrough: r2.is_fallthrough, is_label_used: r1.is_label_used || r2.is_label_used, } } else { EmitJmpResult { instrs: emit_pos_then(alloc, pos, r1.instrs), is_fallthrough: false, is_label_used: r1.is_label_used, } } } E::Binop(bo) if bo.0.is_eqeqeq() && ((bo.1).2.is_null() || (bo.2).2.is_null()) && opt => { let is_null = emit_is_null(e, env, if (bo.1).2.is_null() { &bo.2 } else { &bo.1 })?; EmitJmpResult { instrs: emit_pos_then( alloc, pos, InstrSeq::gather(alloc, vec![is_null, instr::jmpz(alloc, label)]), ), is_fallthrough: true, is_label_used: true, } } E::Binop(bo) if bo.0.is_diff2() && ((bo.1).2.is_null() || (bo.2).2.is_null()) && opt => { let is_null = emit_is_null(e, env, if (bo.1).2.is_null() { &bo.2 } else { &bo.1 })?; EmitJmpResult { instrs: emit_pos_then( alloc, pos, InstrSeq::gather(alloc, vec![is_null, instr::jmpnz(alloc, label)]), ), is_fallthrough: true, is_label_used: true, } } _ => { let instr = emit_expr(e, env, expr)?; EmitJmpResult { instrs: emit_pos_then( alloc, pos, InstrSeq::gather(alloc, vec![instr, instr::jmpz(alloc, label)]), ), is_fallthrough: true, is_label_used: true, } } } } }, ) }
36.558673
125
0.437207
d5a1459c1d44138321c09c43bb29068f7fb2c243
5,733
#![feature(test)]
extern crate test;
#[macro_use]
extern crate lazy_static;

use std::collections::{HashSet, VecDeque};

lazy_static! {
    static ref INPUT: Data = include_str!("../input").parse().expect("invalid input");
}

/// The two starting decks (player 1, player 2) parsed from the puzzle input.
struct Data(Vec<u32>, Vec<u32>);

impl std::str::FromStr for Data {
    type Err = String;

    /// Parse two "Player N:" blocks separated by a blank line; the header
    /// line of each block is skipped and the remaining lines are card values.
    fn from_str(input: &str) -> Result<Self, Self::Err> {
        let mut parts = input.split("\n\n");
        let mut parse = || {
            parts
                .next()
                .ok_or_else(|| "expecting player".to_string())
                .and_then(|part| {
                    part.lines()
                        .skip(1) // drop the "Player N:" header line
                        .map(|line| {
                            line.parse()
                                .map_err(|_| format!("invalid number: {}", line))
                        })
                        .collect()
                })
        };
        Ok(Data(parse()?, parse()?))
    }
}

enum Winner {
    Player1,
    Player2,
}

/// State of one (possibly recursive) game of Combat: both decks plus the
/// set of previously seen deck configurations (for the loop-prevention rule).
struct Game {
    player_1: VecDeque<u32>,
    player_2: VecDeque<u32>,
    history: HashSet<(VecDeque<u32>, VecDeque<u32>)>,
}

impl Game {
    fn new(player_1: &[u32], player_2: &[u32]) -> Self {
        Self {
            player_1: player_1.iter().copied().collect(),
            player_2: player_2.iter().copied().collect(),
            history: HashSet::new(),
        }
    }

    /// Play Recursive Combat to completion. Returns the winner together with
    /// the winner's deck (top card first).
    fn play(&mut self) -> (Winner, &mut [u32]) {
        use Winner::*;
        loop {
            // Loop-prevention rule: if this exact pair of decks was already
            // seen in this game, player 1 wins immediately. `HashSet::insert`
            // returns `false` when the key was already present, so a single
            // lookup both checks and records the state (the original did
            // `contains` followed by `insert` — two hashes per round).
            let key = (self.player_1.clone(), self.player_2.clone());
            if !self.history.insert(key) {
                break (Player1, self.player_1.make_contiguous());
            }

            match (self.player_1.pop_front(), self.player_2.pop_front()) {
                // Recurse: both players have at least as many cards left as
                // the value of the card they just drew.
                (Some(card_1), Some(card_2))
                    if (card_1 as usize) <= self.player_1.len()
                        && (card_2 as usize) <= self.player_2.len() =>
                {
                    // The sub-game plays on *copies* of the next card_N cards.
                    match Game::new(
                        &self.player_1.make_contiguous()[0..card_1 as usize],
                        &self.player_2.make_contiguous()[0..card_2 as usize],
                    )
                    .play()
                    {
                        (Player1, _) => {
                            self.player_1.push_back(card_1);
                            self.player_1.push_back(card_2);
                        }
                        (Player2, _) => {
                            self.player_2.push_back(card_2);
                            self.player_2.push_back(card_1);
                        }
                    }
                }
                // Regular round: higher card wins; winner's own card goes
                // under the deck first, then the loser's card.
                (Some(card_1), Some(card_2)) if card_1 > card_2 => {
                    self.player_1.push_back(card_1);
                    self.player_1.push_back(card_2);
                }
                (Some(card_1), Some(card_2)) if card_1 < card_2 => {
                    self.player_2.push_back(card_2);
                    self.player_2.push_back(card_1);
                }
                // Opponent's deck was empty at the start of the round: put
                // the drawn card back on top and declare the winner.
                (Some(card), None) => {
                    self.player_1.push_front(card);
                    break (Player1, self.player_1.make_contiguous());
                }
                (None, Some(card)) => {
                    self.player_2.push_front(card);
                    break (Player2, self.player_2.make_contiguous());
                }
                // Ties are impossible (card values are distinct) and both
                // decks cannot be empty at once. Fix: `unreachable!(format!(..))`
                // passes a non-literal panic message, which is deprecated
                // (`non_fmt_panics`) and a hard error in edition 2021 — pass
                // the format string and arguments directly instead.
                u => unreachable!(
                    "c: {:?} p1: {:?} p2: {:?}",
                    u, self.player_1, self.player_2
                ),
            }
        }
    }
}

/// Score a deck given top card first: the bottom card counts once, the card
/// above it twice, and so on up to the top card. Shared by both parts (the
/// original duplicated this reverse/enumerate/sum logic in `solve_1` and
/// `solve_2`).
fn score(deck: &[u32]) -> usize {
    deck.iter()
        .rev()
        .enumerate()
        .map(|(i, &card)| card as usize * (i + 1))
        .sum()
}

/// Part 1: plain Combat — the higher card simply wins each round.
fn solve_1(input: &Data) -> usize {
    let mut player_1 = input.0.iter().copied().collect::<VecDeque<_>>();
    let mut player_2 = input.1.iter().copied().collect::<VecDeque<_>>();
    loop {
        match (player_1.pop_front(), player_2.pop_front()) {
            (Some(card_1), Some(card_2)) if card_1 > card_2 => {
                player_1.push_back(card_1);
                player_1.push_back(card_2);
            }
            (Some(card_1), Some(card_2)) if card_2 > card_1 => {
                player_2.push_back(card_2);
                player_2.push_back(card_1);
            }
            (Some(card), None) => {
                player_1.push_front(card);
                break;
            }
            (None, Some(card)) => {
                player_2.push_front(card);
                break;
            }
            // Distinct card values => no ties; decks never both empty here.
            _ => unreachable!(),
        }
    }
    let mut winner = if player_1.is_empty() { player_2 } else { player_1 };
    score(winner.make_contiguous())
}

/// Part 2: Recursive Combat.
fn solve_2(input: &Data) -> usize {
    let mut game = Game::new(&input.0, &input.1);
    let (_, cards) = game.play();
    score(cards)
}

pub fn part_1() -> usize {
    solve_1(&INPUT)
}

pub fn part_2() -> usize {
    solve_2(&INPUT)
}

#[cfg(test)]
mod tests {
    use super::*;
    use test::Bencher;

    lazy_static! {
        static ref INPUT: Data = r"Player 1:
9
2
6
3
1

Player 2:
5
8
4
7
10"
        .parse()
        .expect("invalid input");
    }

    #[test]
    fn same_results_part_1() {
        assert_eq!(solve_1(&INPUT), 306);
    }

    #[test]
    fn same_results_part_2() {
        assert_eq!(solve_2(&INPUT), 291);
    }

    #[bench]
    fn bench_part_1(b: &mut Bencher) {
        b.iter(part_1);
    }

    #[bench]
    fn bench_part_2(b: &mut Bencher) {
        b.iter(part_2);
    }
}
25.70852
86
0.442875
d5547ea5add0016aa9930b1e5491384c7d8267f8
1,729
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(const_fn, const_size_of, const_align_of)] use std::mem; // Get around the limitations of CTFE in today's Rust. const fn choice_u64(c: bool, a: u64, b: u64) -> u64 { (-(c as i64) as u64) & a | (-(!c as i64) as u64) & b } const fn max_usize(a: usize, b: usize) -> usize { choice_u64(a > b, a as u64, b as u64) as usize } const fn align_to(size: usize, align: usize) -> usize { (size + (align - 1)) & !(align - 1) } const fn packed_union_size_of<A, B>() -> usize { max_usize(mem::size_of::<A>(), mem::size_of::<B>()) } const fn union_align_of<A, B>() -> usize { max_usize(mem::align_of::<A>(), mem::align_of::<B>()) } const fn union_size_of<A, B>() -> usize { align_to(packed_union_size_of::<A, B>(), union_align_of::<A, B>()) } macro_rules! fake_union { ($name:ident { $a:ty, $b:ty }) => ( struct $name { _align: ([$a; 0], [$b; 0]), _bytes: [u8; union_size_of::<$a, $b>()] } ) } // Check that we can (poorly) emulate unions by // calling size_of and align_of at compile-time. fake_union!(U { u16, [u8; 3] }); fn test(u: U) { assert_eq!(mem::size_of_val(&u._bytes), 4); } fn main() { assert_eq!(mem::size_of::<U>(), 4); assert_eq!(mem::align_of::<U>(), 2); }
28.344262
70
0.620012
8af17743f0c21215c279c089ed565b0e9cfdea2b
397
use crate::CONFIG_SEED; use solana_program::pubkey::Pubkey; pub struct Config { /// Only admin can change this account pub admin: [u8; 32], /// Canvas width pub width: u32, /// Canvas height pub height: u32, } impl Config { pub fn get_config_pubkey_with_bump() -> (Pubkey, u8) { Pubkey::find_program_address(&[CONFIG_SEED.as_bytes()], &crate::id()) } }
19.85
77
0.634761
5d1b2f307889958da731d7276da9fea7f829b71a
50
pub mod tweet; pub mod websocket; mod translator;
12.5
18
0.78
e2f57f157ef3817bf462d08345be539c08452eb0
15,613
//! Securely zero memory with a simple trait ([Zeroize]) built on stable Rust //! primitives which guarantee the operation will not be "optimized away". //! //! ## About //! //! [Zeroing memory securely is hard] - compilers optimize for performance, and //! in doing so they love to "optimize away" unnecessary zeroing calls. There are //! many documented "tricks" to attempt to avoid these optimizations and ensure //! that a zeroing routine is performed reliably. //! //! This crate isn't about tricks: it uses [core::ptr::write_volatile] //! and [core::sync::atomic] memory fences to provide easy-to-use, portable //! zeroing behavior which works on all of Rust's core number types and slices //! thereof, implemented in pure Rust with no usage of FFI or assembly. //! //! - No insecure fallbacks! //! - No dependencies! //! - No FFI or inline assembly! **WASM friendly** (and tested)! //! - `#![no_std]` i.e. **embedded-friendly**! //! - No functionality besides securely zeroing memory! //! //! ## Usage //! //! ``` //! use zeroize::Zeroize; //! //! fn main() { //! // Protip: don't embed secrets in your source code. //! // This is just an example. //! let mut secret = b"Air shield password: 1,2,3,4,5".to_vec(); //! // [ ... ] open the air shield here //! //! // Now that we're done using the secret, zero it out. //! secret.zeroize(); //! } //! ``` //! //! The [Zeroize] trait is impl'd on all of Rust's core scalar types including //! integers, floats, `bool`, and `char`. //! //! Additionally, it's implemented on slices and `IterMut`s of the above types. //! //! When the `std` feature is enabled (which it is by default), it's also impl'd //! for `Vec`s of the above types as well as `String`, where it provides //! [Vec::clear()] / [String::clear()]-like behavior (truncating to zero-length) //! but ensures the backing memory is securely zeroed with some caveats. //! (NOTE: see "Stack/Heap Zeroing Notes" for important `Vec`/`String` details) //! //! 
The [DefaultIsZeroes] marker trait can be impl'd on types which also //! impl [Default], which implements [Zeroize] by overwriting a value with //! the default value. //! //! ## Custom Derive Support //! //! **NOTICE**: Previous versions of `zeroize` automatically derived //! `Drop`. This has been *REMOVED* and you now *MUST* explicitly specify //! either `zeroize(drop)` or `zeroize(no_drop)` (see below). //! //! This crate has custom derive support for the `Zeroize` trait, which //! automatically calls `zeroize()` on all members of a struct or tuple struct. //! //! Additionally it supports the following attributes (you *MUST* pick one): //! //! - `#[zeroize(no_drop)]`: derive only `Zeroize` without adding a `Drop` impl //! - `#[zeroize(drop)]`: call `zeroize()` when this item is dropped //! //! Example which derives `Drop`: //! //! ``` //! use zeroize::Zeroize; //! //! // This struct will be zeroized on drop //! #[derive(Zeroize)] //! #[zeroize(drop)] //! struct MyStruct([u8; 32]); //! ``` //! //! Example which does not derive `Drop` (useful for e.g. `Copy` types) //! //! ``` //! use zeroize::Zeroize; //! //! // This struct will *NOT* be zeroized on drop //! #[derive(Copy, Clone, Zeroize)] //! #[zeroize(no_drop)] //! struct MyStruct([u8; 32]); //! ``` //! //! ## `Zeroizing<Z>`: wrapper for zeroizing arbitrary values on drop //! //! `Zeroizing<Z: Zeroize>` is a generic wrapper type that impls `Deref` //! and `DerefMut`, allowing access to an inner value of type `Z`, and also //! impls a `Drop` handler which calls `zeroize()` on its contents: //! //! ``` //! use zeroize::Zeroizing; //! //! fn main() { //! let mut secret = Zeroizing::new([0u8; 5]); //! //! // Set the air shield password //! // Protip (again): don't embed secrets in your source code. //! secret.copy_from_slice(&[1, 2, 3, 4, 5]); //! assert_eq!(secret.as_ref(), &[1, 2, 3, 4, 5]); //! //! // The contents of `secret` will be automatically zeroized on drop //! } //! ``` //! //! 
## What guarantees does this crate provide? //! //! This crate guarantees the following: //! //! 1. The zeroing operation can't be "optimized away" by the compiler. //! 2. All subsequent reads to memory will see "zeroized" values. //! //! LLVM's volatile semantics ensure #1 is true. //! //! Additionally, thanks to work by the [Unsafe Code Guidelines Working Group], //! we can now fairly confidently say #2 is true as well. Previously there were //! worries that the approach used by this crate (mixing volatile and //! non-volatile accesses) was undefined behavior due to language contained //! in the documentation for `write_volatile`, however after some discussion //! [these remarks have been removed] and the specific usage pattern in this //! crate is considered to be well-defined. //! //! Additionally this crate leverages [compiler_fence] from //! [core::sync::atomic] with the strictest ordering ([Ordering::SeqCst]) //! as a precaution to help ensure reads are not reordered before memory has //! been zeroed. //! //! All of that said, there is still potential for microarchitectural attacks //! (ala Spectre/Meltdown) to leak "zeroized" secrets through covert channels. //! This crate makes no guarantees that zeroized values cannot be leaked //! through such channels, as they represent flaws in the underlying hardware. //! //! ## Stack/Heap Zeroing Notes //! //! This crate can be used to zero values from either the stack or the heap. //! //! However, be aware several operations in Rust can unintentionally leave //! copies of data in memory. This includes but is not limited to: //! //! - Moves and `Copy` //! - Heap reallocation when using `Vec` and `String` //! - Borrowers of a reference making copies of the data //! //! [`Pin`][pin] can be leveraged in conjunction with this crate to ensure //! data kept on the stack isn't moved. //! //! The `Zeroize` impls for `Vec` and `String` zeroize the entire capacity of //! 
their backing buffer, but cannot guarantee copies of the data were not //! previously made by buffer reallocation. It's therefore important when //! attempting to zeroize such buffers to initialize them to the correct //! capacity, and take care to prevent subsequent reallocation. //! //! The `secrecy` crate provides higher-level abstractions for eliminating //! usage patterns which can cause reallocations: //! //! <https://crates.io/crates/secrecy> //! //! ## What about: clearing registers, mlock, mprotect, etc? //! //! This crate is focused on providing simple, unobtrusive support for reliably //! zeroing memory using the best approach possible on stable Rust. //! //! Clearing registers is a difficult problem that can't easily be solved by //! something like a crate, and requires either inline ASM or rustc support. //! See <https://github.com/rust-lang/rust/issues/17046> for background on //! this particular problem. //! //! Other memory protection mechanisms are interesting and useful, but often //! overkill (e.g. defending against RAM scraping or attackers with swap access). //! In as much as there may be merit to these approaches, there are also many //! other crates that already implement more sophisticated memory protections. //! Such protections are explicitly out-of-scope for this crate. //! //! Zeroing memory is [good cryptographic hygiene] and this crate seeks to promote //! it in the most unobtrusive manner possible. This includes omitting complex //! `unsafe` memory protection systems and just trying to make the best memory //! zeroing crate available. //! //! [Zeroize]: https://docs.rs/zeroize/latest/zeroize/trait.Zeroize.html //! [Zeroing memory securely is hard]: http://www.daemonology.net/blog/2014-09-04-how-to-zero-a-buffer.html //! [Vec::clear()]: https://doc.rust-lang.org/std/vec/struct.Vec.html#method.clear //! [String::clear()]: https://doc.rust-lang.org/std/string/struct.String.html#method.clear //! 
[DefaultIsZeroes]: https://docs.rs/zeroize/latest/zeroize/trait.DefaultIsZeroes.html //! [Default]: https://doc.rust-lang.org/std/default/trait.Default.html //! [core::ptr::write_volatile]: https://doc.rust-lang.org/core/ptr/fn.write_volatile.html //! [Unsafe Code Guidelines Working Group]: https://github.com/rust-lang/unsafe-code-guidelines //! [these remarks have been removed]: https://github.com/rust-lang/rust/pull/60972 //! [core::sync::atomic]: https://doc.rust-lang.org/stable/core/sync/atomic/index.html //! [Ordering::SeqCst]: https://doc.rust-lang.org/std/sync/atomic/enum.Ordering.html#variant.SeqCst //! [compiler_fence]: https://doc.rust-lang.org/stable/core/sync/atomic/fn.compiler_fence.html //! [pin]: https://doc.rust-lang.org/std/pin/struct.Pin.html //! [good cryptographic hygiene]: https://cryptocoding.net/index.php/Coding_rules#Clean_memory_of_secret_data #![no_std] #![deny(warnings, missing_docs, trivial_casts, unused_qualifications)] #![doc(html_root_url = "https://docs.rs/zeroize/0.9.2")] #[cfg(all(feature = "alloc", not(feature = "std")))] #[allow(unused_imports)] // rustc bug? 
#[macro_use] extern crate alloc; #[cfg(any(feature = "std", test))] #[cfg_attr(test, macro_use)] extern crate std; #[cfg(feature = "zeroize_derive")] #[allow(unused_imports)] #[macro_use] extern crate zeroize_derive; #[cfg(feature = "zeroize_derive")] #[doc(hidden)] pub use zeroize_derive::*; use core::{ops, ptr, slice::IterMut, sync::atomic}; #[cfg(all(feature = "alloc", not(feature = "std")))] use alloc::{string::String, vec::Vec}; #[cfg(feature = "std")] use std::{string::String, vec::Vec}; /// Trait for securely erasing types from memory pub trait Zeroize { /// Zero out this object from memory (using Rust or OS intrinsics which /// ensure the zeroization operation is not "optimized away") fn zeroize(&mut self); } /// Marker trait for types whose `Default` is the desired zeroization result pub trait DefaultIsZeroes: Copy + Default + Sized {} impl<Z> Zeroize for Z where Z: DefaultIsZeroes, { fn zeroize(&mut self) { volatile_write(self, Z::default()); atomic_fence(); } } macro_rules! impl_zeroize_with_default { ($($type:ty),+) => { $(impl DefaultIsZeroes for $type {})+ }; } impl_zeroize_with_default!(i8, i16, i32, i64, i128, isize); impl_zeroize_with_default!(u8, u16, u32, u64, u128, usize); impl_zeroize_with_default!(f32, f64, char, bool); /// Implement `Zeroize` on arrays of types that impl `Zeroize` macro_rules! 
impl_zeroize_for_array { ($($size:expr),+) => { $( impl<Z> Zeroize for [Z; $size] where Z: Zeroize { fn zeroize(&mut self) { self.iter_mut().zeroize(); } } )+ }; } // TODO(tarcieri): const generics impl_zeroize_for_array!( 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64 ); impl<'a, Z> Zeroize for IterMut<'a, Z> where Z: Zeroize, { fn zeroize(&mut self) { for elem in self { elem.zeroize(); } } } impl<Z> Zeroize for Option<Z> where Z: Zeroize, { fn zeroize(&mut self) { match self { Some(value) => value.zeroize(), None => (), } } } /// Impl `Zeroize` on slices of types that can be zeroized with `Default`. /// /// This impl can eventually be optimized using an memset intrinsic, /// such as `core::intrinsics::volatile_set_memory`. For that reason the blanket /// impl on slices is bounded by `DefaultIsZeroes`. /// /// To zeroize a mut slice of `Z: Zeroize` which does not impl /// `DefaultIsZeroes`, call `iter_mut().zeroize()`. impl<Z> Zeroize for [Z] where Z: DefaultIsZeroes, { fn zeroize(&mut self) { volatile_set(self, Z::default()); atomic_fence(); } } #[cfg(feature = "alloc")] impl<Z> Zeroize for Vec<Z> where Z: DefaultIsZeroes, { fn zeroize(&mut self) { self.resize(self.capacity(), Default::default()); self.as_mut_slice().zeroize(); self.clear(); } } #[cfg(feature = "alloc")] impl Zeroize for String { fn zeroize(&mut self) { unsafe { self.as_bytes_mut() }.zeroize(); debug_assert!(self.as_bytes().iter().all(|b| *b == 0)); self.clear(); } } /// `Zeroizing` is a a wrapper for any `Z: Zeroize` type which implements a /// `Drop` handler which zeroizes dropped values. pub struct Zeroizing<Z: Zeroize>(Z); impl<Z> Zeroizing<Z> where Z: Zeroize, { /// Wrap a value in `Zeroizing`, ensuring it's zeroized on drop. 
pub fn new(value: Z) -> Self { Zeroizing(value) } } impl<Z> ops::Deref for Zeroizing<Z> where Z: Zeroize, { type Target = Z; fn deref(&self) -> &Z { &self.0 } } impl<Z> ops::DerefMut for Zeroizing<Z> where Z: Zeroize, { fn deref_mut(&mut self) -> &mut Z { &mut self.0 } } impl<Z> Zeroize for Zeroizing<Z> where Z: Zeroize, { fn zeroize(&mut self) { self.0.zeroize(); } } // We could `derive(Zeroize)` for this, but doing it by hand allows `Zeroizing` // to function regardless of whether the `zeroize_derive` feature is enabled // or not. impl<Z> Drop for Zeroizing<Z> where Z: Zeroize, { fn drop(&mut self) { self.0.zeroize() } } /// Use fences to prevent accesses from being reordered before this /// point, which should hopefully help ensure that all accessors /// see zeroes after this point. #[inline] fn atomic_fence() { atomic::compiler_fence(atomic::Ordering::SeqCst); } /// Perform a volatile write to the destination #[inline] fn volatile_write<T: Copy + Sized>(dst: &mut T, src: T) { unsafe { ptr::write_volatile(dst, src) } } /// Perform a volatile `memset` operation which fills a slice with a value #[inline] fn volatile_set<T: Copy + Sized>(dst: &mut [T], src: T) { // TODO(tarcieri): use `volatile_set_memory` on nightly? 
for elem in dst { volatile_write(elem, src); } } #[cfg(test)] mod tests { use super::*; #[cfg(all(feature = "alloc", not(feature = "std")))] use alloc::boxed::Box; #[cfg(feature = "std")] use std::boxed::Box; #[test] fn zeroize_byte_arrays() { let mut arr = [42u8; 64]; arr.zeroize(); assert_eq!(arr.as_ref(), [0u8; 64].as_ref()); } #[cfg(feature = "alloc")] #[test] fn zeroize_vec() { let mut vec = vec![42; 3]; vec.zeroize(); assert!(vec.is_empty()); } #[cfg(feature = "alloc")] #[test] fn zeroize_vec_past_len() { let mut vec = Vec::with_capacity(5); for i in 0..4 { vec.push(10 + i); } vec.clear(); // safe if: new_len <= capacity AND elements "were initialised" unsafe { vec.set_len(1); } assert_eq!(10, vec[0], "clear() hasn't erased our push()es"); vec.clear(); vec.zeroize(); unsafe { vec.set_len(4); } for i in 0..4 { assert_eq!(0, vec[i], "it's been zero'd"); } } #[cfg(feature = "alloc")] #[test] fn zeroize_string() { let mut string = String::from("Hello, world!"); string.zeroize(); assert!(string.is_empty()); } #[cfg(feature = "alloc")] #[test] fn zeroize_box() { let mut boxed_arr = Box::new([42u8; 3]); boxed_arr.zeroize(); assert_eq!(boxed_arr.as_ref(), &[0u8; 3]); } }
32.191753
109
0.646705
e6ddf8778ccf035dbd8f095f45f8d15a784c081b
16,383
//! The main parser interface. use crate::ast; use crate::parse::parser::{Parser, emit_unclosed_delims}; use crate::parse::token::Nonterminal; use crate::tokenstream::{self, TokenStream, TokenTree}; use crate::print::pprust; use crate::sess::ParseSess; use errors::{FatalError, Level, Diagnostic, DiagnosticBuilder}; #[cfg(target_arch = "x86_64")] use rustc_data_structures::static_assert_size; use rustc_data_structures::sync::Lrc; use syntax_pos::{Span, SourceFile, FileName}; use std::borrow::Cow; use std::path::Path; use std::str; use log::info; #[cfg(test)] mod tests; #[macro_use] pub mod parser; pub mod lexer; pub mod token; crate mod classify; crate mod literal; crate mod unescape_error_reporting; pub type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>; // `PResult` is used a lot. Make sure it doesn't unintentionally get bigger. // (See also the comment on `DiagnosticBuilderInner`.) #[cfg(target_arch = "x86_64")] static_assert_size!(PResult<'_, bool>, 16); #[derive(Clone)] pub struct Directory<'a> { pub path: Cow<'a, Path>, pub ownership: DirectoryOwnership, } #[derive(Copy, Clone)] pub enum DirectoryOwnership { Owned { // None if `mod.rs`, `Some("foo")` if we're in `foo.rs`. relative: Option<ast::Ident>, }, UnownedViaBlock, UnownedViaMod(bool /* legacy warnings? */), } // A bunch of utility functions of the form `parse_<thing>_from_<source>` // where <thing> includes crate, expr, item, stmt, tts, and one that // uses a HOF to parse anything, and <source> includes file and // `source_str`. /// A variant of 'panictry!' that works on a Vec<Diagnostic> instead of a single DiagnosticBuilder. macro_rules! 
panictry_buffer { ($handler:expr, $e:expr) => ({ use std::result::Result::{Ok, Err}; use errors::FatalError; match $e { Ok(e) => e, Err(errs) => { for e in errs { $handler.emit_diagnostic(&e); } FatalError.raise() } } }) } pub fn parse_crate_from_file<'a>(input: &Path, sess: &'a ParseSess) -> PResult<'a, ast::Crate> { let mut parser = new_parser_from_file(sess, input); parser.parse_crate_mod() } pub fn parse_crate_attrs_from_file<'a>(input: &Path, sess: &'a ParseSess) -> PResult<'a, Vec<ast::Attribute>> { let mut parser = new_parser_from_file(sess, input); parser.parse_inner_attributes() } pub fn parse_crate_from_source_str(name: FileName, source: String, sess: &ParseSess) -> PResult<'_, ast::Crate> { new_parser_from_source_str(sess, name, source).parse_crate_mod() } pub fn parse_crate_attrs_from_source_str(name: FileName, source: String, sess: &ParseSess) -> PResult<'_, Vec<ast::Attribute>> { new_parser_from_source_str(sess, name, source).parse_inner_attributes() } pub fn parse_stream_from_source_str( name: FileName, source: String, sess: &ParseSess, override_span: Option<Span>, ) -> TokenStream { let (stream, mut errors) = source_file_to_stream( sess, sess.source_map().new_source_file(name, source), override_span, ); emit_unclosed_delims(&mut errors, &sess.span_diagnostic); stream } /// Creates a new parser from a source string. pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> { panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source)) } /// Creates a new parser from a source string. Returns any buffered errors from lexing the initial /// token stream. 
pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Result<Parser<'_>, Vec<Diagnostic>> { let mut parser = maybe_source_file_to_parser(sess, sess.source_map().new_source_file(name, source))?; parser.recurse_into_file_modules = false; Ok(parser) } /// Creates a new parser, handling errors as appropriate if the file doesn't exist. pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> { source_file_to_parser(sess, file_to_source_file(sess, path, None)) } /// Creates a new parser, returning buffered diagnostics if the file doesn't exist, /// or from lexing the initial token stream. pub fn maybe_new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Result<Parser<'a>, Vec<Diagnostic>> { let file = try_file_to_source_file(sess, path, None).map_err(|db| vec![db])?; maybe_source_file_to_parser(sess, file) } /// Given a session, a crate config, a path, and a span, add /// the file at the given path to the `source_map`, and returns a parser. /// On an error, uses the given span as the source of the problem. pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, path: &Path, directory_ownership: DirectoryOwnership, module_name: Option<String>, sp: Span) -> Parser<'a> { let mut p = source_file_to_parser(sess, file_to_source_file(sess, path, Some(sp))); p.directory.ownership = directory_ownership; p.root_module_name = module_name; p } /// Given a `source_file` and config, returns a parser. fn source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Parser<'_> { panictry_buffer!(&sess.span_diagnostic, maybe_source_file_to_parser(sess, source_file)) } /// Given a `source_file` and config, return a parser. Returns any buffered errors from lexing the /// initial token stream. 
fn maybe_source_file_to_parser( sess: &ParseSess, source_file: Lrc<SourceFile>, ) -> Result<Parser<'_>, Vec<Diagnostic>> { let end_pos = source_file.end_pos; let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?; let mut parser = stream_to_parser(sess, stream, None); parser.unclosed_delims = unclosed_delims; if parser.token == token::Eof && parser.token.span.is_dummy() { parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt()); } Ok(parser) } // Must preserve old name for now, because `quote!` from the *existing* // compiler expands into it. pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser<'_> { stream_to_parser(sess, tts.into_iter().collect(), crate::MACRO_ARGUMENTS) } // Base abstractions /// Given a session and a path and an optional span (for error reporting), /// add the path to the session's source_map and return the new source_file or /// error when a file can't be read. fn try_file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>) -> Result<Lrc<SourceFile>, Diagnostic> { sess.source_map().load_file(path) .map_err(|e| { let msg = format!("couldn't read {}: {}", path.display(), e); let mut diag = Diagnostic::new(Level::Fatal, &msg); if let Some(sp) = spanopt { diag.set_span(sp); } diag }) } /// Given a session and a path and an optional span (for error reporting), /// adds the path to the session's `source_map` and returns the new `source_file`. fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>) -> Lrc<SourceFile> { match try_file_to_source_file(sess, path, spanopt) { Ok(source_file) => source_file, Err(d) => { sess.span_diagnostic.emit_diagnostic(&d); FatalError.raise(); } } } /// Given a `source_file`, produces a sequence of token trees. 
pub fn source_file_to_stream( sess: &ParseSess, source_file: Lrc<SourceFile>, override_span: Option<Span>, ) -> (TokenStream, Vec<lexer::UnmatchedBrace>) { panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span)) } /// Given a source file, produces a sequence of token trees. Returns any buffered errors from /// parsing the token stream. pub fn maybe_file_to_stream( sess: &ParseSess, source_file: Lrc<SourceFile>, override_span: Option<Span>, ) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> { let srdr = lexer::StringReader::new(sess, source_file, override_span); let (token_trees, unmatched_braces) = srdr.into_token_trees(); match token_trees { Ok(stream) => Ok((stream, unmatched_braces)), Err(err) => { let mut buffer = Vec::with_capacity(1); err.buffer(&mut buffer); // Not using `emit_unclosed_delims` to use `db.buffer` for unmatched in unmatched_braces { let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!( "incorrect close delimiter: `{}`", pprust::token_kind_to_string(&token::CloseDelim(unmatched.found_delim)), )); db.span_label(unmatched.found_span, "incorrect close delimiter"); if let Some(sp) = unmatched.candidate_span { db.span_label(sp, "close delimiter possibly meant for this"); } if let Some(sp) = unmatched.unclosed_span { db.span_label(sp, "un-closed delimiter"); } db.buffer(&mut buffer); } Err(buffer) } } } /// Given a stream and the `ParseSess`, produces a parser. pub fn stream_to_parser<'a>( sess: &'a ParseSess, stream: TokenStream, subparser_name: Option<&'static str>, ) -> Parser<'a> { Parser::new(sess, stream, None, true, false, subparser_name) } /// Given a stream, the `ParseSess` and the base directory, produces a parser. /// /// Use this function when you are creating a parser from the token stream /// and also care about the current working directory of the parser (e.g., /// you are trying to resolve modules defined inside a macro invocation). 
/// /// # Note /// /// The main usage of this function is outside of rustc, for those who uses /// libsyntax as a library. Please do not remove this function while refactoring /// just because it is not used in rustc codebase! pub fn stream_to_parser_with_base_dir<'a>( sess: &'a ParseSess, stream: TokenStream, base_dir: Directory<'a>, ) -> Parser<'a> { Parser::new(sess, stream, Some(base_dir), true, false, None) } // NOTE(Centril): The following probably shouldn't be here but it acknowledges the // fact that architecturally, we are using parsing (read on below to understand why). pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> TokenStream { // A `Nonterminal` is often a parsed AST item. At this point we now // need to convert the parsed AST to an actual token stream, e.g. // un-parse it basically. // // Unfortunately there's not really a great way to do that in a // guaranteed lossless fashion right now. The fallback here is to just // stringify the AST node and reparse it, but this loses all span // information. // // As a result, some AST nodes are annotated with the token stream they // came from. Here we attempt to extract these lossless token streams // before we fall back to the stringification. 
let tokens = match *nt { Nonterminal::NtItem(ref item) => { prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) } Nonterminal::NtTraitItem(ref item) => { prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) } Nonterminal::NtImplItem(ref item) => { prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) } Nonterminal::NtIdent(ident, is_raw) => { Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into()) } Nonterminal::NtLifetime(ident) => { Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into()) } Nonterminal::NtTT(ref tt) => { Some(tt.clone().into()) } _ => None, }; // FIXME(#43081): Avoid this pretty-print + reparse hack let source = pprust::nonterminal_to_string(nt); let filename = FileName::macro_expansion_source_code(&source); let tokens_for_real = parse_stream_from_source_str(filename, source, sess, Some(span)); // During early phases of the compiler the AST could get modified // directly (e.g., attributes added or removed) and the internal cache // of tokens my not be invalidated or updated. Consequently if the // "lossless" token stream disagrees with our actual stringification // (which has historically been much more battle-tested) then we go // with the lossy stream anyway (losing span information). // // Note that the comparison isn't `==` here to avoid comparing spans, // but it *also* is a "probable" equality which is a pretty weird // definition. We mostly want to catch actual changes to the AST // like a `#[cfg]` being processed or some weird `macro_rules!` // expansion. // // What we *don't* want to catch is the fact that a user-defined // literal like `0xf` is stringified as `15`, causing the cached token // stream to not be literal `==` token-wise (ignoring spans) to the // token stream we got from stringification. // // Instead the "probably equal" check here is "does each token // recursively have the same discriminant?" 
We basically don't look at // the token values here and assume that such fine grained token stream // modifications, including adding/removing typically non-semantic // tokens such as extra braces and commas, don't happen. if let Some(tokens) = tokens { if tokens.probably_equal_for_proc_macro(&tokens_for_real) { return tokens } info!("cached tokens found, but they're not \"probably equal\", \ going with stringified version"); } return tokens_for_real } fn prepend_attrs( sess: &ParseSess, attrs: &[ast::Attribute], tokens: Option<&tokenstream::TokenStream>, span: syntax_pos::Span ) -> Option<tokenstream::TokenStream> { let tokens = tokens?; if attrs.len() == 0 { return Some(tokens.clone()) } let mut builder = tokenstream::TokenStreamBuilder::new(); for attr in attrs { assert_eq!(attr.style, ast::AttrStyle::Outer, "inner attributes should prevent cached tokens from existing"); let source = pprust::attribute_to_string(attr); let macro_filename = FileName::macro_expansion_source_code(&source); if attr.is_sugared_doc { let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span)); builder.push(stream); continue } // synthesize # [ $path $tokens ] manually here let mut brackets = tokenstream::TokenStreamBuilder::new(); // For simple paths, push the identifier directly if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() { let ident = attr.path.segments[0].ident; let token = token::Ident(ident.name, ident.as_str().starts_with("r#")); brackets.push(tokenstream::TokenTree::token(token, ident.span)); // ... and for more complicated paths, fall back to a reparse hack that // should eventually be removed. } else { let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span)); brackets.push(stream); } brackets.push(attr.tokens.clone()); // The span we list here for `#` and for `[ ... ]` are both wrong in // that it encompasses more than each token, but it hopefully is "good // enough" for now at least. 
builder.push(tokenstream::TokenTree::token(token::Pound, attr.span)); let delim_span = tokenstream::DelimSpan::from_single(attr.span); builder.push(tokenstream::TokenTree::Delimited( delim_span, token::DelimToken::Bracket, brackets.build().into())); } builder.push(tokens.clone()); Some(builder.build()) }
39.100239
100
0.650736
5012f10104319b6dbc31301342ce74424b84c4bd
2,750
//! All objects related to the auth flows defined by Spotify API use crate::{ custom_serde::{duration_second, space_separated_scopes}, ModelResult, }; use std::{ collections::HashSet, fs, io::{Read, Write}, path::Path, }; use chrono::{DateTime, Duration, Utc}; use serde::{Deserialize, Serialize}; /// Spotify access token information /// /// [Reference](https://developer.spotify.com/documentation/general/guides/authorization-guide/) #[derive(Clone, Debug, Serialize, Deserialize)] pub struct Token { /// An access token that can be provided in subsequent calls pub access_token: String, /// The time period for which the access token is valid. #[serde(with = "duration_second")] pub expires_in: Duration, /// The valid time for which the access token is available represented /// in ISO 8601 combined date and time. pub expires_at: Option<DateTime<Utc>>, /// A token that can be sent to the Spotify Accounts service /// in place of an authorization code pub refresh_token: Option<String>, /// A list of [scopes](https://developer.spotify.com/documentation/general/guides/scopes/) /// which have been granted for this `access_token` /// /// You may use the `scopes!` macro in /// [`rspotify-macros`](https://docs.rs/rspotify-macros) to build it at /// compile time easily. // The token response from spotify is singular, hence the rename to `scope` #[serde(default, with = "space_separated_scopes", rename = "scope")] pub scopes: HashSet<String>, } impl Default for Token { fn default() -> Self { Token { access_token: String::new(), expires_in: Duration::seconds(0), expires_at: Some(Utc::now()), refresh_token: None, scopes: HashSet::new(), } } } impl Token { /// Tries to initialize the token from a cache file. 
pub fn from_cache<T: AsRef<Path>>(path: T) -> ModelResult<Self> { let mut file = fs::File::open(path)?; let mut tok_str = String::new(); file.read_to_string(&mut tok_str)?; let tok = serde_json::from_str::<Token>(&tok_str)?; Ok(tok) } /// Saves the token information into its cache file. pub fn write_cache<T: AsRef<Path>>(&self, path: T) -> ModelResult<()> { let token_info = serde_json::to_string(&self)?; let mut file = fs::OpenOptions::new().write(true).create(true).open(path)?; file.set_len(0)?; file.write_all(token_info.as_bytes())?; Ok(()) } /// Check if the token is expired pub fn is_expired(&self) -> bool { self.expires_at .map_or(true, |x| Utc::now().timestamp() > x.timestamp()) } }
32.352941
96
0.631636
918f1eb8c09159f3473fe84fa01a3152a9d0d3d1
203
// EULER: Multiples of 3 and 5 // https://projecteuler.net/problem=1 fn main() { let sum = range(1i, 1000i).filter(|&x| {x % 3 == 0 || x % 5 == 0}).fold(0i, |a, b| a + b); println!("{}", sum); }
29
94
0.522167
76afe569c221b3b0f0793c0239c1579d179c680d
2,430
use std::path::{Path, PathBuf}; use std::{fs, io}; pub fn entries(bin: &Path) -> io::Result<impl Iterator<Item = (PathBuf, PathBuf)>> { Ok(bin .read_dir()? .filter_map(Result::ok) .map(|entry| entry.path()) .filter_map(|sym| sym.read_link().map(|dest| (sym, dest)).ok())) } pub fn iterate( bin: &Path, repos: Vec<PathBuf>, ) -> io::Result<impl Iterator<Item = (PathBuf, PathBuf)>> { Ok(bin .read_dir()? .filter_map(Result::ok) .map(|entry| entry.path()) .filter_map(|sym| sym.read_link().map(|dest| (sym, dest)).ok()) .filter(move |(_, dest)| repos.iter().any(|repo| dest.starts_with(repo)))) } pub fn link_path(bin: &Path, exe: &Path) -> io::Result<PathBuf> { exe.file_name() .map(|osname| bin.join(osname)) .ok_or(io::Error::new( io::ErrorKind::NotFound, "no file name found", )) } pub fn create(bin: &Path, exe: &Path) -> io::Result<PathBuf> { fs::create_dir_all(bin).ok(); let sym = link_path(bin, exe)?; { #[cfg(target_family = "unix")] { use std::os::unix::fs::symlink; symlink(exe, &sym) } #[cfg(target_family = "windows")] { use std::os::windows::fs::symlink_file; symlink_file(exe, &sym) } } .map(|_| sym) } pub fn deref_rec(path: &Path) -> PathBuf { match path.read_link() { Ok(dest) => deref_rec(&match dest.is_relative() { false => dest, true => path.parent().unwrap().join(dest), }), Err(_) => path.into(), } } fn is_executable(path: &Path) -> bool { let path = deref_rec(path); path.is_file() && { #[cfg(target_family = "unix")] { use std::os::unix::fs::MetadataExt; path.metadata() .map(|meta| meta.mode() & 0o100 > 0) .unwrap_or(false) } #[cfg(target_family = "windows")] { path.extension() .map(|ostyp| ostyp.to_str().map(|typ| typ == "exe" || typ == "bat")) .flatten() .unwrap_or(false) } } } pub fn executables(dir: &Path) -> io::Result<impl Iterator<Item = PathBuf>> { Ok(dir .read_dir()? .filter_map(Result::ok) .map(|entry| entry.path()) .filter(|path| is_executable(path))) }
27.613636
84
0.506584
56d726ed4e96e52c786f09853d2ef54a3910cd4b
684
// use ply_rs::{parser::Parser, ply::DefaultElement}; // use std::fs::File; // use std::path::Path; // impl GetSerPoint for Point { // fn ser_point(&self) -> SerPoint { // SerPoint { // x: self.x, // y: self.y, // z: self.z, // } // } // } // pub fn read<'a, P: AsRef<Path>>(path: P) { // unimplemented!() // let mut file = File::open(path).unwrap(); // let ply = Parser::<DefaultElement>::new().read_ply(&mut file).unwrap(); // let iter = ply.payload.into_iter(); // for (something, other_thing) in iter { // println!("{}", something); // println!("{:?}", other_thing); // } // }
26.307692
78
0.498538
db6f7d5d3ad5782d3b5d60dd14fea75203b9acea
2,472
// Copyright 2019 The Starlark in Rust Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! Implementation of `inspect` builtin.

use crate::values::Value;

// Registers the `inspect` builtin in the global environment; the wrapper
// simply forwards to `Value::inspect` on the argument.
starlark_module! { global =>
    /// Return some internals about the value.
    ///
    /// This function is to be used for debugging only, it's format is not specified,
    /// and may change any time.
    ///
    /// Examples:
    ///
    /// ```
    /// # use starlark::stdlib::starlark_default;
    /// # assert!(starlark_default("
    /// a = []
    /// 'List' in inspect(a).rust_type_name
    /// # ").unwrap());
    /// ```
    inspect(value, /) {
        Ok(Value::new(value.inspect()))
    }
}

#[cfg(test)]
mod test {
    use crate::eval::noload;
    use crate::stdlib::global_environment_for_repl_and_tests;
    use crate::syntax::dialect::Dialect;
    use crate::values::Immutable;
    use crate::values::TypedValue;
    use crate::values::Value;
    use std::iter;

    #[test]
    fn inspect() {
        // Minimal TypedValue whose `inspect_custom` supplies the value that
        // should surface as the `custom` field of `inspect`'s result.
        struct TestInspectable {}

        impl TypedValue for TestInspectable {
            type Holder = Immutable<Self>;
            const TYPE: &'static str = "test_inspectable";

            fn values_for_descendant_check_and_freeze<'a>(
                &'a self,
            ) -> Box<dyn Iterator<Item = Value>> {
                // No child values to visit.
                Box::new(iter::empty())
            }

            fn inspect_custom(&self) -> Value {
                Value::from("test test")
            }
        }

        let (mut env, type_values) = global_environment_for_repl_and_tests();
        env.set("ti", Value::new(TestInspectable {})).unwrap();
        // Evaluate a one-line program that reads `inspect(ti).custom`.
        let custom = noload::eval(
            &Default::default(),
            "test.sky",
            "inspect(ti).custom",
            Dialect::Bzl,
            &mut env,
            &type_values,
        )
        .unwrap();
        assert_eq!(
            "test test",
            custom.downcast_ref::<String>().unwrap().as_str()
        );
    }
}
29.082353
85
0.58212
e99e94c81da64cd6908c2dd1b206afe02f6ecf2c
5,022
//! De/Serialization of hexadecimal encoded bytes //! //! This modules is only available when using the `hex` feature of the crate. use crate::de::DeserializeAs; use crate::formats::{Format, Lowercase, Uppercase}; use crate::ser::SerializeAs; use serde::de::Error; use serde::{Deserialize, Deserializer, Serializer}; use std::borrow::Cow; use std::convert::{TryFrom, TryInto}; use std::marker::PhantomData; /// Serialize bytes as a hex string /// /// The type serializes a sequence of bytes as a hexadecimal string. /// It works on any type implementing `AsRef<[u8]>` for serialization and `From<Vec<u8>>` for deserialization. /// /// The format type parameter specifies if the hex string should use lower- or uppercase characters. /// Valid options are the types [`Lowercase`] and [`Uppercase`]. /// Deserialization always supports lower- and uppercase characters, even mixed in one string. /// /// # Example /// /// ```rust /// # #[cfg(feature = "macros")] { /// # use serde_derive::{Deserialize, Serialize}; /// # use serde_json::json; /// # use serde_with::serde_as; /// # /// #[serde_as] /// # #[derive(Debug, PartialEq, Eq)] /// #[derive(Deserialize, Serialize)] /// struct BytesLowercase( /// // Equivalent to serde_with::hex::Hex<serde_with::formats::Lowercase> /// #[serde_as(as = "serde_with::hex::Hex")] /// Vec<u8> /// ); /// /// #[serde_as] /// # #[derive(Debug, PartialEq, Eq)] /// #[derive(Deserialize, Serialize)] /// struct BytesUppercase( /// #[serde_as(as = "serde_with::hex::Hex<serde_with::formats::Uppercase>")] /// Vec<u8> /// ); /// /// let b = b"Hello World!"; /// /// // Hex with lowercase letters /// assert_eq!( /// json!("48656c6c6f20576f726c6421"), /// serde_json::to_value(BytesLowercase(b.to_vec())).unwrap() /// ); /// // Hex with uppercase letters /// assert_eq!( /// json!("48656C6C6F20576F726C6421"), /// serde_json::to_value(BytesUppercase(b.to_vec())).unwrap() /// ); /// /// // Serialization always work from lower- and uppercase characters, even mixed case. 
/// assert_eq!( /// BytesLowercase(vec![0x00, 0xaa, 0xbc, 0x99, 0xff]), /// serde_json::from_value(json!("00aAbc99FF")).unwrap() /// ); /// assert_eq!( /// BytesUppercase(vec![0x00, 0xaa, 0xbc, 0x99, 0xff]), /// serde_json::from_value(json!("00aAbc99FF")).unwrap() /// ); /// /// ///////////////////////////////////// /// // Arrays are supported in Rust 1.48+ /// /// # #[rustversion::since(1.48)] /// # fn test_array() { /// #[serde_as] /// # #[derive(Debug, PartialEq, Eq)] /// #[derive(Deserialize, Serialize)] /// struct ByteArray( /// // Equivalent to serde_with::hex::Hex<serde_with::formats::Lowercase> /// #[serde_as(as = "serde_with::hex::Hex")] /// [u8; 12] /// ); /// /// let b = b"Hello World!"; /// /// assert_eq!( /// json!("48656c6c6f20576f726c6421"), /// serde_json::to_value(ByteArray(b.clone())).unwrap() /// ); /// /// // Serialization always work from lower- and uppercase characters, even mixed case. /// assert_eq!( /// ByteArray([0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0xaa, 0xbc, 0x99, 0xff]), /// serde_json::from_value(json!("0011223344556677aAbc99FF")).unwrap() /// ); /// /// // Remember that the conversion may fail. 
(The following errors are specific to fixed-size arrays) /// let error_result: Result<ByteArray, _> = serde_json::from_value(json!("42")); // Too short /// error_result.unwrap_err(); /// /// let error_result: Result<ByteArray, _> = /// serde_json::from_value(json!("000000000000000000000000000000")); // Too long /// error_result.unwrap_err(); /// # }; /// # #[rustversion::before(1.48)] /// # fn test_array() {} /// # test_array(); /// # } /// ``` #[derive(Copy, Clone, Debug, Default)] pub struct Hex<FORMAT: Format = Lowercase>(PhantomData<FORMAT>); impl<T> SerializeAs<T> for Hex<Lowercase> where T: AsRef<[u8]>, { fn serialize_as<S>(source: &T, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_str(&hex::encode(source)) } } impl<T> SerializeAs<T> for Hex<Uppercase> where T: AsRef<[u8]>, { fn serialize_as<S>(source: &T, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_str(&hex::encode_upper(source)) } } impl<'de, T, FORMAT> DeserializeAs<'de, T> for Hex<FORMAT> where T: TryFrom<Vec<u8>>, FORMAT: Format, { fn deserialize_as<D>(deserializer: D) -> Result<T, D::Error> where D: Deserializer<'de>, { <Cow<'de, str> as Deserialize<'de>>::deserialize(deserializer) .and_then(|s| hex::decode(&*s).map_err(Error::custom)) .and_then(|vec: Vec<u8>| { let length = vec.len(); vec.try_into().map_err(|_e: T::Error| { Error::custom(format!( "Can't convert a Byte Vector of length {} to the output type.", length )) }) }) } }
31.3875
110
0.607726
8f0e0df19137bf319f7e8c1a5e89996c6713f0ae
40,810
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::PCR { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = "Possible values of the field `PS`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PSR { #[doc = "Internal pull-down resistor is enabled on the corresponding pin, if the corresponding Port Pull Enable Register bit is set."] _0, #[doc = "Internal pull-up resistor is enabled on the corresponding pin, if the corresponding Port Pull Enable Register bit is set."] _1, } impl PSR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { PSR::_0 => false, PSR::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> PSR { match value { false => PSR::_0, true => PSR::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == PSR::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == PSR::_1 } } #[doc = "Possible values of the field `PE`"] 
#[derive(Clone, Copy, Debug, PartialEq)] pub enum PER { #[doc = "Internal pull-up or pull-down resistor is not enabled on the corresponding pin."] _0, #[doc = "Internal pull-up or pull-down resistor is enabled on the corresponding pin, provided pin is configured as a digital input."] _1, } impl PER { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { PER::_0 => false, PER::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> PER { match value { false => PER::_0, true => PER::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == PER::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == PER::_1 } } #[doc = "Possible values of the field `SRE`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum SRER { #[doc = "Fast slew rate is configured on the corresponding pin, if pin is configured as a digital output."] _0, #[doc = "Slow slew rate is configured on the corresponding pin, if pin is configured as a digital output."] _1, } impl SRER { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { SRER::_0 => false, SRER::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> SRER { match value { false => SRER::_0, true => SRER::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == SRER::_0 } #[doc = "Checks if the value of 
the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == SRER::_1 } } #[doc = "Possible values of the field `PFE`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum PFER { #[doc = "Passive Input Filter is disabled on the corresponding pin."] _0, #[doc = "Passive Input Filter is enabled on the corresponding pin, provided pin is configured as a digital input. A low pass filter (10 MHz to 30 MHz bandwidth) is enabled on the digital input path. Disable the Passive Input Filter when supporting high speed interfaces (> 2 MHz) on the pin."] _1, } impl PFER { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { PFER::_0 => false, PFER::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> PFER { match value { false => PFER::_0, true => PFER::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == PFER::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == PFER::_1 } } #[doc = "Possible values of the field `ODE`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum ODER { #[doc = "Open Drain output is disabled on the corresponding pin."] _0, #[doc = "Open Drain output is enabled on the corresponding pin, provided pin is configured as a digital output."] _1, } impl ODER { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { ODER::_0 => false, ODER::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] 
#[inline] pub fn _from(value: bool) -> ODER { match value { false => ODER::_0, true => ODER::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == ODER::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == ODER::_1 } } #[doc = "Possible values of the field `DSE`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum DSER { #[doc = "Low drive strength is configured on the corresponding pin, if pin is configured as a digital output."] _0, #[doc = "High drive strength is configured on the corresponding pin, if pin is configured as a digital output."] _1, } impl DSER { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { DSER::_0 => false, DSER::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> DSER { match value { false => DSER::_0, true => DSER::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == DSER::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == DSER::_1 } } #[doc = "Possible values of the field `MUX`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum MUXR { #[doc = "Pin Disabled (Analog)."] _000, #[doc = "Alternative 1 (GPIO)."] _001, #[doc = "Alternative 2 (chip specific)."] _010, #[doc = "Alternative 3 (chip specific)."] _011, #[doc = "Alternative 4 (chip specific)."] _100, #[doc = "Alternative 5 (chip specific)."] _101, #[doc = "Alternative 6 (chip specific)."] _110, #[doc = "Alternative 7 (chip specific / JTAG / NMI)."] _111, } impl MUXR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { MUXR::_000 
=> 0, MUXR::_001 => 1, MUXR::_010 => 2, MUXR::_011 => 3, MUXR::_100 => 4, MUXR::_101 => 5, MUXR::_110 => 6, MUXR::_111 => 7, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> MUXR { match value { 0 => MUXR::_000, 1 => MUXR::_001, 2 => MUXR::_010, 3 => MUXR::_011, 4 => MUXR::_100, 5 => MUXR::_101, 6 => MUXR::_110, 7 => MUXR::_111, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `_000`"] #[inline] pub fn is_000(&self) -> bool { *self == MUXR::_000 } #[doc = "Checks if the value of the field is `_001`"] #[inline] pub fn is_001(&self) -> bool { *self == MUXR::_001 } #[doc = "Checks if the value of the field is `_010`"] #[inline] pub fn is_010(&self) -> bool { *self == MUXR::_010 } #[doc = "Checks if the value of the field is `_011`"] #[inline] pub fn is_011(&self) -> bool { *self == MUXR::_011 } #[doc = "Checks if the value of the field is `_100`"] #[inline] pub fn is_100(&self) -> bool { *self == MUXR::_100 } #[doc = "Checks if the value of the field is `_101`"] #[inline] pub fn is_101(&self) -> bool { *self == MUXR::_101 } #[doc = "Checks if the value of the field is `_110`"] #[inline] pub fn is_110(&self) -> bool { *self == MUXR::_110 } #[doc = "Checks if the value of the field is `_111`"] #[inline] pub fn is_111(&self) -> bool { *self == MUXR::_111 } } #[doc = "Possible values of the field `LK`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum LKR { #[doc = "Pin Control Register bits [15:0] are not locked."] _0, #[doc = "Pin Control Register bits [15:0] are locked and cannot be updated until the next System Reset."] _1, } impl LKR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { LKR::_0 => false, LKR::_1 => true, } } #[allow(missing_docs)] 
#[doc(hidden)] #[inline] pub fn _from(value: bool) -> LKR { match value { false => LKR::_0, true => LKR::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == LKR::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == LKR::_1 } } #[doc = "Possible values of the field `IRQC`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum IRQCR { #[doc = "Interrupt/DMA Request disabled."] _0000, #[doc = "DMA Request on rising edge."] _0001, #[doc = "DMA Request on falling edge."] _0010, #[doc = "DMA Request on either edge."] _0011, #[doc = "Interrupt when logic zero."] _1000, #[doc = "Interrupt on rising edge."] _1001, #[doc = "Interrupt on falling edge."] _1010, #[doc = "Interrupt on either edge."] _1011, #[doc = "Interrupt when logic one."] _1100, #[doc = r" Reserved"] _Reserved(u8), } impl IRQCR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { IRQCR::_0000 => 0, IRQCR::_0001 => 1, IRQCR::_0010 => 2, IRQCR::_0011 => 3, IRQCR::_1000 => 8, IRQCR::_1001 => 9, IRQCR::_1010 => 10, IRQCR::_1011 => 11, IRQCR::_1100 => 12, IRQCR::_Reserved(bits) => bits, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> IRQCR { match value { 0 => IRQCR::_0000, 1 => IRQCR::_0001, 2 => IRQCR::_0010, 3 => IRQCR::_0011, 8 => IRQCR::_1000, 9 => IRQCR::_1001, 10 => IRQCR::_1010, 11 => IRQCR::_1011, 12 => IRQCR::_1100, i => IRQCR::_Reserved(i), } } #[doc = "Checks if the value of the field is `_0000`"] #[inline] pub fn is_0000(&self) -> bool { *self == IRQCR::_0000 } #[doc = "Checks if the value of the field is `_0001`"] #[inline] pub fn is_0001(&self) -> bool { *self == IRQCR::_0001 } #[doc = "Checks if the value of the field is `_0010`"] #[inline] pub fn is_0010(&self) -> bool { *self == IRQCR::_0010 } #[doc = "Checks if the value of the field is `_0011`"] #[inline] pub fn is_0011(&self) -> bool { *self == IRQCR::_0011 } #[doc 
= "Checks if the value of the field is `_1000`"] #[inline] pub fn is_1000(&self) -> bool { *self == IRQCR::_1000 } #[doc = "Checks if the value of the field is `_1001`"] #[inline] pub fn is_1001(&self) -> bool { *self == IRQCR::_1001 } #[doc = "Checks if the value of the field is `_1010`"] #[inline] pub fn is_1010(&self) -> bool { *self == IRQCR::_1010 } #[doc = "Checks if the value of the field is `_1011`"] #[inline] pub fn is_1011(&self) -> bool { *self == IRQCR::_1011 } #[doc = "Checks if the value of the field is `_1100`"] #[inline] pub fn is_1100(&self) -> bool { *self == IRQCR::_1100 } } #[doc = "Possible values of the field `ISF`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum ISFR { #[doc = "Configured interrupt has not been detected."] _0, #[doc = "Configured interrupt has been detected. If pin is configured to generate a DMA request then the corresponding flag will be cleared automatically at the completion of the requested DMA transfer, otherwise the flag remains set until a logic one is written to that flag. 
If configured for a level sensitive interrupt that remains asserted then flag will set again immediately."] _1, } impl ISFR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { ISFR::_0 => false, ISFR::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> ISFR { match value { false => ISFR::_0, true => ISFR::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == ISFR::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == ISFR::_1 } } #[doc = "Values that can be written to the field `PS`"] pub enum PSW { #[doc = "Internal pull-down resistor is enabled on the corresponding pin, if the corresponding Port Pull Enable Register bit is set."] _0, #[doc = "Internal pull-up resistor is enabled on the corresponding pin, if the corresponding Port Pull Enable Register bit is set."] _1, } impl PSW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { PSW::_0 => false, PSW::_1 => true, } } } #[doc = r" Proxy"] pub struct _PSW<'a> { w: &'a mut W, } impl<'a> _PSW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: PSW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Internal pull-down resistor is enabled on the corresponding pin, if the corresponding Port Pull Enable Register bit is set."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(PSW::_0) } #[doc = "Internal pull-up resistor is enabled on the corresponding pin, if the corresponding Port Pull Enable Register bit is set."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(PSW::_1) } #[doc = r" Sets the field bit"] pub fn 
set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `PE`"] pub enum PEW { #[doc = "Internal pull-up or pull-down resistor is not enabled on the corresponding pin."] _0, #[doc = "Internal pull-up or pull-down resistor is enabled on the corresponding pin, provided pin is configured as a digital input."] _1, } impl PEW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { PEW::_0 => false, PEW::_1 => true, } } } #[doc = r" Proxy"] pub struct _PEW<'a> { w: &'a mut W, } impl<'a> _PEW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: PEW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Internal pull-up or pull-down resistor is not enabled on the corresponding pin."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(PEW::_0) } #[doc = "Internal pull-up or pull-down resistor is enabled on the corresponding pin, provided pin is configured as a digital input."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(PEW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 1; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `SRE`"] pub enum SREW { #[doc = "Fast slew rate is configured on the corresponding pin, if pin is configured as a digital 
output."] _0, #[doc = "Slow slew rate is configured on the corresponding pin, if pin is configured as a digital output."] _1, } impl SREW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { SREW::_0 => false, SREW::_1 => true, } } } #[doc = r" Proxy"] pub struct _SREW<'a> { w: &'a mut W, } impl<'a> _SREW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: SREW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Fast slew rate is configured on the corresponding pin, if pin is configured as a digital output."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(SREW::_0) } #[doc = "Slow slew rate is configured on the corresponding pin, if pin is configured as a digital output."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(SREW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 2; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `PFE`"] pub enum PFEW { #[doc = "Passive Input Filter is disabled on the corresponding pin."] _0, #[doc = "Passive Input Filter is enabled on the corresponding pin, provided pin is configured as a digital input. A low pass filter (10 MHz to 30 MHz bandwidth) is enabled on the digital input path. 
Disable the Passive Input Filter when supporting high speed interfaces (> 2 MHz) on the pin."] _1, } impl PFEW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { PFEW::_0 => false, PFEW::_1 => true, } } } #[doc = r" Proxy"] pub struct _PFEW<'a> { w: &'a mut W, } impl<'a> _PFEW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: PFEW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Passive Input Filter is disabled on the corresponding pin."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(PFEW::_0) } #[doc = "Passive Input Filter is enabled on the corresponding pin, provided pin is configured as a digital input. A low pass filter (10 MHz to 30 MHz bandwidth) is enabled on the digital input path. Disable the Passive Input Filter when supporting high speed interfaces (> 2 MHz) on the pin."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(PFEW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 4; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `ODE`"] pub enum ODEW { #[doc = "Open Drain output is disabled on the corresponding pin."] _0, #[doc = "Open Drain output is enabled on the corresponding pin, provided pin is configured as a digital output."] _1, } impl ODEW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { ODEW::_0 => false, ODEW::_1 => true, } } } #[doc = r" Proxy"] pub struct _ODEW<'a> { w: &'a mut W, } impl<'a> _ODEW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: ODEW) -> &'a mut W { { 
self.bit(variant._bits()) } } #[doc = "Open Drain output is disabled on the corresponding pin."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(ODEW::_0) } #[doc = "Open Drain output is enabled on the corresponding pin, provided pin is configured as a digital output."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(ODEW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 5; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `DSE`"] pub enum DSEW { #[doc = "Low drive strength is configured on the corresponding pin, if pin is configured as a digital output."] _0, #[doc = "High drive strength is configured on the corresponding pin, if pin is configured as a digital output."] _1, } impl DSEW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { DSEW::_0 => false, DSEW::_1 => true, } } } #[doc = r" Proxy"] pub struct _DSEW<'a> { w: &'a mut W, } impl<'a> _DSEW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: DSEW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Low drive strength is configured on the corresponding pin, if pin is configured as a digital output."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(DSEW::_0) } #[doc = "High drive strength is configured on the corresponding pin, if pin is configured as a digital output."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(DSEW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to 
the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 6; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `MUX`"] pub enum MUXW { #[doc = "Pin Disabled (Analog)."] _000, #[doc = "Alternative 1 (GPIO)."] _001, #[doc = "Alternative 2 (chip specific)."] _010, #[doc = "Alternative 3 (chip specific)."] _011, #[doc = "Alternative 4 (chip specific)."] _100, #[doc = "Alternative 5 (chip specific)."] _101, #[doc = "Alternative 6 (chip specific)."] _110, #[doc = "Alternative 7 (chip specific / JTAG / NMI)."] _111, } impl MUXW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { MUXW::_000 => 0, MUXW::_001 => 1, MUXW::_010 => 2, MUXW::_011 => 3, MUXW::_100 => 4, MUXW::_101 => 5, MUXW::_110 => 6, MUXW::_111 => 7, } } } #[doc = r" Proxy"] pub struct _MUXW<'a> { w: &'a mut W, } impl<'a> _MUXW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: MUXW) -> &'a mut W { { self.bits(variant._bits()) } } #[doc = "Pin Disabled (Analog)."] #[inline] pub fn _000(self) -> &'a mut W { self.variant(MUXW::_000) } #[doc = "Alternative 1 (GPIO)."] #[inline] pub fn _001(self) -> &'a mut W { self.variant(MUXW::_001) } #[doc = "Alternative 2 (chip specific)."] #[inline] pub fn _010(self) -> &'a mut W { self.variant(MUXW::_010) } #[doc = "Alternative 3 (chip specific)."] #[inline] pub fn _011(self) -> &'a mut W { self.variant(MUXW::_011) } #[doc = "Alternative 4 (chip specific)."] #[inline] pub fn _100(self) -> &'a mut W { self.variant(MUXW::_100) } #[doc = "Alternative 5 (chip specific)."] #[inline] pub fn _101(self) -> &'a mut W { self.variant(MUXW::_101) } #[doc = "Alternative 6 (chip specific)."] #[inline] pub fn _110(self) -> &'a mut W { self.variant(MUXW::_110) } #[doc = "Alternative 7 (chip specific / JTAG / NMI)."] #[inline] pub fn _111(self) -> &'a mut W { 
self.variant(MUXW::_111) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 7; const OFFSET: u8 = 8; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `LK`"] pub enum LKW { #[doc = "Pin Control Register bits [15:0] are not locked."] _0, #[doc = "Pin Control Register bits [15:0] are locked and cannot be updated until the next System Reset."] _1, } impl LKW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { LKW::_0 => false, LKW::_1 => true, } } } #[doc = r" Proxy"] pub struct _LKW<'a> { w: &'a mut W, } impl<'a> _LKW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: LKW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Pin Control Register bits [15:0] are not locked."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(LKW::_0) } #[doc = "Pin Control Register bits [15:0] are locked and cannot be updated until the next System Reset."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(LKW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 15; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `IRQC`"] pub enum IRQCW { #[doc = "Interrupt/DMA Request disabled."] _0000, #[doc = "DMA Request on rising edge."] _0001, #[doc = "DMA Request on falling edge."] _0010, #[doc = "DMA Request on either edge."] _0011, #[doc = "Interrupt when logic zero."] _1000, #[doc = "Interrupt on rising edge."] _1001, #[doc = "Interrupt on falling edge."] _1010, #[doc = 
"Interrupt on either edge."] _1011, #[doc = "Interrupt when logic one."] _1100, } impl IRQCW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { IRQCW::_0000 => 0, IRQCW::_0001 => 1, IRQCW::_0010 => 2, IRQCW::_0011 => 3, IRQCW::_1000 => 8, IRQCW::_1001 => 9, IRQCW::_1010 => 10, IRQCW::_1011 => 11, IRQCW::_1100 => 12, } } } #[doc = r" Proxy"] pub struct _IRQCW<'a> { w: &'a mut W, } impl<'a> _IRQCW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: IRQCW) -> &'a mut W { unsafe { self.bits(variant._bits()) } } #[doc = "Interrupt/DMA Request disabled."] #[inline] pub fn _0000(self) -> &'a mut W { self.variant(IRQCW::_0000) } #[doc = "DMA Request on rising edge."] #[inline] pub fn _0001(self) -> &'a mut W { self.variant(IRQCW::_0001) } #[doc = "DMA Request on falling edge."] #[inline] pub fn _0010(self) -> &'a mut W { self.variant(IRQCW::_0010) } #[doc = "DMA Request on either edge."] #[inline] pub fn _0011(self) -> &'a mut W { self.variant(IRQCW::_0011) } #[doc = "Interrupt when logic zero."] #[inline] pub fn _1000(self) -> &'a mut W { self.variant(IRQCW::_1000) } #[doc = "Interrupt on rising edge."] #[inline] pub fn _1001(self) -> &'a mut W { self.variant(IRQCW::_1001) } #[doc = "Interrupt on falling edge."] #[inline] pub fn _1010(self) -> &'a mut W { self.variant(IRQCW::_1010) } #[doc = "Interrupt on either edge."] #[inline] pub fn _1011(self) -> &'a mut W { self.variant(IRQCW::_1011) } #[doc = "Interrupt when logic one."] #[inline] pub fn _1100(self) -> &'a mut W { self.variant(IRQCW::_1100) } #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 15; const OFFSET: u8 = 16; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `ISF`"] pub enum ISFW { #[doc = "Configured interrupt has not been detected."] _0, #[doc = "Configured 
interrupt has been detected. If pin is configured to generate a DMA request then the corresponding flag will be cleared automatically at the completion of the requested DMA transfer, otherwise the flag remains set until a logic one is written to that flag. If configured for a level sensitive interrupt that remains asserted then flag will set again immediately."] _1, } impl ISFW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { ISFW::_0 => false, ISFW::_1 => true, } } } #[doc = r" Proxy"] pub struct _ISFW<'a> { w: &'a mut W, } impl<'a> _ISFW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: ISFW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Configured interrupt has not been detected."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(ISFW::_0) } #[doc = "Configured interrupt has been detected. If pin is configured to generate a DMA request then the corresponding flag will be cleared automatically at the completion of the requested DMA transfer, otherwise the flag remains set until a logic one is written to that flag. 
If configured for a level sensitive interrupt that remains asserted then flag will set again immediately."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(ISFW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 24; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bit 0 - Pull Select"] #[inline] pub fn ps(&self) -> PSR { PSR::_from({ const MASK: bool = true; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 1 - Pull Enable"] #[inline] pub fn pe(&self) -> PER { PER::_from({ const MASK: bool = true; const OFFSET: u8 = 1; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 2 - Slew Rate Enable"] #[inline] pub fn sre(&self) -> SRER { SRER::_from({ const MASK: bool = true; const OFFSET: u8 = 2; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 4 - Passive Filter Enable"] #[inline] pub fn pfe(&self) -> PFER { PFER::_from({ const MASK: bool = true; const OFFSET: u8 = 4; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 5 - Open Drain Enable"] #[inline] pub fn ode(&self) -> ODER { ODER::_from({ const MASK: bool = true; const OFFSET: u8 = 5; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 6 - Drive Strength Enable"] #[inline] pub fn dse(&self) -> DSER { DSER::_from({ const MASK: bool = true; const OFFSET: u8 = 6; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bits 8:10 - Pin Mux Control"] #[inline] pub fn mux(&self) -> MUXR { MUXR::_from({ const MASK: u8 = 7; const OFFSET: u8 = 8; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bit 15 - 
Lock Register"] #[inline] pub fn lk(&self) -> LKR { LKR::_from({ const MASK: bool = true; const OFFSET: u8 = 15; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bits 16:19 - Interrupt Configuration"] #[inline] pub fn irqc(&self) -> IRQCR { IRQCR::_from({ const MASK: u8 = 15; const OFFSET: u8 = 16; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bit 24 - Interrupt Status Flag"] #[inline] pub fn isf(&self) -> ISFR { ISFR::_from({ const MASK: bool = true; const OFFSET: u8 = 24; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bit 0 - Pull Select"] #[inline] pub fn ps(&mut self) -> _PSW { _PSW { w: self } } #[doc = "Bit 1 - Pull Enable"] #[inline] pub fn pe(&mut self) -> _PEW { _PEW { w: self } } #[doc = "Bit 2 - Slew Rate Enable"] #[inline] pub fn sre(&mut self) -> _SREW { _SREW { w: self } } #[doc = "Bit 4 - Passive Filter Enable"] #[inline] pub fn pfe(&mut self) -> _PFEW { _PFEW { w: self } } #[doc = "Bit 5 - Open Drain Enable"] #[inline] pub fn ode(&mut self) -> _ODEW { _ODEW { w: self } } #[doc = "Bit 6 - Drive Strength Enable"] #[inline] pub fn dse(&mut self) -> _DSEW { _DSEW { w: self } } #[doc = "Bits 8:10 - Pin Mux Control"] #[inline] pub fn mux(&mut self) -> _MUXW { _MUXW { w: self } } #[doc = "Bit 15 - Lock Register"] #[inline] pub fn lk(&mut self) -> _LKW { _LKW { w: self } } #[doc = "Bits 16:19 - Interrupt Configuration"] #[inline] pub fn irqc(&mut self) -> _IRQCW { _IRQCW { w: self } } #[doc = "Bit 24 - Interrupt Status Flag"] #[inline] pub fn isf(&mut self) -> _ISFW { _ISFW { w: self } } }
28.281358
388
0.522053
489d362be427660e6a5e864104128de066ab2a22
22,305
//! # The Rust Standard Library //! //! The Rust Standard Library is the foundation of portable Rust software, a //! set of minimal and battle-tested shared abstractions for the [broader Rust //! ecosystem][crates.io]. It offers core types, like [`Vec<T>`] and //! [`Option<T>`], library-defined [operations on language //! primitives](#primitives), [standard macros](#macros), [I/O] and //! [multithreading], among [many other things][other]. //! //! `std` is available to all Rust crates by default. Therefore, the //! standard library can be accessed in [`use`] statements through the path //! `std`, as in [`use std::env`]. //! //! # How to read this documentation //! //! If you already know the name of what you are looking for, the fastest way to //! find it is to use the <a href="#" onclick="focusSearchBar();">search //! bar</a> at the top of the page. //! //! Otherwise, you may want to jump to one of these useful sections: //! //! * [`std::*` modules](#modules) //! * [Primitive types](#primitives) //! * [Standard macros](#macros) //! * [The Rust Prelude] //! //! If this is your first time, the documentation for the standard library is //! written to be casually perused. Clicking on interesting things should //! generally lead you to interesting places. Still, there are important bits //! you don't want to miss, so read on for a tour of the standard library and //! its documentation! //! //! Once you are familiar with the contents of the standard library you may //! begin to find the verbosity of the prose distracting. At this stage in your //! development you may want to press the `[-]` button near the top of the //! page to collapse it into a more skimmable view. //! //! While you are looking at that `[-]` button also notice the `source` //! link. Rust's API documentation comes with the source code and you are //! encouraged to read it. The standard library source is generally high //! quality and a peek behind the curtains is often enlightening. //! //! 
# What is in the standard library documentation? //! //! First of all, The Rust Standard Library is divided into a number of focused //! modules, [all listed further down this page](#modules). These modules are //! the bedrock upon which all of Rust is forged, and they have mighty names //! like [`std::slice`] and [`std::cmp`]. Modules' documentation typically //! includes an overview of the module along with examples, and are a smart //! place to start familiarizing yourself with the library. //! //! Second, implicit methods on [primitive types] are documented here. This can //! be a source of confusion for two reasons: //! //! 1. While primitives are implemented by the compiler, the standard library //! implements methods directly on the primitive types (and it is the only //! library that does so), which are [documented in the section on //! primitives](#primitives). //! 2. The standard library exports many modules *with the same name as //! primitive types*. These define additional items related to the primitive //! type, but not the all-important methods. //! //! So for example there is a [page for the primitive type //! `i32`](primitive::i32) that lists all the methods that can be called on //! 32-bit integers (very useful), and there is a [page for the module //! `std::i32`] that documents the constant values [`MIN`] and [`MAX`] (rarely //! useful). //! //! Note the documentation for the primitives [`str`] and [`[T]`][prim@slice] (also //! called 'slice'). Many method calls on [`String`] and [`Vec<T>`] are actually //! calls to methods on [`str`] and [`[T]`][prim@slice] respectively, via [deref //! coercions][deref-coercions]. //! //! Third, the standard library defines [The Rust Prelude], a small collection //! of items - mostly traits - that are imported into every module of every //! crate. The traits in the prelude are pervasive, making the prelude //! documentation a good entry point to learning about the library. //! //! 
And finally, the standard library exports a number of standard macros, and //! [lists them on this page](#macros) (technically, not all of the standard //! macros are defined by the standard library - some are defined by the //! compiler - but they are documented here the same). Like the prelude, the //! standard macros are imported by default into all crates. //! //! # Contributing changes to the documentation //! //! Check out the rust contribution guidelines [here]( //! https://rustc-dev-guide.rust-lang.org/contributing.html#writing-documentation). //! The source for this documentation can be found on //! [GitHub](https://github.com/rust-lang/rust). //! To contribute changes, make sure you read the guidelines first, then submit //! pull-requests for your suggested changes. //! //! Contributions are appreciated! If you see a part of the docs that can be //! improved, submit a PR, or chat with us first on [Discord][rust-discord] //! #docs. //! //! # A Tour of The Rust Standard Library //! //! The rest of this crate documentation is dedicated to pointing out notable //! features of The Rust Standard Library. //! //! ## Containers and collections //! //! The [`option`] and [`result`] modules define optional and error-handling //! types, [`Option<T>`] and [`Result<T, E>`]. The [`iter`] module defines //! Rust's iterator trait, [`Iterator`], which works with the [`for`] loop to //! access collections. //! //! The standard library exposes three common ways to deal with contiguous //! regions of memory: //! //! * [`Vec<T>`] - A heap-allocated *vector* that is resizable at runtime. //! * [`[T; N]`][prim@array] - An inline *array* with a fixed size at compile time. //! * [`[T]`][prim@slice] - A dynamically sized *slice* into any other kind of contiguous //! storage, whether heap-allocated or not. //! //! Slices can only be handled through some kind of *pointer*, and as such come //! in many flavors such as: //! //! * `&[T]` - *shared slice* //! 
* `&mut [T]` - *mutable slice* //! * [`Box<[T]>`][owned slice] - *owned slice* //! //! [`str`], a UTF-8 string slice, is a primitive type, and the standard library //! defines many methods for it. Rust [`str`]s are typically accessed as //! immutable references: `&str`. Use the owned [`String`] for building and //! mutating strings. //! //! For converting to strings use the [`format!`] macro, and for converting from //! strings use the [`FromStr`] trait. //! //! Data may be shared by placing it in a reference-counted box or the [`Rc`] //! type, and if further contained in a [`Cell`] or [`RefCell`], may be mutated //! as well as shared. Likewise, in a concurrent setting it is common to pair an //! atomically-reference-counted box, [`Arc`], with a [`Mutex`] to get the same //! effect. //! //! The [`collections`] module defines maps, sets, linked lists and other //! typical collection types, including the common [`HashMap<K, V>`]. //! //! ## Platform abstractions and I/O //! //! Besides basic data types, the standard library is largely concerned with //! abstracting over differences in common platforms, most notably Windows and //! Unix derivatives. //! //! Common types of I/O, including [files], [TCP], [UDP], are defined in the //! [`io`], [`fs`], and [`net`] modules. //! //! The [`thread`] module contains Rust's threading abstractions. [`sync`] //! contains further primitive shared memory types, including [`atomic`] and //! [`mpsc`], which contains the channel types for message passing. //! //! [I/O]: io //! [`MIN`]: i32::MIN //! [`MAX`]: i32::MAX //! [page for the module `std::i32`]: crate::i32 //! [TCP]: net::TcpStream //! [The Rust Prelude]: prelude //! [UDP]: net::UdpSocket //! [`Arc`]: sync::Arc //! [owned slice]: boxed //! [`Cell`]: cell::Cell //! [`FromStr`]: str::FromStr //! [`HashMap<K, V>`]: collections::HashMap //! [`Mutex`]: sync::Mutex //! [`Option<T>`]: option::Option //! [`Rc`]: rc::Rc //! [`RefCell`]: cell::RefCell //! 
[`Result<T, E>`]: result::Result //! [`Vec<T>`]: vec::Vec //! [`atomic`]: sync::atomic //! [`for`]: ../book/ch03-05-control-flow.html#looping-through-a-collection-with-for //! [`str`]: prim@str //! [`mpsc`]: sync::mpsc //! [`std::cmp`]: cmp //! [`std::slice`]: mod@slice //! [`use std::env`]: env/index.html //! [`use`]: ../book/ch07-02-defining-modules-to-control-scope-and-privacy.html //! [crates.io]: https://crates.io //! [deref-coercions]: ../book/ch15-02-deref.html#implicit-deref-coercions-with-functions-and-methods //! [files]: fs::File //! [multithreading]: thread //! [other]: #what-is-in-the-standard-library-documentation //! [primitive types]: ../book/ch03-02-data-types.html //! [rust-discord]: https://discord.gg/rust-lang //! [array]: prim@array //! [slice]: prim@slice #![cfg_attr(not(feature = "restricted-std"), stable(feature = "rust1", since = "1.0.0"))] #![cfg_attr(feature = "restricted-std", unstable(feature = "restricted_std", issue = "none"))] #![doc( html_playground_url = "https://play.rust-lang.org/", issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/", test(no_crate_inject, attr(deny(warnings))), test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))) )] #![cfg_attr( not(bootstrap), doc(cfg_hide( not(test), not(any(test, bootstrap)), no_global_oom_handling, not(no_global_oom_handling) )) )] // Don't link to std. We are std. 
#![no_std] #![warn(deprecated_in_future)] #![warn(missing_docs)] #![warn(missing_debug_implementations)] #![allow(explicit_outlives_requirements)] #![allow(unused_lifetimes)] // Tell the compiler to link to either panic_abort or panic_unwind #![needs_panic_runtime] // std may use features in a platform-specific way #![allow(unused_features)] #![feature(rustc_allow_const_fn_unstable)] #![cfg_attr(test, feature(internal_output_capture, print_internals, update_panic_count))] #![cfg_attr( all(target_vendor = "fortanix", target_env = "sgx"), feature(slice_index_methods, coerce_unsized, sgx_platform) )] #![deny(rustc::existing_doc_keyword)] // std is implemented with unstable features, many of which are internal // compiler details that will never be stable // NB: the following list is sorted to minimize merge conflicts. #![feature(alloc_error_handler)] #![feature(alloc_layout_extra)] #![feature(allocator_api)] #![feature(allocator_internals)] #![feature(allow_internal_unsafe)] #![feature(allow_internal_unstable)] #![feature(arbitrary_self_types)] #![feature(array_error_internals)] #![feature(assert_matches)] #![feature(associated_type_bounds)] #![feature(async_stream)] #![feature(atomic_mut_ptr)] #![feature(auto_traits)] #![feature(bench_black_box)] #![feature(bool_to_option)] #![feature(box_syntax)] #![feature(c_unwind)] #![feature(c_variadic)] #![feature(cfg_accessible)] #![feature(cfg_eval)] #![feature(cfg_target_has_atomic)] #![feature(cfg_target_thread_local)] #![feature(char_error_internals)] #![feature(char_internals)] #![cfg_attr(not(bootstrap), feature(concat_bytes))] #![feature(concat_idents)] #![feature(const_fn_floating_point_arithmetic)] #![feature(const_fn_fn_ptr_basics)] #![feature(const_fn_trait_bound)] #![feature(const_format_args)] #![feature(const_io_structs)] #![feature(const_ip)] #![feature(const_ipv4)] #![feature(const_ipv6)] #![feature(const_option)] #![feature(const_mut_refs)] #![feature(const_socketaddr)] #![feature(const_trait_impl)] 
#![feature(container_error_extra)] #![feature(core_intrinsics)] #![feature(core_panic)] #![feature(custom_test_frameworks)] #![feature(decl_macro)] #![feature(doc_cfg)] #![feature(doc_cfg_hide)] #![feature(rustdoc_internals)] #![feature(doc_masked)] #![feature(doc_notable_trait)] #![feature(dropck_eyepatch)] #![feature(duration_checked_float)] #![feature(duration_constants)] #![feature(edition_panic)] #![feature(exact_size_is_empty)] #![feature(exhaustive_patterns)] #![feature(extend_one)] #![feature(fn_traits)] #![feature(float_minimum_maximum)] #![feature(format_args_nl)] #![feature(gen_future)] #![feature(generator_trait)] #![feature(get_mut_unchecked)] #![feature(hashmap_internals)] #![feature(int_error_internals)] #![feature(integer_atomics)] #![feature(int_log)] #![feature(into_future)] #![feature(intra_doc_pointers)] #![feature(lang_items)] #![feature(linkage)] #![feature(log_syntax)] #![feature(map_try_insert)] #![feature(maybe_uninit_slice)] #![feature(maybe_uninit_uninit_array)] #![feature(maybe_uninit_write_slice)] #![feature(min_specialization)] #![feature(mixed_integer_ops)] #![feature(must_not_suspend)] #![feature(needs_panic_runtime)] #![feature(negative_impls)] #![feature(never_type)] #![feature(new_uninit)] #![feature(nll)] #![feature(nonnull_slice_from_raw_parts)] #![feature(once_cell)] #![feature(panic_info_message)] #![feature(panic_internals)] #![feature(panic_can_unwind)] #![feature(panic_unwind)] #![feature(pin_static_ref)] #![feature(portable_simd)] #![feature(prelude_import)] #![feature(ptr_as_uninit)] #![feature(ptr_internals)] #![feature(rustc_attrs)] #![feature(rustc_private)] #![feature(saturating_int_impl)] #![feature(slice_concat_ext)] #![feature(slice_internals)] #![feature(slice_ptr_get)] #![feature(slice_ptr_len)] #![feature(staged_api)] #![feature(std_internals)] #![feature(stdsimd)] #![feature(stmt_expr_attributes)] #![feature(str_internals)] #![feature(test)] #![feature(thread_local)] #![feature(thread_local_internals)] 
#![feature(toowned_clone_into)] #![feature(total_cmp)] #![feature(trace_macros)] #![feature(try_blocks)] #![feature(try_reserve_kind)] #![feature(unboxed_closures)] #![feature(unwrap_infallible)] #![feature(vec_into_raw_parts)] // NB: the above list is sorted to minimize merge conflicts. #![default_lib_allocator] // Explicitly import the prelude. The compiler uses this same unstable attribute // to import the prelude implicitly when building crates that depend on std. #[prelude_import] #[allow(unused)] use prelude::v1::*; // Access to Bencher, etc. #[cfg(test)] extern crate test; #[allow(unused_imports)] // macros from `alloc` are not used on all platforms #[macro_use] extern crate alloc as alloc_crate; #[doc(masked)] #[allow(unused_extern_crates)] extern crate libc; // We always need an unwinder currently for backtraces #[doc(masked)] #[allow(unused_extern_crates)] extern crate unwind; // During testing, this crate is not actually the "real" std library, but rather // it links to the real std library, which was compiled from this same source // code. So any lang items std defines are conditionally excluded (or else they // would generate duplicate lang item errors), and any globals it defines are // _not_ the globals used by "real" std. So this import, defined only during // testing gives test-std access to real-std lang items and globals. See #2912 #[cfg(test)] extern crate std as realstd; // The standard macros that are not built-in to the compiler. 
#[macro_use] mod macros; // The Rust prelude pub mod prelude; // Public module declarations and re-exports #[stable(feature = "rust1", since = "1.0.0")] pub use alloc_crate::borrow; #[stable(feature = "rust1", since = "1.0.0")] pub use alloc_crate::boxed; #[stable(feature = "rust1", since = "1.0.0")] pub use alloc_crate::fmt; #[stable(feature = "rust1", since = "1.0.0")] pub use alloc_crate::format; #[stable(feature = "rust1", since = "1.0.0")] pub use alloc_crate::rc; #[stable(feature = "rust1", since = "1.0.0")] pub use alloc_crate::slice; #[stable(feature = "rust1", since = "1.0.0")] pub use alloc_crate::str; #[stable(feature = "rust1", since = "1.0.0")] pub use alloc_crate::string; #[stable(feature = "rust1", since = "1.0.0")] pub use alloc_crate::vec; #[stable(feature = "rust1", since = "1.0.0")] pub use core::any; #[stable(feature = "simd_arch", since = "1.27.0")] // The `no_inline`-attribute is required to make the documentation of all // targets available. // See https://github.com/rust-lang/rust/pull/57808#issuecomment-457390549 for // more information. 
#[doc(no_inline)] // Note (#82861): required for correct documentation pub use core::arch; #[stable(feature = "core_array", since = "1.36.0")] pub use core::array; #[stable(feature = "rust1", since = "1.0.0")] pub use core::cell; #[stable(feature = "rust1", since = "1.0.0")] pub use core::char; #[stable(feature = "rust1", since = "1.0.0")] pub use core::clone; #[stable(feature = "rust1", since = "1.0.0")] pub use core::cmp; #[stable(feature = "rust1", since = "1.0.0")] pub use core::convert; #[stable(feature = "rust1", since = "1.0.0")] pub use core::default; #[stable(feature = "futures_api", since = "1.36.0")] pub use core::future; #[stable(feature = "rust1", since = "1.0.0")] pub use core::hash; #[stable(feature = "core_hint", since = "1.27.0")] pub use core::hint; #[stable(feature = "i128", since = "1.26.0")] #[allow(deprecated, deprecated_in_future)] pub use core::i128; #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated, deprecated_in_future)] pub use core::i16; #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated, deprecated_in_future)] pub use core::i32; #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated, deprecated_in_future)] pub use core::i64; #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated, deprecated_in_future)] pub use core::i8; #[stable(feature = "rust1", since = "1.0.0")] pub use core::intrinsics; #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated, deprecated_in_future)] pub use core::isize; #[stable(feature = "rust1", since = "1.0.0")] pub use core::iter; #[stable(feature = "rust1", since = "1.0.0")] pub use core::marker; #[stable(feature = "rust1", since = "1.0.0")] pub use core::mem; #[stable(feature = "rust1", since = "1.0.0")] pub use core::ops; #[stable(feature = "rust1", since = "1.0.0")] pub use core::option; #[stable(feature = "pin", since = "1.33.0")] pub use core::pin; #[stable(feature = "rust1", since = "1.0.0")] pub use core::ptr; #[stable(feature = "rust1", since = 
"1.0.0")] pub use core::result; #[unstable(feature = "portable_simd", issue = "86656")] pub use core::simd; #[unstable(feature = "async_stream", issue = "79024")] pub use core::stream; #[stable(feature = "i128", since = "1.26.0")] #[allow(deprecated, deprecated_in_future)] pub use core::u128; #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated, deprecated_in_future)] pub use core::u16; #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated, deprecated_in_future)] pub use core::u32; #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated, deprecated_in_future)] pub use core::u64; #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated, deprecated_in_future)] pub use core::u8; #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated, deprecated_in_future)] pub use core::usize; pub mod f32; pub mod f64; #[macro_use] pub mod thread; pub mod ascii; pub mod backtrace; pub mod collections; pub mod env; pub mod error; pub mod ffi; pub mod fs; pub mod io; pub mod net; pub mod num; pub mod os; pub mod panic; pub mod path; pub mod process; pub mod sync; pub mod time; #[unstable(feature = "once_cell", issue = "74465")] pub mod lazy; #[stable(feature = "futures_api", since = "1.36.0")] pub mod task { //! Types and Traits for working with asynchronous tasks. 
#[doc(inline)] #[stable(feature = "futures_api", since = "1.36.0")] pub use core::task::*; #[doc(inline)] #[stable(feature = "wake_trait", since = "1.51.0")] pub use alloc::task::*; } // The runtime entry point and a few unstable public functions used by the // compiler #[macro_use] pub mod rt; // Platform-abstraction modules mod sys; mod sys_common; pub mod alloc; // Private support modules mod panicking; #[path = "../../backtrace/src/lib.rs"] #[allow(dead_code, unused_attributes)] mod backtrace_rs; #[stable(feature = "simd_x86", since = "1.27.0")] pub use std_detect::is_x86_feature_detected; #[doc(hidden)] #[unstable(feature = "stdsimd", issue = "48556")] pub use std_detect::*; #[unstable(feature = "stdsimd", issue = "48556")] pub use std_detect::{ is_aarch64_feature_detected, is_arm_feature_detected, is_mips64_feature_detected, is_mips_feature_detected, is_powerpc64_feature_detected, is_powerpc_feature_detected, is_riscv_feature_detected, }; // Re-export macros defined in libcore. #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated, deprecated_in_future)] pub use core::{ assert_eq, assert_ne, debug_assert, debug_assert_eq, debug_assert_ne, matches, r#try, todo, unimplemented, unreachable, write, writeln, }; // Re-export built-in macros defined through libcore. 
#[stable(feature = "builtin_macro_prelude", since = "1.38.0")] #[allow(deprecated)] pub use core::{ assert, assert_matches, cfg, column, compile_error, concat, concat_idents, const_format_args, env, file, format_args, format_args_nl, include, include_bytes, include_str, line, log_syntax, module_path, option_env, stringify, trace_macros, }; #[unstable( feature = "concat_bytes", issue = "87555", reason = "`concat_bytes` is not stable enough for use and is subject to change" )] #[cfg(not(bootstrap))] pub use core::concat_bytes; #[stable(feature = "core_primitive", since = "1.43.0")] pub use core::primitive; // Include a number of private modules that exist solely to provide // the rustdoc documentation for primitive types. Using `include!` // because rustdoc only looks for these modules at the crate level. include!("primitive_docs.rs"); // Include a number of private modules that exist solely to provide // the rustdoc documentation for the existing keywords. Using `include!` // because rustdoc only looks for these modules at the crate level. include!("keyword_docs.rs"); // This is required to avoid an unstable error when `restricted-std` is not // enabled. The use of #![feature(restricted_std)] in rustc-std-workspace-std // is unconditional, so the unstable feature needs to be defined somewhere. #[unstable(feature = "restricted_std", issue = "none")] mod __restricted_std_workaround {} mod sealed { /// This trait being unreachable from outside the crate /// prevents outside implementations of our extension traits. /// This allows adding more trait methods in the future. #[unstable(feature = "sealed", issue = "none")] pub trait Sealed {} }
36.505728
101
0.703026
235be6b0db512ae5cd1890f7503136803f26b541
1,825
use gcores_rss::{get, Channel, Param}; use lambda_runtime::{handler_fn, Context, Error}; use log::LevelFilter; use rusoto_core::{ByteStream, Region}; use rusoto_s3::{PutObjectRequest, S3Client, S3}; use serde::{Deserialize, Serialize}; use simple_error::SimpleError; use simple_logger::SimpleLogger; use std::error::Error as SError; #[derive(Deserialize)] struct Request { #[serde(rename = "storage_param")] s3_param: S3Param, channel: Channel, param: Param, } #[derive(Deserialize)] struct S3Param { bucket: String, key: String, acl: Option<String>, content_type: Option<String>, } #[derive(Serialize)] struct Response { req_id: String, } #[tokio::main] async fn main() -> Result<(), Error> { SimpleLogger::new() .with_level(LevelFilter::Info) .init() .unwrap(); lambda_runtime::run(handler_fn(fetch_save)).await?; Ok(()) } fn to_simple(e: Box<dyn SError>) -> SimpleError { SimpleError::new(e.to_string()) } pub(crate) async fn fetch_save(event: Request, ctx: Context) -> Result<Response, SimpleError> { let Request { s3_param, param, channel, } = event; let xml: String = get(param, channel).await.map_err(to_simple)?; save_to_s3(s3_param, xml.to_string()).map_err(to_simple)?; Ok(Response { req_id: ctx.request_id, }) } fn save_to_s3(param: S3Param, val: String) -> Result<(), Box<dyn SError>> { let S3Param { acl, bucket, key, content_type, } = param; S3Client::new(Region::UsEast1) .put_object(PutObjectRequest { acl, body: Some(ByteStream::from(val.into_bytes())), bucket, key, content_type, ..Default::default() }) .sync()?; Ok(()) }
23.101266
95
0.610959
e2e81d53a7a9a6748362799c0c325df48534dcb9
16,515
use std::borrow::Cow; use std::sync::{Mutex, RwLock, RwLockReadGuard}; use std::any::{Any, TypeId}; use std::result::Result as StdResult; use std::string::String as StdString; use std::usize; use base::metadata::{Metadata, MetadataEnv}; use base::symbol::{Name, Symbol, SymbolRef}; use base::types::{Alias, AliasData, ArcType, Generic, Type, Kind, KindEnv, TypeEnv, PrimitiveEnv, ArcKind}; use base::fnv::FnvMap; use macros::MacroEnv; use {Error, Result}; use types::*; use interner::{Interner, InternedStr}; use gc::{Gc, GcPtr, Traverseable, Move}; use compiler::{CompiledFunction, Variable, CompilerEnv}; use api::IO; use lazy::Lazy; use value::BytecodeFunction; pub use value::{ClosureDataDef, Userdata}; pub use value::Value;//FIXME Value should not be exposed pub use thread::{Thread, RootedThread, Status, Root, RootStr, RootedValue}; fn new_bytecode(gc: &mut Gc, vm: &GlobalVmState, f: CompiledFunction) -> Result<GcPtr<BytecodeFunction>> { let CompiledFunction { id, args, max_stack_size, instructions, inner_functions, strings, module_globals, records, .. 
} = f; let fs = try!(inner_functions.into_iter() .map(|inner| new_bytecode(gc, vm, inner)) .collect()); let globals = module_globals.into_iter() .map(|index| vm.env.read().unwrap().globals[index.as_ref()].value) .collect(); let records = try!(records.into_iter() .map(|vec| { vec.into_iter() .map(|field| Ok(try!(vm.interner.write().unwrap().intern(gc, field.as_ref())))) .collect::<Result<_>>() }) .collect()); gc.alloc(Move(BytecodeFunction { name: id, args: args, max_stack_size: max_stack_size, instructions: instructions, inner_functions: fs, strings: strings, globals: globals, records: records, })) } #[derive(Debug)] pub struct Global { pub id: Symbol, pub typ: ArcType, pub metadata: Metadata, pub value: Value, } impl Traverseable for Global { fn traverse(&self, gc: &mut Gc) { self.value.traverse(gc); } } pub struct GlobalVmState { env: RwLock<VmEnv>, generics: RwLock<FnvMap<StdString, ArcType>>, typeids: RwLock<FnvMap<TypeId, ArcType>>, interner: RwLock<Interner>, macros: MacroEnv, // FIXME These fields should not be public pub gc: Mutex<Gc>, // List of all generation 0 threads (ie, threads allocated by the global gc). when doing a // generation 0 sweep these threads are scanned as generation 0 values may be refered to by any // thread pub generation_0_threads: RwLock<Vec<GcPtr<Thread>>>, } impl Traverseable for GlobalVmState { fn traverse(&self, gc: &mut Gc) { for g in self.env.read().unwrap().globals.values() { g.traverse(gc); } // Also need to check the interned string table self.interner.read().unwrap().traverse(gc); self.generation_0_threads.read().unwrap().traverse(gc); } } /// A borrowed structure which implements `CompilerEnv`, `TypeEnv` and `KindEnv` allowing the /// typechecker and compiler to lookup things in the virtual machine. 
#[derive(Debug)] pub struct VmEnv { pub type_infos: TypeInfos, pub globals: FnvMap<StdString, Global>, } impl CompilerEnv for VmEnv { fn find_var(&self, id: &Symbol) -> Option<Variable<Symbol>> { self.globals .get(id.as_ref()) .map(|g| Variable::Global(g.id.clone())) .or_else(|| self.type_infos.find_var(id)) } } impl KindEnv for VmEnv { fn find_kind(&self, type_name: &SymbolRef) -> Option<ArcKind> { self.type_infos .find_kind(type_name) } } impl TypeEnv for VmEnv { fn find_type(&self, id: &SymbolRef) -> Option<&ArcType> { self.globals .get(AsRef::<str>::as_ref(id)) .map(|g| &g.typ) .or_else(|| { self.type_infos .id_to_type .values() .filter_map(|alias| { alias.typ .as_ref() .and_then(|typ| { match **typ { Type::Variants(ref ctors) => { ctors.iter().find(|ctor| *ctor.0 == *id).map(|t| &t.1) } _ => None, } }) }) .next() .map(|ctor| ctor) }) } fn find_type_info(&self, id: &SymbolRef) -> Option<&Alias<Symbol, ArcType>> { self.type_infos .find_type_info(id) } fn find_record(&self, fields: &[Symbol]) -> Option<(&ArcType, &ArcType)> { self.type_infos.find_record(fields) } } impl PrimitiveEnv for VmEnv { fn get_bool(&self) -> &ArcType { self.find_type_info("std.types.Bool") .ok() .and_then(|alias| match alias { Cow::Borrowed(alias) => alias.typ.as_ref(), Cow::Owned(_) => panic!("Expected to be able to retrieve a borrowed bool type"), }) .expect("std.types.Bool") } } impl MetadataEnv for VmEnv { fn get_metadata(&self, id: &Symbol) -> Option<&Metadata> { self.globals .get(AsRef::<str>::as_ref(id)) .map(|g| &g.metadata) } } fn map_cow_option<T, U, F>(cow: Cow<T>, f: F) -> Option<Cow<U>> where T: Clone, U: Clone, F: FnOnce(&T) -> Option<&U>, { match cow { Cow::Borrowed(b) => f(b).map(Cow::Borrowed), Cow::Owned(o) => f(&o).map(|u| Cow::Owned(u.clone())), } } impl VmEnv { pub fn find_type_info(&self, name: &str) -> Result<Cow<Alias<Symbol, ArcType>>> { let name = Name::new(name); let module_str = name.module().as_str(); if module_str == "" { return match 
self.type_infos.id_to_type.get(name.as_str()) { Some(alias) => Ok(Cow::Borrowed(alias)), None => Err(Error::UndefinedBinding(name.as_str().into())), }; } let (_, typ) = try!(self.get_binding(name.module().as_str())); let maybe_type_info = map_cow_option(typ.clone(), |typ| { let field_name = name.name(); typ.type_field_iter() .find(|field| field.name.as_ref() == field_name.as_str()) .map(|field| &field.typ) }); maybe_type_info.ok_or_else(move || { Error::UndefinedField(typ.into_owned(), name.name().as_str().into()) }) } pub fn get_binding(&self, name: &str) -> Result<(Value, Cow<ArcType>)> { use base::instantiate; let globals = &self.globals; let mut module = Name::new(name); let global; // Try to find a global by successively reducing the module path // Input: "x.y.z.w" // Test: "x.y.z" // Test: "x.y" // Test: "x" // Test: -> Error loop { if module.as_str() == "" { return Err(Error::UndefinedBinding(name.into())); } if let Some(g) = globals.get(module.as_str()) { global = g; break; } module = module.module(); } let remaining_offset = module.as_str().len() + 1;//Add 1 byte for the '.' if remaining_offset >= name.len() { // No fields left return Ok((global.value, Cow::Borrowed(&global.typ))); } let remaining_fields = Name::new(&name[remaining_offset..]); let mut typ = Cow::Borrowed(&global.typ); let mut value = global.value; for mut field_name in remaining_fields.components() { if field_name.starts_with('(') && field_name.ends_with(')') { field_name = &field_name[1..field_name.len() - 1]; } else if field_name.chars().any(|c| "+-*/&|=<>".chars().any(|x| x == c)) { return Err(Error::Message(format!("Operators cannot be used as fields \ directly. To access an operator field, \ enclose the operator with parentheses \ before passing it in. 
(test.(+) instead of \ test.+)"))); } typ = match typ { Cow::Borrowed(typ) => instantiate::remove_aliases_cow(self, typ), Cow::Owned(typ) => Cow::Owned(instantiate::remove_aliases(self, typ)), }; // HACK Can't return the data directly due to the use of cow on the type let next_type = map_cow_option(typ.clone(), |typ| { typ.field_iter() .enumerate() .find(|&(_, field)| field.name.as_ref() == field_name) .map(|(index, field)| { match value { Value::Data(data) => { value = data.fields[index]; &field.typ } _ => panic!("Unexpected value {:?}", value), } }) }); typ = try!(next_type.ok_or_else(move || { Error::UndefinedField(typ.into_owned(), field_name.into()) })); } Ok((value, typ)) } pub fn get_metadata(&self, name_str: &str) -> Result<&Metadata> { let globals = &self.globals; let name = Name::new(name_str); let mut components = name.components(); let global = match components.next() { Some(comp) => { try!(globals.get(comp) .or_else(|| { components = name.name().components(); globals.get(name.module().as_str()) }) .ok_or_else(|| Error::MetadataDoesNotExist(name_str.into()))) } None => return Err(Error::MetadataDoesNotExist(name_str.into())), }; let mut metadata = &global.metadata; for field_name in components { metadata = try!(metadata.module .get(field_name) .ok_or_else(|| Error::MetadataDoesNotExist(name_str.into()))); } Ok(metadata) } } impl GlobalVmState { /// Creates a new virtual machine pub fn new() -> GlobalVmState { let mut vm = GlobalVmState { env: RwLock::new(VmEnv { globals: FnvMap::default(), type_infos: TypeInfos::new(), }), generics: RwLock::new(FnvMap::default()), typeids: RwLock::new(FnvMap::default()), interner: RwLock::new(Interner::new()), gc: Mutex::new(Gc::new(0, usize::MAX)), macros: MacroEnv::new(), generation_0_threads: RwLock::new(Vec::new()), }; vm.add_types() .unwrap(); vm } fn add_types(&mut self) -> StdResult<(), (TypeId, ArcType)> { use api::generic::A; use api::Generic; fn add_type<T: Any>(ids: &mut FnvMap<TypeId, ArcType>, env: &mut 
VmEnv, name: &str, typ: ArcType) { ids.insert(TypeId::of::<T>(), typ); // Insert aliases so that `find_info` can retrieve information about the primitives env.type_infos.id_to_type.insert(name.into(), Alias::from(AliasData { name: Symbol::from(name), args: Vec::new(), typ: None, })); } { let ids = self.typeids.get_mut().unwrap(); let env = self.env.get_mut().unwrap(); add_type::<()>(ids, env, "()", Type::unit()); add_type::<VmInt>(ids, env, "Int", Type::int()); add_type::<u8>(ids, env, "Byte", Type::byte()); add_type::<f64>(ids, env, "Float", Type::float()); add_type::<::std::string::String>(ids, env, "String", Type::string()); add_type::<char>(ids, env, "Char", Type::char()); } self.register_type::<IO<Generic<A>>>("IO", &["a"]).unwrap(); self.register_type::<Lazy<Generic<A>>>("Lazy", &["a"]).unwrap(); self.register_type::<RootedThread>("Thread", &[]).unwrap(); Ok(()) } pub fn new_function(&self, f: CompiledFunction) -> Result<GcPtr<BytecodeFunction>> { new_bytecode(&mut self.gc.lock().unwrap(), self, f) } pub fn get_type<T: ?Sized + Any>(&self) -> ArcType { let id = TypeId::of::<T>(); self.typeids .read() .unwrap() .get(&id) .cloned() .unwrap_or_else(|| panic!("Expected type to be inserted before get_type call")) } /// Checks if a global exists called `name` pub fn global_exists(&self, name: &str) -> bool { self.env.read().unwrap().globals.get(name).is_some() } /// TODO dont expose this directly pub fn set_global(&self, id: Symbol, typ: ArcType, metadata: Metadata, value: Value) -> Result<()> { let mut env = self.env.write().unwrap(); let globals = &mut env.globals; let global = Global { id: id.clone(), typ: typ, metadata: metadata, value: value, }; globals.insert(StdString::from(id.as_ref()), global); Ok(()) } pub fn get_generic(&self, name: &str) -> ArcType { let mut generics = self.generics.write().unwrap(); if let Some(g) = generics.get(name) { return g.clone(); } let g: ArcType = Type::generic(Generic { id: Symbol::from(name), kind: Kind::typ(), }); 
generics.insert(name.into(), g.clone()); g } /// Registers a new type called `name` pub fn register_type<T: ?Sized + Any>(&self, name: &str, args: &[&str]) -> Result<ArcType> { let mut env = self.env.write().unwrap(); let type_infos = &mut env.type_infos; if type_infos.id_to_type.contains_key(name) { Err(Error::TypeAlreadyExists(name.into())) } else { let id = TypeId::of::<T>(); let arg_types: Vec<_> = args.iter().map(|g| self.get_generic(g)).collect(); let args = arg_types.iter() .map(|g| match **g { Type::Generic(ref g) => g.clone(), _ => unreachable!(), }) .collect(); let n = Symbol::from(name); let typ: ArcType = Type::app(Type::ident(n.clone()), arg_types); self.typeids .write() .unwrap() .insert(id, typ.clone()); let t = self.typeids.read().unwrap().get(&id).unwrap().clone(); type_infos.id_to_type.insert(name.into(), Alias::from(AliasData { name: n, args: args, typ: None, })); Ok(t) } } pub fn get_macros(&self) -> &MacroEnv { &self.macros } pub fn intern(&self, s: &str) -> Result<InternedStr> { self.interner.write().unwrap().intern(&mut *self.gc.lock().unwrap(), s) } /// Returns a borrowed structure which implements `CompilerEnv` pub fn get_env<'b>(&'b self) -> RwLockReadGuard<'b, VmEnv> { self.env.read().unwrap() } }
35.824295
99
0.49125
7af227a11b99db96992ed09a3b2b620dfa01d4fa
3,176
/* automatically generated by rust-bindgen */ #![allow(dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals)] #[repr(C)] #[derive(Debug, Default, Copy, Clone, Hash, PartialEq, Eq)] pub struct foo { pub bar: [foo__bindgen_ty_1; 2usize], pub baz: [[[foo__bindgen_ty_2; 4usize]; 3usize]; 2usize], } #[repr(C)] #[derive(Debug, Default, Copy, Clone, Hash, PartialEq, Eq)] pub struct foo__bindgen_ty_1 { pub a: ::std::os::raw::c_int, pub b: ::std::os::raw::c_int, } #[test] fn bindgen_test_layout_foo__bindgen_ty_1() { assert_eq!( ::std::mem::size_of::<foo__bindgen_ty_1>(), 8usize, concat!("Size of: ", stringify!(foo__bindgen_ty_1)) ); assert_eq!( ::std::mem::align_of::<foo__bindgen_ty_1>(), 4usize, concat!("Alignment of ", stringify!(foo__bindgen_ty_1)) ); assert_eq!( unsafe { &(*(::std::ptr::null::<foo__bindgen_ty_1>())).a as *const _ as usize }, 0usize, concat!( "Offset of field: ", stringify!(foo__bindgen_ty_1), "::", stringify!(a) ) ); assert_eq!( unsafe { &(*(::std::ptr::null::<foo__bindgen_ty_1>())).b as *const _ as usize }, 4usize, concat!( "Offset of field: ", stringify!(foo__bindgen_ty_1), "::", stringify!(b) ) ); } #[repr(C)] #[derive(Debug, Default, Copy, Clone, Hash, PartialEq, Eq)] pub struct foo__bindgen_ty_2 { pub a: ::std::os::raw::c_int, pub b: ::std::os::raw::c_int, } #[test] fn bindgen_test_layout_foo__bindgen_ty_2() { assert_eq!( ::std::mem::size_of::<foo__bindgen_ty_2>(), 8usize, concat!("Size of: ", stringify!(foo__bindgen_ty_2)) ); assert_eq!( ::std::mem::align_of::<foo__bindgen_ty_2>(), 4usize, concat!("Alignment of ", stringify!(foo__bindgen_ty_2)) ); assert_eq!( unsafe { &(*(::std::ptr::null::<foo__bindgen_ty_2>())).a as *const _ as usize }, 0usize, concat!( "Offset of field: ", stringify!(foo__bindgen_ty_2), "::", stringify!(a) ) ); assert_eq!( unsafe { &(*(::std::ptr::null::<foo__bindgen_ty_2>())).b as *const _ as usize }, 4usize, concat!( "Offset of field: ", stringify!(foo__bindgen_ty_2), "::", stringify!(b) ) ); } #[test] fn 
bindgen_test_layout_foo() { assert_eq!( ::std::mem::size_of::<foo>(), 208usize, concat!("Size of: ", stringify!(foo)) ); assert_eq!( ::std::mem::align_of::<foo>(), 4usize, concat!("Alignment of ", stringify!(foo)) ); assert_eq!( unsafe { &(*(::std::ptr::null::<foo>())).bar as *const _ as usize }, 0usize, concat!("Offset of field: ", stringify!(foo), "::", stringify!(bar)) ); assert_eq!( unsafe { &(*(::std::ptr::null::<foo>())).baz as *const _ as usize }, 16usize, concat!("Offset of field: ", stringify!(foo), "::", stringify!(baz)) ); }
27.859649
88
0.532746
f8f2fcc9ceb0b9a784aa2b69b2fb1514fd0cce14
988
// structs1.rs
// All three struct flavors (named-field, tuple, unit) are exercised by the
// tests below.

/// A classic C-style struct with named fields.
struct ColorClassicStruct {
    name: &'static str,
    hex: &'static str,
}

/// A tuple struct: fields are accessed positionally via `.0` and `.1`.
struct ColorTupleStruct(&'static str, &'static str);

/// A unit struct: carries no data; `Debug` prints its type name.
#[derive(Debug)]
struct UnitStruct;

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn classic_c_structs() {
        let green = ColorClassicStruct {
            name: "green",
            hex: "#00FF00",
        };

        assert_eq!(green.name, "green");
        assert_eq!(green.hex, "#00FF00");
    }

    #[test]
    fn tuple_structs() {
        let green = ColorTupleStruct("green", "#00FF00");

        assert_eq!(green.0, "green");
        assert_eq!(green.1, "#00FF00");
    }

    #[test]
    fn unit_structs() {
        let unit_struct = UnitStruct;
        let message = format!("{:?}s are fun!", unit_struct);

        assert_eq!(message, "UnitStructs are fun!");
    }
}
21.478261
61
0.574899
9b65d7960f559230843bdbebb04aee7c39ec9aff
2,313
/* Copyright (C) 2018-2019 [email protected] Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/

use super::super::*;
use super::*;
#[cfg(not(feature = "std"))]
use alloc::string::String;
use core::cell::RefCell;

// An instruction with no branch/target fixups: its encoded size is computed
// once at construction time and never changes, so `optimize` is a no-op.
pub(super) struct SimpleInstr {
    // IP the instruction had in the caller's original code.
    orig_ip: u64,
    // IP assigned by the block encoder; 0 until `set_ip` is called.
    ip: u64,
    // Owning block, shared with the encoder (hence `Rc<RefCell<..>>`).
    block: Rc<RefCell<Block>>,
    // Encoded size in bytes, fixed at construction.
    size: u32,
    // Copy of the instruction to encode.
    instruction: Instruction,
}

impl SimpleInstr {
    // Builds the wrapper and precomputes the encoded size at the
    // instruction's original IP.
    pub(super) fn new(block_encoder: &mut BlockEncoder, block: Rc<RefCell<Block>>, instruction: &Instruction) -> Self {
        Self {
            orig_ip: instruction.ip(),
            ip: 0,
            block,
            size: block_encoder.get_instruction_size(instruction, instruction.ip()),
            instruction: *instruction,
        }
    }
}

impl Instr for SimpleInstr {
    fn block(&self) -> Rc<RefCell<Block>> {
        // Cheap refcount bump; the block itself is not cloned.
        Rc::clone(&self.block)
    }

    fn size(&self) -> u32 {
        self.size
    }

    fn ip(&self) -> u64 {
        self.ip
    }

    fn set_ip(&mut self, new_ip: u64) {
        self.ip = new_ip
    }

    fn orig_ip(&self) -> u64 {
        self.orig_ip
    }

    // Nothing to resolve: a simple instruction has no targets to look up.
    fn initialize(&mut self, _block_encoder: &BlockEncoder) {}

    // The size is fixed, so no optimization pass ever shrinks it.
    fn optimize(&mut self) -> bool {
        false
    }

    // Encodes at the (possibly updated) IP; on failure the raw encoder error
    // is wrapped with the offending instruction for context.
    fn encode(&mut self, block: &mut Block) -> Result<(ConstantOffsets, bool), String> {
        match block.encoder.encode(&self.instruction, self.ip) {
            Err(err) => Err(InstrUtils::create_error_message(&err, &self.instruction)),
            Ok(_) => Ok((block.encoder.get_constant_offsets(), true)),
        }
    }
}
27.535714
116
0.723735
f556680606b420b15c95897f2bea261c9a58ff30
5,637
use crate::page::{EnumColumn, Inner, NGramStats, NGramsTableRow, NumberColumn, Page, TextColumn}; use anyhow::{bail, Result}; use num::ToPrimitive; use pinwheel::prelude::*; use std::sync::Arc; use tangram_app_common::{ error::{bad_request, not_found, redirect_to_login, service_unavailable}, heuristics::{ TRAINING_STATS_TEXT_COLUMN_MAX_TOKENS_TO_SHOW_IN_CHART, TRAINING_STATS_TEXT_COLUMN_MAX_TOKENS_TO_SHOW_IN_TABLE, }, model::get_model_bytes, path_components, user::{authorize_user, authorize_user_for_model}, Context, }; use tangram_app_layouts::model_layout::{model_layout_info, ModelNavItem}; use tangram_id::Id; pub async fn get(request: &mut http::Request<hyper::Body>) -> Result<http::Response<hyper::Body>> { let context = request.extensions().get::<Arc<Context>>().unwrap().clone(); let (model_id, column_name) = if let ["repos", _, "models", model_id, "training_stats", "columns", column_name] = path_components(&request).as_slice() { (model_id.to_owned(), column_name.to_owned()) } else { bail!("unexpected path"); }; let mut db = match context.database_pool.begin().await { Ok(db) => db, Err(_) => return Ok(service_unavailable()), }; let user = match authorize_user(&request, &mut db, context.options.auth_enabled()).await? { Ok(user) => user, Err(_) => return Ok(redirect_to_login()), }; let model_id: Id = match model_id.parse() { Ok(model_id) => model_id, Err(_) => return Ok(bad_request()), }; if !authorize_user_for_model(&mut db, &user, model_id).await? 
{ return Ok(not_found()); } let bytes = get_model_bytes(&context.storage, model_id).await?; let model = tangram_model::from_bytes(&bytes)?; let (column_stats, target_column_stats) = match model.inner() { tangram_model::ModelInnerReader::Regressor(regressor) => { let regressor = regressor.read(); ( regressor.overall_column_stats(), regressor.overall_target_column_stats(), ) } tangram_model::ModelInnerReader::BinaryClassifier(binary_classifier) => { let binary_classifier = binary_classifier.read(); ( binary_classifier.overall_column_stats(), binary_classifier.overall_target_column_stats(), ) } tangram_model::ModelInnerReader::MulticlassClassifier(multiclass_classifier) => { let multiclass_classifier = multiclass_classifier.read(); ( multiclass_classifier.overall_column_stats(), multiclass_classifier.overall_target_column_stats(), ) } }; let column_index = column_stats .iter() .position(|column_stats| column_stats.column_name() == column_name); let column = if target_column_stats.column_name() == column_name { target_column_stats } else if let Some(column_index) = column_index { column_stats.get(column_index).unwrap() } else { return Ok(not_found()); }; let inner = match column { tangram_model::ColumnStatsReader::UnknownColumn(_) => unimplemented!(), tangram_model::ColumnStatsReader::NumberColumn(column_stats) => { let column_stats = column_stats.read(); Inner::Number(NumberColumn { invalid_count: column_stats.invalid_count(), min: column_stats.min(), max: column_stats.max(), mean: column_stats.mean(), name: column_stats.column_name().to_owned(), p25: column_stats.p25(), p50: column_stats.p50(), p75: column_stats.p75(), std: column_stats.std(), unique_count: column_stats.unique_count(), }) } tangram_model::ColumnStatsReader::EnumColumn(column_stats) => { let column_stats = column_stats.read(); let total_count: u64 = column_stats .histogram() .iter() .map(|(_, count)| count) .sum(); Inner::Enum(EnumColumn { unique_values_chart_data: Some( column_stats .histogram() 
.iter() .map(|(value, count)| (value.to_owned(), count)) .collect(), ), unique_values_table_rows: Some( column_stats .histogram() .iter() .map(|(value, count)| { ( value.to_owned(), count, count.to_f64().unwrap() / total_count.to_f64().unwrap(), ) }) .collect(), ), invalid_count: column_stats.invalid_count(), name: column_stats.column_name().to_owned(), unique_count: column_stats.unique_count(), }) } tangram_model::ColumnStatsReader::TextColumn(column_stats) => { let column_stats = column_stats.read(); let ngram_count = column_stats.top_ngrams().len(); let mut top_ngrams_chart_values = column_stats .top_ngrams() .iter() .map(|(ngram, entry)| NGramStats { ngram: ngram.to_string(), row_count: entry.row_count(), occurrence_count: entry.occurrence_count(), }) .collect::<Vec<_>>(); top_ngrams_chart_values.sort_by(|a, b| { a.occurrence_count .partial_cmp(&b.occurrence_count) .unwrap() .reverse() }); let ngrams_table_rows = top_ngrams_chart_values .iter() .take(TRAINING_STATS_TEXT_COLUMN_MAX_TOKENS_TO_SHOW_IN_TABLE) .cloned() .map(|ngram| NGramsTableRow { ngram: ngram.ngram, count: ngram.occurrence_count, }) .collect(); top_ngrams_chart_values .truncate(TRAINING_STATS_TEXT_COLUMN_MAX_TOKENS_TO_SHOW_IN_CHART); Inner::Text(TextColumn { name: column_stats.column_name().to_owned(), ngram_count, top_ngrams_chart_values, ngrams_table_rows, }) } }; let model_layout_info = model_layout_info(&mut db, &context, model_id, ModelNavItem::TrainingStats).await?; let page = Page { inner, model_layout_info, }; let html = html(page); let response = http::Response::builder() .status(http::StatusCode::OK) .body(hyper::Body::from(html)) .unwrap(); Ok(response) }
31.316667
114
0.694873
bfb51c0bbc6e9d92920d34adecc25b8d2b584faf
195
// Copyright 2018 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. pub mod ast; pub mod codegen;
27.857143
73
0.748718
1dc4af97fcb8badb973db3eb16c20eba36978895
1,856
use crate::disk::fs::FileSystem; use crate::disk::manager::{DiskManager}; use futures_cpupool::Builder; const DEFAULT_PENDING_SIZE: usize = 10; const DEFAULT_COMPLETED_SIZE: usize = 10; /// `DiskManagerBuilder` for building `DiskManager`s with different settings. pub struct DiskManagerBuilder { builder: Builder, pending_size: usize, completed_size: usize } impl DiskManagerBuilder { /// Create a new `DiskManagerBuilder`. pub fn new() -> DiskManagerBuilder { DiskManagerBuilder{ builder: Builder::new(), pending_size: DEFAULT_PENDING_SIZE, completed_size: DEFAULT_COMPLETED_SIZE } } /// Use a custom `Builder` for the `CpuPool`. pub fn with_worker_config(mut self, config: Builder) -> DiskManagerBuilder { self.builder = config; self } /// Specify the buffer capacity for pending `IDiskMessage`s. pub fn with_sink_buffer_capacity(mut self, size: usize) -> DiskManagerBuilder { self.pending_size = size; self } /// Specify the buffer capacity for completed `ODiskMessage`s. pub fn with_stream_buffer_capacity(mut self, size: usize) -> DiskManagerBuilder { self.completed_size = size; self } /// Retrieve the `CpuPool` builder. pub fn worker_config(&mut self) -> &mut Builder { &mut self.builder } /// Retrieve the sink buffer capacity. pub fn sink_buffer_capacity(&self) -> usize { self.pending_size } /// Retrieve the stream buffer capacity. pub fn stream_buffer_capacity(&self) -> usize { self.completed_size } /// Build a `DiskManager` with the given `FileSystem`. pub fn build<F>(self, fs: F) -> DiskManager<F> where F: FileSystem + Send + Sync + 'static { DiskManager::from_builder(self, fs) } }
29.935484
88
0.655711
e926a615470666e78d93ec4a86fc06812251217d
128
// TODO:RG show a timer? // or maybe buttons to activate certain buffs -> more light, teleportation, things like that // Menus?
32
92
0.726563
48f80ef8b768a9972d02a37eb51edef3412a9e81
762
// Rust Bitcoin Library // Written in 2014 by // Andrew Poelstra <[email protected]> // // To the extent possible under law, the author(s) have dedicated all // copyright and related and neighboring rights to this software to // the public domain worldwide. This software is distributed without // any warranty. // // You should have received a copy of the CC0 Public Domain Dedication // along with this software. // If not, see <http://creativecommons.org/publicdomain/zero/1.0/>. // //! Bitcoin block data. //! //! This module defines structures and functions for storing the blocks and //! transactions which make up the Bitcoin system. //! pub mod constants; pub mod opcodes; pub mod script; pub mod transaction; pub mod block; pub mod witness;
27.214286
75
0.742782
c1a2179ea8307c3cbec01c59c419b5178df64295
2,936
use std::ops::Range; use bio_types::genome::{AbstractInterval, Interval}; use crate::core::counting::buffers::{IntervalCounts, RawCounts}; use super::CountsBuffer; use super::NucCounts; use crate::core::workload::ROIWorkload; #[derive(Clone)] pub struct FlatBuffer { inner: Vec<NucCounts>, interval: Interval, rois: Vec<Interval>, coverage: u32, } impl FlatBuffer { pub fn new(maxsize: usize) -> Self { let mut inner = Vec::with_capacity(maxsize); inner.resize(maxsize, NucCounts::zeros()); let interval = Interval::new(String::default(), 0..maxsize as u64); let rois: Vec<Interval> = vec![interval.clone()]; Self { inner, interval, rois, coverage: 0 } } } impl CountsBuffer for FlatBuffer { #[inline] fn interval(&self) -> &Interval { &self.interval } fn rois(&self) -> &[Interval] { &self.rois } #[inline] fn buffer(&self) -> &[NucCounts] { &self.inner } #[inline] fn buffer_mut(&mut self) -> &mut [NucCounts] { &mut self.inner } #[inline] fn add_matched(&mut self, _: &[Range<u32>]) { self.coverage += 1; } #[inline] fn results(&self) -> Vec<IntervalCounts> { vec![IntervalCounts { roi: &self.interval, name: "", cnts: RawCounts { nuc: &self.inner, coverage: self.coverage }, }] } fn reset(&mut self, workload: ROIWorkload) { let (interval, _) = workload.dissolve(); let newlen = interval.range().end - interval.range().start; debug_assert!(newlen > 0); self.inner.clear(); self.inner.resize(newlen as usize, NucCounts::zeros()); self.rois = vec![interval.clone()]; self.interval = interval; self.coverage = 0; } } #[cfg(test)] mod tests { use super::*; #[test] fn reset() { let mut dummy = FlatBuffer::new(10); for x in [20, 10, 5] { dummy.reset(ROIWorkload::new(Interval::new("".into(), 0..x), vec![])); // previous changes must be cleaned assert!(dummy.buffer().iter().all(|x| x.coverage() == 0), "{:?}", dummy.buffer()); // new dummy changes dummy.buffer_mut()[0].T = 100; } } #[test] fn content() { let mut dummy = FlatBuffer::new(10); let interval = Interval::new("".into(), 0..3); 
dummy.reset(ROIWorkload::new(interval.clone(), vec![])); dummy.buffer_mut()[0].A = 10; dummy.buffer_mut()[2].C = 3; dummy.add_matched(&[0..1, 2..3]); let counts = [NucCounts { A: 10, C: 0, G: 0, T: 0 }, NucCounts::zeros(), NucCounts { A: 0, C: 3, G: 0, T: 0 }]; let expected = vec![IntervalCounts { roi: &interval, name: "", cnts: RawCounts { nuc: &counts, coverage: 1 } }]; let content = dummy.results(); assert_eq!(content, expected); } }
27.698113
120
0.553815
2fac6278d10dde563a0d86d8a14d9a9411413a79
2,795
impl Solution {
    /// Reconstruct the digits of a scrambled concatenation of the English
    /// words "zero".."nine" (LeetCode 423) and return them in ascending order.
    ///
    /// Greedy elimination: once previously matched digits are removed, some
    /// letter uniquely identifies each remaining digit (e.g. 'z' only occurs
    /// in "zero"), so we repeatedly find such a digit and subtract its word's
    /// letters from the counts.
    pub fn original_digits(s: String) -> String {
        let mut map = CharMap::from_str(&s);
        let mut result = Vec::new();
        while let Some(digit) = map.next_digit() {
            map.minus(digit);
            result.push(DIGITS[digit]);
        }
        result.sort_unstable();
        result.into_iter().collect()
    }
}

/// Letter frequency table over the 26 lowercase ASCII letters.
struct CharMap([u32; 26]);

impl CharMap {
    /// Count the letters of `s`. Assumes only lowercase ASCII letters occur,
    /// per the problem's input guarantee (anything else panics, as before).
    fn from_str(s: &str) -> CharMap {
        let mut map = [0u32; 26];
        for &c in s.as_bytes() {
            map[(c - b'a') as usize] += 1;
        }
        CharMap(map)
    }

    #[inline]
    fn is_empty(&self) -> bool {
        self.0 == [0; 26]
    }

    /// Next digit whose distinguishing letter is still present, or `None`
    /// once all letters are consumed (replaces the old `10` sentinel value).
    fn next_digit(&self) -> Option<usize> {
        if self.is_empty() {
            return None;
        }
        let &(_, digit) = DIGIT_EXISTS_MAP
            .iter()
            .find(|&&(idx, _)| self.0[idx] > 0)
            .expect("remaining letters must spell at least one digit word");
        Some(digit)
    }

    /// Subtract one occurrence of `digit`'s English word from the counts.
    fn minus(&mut self, digit: usize) {
        for (count, &needed) in self.0.iter_mut().zip(DIGITS_MAP[digit].iter()) {
            *count -= needed;
        }
    }
}

const DIGITS: [char; 10] = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'];

/// (letter index, digit) pairs in greedy elimination order: each letter
/// uniquely identifies its digit among the words not yet eliminated
/// ('z'->0, 'w'->2, 'x'->6, 'u'->4, 'r'->3, 's'->7, 'o'->1, 'v'->5, 't'->8, 'i'->9).
const DIGIT_EXISTS_MAP: [(usize, usize); 10] = [
    (25, 0),
    (22, 2),
    (23, 6),
    (20, 4),
    (17, 3),
    (18, 7),
    (14, 1),
    (21, 5),
    (19, 8),
    (8, 9),
];

/// Letter counts of "zero".."nine", indexed by digit, then by letter ('a'..'z').
const DIGITS_MAP: [[u32; 26]; 10] = [
    // zero
    [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1],
    // one
    [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
    // two
    [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0],
    // three
    [0, 0, 0, 0, 2, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0],
    // four
    [0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
    // five
    [0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
    // six
    [0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0],
    // seven
    [0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0],
    // eight
    [0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
    // nine
    [0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
];

pub struct Solution;

#[test]
fn test_original_digits() {
    let cases = vec![
        ("owoztneoer", "012"),
        ("fviefuro", "45"),
        ("zerozero", "00"),
    ];
    for (s, expected) in cases {
        assert_eq!(Solution::original_digits(s.to_string()), expected);
    }
}
23.686441
85
0.377818
4a664d042b918de98f908faee7aced9049c5767f
8,454
use std::mem::take;

use itertools::Itertools;
use log::debug;
use ordered_float::OrderedFloat;
use roaring::RoaringBitmap;

use super::{Criterion, CriterionParameters, CriterionResult};
use crate::search::criteria::{resolve_query_tree, CriteriaBuilder};
use crate::search::facet::FacetNumberIter;
use crate::search::query_tree::Operation;
use crate::{FieldId, Index, Result};

/// Threshold on the number of candidates that will make
/// the system choose between one algorithm or another.
const CANDIDATES_THRESHOLD: u64 = 1000;

/// Ranking criterion that orders documents by the numeric value of one facet
/// field, ascending or descending. It pulls buckets of candidates from its
/// `parent` criterion and yields them one facet-value group at a time.
pub struct AscDesc<'t> {
    index: &'t Index,
    rtxn: &'t heed::RoTxn<'t>,
    /// Name of the faceted field to sort by.
    field_name: String,
    /// `None` when `field_name` is unknown to the index; no ordering iterator
    /// is built in that case (see `next`).
    field_id: Option<FieldId>,
    /// `true` for ascending order, `false` for descending.
    ascending: bool,
    query_tree: Option<Operation>,
    /// Iterator over the facet-ordered groups of the current parent bucket.
    candidates: Box<dyn Iterator<Item = heed::Result<RoaringBitmap>> + 't>,
    /// Candidates of the current bucket that have not been yielded yet.
    allowed_candidates: RoaringBitmap,
    bucket_candidates: RoaringBitmap,
    /// Document ids returned by `number_faceted_documents_ids` for the field
    /// (empty when the field is unknown).
    faceted_candidates: RoaringBitmap,
    parent: Box<dyn Criterion + 't>,
}

impl<'t> AscDesc<'t> {
    /// Ascending-order constructor.
    pub fn asc(
        index: &'t Index,
        rtxn: &'t heed::RoTxn,
        parent: Box<dyn Criterion + 't>,
        field_name: String,
    ) -> Result<Self> {
        Self::new(index, rtxn, parent, field_name, true)
    }

    /// Descending-order constructor.
    pub fn desc(
        index: &'t Index,
        rtxn: &'t heed::RoTxn,
        parent: Box<dyn Criterion + 't>,
        field_name: String,
    ) -> Result<Self> {
        Self::new(index, rtxn, parent, field_name, false)
    }

    fn new(
        index: &'t Index,
        rtxn: &'t heed::RoTxn,
        parent: Box<dyn Criterion + 't>,
        field_name: String,
        ascending: bool,
    ) -> Result<Self> {
        let fields_ids_map = index.fields_ids_map(rtxn)?;
        let field_id = fields_ids_map.id(&field_name);
        let faceted_candidates = match field_id {
            Some(field_id) => index.number_faceted_documents_ids(rtxn, field_id)?,
            None => RoaringBitmap::default(),
        };

        Ok(AscDesc {
            index,
            rtxn,
            field_name,
            field_id,
            ascending,
            query_tree: None,
            candidates: Box::new(std::iter::empty()),
            allowed_candidates: RoaringBitmap::new(),
            faceted_candidates,
            bucket_candidates: RoaringBitmap::new(),
            parent,
        })
    }
}

impl<'t> Criterion for AscDesc<'t> {
    #[logging_timer::time("AscDesc::{}")]
    fn next(&mut self, params: &mut CriterionParameters) -> Result<Option<CriterionResult>> {
        // remove excluded candidates when next is called, instead of doing it in the loop.
        self.allowed_candidates -= params.excluded_candidates;

        loop {
            debug!(
                "Facet {}({}) iteration",
                if self.ascending { "Asc" } else { "Desc" },
                self.field_name
            );

            match self.candidates.next().transpose()? {
                // The ordered iterator is exhausted but some candidates of the
                // current bucket were never yielded (they were excluded from the
                // ordering, e.g. by the `faceted_candidates` intersection below):
                // flush them as a final, unordered result.
                None if !self.allowed_candidates.is_empty() => {
                    return Ok(Some(CriterionResult {
                        query_tree: self.query_tree.clone(),
                        candidates: Some(take(&mut self.allowed_candidates)),
                        filtered_candidates: None,
                        bucket_candidates: Some(take(&mut self.bucket_candidates)),
                    }));
                }
                // Current bucket fully consumed: fetch the next one from the parent.
                None => match self.parent.next(params)? {
                    Some(CriterionResult {
                        query_tree,
                        candidates,
                        filtered_candidates,
                        bucket_candidates,
                    }) => {
                        self.query_tree = query_tree;
                        // Resolve the candidates ourselves when the parent did not
                        // provide them.
                        let mut candidates = match (&self.query_tree, candidates) {
                            (_, Some(candidates)) => candidates,
                            (Some(qt), None) => {
                                let context = CriteriaBuilder::new(&self.rtxn, &self.index)?;
                                resolve_query_tree(&context, qt, params.wdcache)?
                            }
                            // No query at all: every document is a candidate.
                            (None, None) => self.index.documents_ids(self.rtxn)?,
                        };

                        if let Some(filtered_candidates) = filtered_candidates {
                            candidates &= filtered_candidates;
                        }

                        match bucket_candidates {
                            Some(bucket_candidates) => self.bucket_candidates |= bucket_candidates,
                            None => self.bucket_candidates |= &candidates,
                        }

                        if candidates.is_empty() {
                            continue;
                        }

                        self.allowed_candidates = &candidates - params.excluded_candidates;
                        // Build the facet-ordered group iterator; only documents that
                        // actually have a number facet value for the field take part
                        // in the ordering.
                        self.candidates = match self.field_id {
                            Some(field_id) => facet_ordered(
                                self.index,
                                self.rtxn,
                                field_id,
                                self.ascending,
                                candidates & &self.faceted_candidates,
                            )?,
                            None => Box::new(std::iter::empty()),
                        };
                    }
                    None => return Ok(None),
                },
                // Got the next group of equally-valued documents: yield it.
                Some(mut candidates) => {
                    candidates -= params.excluded_candidates;
                    self.allowed_candidates -= &candidates;
                    return Ok(Some(CriterionResult {
                        query_tree: self.query_tree.clone(),
                        candidates: Some(candidates),
                        filtered_candidates: None,
                        bucket_candidates: Some(take(&mut self.bucket_candidates)),
                    }));
                }
            }
        }
    }
}

/// Returns an iterator over groups of the given candidates in ascending or descending order.
///
/// It will either use an iterative or a recursive method on the whole facet database depending
/// on the number of candidates to rank.
fn facet_ordered<'t>(
    index: &'t Index,
    rtxn: &'t heed::RoTxn,
    field_id: FieldId,
    ascending: bool,
    candidates: RoaringBitmap,
) -> Result<Box<dyn Iterator<Item = heed::Result<RoaringBitmap>> + 't>> {
    if candidates.len() <= CANDIDATES_THRESHOLD {
        // Few candidates: look each document's facet value up individually.
        let iter = iterative_facet_ordered_iter(index, rtxn, field_id, ascending, candidates)?;
        Ok(Box::new(iter.map(Ok)) as Box<dyn Iterator<Item = _>>)
    } else {
        // Many candidates: delegate to `FacetNumberIter` over the facet database.
        let facet_fn = if ascending {
            FacetNumberIter::new_reducing
        } else {
            FacetNumberIter::new_reverse_reducing
        };
        let iter = facet_fn(rtxn, index, field_id, candidates)?;
        Ok(Box::new(iter.map(|res| res.map(|(_, docids)| docids))))
    }
}

/// Fetch the whole list of candidates facet values one by one and order them by it.
///
/// This function is fast when the amount of candidates to rank is small.
fn iterative_facet_ordered_iter<'t>( index: &'t Index, rtxn: &'t heed::RoTxn, field_id: FieldId, ascending: bool, candidates: RoaringBitmap, ) -> Result<impl Iterator<Item = RoaringBitmap> + 't> { let mut docids_values = Vec::with_capacity(candidates.len() as usize); for docid in candidates.iter() { let left = (field_id, docid, f64::MIN); let right = (field_id, docid, f64::MAX); let mut iter = index.field_id_docid_facet_f64s.range(rtxn, &(left..=right))?; let entry = if ascending { iter.next() } else { iter.last() }; if let Some(((_, _, value), ())) = entry.transpose()? { docids_values.push((docid, OrderedFloat(value))); } } docids_values.sort_unstable_by_key(|(_, v)| *v); let iter = docids_values.into_iter(); let iter = if ascending { Box::new(iter) as Box<dyn Iterator<Item = _>> } else { Box::new(iter.rev()) }; // The itertools GroupBy iterator doesn't provide an owned version, we are therefore // required to collect the result into an owned collection (a Vec). // https://github.com/rust-itertools/itertools/issues/499 let vec: Vec<_> = iter .group_by(|(_, v)| v.clone()) .into_iter() .map(|(_, ids)| ids.map(|(id, _)| id).collect()) .collect(); Ok(vec.into_iter()) }
37.40708
99
0.53525
09d43da1c5ef1a469aeed9a3a8dfd9db0c869d18
562
//! Library of common Bitcoin functionality shared by all crates. #![allow(clippy::type_complexity)] #![deny(missing_docs, unsafe_code)] pub mod block; pub mod collections; pub mod network; pub mod p2p; pub use bitcoin; pub use bitcoin_hashes; pub use nonempty; /// Return the function path at the current source location. #[macro_export] macro_rules! source { () => {{ fn f() {} fn type_of<T>(_: T) -> &'static str { std::any::type_name::<T>() } let name = type_of(f); &name[..name.len() - 3] }}; }
22.48
65
0.613879
c19df041d78887a309124501505935156cd74f0e
68,591
use diesel::pg::PgConnection;
use diesel::*;
use graph_mock::MockMetricsRegistry;
use graphql_parser::schema as s;
use hex_literal::hex;
use lazy_static::lazy_static;
use std::collections::HashSet;
use std::str::FromStr;
use std::time::Duration;
use test_store::*;

use graph::components::store::{EntityFilter, EntityKey, EntityOrder, EntityQuery};
use graph::data::store::scalar;
use graph::data::subgraph::schema::*;
use graph::data::subgraph::*;
use graph::prelude::*;
use graph_store_postgres::layout_for_tests::STRING_PREFIX_SIZE;
use graph_store_postgres::Store as DieselStore;
use web3::types::{Address, H256};

// GraphQL schema of the test subgraph: `User` and `Person` both implement the
// `ColorAndAge` interface; `Manual` is an unrelated entity type.
const USER_GQL: &str = " interface ColorAndAge { id: ID!, age: Int, favorite_color: String } type User implements ColorAndAge @entity { id: ID!, name: String, bin_name: Bytes, email: String, age: Int, seconds_age: BigInt, weight: BigDecimal, coffee: Boolean, favorite_color: String } type Person implements ColorAndAge @entity { id: ID!, name: String, age: Int, favorite_color: String } type Manual @entity { id: ID!, text: String } ";

// Entity type name used by most of the tests below.
const USER: &str = "User";

lazy_static! {
    static ref TEST_SUBGRAPH_ID_STRING: String = String::from("testsubgraph");
    static ref TEST_SUBGRAPH_ID: SubgraphDeploymentId =
        SubgraphDeploymentId::new(TEST_SUBGRAPH_ID_STRING.as_str()).unwrap();
    static ref TEST_SUBGRAPH_SCHEMA: Schema =
        Schema::parse(USER_GQL, TEST_SUBGRAPH_ID.clone()).expect("Failed to parse user schema");
    // Hard-coded (hash, block number) pointers simulating a small chain.
    // NOTE(review): 3A and 4A reuse the numbers of blocks 3 and 4 with
    // different hashes — presumably fork blocks for revert tests; confirm in
    // the tests that use them.
    static ref TEST_BLOCK_0_PTR: EthereumBlockPointer = (
        H256::from(hex!(
            "bd34884280958002c51d3f7b5f853e6febeba33de0f40d15b0363006533c924f"
        )),
        0u64
    )
        .into();
    static ref TEST_BLOCK_1_PTR: EthereumBlockPointer = (
        H256::from(hex!(
            "8511fa04b64657581e3f00e14543c1d522d5d7e771b54aa3060b662ade47da13"
        )),
        1u64
    )
        .into();
    static ref TEST_BLOCK_2_PTR: EthereumBlockPointer = (
        H256::from(hex!(
            "b98fb783b49de5652097a989414c767824dff7e7fd765a63b493772511db81c1"
        )),
        2u64
    )
        .into();
    static ref TEST_BLOCK_3_PTR: EthereumBlockPointer = (
        H256::from(hex!(
            "977c084229c72a0fa377cae304eda9099b6a2cb5d83b25cdf0f0969b69874255"
        )),
        3u64
    )
        .into();
    static ref TEST_BLOCK_3A_PTR: EthereumBlockPointer = (
        H256::from(hex!(
            "d163aec0592c7cb00c2700ab65dcaac93289f5d250b3b889b39198b07e1fbe4a"
        )),
        3u64
    )
        .into();
    static ref TEST_BLOCK_4_PTR: EthereumBlockPointer = (
        H256::from(hex!(
            "007a03cdf635ebb66f5e79ae66cc90ca23d98031665649db056ff9c6aac2d74d"
        )),
        4u64
    )
        .into();
    static ref TEST_BLOCK_4A_PTR: EthereumBlockPointer = (
        H256::from(hex!(
            "8fab27e9e9285b0a39110f4d9877f05d0f43d2effa157e55f4dcc49c3cf8cbd7"
        )),
        4u64
    )
        .into();
    static ref TEST_BLOCK_5_PTR: EthereumBlockPointer = (
        H256::from(hex!(
            "e8b3b02b936c4a4a331ac691ac9a86e197fb7731f14e3108602c87d4dac55160"
        )),
        5u64
    )
        .into();
}

/// Test harness for running database integration tests.
///
/// Wipes and re-seeds the shared store before invoking `test`, then runs the
/// returned future to completion on the shared runtime.
fn run_test<R, F>(test: F)
where
    F: FnOnce(Arc<DieselStore>) -> R + Send + 'static,
    R: IntoFuture<Item = ()> + Send + 'static,
    R::Error: Send + Debug,
    R::Future: Send,
{
    let store = STORE.clone();

    // Lock regardless of poisoning. This also forces sequential test execution.
    let mut runtime = match STORE_RUNTIME.lock() {
        Ok(guard) => guard,
        Err(err) => err.into_inner(),
    };

    runtime
        .block_on(async {
            // Reset state before starting
            remove_test_data(store.clone());

            // Seed database with test data
            insert_test_data(store.clone());

            // Run test
            test(store).into_future().compat().await
        })
        .unwrap_or_else(|e| panic!("Failed to run Store test: {:?}", e))
}

/// Inserts test data into the store.
///
/// Inserts data at the genesis block and test blocks 1 and 2, leaving the
/// remaining test blocks (3, 3A, 4, 4A, 5) for the tests to use.
fn insert_test_data(store: Arc<DieselStore>) {
    let manifest = SubgraphManifest {
        id: TEST_SUBGRAPH_ID.clone(),
        location: "/ipfs/test".to_owned(),
        spec_version: "1".to_owned(),
        description: None,
        repository: None,
        schema: TEST_SUBGRAPH_SCHEMA.clone(),
        data_sources: vec![],
        graft: None,
        templates: vec![],
    };

    // Create SubgraphDeploymentEntity
    let ops =
        SubgraphDeploymentEntity::new(&manifest, false, None).create_operations(&*TEST_SUBGRAPH_ID);
    store
        .create_subgraph_deployment(&TEST_SUBGRAPH_SCHEMA, ops)
        .unwrap();

    // User 1 exists from the genesis block onward.
    let test_entity_1 = create_test_entity(
        "1",
        USER,
        "Johnton",
        "[email protected]",
        67 as i32,
        184.4,
        false,
        None,
    );
    transact_entity_operations(
        &store,
        TEST_SUBGRAPH_ID.clone(),
        *GENESIS_PTR,
        vec![test_entity_1],
    )
    .unwrap();

    // Users 2 and 3 are added in block 1.
    let test_entity_2 = create_test_entity(
        "2",
        USER,
        "Cindini",
        "[email protected]",
        43 as i32,
        159.1,
        true,
        Some("red"),
    );
    let test_entity_3_1 = create_test_entity(
        "3",
        USER,
        "Shaqueeena",
        "[email protected]",
        28 as i32,
        111.7,
        false,
        Some("blue"),
    );
    transact_entity_operations(
        &store,
        TEST_SUBGRAPH_ID.clone(),
        *TEST_BLOCK_1_PTR,
        vec![test_entity_2, test_entity_3_1],
    )
    .unwrap();

    // User 3 is updated in block 2: new email, favorite_color cleared.
    let test_entity_3_2 = create_test_entity(
        "3",
        USER,
        "Shaqueeena",
        "[email protected]",
        28 as i32,
        111.7,
        false,
        None,
    );
    transact_entity_operations(
        &store,
        TEST_SUBGRAPH_ID.clone(),
        *TEST_BLOCK_2_PTR,
        vec![test_entity_3_2],
    )
    .unwrap();
}

/// Creates a test entity.
fn create_test_entity( id: &str, entity_type: &str, name: &str, email: &str, age: i32, weight: f64, coffee: bool, favorite_color: Option<&str>, ) -> EntityOperation { let mut test_entity = Entity::new(); test_entity.insert("id".to_owned(), Value::String(id.to_owned())); test_entity.insert("name".to_owned(), Value::String(name.to_owned())); let bin_name = scalar::Bytes::from_str(&hex::encode(name)).unwrap(); test_entity.insert("bin_name".to_owned(), Value::Bytes(bin_name)); test_entity.insert("email".to_owned(), Value::String(email.to_owned())); test_entity.insert("age".to_owned(), Value::Int(age)); test_entity.insert( "seconds_age".to_owned(), Value::BigInt(BigInt::from(age) * 31557600.into()), ); test_entity.insert("weight".to_owned(), Value::BigDecimal(weight.into())); test_entity.insert("coffee".to_owned(), Value::Bool(coffee)); test_entity.insert( "favorite_color".to_owned(), favorite_color .map(|s| Value::String(s.to_owned())) .unwrap_or(Value::Null), ); EntityOperation::Set { key: EntityKey { subgraph_id: TEST_SUBGRAPH_ID.clone(), entity_type: entity_type.to_owned(), entity_id: id.to_owned(), }, data: test_entity, } } /// Removes test data from the database behind the store. 
fn remove_test_data(store: Arc<graph_store_postgres::Store>) {
    let url = postgres_test_url();
    let conn = PgConnection::establish(url.as_str()).expect("Failed to connect to Postgres");
    graph_store_postgres::store::delete_all_entities_for_test_use_only(&store, &conn)
        .expect("Failed to remove entity test data");
}

/// Reads the `entityCount` attribute of the subgraph's deployment entity.
fn get_entity_count(
    store: Arc<graph_store_postgres::Store>,
    subgraph_id: &SubgraphDeploymentId,
) -> u64 {
    let key = SubgraphDeploymentEntity::key(subgraph_id.clone());
    let entity = store.get(key).unwrap().unwrap();
    entity
        .get("entityCount")
        .unwrap()
        .clone()
        .as_bigint()
        .unwrap()
        .to_u64()
}

#[test]
fn delete_entity() {
    run_test(|store| -> Result<(), ()> {
        let entity_key = EntityKey {
            subgraph_id: TEST_SUBGRAPH_ID.clone(),
            entity_type: USER.to_owned(),
            entity_id: "3".to_owned(),
        };

        // Check that there is an entity to remove.
        store.get(entity_key.clone()).unwrap().unwrap();

        let count = get_entity_count(store.clone(), &TEST_SUBGRAPH_ID);
        transact_entity_operations(
            &store,
            TEST_SUBGRAPH_ID.clone(),
            *TEST_BLOCK_3_PTR,
            vec![EntityOperation::Remove {
                key: entity_key.clone(),
            }],
        )
        .unwrap();
        // The removal must have decremented the deployment's entity count by one.
        assert_eq!(
            count,
            get_entity_count(store.clone(), &TEST_SUBGRAPH_ID) + 1
        );

        // Check that the deleted entity id is not present
        assert!(store.get(entity_key).unwrap().is_none());

        Ok(())
    })
}

/// Check that user 1 was inserted correctly
#[test]
fn get_entity_1() {
    run_test(|store| -> Result<(), ()> {
        let key = EntityKey {
            subgraph_id: TEST_SUBGRAPH_ID.clone(),
            entity_type: USER.to_owned(),
            entity_id: "1".to_owned(),
        };
        let result = store.get(key).unwrap();

        let mut expected_entity = Entity::new();

        expected_entity.insert("__typename".to_owned(), USER.into());
        expected_entity.insert("id".to_owned(), "1".into());
        expected_entity.insert("name".to_owned(), "Johnton".into());
        // NOTE(review): `create_test_entity` stores `bin_name` hex-encoded,
        // while this expects the raw name bytes back — presumably the store
        // decodes the hex on write; confirm.
        expected_entity.insert(
            "bin_name".to_owned(),
            Value::Bytes("Johnton".as_bytes().into()),
        );
        expected_entity.insert("email".to_owned(), "[email protected]".into());
        expected_entity.insert("age".to_owned(), Value::Int(67 as i32));
        // 67 years * 31557600 seconds/year = 2114359200
        expected_entity.insert(
            "seconds_age".to_owned(),
            Value::BigInt(BigInt::from(2114359200)),
        );
        expected_entity.insert("weight".to_owned(), Value::BigDecimal(184.4.into()));
        expected_entity.insert("coffee".to_owned(), Value::Bool(false));
        // "favorite_color" was set to `Null` earlier and should be absent

        // Check that the expected entity was returned
        assert_eq!(result, Some(expected_entity));

        Ok(())
    })
}

/// Check that user 3 was updated correctly
#[test]
fn get_entity_3() {
    run_test(|store| -> Result<(), ()> {
        let key = EntityKey {
            subgraph_id: TEST_SUBGRAPH_ID.clone(),
            entity_type: USER.to_owned(),
            entity_id: "3".to_owned(),
        };
        let result = store.get(key).unwrap();

        let mut expected_entity = Entity::new();

        expected_entity.insert("__typename".to_owned(), USER.into());
        expected_entity.insert("id".to_owned(), "3".into());
        expected_entity.insert("name".to_owned(), "Shaqueeena".into());
        expected_entity.insert(
            "bin_name".to_owned(),
            Value::Bytes("Shaqueeena".as_bytes().into()),
        );
        // The block-2 update must win: the email from block 2, not block 1.
        expected_entity.insert("email".to_owned(), "[email protected]".into());
        expected_entity.insert("age".to_owned(), Value::Int(28 as i32));
        // 28 years * 31557600 seconds/year = 883612800
        expected_entity.insert(
            "seconds_age".to_owned(),
            Value::BigInt(BigInt::from(883612800)),
        );
        expected_entity.insert("weight".to_owned(), Value::BigDecimal(111.7.into()));
        expected_entity.insert("coffee".to_owned(), Value::Bool(false));
        // "favorite_color" was set to `Null` earlier and should be absent

        // Check that the expected entity was returned
        assert_eq!(result, Some(expected_entity));

        Ok(())
    })
}

#[test]
fn insert_entity() {
    run_test(|store| -> Result<(), ()> {
        let entity_key = EntityKey {
            subgraph_id: TEST_SUBGRAPH_ID.clone(),
            entity_type: USER.to_owned(),
            entity_id: "7".to_owned(),
        };
        let test_entity = create_test_entity(
            "7",
            USER,
            "Wanjon",
            "[email protected]",
            76 as i32,
            111.7,
            true,
            Some("green"),
        );
        let count = get_entity_count(store.clone(), &TEST_SUBGRAPH_ID);
        transact_entity_operations(
            &store,
            TEST_SUBGRAPH_ID.clone(),
            *TEST_BLOCK_3_PTR,
            vec![test_entity],
        )
        .unwrap();
        // Inserting a brand-new entity must increment the entity count.
        assert_eq!(
            count + 1,
            get_entity_count(store.clone(), &TEST_SUBGRAPH_ID)
        );

        // Check that new record is in the store
        store.get(entity_key).unwrap().unwrap();

        Ok(())
    })
}

#[test]
fn update_existing() {
    run_test(|store| -> Result<(), ()> {
        let entity_key = EntityKey {
            subgraph_id: TEST_SUBGRAPH_ID.clone(),
            entity_type: USER.to_owned(),
            entity_id: "1".to_owned(),
        };

        let op = create_test_entity(
            "1",
            USER,
            "Wanjon",
            "[email protected]",
            76 as i32,
            111.7,
            true,
            Some("green"),
        );
        let mut new_data = match op {
            EntityOperation::Set { ref data, .. } => data.clone(),
            _ => unreachable!(),
        };

        // Verify that the entity before updating is different from what we expect afterwards
        assert_ne!(store.get(entity_key.clone()).unwrap().unwrap(), new_data);

        // Set test entity; as the entity already exists an update should be performed
        let count = get_entity_count(store.clone(), &TEST_SUBGRAPH_ID);
        transact_entity_operations(
            &store,
            TEST_SUBGRAPH_ID.clone(),
            *TEST_BLOCK_3_PTR,
            vec![op],
        )
        .unwrap();
        // An update of an existing entity must not change the entity count.
        assert_eq!(count, get_entity_count(store.clone(), &TEST_SUBGRAPH_ID));

        // Verify that the entity in the store has changed to what we have set.
        let bin_name = match new_data.get("bin_name") {
            Some(Value::Bytes(bytes)) => bytes.clone(),
            _ => unreachable!(),
        };
        // Adjust the expectation for attributes the store adds/transforms.
        new_data.insert("__typename".to_owned(), USER.into());
        new_data.insert("bin_name".to_owned(), Value::Bytes(bin_name));
        assert_eq!(store.get(entity_key).unwrap(), Some(new_data));

        Ok(())
    })
}

#[test]
fn partially_update_existing() {
    run_test(|store| -> Result<(), ()> {
        let entity_key = EntityKey {
            subgraph_id: TEST_SUBGRAPH_ID.clone(),
            entity_type: USER.to_owned(),
            entity_id: "1".to_owned(),
        };

        let partial_entity = Entity::from(vec![
            ("id", Value::from("1")),
            ("name", Value::from("Johnny Boy")),
            ("email", Value::Null),
        ]);

        let original_entity = store
            .get(entity_key.clone())
            .unwrap()
            .expect("entity not found");

        // Set test entity; as the entity already exists an update should be performed
        transact_entity_operations(
            &store,
            TEST_SUBGRAPH_ID.clone(),
            *TEST_BLOCK_3_PTR,
            vec![EntityOperation::Set {
                key: entity_key.clone(),
                data: partial_entity.clone(),
            }],
        )
        .unwrap();

        // Obtain the updated entity from the store
        let updated_entity = store.get(entity_key).unwrap().expect("entity not found");

        // Verify that the values of all attributes we have set were either unset
        // (in the case of Value::Null) or updated to the new values
        assert_eq!(updated_entity.get("id"), partial_entity.get("id"));
        // NOTE(review): `get(USER)` looks up the key "User", which neither
        // entity has — both sides are `None`, so this assertion is vacuous.
        // Was this meant to be `get("name")`?
        assert_eq!(updated_entity.get(USER), partial_entity.get(USER));
        assert_eq!(updated_entity.get("email"), None);

        // Verify that all attributes we have not set have remained at their old values
        assert_eq!(updated_entity.get("age"), original_entity.get("age"));
        assert_eq!(updated_entity.get("weight"), original_entity.get("weight"));
        assert_eq!(updated_entity.get("coffee"), original_entity.get("coffee"));

        Ok(())
    })
}

fn test_find(expected_entity_ids: Vec<&str>, query: EntityQuery) {
    let expected_entity_ids: Vec<String> =
        expected_entity_ids.into_iter().map(str::to_owned).collect();

    run_test(move |store| -> Result<(), ()> {
        let entities = store
            .find(query)
            .expect("store.find
failed to execute query"); let entity_ids: Vec<_> = entities .into_iter() .map(|entity| match entity.get("id") { Some(Value::String(id)) => id.to_owned(), Some(_) => panic!("store.find returned entity with non-string ID attribute"), None => panic!("store.find returned entity with no ID attribute"), }) .collect(); assert_eq!(entity_ids, expected_entity_ids); Ok(()) }) } fn user_query() -> EntityQuery { EntityQuery::new( TEST_SUBGRAPH_ID.clone(), BLOCK_NUMBER_MAX, EntityCollection::All(vec![USER.to_owned()]), ) } #[test] fn find_string_contains() { test_find( vec!["2"], user_query().filter(EntityFilter::Contains("name".into(), "ind".into())), ) } #[test] fn find_string_equal() { test_find( vec!["2"], user_query().filter(EntityFilter::Equal("name".to_owned(), "Cindini".into())), ) } #[test] fn find_string_not_equal() { test_find( vec!["1", "3"], user_query() .filter(EntityFilter::Not("name".to_owned(), "Cindini".into())) .order_by("name", ValueType::String, EntityOrder::Ascending), ) } #[test] fn find_string_greater_than() { test_find( vec!["3"], user_query().filter(EntityFilter::GreaterThan("name".to_owned(), "Kundi".into())), ) } #[test] fn find_string_less_than_order_by_asc() { test_find( vec!["2", "1"], user_query() .filter(EntityFilter::LessThan("name".to_owned(), "Kundi".into())) .order_by("name", ValueType::String, EntityOrder::Ascending), ) } #[test] fn find_string_less_than_order_by_desc() { test_find( vec!["1", "2"], user_query() .filter(EntityFilter::LessThan("name".to_owned(), "Kundi".into())) .order_by("name", ValueType::String, EntityOrder::Descending), ) } #[test] fn find_string_less_than_range() { test_find( vec!["1"], user_query() .filter(EntityFilter::LessThan("name".to_owned(), "ZZZ".into())) .order_by("name", ValueType::String, EntityOrder::Descending) .first(1) .skip(1), ) } #[test] fn find_string_multiple_and() { test_find( vec!["2"], user_query() .filter(EntityFilter::And(vec![ EntityFilter::LessThan("name".to_owned(), "Cz".into()), 
EntityFilter::Equal("name".to_owned(), "Cindini".into()), ])) .order_by("name", ValueType::String, EntityOrder::Descending), ) } #[test] fn find_string_ends_with() { test_find( vec!["2"], user_query() .filter(EntityFilter::EndsWith("name".to_owned(), "ini".into())) .order_by("name", ValueType::String, EntityOrder::Descending), ) } #[test] fn find_string_not_ends_with() { test_find( vec!["3", "1"], user_query() .filter(EntityFilter::NotEndsWith("name".to_owned(), "ini".into())) .order_by("name", ValueType::String, EntityOrder::Descending), ) } #[test] fn find_string_in() { test_find( vec!["1"], user_query() .filter(EntityFilter::In("name".to_owned(), vec!["Johnton".into()])) .order_by("name", ValueType::String, EntityOrder::Descending), ) } #[test] fn find_string_not_in() { test_find( vec!["1", "2"], user_query() .filter(EntityFilter::NotIn( "name".to_owned(), vec!["Shaqueeena".into()], )) .order_by("name", ValueType::String, EntityOrder::Descending), ) } #[test] fn find_float_equal() { test_find( vec!["1"], user_query().filter(EntityFilter::Equal( "weight".to_owned(), Value::BigDecimal(184.4.into()), )), ) } #[test] fn find_float_not_equal() { test_find( vec!["3", "2"], user_query() .filter(EntityFilter::Not( "weight".to_owned(), Value::BigDecimal(184.4.into()), )) .order_by("name", ValueType::String, EntityOrder::Descending), ) } #[test] fn find_float_greater_than() { test_find( vec!["1"], user_query().filter(EntityFilter::GreaterThan( "weight".to_owned(), Value::BigDecimal(160.0.into()), )), ) } #[test] fn find_float_less_than() { test_find( vec!["2", "3"], user_query() .filter(EntityFilter::LessThan( "weight".to_owned(), Value::BigDecimal(160.0.into()), )) .order_by("name", ValueType::String, EntityOrder::Ascending), ) } #[test] fn find_float_less_than_order_by_desc() { test_find( vec!["3", "2"], user_query() .filter(EntityFilter::LessThan( "weight".to_owned(), Value::BigDecimal(160.0.into()), )) .order_by("name", ValueType::String, EntityOrder::Descending), ) } 
#[test]
fn find_float_less_than_range() {
    test_find(
        vec!["2"],
        user_query()
            .filter(EntityFilter::LessThan(
                "weight".to_owned(),
                Value::BigDecimal(161.0.into()),
            ))
            .order_by("name", ValueType::String, EntityOrder::Descending)
            .first(1)
            .skip(1),
    )
}

#[test]
fn find_float_in() {
    test_find(
        vec!["3", "1"],
        user_query()
            .filter(EntityFilter::In(
                "weight".to_owned(),
                vec![
                    Value::BigDecimal(184.4.into()),
                    Value::BigDecimal(111.7.into()),
                ],
            ))
            .order_by("name", ValueType::String, EntityOrder::Descending)
            .first(5),
    )
}

#[test]
fn find_float_not_in() {
    test_find(
        vec!["2"],
        user_query()
            .filter(EntityFilter::NotIn(
                "weight".to_owned(),
                vec![
                    Value::BigDecimal(184.4.into()),
                    Value::BigDecimal(111.7.into()),
                ],
            ))
            .order_by("name", ValueType::String, EntityOrder::Descending)
            .first(5),
    )
}

// --- Int attribute filters ----------------------------------------------

#[test]
fn find_int_equal() {
    test_find(
        vec!["1"],
        user_query()
            .filter(EntityFilter::Equal("age".to_owned(), Value::Int(67 as i32)))
            .order_by("name", ValueType::String, EntityOrder::Descending),
    )
}

#[test]
fn find_int_not_equal() {
    test_find(
        vec!["3", "2"],
        user_query()
            .filter(EntityFilter::Not("age".to_owned(), Value::Int(67 as i32)))
            .order_by("name", ValueType::String, EntityOrder::Descending),
    )
}

#[test]
fn find_int_greater_than() {
    test_find(
        vec!["1"],
        user_query().filter(EntityFilter::GreaterThan(
            "age".to_owned(),
            Value::Int(43 as i32),
        )),
    )
}

#[test]
fn find_int_greater_or_equal() {
    test_find(
        vec!["2", "1"],
        user_query()
            .filter(EntityFilter::GreaterOrEqual(
                "age".to_owned(),
                Value::Int(43 as i32),
            ))
            .order_by("name", ValueType::String, EntityOrder::Ascending),
    )
}

#[test]
fn find_int_less_than() {
    test_find(
        vec!["2", "3"],
        user_query()
            .filter(EntityFilter::LessThan(
                "age".to_owned(),
                Value::Int(50 as i32),
            ))
            .order_by("name", ValueType::String, EntityOrder::Ascending),
    )
}

#[test]
fn find_int_less_or_equal() {
    test_find(
        vec!["2", "3"],
        user_query()
            .filter(EntityFilter::LessOrEqual(
                "age".to_owned(),
                Value::Int(43 as i32),
            ))
            .order_by("name", ValueType::String, EntityOrder::Ascending),
    )
}

#[test]
fn find_int_less_than_order_by_desc() {
    test_find(
        vec!["3", "2"],
        user_query()
            .filter(EntityFilter::LessThan(
                "age".to_owned(),
                Value::Int(50 as i32),
            ))
            .order_by("name", ValueType::String, EntityOrder::Descending),
    )
}

#[test]
fn find_int_less_than_range() {
    test_find(
        vec!["2"],
        user_query()
            .filter(EntityFilter::LessThan(
                "age".to_owned(),
                Value::Int(67 as i32),
            ))
            .order_by("name", ValueType::String, EntityOrder::Descending)
            .first(1)
            .skip(1),
    )
}

#[test]
fn find_int_in() {
    test_find(
        vec!["1", "2"],
        user_query()
            .filter(EntityFilter::In(
                "age".to_owned(),
                vec![Value::Int(67 as i32), Value::Int(43 as i32)],
            ))
            .order_by("name", ValueType::String, EntityOrder::Descending)
            .first(5),
    )
}

#[test]
fn find_int_not_in() {
    test_find(
        vec!["3"],
        user_query()
            .filter(EntityFilter::NotIn(
                "age".to_owned(),
                vec![Value::Int(67 as i32), Value::Int(43 as i32)],
            ))
            .order_by("name", ValueType::String, EntityOrder::Descending)
            .first(5),
    )
}

// --- Bool attribute filters ---------------------------------------------

#[test]
fn find_bool_equal() {
    test_find(
        vec!["2"],
        user_query()
            .filter(EntityFilter::Equal("coffee".to_owned(), Value::Bool(true)))
            .order_by("name", ValueType::String, EntityOrder::Descending),
    )
}

#[test]
fn find_bool_not_equal() {
    test_find(
        vec!["1", "3"],
        user_query()
            .filter(EntityFilter::Not("coffee".to_owned(), Value::Bool(true)))
            .order_by("name", ValueType::String, EntityOrder::Ascending),
    )
}

#[test]
fn find_bool_in() {
    test_find(
        vec!["2"],
        user_query()
            .filter(EntityFilter::In(
                "coffee".to_owned(),
                vec![Value::Bool(true)],
            ))
            .order_by("name", ValueType::String, EntityOrder::Descending)
            .first(5),
    )
}

#[test]
fn find_bool_not_in() {
    test_find(
        vec!["3", "1"],
        user_query()
            .filter(EntityFilter::NotIn(
                "coffee".to_owned(),
                vec![Value::Bool(true)],
            ))
            .order_by("name", ValueType::String, EntityOrder::Descending)
            .first(5),
    )
}

// --- Bytes / Null filters and ordering ----------------------------------

#[test]
fn find_bytes_equal() {
    test_find(
        vec!["1"],
        user_query()
            .filter(EntityFilter::Equal(
                "bin_name".to_owned(),
                Value::Bytes("Johnton".as_bytes().into()),
            ))
            .order_by("name", ValueType::String, EntityOrder::Descending),
    )
}

#[test]
fn find_null_equal() {
    test_find(
        vec!["3", "1"],
        user_query()
            .filter(EntityFilter::Equal(
                "favorite_color".to_owned(),
                Value::Null,
            ))
            .order_by("name", ValueType::String, EntityOrder::Descending),
    )
}

#[test]
fn find_null_not_equal() {
    test_find(
        vec!["2"],
        user_query()
            .filter(EntityFilter::Not("favorite_color".to_owned(), Value::Null))
            .order_by("name", ValueType::String, EntityOrder::Descending),
    )
}

#[test]
fn find_null_not_in() {
    test_find(
        vec!["2"],
        user_query()
            .filter(EntityFilter::NotIn(
                "favorite_color".to_owned(),
                vec![Value::Null],
            ))
            .order_by("name", ValueType::String, EntityOrder::Descending),
    )
}

#[test]
fn find_order_by_float() {
    test_find(
        vec!["3", "2", "1"],
        user_query().order_by("weight", ValueType::BigDecimal, EntityOrder::Ascending),
    );
    test_find(
        vec!["1", "2", "3"],
        user_query().order_by("weight", ValueType::BigDecimal, EntityOrder::Descending),
    );
}

#[test]
fn find_order_by_id() {
    test_find(
        vec!["1", "2", "3"],
        user_query().order_by("id", ValueType::String, EntityOrder::Ascending),
    );
    test_find(
        vec!["3", "2", "1"],
        user_query().order_by("id", ValueType::String, EntityOrder::Descending),
    );
}

#[test]
fn find_order_by_int() {
    test_find(
        vec!["3", "2", "1"],
        user_query().order_by("age", ValueType::Int, EntityOrder::Ascending),
    );
    test_find(
        vec!["1", "2", "3"],
        user_query().order_by("age", ValueType::Int, EntityOrder::Descending),
    );
}

#[test]
fn find_order_by_string() {
    test_find(
        vec!["2", "1", "3"],
        user_query().order_by("name", ValueType::String, EntityOrder::Ascending),
    );
    test_find(
        vec!["3", "1", "2"],
        user_query().order_by("name", ValueType::String, EntityOrder::Descending),
    );
}

#[test]
fn find_where_nested_and_or() {
    test_find(
        vec!["1", "2"],
        user_query()
            .filter(EntityFilter::And(vec![EntityFilter::Or(vec![
                EntityFilter::Equal("id".to_owned(), Value::from("1")),
                EntityFilter::Equal("id".to_owned(), Value::from("2")),
            ])]))
            .order_by("id", ValueType::String, EntityOrder::Ascending),
    )
}

/// Build an `EntityChange` against the test subgraph for the given entity.
fn make_entity_change(
    entity_type: &str,
    entity_id: &str,
    op: EntityChangeOperation,
) -> EntityChange {
    EntityChange {
        subgraph_id: TEST_SUBGRAPH_ID.clone(),
        entity_type: entity_type.to_owned(),
        entity_id: entity_id.to_owned(),
        operation: op,
    }
}

/// Build an `EntityChange` for a `SubgraphDeployment` metadata entity.
fn make_deployment_change(entity_id: &str, op: EntityChangeOperation) -> EntityChange {
    EntityChange {
        subgraph_id: SubgraphDeploymentId::new("subgraphs").unwrap(),
        entity_type: "SubgraphDeployment".to_owned(),
        entity_id: entity_id.to_owned(),
        operation: op,
    }
}

// Get as many events as expected contains from stream and check that they
// are equal to the expected events
fn check_events(
    stream: StoreEventStream<impl Stream<Item = StoreEvent, Error = ()> + Send>,
    expected: Vec<StoreEvent>,
) -> impl Future<Item = (), Error = tokio::time::Elapsed> {
    stream
        .take(expected.len() as u64)
        .collect()
        .timeout(Duration::from_secs(3))
        .map_ok(move |events| {
            let events = events.unwrap();
            assert_eq!(events.len(), expected.len());
            assert_eq!(events, expected);
        })
        .compat()
}

// Subscribe to store events from the store. This implementation works
// very hard to make sure that only events that were generated after the
// subscription started will actually be seen on the returned stream
fn subscribe_and_consume(
    store: Arc<DieselStore>,
    subgraph: &SubgraphDeploymentId,
    entity_type: &str,
) -> StoreEventStream<impl Stream<Item = StoreEvent, Error = ()> + Send> {
    const MARKER: &str = "Subgraph";
    const MARKER_ID: &str = "fake marker";

    let subscription = store.subscribe(vec![
        (subgraph.clone(), entity_type.to_owned()),
        (SUBGRAPHS_ID.clone(), MARKER.to_owned()),
    ]);

    // Generate fake activity on the stream by removing a nonexistent entity and
    // skip anything on the stream before this faked event. That makes sure that
    // users of the stream do not see any events that were generated before the
    // subscription was started which can happen depending on how fast the event
    // processing thread is compared to the thread that runs this code.
    //
    // There is still a small possibility that earlier events get through since
    // the fake event is generated in its own transaction and Postgres is free to
    // reorder delivery of events across transactions, like those generated by
    // setup code in the tests.
    let op = MetadataOperation::Remove {
        entity: MARKER.to_owned(),
        id: MARKER_ID.to_owned(),
    };

    store
        .apply_metadata_operations(vec![op])
        .expect("Failed to apply marker operation");

    let source = subscription
        .skip_while(move |event| {
            // Skip events until we see the fake event we generated above
            future::ok(
                event
                    .changes
                    .iter()
                    .all(|change| change.entity_id != MARKER_ID),
            )
        })
        .skip(1)
        .filter_map(move |event| {
            // Remove anything about MARKER entities from the events
            let changes = event
                .changes
                .into_iter()
                .filter(|change| change.entity_type != MARKER)
                .collect::<HashSet<_>>();
            if changes.is_empty() {
                None
            } else {
                Some(StoreEvent {
                    tag: event.tag,
                    changes,
                })
            }
        });
    StoreEventStream::new(source)
}

/// Shared body for the basic revert tests: revert block 3, verify the
/// queryable state went back to block 1's values, and check the emitted event.
fn check_basic_revert(
    store: Arc<graph_store_postgres::Store>,
    expected: StoreEvent,
    subgraph_id: &SubgraphDeploymentId,
    entity_type: &str,
) -> impl Future<Item = (), Error = tokio::time::Elapsed> {
    let this_query = user_query()
        .filter(EntityFilter::Equal(
            "name".to_owned(),
            Value::String("Shaqueeena".to_owned()),
        ))
        .order_by("name", ValueType::String, EntityOrder::Descending);

    let subscription = subscribe_and_consume(store.clone(), subgraph_id, entity_type);

    // Revert block 3
    store
        .revert_block_operations(
            TEST_SUBGRAPH_ID.clone(),
            *TEST_BLOCK_2_PTR,
            *TEST_BLOCK_1_PTR,
        )
        .unwrap();

    let returned_entities = store
        .find(this_query.clone())
        .expect("store.find operation failed");

    // There should be 1 user returned in results
    assert_eq!(1, returned_entities.len());

    // Check if the first user in the result vector has email "[email protected]"
    // NOTE(review): despite the name, `returned_name` holds the "email" attribute.
    let returned_name = returned_entities[0].get(&"email".to_owned());
    let test_value = Value::String("[email protected]".to_owned());
    assert!(returned_name.is_some());
    assert_eq!(&test_value, returned_name.unwrap());

    check_events(subscription, vec![expected])
}

/// Reverting a block must restore the entity count and emit a Set change.
#[test]
fn revert_block_basic_user() {
    run_test(|store| {
        let expected = StoreEvent::new(vec![make_entity_change(
            USER,
            "3",
            EntityChangeOperation::Set,
        )]);

        let count = get_entity_count(store.clone(), &TEST_SUBGRAPH_ID);
        check_basic_revert(store.clone(), expected, &TEST_SUBGRAPH_ID, USER).and_then(move |x| {
            assert_eq!(count, get_entity_count(store.clone(), &TEST_SUBGRAPH_ID));
            Ok(x)
        })
    })
}

/// Same as above but watching the subgraphs metadata deployment entity.
#[test]
fn revert_block_basic_subgraphs() {
    run_test(|store| {
        let expected = StoreEvent::new(vec![make_deployment_change(
            "testsubgraph",
            EntityChangeOperation::Set,
        )]);
        let subgraphs = SubgraphDeploymentId::new("subgraphs").unwrap();

        check_basic_revert(store.clone(), expected, &subgraphs, "SubgraphDeployment")
    })
}

/// Deleting an entity and then reverting the block must bring it back.
#[test]
fn revert_block_with_delete() {
    run_test(|store| {
        let this_query = user_query()
            .filter(EntityFilter::Equal(
                "name".to_owned(),
                Value::String("Cindini".to_owned()),
            ))
            .order_by("name", ValueType::String, EntityOrder::Descending);

        // Delete entity with id=2
        let del_key = EntityKey {
            subgraph_id: TEST_SUBGRAPH_ID.clone(),
            entity_type: USER.to_owned(),
            entity_id: "2".to_owned(),
        };

        // Process deletion
        transact_entity_operations(
            &store,
            TEST_SUBGRAPH_ID.clone(),
            *TEST_BLOCK_3_PTR,
            vec![EntityOperation::Remove { key: del_key }],
        )
        .unwrap();

        let subscription = subscribe_and_consume(store.clone(), &TEST_SUBGRAPH_ID, USER);

        // Revert deletion
        let count = get_entity_count(store.clone(), &TEST_SUBGRAPH_ID);
        store
            .revert_block_operations(
                TEST_SUBGRAPH_ID.clone(),
                *TEST_BLOCK_3_PTR,
                *TEST_BLOCK_2_PTR,
            )
            .unwrap();
        assert_eq!(
            count + 1,
            get_entity_count(store.clone(), &TEST_SUBGRAPH_ID)
        );

        // Query after revert
        let returned_entities = store
            .find(this_query.clone())
            .expect("store.find operation failed");

        // There should be 1 entity returned in results
        assert_eq!(1, returned_entities.len());

        // Check if "[email protected]" is in result set
        let returned_name = returned_entities[0].get(&"email".to_owned());
        let test_value = Value::String("[email protected]".to_owned());
        assert!(returned_name.is_some());
        assert_eq!(&test_value, returned_name.unwrap());

        // Check that the subscription notified us of the changes
        let expected = StoreEvent::new(vec![make_entity_change(
            USER,
            "2",
            EntityChangeOperation::Set,
        )]);

        // The last event is the one for the reversion
        check_events(subscription, vec![expected])
    })
}

/// A partial update followed by a revert must restore the original entity.
#[test]
fn revert_block_with_partial_update() {
    run_test(|store| {
        let entity_key = EntityKey {
            subgraph_id: TEST_SUBGRAPH_ID.clone(),
            entity_type: USER.to_owned(),
            entity_id: "1".to_owned(),
        };

        let partial_entity = Entity::from(vec![
            ("id", Value::from("1")),
            ("name", Value::from("Johnny Boy")),
            ("email", Value::Null),
        ]);

        let original_entity = store
            .get(entity_key.clone())
            .unwrap()
            .expect("missing entity");

        // Set test entity; as the entity already exists an update should be performed
        transact_entity_operations(
            &store,
            TEST_SUBGRAPH_ID.clone(),
            *TEST_BLOCK_3_PTR,
            vec![EntityOperation::Set {
                key: entity_key.clone(),
                data: partial_entity.clone(),
            }],
        )
        .unwrap();

        let subscription = subscribe_and_consume(store.clone(), &TEST_SUBGRAPH_ID, USER);

        // Perform revert operation, reversing the partial update
        let count = get_entity_count(store.clone(), &TEST_SUBGRAPH_ID);
        store
            .revert_block_operations(
                TEST_SUBGRAPH_ID.clone(),
                *TEST_BLOCK_3_PTR,
                *TEST_BLOCK_2_PTR,
            )
            .unwrap();
        assert_eq!(count, get_entity_count(store.clone(), &TEST_SUBGRAPH_ID));

        // Obtain the reverted entity from the store
        let reverted_entity = store
            .get(entity_key.clone())
            .unwrap()
            .expect("missing entity");

        // Verify that the entity has been returned to its original state
        assert_eq!(reverted_entity, original_entity);

        // Check that the subscription notified us of the changes
        let expected = StoreEvent::new(vec![make_entity_change(
            USER,
            "1",
            EntityChangeOperation::Set,
        )]);

        check_events(subscription, vec![expected])
    })
}

/// Fixture: a minimal Ethereum data source (with one template) for the
/// dynamic-data-source revert test below.
fn mock_data_source() -> DataSource {
    DataSource {
        kind: String::from("ethereum/contract"),
        name: String::from("example data source"),
        network: Some(String::from("mainnet")),
        source: Source {
            address: Some(Address::from_str("0123123123012312312301231231230123123123").unwrap()),
            abi: String::from("123123"),
            start_block: 0,
        },
        mapping: Mapping {
            kind: String::from("ethereum/events"),
            api_version: String::from("0.1.0"),
            language: String::from("wasm/assemblyscript"),
            entities: vec![],
            abis: vec![],
            event_handlers: vec![],
            call_handlers: vec![],
            block_handlers: vec![],
            link: Link {
                link: "link".to_owned(),
            },
            runtime: Arc::new(Vec::new()),
        },
        templates: vec![DataSourceTemplate {
            kind: String::from("ethereum/contract"),
            name: String::from("example template"),
            network: Some(String::from("mainnet")),
            source: TemplateSource {
                abi: String::from("foo"),
            },
            mapping: Mapping {
                kind: String::from("ethereum/events"),
                api_version: String::from("0.1.0"),
                language: String::from("wasm/assemblyscript"),
                entities: vec![],
                abis: vec![],
                event_handlers: vec![],
                call_handlers: vec![],
                block_handlers: vec![],
                link: Link {
                    link: "link".to_owned(),
                },
                runtime: Arc::new(Vec::new()),
            },
        }],
        context: None,
    }
}

/// Reverting a block that created a dynamic data source must remove all of
/// the data source's metadata entities and restore the modified user.
#[test]
fn revert_block_with_dynamic_data_source_operations() {
    run_test(|store| {
        // Create operations to add a user
        let user_key = EntityKey {
            subgraph_id: TEST_SUBGRAPH_ID.clone(),
            entity_type: USER.to_owned(),
            entity_id: "1".to_owned(),
        };
        let partial_entity = Entity::from(vec![
            ("id", Value::from("1")),
            ("name", Value::from("Johnny Boy")),
            ("email", Value::Null),
        ]);

        // Get the original user for comparisons
        let original_user = store
            .get(user_key.clone())
            .unwrap()
            .expect("missing entity");

        // Create operations to add a dynamic data source
        let data_source = mock_data_source();
        let dynamic_ds = DynamicEthereumContractDataSourceEntity::from((
            &TEST_SUBGRAPH_ID.clone(),
            &data_source,
            &TEST_BLOCK_4_PTR.clone(),
        ));

        let mut ops = vec![EntityOperation::Set {
            key: user_key.clone(),
            data: partial_entity.clone(),
        }];
        ops.extend(dynamic_ds.write_entity_operations("dynamic-data-source"));

        // Add user and dynamic data source to the store
        transact_entity_operations(&store, TEST_SUBGRAPH_ID.clone(), *TEST_BLOCK_3_PTR, ops)
            .unwrap();

        // Verify that the user is no longer the original
        assert_ne!(
            store
                .get(user_key.clone())
                .unwrap()
                .expect("missing entity"),
            original_user
        );

        // Verify that the dynamic data source exists afterwards
        let dynamic_ds_key = EntityKey {
            subgraph_id: SUBGRAPHS_ID.clone(),
            entity_type: String::from(DynamicEthereumContractDataSourceEntity::TYPENAME),
            entity_id: String::from("dynamic-data-source"),
        };
        store
            .get(dynamic_ds_key.clone())
            .unwrap()
            .expect("dynamic data source entity wasn't written to store");

        let subscription = subscribe_and_consume(store.clone(), &TEST_SUBGRAPH_ID, USER);

        // Revert block that added the user and the dynamic data source
        store
            .revert_block_operations(
                TEST_SUBGRAPH_ID.clone(),
                *TEST_BLOCK_3_PTR,
                *TEST_BLOCK_2_PTR,
            )
            .expect("revert block operations failed unexpectedly");

        // Verify that the user is the original again
        assert_eq!(
            store
                .get(user_key.clone())
                .unwrap()
                .expect("missing entity"),
            original_user
        );

        // Verify that the dynamic data source is gone after the reversion
        assert!(store.get(dynamic_ds_key.clone()).unwrap().is_none());

        // Verify that the right change events were emitted for the reversion
        let expected_events = vec![StoreEvent {
            tag: 3,
            changes: HashSet::from_iter(
                vec![
                    EntityChange {
                        subgraph_id: SubgraphDeploymentId::new("testsubgraph").unwrap(),
                        entity_type: USER.into(),
                        entity_id: "1".into(),
                        operation: EntityChangeOperation::Set,
                    },
                    EntityChange {
                        subgraph_id: SubgraphDeploymentId::new("subgraphs").unwrap(),
                        entity_type: "DynamicEthereumContractDataSource".into(),
                        entity_id: "dynamic-data-source".into(),
                        operation: EntityChangeOperation::Removed,
                    },
                    EntityChange {
                        subgraph_id: SubgraphDeploymentId::new("subgraphs").unwrap(),
                        entity_type: "EthereumContractSource".into(),
                        entity_id: "dynamic-data-source-source".into(),
                        operation: EntityChangeOperation::Removed,
                    },
                    EntityChange {
                        subgraph_id: SubgraphDeploymentId::new("subgraphs").unwrap(),
                        entity_type: "EthereumContractMapping".into(),
                        entity_id: "dynamic-data-source-mapping".into(),
                        operation: EntityChangeOperation::Removed,
                    },
                    EntityChange {
                        subgraph_id: SubgraphDeploymentId::new("subgraphs").unwrap(),
                        entity_type: "EthereumContractDataSourceTemplate".into(),
                        entity_id: "dynamic-data-source-templates-0".into(),
                        operation: EntityChangeOperation::Removed,
                    },
                    EntityChange {
                        subgraph_id: SubgraphDeploymentId::new("subgraphs").unwrap(),
                        entity_type: "EthereumContractDataSourceTemplateSource".into(),
                        entity_id: "dynamic-data-source-templates-0-source".into(),
                        operation: EntityChangeOperation::Removed,
                    },
                    EntityChange {
                        subgraph_id: SubgraphDeploymentId::new("subgraphs").unwrap(),
                        entity_type: "EthereumContractMapping".into(),
                        entity_id: "dynamic-data-source-templates-0-mapping".into(),
                        operation: EntityChangeOperation::Removed,
                    },
                ]
                .into_iter(),
            ),
        }];
        check_events(subscription, expected_events)
    })
}

/// End-to-end: create a deployment, add/update/delete entities, and verify
/// that both the metadata and entity subscriptions see the right events.
#[test]
fn entity_changes_are_fired_and_forwarded_to_subscriptions() {
    run_test(|store| {
        let subgraph_id = SubgraphDeploymentId::new("EntityChangeTestSubgraph").unwrap();
        let schema =
            Schema::parse(USER_GQL, subgraph_id.clone()).expect("Failed to parse user schema");
        let manifest = SubgraphManifest {
            id: subgraph_id.clone(),
            location: "/ipfs/test".to_owned(),
            spec_version: "1".to_owned(),
            description: None,
            repository: None,
            schema: schema.clone(),
            data_sources: vec![],
            graft: None,
            templates: vec![],
        };

        // Create SubgraphDeploymentEntity
        let ops = SubgraphDeploymentEntity::new(&manifest, false, Some(*TEST_BLOCK_0_PTR))
            .create_operations(&subgraph_id);
        store.create_subgraph_deployment(&schema, ops).unwrap();

        // Create store subscriptions
        let meta_subscription =
            subscribe_and_consume(store.clone(), &SUBGRAPHS_ID, "SubgraphDeployment");
        let subscription = subscribe_and_consume(store.clone(), &subgraph_id, USER);

        // Add two entities to the store
        let added_entities = vec![
            (
                "1".to_owned(),
                Entity::from(vec![
                    ("id", Value::from("1")),
                    ("name", Value::from("Johnny Boy")),
                ]),
            ),
            (
                "2".to_owned(),
                Entity::from(vec![
                    ("id", Value::from("2")),
                    ("name", Value::from("Tessa")),
                ]),
            ),
        ];
        transact_entity_operations(
            &store,
            subgraph_id.clone(),
            *TEST_BLOCK_1_PTR,
            added_entities
                .iter()
                .map(|(id, data)| EntityOperation::Set {
                    key: EntityKey {
                        subgraph_id: subgraph_id.clone(),
                        entity_type: USER.to_owned(),
                        entity_id: id.to_owned(),
                    },
                    data: data.to_owned(),
                })
                .collect(),
        )
        .unwrap();

        // Update an entity in the store
        let updated_entity = Entity::from(vec![
            ("id", Value::from("1")),
            ("name", Value::from("Johnny")),
        ]);
        let update_op = EntityOperation::Set {
            key: EntityKey {
                subgraph_id: subgraph_id.clone(),
                entity_type: USER.to_owned(),
                entity_id: "1".to_owned(),
            },
            data: updated_entity.clone(),
        };

        // Delete an entity in the store
        let delete_op = EntityOperation::Remove {
            key: EntityKey {
                subgraph_id: subgraph_id.clone(),
                entity_type: USER.to_owned(),
                entity_id: "2".to_owned(),
            },
        };

        // Commit update & delete ops
        transact_entity_operations(
            &store,
            subgraph_id.clone(),
            *TEST_BLOCK_2_PTR,
            vec![update_op, delete_op],
        )
        .unwrap();

        // We're expecting two meta data events to be written to the meta data subscription
        let meta_expected = vec![
            StoreEvent::new(vec![EntityChange {
                subgraph_id: SubgraphDeploymentId::new("subgraphs").unwrap(),
                entity_type: "SubgraphDeployment".to_owned(),
                entity_id: "EntityChangeTestSubgraph".to_owned(),
                operation: EntityChangeOperation::Set,
            }]),
            StoreEvent::new(vec![EntityChange {
                subgraph_id: SubgraphDeploymentId::new("subgraphs").unwrap(),
                entity_type: "SubgraphDeployment".to_owned(),
                entity_id: "EntityChangeTestSubgraph".to_owned(),
                operation: EntityChangeOperation::Set,
            }]),
        ];

        // FIXME: This does not await, meaning calling this doesn't
        // do anything. But, waiting here causes the test to hang.
        let _ignore_future = check_events(meta_subscription, meta_expected);

        // We're expecting two events to be written to the subscription stream
        let expected = vec![
            StoreEvent::new(vec![
                EntityChange {
                    subgraph_id: subgraph_id.clone(),
                    entity_type: USER.to_owned(),
                    entity_id: added_entities[0].clone().0,
                    operation: EntityChangeOperation::Set,
                },
                EntityChange {
                    subgraph_id: subgraph_id.clone(),
                    entity_type: USER.to_owned(),
                    entity_id: added_entities[1].clone().0,
                    operation: EntityChangeOperation::Set,
                },
            ]),
            StoreEvent::new(vec![
                EntityChange {
                    subgraph_id: subgraph_id.clone(),
                    entity_type: USER.to_owned(),
                    entity_id: "1".to_owned(),
                    operation: EntityChangeOperation::Set,
                },
                EntityChange {
                    subgraph_id: subgraph_id.clone(),
                    entity_type: USER.to_owned(),
                    entity_id: added_entities[1].clone().0,
                    operation: EntityChangeOperation::Removed,
                },
            ]),
        ];

        check_events(subscription, expected)
    })
}

/// With a short throttle interval, events must still be delivered.
#[test]
fn throttle_subscription_delivers() {
    run_test(|store| {
        let meta_subscription =
            subscribe_and_consume(store.clone(), &SUBGRAPHS_ID, "SubgraphDeployment")
                .throttle_while_syncing(
                    &*LOGGER,
                    store.clone(),
                    SUBGRAPHS_ID.clone(),
                    Duration::from_millis(500),
                );

        let subscription = subscribe_and_consume(store.clone(), &TEST_SUBGRAPH_ID, USER)
            .throttle_while_syncing(
                &*LOGGER,
                store.clone(),
                TEST_SUBGRAPH_ID.clone(),
                Duration::from_millis(500),
            );

        let user4 = create_test_entity(
            "4",
            USER,
            "Steve",
            "[email protected]",
            72 as i32,
            120.7,
            false,
            None,
        );

        transact_entity_operations(
            &store,
            TEST_SUBGRAPH_ID.clone(),
            *TEST_BLOCK_3_PTR,
            vec![user4],
        )
        .unwrap();

        let meta_expected = StoreEvent::new(vec![make_deployment_change(
            "testsubgraph",
            EntityChangeOperation::Set,
        )]);

        // FIXME: This does not await, meaning calling this doesn't
        // do anything. But, waiting here causes the test to hang.
        let _ignore_future = check_events(meta_subscription, vec![meta_expected]);

        let expected = StoreEvent::new(vec![make_entity_change(
            USER,
            "4",
            EntityChangeOperation::Set,
        )]);

        check_events(subscription, vec![expected])
    })
}

/// With a very long throttle interval, no event should arrive within the
/// test's 500ms window — we expect the stream read to time out.
#[test]
fn throttle_subscription_throttles() {
    run_test(
        |store| -> Box<dyn Future<Item = (), Error = TimeoutError<()>> + Send> {
            // Throttle for a very long time (30s)
            let subscription = subscribe_and_consume(store.clone(), &TEST_SUBGRAPH_ID, USER)
                .throttle_while_syncing(
                    &*LOGGER,
                    store.clone(),
                    TEST_SUBGRAPH_ID.clone(),
                    Duration::from_secs(30),
                );

            let user4 = create_test_entity(
                "4",
                USER,
                "Steve",
                "[email protected]",
                72 as i32,
                120.7,
                false,
                None,
            );

            transact_entity_operations(
                &store,
                TEST_SUBGRAPH_ID.clone(),
                *TEST_BLOCK_3_PTR,
                vec![user4],
            )
            .unwrap();

            // Make sure we time out waiting for the subscription
            Box::new(
                subscription
                    .take(1)
                    .collect()
                    .timeout(Duration::from_millis(500))
                    .compat()
                    .then(|res| {
                        assert!(res.is_err());
                        future::ok(())
                    }),
            )
        },
    )
}

/// Every type in the API schema must carry a matching @subgraphId directive.
#[test]
fn subgraph_schema_types_have_subgraph_id_directive() {
    run_test(|store| -> Result<(), ()> {
        let schema = store
            .api_schema(&TEST_SUBGRAPH_ID)
            .expect("test subgraph should have a schema");
        for typedef in schema
            .document
            .definitions
            .iter()
            .filter_map(|def| match def {
                s::Definition::TypeDefinition(typedef) => Some(typedef),
                _ => None,
            })
        {
            // Verify that all types have a @subgraphId directive on them
            let directive = match typedef {
                s::TypeDefinition::Object(t) => &t.directives,
                s::TypeDefinition::Interface(t) => &t.directives,
                s::TypeDefinition::Enum(t) => &t.directives,
                s::TypeDefinition::Scalar(t) => &t.directives,
                s::TypeDefinition::Union(t) => &t.directives,
                s::TypeDefinition::InputObject(t) => &t.directives,
            }
            .iter()
            .find(|directive| directive.name == "subgraphId")
            .expect("all subgraph schema types should have a @subgraphId directive");

            // Verify that all @subgraphId directives match the subgraph
            assert_eq!(
                directive.arguments,
                [(
                    String::from("id"),
                    s::Value::String(TEST_SUBGRAPH_ID_STRING.to_string())
                )]
            );
        }

        Ok(())
    })
}

/// Very long strings (1M chars) must be indexable and findable; a plain
/// btree index would reject rows this large. Continues past this chunk.
#[test]
fn handle_large_string_with_index() {
    const NAME: &str = "name";
    const ONE: &str = "large_string_one";
    const TWO: &str = "large_string_two";

    // Helper: build an Insert modification for a user with the given name.
    fn make_insert_op(id: &str, name: &str) -> EntityModification {
        let mut data = Entity::new();
        data.set("id", id);
        data.set(NAME, name);

        let key = EntityKey {
            subgraph_id: TEST_SUBGRAPH_ID.clone(),
            entity_type: USER.to_owned(),
            entity_id: id.to_owned(),
        };

        EntityModification::Insert { key, data }
    };

    run_test(|store| -> Result<(), ()> {
        let index = AttributeIndexDefinition {
            subgraph_id: TEST_SUBGRAPH_ID.clone(),
            entity_number: 0,
            attribute_number: 0,
            field_value_type: ValueType::String,
            attribute_name: NAME.to_owned(),
            entity_name: USER.to_owned(),
        };
        store
            .build_entity_attribute_indexes(&*TEST_SUBGRAPH_ID, vec![index])
            .unwrap();

        // We have to produce a massive string (1_000_000 chars) because
        // the repeated text compresses so well. This leads to an error
        // 'index row requires 11488 bytes, maximum size is 8191' if
        // used with a btree index without size limitation
        let long_text = std::iter::repeat("Quo usque tandem")
            .take(62500)
            .collect::<String>();
        let other_text = long_text.clone() + "X";

        let metrics_registry = Arc::new(MockMetricsRegistry::new());
        let stopwatch_metrics = StopwatchMetrics::new(
            Logger::root(slog::Discard, o!()),
            TEST_SUBGRAPH_ID.clone(),
            metrics_registry.clone(),
        );

        store
            .transact_block_operations(
                TEST_SUBGRAPH_ID.clone(),
                *TEST_BLOCK_3_PTR,
                vec![
                    make_insert_op(ONE, &long_text),
                    make_insert_op(TWO, &other_text),
                ],
                stopwatch_metrics,
            )
            .expect("Failed to insert large text");

        let query = user_query()
            .first(5)
            .filter(EntityFilter::Equal(
                NAME.to_owned(),
                long_text.clone().into(),
            ))
            .order_by(NAME, ValueType::String, EntityOrder::Ascending);

        let ids = store
            .find(query)
            .expect("Could not find entity")
            .iter()
            .map(|e| e.id())
            .collect::<Result<Vec<_>, _>>()
            .expect("Found entities without an id");

        assert_eq!(vec![ONE],
ids); // Make sure we check the full string and not just a prefix let mut prefix = long_text.clone(); prefix.truncate(STRING_PREFIX_SIZE); let query = user_query() .first(5) .filter(EntityFilter::LessOrEqual(NAME.to_owned(), prefix.into())) .order_by(NAME, ValueType::String, EntityOrder::Ascending); let ids = store .find(query) .expect("Could not find entity") .iter() .map(|e| e.id()) .collect::<Result<Vec<_>, _>>() .expect("Found entities without an id"); // Users with name 'Cindini' and 'Johnton' assert_eq!(vec!["2", "1"], ids); Ok(()) }) } #[derive(Clone)] struct WindowQuery(EntityQuery, Arc<DieselStore>); impl WindowQuery { fn new(store: &Arc<DieselStore>) -> Self { WindowQuery( user_query() .filter(EntityFilter::GreaterThan("age".into(), Value::from(0))) .first(10), store.clone(), ) .default_window() } fn default_window(mut self) -> Self { let entity_types = match self.0.collection { EntityCollection::All(entity_types) => entity_types, EntityCollection::Window(_) => { unreachable!("we do not use this method with a windowed collection") } }; let windows = entity_types .into_iter() .map(|child_type| { let attribute = WindowAttribute::Scalar("favorite_color".to_owned()); let link = EntityLink::Direct(attribute); let ids = vec!["red", "green", "yellow", "blue"] .into_iter() .map(String::from) .collect(); EntityWindow { child_type, link, ids, } }) .collect(); self.0.collection = EntityCollection::Window(windows); self } fn first(self, first: u32) -> Self { WindowQuery(self.0.first(first), self.1) } fn skip(self, skip: u32) -> Self { WindowQuery(self.0.skip(skip), self.1) } fn order(self, attr: &str, dir: EntityOrder) -> Self { WindowQuery(self.0.order_by(attr, ValueType::String, dir), self.1) } fn above(self, age: i32) -> Self { WindowQuery( self.0 .filter(EntityFilter::GreaterThan("age".into(), Value::from(age))), self.1, ) } fn against_color_and_age(self) -> Self { let mut query = self.0; query.collection = EntityCollection::All(vec![USER.to_owned(), 
"Person".to_owned()]); WindowQuery(query, self.1).default_window() } fn expect(&self, expected_ids: Vec<&str>, qid: &str) { let query = self.0.clone(); let store = &self.1; let entity_ids = store .find(query) .expect("store.find failed to execute query") .into_iter() .map(|entity| match entity.get("id") { Some(Value::String(id)) => id.to_owned(), Some(_) => panic!("store.find returned entity with non-string ID attribute"), None => panic!("store.find returned entity with no ID attribute"), }) .collect::<Vec<_>>(); assert_eq!(expected_ids, entity_ids, "Failed query: {}", qid); } } #[test] fn window() { fn make_color_end_age(entity_type: &str, id: &str, color: &str, age: i32) -> EntityOperation { let mut entity = Entity::new(); entity.set("id", id.to_owned()); entity.set("age", age); entity.set("favorite_color", color); EntityOperation::Set { key: EntityKey { subgraph_id: TEST_SUBGRAPH_ID.clone(), entity_type: entity_type.to_owned(), entity_id: id.to_owned(), }, data: entity, } } fn make_user(id: &str, color: &str, age: i32) -> EntityOperation { make_color_end_age(USER, id, color, age) } fn make_person(id: &str, color: &str, age: i32) -> EntityOperation { make_color_end_age("Person", id, color, age) } let ops = vec![ make_user("4", "green", 34), make_user("5", "green", 17), make_user("6", "green", 41), make_user("7", "red", 25), make_user("8", "red", 45), make_user("9", "yellow", 37), make_user("10", "blue", 27), make_user("11", "blue", 19), make_person("p1", "green", 12), make_person("p2", "red", 15), ]; run_test(|store| -> Result<(), ()> { use EntityOrder::*; transact_entity_operations(&store, TEST_SUBGRAPH_ID.clone(), *TEST_BLOCK_3_PTR, ops) .expect("Failed to create test users"); // Get the first 2 entries in each 'color group' WindowQuery::new(&store) .first(2) .expect(vec!["10", "11", "4", "5", "2", "7", "9"], "q1"); WindowQuery::new(&store) .first(1) .expect(vec!["10", "4", "2", "9"], "q2"); WindowQuery::new(&store) .first(1) .skip(1) .expect(vec!["11", "5", 
"7"], "q3"); WindowQuery::new(&store) .first(1) .skip(1) .order("id", Descending) .expect(vec!["10", "5", "7"], "q4"); WindowQuery::new(&store) .first(1) .skip(1) .order("favorite_color", Descending) .expect(vec!["11", "5", "7"], "q5"); WindowQuery::new(&store) .first(1) .skip(1) .order("favorite_color", Descending) .above(25) .expect(vec!["6", "8"], "q6"); // Check queries for interfaces WindowQuery::new(&store) .first(1) .skip(1) .order("favorite_color", Descending) .above(12) .against_color_and_age() .expect(vec!["11", "5", "7"], "q7"); WindowQuery::new(&store) .first(1) .order("age", Ascending) .above(12) .against_color_and_age() .expect(vec!["11", "5", "p2", "9"], "q8"); Ok(()) }); } #[test] fn find_at_block() { fn shaqueeena_at_block(block: BlockNumber, email: &'static str) { run_test(move |store| -> Result<(), ()> { let mut query = user_query() .filter(EntityFilter::Equal("name".to_owned(), "Shaqueeena".into())) .order_by("name", ValueType::String, EntityOrder::Descending); query.block = block; let entities = store .find(query) .expect("store.find failed to execute query"); assert_eq!(1, entities.len()); let entity = entities.first().unwrap(); assert_eq!(Some(&Value::from(email)), entity.get("email")); Ok(()) }) } // These tests only make sense with relational storage if *USING_RELATIONAL_STORAGE { shaqueeena_at_block(1, "[email protected]"); shaqueeena_at_block(2, "[email protected]"); shaqueeena_at_block(7000, "[email protected]"); } } #[test] fn cleanup_cached_blocks() { run_test(|store| -> Result<(), ()> { // This test is somewhat silly in that there is nothing to clean up. // The main purpose for this test is to ensure that the SQL query // we run in `cleanup_cached_blocks` to figure out the first block // that should be removed is syntactically correct let cleaned = store.cleanup_cached_blocks(10).expect("cleanup succeeds"); assert_eq!((0, 0), cleaned); Ok(()) }) }
30.938656
100
0.538351
67fdb58ec2ae16490e171cf13a95081bdec09754
1,610
#[doc = "Writer for register CLOCKSTOP"] pub type W = crate::W<u32, super::CLOCKSTOP>; #[doc = "Register CLOCKSTOP `reset()`'s with value 0"] impl crate::ResetValue for super::CLOCKSTOP { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum STOP_AW { #[doc = "1: Stop all trace and debug clocks."] STOP = 1, } impl From<STOP_AW> for bool { #[inline(always)] fn from(variant: STOP_AW) -> Self { variant as u8 != 0 } } #[doc = "Write proxy for field `STOP`"] pub struct STOP_W<'a> { w: &'a mut W, } impl<'a> STOP_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: STOP_AW) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Stop all trace and debug clocks."] #[inline(always)] pub fn stop(self) -> &'a mut W { self.variant(STOP_AW::STOP) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } impl W { #[doc = "Bit 0"] #[inline(always)] pub fn stop(&mut self) -> STOP_W { STOP_W { w: self } } }
25.15625
70
0.539752
900d2a2c0d480c44601bb14f1b589349da38f86c
157
pub mod add_fees; pub mod algo_token; pub mod generate_link; pub mod generate_swap_txs; pub mod search_token; pub mod suggested_fees; pub mod to_sign_wc_js;
19.625
26
0.821656
5d89d21fa0d2b6c69583ac830bb62fa596ab7ffc
4,498
//! Looks for items missing (or incorrectly having) doctests. //! //! This pass is overloaded and runs two different lints. //! //! - MISSING_DOC_CODE_EXAMPLES: this lint is **UNSTABLE** and looks for public items missing doctests. //! - PRIVATE_DOC_TESTS: this lint is **STABLE** and looks for private items with doctests. use crate::clean; use crate::clean::*; use crate::core::DocContext; use crate::html::markdown::{find_testable_code, ErrorCodes, Ignore, LangString}; use crate::visit::DocVisitor; use crate::visit_ast::inherits_doc_hidden; use rustc_hir as hir; use rustc_middle::lint::LintLevelSource; use rustc_session::lint; use rustc_span::symbol::sym; struct DocTestVisibilityLinter<'a, 'tcx> { cx: &'a mut DocContext<'tcx>, } impl<'a, 'tcx> DocVisitor for DocTestVisibilityLinter<'a, 'tcx> { fn visit_item(&mut self, item: &Item) { let dox = item.attrs.collapsed_doc_value().unwrap_or_else(String::new); look_for_tests(self.cx, &dox, &item); self.visit_item_recur(item) } } pub(crate) struct Tests { pub(crate) found_tests: usize, } impl crate::doctest::Tester for Tests { fn add_test(&mut self, _: String, config: LangString, _: usize) { if config.rust && config.ignore == Ignore::None { self.found_tests += 1; } } } crate fn should_have_doc_example(cx: &DocContext<'_>, item: &clean::Item) -> bool { if !cx.cache.access_levels.is_public(item.def_id.expect_def_id()) || matches!( *item.kind, clean::StructFieldItem(_) | clean::VariantItem(_) | clean::AssocConstItem(_, _) | clean::AssocTypeItem(_, _) | clean::TypedefItem(_, _) | clean::StaticItem(_) | clean::ConstantItem(_) | clean::ExternCrateItem { .. } | clean::ImportItem(_) | clean::PrimitiveItem(_) | clean::KeywordItem(_) // check for trait impl | clean::ImplItem(clean::Impl { trait_: Some(_), .. }) ) { return false; } // The `expect_def_id()` should be okay because `local_def_id_to_hir_id` // would presumably panic if a fake `DefIndex` were passed. 
let hir_id = cx.tcx.hir().local_def_id_to_hir_id(item.def_id.expect_def_id().expect_local()); // check if parent is trait impl if let Some(parent_hir_id) = cx.tcx.hir().find_parent_node(hir_id) { if let Some(parent_node) = cx.tcx.hir().find(parent_hir_id) { if matches!( parent_node, hir::Node::Item(hir::Item { kind: hir::ItemKind::Impl(hir::Impl { of_trait: Some(_), .. }), .. }) ) { return false; } } } if cx.tcx.hir().attrs(hir_id).lists(sym::doc).has_word(sym::hidden) || inherits_doc_hidden(cx.tcx, hir_id) || cx.tcx.hir().span(hir_id).in_derive_expansion() { return false; } let (level, source) = cx.tcx.lint_level_at_node(crate::lint::MISSING_DOC_CODE_EXAMPLES, hir_id); level != lint::Level::Allow || matches!(source, LintLevelSource::Default) } crate fn look_for_tests<'tcx>(cx: &DocContext<'tcx>, dox: &str, item: &Item) { let hir_id = match DocContext::as_local_hir_id(cx.tcx, item.def_id) { Some(hir_id) => hir_id, None => { // If non-local, no need to check anything. return; } }; let mut tests = Tests { found_tests: 0 }; find_testable_code(dox, &mut tests, ErrorCodes::No, false, None); if tests.found_tests == 0 && cx.tcx.sess.is_nightly_build() { if should_have_doc_example(cx, item) { debug!("reporting error for {:?} (hir_id={:?})", item, hir_id); let sp = item.attr_span(cx.tcx); cx.tcx.struct_span_lint_hir( crate::lint::MISSING_DOC_CODE_EXAMPLES, hir_id, sp, |lint| lint.build("missing code example in this documentation").emit(), ); } } else if tests.found_tests > 0 && !cx.cache.access_levels.is_exported(item.def_id.expect_def_id()) { cx.tcx.struct_span_lint_hir( crate::lint::PRIVATE_DOC_TESTS, hir_id, item.attr_span(cx.tcx), |lint| lint.build("documentation test in private item").emit(), ); } }
34.335878
103
0.58715
1d18a49b0143873988f8f0c14f147ebc5f3b5790
463
use axum::extract::{FromRequest, RequestParts}; use headers::{Cookie, HeaderMapExt}; use crate::error::HttpError; pub struct Cookies(pub Option<Cookie>); #[async_trait::async_trait] impl<B> FromRequest<B> for Cookies where B: Send + Sync, { type Rejection = HttpError; async fn from_request( req: &mut RequestParts<B>, ) -> Result<Self, Self::Rejection> { Ok(Cookies(req.headers().unwrap().typed_try_get::<Cookie>()?)) } }
22.047619
70
0.665227
d65cd73463e634373e76af3ea380ef8078f887c8
4,001
//! Platform-dependent platform abstraction. //! //! The `std::sys` module is the abstracted interface through which //! `std` talks to the underlying operating system. It has different //! implementations for different operating system families, today //! just Unix and Windows, and initial support for Redox. //! //! The centralization of platform-specific code in this module is //! enforced by the "platform abstraction layer" tidy script in //! `tools/tidy/src/pal.rs`. //! //! This module is closely related to the platform-independent system //! integration code in `std::sys_common`. See that module's //! documentation for details. //! //! In the future it would be desirable for the independent //! implementations of this module to be extracted to their own crates //! that `std` can link to, thus enabling their implementation //! out-of-tree via crate replacement. Though due to the complex //! inter-dependencies within `std` that will be a challenging goal to //! achieve. #![allow(missing_debug_implementations)] cfg_if! { if #[cfg(unix)] { mod unix; pub use self::unix::*; } else if #[cfg(windows)] { mod windows; pub use self::windows::*; } else if #[cfg(target_os = "cloudabi")] { mod cloudabi; pub use self::cloudabi::*; } else if #[cfg(target_os = "redox")] { mod redox; pub use self::redox::*; } else if #[cfg(target_os = "wasi")] { mod wasi; pub use self::wasi::*; } else if #[cfg(target_arch = "wasm32")] { mod wasm; pub use self::wasm::*; } else if #[cfg(target_arch = "bpf")] { mod bpf; pub use self::bpf::*; } else if #[cfg(all(target_vendor = "fortanix", target_env = "sgx"))] { mod sgx; pub use self::sgx::*; } else { compile_error!("libstd doesn't compile for this platform yet"); } } // Import essential modules from both platforms when documenting. These are // then later used in the `std::os` module when documenting, for example, // Windows when we're compiling for Linux. #[cfg(rustdoc)] cfg_if! 
{ if #[cfg(any(unix, target_os = "redox"))] { // On unix we'll document what's already available #[stable(feature = "rust1", since = "1.0.0")] pub use self::ext as unix_ext; } else if #[cfg(any(target_os = "cloudabi", target_arch = "wasm32", all(target_vendor = "fortanix", target_env = "sgx")))] { // On CloudABI and wasm right now the module below doesn't compile // (missing things in `libc` which is empty) so just omit everything // with an empty module #[unstable(issue = "0", feature = "std_internals")] #[allow(missing_docs)] pub mod unix_ext {} } else { // On other platforms like Windows document the bare bones of unix use crate::os::linux as platform; #[path = "unix/ext/mod.rs"] pub mod unix_ext; } } #[cfg(rustdoc)] cfg_if! { if #[cfg(windows)] { // On windows we'll just be documenting what's already available #[allow(missing_docs)] #[stable(feature = "rust1", since = "1.0.0")] pub use self::ext as windows_ext; } else if #[cfg(any(target_os = "cloudabi", target_arch = "wasm32", all(target_vendor = "fortanix", target_env = "sgx")))] { // On CloudABI and wasm right now the shim below doesn't compile, so // just omit it #[unstable(issue = "0", feature = "std_internals")] #[allow(missing_docs)] pub mod windows_ext {} } else { // On all other platforms (aka linux/osx/etc) then pull in a "minimal" // amount of windows goop which ends up compiling #[macro_use] #[path = "windows/compat.rs"] mod compat; #[path = "windows/c.rs"] mod c; #[path = "windows/ext/mod.rs"] pub mod windows_ext; } }
36.045045
80
0.6016
e57ce521be45180a613e0593ae778d58c13dfde7
33,085
// Copyright 2016 Google Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! A general b-tree structure suitable for ropes and the like. use std::sync::Arc; use std::cmp::min; use interval::Interval; const MIN_CHILDREN: usize = 4; const MAX_CHILDREN: usize = 8; pub trait NodeInfo: Clone { /// The type of the leaf. /// /// A given NodeInfo is for exactly one type of leaf. That is why /// the leaf type is an associated type rather than a type parameter. type L : Leaf; /// An operator that combines info from two subtrees. It is intended /// (but not strictly enforced) that this operator be associative and /// obey an identity property. In mathematical terms, the accumulate /// method is the sum operator of a monoid. fn accumulate(&mut self, other: &Self); /// A mapping from a leaf into the info type. It is intended (but /// not strictly enforced) that applying the accumulate method to /// the info derived from two leaves gives the same result as /// deriving the info from the concatenation of the two leaves. In /// mathematical terms, the compute_info method is a monoid /// homomorphism. fn compute_info(&Self::L) -> Self; /// The identity of the monoid. Need not be implemented because it /// can be computed from the leaf default. /// /// This is hear to demonstrate that this is a monoid. fn identity() -> Self { Self::compute_info(&Self::L::default()) } /// The interval covered by this node. 
The default impl is sufficient for most types, /// but interval trees may need to override it. fn interval(&self, len: usize) -> Interval { Interval::new_closed_closed(0, len) } } pub trait Leaf: Sized + Clone + Default { /// Measurement of leaf in base units. /// A 'base unit' refers to the smallest discrete unit /// by which a given concrete type can be indexed. /// Concretely, for Rust's String type the base unit is the byte. fn len(&self) -> usize; /// Generally a minimum size requirement for leaves. fn is_ok_child(&self) -> bool; /// Combine other into self, optionly splitting in two. /// Interval is in "base units". /// Generally implements a maximum size. /// /// TODO: What does Interval represent? /// /// Invariants: /// /// - If one or the other input is empty, then no split. /// - If either input satisfies is_ok_child, then on return self /// satisfies this, as does the optional split. fn push_maybe_split(&mut self, other: &Self, iv: Interval) -> Option<Self>; /// same meaning as push_maybe_split starting from an empty /// leaf, but maybe can be implemented more efficiently? /// /// TODO: remove if it doesn't pull its weight fn subseq(&self, iv: Interval) -> Self { let mut result = Self::default(); if result.push_maybe_split(self, iv).is_some() { panic!("unexpected split"); } result } } /// A b-tree node storing leaves at the bottom, and with info /// retained at each node. It is implemented with atomic reference counting /// and copy-on-write semantics, so an immutable clone is a very cheap /// operation, and nodes can be shared across threads. Even so, it is /// designed to be updated in place, with efficiency similar to a mutable /// data structure, using uniqueness of reference count to detect when /// this operation is safe. /// /// When the leaf is a string, this is a rope data structure (a persistent /// rope in functional programming jargon). 
However, it is not restricted /// to strings, and it is expected to be the basis for a number of data /// structures useful for text processing. #[derive(Clone)] pub struct Node<N: NodeInfo>(Arc<NodeBody<N>>); #[derive(Clone)] struct NodeBody<N: NodeInfo> { height: usize, len: usize, info: N, val: NodeVal<N>, } #[derive(Clone)] enum NodeVal<N: NodeInfo> { Leaf(N::L), Internal(Vec<Node<N>>), } // also consider making Metric a newtype for usize, so type system can // help separate metrics /// A trait for quickly processing attributes of a NodeInfo. /// /// For the conceptual background see the /// [blog post, Rope science, part 2: metrics](https://github.com/google/xi-editor/blob/master/docs/docs/rope_science_02.md). pub trait Metric<N: NodeInfo> { /// Return the number of boundarys in the NodeInfo::Leaf /// /// The usize argument is the total size/length of the node, in base units. fn measure(&N, usize) -> usize; /// Returns the smallest offset, in base units, for an offset in measured units. /// /// Invariants: /// /// - `from_base_units(to_base_units(x)) == x` is True for valid `x` fn to_base_units(l: &N::L, in_measured_units: usize) -> usize; /// Returns the smallest offset in measured units corresponding to an offset in base units. /// /// Invariants: /// /// - `from_base_units(to_base_units(x)) == x` is True for valid `x` fn from_base_units(l: &N::L, in_base_units: usize) -> usize; /// Return whether the offset in base units is a boundary of this metric. /// If a boundary is at end of a leaf then this method must return true. /// However, A boundary at the beginning of a leaf is optional /// (the previous leaf will be queried). fn is_boundary(l: &N::L, offset: usize) -> bool; /// Returns the index of the boundary directly preceding offset, /// or None if no such boundary exists. Input and result are in base units. 
fn prev(l: &N::L, offset: usize) -> Option<usize>; /// Returns the index of the first boundary for which index > offset, /// or None if no such boundary exists. Input and result are in base units. fn next(l: &N::L, offset: usize) -> Option<usize>; /// Returns true if the measured units in this metric can span multiple leaves. /// As an example, in a metric that measures lines in a rope, /// a line may start in one leaf and end in another; /// however in a metric measuring bytes, /// storage of a single byte cannot extend across leaves. fn can_fragment() -> bool; } impl<N: NodeInfo> Node<N> { pub fn from_leaf(l: N::L) -> Node<N> { let len = l.len(); let info = N::compute_info(&l); Node(Arc::new( NodeBody { height: 0, len, info, val: NodeVal::Leaf(l), })) } fn from_nodes(nodes: Vec<Node<N>>) -> Node<N> { let height = nodes[0].0.height + 1; let mut len = nodes[0].0.len; let mut info = nodes[0].0.info.clone(); for child in &nodes[1..] { len += child.0.len; info.accumulate(&child.0.info); } Node(Arc::new( NodeBody { height, len, info, val: NodeVal::Internal(nodes), })) } pub fn len(&self) -> usize { self.0.len } fn height(&self) -> usize { self.0.height } fn is_leaf(&self) -> bool { self.0.height == 0 } fn interval(&self) -> Interval { self.0.info.interval(self.0.len) } fn get_children(&self) -> &[Node<N>] { if let NodeVal::Internal(ref v) = self.0.val { v } else { panic!("get_children called on leaf node"); } } /// Returns the first child with a positive measure, starting from the `j`th. /// Also, returns the offset we have skipped; note that if it returns `None`in the first component, we skip all the children. fn next_positive_measure_child<M: Metric<N>>(&self, j: usize) -> (Option<usize>, usize) { let children = self.get_children(); let mut offset = 0; for i in j .. 
children.len() { if children[i].measure::<M>() > 0 { return (Some(i), offset); } else { offset += children[i].len(); } } (None, offset) } fn get_leaf(&self) -> &N::L { if let NodeVal::Leaf(ref l) = self.0.val { l } else { panic!("get_leaf called on internal node"); } } fn is_ok_child(&self) -> bool { match self.0.val { NodeVal::Leaf(ref l) => l.is_ok_child(), NodeVal::Internal(ref nodes) => (nodes.len() >= MIN_CHILDREN) } } fn merge_nodes(children1: &[Node<N>], children2: &[Node<N>]) -> Node<N> { let n_children = children1.len() + children2.len(); if n_children <= MAX_CHILDREN { Node::from_nodes([children1, children2].concat()) } else { // Note: this leans left. Splitting at midpoint is also an option let splitpoint = min(MAX_CHILDREN, n_children - MIN_CHILDREN); let mut iter = children1.iter().chain(children2.iter()).cloned(); let left = iter.by_ref().take(splitpoint).collect(); let right = iter.collect(); let parent_nodes = vec![Node::from_nodes(left), Node::from_nodes(right)]; Node::from_nodes(parent_nodes) } } fn merge_leaves(mut rope1: Node<N>, rope2: Node<N>) -> Node<N> { debug_assert!(rope1.is_leaf() && rope2.is_leaf()); let both_ok = rope1.get_leaf().is_ok_child() && rope2.get_leaf().is_ok_child(); if both_ok { return Node::from_nodes(vec![rope1, rope2]); } match { let node1 = Arc::make_mut(&mut rope1.0); let leaf2 = rope2.get_leaf(); if let NodeVal::Leaf(ref mut leaf1) = node1.val { let leaf2_iv = Interval::new_closed_closed(0, leaf2.len()); let new = leaf1.push_maybe_split(leaf2, leaf2_iv); node1.len = leaf1.len(); node1.info = N::compute_info(leaf1); new } else { panic!("merge_leaves called on non-leaf"); } } { Some(new) => { Node::from_nodes(vec![ rope1, Node::from_leaf(new), ]) } None => { rope1 } } } pub fn concat(rope1: Node<N>, rope2: Node<N>) -> Node<N> { use std::cmp::Ordering; let h1 = rope1.height(); let h2 = rope2.height(); match h1.cmp(&h2) { Ordering::Less => { let children2 = rope2.get_children(); if h1 == h2 - 1 && rope1.is_ok_child() { 
return Node::merge_nodes(&[rope1], children2); } let newrope = Node::concat(rope1, children2[0].clone()); if newrope.height() == h2 - 1 { Node::merge_nodes(&[newrope], &children2[1..]) } else { Node::merge_nodes(newrope.get_children(), &children2[1..]) } }, Ordering::Equal => { if rope1.is_ok_child() && rope2.is_ok_child() { return Node::from_nodes(vec![rope1, rope2]); } if h1 == 0 { return Node::merge_leaves(rope1, rope2); } Node::merge_nodes(rope1.get_children(), rope2.get_children()) }, Ordering::Greater => { let children1 = rope1.get_children(); if h2 == h1 - 1 && rope2.is_ok_child() { return Node::merge_nodes(children1, &[rope2]); } let lastix = children1.len() - 1; let newrope = Node::concat(children1[lastix].clone(), rope2); if newrope.height() == h1 - 1 { Node::merge_nodes(&children1[..lastix], &[newrope]) } else { Node::merge_nodes(&children1[..lastix], newrope.get_children()) } } } } pub fn measure<M: Metric<N>>(&self) -> usize { M::measure(&self.0.info, self.0.len) } /* // TODO: not sure if this belongs in the public interface, cursor // might subsume all real use cases. // calls the given function with leaves forming the sequence fn visit_subseq<F>(&self, iv: Interval, f: &mut F) where F: FnMut(&N::L) -> () { if iv.is_empty() { return; } match self.0.val { NodeVal::Leaf(ref l) => { if iv == Interval::new_closed_closed(0, l.len()) { f(l); } else { f(&l.clone().subseq(iv)); } } NodeVal::Internal(ref v) => { let mut offset = 0; for child in v { if iv.is_before(offset) { break; } let child_iv = Interval::new_closed_closed(0, child.len()); // easier just to use signed ints? 
let rec_iv = iv.intersect(child_iv.translate(offset)) .translate_neg(offset); child.visit_subseq::<F>(rec_iv, f); offset += child_iv.size(); } return; } } } */ pub fn push_subseq(&self, b: &mut TreeBuilder<N>, iv: Interval) { if iv.is_empty() { return; } if iv == self.interval() { b.push(self.clone()); return; } match self.0.val { NodeVal::Leaf(ref l) => { b.push_leaf_slice(l, iv); } NodeVal::Internal(ref v) => { let mut offset = 0; for child in v { if iv.is_before(offset) { break; } let child_iv = child.interval(); // easier just to use signed ints? let rec_iv = iv.intersect(child_iv.translate(offset)) .translate_neg(offset); child.push_subseq(b, rec_iv); offset += child.len(); } return; } } } pub fn subseq(&self, iv: Interval) -> Node<N> { let mut b = TreeBuilder::new(); self.push_subseq(&mut b, iv); b.build() } pub fn edit(&mut self, iv: Interval, new: Node<N>) { let mut b = TreeBuilder::new(); let self_iv = Interval::new_closed_closed(0, self.len()); self.push_subseq(&mut b, self_iv.prefix(iv)); b.push(new); self.push_subseq(&mut b, self_iv.suffix(iv)); *self = b.build(); } // doesn't deal with endpoint, handle that specially if you need it pub fn convert_metrics<M1: Metric<N>, M2: Metric<N>>(&self, mut m1: usize) -> usize { if m1 == 0 { return 0; } // If M1 can fragment, then we must land on the leaf containing // the m1 boundary. Otherwise, we can land on the beginning of // the leaf immediately following the M1 boundary, which may be // more efficient. 
let m1_fudge = if M1::can_fragment() { 1 } else { 0 }; let mut m2 = 0; let mut node = self; while node.height() > 0 { for child in node.get_children() { let child_m1 = child.measure::<M1>(); if m1 < child_m1 + m1_fudge { node = child; break; } m2 += child.measure::<M2>(); m1 -= child_m1; } } let l = node.get_leaf(); let base = M1::to_base_units(l, m1); m2 + M2::from_base_units(l, base) } } impl<N: NodeInfo> Default for Node<N> { fn default() -> Node<N> { Node::from_leaf(N::L::default()) } } pub struct TreeBuilder<N: NodeInfo>(Option<Node<N>>); impl<N: NodeInfo> TreeBuilder<N> { pub fn new() -> TreeBuilder<N> { TreeBuilder(None) } // TODO: more sophisticated implementation, so pushing a sequence // is amortized O(n), rather than O(n log n) as now. pub fn push(&mut self, n: Node<N>) { match self.0.take() { None => self.0 = Some(n), Some(buf) => self.0 = Some(Node::concat(buf, n)) } } pub fn push_leaf(&mut self, l: N::L) { self.push(Node::from_leaf(l)) } pub fn push_leaf_slice(&mut self, l: &N::L, iv: Interval) { self.push(Node::from_leaf(l.subseq(iv))) } pub fn build(self) -> Node<N> { match self.0 { Some(r) => r, None => Node::from_leaf(N::L::default()) } } } const CURSOR_CACHE_SIZE: usize = 4; pub struct Cursor<'a, N: 'a + NodeInfo> { root: &'a Node<N>, position: usize, cache: [Option<(&'a Node<N>, usize)>; CURSOR_CACHE_SIZE], leaf: Option<&'a N::L>, offset_of_leaf: usize, } impl<'a, N: NodeInfo> Cursor<'a, N> { pub fn new(n: &'a Node<N>, position: usize) -> Cursor<'a, N> { let mut result = Cursor { root: n, position, cache: [None; CURSOR_CACHE_SIZE], leaf: None, offset_of_leaf: 0, }; result.descend(); result } pub fn total_len(&self) -> usize { self.root.len() } /// return value is leaf (if cursor is valid) and offset within leaf /// /// invariant: offset is at end of leaf iff end of rope pub fn get_leaf(&self) -> Option<(&'a N::L, usize)> { self.leaf.map(|l| (l, self.position - self.offset_of_leaf)) } pub fn set(&mut self, position: usize) { self.position = 
position; if let Some(l) = self.leaf { if self.position >= self.offset_of_leaf && self.position < self.offset_of_leaf + l.len() { return; } } // TODO: walk up tree to find leaf if nearby self.descend(); } pub fn pos(&self) -> usize { self.position } pub fn is_boundary<M: Metric<N>>(&mut self) -> bool { if self.leaf.is_none() { // not at a valid position return false; } if self.position == 0 || (self.position == self.offset_of_leaf && !M::can_fragment()) { return true; } if self.position > self.offset_of_leaf { return M::is_boundary(self.leaf.unwrap(), self.position - self.offset_of_leaf); } // tricky case, at beginning of leaf, need to query end of previous // leaf; TODO: would be nice if we could do it another way that didn't // make the method &self mut. let l = self.prev_leaf().unwrap().0; let result = M::is_boundary(l, l.len()); let _ = self.next_leaf(); result } /// Moves the cursor to the previous boundary, or to the beginning of the /// rope. In the former case, returns the position of the first character /// past this boundary. In the latter case, returns `0`. pub fn prev<M: Metric<N>>(&mut self) -> Option<(usize)> { if self.position == 0 || self.leaf.is_none() { self.leaf = None; return None; } let orig_pos = self.position; let offset_in_leaf = orig_pos - self.offset_of_leaf; if let Some(l) = self.leaf { if offset_in_leaf > 0 { if let Some(offset_in_leaf) = M::prev(l, offset_in_leaf) { self.position = self.offset_of_leaf + offset_in_leaf; return Some(self.position); } } } else { panic!("inconsistent, shouldn't get here"); } // not in same leaf, need to scan backwards // TODO: walk up tree to skip measure-0 nodes loop { if self.offset_of_leaf == 0 { self.position = 0; return Some(self.position); } if let Some((l, _)) = self.prev_leaf() { // TODO: node already has this, no need to recompute. But, we // should be looking at nodes anyway at this point, as we need // to walk up the tree. 
let node_info = N::compute_info(l); if M::measure(&node_info, l.len()) == 0 { // leaf doesn't contain boundary, keep scanning continue; } if self.offset_of_leaf + l.len() < orig_pos && M::is_boundary(l, l.len()) { let _ = self.next_leaf(); return Some(self.position); } if let Some(offset_in_leaf) = M::prev(l, l.len()) { self.position = self.offset_of_leaf + offset_in_leaf; return Some(self.position); } else { panic!("metric is inconsistent, metric > 0 but no boundary"); } } } } /// Moves the cursor to the next boundary, or to the end of the rope. In the /// former case, returns the position of the first character past this /// boundary. In the latter case, returns the length of the rope. pub fn next<M: Metric<N>>(&mut self) -> Option<(usize)> { if self.position >= self.root.len() || self.leaf.is_none() { self.leaf = None; return None; } if let Some(offset) = self.next_inside_leaf::<M>() { return Some(offset); } if let Some(l) = self.leaf { self.position = self.offset_of_leaf + l.len(); for i in 0..CURSOR_CACHE_SIZE { if self.cache[i].is_none() { // we are at the root of the tree. return Some(self.root.len()); } let (node, j) = self.cache[i].unwrap(); let (next_j, offset) = node.next_positive_measure_child::<M>(j+1); self.position += offset; if let Some(next_j) = next_j { self.cache[i] = Some((node, next_j)); let mut node_down = &node.get_children()[next_j]; for k in (0..i).rev() { let (pm_child, offset) = node_down.next_positive_measure_child::<M>(0); let pm_child = pm_child.unwrap(); // at least one child must have positive measure self.position += offset; self.cache[k] = Some((node_down, pm_child)); node_down = &node_down.get_children()[pm_child]; } self.leaf = Some(node_down.get_leaf()); self.offset_of_leaf = self.position; return self.next_inside_leaf::<M>(); } } // At this point, we know that (1) the next boundary is not not in // the cached subtree, (2) self.position corresponds to the begining // of the first leaf after the cached subtree. 
self.descend(); return self.next::<M>(); } else { panic!("inconsistent, shouldn't get here"); } } /// Tries to find the next boundary in the leaf the cursor is currently in. #[inline(always)] fn next_inside_leaf<M: Metric<N>>(&mut self) -> Option<usize> { if let Some(l) = self.leaf { let offset_in_leaf = self.position - self.offset_of_leaf; if let Some(offset_in_leaf) = M::next(l, offset_in_leaf) { if offset_in_leaf == l.len() && self.offset_of_leaf + offset_in_leaf != self.root.len() { let _ = self.next_leaf(); } else { self.position = self.offset_of_leaf + offset_in_leaf; } return Some(self.position); } if self.offset_of_leaf + l.len() == self.root.len() { self.position = self.root.len(); return Some(self.position); } } else { panic!("inconsistent, shouldn't get here"); } None } /// same return as get_leaf, moves to beginning of next leaf pub fn next_leaf(&mut self) -> Option<(&'a N::L, usize)> { if let Some(leaf) = self.leaf { self.position = self.offset_of_leaf + leaf.len(); } else { self.leaf = None; return None; } for i in 0..CURSOR_CACHE_SIZE { if self.cache[i].is_none() { // this probably can't happen self.leaf = None; return None; } let (node, j) = self.cache[i].unwrap(); if j + 1 < node.get_children().len() { self.cache[i] = Some((node, j + 1)); let mut node_down = &node.get_children()[j + 1]; for k in (0..i).rev() { self.cache[k] = Some((node_down, 0)); node_down = &node_down.get_children()[0]; } self.leaf = Some(node_down.get_leaf()); self.offset_of_leaf = self.position; return self.get_leaf(); } } if self.offset_of_leaf + self.leaf.unwrap().len() == self.root.len() { self.leaf = None; return None; } self.descend(); self.get_leaf() } /// same return as get_leaf, moves to beginning of prev leaf pub fn prev_leaf(&mut self) -> Option<(&'a N::L, usize)> { if self.offset_of_leaf == 0 || Some(self.leaf).is_none() { self.leaf = None; return None; } for i in 0..CURSOR_CACHE_SIZE { if self.cache[i].is_none() { self.leaf = None; return None; } let (node, j) = 
self.cache[i].unwrap(); if j > 0 { self.cache[i] = Some((node, j - 1)); let mut node_down = &node.get_children()[j - 1]; for k in (0..i).rev() { let last_ix = node_down.get_children().len() - 1; self.cache[k] = Some((node_down, last_ix)); node_down = &node_down.get_children()[last_ix]; } let leaf = node_down.get_leaf(); self.leaf = Some(leaf); self.offset_of_leaf -= leaf.len(); self.position = self.offset_of_leaf; return self.get_leaf(); } } self.position = self.offset_of_leaf - 1; self.descend(); self.position = self.offset_of_leaf; self.get_leaf() } fn descend(&mut self) { let mut node = self.root; let mut offset = 0; while node.height() > 0 { let children = node.get_children(); let mut i = 0; loop { if i + 1 == children.len() { break; } let nextoff = offset + children[i].len(); if nextoff > self.position { break; } offset = nextoff; i += 1; } let cache_ix = node.height() - 1; if cache_ix < CURSOR_CACHE_SIZE { self.cache[cache_ix] = Some((node, i)); } node = &children[i]; } self.leaf = Some(node.get_leaf()); self.offset_of_leaf = offset; } } /* // How to access the slice type for a leaf, if available. This will // be super helpful in building a chunk iterator (which requires // slices if it's going to conform to Rust's iterator protocol) fn slice<'a, L: Leaf + Index<RangeFull>>(l: &'a L) -> &'a L::Output { l.index(RangeFull) } */ /* // TODO: the following is an example, written during development but // not actually used. Either make it real or delete it. 
#[derive(Clone, Default)] struct BytesLeaf(Vec<u8>); #[derive(Clone)] struct BytesInfo(usize); // leaf doesn't have to be a newtype impl Leaf for Vec<u8> { fn len(&self) -> usize { self.len() } fn is_ok_child(&self) -> bool { self.len() >= 512 } fn push_maybe_split(&mut self, other: &Vec<u8>, iv: Interval) -> Option<Vec<u8>> { let (start, end) = iv.start_end(); self.extend_from_slice(&other[start..end]); if self.len() <= 1024 { None } else { let splitpoint = self.len() / 2; let new = self[splitpoint..].to_owned(); self.truncate(splitpoint); Some(new) } } } impl NodeInfo for BytesInfo { type L = Vec<u8>; type BaseMetric = BytesMetric; fn accumulate(&mut self, other: &Self) { self.0 += other.0; } fn compute_info(l: &Vec<u8>) -> BytesInfo { BytesInfo(l.len()) } } struct BytesMetric(()); impl Metric<BytesInfo> for BytesMetric { fn measure(_: &BytesInfo, len: usize) -> usize { len } fn to_base_units(_: &Vec<u8>, in_measured_units: usize) -> usize { in_measured_units } fn from_base_units(_: &Vec<u8>, in_base_units: usize) -> usize { in_base_units } fn is_boundary(_: &Vec<u8>, _: usize) -> bool { true } fn prev(_: &Vec<u8>, offset: usize) -> Option<usize> { if offset > 0 { Some(offset - 1) } else { None } } fn next(l: &Vec<u8>, offset: usize) -> Option<usize> { if offset < l.len() { Some(offset + 1) } else { None } } fn can_fragment() -> bool { false } } */ #[cfg(test)] mod test { use ::rope::*; use super::*; fn build_triangle(n: u32) -> String { let mut s = String::new(); let mut line = String::new(); for _ in 0 .. 
n { s += &line; s += "\n"; line += "a"; } s } #[test] fn cursor_next_triangle() { let n = 2_000; let text = Rope::from(build_triangle(n)); let mut cursor = Cursor::new(&text, 0); let mut prev_offset = cursor.pos(); for i in 1..(n+1) as usize { let offset = cursor.next::<LinesMetric>().expect("arrived at the end too soon"); assert_eq!(offset - prev_offset, i); prev_offset = offset; } assert_eq!(cursor.next::<LinesMetric>(), None); } #[test] fn cursor_next_empty() { let text = Rope::from(String::new()); let mut cursor = Cursor::new(&text, 0); assert_eq!(cursor.next::<LinesMetric>(), None); assert_eq!(cursor.pos(), 0); } #[test] fn cursor_next_misc() { cursor_next_for("toto"); cursor_next_for("toto\n"); cursor_next_for("toto\ntata"); cursor_next_for("歴史\n科学的"); cursor_next_for("\n歴史\n科学的\n"); cursor_next_for(&build_triangle(100)); } fn cursor_next_for(s: &str) { let r = Rope::from(s.to_owned()); for i in 0..r.len() { let mut c = Cursor::new(&r, i); let it = c.next::<LinesMetric>(); let pos = c.pos(); assert!(s.as_bytes()[i..pos-1].iter().all(|c| *c != b'\n'), "missed linebreak"); if pos < s.len() { assert!(it.is_some(), "must be Some(_)"); assert!(s.as_bytes()[pos-1] == b'\n', "not a linebreak"); } } } #[test] fn cursor_prev_misc() { cursor_prev_for("toto"); cursor_prev_for("toto\n"); cursor_prev_for("toto\ntata"); cursor_prev_for("歴史\n科学的"); cursor_prev_for("\n歴史\n科学的\n"); cursor_prev_for(&build_triangle(100)); } fn cursor_prev_for(s: &str) { let r = Rope::from(s.to_owned()); for i in 0..r.len() { let mut c = Cursor::new(&r, i); let it = c.prev::<LinesMetric>(); let pos = c.pos(); assert!(s.as_bytes()[pos..i].iter().all(|c| *c != b'\n'), "missed linebreak"); if i == 0 && s.as_bytes()[i] == b'\n' { assert_eq!(pos, 0); } if pos > 0 { assert!(it.is_some(), "must be Some(_)"); assert!(s.as_bytes()[pos-1] == b'\n', "not a linebreak"); } } } }
34.108247
129
0.524951
fbaba7b21c1f9bc4db30e71d4171718833261fd9
37,668
use std::collections::{BTreeMap, BTreeSet, }; use std::fmt; use std::marker::PhantomData; use serde::{Deserialize, Serialize, }; use serde::de::{Visitor, MapAccess, SeqAccess, }; use rustc_target::spec; mod link_args { use serde::*; use std::collections::BTreeMap; use rustc_target::spec; use super::*; pub type InK = spec::LinkerFlavor; pub type InV = Vec<String>; pub type OutK = LinkerFlavor; pub type OutV = InV; pub type Output = BTreeMap<InK, InV>; pub fn deserialize<'de, D>(deserializer: D) -> Result<Output, D::Error> where D: Deserializer<'de>, { btree_map::deserialize::<D, InK, InV, OutK, OutV>(deserializer) } pub fn serialize<S>(this: &Output, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { btree_map::serialize::<S, InK, InV, OutK, OutV>(this, serializer) } } mod crt_objects { use serde::*; use std::collections::BTreeMap; use rustc_target::spec; use super::*; pub type InK = spec::LinkOutputKind; pub type InV = Vec<String>; pub type OutK = LinkOutputKind; pub type OutV = InV; pub type Output = BTreeMap<InK, InV>; pub fn deserialize<'de, D>(deserializer: D) -> Result<Output, D::Error> where D: Deserializer<'de>, { btree_map::deserialize::<D, InK, InV, OutK, OutV>(deserializer) } pub fn serialize<S>(this: &Output, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { btree_map::serialize::<S, InK, InV, OutK, OutV>(this, serializer) } } mod crt_objects_fallback { use serde::*; use rustc_target::spec; use super::*; pub type In = spec::crt_objects::CrtObjectsFallback; pub type Out = CrtObjectsFallback; pub type Output = Option<In>; pub fn deserialize<'de, D>(deserializer: D) -> Result<Output, D::Error> where D: Deserializer<'de>, { option::deserialize::<D, In, Out>(deserializer) } pub fn serialize<S>(this: &Output, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { option::serialize::<S, In, Out>(this, serializer) } } mod code_model { use serde::*; use rustc_target::spec; use super::*; pub type In = spec::CodeModel; pub 
type Out = CodeModel; pub type Output = Option<In>; pub fn deserialize<'de, D>(deserializer: D) -> Result<Output, D::Error> where D: Deserializer<'de>, { option::deserialize::<D, In, Out>(deserializer) } pub fn serialize<S>(this: &Output, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { option::serialize::<S, In, Out>(this, serializer) } } /// should be `abis`. oops. mod apis { use serde::*; use rustc_target::spec; use super::*; pub type In = spec::abi::Abi; pub type Out = Abi; pub type Output = Vec<spec::abi::Abi>; pub fn deserialize<'de, D>(deserializer: D) -> Result<Output, D::Error> where D: Deserializer<'de>, { vec::deserialize::<D, In, Out>(deserializer) } pub fn serialize<S>(this: &Output, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { vec::serialize::<S, In, Out>(this, serializer) } } pub mod abi { use serde::*; use rustc_target::spec; use super::*; pub type Output = spec::abi::Abi; #[allow(dead_code)] // for completeness. pub fn deserialize<'de, D>(deserializer: D) -> Result<Output, D::Error> where D: Deserializer<'de>, { Abi::deserialize::<D>(deserializer) .map(Into::into) } pub fn serialize<S>(this: &Output, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { Abi::serialize::<S>(&Abi::from(*this), serializer) } } #[derive(Serialize, Deserialize)] #[serde(remote = "::rustc_target::spec::Target")] pub struct Target { /// Target triple to pass to LLVM. pub llvm_target: String, /// String to use as the `target_endian` `cfg` variable. pub target_endian: String, /// String to use as the `target_pointer_width` `cfg` variable. pub target_pointer_width: String, /// Width of c_int type pub target_c_int_width: String, /// OS name to use for conditional compilation. pub target_os: String, /// Environment name to use for conditional compilation. pub target_env: String, /// Vendor name to use for conditional compilation. pub target_vendor: String, /// Architecture to use for ABI considerations. 
Valid options: "x86", /// "x86_64", "arm", "aarch64", "mips", "powerpc", and "powerpc64". pub arch: String, /// [Data layout](http://llvm.org/docs/LangRef.html#data-layout) to pass to LLVM. pub data_layout: String, /// Linker flavor #[serde(with = "self::linker_flavor")] pub linker_flavor: ::rustc_target::spec::LinkerFlavor, /// Optional settings with defaults. #[serde(with = "TargetOptions")] pub options: ::rustc_target::spec::TargetOptions, } #[derive(Serialize, Deserialize)] #[serde(remote = "::rustc_target::spec::TargetOptions")] pub struct TargetOptions { /// Whether the target is built-in or loaded from a custom target specification. pub is_builtin: bool, /// Linker to invoke pub linker: Option<String>, /// LLD flavor #[serde(with = "LldFlavor")] pub lld_flavor: ::rustc_target::spec::LldFlavor, /// Linker arguments that are passed *before* any user-defined libraries. #[serde(with = "self::link_args")] pub pre_link_args: ::rustc_target::spec::LinkArgs, /// Objects to link before all others, always found within the /// sysroot folder. #[serde(with = "self::crt_objects")] pub pre_link_objects: ::rustc_target::spec::crt_objects::CrtObjects, #[serde(with = "self::crt_objects")] pub post_link_objects: ::rustc_target::spec::crt_objects::CrtObjects, /// Same as `(pre|post)_link_objects`, but when we fail to pull the objects with help of the /// target's native gcc and fall back to the "self-contained" mode and pull them manually. /// See `crt_objects.rs` for some more detailed documentation. #[serde(with = "self::crt_objects")] pub pre_link_objects_fallback: ::rustc_target::spec::crt_objects::CrtObjects, #[serde(with = "self::crt_objects")] pub post_link_objects_fallback: ::rustc_target::spec::crt_objects::CrtObjects, /// Which logic to use to determine whether to fall back to the "self-contained" mode or not. 
#[serde(with = "self::crt_objects_fallback")] pub crt_objects_fallback: Option<::rustc_target::spec::crt_objects::CrtObjectsFallback>, /// Linker arguments that are unconditionally passed after any /// user-defined but before post_link_objects. Standard platform /// libraries that should be always be linked to, usually go here. #[serde(with = "self::link_args")] pub late_link_args: ::rustc_target::spec::LinkArgs, /// Linker arguments used in addition to `late_link_args` if at least one /// Rust dependency is dynamically linked. #[serde(with = "self::link_args")] pub late_link_args_dynamic: ::rustc_target::spec::LinkArgs, /// Linker arguments used in addition to `late_link_args` if aall Rust /// dependencies are statically linked. #[serde(with = "self::link_args")] pub late_link_args_static: ::rustc_target::spec::LinkArgs, /// Objects to link after all others, always found within the /// sysroot folder. /// Linker arguments that are unconditionally passed *after* any /// user-defined libraries. #[serde(with = "self::link_args")] pub post_link_args: ::rustc_target::spec::LinkArgs, /// Optional link script applied to `dylib` and `executable` crate types. /// This is a string containing the script, not a path. Can only be applied /// to linkers where `linker_is_gnu` is true. pub link_script: Option<String>, /// Environment variables to be set before invoking the linker. pub link_env: Vec<(String, String)>, /// Environment variables to be removed for the linker invocation. pub link_env_remove: Vec<String>, /// Extra arguments to pass to the external assembler (when used) pub asm_args: Vec<String>, /// Default CPU to pass to LLVM. Corresponds to `llc -mcpu=$cpu`. Defaults /// to "generic". pub cpu: String, /// Default target features to pass to LLVM. These features will *always* be /// passed, and cannot be disabled even via `-C`. Corresponds to `llc /// -mattr=$features`. pub features: String, /// Whether dynamic linking is available on this target. Defaults to false. 
pub dynamic_linking: bool, /// If dynamic linking is available, whether only cdylibs are supported. pub only_cdylib: bool, /// Whether executables are available on this target. iOS, for example, only allows static /// libraries. Defaults to false. pub executables: bool, /// Relocation model to use in object file. Corresponds to `llc /// -relocation-model=$relocation_model`. Defaults to "pic". #[serde(with = "RelocModel")] pub relocation_model: ::rustc_target::spec::RelocModel, /// Code model to use. Corresponds to `llc -code-model=$code_model`. #[serde(with = "self::code_model")] pub code_model: Option<::rustc_target::spec::CodeModel>, /// TLS model to use. Options are "global-dynamic" (default), "local-dynamic", "initial-exec" /// and "local-exec". This is similar to the -ftls-model option in GCC/Clang. #[serde(with = "TlsModel")] pub tls_model: ::rustc_target::spec::TlsModel, /// Do not emit code that uses the "red zone", if the ABI has one. Defaults to false. pub disable_redzone: bool, /// Eliminate frame pointers from stack frames if possible. Defaults to true. pub eliminate_frame_pointer: bool, /// Emit each function in its own section. Defaults to true. pub function_sections: bool, /// String to prepend to the name of every dynamic library. Defaults to "lib". pub dll_prefix: String, /// String to append to the name of every dynamic library. Defaults to ".so". pub dll_suffix: String, /// String to append to the name of every executable. pub exe_suffix: String, /// String to prepend to the name of every static library. Defaults to "lib". pub staticlib_prefix: String, /// String to append to the name of every static library. Defaults to ".a". pub staticlib_suffix: String, /// OS family to use for conditional compilation. Valid options: "unix", "windows". pub target_family: Option<String>, /// Whether the target toolchain's ABI supports returning small structs as an integer. pub abi_return_struct_as_int: bool, /// Whether the target toolchain is like macOS's. 
Only useful for compiling against iOS/macOS, /// in particular running dsymutil and some other stuff like `-dead_strip`. Defaults to false. pub is_like_osx: bool, /// Whether the target toolchain is like Solaris's. /// Only useful for compiling against Illumos/Solaris, /// as they have a different set of linker flags. Defaults to false. pub is_like_solaris: bool, /// Whether the target toolchain is like Windows'. Only useful for compiling against Windows, /// only really used for figuring out how to find libraries, since Windows uses its own /// library naming convention. Defaults to false. pub is_like_windows: bool, pub is_like_msvc: bool, /// Whether the target toolchain is like Android's. Only useful for compiling against Android. /// Defaults to false. pub is_like_android: bool, /// Whether the target toolchain is like Emscripten's. Only useful for compiling with /// Emscripten toolchain. /// Defaults to false. pub is_like_emscripten: bool, /// Whether the target toolchain is like Fuchsia's. pub is_like_fuchsia: bool, /// Whether the linker support GNU-like arguments such as -O. Defaults to false. pub linker_is_gnu: bool, /// The MinGW toolchain has a known issue that prevents it from correctly /// handling COFF object files with more than 2<sup>15</sup> sections. Since each weak /// symbol needs its own COMDAT section, weak linkage implies a large /// number sections that easily exceeds the given limit for larger /// codebases. Consequently we want a way to disallow weak linkage on some /// platforms. pub allows_weak_linkage: bool, /// Whether the linker support rpaths or not. Defaults to false. pub has_rpath: bool, /// Whether to disable linking to the default libraries, typically corresponds /// to `-nodefaultlibs`. Defaults to true. pub no_default_libraries: bool, /// Dynamically linked executables can be compiled as position independent /// if the default relocation model of position independent code is not /// changed. 
This is a requirement to take advantage of ASLR, as otherwise /// the functions in the executable are not randomized and can be used /// during an exploit of a vulnerability in any code. pub position_independent_executables: bool, /// Executables that are both statically linked and position-independent are supported. pub static_position_independent_executables: bool, /// Determines if the target always requires using the PLT for indirect /// library calls or not. This controls the default value of the `-Z plt` flag. pub needs_plt: bool, /// Either partial, full, or off. Full RELRO makes the dynamic linker /// resolve all symbols at startup and marks the GOT read-only before /// starting the program, preventing overwriting the GOT. #[serde(with = "RelroLevel")] pub relro_level: ::rustc_target::spec::RelroLevel, /// Format that archives should be emitted in. This affects whether we use /// LLVM to assemble an archive or fall back to the system linker, and /// currently only "gnu" is used to fall into LLVM. Unknown strings cause /// the system linker to be used. pub archive_format: String, /// Is asm!() allowed? Defaults to true. pub allow_asm: bool, /// Whether the runtime startup code requires the `main` function be passed /// `argc` and `argv` values. pub main_needs_argc_argv: bool, /// Flag indicating whether ELF TLS (e.g. #[thread_local]) is available for /// this target. pub has_elf_tls: bool, // This is mainly for easy compatibility with emscripten. // If we give emcc .o files that are actually .bc files it // will 'just work'. pub obj_is_bitcode: bool, /// Whether the target requires that emitted object code includes bitcode. pub forces_embed_bitcode: bool, /// Content of the LLVM cmdline section associated with embedded bitcode. pub bitcode_llvm_cmdline: String, /// Don't use this field; instead use the `.min_atomic_width()` method. pub min_atomic_width: Option<u64>, /// Don't use this field; instead use the `.max_atomic_width()` method. 
pub max_atomic_width: Option<u64>, /// Whether the target supports atomic CAS operations natively pub atomic_cas: bool, /// Panic strategy: "unwind" or "abort" #[serde(with = "PanicStrategy")] pub panic_strategy: ::rustc_target::spec::PanicStrategy, /// A blacklist of ABIs unsupported by the current target. Note that generic /// ABIs are considered to be supported on all platforms and cannot be blacklisted. #[serde(with = "self::apis")] pub unsupported_abis: Vec<::rustc_target::spec::abi::Abi>, /// Whether or not linking dylibs to a static CRT is allowed. pub crt_static_allows_dylibs: bool, /// Whether or not the CRT is statically linked by default. pub crt_static_default: bool, /// Whether or not crt-static is respected by the compiler (or is a no-op). pub crt_static_respected: bool, /// Whether or not stack probes (__rust_probestack) are enabled pub stack_probes: bool, /// The minimum alignment for global symbols. pub min_global_align: Option<u64>, /// Default number of codegen units to use in debug mode pub default_codegen_units: Option<u64>, /// Whether to generate trap instructions in places where optimization would /// otherwise produce control flow that falls through into unrelated memory. pub trap_unreachable: bool, /// This target requires everything to be compiled with LTO to emit a final /// executable, aka there is no native linker for this target. pub requires_lto: bool, /// This target has no support for threads. pub singlethread: bool, /// Whether library functions call lowering/optimization is disabled in LLVM /// for this target unconditionally. 
pub no_builtins: bool, /// The default visibility for symbols in this target should be "hidden" /// rather than "default" pub default_hidden_visibility: bool, /// Whether a .debug_gdb_scripts section will be added to the output object file pub emit_debug_gdb_scripts: bool, /// Whether or not to unconditionally `uwtable` attributes on functions, /// typically because the platform needs to unwind for things like stack /// unwinders. pub requires_uwtable: bool, /// Whether or not SIMD types are passed by reference in the Rust ABI, /// typically required if a target can be compiled with a mixed set of /// target features. This is `true` by default, and `false` for targets like /// wasm32 where the whole program either has simd or not. pub simd_types_indirect: bool, /// Pass a list of symbol which should be exported in the dylib to the linker. pub limit_rdylib_exports: bool, /// If set, have the linker export exactly these symbols, instead of using /// the usual logic to figure this out from the crate itself. pub override_export_symbols: Option<Vec<String>>, /// Description of all address spaces and how they are shared with one another. /// Defaults to a single, flat, address space. Note it is generally assumed that /// the address space `0` is your flat address space. #[serde(with = "AddrSpaces")] pub addr_spaces: ::rustc_target::spec::AddrSpaces, /// Determines how or whether the MergeFunctions LLVM pass should run for /// this target. Either "disabled", "trampolines", or "aliases". /// The MergeFunctions pass is generally useful, but some targets may need /// to opt out. The default is "aliases". 
/// /// Workaround for: https://github.com/rust-lang/rust/issues/57356 #[serde(with = "MergeFunctions")] pub merge_functions: ::rustc_target::spec::MergeFunctions, /// Use platform dependent mcount function pub target_mcount: String, /// LLVM ABI name, corresponds to the '-mabi' parameter available in multilib C compilers pub llvm_abiname: String, /// Whether or not RelaxElfRelocation flag will be passed to the linker pub relax_elf_relocations: bool, /// Additional arguments to pass to LLVM, similar to the `-C llvm-args` codegen option. pub llvm_args: Vec<String>, /// Whether to use legacy .ctors initialization hooks rather than .init_array. Defaults /// to false (uses .init_array). pub use_ctors_section: bool, /// Whether the linker is instructed to add a `GNU_EH_FRAME` ELF header /// used to locate unwinding information is passed /// (only has effect if the linker is `ld`-like). pub eh_frame_header: bool, } #[derive(Serialize, Deserialize)] pub enum CrtObjectsFallback { Musl, Mingw, Wasm, } impl Into<spec::crt_objects::CrtObjectsFallback> for CrtObjectsFallback { fn into(self) -> spec::crt_objects::CrtObjectsFallback { use self::CrtObjectsFallback::*; use rustc_target::spec::crt_objects::CrtObjectsFallback; match self { Musl => CrtObjectsFallback::Musl, Mingw => CrtObjectsFallback::Mingw, Wasm => CrtObjectsFallback::Wasm, } } } impl From<spec::crt_objects::CrtObjectsFallback> for CrtObjectsFallback { fn from(v: spec::crt_objects::CrtObjectsFallback) -> CrtObjectsFallback { use rustc_target::spec::crt_objects::CrtObjectsFallback::*; match v { Musl => CrtObjectsFallback::Musl, Mingw => CrtObjectsFallback::Mingw, Wasm => CrtObjectsFallback::Wasm, } } } #[derive(Serialize, Deserialize)] pub enum LinkOutputKind { /// Dynamically linked non position-independent executable. DynamicNoPicExe, /// Dynamically linked position-independent executable. DynamicPicExe, /// Statically linked non position-independent executable. 
StaticNoPicExe, /// Statically linked position-independent executable. StaticPicExe, /// Regular dynamic library ("dynamically linked"). DynamicDylib, /// Dynamic library with bundled libc ("statically linked"). StaticDylib, } impl Into<spec::LinkOutputKind> for LinkOutputKind { fn into(self) -> spec::LinkOutputKind { use self::LinkOutputKind::*; use rustc_target::spec::LinkOutputKind; match self { DynamicNoPicExe => LinkOutputKind::DynamicNoPicExe, DynamicPicExe => LinkOutputKind::DynamicPicExe, StaticNoPicExe => LinkOutputKind::StaticNoPicExe, StaticPicExe => LinkOutputKind::StaticPicExe, DynamicDylib => LinkOutputKind::DynamicDylib, StaticDylib => LinkOutputKind::StaticDylib, } } } impl From<spec::LinkOutputKind> for LinkOutputKind { fn from(v: spec::LinkOutputKind) -> LinkOutputKind { use rustc_target::spec::LinkOutputKind::*; match v { DynamicNoPicExe => LinkOutputKind::DynamicNoPicExe, DynamicPicExe => LinkOutputKind::DynamicPicExe, StaticNoPicExe => LinkOutputKind::StaticNoPicExe, StaticPicExe => LinkOutputKind::StaticPicExe, DynamicDylib => LinkOutputKind::DynamicDylib, StaticDylib => LinkOutputKind::StaticDylib, } } } #[derive(Serialize, Deserialize)] pub enum CodeModel { Tiny, Small, Kernel, Medium, Large, } impl Into<spec::CodeModel> for CodeModel { fn into(self) -> spec::CodeModel { use self::CodeModel::*; use rustc_target::spec::CodeModel; match self { Tiny => CodeModel::Tiny, Small => CodeModel::Small, Kernel => CodeModel::Kernel, Medium => CodeModel::Medium, Large => CodeModel::Large, } } } impl From<spec::CodeModel> for CodeModel { fn from(v: spec::CodeModel) -> CodeModel { use rustc_target::spec::CodeModel::*; match v { Tiny => CodeModel::Tiny, Small => CodeModel::Small, Kernel => CodeModel::Kernel, Medium => CodeModel::Medium, Large => CodeModel::Large, } } } #[derive(Serialize, Deserialize)] #[serde(remote = "::rustc_target::spec::RelocModel")] pub enum RelocModel { Static, Pic, DynamicNoPic, Ropi, Rwpi, RopiRwpi, } #[derive(Serialize, 
Deserialize)] #[serde(remote = "::rustc_target::spec::TlsModel")] pub enum TlsModel { GeneralDynamic, LocalDynamic, InitialExec, LocalExec, } #[derive(Serialize, Deserialize)] #[serde(remote = "::rustc_target::spec::AddrSpaceIdx")] pub struct AddrSpaceIdx(pub u32); mod addr_spaces { use serde::*; use std::collections::BTreeMap; use rustc_target::spec; use super::*; pub type InK = spec::AddrSpaceKind; pub type InV = spec::AddrSpaceProps; pub type OutK = AddrSpaceKind; pub type OutV = AddrSpaceProps; pub type Output = BTreeMap<InK, InV>; pub fn deserialize<'de, D>(deserializer: D) -> Result<Output, D::Error> where D: Deserializer<'de>, { btree_map::deserialize::<D, InK, InV, OutK, OutV>(deserializer) } pub fn serialize<S>(this: &Output, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { btree_map::serialize::<S, InK, InV, OutK, OutV>(this, serializer) } } #[derive(Serialize, Deserialize)] #[serde(remote = "::rustc_target::spec::AddrSpaces")] pub struct AddrSpaces(#[serde(with = "self::addr_spaces")] pub BTreeMap< ::rustc_target::spec::AddrSpaceKind, ::rustc_target::spec::AddrSpaceProps >); #[derive(Serialize, Deserialize, Ord, PartialOrd, PartialEq, Eq)] pub enum AddrSpaceKind { Flat, Alloca, /// aka constant ReadOnly, /// aka global ReadWrite, Instruction, Named(String), } impl Into<spec::AddrSpaceKind> for AddrSpaceKind { fn into(self) -> spec::AddrSpaceKind { match self { AddrSpaceKind::Flat => spec::AddrSpaceKind::Flat, AddrSpaceKind::Alloca => spec::AddrSpaceKind::Alloca, AddrSpaceKind::ReadOnly => spec::AddrSpaceKind::ReadOnly, AddrSpaceKind::ReadWrite => spec::AddrSpaceKind::ReadWrite, AddrSpaceKind::Instruction => spec::AddrSpaceKind::Instruction, AddrSpaceKind::Named(name) => spec::AddrSpaceKind::Named(name), } } } impl From<spec::AddrSpaceKind> for AddrSpaceKind { fn from(v: spec::AddrSpaceKind) -> Self { match v { spec::AddrSpaceKind::Flat => AddrSpaceKind::Flat, spec::AddrSpaceKind::Alloca => AddrSpaceKind::Alloca, 
spec::AddrSpaceKind::ReadOnly => AddrSpaceKind::ReadOnly, spec::AddrSpaceKind::ReadWrite => AddrSpaceKind::ReadWrite, spec::AddrSpaceKind::Instruction => AddrSpaceKind::Instruction, spec::AddrSpaceKind::Named(name) => AddrSpaceKind::Named(name), } } } mod addr_space_props_shared_with { use serde::*; use std::collections::BTreeSet; use rustc_target::spec; use super::*; pub type InK = spec::AddrSpaceKind; pub type OutK = AddrSpaceKind; pub type Output = BTreeSet<InK>; pub fn deserialize<'de, D>(deserializer: D) -> Result<Output, D::Error> where D: Deserializer<'de>, { btree_set::deserialize::<D, InK, OutK>(deserializer) } pub fn serialize<S>(this: &Output, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { btree_set::serialize::<S, InK, OutK>(this, serializer) } } /// TODO: make it so we don't have to clone this to serialize it. #[derive(Serialize, Deserialize)] pub struct AddrSpaceProps { #[serde(with = "AddrSpaceIdx")] pub index: spec::AddrSpaceIdx, /// Indicates which addr spaces this addr space can be addrspacecast-ed to. 
#[serde(with = "self::addr_space_props_shared_with")] pub shared_with: BTreeSet<spec::AddrSpaceKind>, } impl Into<spec::AddrSpaceProps> for AddrSpaceProps { fn into(self) -> spec::AddrSpaceProps { let AddrSpaceProps { index, shared_with, } = self; spec::AddrSpaceProps { index, shared_with, } } } impl From<spec::AddrSpaceProps> for AddrSpaceProps { fn from(v: spec::AddrSpaceProps) -> Self { let spec::AddrSpaceProps { index, shared_with, } = v; AddrSpaceProps { index, shared_with, } } } #[derive(Serialize, Deserialize)] #[serde(remote = "spec::RelroLevel")] pub enum RelroLevel { Full, Partial, Off, None, } #[derive(Serialize, Deserialize)] #[serde(remote = "spec::PanicStrategy")] pub enum PanicStrategy { Unwind, Abort, } #[derive(Serialize, Deserialize)] pub enum LinkerFlavor { Em, Gcc, Ld, Msvc, Lld(#[serde(with = "LldFlavor")] ::rustc_target::spec::LldFlavor), PtxLinker, } mod linker_flavor { use serde::*; use rustc_target::spec; use super::*; pub type Output = spec::LinkerFlavor; pub fn deserialize<'de, D>(deserializer: D) -> Result<Output, D::Error> where D: Deserializer<'de>, { Ok(LinkerFlavor::deserialize(deserializer)?.into()) } pub fn serialize<S>(this: &Output, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { let this = this.clone().into(); LinkerFlavor::serialize(&this, serializer) } } impl Into<spec::LinkerFlavor> for LinkerFlavor { fn into(self) -> spec::LinkerFlavor { use self::LinkerFlavor::*; use rustc_target::spec::LinkerFlavor; match self { Em => LinkerFlavor::Em, Gcc => LinkerFlavor::Gcc, Ld => LinkerFlavor::Ld, Msvc => LinkerFlavor::Msvc, Lld(v) => LinkerFlavor::Lld(v.into()), PtxLinker => LinkerFlavor::PtxLinker, } } } impl From<spec::LinkerFlavor> for LinkerFlavor { fn from(v: spec::LinkerFlavor) -> LinkerFlavor { use rustc_target::spec::LinkerFlavor::*; match v { Em => LinkerFlavor::Em, Gcc => LinkerFlavor::Gcc, Ld => LinkerFlavor::Ld, Msvc => LinkerFlavor::Msvc, Lld(v) => LinkerFlavor::Lld(v.into()), PtxLinker => 
LinkerFlavor::PtxLinker, } } } #[derive(Serialize, Deserialize)] #[serde(remote = "spec::LldFlavor")] pub enum LldFlavor { Wasm, Ld64, Ld, Link, } impl Into<spec::LldFlavor> for LldFlavor { fn into(self) -> spec::LldFlavor { use self::LldFlavor::*; use rustc_target::spec::LldFlavor; match self { Wasm => LldFlavor::Wasm, Ld64 => LldFlavor::Ld64, Ld => LldFlavor::Ld, Link => LldFlavor::Link, } } } impl From<spec::LldFlavor> for LldFlavor { fn from(v: spec::LldFlavor) -> LldFlavor { use rustc_target::spec::LldFlavor::*; match v { Wasm => LldFlavor::Wasm, Ld64 => LldFlavor::Ld64, Ld => LldFlavor::Ld, Link => LldFlavor::Link, } } } #[derive(Serialize, Deserialize)] #[serde(remote = "::rustc_target::spec::MergeFunctions")] pub enum MergeFunctions { Disabled, Trampolines, Aliases } #[derive(Serialize, Deserialize)] pub enum Abi { // NB: This ordering MUST match the AbiDatas array below. // (This is ensured by the test indices_are_correct().) // Single platform ABIs Cdecl, Stdcall, Fastcall, Vectorcall, Thiscall, Aapcs, Win64, SysV64, PtxKernel, Msp430Interrupt, X86Interrupt, AmdGpuKernel, SpirKernel, EfiApi, AvrInterrupt, AvrNonBlockingInterrupt, // Multiplatform / generic ABIs Rust, C, System, RustIntrinsic, RustCall, PlatformIntrinsic, Unadjusted } impl Into<spec::abi::Abi> for Abi { fn into(self) -> spec::abi::Abi { use self::Abi::*; use rustc_target::spec::abi::Abi; match self { // Single platform ABIs Cdecl => Abi::Cdecl, Stdcall => Abi::Stdcall, Fastcall => Abi::Fastcall, Vectorcall => Abi::Vectorcall, Thiscall => Abi::Thiscall, Aapcs => Abi::Aapcs, Win64 => Abi::Win64, SysV64 => Abi::SysV64, PtxKernel => Abi::PtxKernel, Msp430Interrupt => Abi::Msp430Interrupt, X86Interrupt => Abi::X86Interrupt, AmdGpuKernel => Abi::AmdGpuKernel, SpirKernel => Abi::SpirKernel, EfiApi => Abi::EfiApi, AvrInterrupt => Abi::AvrInterrupt, AvrNonBlockingInterrupt => Abi::AvrNonBlockingInterrupt, // Multiplatform / generic ABIs Rust => Abi::Rust, C => Abi::C, System => Abi::System, 
RustIntrinsic => Abi::RustIntrinsic, RustCall => Abi::RustCall, PlatformIntrinsic => Abi::PlatformIntrinsic, Unadjusted => Abi::Unadjusted, } } } impl From<spec::abi::Abi> for Abi { fn from(v: spec::abi::Abi) -> Abi { use rustc_target::spec::abi::Abi::*; match v { // Single platform ABIs Cdecl => Abi::Cdecl, Stdcall => Abi::Stdcall, Fastcall => Abi::Fastcall, Vectorcall => Abi::Vectorcall, Thiscall => Abi::Thiscall, Aapcs => Abi::Aapcs, Win64 => Abi::Win64, SysV64 => Abi::SysV64, PtxKernel => Abi::PtxKernel, Msp430Interrupt => Abi::Msp430Interrupt, X86Interrupt => Abi::X86Interrupt, AmdGpuKernel => Abi::AmdGpuKernel, SpirKernel => Abi::SpirKernel, EfiApi => Abi::EfiApi, AvrInterrupt => Abi::AvrInterrupt, AvrNonBlockingInterrupt => Abi::AvrNonBlockingInterrupt, // Multiplatform / generic ABIs Rust => Abi::Rust, C => Abi::C, System => Abi::System, RustIntrinsic => Abi::RustIntrinsic, RustCall => Abi::RustCall, PlatformIntrinsic => Abi::PlatformIntrinsic, Unadjusted => Abi::Unadjusted, } } } pub struct BTreeMapDeVisitor<InK, InV, OutK, OutV> { _m: PhantomData<(InK, InV, OutK, OutV)>, } impl<InK, InV, OutK, OutV> Default for BTreeMapDeVisitor<InK, InV, OutK, OutV> { fn default() -> Self { BTreeMapDeVisitor { _m: PhantomData, } } } impl<'de, InK, InV, OutK, OutV> Visitor<'de> for BTreeMapDeVisitor<InK, InV, OutK, OutV> where InK: Ord, OutK: Deserialize<'de> + Into<InK>, OutV: Deserialize<'de> + Into<InV>, { // The type that our Visitor is going to produce. type Value = BTreeMap<InK, InV>; // Format a message stating what data this Visitor expects to receive. fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter .write_str("BTreeMap for type which don't impl Deserialize") } fn visit_map<M>(self, mut access: M) -> Result<Self::Value, M::Error> where M: MapAccess<'de>, { let mut map = BTreeMap::new(); // While there are entries remaining in the input, add them // into our map. while let Some((key, value)) = access.next_entry()? 
{ let key: OutK = key; let value: OutV = value; let key: InK = key.into(); let value: InV = value.into(); map.insert(key, value); } Ok(map) } } pub struct BTreeSetDeVisitor<InK, OutK> { _m: PhantomData<(InK, OutK)>, } impl<InK, OutK> Default for BTreeSetDeVisitor<InK, OutK> { fn default() -> Self { BTreeSetDeVisitor { _m: PhantomData, } } } impl<'de, InK, OutK> Visitor<'de> for BTreeSetDeVisitor<InK, OutK> where InK: Ord, OutK: Deserialize<'de> + Into<InK>, { // The type that our Visitor is going to produce. type Value = BTreeSet<InK>; // Format a message stating what data this Visitor expects to receive. fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter .write_str("BTreeMap for type which don't impl Deserialize") } fn visit_seq<M>(self, mut access: M) -> Result<Self::Value, M::Error> where M: SeqAccess<'de>, { let mut map = BTreeSet::new(); // While there are entries remaining in the input, add them // into our map. while let Some(key) = access.next_element()? { let key: OutK = key; let key: InK = key.into(); map.insert(key); } Ok(map) } } pub struct VecDeVisitor<InK, OutK> { _m: PhantomData<(InK, OutK)>, } impl<InK, OutK> Default for VecDeVisitor<InK, OutK> { fn default() -> Self { VecDeVisitor { _m: PhantomData, } } } impl<'de, InK, OutK> Visitor<'de> for VecDeVisitor<InK, OutK> where OutK: Deserialize<'de> + Into<InK>, { // The type that our Visitor is going to produce. type Value = Vec<InK>; // Format a message stating what data this Visitor expects to receive. fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { formatter .write_str("BTreeMap for type which don't impl Deserialize") } fn visit_seq<M>(self, mut access: M) -> Result<Self::Value, M::Error> where M: SeqAccess<'de>, { let size = access.size_hint().unwrap_or_default(); let mut map = Vec::with_capacity(size); // While there are entries remaining in the input, add them // into our map. while let Some(key) = access.next_element()? 
{ let key: OutK = key; let key: InK = key.into(); map.push(key); } Ok(map) } } mod btree_set { use serde::*; use serde::ser::*; use std::collections::BTreeSet; use super::*; pub fn deserialize<'de, D, InK, OutK>(deserializer: D) -> Result<BTreeSet<InK>, D::Error> where D: Deserializer<'de>, InK: Ord, OutK: Deserialize<'de> + Into<InK>, { let visitor: BTreeSetDeVisitor<InK, OutK> = Default::default(); let r = deserializer.deserialize_seq(visitor)?; Ok(r) } pub fn serialize<S, InK, OutK>(this: &BTreeSet<InK>, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, InK: Clone + Into<OutK> + Ord, OutK: Serialize, { let mut map = serializer.serialize_seq(Some(this.len()))?; for k in this.iter() { let k: OutK = k.clone().into(); map.serialize_element(&k)?; } map.end() } } mod btree_map { use serde::*; use serde::ser::*; use std::collections::BTreeMap; use super::*; pub fn deserialize<'de, D, InK, InV, OutK, OutV>(deserializer: D) -> Result<BTreeMap<InK, InV>, D::Error> where D: Deserializer<'de>, InK: Ord, OutK: Deserialize<'de> + Into<InK>, OutV: Deserialize<'de> + Into<InV>, { let visitor: BTreeMapDeVisitor<InK, InV, OutK, OutV> = Default::default(); let r = deserializer.deserialize_map(visitor)?; Ok(r) } pub fn serialize<S, InK, InV, OutK, OutV>(this: &BTreeMap<InK, InV>, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, InK: Clone + Into<OutK> + Ord, InV: Clone + Into<OutV>, OutK: Serialize, OutV: Serialize, { let mut map = serializer.serialize_map(Some(this.len()))?; for (k, v) in this.iter() { let k: OutK = k.clone().into(); let v: OutV = v.clone().into(); map.serialize_entry(&k, &v)?; } map.end() } } mod vec { use serde::*; use serde::ser::*; use super::*; pub fn deserialize<'de, D, InK, OutK>(deserializer: D) -> Result<Vec<InK>, D::Error> where D: Deserializer<'de>, OutK: Deserialize<'de> + Into<InK>, { let visitor: VecDeVisitor<InK, OutK> = Default::default(); let r = deserializer.deserialize_seq(visitor)?; Ok(r) } pub fn serialize<S, InK, 
OutK>(this: &Vec<InK>, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, InK: Clone + Into<OutK>, OutK: Serialize, { let mut map = serializer.serialize_seq(Some(this.len()))?; for k in this.iter() { let k: OutK = k.clone().into(); map.serialize_element(&k)?; } map.end() } } mod option { use serde::*; pub fn deserialize<'de, D, InK, OutK>(deserializer: D) -> Result<Option<InK>, D::Error> where D: Deserializer<'de>, OutK: Deserialize<'de> + Into<InK>, { let i = <Option<OutK>>::deserialize(deserializer)? .map(Into::into); Ok(i) } pub fn serialize<S, InK, OutK>(this: &Option<InK>, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, InK: Clone + Into<OutK>, OutK: Serialize, { this.clone().map(Into::into) .serialize(serializer) } }
31.600671
96
0.673251
2f489a7ed141eaca32566e5f91b5cdd998ad3176
19,268
//! This module implements the global `RegExp` object. //! //! `The `RegExp` object is used for matching text with a pattern. //! //! More information: //! - [ECMAScript reference][spec] //! - [MDN documentation][mdn] //! //! [spec]: https://tc39.es/ecma262/#sec-regexp-constructor //! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp use crate::{ builtins::BuiltIn, gc::{empty_trace, Finalize, Trace}, object::{ConstructorBuilder, ObjectData}, property::{Attribute, DataDescriptor}, value::{RcString, Value}, BoaProfiler, Context, Result, }; use regress::Regex; #[cfg(test)] mod tests; /// The internal representation on a `RegExp` object. #[derive(Debug, Clone, Finalize)] pub struct RegExp { /// Regex matcher. matcher: Regex, /// Update last_index, set if global or sticky flags are set. use_last_index: bool, /// String of parsed flags. flags: Box<str>, /// Flag 's' - dot matches newline characters. dot_all: bool, /// Flag 'g' global: bool, /// Flag 'i' - ignore case. ignore_case: bool, /// Flag 'm' - '^' and '$' match beginning/end of line. multiline: bool, /// Flag 'y' sticky: bool, /// Flag 'u' - Unicode. unicode: bool, pub(crate) original_source: Box<str>, original_flags: Box<str>, } // Only safe while regress::Regex doesn't implement Trace itself. 
unsafe impl Trace for RegExp { empty_trace!(); } impl BuiltIn for RegExp { const NAME: &'static str = "RegExp"; fn attribute() -> Attribute { Attribute::WRITABLE | Attribute::NON_ENUMERABLE | Attribute::CONFIGURABLE } fn init(context: &mut Context) -> (&'static str, Value, Attribute) { let _timer = BoaProfiler::global().start_event(Self::NAME, "init"); let regexp_object = ConstructorBuilder::with_standard_object( context, Self::constructor, context.standard_objects().regexp_object().clone(), ) .name(Self::NAME) .length(Self::LENGTH) .property("lastIndex", 0, Attribute::all()) .method(Self::test, "test", 1) .method(Self::exec, "exec", 1) .method(Self::to_string, "toString", 0) .build(); // TODO: add them RegExp accessor properties (Self::NAME, regexp_object.into(), Self::attribute()) } } impl RegExp { /// The name of the object. pub(crate) const NAME: &'static str = "RegExp"; /// The amount of arguments this function object takes. pub(crate) const LENGTH: usize = 2; /// Create a new `RegExp` pub(crate) fn constructor(this: &Value, args: &[Value], _: &mut Context) -> Result<Value> { let arg = args.get(0).ok_or_else(Value::undefined)?; let (regex_body, mut regex_flags) = match arg { Value::String(ref body) => { // first argument is a string -> use it as regex pattern ( body.to_string().into_boxed_str(), String::new().into_boxed_str(), ) } Value::Object(ref obj) => { let obj = obj.borrow(); if let Some(regex) = obj.as_regexp() { // first argument is another `RegExp` object, so copy its pattern and flags (regex.original_source.clone(), regex.original_flags.clone()) } else { ( String::new().into_boxed_str(), String::new().into_boxed_str(), ) } } _ => return Err(Value::undefined()), }; // if a second argument is given and it's a string, use it as flags if let Some(Value::String(flags)) = args.get(1) { regex_flags = flags.to_string().into_boxed_str(); } // parse flags let mut sorted_flags = String::new(); let mut dot_all = false; let mut global = false; let mut ignore_case 
= false; let mut multiline = false; let mut sticky = false; let mut unicode = false; if regex_flags.contains('g') { global = true; sorted_flags.push('g'); } if regex_flags.contains('i') { ignore_case = true; sorted_flags.push('i'); } if regex_flags.contains('m') { multiline = true; sorted_flags.push('m'); } if regex_flags.contains('s') { dot_all = true; sorted_flags.push('s'); } if regex_flags.contains('u') { unicode = true; sorted_flags.push('u'); } if regex_flags.contains('y') { sticky = true; sorted_flags.push('y'); } let matcher = Regex::with_flags(&regex_body, sorted_flags.as_str()) .expect("failed to create matcher"); let regexp = RegExp { matcher, use_last_index: global || sticky, flags: sorted_flags.into_boxed_str(), dot_all, global, ignore_case, multiline, sticky, unicode, original_source: regex_body, original_flags: regex_flags, }; this.set_data(ObjectData::RegExp(Box::new(regexp))); Ok(this.clone()) } // /// `RegExp.prototype.dotAll` // /// // /// The `dotAll` property indicates whether or not the "`s`" flag is used with the regular expression. // /// // /// More information: // /// - [ECMAScript reference][spec] // /// - [MDN documentation][mdn] // /// // /// [spec]: https://tc39.es/ecma262/#sec-get-regexp.prototype.dotAll // /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/dotAll // fn get_dot_all(this: &Value, _: &[Value], _: &mut Context) -> Result<Value> { // this.with_internal_state_ref(|regex: &RegExp| Ok(Value::from(regex.dot_all))) // } // /// `RegExp.prototype.flags` // /// // /// The `flags` property returns a string consisting of the [`flags`][flags] of the current regular expression object. 
// /// // /// More information: // /// - [ECMAScript reference][spec] // /// - [MDN documentation][mdn] // /// // /// [spec]: https://tc39.es/ecma262/#sec-get-regexp.prototype.flags // /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/flags // /// [flags]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions#Advanced_searching_with_flags_2 // fn get_flags(this: &Value, _: &[Value], _: &mut Context) -> Result<Value> { // this.with_internal_state_ref(|regex: &RegExp| Ok(Value::from(regex.flags.clone()))) // } // /// `RegExp.prototype.global` // /// // /// The `global` property indicates whether or not the "`g`" flag is used with the regular expression. // /// // /// More information: // /// - [ECMAScript reference][spec] // /// - [MDN documentation][mdn] // /// // /// [spec]: https://tc39.es/ecma262/#sec-get-regexp.prototype.global // /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/global // fn get_global(this: &Value, _: &[Value], _: &mut Context) -> Result<Value> { // this.with_internal_state_ref(|regex: &RegExp| Ok(Value::from(regex.global))) // } // /// `RegExp.prototype.ignoreCase` // /// // /// The `ignoreCase` property indicates whether or not the "`i`" flag is used with the regular expression. // /// // /// More information: // /// - [ECMAScript reference][spec] // /// - [MDN documentation][mdn] // /// // /// [spec]: https://tc39.es/ecma262/#sec-get-regexp.prototype.ignorecase // /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/ignoreCase // fn get_ignore_case(this: &Value, _: &[Value], _: &mut Context) -> Result<Value> { // this.with_internal_state_ref(|regex: &RegExp| Ok(Value::from(regex.ignore_case))) // } // /// `RegExp.prototype.multiline` // /// // /// The multiline property indicates whether or not the "m" flag is used with the regular expression. 
// /// // /// More information: // /// - [ECMAScript reference][spec] // /// - [MDN documentation][mdn] // /// // /// [spec]: https://tc39.es/ecma262/#sec-get-regexp.prototype.multiline // /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/multiline // fn get_multiline(this: &Value, _: &[Value], _: &mut Context) -> Result<Value> { // this.with_internal_state_ref(|regex: &RegExp| Ok(Value::from(regex.multiline))) // } // /// `RegExp.prototype.source` // /// // /// The `source` property returns a `String` containing the source text of the regexp object, // /// and it doesn't contain the two forward slashes on both sides and any flags. // /// // /// More information: // /// - [ECMAScript reference][spec] // /// - [MDN documentation][mdn] // /// // /// [spec]: https://tc39.es/ecma262/#sec-get-regexp.prototype.source // /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/source // fn get_source(this: &Value, _: &[Value], _: &mut Context) -> Result<Value> { // Ok(this.get_internal_slot("OriginalSource")) // } // /// `RegExp.prototype.sticky` // /// // /// The `flags` property returns a string consisting of the [`flags`][flags] of the current regular expression object. // /// // /// More information: // /// - [ECMAScript reference][spec] // /// - [MDN documentation][mdn] // /// // /// [spec]: https://tc39.es/ecma262/#sec-get-regexp.prototype.sticky // /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/sticky // fn get_sticky(this: &Value, _: &[Value], _: &mut Context) -> Result<Value> { // this.with_internal_state_ref(|regex: &RegExp| Ok(Value::from(regex.sticky))) // } // /// `RegExp.prototype.unicode` // /// // /// The unicode property indicates whether or not the "`u`" flag is used with a regular expression. // /// unicode is a read-only property of an individual regular expression instance. 
// /// // /// More information: // /// - [ECMAScript reference][spec] // /// - [MDN documentation][mdn] // /// // /// [spec]: https://tc39.es/ecma262/#sec-get-regexp.prototype.unicode // /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/unicode // fn get_unicode(this: &Value, _: &[Value], _: &mut Context) -> Result<Value> { // this.with_internal_state_ref(|regex: &RegExp| Ok(Value::from(regex.unicode))) // } /// `RegExp.prototype.test( string )` /// /// The `test()` method executes a search for a match between a regular expression and a specified string. /// /// Returns `true` or `false`. /// /// More information: /// - [ECMAScript reference][spec] /// - [MDN documentation][mdn] /// /// [spec]: https://tc39.es/ecma262/#sec-regexp.prototype.test /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/test pub(crate) fn test(this: &Value, args: &[Value], ctx: &mut Context) -> Result<Value> { let arg_str = args .get(0) .expect("could not get argument") .to_string(ctx)?; let mut last_index = this.get_field("lastIndex").to_index(ctx)?; let result = if let Some(object) = this.as_object() { let object = object.borrow(); let regex = object.as_regexp().unwrap(); let result = if let Some(m) = regex.matcher.find_from(arg_str.as_str(), last_index).next() { if regex.use_last_index { last_index = m.end(); } true } else { if regex.use_last_index { last_index = 0; } false }; Ok(Value::boolean(result)) } else { panic!("object is not a regexp") }; this.set_field("lastIndex", Value::from(last_index)); result } /// `RegExp.prototype.exec( string )` /// /// The exec() method executes a search for a match in a specified string. /// /// Returns a result array, or `null`. 
/// /// More information: /// - [ECMAScript reference][spec] /// - [MDN documentation][mdn] /// /// [spec]: https://tc39.es/ecma262/#sec-regexp.prototype.exec /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/exec pub(crate) fn exec(this: &Value, args: &[Value], ctx: &mut Context) -> Result<Value> { let arg_str = args .get(0) .expect("could not get argument") .to_string(ctx)?; let mut last_index = this.get_field("lastIndex").to_index(ctx)?; let result = if let Some(object) = this.as_object() { let object = object.borrow(); let regex = object.as_regexp().unwrap(); let result = { if let Some(m) = regex.matcher.find_from(arg_str.as_str(), last_index).next() { if regex.use_last_index { last_index = m.end(); } let groups = m.captures.len() + 1; let mut result = Vec::with_capacity(groups); for i in 0..groups { if let Some(range) = m.group(i) { result.push(Value::from( arg_str.get(range).expect("Could not get slice"), )); } else { result.push(Value::undefined()); } } let result = Value::from(result); result.set_property("index", DataDescriptor::new(m.start(), Attribute::all())); result.set_property("input", DataDescriptor::new(arg_str, Attribute::all())); result } else { if regex.use_last_index { last_index = 0; } Value::null() } }; Ok(result) } else { panic!("object is not a regexp") }; this.set_field("lastIndex", Value::from(last_index)); result } /// `RegExp.prototype[ @@match ]( string )` /// /// This method retrieves the matches when matching a string against a regular expression. 
/// /// More information: /// - [ECMAScript reference][spec] /// - [MDN documentation][mdn] /// /// [spec]: https://tc39.es/ecma262/#sec-regexp.prototype-@@match /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/@@match pub(crate) fn r#match(this: &Value, arg: RcString, ctx: &mut Context) -> Result<Value> { let (matcher, flags) = if let Some(object) = this.as_object() { let object = object.borrow(); let regex = object.as_regexp().unwrap(); (regex.matcher.clone(), regex.flags.clone()) } else { panic!("object is not a regexp") }; if flags.contains('g') { let mut matches = Vec::new(); for mat in matcher.find_iter(&arg) { matches.push(Value::from(&arg[mat.range()])); } if matches.is_empty() { return Ok(Value::null()); } Ok(Value::from(matches)) } else { Self::exec(this, &[Value::from(arg)], ctx) } } /// `RegExp.prototype.toString()` /// /// Return a string representing the regular expression. /// /// More information: /// - [ECMAScript reference][spec] /// - [MDN documentation][mdn] /// /// [spec]: https://tc39.es/ecma262/#sec-regexp.prototype.tostring /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/toString #[allow(clippy::wrong_self_convention)] pub(crate) fn to_string(this: &Value, _: &[Value], context: &mut Context) -> Result<Value> { let (body, flags) = if let Some(object) = this.as_object() { let object = object.borrow(); let regex = object.as_regexp().unwrap(); (regex.original_source.clone(), regex.flags.clone()) } else { return context.throw_type_error(format!( "Method RegExp.prototype.toString called on incompatible receiver {}", this.display() )); }; Ok(Value::from(format!("/{}/{}", body, flags))) } /// `RegExp.prototype[ @@matchAll ]( string )` /// /// The `[@@matchAll]` method returns all matches of the regular expression against a string. 
/// /// More information: /// - [ECMAScript reference][spec] /// - [MDN documentation][mdn] /// /// [spec]: https://tc39.es/ecma262/#sec-regexp-prototype-matchall /// [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp/@@matchAll // TODO: it's returning an array, it should return an iterator pub(crate) fn match_all(this: &Value, arg_str: String) -> Result<Value> { let matches = if let Some(object) = this.as_object() { let object = object.borrow(); let regex = object.as_regexp().unwrap(); let mut matches = Vec::new(); for mat in regex.matcher.find_iter(&arg_str) { let match_vec: Vec<Value> = mat .groups() .map(|group| match group { Some(range) => Value::from(&arg_str[range]), None => Value::undefined(), }) .collect(); let match_val = Value::from(match_vec); match_val.set_property("index", DataDescriptor::new(mat.start(), Attribute::all())); match_val.set_property( "input", DataDescriptor::new(arg_str.clone(), Attribute::all()), ); matches.push(match_val); if !regex.flags.contains('g') { break; } } matches } else { panic!("object is not a regexp") }; let length = matches.len(); let result = Value::from(matches); result.set_field("length", Value::from(length)); result.set_data(ObjectData::Array); Ok(result) } }
37.929134
133
0.54972
4ac8cf76af705fe323de060e501d207cc555b177
4,627
use anyhow::{bail, Context, Result}; use std::fs::{read_link, symlink_metadata}; use std::io::{empty, BufWriter, Write}; use std::path::Path; use tar::{Builder, Header}; use walkdir::WalkDir; use crate::{ compression::{CombinedEncoder, CompressionFormats}, util::*, }; actor! { #[derive(Debug)] pub struct Tarballer { /// The input folder to be compressed. input: String = "package", /// The prefix of the tarballs. output: String = "./dist", /// The folder in which the input is to be found. work_dir: String = "./workdir", /// The formats used to compress the tarball. compression_formats: CompressionFormats = CompressionFormats::default(), } } impl Tarballer { /// Generates the actual tarballs pub fn run(self) -> Result<()> { let tarball_name = self.output.clone() + ".tar"; let encoder = CombinedEncoder::new( self.compression_formats .iter() .map(|f| f.encode(&tarball_name)) .collect::<Result<Vec<_>>>()?, ); // Sort files by their suffix, to group files with the same name from // different locations (likely identical) and files with the same // extension (likely containing similar data). let (dirs, mut files) = get_recursive_paths(&self.work_dir, &self.input) .context("failed to collect file paths")?; files.sort_by(|a, b| a.bytes().rev().cmp(b.bytes().rev())); // Write the tar into both encoded files. We write all directories // first, so files may be directly created. (See rust-lang/rustup.rs#1092.) 
let buf = BufWriter::with_capacity(1024 * 1024, encoder); let mut builder = Builder::new(buf); let pool = rayon::ThreadPoolBuilder::new() .num_threads(2) .build() .unwrap(); pool.install(move || { for path in dirs { let src = Path::new(&self.work_dir).join(&path); builder .append_dir(&path, &src) .with_context(|| format!("failed to tar dir '{}'", src.display()))?; } for path in files { let src = Path::new(&self.work_dir).join(&path); append_path(&mut builder, &src, &path) .with_context(|| format!("failed to tar file '{}'", src.display()))?; } builder .into_inner() .context("failed to finish writing .tar stream")? .into_inner() .ok() .unwrap() .finish()?; Ok(()) }) } } fn append_path<W: Write>(builder: &mut Builder<W>, src: &Path, path: &String) -> Result<()> { let stat = symlink_metadata(src)?; let mut header = Header::new_gnu(); header.set_metadata(&stat); if stat.file_type().is_symlink() { let link = read_link(src)?; header.set_link_name(&link)?; builder.append_data(&mut header, path, &mut empty())?; } else { if cfg!(windows) { // Windows doesn't really have a mode, so `tar` never marks files executable. // Use an extension whitelist to update files that usually should be so. const EXECUTABLES: [&'static str; 4] = ["exe", "dll", "py", "sh"]; if let Some(ext) = src.extension().and_then(|s| s.to_str()) { if EXECUTABLES.contains(&ext) { let mode = header.mode()?; header.set_mode(mode | 0o111); } } } let file = open_file(src)?; builder.append_data(&mut header, path, &file)?; } Ok(()) } /// Returns all `(directories, files)` under the source path. 
fn get_recursive_paths<P, Q>(root: P, name: Q) -> Result<(Vec<String>, Vec<String>)> where P: AsRef<Path>, Q: AsRef<Path>, { let root = root.as_ref(); let name = name.as_ref(); if !name.is_relative() && !name.starts_with(root) { bail!( "input '{}' is not in work dir '{}'", name.display(), root.display() ); } let mut dirs = vec![]; let mut files = vec![]; for entry in WalkDir::new(root.join(name)) { let entry = entry?; let path = entry.path().strip_prefix(root)?; let path = path_to_str(&path)?; if entry.file_type().is_dir() { dirs.push(path.to_owned()); } else { files.push(path.to_owned()); } } Ok((dirs, files)) }
33.05
93
0.53512
75e18e1a279f6c8f706872c43a30e49d40e45066
1,021
// structs1.rs // Address all the TODOs to make the tests pass! struct ColorClassicStruct<'a> { name: &'a str, hex: &'a str, } struct ColorTupleStruct<'a>(&'a str, &'a str); #[derive(Debug)] struct UnitStruct; #[cfg(test)] mod tests { use super::*; #[test] fn classic_c_structs() { // TODO: Instantiate a classic c struct! let green = ColorClassicStruct { name: "green", hex: "#00FF00", }; assert_eq!(green.name, "green"); assert_eq!(green.hex, "#00FF00"); } #[test] fn tuple_structs() { // TODO: Instantiate a tuple struct! let green = ColorTupleStruct("green", "#00FF00"); assert_eq!(green.0, "green"); assert_eq!(green.1, "#00FF00"); } #[test] fn unit_structs() { // TODO: Instantiate a unit struct! let unit_struct = UnitStruct; let message = format!("{:?}s are fun!", unit_struct); assert_eq!(message, "UnitStructs are fun!"); } }
21.723404
61
0.555338
d7687d498c9bc9f24f20cb05422352882fc11f63
3,915
//! TxsApp Subcommands //! //! This is where you specify the subcommands of your application. //! //! The default application comes with two subcommands: //! //! - `start`: launches the application //! - `version`: print application version //! //! See the `impl Configurable` below for how to specify the path to the //! application's configuration file. mod create_account_cmd; mod create_validator_cmd; mod oracle_upgrade_cmd; mod version_cmd; pub mod autopay_batch_cmd; pub mod demo_cmd; mod relay_cmd; mod valset_cmd; mod autopay_cmd; mod wallet_cmd; mod authkey_cmd; use abscissa_core::{Command, Configurable, Help, Options, Runnable}; use ol::commands::CONFIG_FILE; use crate::config::AppCfg; use crate::entrypoint; use self::{ create_account_cmd::CreateAccountCmd, create_validator_cmd::CreateValidatorCmd, oracle_upgrade_cmd::OracleUpgradeCmd, oracle_upgrade_cmd::OracleUpgradeHashCmd, version_cmd::VersionCmd, autopay_batch_cmd::AutopayBatchCmd, autopay_cmd::AutopayCmd, demo_cmd::DemoCmd, relay_cmd::RelayCmd, valset_cmd::ValSetCmd, wallet_cmd::WalletCmd, authkey_cmd::AuthkeyCmd, }; use std::path::PathBuf; /// TxsApp Subcommands #[derive(Command, Debug, Options, Runnable)] pub enum TxsCmd { /// The `create-account` subcommand #[options(help = "submit tx to create a user account from account.json file")] CreateAccount(CreateAccountCmd), /// The `create-validator` subcommand #[options(help = "submit tx to create a validator from account.json file")] CreateValidator(CreateValidatorCmd), /// The `oracle-upgrade` subcommand #[options(help = "submit an oracle transaction to upgrade stdlib")] OracleUpgrade(OracleUpgradeCmd), /// The `oracle-upgrade-hash` subcommand #[options(help = "submit an oracle transaction to upgrade stdlib")] OracleUpgradeHash(OracleUpgradeHashCmd), /// The `autopay` subcommand #[options(help = "enable or disable autopay")] Autopay(AutopayCmd), /// The `autopay-batch` subcommand #[options(help = "batch autopay transactions from json file")] 
AutopayBatch(AutopayBatchCmd), // --- End of STDLIB SCRIPT COMMANDS --- /// The `help` subcommand #[options(help = "get usage information")] Help(Help<Self>), /// The `version` subcommand #[options(help = "display version information")] Version(VersionCmd), /// The `demo` subcommand #[options(help = "noop demo transaction, prints `hello world` in move")] Demo(DemoCmd), /// The `relay` subcommand #[options(help = "submit a saved transaction from file")] Relay(RelayCmd), /// The `valset` subcommand #[options(help = "join or leave the validator universe, i.e. candidate for validator set")] ValSet(ValSetCmd), /// The `wallet` subcommand #[options(help = "set a wallet type to the address")] Wallet(WalletCmd), /// The `authkey` subcommand to rotate an auth key (change mnemonic that controls address) #[options(help = "rotate an account's authorization key")] Authkey(AuthkeyCmd), } /// This trait allows you to define how application configuration is loaded. impl Configurable<AppCfg> for TxsCmd { /// Location of the configuration file fn config_path(&self) -> Option<PathBuf> { // Check if the config file exists, and if it does not, ignore it. // If you'd like for a missing configuration file to be a hard error // instead, always return `Some(CONFIG_FILE)` here. let mut config_path = entrypoint::get_node_home(); config_path.push(CONFIG_FILE); if config_path.exists() { // println!("initializing from config file: {:?}", config_path); Some(config_path) } else { // println!("config file not yet existing: {:?}", config_path); None } } }
31.829268
95
0.681226
39315509992fe6650d601622012662108b5620f5
2,380
use core::ptr::{self, NonNull}; use core::task::Waker; use atomic_polyfill::{compiler_fence, AtomicPtr, Ordering}; use crate::executor::raw::{task_from_waker, wake_task, TaskHeader}; /// Utility struct to register and wake a waker. #[derive(Debug)] pub struct WakerRegistration { waker: Option<NonNull<TaskHeader>>, } impl WakerRegistration { pub const fn new() -> Self { Self { waker: None } } /// Register a waker. Overwrites the previous waker, if any. pub fn register(&mut self, w: &Waker) { let w = unsafe { task_from_waker(w) }; match self.waker { // Optimization: If both the old and new Wakers wake the same task, do nothing. Some(w2) if w == w2 => {} Some(w2) => { // We had a waker registered for another task. Wake it, so the other task can // reregister itself if it's still interested. // // If two tasks are waiting on the same thing concurrently, this will cause them // to wake each other in a loop fighting over this WakerRegistration. This wastes // CPU but things will still work. // // If the user wants to have two tasks waiting on the same thing they should use // a more appropriate primitive that can store multiple wakers. unsafe { wake_task(w2) } self.waker = Some(w); } None => self.waker = Some(w), } } /// Wake the registered waker, if any. pub fn wake(&mut self) { if let Some(w) = self.waker.take() { unsafe { wake_task(w) } } } } pub struct AtomicWaker { waker: AtomicPtr<TaskHeader>, } impl AtomicWaker { pub const fn new() -> Self { Self { waker: AtomicPtr::new(ptr::null_mut()), } } /// Register a waker. Overwrites the previous waker, if any. pub fn register(&self, w: &Waker) { let w = unsafe { task_from_waker(w) }; self.waker.store(w.as_ptr(), Ordering::Relaxed); compiler_fence(Ordering::SeqCst); } /// Wake the registered waker, if any. pub fn wake(&self) { let w2 = self.waker.load(Ordering::Relaxed); if let Some(w2) = NonNull::new(w2) { unsafe { wake_task(w2) }; } } }
30.909091
97
0.566807
e8bce211d6c42ebf9121fd5a5b8b66eda5ea1315
4,837
// each block has the same instructions // but different c1, c2, and c3 values // type 1 // ------ // inp w -> a digit between 1 and 9 // mul x 0 // add x z // mod x 26 // div z 1 -> c1 // add x 14 -> c2 | x = (z % 26) + 14 .. C2 is always greater than 9, i.e. x is always greater than 9, i.e. x != w // eql x w // eql x 0 // mul y 0 // add y 25 // mul y x // add y 1 // mul z y // mul y 0 // add y w // add y 8 -> c3 // mul y x // add z y // z = 26z + w + c3 // type 2 // ------ // inp w // mul x 0 // add x z // mod x 26 // div z 26 -> c1 // add x -12 -> c2 // eql x w // eql x 0 // mul y 0 // add y 25 // mul y x // add y 1 // mul z y // mul y 0 // add y w // add y 9 -> c3 // mul y x // add z y // z = if x == w { z / 26 } else { 26 * floor(z / 26) + w + c3 } // There are two types of blocks // Type 1 (C1 == 1) => z = 26z + w + C3 // Type 2 (C1 == 26) => if x == w { z = z / 26 } else { 26 * floor(z / 26) + w + C3 } // The puzzle input contains: // 7 blocks of type 1 which always increase z and // 7 blocks of type 2 which either decrease or "align" z // the goal is z == 0, i.e. we need 7 increase and 7 decrease ops // One option is to brute force the input values given in order to make // sure that there is an equal amount of increase and decrease operations. // Another implementation idea: // z can be modeled as an arithmetic stack across the 14 input ops where // each op can be simplified as follows (see also the check() method)/ // Note, that we only need to care about 'w + C3'. // inp w // x = z.top() + C2 // if C1 == 26 { // z.pop() // if x != w { // z.push(w + C3) // } // The goal is to adapt the w's to have 7 push and 7 pop operations. 
use aoc::PuzzleInput; type Input = NoInput; type Output = usize; register!( "input/day24.txt"; (input: input!(verbatim Input)) -> Output { part1(); part2(); } ); fn part1() -> Output { let mut z = vec![]; let mut res = vec![]; // We start with the max value for w as input // for each op and adapt them if necessary let w = 9; (0..14).for_each(|op| { if C1[op] == 1 { // type 1 operation // we just push w + C3 to the stack and // store w (9) as input for that op z.push((res.len(), w + C3[op])); res.push(w); } else { // type 2 operation assert!(C1[op] == 26); assert!(C2[op] <= 0); let (j, v) = z.pop().unwrap(); // We need to make sure that w stays within its bounds. if v + C2[op] > w { // If the corresponding push operation picked a value // for w that is too large, we need to adapt it now. res[j] -= v + C2[op] - w; res.push(w); } else { res.push(v + C2[op]); } } }); res.iter() .map(std::string::ToString::to_string) .collect::<String>() .parse::<usize>() .unwrap() } fn part2() -> Output { let mut z: Vec<(usize, i32)> = vec![]; let mut res: Vec<i32> = vec![]; // We start with the min value for w as input // for each op and adapt them if necessary let w = 1; (0..14).for_each(|op| { if C1[op] == 1 { z.push((res.len(), w + C3[op])); res.push(w); } else { let (j, v) = z.pop().unwrap(); if v + C2[op] <= 0 { res[j] += -(v + C2[op]) + w; res.push(w); } else { res.push(v + C2[op]); } } }); res.iter() .map(std::string::ToString::to_string) .collect::<String>() .parse::<usize>() .unwrap() } #[rustfmt::skip] const C1: [i32;14] = [1, 1, 1, 1, 26, 1, 26, 26, 1, 1, 26, 26, 26, 26]; #[rustfmt::skip] const C2: [i32;14] = [14, 13, 13, 12, -12, 12, -2, -11, 13, 14, 0, -12, -13, -6]; #[rustfmt::skip] const C3: [i32;14] = [8, 8, 3, 10, 8, 8, 8, 5, 9, 3, 4, 9, 2, 7]; pub struct NoInput; impl PuzzleInput for NoInput { type Out = Self; fn from_input(input: &str) -> Self::Out { Self } } #[cfg(test)] mod tests { use super::*; use aoc::{Solution, SolutionExt}; use test::Bencher; #[test] fn 
test() { let (res1, res2) = Solver::run_on_input(); assert_eq!(res1, 79997391969649); assert_eq!(res2, 16931171414113); } #[bench] fn bench_parsing(b: &mut Bencher) { let input = Solver::puzzle_input(); b.bytes = input.len() as u64; b.iter(|| Solver::parse_input(input)); } #[bench] fn bench_pt1(b: &mut Bencher) { b.iter(|| part1()); } #[bench] fn bench_pt2(b: &mut Bencher) { b.iter(|| part2()); } }
23.254808
114
0.493281
f978158dc942036c676b536c7c0cd9a8dde818e1
6,739
// Copyright 2018 The Exonum Team // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use futures::future::{done, Future}; use tokio_codec::{Decoder, Framed}; use tokio_io::{AsyncRead, AsyncWrite}; use std::net::SocketAddr; use super::wrapper::NoiseWrapper; use crate::{ crypto::{ x25519::{self, into_x25519_keypair, into_x25519_public_key}, PublicKey, SecretKey, }, events::{ codec::MessagesCodec, noise::{Handshake, HandshakeRawMessage, HandshakeResult}, }, messages::{Connect, Signed}, node::state::SharedConnectList, storage::StorageValue, }; /// Params needed to establish secured connection using Noise Protocol. 
#[derive(Debug, Clone)] pub struct HandshakeParams { pub public_key: x25519::PublicKey, pub secret_key: x25519::SecretKey, pub remote_key: Option<x25519::PublicKey>, pub connect_list: SharedConnectList, pub connect: Signed<Connect>, max_message_len: u32, } impl HandshakeParams { pub fn new( public_key: PublicKey, secret_key: SecretKey, connect_list: SharedConnectList, connect: Signed<Connect>, max_message_len: u32, ) -> Self { let (public_key, secret_key) = into_x25519_keypair(public_key, secret_key).unwrap(); HandshakeParams { public_key, secret_key, max_message_len, remote_key: None, connect, connect_list, } } pub fn set_remote_key(&mut self, remote_key: PublicKey) { self.remote_key = Some(into_x25519_public_key(remote_key)); } } #[derive(Debug)] pub struct NoiseHandshake { noise: NoiseWrapper, peer_address: SocketAddr, max_message_len: u32, connect_list: SharedConnectList, connect: Signed<Connect>, } impl NoiseHandshake { pub fn initiator(params: &HandshakeParams, peer_address: &SocketAddr) -> Self { let noise = NoiseWrapper::initiator(params); NoiseHandshake { noise, peer_address: *peer_address, max_message_len: params.max_message_len, connect_list: params.connect_list.clone(), connect: params.connect.clone(), } } pub fn responder(params: &HandshakeParams, peer_address: &SocketAddr) -> Self { let noise = NoiseWrapper::responder(params); NoiseHandshake { noise, peer_address: *peer_address, max_message_len: params.max_message_len, connect_list: params.connect_list.clone(), connect: params.connect.clone(), } } pub fn read_handshake_msg<S: AsyncRead + 'static>( mut self, stream: S, ) -> impl Future<Item = (S, Self, Vec<u8>), Error = failure::Error> { HandshakeRawMessage::read(stream).and_then(move |(stream, msg)| { let message = self.noise.read_handshake_msg(&msg.0)?; Ok((stream, self, message)) }) } pub fn write_handshake_msg<S: AsyncWrite + 'static>( mut self, stream: S, msg: &[u8], ) -> impl Future<Item = (S, Self), Error = failure::Error> { 
done(self.noise.write_handshake_msg(msg)) .map_err(|e| e.into()) .and_then(|buf| HandshakeRawMessage(buf).write(stream)) .map(move |(stream, _)| (stream, self)) } pub fn finalize<S: AsyncRead + AsyncWrite + 'static>( self, stream: S, message: Vec<u8>, ) -> Result<(Framed<S, MessagesCodec>, Vec<u8>), failure::Error> { let remote_static_key = { // Panic because with selected handshake pattern we must have // `remote_static_key` on final step of handshake. let rs = self .noise .session .get_remote_static() .expect("Remote static key is not present!"); x25519::PublicKey::from_slice(rs).expect("Remote static key is not valid x25519 key!") }; if !self.is_peer_allowed(&remote_static_key) { bail!("peer is not in ConnectList") } let noise = self.noise.into_transport_mode()?; let framed = MessagesCodec::new(self.max_message_len, noise).framed(stream); Ok((framed, message)) } fn is_peer_allowed(&self, remote_static_key: &x25519::PublicKey) -> bool { self.connect_list .peers() .iter() .map(|info| into_x25519_public_key(info.public_key)) .any(|key| remote_static_key == &key) } } impl Handshake for NoiseHandshake { fn listen<S>(self, stream: S) -> HandshakeResult<S> where S: AsyncRead + AsyncWrite + 'static, { let peer_address = self.peer_address; let connect = self.connect.clone(); let framed = self .read_handshake_msg(stream) .and_then(|(stream, handshake, _)| { handshake.write_handshake_msg(stream, &connect.into_bytes()) }) .and_then(|(stream, handshake)| handshake.read_handshake_msg(stream)) .and_then(|(stream, handshake, message)| handshake.finalize(stream, message)) .map_err(move |e| { e.context(format!("peer {} disconnected", peer_address)) .into() }); Box::new(framed) } fn send<S>(self, stream: S) -> HandshakeResult<S> where S: AsyncRead + AsyncWrite + 'static, { let peer_address = self.peer_address; let connect = self.connect.clone(); let framed = self .write_handshake_msg(stream, &[]) .and_then(|(stream, handshake)| handshake.read_handshake_msg(stream)) 
.and_then(|(stream, handshake, message)| { ( handshake.write_handshake_msg(stream, &connect.into_bytes()), Ok(message), ) }) .and_then(|((stream, handshake), message)| handshake.finalize(stream, message)) .map_err(move |e| { e.context(format!("peer {} disconnected", peer_address)) .into() }); Box::new(framed) } }
33.197044
98
0.600386
fedf00ad907a4838d06654ee0b0b27c62305278c
866
pub fn compress(src: &str) -> String { let mut compressed = String::new(); let mut chars = src.chars().peekable(); while let Some(c) = chars.peek().cloned() { let mut counter = 0; while let Some(n) = chars.peek().cloned() { if n == c { counter += 1; chars.next(); } else { break; } } compressed.push_str(counter.to_string().as_str()); compressed.push(c); } compressed } #[cfg(test)] mod tests { use super::*; #[test] fn compress_empty_string() { assert_eq!(compress(""), ""); } #[test] fn compress_unique_chars_string() { assert_eq!(compress("abc"), "1a1b1c"); } #[test] fn compress_doubled_chars_string() { assert_eq!(compress("aabbcc"), "2a2b2c"); } }
22.205128
58
0.5
799ac9fe5539b25c136a465a37be57605ca257c6
2,877
#[doc = "Register `T3CKR` reader"] pub struct R(crate::R<T3CKR_SPEC>); impl core::ops::Deref for R { type Target = crate::R<T3CKR_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::convert::From<crate::R<T3CKR_SPEC>> for R { fn from(reader: crate::R<T3CKR_SPEC>) -> Self { R(reader) } } #[doc = "Register `T3CKR` writer"] pub struct W(crate::W<T3CKR_SPEC>); impl core::ops::Deref for W { type Target = crate::W<T3CKR_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl core::convert::From<crate::W<T3CKR_SPEC>> for W { fn from(writer: crate::W<T3CKR_SPEC>) -> Self { W(writer) } } #[doc = "Field `T3CKR` reader - "] pub struct T3CKR_R(crate::FieldReader<u8, u8>); impl T3CKR_R { pub(crate) fn new(bits: u8) -> Self { T3CKR_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for T3CKR_R { type Target = crate::FieldReader<u8, u8>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `T3CKR` writer - "] pub struct T3CKR_W<'a> { w: &'a mut W, } impl<'a> T3CKR_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f); self.w } } impl R { #[doc = "Bits 0:3"] #[inline(always)] pub fn t3ckr(&self) -> T3CKR_R { T3CKR_R::new((self.bits & 0x0f) as u8) } } impl W { #[doc = "Bits 0:3"] #[inline(always)] pub fn t3ckr(&mut self) -> T3CKR_W { T3CKR_W { w: self } } #[doc = "Writes raw bits to the register."] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "T3CKR register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [t3ckr](index.html) module"] pub struct T3CKR_SPEC; impl crate::RegisterSpec for T3CKR_SPEC { type Ux = u32; } #[doc = "`read()` method returns [t3ckr::R](R) reader structure"] impl crate::Readable for T3CKR_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [t3ckr::W](W) writer structure"] impl crate::Writable for T3CKR_SPEC { type Writer = W; } #[doc = "`reset()` method sets T3CKR to value 0"] impl crate::Resettable for T3CKR_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
28.77
400
0.589503
0174bfec32d7ea01abf6bf97b7cd602e83580016
57,775
use clean::AttributesExt; use std::cmp::Ordering; use rustc_data_structures::fx::FxHashMap; use rustc_hir as hir; use rustc_hir::def::CtorKind; use rustc_hir::def_id::DefId; use rustc_middle::middle::stability; use rustc_middle::ty::layout::LayoutError; use rustc_middle::ty::TyCtxt; use rustc_span::hygiene::MacroKind; use rustc_span::symbol::{kw, sym, Symbol}; use super::{ collect_paths_for_type, document, ensure_trailing_slash, item_ty_to_strs, notable_traits_decl, render_assoc_item, render_assoc_items, render_attributes_in_code, render_attributes_in_pre, render_impl, render_impl_summary, render_stability_since_raw, write_srclink, AssocItemLink, Context, }; use crate::clean::{self, GetDefId}; use crate::formats::item_type::ItemType; use crate::formats::{AssocItemRender, Impl, RenderMode}; use crate::html::escape::Escape; use crate::html::format::{ print_abi_with_space, print_constness_with_space, print_where_clause, Buffer, PrintWithSpace, }; use crate::html::highlight; use crate::html::layout::Page; use crate::html::markdown::MarkdownSummaryLine; pub(super) fn print_item(cx: &Context<'_>, item: &clean::Item, buf: &mut Buffer, page: &Page<'_>) { debug_assert!(!item.is_stripped()); // Write the breadcrumb trail header for the top buf.write_str("<h1 class=\"fqn\"><span class=\"in-band\">"); let name = match *item.kind { clean::ModuleItem(_) => { if item.is_crate() { "Crate " } else { "Module " } } clean::FunctionItem(..) | clean::ForeignFunctionItem(..) => "Function ", clean::TraitItem(..) => "Trait ", clean::StructItem(..) => "Struct ", clean::UnionItem(..) => "Union ", clean::EnumItem(..) => "Enum ", clean::TypedefItem(..) => "Type Definition ", clean::MacroItem(..) => "Macro ", clean::ProcMacroItem(ref mac) => match mac.kind { MacroKind::Bang => "Macro ", MacroKind::Attr => "Attribute Macro ", MacroKind::Derive => "Derive Macro ", }, clean::PrimitiveItem(..) => "Primitive Type ", clean::StaticItem(..) | clean::ForeignStaticItem(..) 
=> "Static ", clean::ConstantItem(..) => "Constant ", clean::ForeignTypeItem => "Foreign Type ", clean::KeywordItem(..) => "Keyword ", clean::OpaqueTyItem(..) => "Opaque Type ", clean::TraitAliasItem(..) => "Trait Alias ", _ => { // We don't generate pages for any other type. unreachable!(); } }; buf.write_str(name); if !item.is_primitive() && !item.is_keyword() { let cur = &cx.current; let amt = if item.is_mod() { cur.len() - 1 } else { cur.len() }; for (i, component) in cur.iter().enumerate().take(amt) { write!( buf, "<a href=\"{}index.html\">{}</a>::<wbr>", "../".repeat(cur.len() - i - 1), component ); } } write!(buf, "<a class=\"{}\" href=\"#\">{}</a>", item.type_(), item.name.as_ref().unwrap()); write!( buf, "<button id=\"copy-path\" onclick=\"copy_path(this)\" title=\"copy path\">\ <img src=\"{static_root_path}clipboard{suffix}.svg\" \ width=\"19\" height=\"18\" \ alt=\"Copy item import\" \ title=\"Copy item import to clipboard\">\ </button>", static_root_path = page.get_static_root_path(), suffix = page.resource_suffix, ); buf.write_str("</span>"); // in-band buf.write_str("<span class=\"out-of-band\">"); render_stability_since_raw( buf, item.stable_since(cx.tcx()).as_deref(), item.const_stability(cx.tcx()), None, None, ); buf.write_str( "<span id=\"render-detail\">\ <a id=\"toggle-all-docs\" href=\"javascript:void(0)\" \ title=\"collapse all docs\">\ [<span class=\"inner\">&#x2212;</span>]\ </a>\ </span>", ); // Write `src` tag // // When this item is part of a `crate use` in a downstream crate, the // [src] link in the downstream documentation will actually come back to // this page, and this link will be auto-clicked. The `id` attribute is // used to find the link to auto-click. 
if cx.shared.include_sources && !item.is_primitive() { write_srclink(cx, item, buf); } buf.write_str("</span></h1>"); // out-of-band match *item.kind { clean::ModuleItem(ref m) => item_module(buf, cx, item, &m.items), clean::FunctionItem(ref f) | clean::ForeignFunctionItem(ref f) => { item_function(buf, cx, item, f) } clean::TraitItem(ref t) => item_trait(buf, cx, item, t), clean::StructItem(ref s) => item_struct(buf, cx, item, s), clean::UnionItem(ref s) => item_union(buf, cx, item, s), clean::EnumItem(ref e) => item_enum(buf, cx, item, e), clean::TypedefItem(ref t, _) => item_typedef(buf, cx, item, t), clean::MacroItem(ref m) => item_macro(buf, cx, item, m), clean::ProcMacroItem(ref m) => item_proc_macro(buf, cx, item, m), clean::PrimitiveItem(_) => item_primitive(buf, cx, item), clean::StaticItem(ref i) | clean::ForeignStaticItem(ref i) => item_static(buf, cx, item, i), clean::ConstantItem(ref c) => item_constant(buf, cx, item, c), clean::ForeignTypeItem => item_foreign_type(buf, cx, item), clean::KeywordItem(_) => item_keyword(buf, cx, item), clean::OpaqueTyItem(ref e) => item_opaque_ty(buf, cx, item, e), clean::TraitAliasItem(ref ta) => item_trait_alias(buf, cx, item, ta), _ => { // We don't generate pages for any other type. 
unreachable!(); } } } /// For large structs, enums, unions, etc, determine whether to hide their fields fn should_hide_fields(n_fields: usize) -> bool { n_fields > 12 } fn toggle_open(w: &mut Buffer, text: &str) { write!( w, "<details class=\"rustdoc-toggle type-contents-toggle\">\ <summary class=\"hideme\">\ <span>Show {}</span>\ </summary>", text ); } fn toggle_close(w: &mut Buffer) { w.write_str("</details>"); } fn item_module(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item, items: &[clean::Item]) { document(w, cx, item, None); let mut indices = (0..items.len()).filter(|i| !items[*i].is_stripped()).collect::<Vec<usize>>(); // the order of item types in the listing fn reorder(ty: ItemType) -> u8 { match ty { ItemType::ExternCrate => 0, ItemType::Import => 1, ItemType::Primitive => 2, ItemType::Module => 3, ItemType::Macro => 4, ItemType::Struct => 5, ItemType::Enum => 6, ItemType::Constant => 7, ItemType::Static => 8, ItemType::Trait => 9, ItemType::Function => 10, ItemType::Typedef => 12, ItemType::Union => 13, _ => 14 + ty as u8, } } fn cmp( i1: &clean::Item, i2: &clean::Item, idx1: usize, idx2: usize, tcx: TyCtxt<'_>, ) -> Ordering { let ty1 = i1.type_(); let ty2 = i2.type_(); if ty1 != ty2 { return (reorder(ty1), idx1).cmp(&(reorder(ty2), idx2)); } let s1 = i1.stability(tcx).as_ref().map(|s| s.level); let s2 = i2.stability(tcx).as_ref().map(|s| s.level); if let (Some(a), Some(b)) = (s1, s2) { match (a.is_stable(), b.is_stable()) { (true, true) | (false, false) => {} (false, true) => return Ordering::Less, (true, false) => return Ordering::Greater, } } let lhs = i1.name.unwrap_or(kw::Empty).as_str(); let rhs = i2.name.unwrap_or(kw::Empty).as_str(); compare_names(&lhs, &rhs) } if cx.shared.sort_modules_alphabetically { indices.sort_by(|&i1, &i2| cmp(&items[i1], &items[i2], i1, i2, cx.tcx())); } // This call is to remove re-export duplicates in cases such as: // // ``` // crate mod foo { // crate mod bar { // crate trait Double { fn foo(); } // } // } // // 
crate use foo::bar::*; // crate use foo::*; // ``` // // `Double` will appear twice in the generated docs. // // FIXME: This code is quite ugly and could be improved. Small issue: DefId // can be identical even if the elements are different (mostly in imports). // So in case this is an import, we keep everything by adding a "unique id" // (which is the position in the vector). indices.dedup_by_key(|i| { ( items[*i].def_id, if items[*i].name.as_ref().is_some() { Some(full_path(cx, &items[*i])) } else { None }, items[*i].type_(), if items[*i].is_import() { *i } else { 0 }, ) }); debug!("{:?}", indices); let mut curty = None; for &idx in &indices { let myitem = &items[idx]; if myitem.is_stripped() { continue; } let myty = Some(myitem.type_()); if curty == Some(ItemType::ExternCrate) && myty == Some(ItemType::Import) { // Put `extern crate` and `use` re-exports in the same section. curty = myty; } else if myty != curty { if curty.is_some() { w.write_str("</table>"); } curty = myty; let (short, name) = item_ty_to_strs(myty.unwrap()); write!( w, "<h2 id=\"{id}\" class=\"section-header\">\ <a href=\"#{id}\">{name}</a></h2>\n<table>", id = cx.derive_id(short.to_owned()), name = name ); } match *myitem.kind { clean::ExternCrateItem { ref src } => { use crate::html::format::anchor; match *src { Some(ref src) => write!( w, "<tr><td><code>{}extern crate {} as {};", myitem.visibility.print_with_space(myitem.def_id, cx), anchor(myitem.def_id.expect_real(), &*src.as_str(), cx), myitem.name.as_ref().unwrap(), ), None => write!( w, "<tr><td><code>{}extern crate {};", myitem.visibility.print_with_space(myitem.def_id, cx), anchor( myitem.def_id.expect_real(), &*myitem.name.as_ref().unwrap().as_str(), cx ), ), } w.write_str("</code></td></tr>"); } clean::ImportItem(ref import) => { let (stab, stab_tags) = if let Some(import_def_id) = import.source.did { let ast_attrs = cx.tcx().get_attrs(import_def_id); let import_attrs = Box::new(clean::Attributes::from_ast(ast_attrs, None)); // Just 
need an item with the correct def_id and attrs let import_item = clean::Item { def_id: import_def_id.into(), attrs: import_attrs, cfg: ast_attrs.cfg(cx.sess()), ..myitem.clone() }; let stab = import_item.stability_class(cx.tcx()); let stab_tags = Some(extra_info_tags(&import_item, item, cx.tcx())); (stab, stab_tags) } else { (None, None) }; let add = if stab.is_some() { " " } else { "" }; write!( w, "<tr class=\"{stab}{add}import-item\">\ <td><code>{vis}{imp}</code></td>\ <td class=\"docblock-short\">{stab_tags}</td>\ </tr>", stab = stab.unwrap_or_default(), add = add, vis = myitem.visibility.print_with_space(myitem.def_id, cx), imp = import.print(cx), stab_tags = stab_tags.unwrap_or_default(), ); } _ => { if myitem.name.is_none() { continue; } let unsafety_flag = match *myitem.kind { clean::FunctionItem(ref func) | clean::ForeignFunctionItem(ref func) if func.header.unsafety == hir::Unsafety::Unsafe => { "<a title=\"unsafe function\" href=\"#\"><sup>⚠</sup></a>" } _ => "", }; let stab = myitem.stability_class(cx.tcx()); let add = if stab.is_some() { " " } else { "" }; let doc_value = myitem.doc_value().unwrap_or_default(); write!( w, "<tr class=\"{stab}{add}module-item\">\ <td><a class=\"{class}\" href=\"{href}\" \ title=\"{title}\">{name}</a>{unsafety_flag}</td>\ <td class=\"docblock-short\">{stab_tags}{docs}</td>\ </tr>", name = *myitem.name.as_ref().unwrap(), stab_tags = extra_info_tags(myitem, item, cx.tcx()), docs = MarkdownSummaryLine(&doc_value, &myitem.links(cx)).into_string(), class = myitem.type_(), add = add, stab = stab.unwrap_or_default(), unsafety_flag = unsafety_flag, href = item_path(myitem.type_(), &myitem.name.unwrap().as_str()), title = [full_path(cx, myitem), myitem.type_().to_string()] .iter() .filter_map(|s| if !s.is_empty() { Some(s.as_str()) } else { None }) .collect::<Vec<_>>() .join(" "), ); } } } if curty.is_some() { w.write_str("</table>"); } } /// Render the stability, deprecation and portability tags that are displayed in the item's 
summary /// at the module level. fn extra_info_tags(item: &clean::Item, parent: &clean::Item, tcx: TyCtxt<'_>) -> String { let mut tags = String::new(); fn tag_html(class: &str, title: &str, contents: &str) -> String { format!(r#"<span class="stab {}" title="{}">{}</span>"#, class, Escape(title), contents) } // The trailing space after each tag is to space it properly against the rest of the docs. if let Some(depr) = &item.deprecation(tcx) { let mut message = "Deprecated"; if !stability::deprecation_in_effect( depr.is_since_rustc_version, depr.since.map(|s| s.as_str()).as_deref(), ) { message = "Deprecation planned"; } tags += &tag_html("deprecated", "", message); } // The "rustc_private" crates are permanently unstable so it makes no sense // to render "unstable" everywhere. if item .stability(tcx) .as_ref() .map(|s| s.level.is_unstable() && s.feature != sym::rustc_private) == Some(true) { tags += &tag_html("unstable", "", "Experimental"); } let cfg = match (&item.cfg, parent.cfg.as_ref()) { (Some(cfg), Some(parent_cfg)) => cfg.simplify_with(parent_cfg), (cfg, _) => cfg.as_deref().cloned(), }; debug!("Portability {:?} - {:?} = {:?}", item.cfg, parent.cfg, cfg); if let Some(ref cfg) = cfg { tags += &tag_html("portability", &cfg.render_long_plain(), &cfg.render_short_html()); } tags } fn item_function(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, f: &clean::Function) { let vis = it.visibility.print_with_space(it.def_id, cx).to_string(); let constness = print_constness_with_space(&f.header.constness, it.const_stability(cx.tcx())); let asyncness = f.header.asyncness.print_with_space(); let unsafety = f.header.unsafety.print_with_space(); let abi = print_abi_with_space(f.header.abi).to_string(); let name = it.name.as_ref().unwrap(); let generics_len = format!("{:#}", f.generics.print(cx)).len(); let header_len = "fn ".len() + vis.len() + constness.len() + asyncness.len() + unsafety.len() + abi.len() + name.as_str().len() + generics_len; w.write_str("<pre 
class=\"rust fn\">"); render_attributes_in_pre(w, it, ""); w.reserve(header_len); write!( w, "{vis}{constness}{asyncness}{unsafety}{abi}fn \ {name}{generics}{decl}{notable_traits}{where_clause}</pre>", vis = vis, constness = constness, asyncness = asyncness, unsafety = unsafety, abi = abi, name = name, generics = f.generics.print(cx), where_clause = print_where_clause(&f.generics, cx, 0, true), decl = f.decl.full_print(header_len, 0, f.header.asyncness, cx), notable_traits = notable_traits_decl(&f.decl, cx), ); document(w, cx, it, None) } fn item_trait(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::Trait) { let bounds = bounds(&t.bounds, false, cx); let types = t.items.iter().filter(|m| m.is_associated_type()).collect::<Vec<_>>(); let consts = t.items.iter().filter(|m| m.is_associated_const()).collect::<Vec<_>>(); let required = t.items.iter().filter(|m| m.is_ty_method()).collect::<Vec<_>>(); let provided = t.items.iter().filter(|m| m.is_method()).collect::<Vec<_>>(); // Output the trait definition wrap_into_docblock(w, |w| { w.write_str("<pre class=\"rust trait\">"); render_attributes_in_pre(w, it, ""); write!( w, "{}{}{}trait {}{}{}", it.visibility.print_with_space(it.def_id, cx), t.unsafety.print_with_space(), if t.is_auto { "auto " } else { "" }, it.name.as_ref().unwrap(), t.generics.print(cx), bounds ); if !t.generics.where_predicates.is_empty() { write!(w, "{}", print_where_clause(&t.generics, cx, 0, true)); } else { w.write_str(" "); } if t.items.is_empty() { w.write_str("{ }"); } else { // FIXME: we should be using a derived_id for the Anchors here w.write_str("{\n"); let mut toggle = false; // If there are too many associated types, hide _everything_ if should_hide_fields(types.len()) { toggle = true; toggle_open(w, "associated items"); } for t in &types { render_assoc_item(w, t, AssocItemLink::Anchor(None), ItemType::Trait, cx); w.write_str(";\n"); } // If there are too many associated constants, hide everything after them // We also do 
this if the types + consts is large because otherwise we could // render a bunch of types and _then_ a bunch of consts just because both were // _just_ under the limit if !toggle && should_hide_fields(types.len() + consts.len()) { toggle = true; toggle_open(w, "associated constants and methods"); } if !types.is_empty() && !consts.is_empty() { w.write_str("\n"); } for t in &consts { render_assoc_item(w, t, AssocItemLink::Anchor(None), ItemType::Trait, cx); w.write_str(";\n"); } if !toggle && should_hide_fields(required.len() + provided.len()) { toggle = true; toggle_open(w, "methods"); } if !consts.is_empty() && !required.is_empty() { w.write_str("\n"); } for (pos, m) in required.iter().enumerate() { render_assoc_item(w, m, AssocItemLink::Anchor(None), ItemType::Trait, cx); w.write_str(";\n"); if pos < required.len() - 1 { w.write_str("<div class=\"item-spacer\"></div>"); } } if !required.is_empty() && !provided.is_empty() { w.write_str("\n"); } for (pos, m) in provided.iter().enumerate() { render_assoc_item(w, m, AssocItemLink::Anchor(None), ItemType::Trait, cx); match *m.kind { clean::MethodItem(ref inner, _) if !inner.generics.where_predicates.is_empty() => { w.write_str(",\n { ... }\n"); } _ => { w.write_str(" { ... 
}\n"); } } if pos < provided.len() - 1 { w.write_str("<div class=\"item-spacer\"></div>"); } } if toggle { toggle_close(w); } w.write_str("}"); } w.write_str("</pre>") }); // Trait documentation document(w, cx, it, None); fn write_small_section_header(w: &mut Buffer, id: &str, title: &str, extra_content: &str) { write!( w, "<h2 id=\"{0}\" class=\"small-section-header\">\ {1}<a href=\"#{0}\" class=\"anchor\"></a>\ </h2>{2}", id, title, extra_content ) } fn trait_item(w: &mut Buffer, cx: &Context<'_>, m: &clean::Item, t: &clean::Item) { let name = m.name.as_ref().unwrap(); info!("Documenting {} on {:?}", name, t.name); let item_type = m.type_(); let id = cx.derive_id(format!("{}.{}", item_type, name)); let mut content = Buffer::empty_from(w); document(&mut content, cx, m, Some(t)); let toggled = !content.is_empty(); if toggled { write!(w, "<details class=\"rustdoc-toggle\" open><summary>"); } write!(w, "<div id=\"{}\" class=\"method has-srclink\">", id); write!(w, "<div class=\"rightside\">"); render_stability_since(w, m, t, cx.tcx()); write_srclink(cx, m, w); write!(w, "</div>"); write!(w, "<code>"); render_assoc_item(w, m, AssocItemLink::Anchor(Some(&id)), ItemType::Impl, cx); w.write_str("</code>"); w.write_str("</div>"); if toggled { write!(w, "</summary>"); w.push_buffer(content); write!(w, "</details>"); } } if !types.is_empty() { write_small_section_header( w, "associated-types", "Associated Types", "<div class=\"methods\">", ); for t in types { trait_item(w, cx, t, it); } w.write_str("</div>"); } if !consts.is_empty() { write_small_section_header( w, "associated-const", "Associated Constants", "<div class=\"methods\">", ); for t in consts { trait_item(w, cx, t, it); } w.write_str("</div>"); } // Output the documentation for each function individually if !required.is_empty() { write_small_section_header( w, "required-methods", "Required methods", "<div class=\"methods\">", ); for m in required { trait_item(w, cx, m, it); } w.write_str("</div>"); } if 
!provided.is_empty() { write_small_section_header( w, "provided-methods", "Provided methods", "<div class=\"methods\">", ); for m in provided { trait_item(w, cx, m, it); } w.write_str("</div>"); } // If there are methods directly on this trait object, render them here. render_assoc_items(w, cx, it, it.def_id.expect_real(), AssocItemRender::All); if let Some(implementors) = cx.cache.implementors.get(&it.def_id.expect_real()) { // The DefId is for the first Type found with that name. The bool is // if any Types with the same name but different DefId have been found. let mut implementor_dups: FxHashMap<Symbol, (DefId, bool)> = FxHashMap::default(); for implementor in implementors { match implementor.inner_impl().for_ { clean::ResolvedPath { ref path, did, is_generic: false, .. } | clean::BorrowedRef { type_: box clean::ResolvedPath { ref path, did, is_generic: false, .. }, .. } => { let &mut (prev_did, ref mut has_duplicates) = implementor_dups.entry(path.last()).or_insert((did, false)); if prev_did != did { *has_duplicates = true; } } _ => {} } } let (local, foreign) = implementors.iter().partition::<Vec<_>, _>(|i| { i.inner_impl() .for_ .def_id_full(cx.cache()) .map_or(true, |d| cx.cache.paths.contains_key(&d)) }); let (mut synthetic, mut concrete): (Vec<&&Impl>, Vec<&&Impl>) = local.iter().partition(|i| i.inner_impl().synthetic); synthetic.sort_by(|a, b| compare_impl(a, b, cx)); concrete.sort_by(|a, b| compare_impl(a, b, cx)); if !foreign.is_empty() { write_small_section_header(w, "foreign-impls", "Implementations on Foreign Types", ""); for implementor in foreign { let provided_methods = implementor.inner_impl().provided_trait_methods(cx.tcx()); let assoc_link = AssocItemLink::GotoSource(implementor.impl_item.def_id, &provided_methods); render_impl( w, cx, &implementor, it, assoc_link, RenderMode::Normal, false, None, true, false, &[], ); } } write_small_section_header( w, "implementors", "Implementors", "<div class=\"item-list\" id=\"implementors-list\">", ); for 
implementor in concrete { render_implementor(cx, implementor, it, w, &implementor_dups, &[]); } w.write_str("</div>"); if t.is_auto { write_small_section_header( w, "synthetic-implementors", "Auto implementors", "<div class=\"item-list\" id=\"synthetic-implementors-list\">", ); for implementor in synthetic { render_implementor( cx, implementor, it, w, &implementor_dups, &collect_paths_for_type(implementor.inner_impl().for_.clone(), &cx.cache), ); } w.write_str("</div>"); } } else { // even without any implementations to write in, we still want the heading and list, so the // implementors javascript file pulled in below has somewhere to write the impls into write_small_section_header( w, "implementors", "Implementors", "<div class=\"item-list\" id=\"implementors-list\"></div>", ); if t.is_auto { write_small_section_header( w, "synthetic-implementors", "Auto implementors", "<div class=\"item-list\" id=\"synthetic-implementors-list\"></div>", ); } } write!( w, "<script type=\"text/javascript\" \ src=\"{root_path}/implementors/{path}/{ty}.{name}.js\" async>\ </script>", root_path = vec![".."; cx.current.len()].join("/"), path = if it.def_id.is_local() { cx.current.join("/") } else { let (ref path, _) = cx.cache.external_paths[&it.def_id.expect_real()]; path[..path.len() - 1].join("/") }, ty = it.type_(), name = *it.name.as_ref().unwrap() ); } fn item_trait_alias(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::TraitAlias) { w.write_str("<pre class=\"rust trait-alias\">"); render_attributes_in_pre(w, it, ""); write!( w, "trait {}{}{} = {};</pre>", it.name.as_ref().unwrap(), t.generics.print(cx), print_where_clause(&t.generics, cx, 0, true), bounds(&t.bounds, true, cx) ); document(w, cx, it, None); // Render any items associated directly to this alias, as otherwise they // won't be visible anywhere in the docs. 
It would be nice to also show // associated items from the aliased type (see discussion in #32077), but // we need #14072 to make sense of the generics. render_assoc_items(w, cx, it, it.def_id.expect_real(), AssocItemRender::All) } fn item_opaque_ty(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::OpaqueTy) { w.write_str("<pre class=\"rust opaque\">"); render_attributes_in_pre(w, it, ""); write!( w, "type {}{}{where_clause} = impl {bounds};</pre>", it.name.as_ref().unwrap(), t.generics.print(cx), where_clause = print_where_clause(&t.generics, cx, 0, true), bounds = bounds(&t.bounds, false, cx), ); document(w, cx, it, None); // Render any items associated directly to this alias, as otherwise they // won't be visible anywhere in the docs. It would be nice to also show // associated items from the aliased type (see discussion in #32077), but // we need #14072 to make sense of the generics. render_assoc_items(w, cx, it, it.def_id.expect_real(), AssocItemRender::All) } fn item_typedef(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::Typedef) { w.write_str("<pre class=\"rust typedef\">"); render_attributes_in_pre(w, it, ""); write!( w, "type {}{}{where_clause} = {type_};</pre>", it.name.as_ref().unwrap(), t.generics.print(cx), where_clause = print_where_clause(&t.generics, cx, 0, true), type_ = t.type_.print(cx), ); document(w, cx, it, None); let def_id = it.def_id.expect_real(); // Render any items associated directly to this alias, as otherwise they // won't be visible anywhere in the docs. It would be nice to also show // associated items from the aliased type (see discussion in #32077), but // we need #14072 to make sense of the generics. 
render_assoc_items(w, cx, it, def_id, AssocItemRender::All); } fn item_union(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, s: &clean::Union) { wrap_into_docblock(w, |w| { w.write_str("<pre class=\"rust union\">"); render_attributes_in_pre(w, it, ""); render_union(w, it, Some(&s.generics), &s.fields, "", true, cx); w.write_str("</pre>") }); document(w, cx, it, None); let mut fields = s .fields .iter() .filter_map(|f| match *f.kind { clean::StructFieldItem(ref ty) => Some((f, ty)), _ => None, }) .peekable(); if fields.peek().is_some() { write!( w, "<h2 id=\"fields\" class=\"fields small-section-header\">\ Fields<a href=\"#fields\" class=\"anchor\"></a></h2>" ); for (field, ty) in fields { let name = field.name.as_ref().expect("union field name"); let id = format!("{}.{}", ItemType::StructField, name); write!( w, "<span id=\"{id}\" class=\"{shortty} small-section-header\">\ <a href=\"#{id}\" class=\"anchor field\"></a>\ <code>{name}: {ty}</code>\ </span>", id = id, name = name, shortty = ItemType::StructField, ty = ty.print(cx), ); if let Some(stability_class) = field.stability_class(cx.tcx()) { write!(w, "<span class=\"stab {stab}\"></span>", stab = stability_class); } document(w, cx, field, Some(it)); } } let def_id = it.def_id.expect_real(); render_assoc_items(w, cx, it, def_id, AssocItemRender::All); document_type_layout(w, cx, def_id); } fn item_enum(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, e: &clean::Enum) { wrap_into_docblock(w, |w| { w.write_str("<pre class=\"rust enum\">"); render_attributes_in_pre(w, it, ""); write!( w, "{}enum {}{}{}", it.visibility.print_with_space(it.def_id, cx), it.name.as_ref().unwrap(), e.generics.print(cx), print_where_clause(&e.generics, cx, 0, true), ); if e.variants.is_empty() && !e.variants_stripped { w.write_str(" {}"); } else { w.write_str(" {\n"); let toggle = should_hide_fields(e.variants.len()); if toggle { toggle_open(w, "variants"); } for v in &e.variants { w.write_str(" "); let name = 
v.name.as_ref().unwrap(); match *v.kind { clean::VariantItem(ref var) => match var { clean::Variant::CLike => write!(w, "{}", name), clean::Variant::Tuple(ref tys) => { write!(w, "{}(", name); for (i, ty) in tys.iter().enumerate() { if i > 0 { w.write_str(",&nbsp;") } write!(w, "{}", ty.print(cx)); } w.write_str(")"); } clean::Variant::Struct(ref s) => { render_struct(w, v, None, s.struct_type, &s.fields, " ", false, cx); } }, _ => unreachable!(), } w.write_str(",\n"); } if e.variants_stripped { w.write_str(" // some variants omitted\n"); } if toggle { toggle_close(w); } w.write_str("}"); } w.write_str("</pre>") }); document(w, cx, it, None); if !e.variants.is_empty() { write!( w, "<h2 id=\"variants\" class=\"variants small-section-header\">\ Variants{}<a href=\"#variants\" class=\"anchor\"></a></h2>", document_non_exhaustive_header(it) ); document_non_exhaustive(w, it); for variant in &e.variants { let id = cx.derive_id(format!("{}.{}", ItemType::Variant, variant.name.as_ref().unwrap())); write!( w, "<div id=\"{id}\" class=\"variant small-section-header\">\ <a href=\"#{id}\" class=\"anchor field\"></a>\ <code>{name}", id = id, name = variant.name.as_ref().unwrap() ); if let clean::VariantItem(clean::Variant::Tuple(ref tys)) = *variant.kind { w.write_str("("); for (i, ty) in tys.iter().enumerate() { if i > 0 { w.write_str(",&nbsp;"); } write!(w, "{}", ty.print(cx)); } w.write_str(")"); } w.write_str("</code>"); render_stability_since(w, variant, it, cx.tcx()); w.write_str("</div>"); document(w, cx, variant, Some(it)); document_non_exhaustive(w, variant); use crate::clean::Variant; if let clean::VariantItem(Variant::Struct(ref s)) = *variant.kind { toggle_open(w, "fields"); let variant_id = cx.derive_id(format!( "{}.{}.fields", ItemType::Variant, variant.name.as_ref().unwrap() )); write!(w, "<div class=\"autohide sub-variant\" id=\"{id}\">", id = variant_id); write!( w, "<h3>Fields of <b>{name}</b></h3><div>", name = variant.name.as_ref().unwrap() ); for field in 
&s.fields { use crate::clean::StructFieldItem; if let StructFieldItem(ref ty) = *field.kind { let id = cx.derive_id(format!( "variant.{}.field.{}", variant.name.as_ref().unwrap(), field.name.as_ref().unwrap() )); write!( w, "<span id=\"{id}\" class=\"variant small-section-header\">\ <a href=\"#{id}\" class=\"anchor field\"></a>\ <code>{f}:&nbsp;{t}</code>\ </span>", id = id, f = field.name.as_ref().unwrap(), t = ty.print(cx) ); document(w, cx, field, Some(variant)); } } w.write_str("</div></div>"); toggle_close(w); } } } let def_id = it.def_id.expect_real(); render_assoc_items(w, cx, it, def_id, AssocItemRender::All); document_type_layout(w, cx, def_id); } fn item_macro(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, t: &clean::Macro) { wrap_into_docblock(w, |w| { highlight::render_with_highlighting( &t.source, w, Some("macro"), None, None, it.span(cx.tcx()).inner().edition(), None, ); }); document(w, cx, it, None) } fn item_proc_macro(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, m: &clean::ProcMacro) { let name = it.name.as_ref().expect("proc-macros always have names"); match m.kind { MacroKind::Bang => { w.push_str("<pre class=\"rust macro\">"); write!(w, "{}!() {{ /* proc-macro */ }}", name); w.push_str("</pre>"); } MacroKind::Attr => { w.push_str("<pre class=\"rust attr\">"); write!(w, "#[{}]", name); w.push_str("</pre>"); } MacroKind::Derive => { w.push_str("<pre class=\"rust derive\">"); write!(w, "#[derive({})]", name); if !m.helpers.is_empty() { w.push_str("\n{\n"); w.push_str(" // Attributes available to this derive:\n"); for attr in &m.helpers { writeln!(w, " #[{}]", attr); } w.push_str("}\n"); } w.push_str("</pre>"); } } document(w, cx, it, None) } fn item_primitive(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item) { document(w, cx, it, None); render_assoc_items(w, cx, it, it.def_id.expect_real(), AssocItemRender::All) } fn item_constant(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, c: &clean::Constant) { w.write_str("<pre class=\"rust 
const\">"); render_attributes_in_code(w, it); write!( w, "{vis}const {name}: {typ}", vis = it.visibility.print_with_space(it.def_id, cx), name = it.name.as_ref().unwrap(), typ = c.type_.print(cx), ); let value = c.value(cx.tcx()); let is_literal = c.is_literal(cx.tcx()); let expr = c.expr(cx.tcx()); if value.is_some() || is_literal { write!(w, " = {expr};", expr = Escape(&expr)); } else { w.write_str(";"); } if !is_literal { if let Some(value) = &value { let value_lowercase = value.to_lowercase(); let expr_lowercase = expr.to_lowercase(); if value_lowercase != expr_lowercase && value_lowercase.trim_end_matches("i32") != expr_lowercase { write!(w, " // {value}", value = Escape(value)); } } } w.write_str("</pre>"); document(w, cx, it, None) } fn item_struct(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, s: &clean::Struct) { wrap_into_docblock(w, |w| { w.write_str("<pre class=\"rust struct\">"); render_attributes_in_code(w, it); render_struct(w, it, Some(&s.generics), s.struct_type, &s.fields, "", true, cx); w.write_str("</pre>") }); document(w, cx, it, None); let mut fields = s .fields .iter() .filter_map(|f| match *f.kind { clean::StructFieldItem(ref ty) => Some((f, ty)), _ => None, }) .peekable(); if let CtorKind::Fictive = s.struct_type { if fields.peek().is_some() { write!( w, "<h2 id=\"fields\" class=\"fields small-section-header\">\ Fields{}<a href=\"#fields\" class=\"anchor\"></a></h2>", document_non_exhaustive_header(it) ); document_non_exhaustive(w, it); for (field, ty) in fields { let id = cx.derive_id(format!( "{}.{}", ItemType::StructField, field.name.as_ref().unwrap() )); write!( w, "<span id=\"{id}\" class=\"{item_type} small-section-header\">\ <a href=\"#{id}\" class=\"anchor field\"></a>\ <code>{name}: {ty}</code>\ </span>", item_type = ItemType::StructField, id = id, name = field.name.as_ref().unwrap(), ty = ty.print(cx) ); document(w, cx, field, Some(it)); } } } let def_id = it.def_id.expect_real(); render_assoc_items(w, cx, it, def_id, 
AssocItemRender::All); document_type_layout(w, cx, def_id); } fn item_static(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, s: &clean::Static) { w.write_str("<pre class=\"rust static\">"); render_attributes_in_code(w, it); write!( w, "{vis}static {mutability}{name}: {typ}</pre>", vis = it.visibility.print_with_space(it.def_id, cx), mutability = s.mutability.print_with_space(), name = it.name.as_ref().unwrap(), typ = s.type_.print(cx) ); document(w, cx, it, None) } fn item_foreign_type(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item) { w.write_str("<pre class=\"rust foreigntype\">extern {\n"); render_attributes_in_code(w, it); write!( w, " {}type {};\n}}</pre>", it.visibility.print_with_space(it.def_id, cx), it.name.as_ref().unwrap(), ); document(w, cx, it, None); render_assoc_items(w, cx, it, it.def_id.expect_real(), AssocItemRender::All) } fn item_keyword(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item) { document(w, cx, it, None) } /// Compare two strings treating multi-digit numbers as single units (i.e. natural sort order). crate fn compare_names(mut lhs: &str, mut rhs: &str) -> Ordering { /// Takes a non-numeric and a numeric part from the given &str. fn take_parts<'a>(s: &mut &'a str) -> (&'a str, &'a str) { let i = s.find(|c: char| c.is_ascii_digit()); let (a, b) = s.split_at(i.unwrap_or(s.len())); let i = b.find(|c: char| !c.is_ascii_digit()); let (b, c) = b.split_at(i.unwrap_or(b.len())); *s = c; (a, b) } while !lhs.is_empty() || !rhs.is_empty() { let (la, lb) = take_parts(&mut lhs); let (ra, rb) = take_parts(&mut rhs); // First process the non-numeric part. match la.cmp(ra) { Ordering::Equal => (), x => return x, } // Then process the numeric part, if both sides have one (and they fit in a u64). if let (Ok(ln), Ok(rn)) = (lb.parse::<u64>(), rb.parse::<u64>()) { match ln.cmp(&rn) { Ordering::Equal => (), x => return x, } } // Then process the numeric part again, but this time as strings. 
match lb.cmp(rb) { Ordering::Equal => (), x => return x, } } Ordering::Equal } pub(super) fn full_path(cx: &Context<'_>, item: &clean::Item) -> String { let mut s = cx.current.join("::"); s.push_str("::"); s.push_str(&item.name.unwrap().as_str()); s } pub(super) fn item_path(ty: ItemType, name: &str) -> String { match ty { ItemType::Module => format!("{}index.html", ensure_trailing_slash(name)), _ => format!("{}.{}.html", ty, name), } } fn bounds(t_bounds: &[clean::GenericBound], trait_alias: bool, cx: &Context<'_>) -> String { let mut bounds = String::new(); if !t_bounds.is_empty() { if !trait_alias { bounds.push_str(": "); } for (i, p) in t_bounds.iter().enumerate() { if i > 0 { bounds.push_str(" + "); } bounds.push_str(&p.print(cx).to_string()); } } bounds } fn wrap_into_docblock<F>(w: &mut Buffer, f: F) where F: FnOnce(&mut Buffer), { w.write_str("<div class=\"docblock type-decl\">"); f(w); w.write_str("</div>") } fn render_stability_since( w: &mut Buffer, item: &clean::Item, containing_item: &clean::Item, tcx: TyCtxt<'_>, ) { render_stability_since_raw( w, item.stable_since(tcx).as_deref(), item.const_stability(tcx), containing_item.stable_since(tcx).as_deref(), containing_item.const_stable_since(tcx).as_deref(), ) } fn compare_impl<'a, 'b>(lhs: &'a &&Impl, rhs: &'b &&Impl, cx: &Context<'_>) -> Ordering { let lhss = format!("{}", lhs.inner_impl().print(false, cx)); let rhss = format!("{}", rhs.inner_impl().print(false, cx)); // lhs and rhs are formatted as HTML, which may be unnecessary compare_names(&lhss, &rhss) } fn render_implementor( cx: &Context<'_>, implementor: &Impl, trait_: &clean::Item, w: &mut Buffer, implementor_dups: &FxHashMap<Symbol, (DefId, bool)>, aliases: &[String], ) { // If there's already another implementor that has the same abridged name, use the // full path, for example in `std::iter::ExactSizeIterator` let use_absolute = match implementor.inner_impl().for_ { clean::ResolvedPath { ref path, is_generic: false, .. 
} | clean::BorrowedRef { type_: box clean::ResolvedPath { ref path, is_generic: false, .. }, .. } => implementor_dups[&path.last()].1, _ => false, }; render_impl_summary( w, cx, implementor, trait_, trait_, false, Some(use_absolute), false, aliases, ); } fn render_union( w: &mut Buffer, it: &clean::Item, g: Option<&clean::Generics>, fields: &[clean::Item], tab: &str, structhead: bool, cx: &Context<'_>, ) { write!( w, "{}{}{}", it.visibility.print_with_space(it.def_id, cx), if structhead { "union " } else { "" }, it.name.as_ref().unwrap() ); if let Some(g) = g { write!(w, "{}", g.print(cx)); write!(w, "{}", print_where_clause(&g, cx, 0, true)); } write!(w, " {{\n{}", tab); let count_fields = fields.iter().filter(|f| matches!(*f.kind, clean::StructFieldItem(..))).count(); let toggle = should_hide_fields(count_fields); if toggle { toggle_open(w, "fields"); } for field in fields { if let clean::StructFieldItem(ref ty) = *field.kind { write!( w, " {}{}: {},\n{}", field.visibility.print_with_space(field.def_id, cx), field.name.as_ref().unwrap(), ty.print(cx), tab ); } } if it.has_stripped_fields().unwrap() { write!(w, " // some fields omitted\n{}", tab); } if toggle { toggle_close(w); } w.write_str("}"); } fn render_struct( w: &mut Buffer, it: &clean::Item, g: Option<&clean::Generics>, ty: CtorKind, fields: &[clean::Item], tab: &str, structhead: bool, cx: &Context<'_>, ) { write!( w, "{}{}{}", it.visibility.print_with_space(it.def_id, cx), if structhead { "struct " } else { "" }, it.name.as_ref().unwrap() ); if let Some(g) = g { write!(w, "{}", g.print(cx)) } match ty { CtorKind::Fictive => { if let Some(g) = g { write!(w, "{}", print_where_clause(g, cx, 0, true),) } w.write_str(" {"); let count_fields = fields.iter().filter(|f| matches!(*f.kind, clean::StructFieldItem(..))).count(); let has_visible_fields = count_fields > 0; let toggle = should_hide_fields(count_fields); if toggle { toggle_open(w, "fields"); } for field in fields { if let clean::StructFieldItem(ref ty) 
= *field.kind { write!( w, "\n{} {}{}: {},", tab, field.visibility.print_with_space(field.def_id, cx), field.name.as_ref().unwrap(), ty.print(cx), ); } } if has_visible_fields { if it.has_stripped_fields().unwrap() { write!(w, "\n{} // some fields omitted", tab); } write!(w, "\n{}", tab); } else if it.has_stripped_fields().unwrap() { // If there are no visible fields we can just display // `{ /* fields omitted */ }` to save space. write!(w, " /* fields omitted */ "); } if toggle { toggle_close(w); } w.write_str("}"); } CtorKind::Fn => { w.write_str("("); for (i, field) in fields.iter().enumerate() { if i > 0 { w.write_str(", "); } match *field.kind { clean::StrippedItem(box clean::StructFieldItem(..)) => write!(w, "_"), clean::StructFieldItem(ref ty) => { write!( w, "{}{}", field.visibility.print_with_space(field.def_id, cx), ty.print(cx), ) } _ => unreachable!(), } } w.write_str(")"); if let Some(g) = g { write!(w, "{}", print_where_clause(g, cx, 0, false),) } w.write_str(";"); } CtorKind::Const => { // Needed for PhantomData. if let Some(g) = g { write!(w, "{}", print_where_clause(g, cx, 0, false),) } w.write_str(";"); } } } fn document_non_exhaustive_header(item: &clean::Item) -> &str { if item.is_non_exhaustive() { " (Non-exhaustive)" } else { "" } } fn document_non_exhaustive(w: &mut Buffer, item: &clean::Item) { if item.is_non_exhaustive() { write!( w, "<details class=\"rustdoc-toggle non-exhaustive\">\ <summary class=\"hideme\"><span>{}</span></summary>\ <div class=\"docblock\">", { if item.is_struct() { "This struct is marked as non-exhaustive" } else if item.is_enum() { "This enum is marked as non-exhaustive" } else if item.is_variant() { "This variant is marked as non-exhaustive" } else { "This type is marked as non-exhaustive" } } ); if item.is_struct() { w.write_str( "Non-exhaustive structs could have additional fields added in future. \ Therefore, non-exhaustive structs cannot be constructed in external crates \ using the traditional <code>Struct { .. 
}</code> syntax; cannot be \ matched against without a wildcard <code>..</code>; and \ struct update syntax will not work.", ); } else if item.is_enum() { w.write_str( "Non-exhaustive enums could have additional variants added in future. \ Therefore, when matching against variants of non-exhaustive enums, an \ extra wildcard arm must be added to account for any future variants.", ); } else if item.is_variant() { w.write_str( "Non-exhaustive enum variants could have additional fields added in future. \ Therefore, non-exhaustive enum variants cannot be constructed in external \ crates and cannot be matched against.", ); } else { w.write_str( "This type will require a wildcard arm in any match statements or constructors.", ); } w.write_str("</div></details>"); } } fn document_type_layout(w: &mut Buffer, cx: &Context<'_>, ty_def_id: DefId) { if !cx.shared.show_type_layout { return; } writeln!(w, "<h2 class=\"small-section-header\">Layout</h2>"); writeln!(w, "<div class=\"docblock\">"); let tcx = cx.tcx(); let param_env = tcx.param_env(ty_def_id); let ty = tcx.type_of(ty_def_id); match tcx.layout_of(param_env.and(ty)) { Ok(ty_layout) => { writeln!( w, "<div class=\"warning\"><p><strong>Note:</strong> Most layout information is \ completely unstable and may be different between compiler versions and platforms. \ The only exception is types with certain <code>repr(...)</code> attributes. \ Please see the Rust Reference’s \ <a href=\"https://doc.rust-lang.org/reference/type-layout.html\">“Type Layout”</a> \ chapter for details on type layout guarantees.</p></div>" ); if ty_layout.layout.abi.is_unsized() { writeln!(w, "<p><strong>Size:</strong> (unsized)</p>"); } else { let bytes = ty_layout.layout.size.bytes(); writeln!( w, "<p><strong>Size:</strong> {size} byte{pl}</p>", size = bytes, pl = if bytes == 1 { "" } else { "s" }, ); } } // This kind of layout error can occur with valid code, e.g. if you try to // get the layout of a generic type such as `Vec<T>`. 
Err(LayoutError::Unknown(_)) => { writeln!( w, "<p><strong>Note:</strong> Unable to compute type layout, \ possibly due to this type having generic parameters. \ Layout can only be computed for concrete, fully-instantiated types.</p>" ); } // This kind of error probably can't happen with valid code, but we don't // want to panic and prevent the docs from building, so we just let the // user know that we couldn't compute the layout. Err(LayoutError::SizeOverflow(_)) => { writeln!( w, "<p><strong>Note:</strong> Encountered an error during type layout; \ the type was too big.</p>" ); } } writeln!(w, "</div>"); }
35.952085
101
0.488412
762a8bc748220a4b5c6538135b90c48087e1db1d
23,519
// Copyright 2018 Parity Technologies (UK) Ltd. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. #![recursion_limit = "256"] use quote::quote; use proc_macro::TokenStream; use syn::{parse_macro_input, DeriveInput, Data, DataStruct, Ident}; /// Generates a delegating `NetworkBehaviour` implementation for the struct this is used for. See /// the trait documentation for better description. #[proc_macro_derive(NetworkBehaviour, attributes(behaviour))] pub fn hello_macro_derive(input: TokenStream) -> TokenStream { let ast = parse_macro_input!(input as DeriveInput); build(&ast) } /// The actual implementation. 
fn build(ast: &DeriveInput) -> TokenStream { match ast.data { Data::Struct(ref s) => build_struct(ast, s), Data::Enum(_) => unimplemented!("Deriving NetworkBehaviour is not implemented for enums"), Data::Union(_) => unimplemented!("Deriving NetworkBehaviour is not implemented for unions"), } } /// The version for structs fn build_struct(ast: &DeriveInput, data_struct: &DataStruct) -> TokenStream { let name = &ast.ident; let (_, ty_generics, where_clause) = ast.generics.split_for_impl(); let multiaddr = quote!{::libp2p::core::Multiaddr}; let trait_to_impl = quote!{::libp2p::swarm::NetworkBehaviour}; let net_behv_event_proc = quote!{::libp2p::swarm::NetworkBehaviourEventProcess}; let either_ident = quote!{::libp2p::core::either::EitherOutput}; let network_behaviour_action = quote!{::libp2p::swarm::NetworkBehaviourAction}; let into_protocols_handler = quote!{::libp2p::swarm::IntoProtocolsHandler}; let protocols_handler = quote!{::libp2p::swarm::ProtocolsHandler}; let into_proto_select_ident = quote!{::libp2p::swarm::IntoProtocolsHandlerSelect}; let peer_id = quote!{::libp2p::core::PeerId}; let connection_id = quote!{::libp2p::core::connection::ConnectionId}; let connected_point = quote!{::libp2p::core::ConnectedPoint}; let listener_id = quote!{::libp2p::core::connection::ListenerId}; let poll_parameters = quote!{::libp2p::swarm::PollParameters}; // Build the generics. let impl_generics = { let tp = ast.generics.type_params(); let lf = ast.generics.lifetimes(); let cst = ast.generics.const_params(); quote!{<#(#lf,)* #(#tp,)* #(#cst,)*>} }; // Whether or not we require the `NetworkBehaviourEventProcess` trait to be implemented. 
let event_process = { let mut event_process = true; // Default to true for backwards compatibility for meta_items in ast.attrs.iter().filter_map(get_meta_items) { for meta_item in meta_items { match meta_item { syn::NestedMeta::Meta(syn::Meta::NameValue(ref m)) if m.path.is_ident("event_process") => { if let syn::Lit::Bool(ref b) = m.lit { event_process = b.value } } _ => () } } } event_process }; // The final out event. // If we find a `#[behaviour(out_event = "Foo")]` attribute on the struct, we set `Foo` as // the out event. Otherwise we use `()`. let out_event = { let mut out = quote!{()}; for meta_items in ast.attrs.iter().filter_map(get_meta_items) { for meta_item in meta_items { match meta_item { syn::NestedMeta::Meta(syn::Meta::NameValue(ref m)) if m.path.is_ident("out_event") => { if let syn::Lit::Str(ref s) = m.lit { let ident: syn::Type = syn::parse_str(&s.value()).unwrap(); out = quote!{#ident}; } } _ => () } } } out }; // Build the `where ...` clause of the trait implementation. let where_clause = { let additional = data_struct.fields.iter() .filter(|x| !is_ignored(x)) .flat_map(|field| { let ty = &field.ty; vec![ quote!{#ty: #trait_to_impl}, if event_process { quote!{Self: #net_behv_event_proc<<#ty as #trait_to_impl>::OutEvent>} } else { quote!{#out_event: From< <#ty as #trait_to_impl>::OutEvent >} } ] }) .collect::<Vec<_>>(); if let Some(where_clause) = where_clause { if where_clause.predicates.trailing_punct() { Some(quote!{#where_clause #(#additional),*}) } else { Some(quote!{#where_clause, #(#additional),*}) } } else { Some(quote!{where #(#additional),*}) } }; // Build the list of statements to put in the body of `addresses_of_peer()`. 
let addresses_of_peer_stmts = { data_struct.fields.iter().enumerate().filter_map(move |(field_n, field)| { if is_ignored(&field) { return None; } Some(match field.ident { Some(ref i) => quote!{ out.extend(self.#i.addresses_of_peer(peer_id)); }, None => quote!{ out.extend(self.#field_n.addresses_of_peer(peer_id)); }, }) }) }; // Build the list of statements to put in the body of `inject_connected()`. let inject_connected_stmts = { data_struct.fields.iter().enumerate().filter_map(move |(field_n, field)| { if is_ignored(&field) { return None; } Some(match field.ident { Some(ref i) => quote!{ self.#i.inject_connected(peer_id); }, None => quote!{ self.#field_n.inject_connected(peer_id); }, }) }) }; // Build the list of statements to put in the body of `inject_disconnected()`. let inject_disconnected_stmts = { data_struct.fields.iter().enumerate().filter_map(move |(field_n, field)| { if is_ignored(&field) { return None; } Some(match field.ident { Some(ref i) => quote!{ self.#i.inject_disconnected(peer_id); }, None => quote!{ self.#field_n.inject_disconnected(peer_id); }, }) }) }; // Build the list of statements to put in the body of `inject_connection_established()`. let inject_connection_established_stmts = { data_struct.fields.iter().enumerate().filter_map(move |(field_n, field)| { if is_ignored(&field) { return None; } Some(match field.ident { Some(ref i) => quote!{ self.#i.inject_connection_established(peer_id, connection_id, endpoint); }, None => quote!{ self.#field_n.inject_connection_established(peer_id, connection_id, endpoint); }, }) }) }; // Build the list of statements to put in the body of `inject_address_change()`. 
let inject_address_change_stmts = { data_struct.fields.iter().enumerate().filter_map(move |(field_n, field)| { if is_ignored(&field) { return None; } Some(match field.ident { Some(ref i) => quote!{ self.#i.inject_address_change(peer_id, connection_id, old, new); }, None => quote!{ self.#field_n.inject_address_change(peer_id, connection_id, old, new); }, }) }) }; // Build the list of statements to put in the body of `inject_connection_closed()`. let inject_connection_closed_stmts = { data_struct.fields.iter().enumerate().filter_map(move |(field_n, field)| { if is_ignored(&field) { return None; } Some(match field.ident { Some(ref i) => quote!{ self.#i.inject_connection_closed(peer_id, connection_id, endpoint); }, None => quote!{ self.#field_n.inject_connection_closed(peer_id, connection_id, endpoint); }, }) }) }; // Build the list of statements to put in the body of `inject_addr_reach_failure()`. let inject_addr_reach_failure_stmts = { data_struct.fields.iter().enumerate().filter_map(move |(field_n, field)| { if is_ignored(&field) { return None; } Some(match field.ident { Some(ref i) => quote!{ self.#i.inject_addr_reach_failure(peer_id, addr, error); }, None => quote!{ self.#field_n.inject_addr_reach_failure(peer_id, addr, error); }, }) }) }; // Build the list of statements to put in the body of `inject_dial_failure()`. let inject_dial_failure_stmts = { data_struct.fields.iter().enumerate().filter_map(move |(field_n, field)| { if is_ignored(&field) { return None; } Some(match field.ident { Some(ref i) => quote!{ self.#i.inject_dial_failure(peer_id); }, None => quote!{ self.#field_n.inject_dial_failure(peer_id); }, }) }) }; // Build the list of statements to put in the body of `inject_new_listen_addr()`. 
let inject_new_listen_addr_stmts = { data_struct.fields.iter().enumerate().filter_map(move |(field_n, field)| { if is_ignored(&field) { return None; } Some(match field.ident { Some(ref i) => quote!{ self.#i.inject_new_listen_addr(addr); }, None => quote!{ self.#field_n.inject_new_listen_addr(addr); }, }) }) }; // Build the list of statements to put in the body of `inject_expired_listen_addr()`. let inject_expired_listen_addr_stmts = { data_struct.fields.iter().enumerate().filter_map(move |(field_n, field)| { if is_ignored(&field) { return None; } Some(match field.ident { Some(ref i) => quote!{ self.#i.inject_expired_listen_addr(addr); }, None => quote!{ self.#field_n.inject_expired_listen_addr(addr); }, }) }) }; // Build the list of statements to put in the body of `inject_new_external_addr()`. let inject_new_external_addr_stmts = { data_struct.fields.iter().enumerate().filter_map(move |(field_n, field)| { if is_ignored(&field) { return None; } Some(match field.ident { Some(ref i) => quote!{ self.#i.inject_new_external_addr(addr); }, None => quote!{ self.#field_n.inject_new_external_addr(addr); }, }) }) }; // Build the list of statements to put in the body of `inject_listener_error()`. let inject_listener_error_stmts = { data_struct.fields.iter().enumerate().filter_map(move |(field_n, field)| { if is_ignored(&field) { return None } Some(match field.ident { Some(ref i) => quote!(self.#i.inject_listener_error(id, err);), None => quote!(self.#field_n.inject_listener_error(id, err);) }) }) }; // Build the list of statements to put in the body of `inject_listener_closed()`. let inject_listener_closed_stmts = { data_struct.fields.iter().enumerate().filter_map(move |(field_n, field)| { if is_ignored(&field) { return None } Some(match field.ident { Some(ref i) => quote!(self.#i.inject_listener_closed(id, reason);), None => quote!(self.#field_n.inject_listener_closed(id, reason);) }) }) }; // Build the list of variants to put in the body of `inject_event()`. 
// // The event type is a construction of nested `#either_ident`s of the events of the children. // We call `inject_event` on the corresponding child. let inject_node_event_stmts = data_struct.fields.iter().enumerate().filter(|f| !is_ignored(&f.1)).enumerate().map(|(enum_n, (field_n, field))| { let mut elem = if enum_n != 0 { quote!{ #either_ident::Second(ev) } } else { quote!{ ev } }; for _ in 0 .. data_struct.fields.iter().filter(|f| !is_ignored(f)).count() - 1 - enum_n { elem = quote!{ #either_ident::First(#elem) }; } Some(match field.ident { Some(ref i) => quote!{ #elem => #trait_to_impl::inject_event(&mut self.#i, peer_id, connection_id, ev) }, None => quote!{ #elem => #trait_to_impl::inject_event(&mut self.#field_n, peer_id, connection_id, ev) }, }) }); // The `ProtocolsHandler` associated type. let protocols_handler_ty = { let mut ph_ty = None; for field in data_struct.fields.iter() { if is_ignored(&field) { continue; } let ty = &field.ty; let field_info = quote!{ <#ty as #trait_to_impl>::ProtocolsHandler }; match ph_ty { Some(ev) => ph_ty = Some(quote!{ #into_proto_select_ident<#ev, #field_info> }), ref mut ev @ None => *ev = Some(field_info), } } ph_ty.unwrap_or(quote!{()}) // TODO: `!` instead }; // The content of `new_handler()`. // Example output: `self.field1.select(self.field2.select(self.field3))`. let new_handler = { let mut out_handler = None; for (field_n, field) in data_struct.fields.iter().enumerate() { if is_ignored(&field) { continue; } let field_name = match field.ident { Some(ref i) => quote!{ self.#i }, None => quote!{ self.#field_n }, }; let builder = quote! { #field_name.new_handler() }; match out_handler { Some(h) => out_handler = Some(quote!{ #into_protocols_handler::select(#h, #builder) }), ref mut h @ None => *h = Some(builder), } } out_handler.unwrap_or(quote!{()}) // TODO: incorrect }; // The method to use to poll. 
// If we find a `#[behaviour(poll_method = "poll")]` attribute on the struct, we call // `self.poll()` at the end of the polling. let poll_method = { let mut poll_method = quote!{std::task::Poll::Pending}; for meta_items in ast.attrs.iter().filter_map(get_meta_items) { for meta_item in meta_items { match meta_item { syn::NestedMeta::Meta(syn::Meta::NameValue(ref m)) if m.path.is_ident("poll_method") => { if let syn::Lit::Str(ref s) = m.lit { let ident: Ident = syn::parse_str(&s.value()).unwrap(); poll_method = quote!{#name::#ident(self, cx, poll_params)}; } } _ => () } } } poll_method }; // List of statements to put in `poll()`. // // We poll each child one by one and wrap around the output. let poll_stmts = data_struct.fields.iter().enumerate().filter(|f| !is_ignored(&f.1)).enumerate().map(|(enum_n, (field_n, field))| { let field_name = match field.ident { Some(ref i) => quote!{ self.#i }, None => quote!{ self.#field_n }, }; let mut wrapped_event = if enum_n != 0 { quote!{ #either_ident::Second(event) } } else { quote!{ event } }; for _ in 0 .. data_struct.fields.iter().filter(|f| !is_ignored(f)).count() - 1 - enum_n { wrapped_event = quote!{ #either_ident::First(#wrapped_event) }; } let generate_event_match_arm = if event_process { quote! { std::task::Poll::Ready(#network_behaviour_action::GenerateEvent(event)) => { #net_behv_event_proc::inject_event(self, event) } } } else { quote! 
{ std::task::Poll::Ready(#network_behaviour_action::GenerateEvent(event)) => { return std::task::Poll::Ready(#network_behaviour_action::GenerateEvent(event.into())) } } }; Some(quote!{ loop { match #trait_to_impl::poll(&mut #field_name, cx, poll_params) { #generate_event_match_arm std::task::Poll::Ready(#network_behaviour_action::DialAddress { address }) => { return std::task::Poll::Ready(#network_behaviour_action::DialAddress { address }); } std::task::Poll::Ready(#network_behaviour_action::DialPeer { peer_id, condition }) => { return std::task::Poll::Ready(#network_behaviour_action::DialPeer { peer_id, condition }); } std::task::Poll::Ready(#network_behaviour_action::NotifyHandler { peer_id, handler, event }) => { return std::task::Poll::Ready(#network_behaviour_action::NotifyHandler { peer_id, handler, event: #wrapped_event, }); } std::task::Poll::Ready(#network_behaviour_action::ReportObservedAddr { address }) => { return std::task::Poll::Ready(#network_behaviour_action::ReportObservedAddr { address }); } std::task::Poll::Pending => break, } } }) }); // Now the magic happens. 
let final_quote = quote!{ impl #impl_generics #trait_to_impl for #name #ty_generics #where_clause { type ProtocolsHandler = #protocols_handler_ty; type OutEvent = #out_event; fn new_handler(&mut self) -> Self::ProtocolsHandler { use #into_protocols_handler; #new_handler } fn addresses_of_peer(&mut self, peer_id: &#peer_id) -> Vec<#multiaddr> { let mut out = Vec::new(); #(#addresses_of_peer_stmts);* out } fn inject_connected(&mut self, peer_id: &#peer_id) { #(#inject_connected_stmts);* } fn inject_disconnected(&mut self, peer_id: &#peer_id) { #(#inject_disconnected_stmts);* } fn inject_connection_established(&mut self, peer_id: &#peer_id, connection_id: &#connection_id, endpoint: &#connected_point) { #(#inject_connection_established_stmts);* } fn inject_address_change(&mut self, peer_id: &#peer_id, connection_id: &#connection_id, old: &#connected_point, new: &#connected_point) { #(#inject_address_change_stmts);* } fn inject_connection_closed(&mut self, peer_id: &#peer_id, connection_id: &#connection_id, endpoint: &#connected_point) { #(#inject_connection_closed_stmts);* } fn inject_addr_reach_failure(&mut self, peer_id: Option<&#peer_id>, addr: &#multiaddr, error: &dyn std::error::Error) { #(#inject_addr_reach_failure_stmts);* } fn inject_dial_failure(&mut self, peer_id: &#peer_id) { #(#inject_dial_failure_stmts);* } fn inject_new_listen_addr(&mut self, addr: &#multiaddr) { #(#inject_new_listen_addr_stmts);* } fn inject_expired_listen_addr(&mut self, addr: &#multiaddr) { #(#inject_expired_listen_addr_stmts);* } fn inject_new_external_addr(&mut self, addr: &#multiaddr) { #(#inject_new_external_addr_stmts);* } fn inject_listener_error(&mut self, id: #listener_id, err: &(dyn std::error::Error + 'static)) { #(#inject_listener_error_stmts);* } fn inject_listener_closed(&mut self, id: #listener_id, reason: std::result::Result<(), &std::io::Error>) { #(#inject_listener_closed_stmts);* } fn inject_event( &mut self, peer_id: #peer_id, connection_id: #connection_id, event: 
<<Self::ProtocolsHandler as #into_protocols_handler>::Handler as #protocols_handler>::OutEvent ) { match event { #(#inject_node_event_stmts),* } } fn poll(&mut self, cx: &mut std::task::Context, poll_params: &mut impl #poll_parameters) -> std::task::Poll<#network_behaviour_action<<<Self::ProtocolsHandler as #into_protocols_handler>::Handler as #protocols_handler>::InEvent, Self::OutEvent>> { use libp2p::futures::prelude::*; #(#poll_stmts)* let f: std::task::Poll<#network_behaviour_action<<<Self::ProtocolsHandler as #into_protocols_handler>::Handler as #protocols_handler>::InEvent, Self::OutEvent>> = #poll_method; f } } }; final_quote.into() } fn get_meta_items(attr: &syn::Attribute) -> Option<Vec<syn::NestedMeta>> { if attr.path.segments.len() == 1 && attr.path.segments[0].ident == "behaviour" { match attr.parse_meta() { Ok(syn::Meta::List(ref meta)) => Some(meta.nested.iter().cloned().collect()), Ok(_) => None, Err(e) => { eprintln!("error parsing attribute metadata: {}", e); None } } } else { None } } /// Returns true if a field is marked as ignored by the user. fn is_ignored(field: &syn::Field) -> bool { for meta_items in field.attrs.iter().filter_map(get_meta_items) { for meta_item in meta_items { match meta_item { syn::NestedMeta::Meta(syn::Meta::Path(ref m)) if m.is_ident("ignore") => { return true; } _ => () } } } false }
40.410653
259
0.555976
62b7e5f8781ef072eef3573986a31d1fe5be289e
12,713
// Copyright (c) The Libra Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::json_log::JsonLogEntry; use anyhow::{bail, ensure, Result}; use std::time::Instant; pub const TRACE_EVENT: &str = "trace_event"; pub const TRACE_EDGE: &str = "trace_edge"; pub const LIBRA_TRACE: &str = "libra_trace"; use std::{ collections::HashMap, sync::atomic::{AtomicUsize, Ordering}, }; // This is poor's man AtomicReference from crossbeam // It have few unsafe lines, but does not require extra dependency // Sampling rate is the form of (nominator, denominator) static mut SAMPLING_CONFIG: Option<Sampling> = None; static LIBRA_TRACE_STATE: AtomicUsize = AtomicUsize::new(UNINITIALIZED); const UNINITIALIZED: usize = 0; const INITIALIZING: usize = 1; const INITIALIZED: usize = 2; struct Sampling(HashMap<&'static str, CategorySampling>); struct CategorySampling { denominator: u64, nominator: u64, } #[macro_export] macro_rules! trace_event { ($stage:expr, $node:tt) => { if $crate::is_selected($crate::node_sampling_data!($node)) { trace_event!($stage; {$crate::format_node!($node), module_path!(), Option::<u64>::None}); } }; ($stage:expr; {$node:expr, $path:expr, $duration:expr}) => { let json = serde_json::json!({ "path": $path, "node": $node, "stage": $stage, "duration": $duration, }); $crate::send_logs!($crate::libra_trace::TRACE_EVENT, json); } } #[macro_export] macro_rules! node_sampling_data { ({$type:expr, $($rest:tt)*}) => {{ use std::hash::Hasher; let mut hasher = std::collections::hash_map::DefaultHasher::new(); node_sampling_data!(hasher; $($rest)*); ($type, hasher.finish())} }; ($hasher:expr; $p:expr, $($rest:tt)*) => { std::hash::Hash::hash(&$p, &mut $hasher); node_sampling_data!($hasher; $($rest)*); }; ($hasher:expr; $p:expr) => { std::hash::Hash::hash(&$p, &mut $hasher); } } #[macro_export] macro_rules! 
send_logs { ($name:expr, $json:expr) => { let log_entry = $crate::json_log::JsonLogEntry::new($name, $json); $crate::json_log::send_json_log(log_entry.clone()); libra_logger::send_struct_log!(libra_logger::StructuredLogEntry::new_named( $crate::libra_trace::LIBRA_TRACE ) .data($name, log_entry)); }; } #[macro_export] macro_rules! trace_code_block { ($stage:expr, $node:tt) => { let trace_guard = $crate::libra_trace::TraceBlockGuard::new_entered( concat!($stage, "::done"), $crate::format_node!($node), module_path!(), ); trace_event!($stage, $node); }; ($stage:expr, $node:tt, $guard_vec:tt) => { let trace_guard = $crate::libra_trace::TraceBlockGuard::new_entered( concat!($stage, "::done"), $crate::format_node!($node), module_path!(), ); trace_event!($stage, $node); $guard_vec.push(trace_guard); }; } pub struct TraceBlockGuard { stage: &'static str, node: String, module_path: &'static str, started: Instant, } impl TraceBlockGuard { pub fn new_entered( stage: &'static str, node: String, module_path: &'static str, ) -> TraceBlockGuard { let started = Instant::now(); TraceBlockGuard { stage, node, module_path, started, } } } impl Drop for TraceBlockGuard { fn drop(&mut self) { let duration = format!("{:.0?}", Instant::now().duration_since(self.started)); trace_event!(self.stage; {self.node, self.module_path, duration}); } } #[macro_export] macro_rules! end_trace { ($stage:expr, $node:tt) => { if $crate::is_selected($crate::node_sampling_data!($node)) { let json = serde_json::json!({ "path": module_path!(), "node": $crate::format_node!($node), "stage": $stage, "end": true, }); $crate::send_logs!($crate::libra_trace::TRACE_EVENT, json); } }; } #[macro_export] macro_rules! 
trace_edge { ($stage:expr, $node_from:tt, $node_to:tt) => { if $crate::is_selected($crate::node_sampling_data!($node_from)) { let json = serde_json::json!({ "path": module_path!(), "node": $crate::format_node!($node_from), "node_to": $crate::format_node!($node_to), "stage": $stage, }); $crate::send_logs!($crate::libra_trace::TRACE_EDGE, json); } }; } #[macro_export] macro_rules! format_node { ({$($node_part:expr),+}) => { format!($crate::__trace_fmt_gen!($($node_part),+), $($node_part),+) } } // Internal helper macro // Transforms (expr, expr, ...) into "{}::{}::..." #[macro_export] macro_rules! __trace_fmt_gen { ($p:expr) => {"{}"}; ($p:expr, $($par:expr),+) => {concat!("{}::", $crate::__trace_fmt_gen!($($par),+))} } pub fn random_node(entries: &[JsonLogEntry], f_stage: &str, prefix: &str) -> Option<String> { for entry in entries { if entry.name != TRACE_EVENT { continue; } let node = entry .json .get("node") .expect("TRACE_EVENT::node not found") .as_str() .expect("TRACE_EVENT::node is not a string"); let stage = entry .json .get("stage") .expect("TRACE_EVENT::stage not found") .as_str() .expect("TRACE_EVENT::stage is not a string"); if node.starts_with(prefix) && stage == f_stage { return Some(node.to_string()); } } None } pub fn trace_node(entries: &[JsonLogEntry], node_name: &str) { let mut nodes = vec![]; nodes.push(node_name); for entry in entries { if entry.name != TRACE_EDGE { continue; } let node_from = entry .json .get("node") .expect("TRACE_EDGE::node not found") .as_str() .expect("TRACE_EDGE::node is not a string"); if nodes.contains(&node_from) { let node_to = entry .json .get("node_to") .expect("TRACE_EDGE::node_to not found") .as_str() .expect("TRACE_EDGE::node_to is not a string"); nodes.push(node_to); } } let mut start_time = None; for entry in entries .iter() .filter(|entry| entry.name.starts_with("trace_")) { let node = entry .json .get("node") .expect("TRACE_EVENT::node not found") .as_str() .expect("TRACE_EVENT::node is not a string"); if 
!nodes.contains(&node) { continue; } let ts = entry.timestamp as u64; let peer = entry .json .get("peer") .and_then(|v| v.as_str()) .unwrap_or(""); if start_time.is_none() { start_time = Some(ts); } let trace_time = ts - start_time.unwrap(); let stage = entry .json .get("stage") .expect("::stage not found") .as_str() .expect("::stage is not a string"); let path = entry .json .get("path") .expect("::path not found") .as_str() .expect("::path is not a string"); let duration = entry.json.get("duration").and_then(|v| v.as_str()); let crate_name = crate_name(path); match entry.name.as_ref() { TRACE_EVENT => { let node = entry .json .get("node") .expect("TRACE_EVENT::node not found") .as_str() .expect("TRACE_EVENT::node is not a string"); if nodes.contains(&node) { let end = entry.json.get("end").and_then(|m| m.as_bool()); let end_str = end.map_or("", |f| if f { " *end" } else { "" }); let duration_str = duration.map_or("".to_string(), |d| format!(" [{}]", d)); println!( "{}[{:^11}] +{:05} {:*^10} {} {}{}{}{}", crate_color(crate_name), crate_name, trace_time, peer, node, stage, duration_str, end_str, reset_color() ); if end == Some(true) { return; } } } TRACE_EDGE => { let node_to = entry .json .get("node_to") .expect("TRACE_EDGE::node_to not found") .as_str() .expect("TRACE_EDGE::node_to is not a string"); println!( "{}[{:^11}] +{:05} {:*^10} {}->{} {}{}", crate_color(crate_name), crate_name, trace_time, peer, node, node_to, stage, reset_color() ); } _ => {} } } } fn reset_color() -> &'static str { "\x1B[K\x1B[49m" } fn crate_color(path: &str) -> &'static str { match path { "consensus" => "\x1B[43m", "mempool" => "\x1B[46m", "executor" => "\x1B[104m", "ac" => "\x1B[103m", "json_rpc" => "\x1B[103m", "vm" => "\x1B[45m", _ => "\x1B[49m", } } fn crate_name(path: &str) -> &str { let name = match path.find("::") { Some(pos) => &path[0..pos], None => path, }; let name = if name.starts_with("libra_") { &name["libra_".len()..] 
} else { name }; abbreviate_crate(name) } fn abbreviate_crate(name: &str) -> &str { match name { "admission_control_service" => "ac", _ => name, } } // This is exact copy of similar function in log crate /// Sets libra trace config pub fn set_libra_trace(config: &HashMap<String, String>) -> Result<()> { match parse_sampling_config(config) { Ok(sampling) => unsafe { match LIBRA_TRACE_STATE.compare_and_swap(UNINITIALIZED, INITIALIZING, Ordering::SeqCst) { UNINITIALIZED => { SAMPLING_CONFIG = Some(sampling); LIBRA_TRACE_STATE.store(INITIALIZED, Ordering::SeqCst); Ok(()) } INITIALIZING => { while LIBRA_TRACE_STATE.load(Ordering::SeqCst) == INITIALIZING {} bail!("Failed to initialize LIBRA_TRACE_STATE"); } _ => bail!("Failed to initialize LIBRA_TRACE_STATE"), } }, Err(s) => bail!("Failed to parse sampling config: {}", s), } } fn parse_sampling_config(config: &HashMap<String, String>) -> Result<Sampling> { let mut map = HashMap::new(); for (category, rate) in config { let k: &'static str = Box::leak(category.clone().into_boxed_str()); let v = rate.split('/').collect::<Vec<&str>>(); ensure!( v.len() == 2, "Failed to parse {:?} in nominator/denominator format", rate ); let v = CategorySampling { nominator: v[0].parse::<u64>()?, denominator: v[1].parse::<u64>()?, }; map.insert(k, v); } Ok(Sampling(map)) } /// Checks if libra trace is enabled pub fn libra_trace_set() -> bool { LIBRA_TRACE_STATE.load(Ordering::SeqCst) == INITIALIZED } pub fn is_selected(node: (&'static str, u64)) -> bool { if !libra_trace_set() { return false; } unsafe { match &SAMPLING_CONFIG { Some(Sampling(sampling)) => { if let Some(sampling_rate) = sampling.get(node.0) { node.1 % sampling_rate.denominator < sampling_rate.nominator } else { // assume no sampling if sampling category is not found and return true true } } None => false, } } }
30.486811
101
0.496972
db60616531805c83a5ac1f4174cbb36ae84d2be8
2,837
use element::ElementEntry; use hdk3::prelude::*; /// an example inner value that can be serialized into the contents of Entry::App() #[derive(Deserialize, Serialize, SerializedBytes)] enum ThisWasmEntry { AlwaysValidates, NeverValidates, } impl From<&ThisWasmEntry> for EntryDefId { fn from(entry: &ThisWasmEntry) -> Self { match entry { ThisWasmEntry::AlwaysValidates => "always_validates", ThisWasmEntry::NeverValidates => "never_validates", } .into() } } impl From<&ThisWasmEntry> for CrdtType { fn from(_: &ThisWasmEntry) -> Self { Self } } impl From<&ThisWasmEntry> for RequiredValidations { fn from(_: &ThisWasmEntry) -> Self { 5.into() } } impl From<&ThisWasmEntry> for EntryVisibility { fn from(_: &ThisWasmEntry) -> Self { Self::Public } } impl From<&ThisWasmEntry> for EntryDef { fn from(entry: &ThisWasmEntry) -> Self { Self { id: entry.into(), crdt_type: entry.into(), required_validations: entry.into(), visibility: entry.into(), required_validation_type: Default::default(), } } } impl TryFrom<&Entry> for ThisWasmEntry { type Error = EntryError; fn try_from(entry: &Entry) -> Result<Self, Self::Error> { match entry { Entry::App(eb) => Ok(Self::try_from(SerializedBytes::from(eb.to_owned()))?), _ => Err( SerializedBytesError::FromBytes("failed to deserialize ThisWasmEntry".into()) .into(), ), } } } entry_defs![ (&ThisWasmEntry::AlwaysValidates).into(), (&ThisWasmEntry::NeverValidates).into() ]; #[hdk_extern] fn validate(data: ValidateData) -> ExternResult<ValidateCallbackResult> { let element = data.element; let entry = element.into_inner().1; let entry = match entry { ElementEntry::Present(e) => e, _ => return Ok(ValidateCallbackResult::Valid), }; if let Entry::Agent(_) = entry { return Ok(ValidateCallbackResult::Valid); } Ok(match ThisWasmEntry::try_from(&entry) { Ok(ThisWasmEntry::AlwaysValidates) => ValidateCallbackResult::Valid, Ok(ThisWasmEntry::NeverValidates) => { ValidateCallbackResult::Invalid("NeverValidates never validates".to_string()) } _ => 
ValidateCallbackResult::Invalid("Not a ThisWasmEntry".to_string()), }) } fn _commit_validate(to_commit: ThisWasmEntry) -> ExternResult<HeaderHash> { Ok(create_entry(&to_commit)?) } #[hdk_extern] fn always_validates(_: ()) -> ExternResult<HeaderHash> { _commit_validate(ThisWasmEntry::AlwaysValidates) } #[hdk_extern] fn never_validates(_: ()) -> ExternResult<HeaderHash> { _commit_validate(ThisWasmEntry::NeverValidates) }
27.813725
93
0.632006
edca288a688aa413390c7e8d9017336cdf37b15f
6,489
use crate::actix::Actor as ActixActor; use crate::actix::ActorContext; use crate::actix::AsyncContext; use crate::actix::Running; use crate::actix::StreamHandler; use crate::actix_web::middleware; use crate::actix_web::web; use crate::actix_web::web::Data as ActixData; use crate::actix_web::web::Payload; use crate::actix_web::App as ActixApp; use crate::actix_web::Error as HttpError; use crate::actix_web::HttpRequest; use crate::actix_web::HttpResponse; use crate::actix_web::HttpServer as ActixHttpServer; use crate::actix_web_actors::ws::start as ws_start; use crate::actix_web_actors::ws::Message as WsMessage; use crate::actix_web_actors::ws::ProtocolError as WsProtocolError; use crate::actix_web_actors::ws::WebsocketContext; use crate::auth::AuthMode; use crate::common_types::CommonResponse; use crate::debug; use crate::futures::future::ok; use crate::futures::prelude::*; use crate::info; use crate::ACTOR_MAILBOX_CAPACITY; use crate::NOTFOUND_MESSAGE; use std::collections::HashMap; use std::io::Result as IOResult; use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering; use std::sync::Arc; use std::time::Duration; use std::time::Instant; pub struct PeriodicWebsocketConfig { pub binding_url: String, pub binding_path: String, pub max_clients: usize, pub periodic_interval: Duration, pub rapid_request_limit: Duration, pub periodic_message_getter: Arc<&'static (dyn Fn() -> String + Sync + Send)>, pub auth: AuthMode, } pub struct PeriodicWebsocketState { pub active_clients: AtomicUsize, pub rejection_counter: AtomicUsize, pub config: PeriodicWebsocketConfig, } pub(crate) struct PeriodicBroadcastActor { last_request_stopwatch: Instant, rapid_request_limit: Duration, periodic_interval: Duration, client_closed_callback: Box<dyn Fn()>, periodic_message_getter: Arc<&'static (dyn Fn() -> String + Sync + Send)>, } impl PeriodicWebsocketState { pub fn new(config: PeriodicWebsocketConfig) -> Self { Self { active_clients: AtomicUsize::new(0), rejection_counter: 
AtomicUsize::new(0), config, } } } impl PeriodicBroadcastActor { fn new(config: &'static PeriodicWebsocketConfig, client_closed_callback: Box<dyn Fn()>) -> Self { Self { last_request_stopwatch: Instant::now(), rapid_request_limit: config.rapid_request_limit, periodic_interval: config.periodic_interval, client_closed_callback, periodic_message_getter: config.periodic_message_getter.clone(), } } } impl ActixActor for PeriodicBroadcastActor { type Context = WebsocketContext<Self>; fn started(&mut self, context: &mut Self::Context) { context.set_mailbox_capacity(ACTOR_MAILBOX_CAPACITY); self.start_periodic_broadcast(context); } fn stopping(&mut self, _: &mut Self::Context) -> Running { (*self.client_closed_callback)(); Running::Stop } } impl StreamHandler<WsMessage, WsProtocolError> for PeriodicBroadcastActor { fn handle(&mut self, payload: WsMessage, context: &mut Self::Context) { if self.last_request_stopwatch.elapsed() < self.rapid_request_limit { context.stop(); return; } self.last_request_stopwatch = Instant::now(); match payload { WsMessage::Close(_) => context.stop(), WsMessage::Ping(ping_payload) => context.pong(&ping_payload), WsMessage::Text(text) => { if text.len() < 4 { return; } if let "ping" = &text.to_lowercase()[0..4] { context.text("pong") } } _ => (), } } } impl PeriodicBroadcastActor { fn start_periodic_broadcast(&self, context: &mut <Self as ActixActor>::Context) { let tick_handler = self.periodic_message_getter.clone(); context.run_interval(self.periodic_interval, move |_, ctx| { ctx.text(tick_handler()); }); } } fn reject_unmapped_handler( shared_state: ActixData<Arc<&'static PeriodicWebsocketState>>, ) -> Box<dyn Future<Item = HttpResponse, Error = HttpError>> { shared_state.rejection_counter.fetch_add(1, Ordering::Relaxed); debug!( "Rejected counter increased to {}", shared_state.rejection_counter.load(Ordering::Relaxed) ); let mut error = Vec::default(); error.push(NOTFOUND_MESSAGE.to_owned()); let response_data = CommonResponse { error, 
result: HashMap::new(), }; Box::new(ok::<_, HttpError>( HttpResponse::NotFound().body(serde_json::to_string(&response_data).unwrap()), )) } fn ws_upgrader( shared_state: ActixData<Arc<&'static PeriodicWebsocketState>>, request: HttpRequest, stream: Payload, ) -> Result<HttpResponse, HttpError> { let PeriodicWebsocketState { active_clients, config, .. } = shared_state.get_ref().as_ref(); config.auth.validate(&request)?; let upgrade_result = ws_start( PeriodicBroadcastActor::new( &config, Box::new(move || { let active_clients = active_clients.fetch_sub(1, Ordering::Relaxed); info!( "Client connection closed, current active client is {}", active_clients - 1 ); }), ), &request, stream, ); if upgrade_result.is_ok() { let active_clients = shared_state.active_clients.fetch_add(1, Ordering::Relaxed); info!( "Client connection successful, current active client is {}", active_clients + 1 ); } upgrade_result } pub fn run_periodic_websocket_service(state: Arc<&'static PeriodicWebsocketState>) -> IOResult<()> { let PeriodicWebsocketConfig { binding_url, binding_path, max_clients, .. } = &state.config; let shared_data = ActixData::new(state); ActixHttpServer::new(move || { ActixApp::new() .register_data(shared_data.clone()) .wrap(middleware::Logger::default()) .service(web::resource(&binding_path).route(web::get().to(ws_upgrader))) .default_service(web::route().to_async(reject_unmapped_handler)) }) .maxconn(*max_clients) .shutdown_timeout(1) .bind(binding_url) .unwrap() .run() }
32.445
101
0.647557
bb415e1ae3d5bf3115ecbcd5aa83be9453d8fb18
5,360
use crate::{Message, MessageSegment, Result, User}; use chrono::{DateTime, Utc}; use serde::Serialize; use std::{collections::HashMap, fmt::Display}; #[derive(Debug, Clone)] pub struct Event { pub id: String, pub platform: String, pub time: DateTime<Utc>, pub content: EventContent, pub bot_user: User, } impl Event { pub fn build<S: Display>(id: S) -> EventBuilder { EventBuilder { id: id.to_string(), platform: String::new(), time: Utc::now(), bot_user: User::new("-1"), } } pub fn platform<S: Display>(mut self, platform: S) -> Self { self.platform = platform.to_string(); self } pub fn time(mut self, time: DateTime<Utc>) -> Self { self.time = time; self } pub fn bot_user<S: Display>(mut self, user: User) -> Self { self.bot_user = user; self } pub(crate) fn to_json(&self) -> Result<String> { let ret = serde_json::to_string(&EventJson::from(self.clone()))?; Ok(ret) } } pub struct EventBuilder { id: String, platform: String, time: DateTime<Utc>, bot_user: User, } impl EventBuilder { pub fn platform<S: Display>(mut self, platform: S) -> Self { self.platform = platform.to_string(); self } pub fn time(mut self, time: DateTime<Utc>) -> Self { self.time = time; self } pub fn bot_user<S: Display>(mut self, user: User) -> Self { self.bot_user = user; self } pub fn message(self, message: Message) -> Event { Event { id: self.id, platform: self.platform, time: self.time, content: EventContent::Message(message), bot_user: self.bot_user, } } pub fn notice(self, notice: Notice) -> Event { Event { id: self.id, platform: self.platform, time: self.time, content: EventContent::Notice(notice), bot_user: self.bot_user, } } pub fn request(self, request: Request) -> Event { Event { id: self.id, platform: self.platform, time: self.time, content: EventContent::Request(request), bot_user: self.bot_user, } } pub fn meta(self, meta: Meta) -> Event { Event { id: self.id, platform: self.platform, time: self.time, content: EventContent::Meta(meta), bot_user: self.bot_user, } } } #[derive(Serialize)] 
struct EventJson { id: String, platform: String, time: i64, self_id: String, r#type: String, detail_type: Option<String>, sub_type: String, message: Option<Vec<MessageSegment>>, message_id: Option<String>, user_id: Option<String>, alt_message: Option<String>, group_id: Option<String>, flag: Option<String>, } impl From<Event> for EventJson { fn from(event: Event) -> Self { Self { id: event.id, platform: event.platform, time: event.time.timestamp(), self_id: event.bot_user.id.clone(), r#type: event.content.r#type(), detail_type: if let EventContent::Message(message) = &event.content { use crate::message::MessageSource; Some( match message.source { MessageSource::Private(_) => "private", MessageSource::Group(_) => "group", } .to_string(), ) } else { None }, sub_type: String::new(), message: if let EventContent::Message(message) = &event.content { Some(message.content.clone()) } else { None }, message_id: if let EventContent::Message(message) = &event.content { Some(message.id.clone()) } else { None }, user_id: if let EventContent::Message(message) = &event.content { Some(message.sender.id.clone()) } else { None }, alt_message: None, group_id: if let EventContent::Message(message) = &event.content { use crate::message::MessageSource; if let MessageSource::Group(group) = &message.source { Some(group.id.clone()) } else { None } } else { None }, flag: None, } } } #[derive(Debug, Clone)] pub enum EventContent { Message(Message), Notice(Notice), Request(Request), Meta(Meta), } impl EventContent { fn r#type(&self) -> String { match self { Self::Message(_) => "message", Self::Notice(_) => "notice", Self::Request(_) => "request", Self::Meta(_) => "meta", } .to_string() } } #[derive(Debug, Clone)] pub struct Notice {} #[derive(Debug, Clone)] pub struct Request { flag: String, } #[derive(Debug, Clone)] pub struct Meta { extended: HashMap<String, String>, }
24.474886
81
0.508209
0932a498da48e808c7841260d6571e0f49e43400
2,735
use std::fmt; use geo::Coordinate; use rust_editor::{ actions::{Action, Redo, Undo}, gizmo::{GetPosition, Id}, }; use uuid::Uuid; use crate::map::{intersection::Intersection, map::Map}; pub(crate) struct DeleteIntersection { id: Uuid, position: Coordinate<f64>, } impl DeleteIntersection { pub fn new(intersection: &Intersection) -> Self { DeleteIntersection { id: intersection.id(), position: intersection.position(), } } } impl Undo<Map> for DeleteIntersection { fn undo(&mut self, map: &mut Map) { map.add_intersection(Intersection::new_with_id(self.position, self.id)); } } impl Redo<Map> for DeleteIntersection { fn redo(&mut self, map: &mut Map) { map.intersections_mut().remove(&self.id); /* if let Some(removed) = { self.update_bounding_box(); return Some(removed); } None */ } } impl Action<Map> for DeleteIntersection {} impl fmt::Display for DeleteIntersection { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "[delete_intersection] id={}, position={:#?}", self.id, self.position) } } #[cfg(test)] mod tests { use geo::Coordinate; use rust_editor::{ actions::{Redo, Undo}, gizmo::Id, }; use uuid::Uuid; use crate::map::{ actions::intersection::delete::DeleteIntersection, intersection::Intersection, map::Map, }; fn create_map() -> Map { Map::new(100, 100) } fn add_intersection(position: Coordinate<f64>, map: &mut Map) -> Uuid { let id = Uuid::new_v4(); let intersection = Intersection::new_with_id(position, id); map.intersections.insert(intersection.id(), intersection); id } #[test] fn intersection_delete_redo_works() { let mut map = create_map(); let position = Coordinate { x: 100., y: 100. }; add_intersection(position, &mut map); assert_eq!(map.intersections.len(), 1); let mut action = DeleteIntersection::new(map.intersections.values().next().unwrap()); action.redo(&mut map); assert_eq!(map.intersections.len(), 0); } #[test] fn intersection_delete_undo_works() { let mut map = create_map(); let position = Coordinate { x: 100., y: 100. 
}; add_intersection(position, &mut map); assert_eq!(map.intersections.len(), 1); let mut action = DeleteIntersection::new(map.intersections.values().next().unwrap()); action.redo(&mut map); assert_eq!(map.intersections.len(), 0); action.undo(&mut map); assert_eq!(map.intersections.len(), 1); } }
24.20354
96
0.595978
898500e8f11f93da7b0ac2cbc5dfc8f556941dda
10,704
extern crate clap; //use clap::{Arg, App, SubCommand}; use tree_sitter::*; //use std::{fs::File, fs::OpenOptions}; /* use std::io::{ prelude::*, BufReader, BufWriter, SeekFrom::Start }; use std::convert::TryInto; */ use std::io::{Error, ErrorKind}; use serde::Deserialize; #[derive(Debug, Deserialize)] struct SymbolMapping { #[serde(rename = "Name")] name : String, #[serde(rename = "ASCII")] ascii : String, #[serde(rename = "Unicode")] unicode : String } impl SymbolMapping { fn canonical_ascii(&self) -> &str { return self.ascii.split(";").next().unwrap(); } fn ascii_query(&self) -> Query { let query = self.ascii .split(";") .map(|a| a.replace("\\", "\\\\")) .map(|a| format!("\"{}\"", a)) .reduce(|a, b| a + " " + &b) .unwrap(); let query = format!("({} [{}] @match)", self.name, query); println!("{}", query); return Query::new(tree_sitter_tlaplus::language(), &query).unwrap(); } fn to_ascii(&self, text : &mut String, node : &Node) -> InputEdit { *text = text[..node.start_byte()].to_string() + self.canonical_ascii() + &text[node.end_byte()..]; return InputEdit { start_byte: node.start_byte(), old_end_byte: node.end_byte(), new_end_byte: node.start_byte() + self.canonical_ascii().len(), start_position: node.start_position(), old_end_position: node.end_position(), new_end_position: Point::new(node.start_position().row, node.start_position().column + self.canonical_ascii().len()) }; } fn unicode_query(&self) -> Query { let query = format!("({} \"{}\" @match)", self.name, self.unicode); return Query::new(tree_sitter_tlaplus::language(), &query).unwrap(); } fn to_unicode(&self, text : &mut String, node : &Node) -> InputEdit { *text = text[..node.start_byte()].to_string() + self.unicode.as_str() + &text[node.end_byte()..]; return InputEdit { start_byte: node.start_byte(), old_end_byte: node.end_byte(), new_end_byte: node.start_byte() + self.unicode.len(), start_position: node.start_position(), old_end_position: node.end_position(), new_end_position: 
Point::new(node.start_position().row, node.start_position().column + self.unicode.len()) }; } } fn get_unicode_mappings() -> Result<Vec<SymbolMapping>, Error> { let exe_path = std::env::current_exe()?; let exe_dir = exe_path.as_path().parent().ok_or( Error::new(ErrorKind::Other, "Exe does not have parent") )?; let csv_path = exe_dir.join("tla-unicode.csv"); let mut reader = csv::Reader::from_path(csv_path)?; let mut records = Vec::new(); for result in reader.deserialize() { let record : SymbolMapping = result?; records.push(record); } return Ok(records); } fn rewrite_next_to_unicode( mappings : &Vec<SymbolMapping>, text : &mut String, tree : &Tree, cursor : &mut QueryCursor ) -> Option<InputEdit> { for mapping in mappings { //println!("Mapping [{}] -> [{}]", mapping.ascii[0], mapping.unicode); let query = mapping.ascii_query(); for m in cursor.matches(&query, tree.root_node(), |_| "") { for c in m.captures { //println!("{:?}", c); return Some(mapping.to_unicode(text, &c.node)); } } } return None; } fn rewrite_next_to_ascii( mappings : &Vec<SymbolMapping>, text : &mut String, tree : &Tree, cursor : &mut QueryCursor ) -> Option<InputEdit> { for mapping in mappings { //println!("Mapping [{}] -> [{}]", mapping.ascii[0], mapping.unicode); let query = mapping.unicode_query(); for m in cursor.matches(&query, tree.root_node(), |_| "") { for c in m.captures { //println!("{:?}", c); return Some(mapping.to_ascii(text, &c.node)); } } } return None; } fn rewrite() { let mappings = get_unicode_mappings().expect("BAD"); let mut input = r#"---- MODULE Test ---- op == \A n \in Nat : TRUE op2 == ∀ n \in Nat : TRUE op3 == \forall n \in Nat : TRUE ===="#.to_string(); println!("{}", input); let mut parser = Parser::new(); parser.set_language(tree_sitter_tlaplus::language()).expect("Error loading TLA+ grammar"); let mut tree = parser.parse(&input, None).unwrap(); let mut cursor = QueryCursor::new(); while let Some(edit) = rewrite_next_to_unicode(&mappings, &mut input, &tree, &mut 
cursor) { tree.edit(&edit); tree = parser.parse(&input, Some(&tree)).unwrap(); } println!("{}", input); while let Some(edit) = rewrite_next_to_ascii(&mappings, &mut input, &tree, &mut cursor) { tree.edit(&edit); tree = parser.parse(&input, Some(&tree)).unwrap(); } println!("{}", input); } /* fn symbol_to_unicode(node_name : &str) -> Option<&str> { match node_name { "\\A" => Some("∀"), "\\in" => Some("∈"), "==" => Some("≜"), _ => None } } fn walk_tree(mut cursor : TreeCursor) -> Option<(Node, &str, &str)> { loop { if let Some(uc) = symbol_to_unicode(cursor.node().kind()) { return Some((cursor.node(), cursor.node().kind(), uc)); } // Try to go to first child. // If no such child exists, try to go to next sibling. // If no such sibling exists, try to go to next sibling of parent. // If no such node exists, walk up tree until finding a sibling of a parent. // If no parent exists, we have walked the entire tree. if !cursor.goto_first_child() { loop { if cursor.goto_next_sibling() { break; } else { if !cursor.goto_parent() { return None; } } } } } } fn to_unicode( spec : &str, ignore_errors : bool ) { let mut parser = Parser::new(); parser.set_language(tree_sitter_tlaplus::language()).expect("Error loading TLA+ grammar"); match OpenOptions::new().read(true).open(spec) { Ok(f) => { let lines : Vec<String> = BufReader::new(f) .lines() .map(|l| l.expect("Could not parse line")) .collect(); let tree = parser.parse_with(&mut |_byte: usize, position: Point| -> &[u8] { let row = position.row as usize; let column = position.column as usize; if row < lines.len() { if column < lines[row].as_bytes().len() { &lines[row].as_bytes()[column..] 
} else { "\n".as_bytes() } } else { &[] } }, None).unwrap(); if tree.root_node().has_error() && !ignore_errors { println!("Cannot translate file due to parse errors; use --ignore-error flag to force translation."); std::process::exit(-1); } if let Some((n, k, uc)) = walk_tree(tree.walk()) { match OpenOptions::new().write(true).open(spec) { Ok(f) => { let mut out = BufWriter::new(f); out.seek(Start(n.start_byte().try_into().unwrap())); out.write(uc.as_bytes()); } Err(e) => { println!("Error opening file [{}]: [{}]", spec, e); std::process::exit(-1); } } } println!("{}", tree.root_node().has_error()); println!("{}", tree.root_node().to_sexp()); } Err(e) => { println!("Error opening file [{}]: [{}]", spec, e); std::process::exit(-1); } } } fn to_ascii(spec : &mut File, ignore_errors : bool) { } */ fn main() { rewrite(); /* let matches = App::new("TLA+ Unicode Converter") .version("0.1.0") .author("Andrew Helwer <[email protected]>") .about("Converts symbols in TLA+ specs to and from unicode") .arg(Arg::with_name("out_file") .short("o") .long("out") .help("Output file; rewrites input file if not given") .takes_value(true) .required(false) ) .arg(Arg::with_name("ignore_parse_errors") .short("e") .long("ignore-error") .help("Whether to convert file despite parse errors") .required(false) ) .subcommand(SubCommand::with_name("unicode") .about("Converts symbols in TLA+ spec to unicode from ASCII") .arg(Arg::with_name("spec") .help("The TLA+ spec file to convert") .required(true) .index(1) ) ).subcommand(SubCommand::with_name("ascii") .about("Converts symbols in TLA+ spec from unicode to ASCII") .arg(Arg::with_name("spec") .help("The TLA+ spec file to convert") .required(true) .index(1) ) ).get_matches(); let ignore_errors = matches.is_present("ignore_parse_errors"); if let Some(subcommand_matches) = matches.subcommand_matches("unicode") { let mut spec = subcommand_matches.value_of("spec").unwrap(); if let Some(out_file) = matches.value_of("out_file") { if let Err(e) = 
std::fs::copy(spec, out_file) { println!("Failed to copy [{}] to [{}]: {}", spec, out_file, e); std::process::exit(-1); } spec = out_file; } to_unicode(spec, ignore_errors); } else if let Some(matches) = matches.subcommand_matches("ascii") { let file_path_str = matches.value_of("spec").unwrap(); match OpenOptions::new().read(true).write(true).open(file_path_str) { Ok(mut spec) => to_ascii(&mut spec, ignore_errors), Err(e) => println!("Error opening input file [{}]: [{}]", file_path_str, e) } } else { println!("{}", matches.usage()); } */ }
33.242236
128
0.51392
eb796e238bcd70a3281e311f4f94ccc5169c6bd4
13,525
use clap::{App, Arg}; use futures_lite::{ stream::{self, StreamExt}, AsyncReadExt, AsyncWriteExt, }; use glommio::{ enclose, io::{ BufferedFile, DmaFile, DmaStreamReader, DmaStreamReaderBuilder, DmaStreamWriterBuilder, MergedBufferLimit, ReadAmplificationLimit, StreamReaderBuilder, StreamWriterBuilder, }, LocalExecutorBuilder, Placement, }; use pretty_bytes::converter; use std::{ cell::Cell, fs, path::PathBuf, rc::Rc, time::{Duration, Instant}, }; struct BenchDirectory { path: PathBuf, } impl Drop for BenchDirectory { fn drop(&mut self) { let _ = fs::remove_dir_all(&self.path); } } impl BenchDirectory { fn new(path: PathBuf) -> Self { fs::create_dir_all(&path).unwrap(); BenchDirectory { path } } } async fn stream_write<T: AsyncWriteExt + std::marker::Unpin, S: Into<String>>( mut stream: T, name: S, file_size: u64, ) { let contents = vec![1; 512 << 10]; let start = Instant::now(); for _ in 0..(file_size / (contents.len() as u64)) { stream.write_all(&contents).await.unwrap(); } let name = name.into(); let endw = Instant::now(); let time = start.elapsed(); let bytes = converter::convert(file_size as _); let rate = converter::convert((file_size as f64 / time.as_secs_f64()) as _); println!("{}: Wrote {} in {:#?}, {}/s", &name, bytes, time, rate); stream.close().await.unwrap(); let rate = converter::convert((file_size as f64 / start.elapsed().as_secs_f64()) as _); let time = endw.elapsed(); println!( "{}: Closed in {:#?}, Amortized total {}/s", &name, time, rate ); } async fn stream_scan<T: AsyncReadExt + std::marker::Unpin, S: Into<String>>( mut stream: T, name: S, ) -> T { let mut buf = vec![0; 4 << 10]; let expected = vec![1; 4 << 10]; let mut bytes_read = 0; let mut ops = 0; let start = Instant::now(); loop { let res = stream.read(&mut buf).await.unwrap(); bytes_read += res; ops += 1; if res == 0 { break; } assert_eq!(expected, buf); } let time = start.elapsed(); let name = name.into(); let bytes = converter::convert(bytes_read as _); let rate = 
converter::convert((bytes_read as f64 / time.as_secs_f64()) as _); println!( "{}: Scanned {} in {:#?}, {}/s, {} IOPS", &name, bytes, time, rate, (ops as f64 / time.as_secs_f64()) as usize ); stream } async fn stream_scan_alt_api<S: Into<String>>( mut stream: DmaStreamReader, name: S, buffer_size: usize, ) { let mut expected = Vec::with_capacity(buffer_size); expected.resize(buffer_size, 1u8); let mut bytes_read = 0; let mut ops = 0; let start = Instant::now(); loop { let buffer = stream.get_buffer_aligned(buffer_size as _).await.unwrap(); bytes_read += buffer.len(); ops += 1; if buffer.len() < buffer_size { break; } assert_eq!(*expected, *buffer); } let time = start.elapsed(); let name = name.into(); let bytes = converter::convert(bytes_read as _); let rate = converter::convert((bytes_read as f64 / time.as_secs_f64()) as _); println!( "{}: Scanned {} in {:#?}, {}/s, {} IOPS", &name, bytes, time, rate, (ops as f64 / time.as_secs_f64()) as usize ); stream.close().await.unwrap(); } enum Reader { Direct(Rc<DmaFile>), Buffered(BufferedFile), } impl Reader { async fn read(&self, pos: u64, io_size: u64, _expected: &[u8]) { match &self { Reader::Direct(file) => { file.read_at_aligned(pos, io_size as _).await.unwrap(); } Reader::Buffered(file) => { file.read_at(pos, io_size as _).await.unwrap(); } } } async fn read_many<S: Iterator<Item = (u64, usize)>>( &self, iovs: S, _expected: &[u8], max_buffer_size: usize, ) { match &self { Reader::Direct(file) => { file.read_many( futures_lite::stream::iter(iovs), MergedBufferLimit::Custom(max_buffer_size), ReadAmplificationLimit::NoAmplification, ) .for_each(|_| {}) .await; } Reader::Buffered(_) => { panic!("bulk io is not available for buffered files") } } } async fn close(self) { match self { Reader::Direct(file) => { file.close_rc().await.unwrap(); } Reader::Buffered(file) => { file.close().await.unwrap(); } } } } async fn random_read<S: Into<String>>( file: Reader, name: S, random: u64, parallelism: usize, io_size: u64, ) { let 
end = (random / io_size) - 1; let name = name.into(); let mut expected = Vec::with_capacity(io_size as _); expected.resize(io_size as _, 1); let file = Rc::new(file); let iops = Rc::new(Cell::new(0)); let time = Instant::now(); let mut tasks = Vec::new(); for _ in 0..parallelism { tasks.push( glommio::spawn_local(enclose! { (file, iops, expected) async move { while time.elapsed() < Duration::from_secs(20) { let pos = fastrand::u64(0..end); file.read(pos * io_size, io_size as _, &expected).await; iops.set(iops.get() + 1); } }}) .detach(), ); } let finished = stream::iter(tasks).then(|f| f).count().await; match Rc::try_unwrap(file) { Err(_) => unreachable!(), Ok(file) => file.close().await, }; assert_eq!(finished, parallelism as _); let bytes = converter::convert(random as _); let dur = time.elapsed(); println!( "{}: Random Read (uniform) size span of {}, for {:#?}, {} IOPS", &name, bytes, dur, (iops.get() as f64 / dur.as_secs_f64()) as usize ); } async fn random_many_read<S: Into<String>>( file: Reader, name: S, random: u64, parallelism: usize, io_size: u64, max_buffer_size: usize, ) { let end = (random / io_size) - 1; let name = name.into(); let mut expected = Vec::with_capacity(io_size as _); expected.resize(io_size as _, 1); let file = Rc::new(file); let iops = Rc::new(Cell::new(0)); let time = Instant::now(); let mut tasks = Vec::new(); for _ in 0..parallelism { tasks.push( glommio::spawn_local(enclose! 
{ (file, iops, expected) async move { while time.elapsed() < Duration::from_secs(20) { file.read_many((0..parallelism).map(|_| { let pos = fastrand::u64(0..end); ((pos * io_size) as u64, io_size as usize) }), &expected, max_buffer_size).await; iops.set(iops.get() + parallelism); } }}) .detach(), ); } let finished = stream::iter(tasks).then(|f| f).count().await; match Rc::try_unwrap(file) { Err(_) => unreachable!(), Ok(file) => file.close().await, }; assert_eq!(finished, parallelism as _); let bytes = converter::convert(random as _); let max_merged = converter::convert(max_buffer_size as _); let dur = time.elapsed(); println!( "{}: Random Bulk Read (uniform) size span of {}, for {:#?} (max merged size of {}), {} \ IOPS", &name, bytes, dur, max_merged, (iops.get() as f64 / dur.as_secs_f64()) as usize ); } fn main() { let matches = App::new("storage example") .version("0.1.0") .author("Glauber Costa <[email protected]>") .about("demonstrate glommio's storage APIs") .arg( Arg::with_name("storage_dir") .long("dir") .takes_value(true) .required(true) .help("The directory where to write and read file for this test"), ) .arg( Arg::with_name("file_size") .long("size-gb") .takes_value(true) .required(false) .help("size of the file in GB (default: 2 * memory_size)"), ) .get_matches(); let path = matches.value_of("storage_dir").unwrap(); let mut dir = PathBuf::from(path); assert!(dir.exists()); dir.push("benchfiles"); assert!(!dir.exists(), "{:?} already exists", dir); let dir = BenchDirectory::new(dir); let total_memory = sys_info::mem_info().unwrap().total << 10; let file_size = matches .value_of("file_size") .map(|s| s.parse::<u64>().unwrap() << 30) .unwrap_or(total_memory * 2); let random = total_memory / 10; let local_ex = LocalExecutorBuilder::new(Placement::Fixed(0)) .spin_before_park(Duration::from_millis(10)) .spawn(move || async move { let mut dio_filename = dir.path.clone(); dio_filename.push("benchfile-dio-1"); let mut buf_filename = dir.path.clone(); 
buf_filename.push("benchfile-buf-1"); let file = BufferedFile::create(&buf_filename).await.unwrap(); let stream = StreamWriterBuilder::new(file).build(); stream_write(stream, "Buffered I/O", file_size).await; let file = DmaFile::create(&dio_filename).await.unwrap(); let stream = DmaStreamWriterBuilder::new(file) .with_write_behind(1) .with_buffer_size(512 << 10) .build(); stream_write(stream, "Direct I/O", file_size).await; let file = DmaFile::create(&dio_filename).await.unwrap(); let stream = DmaStreamWriterBuilder::new(file) .with_write_behind(10) .with_buffer_size(512 << 10) .build(); stream_write(stream, "Direct I/O, write-behind", file_size).await; let file = BufferedFile::open(&buf_filename).await.unwrap(); let stream = StreamReaderBuilder::new(file).build(); let stream = stream_scan(stream, "Buffered I/O").await; stream.close().await.unwrap(); let file = DmaFile::open(&dio_filename).await.unwrap(); let stream = DmaStreamReaderBuilder::new(file) .with_read_ahead(1) .with_buffer_size(4 << 10) .build(); let stream = stream_scan(stream, "Direct I/O").await; stream.close().await.unwrap(); let file = DmaFile::open(&dio_filename).await.unwrap(); let stream = DmaStreamReaderBuilder::new(file) .with_read_ahead(50) .with_buffer_size(4 << 10) .build(); let stream = stream_scan(stream, "Direct I/O, read ahead").await; stream.close().await.unwrap(); let file = DmaFile::open(&dio_filename).await.unwrap(); let stream = DmaStreamReaderBuilder::new(file) .with_read_ahead(50) .with_buffer_size(4 << 10) .build(); stream_scan_alt_api(stream, "Direct I/O, glommio API", 4 << 10).await; let file = DmaFile::open(&dio_filename).await.unwrap(); let stream = DmaStreamReaderBuilder::new(file) .with_read_ahead(10) .with_buffer_size(512 << 10) .build(); stream_scan_alt_api(stream, "Direct I/O, glommio API, large buffer", 512 << 10).await; let file = BufferedFile::open(&buf_filename).await.unwrap(); random_read(Reader::Buffered(file), "Buffered I/O", random, 50, 4096).await; let file = 
Rc::new(DmaFile::open(&dio_filename).await.unwrap()); random_read(Reader::Direct(file), "Direct I/O", random, 50, 4096).await; let file = Rc::new(DmaFile::open(&dio_filename).await.unwrap()); random_many_read(Reader::Direct(file), "Direct I/O", random, 50, 4096, 0).await; let file = Rc::new(DmaFile::open(&dio_filename).await.unwrap()); random_many_read(Reader::Direct(file), "Direct I/O", random, 50, 4096, 131072).await; let file = BufferedFile::open(&buf_filename).await.unwrap(); random_read(Reader::Buffered(file), "Buffered I/O", file_size, 50, 4096).await; let file = Rc::new(DmaFile::open(&dio_filename).await.unwrap()); random_read(Reader::Direct(file), "Direct I/O", file_size, 50, 4096).await; let file = Rc::new(DmaFile::open(&dio_filename).await.unwrap()); random_many_read(Reader::Direct(file), "Direct I/O", file_size, 50, 4096, 0).await; let file = Rc::new(DmaFile::open(&dio_filename).await.unwrap()); random_many_read( Reader::Direct(file), "Direct I/O", file_size, 50, 4096, 131072, ) .await; }) .unwrap(); local_ex.join().unwrap(); }
30.949657
98
0.537671