hexsha
stringlengths
40
40
size
int64
4
1.05M
content
stringlengths
4
1.05M
avg_line_length
float64
1.33
100
max_line_length
int64
1
1k
alphanum_fraction
float64
0.25
1
fc98e7dade2606c8eaef670790b4f1790607cf37
287
extern crate unicode_segmentation; use unicode_segmentation::UnicodeSegmentation; // Found some useful information for this exercise here: // * https://docs.rs/unicode-reverse/1.0.8/unicode_reverse/ pub fn reverse(input: &str) -> String { input.graphemes(true).rev().collect() }
28.7
61
0.749129
e46df554d045bea5652ceee86c6a734de859375e
15,105
#![feature(default_alloc_error_handler)] #![no_main] #![no_std] extern crate alloc; mod native_device; use native_device::*; use color_thrust_interface::device::*; use color_thrust_interface::params_and_regs::*; use linalg::*; use strugl::*; use xw::{marv, stdio, uart}; use alloc::vec::Vec; use core::fmt::Write; #[no_mangle] fn main() -> ! { let mut c = Context::new(NativeDevice::new()); writeln!(stdio::stdout(), "ready for commands").unwrap(); loop { // TODO: Proper command uart::write_u8(0x02); loop { // TODO: This obviously won't work once NativeDevice is a proper singleton, but it's fine for now! let mut device = NativeDevice::new(); // TODO: Proper command match uart::read_u8() { 0x00 => { // Write word let addr = uart::read_u32_le(); let data = uart::read_u32_le(); device.write_reg(addr, data); } 0x01 => { // Read word let addr = uart::read_u32_le(); let data = device.read_reg(addr); uart::write_u32_le(data); } 0x02 => { // Write tile for i in 0..TILE_DIM * TILE_DIM / 4 { let data = uart::read_u128_le(); device.write_color_buffer_word(i, data); } } 0x03 => { // Read tile for i in 0..TILE_DIM * TILE_DIM / 4 { let data = device.read_color_buffer_word(i); uart::write_u128_le(data); } } 0x04 => { // Rasterize let start_cycles = marv::cycles(); device.write_reg(REG_START_ADDR, 1); // TODO: Proper value while device.read_reg(REG_STATUS_ADDR) != 0 { // TODO: Proper value // Do nothing } let end_cycles = marv::cycles(); let elapsed_cycles = end_cycles - start_cycles; uart::write_u64_le(elapsed_cycles); } 0x05 => { // End frame break; } 0x06 => { // Render and transmit entire frame via strugl fn cube(v: &mut Vec<Vertex>) { let red = Iv4::new(1.0, 0.0, 0.0, 1.0); let green = Iv4::new(0.0, 1.0, 0.0, 1.0); let blue = Iv4::new(0.0, 0.0, 1.0, 1.0); let white = Iv4::splat(1.0); // Front face v.push(Vertex { position: Iv4::new(-1.0, -1.0, 1.0, 1.0), color: red, tex_coord: Iv2::new(0.0, 0.0), }); v.push(Vertex { position: Iv4::new(1.0, -1.0, 1.0, 1.0), color: green, 
tex_coord: Iv2::new(1.0, 0.0), }); v.push(Vertex { position: Iv4::new(1.0, 1.0, 1.0, 1.0), color: white, tex_coord: Iv2::new(1.0, 1.0), }); v.push(Vertex { position: Iv4::new(1.0, 1.0, 1.0, 1.0), color: white, tex_coord: Iv2::new(1.0, 1.0), }); v.push(Vertex { position: Iv4::new(-1.0, 1.0, 1.0, 1.0), color: blue, tex_coord: Iv2::new(0.0, 1.0), }); v.push(Vertex { position: Iv4::new(-1.0, -1.0, 1.0, 1.0), color: red, tex_coord: Iv2::new(0.0, 0.0), }); // Back face v.push(Vertex { position: Iv4::new(1.0, -1.0, -1.0, 1.0), color: red, tex_coord: Iv2::new(0.0, 0.0), }); v.push(Vertex { position: Iv4::new(-1.0, -1.0, -1.0, 1.0), color: green, tex_coord: Iv2::new(1.0, 0.0), }); v.push(Vertex { position: Iv4::new(-1.0, 1.0, -1.0, 1.0), color: white, tex_coord: Iv2::new(1.0, 1.0), }); v.push(Vertex { position: Iv4::new(-1.0, 1.0, -1.0, 1.0), color: white, tex_coord: Iv2::new(1.0, 1.0), }); v.push(Vertex { position: Iv4::new(1.0, 1.0, -1.0, 1.0), color: blue, tex_coord: Iv2::new(0.0, 1.0), }); v.push(Vertex { position: Iv4::new(1.0, -1.0, -1.0, 1.0), color: red, tex_coord: Iv2::new(0.0, 0.0), }); // Left face v.push(Vertex { position: Iv4::new(-1.0, -1.0, -1.0, 1.0), color: red, tex_coord: Iv2::new(0.0, 0.0), }); v.push(Vertex { position: Iv4::new(-1.0, -1.0, 1.0, 1.0), color: green, tex_coord: Iv2::new(1.0, 0.0), }); v.push(Vertex { position: Iv4::new(-1.0, 1.0, 1.0, 1.0), color: white, tex_coord: Iv2::new(1.0, 1.0), }); v.push(Vertex { position: Iv4::new(-1.0, 1.0, 1.0, 1.0), color: white, tex_coord: Iv2::new(1.0, 1.0), }); v.push(Vertex { position: Iv4::new(-1.0, 1.0, -1.0, 1.0), color: blue, tex_coord: Iv2::new(0.0, 1.0), }); v.push(Vertex { position: Iv4::new(-1.0, -1.0, -1.0, 1.0), color: red, tex_coord: Iv2::new(0.0, 0.0), }); // Right face v.push(Vertex { position: Iv4::new(1.0, -1.0, 1.0, 1.0), color: red, tex_coord: Iv2::new(0.0, 0.0), }); v.push(Vertex { position: Iv4::new(1.0, -1.0, -1.0, 1.0), color: green, tex_coord: Iv2::new(1.0, 0.0), }); v.push(Vertex { 
position: Iv4::new(1.0, 1.0, -1.0, 1.0), color: white, tex_coord: Iv2::new(1.0, 1.0), }); v.push(Vertex { position: Iv4::new(1.0, 1.0, -1.0, 1.0), color: white, tex_coord: Iv2::new(1.0, 1.0), }); v.push(Vertex { position: Iv4::new(1.0, 1.0, 1.0, 1.0), color: blue, tex_coord: Iv2::new(0.0, 1.0), }); v.push(Vertex { position: Iv4::new(1.0, -1.0, 1.0, 1.0), color: red, tex_coord: Iv2::new(0.0, 0.0), }); // Top face v.push(Vertex { position: Iv4::new(-1.0, 1.0, 1.0, 1.0), color: red, tex_coord: Iv2::new(0.0, 0.0), }); v.push(Vertex { position: Iv4::new(1.0, 1.0, 1.0, 1.0), color: green, tex_coord: Iv2::new(1.0, 0.0), }); v.push(Vertex { position: Iv4::new(1.0, 1.0, -1.0, 1.0), color: white, tex_coord: Iv2::new(1.0, 1.0), }); v.push(Vertex { position: Iv4::new(1.0, 1.0, -1.0, 1.0), color: white, tex_coord: Iv2::new(1.0, 1.0), }); v.push(Vertex { position: Iv4::new(-1.0, 1.0, -1.0, 1.0), color: blue, tex_coord: Iv2::new(0.0, 1.0), }); v.push(Vertex { position: Iv4::new(-1.0, 1.0, 1.0, 1.0), color: red, tex_coord: Iv2::new(0.0, 0.0), }); // Bottom face v.push(Vertex { position: Iv4::new(-1.0, -1.0, -1.0, 1.0), color: red, tex_coord: Iv2::new(0.0, 0.0), }); v.push(Vertex { position: Iv4::new(1.0, -1.0, -1.0, 1.0), color: green, tex_coord: Iv2::new(1.0, 0.0), }); v.push(Vertex { position: Iv4::new(1.0, -1.0, 1.0, 1.0), color: white, tex_coord: Iv2::new(1.0, 1.0), }); v.push(Vertex { position: Iv4::new(1.0, -1.0, 1.0, 1.0), color: white, tex_coord: Iv2::new(1.0, 1.0), }); v.push(Vertex { position: Iv4::new(-1.0, -1.0, 1.0, 1.0), color: blue, tex_coord: Iv2::new(0.0, 1.0), }); v.push(Vertex { position: Iv4::new(-1.0, -1.0, -1.0, 1.0), color: red, tex_coord: Iv2::new(0.0, 0.0), }); } let mut v = Vec::new(); cube(&mut v); let frame_time = 6.0;//start_time.elapsed().as_secs_f64(); c.clear(); let start_cycles = marv::cycles(); c.depth_test_enable = true; c.depth_write_mask_enable = true; //c.texture = Some(texture.clone()); c.projection = Im4::perspective(90.0, WIDTH as f32 / 
HEIGHT as f32, 1.0, 1000.0); let mut view = Im4::translation(/*-1.0*/0.0, 0.0, -3.0/*-4.0*/); let t = (frame_time * 0.1) as f32; view *= Im4::rotation_x(t * 1.1); view *= Im4::rotation_y(t * 0.47); view *= Im4::rotation_z(t * 0.73); /*let mut v = Vec::new(); let mut model = Im4::identity(); model *= Im4::translation(-0.5, 0.0, 0.0); let t = (frame_time * 0.1) as f32; model *= Im4::rotation_x(t); model *= Im4::rotation_y(t * 0.67); model *= Im4::rotation_z(t * 0.133); c.model_view = view * model; c.texture_filter = TextureFilter::Nearest; cube(&mut v); c.render(&mut v);*/ //let mut rng: Pcg32 = SeedableRng::seed_from_u64(0xfadebabedeadbeef); let mut model = Im4::identity(); //model *= Im4::translation(0.5, 0.0, 0.0); /*let t = (frame_time * 0.2) as f32 + rng.gen::<f32>() * 30.0; model *= Im4::rotation_x(t * 1.1); model *= Im4::rotation_y(t * 0.47); model *= Im4::rotation_z(t * 0.73); model *= Im4::translation(0.0, -0.6 + rng.gen::<f32>() * 1.2, -0.6 + rng.gen::<f32>() * 1.2); model *= Im4::scale(1.0 + rng.gen::<f32>() * 0.5, 0.1 + rng.gen::<f32>() * 0.2, 0.04); model *= Im4::translation(0.5 + rng.gen::<f32>() * 0.5, 0.0, 0.0);*/ c.model_view = view * model; let transparent = false;//rng.gen::<bool>(); if transparent { c.depth_write_mask_enable = false; c.blend_src_factor = BlendSrcFactor::One; c.blend_dst_factor = BlendDstFactor::One; } else { c.depth_write_mask_enable = true; c.blend_src_factor = BlendSrcFactor::One; c.blend_dst_factor = BlendDstFactor::Zero; } c.render(&mut v); let end_cycles = marv::cycles(); let elapsed_cycles = end_cycles - start_cycles; // TODO: Proper command uart::write_u8(0x03); uart::write_u64_le(elapsed_cycles); for y in 0..HEIGHT { for x in 0..WIDTH { uart::write_u32_le(c.back_buffer[y * WIDTH + x]); } } break; } command => { panic!("unrecognized command: 0x{:02x}", command); } } } } }
40.934959
113
0.322807
e2a7d614405478cc51f03fc433b4403ccf178598
12,216
// Copyright (c) The Libra Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::block_info::{BlockInfo, Round}; use crate::{ account_address::AccountAddress, transaction::Version, validator_set::ValidatorSet, validator_verifier::{ValidatorVerifier, VerifyError}, }; use failure::prelude::*; use libra_crypto::{ hash::{CryptoHash, CryptoHasher}, HashValue, *, }; use libra_crypto_derive::CryptoHasher; #[cfg(any(test, feature = "fuzzing"))] use proptest_derive::Arbitrary; use serde::{Deserialize, Serialize}; use std::collections::BTreeMap; use std::{ convert::{TryFrom, TryInto}, fmt::{Display, Formatter}, }; /// This structure serves a dual purpose. /// /// First, if this structure is signed by 2f+1 validators it signifies the state of the ledger at /// version `version` -- it contains the transaction accumulator at that version which commits to /// all historical transactions. This structure may be expanded to include other information that /// is derived from that accumulator (e.g. the current time according to the time contract) to /// reduce the number of proofs a client must get. /// /// Second, the structure contains a `consensus_data_hash` value. This is the hash of an internal /// data structure that represents a block that is voted on in HotStuff. If 2f+1 signatures are /// gathered on the same ledger info that represents a Quorum Certificate (QC) on the consensus /// data. /// /// Combining these two concepts, when a validator votes on a block, B it votes for a /// LedgerInfo with the `version` being the latest version that will be committed if B gets 2f+1 /// votes. It sets `consensus_data_hash` to represent B so that if those 2f+1 votes are gathered a /// QC is formed on B. 
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, CryptoHasher)] #[cfg_attr(any(test, feature = "fuzzing"), derive(Arbitrary))] pub struct LedgerInfo { commit_info: BlockInfo, /// Hash of consensus specific data that is opaque to all parts of the system other than /// consensus. consensus_data_hash: HashValue, } impl Display for LedgerInfo { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { write!(f, "LedgerInfo: [commit_info: {}]", self.commit_info()) } } impl LedgerInfo { /// Constructs a `LedgerInfo` object based on the given commit info and vote data hash. pub fn new(commit_info: BlockInfo, consensus_data_hash: HashValue) -> Self { Self { commit_info, consensus_data_hash, } } /// The `BlockInfo` of a committed block. pub fn commit_info(&self) -> &BlockInfo { &self.commit_info } /// A series of wrapper functions for the data stored in the commit info. For the detailed /// information, please refer to `BlockInfo` pub fn epoch(&self) -> u64 { self.commit_info.epoch() } pub fn round(&self) -> Round { self.commit_info.round() } pub fn consensus_block_id(&self) -> HashValue { self.commit_info.id() } pub fn transaction_accumulator_hash(&self) -> HashValue { self.commit_info.executed_state_id() } pub fn version(&self) -> Version { self.commit_info.version() } /// A ledger info is nominal if it's not certifying any real version. pub fn is_zero(&self) -> bool { self.version() == 0 } pub fn timestamp_usecs(&self) -> u64 { self.commit_info.timestamp_usecs() } pub fn next_validator_set(&self) -> Option<&ValidatorSet> { self.commit_info.next_validator_set() } /// Returns hash of consensus voting data in this `LedgerInfo`. pub fn consensus_data_hash(&self) -> HashValue { self.consensus_data_hash } pub fn set_consensus_data_hash(&mut self, consensus_data_hash: HashValue) { self.consensus_data_hash = consensus_data_hash; } /// To bootstrap the system until we execute and commit the genesis txn before start. 
#[cfg(any(test, feature = "fuzzing"))] pub fn genesis() -> Self { Self::new(BlockInfo::genesis(), HashValue::zero()) } } impl TryFrom<crate::proto::types::LedgerInfo> for LedgerInfo { type Error = Error; fn try_from(proto: crate::proto::types::LedgerInfo) -> Result<Self> { let version = proto.version; let transaction_accumulator_hash = HashValue::from_slice(&proto.transaction_accumulator_hash)?; let consensus_data_hash = HashValue::from_slice(&proto.consensus_data_hash)?; let consensus_block_id = HashValue::from_slice(&proto.consensus_block_id)?; let epoch = proto.epoch; let round = proto.round; let timestamp_usecs = proto.timestamp_usecs; let next_validator_set = if let Some(validator_set_proto) = proto.next_validator_set { Some(ValidatorSet::try_from(validator_set_proto)?) } else { None }; Ok(LedgerInfo::new( BlockInfo::new( epoch, round, consensus_block_id, transaction_accumulator_hash, version, timestamp_usecs, next_validator_set, ), consensus_data_hash, )) } } impl From<LedgerInfo> for crate::proto::types::LedgerInfo { fn from(ledger_info: LedgerInfo) -> Self { Self { version: ledger_info.version(), transaction_accumulator_hash: ledger_info.transaction_accumulator_hash().to_vec(), consensus_data_hash: ledger_info.consensus_data_hash().to_vec(), consensus_block_id: ledger_info.consensus_block_id().to_vec(), epoch: ledger_info.epoch(), round: ledger_info.round(), timestamp_usecs: ledger_info.timestamp_usecs(), next_validator_set: ledger_info.next_validator_set().cloned().map(Into::into), } } } impl CryptoHash for LedgerInfo { type Hasher = LedgerInfoHasher; fn hash(&self) -> HashValue { let mut state = Self::Hasher::default(); state.write(&lcs::to_bytes(self).expect("Serialization should work.")); state.finish() } } /// The validator node returns this structure which includes signatures /// from validators that confirm the state. 
The client needs to only pass back /// the LedgerInfo element since the validator node doesn't need to know the signatures /// again when the client performs a query, those are only there for the client /// to be able to verify the state #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] pub struct LedgerInfoWithSignatures<Sig> { ledger_info: LedgerInfo, /// The validator is identified by its account address: in order to verify a signature /// one needs to retrieve the public key of the validator for the given epoch. signatures: BTreeMap<AccountAddress, Sig>, } impl<Sig> Display for LedgerInfoWithSignatures<Sig> { fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { write!(f, "{}", self.ledger_info) } } impl<Sig: Signature> LedgerInfoWithSignatures<Sig> { pub fn new(ledger_info: LedgerInfo, signatures: BTreeMap<AccountAddress, Sig>) -> Self { LedgerInfoWithSignatures { ledger_info, signatures, } } pub fn ledger_info(&self) -> &LedgerInfo { &self.ledger_info } pub fn add_signature(&mut self, validator: AccountAddress, signature: Sig) { self.signatures.entry(validator).or_insert(signature); } pub fn remove_signature(&mut self, validator: AccountAddress) { self.signatures.remove(&validator); } pub fn signatures(&self) -> &BTreeMap<AccountAddress, Sig> { &self.signatures } pub fn verify( &self, validator: &ValidatorVerifier<Sig::VerifyingKeyMaterial>, ) -> ::std::result::Result<(), VerifyError> { if self.ledger_info.is_zero() { // We're not trying to verify nominal ledger info that does not carry any information. return Ok(()); } let ledger_hash = self.ledger_info().hash(); validator.batch_verify_aggregated_signature(ledger_hash, self.signatures()) } } impl<Sig: Signature> TryFrom<crate::proto::types::LedgerInfoWithSignatures> for LedgerInfoWithSignatures<Sig> { type Error = Error; fn try_from(proto: crate::proto::types::LedgerInfoWithSignatures) -> Result<Self> { let ledger_info = proto .ledger_info .ok_or_else(|| format_err!("Missing ledger_info"))? 
.try_into()?; let signatures_proto = proto.signatures; let num_signatures = signatures_proto.len(); let signatures = signatures_proto .into_iter() .map(|proto| { let validator_id = AccountAddress::try_from(proto.validator_id)?; let signature_bytes: &[u8] = proto.signature.as_ref(); let signature = Sig::try_from(signature_bytes)?; Ok((validator_id, signature)) }) .collect::<Result<BTreeMap<_, _>>>()?; ensure!( signatures.len() == num_signatures, "Signatures should be from different validators." ); Ok(LedgerInfoWithSignatures { ledger_info, signatures, }) } } impl<Sig: Signature> From<LedgerInfoWithSignatures<Sig>> for crate::proto::types::LedgerInfoWithSignatures { fn from(ledger_info_with_sigs: LedgerInfoWithSignatures<Sig>) -> Self { let ledger_info = Some(ledger_info_with_sigs.ledger_info.into()); let signatures = ledger_info_with_sigs .signatures .into_iter() .map( |(validator_id, signature)| crate::proto::types::ValidatorSignature { validator_id: validator_id.to_vec(), signature: signature.to_bytes().to_vec(), }, ) .collect(); Self { signatures, ledger_info, } } } #[cfg(test)] mod tests { use crate::block_info::BlockInfo; use crate::ledger_info::{LedgerInfo, LedgerInfoWithSignatures}; use crate::validator_signer::ValidatorSigner; use libra_crypto::{ed25519::*, HashValue}; use std::collections::BTreeMap; #[test] fn test_signatures_hash() { let ledger_info = LedgerInfo::new(BlockInfo::empty(), HashValue::zero()); let random_hash = HashValue::random(); const NUM_SIGNERS: u8 = 7; // Generate NUM_SIGNERS random signers. 
let validator_signers: Vec<ValidatorSigner<Ed25519PrivateKey>> = (0..NUM_SIGNERS) .map(|i| ValidatorSigner::random([i; 32])) .collect(); let mut author_to_signature_map = BTreeMap::new(); for validator in validator_signers.iter() { author_to_signature_map.insert( validator.author(), validator.sign_message(random_hash).unwrap(), ); } let ledger_info_with_signatures = LedgerInfoWithSignatures::new(ledger_info.clone(), author_to_signature_map); // Add the signatures in reverse order and ensure the serialization matches let mut author_to_signature_map = BTreeMap::new(); for validator in validator_signers.iter().rev() { author_to_signature_map.insert( validator.author(), validator.sign_message(random_hash).unwrap(), ); } let ledger_info_with_signatures_reversed = LedgerInfoWithSignatures::new(ledger_info.clone(), author_to_signature_map); let ledger_info_with_signatures_bytes = lcs::to_bytes(&ledger_info_with_signatures).expect("block serialization failed"); let ledger_info_with_signatures_reversed_bytes = lcs::to_bytes(&ledger_info_with_signatures_reversed) .expect("block serialization failed"); assert_eq!( ledger_info_with_signatures_bytes, ledger_info_with_signatures_reversed_bytes ); } }
35.103448
98
0.646038
76dd234ecf5fd08ec8cca153911514ae9380e0bc
984
use wasm_bindgen::prelude::*; use crate::hash_primitive::HashPrimitive; static MAX_DEFAULT: usize = 1000000; #[wasm_bindgen] pub struct Pbkdf2Parameters { pub primitive: HashPrimitive, pub iterations: usize, } #[wasm_bindgen] pub fn primitive_name(p: HashPrimitive) -> String { String::from(p.name()) } #[wasm_bindgen] pub fn identify_all( password: &[u8], hash: &[u8], salt: &[u8], max: Option<usize>, ) -> Option<Pbkdf2Parameters> { match crate::identify_all(password, hash, salt, Some(max.unwrap_or(MAX_DEFAULT))) { None => None, Some((primitive, iterations)) => { Some(Pbkdf2Parameters { primitive, iterations, }) } } } #[wasm_bindgen] pub fn identify_iterations( password: &[u8], hash: &[u8], salt: &[u8], primitive: HashPrimitive, max: Option<usize>, ) -> Option<usize> { primitive.get_identifier()(password, hash, salt, max) }
21.391304
87
0.610772
035477e9b2d99a9edca78559aaa4b13ff2c7cf54
2,012
#![cfg(all( any(feature = "2d", feature = "3d"), not(all(feature = "2d", feature = "3d")), ))] use bevy::core::CorePlugin; use bevy::prelude::*; use bevy::reflect::TypeRegistryArc; use heron_core::{CollisionShape, PhysicMaterial, PhysicsSteps, RigidBody}; use heron_rapier::rapier::geometry::ColliderSet; use heron_rapier::RapierPlugin; use std::time::Duration; fn test_app() -> App { let mut builder = App::build(); builder .init_resource::<TypeRegistryArc>() .insert_resource(PhysicsSteps::every_frame(Duration::from_secs(1))) .add_plugin(CorePlugin) .add_plugin(RapierPlugin); builder.app } #[test] fn restitution_can_be_defined_when_creating_body() { let mut app = test_app(); let restitution = 0.42; let entity = app .world .spawn() .insert_bundle(( GlobalTransform::default(), RigidBody::Dynamic, CollisionShape::Sphere { radius: 10.0 }, PhysicMaterial { restitution, ..Default::default() }, )) .id(); app.update(); let colliders = app.world.get_resource::<ColliderSet>().unwrap(); let collider = colliders.get(*app.world.get(entity).unwrap()).unwrap(); assert_eq!(restitution, collider.restitution) } #[test] fn restitution_can_be_updated() { let mut app = test_app(); let entity = app .world .spawn() .insert_bundle(( GlobalTransform::default(), RigidBody::Dynamic, CollisionShape::Sphere { radius: 10.0 }, )) .id(); app.update(); let restitution = 2.0; app.world.entity_mut(entity).insert(PhysicMaterial { restitution, ..Default::default() }); app.update(); let colliders = app.world.get_resource::<ColliderSet>().unwrap(); let collider = colliders.get(*app.world.get(entity).unwrap()).unwrap(); assert_eq!(restitution, collider.restitution) }
24.839506
75
0.602386
219d4c73581967f351535a95ee78a4f19360bd8b
6,244
use std::fmt; use std::str::FromStr; #[macro_export] macro_rules! asm { (@$label:expr) => { $crate::asm::Instruction::A($crate::asm::Load::from($label)) }; ($dest:ident = $comp:expr) => { $crate::asm::Instruction::C( Some($crate::asm::Dest::$dest), stringify!($comp).parse().unwrap(), None, ) }; ($comp:expr ; $jump:ident) => { $crate::asm::Instruction::C( None, stringify!($comp).parse().unwrap(), Some($crate::asm::Jump::$jump), ) }; (($label:expr)) => { $crate::asm::Instruction::Label(String::from($label)) }; } #[derive(Clone, Debug, Eq, PartialEq)] pub enum Instruction { A(Load), C(Option<Dest>, Comp, Option<Jump>), Label(String), } impl fmt::Display for Instruction { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Instruction::A(load) => write!(f, "@{}", load), Instruction::C(Some(dest), comp, Some(jump)) => write!(f, "{}={};{}", dest, comp, jump), Instruction::C(Some(dest), comp, None) => write!(f, "{}={}", dest, comp), Instruction::C(None, comp, Some(jump)) => write!(f, "{};{}", comp, jump), Instruction::C(None, comp, None) => write!(f, "{}", comp), Instruction::Label(s) => write!(f, "({})", s), } } } #[derive(Clone, Debug, Eq, PartialEq)] pub enum Load { Constant(u16), Symbol(String), } impl From<u16> for Load { fn from(n: u16) -> Self { Load::Constant(n) } } impl From<&str> for Load { fn from(sym: &str) -> Self { Load::Symbol(sym.to_owned()) } } impl From<&String> for Load { fn from(sym: &String) -> Self { Load::Symbol(sym.clone()) } } impl From<String> for Load { fn from(sym: String) -> Self { Load::Symbol(sym) } } impl fmt::Display for Load { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Load::Constant(n) => write!(f, "{}", n), Load::Symbol(s) => write!(f, "{}", s), } } } #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub enum Dest { M, D, MD, A, AM, AD, AMD, } impl fmt::Display for Dest { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Dest::M => write!(f, "M"), Dest::D => write!(f, 
"D"), Dest::MD => write!(f, "MD"), Dest::A => write!(f, "A"), Dest::AM => write!(f, "AM"), Dest::AD => write!(f, "AD"), Dest::AMD => write!(f, "AMD"), } } } #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub enum Comp { Zero, One, NegOne, D, A, NotD, NotA, NegD, NegA, DPlusOne, APlusOne, DMinusOne, AMinusOne, DPlusA, DMinusA, AMinusD, DAndA, DOrA, M, NotM, NegM, MPlusOne, MMinusOne, DPlusM, DMinusM, MMinusD, DAndM, DOrM, } impl fmt::Display for Comp { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Comp::Zero => write!(f, "0"), Comp::One => write!(f, "1"), Comp::NegOne => write!(f, "-1"), Comp::D => write!(f, "D"), Comp::A => write!(f, "A"), Comp::NotD => write!(f, "!D"), Comp::NotA => write!(f, "!A"), Comp::NegD => write!(f, "-D"), Comp::NegA => write!(f, "-A"), Comp::DPlusOne => write!(f, "D+1"), Comp::APlusOne => write!(f, "A+1"), Comp::DMinusOne => write!(f, "D-1"), Comp::AMinusOne => write!(f, "A-1"), Comp::DPlusA => write!(f, "D+A"), Comp::DMinusA => write!(f, "D-A"), Comp::AMinusD => write!(f, "A-D"), Comp::DAndA => write!(f, "D&A"), Comp::DOrA => write!(f, "D|A"), Comp::M => write!(f, "M"), Comp::NotM => write!(f, "!M"), Comp::NegM => write!(f, "-M"), Comp::MPlusOne => write!(f, "M+1"), Comp::MMinusOne => write!(f, "M-1"), Comp::DPlusM => write!(f, "D+M"), Comp::DMinusM => write!(f, "D-M"), Comp::MMinusD => write!(f, "M-D"), Comp::DAndM => write!(f, "D&M"), Comp::DOrM => write!(f, "D|M"), } } } impl FromStr for Comp { type Err = (); fn from_str(s: &str) -> Result<Self, Self::Err> { match &s.replace(' ', "")[..] 
{ "0" => Ok(Comp::Zero), "1" => Ok(Comp::One), "-1" => Ok(Comp::NegOne), "D" => Ok(Comp::D), "A" => Ok(Comp::A), "!D" => Ok(Comp::NotD), "!A" => Ok(Comp::NotA), "-D" => Ok(Comp::NegD), "-A" => Ok(Comp::NegA), "D+1" => Ok(Comp::DPlusOne), "A+1" => Ok(Comp::APlusOne), "D-1" => Ok(Comp::DMinusOne), "A-1" => Ok(Comp::AMinusOne), "D+A" => Ok(Comp::DPlusA), "D-A" => Ok(Comp::DMinusA), "A-D" => Ok(Comp::AMinusD), "D&A" => Ok(Comp::DAndA), "D|A" => Ok(Comp::DOrA), "M" => Ok(Comp::M), "!M" => Ok(Comp::NotM), "-M" => Ok(Comp::NegM), "M+1" => Ok(Comp::MPlusOne), "M-1" => Ok(Comp::MMinusOne), "D+M" => Ok(Comp::DPlusM), "D-M" => Ok(Comp::DMinusM), "M-D" => Ok(Comp::MMinusD), "D&M" => Ok(Comp::DAndM), "D|M" => Ok(Comp::DOrM), _ => Err(()), } } } #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub enum Jump { JGT, JEQ, JGE, JLT, JNE, JLE, JMP, } impl fmt::Display for Jump { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Jump::JGT => write!(f, "JGT"), Jump::JEQ => write!(f, "JEQ"), Jump::JGE => write!(f, "JGE"), Jump::JLT => write!(f, "JLT"), Jump::JNE => write!(f, "JNE"), Jump::JLE => write!(f, "JLE"), Jump::JMP => write!(f, "JMP"), } } }
25.908714
100
0.430173
acd21f65e722b3174d99ae88644697fcb5f6e6b4
589
//! [Problem 10 - Project Euler](https://projecteuler.net/problem=10) //! //! The sum of the primes below 10 is 2 + 3 + 5 + 7 = 17. //! //! ```rust //! # use project_euler::p010_summation_of_primes::*; //! assert_eq!(compute(10), 17) //! ``` //! //! Find the sum of all the primes below two million. //! //! ```rust //! # use project_euler::p010_summation_of_primes::*; //! assert_eq!(compute(2_000_000), 142_913_828_922); //! ``` use super::util; pub fn compute(max_exclusive: u32) -> u64 { util::primes() .take_while(|prime| *prime < max_exclusive as u64) .sum() }
24.541667
69
0.616299
678e981f07c4193d5773b99c08c50a37815dcfcf
1,792
const TIME_STEP: f32 = 0.01; const GRAV_CONST: f32 = 6.67384e-11; #[derive(Copy, Clone, Debug, PartialEq)] struct Coord { x: f32, y: f32, } impl Coord { pub fn new() -> Self { Self{ x: 0.0, y: 0.0} } pub fn from_xy(x: f32, y: f32) -> Self { Self{x,y} } } impl std::ops::Add for Coord { type Output = Self; fn add(self, other: Self) -> Self { Self { x: self.x + other.x, y: self.y + other.y, } } } impl std::ops::AddAssign for Coord { fn add_assign(&mut self, other: Self) { self.x += other.x; self.y += other.y; } } struct Entity { pos: Coord, mass: f32, radius: f32, speed: Coord, accel: Coord, } struct Universe { content: Vec<Entity>, } impl Universe { pub fn simulate_st(&mut self) { for i in 0..self.content.len() { let mut accel = Coord::new(); let this = &self.content[i]; for j in 0..self.content.len() { if i == j { continue; } let other = &self.content[j]; let dx = other.pos.x - this.pos.x; let dy = other.pos.y - this.pos.y; let dist = (dx.powf(2.0) + dy.powf(2.0)).sqrt(); let force = GRAV_CONST * other.mass * dist.powf(-2.0); accel.x += force * (dx / dist); accel.y += force * (dy / dist); } self.content[i].accel = accel; } for this in &mut self.content { this.speed += this.accel; this.pos += this.speed; } } } fn main() { let mut u = Universe { content: vec![], }; u.simulate_st(); println!("G = {}", GRAV_CONST); }
22.683544
70
0.464844
fcb0b09c98b380179c396d731fdd21aacec96350
681
// test2.rs // This is a test for the following sections: // - Tests // This test isn't testing our function -- make it do that in such a way that // the test passes. Then write a second test that tests that we get the result // we expect to get when we call `times_two` with a negative number. // No hints, you can do this :) pub fn times_two(num: i32) -> i32 { num * 2 } #[cfg(test)] mod tests { use super::*; #[test] fn returns_twice_of_positive_numbers() { assert_eq!(times_two(4), 8); } #[test] fn returns_twice_of_negative_numbers() { // TODO write an assert for `times_two(-4)` assert_eq!(-8, times_two(-4)); } }
23.482759
78
0.631424
deee58ce4ae344a763ad915e1058ea9065793570
1,908
extern crate alloc; use alloc::rc::Rc; use core::cell::{Ref, RefCell}; use std::fs::read; use wasmtime_api::*; #[test] fn test_import_calling_export() { struct Callback { pub other: RefCell<Option<HostRef<Func>>>, } impl Callable for Callback { fn call(&self, _params: &[Val], _results: &mut [Val]) -> Result<(), HostRef<Trap>> { self.other .borrow() .as_ref() .expect("expected a function ref") .borrow() .call(&[]) .expect("expected function not to trap"); Ok(()) } } let engine = HostRef::new(Engine::new(Config::default())); let store = HostRef::new(Store::new(engine)); let module = HostRef::new( Module::new( store.clone(), &read("tests/import_calling_export.wasm").expect("failed to read wasm file"), ) .expect("failed to create module"), ); let callback = Rc::new(Callback { other: RefCell::new(None), }); let callback_func = HostRef::new(Func::new( store.clone(), FuncType::new(Box::new([]), Box::new([])), callback.clone(), )); let imports = vec![callback_func.into()]; let instance = HostRef::new( Instance::new(store.clone(), module, imports.as_slice()) .expect("failed to instantiate module"), ); let exports = Ref::map(instance.borrow(), |instance| instance.exports()); assert!(!exports.is_empty()); let run_func = exports[0] .func() .expect("expected a run func in the module"); *callback.other.borrow_mut() = Some( exports[1] .func() .expect("expected an other func in the module") .clone(), ); run_func .borrow() .call(&[]) .expect("expected function not to trap"); }
26.5
92
0.534591
0e68e681f53b5d35105ba3ee92826aa84ffe7203
750
use crate::types::Product; use anyhow::Error; use yew::callback::Callback; use yew::format::{Json, Nothing}; use yew::services::fetch::{FetchService, FetchTask, Request, Response}; pub type FetchResponse<T> = Response<Json<Result<T, Error>>>; type FetchCallback<T> = Callback<FetchResponse<T>>; pub fn get_products(callback: FetchCallback<Vec<Product>>) -> FetchTask { let req = Request::get("/products/products.json") .body(Nothing) .unwrap(); FetchService::fetch(req, callback).unwrap() } pub fn get_product(id: i32, callback: FetchCallback<Product>) -> FetchTask { let req = Request::get(format!("/products/{}.json", id)) .body(Nothing) .unwrap(); FetchService::fetch(req, callback).unwrap() }
30
76
0.677333
71787518bdd891b9ab32258909fffd656406b1df
8,564
use std::collections::HashMap; use serde_json::Value; use crate::http::AttachmentType; use crate::model::channel::MessageFlags; /// A builder to create the inner content of a [`Webhook`]'s execution. /// /// This is a structured way of cleanly creating the inner execution payload, /// to reduce potential argument counts. /// /// Refer to the documentation for [`execute_webhook`] on restrictions with /// execution payloads and its fields. /// /// # Examples /// /// Creating two embeds, and then sending them as part of the delivery /// payload of [`Webhook::execute`]: /// /// ```rust,no_run /// use serenity::http::Http; /// use serenity::model::channel::Embed; /// use serenity::utils::Colour; /// /// # async fn run() -> Result<(), Box<dyn std::error::Error>> { /// # let http = Http::default(); /// let id = 245037420704169985; /// let token = "ig5AO-wdVWpCBtUUMxmgsWryqgsW3DChbKYOINftJ4DCrUbnkedoYZD0VOH1QLr-S3sV"; /// let webhook = http.get_webhook_with_token(id, token).await?; /// /// let website = Embed::fake(|e| { /// e.title("The Rust Language Website") /// .description("Rust is a systems programming language.") /// .colour(Colour::from_rgb(222, 165, 132)) /// }); /// /// let resources = Embed::fake(|e| { /// e.title("Rust Resources") /// .description("A few resources to help with learning Rust") /// .colour(0xDEA584) /// .field("The Rust Book", "A comprehensive resource for Rust.", false) /// .field("Rust by Example", "A collection of Rust examples", false) /// }); /// /// webhook /// .execute(&http, false, |w| { /// w.content("Here's some information on Rust:").embeds(vec![website, resources]) /// }) /// .await?; /// # Ok(()) /// # } /// ``` /// /// [`Webhook`]: crate::model::webhook::Webhook /// [`Webhook::execute`]: crate::model::webhook::Webhook::execute /// [`execute_webhook`]: crate::http::client::Http::execute_webhook #[derive(Clone, Debug)] pub struct ExecuteWebhook<'a>(pub HashMap<&'static str, Value>, pub Vec<AttachmentType<'a>>); impl<'a> 
ExecuteWebhook<'a> { /// Override the default avatar of the webhook with an image URL. /// /// # Examples /// /// Overriding the default avatar: /// /// ```rust,no_run /// # use serenity::http::Http; /// # /// # async fn run() -> Result<(), Box<dyn std::error::Error>> { /// # let http = Http::default(); /// # let webhook = http.get_webhook_with_token(0, "").await?; /// # /// let avatar_url = "https://i.imgur.com/KTs6whd.jpg"; /// /// webhook.execute(&http, false, |w| w.avatar_url(avatar_url).content("Here's a webhook")).await?; /// # Ok(()) /// # } /// ``` pub fn avatar_url<S: ToString>(&mut self, avatar_url: S) -> &mut Self { self.0.insert("avatar_url", Value::String(avatar_url.to_string())); self } /// Set the content of the message. /// /// Note that when setting at least one embed via [`Self::embeds`], this may be /// omitted. /// /// # Examples /// /// Sending a webhook with a content of `"foo"`: /// /// ```rust,no_run /// # use serenity::http::Http; /// # /// # async fn run() -> Result<(), Box<dyn std::error::Error>> { /// # let http = Http::default(); /// # let webhook = http.get_webhook_with_token(0, "").await?; /// # /// let execution = webhook.execute(&http, false, |w| w.content("foo")).await; /// /// if let Err(why) = execution { /// println!("Err sending webhook: {:?}", why); /// } /// # Ok(()) /// # } /// ``` pub fn content<S: ToString>(&mut self, content: S) -> &mut Self { self.0.insert("content", Value::String(content.to_string())); self } /// Appends a file to the webhook message. pub fn add_file<T: Into<AttachmentType<'a>>>(&mut self, file: T) -> &mut Self { self.1.push(file.into()); self } /// Appends a list of files to the webhook message. pub fn add_files<T: Into<AttachmentType<'a>>, It: IntoIterator<Item = T>>( &mut self, files: It, ) -> &mut Self { self.1.extend(files.into_iter().map(|f| f.into())); self } /// Sets a list of files to include in the webhook message. /// /// Calling this multiple times will overwrite the file list. 
/// To append files, call [`Self::add_file`] or [`Self::add_files`] instead. pub fn files<T: Into<AttachmentType<'a>>, It: IntoIterator<Item = T>>( &mut self, files: It, ) -> &mut Self { self.1 = files.into_iter().map(|f| f.into()).collect(); self } /// Set the embeds associated with the message. /// /// This should be used in combination with [`Embed::fake`], creating one /// or more fake embeds to send to the API. /// /// # Examples /// /// Refer to the [struct-level documentation] for an example on how to use /// embeds. /// /// [`Embed::fake`]: crate::model::channel::Embed::fake /// [`Webhook::execute`]: crate::model::webhook::Webhook::execute /// [struct-level documentation]: #examples pub fn embeds(&mut self, embeds: Vec<Value>) -> &mut Self { self.0.insert("embeds", Value::Array(embeds)); self } /// Whether the message is a text-to-speech message. /// /// # Examples /// /// Sending a webhook with text-to-speech enabled: /// /// ```rust,no_run /// # use serenity::http::Http; /// # /// # async fn run() -> Result<(), Box<dyn std::error::Error>> { /// # let http = Http::default(); /// # let webhook = http.get_webhook_with_token(0, "").await?; /// # /// let execution = webhook.execute(&http, false, |w| w.content("hello").tts(true)).await; /// /// if let Err(why) = execution { /// println!("Err sending webhook: {:?}", why); /// } /// # Ok(()) /// # } /// ``` pub fn tts(&mut self, tts: bool) -> &mut Self { self.0.insert("tts", Value::Bool(tts)); self } /// Override the default username of the webhook. 
/// /// # Examples /// /// Overriding the username to `"hakase"`: /// /// ```rust,no_run /// # use serenity::http::Http; /// # /// # async fn run() -> Result<(), Box<dyn std::error::Error>> { /// # let http = Http::default(); /// # let webhook = http.get_webhook_with_token(0, "").await?; /// # /// let execution = webhook.execute(&http, false, |w| w.content("hello").username("hakase")).await; /// /// if let Err(why) = execution { /// println!("Err sending webhook: {:?}", why); /// } /// # Ok(()) /// # } /// ``` pub fn username<S: ToString>(&mut self, username: S) -> &mut Self { self.0.insert("username", Value::String(username.to_string())); self } /// Sets the flags for the message. /// /// # Examples /// /// Supressing an embed on the message. /// /// ```rust,no_run /// # use serenity::http::Http; /// # use serenity::model::channel::MessageFlags; /// # /// # async fn run() -> Result<(), Box<dyn std::error::Error>> { /// # let http = Http::default(); /// # let webhook = http.get_webhook_with_token(0, "").await?; /// # /// let execution = webhook /// .execute(&http, false, |w| { /// w.content("https://docs.rs/serenity/latest/serenity/") /// .flags(MessageFlags::SUPPRESS_EMBEDS) /// }) /// .await; /// /// if let Err(why) = execution { /// println!("Err sending webhook: {:?}", why); /// } /// # Ok(()) /// # } /// ``` pub fn flags(&mut self, flags: MessageFlags) -> &mut Self { self.0.insert("flags", Value::Number(serde_json::Number::from(flags.bits))); self } } impl<'a> Default for ExecuteWebhook<'a> { /// Returns a default set of values for a [`Webhook`] execution. /// /// The only default value is [`Self::tts`] being set to `false`. 
/// /// # Examples /// /// Creating an [`ExecuteWebhook`] builder: /// /// ```rust /// use serenity::builder::ExecuteWebhook; /// /// let executor = ExecuteWebhook::default(); /// ``` /// /// [`Webhook`]: crate::model::webhook::Webhook fn default() -> ExecuteWebhook<'a> { let mut map = HashMap::new(); map.insert("tts", Value::Bool(false)); ExecuteWebhook(map, vec![]) } }
31.955224
103
0.549159
509b3fa98e7e52aa2202adfbcbbc056db6de0b98
16,107
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::SC3 { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = "Possible values of the field `AVGS`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum AVGSR { #[doc = "4 samples averaged."] _00, #[doc = "8 samples averaged."] _01, #[doc = "16 samples averaged."] _10, #[doc = "32 samples averaged."] _11, } impl AVGSR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u8 { match *self { AVGSR::_00 => 0, AVGSR::_01 => 1, AVGSR::_10 => 2, AVGSR::_11 => 3, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: u8) -> AVGSR { match value { 0 => AVGSR::_00, 1 => AVGSR::_01, 2 => AVGSR::_10, 3 => AVGSR::_11, _ => unreachable!(), } } #[doc = "Checks if the value of the field is `_00`"] #[inline] pub fn is_00(&self) -> bool { *self == AVGSR::_00 } #[doc = "Checks if the value of the field is `_01`"] #[inline] pub fn is_01(&self) -> bool { *self == AVGSR::_01 } #[doc = "Checks if the value of the field is `_10`"] #[inline] pub fn is_10(&self) -> bool { *self == AVGSR::_10 } #[doc = "Checks if the value of the field is `_11`"] #[inline] pub fn is_11(&self) -> bool { *self == AVGSR::_11 } } #[doc = "Possible values of the field `AVGE`"] #[derive(Clone, Copy, 
Debug, PartialEq)] pub enum AVGER { #[doc = "Hardware average function disabled."] _0, #[doc = "Hardware average function enabled."] _1, } impl AVGER { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { AVGER::_0 => false, AVGER::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> AVGER { match value { false => AVGER::_0, true => AVGER::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == AVGER::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == AVGER::_1 } } #[doc = "Possible values of the field `ADCO`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum ADCOR { #[doc = "One conversion or one set of conversions if the hardware average function is enabled, that is, AVGE=1, after initiating a conversion."] _0, #[doc = "Continuous conversions or sets of conversions if the hardware average function is enabled, that is, AVGE=1, after initiating a conversion."] _1, } impl ADCOR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { ADCOR::_0 => false, ADCOR::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> ADCOR { match value { false => ADCOR::_0, true => ADCOR::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == ADCOR::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) 
-> bool { *self == ADCOR::_1 } } #[doc = "Possible values of the field `CALF`"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum CALFR { #[doc = "Calibration completed normally."] _0, #[doc = "Calibration failed. ADC accuracy specifications are not guaranteed."] _1, } impl CALFR { #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { match *self { CALFR::_0 => false, CALFR::_1 => true, } } #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _from(value: bool) -> CALFR { match value { false => CALFR::_0, true => CALFR::_1, } } #[doc = "Checks if the value of the field is `_0`"] #[inline] pub fn is_0(&self) -> bool { *self == CALFR::_0 } #[doc = "Checks if the value of the field is `_1`"] #[inline] pub fn is_1(&self) -> bool { *self == CALFR::_1 } } #[doc = r" Value of the field"] pub struct CALR { bits: bool, } impl CALR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = "Values that can be written to the field `AVGS`"] pub enum AVGSW { #[doc = "4 samples averaged."] _00, #[doc = "8 samples averaged."] _01, #[doc = "16 samples averaged."] _10, #[doc = "32 samples averaged."] _11, } impl AVGSW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> u8 { match *self { AVGSW::_00 => 0, AVGSW::_01 => 1, AVGSW::_10 => 2, AVGSW::_11 => 3, } } } #[doc = r" Proxy"] pub struct _AVGSW<'a> { w: &'a mut W, } impl<'a> _AVGSW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: AVGSW) -> &'a mut W { { 
self.bits(variant._bits()) } } #[doc = "4 samples averaged."] #[inline] pub fn _00(self) -> &'a mut W { self.variant(AVGSW::_00) } #[doc = "8 samples averaged."] #[inline] pub fn _01(self) -> &'a mut W { self.variant(AVGSW::_01) } #[doc = "16 samples averaged."] #[inline] pub fn _10(self) -> &'a mut W { self.variant(AVGSW::_10) } #[doc = "32 samples averaged."] #[inline] pub fn _11(self) -> &'a mut W { self.variant(AVGSW::_11) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bits(self, value: u8) -> &'a mut W { const MASK: u8 = 3; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `AVGE`"] pub enum AVGEW { #[doc = "Hardware average function disabled."] _0, #[doc = "Hardware average function enabled."] _1, } impl AVGEW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { AVGEW::_0 => false, AVGEW::_1 => true, } } } #[doc = r" Proxy"] pub struct _AVGEW<'a> { w: &'a mut W, } impl<'a> _AVGEW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: AVGEW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Hardware average function disabled."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(AVGEW::_0) } #[doc = "Hardware average function enabled."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(AVGEW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 2; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `ADCO`"] pub enum ADCOW { #[doc = "One conversion or one set of conversions if 
the hardware average function is enabled, that is, AVGE=1, after initiating a conversion."] _0, #[doc = "Continuous conversions or sets of conversions if the hardware average function is enabled, that is, AVGE=1, after initiating a conversion."] _1, } impl ADCOW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { ADCOW::_0 => false, ADCOW::_1 => true, } } } #[doc = r" Proxy"] pub struct _ADCOW<'a> { w: &'a mut W, } impl<'a> _ADCOW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: ADCOW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "One conversion or one set of conversions if the hardware average function is enabled, that is, AVGE=1, after initiating a conversion."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(ADCOW::_0) } #[doc = "Continuous conversions or sets of conversions if the hardware average function is enabled, that is, AVGE=1, after initiating a conversion."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(ADCOW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 3; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = "Values that can be written to the field `CALF`"] pub enum CALFW { #[doc = "Calibration completed normally."] _0, #[doc = "Calibration failed. 
ADC accuracy specifications are not guaranteed."] _1, } impl CALFW { #[allow(missing_docs)] #[doc(hidden)] #[inline] pub fn _bits(&self) -> bool { match *self { CALFW::_0 => false, CALFW::_1 => true, } } } #[doc = r" Proxy"] pub struct _CALFW<'a> { w: &'a mut W, } impl<'a> _CALFW<'a> { #[doc = r" Writes `variant` to the field"] #[inline] pub fn variant(self, variant: CALFW) -> &'a mut W { { self.bit(variant._bits()) } } #[doc = "Calibration completed normally."] #[inline] pub fn _0(self) -> &'a mut W { self.variant(CALFW::_0) } #[doc = "Calibration failed. ADC accuracy specifications are not guaranteed."] #[inline] pub fn _1(self) -> &'a mut W { self.variant(CALFW::_1) } #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 6; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _CALW<'a> { w: &'a mut W, } impl<'a> _CALW<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 7; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bits 0:1 - Hardware Average Select"] #[inline] pub fn avgs(&self) -> AVGSR { AVGSR::_from({ const MASK: u8 = 3; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) as u8 }) } #[doc = "Bit 2 - Hardware Average Enable"] #[inline] pub fn avge(&self) -> AVGER { 
AVGER::_from({ const MASK: bool = true; const OFFSET: u8 = 2; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 3 - Continuous Conversion Enable"] #[inline] pub fn adco(&self) -> ADCOR { ADCOR::_from({ const MASK: bool = true; const OFFSET: u8 = 3; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 6 - Calibration Failed Flag"] #[inline] pub fn calf(&self) -> CALFR { CALFR::_from({ const MASK: bool = true; const OFFSET: u8 = 6; ((self.bits >> OFFSET) & MASK as u32) != 0 }) } #[doc = "Bit 7 - Calibration"] #[inline] pub fn cal(&self) -> CALR { let bits = { const MASK: bool = true; const OFFSET: u8 = 7; ((self.bits >> OFFSET) & MASK as u32) != 0 }; CALR { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bits 0:1 - Hardware Average Select"] #[inline] pub fn avgs(&mut self) -> _AVGSW { _AVGSW { w: self } } #[doc = "Bit 2 - Hardware Average Enable"] #[inline] pub fn avge(&mut self) -> _AVGEW { _AVGEW { w: self } } #[doc = "Bit 3 - Continuous Conversion Enable"] #[inline] pub fn adco(&mut self) -> _ADCOW { _ADCOW { w: self } } #[doc = "Bit 6 - Calibration Failed Flag"] #[inline] pub fn calf(&mut self) -> _CALFW { _CALFW { w: self } } #[doc = "Bit 7 - Calibration"] #[inline] pub fn cal(&mut self) -> _CALW { _CALW { w: self } } }
26.105348
153
0.506426
9c84a766316b3f2d82fd3f5ccb81e35b813bdf72
1,267
//! Parameter sets for simulating different reverberant environments. //! //! The tapped delays and tapped gains specify the start of the reverb, and must //! be the same length. They represent the initial reflection paths, and are //! used to sculpt the echoes of the room. //! //! The comb delays are used to fill out the reverberations and create a steady //! decay; they sculpt the color of the room. use oxcable::types::Time; /// A container for Moorer reverberator parameters. #[derive(Copy, Clone, Debug)] pub struct Room<'a> { pub tapped_delays: &'a[Time], pub tapped_gains: &'a[f32], pub comb_delays: &'a[Time] } /// A generic concert hall. /// /// This room was modeled by Moorer in his original paper, where he described /// the Moorer reverbator. pub static HALL: Room<'static> = Room { // For these parameters, see pg. 24 from Moorer paper tapped_delays: &[190, 948, 992, 1182, 1191, 1314, 2020, 2139, 2523, 2589, 2624, 2699, 3118, 3122, 3202, 3268, 3321, 3515], tapped_gains: &[0.841, 0.504, 0.491, 0.379, 0.380, 0.346, 0.289, 0.272, 192.0, 0.193, 0.217, 0.181, 0.180, 0.181, 0.176, 0.142, 0.167, 0.134], // For these parameters, see pg. 18 from Moorer comb_delays: &[2205, 2470, 2690, 2999, 3175, 3440], };
36.2
80
0.676401
d58f05fbb4166713bb0b21281245506371a5bc21
115,401
#![doc = "generated by AutoRust 0.1.0"] #![allow(unused_mut)] #![allow(unused_variables)] #![allow(unused_imports)] use crate::models::*; use snafu::{ResultExt, Snafu}; pub mod operations { use crate::models::*; use snafu::{ResultExt, Snafu}; pub async fn list(operation_config: &crate::OperationConfig) -> std::result::Result<OperationListResult, list::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/providers/Microsoft.MachineLearningServices/operations", operation_config.base_path(), ); let mut url = url::Url::parse(url_str).context(list::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(list::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(list::BuildRequestError)?; let rsp = http_client.execute_request(req).await.context(list::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: OperationListResult = serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?; list::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod list { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { 
DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, }, BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } } pub mod workspaces { use crate::models::*; use snafu::{ResultExt, Snafu}; pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, ) -> std::result::Result<Workspace, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}", operation_config.base_path(), subscription_id, resource_group_name, workspace_name ); let mut url = url::Url::parse(url_str).context(get::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(get::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(get::BuildRequestError)?; let rsp = http_client.execute_request(req).await.context(get::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: Workspace = serde_json::from_slice(rsp_body).context(get::DeserializeError { body: 
rsp_body.clone() })?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?; get::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod get { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, }, BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn create_or_update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, parameters: &Workspace, ) -> std::result::Result<create_or_update::Response, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}", operation_config.base_path(), subscription_id, resource_group_name, workspace_name ); let mut url = url::Url::parse(url_str).context(create_or_update::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(create_or_update::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", 
operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(create_or_update::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .context(create_or_update::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: Workspace = serde_json::from_slice(rsp_body).context(create_or_update::DeserializeError { body: rsp_body.clone() })?; Ok(create_or_update::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: Workspace = serde_json::from_slice(rsp_body).context(create_or_update::DeserializeError { body: rsp_body.clone() })?; Ok(create_or_update::Response::Created201(rsp_value)) } http::StatusCode::ACCEPTED => Ok(create_or_update::Response::Accepted202), status_code => { let rsp_body = rsp.body(); let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(create_or_update::DeserializeError { body: rsp_body.clone() })?; create_or_update::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod create_or_update { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug)] pub enum Response { Ok200(Workspace), Created201(Workspace), Accepted202, } #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, }, BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: 
&str, workspace_name: &str, parameters: &WorkspaceUpdateParameters, ) -> std::result::Result<Workspace, update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}", operation_config.base_path(), subscription_id, resource_group_name, workspace_name ); let mut url = url::Url::parse(url_str).context(update::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PATCH); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(update::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(update::BuildRequestError)?; let rsp = http_client.execute_request(req).await.context(update::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: Workspace = serde_json::from_slice(rsp_body).context(update::DeserializeError { body: rsp_body.clone() })?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(update::DeserializeError { body: rsp_body.clone() })?; update::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod update { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, }, 
BuildRequestError { source: http::Error, },
ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, },
SerializeError { source: Box<dyn std::error::Error + Sync + Send>, },
DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
GetTokenError { source: azure_core::errors::AzureError, },
} }
// NOTE(review): machine-generated ARM client code (AutoRust pattern). Comments added
// for readability; prefer fixing the generator/spec over hand-editing this file.
// DELETE on the workspace resource. 200/202/204 are all success variants
// (202 indicates the service accepted the delete asynchronously).
pub async fn delete(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    workspace_name: &str,
) -> std::result::Result<delete::Response, delete::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}",
        operation_config.base_path(), subscription_id, resource_group_name, workspace_name
    );
    let mut url = url::Url::parse(url_str).context(delete::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::DELETE);
    // Bearer auth is optional: only attached when a token credential is configured.
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(delete::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).context(delete::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.context(delete::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => Ok(delete::Response::Ok200),
        http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202),
        http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
        // Any other status: body is expected to be a serialized service error.
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(delete::DeserializeError { body: rsp_body.clone() })?;
            delete::DefaultResponse { status_code, value: rsp_value, } .fail()
        }
    }
}
// Response and snafu error types for `delete` above.
pub mod delete {
    use crate::{models, models::*};
    use snafu::Snafu;
    #[derive(Debug)]
    pub enum Response {
        Ok200,
        Accepted202,
        NoContent204,
    }
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, },
        ParseUrlError { source: url::ParseError, },
        BuildRequestError { source: http::Error, },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send>, },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
        GetTokenError { source: azure_core::errors::AzureError, },
    }
}
// Lists workspaces in a resource group; `skiptoken` is the server-issued paging token.
pub async fn list_by_resource_group(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    skiptoken: Option<&str>,
) -> std::result::Result<WorkspaceListResult, list_by_resource_group::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces",
        operation_config.base_path(), subscription_id, resource_group_name
    );
    let mut url = url::Url::parse(url_str).context(list_by_resource_group::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(list_by_resource_group::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    if let Some(skiptoken) = skiptoken {
        url.query_pairs_mut().append_pair("$skiptoken", skiptoken);
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).context(list_by_resource_group::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .context(list_by_resource_group::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: WorkspaceListResult = serde_json::from_slice(rsp_body).context(list_by_resource_group::DeserializeError { body: rsp_body.clone() })?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(list_by_resource_group::DeserializeError { body: rsp_body.clone() })?;
            list_by_resource_group::DefaultResponse { status_code, value: rsp_value, } .fail()
        }
    }
}
pub mod list_by_resource_group {
    use crate::{models, models::*};
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, },
        ParseUrlError { source: url::ParseError, },
        BuildRequestError { source: http::Error, },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send>, },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
        GetTokenError { source: azure_core::errors::AzureError, },
    }
}
// POST …/listKeys — empty request body, sent with an explicit Content-Length: 0.
pub async fn list_keys(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    workspace_name: &str,
) -> std::result::Result<ListWorkspaceKeysResult, list_keys::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/listKeys",
        operation_config.base_path(), subscription_id, resource_group_name, workspace_name
    );
    let mut url = url::Url::parse(url_str).context(list_keys::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::POST);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(list_keys::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).context(list_keys::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.context(list_keys::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: ListWorkspaceKeysResult = serde_json::from_slice(rsp_body).context(list_keys::DeserializeError { body: rsp_body.clone() })?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(list_keys::DeserializeError { body: rsp_body.clone() })?;
            list_keys::DefaultResponse { status_code, value: rsp_value, } .fail()
        }
    }
}
pub mod list_keys {
    use crate::{models, models::*};
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, },
        ParseUrlError { source: url::ParseError, },
        BuildRequestError { source: http::Error, },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send>, },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
        GetTokenError { source: azure_core::errors::AzureError, },
    }
}
// POST …/resyncKeys — success carries no payload, hence the unit return type.
pub async fn resync_keys(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    resource_group_name: &str,
    workspace_name: &str,
) -> std::result::Result<(), resync_keys::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/resyncKeys",
        operation_config.base_path(), subscription_id, resource_group_name, workspace_name
    );
    let mut url = url::Url::parse(url_str).context(resync_keys::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::POST);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(resync_keys::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).context(resync_keys::BuildRequestError)?;
    let rsp = http_client.execute_request(req).await.context(resync_keys::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => Ok(()),
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(resync_keys::DeserializeError { body: rsp_body.clone() })?;
            resync_keys::DefaultResponse { status_code, value: rsp_value, } .fail()
        }
    }
}
pub mod resync_keys {
    use crate::{models, models::*};
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, },
        ParseUrlError { source: url::ParseError, },
        BuildRequestError { source: http::Error, },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send>, },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
        GetTokenError { source: azure_core::errors::AzureError, },
    }
}
// Lists all workspaces in the subscription (no resource-group scope); paged via `$skiptoken`.
pub async fn list_by_subscription(
    operation_config: &crate::OperationConfig,
    subscription_id: &str,
    skiptoken: Option<&str>,
) -> std::result::Result<WorkspaceListResult, list_by_subscription::Error> {
    let http_client = operation_config.http_client();
    let url_str = &format!(
        "{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/workspaces",
        operation_config.base_path(), subscription_id
    );
    let mut url = url::Url::parse(url_str).context(list_by_subscription::ParseUrlError)?;
    let mut req_builder = http::request::Builder::new();
    req_builder = req_builder.method(http::Method::GET);
    if let Some(token_credential) = operation_config.token_credential() {
        let token_response = token_credential
            .get_token(operation_config.token_credential_resource())
            .await
            .context(list_by_subscription::GetTokenError)?;
        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
    }
    url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
    if let Some(skiptoken) = skiptoken {
        url.query_pairs_mut().append_pair("$skiptoken", skiptoken);
    }
    let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
    req_builder = req_builder.uri(url.as_str());
    let req = req_builder.body(req_body).context(list_by_subscription::BuildRequestError)?;
    let rsp = http_client
        .execute_request(req)
        .await
        .context(list_by_subscription::ExecuteRequestError)?;
    match rsp.status() {
        http::StatusCode::OK => {
            let rsp_body = rsp.body();
            let rsp_value: WorkspaceListResult = serde_json::from_slice(rsp_body).context(list_by_subscription::DeserializeError { body: rsp_body.clone() })?;
            Ok(rsp_value)
        }
        status_code => {
            let rsp_body = rsp.body();
            let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(list_by_subscription::DeserializeError { body: rsp_body.clone() })?;
            list_by_subscription::DefaultResponse { status_code, value: rsp_value, } .fail()
        }
    }
}
pub mod list_by_subscription {
    use crate::{models, models::*};
    use snafu::Snafu;
    #[derive(Debug, Snafu)]
    #[snafu(visibility(pub(crate)))]
    pub enum Error {
        DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, },
        ParseUrlError { source: url::ParseError, },
        BuildRequestError { source: http::Error, },
        ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, },
        SerializeError { source: Box<dyn std::error::Error + Sync + Send>, },
        DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
        GetTokenError { source: azure_core::errors::AzureError, },
    }
}
}
// Read-only feature-flag queries for a workspace.
pub mod workspace_features {
    use crate::models::*;
    use snafu::{ResultExt, Snafu};
    // GET …/features — enumerates AML user features for the workspace.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        workspace_name: &str,
    ) -> std::result::Result<ListAmlUserFeatureResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/features",
            operation_config.base_path(), subscription_id, resource_group_name, workspace_name
        );
        let mut url = url::Url::parse(url_str).context(list::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).context(list::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: ListAmlUserFeatureResult = serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?;
                list::DefaultResponse { status_code, value: rsp_value, } .fail()
            }
        }
    }
    pub mod list {
        use crate::{models, models::*};
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, },
            ParseUrlError { source: url::ParseError, },
            BuildRequestError { source: http::Error, },
            ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, },
            SerializeError { source: Box<dyn std::error::Error + Sync + Send>, },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
            GetTokenError { source: azure_core::errors::AzureError, },
        }
    }
}
// Notebook provisioning for a workspace.
pub mod notebooks {
    use crate::models::*;
    use snafu::{ResultExt, Snafu};
    // POST …/prepareNotebook — 200 returns the resource info, 202 means still provisioning.
    pub async fn prepare(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        workspace_name: &str,
    ) -> std::result::Result<prepare::Response, prepare::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/prepareNotebook",
            operation_config.base_path(), subscription_id, resource_group_name, workspace_name
        );
        let mut url = url::Url::parse(url_str).context(prepare::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::POST);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .context(prepare::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).context(prepare::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.context(prepare::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: NotebookResourceInfo = serde_json::from_slice(rsp_body).context(prepare::DeserializeError { body: rsp_body.clone() })?;
                Ok(prepare::Response::Ok200(rsp_value))
            }
            http::StatusCode::ACCEPTED => Ok(prepare::Response::Accepted202),
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(prepare::DeserializeError { body: rsp_body.clone() })?;
                prepare::DefaultResponse { status_code, value: rsp_value, } .fail()
            }
        }
    }
    pub mod prepare {
        use crate::{models, models::*};
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200(NotebookResourceInfo),
            Accepted202,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, },
            ParseUrlError { source: url::ParseError, },
            BuildRequestError { source: http::Error, },
            ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, },
            SerializeError { source: Box<dyn std::error::Error + Sync + Send>, },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
            GetTokenError { source: azure_core::errors::AzureError, },
        }
    }
}
// Per-location usage counters. Note: unlike most operations here, the non-OK path
// does NOT parse the body as a service error — it is captured raw in UnexpectedResponse.
pub mod usages {
    use crate::models::*;
    use snafu::{ResultExt, Snafu};
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        location: &str,
    ) -> std::result::Result<ListUsagesResult, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/locations/{}/usages",
            operation_config.base_path(), subscription_id, location
        );
        let mut url = url::Url::parse(url_str).context(list::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).context(list::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: ListUsagesResult = serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                list::UnexpectedResponse { status_code, body: rsp_body.clone(), } .fail()
            }
        }
    }
    pub mod list {
        use crate::{models, models::*};
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            ParseUrlError { source: url::ParseError
} pub mod list { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, ParseUrlError { source: url::ParseError }, BuildRequestError { source: http::Error }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> }, SerializeError { source: Box<dyn std::error::Error + Sync + Send> }, DeserializeError { source: serde_json::Error, body: bytes::Bytes }, GetTokenError { source: azure_core::errors::AzureError }, } } } pub mod quotas { use crate::models::*; use snafu::{ResultExt, Snafu}; pub async fn update( operation_config: &crate::OperationConfig, location: &str, parameters: &QuotaUpdateParameters, subscription_id: &str, ) -> std::result::Result<UpdateWorkspaceQuotasResult, update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/locations/{}/updateQuotas", operation_config.base_path(), subscription_id, location ); let mut url = url::Url::parse(url_str).context(update::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(update::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(update::BuildRequestError)?; let rsp = http_client.execute_request(req).await.context(update::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body 
= rsp.body();
                let rsp_value: UpdateWorkspaceQuotasResult = serde_json::from_slice(rsp_body).context(update::DeserializeError { body: rsp_body.clone() })?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(update::DeserializeError { body: rsp_body.clone() })?;
                update::DefaultResponse { status_code, value: rsp_value, } .fail()
            }
        }
    }
    pub mod update {
        use crate::{models, models::*};
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, },
            ParseUrlError { source: url::ParseError, },
            BuildRequestError { source: http::Error, },
            ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, },
            SerializeError { source: Box<dyn std::error::Error + Sync + Send>, },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
            GetTokenError { source: azure_core::errors::AzureError, },
        }
    }
    // GET …/locations/{location}/Quotas — current quota assignments. Non-OK responses
    // are returned raw (UnexpectedResponse), not parsed as a service error.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        location: &str,
    ) -> std::result::Result<ListWorkspaceQuotas, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/locations/{}/Quotas",
            operation_config.base_path(), subscription_id, location
        );
        let mut url = url::Url::parse(url_str).context(list::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).context(list::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: ListWorkspaceQuotas = serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                list::UnexpectedResponse { status_code, body: rsp_body.clone(), } .fail()
            }
        }
    }
    pub mod list {
        use crate::{models, models::*};
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes },
            ParseUrlError { source: url::ParseError },
            BuildRequestError { source: http::Error },
            ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> },
            SerializeError { source: Box<dyn std::error::Error + Sync + Send> },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes },
            GetTokenError { source: azure_core::errors::AzureError },
        }
    }
}
// CRUD over workspace connections.
pub mod workspace_connections {
    use crate::models::*;
    use snafu::{ResultExt, Snafu};
    // GET …/connections — `target`/`category` are optional server-side filters.
    pub async fn list(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        workspace_name: &str,
        target: Option<&str>,
        category: Option<&str>,
    ) -> std::result::Result<PaginatedWorkspaceConnectionsList, list::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/connections",
            operation_config.base_path(), subscription_id, resource_group_name, workspace_name
        );
        let mut url = url::Url::parse(url_str).context(list::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .context(list::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        if let Some(target) = target {
            url.query_pairs_mut().append_pair("target", target);
        }
        if let Some(category) = category {
            url.query_pairs_mut().append_pair("category", category);
        }
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).context(list::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.context(list::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: PaginatedWorkspaceConnectionsList = serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(list::DeserializeError { body: rsp_body.clone() })?;
                list::DefaultResponse { status_code, value: rsp_value, } .fail()
            }
        }
    }
    pub mod list {
        use crate::{models, models::*};
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, },
            ParseUrlError { source: url::ParseError, },
            BuildRequestError { source: http::Error, },
            ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, },
            SerializeError { source: Box<dyn std::error::Error + Sync + Send>, },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
            GetTokenError { source: azure_core::errors::AzureError, },
        }
    }
    // GET …/connections/{connection_name} — fetches a single connection.
    pub async fn get(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        workspace_name: &str,
        connection_name: &str,
    ) -> std::result::Result<WorkspaceConnection, get::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/connections/{}",
            operation_config.base_path(), subscription_id, resource_group_name, workspace_name, connection_name
        );
        let mut url = url::Url::parse(url_str).context(get::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .context(get::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).context(get::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.context(get::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: WorkspaceConnection = serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?;
                get::DefaultResponse { status_code, value: rsp_value, } .fail()
            }
        }
    }
    pub mod get {
        use crate::{models, models::*};
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, },
ParseUrlError { source: url::ParseError, }, BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn create( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, connection_name: &str, parameters: &WorkspaceConnectionDto, ) -> std::result::Result<WorkspaceConnection, create::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/connections/{}", operation_config.base_path(), subscription_id, resource_group_name, workspace_name, connection_name ); let mut url = url::Url::parse(url_str).context(create::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(create::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(create::BuildRequestError)?; let rsp = http_client.execute_request(req).await.context(create::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: WorkspaceConnection = serde_json::from_slice(rsp_body).context(create::DeserializeError { body: rsp_body.clone() })?; Ok(rsp_value) } 
status_code => {
                let rsp_body = rsp.body();
                let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(create::DeserializeError { body: rsp_body.clone() })?;
                create::DefaultResponse { status_code, value: rsp_value, } .fail()
            }
        }
    }
    pub mod create {
        use crate::{models, models::*};
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, },
            ParseUrlError { source: url::ParseError, },
            BuildRequestError { source: http::Error, },
            ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, },
            SerializeError { source: Box<dyn std::error::Error + Sync + Send>, },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
            GetTokenError { source: azure_core::errors::AzureError, },
        }
    }
    // DELETE …/connections/{connection_name} — 200 and 204 are both success variants.
    pub async fn delete(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        workspace_name: &str,
        connection_name: &str,
    ) -> std::result::Result<delete::Response, delete::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/connections/{}",
            operation_config.base_path(), subscription_id, resource_group_name, workspace_name, connection_name
        );
        let mut url = url::Url::parse(url_str).context(delete::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::DELETE);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .context(delete::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).context(delete::BuildRequestError)?;
        let rsp = http_client.execute_request(req).await.context(delete::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => Ok(delete::Response::Ok200),
            http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204),
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(delete::DeserializeError { body: rsp_body.clone() })?;
                delete::DefaultResponse { status_code, value: rsp_value, } .fail()
            }
        }
    }
    pub mod delete {
        use crate::{models, models::*};
        use snafu::Snafu;
        #[derive(Debug)]
        pub enum Response {
            Ok200,
            NoContent204,
        }
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, },
            ParseUrlError { source: url::ParseError, },
            BuildRequestError { source: http::Error, },
            ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, },
            SerializeError { source: Box<dyn std::error::Error + Sync + Send>, },
            DeserializeError { source: serde_json::Error, body: bytes::Bytes, },
            GetTokenError { source: azure_core::errors::AzureError, },
        }
    }
}
// Compute-target operations for a workspace.
pub mod machine_learning_compute {
    use crate::models::*;
    use snafu::{ResultExt, Snafu};
    // GET …/computes — lists compute resources; paged via `$skiptoken`.
    pub async fn list_by_workspace(
        operation_config: &crate::OperationConfig,
        subscription_id: &str,
        resource_group_name: &str,
        workspace_name: &str,
        skiptoken: Option<&str>,
    ) -> std::result::Result<PaginatedComputeResourcesList, list_by_workspace::Error> {
        let http_client = operation_config.http_client();
        let url_str = &format!(
            "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes",
            operation_config.base_path(), subscription_id, resource_group_name, workspace_name
        );
        let mut url = url::Url::parse(url_str).context(list_by_workspace::ParseUrlError)?;
        let mut req_builder = http::request::Builder::new();
        req_builder = req_builder.method(http::Method::GET);
        if let Some(token_credential) = operation_config.token_credential() {
            let token_response = token_credential
                .get_token(operation_config.token_credential_resource())
                .await
                .context(list_by_workspace::GetTokenError)?;
            req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
        }
        url.query_pairs_mut().append_pair("api-version", operation_config.api_version());
        if let Some(skiptoken) = skiptoken {
            url.query_pairs_mut().append_pair("$skiptoken", skiptoken);
        }
        let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY);
        req_builder = req_builder.uri(url.as_str());
        let req = req_builder.body(req_body).context(list_by_workspace::BuildRequestError)?;
        let rsp = http_client
            .execute_request(req)
            .await
            .context(list_by_workspace::ExecuteRequestError)?;
        match rsp.status() {
            http::StatusCode::OK => {
                let rsp_body = rsp.body();
                let rsp_value: PaginatedComputeResourcesList = serde_json::from_slice(rsp_body).context(list_by_workspace::DeserializeError { body: rsp_body.clone() })?;
                Ok(rsp_value)
            }
            status_code => {
                let rsp_body = rsp.body();
                let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(list_by_workspace::DeserializeError { body: rsp_body.clone() })?;
                list_by_workspace::DefaultResponse { status_code, value: rsp_value, } .fail()
            }
        }
    }
    pub mod list_by_workspace {
        use crate::{models, models::*};
        use snafu::Snafu;
        #[derive(Debug, Snafu)]
        #[snafu(visibility(pub(crate)))]
        pub enum Error {
            DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, },
            ParseUrlError { source: url::ParseError, },
            BuildRequestError { source: http::Error, },
            ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, },
            SerializeError { source: Box<dyn std::error::Error + Sync + Send>, },
DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, compute_name: &str, ) -> std::result::Result<ComputeResource, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}", operation_config.base_path(), subscription_id, resource_group_name, workspace_name, compute_name ); let mut url = url::Url::parse(url_str).context(get::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(get::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(get::BuildRequestError)?; let rsp = http_client.execute_request(req).await.context(get::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: ComputeResource = serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?; get::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod get { use crate::{models, models::*}; use 
snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, }, BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn create_or_update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, compute_name: &str, parameters: &ComputeResource, ) -> std::result::Result<create_or_update::Response, create_or_update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}", operation_config.base_path(), subscription_id, resource_group_name, workspace_name, compute_name ); let mut url = url::Url::parse(url_str).context(create_or_update::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(create_or_update::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(create_or_update::BuildRequestError)?; let rsp = http_client .execute_request(req) .await 
.context(create_or_update::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: ComputeResource = serde_json::from_slice(rsp_body).context(create_or_update::DeserializeError { body: rsp_body.clone() })?; Ok(create_or_update::Response::Ok200(rsp_value)) } http::StatusCode::CREATED => { let rsp_body = rsp.body(); let rsp_value: ComputeResource = serde_json::from_slice(rsp_body).context(create_or_update::DeserializeError { body: rsp_body.clone() })?; Ok(create_or_update::Response::Created201(rsp_value)) } status_code => { let rsp_body = rsp.body(); let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(create_or_update::DeserializeError { body: rsp_body.clone() })?; create_or_update::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod create_or_update { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug)] pub enum Response { Ok200(ComputeResource), Created201(ComputeResource), } #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, }, BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn update( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, compute_name: &str, parameters: &ClusterUpdateParameters, ) -> std::result::Result<ComputeResource, update::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}", 
operation_config.base_path(), subscription_id, resource_group_name, workspace_name, compute_name ); let mut url = url::Url::parse(url_str).context(update::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PATCH); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(update::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); /* BUGFIX: serialize the `parameters` (ClusterUpdateParameters) as the PATCH body; previously azure_core::EMPTY_BODY was sent and the update payload was silently dropped. */ let req_body = azure_core::to_json(parameters).context(update::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(update::BuildRequestError)?; let rsp = http_client.execute_request(req).await.context(update::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: ComputeResource = serde_json::from_slice(rsp_body).context(update::DeserializeError { body: rsp_body.clone() })?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(update::DeserializeError { body: rsp_body.clone() })?; update::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod update { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, }, BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source:
azure_core::errors::AzureError, }, } } pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, compute_name: &str, underlying_resource_action: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}", operation_config.base_path(), subscription_id, resource_group_name, workspace_name, compute_name ); let mut url = url::Url::parse(url_str).context(delete::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(delete::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); url.query_pairs_mut() .append_pair("underlyingResourceAction", underlying_resource_action); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(delete::BuildRequestError)?; let rsp = http_client.execute_request(req).await.context(delete::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202), status_code => { let rsp_body = rsp.body(); let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(delete::DeserializeError { body: rsp_body.clone() })?; delete::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod delete { use crate::{models, models::*}; use snafu::Snafu; 
#[derive(Debug)] pub enum Response { Ok200, Accepted202, } #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, }, BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn list_nodes( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, compute_name: &str, ) -> std::result::Result<AmlComputeNodesInformation, list_nodes::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}/listNodes", operation_config.base_path(), subscription_id, resource_group_name, workspace_name, compute_name ); let mut url = url::Url::parse(url_str).context(list_nodes::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(list_nodes::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(list_nodes::BuildRequestError)?; let rsp = 
http_client.execute_request(req).await.context(list_nodes::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: AmlComputeNodesInformation = serde_json::from_slice(rsp_body).context(list_nodes::DeserializeError { body: rsp_body.clone() })?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(list_nodes::DeserializeError { body: rsp_body.clone() })?; list_nodes::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod list_nodes { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, }, BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn list_keys( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, compute_name: &str, ) -> std::result::Result<ComputeSecrets, list_keys::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}/listKeys", operation_config.base_path(), subscription_id, resource_group_name, workspace_name, compute_name ); let mut url = url::Url::parse(url_str).context(list_keys::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential 
.get_token(operation_config.token_credential_resource()) .await .context(list_keys::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(list_keys::BuildRequestError)?; let rsp = http_client.execute_request(req).await.context(list_keys::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: ComputeSecrets = serde_json::from_slice(rsp_body).context(list_keys::DeserializeError { body: rsp_body.clone() })?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(list_keys::DeserializeError { body: rsp_body.clone() })?; list_keys::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod list_keys { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, }, BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn start( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, compute_name: &str, ) -> std::result::Result<(), start::Error> { let http_client = operation_config.http_client(); let url_str 
= &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}/start", operation_config.base_path(), subscription_id, resource_group_name, workspace_name, compute_name ); let mut url = url::Url::parse(url_str).context(start::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(start::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(start::BuildRequestError)?; let rsp = http_client.execute_request(req).await.context(start::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(start::DeserializeError { body: rsp_body.clone() })?; start::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod start { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, }, BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, 
GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn stop( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, compute_name: &str, ) -> std::result::Result<(), stop::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}/stop", operation_config.base_path(), subscription_id, resource_group_name, workspace_name, compute_name ); let mut url = url::Url::parse(url_str).context(stop::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(stop::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(stop::BuildRequestError)?; let rsp = http_client.execute_request(req).await.context(stop::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(stop::DeserializeError { body: rsp_body.clone() })?; stop::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod stop { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: http::StatusCode, value: 
models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, }, BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn restart( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, compute_name: &str, ) -> std::result::Result<(), restart::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/computes/{}/restart", operation_config.base_path(), subscription_id, resource_group_name, workspace_name, compute_name ); let mut url = url::Url::parse(url_str).context(restart::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::POST); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(restart::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(restart::BuildRequestError)?; let rsp = http_client.execute_request(req).await.context(restart::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(()), status_code => { let rsp_body = rsp.body(); let rsp_value: MachineLearningServiceError = 
serde_json::from_slice(rsp_body).context(restart::DeserializeError { body: rsp_body.clone() })?; restart::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod restart { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, }, BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } } pub async fn list_skus( operation_config: &crate::OperationConfig, subscription_id: &str, ) -> std::result::Result<SkuListResult, list_skus::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/providers/Microsoft.MachineLearningServices/workspaces/skus", operation_config.base_path(), subscription_id ); let mut url = url::Url::parse(url_str).context(list_skus::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(list_skus::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(list_skus::BuildRequestError)?; let rsp = 
http_client.execute_request(req).await.context(list_skus::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: SkuListResult = serde_json::from_slice(rsp_body).context(list_skus::DeserializeError { body: rsp_body.clone() })?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(list_skus::DeserializeError { body: rsp_body.clone() })?; list_skus::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod list_skus { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, }, BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub mod private_endpoint_connections { use crate::models::*; use snafu::{ResultExt, Snafu}; pub async fn get( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, private_endpoint_connection_name: &str, ) -> std::result::Result<PrivateEndpointConnection, get::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/privateEndpointConnections/{}", operation_config.base_path(), subscription_id, resource_group_name, workspace_name, private_endpoint_connection_name ); let mut url = url::Url::parse(url_str).context(get::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); 
if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(get::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(get::BuildRequestError)?; let rsp = http_client.execute_request(req).await.context(get::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: PrivateEndpointConnection = serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(get::DeserializeError { body: rsp_body.clone() })?; get::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod get { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, }, BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn put( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, private_endpoint_connection_name: &str, properties: &PrivateEndpointConnection, ) -> 
std::result::Result<PrivateEndpointConnection, put::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/privateEndpointConnections/{}", operation_config.base_path(), subscription_id, resource_group_name, workspace_name, private_endpoint_connection_name ); let mut url = url::Url::parse(url_str).context(put::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::PUT); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(put::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); /* BUGFIX: serialize `properties` as the PUT body; previously azure_core::EMPTY_BODY was sent and the connection state payload was silently dropped. */ let req_body = azure_core::to_json(properties).context(put::SerializeError)?; req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(put::BuildRequestError)?; let rsp = http_client.execute_request(req).await.context(put::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: PrivateEndpointConnection = serde_json::from_slice(rsp_body).context(put::DeserializeError { body: rsp_body.clone() })?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); let rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(put::DeserializeError { body: rsp_body.clone() })?; put::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod put { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, },
BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } pub async fn delete( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, private_endpoint_connection_name: &str, ) -> std::result::Result<delete::Response, delete::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/privateEndpointConnections/{}", operation_config.base_path(), subscription_id, resource_group_name, workspace_name, private_endpoint_connection_name ); let mut url = url::Url::parse(url_str).context(delete::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::DELETE); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(delete::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(delete::BuildRequestError)?; let rsp = http_client.execute_request(req).await.context(delete::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => Ok(delete::Response::Ok200), http::StatusCode::ACCEPTED => Ok(delete::Response::Accepted202), http::StatusCode::NO_CONTENT => Ok(delete::Response::NoContent204), status_code => { let rsp_body = rsp.body(); let 
rsp_value: MachineLearningServiceError = serde_json::from_slice(rsp_body).context(delete::DeserializeError { body: rsp_body.clone() })?; delete::DefaultResponse { status_code, value: rsp_value, } .fail() } } } pub mod delete { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug)] pub enum Response { Ok200, Accepted202, NoContent204, } #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { DefaultResponse { status_code: http::StatusCode, value: models::MachineLearningServiceError, }, ParseUrlError { source: url::ParseError, }, BuildRequestError { source: http::Error, }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send>, }, SerializeError { source: Box<dyn std::error::Error + Sync + Send>, }, DeserializeError { source: serde_json::Error, body: bytes::Bytes, }, GetTokenError { source: azure_core::errors::AzureError, }, } } } pub mod private_link_resources { use crate::models::*; use snafu::{ResultExt, Snafu}; pub async fn list_by_workspace( operation_config: &crate::OperationConfig, subscription_id: &str, resource_group_name: &str, workspace_name: &str, ) -> std::result::Result<PrivateLinkResourceListResult, list_by_workspace::Error> { let http_client = operation_config.http_client(); let url_str = &format!( "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.MachineLearningServices/workspaces/{}/privateLinkResources", operation_config.base_path(), subscription_id, resource_group_name, workspace_name ); let mut url = url::Url::parse(url_str).context(list_by_workspace::ParseUrlError)?; let mut req_builder = http::request::Builder::new(); req_builder = req_builder.method(http::Method::GET); if let Some(token_credential) = operation_config.token_credential() { let token_response = token_credential .get_token(operation_config.token_credential_resource()) .await .context(list_by_workspace::GetTokenError)?; req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", 
token_response.token.secret())); } url.query_pairs_mut().append_pair("api-version", operation_config.api_version()); let req_body = bytes::Bytes::from_static(azure_core::EMPTY_BODY); req_builder = req_builder.uri(url.as_str()); let req = req_builder.body(req_body).context(list_by_workspace::BuildRequestError)?; let rsp = http_client .execute_request(req) .await .context(list_by_workspace::ExecuteRequestError)?; match rsp.status() { http::StatusCode::OK => { let rsp_body = rsp.body(); let rsp_value: PrivateLinkResourceListResult = serde_json::from_slice(rsp_body).context(list_by_workspace::DeserializeError { body: rsp_body.clone() })?; Ok(rsp_value) } status_code => { let rsp_body = rsp.body(); list_by_workspace::UnexpectedResponse { status_code, body: rsp_body.clone(), } .fail() } } } pub mod list_by_workspace { use crate::{models, models::*}; use snafu::Snafu; #[derive(Debug, Snafu)] #[snafu(visibility(pub(crate)))] pub enum Error { UnexpectedResponse { status_code: http::StatusCode, body: bytes::Bytes }, ParseUrlError { source: url::ParseError }, BuildRequestError { source: http::Error }, ExecuteRequestError { source: Box<dyn std::error::Error + Sync + Send> }, SerializeError { source: Box<dyn std::error::Error + Sync + Send> }, DeserializeError { source: serde_json::Error, body: bytes::Bytes }, GetTokenError { source: azure_core::errors::AzureError }, } } }
42.101788
140
0.560116
5b8d6f82af65d1b1d7b2cdb3b84db525aaa8c1e4
10,702
#![cfg(feature = "test-bpf")] mod utils; use anchor_lang::AccountDeserialize; use mpl_auction_house::{pda::*, AuctionHouse}; use mpl_testing_utils::{ assert_error, solana::{airdrop, create_associated_token_account, create_mint}, }; use solana_program_test::*; use solana_sdk::{ instruction::InstructionError, signature::Keypair, signer::Signer, transaction::TransactionError, transport::TransportError, }; use spl_token; use std::assert_eq; use utils::setup_functions; #[tokio::test] async fn init_native_success() { let mut context = setup_functions::auction_house_program_test() .start_with_context() .await; // Payer Wallet let payer_wallet = Keypair::new(); airdrop(&mut context, &payer_wallet.pubkey(), 10_000_000_000) .await .unwrap(); let twd_key = payer_wallet.pubkey(); let fwd_key = payer_wallet.pubkey(); let t_mint_key = spl_token::native_mint::id(); let tdw_ata = twd_key; let seller_fee_basis_points: u16 = 100; let authority = Keypair::new(); airdrop(&mut context, &authority.pubkey(), 10_000_000_000) .await .unwrap(); // Derive Auction House Key let (auction_house_address, bump) = find_auction_house_address(&authority.pubkey(), &t_mint_key); let (auction_fee_account_key, fee_payer_bump) = find_auction_house_fee_account_address(&auction_house_address); // Derive Auction House Treasury Key let (auction_house_treasury_key, treasury_bump) = find_auction_house_treasury_address(&auction_house_address); let auction_house = setup_functions::create_auction_house( &mut context, &authority, &twd_key, &fwd_key, &t_mint_key, &tdw_ata, &auction_house_address, bump, &auction_fee_account_key, fee_payer_bump, &auction_house_treasury_key, treasury_bump, seller_fee_basis_points, false, false, ); let auction_house_account = auction_house.await.unwrap(); let auction_house_acc = context .banks_client .get_account(auction_house_account) .await .expect("account not found") .expect("account empty"); let auction_house_data = AuctionHouse::try_deserialize(&mut 
auction_house_acc.data.as_ref()).unwrap(); assert_eq!( auction_fee_account_key, auction_house_data.auction_house_fee_account ); assert_eq!( auction_house_treasury_key, auction_house_data.auction_house_treasury ); assert_eq!(tdw_ata, auction_house_data.treasury_withdrawal_destination); assert_eq!(fwd_key, auction_house_data.fee_withdrawal_destination); assert_eq!(t_mint_key, auction_house_data.treasury_mint); assert_eq!(authority.pubkey(), auction_house_data.authority); assert_eq!(authority.pubkey(), auction_house_data.creator); assert_eq!(bump, auction_house_data.bump); assert_eq!(treasury_bump, auction_house_data.treasury_bump); assert_eq!(fee_payer_bump, auction_house_data.fee_payer_bump); assert_eq!( seller_fee_basis_points, auction_house_data.seller_fee_basis_points ); assert_eq!(false, auction_house_data.requires_sign_off); assert_eq!(false, auction_house_data.can_change_sale_price); } #[tokio::test] async fn init_native_success_reinitialize_fail() { let mut context = setup_functions::auction_house_program_test() .start_with_context() .await; // Payer Wallet let payer_wallet = Keypair::new(); airdrop(&mut context, &payer_wallet.pubkey(), 10_000_000_000) .await .unwrap(); let twd_key = payer_wallet.pubkey(); let fwd_key = payer_wallet.pubkey(); let t_mint_key = spl_token::native_mint::id(); let tdw_ata = twd_key; let seller_fee_basis_points: u16 = 100; let authority = Keypair::new(); airdrop(&mut context, &authority.pubkey(), 10_000_000_000) .await .unwrap(); // Derive Auction House Key let (auction_house_address, bump) = find_auction_house_address(&authority.pubkey(), &t_mint_key); let (auction_fee_account_key, fee_payer_bump) = find_auction_house_fee_account_address(&auction_house_address); // Derive Auction House Treasury Key let (auction_house_treasury_key, treasury_bump) = find_auction_house_treasury_address(&auction_house_address); setup_functions::create_auction_house( &mut context, &authority, &twd_key, &fwd_key, &t_mint_key, &tdw_ata, 
&auction_house_address, bump, &auction_fee_account_key, fee_payer_bump, &auction_house_treasury_key, treasury_bump, seller_fee_basis_points, false, false, ) .await .unwrap(); let malicious_wallet = Keypair::new(); airdrop(&mut context, &malicious_wallet.pubkey(), 10_000_000_000) .await .unwrap(); let hacked_twd_key = malicious_wallet.pubkey(); let hacked_fwd_key = malicious_wallet.pubkey(); let hacked_tdw_ata = twd_key; let seller_fee_basis_points: u16 = 100; // Derive Auction House Key let hacked_auction_house = setup_functions::create_auction_house( &mut context, &authority, &hacked_twd_key, &hacked_fwd_key, &t_mint_key, &hacked_tdw_ata, &auction_house_address, bump, &auction_fee_account_key, fee_payer_bump, &auction_house_treasury_key, treasury_bump, seller_fee_basis_points, false, false, ) .await .unwrap_err(); match hacked_auction_house { TransportError::TransactionError(TransactionError::InstructionError( 0, InstructionError::Custom(0), )) => (), _ => assert!(false, "Expected custom error"), } } #[tokio::test] async fn init_mint_success() { let mut context = setup_functions::auction_house_program_test() .start_with_context() .await; // Payer Wallet let payer_wallet = Keypair::new(); airdrop(&mut context, &payer_wallet.pubkey(), 10_000_000_000) .await .unwrap(); let mint_key = Keypair::new(); create_mint(&mut context, &mint_key, &payer_wallet.pubkey(), None) .await .unwrap(); let twd_key = payer_wallet.pubkey(); let fwd_key = payer_wallet.pubkey(); let t_mint_key = mint_key.pubkey(); let tdw_ata = create_associated_token_account(&mut context, &payer_wallet, &t_mint_key) .await .unwrap(); let seller_fee_basis_points: u16 = 100; let authority = Keypair::new(); airdrop(&mut context, &authority.pubkey(), 10_000_000_000) .await .unwrap(); // Derive Auction House Key let (auction_house_address, bump) = find_auction_house_address(&authority.pubkey(), &t_mint_key); let (auction_fee_account_key, fee_payer_bump) = 
find_auction_house_fee_account_address(&auction_house_address); // Derive Auction House Treasury Key let (auction_house_treasury_key, treasury_bump) = find_auction_house_treasury_address(&auction_house_address); let auction_house = setup_functions::create_auction_house( &mut context, &authority, &twd_key, &fwd_key, &t_mint_key, &tdw_ata, &auction_house_address, bump, &auction_fee_account_key, fee_payer_bump, &auction_house_treasury_key, treasury_bump, seller_fee_basis_points, false, false, ); let auction_house_account = auction_house.await.unwrap(); let auction_house_acc = context .banks_client .get_account(auction_house_account) .await .expect("account not found") .expect("account empty"); let auction_house_data = AuctionHouse::try_deserialize(&mut auction_house_acc.data.as_ref()).unwrap(); assert_eq!( auction_fee_account_key, auction_house_data.auction_house_fee_account ); assert_eq!( auction_house_treasury_key, auction_house_data.auction_house_treasury ); assert_eq!(tdw_ata, auction_house_data.treasury_withdrawal_destination); assert_eq!(fwd_key, auction_house_data.fee_withdrawal_destination); assert_eq!(t_mint_key, auction_house_data.treasury_mint); assert_eq!(authority.pubkey(), auction_house_data.authority); assert_eq!(authority.pubkey(), auction_house_data.creator); assert_eq!(bump, auction_house_data.bump); assert_eq!(treasury_bump, auction_house_data.treasury_bump); assert_eq!(fee_payer_bump, auction_house_data.fee_payer_bump); assert_eq!( seller_fee_basis_points, auction_house_data.seller_fee_basis_points ); assert_eq!(false, auction_house_data.requires_sign_off); assert_eq!(false, auction_house_data.can_change_sale_price); } #[tokio::test] async fn init_mint_failure() { let mut context = setup_functions::auction_house_program_test() .start_with_context() .await; // Payer Wallet let payer_wallet = Keypair::new(); airdrop(&mut context, &payer_wallet.pubkey(), 10_000_000_000) .await .unwrap(); let twd_key = payer_wallet.pubkey(); let fwd_key = 
payer_wallet.pubkey(); let t_mint_key = spl_token::native_mint::id(); let tdw_ata = create_associated_token_account(&mut context, &payer_wallet, &t_mint_key) .await .unwrap(); let seller_fee_basis_points: u16 = 100; let authority = Keypair::new(); airdrop(&mut context, &authority.pubkey(), 10_000_000_000) .await .unwrap(); // Derive Auction House Key let (auction_house_address, bump) = find_auction_house_address(&authority.pubkey(), &t_mint_key); let (auction_fee_account_key, fee_payer_bump) = find_auction_house_fee_account_address(&auction_house_address); // Derive Auction House Treasury Key let (auction_house_treasury_key, treasury_bump) = find_auction_house_treasury_address(&auction_house_address); let err = setup_functions::create_auction_house( &mut context, &authority, &twd_key, &fwd_key, &t_mint_key, &tdw_ata, &auction_house_address, bump, &auction_fee_account_key, fee_payer_bump, &auction_house_treasury_key, treasury_bump, seller_fee_basis_points, false, false, ) .await .unwrap_err(); println!("{:?}", err.to_string()); assert_error!(err, 6000); }
32.727829
91
0.675575
fbffc28b60241e94f88b47c7134c3d3667eef1cd
1,351
//! Run `cargo test` and fix `assert_eq!`s. mod fix; mod parse_code; mod parse_out; use std::{ env, io::{self, Write}, process::{self, Command}, str, }; fn main() -> io::Result<()> { let cargo = env::var("CARGO").unwrap_or_else(|_| "cargo".to_string()); let exitcode = loop { eprint!("Running tests..."); let output = Command::new(&cargo) .arg("test") .args(env::args_os().skip(1).skip_while(|s| s == "fixeq")) .output()?; let forward_output = || -> io::Result<()> { eprintln!("Last 'cargo test' output:"); io::stderr().flush()?; io::stdout().write_all(&output.stdout)?; io::stderr().write_all(&output.stderr)?; Ok(()) }; if output.status.success() { eprintln!(" succeeded."); forward_output()?; break 0; } let out = str::from_utf8(&output.stdout).unwrap_or(""); let failures = parse_out::find_assert_eq_failures(out); let count = fix::fix(failures)?; if count == 0 { eprintln!(" failed."); forward_output()?; break output.status.code().unwrap_or(0); } else { eprintln!(" fixed {} assert_eq!s.", count); } }; process::exit(exitcode); }
26.490196
74
0.50037
bfe118eefb8d46b4bfd59a4f3d124f38c1fb9f43
13,087
use std::marker; use std::ptr; use std::slice; use libc::{c_int, c_uint, c_void, size_t}; use {raw, panic, Repository, Error, Oid, Revwalk, Buf}; use util::Binding; /// Stages that are reported by the PackBuilder progress callback. pub enum PackBuilderStage { /// Adding objects to the pack AddingObjects, /// Deltafication of the pack Deltafication, } pub type ProgressCb<'a> = FnMut(PackBuilderStage, u32, u32) -> bool + 'a; pub type ForEachCb<'a> = FnMut(&[u8]) -> bool + 'a; /// A builder for creating a packfile pub struct PackBuilder<'repo> { raw: *mut raw::git_packbuilder, progress: Option<Box<Box<ProgressCb<'repo>>>>, _marker: marker::PhantomData<&'repo Repository>, } impl<'repo> PackBuilder<'repo> { /// Insert a single object. For an optimal pack it's mandatory to insert /// objects in recency order, commits followed by trees and blobs. pub fn insert_object(&mut self, id: Oid, name: Option<&str>) -> Result<(), Error> { let name = try!(::opt_cstr(name)); unsafe { try_call!(raw::git_packbuilder_insert(self.raw, id.raw(), name)); } Ok(()) } /// Insert a root tree object. This will add the tree as well as all /// referenced trees and blobs. pub fn insert_tree(&mut self, id: Oid) -> Result<(), Error> { unsafe { try_call!(raw::git_packbuilder_insert_tree(self.raw, id.raw())); } Ok(()) } /// Insert a commit object. This will add a commit as well as the completed /// referenced tree. pub fn insert_commit(&mut self, id: Oid) -> Result<(), Error> { unsafe { try_call!(raw::git_packbuilder_insert_commit(self.raw, id.raw())); } Ok(()) } /// Insert objects as given by the walk. Those commits and all objects they /// reference will be inserted into the packbuilder. pub fn insert_walk(&mut self, walk: &mut Revwalk) -> Result<(), Error> { unsafe { try_call!(raw::git_packbuilder_insert_walk(self.raw, walk.raw())); } Ok(()) } /// Recursively insert an object and its referenced objects. Insert the /// object as well as any object it references. 
pub fn insert_recursive(&mut self, id: Oid, name: Option<&str>) -> Result<(), Error> { let name = try!(::opt_cstr(name)); unsafe { try_call!(raw::git_packbuilder_insert_recur(self.raw, id.raw(), name)); } Ok(()) } /// Write the contents of the packfile to an in-memory buffer. The contents /// of the buffer will become a valid packfile, even though there will be /// no attached index. pub fn write_buf(&mut self, buf: &mut Buf) -> Result<(), Error> { unsafe { try_call!(raw::git_packbuilder_write_buf(buf.raw(), self.raw)); } Ok(()) } /// Create the new pack and pass each object to the callback. pub fn foreach<F>(&mut self, mut cb: F) -> Result<(), Error> where F: FnMut(&[u8]) -> bool { let mut cb = &mut cb as &mut ForEachCb; let ptr = &mut cb as *mut _; unsafe { try_call!(raw::git_packbuilder_foreach(self.raw, foreach_c, ptr as *mut _)); } Ok(()) } /// `progress` will be called with progress information during pack /// building. Be aware that this is called inline with pack building /// operations, so performance may be affected. /// /// There can only be one progress callback attached, this will replace any /// existing one. See `unset_progress_callback` to remove the current /// progress callback without attaching a new one. pub fn set_progress_callback<F>(&mut self, progress: F) -> Result<(), Error> where F: FnMut(PackBuilderStage, u32, u32) -> bool + 'repo { let mut progress = Box::new(Box::new(progress) as Box<ProgressCb>); let ptr = &mut *progress as *mut _; let progress_c = Some(progress_c as raw::git_packbuilder_progress); unsafe { try_call!(raw::git_packbuilder_set_callbacks(self.raw, progress_c, ptr as *mut _)); } self.progress = Some(progress); Ok(()) } /// Remove the current progress callback. See `set_progress_callback` to /// set the progress callback. 
pub fn unset_progress_callback(&mut self) -> Result<(), Error> { unsafe { try_call!(raw::git_packbuilder_set_callbacks(self.raw, None, ptr::null_mut())); self.progress = None; } Ok(()) } /// Get the total number of objects the packbuilder will write out. pub fn object_count(&self) -> usize { unsafe { raw::git_packbuilder_object_count(self.raw) } } /// Get the number of objects the packbuilder has already written out. pub fn written(&self) -> usize { unsafe { raw::git_packbuilder_written(self.raw) } } /// Get the packfile's hash. A packfile's name is derived from the sorted /// hashing of all object names. This is only correct after the packfile /// has been written. pub fn hash(&self) -> Option<Oid> { if self.object_count() == 0 { unsafe { Some(Binding::from_raw(raw::git_packbuilder_hash(self.raw))) } } else { None } } } impl<'repo> Binding for PackBuilder<'repo> { type Raw = *mut raw::git_packbuilder; unsafe fn from_raw(ptr: *mut raw::git_packbuilder) -> PackBuilder<'repo> { PackBuilder { raw: ptr, progress: None, _marker: marker::PhantomData, } } fn raw(&self) -> *mut raw::git_packbuilder { self.raw } } impl<'repo> Drop for PackBuilder<'repo> { fn drop(&mut self) { unsafe { raw::git_packbuilder_set_callbacks(self.raw, None, ptr::null_mut()); raw::git_packbuilder_free(self.raw); } } } impl Binding for PackBuilderStage { type Raw = raw::git_packbuilder_stage_t; unsafe fn from_raw(raw: raw::git_packbuilder_stage_t) -> PackBuilderStage { match raw { raw::GIT_PACKBUILDER_ADDING_OBJECTS => PackBuilderStage::AddingObjects, raw::GIT_PACKBUILDER_DELTAFICATION => PackBuilderStage::Deltafication, _ => panic!("Unknown git diff binary kind"), } } fn raw(&self) -> raw::git_packbuilder_stage_t { match *self { PackBuilderStage::AddingObjects => raw::GIT_PACKBUILDER_ADDING_OBJECTS, PackBuilderStage::Deltafication => raw::GIT_PACKBUILDER_DELTAFICATION, } } } extern fn foreach_c(buf: *const c_void, size: size_t, data: *mut c_void) -> c_int { unsafe { let buf = 
slice::from_raw_parts(buf as *const u8, size as usize); let r = panic::wrap(|| { let data = data as *mut &mut ForEachCb; (*data)(buf) }); if r == Some(true) { 0 } else { -1 } } } extern fn progress_c(stage: raw::git_packbuilder_stage_t, current: c_uint, total: c_uint, data: *mut c_void) -> c_int { unsafe { let stage = Binding::from_raw(stage); let r = panic::wrap(|| { let data = data as *mut Box<ProgressCb>; (*data)(stage, current, total) }); if r == Some(true) { 0 } else { -1 } } } #[cfg(test)] mod tests { use std::fs::File; use std::path::Path; use {Buf, Repository, Oid}; fn commit(repo: &Repository) -> (Oid, Oid) { let mut index = t!(repo.index()); let root = repo.path().parent().unwrap(); t!(File::create(&root.join("foo"))); t!(index.add_path(Path::new("foo"))); let tree_id = t!(index.write_tree()); let tree = t!(repo.find_tree(tree_id)); let sig = t!(repo.signature()); let head_id = t!(repo.refname_to_id("HEAD")); let parent = t!(repo.find_commit(head_id)); let commit = t!(repo.commit(Some("HEAD"), &sig, &sig, "commit", &tree, &[&parent])); (commit, tree_id) } fn pack_header(len: u8) -> Vec<u8> { [].into_iter() .chain(b"PACK") // signature .chain(&[0, 0, 0, 2]) // version number .chain(&[0, 0, 0, len]) // number of objects .cloned().collect::<Vec<u8>>() } fn empty_pack_header() -> Vec<u8> { pack_header(0).iter() .chain(&[0x02, 0x9d, 0x08, 0x82, 0x3b, // ^ 0xd8, 0xa8, 0xea, 0xb5, 0x10, // | SHA-1 of the zero 0xad, 0x6a, 0xc7, 0x5c, 0x82, // | object pack header 0x3c, 0xfd, 0x3e, 0xd3, 0x1e]) // v .cloned().collect::<Vec<u8>>() } #[test] fn smoke() { let (_td, repo) = ::test::repo_init(); let _builder = t!(repo.packbuilder()); } #[test] fn smoke_write_buf() { let (_td, repo) = ::test::repo_init(); let mut builder = t!(repo.packbuilder()); let mut buf = Buf::new(); t!(builder.write_buf(&mut buf)); assert!(builder.hash().unwrap().is_zero()); assert_eq!(&*buf, &*empty_pack_header()); } #[test] fn smoke_foreach() { let (_td, repo) = ::test::repo_init(); let mut 
builder = t!(repo.packbuilder()); let mut buf = Vec::<u8>::new(); t!(builder.foreach(|bytes| { buf.extend(bytes); true })); assert_eq!(&*buf, &*empty_pack_header()); } #[test] fn insert_write_buf() { let (_td, repo) = ::test::repo_init(); let mut builder = t!(repo.packbuilder()); let mut buf = Buf::new(); let (commit, _tree) = commit(&repo); t!(builder.insert_object(commit, None)); assert_eq!(builder.object_count(), 1); t!(builder.write_buf(&mut buf)); // Just check that the correct number of objects are written assert_eq!(&buf[0..12], &*pack_header(1)); } #[test] fn insert_tree_write_buf() { let (_td, repo) = ::test::repo_init(); let mut builder = t!(repo.packbuilder()); let mut buf = Buf::new(); let (_commit, tree) = commit(&repo); // will insert the tree itself and the blob, 2 objects t!(builder.insert_tree(tree)); assert_eq!(builder.object_count(), 2); t!(builder.write_buf(&mut buf)); // Just check that the correct number of objects are written assert_eq!(&buf[0..12], &*pack_header(2)); } #[test] fn insert_commit_write_buf() { let (_td, repo) = ::test::repo_init(); let mut builder = t!(repo.packbuilder()); let mut buf = Buf::new(); let (commit, _tree) = commit(&repo); // will insert the commit, its tree and the blob, 3 objects t!(builder.insert_commit(commit)); assert_eq!(builder.object_count(), 3); t!(builder.write_buf(&mut buf)); // Just check that the correct number of objects are written assert_eq!(&buf[0..12], &*pack_header(3)); } #[test] fn progress_callback() { let mut progress_called = false; { let (_td, repo) = ::test::repo_init(); let mut builder = t!(repo.packbuilder()); let (commit, _tree) = commit(&repo); t!(builder.set_progress_callback(|_, _, _| { progress_called = true; true })); t!(builder.insert_commit(commit)); t!(builder.write_buf(&mut Buf::new())); } assert_eq!(progress_called, true); } #[test] fn clear_progress_callback() { let mut progress_called = false; { let (_td, repo) = ::test::repo_init(); let mut builder = t!(repo.packbuilder()); 
let (commit, _tree) = commit(&repo); t!(builder.set_progress_callback(|_, _, _| { progress_called = true; true })); t!(builder.unset_progress_callback()); t!(builder.insert_commit(commit)); t!(builder.write_buf(&mut Buf::new())); } assert_eq!(progress_called, false); } }
33.816537
83
0.53152
e8c14dc96a5da2f732cc54f32631f281ee671088
707
mod query_crypto_get_account_balance; mod query_crypto_get_info; mod query_file_get_contents; mod query_file_get_info; mod query_get_transaction_receipt; mod query_transaction_get_record; mod query_contract_get_bytecode; mod query_crypto_get_claim; mod query_get_by_key; mod query_contract_get_info; pub mod query; pub use self::{ query::{ Query, QueryInner }, query_crypto_get_account_balance::*, query_crypto_get_info::*, query_file_get_contents::*, query_file_get_info::*, query_get_transaction_receipt::*, query_transaction_get_record::*, query_contract_get_bytecode::*, query_crypto_get_account_balance::*, query_crypto_get_claim::*, query_get_by_key::*, };
28.28
40
0.792079
1e4b1ae07776212a8261dc882d999c45b7f71baf
25,956
use rustc_error_codes::*; use rustc_errors::{struct_span_err, Handler}; use rustc_feature::{AttributeGate, BUILTIN_ATTRIBUTE_MAP}; use rustc_feature::{Features, GateIssue, UnstableFeatures}; use rustc_span::source_map::Spanned; use rustc_span::symbol::sym; use rustc_span::Span; use syntax::ast::{self, AssocTyConstraint, AssocTyConstraintKind, NodeId}; use syntax::ast::{GenericParam, GenericParamKind, PatKind, RangeEnd, VariantData}; use syntax::attr; use syntax::sess::{feature_err, feature_err_issue, ParseSess}; use syntax::visit::{self, FnKind, Visitor}; use log::debug; macro_rules! gate_feature_fn { ($cx: expr, $has_feature: expr, $span: expr, $name: expr, $explain: expr) => {{ let (cx, has_feature, span, name, explain) = (&*$cx, $has_feature, $span, $name, $explain); let has_feature: bool = has_feature(&$cx.features); debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature); if !has_feature && !span.allows_unstable($name) { feature_err_issue(cx.parse_sess, name, span, GateIssue::Language, explain).emit(); } }}; } macro_rules! gate_feature_post { ($cx: expr, $feature: ident, $span: expr, $explain: expr) => { gate_feature_fn!($cx, |x: &Features| x.$feature, $span, sym::$feature, $explain) }; } pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, features: &Features) { PostExpansionVisitor { parse_sess, features }.visit_attribute(attr) } struct PostExpansionVisitor<'a> { parse_sess: &'a ParseSess, features: &'a Features, } impl<'a> PostExpansionVisitor<'a> { fn check_abi(&self, abi: ast::StrLit) { let ast::StrLit { symbol_unescaped, span, .. 
} = abi; match &*symbol_unescaped.as_str() { // Stable "Rust" | "C" | "cdecl" | "stdcall" | "fastcall" | "aapcs" | "win64" | "sysv64" | "system" => {} "rust-intrinsic" => { gate_feature_post!(&self, intrinsics, span, "intrinsics are subject to change"); } "platform-intrinsic" => { gate_feature_post!( &self, platform_intrinsics, span, "platform intrinsics are experimental and possibly buggy" ); } "vectorcall" => { gate_feature_post!( &self, abi_vectorcall, span, "vectorcall is experimental and subject to change" ); } "thiscall" => { gate_feature_post!( &self, abi_thiscall, span, "thiscall is experimental and subject to change" ); } "rust-call" => { gate_feature_post!( &self, unboxed_closures, span, "rust-call ABI is subject to change" ); } "ptx-kernel" => { gate_feature_post!( &self, abi_ptx, span, "PTX ABIs are experimental and subject to change" ); } "unadjusted" => { gate_feature_post!( &self, abi_unadjusted, span, "unadjusted ABI is an implementation detail and perma-unstable" ); } "msp430-interrupt" => { gate_feature_post!( &self, abi_msp430_interrupt, span, "msp430-interrupt ABI is experimental and subject to change" ); } "x86-interrupt" => { gate_feature_post!( &self, abi_x86_interrupt, span, "x86-interrupt ABI is experimental and subject to change" ); } "amdgpu-kernel" => { gate_feature_post!( &self, abi_amdgpu_kernel, span, "amdgpu-kernel ABI is experimental and subject to change" ); } "efiapi" => { gate_feature_post!( &self, abi_efiapi, span, "efiapi ABI is experimental and subject to change" ); } abi => self .parse_sess .span_diagnostic .delay_span_bug(span, &format!("unrecognized ABI not caught in lowering: {}", abi)), } } fn check_extern(&self, ext: ast::Extern) { if let ast::Extern::Explicit(abi) = ext { self.check_abi(abi); } } fn maybe_report_invalid_custom_discriminants(&self, variants: &[ast::Variant]) { let has_fields = variants.iter().any(|variant| match variant.data { VariantData::Tuple(..) | VariantData::Struct(..) 
=> true, VariantData::Unit(..) => false, }); let discriminant_spans = variants .iter() .filter(|variant| match variant.data { VariantData::Tuple(..) | VariantData::Struct(..) => false, VariantData::Unit(..) => true, }) .filter_map(|variant| variant.disr_expr.as_ref().map(|c| c.value.span)) .collect::<Vec<_>>(); if !discriminant_spans.is_empty() && has_fields { let mut err = feature_err( self.parse_sess, sym::arbitrary_enum_discriminant, discriminant_spans.clone(), "custom discriminant values are not allowed in enums with tuple or struct variants", ); for sp in discriminant_spans { err.span_label(sp, "disallowed custom discriminant"); } for variant in variants.iter() { match &variant.data { VariantData::Struct(..) => { err.span_label(variant.span, "struct variant defined here"); } VariantData::Tuple(..) => { err.span_label(variant.span, "tuple variant defined here"); } VariantData::Unit(..) => {} } } err.emit(); } } fn check_gat(&self, generics: &ast::Generics, span: Span) { if !generics.params.is_empty() { gate_feature_post!( &self, generic_associated_types, span, "generic associated types are unstable" ); } if !generics.where_clause.predicates.is_empty() { gate_feature_post!( &self, generic_associated_types, span, "where clauses on associated types are unstable" ); } } /// Feature gate `impl Trait` inside `type Alias = $type_expr;`. fn check_impl_trait(&self, ty: &ast::Ty) { struct ImplTraitVisitor<'a> { vis: &'a PostExpansionVisitor<'a>, } impl Visitor<'_> for ImplTraitVisitor<'_> { fn visit_ty(&mut self, ty: &ast::Ty) { if let ast::TyKind::ImplTrait(..) 
= ty.kind { gate_feature_post!( &self.vis, type_alias_impl_trait, ty.span, "`impl Trait` in type aliases is unstable" ); } visit::walk_ty(self, ty); } } ImplTraitVisitor { vis: self }.visit_ty(ty); } } impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { fn visit_attribute(&mut self, attr: &ast::Attribute) { let attr_info = attr.ident().and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name)).map(|a| **a); // Check feature gates for built-in attributes. if let Some((.., AttributeGate::Gated(_, name, descr, has_feature))) = attr_info { gate_feature_fn!(self, has_feature, attr.span, name, descr); } // Check unstable flavors of the `#[doc]` attribute. if attr.check_name(sym::doc) { for nested_meta in attr.meta_item_list().unwrap_or_default() { macro_rules! gate_doc { ($($name:ident => $feature:ident)*) => { $(if nested_meta.check_name(sym::$name) { let msg = concat!("`#[doc(", stringify!($name), ")]` is experimental"); gate_feature_post!(self, $feature, attr.span, msg); })* }} gate_doc!( include => external_doc cfg => doc_cfg masked => doc_masked spotlight => doc_spotlight alias => doc_alias keyword => doc_keyword ); } } } fn visit_name(&mut self, sp: Span, name: ast::Name) { if !name.as_str().is_ascii() { gate_feature_post!( &self, non_ascii_idents, self.parse_sess.source_map().def_span(sp), "non-ascii idents are not fully supported" ); } } fn visit_item(&mut self, i: &'a ast::Item) { match i.kind { ast::ItemKind::ForeignMod(ref foreign_module) => { if let Some(abi) = foreign_module.abi { self.check_abi(abi); } } ast::ItemKind::Fn(..) 
=> { if attr::contains_name(&i.attrs[..], sym::plugin_registrar) { gate_feature_post!( &self, plugin_registrar, i.span, "compiler plugins are experimental and possibly buggy" ); } if attr::contains_name(&i.attrs[..], sym::start) { gate_feature_post!( &self, start, i.span, "`#[start]` functions are experimental \ and their signature may change \ over time" ); } if attr::contains_name(&i.attrs[..], sym::main) { gate_feature_post!( &self, main, i.span, "declaration of a non-standard `#[main]` \ function may change over time, for now \ a top-level `fn main()` is required" ); } } ast::ItemKind::Struct(..) => { for attr in attr::filter_by_name(&i.attrs[..], sym::repr) { for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name(sym::simd) { gate_feature_post!( &self, repr_simd, attr.span, "SIMD types are experimental and possibly buggy" ); } } } } ast::ItemKind::Enum(ast::EnumDef { ref variants, .. }, ..) => { for variant in variants { match (&variant.data, &variant.disr_expr) { (ast::VariantData::Unit(..), _) => {} (_, Some(disr_expr)) => gate_feature_post!( &self, arbitrary_enum_discriminant, disr_expr.value.span, "discriminants on non-unit variants are experimental" ), _ => {} } } let has_feature = self.features.arbitrary_enum_discriminant; if !has_feature && !i.span.allows_unstable(sym::arbitrary_enum_discriminant) { self.maybe_report_invalid_custom_discriminants(&variants); } } ast::ItemKind::Impl(_, polarity, defaultness, ..) => { if polarity == ast::ImplPolarity::Negative { gate_feature_post!( &self, optin_builtin_traits, i.span, "negative trait bounds are not yet fully implemented; \ use marker types for now" ); } if let ast::Defaultness::Default = defaultness { gate_feature_post!(&self, specialization, i.span, "specialization is unstable"); } } ast::ItemKind::Trait(ast::IsAuto::Yes, ..) => { gate_feature_post!( &self, optin_builtin_traits, i.span, "auto traits are experimental and possibly buggy" ); } ast::ItemKind::TraitAlias(..) 
=> { gate_feature_post!(&self, trait_alias, i.span, "trait aliases are experimental"); } ast::ItemKind::MacroDef(ast::MacroDef { legacy: false, .. }) => { let msg = "`macro` is experimental"; gate_feature_post!(&self, decl_macro, i.span, msg); } ast::ItemKind::TyAlias(ref ty, ..) => self.check_impl_trait(&ty), _ => {} } visit::walk_item(self, i); } fn visit_foreign_item(&mut self, i: &'a ast::ForeignItem) { match i.kind { ast::ForeignItemKind::Fn(..) | ast::ForeignItemKind::Static(..) => { let link_name = attr::first_attr_value_str_by_name(&i.attrs, sym::link_name); let links_to_llvm = match link_name { Some(val) => val.as_str().starts_with("llvm."), _ => false, }; if links_to_llvm { gate_feature_post!( &self, link_llvm_intrinsics, i.span, "linking to LLVM intrinsics is experimental" ); } } ast::ForeignItemKind::Ty => { gate_feature_post!(&self, extern_types, i.span, "extern types are experimental"); } ast::ForeignItemKind::Macro(..) => {} } visit::walk_foreign_item(self, i) } fn visit_ty(&mut self, ty: &'a ast::Ty) { match ty.kind { ast::TyKind::BareFn(ref bare_fn_ty) => { self.check_extern(bare_fn_ty.ext); } ast::TyKind::Never => { gate_feature_post!(&self, never_type, ty.span, "the `!` type is experimental"); } _ => {} } visit::walk_ty(self, ty) } fn visit_fn_ret_ty(&mut self, ret_ty: &'a ast::FunctionRetTy) { if let ast::FunctionRetTy::Ty(ref output_ty) = *ret_ty { if let ast::TyKind::Never = output_ty.kind { // Do nothing. } else { self.visit_ty(output_ty) } } } fn visit_expr(&mut self, e: &'a ast::Expr) { match e.kind { ast::ExprKind::Box(_) => { gate_feature_post!( &self, box_syntax, e.span, "box expression syntax is experimental; you can call `Box::new` instead" ); } ast::ExprKind::Type(..) => { // To avoid noise about type ascription in common syntax errors, only emit if it // is the *only* error. 
if self.parse_sess.span_diagnostic.err_count() == 0 { gate_feature_post!( &self, type_ascription, e.span, "type ascription is experimental" ); } } ast::ExprKind::TryBlock(_) => { gate_feature_post!(&self, try_blocks, e.span, "`try` expression is experimental"); } ast::ExprKind::Block(_, opt_label) => { if let Some(label) = opt_label { gate_feature_post!( &self, label_break_value, label.ident.span, "labels on blocks are unstable" ); } } _ => {} } visit::walk_expr(self, e) } fn visit_arm(&mut self, arm: &'a ast::Arm) { visit::walk_arm(self, arm) } fn visit_pat(&mut self, pattern: &'a ast::Pat) { match &pattern.kind { PatKind::Slice(pats) => { for pat in &*pats { let span = pat.span; let inner_pat = match &pat.kind { PatKind::Ident(.., Some(pat)) => pat, _ => pat, }; if inner_pat.is_rest() { gate_feature_post!( &self, slice_patterns, span, "subslice patterns are unstable" ); } } } PatKind::Box(..) => { gate_feature_post!( &self, box_patterns, pattern.span, "box pattern syntax is experimental" ); } PatKind::Range(_, _, Spanned { node: RangeEnd::Excluded, .. }) => { gate_feature_post!( &self, exclusive_range_pattern, pattern.span, "exclusive range pattern syntax is experimental" ); } _ => {} } visit::walk_pat(self, pattern) } fn visit_fn( &mut self, fn_kind: FnKind<'a>, fn_decl: &'a ast::FnDecl, span: Span, _node_id: NodeId, ) { if let Some(header) = fn_kind.header() { // Stability of const fn methods are covered in // `visit_trait_item` and `visit_impl_item` below; this is // because default methods don't pass through this point. self.check_extern(header.ext); } if fn_decl.c_variadic() { gate_feature_post!(&self, c_variadic, span, "C-variadic functions are unstable"); } visit::walk_fn(self, fn_kind, fn_decl, span) } fn visit_generic_param(&mut self, param: &'a GenericParam) { match param.kind { GenericParamKind::Const { .. 
} => gate_feature_post!( &self, const_generics, param.ident.span, "const generics are unstable" ), _ => {} } visit::walk_generic_param(self, param) } fn visit_assoc_ty_constraint(&mut self, constraint: &'a AssocTyConstraint) { match constraint.kind { AssocTyConstraintKind::Bound { .. } => gate_feature_post!( &self, associated_type_bounds, constraint.span, "associated type bounds are unstable" ), _ => {} } visit::walk_assoc_ty_constraint(self, constraint) } fn visit_trait_item(&mut self, ti: &'a ast::AssocItem) { match ti.kind { ast::AssocItemKind::Fn(ref sig, ref block) => { if block.is_none() { self.check_extern(sig.header.ext); } if sig.header.constness.node == ast::Constness::Const { gate_feature_post!(&self, const_fn, ti.span, "const fn is unstable"); } } ast::AssocItemKind::TyAlias(_, ref default) => { if let Some(_) = default { gate_feature_post!( &self, associated_type_defaults, ti.span, "associated type defaults are unstable" ); } } _ => {} } visit::walk_trait_item(self, ti) } fn visit_assoc_item(&mut self, ii: &'a ast::AssocItem) { if ii.defaultness == ast::Defaultness::Default { gate_feature_post!(&self, specialization, ii.span, "specialization is unstable"); } match ii.kind { ast::AssocItemKind::Fn(ref sig, _) => { if sig.decl.c_variadic() { gate_feature_post!( &self, c_variadic, ii.span, "C-variadic functions are unstable" ); } } ast::AssocItemKind::TyAlias(_, ref ty) => { if let Some(ty) = ty { self.check_impl_trait(ty); } self.check_gat(&ii.generics, ii.span); } _ => {} } visit::walk_assoc_item(self, ii) } fn visit_vis(&mut self, vis: &'a ast::Visibility) { if let ast::VisibilityKind::Crate(ast::CrateSugar::JustCrate) = vis.node { gate_feature_post!( &self, crate_visibility_modifier, vis.span, "`crate` visibility modifier is experimental" ); } visit::walk_vis(self, vis) } } pub fn check_crate( krate: &ast::Crate, parse_sess: &ParseSess, features: &Features, unstable: UnstableFeatures, ) { maybe_stage_features(&parse_sess.span_diagnostic, krate, 
unstable); let mut visitor = PostExpansionVisitor { parse_sess, features }; let spans = parse_sess.gated_spans.spans.borrow(); macro_rules! gate_all { ($gate:ident, $msg:literal) => { for span in spans.get(&sym::$gate).unwrap_or(&vec![]) { gate_feature_post!(&visitor, $gate, *span, $msg); } }; } gate_all!(let_chains, "`let` expressions in this position are experimental"); gate_all!(async_closure, "async closures are unstable"); gate_all!(generators, "yield syntax is experimental"); gate_all!(or_patterns, "or-patterns syntax is experimental"); gate_all!(const_extern_fn, "`const extern fn` definitions are unstable"); gate_all!(raw_ref_op, "raw address of syntax is experimental"); gate_all!(const_trait_bound_opt_out, "`?const` on trait bounds is experimental"); gate_all!(const_trait_impl, "const trait impls are experimental"); gate_all!(half_open_range_patterns, "half-open range patterns are unstable"); // All uses of `gate_all!` below this point were added in #65742, // and subsequently disabled (with the non-early gating readded). macro_rules! gate_all { ($gate:ident, $msg:literal) => { // FIXME(eddyb) do something more useful than always // disabling these uses of early feature-gatings. 
if false { for span in spans.get(&sym::$gate).unwrap_or(&vec![]) { gate_feature_post!(&visitor, $gate, *span, $msg); } } }; } gate_all!(trait_alias, "trait aliases are experimental"); gate_all!(associated_type_bounds, "associated type bounds are unstable"); gate_all!(crate_visibility_modifier, "`crate` visibility modifier is experimental"); gate_all!(const_generics, "const generics are unstable"); gate_all!(decl_macro, "`macro` is experimental"); gate_all!(box_patterns, "box pattern syntax is experimental"); gate_all!(exclusive_range_pattern, "exclusive range pattern syntax is experimental"); gate_all!(try_blocks, "`try` blocks are unstable"); gate_all!(label_break_value, "labels on blocks are unstable"); gate_all!(box_syntax, "box expression syntax is experimental; you can call `Box::new` instead"); // To avoid noise about type ascription in common syntax errors, // only emit if it is the *only* error. (Also check it last.) if parse_sess.span_diagnostic.err_count() == 0 { gate_all!(type_ascription, "type ascription is experimental"); } visit::walk_crate(&mut visitor, krate); } fn maybe_stage_features(span_handler: &Handler, krate: &ast::Crate, unstable: UnstableFeatures) { if !unstable.is_nightly_build() { for attr in krate.attrs.iter().filter(|attr| attr.check_name(sym::feature)) { struct_span_err!( span_handler, attr.span, E0554, "`#![feature]` may not be used on the {} release channel", option_env!("CFG_RELEASE_CHANNEL").unwrap_or("(unknown)") ) .emit(); } } }
36.921764
100
0.47384
d6b043d5e297d2c49068444e6f6e795c0a57ea78
5,096
#![feature(str_split_once)] #[macro_use] extern crate lazy_static; #[macro_use] extern crate prometheus; #[macro_use] extern crate log; use clap::Clap; use hyper::{ header::CONTENT_TYPE, server::conn::AddrStream, service::{make_service_fn, service_fn}, Body, Request, Response, Server, StatusCode, }; use prometheus::{Encoder, HistogramVec, TextEncoder, TEXT_FORMAT}; use std::convert::Infallible; use std::env; use std::panic; use elasticsearch_exporter::{Exporter, ExporterOptions}; lazy_static! { static ref HTTP_REQ_HISTOGRAM: HistogramVec = register_histogram_vec!( "http_request_duration_seconds", "The HTTP request latencies in seconds.", &["handler"] ) .expect("valid histogram vec metric"); } fn build_response(status: StatusCode, body: Body) -> Response<Body> { Response::builder() .status(status) .header(CONTENT_TYPE, TEXT_FORMAT) .body(body) .expect("valid Response built") } async fn serve_req(req: Request<Body>, options: String) -> Result<Response<Body>, Infallible> { let path = req.uri().path(); let timer = HTTP_REQ_HISTOGRAM.with_label_values(&[path]).start_timer(); let response = match path { "/health" | "/healthy" | "/healthz" => build_response(StatusCode::OK, Body::from("Ok")), "/" => build_response(StatusCode::OK, Body::from(options.to_string())), "/metrics" => { let encoder = TextEncoder::new(); let mut buffer = vec![]; match encoder.encode(&prometheus::gather(), &mut buffer) { Ok(_) => build_response(StatusCode::OK, Body::from(buffer)), Err(e) => { error!("prometheus encoder err {}", e); build_response(StatusCode::INTERNAL_SERVER_ERROR, Body::empty()) } } } _ => build_response( StatusCode::NOT_FOUND, Body::from(format!("Path {} not found", path)), ), }; timer.observe_duration(); Ok(response) } /// Setup panic hook pub fn panic_hook() { panic::set_hook(Box::new(|err| { eprintln!("Panic error {:?}, exiting program.", err); std::process::exit(70); })); } mod cli; use cli::{signal_channel, Opts}; #[tokio::main] async fn main() -> Result<(), Box<dyn 
std::error::Error>> { panic_hook(); if env::var_os("RUST_LOG").is_none() { env::set_var("RUST_LOG", "info,elasticsearch_exporter=debug"); } pretty_env_logger::init(); let mut opts = Opts::parse(); if let Ok(Ok(port)) = env::var("PORT").map(|p| p.parse::<u16>()) { opts.listen_addr.set_port(port); } let options = ExporterOptions { elasticsearch_url: opts.elasticsearch_url.clone(), elasticsearch_global_timeout: *opts.elasticsearch_global_timeout, elasticsearch_query_fields: opts.elasticsearch_query_fields.0.clone(), elasticsearch_subsystem_timeouts: opts.elasticsearch_subsystem_timeouts.0.clone(), elasticsearch_path_parameters: opts.elasticsearch_path_parameters.0.clone(), exporter_skip_labels: opts.exporter_skip_labels.0.clone(), exporter_skip_metrics: opts.exporter_skip_metrics.0.clone(), exporter_include_labels: opts.exporter_include_labels.0.clone(), exporter_poll_default_interval: *opts.exporter_poll_default_interval, exporter_skip_zero_metrics: !opts.exporter_allow_zero_metrics, exporter_poll_intervals: opts.exporter_poll_intervals.0.clone(), exporter_metrics_enabled: opts.exporter_metrics_enabled.0.clone(), exporter_metadata_refresh_interval: *opts.exporter_metadata_refresh_interval, }; info!("{}", options); let options_clone = options.clone(); let new_service = make_service_fn(move |socket: &AddrStream| { let options_string = options_clone.to_string(); let svc = service_fn(move |req| serve_req(req, options_string.clone())); trace!("incoming socket request: {:?}", socket); async move { Ok::<_, Infallible>(svc) } }); let signal_rx = signal_channel(); match Exporter::new(options).await { Ok(exporter) => { let _ = tokio::spawn(exporter.spawn()); } Err(e) => { error!("{}", e); std::process::exit(70); } } info!("Listening on http://{}", opts.listen_addr); Server::bind(&opts.listen_addr) // TCP .tcp_keepalive(Some(*opts.hyper_tcp_keepalive)) .tcp_nodelay(true) // HTTP 1 .http1_keepalive(true) .http1_half_close(false) .http1_max_buf_size(opts.hyper_http1_max_buf_size) 
// HTTP 2 .http2_keep_alive_interval(*opts.hyper_tcp_keepalive) .http2_keep_alive_timeout(*opts.hyper_http2_keep_alive_timeout) .http2_adaptive_window(true) .serve(new_service) .with_graceful_shutdown(async move { signal_rx.await.ok(); info!("Graceful context shutdown"); }) .await?; Ok(()) }
31.073171
96
0.632653
284b231507db601807c9bd30646c3cc310fb4b0c
6,918
use crate::util::{self, logger::Logger}; use libcnb::{build::GenericBuildContext, layer::Layer}; use std::{convert::TryFrom, fs, path::Path, process::Command}; pub const RUNTIME_JAR_FILE_NAME: &str = "runtime.jar"; pub struct Builder<'a, 'b> { logger: &'b Logger, ctx: &'a GenericBuildContext, } impl<'a, 'b> Builder<'a, 'b> { pub fn new(ctx: &'a GenericBuildContext, logger: &'b Logger) -> anyhow::Result<Self> { Ok(Builder { ctx, logger }) } pub fn contribute_opt_layer(&self) -> anyhow::Result<Layer> { let mut layer = self.ctx.layer("opt")?; let mut content_metadata = layer.mut_content_metadata(); content_metadata.launch = true; content_metadata.build = true; content_metadata.cache = false; layer.write_content_metadata()?; let contents = include_str!("../opt/run.sh"); let run_sh_path = layer.as_path().join("run.sh"); fs::write(&run_sh_path, contents)?; #[cfg(target_family = "unix")] set_executable(&run_sh_path)?; Ok(layer) } pub fn contribute_runtime_layer(&self) -> anyhow::Result<Layer> { self.logger.header("Installing Java function runtime")?; let mut runtime_layer = self.ctx.layer("sf-fx-runtime-java")?; let buildpack_toml: libcnb::data::buildpack::BuildpackToml = toml::from_str( &fs::read_to_string(self.ctx.buildpack_dir.join("buildpack.toml"))?, )?; let buildpack_toml_metadata = crate::data::buildpack_toml::Metadata::try_from(&buildpack_toml.metadata)?; let runtime_layer_metadata = crate::data::Runtime::from_runtime_layer(&runtime_layer.content_metadata().metadata); let runtime_jar_path = runtime_layer.as_path().join(RUNTIME_JAR_FILE_NAME); if buildpack_toml_metadata.runtime.sha256 == runtime_layer_metadata.sha256 && runtime_jar_path.exists() { self.logger .info("Installed Java function runtime from cache")?; } else { self.logger.debug("Creating function runtime layer")?; let mut content_metadata = runtime_layer.mut_content_metadata(); content_metadata.launch = true; content_metadata.build = false; content_metadata.cache = true; content_metadata.metadata.insert( 
String::from("runtime_jar_url"), toml::Value::String(buildpack_toml_metadata.runtime.url.clone()), ); content_metadata.metadata.insert( String::from("runtime_jar_sha256"), toml::Value::String(buildpack_toml_metadata.runtime.sha256.clone()), ); runtime_layer.write_content_metadata()?; self.logger .debug("Function runtime layer successfully created")?; self.logger.info("Starting download of function runtime")?; util::download(&buildpack_toml_metadata.runtime.url, &runtime_jar_path).map_err(|_| { self.logger.error("Download of function runtime failed", format!(r#" We couldn't download the function runtime at {}. This is usually caused by intermittent network issues. Please try again and contact us should the error persist. "#, buildpack_toml_metadata.runtime.url)).unwrap_err() })?; self.logger.info("Function runtime download successful")?; if buildpack_toml_metadata.runtime.sha256 != util::sha256(&fs::read(&runtime_jar_path)?) { self.logger.error( "Function runtime integrity check failed", r#" We could not verify the integrity of the downloaded function runtime. Please try again and contact us should the error persist. "#, )?; } self.logger .info("Function runtime installation successful")?; } Ok(runtime_layer) } pub fn contribute_function_bundle_layer( &self, runtime_jar_path: impl AsRef<Path>, ) -> anyhow::Result<Layer> { self.logger.header("Detecting function")?; let mut function_bundle_layer = self.ctx.layer("function-bundle")?; let mut content_metadata = function_bundle_layer.mut_content_metadata(); content_metadata.launch = true; content_metadata.build = false; content_metadata.cache = false; function_bundle_layer.write_content_metadata()?; let exit_status = Command::new("java") .arg("-jar") .arg(runtime_jar_path.as_ref()) .arg("bundle") .arg(&self.ctx.app_dir) .spawn()? 
.wait()?; if let Some(code) = exit_status.code() { match code { 0 => { self.logger.info("Detection successful")?; Ok(()) } 1 => self.logger.error( "No functions found", r#" Your project does not seem to contain any Java functions. The output above might contain information about issues with your function. "#, ), 2 => self.logger.error( "Multiple functions found", r#" Your project contains multiple Java functions. Currently, only projects that contain exactly one (1) function are supported. "#, ), 3..=6 => self.logger.error( "Detection failed", format!( r#"Function detection failed with internal error "{}""#, code ), ), _ => self.logger.error( "Detection failed", format!( r#" Function detection failed with unexpected error code {}. The output above might contain hints what caused this error to happen. "#, code ), ), }?; } let function_bundle_toml: crate::data::function_bundle::Toml = toml::from_slice( &fs::read(&function_bundle_layer.as_path().join("function-bundle.toml"))?, )?; self.logger.header(format!( "Detected function: {}", function_bundle_toml.function.class ))?; self.logger.info(format!( "Payload type: {}", function_bundle_toml.function.payload_class ))?; self.logger.info(format!( "Return type: {}", function_bundle_toml.function.return_class ))?; Ok(function_bundle_layer) } } #[cfg(target_family = "unix")] fn set_executable(path: impl AsRef<Path>) -> anyhow::Result<()> { use std::os::unix::fs::OpenOptionsExt; fs::OpenOptions::new().mode(0o775).open(path)?; Ok(()) }
36.603175
112
0.579069
e69734ed88c0096f98700ffca37a96e0d8ff7fdf
12,049
use std::collections::hash_map::{Occupied, Vacant}; use std::hash::{Hash, Hasher}; use std::hash::sip::SipHasher; use std::io::{fs, File, USER_RWX, BufferedReader}; use core::{Package, Target}; use util; use util::{CargoResult, Fresh, Dirty, Freshness, internal, Require, profile}; use super::{Kind, KindTarget}; use super::job::Work; use super::context::Context; /// A tuple result of the `prepare_foo` functions in this module. /// /// The first element of the triple is whether the target in question is /// currently fresh or not, and the second two elements are work to perform when /// the target is dirty or fresh, respectively. /// /// Both units of work are always generated because a fresh package may still be /// rebuilt if some upstream dependency changes. pub type Preparation = (Freshness, Work, Work); /// Prepare the necessary work for the fingerprint for a specific target. /// /// When dealing with fingerprints, cargo gets to choose what granularity /// "freshness" is considered at. One option is considering freshness at the /// package level. This means that if anything in a package changes, the entire /// package is rebuilt, unconditionally. This simplicity comes at a cost, /// however, in that test-only changes will cause libraries to be rebuilt, which /// is quite unfortunate! /// /// The cost was deemed high enough that fingerprints are now calculated at the /// layer of a target rather than a package. Each target can then be kept track /// of separately and only rebuilt as necessary. This requires cargo to /// understand what the inputs are to a target, so we drive rustc with the /// --dep-info flag to learn about all input files to a unit of compilation. /// /// This function will calculate the fingerprint for a target and prepare the /// work necessary to either write the fingerprint or copy over all fresh files /// from the old directories to their new locations. 
pub fn prepare_target(cx: &mut Context, pkg: &Package, target: &Target, kind: Kind) -> CargoResult<Preparation> { let _p = profile::start(format!("fingerprint: {} / {}", pkg.get_package_id(), target)); let (old, new) = dirs(cx, pkg, kind); let filename = filename(target); let old_loc = old.join(filename.as_slice()); let new_loc = new.join(filename.as_slice()); // We want to use the package fingerprint if we're either a doc target or a // path source. If we're a git/registry source, then the mtime of files may // fluctuate, but they won't change so long as the source itself remains // constant (which is the responsibility of the source) let use_pkg = { let doc = target.get_profile().is_doc(); let path = pkg.get_summary().get_source_id().is_path(); doc || !path }; info!("fingerprint at: {}", new_loc.display()); // First bit of the freshness calculation, whether the dep-info file // indicates that the target is fresh. let (old_dep_info, new_dep_info) = dep_info_loc(cx, pkg, target, kind); let are_files_fresh = use_pkg || try!(calculate_target_fresh(pkg, &old_dep_info)); // Second bit of the freshness calculation, whether rustc itself, the // target are fresh, and the enabled set of features are all fresh. 
let features = cx.resolve.features(pkg.get_package_id()); let features = features.map(|s| { let mut v = s.iter().collect::<Vec<&String>>(); v.sort(); v }); let rustc_fingerprint = if use_pkg { mk_fingerprint(cx, &(target, try!(calculate_pkg_fingerprint(cx, pkg)), features)) } else { mk_fingerprint(cx, &(target, features)) }; let is_rustc_fresh = try!(is_fresh(&old_loc, rustc_fingerprint.as_slice())); let (old_root, root) = { let layout = cx.layout(pkg, kind); if target.get_profile().is_custom_build() { (layout.old_build(pkg), layout.build(pkg)) } else if target.is_example() { (layout.old_examples().clone(), layout.examples().clone()) } else { (layout.old_root().clone(), layout.root().clone()) } }; let mut pairs = vec![(old_loc, new_loc.clone())]; if !target.get_profile().is_doc() { pairs.push((old_dep_info, new_dep_info)); for filename in try!(cx.target_filenames(target)).iter() { let filename = filename.as_slice(); let dst = root.join(filename); pairs.push((old_root.join(filename), root.join(filename))); if target.get_profile().is_test() { cx.compilation.tests.push((target.get_name().into_string(), dst.clone())); } else if target.is_bin() { cx.compilation.binaries.push(dst.clone()); } else if target.is_lib() { let pkgid = pkg.get_package_id().clone(); match cx.compilation.libraries.entry(pkgid) { Occupied(entry) => entry.into_mut(), Vacant(entry) => entry.set(Vec::new()), }.push(root.join(filename)); } } } Ok(prepare(is_rustc_fresh && are_files_fresh, new_loc, rustc_fingerprint, pairs)) } /// Prepare the necessary work for the fingerprint of a build command. /// /// Build commands are located on packages, not on targets. Additionally, we /// don't have --dep-info to drive calculation of the fingerprint of a build /// command. This brings up an interesting predicament which gives us a few /// options to figure out whether a build command is dirty or not: /// /// 1. A build command is dirty if *any* file in a package changes. 
In theory /// all files are candidate for being used by the build command. /// 2. A build command is dirty if any file in a *specific directory* changes. /// This may lose information as it may require files outside of the specific /// directory. /// 3. A build command must itself provide a dep-info-like file stating how it /// should be considered dirty or not. /// /// The currently implemented solution is option (1), although it is planned to /// migrate to option (2) in the near future. pub fn prepare_build_cmd(cx: &mut Context, pkg: &Package, target: Option<&Target>) -> CargoResult<Preparation> { let _p = profile::start(format!("fingerprint build cmd: {}", pkg.get_package_id())); // TODO: this should not explicitly pass KindTarget let kind = KindTarget; if pkg.get_manifest().get_build().len() == 0 && target.is_none() { return Ok((Fresh, proc(_) Ok(()), proc(_) Ok(()))) } let (old, new) = dirs(cx, pkg, kind); let old_loc = old.join("build"); let new_loc = new.join("build"); info!("fingerprint at: {}", new_loc.display()); let new_fingerprint = try!(calculate_build_cmd_fingerprint(cx, pkg)); let new_fingerprint = mk_fingerprint(cx, &new_fingerprint); let is_fresh = try!(is_fresh(&old_loc, new_fingerprint.as_slice())); let mut pairs = vec![(old_loc, new_loc.clone())]; // The new custom build command infrastructure handles its own output // directory as part of freshness. 
if target.is_none() { let native_dir = cx.layout(pkg, kind).native(pkg); pairs.push((cx.layout(pkg, kind).old_native(pkg), native_dir.clone())); cx.compilation.native_dirs.insert(pkg.get_package_id().clone(), native_dir); } Ok(prepare(is_fresh, new_loc, new_fingerprint, pairs)) } /// Prepare work for when a package starts to build pub fn prepare_init(cx: &mut Context, pkg: &Package, kind: Kind) -> (Work, Work) { let (_, new1) = dirs(cx, pkg, kind); let new2 = new1.clone(); let work1 = proc(_) { try!(fs::mkdir(&new1, USER_RWX)); Ok(()) }; let work2 = proc(_) { try!(fs::mkdir(&new2, USER_RWX)); Ok(()) }; (work1, work2) } /// Given the data to build and write a fingerprint, generate some Work /// instances to actually perform the necessary work. fn prepare(is_fresh: bool, loc: Path, fingerprint: String, to_copy: Vec<(Path, Path)>) -> Preparation { let write_fingerprint = proc(desc_tx) { drop(desc_tx); try!(File::create(&loc).write_str(fingerprint.as_slice())); Ok(()) }; let move_old = proc(desc_tx) { drop(desc_tx); for &(ref src, ref dst) in to_copy.iter() { try!(fs::rename(src, dst)); } Ok(()) }; (if is_fresh {Fresh} else {Dirty}, write_fingerprint, move_old) } /// Return the (old, new) location for fingerprints for a package pub fn dirs(cx: &Context, pkg: &Package, kind: Kind) -> (Path, Path) { let layout = cx.layout(pkg, kind); let layout = layout.proxy(); (layout.old_fingerprint(pkg), layout.fingerprint(pkg)) } /// Returns the (old, new) location for the dep info file of a target. pub fn dep_info_loc(cx: &Context, pkg: &Package, target: &Target, kind: Kind) -> (Path, Path) { let (old, new) = dirs(cx, pkg, kind); let filename = format!("dep-{}", filename(target)); (old.join(filename.as_slice()), new.join(filename)) } fn is_fresh(loc: &Path, new_fingerprint: &str) -> CargoResult<bool> { let mut file = match File::open(loc) { Ok(file) => file, Err(..) 
=> return Ok(false), }; let old_fingerprint = try!(file.read_to_string()); log!(5, "old fingerprint: {}", old_fingerprint); log!(5, "new fingerprint: {}", new_fingerprint); Ok(old_fingerprint.as_slice() == new_fingerprint) } /// Frob in the necessary data from the context to generate the real /// fingerprint. fn mk_fingerprint<T: Hash>(cx: &Context, data: &T) -> String { let hasher = SipHasher::new_with_keys(0,0); util::to_hex(hasher.hash(&(cx.config.rustc_version(), data))) } fn calculate_target_fresh(pkg: &Package, dep_info: &Path) -> CargoResult<bool> { let line = match BufferedReader::new(File::open(dep_info)).lines().next() { Some(Ok(line)) => line, _ => return Ok(false), }; let line = line.as_slice(); let mtime = try!(fs::stat(dep_info)).modified; let pos = try!(line.find_str(": ").require(|| { internal(format!("dep-info not in an understood format: {}", dep_info.display())) })); let deps = line.slice_from(pos + 2); let mut deps = deps.split(' ').map(|s| s.trim()).filter(|s| !s.is_empty()); loop { let mut file = match deps.next() { Some(s) => s.to_string(), None => break, }; while file.as_slice().ends_with("\\") { file.pop(); file.push(' '); file.push_str(deps.next().unwrap()) } match fs::stat(&pkg.get_root().join(file.as_slice())) { Ok(stat) if stat.modified <= mtime => {} Ok(stat) => { info!("stale: {} -- {} vs {}", file, stat.modified, mtime); return Ok(false) } _ => { info!("stale: {} -- missing", file); return Ok(false) } } } Ok(true) } fn calculate_build_cmd_fingerprint(cx: &Context, pkg: &Package) -> CargoResult<String> { // TODO: this should be scoped to just the `build` directory, not the entire // package. 
calculate_pkg_fingerprint(cx, pkg) } fn calculate_pkg_fingerprint(cx: &Context, pkg: &Package) -> CargoResult<String> { let source = cx.sources .get(pkg.get_package_id().get_source_id()) .expect("BUG: Missing package source"); source.fingerprint(pkg) } fn filename(target: &Target) -> String { let kind = if target.is_lib() {"lib"} else {"bin"}; let flavor = if target.get_profile().is_test() { "test-" } else if target.get_profile().is_doc() { "doc-" } else { "" }; format!("{}{}-{}", flavor, kind, target.get_name()) }
39.375817
90
0.620549
38bccf49222ae9fd0283342ae8ccfa0423fd25e2
4,096
//! De/Serialization of JSON //! //! This modules is only available when using the `json` feature of the crate. use crate::de::DeserializeAs; use crate::ser::SerializeAs; use serde::de::DeserializeOwned; use serde::{Deserializer, Serialize, Serializer}; /// Serialize value as string containing JSON /// /// The same functionality is also available as [`serde_with::json::JsonString`][crate::json::JsonString] compatible with the `serde_as`-annotation. /// /// # Examples /// /// ``` /// # use serde_derive::{Deserialize, Serialize}; /// # /// #[derive(Deserialize, Serialize)] /// struct A { /// #[serde(with = "serde_with::json::nested")] /// other_struct: B, /// } /// #[derive(Deserialize, Serialize)] /// struct B { /// value: usize, /// } /// /// let v: A = serde_json::from_str(r#"{"other_struct":"{\"value\":5}"}"#).unwrap(); /// assert_eq!(5, v.other_struct.value); /// /// let x = A { /// other_struct: B { value: 10 }, /// }; /// assert_eq!( /// r#"{"other_struct":"{\"value\":10}"}"#, /// serde_json::to_string(&x).unwrap() /// ); /// ``` pub mod nested { use serde::de::{DeserializeOwned, Deserializer, Error, Visitor}; use serde::ser::{self, Serialize, Serializer}; use std::fmt; use std::marker::PhantomData; /// Deserialize value from a string which is valid JSON pub fn deserialize<'de, D, T>(deserializer: D) -> Result<T, D::Error> where D: Deserializer<'de>, T: DeserializeOwned, { #[derive(Default)] struct Helper<S: DeserializeOwned>(PhantomData<S>); impl<'de, S> Visitor<'de> for Helper<S> where S: DeserializeOwned, { type Value = S; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { write!(formatter, "valid json object") } fn visit_str<E>(self, value: &str) -> Result<S, E> where E: Error, { serde_json::from_str(value).map_err(Error::custom) } } deserializer.deserialize_str(Helper(PhantomData)) } /// Serialize value as string containing JSON /// /// # Errors /// /// Serialization can fail if `T`'s implementation of `Serialize` decides to /// 
fail, or if `T` contains a map with non-string keys. pub fn serialize<T, S>(value: &T, serializer: S) -> Result<S::Ok, S::Error> where T: Serialize, S: Serializer, { let s = serde_json::to_string(value).map_err(ser::Error::custom)?; serializer.serialize_str(&*s) } } /// Serialize value as string containing JSON /// /// The same functionality is also available as [`serde_with::json::nested`][crate::json::nested] compatible with serde's with-annotation. /// /// # Examples /// /// ``` /// # #[cfg(feature = "macros")] { /// # use serde_derive::{Deserialize, Serialize}; /// # use serde_with::{serde_as, json::JsonString}; /// # /// #[serde_as] /// #[derive(Deserialize, Serialize)] /// struct A { /// #[serde_as(as = "JsonString")] /// other_struct: B, /// } /// #[derive(Deserialize, Serialize)] /// struct B { /// value: usize, /// } /// /// let v: A = serde_json::from_str(r#"{"other_struct":"{\"value\":5}"}"#).unwrap(); /// assert_eq!(5, v.other_struct.value); /// /// let x = A { /// other_struct: B { value: 10 }, /// }; /// assert_eq!( /// r#"{"other_struct":"{\"value\":10}"}"#, /// serde_json::to_string(&x).unwrap() /// ); /// # } /// ``` #[derive(Copy, Clone, Debug, Default)] pub struct JsonString; impl<T> SerializeAs<T> for JsonString where T: Serialize, { fn serialize_as<S>(source: &T, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { crate::json::nested::serialize(source, serializer) } } impl<'de, T> DeserializeAs<'de, T> for JsonString where T: DeserializeOwned, { fn deserialize_as<D>(deserializer: D) -> Result<T, D::Error> where D: Deserializer<'de>, { crate::json::nested::deserialize(deserializer) } }
26.947368
148
0.580566
ebb187f71026736625291da21715973362915078
62,570
//! Everything in this module is pending to be refactored, turned into idiomatic-rust, and moved to //! other modules. #![allow( dead_code, mutable_transmutes, non_camel_case_types, non_snake_case, non_upper_case_globals, unused_mut, unused_assignments, unused_variables )] use std::ptr::null_mut; use glam::Vec3; use crate::{face_vert_to_index, get_normal, get_position, get_tex_coord, Geometry}; #[derive(Copy, Clone)] pub struct STSpace { pub vOs: Vec3, pub fMagS: f32, pub vOt: Vec3, pub fMagT: f32, pub iCounter: i32, pub bOrient: bool, } impl STSpace { pub fn zero() -> Self { Self { vOs: Vec3::ZERO, fMagS: 0.0, vOt: Vec3::ZERO, fMagT: 0.0, iCounter: 0, bOrient: false, } } } // To avoid visual errors (distortions/unwanted hard edges in lighting), when using sampled normal maps, the // normal map sampler must use the exact inverse of the pixel shader transformation. // The most efficient transformation we can possibly do in the pixel shader is // achieved by using, directly, the "unnormalized" interpolated tangent, bitangent and vertex normal: vT, vB and vN. // pixel shader (fast transform out) // vNout = normalize( vNt.x * vT + vNt.y * vB + vNt.z * vN ); // where vNt is the tangent space normal. The normal map sampler must likewise use the // interpolated and "unnormalized" tangent, bitangent and vertex normal to be compliant with the pixel shader. // sampler does (exact inverse of pixel shader): // float3 row0 = cross(vB, vN); // float3 row1 = cross(vN, vT); // float3 row2 = cross(vT, vB); // float fSign = dot(vT, row0)<0 ? -1 : 1; // vNt = normalize( fSign * float3(dot(vNout,row0), dot(vNout,row1), dot(vNout,row2)) ); // where vNout is the sampled normal in some chosen 3D space. // // Should you choose to reconstruct the bitangent in the pixel shader instead // of the vertex shader, as explained earlier, then be sure to do this in the normal map sampler also. // Finally, beware of quad triangulations. 
If the normal map sampler doesn't use the same triangulation of // quads as your renderer then problems will occur since the interpolated tangent spaces will differ // eventhough the vertex level tangent spaces match. This can be solved either by triangulating before // sampling/exporting or by using the order-independent choice of diagonal for splitting quads suggested earlier. // However, this must be used both by the sampler and your tools/rendering pipeline. // internal structure #[derive(Copy, Clone)] pub struct STriInfo { pub FaceNeighbors: [i32; 3], pub AssignedGroup: [*mut SGroup; 3], pub vOs: Vec3, pub vOt: Vec3, pub fMagS: f32, pub fMagT: f32, pub iOrgFaceNumber: i32, pub iFlag: i32, pub iTSpacesOffs: i32, pub vert_num: [u8; 4], } impl STriInfo { fn zero() -> Self { Self { FaceNeighbors: [0, 0, 0], AssignedGroup: [null_mut(), null_mut(), null_mut()], vOs: Vec3::ZERO, vOt: Vec3::ZERO, fMagS: 0.0, fMagT: 0.0, iOrgFaceNumber: 0, iFlag: 0, iTSpacesOffs: 0, vert_num: [0, 0, 0, 0], } } } #[derive(Copy, Clone)] pub struct SGroup { pub iNrFaces: i32, pub pFaceIndices: *mut i32, pub iVertexRepresentitive: i32, pub bOrientPreservering: bool, } impl SGroup { fn zero() -> Self { Self { iNrFaces: 0, pFaceIndices: null_mut(), iVertexRepresentitive: 0, bOrientPreservering: false, } } } #[derive(Clone)] pub struct SSubGroup { pub iNrFaces: i32, pub pTriMembers: Vec<i32>, } impl SSubGroup { fn zero() -> Self { Self { iNrFaces: 0, pTriMembers: Vec::new(), } } } #[derive(Copy, Clone)] pub union SEdge { pub unnamed: unnamed, pub array: [i32; 3], } impl SEdge { fn zero() -> Self { Self { array: [0, 0, 0] } } } #[derive(Copy, Clone)] pub struct unnamed { pub i0: i32, pub i1: i32, pub f: i32, } #[derive(Copy, Clone)] pub struct STmpVert { pub vert: [f32; 3], pub index: i32, } impl STmpVert { fn zero() -> Self { Self { vert: [0.0, 0.0, 0.0], index: 0, } } } pub unsafe fn genTangSpace<I: Geometry>(geometry: &mut I, fAngularThreshold: f32) -> bool { let mut iNrTrianglesIn = 0; let 
mut f = 0; let mut t = 0; let mut i = 0; let mut iNrTSPaces = 0; let mut iTotTris = 0; let mut iDegenTriangles = 0; let mut iNrMaxGroups = 0; let mut iNrActiveGroups: i32 = 0i32; let mut index = 0; let iNrFaces = geometry.num_faces(); let mut bRes: bool = false; let fThresCos: f32 = ((fAngularThreshold * 3.14159265358979323846f64 as f32 / 180.0f32) as f64).cos() as f32; f = 0; while f < iNrFaces { let verts = geometry.num_vertices_of_face(f); if verts == 3 { iNrTrianglesIn += 1 } else if verts == 4 { iNrTrianglesIn += 2 } f += 1 } if iNrTrianglesIn <= 0 { return false; } let mut piTriListIn = vec![0i32; 3 * iNrTrianglesIn]; let mut pTriInfos = vec![STriInfo::zero(); iNrTrianglesIn]; iNrTSPaces = GenerateInitialVerticesIndexList( &mut pTriInfos, &mut piTriListIn, geometry, iNrTrianglesIn, ); GenerateSharedVerticesIndexList(piTriListIn.as_mut_ptr(), geometry, iNrTrianglesIn); iTotTris = iNrTrianglesIn; iDegenTriangles = 0; t = 0; while t < iTotTris as usize { let i0 = piTriListIn[t * 3 + 0]; let i1 = piTriListIn[t * 3 + 1]; let i2 = piTriListIn[t * 3 + 2]; let p0 = get_position(geometry, i0 as usize); let p1 = get_position(geometry, i1 as usize); let p2 = get_position(geometry, i2 as usize); if p0 == p1 || p0 == p2 || p1 == p2 { pTriInfos[t].iFlag |= 1i32; iDegenTriangles += 1 } t += 1 } iNrTrianglesIn = iTotTris - iDegenTriangles; DegenPrologue( pTriInfos.as_mut_ptr(), piTriListIn.as_mut_ptr(), iNrTrianglesIn as i32, iTotTris as i32, ); InitTriInfo( pTriInfos.as_mut_ptr(), piTriListIn.as_ptr(), geometry, iNrTrianglesIn, ); iNrMaxGroups = iNrTrianglesIn * 3; let mut pGroups = vec![SGroup::zero(); iNrMaxGroups]; let mut piGroupTrianglesBuffer = vec![0; iNrTrianglesIn * 3]; iNrActiveGroups = Build4RuleGroups( pTriInfos.as_mut_ptr(), pGroups.as_mut_ptr(), piGroupTrianglesBuffer.as_mut_ptr(), piTriListIn.as_ptr(), iNrTrianglesIn as i32, ); let mut psTspace = vec![ STSpace { vOs: Vec3::new(1.0, 0.0, 0.0), fMagS: 1.0, vOt: Vec3::new(0.0, 1.0, 0.0), fMagT: 1.0, 
..STSpace::zero() }; iNrTSPaces ]; bRes = GenerateTSpaces( &mut psTspace, pTriInfos.as_ptr(), pGroups.as_ptr(), iNrActiveGroups, piTriListIn.as_ptr(), fThresCos, geometry, ); if !bRes { return false; } DegenEpilogue( psTspace.as_mut_ptr(), pTriInfos.as_mut_ptr(), piTriListIn.as_mut_ptr(), geometry, iNrTrianglesIn as i32, iTotTris as i32, ); index = 0; f = 0; while f < iNrFaces { let verts_0 = geometry.num_vertices_of_face(f); if !(verts_0 != 3 && verts_0 != 4) { i = 0; while i < verts_0 { let mut pTSpace: *const STSpace = &mut psTspace[index] as *mut STSpace; let mut tang = Vec3::new((*pTSpace).vOs.x, (*pTSpace).vOs.y, (*pTSpace).vOs.z); let mut bitang = Vec3::new((*pTSpace).vOt.x, (*pTSpace).vOt.y, (*pTSpace).vOt.z); geometry.set_tangent( tang.into(), bitang.into(), (*pTSpace).fMagS, (*pTSpace).fMagT, (*pTSpace).bOrient, f, i, ); index += 1; i += 1 } } f += 1 } return true; } unsafe fn DegenEpilogue<I: Geometry>( mut psTspace: *mut STSpace, mut pTriInfos: *mut STriInfo, mut piTriListIn: *mut i32, geometry: &mut I, iNrTrianglesIn: i32, iTotTris: i32, ) { let mut t: i32 = 0i32; let mut i: i32 = 0i32; t = iNrTrianglesIn; while t < iTotTris { let bSkip: bool = if (*pTriInfos.offset(t as isize)).iFlag & 2i32 != 0i32 { true } else { false }; if !bSkip { i = 0i32; while i < 3i32 { let index1: i32 = *piTriListIn.offset((t * 3i32 + i) as isize); let mut bNotFound: bool = true; let mut j: i32 = 0i32; while bNotFound && j < 3i32 * iNrTrianglesIn { let index2: i32 = *piTriListIn.offset(j as isize); if index1 == index2 { bNotFound = false } else { j += 1 } } if !bNotFound { let iTri: i32 = j / 3i32; let iVert: i32 = j % 3i32; let iSrcVert: i32 = (*pTriInfos.offset(iTri as isize)).vert_num[iVert as usize] as i32; let iSrcOffs: i32 = (*pTriInfos.offset(iTri as isize)).iTSpacesOffs; let iDstVert: i32 = (*pTriInfos.offset(t as isize)).vert_num[i as usize] as i32; let iDstOffs: i32 = (*pTriInfos.offset(t as isize)).iTSpacesOffs; *psTspace.offset((iDstOffs + iDstVert) as isize) = 
*psTspace.offset((iSrcOffs + iSrcVert) as isize) } i += 1 } } t += 1 } t = 0i32; while t < iNrTrianglesIn { if (*pTriInfos.offset(t as isize)).iFlag & 2i32 != 0i32 { let mut vDstP = Vec3::new(0.0, 0.0, 0.0); let mut iOrgF: i32 = -1i32; let mut i_0: i32 = 0i32; let mut bNotFound_0: bool = false; let mut pV: *mut u8 = (*pTriInfos.offset(t as isize)).vert_num.as_mut_ptr(); let mut iFlag: i32 = 1i32 << *pV.offset(0isize) as i32 | 1i32 << *pV.offset(1isize) as i32 | 1i32 << *pV.offset(2isize) as i32; let mut iMissingIndex: i32 = 0i32; if iFlag & 2i32 == 0i32 { iMissingIndex = 1i32 } else if iFlag & 4i32 == 0i32 { iMissingIndex = 2i32 } else if iFlag & 8i32 == 0i32 { iMissingIndex = 3i32 } iOrgF = (*pTriInfos.offset(t as isize)).iOrgFaceNumber; vDstP = get_position( geometry, face_vert_to_index(iOrgF as usize, iMissingIndex as usize), ); bNotFound_0 = true; i_0 = 0i32; while bNotFound_0 && i_0 < 3i32 { let iVert_0: i32 = *pV.offset(i_0 as isize) as i32; let vSrcP = get_position( geometry, face_vert_to_index(iOrgF as usize, iVert_0 as usize), ); if vSrcP == vDstP { let iOffs: i32 = (*pTriInfos.offset(t as isize)).iTSpacesOffs; *psTspace.offset((iOffs + iMissingIndex) as isize) = *psTspace.offset((iOffs + iVert_0) as isize); bNotFound_0 = false } else { i_0 += 1 } } } t += 1 } } unsafe fn GenerateTSpaces<I: Geometry>( psTspace: &mut [STSpace], mut pTriInfos: *const STriInfo, mut pGroups: *const SGroup, iNrActiveGroups: i32, mut piTriListIn: *const i32, fThresCos: f32, geometry: &mut I, ) -> bool { let mut iMaxNrFaces: usize = 0; let mut iUniqueTspaces = 0; let mut g: i32 = 0i32; let mut i: i32 = 0i32; g = 0i32; while g < iNrActiveGroups { if iMaxNrFaces < (*pGroups.offset(g as isize)).iNrFaces as usize { iMaxNrFaces = (*pGroups.offset(g as isize)).iNrFaces as usize } g += 1 } if iMaxNrFaces == 0 { return true; } let mut pSubGroupTspace = vec![STSpace::zero(); iMaxNrFaces]; let mut pUniSubGroups = vec![SSubGroup::zero(); iMaxNrFaces]; let mut pTmpMembers = vec![0i32; 
iMaxNrFaces]; iUniqueTspaces = 0; g = 0i32; while g < iNrActiveGroups { let mut pGroup: *const SGroup = &*pGroups.offset(g as isize) as *const SGroup; let mut iUniqueSubGroups = 0; let mut s = 0; i = 0i32; while i < (*pGroup).iNrFaces { let f: i32 = *(*pGroup).pFaceIndices.offset(i as isize); let mut index: i32 = -1i32; let mut iVertIndex: i32 = -1i32; let mut iOF_1: i32 = -1i32; let mut iMembers: usize = 0; let mut j: i32 = 0i32; let mut l: usize = 0; let mut tmp_group: SSubGroup = SSubGroup { iNrFaces: 0, pTriMembers: Vec::new(), }; let mut bFound: bool = false; let mut n = Vec3::new(0.0, 0.0, 0.0); let mut vOs = Vec3::new(0.0, 0.0, 0.0); let mut vOt = Vec3::new(0.0, 0.0, 0.0); if (*pTriInfos.offset(f as isize)).AssignedGroup[0usize] == pGroup as *mut SGroup { index = 0i32 } else if (*pTriInfos.offset(f as isize)).AssignedGroup[1usize] == pGroup as *mut SGroup { index = 1i32 } else if (*pTriInfos.offset(f as isize)).AssignedGroup[2usize] == pGroup as *mut SGroup { index = 2i32 } iVertIndex = *piTriListIn.offset((f * 3i32 + index) as isize); n = get_normal(geometry, iVertIndex as usize); vOs = (*pTriInfos.offset(f as isize)).vOs - (n.dot((*pTriInfos.offset(f as isize)).vOs) * n); vOt = (*pTriInfos.offset(f as isize)).vOt - (n.dot((*pTriInfos.offset(f as isize)).vOt) * n); if VNotZero(vOs) { vOs = Normalize(vOs) } if VNotZero(vOt) { vOt = Normalize(vOt) } iOF_1 = (*pTriInfos.offset(f as isize)).iOrgFaceNumber; iMembers = 0; j = 0i32; while j < (*pGroup).iNrFaces { let t: i32 = *(*pGroup).pFaceIndices.offset(j as isize); let iOF_2: i32 = (*pTriInfos.offset(t as isize)).iOrgFaceNumber; let mut vOs2 = (*pTriInfos.offset(t as isize)).vOs - (n.dot((*pTriInfos.offset(t as isize)).vOs) * n); let mut vOt2 = (*pTriInfos.offset(t as isize)).vOt - (n.dot((*pTriInfos.offset(t as isize)).vOt) * n); if VNotZero(vOs2) { vOs2 = Normalize(vOs2) } if VNotZero(vOt2) { vOt2 = Normalize(vOt2) } let bAny: bool = if ((*pTriInfos.offset(f as isize)).iFlag | (*pTriInfos.offset(t as 
isize)).iFlag) & 4i32 != 0i32 { true } else { false }; let bSameOrgFace: bool = iOF_1 == iOF_2; let fCosS: f32 = vOs.dot(vOs2); let fCosT: f32 = vOt.dot(vOt2); if bAny || bSameOrgFace || fCosS > fThresCos && fCosT > fThresCos { let fresh0 = iMembers; iMembers = iMembers + 1; pTmpMembers[fresh0] = t } j += 1 } if iMembers > 1 { let mut uSeed: u32 = 39871946i32 as u32; QuickSort(pTmpMembers.as_mut_ptr(), 0i32, (iMembers - 1) as i32, uSeed); } tmp_group.iNrFaces = iMembers as i32; tmp_group.pTriMembers = pTmpMembers.clone(); bFound = false; l = 0; while l < iUniqueSubGroups && !bFound { bFound = CompareSubGroups(&mut tmp_group, &mut pUniSubGroups[l]); if !bFound { l += 1 } } if !bFound { pUniSubGroups[iUniqueSubGroups].iNrFaces = iMembers as i32; pUniSubGroups[iUniqueSubGroups].pTriMembers = tmp_group.pTriMembers.clone(); pSubGroupTspace[iUniqueSubGroups] = EvalTspace( tmp_group.pTriMembers.as_mut_ptr(), iMembers as i32, piTriListIn, pTriInfos, geometry, (*pGroup).iVertexRepresentitive, ); iUniqueSubGroups += 1 } let iOffs = (*pTriInfos.offset(f as isize)).iTSpacesOffs as usize; let iVert = (*pTriInfos.offset(f as isize)).vert_num[index as usize] as usize; let mut pTS_out: *mut STSpace = &mut psTspace[iOffs + iVert] as *mut STSpace; if (*pTS_out).iCounter == 1i32 { *pTS_out = AvgTSpace(pTS_out, &mut pSubGroupTspace[l]); (*pTS_out).iCounter = 2i32; (*pTS_out).bOrient = (*pGroup).bOrientPreservering } else { *pTS_out = pSubGroupTspace[l]; (*pTS_out).iCounter = 1i32; (*pTS_out).bOrient = (*pGroup).bOrientPreservering } i += 1 } iUniqueTspaces += iUniqueSubGroups; g += 1 } return true; } unsafe fn AvgTSpace(mut pTS0: *const STSpace, mut pTS1: *const STSpace) -> STSpace { let mut ts_res: STSpace = STSpace { vOs: Vec3::new(0.0, 0.0, 0.0), fMagS: 0., vOt: Vec3::new(0.0, 0.0, 0.0), fMagT: 0., iCounter: 0, bOrient: false, }; if (*pTS0).fMagS == (*pTS1).fMagS && (*pTS0).fMagT == (*pTS1).fMagT && (*pTS0).vOs == (*pTS1).vOs && (*pTS0).vOt == (*pTS1).vOt { ts_res.fMagS = 
(*pTS0).fMagS; ts_res.fMagT = (*pTS0).fMagT; ts_res.vOs = (*pTS0).vOs; ts_res.vOt = (*pTS0).vOt } else { ts_res.fMagS = 0.5f32 * ((*pTS0).fMagS + (*pTS1).fMagS); ts_res.fMagT = 0.5f32 * ((*pTS0).fMagT + (*pTS1).fMagT); ts_res.vOs = (*pTS0).vOs + (*pTS1).vOs; ts_res.vOt = (*pTS0).vOt + (*pTS1).vOt; if VNotZero(ts_res.vOs) { ts_res.vOs = Normalize(ts_res.vOs) } if VNotZero(ts_res.vOt) { ts_res.vOt = Normalize(ts_res.vOt) } } return ts_res; } unsafe fn Normalize(v: Vec3) -> Vec3 { return (1.0 / v.length()) * v; } unsafe fn VNotZero(v: Vec3) -> bool { NotZero(v.x) || NotZero(v.y) || NotZero(v.z) } unsafe fn NotZero(fX: f32) -> bool { fX.abs() > 1.17549435e-38f32 } unsafe fn EvalTspace<I: Geometry>( mut face_indices: *mut i32, iFaces: i32, mut piTriListIn: *const i32, mut pTriInfos: *const STriInfo, geometry: &mut I, iVertexRepresentitive: i32, ) -> STSpace { let mut res: STSpace = STSpace { vOs: Vec3::new(0.0, 0.0, 0.0), fMagS: 0., vOt: Vec3::new(0.0, 0.0, 0.0), fMagT: 0., iCounter: 0, bOrient: false, }; let mut fAngleSum: f32 = 0i32 as f32; let mut face: i32 = 0i32; res.vOs.x = 0.0f32; res.vOs.y = 0.0f32; res.vOs.z = 0.0f32; res.vOt.x = 0.0f32; res.vOt.y = 0.0f32; res.vOt.z = 0.0f32; res.fMagS = 0i32 as f32; res.fMagT = 0i32 as f32; face = 0i32; while face < iFaces { let f: i32 = *face_indices.offset(face as isize); if (*pTriInfos.offset(f as isize)).iFlag & 4i32 == 0i32 { let mut n = Vec3::new(0.0, 0.0, 0.0); let mut vOs = Vec3::new(0.0, 0.0, 0.0); let mut vOt = Vec3::new(0.0, 0.0, 0.0); let mut p0 = Vec3::new(0.0, 0.0, 0.0); let mut p1 = Vec3::new(0.0, 0.0, 0.0); let mut p2 = Vec3::new(0.0, 0.0, 0.0); let mut v1 = Vec3::new(0.0, 0.0, 0.0); let mut v2 = Vec3::new(0.0, 0.0, 0.0); let mut fCos: f32 = 0.; let mut fAngle: f32 = 0.; let mut fMagS: f32 = 0.; let mut fMagT: f32 = 0.; let mut i: i32 = -1i32; let mut index: i32 = -1i32; let mut i0: i32 = -1i32; let mut i1: i32 = -1i32; let mut i2: i32 = -1i32; if *piTriListIn.offset((3i32 * f + 0i32) as isize) == 
iVertexRepresentitive { i = 0i32 } else if *piTriListIn.offset((3i32 * f + 1i32) as isize) == iVertexRepresentitive { i = 1i32 } else if *piTriListIn.offset((3i32 * f + 2i32) as isize) == iVertexRepresentitive { i = 2i32 } index = *piTriListIn.offset((3i32 * f + i) as isize); n = get_normal(geometry, index as usize); vOs = (*pTriInfos.offset(f as isize)).vOs - (n.dot((*pTriInfos.offset(f as isize)).vOs) * n); vOt = (*pTriInfos.offset(f as isize)).vOt - (n.dot((*pTriInfos.offset(f as isize)).vOt) * n); if VNotZero(vOs) { vOs = Normalize(vOs) } if VNotZero(vOt) { vOt = Normalize(vOt) } i2 = *piTriListIn.offset((3i32 * f + if i < 2i32 { i + 1i32 } else { 0i32 }) as isize); i1 = *piTriListIn.offset((3i32 * f + i) as isize); i0 = *piTriListIn.offset((3i32 * f + if i > 0i32 { i - 1i32 } else { 2i32 }) as isize); p0 = get_position(geometry, i0 as usize); p1 = get_position(geometry, i1 as usize); p2 = get_position(geometry, i2 as usize); v1 = p0 - p1; v2 = p2 - p1; v1 = v1 - (n.dot(v1) * n); if VNotZero(v1) { v1 = Normalize(v1) } v2 = v2 - (n.dot(v2) * n); if VNotZero(v2) { v2 = Normalize(v2) } fCos = v1.dot(v2); fCos = if fCos > 1i32 as f32 { 1i32 as f32 } else if fCos < -1i32 as f32 { -1i32 as f32 } else { fCos }; fAngle = (fCos as f64).acos() as f32; fMagS = (*pTriInfos.offset(f as isize)).fMagS; fMagT = (*pTriInfos.offset(f as isize)).fMagT; res.vOs = res.vOs + (fAngle * vOs); res.vOt = res.vOt + (fAngle * vOt); res.fMagS += fAngle * fMagS; res.fMagT += fAngle * fMagT; fAngleSum += fAngle } face += 1 } if VNotZero(res.vOs) { res.vOs = Normalize(res.vOs) } if VNotZero(res.vOt) { res.vOt = Normalize(res.vOt) } if fAngleSum > 0i32 as f32 { res.fMagS /= fAngleSum; res.fMagT /= fAngleSum } return res; } unsafe fn CompareSubGroups(mut pg1: *const SSubGroup, mut pg2: *const SSubGroup) -> bool { let mut bStillSame: bool = true; let mut i = 0; if (*pg1).iNrFaces != (*pg2).iNrFaces { return false; } while i < (*pg1).iNrFaces as usize && bStillSame { bStillSame = if 
(*pg1).pTriMembers[i] == (*pg2).pTriMembers[i] { true } else { false }; if bStillSame { i += 1 } } return bStillSame; } unsafe fn QuickSort(mut pSortBuffer: *mut i32, mut iLeft: i32, mut iRight: i32, mut uSeed: u32) { let mut iL: i32 = 0; let mut iR: i32 = 0; let mut n: i32 = 0; let mut index: i32 = 0; let mut iMid: i32 = 0; let mut iTmp: i32 = 0; // Random let mut t: u32 = uSeed & 31i32 as u32; t = uSeed.rotate_left(t) | uSeed.rotate_right((32i32 as u32).wrapping_sub(t)); uSeed = uSeed.wrapping_add(t).wrapping_add(3i32 as u32); // Random end iL = iLeft; iR = iRight; n = iR - iL + 1i32; index = uSeed.wrapping_rem(n as u32) as i32; iMid = *pSortBuffer.offset((index + iL) as isize); loop { while *pSortBuffer.offset(iL as isize) < iMid { iL += 1 } while *pSortBuffer.offset(iR as isize) > iMid { iR -= 1 } if iL <= iR { iTmp = *pSortBuffer.offset(iL as isize); *pSortBuffer.offset(iL as isize) = *pSortBuffer.offset(iR as isize); *pSortBuffer.offset(iR as isize) = iTmp; iL += 1; iR -= 1 } if !(iL <= iR) { break; } } if iLeft < iR { QuickSort(pSortBuffer, iLeft, iR, uSeed); } if iL < iRight { QuickSort(pSortBuffer, iL, iRight, uSeed); }; } unsafe fn Build4RuleGroups( mut pTriInfos: *mut STriInfo, mut pGroups: *mut SGroup, mut piGroupTrianglesBuffer: *mut i32, mut piTriListIn: *const i32, iNrTrianglesIn: i32, ) -> i32 { let iNrMaxGroups: i32 = iNrTrianglesIn * 3i32; let mut iNrActiveGroups: i32 = 0i32; let mut iOffset: i32 = 0i32; let mut f: i32 = 0i32; let mut i: i32 = 0i32; f = 0i32; while f < iNrTrianglesIn { i = 0i32; while i < 3i32 { if (*pTriInfos.offset(f as isize)).iFlag & 4i32 == 0i32 && (*pTriInfos.offset(f as isize)).AssignedGroup[i as usize].is_null() { let mut bOrPre: bool = false; let mut neigh_indexL: i32 = 0; let mut neigh_indexR: i32 = 0; let vert_index: i32 = *piTriListIn.offset((f * 3i32 + i) as isize); let ref mut fresh2 = (*pTriInfos.offset(f as isize)).AssignedGroup[i as usize]; *fresh2 = &mut *pGroups.offset(iNrActiveGroups as isize) as *mut SGroup; 
(*(*pTriInfos.offset(f as isize)).AssignedGroup[i as usize]) .iVertexRepresentitive = vert_index; (*(*pTriInfos.offset(f as isize)).AssignedGroup[i as usize]).bOrientPreservering = (*pTriInfos.offset(f as isize)).iFlag & 8i32 != 0i32; (*(*pTriInfos.offset(f as isize)).AssignedGroup[i as usize]).iNrFaces = 0i32; let ref mut fresh3 = (*(*pTriInfos.offset(f as isize)).AssignedGroup[i as usize]).pFaceIndices; *fresh3 = &mut *piGroupTrianglesBuffer.offset(iOffset as isize) as *mut i32; iNrActiveGroups += 1; AddTriToGroup((*pTriInfos.offset(f as isize)).AssignedGroup[i as usize], f); bOrPre = if (*pTriInfos.offset(f as isize)).iFlag & 8i32 != 0i32 { true } else { false }; neigh_indexL = (*pTriInfos.offset(f as isize)).FaceNeighbors[i as usize]; neigh_indexR = (*pTriInfos.offset(f as isize)).FaceNeighbors [(if i > 0i32 { i - 1i32 } else { 2i32 }) as usize]; if neigh_indexL >= 0i32 { let bAnswer: bool = AssignRecur( piTriListIn, pTriInfos, neigh_indexL, (*pTriInfos.offset(f as isize)).AssignedGroup[i as usize], ); let bOrPre2: bool = if (*pTriInfos.offset(neigh_indexL as isize)).iFlag & 8i32 != 0i32 { true } else { false }; let bDiff: bool = if bOrPre != bOrPre2 { true } else { false }; } if neigh_indexR >= 0i32 { let bAnswer_0: bool = AssignRecur( piTriListIn, pTriInfos, neigh_indexR, (*pTriInfos.offset(f as isize)).AssignedGroup[i as usize], ); let bOrPre2_0: bool = if (*pTriInfos.offset(neigh_indexR as isize)).iFlag & 8i32 != 0i32 { true } else { false }; let bDiff_0: bool = if bOrPre != bOrPre2_0 { true } else { false }; } iOffset += (*(*pTriInfos.offset(f as isize)).AssignedGroup[i as usize]).iNrFaces } i += 1 } f += 1 } return iNrActiveGroups; } // /////////////////////////////////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////////////////////////////////// unsafe fn AssignRecur( mut piTriListIn: *const i32, mut psTriInfos: *mut STriInfo, iMyTriIndex: i32, mut pGroup: *mut 
SGroup, ) -> bool { let mut pMyTriInfo: *mut STriInfo = &mut *psTriInfos.offset(iMyTriIndex as isize) as *mut STriInfo; // track down vertex let iVertRep: i32 = (*pGroup).iVertexRepresentitive; let mut pVerts: *const i32 = &*piTriListIn.offset((3i32 * iMyTriIndex + 0i32) as isize) as *const i32; let mut i: i32 = -1i32; if *pVerts.offset(0isize) == iVertRep { i = 0i32 } else if *pVerts.offset(1isize) == iVertRep { i = 1i32 } else if *pVerts.offset(2isize) == iVertRep { i = 2i32 } if (*pMyTriInfo).AssignedGroup[i as usize] == pGroup { return true; } else { if !(*pMyTriInfo).AssignedGroup[i as usize].is_null() { return false; } } if (*pMyTriInfo).iFlag & 4i32 != 0i32 { if (*pMyTriInfo).AssignedGroup[0usize].is_null() && (*pMyTriInfo).AssignedGroup[1usize].is_null() && (*pMyTriInfo).AssignedGroup[2usize].is_null() { (*pMyTriInfo).iFlag &= !8i32; (*pMyTriInfo).iFlag |= if (*pGroup).bOrientPreservering { 8i32 } else { 0i32 } } } let bOrient: bool = if (*pMyTriInfo).iFlag & 8i32 != 0i32 { true } else { false }; if bOrient != (*pGroup).bOrientPreservering { return false; } AddTriToGroup(pGroup, iMyTriIndex); (*pMyTriInfo).AssignedGroup[i as usize] = pGroup; let neigh_indexL: i32 = (*pMyTriInfo).FaceNeighbors[i as usize]; let neigh_indexR: i32 = (*pMyTriInfo).FaceNeighbors[(if i > 0i32 { i - 1i32 } else { 2i32 }) as usize]; if neigh_indexL >= 0i32 { AssignRecur(piTriListIn, psTriInfos, neigh_indexL, pGroup); } if neigh_indexR >= 0i32 { AssignRecur(piTriListIn, psTriInfos, neigh_indexR, pGroup); } return true; } unsafe fn AddTriToGroup(mut pGroup: *mut SGroup, iTriIndex: i32) { *(*pGroup).pFaceIndices.offset((*pGroup).iNrFaces as isize) = iTriIndex; (*pGroup).iNrFaces += 1; } unsafe fn InitTriInfo<I: Geometry>( mut pTriInfos: *mut STriInfo, mut piTriListIn: *const i32, geometry: &mut I, iNrTrianglesIn: usize, ) { let mut f = 0; let mut i = 0; let mut t = 0; f = 0; while f < iNrTrianglesIn { i = 0i32; while i < 3i32 { (*pTriInfos.offset(f as isize)).FaceNeighbors[i as usize] 
= -1i32; let ref mut fresh4 = (*pTriInfos.offset(f as isize)).AssignedGroup[i as usize]; *fresh4 = 0 as *mut SGroup; (*pTriInfos.offset(f as isize)).vOs.x = 0.0f32; (*pTriInfos.offset(f as isize)).vOs.y = 0.0f32; (*pTriInfos.offset(f as isize)).vOs.z = 0.0f32; (*pTriInfos.offset(f as isize)).vOt.x = 0.0f32; (*pTriInfos.offset(f as isize)).vOt.y = 0.0f32; (*pTriInfos.offset(f as isize)).vOt.z = 0.0f32; (*pTriInfos.offset(f as isize)).fMagS = 0i32 as f32; (*pTriInfos.offset(f as isize)).fMagT = 0i32 as f32; (*pTriInfos.offset(f as isize)).iFlag |= 4i32; i += 1 } f += 1 } f = 0; while f < iNrTrianglesIn { let v1 = get_position(geometry, *piTriListIn.offset((f * 3 + 0) as isize) as usize); let v2 = get_position(geometry, *piTriListIn.offset((f * 3 + 1) as isize) as usize); let v3 = get_position(geometry, *piTriListIn.offset((f * 3 + 2) as isize) as usize); let t1 = get_tex_coord(geometry, *piTriListIn.offset((f * 3 + 0) as isize) as usize); let t2 = get_tex_coord(geometry, *piTriListIn.offset((f * 3 + 1) as isize) as usize); let t3 = get_tex_coord(geometry, *piTriListIn.offset((f * 3 + 2) as isize) as usize); let t21x: f32 = t2.x - t1.x; let t21y: f32 = t2.y - t1.y; let t31x: f32 = t3.x - t1.x; let t31y: f32 = t3.y - t1.y; let d1 = v2 - v1; let d2 = v3 - v1; let fSignedAreaSTx2: f32 = t21x * t31y - t21y * t31x; let mut vOs = (t31y * d1) - (t21y * d2); let mut vOt = (-t31x * d1) + (t21x * d2); (*pTriInfos.offset(f as isize)).iFlag |= if fSignedAreaSTx2 > 0i32 as f32 { 8i32 } else { 0i32 }; if NotZero(fSignedAreaSTx2) { let fAbsArea: f32 = fSignedAreaSTx2.abs(); let fLenOs: f32 = vOs.length(); let fLenOt: f32 = vOt.length(); let fS: f32 = if (*pTriInfos.offset(f as isize)).iFlag & 8i32 == 0i32 { -1.0f32 } else { 1.0f32 }; if NotZero(fLenOs) { (*pTriInfos.offset(f as isize)).vOs = (fS / fLenOs) * vOs } if NotZero(fLenOt) { (*pTriInfos.offset(f as isize)).vOt = (fS / fLenOt) * vOt } (*pTriInfos.offset(f as isize)).fMagS = fLenOs / fAbsArea; (*pTriInfos.offset(f as 
isize)).fMagT = fLenOt / fAbsArea; if NotZero((*pTriInfos.offset(f as isize)).fMagS) && NotZero((*pTriInfos.offset(f as isize)).fMagT) { (*pTriInfos.offset(f as isize)).iFlag &= !4i32 } } f += 1 } while t < iNrTrianglesIn - 1 { let iFO_a: i32 = (*pTriInfos.offset(t as isize)).iOrgFaceNumber; let iFO_b: i32 = (*pTriInfos.offset((t + 1) as isize)).iOrgFaceNumber; if iFO_a == iFO_b { let bIsDeg_a: bool = if (*pTriInfos.offset(t as isize)).iFlag & 1i32 != 0i32 { true } else { false }; let bIsDeg_b: bool = if (*pTriInfos.offset((t + 1) as isize)).iFlag & 1i32 != 0i32 { true } else { false }; if !(bIsDeg_a || bIsDeg_b) { let bOrientA: bool = if (*pTriInfos.offset(t as isize)).iFlag & 8i32 != 0i32 { true } else { false }; let bOrientB: bool = if (*pTriInfos.offset((t + 1) as isize)).iFlag & 8i32 != 0i32 { true } else { false }; if bOrientA != bOrientB { let mut bChooseOrientFirstTri: bool = false; if (*pTriInfos.offset((t + 1) as isize)).iFlag & 4i32 != 0i32 { bChooseOrientFirstTri = true } else if CalcTexArea(geometry, &*piTriListIn.offset((t * 3 + 0) as isize)) >= CalcTexArea(geometry, &*piTriListIn.offset(((t + 1) * 3 + 0) as isize)) { bChooseOrientFirstTri = true } let t0 = if bChooseOrientFirstTri { t } else { t + 1 }; let t1_0 = if bChooseOrientFirstTri { t + 1 } else { t }; (*pTriInfos.offset(t1_0 as isize)).iFlag &= !8i32; (*pTriInfos.offset(t1_0 as isize)).iFlag |= (*pTriInfos.offset(t0 as isize)).iFlag & 8i32 } } t += 2 } else { t += 1 } } let mut pEdges = vec![SEdge::zero(); iNrTrianglesIn * 3]; BuildNeighborsFast( pTriInfos, pEdges.as_mut_ptr(), piTriListIn, iNrTrianglesIn as i32, ); } unsafe fn BuildNeighborsFast( mut pTriInfos: *mut STriInfo, mut pEdges: *mut SEdge, mut piTriListIn: *const i32, iNrTrianglesIn: i32, ) { // build array of edges // could replace with a random seed? 
let mut uSeed: u32 = 39871946i32 as u32; let mut iEntries: i32 = 0i32; let mut iCurStartIndex: i32 = -1i32; let mut f: i32 = 0i32; let mut i: i32 = 0i32; f = 0i32; while f < iNrTrianglesIn { i = 0i32; while i < 3i32 { let i0: i32 = *piTriListIn.offset((f * 3i32 + i) as isize); let i1: i32 = *piTriListIn.offset((f * 3i32 + if i < 2i32 { i + 1i32 } else { 0i32 }) as isize); (*pEdges.offset((f * 3i32 + i) as isize)).unnamed.i0 = if i0 < i1 { i0 } else { i1 }; (*pEdges.offset((f * 3i32 + i) as isize)).unnamed.i1 = if !(i0 < i1) { i0 } else { i1 }; (*pEdges.offset((f * 3i32 + i) as isize)).unnamed.f = f; i += 1 } f += 1 } QuickSortEdges(pEdges, 0i32, iNrTrianglesIn * 3i32 - 1i32, 0i32, uSeed); iEntries = iNrTrianglesIn * 3i32; iCurStartIndex = 0i32; i = 1i32; while i < iEntries { if (*pEdges.offset(iCurStartIndex as isize)).unnamed.i0 != (*pEdges.offset(i as isize)).unnamed.i0 { let iL: i32 = iCurStartIndex; let iR: i32 = i - 1i32; iCurStartIndex = i; QuickSortEdges(pEdges, iL, iR, 1i32, uSeed); } i += 1 } iCurStartIndex = 0i32; i = 1i32; while i < iEntries { if (*pEdges.offset(iCurStartIndex as isize)).unnamed.i0 != (*pEdges.offset(i as isize)).unnamed.i0 || (*pEdges.offset(iCurStartIndex as isize)).unnamed.i1 != (*pEdges.offset(i as isize)).unnamed.i1 { let iL_0: i32 = iCurStartIndex; let iR_0: i32 = i - 1i32; iCurStartIndex = i; QuickSortEdges(pEdges, iL_0, iR_0, 2i32, uSeed); } i += 1 } i = 0i32; while i < iEntries { let i0_0: i32 = (*pEdges.offset(i as isize)).unnamed.i0; let i1_0: i32 = (*pEdges.offset(i as isize)).unnamed.i1; let f_0: i32 = (*pEdges.offset(i as isize)).unnamed.f; let mut bUnassigned_A: bool = false; let mut i0_A: i32 = 0; let mut i1_A: i32 = 0; let mut edgenum_A: i32 = 0; let mut edgenum_B: i32 = 0i32; GetEdge( &mut i0_A, &mut i1_A, &mut edgenum_A, &*piTriListIn.offset((f_0 * 3i32) as isize), i0_0, i1_0, ); bUnassigned_A = if (*pTriInfos.offset(f_0 as isize)).FaceNeighbors[edgenum_A as usize] == -1i32 { true } else { false }; if bUnassigned_A { 
let mut j: i32 = i + 1i32; let mut t: i32 = 0; let mut bNotFound: bool = true; while j < iEntries && i0_0 == (*pEdges.offset(j as isize)).unnamed.i0 && i1_0 == (*pEdges.offset(j as isize)).unnamed.i1 && bNotFound { let mut bUnassigned_B: bool = false; let mut i0_B: i32 = 0; let mut i1_B: i32 = 0; t = (*pEdges.offset(j as isize)).unnamed.f; GetEdge( &mut i1_B, &mut i0_B, &mut edgenum_B, &*piTriListIn.offset((t * 3i32) as isize), (*pEdges.offset(j as isize)).unnamed.i0, (*pEdges.offset(j as isize)).unnamed.i1, ); bUnassigned_B = if (*pTriInfos.offset(t as isize)).FaceNeighbors[edgenum_B as usize] == -1i32 { true } else { false }; if i0_A == i0_B && i1_A == i1_B && bUnassigned_B { bNotFound = false } else { j += 1 } } if !bNotFound { let mut t_0: i32 = (*pEdges.offset(j as isize)).unnamed.f; (*pTriInfos.offset(f_0 as isize)).FaceNeighbors[edgenum_A as usize] = t_0; (*pTriInfos.offset(t_0 as isize)).FaceNeighbors[edgenum_B as usize] = f_0 } } i += 1 } } unsafe fn GetEdge( mut i0_out: *mut i32, mut i1_out: *mut i32, mut edgenum_out: *mut i32, mut indices: *const i32, i0_in: i32, i1_in: i32, ) { *edgenum_out = -1i32; if *indices.offset(0isize) == i0_in || *indices.offset(0isize) == i1_in { if *indices.offset(1isize) == i0_in || *indices.offset(1isize) == i1_in { *edgenum_out.offset(0isize) = 0i32; *i0_out.offset(0isize) = *indices.offset(0isize); *i1_out.offset(0isize) = *indices.offset(1isize) } else { *edgenum_out.offset(0isize) = 2i32; *i0_out.offset(0isize) = *indices.offset(2isize); *i1_out.offset(0isize) = *indices.offset(0isize) } } else { *edgenum_out.offset(0isize) = 1i32; *i0_out.offset(0isize) = *indices.offset(1isize); *i1_out.offset(0isize) = *indices.offset(2isize) }; } // /////////////////////////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////////////////////////// unsafe fn QuickSortEdges( mut pSortBuffer: *mut SEdge, mut iLeft: i32, mut iRight: i32, channel: i32, 
mut uSeed: u32, ) { let mut t: u32 = 0; let mut iL: i32 = 0; let mut iR: i32 = 0; let mut n: i32 = 0; let mut index: i32 = 0; let mut iMid: i32 = 0; // early out let mut sTmp: SEdge = SEdge { unnamed: unnamed { i0: 0, i1: 0, f: 0 }, }; let iElems: i32 = iRight - iLeft + 1i32; if iElems < 2i32 { return; } else { if iElems == 2i32 { if (*pSortBuffer.offset(iLeft as isize)).array[channel as usize] > (*pSortBuffer.offset(iRight as isize)).array[channel as usize] { sTmp = *pSortBuffer.offset(iLeft as isize); *pSortBuffer.offset(iLeft as isize) = *pSortBuffer.offset(iRight as isize); *pSortBuffer.offset(iRight as isize) = sTmp } return; } } // Random t = uSeed & 31i32 as u32; t = uSeed.rotate_left(t) | uSeed.rotate_right((32i32 as u32).wrapping_sub(t)); uSeed = uSeed.wrapping_add(t).wrapping_add(3i32 as u32); // Random end iL = iLeft; iR = iRight; n = iR - iL + 1i32; index = uSeed.wrapping_rem(n as u32) as i32; iMid = (*pSortBuffer.offset((index + iL) as isize)).array[channel as usize]; loop { while (*pSortBuffer.offset(iL as isize)).array[channel as usize] < iMid { iL += 1 } while (*pSortBuffer.offset(iR as isize)).array[channel as usize] > iMid { iR -= 1 } if iL <= iR { sTmp = *pSortBuffer.offset(iL as isize); *pSortBuffer.offset(iL as isize) = *pSortBuffer.offset(iR as isize); *pSortBuffer.offset(iR as isize) = sTmp; iL += 1; iR -= 1 } if !(iL <= iR) { break; } } if iLeft < iR { QuickSortEdges(pSortBuffer, iLeft, iR, channel, uSeed); } if iL < iRight { QuickSortEdges(pSortBuffer, iL, iRight, channel, uSeed); }; } // returns the texture area times 2 unsafe fn CalcTexArea<I: Geometry>(geometry: &mut I, mut indices: *const i32) -> f32 { let t1 = get_tex_coord(geometry, *indices.offset(0isize) as usize); let t2 = get_tex_coord(geometry, *indices.offset(1isize) as usize); let t3 = get_tex_coord(geometry, *indices.offset(2isize) as usize); let t21x: f32 = t2.x - t1.x; let t21y: f32 = t2.y - t1.y; let t31x: f32 = t3.x - t1.x; let t31y: f32 = t3.y - t1.y; let fSignedAreaSTx2: 
f32 = t21x * t31y - t21y * t31x; return if fSignedAreaSTx2 < 0i32 as f32 { -fSignedAreaSTx2 } else { fSignedAreaSTx2 }; } // degen triangles unsafe fn DegenPrologue( mut pTriInfos: *mut STriInfo, mut piTriList_out: *mut i32, iNrTrianglesIn: i32, iTotTris: i32, ) { let mut iNextGoodTriangleSearchIndex: i32 = -1i32; let mut bStillFindingGoodOnes: bool = false; // locate quads with only one good triangle let mut t: i32 = 0i32; while t < iTotTris - 1i32 { let iFO_a: i32 = (*pTriInfos.offset(t as isize)).iOrgFaceNumber; let iFO_b: i32 = (*pTriInfos.offset((t + 1i32) as isize)).iOrgFaceNumber; if iFO_a == iFO_b { let bIsDeg_a: bool = if (*pTriInfos.offset(t as isize)).iFlag & 1i32 != 0i32 { true } else { false }; let bIsDeg_b: bool = if (*pTriInfos.offset((t + 1i32) as isize)).iFlag & 1i32 != 0i32 { true } else { false }; if bIsDeg_a ^ bIsDeg_b != false { (*pTriInfos.offset(t as isize)).iFlag |= 2i32; (*pTriInfos.offset((t + 1i32) as isize)).iFlag |= 2i32 } t += 2i32 } else { t += 1 } } iNextGoodTriangleSearchIndex = 1i32; t = 0i32; bStillFindingGoodOnes = true; while t < iNrTrianglesIn && bStillFindingGoodOnes { let bIsGood: bool = if (*pTriInfos.offset(t as isize)).iFlag & 1i32 == 0i32 { true } else { false }; if bIsGood { if iNextGoodTriangleSearchIndex < t + 2i32 { iNextGoodTriangleSearchIndex = t + 2i32 } } else { let mut t0: i32 = 0; let mut t1: i32 = 0; let mut bJustADegenerate: bool = true; while bJustADegenerate && iNextGoodTriangleSearchIndex < iTotTris { let bIsGood_0: bool = if (*pTriInfos.offset(iNextGoodTriangleSearchIndex as isize)).iFlag & 1i32 == 0i32 { true } else { false }; if bIsGood_0 { bJustADegenerate = false } else { iNextGoodTriangleSearchIndex += 1 } } t0 = t; t1 = iNextGoodTriangleSearchIndex; iNextGoodTriangleSearchIndex += 1; if !bJustADegenerate { let mut i: i32 = 0i32; i = 0i32; while i < 3i32 { let index: i32 = *piTriList_out.offset((t0 * 3i32 + i) as isize); *piTriList_out.offset((t0 * 3i32 + i) as isize) = *piTriList_out.offset((t1 * 
3i32 + i) as isize); *piTriList_out.offset((t1 * 3i32 + i) as isize) = index; i += 1 } let tri_info: STriInfo = *pTriInfos.offset(t0 as isize); *pTriInfos.offset(t0 as isize) = *pTriInfos.offset(t1 as isize); *pTriInfos.offset(t1 as isize) = tri_info } else { bStillFindingGoodOnes = false } } if bStillFindingGoodOnes { t += 1 } } } unsafe fn GenerateSharedVerticesIndexList<I: Geometry>( mut piTriList_in_and_out: *mut i32, geometry: &mut I, iNrTrianglesIn: usize, ) { let mut i = 0; let mut iChannel: i32 = 0i32; let mut k = 0; let mut e = 0; let mut iMaxCount = 0; let mut vMin = get_position(geometry, 0); let mut vMax = vMin; let mut vDim = Vec3::new(0.0, 0.0, 0.0); let mut fMin: f32 = 0.; let mut fMax: f32 = 0.; i = 1; while i < iNrTrianglesIn * 3 { let index: i32 = *piTriList_in_and_out.offset(i as isize); let vP = get_position(geometry, index as usize); if vMin.x > vP.x { vMin.x = vP.x } else if vMax.x < vP.x { vMax.x = vP.x } if vMin.y > vP.y { vMin.y = vP.y } else if vMax.y < vP.y { vMax.y = vP.y } if vMin.z > vP.z { vMin.z = vP.z } else if vMax.z < vP.z { vMax.z = vP.z } i += 1 } vDim = vMax - vMin; iChannel = 0i32; fMin = vMin.x; fMax = vMax.x; if vDim.y > vDim.x && vDim.y > vDim.z { iChannel = 1i32; fMin = vMin.y; fMax = vMax.y } else if vDim.z > vDim.x { iChannel = 2i32; fMin = vMin.z; fMax = vMax.z } let mut piHashTable = vec![0i32; iNrTrianglesIn * 3]; let mut piHashOffsets = vec![0i32; g_iCells]; let mut piHashCount = vec![0i32; g_iCells]; let mut piHashCount2 = vec![0i32; g_iCells]; i = 0; while i < iNrTrianglesIn * 3 { let index_0: i32 = *piTriList_in_and_out.offset(i as isize); let vP_0 = get_position(geometry, index_0 as usize); let fVal: f32 = if iChannel == 0i32 { vP_0.x } else if iChannel == 1i32 { vP_0.y } else { vP_0.z }; let iCell = FindGridCell(fMin, fMax, fVal); piHashCount[iCell] += 1; i += 1 } piHashOffsets[0] = 0i32; k = 1; while k < g_iCells { piHashOffsets[k] = piHashOffsets[k - 1] + piHashCount[k - 1]; k += 1 } i = 0; while i < 
iNrTrianglesIn * 3 { let index_1: i32 = *piTriList_in_and_out.offset(i as isize); let vP_1 = get_position(geometry, index_1 as usize); let fVal_0: f32 = if iChannel == 0i32 { vP_1.x } else if iChannel == 1i32 { vP_1.y } else { vP_1.z }; let iCell_0 = FindGridCell(fMin, fMax, fVal_0); let mut pTable: *mut i32 = 0 as *mut i32; pTable = &mut piHashTable[piHashOffsets[iCell_0] as usize] as *mut i32; *pTable.offset(piHashCount2[iCell_0] as isize) = i as i32; piHashCount2[iCell_0] += 1; i += 1 } k = 0; while k < g_iCells { k += 1 } iMaxCount = piHashCount[0] as usize; k = 1; while k < g_iCells { if iMaxCount < piHashCount[k] as usize { iMaxCount = piHashCount[k] as usize } k += 1 } let mut pTmpVert = vec![STmpVert::zero(); iMaxCount]; k = 0; while k < g_iCells { // extract table of cell k and amount of entries in it let mut pTable_0 = &mut piHashTable[piHashOffsets[k] as usize] as *mut i32; let iEntries = piHashCount[k] as usize; if !(iEntries < 2) { e = 0; while e < iEntries { let mut i_0: i32 = *pTable_0.offset(e as isize); let vP_2 = get_position( geometry, *piTriList_in_and_out.offset(i_0 as isize) as usize, ); pTmpVert[e].vert[0usize] = vP_2.x; pTmpVert[e].vert[1usize] = vP_2.y; pTmpVert[e].vert[2usize] = vP_2.z; pTmpVert[e].index = i_0; e += 1 } MergeVertsFast( piTriList_in_and_out, pTmpVert.as_mut_ptr(), geometry, 0i32, (iEntries - 1) as i32, ); } k += 1 } } unsafe fn MergeVertsFast<I: Geometry>( mut piTriList_in_and_out: *mut i32, mut pTmpVert: *mut STmpVert, geometry: &mut I, iL_in: i32, iR_in: i32, ) { // make bbox let mut c: i32 = 0i32; let mut l: i32 = 0i32; let mut channel: i32 = 0i32; let mut fvMin: [f32; 3] = [0.; 3]; let mut fvMax: [f32; 3] = [0.; 3]; let mut dx: f32 = 0i32 as f32; let mut dy: f32 = 0i32 as f32; let mut dz: f32 = 0i32 as f32; let mut fSep: f32 = 0i32 as f32; c = 0i32; while c < 3i32 { fvMin[c as usize] = (*pTmpVert.offset(iL_in as isize)).vert[c as usize]; fvMax[c as usize] = fvMin[c as usize]; c += 1 } l = iL_in + 1i32; while l <= iR_in 
{ c = 0i32; while c < 3i32 { if fvMin[c as usize] > (*pTmpVert.offset(l as isize)).vert[c as usize] { fvMin[c as usize] = (*pTmpVert.offset(l as isize)).vert[c as usize] } else if fvMax[c as usize] < (*pTmpVert.offset(l as isize)).vert[c as usize] { fvMax[c as usize] = (*pTmpVert.offset(l as isize)).vert[c as usize] } c += 1 } l += 1 } dx = fvMax[0usize] - fvMin[0usize]; dy = fvMax[1usize] - fvMin[1usize]; dz = fvMax[2usize] - fvMin[2usize]; channel = 0i32; if dy > dx && dy > dz { channel = 1i32 } else if dz > dx { channel = 2i32 } fSep = 0.5f32 * (fvMax[channel as usize] + fvMin[channel as usize]); if fSep >= fvMax[channel as usize] || fSep <= fvMin[channel as usize] { l = iL_in; while l <= iR_in { let mut i: i32 = (*pTmpVert.offset(l as isize)).index; let index: i32 = *piTriList_in_and_out.offset(i as isize); let vP = get_position(geometry, index as usize); let vN = get_normal(geometry, index as usize); let vT = get_tex_coord(geometry, index as usize); let mut bNotFound: bool = true; let mut l2: i32 = iL_in; let mut i2rec: i32 = -1i32; while l2 < l && bNotFound { let i2: i32 = (*pTmpVert.offset(l2 as isize)).index; let index2: i32 = *piTriList_in_and_out.offset(i2 as isize); let vP2 = get_position(geometry, index2 as usize); let vN2 = get_normal(geometry, index2 as usize); let vT2 = get_tex_coord(geometry, index2 as usize); i2rec = i2; if vP.x == vP2.x && vP.y == vP2.y && vP.z == vP2.z && vN.x == vN2.x && vN.y == vN2.y && vN.z == vN2.z && vT.x == vT2.x && vT.y == vT2.y && vT.z == vT2.z { bNotFound = false } else { l2 += 1 } } if !bNotFound { *piTriList_in_and_out.offset(i as isize) = *piTriList_in_and_out.offset(i2rec as isize) } l += 1 } } else { let mut iL: i32 = iL_in; let mut iR: i32 = iR_in; while iL < iR { let mut bReadyLeftSwap: bool = false; let mut bReadyRightSwap: bool = false; while !bReadyLeftSwap && iL < iR { bReadyLeftSwap = !((*pTmpVert.offset(iL as isize)).vert[channel as usize] < fSep); if !bReadyLeftSwap { iL += 1 } } while !bReadyRightSwap && 
iL < iR { bReadyRightSwap = (*pTmpVert.offset(iR as isize)).vert[channel as usize] < fSep; if !bReadyRightSwap { iR -= 1 } } if bReadyLeftSwap && bReadyRightSwap { let sTmp: STmpVert = *pTmpVert.offset(iL as isize); *pTmpVert.offset(iL as isize) = *pTmpVert.offset(iR as isize); *pTmpVert.offset(iR as isize) = sTmp; iL += 1; iR -= 1 } } if iL == iR { let bReadyRightSwap_0: bool = (*pTmpVert.offset(iR as isize)).vert[channel as usize] < fSep; if bReadyRightSwap_0 { iL += 1 } else { iR -= 1 } } if iL_in < iR { MergeVertsFast(piTriList_in_and_out, pTmpVert, geometry, iL_in, iR); } if iL < iR_in { MergeVertsFast(piTriList_in_and_out, pTmpVert, geometry, iL, iR_in); } }; } const g_iCells: usize = 2048; // it is IMPORTANT that this function is called to evaluate the hash since // inlining could potentially reorder instructions and generate different // results for the same effective input value fVal. #[inline(never)] unsafe fn FindGridCell(fMin: f32, fMax: f32, fVal: f32) -> usize { let fIndex = g_iCells as f32 * ((fVal - fMin) / (fMax - fMin)); let iIndex = fIndex as isize; return if iIndex < g_iCells as isize { if iIndex >= 0 { iIndex as usize } else { 0 } } else { g_iCells - 1 }; } unsafe fn GenerateInitialVerticesIndexList<I: Geometry>( pTriInfos: &mut [STriInfo], piTriList_out: &mut [i32], geometry: &mut I, iNrTrianglesIn: usize, ) -> usize { let mut iTSpacesOffs: usize = 0; let mut f = 0; let mut t: usize = 0; let mut iDstTriIndex = 0; f = 0; while f < geometry.num_faces() { let verts = geometry.num_vertices_of_face(f); if !(verts != 3 && verts != 4) { pTriInfos[iDstTriIndex].iOrgFaceNumber = f as i32; pTriInfos[iDstTriIndex].iTSpacesOffs = iTSpacesOffs as i32; if verts == 3 { let mut pVerts = &mut pTriInfos[iDstTriIndex].vert_num; pVerts[0] = 0; pVerts[1] = 1; pVerts[2] = 2; piTriList_out[iDstTriIndex * 3 + 0] = face_vert_to_index(f, 0) as i32; piTriList_out[iDstTriIndex * 3 + 1] = face_vert_to_index(f, 1) as i32; piTriList_out[iDstTriIndex * 3 + 2] = 
face_vert_to_index(f, 2) as i32; iDstTriIndex += 1 } else { pTriInfos[iDstTriIndex + 1].iOrgFaceNumber = f as i32; pTriInfos[iDstTriIndex + 1].iTSpacesOffs = iTSpacesOffs as i32; let i0 = face_vert_to_index(f, 0); let i1 = face_vert_to_index(f, 1); let i2 = face_vert_to_index(f, 2); let i3 = face_vert_to_index(f, 3); let T0 = get_tex_coord(geometry, i0); let T1 = get_tex_coord(geometry, i1); let T2 = get_tex_coord(geometry, i2); let T3 = get_tex_coord(geometry, i3); let distSQ_02: f32 = (T2 - T0).length_squared(); let distSQ_13: f32 = (T3 - T1).length_squared(); let mut bQuadDiagIs_02: bool = false; if distSQ_02 < distSQ_13 { bQuadDiagIs_02 = true } else if distSQ_13 < distSQ_02 { bQuadDiagIs_02 = false } else { let P0 = get_position(geometry, i0); let P1 = get_position(geometry, i1); let P2 = get_position(geometry, i2); let P3 = get_position(geometry, i3); let distSQ_02_0: f32 = (P2 - P0).length_squared(); let distSQ_13_0: f32 = (P3 - P1).length_squared(); bQuadDiagIs_02 = if distSQ_13_0 < distSQ_02_0 { false } else { true } } if bQuadDiagIs_02 { let mut pVerts_A = &mut pTriInfos[iDstTriIndex].vert_num; pVerts_A[0] = 0; pVerts_A[1] = 1; pVerts_A[2] = 2; piTriList_out[iDstTriIndex * 3 + 0] = i0 as i32; piTriList_out[iDstTriIndex * 3 + 1] = i1 as i32; piTriList_out[iDstTriIndex * 3 + 2] = i2 as i32; iDstTriIndex += 1; let mut pVerts_B = &mut pTriInfos[iDstTriIndex].vert_num; pVerts_B[0] = 0; pVerts_B[1] = 2; pVerts_B[2] = 3; piTriList_out[iDstTriIndex * 3 + 0] = i0 as i32; piTriList_out[iDstTriIndex * 3 + 1] = i2 as i32; piTriList_out[iDstTriIndex * 3 + 2] = i3 as i32; iDstTriIndex += 1 } else { let mut pVerts_A_0 = &mut pTriInfos[iDstTriIndex].vert_num; pVerts_A_0[0] = 0; pVerts_A_0[1] = 1; pVerts_A_0[2] = 3; piTriList_out[iDstTriIndex * 3 + 0] = i0 as i32; piTriList_out[iDstTriIndex * 3 + 1] = i1 as i32; piTriList_out[iDstTriIndex * 3 + 2] = i3 as i32; iDstTriIndex += 1; let mut pVerts_B_0 = &mut pTriInfos[iDstTriIndex].vert_num; pVerts_B_0[0] = 1; pVerts_B_0[1] = 
2; pVerts_B_0[2] = 3; piTriList_out[iDstTriIndex * 3 + 0] = i1 as i32; piTriList_out[iDstTriIndex * 3 + 1] = i2 as i32; piTriList_out[iDstTriIndex * 3 + 2] = i3 as i32; iDstTriIndex += 1 } } iTSpacesOffs += verts } f += 1 } t = 0; while t < iNrTrianglesIn { pTriInfos[t].iFlag = 0; t += 1 } return iTSpacesOffs; }
35.230856
116
0.498642
0aaeefad16342d213d5c9b21ced36ae4d2f47f80
454
#[derive(Clone, PartialEq)] pub enum Direction { Up, Down, Left, Right } use self::Direction::*; impl Direction { pub fn is_opposite(&self, other: &Direction) -> bool { match self { Up => if let Down = other { true } else { false }, Down => if let Up = other { true } else { false }, Left => if let Right = other { true } else { false }, Right => if let Left = other { true } else { false }, } } }
21.619048
60
0.548458
cc12ad7af3aecbdd0b7c9f0938ba291ef4ede9bf
8,417
// Copyright (c) Meta Platforms, Inc. and affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use crate::decl_defs; use crate::reason::Reason; use crate::typing_defs; /// A type which can be traversed by a `Visitor`. pub trait Walkable<R: Reason> { fn accept(&self, v: &mut dyn Visitor<R>) { self.recurse(v); } fn recurse(&self, _v: &mut dyn Visitor<R>) {} } /// A visitor over data structures containing decls or types. pub trait Visitor<R: Reason> { /// Must return `self`. fn object(&mut self) -> &mut dyn Visitor<R>; fn visit_decl_ty(&mut self, o: &decl_defs::DeclTy<R>) { o.recurse(self.object()); } fn visit_ty(&mut self, o: &typing_defs::Ty<R>) { o.recurse(self.object()); } } impl<R: Reason, T: Walkable<R>> Walkable<R> for Option<T> { fn recurse(&self, v: &mut dyn Visitor<R>) { match self { Some(some) => some.accept(v), None => {} } } } impl<R: Reason, T: Walkable<R>> Walkable<R> for Box<T> { fn recurse(&self, v: &mut dyn Visitor<R>) { let obj: &T = &**self; obj.accept(v) } } impl<R: Reason, T: Walkable<R>> Walkable<R> for [T] { fn recurse(&self, v: &mut dyn Visitor<R>) { for obj in self { obj.accept(v); } } } impl<R: Reason, T: Walkable<R>> Walkable<R> for Vec<T> { fn recurse(&self, v: &mut dyn Visitor<R>) { for obj in self { obj.accept(v); } } } impl<R: Reason, K: Walkable<R>, V: Walkable<R>> Walkable<R> for std::collections::BTreeMap<K, V> { fn recurse(&self, v: &mut dyn Visitor<R>) { for (key, val) in self { key.accept(v); val.accept(v); } } } impl<R: Reason, T: Walkable<R>> Walkable<R> for hcons::Hc<T> { fn recurse(&self, v: &mut dyn Visitor<R>) { let obj: &T = &**self; obj.accept(v) } } /// Generate an impl of `Walkable<R>` for the given type which recurses on the /// given fields. 
/// /// # Examples /// /// Suppose we have this struct definition: /// /// struct Foo<R: Reason> { /// pos: R::Pos, /// ty: Ty<R>, /// constraint: Ty<R>, /// } /// /// We can generate an impl of `Walkable<R>` for `Foo<R>` like this: /// /// walkable!(Foo<R> => [ty, constraint]); /// /// The macro will expand to something like the following: /// /// impl<R: Reason> Walkable<R> for Foo<R> { /// fn recurse(&self, v: &mut dyn Visitor<R>) { /// self.ty.accept(v); /// self.constraint.accept(v); /// } /// } /// /// Note that the macro implicitly introduces the type parameter `R`. /// /// If the type is one which a `Visitor` may be interested in handling, add a /// `visit_` method to the `Visitor` trait, and reference that method with the /// `as` keyword in the `walkable!` macro: /// /// walkable!(Foo<R> as visit_foo => [ty, constraint]); /// /// This will expand to: /// /// impl<R: Reason> Walkable<R> for Foo<R> { /// fn accept(&self, v: &mut dyn crate::visitor::Visitor<R>) { /// v.visit_foo(self); /// } /// fn recurse(&self, v: &mut dyn Visitor<R>) { /// self.ty.accept(v); /// self.constraint.accept(v); /// } /// } /// /// If the type has type parameters other than `R`: /// /// struct Foo<R: Reason, T> { /// pos: R::Pos, /// ty: T, /// constraint: T, /// } /// /// Use the `impl` and `for` keywords to introduce all type parameters. 
Note /// that the `R: Reason` parameter is no longer implicitly introduced: /// /// walkable!(impl<R: Reason, T> for Foo<R, T> as visit_foo => [ty, constraint]); /// /// For enums: /// /// enum Typeconst<R: Reason> { /// Abstract(AbstractTypeconst<R>), /// Concrete(ConcreteTypeconst<R>), /// } /// /// Write a list of `pattern => [fields]` arms in curly braces: /// /// walkable!(Typeconst<R> as visit_typeconst => { /// Self::Abstract(at) => [at], /// Self::Concrete(ct) => [ct], /// }); /// /// For leaves (structures which cannot contain the types we are interested in /// visiting), either 1) don't implement `Walkable<R>`, and don't specify fields /// of that type in implementations of `Walkable<R>` for other types (as done /// with the field `pos` in `Foo<R>` in the example above), or 2) use /// `walkable!` to generate a no-op implementation of `Walkable<R>` (when not /// implementing `Walkable<R>` would be inconvenient): /// /// #[derive(Ord, PartialOrd)] /// enum Kind { A, B, C, D } /// struct Bar<R> { map: BTreeMap<Kind, Ty<R>> } /// walkable!(Bar<R> => [map]); // requires Kind : Walkable<R> /// walkable!(Kind); /// /// This leaf-node use expands to: /// /// impl<R: Reason> Walkable<R> for Kind {} macro_rules! walkable { ( @ACCEPT($r:ident, $visit:ident) ) => { fn accept(& self, v: &mut dyn $crate::visitor::Visitor<$r>) { v.$visit(self); } }; ( @STRUCT($r:ident, $reason_bound:path, [$($gen:ident)*], $name:ty, $({$accept:item},)? [$($e:tt)*]) ) => { impl<$r: $reason_bound $( , $gen: $crate::visitor::Walkable<$r> )* > $crate::visitor::Walkable<$r> for $name { $($accept)* #[allow(unused_variables)] fn recurse(&self, v: &mut dyn $crate::visitor::Visitor<$r>) { $( self.$e.accept(v); )* } } }; ( @ENUM($r:ident, $reason_bound:path, [$($gen:ident)*], $name:ty, $({$accept:item},)? 
[$( $variant:pat, [$($e:tt)*] )*]) ) => { impl<$r: $reason_bound $( , $gen: $crate::visitor::Walkable<$r> )* > $crate::visitor::Walkable<$r> for $name { $($accept)* #[allow(unused_variables)] fn recurse(& self, v: &mut dyn $crate::visitor::Visitor<$r>) { match self { $( $variant => { $( $e.accept(v); )* } )* } } } }; ( impl < $r:ident : $bound:path $( , $gen:ident )* $(,)? > for $name:ty as $visit:ident => [ $($e:tt),* $(,)? ] ) => { walkable! { @STRUCT($r, $bound, [$($gen)*], $name, {walkable!{ @ACCEPT($r, $visit) }}, [$($e)*]) } }; ( impl < $r:ident : $bound:path $( , $gen:ident )* $(,)? > for $name:ty => [ $($e:tt),* $(,)? ] ) => { walkable! { @STRUCT($r, $bound, [$($gen)*], $name, [$($e)*]) } }; ( impl < $r:ident : $bound:path $( , $gen:ident )* $(,)? > for $name:ty as $visit:ident => { $( $variant:pat => [ $($e:tt),* $(,)? ] ),* $(,)? } ) => { walkable! { @ENUM($r, $crate::reason::Reason, [$($gen)*], $name, {walkable!{ @ACCEPT($r, $visit) }}, [$($variant, [$($e)*])*]) } }; ( impl < $r:ident : $bound:path $( , $gen:ident )* $(,)? > for $name:ty => { $( $variant:pat => [ $($e:tt),* $(,)? ] ),* $(,)? } ) => { walkable! { @ENUM($r, $crate::reason::Reason, [$($gen)*], $name, [$($variant, [$($e)*])*]) } }; ( $name:ty as $visit:ident => [ $($e:tt),* $(,)? ] ) => { walkable! { @STRUCT(R, $crate::reason::Reason, [], $name, {walkable!{ @ACCEPT(R, $visit) }}, [$($e)*]) } }; ( $name:ty => [ $($e:tt),* $(,)? ] ) => { walkable! { @STRUCT(R, $crate::reason::Reason, [], $name, [$($e)*]) } }; ( $name:ty as $visit:ident => { $( $variant:pat => [ $($e:tt),* $(,)? ] ),* $(,)? } ) => { walkable! { @ENUM(R, $crate::reason::Reason, [], $name, {walkable!{ @ACCEPT(R, $visit) }}, [$($variant, [$($e)*])*]) } }; ( $name:ty => { $( $variant:pat => [ $($e:tt),* $(,)? ] ),* $(,)? } ) => { walkable! { @ENUM(R, $crate::reason::Reason, [], $name, [$($variant, [$($e)*])*]) } }; ( $name:ty as $visit:ident) => { walkable! 
{ @STRUCT(R, $crate::reason::Reason, [], $name, {walkable!{ @ACCEPT(R, $visit) }}, []) } }; ( $name:ty ) => { walkable! { @STRUCT(R, $crate::reason::Reason, [], $name, []) } }; } walkable!(impl<R: Reason, A, B> for (A, B) => [0, 1]); walkable!(impl<R: Reason, A, B, C> for (A, B, C) => [0, 1, 2]); walkable!(impl<R: Reason, A, B, C, D> for (A, B, C, D) => [0, 1, 2, 3]);
35.217573
155
0.491862
221ad84f0d1afb5463ed9641db5683aeacd756a8
20,503
#![recursion_limit = "1024"] #![allow(clippy::all)] extern crate proc_macro; extern crate proc_macro2; extern crate syn; #[macro_use] extern crate quote; #[macro_use] extern crate bitflags; mod prim; use prim::*; mod float; use float::*; mod ty; use ty::*; use proc_macro2::{TokenStream, Span}; use syn::punctuated::Punctuated; use syn::token::Comma; bitflags!{ struct GenFlags: u32 { const COMPLETE = 0b0000_0001; const ALWAYS = 0b0000_0010; const DIFF = 0b0000_0100; const FIXED = 0b0000_1000; const DEFAULT = 0b0001_0000; } } // delta_bits = number of bits used for an integer type // delta_subbits = Try and use the smallest number of bits from the list // delta_always = always send this value instead of only changes // delta_complete = compare the whole struct and only send if changed // delta_diff = sends the difference between the values, only useful when // used with `delta_subbits` // delta_fixed - Causes the floating point number to be sent as a fixed point number #[proc_macro_derive(DeltaEncode, attributes( delta_bits, delta_subbits, delta_diff, delta_always, delta_complete, delta_fixed, delta_default, ))] pub fn delta_encode(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let ast: syn::DeriveInput = syn::parse(input).expect("Failed to parse input"); let ts: proc_macro::TokenStream = delta_encode_impl(ast).into(); ts } fn delta_encode_impl(ast: syn::DeriveInput) -> TokenStream { let name = &ast.ident; let flags = decode_flags(&ast.attrs); let (enc, dec) = match ast.data { syn::Data::Struct(syn::DataStruct{fields: syn::Fields::Named(fields), ..}) => { build_struct(name, &syn::Ident::new("self", Span::call_site()), &syn::Ident::new("base", Span::call_site()), flags, fields.named) }, syn::Data::Struct(syn::DataStruct{fields: syn::Fields::Unnamed(fields), ..}) => { build_tuple(name, &syn::Ident::new("self", Span::call_site()), &syn::Ident::new("base", Span::call_site()), flags, fields.unnamed) }, syn::Data::Enum(e) => { build_enum(name, 
&syn::Ident::new("self", Span::call_site()), &syn::Ident::new("base", Span::call_site()), flags, e.variants) } _ => unimplemented!("body type"), }; quote! { #[allow(unused_variables, non_snake_case, unreachable_patterns, clippy::float_cmp)] impl crate::delta_encode::DeltaEncodable for #name { #[inline] fn encode<W>(&self, base: Option<&Self>, w: &mut crate::delta_encode::bitio::Writer<W>) -> ::std::io::Result<()> where W: std::io::Write { #enc Ok(()) } #[inline] fn decode<R>(base: Option<&Self>, r: &mut crate::delta_encode::bitio::Reader<R>) -> ::std::io::Result<Self> where R: std::io::Read { Ok(#dec) } } } } fn decode_flags(attrs: &[syn::Attribute]) -> GenFlags { let mut flags = GenFlags::empty(); for attr in attrs.into_iter().filter_map(|v| v.interpret_meta()) { match attr { syn::Meta::Word(ref ident) if ident == "delta_complete" => { flags |= GenFlags::COMPLETE; }, syn::Meta::Word(ref ident) if ident == "delta_always" => { flags |= GenFlags::ALWAYS; }, syn::Meta::Word(ref ident) if ident == "delta_diff" => { flags |= GenFlags::DIFF; }, syn::Meta::Word(ref ident) if ident == "delta_fixed" => { flags |= GenFlags::FIXED; }, syn::Meta::Word(ref ident) if ident == "delta_default" => { flags |= GenFlags::DEFAULT; }, _ => {}, } } flags } fn build_enum(name: &syn::Ident, self_name: &syn::Ident, base_name: &syn::Ident, flags: GenFlags, variants: Punctuated<syn::Variant, Comma>) -> (TokenStream, TokenStream) { let mut encode: Vec<TokenStream> = vec![]; let mut encode_part: Vec<TokenStream> = vec![]; let mut decode: Vec<TokenStream> = vec![]; let mut decode_part: Vec<TokenStream> = vec![]; let self_ref = if self_name == "self" { syn::Ident::new("self", Span::call_site()) } else { syn::Ident::new( &format!("&{}", self_name), Span::call_site() ) }; let variant_bits = (variants.len().next_power_of_two() - 1).count_ones() as u8; for (idx, variant) in variants.into_iter().enumerate() { let idxu = idx as u64; let encode_variant = quote! 
{ w.write_unsigned(#idxu, #variant_bits)?; }; let ident = &variant.ident; let variant_flags = flags | decode_flags(&variant.attrs); match variant.fields { syn::Fields::Unit => { encode.push(quote! { &#name::#ident => { #encode_variant } }); encode_part.push(quote! { (&#name::#ident, _) => { #encode_variant } }); decode.push(quote! { #idxu => { #name::#ident } }); decode_part.push(quote! { (#idxu, _) => { #name::#ident } }); }, syn::Fields::Named(fields) => { let mut sencode: Vec<TokenStream> = vec![]; let mut sencode_part: Vec<TokenStream> = vec![]; let mut sdecode: Vec<TokenStream> = vec![]; let mut sdecode_part: Vec<TokenStream> = vec![]; let mut field_info = vec![]; let mut field_info_base = vec![]; for field in fields.named { let fname = field.ident.unwrap(); let name_self = quote!(*#fname); let name_base_orig = syn::Ident::new( &format!("__enumbase__{}", fname), Span::call_site() ); field_info.push(fname.clone()); field_info_base.push(quote!(#fname: ref #name_base_orig)); let name_base = quote!(*#name_base_orig); build_ty( field.ty, variant_flags, &mut sencode, &mut sencode_part, &mut sdecode, &mut sdecode_part, quote!(#fname :), &name_self, &name_base, &field.attrs, ); } { let field_info = field_info.clone(); let sencode = sencode.clone(); encode.push(quote!( &#name::#ident{#(ref #field_info),*} => { #encode_variant #(#sencode)* } )); } { let field_info = field_info.clone(); let field_info_base = field_info_base.clone(); encode_part.push(quote!( ( &#name::#ident{#(ref #field_info),*}, &#name::#ident{#(#field_info_base),*}, ) => { #encode_variant #(#sencode_part)* } )); } { let field_info = field_info.clone(); let sencode = sencode.clone(); encode_part.push(quote!( ( &#name::#ident{#(ref #field_info),*}, _, ) => { #encode_variant #(#sencode)* } )); } { let sdecode = sdecode.clone(); decode.push(quote!( #idxu => { #name::#ident { #(#sdecode,)* } } )); } { let field_info_base = field_info_base.clone(); decode_part.push(quote!( ( #idxu, 
&#name::#ident{#(#field_info_base),*}, ) => { #name::#ident { #(#sdecode_part,)* } } )); } { let sdecode = sdecode.clone(); decode_part.push(quote!( ( #idxu, _, ) => { #name::#ident { #(#sdecode,)* } } )); } }, syn::Fields::Unnamed(fields) => { let mut sencode: Vec<TokenStream> = vec![]; let mut sencode_part: Vec<TokenStream> = vec![]; let mut sdecode: Vec<TokenStream> = vec![]; let mut sdecode_part: Vec<TokenStream> = vec![]; let mut field_info = vec![]; let mut field_info_base = vec![]; for (idx, field) in fields.unnamed.into_iter().enumerate() { let name_self_orig = syn::Ident::new(&format!("__enumcur__{}", idx), Span::call_site()); let name_self = quote!(*#name_self_orig); let name_base_orig = syn::Ident::new(&format!("__enumbase__{}", idx), Span::call_site()); let name_base = quote!(*#name_base_orig); field_info.push(quote!(ref #name_self_orig)); field_info_base.push(quote!(ref #name_base_orig)); build_ty( field.ty, flags, &mut sencode, &mut sencode_part, &mut sdecode, &mut sdecode_part, quote!(), &name_self, &name_base, &field.attrs, ); } { let field_info = field_info.clone(); let sencode = sencode.clone(); encode.push(quote!( &#name::#ident(#(#field_info),*) => { #encode_variant #(#sencode)* } )); } { let field_info = field_info.clone(); let field_info_base = field_info_base.clone(); encode_part.push(quote!( ( &#name::#ident(#(#field_info),*), &#name::#ident(#(#field_info_base),*), ) => { #encode_variant #(#sencode_part)* } )); } { let field_info = field_info.clone(); let sencode = sencode.clone(); encode_part.push(quote!( ( &#name::#ident(#(#field_info),*), _, ) => { #encode_variant #(#sencode)* } )); } { let sdecode = sdecode.clone(); decode.push(quote!( #idxu => { #name::#ident ( #(#sdecode,)* ) } )); } { let field_info_base = field_info_base.clone(); decode_part.push(quote!( ( #idxu, &#name::#ident(#(#field_info_base),*), ) => { #name::#ident ( #(#sdecode_part,)* ) } )); } { let sdecode = sdecode.clone(); decode_part.push(quote!( ( #idxu, _, ) => { 
#name::#ident ( #(#sdecode,)* ) } )); } } } } if flags.contains(GenFlags::COMPLETE) { (quote! { if #base_name.map_or(false, |v| *v == *self) { w.write_bool(false); } else { w.write_bool(true); if let Some(#base_name) = #base_name { match (#self_ref, #base_name) { #(#encode_part),* } } else { match #self_ref { #(#encode),* } } } }, quote! {{ let changed = r.read_bool()?; match (#base_name, changed) { (Some(#base_name), false) => #base_name.clone(), (Some(#base_name), true) => { match (r.read_unsigned(#variant_bits)?, #base_name) { #(#decode_part,)* _ => return Err(::std::io::Error::new(::std::io::ErrorKind::InvalidData, "Invalid enum variant")), } }, (None, true) => { match r.read_unsigned(#variant_bits)? { #(#decode,)* _ => return Err(::std::io::Error::new(::std::io::ErrorKind::InvalidData, "Invalid enum variant")), } }, (None, false) => return Err(::std::io::Error::new(::std::io::ErrorKind::InvalidData, "Mismatched decode, missing previous state")), } }}) } else { (quote! { if let Some(#base_name) = #base_name { match (#self_ref, #base_name) { #(#encode_part),* } } else { match #self_ref { #(#encode),* } } }, quote! {{ match #base_name { Some(#base_name) => { match (r.read_unsigned(#variant_bits)?, #base_name) { #(#decode_part,)* _ => return Err(::std::io::Error::new(::std::io::ErrorKind::InvalidData, "Invalid enum variant")), } }, None => { match r.read_unsigned(#variant_bits)? { #(#decode,)* _ => return Err(::std::io::Error::new(::std::io::ErrorKind::InvalidData, "Invalid enum variant")), } }, } }}) } } fn build_struct(name: &syn::Ident, self_name: &syn::Ident, base_name: &syn::Ident, flags: GenFlags, fields: Punctuated<syn::Field, Comma>) -> (TokenStream, TokenStream) { let mut encode: Vec<TokenStream> = vec![]; let mut encode_part: Vec<TokenStream> = vec![]; let mut decode: Vec<TokenStream> = vec![]; let mut decode_part: Vec<TokenStream> = vec![]; for field in fields { let fname = field.ident.unwrap(); let name_self = quote!(#self_name . 
#fname); let name_base = quote!(#base_name . #fname); build_ty( field.ty, flags, &mut encode, &mut encode_part, &mut decode, &mut decode_part, quote!(#fname :), &name_self, &name_base, &field.attrs, ); } if flags.contains(GenFlags::COMPLETE) { (quote! { if #base_name.map_or(false, |v| *v == *self) { w.write_bool(false)?; } else { w.write_bool(true)?; if let Some(#base_name) = #base_name { #(#encode_part)* } else { #(#encode)* } } }, quote! {{ let changed = r.read_bool()?; match (#base_name, changed) { (Some(#base_name), false) => (*#base_name).clone(), (Some(#base_name), true) => { #name { #(#decode_part,)* } }, (None, true) => { #name { #(#decode,)* } }, (None, false) => return Err(::std::io::Error::new(::std::io::ErrorKind::InvalidData, "Mismatched decode, missing previous state")), } }}) } else { (quote! { if let Some(#base_name) = #base_name { #(#encode_part)* } else { #(#encode)* } }, quote! {{ if let Some(#base_name) = #base_name { #name { #(#decode_part,)* } } else { #name { #(#decode,)* } } }}) } } fn build_tuple(name: &syn::Ident, self_name: &syn::Ident, base_name: &syn::Ident, flags: GenFlags, fields: Punctuated<syn::Field, Comma>) -> (TokenStream, TokenStream) { let mut encode: Vec<TokenStream> = vec![]; let mut encode_part: Vec<TokenStream> = vec![]; let mut decode: Vec<TokenStream> = vec![]; let mut decode_part: Vec<TokenStream> = vec![]; for (idx, field) in fields.into_iter().enumerate() { let index = syn::Index::from(idx); let name_self = quote!(#self_name.#index); let name_base = quote!(#base_name.#index); build_ty( field.ty, flags, &mut encode, &mut encode_part, &mut decode, &mut decode_part, quote!(), &name_self, &name_base, &field.attrs, ); } if flags.contains(GenFlags::COMPLETE) { (quote! { if #base_name.map_or(false, |v| *v == *self) { w.write_bool(false); } else { w.write_bool(true); if let Some(#base_name) = #base_name { #(#encode_part)* } else { #(#encode)* } } }, quote! 
{{ let changed = r.read_bool()?; match (#base_name, changed) { (Some(#base_name), false) => #base_name.clone(), (Some(#base_name), true) => { #name ( #(#decode_part,)* ) }, (None, true) => { #name ( #(#decode,)* ) }, (None, false) => return Err(::std::io::Error::new(::std::io::ErrorKind::InvalidData, "Mismatched decode, missing previous state")), } }}) } else { (quote! { if let Some(#base_name) = #base_name { #(#encode_part)* } else { #(#encode)* } }, quote! {{ if let Some(#base_name) = #base_name { #name ( #(#decode_part,)* ) } else { #name ( #(#decode,)* ) } }}) } }
35.90718
172
0.400673
01eec46d28868a835c329962e8f9940ef629f1ff
4,741
use minterpolate::InterpolationPrimitive; use serde::{Deserialize, Serialize}; use amethyst_core::{ alga::general::{SubsetOf, SupersetOf}, ecs::prelude::{Entity, WriteStorage}, math::{convert, RealField}, }; use crate::resources::{AnimationControlSet, AnimationSampling}; use self::SamplerPrimitive::*; /// Get the animation set for an entity. If none exists, one will be added. If entity is invalid, /// (eg. removed before) None will be returned. /// /// ### Type parameters: /// /// - `I`: identifier type for running animations, only one animation can be run at the same time /// with the same id /// - `T`: the component type that the animation applies to pub fn get_animation_set<'a, I, T>( controls: &'a mut WriteStorage<'_, AnimationControlSet<I, T>>, entity: Entity, ) -> Option<&'a mut AnimationControlSet<I, T>> where I: Send + Sync + 'static, T: AnimationSampling, { controls .entry(entity) .ok() .map(|entry| entry.or_insert_with(AnimationControlSet::default)) } /// Sampler primitive #[derive(Debug, Clone, Copy, Serialize, Deserialize)] pub enum SamplerPrimitive<S> where S: RealField + SubsetOf<f32> + SupersetOf<f32>, { /// A single value Scalar(S), /// Two values Vec2([S; 2]), /// Three values Vec3([S; 3]), /// Four values Vec4([S; 4]), } impl<S> From<[S; 2]> for SamplerPrimitive<S> where S: RealField + SubsetOf<f32> + SupersetOf<f32>, { fn from(arr: [S; 2]) -> Self { SamplerPrimitive::Vec2(arr) } } impl<S> From<[S; 3]> for SamplerPrimitive<S> where S: RealField + SubsetOf<f32> + SupersetOf<f32>, { fn from(arr: [S; 3]) -> Self { SamplerPrimitive::Vec3(arr) } } impl<S> From<[S; 4]> for SamplerPrimitive<S> where S: RealField + SubsetOf<f32> + SupersetOf<f32>, { fn from(arr: [S; 4]) -> Self { SamplerPrimitive::Vec4(arr) } } impl<S> InterpolationPrimitive for SamplerPrimitive<S> where S: RealField + SubsetOf<f32> + SupersetOf<f32>, { fn add(&self, other: &Self) -> Self { match (*self, *other) { (Scalar(ref s), Scalar(ref o)) => Scalar(*s + *o), (Vec2(ref s), Vec2(ref 
o)) => Vec2([s[0] + o[0], s[1] + o[1]]), (Vec3(ref s), Vec3(ref o)) => Vec3([s[0] + o[0], s[1] + o[1], s[2] + o[2]]), (Vec4(ref s), Vec4(ref o)) => { Vec4([s[0] + o[0], s[1] + o[1], s[2] + o[2], s[3] + o[3]]) } _ => panic!("Interpolation can not be done between primitives of different types"), } } fn sub(&self, other: &Self) -> Self { match (*self, *other) { (Scalar(ref s), Scalar(ref o)) => Scalar(*s - *o), (Vec2(ref s), Vec2(ref o)) => Vec2([s[0] - o[0], s[1] - o[1]]), (Vec3(ref s), Vec3(ref o)) => Vec3([s[0] - o[0], s[1] - o[1], s[2] - o[2]]), (Vec4(ref s), Vec4(ref o)) => { Vec4([s[0] - o[0], s[1] - o[1], s[2] - o[2], s[3] - o[3]]) } _ => panic!("Interpolation can not be done between primitives of different types"), } } fn mul(&self, scalar: f32) -> Self { match *self { Scalar(ref s) => Scalar(mul_f32(*s, scalar)), Vec2(ref s) => Vec2([mul_f32(s[0], scalar), mul_f32(s[1], scalar)]), Vec3(ref s) => Vec3([ mul_f32(s[0], scalar), mul_f32(s[1], scalar), mul_f32(s[2], scalar), ]), Vec4(ref s) => Vec4([ mul_f32(s[0], scalar), mul_f32(s[1], scalar), mul_f32(s[2], scalar), mul_f32(s[3], scalar), ]), } } fn dot(&self, other: &Self) -> f32 { convert(match (*self, *other) { (Scalar(s), Scalar(o)) => (s * o), (Vec2(s), Vec2(o)) => (s[0] * o[0] + s[1] * o[1]), (Vec3(s), Vec3(o)) => (s[0] * o[0] + s[1] * o[1] + s[2] * o[2]), (Vec4(s), Vec4(o)) => (s[0] * o[0] + s[1] * o[1] + s[2] * o[2] + s[3] * o[3]), _ => panic!("Interpolation can not be done between primitives of different types"), }) } fn magnitude2(&self) -> f32 { self.dot(self) } fn magnitude(&self) -> f32 { match *self { Scalar(s) => convert(s), Vec2(_) | Vec3(_) | Vec4(_) => self.magnitude2().sqrt(), } } fn normalize(&self) -> Self { match *self { Scalar(_) => *self, Vec2(_) | Vec3(_) | Vec4(_) => self.mul(1. / self.magnitude()), } } } fn mul_f32<T: RealField + SubsetOf<f32> + SupersetOf<f32>>(s: T, scalar: f32) -> T { convert::<f32, T>(scalar) * s }
30.197452
97
0.511074
0aef8815e79d0840b013ba8dd3240ba60cf21f4c
4,090
// Copyright 2020. The Tari Project // // Redistribution and use in source and binary forms, with or without modification, are permitted provided that the // following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following // disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the // following disclaimer in the documentation and/or other materials provided with the distribution. // // 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote // products derived from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, // INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
use std::convert::TryFrom; use tari_common_types::types::BlindingFactor; use tari_core::{blocks::BlockHeader, proof_of_work::ProofOfWork}; use tari_utilities::{ByteArray, Hashable}; use crate::{ conversions::{datetime_to_timestamp, timestamp_to_datetime}, tari_rpc as grpc, }; impl From<BlockHeader> for grpc::BlockHeader { fn from(h: BlockHeader) -> Self { let pow_algo = h.pow_algo(); Self { hash: h.hash(), version: u32::from(h.version), height: h.height, prev_hash: h.prev_hash, timestamp: datetime_to_timestamp(h.timestamp), input_mr: h.input_mr, output_mr: h.output_mr, output_mmr_size: h.output_mmr_size, kernel_mr: h.kernel_mr, kernel_mmr_size: h.kernel_mmr_size, witness_mr: h.witness_mr, total_kernel_offset: h.total_kernel_offset.to_vec(), total_script_offset: h.total_script_offset.to_vec(), nonce: h.nonce, pow: Some(grpc::ProofOfWork { pow_algo: pow_algo.as_u64(), pow_data: h.pow.pow_data, }), } } } impl TryFrom<grpc::BlockHeader> for BlockHeader { type Error = String; fn try_from(header: grpc::BlockHeader) -> Result<Self, Self::Error> { let total_kernel_offset = BlindingFactor::from_bytes(&header.total_kernel_offset).map_err(|err| err.to_string())?; let total_script_offset = BlindingFactor::from_bytes(&header.total_script_offset).map_err(|err| err.to_string())?; let timestamp = header .timestamp .and_then(timestamp_to_datetime) .ok_or_else(|| "timestamp not provided or was negative".to_string())?; let pow = match header.pow { Some(p) => ProofOfWork::try_from(p)?, None => return Err("No proof of work provided".into()), }; Ok(Self { version: u16::try_from(header.version).map_err(|_| "header version too large")?, height: header.height, prev_hash: header.prev_hash, timestamp, input_mr: header.input_mr, output_mr: header.output_mr, witness_mr: header.witness_mr, output_mmr_size: header.output_mmr_size, kernel_mr: header.kernel_mr, kernel_mmr_size: header.kernel_mmr_size, total_kernel_offset, total_script_offset, nonce: header.nonce, pow, }) } }
42.164948
118
0.667726
edffe59fff59c4b8c652553d156f62237a6a3ad6
19,458
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // ---------------------------------------------------------------------- // Gathering loans // // The borrow check proceeds in two phases. In phase one, we gather the full // set of loans that are required at any point. These are sorted according to // their associated scopes. In phase two, checking loans, we will then make // sure that all of these loans are honored. use middle::borrowck::*; use middle::borrowck::LoanPathKind::*; use middle::borrowck::move_data::MoveData; use middle::expr_use_visitor as euv; use middle::mem_categorization as mc; use middle::region; use middle::ty; use util::ppaux::{Repr}; use syntax::ast; use syntax::codemap::Span; use syntax::visit; use syntax::visit::Visitor; use syntax::ast::{Expr, FnDecl, Block, NodeId, Pat}; mod lifetime; mod restrictions; mod gather_moves; mod move_error; pub fn gather_loans_in_fn<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, decl: &ast::FnDecl, body: &ast::Block) -> (Vec<Loan<'tcx>>, move_data::MoveData<'tcx>) { let mut glcx = GatherLoanCtxt { bccx: bccx, all_loans: Vec::new(), item_ub: region::CodeExtent::from_node_id(body.id), move_data: MoveData::new(), move_error_collector: move_error::MoveErrorCollector::new(), }; { let mut euv = euv::ExprUseVisitor::new(&mut glcx, bccx.tcx); euv.walk_fn(decl, body); } glcx.report_potential_errors(); let GatherLoanCtxt { all_loans, move_data, .. 
} = glcx; (all_loans, move_data) } struct GatherLoanCtxt<'a, 'tcx: 'a> { bccx: &'a BorrowckCtxt<'a, 'tcx>, move_data: move_data::MoveData<'tcx>, move_error_collector: move_error::MoveErrorCollector<'tcx>, all_loans: Vec<Loan<'tcx>>, /// `item_ub` is used as an upper-bound on the lifetime whenever we /// ask for the scope of an expression categorized as an upvar. item_ub: region::CodeExtent, } impl<'a, 'tcx> euv::Delegate<'tcx> for GatherLoanCtxt<'a, 'tcx> { fn consume(&mut self, consume_id: ast::NodeId, _consume_span: Span, cmt: mc::cmt<'tcx>, mode: euv::ConsumeMode) { debug!("consume(consume_id={}, cmt={}, mode={})", consume_id, cmt.repr(self.tcx()), mode); match mode { euv::Move(move_reason) => { gather_moves::gather_move_from_expr( self.bccx, &self.move_data, &self.move_error_collector, consume_id, cmt, move_reason); } euv::Copy => { } } } fn matched_pat(&mut self, matched_pat: &ast::Pat, cmt: mc::cmt<'tcx>, mode: euv::MatchMode) { debug!("matched_pat(matched_pat={}, cmt={}, mode={})", matched_pat.repr(self.tcx()), cmt.repr(self.tcx()), mode); if let mc::cat_downcast(..) 
= cmt.cat { gather_moves::gather_match_variant( self.bccx, &self.move_data, &self.move_error_collector, matched_pat, cmt, mode); } } fn consume_pat(&mut self, consume_pat: &ast::Pat, cmt: mc::cmt<'tcx>, mode: euv::ConsumeMode) { debug!("consume_pat(consume_pat={}, cmt={}, mode={})", consume_pat.repr(self.tcx()), cmt.repr(self.tcx()), mode); match mode { euv::Copy => { return; } euv::Move(_) => { } } gather_moves::gather_move_from_pat( self.bccx, &self.move_data, &self.move_error_collector, consume_pat, cmt); } fn borrow(&mut self, borrow_id: ast::NodeId, borrow_span: Span, cmt: mc::cmt<'tcx>, loan_region: ty::Region, bk: ty::BorrowKind, loan_cause: euv::LoanCause) { debug!("borrow(borrow_id={}, cmt={}, loan_region={}, \ bk={}, loan_cause={})", borrow_id, cmt.repr(self.tcx()), loan_region, bk, loan_cause); self.guarantee_valid(borrow_id, borrow_span, cmt, bk, loan_region, loan_cause); } fn mutate(&mut self, assignment_id: ast::NodeId, assignment_span: Span, assignee_cmt: mc::cmt<'tcx>, mode: euv::MutateMode) { debug!("mutate(assignment_id={}, assignee_cmt={})", assignment_id, assignee_cmt.repr(self.tcx())); match opt_loan_path(&assignee_cmt) { Some(lp) => { gather_moves::gather_assignment(self.bccx, &self.move_data, assignment_id, assignment_span, lp, assignee_cmt.id, mode); } None => { // This can occur with e.g. `*foo() = 5`. In such // cases, there is no need to check for conflicts // with moves etc, just ignore. } } } fn decl_without_init(&mut self, id: ast::NodeId, span: Span) { gather_moves::gather_decl(self.bccx, &self.move_data, id, span, id); } } /// Implements the A-* rules in doc.rs. 
fn check_aliasability<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, borrow_span: Span, loan_cause: euv::LoanCause, cmt: mc::cmt<'tcx>, req_kind: ty::BorrowKind) -> Result<(),()> { match (cmt.freely_aliasable(bccx.tcx), req_kind) { (None, _) => { /* Uniquely accessible path -- OK for `&` and `&mut` */ Ok(()) } (Some(mc::AliasableStatic(safety)), ty::ImmBorrow) => { // Borrow of an immutable static item: match safety { mc::InteriorUnsafe => { // If the static item contains an Unsafe<T>, it has interior // mutability. In such cases, another phase of the compiler // will ensure that the type is `Sync` and then trans will // not put it in rodata, so this is ok to allow. Ok(()) } mc::InteriorSafe => { // Immutable static can be borrowed, no problem. Ok(()) } } } (Some(mc::AliasableStaticMut(..)), _) => { // Even touching a static mut is considered unsafe. We assume the // user knows what they're doing in these cases. Ok(()) } (Some(alias_cause), ty::UniqueImmBorrow) | (Some(alias_cause), ty::MutBorrow) => { bccx.report_aliasability_violation( borrow_span, BorrowViolation(loan_cause), alias_cause); Err(()) } (_, _) => { Ok(()) } } } impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { pub fn tcx(&self) -> &'a ty::ctxt<'tcx> { self.bccx.tcx } /// Guarantees that `addr_of(cmt)` will be valid for the duration of `static_scope_r`, or /// reports an error. This may entail taking out loans, which will be added to the /// `req_loan_map`. fn guarantee_valid(&mut self, borrow_id: ast::NodeId, borrow_span: Span, cmt: mc::cmt<'tcx>, req_kind: ty::BorrowKind, loan_region: ty::Region, cause: euv::LoanCause) { debug!("guarantee_valid(borrow_id={}, cmt={}, \ req_mutbl={}, loan_region={})", borrow_id, cmt.repr(self.tcx()), req_kind, loan_region); // a loan for the empty region can never be dereferenced, so // it is always safe if loan_region == ty::ReEmpty { return; } // Check that the lifetime of the borrow does not exceed // the lifetime of the data being borrowed. 
if lifetime::guarantee_lifetime(self.bccx, self.item_ub, borrow_span, cause, cmt.clone(), loan_region, req_kind).is_err() { return; // reported an error, no sense in reporting more. } // Check that we don't allow mutable borrows of non-mutable data. if check_mutability(self.bccx, borrow_span, cause, cmt.clone(), req_kind).is_err() { return; // reported an error, no sense in reporting more. } // Check that we don't allow mutable borrows of aliasable data. if check_aliasability(self.bccx, borrow_span, cause, cmt.clone(), req_kind).is_err() { return; // reported an error, no sense in reporting more. } // Compute the restrictions that are required to enforce the // loan is safe. let restr = restrictions::compute_restrictions( self.bccx, borrow_span, cause, cmt.clone(), loan_region); debug!("guarantee_valid(): restrictions={}", restr); // Create the loan record (if needed). let loan = match restr { restrictions::Safe => { // No restrictions---no loan record necessary return; } restrictions::SafeIf(loan_path, restricted_paths) => { let loan_scope = match loan_region { ty::ReScope(scope) => scope, ty::ReFree(ref fr) => fr.scope, ty::ReStatic => { // If we get here, an error must have been // reported in // `lifetime::guarantee_lifetime()`, because // the only legal ways to have a borrow with a // static lifetime should not require // restrictions. To avoid reporting derived // errors, we just return here without adding // any loans. return; } ty::ReEmpty | ty::ReLateBound(..) | ty::ReEarlyBound(..) | ty::ReInfer(..) 
=> { self.tcx().sess.span_bug( cmt.span, format!("invalid borrow lifetime: {}", loan_region).as_slice()); } }; debug!("loan_scope = {}", loan_scope); let borrow_scope = region::CodeExtent::from_node_id(borrow_id); let gen_scope = self.compute_gen_scope(borrow_scope, loan_scope); debug!("gen_scope = {}", gen_scope); let kill_scope = self.compute_kill_scope(loan_scope, &*loan_path); debug!("kill_scope = {}", kill_scope); if req_kind == ty::MutBorrow { self.mark_loan_path_as_mutated(&*loan_path); } Loan { index: self.all_loans.len(), loan_path: loan_path, kind: req_kind, gen_scope: gen_scope, kill_scope: kill_scope, span: borrow_span, restricted_paths: restricted_paths, cause: cause, } } }; debug!("guarantee_valid(borrow_id={}), loan={}", borrow_id, loan.repr(self.tcx())); // let loan_path = loan.loan_path; // let loan_gen_scope = loan.gen_scope; // let loan_kill_scope = loan.kill_scope; self.all_loans.push(loan); // if loan_gen_scope != borrow_id { // FIXME(#6268) Nested method calls // // Typically, the scope of the loan includes the point at // which the loan is originated. This // This is a subtle case. See the test case // <compile-fail/borrowck-bad-nested-calls-free.rs> // to see what we are guarding against. //let restr = restrictions::compute_restrictions( // self.bccx, borrow_span, cmt, RESTR_EMPTY); //let loan = { // let all_loans = &mut *self.all_loans; // FIXME(#5074) // Loan { // index: all_loans.len(), // loan_path: loan_path, // cmt: cmt, // mutbl: ConstMutability, // gen_scope: borrow_id, // kill_scope: kill_scope, // span: borrow_span, // restrictions: restrictions // } // } fn check_mutability<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, borrow_span: Span, cause: euv::LoanCause, cmt: mc::cmt<'tcx>, req_kind: ty::BorrowKind) -> Result<(),()> { //! Implements the M-* rules in doc.rs. 
match req_kind { ty::UniqueImmBorrow | ty::ImmBorrow => { match cmt.mutbl { // I am intentionally leaving this here to help // refactoring if, in the future, we should add new // kinds of mutability. mc::McImmutable | mc::McDeclared | mc::McInherited => { // both imm and mut data can be lent as imm; // for mutable data, this is a freeze Ok(()) } } } ty::MutBorrow => { // Only mutable data can be lent as mutable. if !cmt.mutbl.is_mutable() { Err(bccx.report(BckError { span: borrow_span, cause: cause, cmt: cmt, code: err_mutbl })) } else { Ok(()) } } } } } pub fn mark_loan_path_as_mutated(&self, loan_path: &LoanPath) { //! For mutable loans of content whose mutability derives //! from a local variable, mark the mutability decl as necessary. match loan_path.kind { LpVar(local_id) | LpUpvar(ty::UpvarId{ var_id: local_id, closure_expr_id: _ }) => { self.tcx().used_mut_nodes.borrow_mut().insert(local_id); } LpDowncast(ref base, _) | LpExtend(ref base, mc::McInherited, _) | LpExtend(ref base, mc::McDeclared, _) => { self.mark_loan_path_as_mutated(&**base); } LpExtend(_, mc::McImmutable, _) => { // Nothing to do. } } } pub fn compute_gen_scope(&self, borrow_scope: region::CodeExtent, loan_scope: region::CodeExtent) -> region::CodeExtent { //! Determine when to introduce the loan. Typically the loan //! is introduced at the point of the borrow, but in some cases, //! notably method arguments, the loan may be introduced only //! later, once it comes into scope. if self.bccx.tcx.region_maps.is_subscope_of(borrow_scope, loan_scope) { borrow_scope } else { loan_scope } } pub fn compute_kill_scope(&self, loan_scope: region::CodeExtent, lp: &LoanPath<'tcx>) -> region::CodeExtent { //! Determine when the loan restrictions go out of scope. //! This is either when the lifetime expires or when the //! local variable which roots the loan-path goes out of scope, //! whichever happens faster. //! //! It may seem surprising that we might have a loan region //! 
larger than the variable which roots the loan-path; this can //! come about when variables of `&mut` type are re-borrowed, //! as in this example: //! //! fn counter<'a>(v: &'a mut Foo) -> &'a mut uint { //! &mut v.counter //! } //! //! In this case, the reference (`'a`) outlives the //! variable `v` that hosts it. Note that this doesn't come up //! with immutable `&` pointers, because borrows of such pointers //! do not require restrictions and hence do not cause a loan. let lexical_scope = lp.kill_scope(self.bccx.tcx); let rm = &self.bccx.tcx.region_maps; if rm.is_subscope_of(lexical_scope, loan_scope) { lexical_scope } else { assert!(self.bccx.tcx.region_maps.is_subscope_of(loan_scope, lexical_scope)); loan_scope } } pub fn report_potential_errors(&self) { self.move_error_collector.report_potential_errors(self.bccx); } } /// Context used while gathering loans on static initializers /// /// This visitor walks static initializer's expressions and makes /// sure the loans being taken are sound. struct StaticInitializerCtxt<'a, 'tcx: 'a> { bccx: &'a BorrowckCtxt<'a, 'tcx> } impl<'a, 'tcx, 'v> Visitor<'v> for StaticInitializerCtxt<'a, 'tcx> { fn visit_expr(&mut self, ex: &Expr) { if let ast::ExprAddrOf(mutbl, ref base) = ex.node { let base_cmt = self.bccx.cat_expr(&**base); let borrow_kind = ty::BorrowKind::from_mutbl(mutbl); // Check that we don't allow borrows of unsafe static items. if check_aliasability(self.bccx, ex.span, euv::AddrOf, base_cmt, borrow_kind).is_err() { return; // reported an error, no sense in reporting more. } } visit::walk_expr(self, ex); } } pub fn gather_loans_in_static_initializer(bccx: &mut BorrowckCtxt, expr: &ast::Expr) { debug!("gather_loans_in_static_initializer(expr={})", expr.repr(bccx.tcx)); let mut sicx = StaticInitializerCtxt { bccx: bccx }; sicx.visit_expr(expr); }
37.856031
93
0.500257
e296f3f7cde8f1ff0f56a12d96c277b2fb308f54
1,256
use std::{fmt, io}; use crate::serialization::{SerializationError, ZcashDeserialize, ZcashSerialize}; /// An encoding of a BCTV14 proof, as used in Zcash. pub struct Bctv14Proof(pub [u8; 296]); impl fmt::Debug for Bctv14Proof { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("Bctv14Proof") .field(&hex::encode(&self.0[..])) .finish() } } // These impls all only exist because of array length restrictions. impl Copy for Bctv14Proof {} impl Clone for Bctv14Proof { fn clone(&self) -> Self { let mut bytes = [0; 296]; bytes[..].copy_from_slice(&self.0[..]); Self(bytes) } } impl PartialEq for Bctv14Proof { fn eq(&self, other: &Self) -> bool { self.0[..] == other.0[..] } } impl Eq for Bctv14Proof {} impl ZcashSerialize for Bctv14Proof { fn zcash_serialize<W: io::Write>(&self, mut writer: W) -> Result<(), SerializationError> { writer.write_all(&self.0[..])?; Ok(()) } } impl ZcashDeserialize for Bctv14Proof { fn zcash_deserialize<R: io::Read>(mut reader: R) -> Result<Self, SerializationError> { let mut bytes = [0; 296]; reader.read_exact(&mut bytes[..])?; Ok(Self(bytes)) } }
25.12
94
0.605096
d78d2d967408a2ec238f031b817c663f4ed20ea6
453
//! [Dispatch](https://developer.apple.com/documentation/dispatch) library. //! //! # Feature Flag //! //! This module corresponds to the **`dispatch`** //! [feature flag](../index.html#feature-flags). #![cfg(feature = "dispatch")] pub mod sys; mod autorelease_frequency; mod object; mod qos; mod queue; mod source; mod time; pub use autorelease_frequency::*; pub use object::*; pub use qos::*; pub use queue::*; pub use source::*; pub use time::*;
18.12
75
0.675497
266d8a8c44edabe93b3719fa06be80628a559743
10,319
use { crate::{ error::MetaplexError, state::{CommonWinningIndexChecks, CommonWinningIndexReturn, WinningConfigType, PREFIX}, utils::{ assert_derivation, common_redeem_checks, common_redeem_finish, get_amount_from_token_account, transfer_safety_deposit_box_items, CommonRedeemCheckArgs, CommonRedeemFinishArgs, CommonRedeemReturn, }, }, arrayref::array_ref, mpl_auction::processor::AuctionData, mpl_token_metadata::{ deprecated_instruction::deprecated_set_reservation_list, state::Reservation, }, solana_program::{ account_info::{next_account_info, AccountInfo}, entrypoint::ProgramResult, program::invoke_signed, program_error::ProgramError, pubkey::Pubkey, }, }; fn set_reservation_list_wrapper<'a>( program_id: &'a Pubkey, master_edition_info: &AccountInfo<'a>, reservation_list_info: &AccountInfo<'a>, auction_manager_info: &AccountInfo<'a>, signer_seeds: &[&[u8]], reservations: Vec<Reservation>, total_reservation_spots: Option<u64>, offset: u64, total_spot_offset: u64, ) -> ProgramResult { invoke_signed( &deprecated_set_reservation_list( *program_id, *master_edition_info.key, *reservation_list_info.key, *auction_manager_info.key, reservations, total_reservation_spots, offset, total_spot_offset, ), &[ master_edition_info.clone(), reservation_list_info.clone(), auction_manager_info.clone(), ], &[&signer_seeds], )?; Ok(()) } fn get_supply_snapshot_off_reservation_list( reservation_list_info: &AccountInfo, ) -> Result<Option<u64>, ProgramError> { let data = reservation_list_info.try_borrow_data()?; // this is an option, 9 bytes, first is 0 means is none if data[33] == 0 { Ok(None) } else { let amount_data = array_ref![data, 34, 8]; Ok(Some(u64::from_le_bytes(*amount_data))) } } #[allow(clippy::too_many_arguments)] pub fn reserve_list_if_needed<'a>( program_id: &'a Pubkey, winning_index: usize, auction_info: &AccountInfo<'a>, bidder_info: &AccountInfo<'a>, master_edition_info: &AccountInfo<'a>, reservation_list_info: &AccountInfo<'a>, auction_manager_info: &AccountInfo<'a>, 
safety_deposit_token_store_info: &AccountInfo<'a>, signer_seeds: &[&[u8]], ) -> ProgramResult { let total_reservation_spot_opt: Option<u64>; // This math will explicitly be off in custom cases where you are giving away multiple editions to a single // person. However these are rare. This optimization will literally break this case because // there will be fewer reservation spots than those available. However I'm switching to it // because we need to support those 50 person legacy auctions out there which are mostly limited editions // and get them redeemed so we can move to the newer system which works. let total_spot_offset: u64 = winning_index as u64; if get_supply_snapshot_off_reservation_list(reservation_list_info)?.is_none() { total_reservation_spot_opt = Some(std::cmp::min( get_amount_from_token_account(safety_deposit_token_store_info)?, AuctionData::get_num_winners(auction_info) as u64, )); } else { total_reservation_spot_opt = None } let my_spots: u64 = 1; set_reservation_list_wrapper( program_id, master_edition_info, reservation_list_info, auction_manager_info, signer_seeds, vec![Reservation { address: *bidder_info.key, spots_remaining: my_spots, total_spots: my_spots, }], total_reservation_spot_opt, // Note this logic is explicitly wrong in cases of tiered auctions where the edition // is not present in every single winning config. But that would require iteration to figure out, // and we are optimizing for the 99.8% case in this legacy logic. winning_index as u64, total_spot_offset, )?; Ok(()) } pub fn process_redeem_bid<'a>( program_id: &'a Pubkey, accounts: &'a [AccountInfo<'a>], // If present, means an auctioneer is collecting this bid and we should disregard bidder metadata // and just collect the prize. Can only be set through an inner call with redeem_unused_winning_config_items. 
overwrite_win_index: Option<usize>, ) -> ProgramResult { let account_info_iter = &mut accounts.iter(); let auction_manager_info = next_account_info(account_info_iter)?; let safety_deposit_token_store_info = next_account_info(account_info_iter)?; let destination_info = next_account_info(account_info_iter)?; let bid_redemption_info = next_account_info(account_info_iter)?; let safety_deposit_info = next_account_info(account_info_iter)?; let vault_info = next_account_info(account_info_iter)?; let fraction_mint_info = next_account_info(account_info_iter)?; let auction_info = next_account_info(account_info_iter)?; let bidder_metadata_info = next_account_info(account_info_iter)?; let bidder_info = next_account_info(account_info_iter)?; let payer_info = next_account_info(account_info_iter)?; let token_program_info = next_account_info(account_info_iter)?; let token_vault_program_info = next_account_info(account_info_iter)?; let token_metadata_program_info = next_account_info(account_info_iter)?; let store_info = next_account_info(account_info_iter)?; let system_info = next_account_info(account_info_iter)?; let rent_info = next_account_info(account_info_iter)?; let transfer_authority_info = next_account_info(account_info_iter)?; let safety_deposit_config_info = next_account_info(account_info_iter).ok(); let auction_extended_info = next_account_info(account_info_iter).ok(); let CommonRedeemReturn { auction_manager, redemption_bump_seed, cancelled, rent: _rent, win_index, token_metadata_program: _t, } = common_redeem_checks(CommonRedeemCheckArgs { program_id, auction_manager_info, safety_deposit_token_store_info, destination_info, bid_redemption_info, safety_deposit_info, vault_info, auction_info, auction_extended_info, bidder_metadata_info, bidder_info, token_program_info, token_vault_program_info, token_metadata_program_info, rent_info, store_info, safety_deposit_config_info, is_participation: false, user_provided_win_index: None, overwrite_win_index, assert_bidder_signer: 
true, ignore_bid_redeemed_item_check: false, })?; let mut winning_item_index = None; if !cancelled { if let Some(winning_index) = win_index { // Okay, so they placed in the auction winning prizes section! let CommonWinningIndexReturn { amount, winning_config_type, winning_config_item_index, } = auction_manager.common_winning_index_checks(CommonWinningIndexChecks { safety_deposit_info, winning_index, auction_manager_v1_ignore_claim: false, safety_deposit_config_info, })?; winning_item_index = winning_config_item_index; if winning_config_type != WinningConfigType::TokenOnlyTransfer && winning_config_type != WinningConfigType::PrintingV1 { return Err(MetaplexError::WrongBidEndpointForPrize.into()); } let auction_bump_seed = assert_derivation( program_id, auction_manager_info, &[PREFIX.as_bytes(), &auction_manager.auction().as_ref()], )?; let auction_key = auction_manager.auction(); let auction_auth_seeds = &[ PREFIX.as_bytes(), auction_key.as_ref(), &[auction_bump_seed], ]; if winning_config_type == WinningConfigType::PrintingV1 && overwrite_win_index.is_none() { let master_edition_info = match safety_deposit_config_info { Some(val) => val, None => return Err(ProgramError::NotEnoughAccountKeys), }; let reservation_list_info = match auction_extended_info { Some(val) => val, None => return Err(ProgramError::NotEnoughAccountKeys), }; reserve_list_if_needed( token_metadata_program_info.key, winning_index, auction_info, bidder_info, master_edition_info, reservation_list_info, auction_manager_info, safety_deposit_token_store_info, auction_auth_seeds, )?; } transfer_safety_deposit_box_items( token_vault_program_info.clone(), destination_info.clone(), safety_deposit_info.clone(), safety_deposit_token_store_info.clone(), vault_info.clone(), fraction_mint_info.clone(), auction_manager_info.clone(), transfer_authority_info.clone(), rent_info.clone(), amount as u64, auction_auth_seeds, )?; } } common_redeem_finish(CommonRedeemFinishArgs { program_id, auction_manager, 
auction_manager_info, bidder_metadata_info, rent_info, system_info, payer_info, bid_redemption_info, safety_deposit_config_info, vault_info, winning_index: win_index, redemption_bump_seed, bid_redeemed: true, participation_redeemed: false, winning_item_index, overwrite_win_index, })?; Ok(()) }
36.207018
113
0.642989
4acec8dbfca6184a288dd770c7c4966714fb7a88
5,152
// Copyright 2017 PingCAP, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // See the License for the specific language governing permissions and // limitations under the License. use std::io::{Result, Write}; use std::option::Option; use std::sync::Arc; use rocksdb::RateLimiter; const PRIORITY_HIGH: u8 = 1; const REFILL_PERIOD: i64 = 100 * 1000; const FARENESS: i32 = 10; const SNAP_MAX_BYTES_PER_TIME: i64 = 4 * 1024 * 1024; pub const DEFAULT_SNAP_MAX_BYTES_PER_SEC: u64 = 100 * 1024 * 1024; /// The I/O rate limiter for RocksDB. /// /// Throttles the maximum bytes per second written to disk. pub struct IOLimiter { inner: RateLimiter, } impl IOLimiter { /// # Arguments /// /// - `bytes_per_sec`: controls the total write rate of compaction and flush in bytes per second. pub fn new(bytes_per_sec: u64) -> IOLimiter { IOLimiter { inner: RateLimiter::new(bytes_per_sec as i64, REFILL_PERIOD, FARENESS), } } /// Sets the rate limit in bytes per second pub fn set_bytes_per_second(&self, bytes_per_sec: i64) { self.inner.set_bytes_per_second(bytes_per_sec) } /// Requests an access token to read or write bytes. If this request can not be satisfied, the call is blocked. pub fn request(&self, bytes: i64) { self.inner.request(bytes, PRIORITY_HIGH) } /// Gets the max bytes that can be granted in a single burst. /// Note: it will be less than or equal to `SNAP_MAX_BYTES_PER_TIME`. pub fn get_max_bytes_per_time(&self) -> i64 { if self.inner.get_singleburst_bytes() > SNAP_MAX_BYTES_PER_TIME { SNAP_MAX_BYTES_PER_TIME } else { self.inner.get_singleburst_bytes() } } /// Gets the total bytes that have gone through the rate limiter. 
pub fn get_total_bytes_through(&self) -> i64 { self.inner.get_total_bytes_through(PRIORITY_HIGH) } /// Gets the rate limit in bytes per second. pub fn get_bytes_per_second(&self) -> i64 { self.inner.get_bytes_per_second() } /// Gets the total number of requests that have gone through rate limiter pub fn get_total_requests(&self) -> i64 { self.inner.get_total_requests(PRIORITY_HIGH) } } pub struct LimitWriter<'a, T: Write + 'a> { limiter: Option<Arc<IOLimiter>>, writer: &'a mut T, } impl<'a, T: Write + 'a> LimitWriter<'a, T> { pub fn new(limiter: Option<Arc<IOLimiter>>, writer: &'a mut T) -> LimitWriter<'a, T> { LimitWriter { limiter, writer } } } impl<'a, T: Write + 'a> Write for LimitWriter<'a, T> { fn write(&mut self, buf: &[u8]) -> Result<usize> { let total = buf.len(); if let Some(ref limiter) = self.limiter { let single = limiter.get_max_bytes_per_time() as usize; let mut curr = 0; let mut end; while curr < total { if curr + single >= total { end = total; } else { end = curr + single; } limiter.request((end - curr) as i64); self.writer.write_all(&buf[curr..end])?; curr = end; } } else { self.writer.write_all(buf)?; } Ok(total) } fn flush(&mut self) -> Result<()> { self.writer.flush()?; Ok(()) } } #[cfg(test)] mod tests { use std::fs::File; use std::io::{Read, Write}; use std::sync::Arc; use tempdir::TempDir; use super::{IOLimiter, LimitWriter, SNAP_MAX_BYTES_PER_TIME}; #[test] fn test_io_limiter() { let limiter = IOLimiter::new(10 * 1024 * 1024); assert!(limiter.get_max_bytes_per_time() <= SNAP_MAX_BYTES_PER_TIME); limiter.set_bytes_per_second(20 * 1024 * 1024); assert_eq!(limiter.get_bytes_per_second(), 20 * 1024 * 1024); assert_eq!(limiter.get_total_bytes_through(), 0); limiter.request(1024 * 1024); assert_eq!(limiter.get_total_bytes_through(), 1024 * 1024); assert_eq!(limiter.get_total_requests(), 1); } #[test] fn test_limit_writer() { let dir = TempDir::new("_test_limit_writer").expect(""); let path = dir.path().join("test-file"); let mut file = 
File::create(&path).unwrap(); let mut limit_writer = LimitWriter::new(Some(Arc::new(IOLimiter::new(1024))), &mut file); let mut s = String::new(); for _ in 0..100 { s.push_str("Hello, World!"); } limit_writer.write_all(s.as_bytes()).unwrap(); limit_writer.flush().unwrap(); let mut file = File::open(&path).unwrap(); let mut contents = String::new(); file.read_to_string(&mut contents).unwrap(); assert_eq!(contents, s); } }
31.414634
115
0.611801
fc1b749b42309b848d4967c46d66a1553a259ff1
7,178
use self::super::super::super::isa::{GeneralPurposeRegisterBank, GeneralPurposeRegister, SpecialPurposeRegister}; use self::super::super::super::micro::{MicroOpPerformError, MicroOpBlock, MicroOp, NEXT_INSTRUCTION}; use arraydeque::{ArrayDeque, Wrapping as ArrayDequeBehaviourWrapping}; use self::super::super::super::isa::instruction::Instruction; use self::super::super::super::vm::{Memory, Ports}; use self::super::super::super::rw::ReadWritable; use std::collections::BTreeSet; /// Container for all data needed and/or useful for running a `pir-8-emu` virtual machine /// /// # Examples /// /// ``` /// # use pir_8_emu::ReadWritable; /// # use pir_8_emu::binutils::pir_8_emu::Vm; /// # use pir_8_emu::isa::instruction::{InstructionLoadImmediateWideRegisterPair, InstructionMadrDirection, InstructionRegisterPair, Instruction}; /// let mut vm = Vm::new("FSXYABCD").unwrap(); /// vm.reset("FSXYABCD", &[ /// Instruction::Halt.into(), /// Instruction::LoadImmediateWide { rr: InstructionLoadImmediateWideRegisterPair::Adr }.into(), /// 0x04, /// 0x20, /// Instruction::LoadImmediateByte { rrr: 0b000 }.into(), /// 0x69, /// Instruction::Save { rrr: 0b000 }.into(), /// Instruction::Halt.into(), /// ]); /// /// vm.jump_to_addr(0x0001).unwrap(); /// while !vm.execution_finished { /// vm.ins.reset_rw(); /// vm.perform_next_op().unwrap(); /// } /// /// assert_eq!(vm.memory[0x0420], 0x69); /// ``` #[derive(Debug)] pub struct Vm { pub memory: Memory, pub ports: Ports, pub registers: GeneralPurposeRegisterBank, pub pc: SpecialPurposeRegister<u16>, pub sp: SpecialPurposeRegister<u16>, pub adr: SpecialPurposeRegister<u16>, pub ins: SpecialPurposeRegister<u8>, pub ops: (MicroOpBlock, usize), pub curr_op: usize, pub instruction: Instruction, /// If this is set, [`instruction`](#structfield.instruction) contains the current instruction and /// [`ops`](#structfield.ops) contains the μOps corresponding thereto pub instruction_valid: bool, pub execution_finished: bool, pub stack: Vec<u8>, /// Any 
instruction successfully loaded will be added to the front of this queue pub instruction_history: ArrayDeque<[(u16, Instruction, u16); 10], ArrayDequeBehaviourWrapping>, /// Pause execution when PC is contained herein until the flag is cleared pub breakpoints: BTreeSet<u16>, pub active_breakpoint: Option<u16>, } impl Vm { /// Create a new, zero-initialised VM with the specified general-purpose register letters pub fn new(gp_reg_ltrs: &str) -> Result<Vm, i8> { Ok(Vm { memory: Memory::new(), ports: Ports::new(), registers: GeneralPurposeRegister::from_letters(gp_reg_ltrs)?, pc: SpecialPurposeRegister::new("Program Counter", "PC"), sp: SpecialPurposeRegister::new("Stack Pointer", "SP"), adr: SpecialPurposeRegister::new("Memory Address", "ADR"), ins: SpecialPurposeRegister::new("Instruction", "INS"), ops: NEXT_INSTRUCTION, curr_op: 0, instruction: Instruction::Halt, instruction_valid: false, execution_finished: false, stack: vec![], instruction_history: ArrayDeque::new(), breakpoints: BTreeSet::new(), active_breakpoint: None, }) } /// Reset this VM to a default state but with the specified memory buffer pub fn reset(&mut self, gp_reg_ltrs: &str, memory: &[u8]) -> Result<(), i8> { self.memory = Memory::from(memory); self.ports = Ports::new(); self.registers = GeneralPurposeRegister::from_letters(gp_reg_ltrs)?; self.pc = SpecialPurposeRegister::new("Program Counter", "PC"); self.sp = SpecialPurposeRegister::new("Stack Pointer", "SP"); self.adr = SpecialPurposeRegister::new("Memory Address", "ADR"); self.ins = SpecialPurposeRegister::new("Instruction", "INS"); self.ops = NEXT_INSTRUCTION; self.curr_op = 0; self.instruction_valid = false; self.execution_finished = false; self.stack.clear(); self.instruction_history.clear(); self.breakpoints.clear(); self.active_breakpoint = None; Ok(()) } /// Safely jump to the specified address /// /// The current μOp set will be executed, then `PC` updated to the specified address, and μOps set to /// 
[`NEXT_INSTRUCTION`](../../../micro/static.NEXT_INSTRUCTION.html) pub fn jump_to_addr(&mut self, to_addr: u16) -> Result<(), MicroOpPerformError> { for _ in self.curr_op..self.ops.1 { self.active_breakpoint = None; self.perform_next_op()?; } *self.pc = to_addr; self.ops = NEXT_INSTRUCTION; self.instruction_valid = false; self.curr_op = 0; self.execution_finished = false; self.active_breakpoint = None; Ok(()) } /// Perform next μOp /// /// If execution has finished, do nothing /// /// Otherwise, perform the current μOp and bump the μOp counter /// /// If the last μOp of the set has been performed: /// * if `INS` was written to, load the instruction therein /// * otherwise, load [`NEXT_INSTRUCTION`](../../../micro/static.NEXT_INSTRUCTION.html) /// /// The returned value represents whether new μOps are present pub fn perform_next_op(&mut self) -> Result<bool, MicroOpPerformError> { if self.execution_finished || self.active_breakpoint.is_some() { return Ok(false); } let mut new_ops = false; self.execution_finished = !self.ops.0[self.curr_op].perform(&mut self.stack, &mut self.memory, &mut self.ports, &mut self.registers, &mut self.pc, &mut self.sp, &mut self.adr, &mut self.ins)?; self.curr_op += 1; let (pc_r, pc_w) = (self.pc.was_read(), self.pc.was_written()); let pc = self.pc.wrapping_sub(1); if self.curr_op >= self.ops.1 { if self.ins.was_written() { self.instruction = Instruction::from(*self.ins); self.ops = MicroOp::from_instruction(self.instruction); self.instruction_valid = true; let mut data = 0u16; for i in 1..=(self.instruction.data_length() as u16) { data = (data << 8) | (self.memory[..][pc.wrapping_add(i) as usize] as u16); } self.instruction_history.push_front((pc, self.instruction, data)); } else { self.ops = NEXT_INSTRUCTION; self.instruction_valid = false; } self.curr_op = 0; new_ops = true; } self.active_breakpoint = self.breakpoints.get(&pc).copied(); if !pc_r { self.pc.reset_rw(); if pc_w { *self.pc = pc.wrapping_add(1); } } Ok(new_ops) } }
35.534653
146
0.604904
918983b5d6806348ea115580218c7a4c65115923
13,581
use std::collections::BinaryHeap; use std::collections::HashSet; use util::input::get_input_string; const ROOM_SLOT: &'static [i64] = &[2, 4, 6, 8]; const HALL_SLOT: &'static [i64] = &[0, 1, 3, 5, 7, 9, 10]; const COST_TABLE: &'static [i64] = &[1, 10, 100, 1000]; fn has_reached(a: &Vec<(char, char)>, b: &Vec<(char, char)>) -> bool { if a.len() != b.len() { return false; } for i in 0..a.len() { if a[i].0 != b[i].0 || a[i].1 != b[i].1 { return false; } } true } fn search(rooms: Vec<(char, char)>) -> i64 { let dest: Vec<(char, char)> = Vec::from([('A', 'A'), ('B', 'B'), ('C', 'C'), ('D', 'D')]); let mut cost_heap = BinaryHeap::new(); let mut vis: HashSet<(Vec<(char, char)>, Vec<char>)> = HashSet::new(); cost_heap.push(( 0, ( rooms.clone(), Vec::from([' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ']), ), )); while !cost_heap.is_empty() { let (cost, (current_state_room, current_state_hallway)) = cost_heap.pop().unwrap(); if has_reached(&current_state_room, &dest) { return -cost; } if !vis.insert((current_state_room.to_vec(), current_state_hallway.to_vec())) { continue; } for i in 0..4 { if current_state_room[i].0 == dest[i].0 && current_state_room[i].1 == dest[i].1 { continue; } let to_move; let to_move_row; let to_move_id; if current_state_room[i].0 != ' ' { to_move_row = 0; to_move = current_state_room[i].0; } else if current_state_room[i].1 != ' ' { to_move_row = 1; to_move = current_state_room[i].1; } else { continue; } to_move_id = match to_move { 'A' => 0, 'B' => 1, 'C' => 2, 'D' => 3, _ => panic!("Something went wrong"), }; for h in HALL_SLOT { let hsize = *h as usize; let left_bound = hsize.min(2 * (i + 1)); let right_bound = hsize.max(2 * (i + 1)) + 1; let bound = &current_state_hallway[left_bound..right_bound]; if bound.iter().all(|&x| x == ' ') { let new_cost = -cost + COST_TABLE[to_move_id] * (to_move_row + 1 + (2 * (i + 1) as i64 - h).abs()); let mut new_state_room = current_state_room.to_vec(); match to_move_row { 0 => { new_state_room[i].0 = ' '; } 
_ => { new_state_room[i].1 = ' '; } } let mut new_state_hallway = current_state_hallway.to_vec(); new_state_hallway[*h as usize] = to_move; cost_heap.push((-new_cost, (new_state_room, new_state_hallway))); } } } for h in HALL_SLOT { let hall_loc = *h as usize; let item = current_state_hallway[hall_loc]; let room_loc; let i; if item == ' ' { continue; } else { i = match item { 'A' => 0, 'B' => 1, 'C' => 2, 'D' => 3, _ => panic!("Something went wrong"), }; room_loc = ROOM_SLOT[i] as usize; } let hrizontal = match hall_loc > room_loc { true => &current_state_hallway[room_loc..hall_loc], _ => &current_state_hallway[hall_loc + 1..room_loc + 1], }; if hrizontal.iter().all(|&x| x == ' ') { if (current_state_room[i].0 == ' ' || current_state_room[i].0 == item) && (current_state_room[i].1 == ' ' || current_state_room[i].1 == item) { let mut vertical = 0; if current_state_room[i].0 == ' ' && current_state_room[i].1 == ' ' { vertical += 1; } let new_cost = -cost + COST_TABLE[i] * (vertical as i64 + 1 + (room_loc as i64 - *h).abs()) as i64; let mut new_state_room = current_state_room.to_vec(); match vertical { 0 => { new_state_room[i].0 = item; } _ => { new_state_room[i].1 = item; } } let mut new_state_hallway = current_state_hallway.to_vec(); new_state_hallway[hall_loc] = ' '; cost_heap.push((-new_cost, (new_state_room, new_state_hallway))); } } } } unreachable!() } fn has_reached2(a: &Vec<(char, char, char, char)>, b: &Vec<(char, char, char, char)>) -> bool { if a.len() != b.len() { return false; } for i in 0..a.len() { if a[i].0 != b[i].0 || a[i].1 != b[i].1 || a[i].2 != b[i].2 || a[i].3 != b[i].3 { return false; } } true } fn search2(rooms: Vec<(char, char, char, char)>) -> i64 { let dest: Vec<(char, char, char, char)> = Vec::from([ ('A', 'A', 'A', 'A'), ('B', 'B', 'B', 'B'), ('C', 'C', 'C', 'C'), ('D', 'D', 'D', 'D'), ]); let mut cost_heap = BinaryHeap::new(); let mut vis: HashSet<(Vec<(char, char, char, char)>, Vec<char>)> = HashSet::new(); cost_heap.push(( 0, ( 
rooms.clone(), Vec::from([' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ']), ), )); while !cost_heap.is_empty() { let (cost, (current_state_room, current_state_hallway)) = cost_heap.pop().unwrap(); if has_reached2(&current_state_room, &dest) { return -cost; } if !vis.insert((current_state_room.to_vec(), current_state_hallway.to_vec())) { continue; } for i in 0..4 { if current_state_room[i].0 == dest[i].0 && current_state_room[i].1 == dest[i].1 && current_state_room[i].2 == dest[i].2 && current_state_room[i].3 == dest[i].3 { continue; } let to_move; let to_move_row; let to_move_id; if current_state_room[i].0 != ' ' { to_move_row = 0; to_move = current_state_room[i].0; } else if current_state_room[i].1 != ' ' { to_move_row = 1; to_move = current_state_room[i].1; } else if current_state_room[i].2 != ' ' { to_move_row = 2; to_move = current_state_room[i].2; } else if current_state_room[i].3 != ' ' { to_move_row = 3; to_move = current_state_room[i].3; } else { continue; } to_move_id = match to_move { 'A' => 0, 'B' => 1, 'C' => 2, 'D' => 3, _ => panic!("Something went wrong"), }; for h in HALL_SLOT { let hsize = *h as usize; let left_bound = hsize.min(2 * (i + 1)); let right_bound = hsize.max(2 * (i + 1)) + 1; let bound = &current_state_hallway[left_bound..right_bound]; if bound.iter().all(|&x| x == ' ') { let new_cost = -cost + COST_TABLE[to_move_id] * (to_move_row + 1 + (2 * (i + 1) as i64 - h).abs()); let mut new_state_room = current_state_room.to_vec(); match to_move_row { 0 => { new_state_room[i].0 = ' '; } 1 => { new_state_room[i].1 = ' '; } 2 => { new_state_room[i].2 = ' '; } _ => { new_state_room[i].3 = ' '; } } let mut new_state_hallway = current_state_hallway.to_vec(); new_state_hallway[*h as usize] = to_move; cost_heap.push((-new_cost, (new_state_room, new_state_hallway))); } } } for h in HALL_SLOT { let hall_loc = *h as usize; let item = current_state_hallway[hall_loc]; let room_loc; let i; if item == ' ' { continue; } else { i = match item { 'A' => 0, 
'B' => 1, 'C' => 2, 'D' => 3, _ => panic!("Something went wrong"), }; room_loc = ROOM_SLOT[i] as usize; } let hrizontal = match hall_loc > room_loc { true => &current_state_hallway[room_loc..hall_loc], _ => &current_state_hallway[hall_loc + 1..room_loc + 1], }; if hrizontal.iter().all(|&x| x == ' ') { if (current_state_room[i].0 == ' ' || current_state_room[i].0 == item) && (current_state_room[i].1 == ' ' || current_state_room[i].1 == item) && (current_state_room[i].2 == ' ' || current_state_room[i].2 == item) && (current_state_room[i].3 == ' ' || current_state_room[i].3 == item) { let mut vertical = 0; if current_state_room[i].0 == ' ' && current_state_room[i].1 == ' ' { vertical += 1; } if current_state_room[i].1 == ' ' && current_state_room[i].2 == ' ' { vertical += 1; } if current_state_room[i].2 == ' ' && current_state_room[i].3 == ' ' { vertical += 1; } let new_cost = -cost + COST_TABLE[i] * (vertical as i64 + 1 + (room_loc as i64 - *h).abs()) as i64; let mut new_state_room = current_state_room.to_vec(); match vertical { 0 => { new_state_room[i].0 = item; } 1 => { new_state_room[i].1 = item; } 2 => { new_state_room[i].2 = item; } _ => { new_state_room[i].3 = item; } } let mut new_state_hallway = current_state_hallway.to_vec(); new_state_hallway[hall_loc] = ' '; cost_heap.push((-new_cost, (new_state_room, new_state_hallway))); } } } } unreachable!() } #[allow(dead_code)] pub fn solve_part1() -> i64 { let strings = get_input_string("23"); let first = &strings[2].chars().collect::<Vec<_>>(); let second = &strings[3].chars().collect::<Vec<_>>(); let mut rooms: Vec<(char, char)> = Vec::new(); for i in (3..10).step_by(2) { rooms.push((first[i], second[i])); } assert_eq!(rooms.len(), 4); search(rooms) } #[allow(dead_code)] pub fn solve_part2() -> i64 { let strings = get_input_string("23"); let first = &strings[2].chars().collect::<Vec<_>>(); let second = &strings[3].chars().collect::<Vec<_>>(); let mut rooms: Vec<(char, char, char, char)> = Vec::new(); for i in 
(3..10).step_by(2) { match rooms.len() { 0 => { rooms.push((first[i], 'D', 'D', second[i])); } 1 => { rooms.push((first[i], 'C', 'B', second[i])); } 2 => { rooms.push((first[i], 'B', 'A', second[i])); } _ => { rooms.push((first[i], 'A', 'C', second[i])); } } } assert_eq!(rooms.len(), 4); search2(rooms) } #[cfg(test)] mod tests { use super::*; #[test] fn test_has_reached() { let v1 = Vec::from([('B', 'A'), ('C', 'D')]); let v2 = Vec::from([('B', 'A'), ('C', 'D')]); assert_eq!(has_reached(&v1, &v2), true); } #[test] fn test_has_not_reached() { let v1 = Vec::from([('B', 'A'), ('C', 'D')]); let v2 = Vec::from([('A', 'A'), ('D', 'D')]); assert_eq!(has_reached(&v1, &v2), false); } #[test] fn test_solve_part1() { assert_eq!(solve_part1(), 15385); } #[test] fn test_solve_part2() { assert_eq!(solve_part2(), 49803); } } fn main() { println!("{}", solve_part1()); println!("{}", solve_part2()); }
36.119681
95
0.402769
2f6d77babeaf2af2e717016712fada86911f16ed
325
// Copyright (c) 2020 Ant Financial // // SPDX-License-Identifier: Apache-2.0 // #![allow(bare_trait_objects)] pub mod agent; pub mod agent_ttrpc; pub mod health; pub mod health_ttrpc; pub mod oci; pub mod types; pub mod empty; #[cfg(test)] mod tests { #[test] fn it_works() { assert_eq!(2 + 2, 4); } }
14.772727
38
0.64
48eda91fbae50f401e7b07b0d2017dfdb4ca98e3
598
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. fn main() { match 'a' { char{ch} => true //~^ ERROR `char` does not name a struct or a struct variant }; }
35.176471
68
0.690635
c1355f0d6fe0c4a6955483a5b1737e3c0988a70e
951
fn main() { let _ = b"\u{a66e}"; //~^ ERROR unicode escape in byte string let _ = b'\u{a66e}'; //~^ ERROR unicode escape in byte string let _ = b'\u'; //~^ ERROR incorrect unicode escape sequence let _ = b'\x5'; //~^ ERROR numeric character escape is too short let _ = b'\xxy'; //~^ ERROR invalid character in numeric character escape: `x` let _ = '\x5'; //~^ ERROR numeric character escape is too short let _ = '\xxy'; //~^ ERROR invalid character in numeric character escape: `x` let _ = b"\u{a4a4} \xf \u"; //~^ ERROR unicode escape in byte string //~^^ ERROR invalid character in numeric character escape: ` ` //~^^^ ERROR incorrect unicode escape sequence let _ = "\xf \u"; //~^ ERROR invalid character in numeric character escape: ` ` //~^^ ERROR incorrect unicode escape sequence let _ = "\u8f"; //~^ ERROR incorrect unicode escape sequence }
27.171429
66
0.602524
f8af06a772f916ef61aa5e6df1f7654333b19d5c
1,403
use std::collections::HashMap; use anyhow::{Context, Result}; use serde_json::Value; use crate::{sources::ComputationSource, utils::get_sub_object, ObjectIdPair}; use super::FieldBuilder; #[derive(Clone, Copy)] pub struct ComputationEngine<'a> { objects: &'a HashMap<ObjectIdPair, Value>, } impl<'a> ComputationEngine<'a> { pub fn new(objects: &'a HashMap<ObjectIdPair, Value>) -> Self { Self { objects } } pub fn compute(self, computation: &ComputationSource) -> Result<Value> { Ok(match computation { ComputationSource::RawValue { value } => value.clone(), ComputationSource::FieldValue { object, field_path } => { let field_path_parts = field_path.split('.'); let object = self .objects .get(&object) .with_context(|| format!("Could not find object: {:?}", object))?; get_sub_object(object, field_path_parts)? } ComputationSource::Equals { lhs, rhs } => { let lhs = self.compute(lhs)?; let rhs = self.compute(rhs)?; Value::Bool(lhs == rhs) } }) } } impl<'a> From<FieldBuilder<'a>> for ComputationEngine<'a> { fn from(fb: FieldBuilder<'a>) -> Self { let FieldBuilder { objects, .. } = fb; Self { objects } } }
30.5
86
0.55809
bbd20682fba79dd32902a33fc1a06c055a68f6a9
8,732
use crate::plaineditor::*; use makepad_render::*; use makepad_hub::*; use makepad_widget::*; use crate::mprstokenizer::*; #[derive(Clone, PartialEq)] pub enum ItemDisplayType { Empty, PlainText {text: String}, Message {message: LocMessage}, } #[derive(Clone)] pub struct ItemDisplay { pub display: ItemDisplayType, pub update_display: bool, pub view: View, pub text_disp: TextEditor, pub text_buffer: TextBuffer, pub text: DrawText, pub last_text_buffer_id: usize, pub prev_button: NormalButton, pub next_button: NormalButton, pub open_button: NormalButton, pub item_title: DrawText } impl ItemDisplay { pub fn new(cx: &mut Cx) -> Self { let editor = Self { text: DrawText::new(cx, default_shader!()), view: View::new(), update_display: false, text_disp: TextEditor { read_only: true, draw_line_numbers: false, draw_cursor_row: false, mark_unmatched_parens: false, folding_depth: 3, ..TextEditor::new(cx) }, text_buffer: TextBuffer { ..TextBuffer::default() }, last_text_buffer_id: 0, prev_button: NormalButton::new(cx), next_button: NormalButton::new(cx), open_button: NormalButton::new(cx), item_title: DrawText::new(cx, default_shader!()), display: ItemDisplayType::Empty, }; editor } pub fn style(cx: &mut Cx) { live_body!(cx, { self::text_style_title: TextStyle { ..makepad_widget::widgetstyle::text_style_normal } self::text_color: #82; self::gutter_width: 10.; self::padding_top: 10.; self::color_bg: #25; self::style_text_editor: Style{ makepad_widget::texteditor::gutter_width: self::gutter_width; makepad_widget::texteditor::padding_top: self::padding_top; makepad_widget::texteditor::color_bg: self::color_bg; } }) } pub fn display_message(&mut self, cx: &mut Cx, loc_message: &LocMessage) { self.display = ItemDisplayType::Message {message: loc_message.clone()}; self.update_display = true; self.view.redraw_view_parent(cx); } pub fn display_plain_text(&mut self, cx: &mut Cx, val: &str) { self.display = ItemDisplayType::PlainText {text: val.to_string()}; self.update_display = true; 
self.view.redraw_view_parent(cx); } pub fn update_plain_text_buffer(text_buffer: &mut TextBuffer, text: &str) { text_buffer.load_from_utf8(text); PlainTokenizer::update_token_chunks(text_buffer, None); } pub fn update_message_text_buffer(text_buffer: &mut TextBuffer, loc_message: &LocMessage) { let text = if let Some(rendered) = &loc_message.rendered { if let Some(explanation) = &loc_message.explanation { format!("{}{}{}", loc_message.body, rendered, explanation) } else { format!("{}{}", loc_message.body, rendered) } } else { loc_message.body.clone() }; text_buffer.load_from_utf8(&text); if text_buffer.needs_token_chunks() && text_buffer.lines.len() >0 { let mut state = TokenizerState::new(&text_buffer.lines); let mut tokenizer = MprsTokenizer::new(); let mut pair_stack = Vec::new(); let mut line_count = 0; let mut token_count = 0; let mut backtick_toggle = false; let mut first_block = false; let mut first_block_code_line = false; let mut message_type = TokenType::Warning; loop { let offset = text_buffer.flat_text.len(); let mut token_type = tokenizer.next_token(&mut state, &mut text_buffer.flat_text, &text_buffer.token_chunks); let mut val = String::new(); for i in offset..text_buffer.flat_text.len() { val.push(text_buffer.flat_text[i]); } if token_type == TokenType::Operator && val == "`" { backtick_toggle = !backtick_toggle; } let inside_backtick = !backtick_toggle || token_type == TokenType::Operator && val == "`"; if line_count == 2 { first_block = true; } if first_block && token_count == 0 && token_type == TokenType::Number { first_block_code_line = true; } // Gray out everything thats not in backticks or code if (line_count == 0 && inside_backtick || line_count == 1 || first_block && token_count <= 2 && (val == "|" || token_type == TokenType::Number) || first_block && !first_block_code_line && inside_backtick || !first_block && inside_backtick) && token_type != TokenType::Whitespace && token_type != TokenType::Newline && token_type != TokenType::Eof { 
token_type = TokenType::Defocus; } // color the ^^ if first_block && !first_block_code_line && val == "^" { token_type = message_type; } if first_block && token_count == 1 && val != "|" && token_type != TokenType::Whitespace { first_block = false; } if line_count == 0 && token_count == 0 { if val == "warning" { token_type = TokenType::Warning } else if val == "error" { message_type = TokenType::Error; token_type = TokenType::Error } } //println!("{:?} {}", token_type, val); TokenChunk::push_with_pairing(&mut text_buffer.token_chunks, &mut pair_stack, state.next, offset, text_buffer.flat_text.len(), token_type); token_count += 1; if token_type == TokenType::Newline { line_count += 1; token_count = 0; first_block_code_line = false; } if token_type == TokenType::Eof { break } } } } pub fn handle_item_display(&mut self, cx: &mut Cx, event: &mut Event)->TextEditorEvent{ match &self.display { ItemDisplayType::Empty=>{ TextEditorEvent::None }, ItemDisplayType::PlainText {..} => { self.text_disp.handle_text_editor(cx, event, &mut self.text_buffer) }, ItemDisplayType::Message {..} => { self.text_disp.handle_text_editor(cx, event, &mut self.text_buffer) }, } } pub fn draw_item_display(&mut self, cx: &mut Cx) { if self.update_display { match &self.display { ItemDisplayType::Empty=>{ }, ItemDisplayType::PlainText {text} => { Self::update_plain_text_buffer(&mut self.text_buffer, text); }, ItemDisplayType::Message {message} => { Self::update_message_text_buffer(&mut self.text_buffer, message); }, } self.update_display = false; } match &self.display { ItemDisplayType::Empty=>{ } ItemDisplayType::PlainText {..} | ItemDisplayType::Message {..} => { let text_buffer = &mut self.text_buffer; live_style_begin!(cx, self::style_text_editor); if self.text_disp.begin_text_editor(cx, text_buffer).is_err() {return live_style_end!(cx, self::style_text_editor);} for (index, token_chunk) in text_buffer.token_chunks.iter_mut().enumerate() { self.text_disp.draw_chunk(cx, index, 
&text_buffer.flat_text, token_chunk, &text_buffer.markers); } self.text_disp.end_text_editor(cx, text_buffer); live_style_end!(cx, self::style_text_editor); }, } } }
38.46696
255
0.520614
cc1b14bed8d0b40e4fe515012048e03284c0103b
1,142
#![allow(clippy::module_inception)] #![allow(clippy::upper_case_acronyms)] #![allow(clippy::large_enum_variant)] #![allow(clippy::wrong_self_convention)] #![allow(clippy::should_implement_trait)] #![allow(clippy::blacklisted_name)] //! <p>This is the Amazon Rekognition API reference.</p> // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. pub use error_meta::Error; pub use config::Config; mod aws_endpoint; #[cfg(feature = "client")] pub mod client; pub mod config; pub mod error; mod error_meta; pub mod input; mod json_deser; mod json_errors; mod json_ser; pub mod model; pub mod operation; mod operation_deser; mod operation_ser; pub mod output; pub static PKG_VERSION: &str = env!("CARGO_PKG_VERSION"); pub use smithy_http::byte_stream::ByteStream; pub use smithy_http::result::SdkError; pub use smithy_types::Blob; static API_METADATA: aws_http::user_agent::ApiMetadata = aws_http::user_agent::ApiMetadata::new("rekognition", PKG_VERSION); pub use aws_auth::Credentials; pub use aws_types::region::Region; #[cfg(feature = "client")] pub use client::Client; pub use smithy_http::endpoint::Endpoint;
28.55
80
0.765324
67c1bf3bc9d0c136ad6b9d7bb5961528057f7f1b
1,375
// Copyright 2018-2021 Parity Technologies (UK) Ltd. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use crate::backend::{ EnvBackend, TypedEnvBackend, }; use cfg_if::cfg_if; pub trait OnInstance: EnvBackend + TypedEnvBackend { fn on_instance<F, R>(f: F) -> R where F: FnOnce(&mut Self) -> R; } cfg_if! { if #[cfg(all(not(feature = "std"), target_arch = "wasm32"))] { mod on_chain; pub use self::on_chain::EnvInstance; } else if #[cfg(feature = "std")] { pub mod off_chain; pub use self::off_chain::EnvInstance; pub use self::off_chain::{ AccountError, TypedEncodedError, }; } else { compile_error! { "ink! only support compilation as `std` or `no_std` + `wasm32-unknown`" } } }
31.25
84
0.622545
641517c7c22ec1e9127763203ddda8df98f633ee
2,186
use crate::widget::attribute::util::get_style; use crate::widget::attribute::AttribKey; use crate::widget::attribute::Value; use expanse::geometry::Size; use expanse::number::Number; use expanse::Stretch; use mt_dom::attr; /// calculate the layout of the nodes utilizing the styles set on each of the widget /// and its children widget styles #[allow(unused)] pub fn compute_node_layout<MSG>( widget_node: &mut crate::Node<MSG>, parent_size: Size<Number>, ) { let mut stretch = Stretch::new(); let stretch_node = build_stretch_node_recursive(&mut stretch, &widget_node) .expect("must have built a style node"); stretch .compute_layout(stretch_node, parent_size) .expect("must compute the layout"); set_node_layout_from_stretch_node(widget_node, stretch_node, &stretch) } fn build_stretch_node_recursive<MSG>( stretch: &mut Stretch, widget_node: &crate::Node<MSG>, ) -> Option<expanse::node::Node> { let children_styles = if let Some(children) = widget_node.get_children() { children .iter() .filter_map(|c| build_stretch_node_recursive(stretch, c)) .collect() } else { vec![] }; let node_style = get_style(widget_node).cloned().unwrap_or_default(); stretch.new_node(node_style, &children_styles).ok() } fn set_node_layout_from_stretch_node<MSG>( widget_node: &mut crate::Node<MSG>, stretch_node: expanse::node::Node, stretch: &Stretch, ) { let layout = *stretch.layout(stretch_node).expect("must have layout"); let stretch_node_children: Vec<expanse::node::Node> = stretch.children(stretch_node).expect("must get children"); let widget_children = widget_node.children_mut().unwrap_or(&mut []); stretch_node_children .into_iter() .zip(widget_children.iter_mut()) .for_each(|(stretch_node_child, widget_child)| { set_node_layout_from_stretch_node( widget_child, stretch_node_child, stretch, ) }); widget_node.add_attributes_ref_mut(vec![attr( AttribKey::Layout, Value::from(layout), )]); }
32.147059
84
0.665599
8f5426227d3a800bba585745c24d6da38a20af10
47,707
// This file was generated by gir (https://github.com/gtk-rs/gir) // from gir-files (https://github.com/gtk-rs/gir-files) // DO NOT EDIT use bitflags::bitflags; use glib::translate::*; use glib::value::FromValue; use glib::value::FromValueOptional; use glib::value::SetValue; use glib::StaticType; use glib::Type; use std::fmt; bitflags! { pub struct AppInfoCreateFlags: u32 { const NONE = 0; const NEEDS_TERMINAL = 1; const SUPPORTS_URIS = 2; const SUPPORTS_STARTUP_NOTIFICATION = 4; } } impl fmt::Display for AppInfoCreateFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for AppInfoCreateFlags { type GlibType = ffi::GAppInfoCreateFlags; fn to_glib(&self) -> ffi::GAppInfoCreateFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GAppInfoCreateFlags> for AppInfoCreateFlags { fn from_glib(value: ffi::GAppInfoCreateFlags) -> AppInfoCreateFlags { AppInfoCreateFlags::from_bits_truncate(value) } } impl StaticType for AppInfoCreateFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_app_info_create_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for AppInfoCreateFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for AppInfoCreateFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for AppInfoCreateFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct ApplicationFlags: u32 { const FLAGS_NONE = 0; const IS_SERVICE = 1; const IS_LAUNCHER = 2; const HANDLES_OPEN = 4; const HANDLES_COMMAND_LINE = 8; const SEND_ENVIRONMENT = 16; const NON_UNIQUE = 32; const CAN_OVERRIDE_APP_ID = 64; const ALLOW_REPLACEMENT = 128; const REPLACE = 256; } } impl fmt::Display for ApplicationFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for ApplicationFlags { type GlibType = ffi::GApplicationFlags; fn to_glib(&self) -> ffi::GApplicationFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GApplicationFlags> for ApplicationFlags { fn from_glib(value: ffi::GApplicationFlags) -> ApplicationFlags { ApplicationFlags::from_bits_truncate(value) } } impl StaticType for ApplicationFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_application_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for ApplicationFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for ApplicationFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for ApplicationFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct AskPasswordFlags: u32 { const NEED_PASSWORD = 1; const NEED_USERNAME = 2; const NEED_DOMAIN = 4; const SAVING_SUPPORTED = 8; const ANONYMOUS_SUPPORTED = 16; const TCRYPT = 32; } } impl fmt::Display for AskPasswordFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for AskPasswordFlags { type GlibType = ffi::GAskPasswordFlags; fn to_glib(&self) -> ffi::GAskPasswordFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GAskPasswordFlags> for AskPasswordFlags { fn from_glib(value: ffi::GAskPasswordFlags) -> AskPasswordFlags { AskPasswordFlags::from_bits_truncate(value) } } impl StaticType for AskPasswordFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_ask_password_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for AskPasswordFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for AskPasswordFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for AskPasswordFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct BusNameOwnerFlags: u32 { const NONE = 0; const ALLOW_REPLACEMENT = 1; const REPLACE = 2; const DO_NOT_QUEUE = 4; } } impl fmt::Display for BusNameOwnerFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for BusNameOwnerFlags { type GlibType = ffi::GBusNameOwnerFlags; fn to_glib(&self) -> ffi::GBusNameOwnerFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GBusNameOwnerFlags> for BusNameOwnerFlags { fn from_glib(value: ffi::GBusNameOwnerFlags) -> BusNameOwnerFlags { BusNameOwnerFlags::from_bits_truncate(value) } } impl StaticType for BusNameOwnerFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_bus_name_owner_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for BusNameOwnerFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for BusNameOwnerFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for BusNameOwnerFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct BusNameWatcherFlags: u32 { const NONE = 0; const AUTO_START = 1; } } impl fmt::Display for BusNameWatcherFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for BusNameWatcherFlags { type GlibType = ffi::GBusNameWatcherFlags; fn to_glib(&self) -> ffi::GBusNameWatcherFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GBusNameWatcherFlags> for BusNameWatcherFlags { fn from_glib(value: ffi::GBusNameWatcherFlags) -> BusNameWatcherFlags { BusNameWatcherFlags::from_bits_truncate(value) } } impl StaticType for BusNameWatcherFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_bus_name_watcher_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for BusNameWatcherFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for BusNameWatcherFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for BusNameWatcherFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct ConverterFlags: u32 { const NONE = 0; const INPUT_AT_END = 1; const FLUSH = 2; } } impl fmt::Display for ConverterFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for ConverterFlags { type GlibType = ffi::GConverterFlags; fn to_glib(&self) -> ffi::GConverterFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GConverterFlags> for ConverterFlags { fn from_glib(value: ffi::GConverterFlags) -> ConverterFlags { ConverterFlags::from_bits_truncate(value) } } impl StaticType for ConverterFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_converter_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for ConverterFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for ConverterFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for ConverterFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct DBusCallFlags: u32 { const NONE = 0; const NO_AUTO_START = 1; const ALLOW_INTERACTIVE_AUTHORIZATION = 2; } } impl fmt::Display for DBusCallFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for DBusCallFlags { type GlibType = ffi::GDBusCallFlags; fn to_glib(&self) -> ffi::GDBusCallFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GDBusCallFlags> for DBusCallFlags { fn from_glib(value: ffi::GDBusCallFlags) -> DBusCallFlags { DBusCallFlags::from_bits_truncate(value) } } impl StaticType for DBusCallFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_dbus_call_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for DBusCallFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for DBusCallFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for DBusCallFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct DBusCapabilityFlags: u32 { const NONE = 0; const UNIX_FD_PASSING = 1; } } impl fmt::Display for DBusCapabilityFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for DBusCapabilityFlags { type GlibType = ffi::GDBusCapabilityFlags; fn to_glib(&self) -> ffi::GDBusCapabilityFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GDBusCapabilityFlags> for DBusCapabilityFlags { fn from_glib(value: ffi::GDBusCapabilityFlags) -> DBusCapabilityFlags { DBusCapabilityFlags::from_bits_truncate(value) } } impl StaticType for DBusCapabilityFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_dbus_capability_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for DBusCapabilityFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for DBusCapabilityFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for DBusCapabilityFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct DBusConnectionFlags: u32 { const NONE = 0; const AUTHENTICATION_CLIENT = 1; const AUTHENTICATION_SERVER = 2; const AUTHENTICATION_ALLOW_ANONYMOUS = 4; const MESSAGE_BUS_CONNECTION = 8; const DELAY_MESSAGE_PROCESSING = 16; } } impl fmt::Display for DBusConnectionFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for DBusConnectionFlags { type GlibType = ffi::GDBusConnectionFlags; fn to_glib(&self) -> ffi::GDBusConnectionFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GDBusConnectionFlags> for DBusConnectionFlags { fn from_glib(value: ffi::GDBusConnectionFlags) -> DBusConnectionFlags { DBusConnectionFlags::from_bits_truncate(value) } } impl StaticType for DBusConnectionFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_dbus_connection_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for DBusConnectionFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for DBusConnectionFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for DBusConnectionFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct DBusInterfaceSkeletonFlags: u32 { const NONE = 0; const HANDLE_METHOD_INVOCATIONS_IN_THREAD = 1; } } impl fmt::Display for DBusInterfaceSkeletonFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for DBusInterfaceSkeletonFlags { type GlibType = ffi::GDBusInterfaceSkeletonFlags; fn to_glib(&self) -> ffi::GDBusInterfaceSkeletonFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GDBusInterfaceSkeletonFlags> for DBusInterfaceSkeletonFlags { fn from_glib(value: ffi::GDBusInterfaceSkeletonFlags) -> DBusInterfaceSkeletonFlags { DBusInterfaceSkeletonFlags::from_bits_truncate(value) } } impl StaticType for DBusInterfaceSkeletonFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_dbus_interface_skeleton_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for DBusInterfaceSkeletonFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for DBusInterfaceSkeletonFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for DBusInterfaceSkeletonFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct DBusMessageFlags: u32 { const NONE = 0; const NO_REPLY_EXPECTED = 1; const NO_AUTO_START = 2; const ALLOW_INTERACTIVE_AUTHORIZATION = 4; } } impl fmt::Display for DBusMessageFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for DBusMessageFlags { type GlibType = ffi::GDBusMessageFlags; fn to_glib(&self) -> ffi::GDBusMessageFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GDBusMessageFlags> for DBusMessageFlags { fn from_glib(value: ffi::GDBusMessageFlags) -> DBusMessageFlags { DBusMessageFlags::from_bits_truncate(value) } } impl StaticType for DBusMessageFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_dbus_message_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for DBusMessageFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for DBusMessageFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for DBusMessageFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct DBusProxyFlags: u32 { const NONE = 0; const DO_NOT_LOAD_PROPERTIES = 1; const DO_NOT_CONNECT_SIGNALS = 2; const DO_NOT_AUTO_START = 4; const GET_INVALIDATED_PROPERTIES = 8; const DO_NOT_AUTO_START_AT_CONSTRUCTION = 16; } } impl fmt::Display for DBusProxyFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for DBusProxyFlags { type GlibType = ffi::GDBusProxyFlags; fn to_glib(&self) -> ffi::GDBusProxyFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GDBusProxyFlags> for DBusProxyFlags { fn from_glib(value: ffi::GDBusProxyFlags) -> DBusProxyFlags { DBusProxyFlags::from_bits_truncate(value) } } impl StaticType for DBusProxyFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_dbus_proxy_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for DBusProxyFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for DBusProxyFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for DBusProxyFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct DBusSendMessageFlags: u32 { const NONE = 0; const PRESERVE_SERIAL = 1; } } impl fmt::Display for DBusSendMessageFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for DBusSendMessageFlags { type GlibType = ffi::GDBusSendMessageFlags; fn to_glib(&self) -> ffi::GDBusSendMessageFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GDBusSendMessageFlags> for DBusSendMessageFlags { fn from_glib(value: ffi::GDBusSendMessageFlags) -> DBusSendMessageFlags { DBusSendMessageFlags::from_bits_truncate(value) } } impl StaticType for DBusSendMessageFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_dbus_send_message_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for DBusSendMessageFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for DBusSendMessageFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for DBusSendMessageFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct DBusServerFlags: u32 { const NONE = 0; const RUN_IN_THREAD = 1; const AUTHENTICATION_ALLOW_ANONYMOUS = 2; } } impl fmt::Display for DBusServerFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for DBusServerFlags { type GlibType = ffi::GDBusServerFlags; fn to_glib(&self) -> ffi::GDBusServerFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GDBusServerFlags> for DBusServerFlags { fn from_glib(value: ffi::GDBusServerFlags) -> DBusServerFlags { DBusServerFlags::from_bits_truncate(value) } } impl StaticType for DBusServerFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_dbus_server_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for DBusServerFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for DBusServerFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for DBusServerFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct DBusSignalFlags: u32 { const NONE = 0; const NO_MATCH_RULE = 1; const MATCH_ARG0_NAMESPACE = 2; const MATCH_ARG0_PATH = 4; } } impl fmt::Display for DBusSignalFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for DBusSignalFlags { type GlibType = ffi::GDBusSignalFlags; fn to_glib(&self) -> ffi::GDBusSignalFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GDBusSignalFlags> for DBusSignalFlags { fn from_glib(value: ffi::GDBusSignalFlags) -> DBusSignalFlags { DBusSignalFlags::from_bits_truncate(value) } } impl StaticType for DBusSignalFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_dbus_signal_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for DBusSignalFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for DBusSignalFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for DBusSignalFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct DriveStartFlags: u32 { const NONE = 0; } } impl fmt::Display for DriveStartFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for DriveStartFlags { type GlibType = ffi::GDriveStartFlags; fn to_glib(&self) -> ffi::GDriveStartFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GDriveStartFlags> for DriveStartFlags { fn from_glib(value: ffi::GDriveStartFlags) -> DriveStartFlags { DriveStartFlags::from_bits_truncate(value) } } impl StaticType for DriveStartFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_drive_start_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for DriveStartFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for DriveStartFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for DriveStartFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct FileCopyFlags: u32 { const NONE = 0; const OVERWRITE = 1; const BACKUP = 2; const NOFOLLOW_SYMLINKS = 4; const ALL_METADATA = 8; const NO_FALLBACK_FOR_MOVE = 16; const TARGET_DEFAULT_PERMS = 32; } } impl fmt::Display for FileCopyFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for FileCopyFlags { type GlibType = ffi::GFileCopyFlags; fn to_glib(&self) -> ffi::GFileCopyFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GFileCopyFlags> for FileCopyFlags { fn from_glib(value: ffi::GFileCopyFlags) -> FileCopyFlags { FileCopyFlags::from_bits_truncate(value) } } impl StaticType for FileCopyFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_file_copy_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for FileCopyFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for FileCopyFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for FileCopyFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct FileCreateFlags: u32 { const NONE = 0; const PRIVATE = 1; const REPLACE_DESTINATION = 2; } } impl fmt::Display for FileCreateFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for FileCreateFlags { type GlibType = ffi::GFileCreateFlags; fn to_glib(&self) -> ffi::GFileCreateFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GFileCreateFlags> for FileCreateFlags { fn from_glib(value: ffi::GFileCreateFlags) -> FileCreateFlags { FileCreateFlags::from_bits_truncate(value) } } impl StaticType for FileCreateFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_file_create_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for FileCreateFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for FileCreateFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for FileCreateFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct FileMeasureFlags: u32 { const NONE = 0; const REPORT_ANY_ERROR = 2; const APPARENT_SIZE = 4; const NO_XDEV = 8; } } impl fmt::Display for FileMeasureFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for FileMeasureFlags { type GlibType = ffi::GFileMeasureFlags; fn to_glib(&self) -> ffi::GFileMeasureFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GFileMeasureFlags> for FileMeasureFlags { fn from_glib(value: ffi::GFileMeasureFlags) -> FileMeasureFlags { FileMeasureFlags::from_bits_truncate(value) } } impl StaticType for FileMeasureFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_file_measure_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for FileMeasureFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for FileMeasureFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for FileMeasureFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct FileMonitorFlags: u32 { const NONE = 0; const WATCH_MOUNTS = 1; const SEND_MOVED = 2; const WATCH_HARD_LINKS = 4; const WATCH_MOVES = 8; } } impl fmt::Display for FileMonitorFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for FileMonitorFlags { type GlibType = ffi::GFileMonitorFlags; fn to_glib(&self) -> ffi::GFileMonitorFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GFileMonitorFlags> for FileMonitorFlags { fn from_glib(value: ffi::GFileMonitorFlags) -> FileMonitorFlags { FileMonitorFlags::from_bits_truncate(value) } } impl StaticType for FileMonitorFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_file_monitor_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for FileMonitorFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for FileMonitorFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for FileMonitorFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct FileQueryInfoFlags: u32 { const NONE = 0; const NOFOLLOW_SYMLINKS = 1; } } impl fmt::Display for FileQueryInfoFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for FileQueryInfoFlags { type GlibType = ffi::GFileQueryInfoFlags; fn to_glib(&self) -> ffi::GFileQueryInfoFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GFileQueryInfoFlags> for FileQueryInfoFlags { fn from_glib(value: ffi::GFileQueryInfoFlags) -> FileQueryInfoFlags { FileQueryInfoFlags::from_bits_truncate(value) } } impl StaticType for FileQueryInfoFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_file_query_info_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for FileQueryInfoFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for FileQueryInfoFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for FileQueryInfoFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct IOStreamSpliceFlags: u32 { const NONE = 0; const CLOSE_STREAM1 = 1; const CLOSE_STREAM2 = 2; const WAIT_FOR_BOTH = 4; } } impl fmt::Display for IOStreamSpliceFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for IOStreamSpliceFlags { type GlibType = ffi::GIOStreamSpliceFlags; fn to_glib(&self) -> ffi::GIOStreamSpliceFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GIOStreamSpliceFlags> for IOStreamSpliceFlags { fn from_glib(value: ffi::GIOStreamSpliceFlags) -> IOStreamSpliceFlags { IOStreamSpliceFlags::from_bits_truncate(value) } } impl StaticType for IOStreamSpliceFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_io_stream_splice_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for IOStreamSpliceFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for IOStreamSpliceFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for IOStreamSpliceFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct MountMountFlags: u32 { const NONE = 0; } } impl fmt::Display for MountMountFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for MountMountFlags { type GlibType = ffi::GMountMountFlags; fn to_glib(&self) -> ffi::GMountMountFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GMountMountFlags> for MountMountFlags { fn from_glib(value: ffi::GMountMountFlags) -> MountMountFlags { MountMountFlags::from_bits_truncate(value) } } impl StaticType for MountMountFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_mount_mount_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for MountMountFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for MountMountFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for MountMountFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct MountUnmountFlags: u32 { const NONE = 0; const FORCE = 1; } } impl fmt::Display for MountUnmountFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for MountUnmountFlags { type GlibType = ffi::GMountUnmountFlags; fn to_glib(&self) -> ffi::GMountUnmountFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GMountUnmountFlags> for MountUnmountFlags { fn from_glib(value: ffi::GMountUnmountFlags) -> MountUnmountFlags { MountUnmountFlags::from_bits_truncate(value) } } impl StaticType for MountUnmountFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_mount_unmount_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for MountUnmountFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for MountUnmountFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for MountUnmountFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct OutputStreamSpliceFlags: u32 { const NONE = 0; const CLOSE_SOURCE = 1; const CLOSE_TARGET = 2; } } impl fmt::Display for OutputStreamSpliceFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for OutputStreamSpliceFlags { type GlibType = ffi::GOutputStreamSpliceFlags; fn to_glib(&self) -> ffi::GOutputStreamSpliceFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GOutputStreamSpliceFlags> for OutputStreamSpliceFlags { fn from_glib(value: ffi::GOutputStreamSpliceFlags) -> OutputStreamSpliceFlags { OutputStreamSpliceFlags::from_bits_truncate(value) } } impl StaticType for OutputStreamSpliceFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_output_stream_splice_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for OutputStreamSpliceFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for OutputStreamSpliceFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for OutputStreamSpliceFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } #[cfg(any(feature = "v2_60", feature = "dox"))] #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_60")))] bitflags! 
{ pub struct ResolverNameLookupFlags: u32 { const DEFAULT = 0; const IPV4_ONLY = 1; const IPV6_ONLY = 2; } } #[cfg(any(feature = "v2_60", feature = "dox"))] #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_60")))] impl fmt::Display for ResolverNameLookupFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[cfg(any(feature = "v2_60", feature = "dox"))] #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_60")))] #[doc(hidden)] impl ToGlib for ResolverNameLookupFlags { type GlibType = ffi::GResolverNameLookupFlags; fn to_glib(&self) -> ffi::GResolverNameLookupFlags { self.bits() } } #[cfg(any(feature = "v2_60", feature = "dox"))] #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_60")))] #[doc(hidden)] impl FromGlib<ffi::GResolverNameLookupFlags> for ResolverNameLookupFlags { fn from_glib(value: ffi::GResolverNameLookupFlags) -> ResolverNameLookupFlags { ResolverNameLookupFlags::from_bits_truncate(value) } } #[cfg(any(feature = "v2_60", feature = "dox"))] #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_60")))] impl StaticType for ResolverNameLookupFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_resolver_name_lookup_flags_get_type()) } } } #[cfg(any(feature = "v2_60", feature = "dox"))] #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_60")))] impl<'a> FromValueOptional<'a> for ResolverNameLookupFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } #[cfg(any(feature = "v2_60", feature = "dox"))] #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_60")))] impl<'a> FromValue<'a> for ResolverNameLookupFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } #[cfg(any(feature = "v2_60", feature = "dox"))] #[cfg_attr(feature = "dox", doc(cfg(feature = "v2_60")))] impl SetValue for ResolverNameLookupFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { 
glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! { pub struct ResourceLookupFlags: u32 { const NONE = 0; } } impl fmt::Display for ResourceLookupFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for ResourceLookupFlags { type GlibType = ffi::GResourceLookupFlags; fn to_glib(&self) -> ffi::GResourceLookupFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GResourceLookupFlags> for ResourceLookupFlags { fn from_glib(value: ffi::GResourceLookupFlags) -> ResourceLookupFlags { ResourceLookupFlags::from_bits_truncate(value) } } impl StaticType for ResourceLookupFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_resource_lookup_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for ResourceLookupFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for ResourceLookupFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for ResourceLookupFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct SettingsBindFlags: u32 { const DEFAULT = 0; const GET = 1; const SET = 2; const NO_SENSITIVITY = 4; const GET_NO_CHANGES = 8; const INVERT_BOOLEAN = 16; } } impl fmt::Display for SettingsBindFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for SettingsBindFlags { type GlibType = ffi::GSettingsBindFlags; fn to_glib(&self) -> ffi::GSettingsBindFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GSettingsBindFlags> for SettingsBindFlags { fn from_glib(value: ffi::GSettingsBindFlags) -> SettingsBindFlags { SettingsBindFlags::from_bits_truncate(value) } } impl StaticType for SettingsBindFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_settings_bind_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for SettingsBindFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for SettingsBindFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for SettingsBindFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct SubprocessFlags: u32 { const NONE = 0; const STDIN_PIPE = 1; const STDIN_INHERIT = 2; const STDOUT_PIPE = 4; const STDOUT_SILENCE = 8; const STDERR_PIPE = 16; const STDERR_SILENCE = 32; const STDERR_MERGE = 64; const INHERIT_FDS = 128; } } impl fmt::Display for SubprocessFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for SubprocessFlags { type GlibType = ffi::GSubprocessFlags; fn to_glib(&self) -> ffi::GSubprocessFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GSubprocessFlags> for SubprocessFlags { fn from_glib(value: ffi::GSubprocessFlags) -> SubprocessFlags { SubprocessFlags::from_bits_truncate(value) } } impl StaticType for SubprocessFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_subprocess_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for SubprocessFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for SubprocessFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for SubprocessFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct TlsCertificateFlags: u32 { const UNKNOWN_CA = 1; const BAD_IDENTITY = 2; const NOT_ACTIVATED = 4; const EXPIRED = 8; const REVOKED = 16; const INSECURE = 32; const GENERIC_ERROR = 64; const VALIDATE_ALL = 127; } } impl fmt::Display for TlsCertificateFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for TlsCertificateFlags { type GlibType = ffi::GTlsCertificateFlags; fn to_glib(&self) -> ffi::GTlsCertificateFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GTlsCertificateFlags> for TlsCertificateFlags { fn from_glib(value: ffi::GTlsCertificateFlags) -> TlsCertificateFlags { TlsCertificateFlags::from_bits_truncate(value) } } impl StaticType for TlsCertificateFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_tls_certificate_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for TlsCertificateFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for TlsCertificateFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for TlsCertificateFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct TlsDatabaseVerifyFlags: u32 { const NONE = 0; } } impl fmt::Display for TlsDatabaseVerifyFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for TlsDatabaseVerifyFlags { type GlibType = ffi::GTlsDatabaseVerifyFlags; fn to_glib(&self) -> ffi::GTlsDatabaseVerifyFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GTlsDatabaseVerifyFlags> for TlsDatabaseVerifyFlags { fn from_glib(value: ffi::GTlsDatabaseVerifyFlags) -> TlsDatabaseVerifyFlags { TlsDatabaseVerifyFlags::from_bits_truncate(value) } } impl StaticType for TlsDatabaseVerifyFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_tls_database_verify_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for TlsDatabaseVerifyFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for TlsDatabaseVerifyFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for TlsDatabaseVerifyFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } } bitflags! 
{ pub struct TlsPasswordFlags: u32 { const NONE = 0; const RETRY = 2; const MANY_TRIES = 4; const FINAL_TRY = 8; } } impl fmt::Display for TlsPasswordFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { <Self as fmt::Debug>::fmt(self, f) } } #[doc(hidden)] impl ToGlib for TlsPasswordFlags { type GlibType = ffi::GTlsPasswordFlags; fn to_glib(&self) -> ffi::GTlsPasswordFlags { self.bits() } } #[doc(hidden)] impl FromGlib<ffi::GTlsPasswordFlags> for TlsPasswordFlags { fn from_glib(value: ffi::GTlsPasswordFlags) -> TlsPasswordFlags { TlsPasswordFlags::from_bits_truncate(value) } } impl StaticType for TlsPasswordFlags { fn static_type() -> Type { unsafe { from_glib(ffi::g_tls_password_flags_get_type()) } } } impl<'a> FromValueOptional<'a> for TlsPasswordFlags { unsafe fn from_value_optional(value: &glib::Value) -> Option<Self> { Some(FromValue::from_value(value)) } } impl<'a> FromValue<'a> for TlsPasswordFlags { unsafe fn from_value(value: &glib::Value) -> Self { from_glib(glib::gobject_ffi::g_value_get_flags(value.to_glib_none().0)) } } impl SetValue for TlsPasswordFlags { unsafe fn set_value(value: &mut glib::Value, this: &Self) { glib::gobject_ffi::g_value_set_flags(value.to_glib_none_mut().0, this.to_glib()) } }
26.711646
89
0.661894
1ccd5570504ed7cb0fad53f315bed79e769353e6
2,765
//! # Environment life cycle APIs
//!
//! These APIs map to the life cycle of a specific "Agent" or self-contained
//! environment. If a Neon module is loaded multiple times (Web Workers, worker
//! threads), these APIs will handle data associated with a specific instance.
//!
//! See the [N-API Lifecycle][napi-docs] documentation for more details.
//!
//! [napi-docs]: https://nodejs.org/api/n-api.html#n_api_environment_life_cycle_apis

use std::mem;
use std::sync::Arc;

use neon_runtime::raw::Env;
use neon_runtime::reference;
use neon_runtime::tsfn::ThreadsafeFunction;

use crate::context::Context;
use crate::handle::root::NapiRef;

/// `InstanceData` holds Neon data associated with a particular instance of a
/// native module. If a module is loaded multiple times (e.g., worker threads), this
/// data will be unique per instance.
pub(crate) struct InstanceData {
    /// Used to free `Root` in the same JavaScript environment that created it
    ///
    /// _Design Note_: An `Arc` ensures the `ThreadsafeFunction` outlives the unloading
    /// of a module. Since it is unlikely that modules will be re-loaded frequently, this
    /// could be replaced with a leaked `&'static ThreadsafeFunction<NapiRef>`. However,
    /// given the cost of FFI, this optimization is omitted until the cost of an
    /// `Arc` is demonstrated as significant.
    drop_queue: Arc<ThreadsafeFunction<NapiRef>>,
}

/// Callback executed by the `drop_queue` `ThreadsafeFunction`: releases the
/// N-API reference wrapped by `data` on the JavaScript thread that owns it.
/// When `env` is `None` the environment is already gone, so nothing is released.
fn drop_napi_ref(env: Option<Env>, data: NapiRef) {
    if let Some(env) = env {
        unsafe {
            // SAFETY: assumes `NapiRef` is layout-compatible with the raw
            // reference type expected by `reference::unreference` — TODO
            // confirm against the definition in `crate::handle::root`.
            reference::unreference(env, mem::transmute(data));
        }
    }
}

impl InstanceData {
    /// Return the data associated with this module instance, lazily initializing if
    /// necessary.
    ///
    /// # Safety
    /// No additional locking (e.g., `Mutex`) is necessary because holding a
    /// `Context` reference ensures serialized access.
    pub(crate) fn get<'a, C: Context<'a>>(cx: &mut C) -> &'a mut InstanceData {
        let env = cx.env().to_raw();
        // Fast path: instance data was already registered for this environment.
        let data = unsafe { neon_runtime::lifecycle::get_instance_data::<InstanceData>(env).as_mut() };

        if let Some(data) = data {
            return data;
        }

        // Slow path: build the drop queue and register it as this instance's data.
        let drop_queue = unsafe {
            let mut queue = ThreadsafeFunction::new(env, drop_napi_ref);
            // Unref so the queue itself does not keep the event loop alive
            // (N-API threadsafe-function `unref` semantics).
            queue.unref(env);
            queue
        };

        let data = InstanceData {
            drop_queue: Arc::new(drop_queue),
        };

        unsafe { &mut *neon_runtime::lifecycle::set_instance_data(env, data) }
    }

    /// Helper to return a reference to the `drop_queue` field of `InstanceData`
    pub(crate) fn drop_queue<'a, C: Context<'a>>(cx: &mut C) -> Arc<ThreadsafeFunction<NapiRef>> {
        Arc::clone(&InstanceData::get(cx).drop_queue)
    }
}
35.909091
98
0.656058
56541d47e0e4bc2e635fdf6c7cf9e17c7a6b6cf5
5,195
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![allow(clippy::module_name_repetitions)] use crate::connection::Http3State; use crate::connection_server::Http3ServerHandler; use crate::{Header, Priority, Res}; use neqo_common::{qdebug, qinfo}; use neqo_transport::server::ActiveConnectionRef; use neqo_transport::{AppError, Connection}; use std::cell::RefCell; use std::collections::VecDeque; use std::rc::Rc; #[derive(Debug, Clone)] pub struct ClientRequestStream { conn: ActiveConnectionRef, handler: Rc<RefCell<Http3ServerHandler>>, stream_id: u64, } impl ::std::fmt::Display for ClientRequestStream { fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { let conn: &Connection = &self.conn.borrow(); write!( f, "Http3 server conn={:?} stream_id={}", conn, self.stream_id ) } } impl ClientRequestStream { pub(crate) fn new( conn: ActiveConnectionRef, handler: Rc<RefCell<Http3ServerHandler>>, stream_id: u64, ) -> Self { Self { conn, handler, stream_id, } } /// Supply a response to a request. /// # Errors /// It may return `InvalidStreamId` if a stream does not exist anymore. pub fn set_response(&mut self, headers: &[Header], data: &[u8]) -> Res<()> { qinfo!([self], "Set new response."); self.handler .borrow_mut() .set_response(self.stream_id, headers, data) } /// Request a peer to stop sending a request. /// # Errors /// It may return `InvalidStreamId` if a stream does not exist anymore. pub fn stream_stop_sending(&mut self, app_error: AppError) -> Res<()> { qdebug!( [self], "stop sending stream_id:{} error:{}.", self.stream_id, app_error ); self.conn .borrow_mut() .stream_stop_sending(self.stream_id, app_error)?; Ok(()) } /// Reset a stream/request. 
/// # Errors /// It may return `InvalidStreamId` if a stream does not exist anymore pub fn cancel_fetch(&mut self, app_error: AppError) -> Res<()> { qdebug!([self], "reset error:{}.", app_error); self.handler.borrow_mut().cancel_fetch( self.stream_id, app_error, &mut self.conn.borrow_mut(), ) } } impl std::hash::Hash for ClientRequestStream { fn hash<H: std::hash::Hasher>(&self, state: &mut H) { self.conn.hash(state); state.write_u64(self.stream_id); state.finish(); } } impl PartialEq for ClientRequestStream { fn eq(&self, other: &Self) -> bool { self.conn == other.conn && self.stream_id == other.stream_id } } impl Eq for ClientRequestStream {} #[derive(Debug, Clone)] pub enum Http3ServerEvent { /// Headers are ready. Headers { request: ClientRequestStream, headers: Vec<Header>, fin: bool, }, /// Request data is ready. Data { request: ClientRequestStream, data: Vec<u8>, fin: bool, }, /// When individual connection change state. It is only used for tests. StateChange { conn: ActiveConnectionRef, state: Http3State, }, PriorityUpdate { stream_id: u64, priority: Priority, }, } #[derive(Debug, Default, Clone)] pub struct Http3ServerEvents { events: Rc<RefCell<VecDeque<Http3ServerEvent>>>, } impl Http3ServerEvents { fn insert(&self, event: Http3ServerEvent) { self.events.borrow_mut().push_back(event); } /// Take all events pub fn events(&self) -> impl Iterator<Item = Http3ServerEvent> { self.events.replace(VecDeque::new()).into_iter() } /// Whether there is request pending. pub fn has_events(&self) -> bool { !self.events.borrow().is_empty() } /// Take the next event if present. pub fn next_event(&self) -> Option<Http3ServerEvent> { self.events.borrow_mut().pop_front() } /// Insert a `Headers` event. pub(crate) fn headers(&self, request: ClientRequestStream, headers: Vec<Header>, fin: bool) { self.insert(Http3ServerEvent::Headers { request, headers, fin, }); } /// Insert a `StateChange` event. 
pub(crate) fn connection_state_change(&self, conn: ActiveConnectionRef, state: Http3State) { self.insert(Http3ServerEvent::StateChange { conn, state }); } /// Insert a `Data` event. pub(crate) fn data(&self, request: ClientRequestStream, data: Vec<u8>, fin: bool) { self.insert(Http3ServerEvent::Data { request, data, fin }); } pub(crate) fn priority_update(&self, stream_id: u64, priority: Priority) { self.insert(Http3ServerEvent::PriorityUpdate { stream_id, priority, }); } }
28.543956
97
0.605197
f826f47af96d87312787aac0020b03669cc59f3b
809
use log::info;
use mlua::{ExternalResult, Function, Lua, MultiValue};

/// Build a Lua `print`-style function that stringifies each argument with the
/// global `tostring` and logs the joined line via `log::info!`, using the
/// target `service '<service_name>'`.
pub fn create_fn_print<'a>(lua: &'a Lua, service_name: &str) -> mlua::Result<Function<'a>> {
    // Capture the global `tostring` so arbitrary Lua values can be stringified.
    let tostring: Function = lua.globals().raw_get("tostring")?;
    let target = format!("service '{service_name}'");

    let print_fn = lua.create_function(move |_lua, (tostring, values): (Function, MultiValue)| {
        let mut line = String::new();
        for value in values {
            let lua_str = tostring.call::<_, mlua::String>(value)?;
            let text = std::str::from_utf8(lua_str.as_bytes()).to_lua_err()?;
            line.push_str(text);
            // Pad with 1..=8 spaces so the next field starts on an 8-column
            // boundary, mimicking tab-aligned `print` output.
            let pad = 8 - text.len() % 8;
            for _ in 0..pad {
                line.push(' ');
            }
        }
        info!(target: &target, "{line}");
        Ok(())
    })?;

    // Pre-bind `tostring` as the function's first argument so Lua callers
    // only pass the values to print.
    print_fn.bind(tostring)
}
36.772727
92
0.592089
bb16957f8c2dffffb2bf5e947dac09ab9f3a0808
5,633
use number_types::d8_type::d8; use number_types::d16_type::d16; use number_types::a16_type::a16; use std::num::Wrapping; type CartRomBank0 = [d8; 0x3eb0]; type CartRomBankN = [d8; 0x4000]; type BackgroundMapData = [d8; 0x400]; type InternalRamBank = [d8; 0x1000]; /* http://gameboy.mongenel.com/dmg/asmmemmap.html : GameBoy Memory Areas $FFFF Interrupt Enable Flag $FF80-$FFFE Zero Page - 127 bytes $FF00-$FF7F Hardware I/O Registers $FEA0-$FEFF Unusable Memory $FE00-$FE9F OAM - Object Attribute Memory $E000-$FDFF Echo RAM - Reserved, Do Not Use $D000-$DFFF Internal RAM - Bank 1-7 (switchable - CGB only) $C000-$CFFF Internal RAM - Bank 0 (fixed) $A000-$BFFF Cartridge RAM (If Available) $9C00-$9FFF BG Map Data 2 $9800-$9BFF BG Map Data 1 $8000-$97FF Character RAM $4000-$7FFF Cartridge ROM - Switchable Banks 1-xx $0150-$3FFF Cartridge ROM - Bank 0 (fixed) $0100-$014F Cartridge Header Area $0000-$00FF Restart and Interrupt Vectors */ pub struct Memory { restart_and_int_vectors: [d8; 0x100], cartridge_header: [d8; 0x50], cart_rom_bank_0: CartRomBank0, other_cart_rom_banks: Vec<CartRomBankN>, active_rom_bank_index: usize, character_ram: [d8; 0x800], background_data_0: BackgroundMapData, background_data_1: BackgroundMapData, cart_ram: Option<[d8; 0x2000]>, internal_ram_bank_0: InternalRamBank, other_internal_ram_banks: Vec<InternalRamBank>, active_ram_bank_index: usize, object_attribute_memory: [d8; 0xa0], hardware_io_regs: [d8; 0x80], enable_interrupt_flag: d8, } impl Memory { pub fn new_zeros() -> Self { Self { restart_and_int_vectors: [d8::ZERO; 0x100], cartridge_header: [d8::ZERO; 0x50], cart_rom_bank_0: [d8::ZERO; 0x3eb0], other_cart_rom_banks: vec![[d8::ZERO; 0x4000]], active_rom_bank_index: 0, character_ram: [d8::ZERO; 0x800], background_data_0: [d8::ZERO; 0x400], background_data_1: [d8::ZERO; 0x400], cart_ram: None, internal_ram_bank_0: [d8::ZERO; 0x1000], other_internal_ram_banks: vec![[d8::ZERO; 0x1000]], active_ram_bank_index: 0, object_attribute_memory: 
[d8::ZERO; 0xa0], hardware_io_regs: [d8::ZERO; 0x80], enable_interrupt_flag: d8::ZERO, } } pub fn read_d8(&self, a16(Wrapping(idx)): a16) -> Option<d8> { let idx = idx as usize; match idx { 0x0000 ... 0x00ff => Some(self.restart_and_int_vectors[idx]), 0x0100 ... 0x014f => Some(self.cartridge_header[idx - 0x0100]), 0x0150 ... 0x3fff => Some(self.cart_rom_bank_0[idx - 0x0150]), 0x4000 ... 0x7fff => Some(self.other_cart_rom_banks[ self.active_rom_bank_index ][idx - 0x4000]), 0x8000 ... 0x97ff => Some(self.character_ram[idx - 0x8000]), 0x9800 ... 0x9bff => Some(self.background_data_0[idx - 0x9800]), 0x9c00 ... 0x9fff => Some(self.background_data_1[idx - 0x9c00]), 0xa000 ... 0xbfff => self.cart_ram.as_ref().and_then(|mem| { Some(mem[idx - 0xa000]) }), 0xc000 ... 0xcfff => Some(self.internal_ram_bank_0[idx - 0xc000]), 0xd000 ... 0xdfff => Some(self.other_internal_ram_banks[ self.active_ram_bank_index ][idx - 0xd000]), 0xe000 ... 0xfdff => None, 0xfe00 ... 0xfe9f => Some(self.object_attribute_memory[idx - 0xfe00]), 0xfea0 ... 0xfeff => None, 0xff00 ... 0xff7f => Some(self.hardware_io_regs[idx - 0xff00]), 0xff80 ... 0xfffe => Some(d8::ZERO), 0xffff => Some(self.enable_interrupt_flag), _ => unreachable!(), } } pub fn put_d8(&mut self, a16(Wrapping(idx)): a16, val: d8) -> Option<()> { let idx = idx as usize; match idx { 0x0000 ... 0x00ff => Some(self.restart_and_int_vectors[idx] = val), 0x0100 ... 0x014f => None, 0x0150 ... 0x3fff => None, 0x4000 ... 0x7fff => None, 0x8000 ... 0x97ff => Some(self.character_ram[idx - 0x8000] = val), 0x9800 ... 0x9bff => Some(self.background_data_0[idx - 0x9800] = val), 0x9c00 ... 0x9fff => Some(self.background_data_1[idx - 0x9c00] = val), 0xa000 ... 0xbfff => self.cart_ram.as_mut().and_then(|mem| { Some(mem[idx - 0xa000] = val) }), 0xc000 ... 0xcfff => Some(self.internal_ram_bank_0[idx - 0xc000] = val), 0xd000 ... 0xdfff => Some(self.other_internal_ram_banks[ self.active_ram_bank_index ][idx - 0xd000] = val), 0xe000 ... 
0xfdff => None, 0xfe00 ... 0xfe9f => Some(self.object_attribute_memory[idx - 0xfe00] = val), 0xfea0 ... 0xfeff => None, 0xff00 ... 0xff7f => Some(self.hardware_io_regs[idx - 0xff00] = val), 0xff80 ... 0xfffe => None, 0xffff => Some(self.enable_interrupt_flag = val), _ => unreachable!(), } } pub fn read_d16(&self, idx: a16) -> Option<d16> { Some([ self.read_d8(idx).unwrap_or(d8::ZERO), self.read_d8(idx + 1).unwrap_or(d8::ZERO) ].into()) } pub fn put_d16(&mut self, idx: a16, val: d16) -> Option<()> { let val: [d8; 2] = val.into(); let (lsb, msb) = (val[0], val[1]); let lsb: Option<()> = self.put_d8(idx, lsb); let msb: Option<()> = self.put_d8(idx + 1, msb); lsb.and(msb) } }
39.391608
88
0.587964
d9146d0c3b97b0656baf9e7029758462b67810e0
1,006
/*a Copyright Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. @file markup.rs @brief Markup module */ //a Documentation /*! # Markup module This module provides common types for managing markup languages. It provides a standard error type that utilizes a file/stream [Span] trait, and then provides markup [Event]s for parsing or writing markup streams. !*/ //a Imports mod error; mod event; mod traits; //a Exports pub use error::{Error, Result}; pub use event::{ContentType, Event, EventType}; pub use traits::Span;
25.15
72
0.765408
228ed13d89ad87da8491b781d99dcacd594c02e1
5,873
#[doc = "<s>Vulkan Manual Page</s> · Constant"] #[doc(alias = "VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION")] pub const EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION: u32 = 1; #[doc = "<s>Vulkan Manual Page</s> · Constant"] #[doc(alias = "VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME")] pub const EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME: *const std::os::raw::c_char = crate::cstr!("VK_EXT_texture_compression_astc_hdr"); #[doc = "Provided by [`crate::extensions::ext_texture_compression_astc_hdr`]"] impl crate::vk1_0::Format { pub const ASTC_4X4_SFLOAT_BLOCK_EXT: Self = Self(1000066000); pub const ASTC_5X4_SFLOAT_BLOCK_EXT: Self = Self(1000066001); pub const ASTC_5X5_SFLOAT_BLOCK_EXT: Self = Self(1000066002); pub const ASTC_6X5_SFLOAT_BLOCK_EXT: Self = Self(1000066003); pub const ASTC_6X6_SFLOAT_BLOCK_EXT: Self = Self(1000066004); pub const ASTC_8X5_SFLOAT_BLOCK_EXT: Self = Self(1000066005); pub const ASTC_8X6_SFLOAT_BLOCK_EXT: Self = Self(1000066006); pub const ASTC_8X8_SFLOAT_BLOCK_EXT: Self = Self(1000066007); pub const ASTC_10X5_SFLOAT_BLOCK_EXT: Self = Self(1000066008); pub const ASTC_10X6_SFLOAT_BLOCK_EXT: Self = Self(1000066009); pub const ASTC_10X8_SFLOAT_BLOCK_EXT: Self = Self(1000066010); pub const ASTC_10X10_SFLOAT_BLOCK_EXT: Self = Self(1000066011); pub const ASTC_12X10_SFLOAT_BLOCK_EXT: Self = Self(1000066012); pub const ASTC_12X12_SFLOAT_BLOCK_EXT: Self = Self(1000066013); } #[doc = "Provided by [`crate::extensions::ext_texture_compression_astc_hdr`]"] impl crate::vk1_0::StructureType { pub const PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT: Self = Self(1000066000); } impl<'a> crate::ExtendableFrom<'a, PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT> for crate::vk1_0::DeviceCreateInfoBuilder<'a> {} impl<'a> crate::ExtendableFrom<'a, PhysicalDeviceTextureCompressionASTCHDRFeaturesEXTBuilder<'_>> for crate::vk1_0::DeviceCreateInfoBuilder<'a> {} impl<'a> crate::ExtendableFrom<'a, 
PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT> for crate::vk1_1::PhysicalDeviceFeatures2Builder<'a> {} impl<'a> crate::ExtendableFrom<'a, PhysicalDeviceTextureCompressionASTCHDRFeaturesEXTBuilder<'_>> for crate::vk1_1::PhysicalDeviceFeatures2Builder<'a> {} #[doc = "[Vulkan Manual Page](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT.html) · Structure"] #[doc(alias = "VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT")] #[derive(Copy, Clone)] #[repr(C)] pub struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT { pub s_type: crate::vk1_0::StructureType, pub p_next: *mut std::ffi::c_void, pub texture_compression_astc_hdr: crate::vk1_0::Bool32, } impl PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT { pub const STRUCTURE_TYPE: crate::vk1_0::StructureType = crate::vk1_0::StructureType::PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT; } impl Default for PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT { fn default() -> Self { Self { s_type: Self::STRUCTURE_TYPE, p_next: std::ptr::null_mut(), texture_compression_astc_hdr: Default::default() } } } impl std::fmt::Debug for PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.debug_struct("PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT").field("s_type", &self.s_type).field("p_next", &self.p_next).field("texture_compression_astc_hdr", &(self.texture_compression_astc_hdr != 0)).finish() } } impl PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT { #[inline] pub fn into_builder<'a>(self) -> PhysicalDeviceTextureCompressionASTCHDRFeaturesEXTBuilder<'a> { PhysicalDeviceTextureCompressionASTCHDRFeaturesEXTBuilder(self, std::marker::PhantomData) } } #[derive(Copy, Clone)] #[doc = "[Vulkan Manual Page](https://www.khronos.org/registry/vulkan/specs/1.2-extensions/man/html/VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT.html) · Builder of 
[`PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT`]"] #[repr(transparent)] pub struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXTBuilder<'a>(PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, std::marker::PhantomData<&'a ()>); impl<'a> PhysicalDeviceTextureCompressionASTCHDRFeaturesEXTBuilder<'a> { #[inline] pub fn new() -> PhysicalDeviceTextureCompressionASTCHDRFeaturesEXTBuilder<'a> { PhysicalDeviceTextureCompressionASTCHDRFeaturesEXTBuilder(Default::default(), std::marker::PhantomData) } #[inline] pub fn texture_compression_astc_hdr(mut self, texture_compression_astc_hdr: bool) -> Self { self.0.texture_compression_astc_hdr = texture_compression_astc_hdr as _; self } #[inline] #[doc = r" Discards all lifetime information."] #[doc = r" Use the `Deref` and `DerefMut` implementations if possible."] pub fn build_dangling(self) -> PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT { self.0 } } impl<'a> std::default::Default for PhysicalDeviceTextureCompressionASTCHDRFeaturesEXTBuilder<'a> { fn default() -> PhysicalDeviceTextureCompressionASTCHDRFeaturesEXTBuilder<'a> { Self::new() } } impl<'a> std::fmt::Debug for PhysicalDeviceTextureCompressionASTCHDRFeaturesEXTBuilder<'a> { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { std::fmt::Debug::fmt(&self.0, f) } } impl<'a> std::ops::Deref for PhysicalDeviceTextureCompressionASTCHDRFeaturesEXTBuilder<'a> { type Target = PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; fn deref(&self) -> &Self::Target { &self.0 } } impl<'a> std::ops::DerefMut for PhysicalDeviceTextureCompressionASTCHDRFeaturesEXTBuilder<'a> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } }
57.578431
228
0.768091
7aa65c68aa845a7d0c32f8ed6f2408cd7f45e0a5
665
use azure_core::errors::AzureError; use azure_core::headers::CommonStorageResponseHeaders; use bytes::Bytes; use http::response::Response; use std::convert::TryInto; #[derive(Debug, Clone)] pub struct SetQueueMetadataResponse { pub common_storage_response_headers: CommonStorageResponseHeaders, } impl std::convert::TryFrom<&Response<Bytes>> for SetQueueMetadataResponse { type Error = AzureError; fn try_from(response: &Response<Bytes>) -> Result<Self, Self::Error> { debug!("response == {:?}", response); Ok(SetQueueMetadataResponse { common_storage_response_headers: response.headers().try_into()?, }) } }
28.913043
76
0.714286
d7c2b52b8dea424ec1e1606e8e1af9fc725f4298
15,171
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Defines the `PartialOrd` and `PartialEq` comparison traits. //! //! This module defines both `PartialOrd` and `PartialEq` traits which are used by the //! compiler to implement comparison operators. Rust programs may implement //!`PartialOrd` to overload the `<`, `<=`, `>`, and `>=` operators, and may implement //! `PartialEq` to overload the `==` and `!=` operators. //! //! For example, to define a type with a customized definition for the PartialEq //! operators, you could do the following: //! //! ```rust //! // Our type. //! struct SketchyNum { //! num : int //! } //! //! // Our implementation of `PartialEq` to support `==` and `!=`. //! impl PartialEq for SketchyNum { //! // Our custom eq allows numbers which are near each other to be equal! :D //! fn eq(&self, other: &SketchyNum) -> bool { //! (self.num - other.num).abs() < 5 //! } //! } //! //! // Now these binary operators will work when applied! //! assert!(SketchyNum {num: 37} == SketchyNum {num: 34}); //! assert!(SketchyNum {num: 25} != SketchyNum {num: 57}); //! ``` #![stable] use kinds::Sized; use option::{Option, Some, None}; /// Trait for values that can be compared for equality and inequality. /// /// This trait allows for partial equality, for types that do not have an /// equivalence relation. For example, in floating point numbers `NaN != NaN`, /// so floating point types implement `PartialEq` but not `Eq`. /// /// PartialEq only requires the `eq` method to be implemented; `ne` is defined /// in terms of it by default. 
Any manual implementation of `ne` *must* respect /// the rule that `eq` is a strict inverse of `ne`; that is, `!(a == b)` if and /// only if `a != b`. /// /// Eventually, this will be implemented by default for types that implement /// `Eq`. #[lang="eq"] #[unstable = "Definition may change slightly after trait reform"] pub trait PartialEq { /// This method tests for `self` and `other` values to be equal, and is used by `==`. fn eq(&self, other: &Self) -> bool; /// This method tests for `!=`. #[inline] fn ne(&self, other: &Self) -> bool { !self.eq(other) } } /// Trait for equality comparisons which are [equivalence relations]( /// https://en.wikipedia.org/wiki/Equivalence_relation). /// /// This means, that in addition to `a == b` and `a != b` being strict /// inverses, the equality must be (for all `a`, `b` and `c`): /// /// - reflexive: `a == a`; /// - symmetric: `a == b` implies `b == a`; and /// - transitive: `a == b` and `b == c` implies `a == c`. #[unstable = "Definition may change slightly after trait reform"] pub trait Eq: PartialEq { // FIXME #13101: this method is used solely by #[deriving] to // assert that every component of a type implements #[deriving] // itself, the current deriving infrastructure means doing this // assertion without using a method on this trait is nearly // impossible. // // This should never be implemented by hand. #[doc(hidden)] #[inline(always)] fn assert_receiver_is_total_eq(&self) {} } /// An ordering is, e.g, a result of a comparison between two values. #[deriving(Clone, PartialEq, Show)] #[stable] pub enum Ordering { /// An ordering where a compared value is less [than another]. Less = -1i, /// An ordering where a compared value is equal [to another]. Equal = 0i, /// An ordering where a compared value is greater [than another]. Greater = 1i, } impl Ordering { /// Reverse the `Ordering`, so that `Less` becomes `Greater` and /// vice versa. 
/// /// # Example /// /// ```rust /// assert_eq!(Less.reverse(), Greater); /// assert_eq!(Equal.reverse(), Equal); /// assert_eq!(Greater.reverse(), Less); /// /// /// let mut data: &mut [_] = &mut [2u, 10, 5, 8]; /// /// // sort the array from largest to smallest. /// data.sort_by(|a, b| a.cmp(b).reverse()); /// /// let b: &mut [_] = &mut [10u, 8, 5, 2]; /// assert!(data == b); /// ``` #[inline] #[experimental] pub fn reverse(self) -> Ordering { unsafe { // this compiles really nicely (to a single instruction); // an explicit match has a pile of branches and // comparisons. // // NB. it is safe because of the explicit discriminants // given above. ::mem::transmute::<_, Ordering>(-(self as i8)) } } } /// Trait for types that form a [total order]( /// https://en.wikipedia.org/wiki/Total_order). /// /// An order is a total order if it is (for all `a`, `b` and `c`): /// /// - total and antisymmetric: exactly one of `a < b`, `a == b` or `a > b` is /// true; and /// - transitive, `a < b` and `b < c` implies `a < c`. The same must hold for /// both `==` and `>`. #[unstable = "Definition may change slightly after trait reform"] pub trait Ord: Eq + PartialOrd { /// This method returns an ordering between `self` and `other` values. /// /// By convention, `self.cmp(&other)` returns the ordering matching /// the expression `self <operator> other` if true. 
For example: /// /// ``` /// assert_eq!( 5u.cmp(&10), Less); // because 5 < 10 /// assert_eq!(10u.cmp(&5), Greater); // because 10 > 5 /// assert_eq!( 5u.cmp(&5), Equal); // because 5 == 5 /// ``` fn cmp(&self, other: &Self) -> Ordering; } #[unstable = "Trait is unstable."] impl Eq for Ordering {} #[unstable = "Trait is unstable."] impl Ord for Ordering { #[inline] fn cmp(&self, other: &Ordering) -> Ordering { (*self as int).cmp(&(*other as int)) } } #[unstable = "Trait is unstable."] impl PartialOrd for Ordering { #[inline] fn partial_cmp(&self, other: &Ordering) -> Option<Ordering> { (*self as int).partial_cmp(&(*other as int)) } } /// Trait for values that can be compared for a sort-order. /// /// PartialOrd only requires implementation of the `partial_cmp` method, /// with the others generated from default implementations. /// /// However it remains possible to implement the others separately for types /// which do not have a total order. For example, for floating point numbers, /// `NaN < 0 == false` and `NaN >= 0 == false` (cf. IEEE 754-2008 section /// 5.11). #[lang="ord"] #[unstable = "Definition may change slightly after trait reform"] pub trait PartialOrd: PartialEq { /// This method returns an ordering between `self` and `other` values /// if one exists. fn partial_cmp(&self, other: &Self) -> Option<Ordering>; /// This method tests less than (for `self` and `other`) and is used by the `<` operator. #[inline] fn lt(&self, other: &Self) -> bool { match self.partial_cmp(other) { Some(Less) => true, _ => false, } } /// This method tests less than or equal to (`<=`). #[inline] fn le(&self, other: &Self) -> bool { match self.partial_cmp(other) { Some(Less) | Some(Equal) => true, _ => false, } } /// This method tests greater than (`>`). #[inline] fn gt(&self, other: &Self) -> bool { match self.partial_cmp(other) { Some(Greater) => true, _ => false, } } /// This method tests greater than or equal to (`>=`). 
#[inline] fn ge(&self, other: &Self) -> bool { match self.partial_cmp(other) { Some(Greater) | Some(Equal) => true, _ => false, } } } /// The equivalence relation. Two values may be equivalent even if they are /// of different types. The most common use case for this relation is /// container types; e.g. it is often desirable to be able to use `&str` /// values to look up entries in a container with `String` keys. #[experimental = "Better solutions may be discovered."] pub trait Equiv<T> for Sized? { /// Implement this function to decide equivalent values. fn equiv(&self, other: &T) -> bool; } /// Compare and return the minimum of two values. #[inline] #[stable] pub fn min<T: Ord>(v1: T, v2: T) -> T { if v1 < v2 { v1 } else { v2 } } /// Compare and return the maximum of two values. #[inline] #[stable] pub fn max<T: Ord>(v1: T, v2: T) -> T { if v1 > v2 { v1 } else { v2 } } /// Compare and return the minimum of two values if there is one. /// /// Returns the first argument if the comparison determines them to be equal. #[inline] #[experimental] pub fn partial_min<T: PartialOrd>(v1: T, v2: T) -> Option<T> { match v1.partial_cmp(&v2) { Some(Less) | Some(Equal) => Some(v1), Some(Greater) => Some(v2), None => None } } /// Compare and return the maximum of two values if there is one. /// /// Returns the first argument if the comparison determines them to be equal. #[inline] #[experimental] pub fn partial_max<T: PartialOrd>(v1: T, v2: T) -> Option<T> { match v1.partial_cmp(&v2) { Some(Less) => Some(v2), Some(Equal) | Some(Greater) => Some(v1), None => None } } // Implementation of PartialEq, Eq, PartialOrd and Ord for primitive types mod impls { use cmp::{PartialOrd, Ord, PartialEq, Eq, Ordering, Less, Greater, Equal}; use option::{Option, Some, None}; macro_rules! 
partial_eq_impl( ($($t:ty)*) => ($( #[unstable = "Trait is unstable."] impl PartialEq for $t { #[inline] fn eq(&self, other: &$t) -> bool { (*self) == (*other) } #[inline] fn ne(&self, other: &$t) -> bool { (*self) != (*other) } } )*) ) #[unstable = "Trait is unstable."] impl PartialEq for () { #[inline] fn eq(&self, _other: &()) -> bool { true } #[inline] fn ne(&self, _other: &()) -> bool { false } } partial_eq_impl!(bool char uint u8 u16 u32 u64 int i8 i16 i32 i64 f32 f64) macro_rules! eq_impl( ($($t:ty)*) => ($( #[unstable = "Trait is unstable."] impl Eq for $t {} )*) ) eq_impl!(() bool char uint u8 u16 u32 u64 int i8 i16 i32 i64) macro_rules! partial_ord_impl( ($($t:ty)*) => ($( #[unstable = "Trait is unstable."] impl PartialOrd for $t { #[inline] fn partial_cmp(&self, other: &$t) -> Option<Ordering> { match (self <= other, self >= other) { (false, false) => None, (false, true) => Some(Greater), (true, false) => Some(Less), (true, true) => Some(Equal), } } #[inline] fn lt(&self, other: &$t) -> bool { (*self) < (*other) } #[inline] fn le(&self, other: &$t) -> bool { (*self) <= (*other) } #[inline] fn ge(&self, other: &$t) -> bool { (*self) >= (*other) } #[inline] fn gt(&self, other: &$t) -> bool { (*self) > (*other) } } )*) ) #[unstable = "Trait is unstable."] impl PartialOrd for () { #[inline] fn partial_cmp(&self, _: &()) -> Option<Ordering> { Some(Equal) } } #[unstable = "Trait is unstable."] impl PartialOrd for bool { #[inline] fn partial_cmp(&self, other: &bool) -> Option<Ordering> { (*self as u8).partial_cmp(&(*other as u8)) } } partial_ord_impl!(char uint u8 u16 u32 u64 int i8 i16 i32 i64 f32 f64) macro_rules! 
ord_impl( ($($t:ty)*) => ($( #[unstable = "Trait is unstable."] impl Ord for $t { #[inline] fn cmp(&self, other: &$t) -> Ordering { if *self < *other { Less } else if *self > *other { Greater } else { Equal } } } )*) ) #[unstable = "Trait is unstable."] impl Ord for () { #[inline] fn cmp(&self, _other: &()) -> Ordering { Equal } } #[unstable = "Trait is unstable."] impl Ord for bool { #[inline] fn cmp(&self, other: &bool) -> Ordering { (*self as u8).cmp(&(*other as u8)) } } ord_impl!(char uint u8 u16 u32 u64 int i8 i16 i32 i64) // & pointers #[unstable = "Trait is unstable."] impl<'a, T: PartialEq> PartialEq for &'a T { #[inline] fn eq(&self, other: & &'a T) -> bool { *(*self) == *(*other) } #[inline] fn ne(&self, other: & &'a T) -> bool { *(*self) != *(*other) } } #[unstable = "Trait is unstable."] impl<'a, T: PartialOrd> PartialOrd for &'a T { #[inline] fn partial_cmp(&self, other: &&'a T) -> Option<Ordering> { (**self).partial_cmp(*other) } #[inline] fn lt(&self, other: & &'a T) -> bool { *(*self) < *(*other) } #[inline] fn le(&self, other: & &'a T) -> bool { *(*self) <= *(*other) } #[inline] fn ge(&self, other: & &'a T) -> bool { *(*self) >= *(*other) } #[inline] fn gt(&self, other: & &'a T) -> bool { *(*self) > *(*other) } } #[unstable = "Trait is unstable."] impl<'a, T: Ord> Ord for &'a T { #[inline] fn cmp(&self, other: & &'a T) -> Ordering { (**self).cmp(*other) } } #[unstable = "Trait is unstable."] impl<'a, T: Eq> Eq for &'a T {} // &mut pointers #[unstable = "Trait is unstable."] impl<'a, T: PartialEq> PartialEq for &'a mut T { #[inline] fn eq(&self, other: &&'a mut T) -> bool { **self == *(*other) } #[inline] fn ne(&self, other: &&'a mut T) -> bool { **self != *(*other) } } #[unstable = "Trait is unstable."] impl<'a, T: PartialOrd> PartialOrd for &'a mut T { #[inline] fn partial_cmp(&self, other: &&'a mut T) -> Option<Ordering> { (**self).partial_cmp(*other) } #[inline] fn lt(&self, other: &&'a mut T) -> bool { **self < **other } #[inline] fn le(&self, 
other: &&'a mut T) -> bool { **self <= **other } #[inline] fn ge(&self, other: &&'a mut T) -> bool { **self >= **other } #[inline] fn gt(&self, other: &&'a mut T) -> bool { **self > **other } } #[unstable = "Trait is unstable."] impl<'a, T: Ord> Ord for &'a mut T { #[inline] fn cmp(&self, other: &&'a mut T) -> Ordering { (**self).cmp(*other) } } #[unstable = "Trait is unstable."] impl<'a, T: Eq> Eq for &'a mut T {} }
33.196937
93
0.547492
2910976c7be2cafbf611bbd53d2c1df421772271
420
use actix_web::{Responder, HttpResponse, App, test}; use actix_web_codegen::*; /// Docstrings shouldn't break anything. #[get("/")] async fn index() -> impl Responder { HttpResponse::Ok() } #[actix_web::main] async fn main() { let srv = test::start(|| App::new().service(index)); let request = srv.get("/"); let response = request.send().await.unwrap(); assert!(response.status().is_success()); }
23.333333
56
0.640476
16841c25065e18c9b96424c3b5b82261de73c348
2,257
use std::rc::Rc; pub struct List<T> { head: Link<T>, } type Link<T> = Option<Rc<Node<T>>>; struct Node<T> { elem: T, next: Link<T>, } impl<T> List<T> { pub fn new() -> Self { Self { head: None } } pub fn append(&self, elem: T) -> List<T> { List { head: Some(Rc::new(Node { elem, next: self.head.clone(), })), } } pub fn tail(&self) -> List<T> { List { head: self.head.as_ref().and_then(|node| node.next.clone()), } } pub fn head(&self) -> Option<&T> { self.head.as_ref().map(|node| &node.elem) } pub fn iter(&self) -> Iter<T> { Iter { next: self.head.as_ref().map(|node| &**node), } } } pub struct Iter<'a, T> { next: Option<&'a Node<T>>, } impl<'a, T> Iterator for Iter<'a, T> { type Item = &'a T; fn next(&mut self) -> Option<Self::Item> { self.next.map(|node| { self.next = node.next.as_ref().map(|node| &**node); &node.elem }) } } impl<T> Drop for List<T> { fn drop(&mut self) { let mut head = self.head.take(); while let Some(node) = head { if let Ok(mut node) = Rc::try_unwrap(node) { head = node.next.take(); } else { break; } } } } #[cfg(test)] mod tests { use super::List; #[test] fn basics() { let list = List::new(); assert_eq!(list.head(), None); let list = list.append(1).append(2).append(3); assert_eq!(list.head(), Some(&3)); let list = list.tail(); assert_eq!(list.head(), Some(&2)); let list = list.tail(); assert_eq!(list.head(), Some(&1)); let list = list.tail(); assert_eq!(list.head(), None); // Make sure empty tail works let list = list.tail(); assert_eq!(list.head(), None); } #[test] fn iter() { let list = List::new().append(1).append(2).append(3); let mut iter = list.iter(); assert_eq!(iter.next(), Some(&3)); assert_eq!(iter.next(), Some(&2)); assert_eq!(iter.next(), Some(&1)); } }
20.706422
72
0.463004
72fc96a867ab218e0e96cc9e19ba439442bb9fa2
2,590
//! Interface for Screeps [`RawMemory`] global object. //! //! [`RawMemory`]: https://docs.screeps.com/api/#RawMemory use serde::Deserialize; use crate::macros::*; #[derive(Deserialize, Debug)] pub struct ForeignSegment { username: String, id: String, data: String, } js_deserializable!(ForeignSegment); get_from_js!(get_active_segments -> { Object.keys(RawMemory.segments).map(Number) } -> Vec<u32>); /// Sets active segments (max 10 ids). pub fn set_active_segments(ids: &[u32]) { assert!( ids.len() <= 10, "can't set more than 10 active segments at a time" ); js! { @(no_return) RawMemory.setActiveSegments(@{ids}); } } get_from_js!(get_segment(id: u32) -> { RawMemory.segments[@{id}] } -> Option<String>); pub fn set_segment(id: u32, data: &str) { js! { @(no_return) RawMemory.segments[@{id}] = @{data}; } } /// This drops the reference to a segment; it doesn't affect the content of the /// segment. /// /// This is the equivalent of doing `delete RawMemory.segments[id]`. Again, this /// only deletes the local view of the segment, not the serialized one. It may /// be used to `set_segment` a new segment that wasn't part of the original 10 /// active segments. pub fn drop_segment(id: u32) { js! { @(no_return) delete RawMemory.segments[@{id}]; } } get_from_js!(get_foreign_segment -> { RawMemory.foreignSegment } -> ForeignSegment); /// Implements `RawMemory.setActiveForeignSegment` /// /// To use the default public segment of `username` (as set with /// [`set_default_public_segment`]), Use `None` instead of `Some(id)`. /// /// To clear the foreign segment, pass the empty string `""` as a username. pub fn set_active_foreign_segment(username: &str, id: Option<u32>) { if username == "" { js! { @(no_return) RawMemory.setActiveForeignSegment(null); } } else { match id { Some(id) => js! { @(no_return) RawMemory.setActiveForeignSegment(@{username}, @{id}); }, None => js! 
{ @(no_return) RawMemory.setActiveForeignSegment(@{username}); }, }; }; } pub fn set_default_public_segment(id: u32) { js! { @(no_return) RawMemory.setDefaultPublicSegment(@{id}); } } pub fn set_public_segments(ids: &[u32]) { js! { @(no_return) RawMemory.setPublicSegments(@{ids}); } } get_from_js!(get -> {RawMemory.get()} -> String); pub fn set(value: &str) { js! { @(no_return) RawMemory.set(@{value}); } }
25.392157
80
0.616216
bfb676c41dc560e6e0234fe745e0bbd6d33dcca6
29,217
// Copyright (c) 2016 The vulkano developers // Licensed under the Apache License, Version 2.0 // <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT // license <LICENSE-MIT or http://opensource.org/licenses/MIT>, // at your option. All files in the project carrying such // notice may not be copied, modified, or distributed except // according to those terms. use std::error; use std::fmt; use std::mem; use std::os::raw::c_ulong; use std::ptr; use std::sync::Arc; use std::sync::atomic::AtomicBool; use format::Format; use image::ImageUsage; use instance::Instance; use instance::PhysicalDevice; use instance::QueueFamily; use swapchain::Capabilities; use swapchain::SurfaceSwapchainLock; use swapchain::capabilities; use swapchain::display::DisplayMode; use swapchain::display::DisplayPlane; use Error; use OomError; use VulkanObject; use check_errors; use vk; /// Represents a surface on the screen. /// /// Creating a `Surface` is platform-specific. pub struct Surface<W> { window: W, instance: Arc<Instance>, surface: vk::SurfaceKHR, // If true, a swapchain has been associated to this surface, and that any new swapchain // creation should be forbidden. has_swapchain: AtomicBool, } impl<W> Surface<W> { /// Creates a `Surface` given the raw handler. /// /// Be careful when using it /// pub unsafe fn from_raw_surface(instance: Arc<Instance>, surface: vk::SurfaceKHR, win: W) -> Surface<W> { Surface { window: win, instance: instance, surface: surface, has_swapchain: AtomicBool::new(false), } } /// Creates a `Surface` that covers a display mode. /// /// # Panic /// /// - Panics if `display_mode` and `plane` don't belong to the same physical device. /// - Panics if `plane` doesn't support the display of `display_mode`. 
/// pub fn from_display_mode(display_mode: &DisplayMode, plane: &DisplayPlane) -> Result<Arc<Surface<()>>, SurfaceCreationError> { if !display_mode .display() .physical_device() .instance() .loaded_extensions() .khr_display { return Err(SurfaceCreationError::MissingExtension { name: "VK_KHR_display" }); } assert_eq!(display_mode.display().physical_device().internal_object(), plane.physical_device().internal_object()); assert!(plane.supports(display_mode.display())); let instance = display_mode.display().physical_device().instance(); let vk = instance.pointers(); let surface = unsafe { let infos = vk::DisplaySurfaceCreateInfoKHR { sType: vk::STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR, pNext: ptr::null(), flags: 0, // reserved displayMode: display_mode.internal_object(), planeIndex: plane.index(), planeStackIndex: 0, // FIXME: plane.properties.currentStackIndex, transform: vk::SURFACE_TRANSFORM_IDENTITY_BIT_KHR, // TODO: let user choose globalAlpha: 0.0, // TODO: let user choose alphaMode: vk::DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR, // TODO: let user choose imageExtent: vk::Extent2D { // TODO: let user choose width: display_mode.visible_region()[0], height: display_mode.visible_region()[1], }, }; let mut output = mem::uninitialized(); check_errors(vk.CreateDisplayPlaneSurfaceKHR(instance.internal_object(), &infos, ptr::null(), &mut output))?; output }; Ok(Arc::new(Surface { window: (), instance: instance.clone(), surface: surface, has_swapchain: AtomicBool::new(false), })) } /// Creates a `Surface` from a Win32 window. /// /// The surface's min, max and current extent will always match the window's dimensions. /// /// # Safety /// /// The caller must ensure that the `hinstance` and the `hwnd` are both correct and stay /// alive for the entire lifetime of the surface. The `win` parameter can be used to ensure this. 
pub unsafe fn from_hwnd<T, U>(instance: Arc<Instance>, hinstance: *const T, hwnd: *const U, win: W) -> Result<Arc<Surface<W>>, SurfaceCreationError> { let vk = instance.pointers(); if !instance.loaded_extensions().khr_win32_surface { return Err(SurfaceCreationError::MissingExtension { name: "VK_KHR_win32_surface" }); } let surface = { let infos = vk::Win32SurfaceCreateInfoKHR { sType: vk::STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR, pNext: ptr::null(), flags: 0, // reserved hinstance: hinstance as *mut _, hwnd: hwnd as *mut _, }; let mut output = mem::uninitialized(); check_errors(vk.CreateWin32SurfaceKHR(instance.internal_object(), &infos, ptr::null(), &mut output))?; output }; Ok(Arc::new(Surface { window: win, instance: instance.clone(), surface: surface, has_swapchain: AtomicBool::new(false), })) } /// Creates a `Surface` from an XCB window. /// /// The surface's min, max and current extent will always match the window's dimensions. /// /// # Safety /// /// The caller must ensure that the `connection` and the `window` are both correct and stay /// alive for the entire lifetime of the surface. The `win` parameter can be used to ensure this. 
pub unsafe fn from_xcb<C>(instance: Arc<Instance>, connection: *const C, window: u32, win: W) -> Result<Arc<Surface<W>>, SurfaceCreationError> { let vk = instance.pointers(); if !instance.loaded_extensions().khr_xcb_surface { return Err(SurfaceCreationError::MissingExtension { name: "VK_KHR_xcb_surface" }); } let surface = { let infos = vk::XcbSurfaceCreateInfoKHR { sType: vk::STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR, pNext: ptr::null(), flags: 0, // reserved connection: connection as *mut _, window: window, }; let mut output = mem::uninitialized(); check_errors(vk.CreateXcbSurfaceKHR(instance.internal_object(), &infos, ptr::null(), &mut output))?; output }; Ok(Arc::new(Surface { window: win, instance: instance.clone(), surface: surface, has_swapchain: AtomicBool::new(false), })) } /// Creates a `Surface` from an Xlib window. /// /// The surface's min, max and current extent will always match the window's dimensions. /// /// # Safety /// /// The caller must ensure that the `display` and the `window` are both correct and stay /// alive for the entire lifetime of the surface. The `win` parameter can be used to ensure this. pub unsafe fn from_xlib<D>(instance: Arc<Instance>, display: *const D, window: c_ulong, win: W) -> Result<Arc<Surface<W>>, SurfaceCreationError> { let vk = instance.pointers(); if !instance.loaded_extensions().khr_xlib_surface { return Err(SurfaceCreationError::MissingExtension { name: "VK_KHR_xlib_surface" }); } let surface = { let infos = vk::XlibSurfaceCreateInfoKHR { sType: vk::STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR, pNext: ptr::null(), flags: 0, // reserved dpy: display as *mut _, window: window, }; let mut output = mem::uninitialized(); check_errors(vk.CreateXlibSurfaceKHR(instance.internal_object(), &infos, ptr::null(), &mut output))?; output }; Ok(Arc::new(Surface { window: win, instance: instance.clone(), surface: surface, has_swapchain: AtomicBool::new(false), })) } /// Creates a `Surface` from a Wayland window. 
/// /// The window's dimensions will be set to the size of the swapchain. /// /// # Safety /// /// The caller must ensure that the `display` and the `surface` are both correct and stay /// alive for the entire lifetime of the surface. The `win` parameter can be used to ensure this. pub unsafe fn from_wayland<D, S>(instance: Arc<Instance>, display: *const D, surface: *const S, win: W) -> Result<Arc<Surface<W>>, SurfaceCreationError> { let vk = instance.pointers(); if !instance.loaded_extensions().khr_wayland_surface { return Err(SurfaceCreationError::MissingExtension { name: "VK_KHR_wayland_surface" }); } let surface = { let infos = vk::WaylandSurfaceCreateInfoKHR { sType: vk::STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR, pNext: ptr::null(), flags: 0, // reserved display: display as *mut _, surface: surface as *mut _, }; let mut output = mem::uninitialized(); check_errors(vk.CreateWaylandSurfaceKHR(instance.internal_object(), &infos, ptr::null(), &mut output))?; output }; Ok(Arc::new(Surface { window: win, instance: instance.clone(), surface: surface, has_swapchain: AtomicBool::new(false), })) } /// Creates a `Surface` from an Android window. /// /// # Safety /// /// The caller must ensure that the `window` is correct and stays alive for the entire /// lifetime of the surface. The `win` parameter can be used to ensure this. 
pub unsafe fn from_anativewindow<T>(instance: Arc<Instance>, window: *const T, win: W) -> Result<Arc<Surface<W>>, SurfaceCreationError> { let vk = instance.pointers(); if !instance.loaded_extensions().khr_android_surface { return Err(SurfaceCreationError::MissingExtension { name: "VK_KHR_android_surface" }); } let surface = { let infos = vk::AndroidSurfaceCreateInfoKHR { sType: vk::STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR, pNext: ptr::null(), flags: 0, // reserved window: window as *mut _, }; let mut output = mem::uninitialized(); check_errors(vk.CreateAndroidSurfaceKHR(instance.internal_object(), &infos, ptr::null(), &mut output))?; output }; Ok(Arc::new(Surface { window: win, instance: instance.clone(), surface: surface, has_swapchain: AtomicBool::new(false), })) } /// Creates a `Surface` from an iOS `UIView`. /// /// # Safety /// /// - The caller must ensure that the `view` is correct and stays alive for the entire /// lifetime of the surface. The win parameter can be used to ensure this. /// - The `UIView` must be backed by a `CALayer` instance of type `CAMetalLayer`. pub unsafe fn from_ios_moltenvk<T>(instance: Arc<Instance>, view: *const T, win: W) -> Result<Arc<Surface<W>>, SurfaceCreationError> { let vk = instance.pointers(); if !instance.loaded_extensions().mvk_ios_surface { return Err(SurfaceCreationError::MissingExtension { name: "VK_MVK_ios_surface" }); } let surface = { let infos = vk::IOSSurfaceCreateInfoMVK { sType: vk::STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK, pNext: ptr::null(), flags: 0, // reserved pView: view as *const _, }; let mut output = mem::uninitialized(); check_errors(vk.CreateIOSSurfaceMVK(instance.internal_object(), &infos, ptr::null(), &mut output))?; output }; Ok(Arc::new(Surface { window: win, instance: instance.clone(), surface: surface, has_swapchain: AtomicBool::new(false), })) } /// Creates a `Surface` from a MacOS `NSView`. 
/// /// # Safety /// /// - The caller must ensure that the `view` is correct and stays alive for the entire /// lifetime of the surface. The `win` parameter can be used to ensure this. /// - The `NSView` must be backed by a `CALayer` instance of type `CAMetalLayer`. pub unsafe fn from_macos_moltenvk<T>(instance: Arc<Instance>, view: *const T, win: W) -> Result<Arc<Surface<W>>, SurfaceCreationError> { let vk = instance.pointers(); if !instance.loaded_extensions().mvk_macos_surface { return Err(SurfaceCreationError::MissingExtension { name: "VK_MVK_macos_surface" }); } let surface = { let infos = vk::MacOSSurfaceCreateInfoMVK { sType: vk::STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK, pNext: ptr::null(), flags: 0, // reserved pView: view as *const _, }; let mut output = mem::uninitialized(); check_errors(vk.CreateMacOSSurfaceMVK(instance.internal_object(), &infos, ptr::null(), &mut output))?; output }; Ok(Arc::new(Surface { window: win, instance: instance.clone(), surface: surface, has_swapchain: AtomicBool::new(false), })) } /// Creates a `Surface` from a `code:nn::code:vi::code:Layer`. /// /// # Safety /// /// The caller must ensure that the `window` is correct and stays alive for the entire /// lifetime of the surface. The `win` parameter can be used to ensure this. 
pub unsafe fn from_vi_surface<T>(instance: Arc<Instance>, window: *const T, win: W) -> Result<Arc<Surface<W>>, SurfaceCreationError> { let vk = instance.pointers(); if !instance.loaded_extensions().nn_vi_surface { return Err(SurfaceCreationError::MissingExtension { name: "VK_NN_vi_surface" }); } let surface = { let infos = vk::ViSurfaceCreateInfoNN { sType: vk::STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN, pNext: ptr::null(), flags: 0, // reserved window: window as *mut _, }; let mut output = mem::uninitialized(); check_errors(vk.CreateViSurfaceNN(instance.internal_object(), &infos, ptr::null(), &mut output))?; output }; Ok(Arc::new(Surface { window: win, instance: instance.clone(), surface: surface, has_swapchain: AtomicBool::new(false), })) } /// Returns true if the given queue family can draw on this surface. // FIXME: vulkano doesn't check this for the moment! pub fn is_supported(&self, queue: QueueFamily) -> Result<bool, CapabilitiesError> { unsafe { let vk = self.instance.pointers(); let mut output = mem::uninitialized(); check_errors(vk.GetPhysicalDeviceSurfaceSupportKHR(queue .physical_device() .internal_object(), queue.id(), self.surface, &mut output))?; Ok(output != 0) } } /// Retrieves the capabilities of a surface when used by a certain device. /// /// # Panic /// /// - Panics if the device and the surface don't belong to the same instance. 
/// pub fn capabilities(&self, device: PhysicalDevice) -> Result<Capabilities, CapabilitiesError> { unsafe { assert_eq!(&*self.instance as *const _, &**device.instance() as *const _, "Instance mismatch in Surface::capabilities"); let vk = self.instance.pointers(); let caps = { let mut out: vk::SurfaceCapabilitiesKHR = mem::uninitialized(); check_errors(vk.GetPhysicalDeviceSurfaceCapabilitiesKHR(device.internal_object(), self.surface, &mut out))?; out }; let formats = { let mut num = 0; check_errors(vk.GetPhysicalDeviceSurfaceFormatsKHR(device.internal_object(), self.surface, &mut num, ptr::null_mut()))?; let mut formats = Vec::with_capacity(num as usize); check_errors(vk.GetPhysicalDeviceSurfaceFormatsKHR(device.internal_object(), self.surface, &mut num, formats.as_mut_ptr()))?; formats.set_len(num as usize); formats }; let modes = { let mut num = 0; check_errors(vk.GetPhysicalDeviceSurfacePresentModesKHR(device.internal_object(), self.surface, &mut num, ptr::null_mut()))?; let mut modes = Vec::with_capacity(num as usize); check_errors(vk.GetPhysicalDeviceSurfacePresentModesKHR(device.internal_object(), self.surface, &mut num, modes.as_mut_ptr()))?; modes.set_len(num as usize); debug_assert!(modes .iter() .find(|&&m| m == vk::PRESENT_MODE_FIFO_KHR) .is_some()); debug_assert!(modes.iter().count() > 0); capabilities::supported_present_modes_from_list(modes.into_iter()) }; Ok(Capabilities { min_image_count: caps.minImageCount, max_image_count: if caps.maxImageCount == 0 { None } else { Some(caps.maxImageCount) }, current_extent: if caps.currentExtent.width == 0xffffffff && caps.currentExtent.height == 0xffffffff { None } else { Some([caps.currentExtent.width, caps.currentExtent.height]) }, min_image_extent: [caps.minImageExtent.width, caps.minImageExtent.height], max_image_extent: [caps.maxImageExtent.width, caps.maxImageExtent.height], max_image_array_layers: caps.maxImageArrayLayers, supported_transforms: 
capabilities::surface_transforms_from_bits(caps.supportedTransforms), current_transform: capabilities::surface_transforms_from_bits(caps.currentTransform).iter().next().unwrap(), // TODO: supported_composite_alpha: capabilities::supported_composite_alpha_from_bits(caps.supportedCompositeAlpha), supported_usage_flags: { let usage = ImageUsage::from_bits(caps.supportedUsageFlags); debug_assert!(usage.color_attachment); // specs say that this must be true usage }, supported_formats: formats.into_iter().map(|f| { (Format::from_vulkan_num(f.format).unwrap(), capabilities::color_space_from_num(f.colorSpace)) }).collect(), present_modes: modes, }) } } #[inline] pub fn window(&self) -> &W { &self.window } /// Returns the instance this surface was created with. #[inline] pub fn instance(&self) -> &Arc<Instance> { &self.instance } } unsafe impl <W> SurfaceSwapchainLock for Surface<W> { #[inline] fn flag(&self) -> &AtomicBool { &self.has_swapchain } } unsafe impl <W> VulkanObject for Surface<W> { type Object = vk::SurfaceKHR; const TYPE: vk::DebugReportObjectTypeEXT = vk::DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT; #[inline] fn internal_object(&self) -> vk::SurfaceKHR { self.surface } } impl <W> fmt::Debug for Surface<W> { #[inline] fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!(fmt, "<Vulkan surface {:?}>", self.surface) } } impl <W> Drop for Surface<W> { #[inline] fn drop(&mut self) { unsafe { let vk = self.instance.pointers(); vk.DestroySurfaceKHR(self.instance.internal_object(), self.surface, ptr::null()); } } } /// Error that can happen when creating a debug callback. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum SurfaceCreationError { /// Not enough memory. OomError(OomError), /// The extension required for this function was not enabled. MissingExtension { /// Name of the missing extension. 
name: &'static str, }, } impl error::Error for SurfaceCreationError { #[inline] fn description(&self) -> &str { match *self { SurfaceCreationError::OomError(_) => "not enough memory available", SurfaceCreationError::MissingExtension { .. } => "the extension required for this function was not enabled", } } #[inline] fn cause(&self) -> Option<&error::Error> { match *self { SurfaceCreationError::OomError(ref err) => Some(err), _ => None, } } } impl fmt::Display for SurfaceCreationError { #[inline] fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!(fmt, "{}", error::Error::description(self)) } } impl From<OomError> for SurfaceCreationError { #[inline] fn from(err: OomError) -> SurfaceCreationError { SurfaceCreationError::OomError(err) } } impl From<Error> for SurfaceCreationError { #[inline] fn from(err: Error) -> SurfaceCreationError { match err { err @ Error::OutOfHostMemory => SurfaceCreationError::OomError(OomError::from(err)), err @ Error::OutOfDeviceMemory => SurfaceCreationError::OomError(OomError::from(err)), _ => panic!("unexpected error: {:?}", err), } } } /// Error that can happen when retrieving a surface's capabilities. #[derive(Copy, Clone, Debug, PartialEq, Eq)] #[repr(u32)] pub enum CapabilitiesError { /// Not enough memory. OomError(OomError), /// The surface is no longer accessible and must be recreated. 
SurfaceLost, } impl error::Error for CapabilitiesError { #[inline] fn description(&self) -> &str { match *self { CapabilitiesError::OomError(_) => "not enough memory", CapabilitiesError::SurfaceLost => "the surface is no longer valid", } } #[inline] fn cause(&self) -> Option<&error::Error> { match *self { CapabilitiesError::OomError(ref err) => Some(err), _ => None, } } } impl fmt::Display for CapabilitiesError { #[inline] fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!(fmt, "{}", error::Error::description(self)) } } impl From<OomError> for CapabilitiesError { #[inline] fn from(err: OomError) -> CapabilitiesError { CapabilitiesError::OomError(err) } } impl From<Error> for CapabilitiesError { #[inline] fn from(err: Error) -> CapabilitiesError { match err { err @ Error::OutOfHostMemory => CapabilitiesError::OomError(OomError::from(err)), err @ Error::OutOfDeviceMemory => CapabilitiesError::OomError(OomError::from(err)), Error::SurfaceLost => CapabilitiesError::SurfaceLost, _ => panic!("unexpected error: {:?}", err), } } } #[cfg(test)] mod tests { use std::ptr; use swapchain::Surface; use swapchain::SurfaceCreationError; #[test] fn khr_win32_surface_ext_missing() { let instance = instance!(); match unsafe { Surface::from_hwnd(instance, ptr::null::<u8>(), ptr::null::<u8>(), ()) } { Err(SurfaceCreationError::MissingExtension { .. }) => (), _ => panic!(), } } #[test] fn khr_xcb_surface_ext_missing() { let instance = instance!(); match unsafe { Surface::from_xcb(instance, ptr::null::<u8>(), 0, ()) } { Err(SurfaceCreationError::MissingExtension { .. }) => (), _ => panic!(), } } #[test] fn khr_xlib_surface_ext_missing() { let instance = instance!(); match unsafe { Surface::from_xlib(instance, ptr::null::<u8>(), 0, ()) } { Err(SurfaceCreationError::MissingExtension { .. 
}) => (), _ => panic!(), } } #[test] fn khr_wayland_surface_ext_missing() { let instance = instance!(); match unsafe { Surface::from_wayland(instance, ptr::null::<u8>(), ptr::null::<u8>(), ()) } { Err(SurfaceCreationError::MissingExtension { .. }) => (), _ => panic!(), } } #[test] fn khr_android_surface_ext_missing() { let instance = instance!(); match unsafe { Surface::from_anativewindow(instance, ptr::null::<u8>(), ()) } { Err(SurfaceCreationError::MissingExtension { .. }) => (), _ => panic!(), } } }
37.894942
140
0.504261
2f2b446e7060d23b3f0d180833cb9fb35b5a4c8a
1,649
mod mock; use snowcat_common::settings::Settings; use snowcat_common::state::character::Character; use std::collections::HashMap; use std::ops::{Deref, DerefMut}; /// A wrapper type to store a map of characters to character IDs. pub struct CharacterMap(pub HashMap<u64, Character>); impl CharacterMap { /// Insert a character into the character map, returning the old value if /// one was previously present. pub fn insert(&mut self, id: u64, name: &str) -> Option<Character> { self.0.insert(id, Character { id, name: name.to_owned() }) } /// Remove a character from the character map, returning the removed value /// if it was previously present. pub fn remove(&mut self, id: u64) -> Option<Character> { self.0.remove(&id) } /// Remove a character from the character map, returning a tuple of the ID /// and removed value if it was previously present. pub fn remove_entry(&mut self, id: u64) -> Option<(u64, Character)> { self.0.remove_entry(&id) } } impl Default for CharacterMap { fn default() -> Self { let mut map = CharacterMap(HashMap::default()); mock::CHARACTERS.iter() .for_each(|(id, name)| { map.insert(*id, *name); }); map } } impl Deref for CharacterMap { type Target = HashMap<u64, Character>; fn deref(&self) -> &Self::Target { &self.0 } } impl DerefMut for CharacterMap { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } pub struct User<'data> { pub my_characters: Vec<&'data Character>, } pub async fn read_settings_file(_name: &str) -> Settings { Settings::default() } pub async fn write_settings_file(_settings: &Settings) { todo!("write settings to file") }
24.984848
75
0.691935
0a30b4f67190997dd152471c584106ec4714d78f
7,483
// Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 //! This file implements the information needed in the local interpretation context, i.e., the //! context created and updated when interpreting a single function. use std::collections::BTreeMap; use move_binary_format::errors::{Location, PartialVMError, VMError}; use move_core_types::vm_status::StatusCode; use move_model::ast::TempIndex; use crate::concrete::{ ty::{CodeOffset, Type}, value::{LocalSlot, Pointer, TypedValue}, }; #[derive(Clone, Debug)] pub enum AbortInfo { /// User-specific abort User(u64, Location), /// Internal abort (e.g., integer overflow or resource does not exist in global storage) Internal(StatusCode, Location), } impl AbortInfo { /// Convert the AbortInfo into a VMError pub fn into_err(self) -> VMError { match self { Self::User(status_code, location) => PartialVMError::new(StatusCode::ABORTED) .with_sub_status(status_code) .finish(location), Self::Internal(status_code, location) => { PartialVMError::new(status_code).finish(location) } } } /// Retrieve the status code as a u64 pub fn get_status_code(&self) -> u64 { match self { Self::User(status_code, _) => *status_code, Self::Internal(status_code, _) => *status_code as u64, } } } #[derive(Debug)] pub enum TerminationStatus { /// This function has not terminated, it is running normally None, /// An abort has been triggered and the function is in post-abort state PostAbort(AbortInfo), /// The function terminated successfully with a list of return values Return(Vec<TypedValue>), /// The function terminated with an abort Abort(AbortInfo), } pub struct LocalState { /// slots that holds local variables slots: Vec<LocalSlot>, /// program counter pc: CodeOffset, /// whether we set the PC to branch in the handling of last bytecode pc_branch: bool, /// termination status termination: TerminationStatus, /// mutable parameters that gets destroyed during the execution destroyed_args: BTreeMap<TempIndex, TypedValue>, } impl 
LocalState { pub fn new(slots: Vec<LocalSlot>) -> Self { Self { slots, pc: 0, pc_branch: false, termination: TerminationStatus::None, destroyed_args: BTreeMap::new(), } } /// Get the number of slots available in this state pub fn num_slots(&self) -> usize { self.slots.len() } /// Get the type of the local slot at `index` pub fn get_type(&self, index: TempIndex) -> &Type { self.slots.get(index).unwrap().get_type() } /// Check whether the local slot at `index` holds a value pub fn has_value(&self, index: TempIndex) -> bool { self.slots.get(index).unwrap().has_value() } /// Get the value held in local slot `index`. Panics if the slot does not hold a value pub fn get_value(&self, index: TempIndex) -> TypedValue { self.slots.get(index).unwrap().get_value() } /// Put the value held in local slot `index`. Override if the slot already holds a value pub fn put_value_override(&mut self, index: TempIndex, val: TypedValue) { self.slots.get_mut(index).unwrap().put_value_override(val) } /// Put the value held in local slot `index`. Panics if the slot already holds a value pub fn put_value(&mut self, index: TempIndex, val: TypedValue) { self.slots.get_mut(index).unwrap().put_value(val) } /// Delete the value held in local slot `index`. 
Panics if the slot does not hold a value pub fn del_value(&mut self, index: TempIndex) -> TypedValue { self.slots.get_mut(index).unwrap().del_value() } /// Save a mutable argument that is destroyed pub fn save_destroyed_arg(&mut self, index: TempIndex, val: TypedValue) { let exists = self.destroyed_args.insert(index, val); if cfg!(debug_assertions) { assert!(exists.is_none()); } } /// Load a mutable argument that is destroyed pub fn load_destroyed_arg(&mut self, index: TempIndex) -> TypedValue { self.destroyed_args.remove(&index).unwrap() } /// Get the current PC location (i.e., which bytecode to be executed) pub fn get_pc(&self) -> CodeOffset { self.pc } /// Set the PC location to jump to on next execution pub fn set_pc(&mut self, pc: CodeOffset) { if cfg!(debug_assertions) { assert!(!self.pc_branch); } self.pc = pc; self.pc_branch = true; } /// Decide the PC location for next bytecode instruction pub fn ready_pc_for_next_instruction(&mut self) { if self.pc_branch { self.pc_branch = false } else { self.pc += 1; } } /// Collect the pointers of the underlying values in the local slots pub fn collect_pointers(&self) -> BTreeMap<TempIndex, &Pointer> { self.slots .iter() .enumerate() .filter_map(|(idx, slot)| slot.get_content().map(|(_, ptr)| (idx, ptr))) .collect() } /// Mark that an abort is raised and we will be executing the abort action next pub fn transit_to_post_abort(&mut self, info: AbortInfo) { if cfg!(debug_assertions) { assert!(matches!(self.termination, TerminationStatus::None)); } self.termination = TerminationStatus::PostAbort(info); } /// Check whether execution of the current function is finished or not pub fn is_terminated(&self) -> bool { matches!( self.termination, TerminationStatus::Return(_) | TerminationStatus::Abort(_) ) } /// Check whether we are executing in a post-abort status pub fn is_post_abort(&self) -> bool { matches!(self.termination, TerminationStatus::PostAbort(_)) } /// Mark that the current function terminated with an abort pub fn 
terminate_with_abort(&mut self, abort_info: AbortInfo) { if cfg!(debug_assertions) { assert!(!self.is_terminated()); } let info = match &self.termination { TerminationStatus::None => { // no prior aborts has been seen, and no abort action attached abort_info } TerminationStatus::PostAbort(original_info) => { // re-abort, make sure we are aborting with the same status code if cfg!(debug_assertions) { assert_eq!( original_info.get_status_code(), abort_info.get_status_code() ); } original_info.clone() } _ => unreachable!(), }; self.termination = TerminationStatus::Abort(info); } /// Mark that the current function terminated with return values pub fn terminate_with_return(&mut self, ret_vals: Vec<TypedValue>) { if cfg!(debug_assertions) { assert!(!self.is_terminated()); } self.termination = TerminationStatus::Return(ret_vals); } /// Consume and reduce the state into termination status pub fn into_termination_status(self) -> TerminationStatus { self.termination } }
35.464455
94
0.618869
dd9244b41aa3239fa6fead580efee2a7f62d9978
5,465
//! The `config` command: inspect and mutate nushell's configuration file.

use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::data::config;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{Primitive, ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value};
use nu_source::Tagged;
use std::path::PathBuf;

pub struct Config;

// One optional field per flag of the `config` command; deserialized from the
// evaluated arguments by `args.process` in `run` below.
#[derive(Deserialize)]
pub struct ConfigArgs {
    load: Option<Tagged<PathBuf>>,
    set: Option<(Tagged<String>, Value)>,
    set_into: Option<Tagged<String>>,
    get: Option<Tagged<String>>,
    clear: Tagged<bool>,
    remove: Option<Tagged<String>>,
    path: Tagged<bool>,
}

impl WholeStreamCommand for Config {
    fn name(&self) -> &str {
        "config"
    }

    fn signature(&self) -> Signature {
        Signature::build("config")
            .named(
                "load",
                SyntaxShape::Path,
                "load the config from the path give",
            )
            .named(
                "set",
                SyntaxShape::Any,
                "set a value in the config, eg) --set [key value]",
            )
            .named(
                "set_into",
                SyntaxShape::Member,
                "sets a variable from values in the pipeline",
            )
            .named("get", SyntaxShape::Any, "get a value from the config")
            .named("remove", SyntaxShape::Any, "remove a value from the config")
            .switch("clear", "clear the config")
            .switch("path", "return the path to the config file")
    }

    fn usage(&self) -> &str {
        "Configuration management."
    }

    fn run(
        &self,
        args: CommandArgs,
        registry: &CommandRegistry,
    ) -> Result<OutputStream, ShellError> {
        // Deserialize the evaluated args into `ConfigArgs` and dispatch to `config`.
        args.process(registry, config)?.run()
    }
}

// Implementation of the command. The flags are mutually exclusive in practice;
// they are checked in the if/else-if order below, so the first one present wins.
pub fn config(
    ConfigArgs {
        load,
        set,
        set_into,
        get,
        clear,
        remove,
        path,
    }: ConfigArgs,
    RunnableContext { name, input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
    let name_span = name.clone();

    // `async_stream!` builds a generator; each `yield` emits one output value
    // into the command's output stream.
    let stream = async_stream! {
        // --load: read an alternate config file instead of the default one.
        let configuration = if let Some(supplied) = load {
            Some(supplied.item().clone())
        } else {
            None
        };

        let mut result = crate::data::config::read(name_span, &configuration)?;

        if let Some(v) = get {
            // --get: look up a single key and stream its value(s).
            let key = v.to_string();
            let value = result
                .get(&key)
                .ok_or_else(|| ShellError::labeled_error("Missing key in config", "key", v.tag()))?;

            match value {
                Value {
                    value: UntaggedValue::Table(list),
                    ..
                } => {
                    // A table is streamed row by row rather than as one value.
                    for l in list {
                        let value = l.clone();
                        yield ReturnSuccess::value(l.clone());
                    }
                }
                x => yield ReturnSuccess::value(x.clone()),
            }
        } else if let Some((key, value)) = set {
            // --set: store a literal value, persist, and echo the new config.
            result.insert(key.to_string(), value.clone());

            config::write(&result, &configuration)?;

            yield ReturnSuccess::value(UntaggedValue::Row(result.into()).into_value(&value.tag));
        } else if let Some(v) = set_into {
            // --set_into: store value(s) taken from the pipeline input.
            let rows: Vec<Value> = input.values.collect().await;
            let key = v.to_string();

            if rows.len() == 0 {
                yield Err(ShellError::labeled_error("No values given for set_into", "needs value(s) from pipeline", v.tag()));
            } else if rows.len() == 1 {
                // A single value
                let value = &rows[0];
                result.insert(key.to_string(), value.clone());

                config::write(&result, &configuration)?;

                yield ReturnSuccess::value(UntaggedValue::Row(result.into()).into_value(name));
            } else {
                // Take in the pipeline as a table
                let value = UntaggedValue::Table(rows).into_value(name.clone());

                result.insert(key.to_string(), value.clone());

                config::write(&result, &configuration)?;

                yield ReturnSuccess::value(UntaggedValue::Row(result.into()).into_value(name));
            }
        } else if let Tagged { item: true, tag } = clear {
            // --clear: wipe the whole config, persist, echo, and stop early.
            result.clear();

            config::write(&result, &configuration)?;

            yield ReturnSuccess::value(UntaggedValue::Row(result.into()).into_value(tag));

            return;
        } else if let Tagged { item: true, tag } = path {
            // --path: report where the (possibly overridden) config file lives.
            let path = config::default_path_for(&configuration)?;

            yield ReturnSuccess::value(UntaggedValue::Primitive(Primitive::Path(path)).into_value(tag));
        } else if let Some(v) = remove {
            // --remove: delete one key if present, then echo the resulting config.
            let key = v.to_string();

            if result.contains_key(&key) {
                result.swap_remove(&key);
                config::write(&result, &configuration)?
            } else {
                yield Err(ShellError::labeled_error(
                    "Key does not exist in config",
                    "key",
                    v.tag(),
                ));
            }

            yield ReturnSuccess::value(UntaggedValue::Row(result.into()).into_value(v.tag()));
        } else {
            // No flags: just print the current config.
            yield ReturnSuccess::value(UntaggedValue::Row(result.into()).into_value(name));
        }
    };

    Ok(stream.to_output_stream())
}
31.051136
126
0.516926
eb66380694b7c6c100f04f3930b6252043d3d3b6
78
// Requires the unstable `test` feature for `Bencher`.
use test::Bencher;

use digest::ripemd::Ripemd160;

// Instantiate the shared digest benchmark for RIPEMD-160.
// NOTE(review): `bench_digest!` is presumably pulled in via `#[macro_use]`
// elsewhere in this benchmark crate — confirm against the crate root.
bench_digest!(Ripemd160);
13
30
0.75641
567b48e09b824a012d5022b0013f5b460db14c3e
5,392
//! A basic example showing the request components

use futures::prelude::*;
use std::pin::Pin;
use std::time::{Duration, Instant};

use gotham::hyper::{Body, StatusCode};

use gotham::handler::{HandlerError, HandlerResult, IntoResponse};
use gotham::helpers::http::response::create_response;
use gotham::router::builder::DefineSingleRoute;
use gotham::router::builder::{build_simple_router, DrawRoutes};
use gotham::router::Router;
use gotham::state::{FromState, State};
use gotham_derive::{StateData, StaticResponseExtender};
use serde_derive::Deserialize;
use tokio::time::delay_until;

// Boxed, pinned future resolving to the response body bytes; `Send` so the
// runtime may move it between threads.
type SleepFuture = Pin<Box<dyn Future<Output = Vec<u8>> + Send>>;

// Extracts the `seconds` parameter from the request's query string.
#[derive(Deserialize, StateData, StaticResponseExtender)]
struct QueryStringExtractor {
    seconds: u64,
}

/// Sneaky hack to make tests take less time. Nothing to see here ;-).
#[cfg(not(test))]
fn get_duration(seconds: u64) -> Duration {
    Duration::from_secs(seconds)
}
#[cfg(test)]
fn get_duration(seconds: u64) -> Duration {
    Duration::from_millis(seconds)
}

/// All this function does is return a future that resolves after a number of
/// seconds, with a Vec<u8> that tells you how long it slept for.
///
/// Note that it does not block the thread from handling other requests, because
/// it returns a `Future`, which will be managed by the tokio reactor, and
/// called back once the timeout has expired.
///
/// Vec<u8> is chosen because it is one of the things that you need to resolve
/// a HandlerFuture and respond to a request.
///
/// Most things that you call to access remote services (e.g databases and
/// web apis) can be coerced into returning futures that yield useful data,
/// so the patterns that you learn in this example should be applicable to
/// real world problems.
fn sleep(seconds: u64) -> SleepFuture {
    let when = Instant::now() + get_duration(seconds);
    let delay = delay_until(when.into()).map(move |_| {
        format!("slept for {} seconds\n", seconds)
            .as_bytes()
            .to_vec()
    });

    delay.boxed()
}

/// This handler sleeps for the requested number of seconds, using the `sleep()`
/// helper method, above.
async fn sleep_handler(state: &mut State) -> Result<impl IntoResponse, HandlerError> {
    let seconds = QueryStringExtractor::borrow_from(state).seconds;
    println!("sleep for {} seconds once: starting", seconds);
    // Here, we call the sleep function. Note that this step doesn't block:
    // it just sets up the timer so that we can use it later.
    let sleep_future = sleep(seconds);

    // Here is where the serious sleeping happens. We yield execution of
    // this block until sleep_future is resolved.
    // The "slept for x seconds" value is stored in data.
    let data = sleep_future.await;

    // We return a `Result<impl IntoResponse, HandlerError>` directly
    // where the success type can be anything implementing `IntoResponse`
    // (including a `Response<Body>`)
    println!("sleep for {} seconds once: finished", seconds);
    Ok((StatusCode::OK, mime::TEXT_PLAIN, data))
}

/// It calls sleep(1) as many times as needed to make the requested duration.
///
/// Notice how much easier it is to read than the version in
/// `simple_async_handlers`.
async fn loop_handler(mut state: State) -> HandlerResult {
    let seconds = QueryStringExtractor::take_from(&mut state).seconds;
    println!("sleep for one second {} times: starting", seconds);

    // The code within this block reads exactly like synchronous code.
    // This is the style that you should aim to write your business
    // logic in.
    let mut accumulator = Vec::new();
    for _ in 0..seconds {
        let body = sleep(1).await;
        accumulator.extend(body)
    }

    let res = create_response(
        &state,
        StatusCode::OK,
        mime::TEXT_PLAIN,
        Body::from(accumulator),
    );
    println!("sleep for one second {} times: finished", seconds);
    Ok((state, res))
}

/// Create a `Router`.
fn router() -> Router { build_simple_router(|route| { route .get("/sleep") .with_query_string_extractor::<QueryStringExtractor>() .to_async_borrowing(sleep_handler); route .get("/loop") .with_query_string_extractor::<QueryStringExtractor>() .to_async(loop_handler); }) } /// Start a server and use a `Router` to dispatch requests. pub fn main() { let addr = "127.0.0.1:7878"; println!("Listening for requests at http://{}", addr); gotham::start(addr, router()) } #[cfg(test)] mod tests { use gotham::test::TestServer; use super::*; fn assert_returns_ok(url_str: &str, expected_response: &str) { let test_server = TestServer::new(router()).unwrap(); let response = test_server.client().get(url_str).perform().unwrap(); assert_eq!(response.status(), StatusCode::OK); assert_eq!( &String::from_utf8(response.read_body().unwrap()).unwrap(), expected_response ); } #[test] fn sleep_says_how_long_it_slept_for() { assert_returns_ok("http://localhost/sleep?seconds=2", "slept for 2 seconds\n"); } #[test] fn loop_breaks_the_time_into_one_second_sleeps() { assert_returns_ok( "http://localhost/loop?seconds=2", "slept for 1 seconds\nslept for 1 seconds\n", ); } }
33.7
87
0.668212
90bf2f4347df874a64e82ce2bfb273e6678ec987
1,627
trait State { fn request_review(self: Box<Self>) -> Box<dyn State>; fn approve(self: Box<Self>) -> Box<dyn State>; fn content<'a>(&self, post: &'a Post) -> &'a str { "" } } struct Draft {} impl State for Draft { fn request_review(self: Box<Self>) -> Box<dyn State> { Box::new(PendingReview {}) } fn approve(self: Box<Self>) -> Box<dyn State> { self } } struct PendingReview {} impl State for PendingReview { fn request_review(self: Box<Self>) -> Box<dyn State> { self } fn approve(self: Box<Self>) -> Box<dyn State> { Box::new(Published {}) } } struct Published {} impl State for Published { fn request_review(self: Box<Self>) -> Box<dyn State> { self } fn approve(self: Box<Self>) -> Box<dyn State> { self } fn content<'a>(&self, post: &'a Post) -> &'a str { &post.content } } pub struct Post { state: Option<Box<dyn State>>, content: String, } impl Post { pub fn new() -> Post { Post { state: Some(Box::new(Draft {})), content: String::new(), } } pub fn add_text(&mut self, text: &str) { self.content.push_str(text); } pub fn content(&self) -> &str { self.state.as_ref().unwrap().content(self) } pub fn request_review(&mut self) { if let Some(s) = self.state.take() { self.state = Some(s.request_review()); } } pub fn approve(&mut self) { if let Some(s) = self.state.take() { self.state = Some(s.approve()) } } }
19.841463
58
0.527351
f408c1c8bb0d65a5debaec68ba0c79b082f2a826
2,037
// Copyright 2019 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use { crate::model::*, failure::{Error, Fail}, }; /// Errors produced by `Model`. #[derive(Debug, Fail)] pub enum ModelError { #[fail(display = "component instance not found with moniker {}", moniker)] InstanceNotFound { moniker: AbsoluteMoniker }, #[fail(display = "component declaration invalid")] ComponentInvalid, #[fail(display = "component manifest invalid")] ManifestInvalid { uri: String, #[fail(cause)] err: Error, }, #[fail(display = "namespace creation failed: {}", err)] NamespaceCreationFailed { #[fail(cause)] err: Error, }, #[fail(display = "resolver error")] ResolverError { #[fail(cause)] err: ResolverError, }, #[fail(display = "runner error")] RunnerError { #[fail(cause)] err: RunnerError, }, #[fail(display = "capability discovery error")] CapabilityDiscoveryError { #[fail(cause)] err: Error, }, } impl ModelError { pub fn instance_not_found(moniker: AbsoluteMoniker) -> ModelError { ModelError::InstanceNotFound { moniker } } pub fn namespace_creation_failed(err: impl Into<Error>) -> ModelError { ModelError::NamespaceCreationFailed { err: err.into() } } pub fn manifest_invalid(uri: impl Into<String>, err: impl Into<Error>) -> ModelError { ModelError::ManifestInvalid { uri: uri.into(), err: err.into() } } pub fn capability_discovery_error(err: impl Into<Error>) -> ModelError { ModelError::CapabilityDiscoveryError { err: err.into() } } } impl From<ResolverError> for ModelError { fn from(err: ResolverError) -> Self { ModelError::ResolverError { err } } } impl From<RunnerError> for ModelError { fn from(err: RunnerError) -> Self { ModelError::RunnerError { err } } }
27.527027
90
0.626411
9bc28e66212a5a80ff876668121bbf2d2699eefd
8,304
//! //! Base IPP definitions and tags //! use std::fmt; use enum_primitive_derive::Primitive; /// IPP protocol version #[derive(Debug, Copy, Clone, PartialEq)] pub struct IppVersion(pub u16); impl IppVersion { pub fn v1_0() -> Self { IppVersion(0x0100) } pub fn v1_1() -> Self { IppVersion(0x0101) } pub fn v2_0() -> Self { IppVersion(0x0200) } pub fn v2_1() -> Self { IppVersion(0x0201) } pub fn v2_2() -> Self { IppVersion(0x0202) } } /// IPP operation constants #[derive(Primitive, Debug, Copy, Clone, PartialEq)] #[allow(clippy::upper_case_acronyms)] pub enum Operation { PrintJob = 0x0002, PrintUri = 0x0003, ValidateJob = 0x0004, CreateJob = 0x0005, SendDocument = 0x0006, SendUri = 0x0007, CancelJob = 0x0008, GetJobAttributes = 0x0009, GetJobs = 0x000A, GetPrinterAttributes = 0x000B, HoldJob = 0x000C, ReleaseJob = 0x000D, RestartJob = 0x000E, PausePrinter = 0x0010, ResumePrinter = 0x0011, PurgeJobs = 0x0012, CupsGetDefault = 0x4001, CupsGetPrinters = 0x4002, CupsAddModifyPrinter = 0x4003, CupsDeletePrinter = 0x4004, CupsGetClasses = 0x4005, CupsAddModifyClass = 0x4006, CupsDeleteClass = 0x4007, CupsAcceptJobs = 0x4008, CupsRejectJobs = 0x4009, CupsSetDefault = 0x400A, CupsGetDevices = 0x400B, CupsGetPPDs = 0x400C, CupsMoveJob = 0x400D, CupsAuthenticateJob = 0x400E, CupsGetPPD = 0x400F, CupsGetDocument = 0x4027, CupsCreateLocalPrinter = 0x4028, } /// printer-state constants #[derive(Primitive, Debug, Copy, Clone, PartialEq)] pub enum PrinterState { Idle = 3, Processing = 4, Stopped = 5, } /// paper orientation constants #[derive(Primitive, Debug, Copy, Clone, PartialEq)] pub enum Orientation { Portrait = 3, Landscape = 4, ReverseLandscape = 5, ReversePortrait = 6, } /// print-quality constants #[derive(Primitive, Debug, Copy, Clone, PartialEq)] pub enum PrintQuality { Draft = 3, Normal = 4, High = 5, } /// finishings constants #[derive(Primitive, Debug, Copy, Clone, PartialEq)] pub enum Finishings { None = 3, Staple = 4, Punch = 5, Cover = 6, Bind = 7, SaddleStitch = 
8, EdgeStitch = 9, } /// job-state constants #[derive(Primitive, Debug, Copy, Clone, PartialEq)] pub enum JobState { Pending = 3, PendingHeld = 4, Processing = 5, ProcessingStopped = 6, Canceled = 7, Aborted = 8, Completed = 9, } /// group delimiter tags #[derive(Primitive, Debug, Copy, Clone, PartialEq, Hash, Eq)] pub enum DelimiterTag { OperationAttributes = 0x01, JobAttributes = 0x02, EndOfAttributes = 0x03, PrinterAttributes = 0x04, UnsupportedAttributes = 0x05, } /// IPP value tags #[derive(Primitive, Debug, Copy, Clone, PartialEq)] pub enum ValueTag { Unsupported = 0x10, Unknown = 0x12, NoValue = 0x13, Integer = 0x21, Boolean = 0x22, Enum = 0x23, OctetStringUnspecified = 0x30, DateTime = 0x31, Resolution = 0x32, RangeOfInteger = 0x33, BegCollection = 0x34, TextWithLanguage = 0x35, NameWithLanguage = 0x36, EndCollection = 0x37, TextWithoutLanguage = 0x41, NameWithoutLanguage = 0x42, Keyword = 0x44, Uri = 0x45, UriScheme = 0x46, Charset = 0x47, NaturalLanguage = 0x48, MimeMediaType = 0x49, MemberAttrName = 0x4a, } /// IPP status codes #[derive(Primitive, Debug, Copy, Clone, PartialEq)] pub enum StatusCode { SuccessfulOk = 0x0000, SuccessfulOkIgnoredOrSubstitutedAttributes = 0x0001, SuccessfulOkConflictingAttributes = 0x0002, ClientErrorBadRequest = 0x0400, ClientErrorForbidden = 0x0401, ClientErrorNotAuthenticated = 0x0402, ClientErrorNotAuthorized = 0x0403, ClientErrorNotPossible = 0x0404, ClientErrorTimeout = 0x0405, ClientErrorNotFound = 0x0406, ClientErrorGone = 0x0407, ClientErrorRequestEntityTooLong = 0x0408, ClientErrorRequestValueTooLong = 0x0409, ClientErrorDocumentFormatNotSupported = 0x040A, ClientErrorAttributesOrValuesNotSupported = 0x040B, ClientErrorUriSchemeNotSupported = 0x040C, ClientErrorCharsetNotSupported = 0x040D, ClientErrorConflictingAttributes = 0x040E, ClientErrorCompressionNotSupported = 0x040F, ClientErrorCompressionError = 0x0410, ClientErrorDocumentFormatError = 0x0411, ClientErrorDocumentAccessError = 0x0412, 
ServerErrorInternalError = 0x0500, ServerErrorOperationNotSupported = 0x0501, ServerErrorServiceUnavailable = 0x0502, ServerErrorVersionNotSupported = 0x0503, ServerErrorDeviceError = 0x0504, ServerErrorTemporaryError = 0x0505, ServerErrorNotAcceptingJobs = 0x0506, ServerErrorBusy = 0x0507, ServerErrorJobCanceled = 0x0508, ServerErrorMultipleDocumentJobsNotSupported = 0x0509, UnknownStatusCode = 0xffff, } impl StatusCode { pub fn is_success(&self) -> bool { matches!( self, StatusCode::SuccessfulOk | StatusCode::SuccessfulOkIgnoredOrSubstitutedAttributes | StatusCode::SuccessfulOkConflictingAttributes ) } } impl fmt::Display for StatusCode { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { StatusCode::SuccessfulOk => write!(f, "No error"), StatusCode::SuccessfulOkIgnoredOrSubstitutedAttributes => write!(f, "Ignored or substituted attributes"), StatusCode::SuccessfulOkConflictingAttributes => write!(f, "Conflicting attributes"), StatusCode::ClientErrorBadRequest => write!(f, "Bad request"), StatusCode::ClientErrorForbidden => write!(f, "Forbidden"), StatusCode::ClientErrorNotAuthenticated => write!(f, "Not authenticated"), StatusCode::ClientErrorNotAuthorized => write!(f, "Not authorized"), StatusCode::ClientErrorNotPossible => write!(f, "Not possible"), StatusCode::ClientErrorTimeout => write!(f, "Timeout"), StatusCode::ClientErrorNotFound => write!(f, "Not found"), StatusCode::ClientErrorGone => write!(f, "Gone"), StatusCode::ClientErrorRequestEntityTooLong => write!(f, "Entity too long"), StatusCode::ClientErrorRequestValueTooLong => write!(f, "Request value too long"), StatusCode::ClientErrorDocumentFormatNotSupported => write!(f, "Document format not supported"), StatusCode::ClientErrorAttributesOrValuesNotSupported => write!(f, "Attributes or values not supported"), StatusCode::ClientErrorUriSchemeNotSupported => write!(f, "Uri scheme not supported"), StatusCode::ClientErrorCharsetNotSupported => write!(f, "Charset not supported"), 
StatusCode::ClientErrorConflictingAttributes => write!(f, "Conflicting attributes"), StatusCode::ClientErrorCompressionNotSupported => write!(f, "Compression not supported"), StatusCode::ClientErrorCompressionError => write!(f, "Compression error"), StatusCode::ClientErrorDocumentFormatError => write!(f, "Document format error"), StatusCode::ClientErrorDocumentAccessError => write!(f, "Document access error"), StatusCode::ServerErrorInternalError => write!(f, "Internal error"), StatusCode::ServerErrorOperationNotSupported => write!(f, "Operation not supported"), StatusCode::ServerErrorServiceUnavailable => write!(f, "Service unavailable"), StatusCode::ServerErrorVersionNotSupported => write!(f, "Version not supported"), StatusCode::ServerErrorDeviceError => write!(f, "Device error"), StatusCode::ServerErrorTemporaryError => write!(f, "Temporary error"), StatusCode::ServerErrorNotAcceptingJobs => write!(f, "Not accepting jobs"), StatusCode::ServerErrorBusy => write!(f, "Busy"), StatusCode::ServerErrorJobCanceled => write!(f, "Job canceled"), StatusCode::ServerErrorMultipleDocumentJobsNotSupported => { write!(f, "Multiple document jobs not supported") } StatusCode::UnknownStatusCode => write!(f, "Unknown status code"), } } }
33.619433
117
0.672929
723e6ec7386e4641bff82f898973bf3d06a2136a
12,150
//! Module with additional conversion methods for the storage records. //! These methods are only needed for the `block` module, so they're kept in a //! private module. // Built-in deps use std::convert::TryFrom; // External imports // Workspace imports use zksync_types::SignedZkSyncTx; use zksync_types::{ Action, ActionType, Operation, { block::{ExecutedPriorityOp, ExecutedTx}, BlockNumber, PriorityOp, ZkSyncOp, ZkSyncTx, }, }; // Local imports use crate::{ chain::{ block::BlockSchema, operations::records::{ NewExecutedPriorityOperation, NewExecutedTransaction, NewExecutedTxAndPriorityOperation, StoredExecutedPriorityOperation, StoredExecutedTransaction, StoredOperation, }, }, prover::ProverSchema, QueryResult, StorageProcessor, }; impl StoredOperation { pub async fn into_op(self, conn: &mut StorageProcessor<'_>) -> QueryResult<Operation> { let block_number = self.block_number as BlockNumber; let id = Some(self.id); let action = if self.action_type == ActionType::COMMIT.to_string() { Action::Commit } else if self.action_type == ActionType::VERIFY.to_string() { let proof = Box::new(ProverSchema(conn).load_proof(block_number).await?); Action::Verify { proof: proof.expect("No proof for verify action").into(), } } else { unreachable!("Incorrect action type in db"); }; let block = BlockSchema(conn) .get_block(block_number) .await? 
.expect("Block for action does not exist"); Ok(Operation { id, action, block }) } } impl StoredExecutedTransaction { pub fn into_executed_tx(self) -> Result<ExecutedTx, anyhow::Error> { let tx: ZkSyncTx = serde_json::from_value(self.tx).expect("Unparsable ZkSyncTx in db"); let franklin_op: Option<ZkSyncOp> = serde_json::from_value(self.operation).expect("Unparsable ZkSyncOp in db"); let eth_sign_data = self .eth_sign_data .map(|value| serde_json::from_value(value).expect("Unparsable EthSignData")); Ok(ExecutedTx { signed_tx: SignedZkSyncTx { tx, eth_sign_data }, success: self.success, op: franklin_op, fail_reason: self.fail_reason, block_index: self .block_index .map(|val| u32::try_from(val).expect("Invalid block index")), created_at: self.created_at, batch_id: self.batch_id, }) } } impl StoredExecutedPriorityOperation { pub fn into_executed(self) -> ExecutedPriorityOp { let franklin_op: ZkSyncOp = serde_json::from_value(self.operation).expect("Unparsable priority op in db"); ExecutedPriorityOp { priority_op: PriorityOp { serial_id: self.priority_op_serialid as u64, data: franklin_op .try_get_priority_op() .expect("ZkSyncOp should have priority op"), deadline_block: self.deadline_block as u64, eth_hash: self.eth_hash, eth_block: self.eth_block as u64, }, op: franklin_op, block_index: self.block_index as u32, created_at: self.created_at, } } } impl NewExecutedPriorityOperation { pub fn prepare_stored_priority_op( exec_prior_op: ExecutedPriorityOp, block: BlockNumber, ) -> Self { let operation = serde_json::to_value(&exec_prior_op.op).unwrap(); let (from_account, to_account) = match exec_prior_op.op { ZkSyncOp::Deposit(deposit) => (deposit.priority_op.from, deposit.priority_op.to), ZkSyncOp::FullExit(full_exit) => { let eth_address = full_exit.priority_op.eth_address; (eth_address, eth_address) } _ => panic!( "Incorrect type of priority op: {:?}", exec_prior_op.priority_op ), }; Self { block_number: i64::from(block), block_index: exec_prior_op.block_index as i32, 
operation, from_account: from_account.as_ref().to_vec(), to_account: to_account.as_ref().to_vec(), priority_op_serialid: exec_prior_op.priority_op.serial_id as i64, deadline_block: exec_prior_op.priority_op.deadline_block as i64, eth_hash: exec_prior_op.priority_op.eth_hash, eth_block: exec_prior_op.priority_op.eth_block as i64, created_at: exec_prior_op.created_at, } } } impl NewExecutedTransaction { pub fn prepare_stored_tx(exec_tx: ExecutedTx, block: BlockNumber) -> Self { fn cut_prefix(input: &str) -> String { if let Some(input) = input.strip_prefix("0x") { input.into() } else if let Some(input) = input.strip_prefix("sync:") { input.into() } else { input.into() } } let tx = serde_json::to_value(&exec_tx.signed_tx.tx).expect("Cannot serialize tx"); let operation = serde_json::to_value(&exec_tx.op).expect("Cannot serialize operation"); let (from_account_hex, to_account_hex): (String, Option<String>) = match exec_tx.signed_tx.tx { ZkSyncTx::Withdraw(_) | ZkSyncTx::Transfer(_) => ( serde_json::from_value(tx["from"].clone()).unwrap(), serde_json::from_value(tx["to"].clone()).unwrap(), ), ZkSyncTx::ChangePubKey(_) => ( serde_json::from_value(tx["account"].clone()).unwrap(), serde_json::from_value(tx["newPkHash"].clone()).unwrap(), ), ZkSyncTx::Close(_) => ( serde_json::from_value(tx["account"].clone()).unwrap(), serde_json::from_value(tx["account"].clone()).unwrap(), ), ZkSyncTx::ForcedExit(_) => ( serde_json::from_value(tx["target"].clone()).unwrap(), serde_json::from_value(tx["target"].clone()).unwrap(), ), }; let from_account: Vec<u8> = hex::decode(cut_prefix(&from_account_hex)).unwrap(); let to_account: Option<Vec<u8>> = to_account_hex.map(|value| hex::decode(cut_prefix(&value)).unwrap()); let eth_sign_data = exec_tx.signed_tx.eth_sign_data.as_ref().map(|sign_data| { serde_json::to_value(sign_data).expect("Failed to encode EthSignData") }); Self { block_number: i64::from(block), tx_hash: exec_tx.signed_tx.hash().as_ref().to_vec(), from_account, to_account, tx, 
operation, success: exec_tx.success, fail_reason: exec_tx.fail_reason, block_index: exec_tx.block_index.map(|idx| idx as i32), primary_account_address: exec_tx.signed_tx.account().as_bytes().to_vec(), nonce: exec_tx.signed_tx.nonce() as i64, created_at: exec_tx.created_at, eth_sign_data, batch_id: exec_tx.batch_id, } } } impl NewExecutedTxAndPriorityOperation { pub fn prepare_stored_data_from_tx(exec_tx: ExecutedTx, block: BlockNumber) -> Self { fn cut_prefix(input: &str) -> String { if let Some(input) = input.strip_prefix("0x") { input.into() } else if let Some(input) = input.strip_prefix("sync:") { input.into() } else { input.into() } } let tx = serde_json::to_value(&exec_tx.signed_tx.tx).expect("Cannot serialize tx"); let operation = serde_json::to_value(&exec_tx.op).expect("Cannot serialize operation"); let (from_account_hex, to_account_hex): (String, Option<String>) = match exec_tx.signed_tx.tx { ZkSyncTx::Withdraw(_) | ZkSyncTx::Transfer(_) => ( serde_json::from_value(tx["from"].clone()).unwrap(), serde_json::from_value(tx["to"].clone()).unwrap(), ), ZkSyncTx::ChangePubKey(_) => ( serde_json::from_value(tx["account"].clone()).unwrap(), serde_json::from_value(tx["newPkHash"].clone()).unwrap(), ), ZkSyncTx::Close(_) => ( serde_json::from_value(tx["account"].clone()).unwrap(), serde_json::from_value(tx["account"].clone()).unwrap(), ), ZkSyncTx::ForcedExit(_) => ( serde_json::from_value(tx["target"].clone()).unwrap(), serde_json::from_value(tx["target"].clone()).unwrap(), ), }; let from_account: Vec<u8> = hex::decode(cut_prefix(&from_account_hex)).unwrap(); let to_account: Option<Vec<u8>> = to_account_hex.map(|value| hex::decode(cut_prefix(&value)).unwrap()); let eth_sign_data = exec_tx.signed_tx.eth_sign_data.as_ref().map(|sign_data| { serde_json::to_value(sign_data).expect("Failed to encode EthSignData") }); Self { //layer 1 eth_block: 0, priority_op_serialid: 0, deadline_block: 0, //layer2 tx, operation, success: exec_tx.success, fail_reason: 
exec_tx.fail_reason, primary_account_address: exec_tx.signed_tx.account().as_bytes().to_vec(), nonce: exec_tx.signed_tx.nonce() as i64, eth_sign_data, batch_id: exec_tx.batch_id, //share created_at: exec_tx.created_at, block_index: exec_tx.block_index.map(|idx| idx as i32), block_number: i64::from(block), eth_or_tx_hash: exec_tx.signed_tx.hash().as_ref().to_vec(), from_account, to_account, op_type: (|zop: Option<&ZkSyncOp>| -> i64 { if let Some(op) = zop { op.get_type() } else { 0 } })(exec_tx.op.as_ref()), } } pub fn prepare_stored_data_from_priority_op( exec_prior_op: ExecutedPriorityOp, block: BlockNumber, ) -> Self { let exec_prior_op2 = exec_prior_op.clone(); let operation = serde_json::to_value(&exec_prior_op.op).unwrap(); let (from_account, to_account) = match exec_prior_op.op { ZkSyncOp::Deposit(deposit) => (deposit.priority_op.from, deposit.priority_op.to), ZkSyncOp::FullExit(full_exit) => { let eth_address = full_exit.priority_op.eth_address; (eth_address, eth_address) } _ => panic!( "Incorrect type of priority op: {:?}", exec_prior_op.priority_op ), }; Self { //layer1 eth_block: exec_prior_op.priority_op.eth_block as i64, priority_op_serialid: exec_prior_op.priority_op.serial_id as i64, deadline_block: exec_prior_op.priority_op.deadline_block as i64, //layer2 operation, tx: Default::default(), success: false, fail_reason: None, primary_account_address: vec![], nonce: 0, eth_sign_data: None, batch_id: None, eth_or_tx_hash: exec_prior_op.priority_op.eth_hash, created_at: exec_prior_op.created_at, from_account: from_account.as_ref().to_vec(), to_account: Some(to_account.as_ref().to_vec()), block_number: i64::from(block), block_index: Some(exec_prior_op.block_index as i32), op_type: exec_prior_op2.op.get_type(), } } }
38.328076
95
0.566914
1d39b17eb09d16f6e23254f90e088406a1bdbaa2
3,626
// Copyright 2020-2021 The FuseQuery Authors. // // SPDX-License-Identifier: Apache-2.0. use std::fmt; use crate::datablocks::DataBlock; use crate::datavalues; use crate::datavalues::{ DataColumnarValue, DataSchema, DataType, DataValue, DataValueComparisonOperator, }; use crate::error::{FuseQueryError, FuseQueryResult}; use crate::functions::comparisons::{ ComparisonEqFunction, ComparisonGtEqFunction, ComparisonGtFunction, ComparisonLtEqFunction, ComparisonLtFunction, ComparisonNotEqFunction, }; use crate::functions::{FactoryFuncRef, IFunction}; #[derive(Clone)] pub struct ComparisonFunction { depth: usize, op: DataValueComparisonOperator, left: Box<dyn IFunction>, right: Box<dyn IFunction>, saved: Option<DataColumnarValue>, } impl ComparisonFunction { pub fn register(map: FactoryFuncRef) -> FuseQueryResult<()> { let mut map = map.as_ref().lock()?; map.insert("=", ComparisonEqFunction::try_create_func); map.insert("<", ComparisonLtFunction::try_create_func); map.insert(">", ComparisonGtFunction::try_create_func); map.insert("<=", ComparisonLtEqFunction::try_create_func); map.insert(">=", ComparisonGtEqFunction::try_create_func); map.insert("!=", ComparisonNotEqFunction::try_create_func); map.insert("<>", ComparisonNotEqFunction::try_create_func); Ok(()) } pub fn try_create_func( op: DataValueComparisonOperator, args: &[Box<dyn IFunction>], ) -> FuseQueryResult<Box<dyn IFunction>> { if args.len() != 2 { return Err(FuseQueryError::build_internal_error(format!( "Comparison function {} args length must be 2", op ))); } Ok(Box::new(ComparisonFunction { depth: 0, op, left: args[0].clone(), right: args[1].clone(), saved: None, })) } } impl IFunction for ComparisonFunction { fn return_type(&self, _input_schema: &DataSchema) -> FuseQueryResult<DataType> { Ok(DataType::Boolean) } fn nullable(&self, _input_schema: &DataSchema) -> FuseQueryResult<bool> { Ok(false) } fn eval(&self, block: &DataBlock) -> FuseQueryResult<DataColumnarValue> { Ok(DataColumnarValue::Array( 
datavalues::data_array_comparison_op( self.op.clone(), &self.left.eval(block)?, &self.right.eval(block)?, )?, )) } fn set_depth(&mut self, depth: usize) { self.depth = depth; } fn accumulate(&mut self, block: &DataBlock) -> FuseQueryResult<()> { self.left.accumulate(block)?; self.right.accumulate(block) } fn accumulate_result(&self) -> FuseQueryResult<Vec<DataValue>> { Err(FuseQueryError::build_internal_error(format!( "Unsupported accumulate_result operation for function {}", self.op ))) } fn merge(&mut self, _states: &[DataValue]) -> FuseQueryResult<()> { Err(FuseQueryError::build_internal_error(format!( "Unsupported merge operation for function {}", self.op ))) } fn merge_result(&self) -> FuseQueryResult<DataValue> { Err(FuseQueryError::build_internal_error(format!( "Unsupported merge_result operation for function {}", self.op ))) } } impl fmt::Display for ComparisonFunction { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{} {} {}", self.left, self.op, self.right) } }
30.991453
95
0.619691
7249d385d71b69e85ab792580c8a4311d5a35fe3
266
//! Tests for TCP and UDP stream and client mod tcp; mod udp; pub use self::tcp::tcp_client_stream_test; pub use self::tcp::tcp_stream_test; pub use self::udp::next_random_socket_test; pub use self::udp::udp_client_stream_test; pub use self::udp::udp_stream_test;
24.181818
43
0.774436
e54b7823b262571282dbd4c47eff997e1e7833ae
25,753
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use std::collections::BTreeSet; use std::iter; use derive_builder::Builder; use crate::api::common::{CommaSeparatedList, NameOrId}; use crate::api::endpoint_prelude::*; #[derive(Debug, Clone)] pub(crate) enum Assignee { Unassigned, Id(u64), Ids(BTreeSet<u64>), } impl Assignee { pub(crate) fn add_params<'a>(&'a self, params: &mut FormParams<'a>) { match self { Assignee::Unassigned => { params.push("assignee_ids", "0"); }, Assignee::Id(id) => { params.push("assignee_id", *id); }, Assignee::Ids(ids) => { params.extend(ids.iter().map(|&id| ("assignee_ids[]", id))); }, } } } /// Parameters for setting the reviewer(s) of a merge request. #[derive(Debug, Clone)] pub(crate) enum Reviewer { /// Unset all reviewers. Unassigned, /// A set of reviewers. Ids(BTreeSet<u64>), } impl Reviewer { pub(crate) fn add_params<'a>(&'a self, params: &mut FormParams<'a>) { match self { Reviewer::Unassigned => { params.push("reviewer_ids", "0"); }, Reviewer::Ids(ids) => { params.extend(ids.iter().map(|&id| ("reviewer_ids[]", id))); }, } } } /// Create a new merge request on project. #[derive(Debug, Builder)] #[builder(setter(strip_option))] pub struct CreateMergeRequest<'a> { /// The project to open the merge requset *from*. #[builder(setter(into))] project: NameOrId<'a>, /// The name of the source branch for the merge request. #[builder(setter(into))] source_branch: Cow<'a, str>, /// The name of the target branch for the merge request. #[builder(setter(into))] target_branch: Cow<'a, str>, /// The title for the merge request. #[builder(setter(into))] title: Cow<'a, str>, /// The assignee of the merge request. 
#[builder(setter(name = "_assignee"), default, private)] assignee: Option<Assignee>, #[builder(setter(name = "_reviewer"), default, private)] reviewer: Option<Reviewer>, /// The description of the merge request. #[builder(setter(into), default)] description: Option<Cow<'a, str>>, /// The ID of the target project for the merge request. #[builder(default)] target_project_id: Option<u64>, /// Labels to add to the merge request. #[builder(setter(name = "_labels"), default, private)] labels: Option<CommaSeparatedList<Cow<'a, str>>>, /// The ID of the milestone to add the merge request to. #[builder(default)] milestone_id: Option<u64>, /// How many approvals are required before merging will be allowed. /// /// Note that this must be more than the project limit (if present). #[builder(default)] approvals_before_merge: Option<u64>, /// Whether to remove the source branch once merged or not. #[builder(default)] remove_source_branch: Option<bool>, /// Whether to allow collaboration with maintainers of the target project or not. #[builder(default)] allow_collaboration: Option<bool>, /// Whether to squash the branch when merging or not. #[builder(default)] squash: Option<bool>, /// Whether to allow collaboration with maintainers of the target project or not. #[deprecated(note = "use `allow_collaboration` instead")] #[builder(default)] allow_maintainer_to_push: Option<bool>, } impl<'a> CreateMergeRequest<'a> { /// Create a builder for the endpoint. pub fn builder() -> CreateMergeRequestBuilder<'a> { CreateMergeRequestBuilder::default() } } impl<'a> CreateMergeRequestBuilder<'a> { /// Filter unassigned merge requests. pub fn unassigned(&mut self) -> &mut Self { self.assignee = Some(Some(Assignee::Unassigned)); self } /// Filter merge requests assigned to a user (by ID). 
pub fn assignee(&mut self, assignee: u64) -> &mut Self { let assignee = match self.assignee.take() { Some(Some(Assignee::Ids(mut set))) => { set.insert(assignee); Assignee::Ids(set) }, Some(Some(Assignee::Id(old_id))) => { let set = [old_id, assignee].iter().copied().collect(); Assignee::Ids(set) }, _ => Assignee::Id(assignee), }; self.assignee = Some(Some(assignee)); self } /// Filter merge requests assigned to a users (by ID). pub fn assignees<I>(&mut self, iter: I) -> &mut Self where I: Iterator<Item = u64>, { let assignee = match self.assignee.take() { Some(Some(Assignee::Ids(mut set))) => { set.extend(iter); Assignee::Ids(set) }, Some(Some(Assignee::Id(old_id))) => { let set = iter.chain(iter::once(old_id)).collect(); Assignee::Ids(set) }, _ => Assignee::Ids(iter.collect()), }; self.assignee = Some(Some(assignee)); self } /// Filter merge requests without a reviewer. pub fn without_reviewer(&mut self) -> &mut Self { self.reviewer = Some(Some(Reviewer::Unassigned)); self } /// Filter merge requests reviewed by a user (by ID). pub fn reviewer(&mut self, reviewer: u64) -> &mut Self { let reviewer = match self.reviewer.take() { Some(Some(Reviewer::Ids(mut set))) => { set.insert(reviewer); Reviewer::Ids(set) }, _ => Reviewer::Ids(iter::once(reviewer).collect()), }; self.reviewer = Some(Some(reviewer)); self } /// Filter merge requests reviewed by users (by ID). pub fn reviewers<I>(&mut self, iter: I) -> &mut Self where I: Iterator<Item = u64>, { let reviewer = match self.reviewer.take() { Some(Some(Reviewer::Ids(mut set))) => { set.extend(iter); Reviewer::Ids(set) }, _ => Reviewer::Ids(iter.collect()), }; self.reviewer = Some(Some(reviewer)); self } /// Add a label. pub fn label<L>(&mut self, label: L) -> &mut Self where L: Into<Cow<'a, str>>, { self.labels .get_or_insert(None) .get_or_insert_with(CommaSeparatedList::new) .push(label.into()); self } /// Add multiple labels. 
pub fn labels<I, L>(&mut self, iter: I) -> &mut Self where I: Iterator<Item = L>, L: Into<Cow<'a, str>>, { self.labels .get_or_insert(None) .get_or_insert_with(CommaSeparatedList::new) .extend(iter.map(Into::into)); self } } impl<'a> Endpoint for CreateMergeRequest<'a> { fn method(&self) -> Method { Method::POST } fn endpoint(&self) -> Cow<'static, str> { format!("projects/{}/merge_requests", self.project).into() } fn body(&self) -> Result<Option<(&'static str, Vec<u8>)>, BodyError> { let mut params = FormParams::default(); params .push("source_branch", self.source_branch.as_ref()) .push("target_branch", self.target_branch.as_ref()) .push("title", self.title.as_ref()) .push_opt("description", self.description.as_ref()) .push_opt("target_project_id", self.target_project_id) .push_opt("milestone_id", self.milestone_id) .push_opt("labels", self.labels.as_ref()) .push_opt("approvals_before_merge", self.approvals_before_merge) .push_opt("remove_source_branch", self.remove_source_branch) .push_opt("allow_collaboration", self.allow_collaboration) .push_opt("squash", self.squash); if let Some(assignee) = self.assignee.as_ref() { assignee.add_params(&mut params); } if let Some(reviewer) = self.reviewer.as_ref() { reviewer.add_params(&mut params); } #[allow(deprecated)] { params.push_opt("allow_maintainer_to_push", self.allow_maintainer_to_push); } params.into_body() } } #[cfg(test)] mod tests { use http::Method; use crate::api::projects::merge_requests::{ CreateMergeRequest, CreateMergeRequestBuilderError, }; use crate::api::{self, Query}; use crate::test::client::{ExpectedUrl, SingleTestClient}; #[test] fn project_source_branch_target_branch_and_title_are_necessary() { let err = CreateMergeRequest::builder().build().unwrap_err(); crate::test::assert_missing_field!(err, CreateMergeRequestBuilderError, "project"); } #[test] fn project_is_necessary() { let err = CreateMergeRequest::builder() .source_branch("source") .target_branch("target") .title("title") .build() 
.unwrap_err(); crate::test::assert_missing_field!(err, CreateMergeRequestBuilderError, "project"); } #[test] fn source_branch_is_necessary() { let err = CreateMergeRequest::builder() .project(1) .target_branch("target") .title("title") .build() .unwrap_err(); crate::test::assert_missing_field!(err, CreateMergeRequestBuilderError, "source_branch"); } #[test] fn target_branch_is_necessary() { let err = CreateMergeRequest::builder() .project(1) .source_branch("source") .title("title") .build() .unwrap_err(); crate::test::assert_missing_field!(err, CreateMergeRequestBuilderError, "target_branch"); } #[test] fn title_is_necessary() { let err = CreateMergeRequest::builder() .project(1) .source_branch("source") .target_branch("target") .build() .unwrap_err(); crate::test::assert_missing_field!(err, CreateMergeRequestBuilderError, "title"); } #[test] fn project_source_branch_target_branch_and_title_are_sufficient() { CreateMergeRequest::builder() .project(1) .source_branch("source") .target_branch("target") .title("title") .build() .unwrap(); } #[test] fn endpoint() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("projects/simple%2Fproject/merge_requests") .content_type("application/x-www-form-urlencoded") .body_str(concat!( "source_branch=source%2Fbranch", "&target_branch=target%2Fbranch", "&title=title", )) .build() .unwrap(); let client = SingleTestClient::new_raw(endpoint, ""); let endpoint = CreateMergeRequest::builder() .project("simple/project") .source_branch("source/branch") .target_branch("target/branch") .title("title") .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } #[test] fn endpoint_unassigned() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("projects/simple%2Fproject/merge_requests") .content_type("application/x-www-form-urlencoded") .body_str(concat!( "source_branch=source%2Fbranch", "&target_branch=target%2Fbranch", "&title=title", "&assignee_ids=0", )) .build() .unwrap(); let client = 
SingleTestClient::new_raw(endpoint, ""); let endpoint = CreateMergeRequest::builder() .project("simple/project") .source_branch("source/branch") .target_branch("target/branch") .title("title") .unassigned() .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } #[test] fn endpoint_assignee() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("projects/simple%2Fproject/merge_requests") .content_type("application/x-www-form-urlencoded") .body_str(concat!( "source_branch=source%2Fbranch", "&target_branch=target%2Fbranch", "&title=title", "&assignee_id=1", )) .build() .unwrap(); let client = SingleTestClient::new_raw(endpoint, ""); let endpoint = CreateMergeRequest::builder() .project("simple/project") .source_branch("source/branch") .target_branch("target/branch") .title("title") .assignee(1) .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } #[test] fn endpoint_assignees() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("projects/simple%2Fproject/merge_requests") .content_type("application/x-www-form-urlencoded") .body_str(concat!( "source_branch=source%2Fbranch", "&target_branch=target%2Fbranch", "&title=title", "&assignee_ids%5B%5D=1", "&assignee_ids%5B%5D=2", )) .build() .unwrap(); let client = SingleTestClient::new_raw(endpoint, ""); let endpoint = CreateMergeRequest::builder() .project("simple/project") .source_branch("source/branch") .target_branch("target/branch") .title("title") .assignee(1) .assignees([1, 2].iter().copied()) .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } #[test] fn endpoint_unreviewed() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("projects/simple%2Fproject/merge_requests") .content_type("application/x-www-form-urlencoded") .body_str(concat!( "source_branch=source%2Fbranch", "&target_branch=target%2Fbranch", "&title=title", "&reviewer_ids=0", )) .build() .unwrap(); let client = SingleTestClient::new_raw(endpoint, ""); 
let endpoint = CreateMergeRequest::builder() .project("simple/project") .source_branch("source/branch") .target_branch("target/branch") .title("title") .without_reviewer() .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } #[test] fn endpoint_reviewer() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("projects/simple%2Fproject/merge_requests") .content_type("application/x-www-form-urlencoded") .body_str(concat!( "source_branch=source%2Fbranch", "&target_branch=target%2Fbranch", "&title=title", "&reviewer_ids%5B%5D=1", )) .build() .unwrap(); let client = SingleTestClient::new_raw(endpoint, ""); let endpoint = CreateMergeRequest::builder() .project("simple/project") .source_branch("source/branch") .target_branch("target/branch") .title("title") .reviewer(1) .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } #[test] fn endpoint_reviewers() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("projects/simple%2Fproject/merge_requests") .content_type("application/x-www-form-urlencoded") .body_str(concat!( "source_branch=source%2Fbranch", "&target_branch=target%2Fbranch", "&title=title", "&reviewer_ids%5B%5D=1", "&reviewer_ids%5B%5D=2", )) .build() .unwrap(); let client = SingleTestClient::new_raw(endpoint, ""); let endpoint = CreateMergeRequest::builder() .project("simple/project") .source_branch("source/branch") .target_branch("target/branch") .title("title") .reviewer(1) .reviewers([1, 2].iter().copied()) .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } #[test] fn endpoint_description() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("projects/simple%2Fproject/merge_requests") .content_type("application/x-www-form-urlencoded") .body_str(concat!( "source_branch=source%2Fbranch", "&target_branch=target%2Fbranch", "&title=title", "&description=description", )) .build() .unwrap(); let client = SingleTestClient::new_raw(endpoint, ""); let endpoint = 
CreateMergeRequest::builder() .project("simple/project") .source_branch("source/branch") .target_branch("target/branch") .title("title") .description("description") .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } #[test] fn endpoint_target_project_id() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("projects/simple%2Fproject/merge_requests") .content_type("application/x-www-form-urlencoded") .body_str(concat!( "source_branch=source%2Fbranch", "&target_branch=target%2Fbranch", "&title=title", "&target_project_id=1", )) .build() .unwrap(); let client = SingleTestClient::new_raw(endpoint, ""); let endpoint = CreateMergeRequest::builder() .project("simple/project") .source_branch("source/branch") .target_branch("target/branch") .title("title") .target_project_id(1) .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } #[test] fn endpoint_labels() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("projects/simple%2Fproject/merge_requests") .content_type("application/x-www-form-urlencoded") .body_str(concat!( "source_branch=source%2Fbranch", "&target_branch=target%2Fbranch", "&title=title", "&labels=label%2Clabel1%2Clabel2", )) .build() .unwrap(); let client = SingleTestClient::new_raw(endpoint, ""); let endpoint = CreateMergeRequest::builder() .project("simple/project") .source_branch("source/branch") .target_branch("target/branch") .title("title") .label("label") .labels(["label1", "label2"].iter().cloned()) .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } #[test] fn endpoint_milestone_id() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("projects/simple%2Fproject/merge_requests") .content_type("application/x-www-form-urlencoded") .body_str(concat!( "source_branch=source%2Fbranch", "&target_branch=target%2Fbranch", "&title=title", "&milestone_id=1", )) .build() .unwrap(); let client = SingleTestClient::new_raw(endpoint, ""); let endpoint = 
CreateMergeRequest::builder() .project("simple/project") .source_branch("source/branch") .target_branch("target/branch") .title("title") .milestone_id(1) .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } #[test] fn endpoint_approvals_before_merge() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("projects/simple%2Fproject/merge_requests") .content_type("application/x-www-form-urlencoded") .body_str(concat!( "source_branch=source%2Fbranch", "&target_branch=target%2Fbranch", "&title=title", "&approvals_before_merge=2", )) .build() .unwrap(); let client = SingleTestClient::new_raw(endpoint, ""); let endpoint = CreateMergeRequest::builder() .project("simple/project") .source_branch("source/branch") .target_branch("target/branch") .title("title") .approvals_before_merge(2) .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } #[test] fn endpoint_remove_source_branch() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("projects/simple%2Fproject/merge_requests") .content_type("application/x-www-form-urlencoded") .body_str(concat!( "source_branch=source%2Fbranch", "&target_branch=target%2Fbranch", "&title=title", "&remove_source_branch=true", )) .build() .unwrap(); let client = SingleTestClient::new_raw(endpoint, ""); let endpoint = CreateMergeRequest::builder() .project("simple/project") .source_branch("source/branch") .target_branch("target/branch") .title("title") .remove_source_branch(true) .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } #[test] fn endpoint_allow_collaboration() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("projects/simple%2Fproject/merge_requests") .content_type("application/x-www-form-urlencoded") .body_str(concat!( "source_branch=source%2Fbranch", "&target_branch=target%2Fbranch", "&title=title", "&allow_collaboration=true", )) .build() .unwrap(); let client = SingleTestClient::new_raw(endpoint, ""); let endpoint = 
CreateMergeRequest::builder() .project("simple/project") .source_branch("source/branch") .target_branch("target/branch") .title("title") .allow_collaboration(true) .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } #[test] fn endpoint_squash() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("projects/simple%2Fproject/merge_requests") .content_type("application/x-www-form-urlencoded") .body_str(concat!( "source_branch=source%2Fbranch", "&target_branch=target%2Fbranch", "&title=title", "&squash=false", )) .build() .unwrap(); let client = SingleTestClient::new_raw(endpoint, ""); let endpoint = CreateMergeRequest::builder() .project("simple/project") .source_branch("source/branch") .target_branch("target/branch") .title("title") .squash(false) .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } #[test] #[allow(deprecated)] fn endpoint_allow_maintainer_to_push() { let endpoint = ExpectedUrl::builder() .method(Method::POST) .endpoint("projects/simple%2Fproject/merge_requests") .content_type("application/x-www-form-urlencoded") .body_str(concat!( "source_branch=source%2Fbranch", "&target_branch=target%2Fbranch", "&title=title", "&allow_maintainer_to_push=true", )) .build() .unwrap(); let client = SingleTestClient::new_raw(endpoint, ""); let endpoint = CreateMergeRequest::builder() .project("simple/project") .source_branch("source/branch") .target_branch("target/branch") .title("title") .allow_maintainer_to_push(true) .build() .unwrap(); api::ignore(endpoint).query(&client).unwrap(); } }
33.229677
97
0.544519
642cab017cad3c87cf9285e825615c4c336dd8ce
12,245
use std::error::Error; use std::sync::Arc; use log::info; use lsp_server::{Connection, Message, Response}; use lsp_types::{notification::*, request::*, *}; use homer_compiler::*; use checker::info::SymbolInfo; type Result<T> = std::result::Result<T, Box<dyn Error + Sync + Send>>; fn main() -> Result<()> { // Set up logging. Because `stdio_transport` gets a lock on stdout and stdin, we must have // our logging only write out to stderr. flexi_logger::Logger::with(flexi_logger::LogSpecification::info()).start()?; info!("starting generic LSP server"); // Create the transport. Includes the stdio (stdin and stdout) versions but this could // also be implemented to use sockets or HTTP. let (connection, io_threads) = Connection::stdio(); let text_document_sync = Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions { open_close: Some(true), change: Some(TextDocumentSyncKind::FULL), save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions { include_text: Some(true), })), ..TextDocumentSyncOptions::default() })); let code_lens_provider = Some(CodeLensOptions { resolve_provider: Some(true) }); let execute_command_provider = Some(ExecuteCommandOptions { commands: vec![String::from("run_fn")], ..ExecuteCommandOptions::default() }); let hover_provider = Some(HoverProviderCapability::Simple(true)); let definition_provider = Some(OneOf::Left(true)); let server_capabilities = ServerCapabilities { text_document_sync, code_lens_provider, execute_command_provider, hover_provider, definition_provider, ..ServerCapabilities::default() }; let initialize_params_json = connection.initialize(serde_json::to_value(server_capabilities)?)?; let _initialize_params: InitializeParams = serde_json::from_value(initialize_params_json)?; let db = build::CompilerDB::new(); let mut server = Server::new(connection, db); server.run()?; io_threads.join()?; // Shut down gracefully. 
info!("shutting down server"); Ok(()) } struct Server { connection: Connection, db: build::CompilerDB, } impl Server { fn new(connection: Connection, db: build::CompilerDB) -> Self { Self { connection, db } } fn run(&mut self) -> Result<()> { // NOTE(MH): This is a hack to allow us to borrow `self` mutably // hereafter. let receiver = std::mem::replace(&mut self.connection.receiver, crossbeam::channel::never()); for msg in &receiver { info!("Received message: {:?}", msg); match msg { Message::Request(request) => { if self.connection.handle_shutdown(&request)? { return Ok(()); } self.handle_request(request)?; } Message::Response(_response) => info!("Received unhandled response"), Message::Notification(notification) => self.handle_notification(notification)?, } } Ok(()) } fn did_open(&mut self, params: DidOpenTextDocumentParams) -> Result<()> { let TextDocumentItem { uri, text, .. } = params.text_document; self.validate_document(uri, text, true) } fn did_change(&mut self, params: DidChangeTextDocumentParams) -> Result<()> { let uri = params.text_document.uri; let text = params.content_changes.into_iter().last().unwrap().text; self.validate_document(uri, text, true) } fn did_save(&mut self, params: DidSaveTextDocumentParams) -> Result<()> { let uri = params.text_document.uri; if let Some(text) = params.text { self.validate_document(uri, text, true) } else { info!("got save notification without text for {}", uri); Ok(()) } } fn did_close(&mut self, params: DidCloseTextDocumentParams) -> Result<()> { let uri = params.text_document.uri; let params = PublishDiagnosticsParams { uri, diagnostics: vec![], version: None }; let notification = lsp_server::Notification::new( PublishDiagnostics::METHOD.to_owned(), serde_json::to_value(params)?, ); self.connection.sender.send(Message::from(notification))?; Ok(()) } fn validate_document(&mut self, lsp_uri: Url, input: String, print_module: bool) -> Result<()> { let uri = build::Uri::new(lsp_uri.as_str()); info!("Received text for 
{:?}", uri); self.db.set_input(uri, Arc::new(input)); let diagnostics: Vec<_> = self.db.with_diagnostics(uri, |diagnostics| { diagnostics.map(homer_compiler::diagnostic::Diagnostic::to_lsp).collect() }); info!("Sending {} diagnostics", diagnostics.len()); let params = PublishDiagnosticsParams { uri: lsp_uri, diagnostics, version: None }; let notification = lsp_server::Notification::new( PublishDiagnostics::METHOD.to_owned(), serde_json::to_value(params)?, ); self.connection.sender.send(Message::from(notification))?; if print_module { if let Some(module) = self.db.checked_module(uri) { info!("{:?}", module); } } Ok(()) } fn hover(&self, params: HoverParams) -> Result<Option<Hover>> { let response = self.find_symbol(&params.text_document_position_params).map(|symbol| { let info = match &symbol { SymbolInfo::ExprBinder { typ, .. } | SymbolInfo::ExprVar { typ, .. } => { format!("{}", typ) } SymbolInfo::FuncRef { def, .. } => format!("{}", def), }; let range = Some(symbol.span().to_lsp()); let contents = HoverContents::Markup(MarkupContent { kind: MarkupKind::Markdown, value: format!("```homer\n{}\n```", info), }); Hover { contents, range } }); Ok(response) } fn goto_definition( &self, params: GotoDefinitionParams, ) -> Result<Option<GotoDefinitionResponse>> { let response = self .find_symbol(&params.text_document_position_params) .as_ref() .and_then(SymbolInfo::definition_span) .map(|span| { GotoDefinitionResponse::Scalar(Location { uri: params.text_document_position_params.text_document.uri, range: span.to_lsp(), }) }); Ok(response) } fn code_lenses(&self, params: CodeLensParams) -> Result<Option<Vec<CodeLens>>> { let lsp_uri = params.text_document.uri; info!("got code lens request for uri {:?}", lsp_uri); let uri = build::Uri::new(lsp_uri.as_str()); let lenses = if let Some(module) = self.db.checked_module(uri) { let mut lenses = Vec::new(); for decl in module.func_decls() { if decl.expr_params.is_empty() { let range = decl.name.span.to_lsp(); let arg = 
serde_json::to_value(RunFnParams { uri: uri.as_str().to_owned(), fun: decl.name.locatee.as_str().to_owned(), })?; let command = Some(Command { title: String::from("▶︎ Run Function"), command: String::from("run_fn"), arguments: Some(vec![arg]), }); lenses.push(CodeLens { range, command, data: None }); } } Some(lenses) } else { None }; Ok(lenses) } fn execute_command(&self, params: ExecuteCommandParams) -> Result<Option<serde_json::Value>> { let arguments = params.arguments; assert_eq!(arguments.len(), 1); let argument = arguments.into_iter().next().unwrap(); let args: RunFnParams = serde_json::from_value(argument)?; let message_params = if let Some(module) = self.db.anf_module(build::Uri::new(&args.uri)) { let machine = cek::Machine::new(&module, syntax::ExprVar::new(&args.fun)); let result = machine.run(); let message = format!("{}() = {}", args.fun, result.value()); ShowMessageParams { typ: MessageType::INFO, message } } else { ShowMessageParams { typ: MessageType::ERROR, message: String::from("The module cannot be compiled."), } }; let notification = lsp_server::Notification::new( ShowMessage::METHOD.to_owned(), serde_json::to_value(&message_params)?, ); if let Err(error) = self.connection.sender.send(Message::from(notification)) { info!("Failed to send message: {:?}", error); } Ok(None) } fn find_symbol(&self, position_params: &TextDocumentPositionParams) -> Option<SymbolInfo> { let uri = build::Uri::new(position_params.text_document.uri.as_str()); let symbols = self.db.symbols(uri); let loc = location::SourceLocation::from_lsp(position_params.position); // FIXME(MH): We should do a binary search here. 
symbols.iter().find_map(|symbol| { if symbol.span().contains(loc) { Some(symbol.clone()) } else { None } }) } fn handle_request(&self, request: lsp_server::Request) -> Result<()> { match request.method.as_ref() { HoverRequest::METHOD => self.dispatch_request::<HoverRequest>(request, &Self::hover), GotoDefinition::METHOD => { self.dispatch_request::<GotoDefinition>(request, &Self::goto_definition) } CodeLensRequest::METHOD => { self.dispatch_request::<CodeLensRequest>(request, &Self::code_lenses) } ExecuteCommand::METHOD => { self.dispatch_request::<ExecuteCommand>(request, &Self::execute_command) } other => { info!("Received unhandled request: {:?}", other); Ok(()) } } } fn handle_notification(&mut self, notification: lsp_server::Notification) -> Result<()> { match notification.method.as_ref() { DidOpenTextDocument::METHOD => { self.dispatch_notification::<DidOpenTextDocument>(notification, &Self::did_open) } DidChangeTextDocument::METHOD => { self.dispatch_notification::<DidChangeTextDocument>(notification, &Self::did_change) } DidSaveTextDocument::METHOD => { self.dispatch_notification::<DidSaveTextDocument>(notification, &Self::did_save) } DidCloseTextDocument::METHOD => { self.dispatch_notification::<DidCloseTextDocument>(notification, &Self::did_close) } other => { info!("Received unhandled notification: {:?}", other); Ok(()) } } } fn dispatch_request<R: Request>( &self, request: lsp_server::Request, f: &dyn Fn(&Self, R::Params) -> Result<R::Result>, ) -> Result<()> { let (id, params) = request.extract(R::METHOD).unwrap(); let result = f(self, params)?; let result = serde_json::to_value(result)?; let response = Response::new_ok(id, result); self.connection.sender.send(Message::from(response))?; Ok(()) } fn dispatch_notification<N: Notification>( &mut self, notification: lsp_server::Notification, f: &dyn Fn(&mut Self, N::Params) -> Result<()>, ) -> Result<()> { let params = notification.extract(N::METHOD).unwrap(); f(self, params) } } 
#[derive(serde::Deserialize, serde::Serialize)] struct RunFnParams { uri: String, fun: String, }
38.38558
100
0.570682
4a52690ef484303363ae8eb0c18aa051724890b8
5,561
// Copyright 2016 Dtoa Developers // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![doc(html_root_url = "https://docs.rs/dtoa/0.4.2")] #![cfg_attr(not(target_env = "sgx"), no_std)] #![cfg_attr(target_env = "sgx", feature(rustc_private))] #[cfg(not(target_env = "sgx"))] extern crate sgx_tstd as std; #[macro_use] mod diyfp; #[macro_use] mod dtoa; use std::{io, mem, ops, ptr, slice}; #[inline] pub fn write<W: io::Write, V: Floating>(wr: W, value: V) -> io::Result<usize> { value.write(wr) } pub trait Floating { fn write<W: io::Write>(self, W) -> io::Result<usize>; } impl Floating for f32 { fn write<W: io::Write>(self, wr: W) -> io::Result<usize> { dtoa! { floating_type: f32, significand_type: u32, exponent_type: i32, diy_significand_size: 32, significand_size: 23, exponent_bias: 0x7F, mask_type: u32, exponent_mask: 0x7F800000, significand_mask: 0x007FFFFF, hidden_bit: 0x00800000, cached_powers_f: CACHED_POWERS_F_32, cached_powers_e: CACHED_POWERS_E_32, min_power: (-36), }; unsafe { dtoa(wr, self) } } } impl Floating for f64 { fn write<W: io::Write>(self, wr: W) -> io::Result<usize> { dtoa! 
{ floating_type: f64, significand_type: u64, exponent_type: isize, diy_significand_size: 64, significand_size: 52, exponent_bias: 0x3FF, mask_type: u64, exponent_mask: 0x7FF0000000000000, significand_mask: 0x000FFFFFFFFFFFFF, hidden_bit: 0x0010000000000000, cached_powers_f: CACHED_POWERS_F_64, cached_powers_e: CACHED_POWERS_E_64, min_power: (-348), }; unsafe { dtoa(wr, self) } } } //////////////////////////////////////////////////////////////////////////////// const MAX_DECIMAL_PLACES: isize = 324; static DEC_DIGITS_LUT: &'static [u8] = b"0001020304050607080910111213141516171819\ 2021222324252627282930313233343536373839\ 4041424344454647484950515253545556575859\ 6061626364656667686970717273747576777879\ 8081828384858687888990919293949596979899"; // 10^-36, 10^-28, ..., 10^52 static CACHED_POWERS_F_32: [u32; 12] = [ 0xaa242499, 0xfd87b5f3, 0xbce50865, 0x8cbccc09, 0xd1b71759, 0x9c400000, 0xe8d4a510, 0xad78ebc6, 0x813f3979, 0xc097ce7c, 0x8f7e32ce, 0xd5d238a5, ]; static CACHED_POWERS_E_32: [i16; 12] = [ -151, -125, -98, -71, -45, -18, 8, 35, 62, 88, 115, 141, ]; // 10^-348, 10^-340, ..., 10^340 static CACHED_POWERS_F_64: [u64; 87] = [ 0xfa8fd5a0081c0288, 0xbaaee17fa23ebf76, 0x8b16fb203055ac76, 0xcf42894a5dce35ea, 0x9a6bb0aa55653b2d, 0xe61acf033d1a45df, 0xab70fe17c79ac6ca, 0xff77b1fcbebcdc4f, 0xbe5691ef416bd60c, 0x8dd01fad907ffc3c, 0xd3515c2831559a83, 0x9d71ac8fada6c9b5, 0xea9c227723ee8bcb, 0xaecc49914078536d, 0x823c12795db6ce57, 0xc21094364dfb5637, 0x9096ea6f3848984f, 0xd77485cb25823ac7, 0xa086cfcd97bf97f4, 0xef340a98172aace5, 0xb23867fb2a35b28e, 0x84c8d4dfd2c63f3b, 0xc5dd44271ad3cdba, 0x936b9fcebb25c996, 0xdbac6c247d62a584, 0xa3ab66580d5fdaf6, 0xf3e2f893dec3f126, 0xb5b5ada8aaff80b8, 0x87625f056c7c4a8b, 0xc9bcff6034c13053, 0x964e858c91ba2655, 0xdff9772470297ebd, 0xa6dfbd9fb8e5b88f, 0xf8a95fcf88747d94, 0xb94470938fa89bcf, 0x8a08f0f8bf0f156b, 0xcdb02555653131b6, 0x993fe2c6d07b7fac, 0xe45c10c42a2b3b06, 0xaa242499697392d3, 0xfd87b5f28300ca0e, 0xbce5086492111aeb, 
0x8cbccc096f5088cc, 0xd1b71758e219652c, 0x9c40000000000000, 0xe8d4a51000000000, 0xad78ebc5ac620000, 0x813f3978f8940984, 0xc097ce7bc90715b3, 0x8f7e32ce7bea5c70, 0xd5d238a4abe98068, 0x9f4f2726179a2245, 0xed63a231d4c4fb27, 0xb0de65388cc8ada8, 0x83c7088e1aab65db, 0xc45d1df942711d9a, 0x924d692ca61be758, 0xda01ee641a708dea, 0xa26da3999aef774a, 0xf209787bb47d6b85, 0xb454e4a179dd1877, 0x865b86925b9bc5c2, 0xc83553c5c8965d3d, 0x952ab45cfa97a0b3, 0xde469fbd99a05fe3, 0xa59bc234db398c25, 0xf6c69a72a3989f5c, 0xb7dcbf5354e9bece, 0x88fcf317f22241e2, 0xcc20ce9bd35c78a5, 0x98165af37b2153df, 0xe2a0b5dc971f303a, 0xa8d9d1535ce3b396, 0xfb9b7cd9a4a7443c, 0xbb764c4ca7a44410, 0x8bab8eefb6409c1a, 0xd01fef10a657842c, 0x9b10a4e5e9913129, 0xe7109bfba19c0c9d, 0xac2820d9623bf429, 0x80444b5e7aa7cf85, 0xbf21e44003acdd2d, 0x8e679c2f5e44ff8f, 0xd433179d9c8cb841, 0x9e19db92b4e31ba9, 0xeb96bf6ebadf77d9, 0xaf87023b9bf0ee6b, ]; static CACHED_POWERS_E_64: [i16; 87] = [ -1220, -1193, -1166, -1140, -1113, -1087, -1060, -1034, -1007, -980, -954, -927, -901, -874, -847, -821, -794, -768, -741, -715, -688, -661, -635, -608, -582, -555, -529, -502, -475, -449, -422, -396, -369, -343, -316, -289, -263, -236, -210, -183, -157, -130, -103, -77, -50, -24, 3, 30, 56, 83, 109, 136, 162, 189, 216, 242, 269, 295, 322, 348, 375, 402, 428, 455, 481, 508, 534, 561, 588, 614, 641, 667, 694, 720, 747, 774, 800, 827, 853, 880, 907, 933, 960, 986, 1013, 1039, 1066, ];
35.877419
80
0.657436
1d49ff34d0e0b5af005ab45cbd45a27848f8cfce
3,921
use clap::Clap; mod util; #[derive(Clap, Debug)] #[clap(version = "1.0", author = "Link Xie. <[email protected]>")] struct Options { #[clap(subcommand)] action: SubCommand, } #[derive(Clap, Debug)] enum SubCommand { New(NewAction), Renew(RenewAction), } #[derive(Clap, Debug)] struct NewAction { #[clap( long = "capacity", about = "The capacity of PreAccountCell. Required if --profit is not provided." )] capacity: Option<u64>, #[clap( long = "account-name-storage", about = "The length of account, must count its suffix. Required if --profit is not provided." )] account_name_storage: Option<u64>, #[clap(long = "profit", about = "The profit of proposal confirmation.")] profit: Option<u64>, #[clap( long = "price", required = true, about = "The register fee of account for one year." )] price: u64, #[clap( long = "quote", required = true, about = "The quote of CKB to USD, AKA USD/CKB." )] quote: u64, #[clap( long = "discount", default_value = "0", about = "The discount of register fee." )] discount: u32, #[clap(long = "current", about = "The current timestamp, can be omitted.")] current: Option<u64>, } #[derive(Clap, Debug)] struct RenewAction { #[clap( long = "profit", required = true, about = "The total profit which DAS get." )] profit: u64, #[clap( long = "price", required = true, about = "The renew fee of account for one year." )] price: u64, #[clap( long = "quote", required = true, about = "The quote of CKB to USD, AKA USD/CKB." )] quote: u64, } fn main() { // Parse options let options: Options = Options::parse(); // println!("{:?}", options); match options.action { SubCommand::New(options) => { let profit; if options.profit.is_none() { if options.account_name_storage.is_none() { panic!( "Params --account-name-storage is required when --profit is not provided." 
); } if options.capacity.is_none() { panic!("Params --capacity is required when --profit is not provided."); } let storage_capacity = util::calc_account_storage_capacity(options.account_name_storage.unwrap()); println!( "storage_capacity({}) = ACCOUNT_CELL_BASIC_CAPACITY({}) + (account_name_storage({}) * 100_000_000)", storage_capacity, util::ACCOUNT_CELL_BASIC_CAPACITY, options.account_name_storage.unwrap() ); profit = options.capacity.unwrap() - storage_capacity; println!( "total_profit({}) = capacity({}) - storage_capacity({})", profit, options.capacity.unwrap(), storage_capacity ); } else { profit = options.profit.unwrap(); } let duration = util::calc_duration_from_paid( profit, options.price, options.quote, options.discount, ); if let Some(current) = options.current { let expired_at = current + duration; println!( "expired_at({}) = current({}) - duration({})", expired_at, current, duration ); } } SubCommand::Renew(options) => { let duration = util::calc_duration_from_paid(options.profit, options.price, options.quote, 0); } } }
29.044444
120
0.509054
0ae371085d049a0d42511d4391496538be07d38d
4,754
// This file is a modified version from wasm-bindgen-futures.
// See: https://github.com/rustwasm/wasm-bindgen/blob/master/crates/futures/src/task/singlethread.rs
// Licensed under Apache/MIT

use serde::Serialize;
use std::cell::{Cell, RefCell};
use std::future::Future;
use std::mem::ManuallyDrop;
use std::pin::Pin;
use std::rc::Rc;
use std::task::{Context, RawWaker, RawWakerVTable, Waker};

use crate::common::mem::{to_fat_ptr, FatPtr};
use crate::common::r#async::AsyncValue;
use crate::guest::io::export_value_to_host;

use super::host_resolve_async_value;

// The future being driven plus the waker that re-queues its owning Task.
struct Inner {
    future: Pin<Box<dyn Future<Output = ()> + 'static>>,
    waker: Waker,
}

/// A single-threaded task: one future plus the bookkeeping needed to poll it
/// from the run queue in `super::queue`.
pub struct Task {
    // The actual Future that we're executing as part of this task.
    //
    // This is an Option so that the Future can be immediately dropped when it's
    // finished
    inner: RefCell<Option<Inner>>,
    // This is used to ensure that the Task will only be queued once
    is_queued: Cell<bool>,
}

impl Task {
    /// Wraps `future` in a new `Task`, wires up its waker, and queues it for
    /// an initial poll.
    pub fn spawn(future: Pin<Box<dyn Future<Output = ()> + 'static>>) {
        let this = Rc::new(Self {
            inner: RefCell::new(None),
            is_queued: Cell::new(false),
        });

        // SAFETY: the RawWaker vtable below upholds the Rc refcount contract;
        // see the safety discussion on `into_raw_waker`.
        let waker = unsafe { Waker::from_raw(Task::into_raw_waker(Rc::clone(&this))) };

        *this.inner.borrow_mut() = Some(Inner { future, waker });

        Task::wake_by_ref(&this);
    }

    /// Allocates a zeroed `AsyncValue` slot for the host, spawns `future`, and
    /// returns a fat pointer to the slot. When the future completes, its
    /// serialized result is handed to the host via `host_resolve_async_value`.
    pub fn alloc_and_spawn<FUT, RET>(future: FUT) -> FatPtr
    where
        FUT: Future<Output = RET> + 'static,
        RET: Serialize,
    {
        let layout = std::alloc::Layout::new::<AsyncValue>();
        let len = layout.size() as u32;
        // SAFETY: `AsyncValue` has a non-zero size and a valid layout; the
        // zeroed allocation is the host-visible "pending" state.
        let ptr = unsafe { std::alloc::alloc_zeroed(layout) };
        let fat_ptr = to_fat_ptr(ptr, len);

        Task::spawn(Box::pin(async move {
            let ret = future.await;
            let result_ptr = export_value_to_host(&ret);
            host_resolve_async_value(fat_ptr, result_ptr);
        }));

        fat_ptr
    }

    fn wake_by_ref(this: &Rc<Self>) {
        // If we've already been placed on the run queue then there's no need to
        // requeue ourselves since we're going to run at some point in the
        // future anyway.
        if this.is_queued.replace(true) {
            return;
        }

        super::queue::push_task(Rc::clone(this));
    }

    /// Creates a standard library `RawWaker` from an `Rc` of ourselves.
    ///
    /// Note that in general this is wildly unsafe because everything with
    /// Futures requires `Sync` + `Send` with regard to Wakers. For wasm,
    /// however, everything is guaranteed to be singlethreaded (since we're
    /// compiled without the `atomics` feature) so we "safely lie" and say our
    /// `Rc` pointer is good enough.
    unsafe fn into_raw_waker(this: Rc<Self>) -> RawWaker {
        // clone: bump the refcount without consuming the original Rc
        // (ManuallyDrop prevents the reconstructed Rc from being dropped).
        unsafe fn raw_clone(ptr: *const ()) -> RawWaker {
            let ptr = ManuallyDrop::new(Rc::from_raw(ptr as *const Task));
            Task::into_raw_waker((*ptr).clone())
        }

        // wake: consumes one refcount, so let the reconstructed Rc drop.
        unsafe fn raw_wake(ptr: *const ()) {
            let ptr = Rc::from_raw(ptr as *const Task);
            Task::wake_by_ref(&ptr);
        }

        // wake_by_ref: must NOT consume a refcount, hence ManuallyDrop.
        unsafe fn raw_wake_by_ref(ptr: *const ()) {
            let ptr = ManuallyDrop::new(Rc::from_raw(ptr as *const Task));
            Task::wake_by_ref(&ptr);
        }

        unsafe fn raw_drop(ptr: *const ()) {
            drop(Rc::from_raw(ptr as *const Task));
        }

        const VTABLE: RawWakerVTable =
            RawWakerVTable::new(raw_clone, raw_wake, raw_wake_by_ref, raw_drop);

        RawWaker::new(Rc::into_raw(this) as *const (), &VTABLE)
    }

    /// Polls this task's future once. Called by the run queue.
    pub(crate) fn run(&self) {
        let mut borrow = self.inner.borrow_mut();

        // Wakeups can come in after a Future has finished and been destroyed,
        // so handle this gracefully by just ignoring the request to run.
        let inner = match borrow.as_mut() {
            Some(inner) => inner,
            None => return,
        };

        // Ensure that if poll calls `waker.wake()` we can get enqueued back on
        // the run queue.
        self.is_queued.set(false);

        let poll = {
            let mut cx = Context::from_waker(&inner.waker);
            inner.future.as_mut().poll(&mut cx)
        };

        // If a future has finished (`Ready`) then clean up resources associated
        // with the future ASAP. This ensures that we don't keep anything extra
        // alive in-memory by accident. Our own struct, `Rc<Task>` won't
        // actually go away until all wakers referencing us go away, which may
        // take quite some time, so ensure that the heaviest of resources are
        // released early.
        if poll.is_ready() {
            *borrow = None;
        }
    }
}
33.478873
100
0.603281
29cd21a70d1282965e29f97e4ae5493350daaa50
103
use dade::model; #[model] struct TestModel { #[field(max_items = 2)] value: i8, } fn main() {}
12.875
27
0.582524
1660c0e4fcdba75e5ddae0a0a8018a474223af95
36
mod camera; pub use camera::Camera;
12
23
0.75
fc00c40813a81df2081909290942d0072e356fac
2,322
use ux::prelude::*; use ux::{ClickAction, Surface, Window}; #[derive(Default, Application)] struct Application { window: Window, } impl Application { fn new() -> Self { let app: Self = Default::default(); app.window .set_window_size(512, 512) .set_title("UX Framework - Stars") .show() .connect_destroy(move |_win| Application::quit()); app.window.set_background_color(Some(color::GRAY_9)); let surface = Surface::new(); surface.set_size(400.0, 400.0); // we should also change surface content size to avoid distortion surface.set_content_size(400.0, 400.0); surface.set_position(56.0, 56.0); app.window.set_child(&surface); surface.connect_draw(move |_widget, ctx, width, height| { ctx.clear_rect(0.0, 0.0, width as f64, height as f64); ctx.begin_path(); ctx.set_line_width(9.0); ctx.star(75.0, 75.0, 5, 50.0, 25.0); ctx.set_fill_color(color::TEAL_9); ctx.fill(); ctx.set_stroke_color(color::GRAY_7); ctx.stroke(); ctx.begin_path(); ctx.set_line_width(3.0); ctx.star(150.0, 200.0, 8, 50.0, 25.0); ctx.set_fill_color(color::CYAN_6); ctx.fill(); ctx.set_stroke_color(color::GRAY_7); ctx.stroke(); ctx.begin_path(); ctx.set_line_width(0.0); ctx.star(225.0, 75.0, 16, 50.0, 20.0); ctx.set_fill_color(color::RED_5); ctx.fill(); ctx.begin_path(); ctx.set_line_width(3.0); ctx.star(300.0, 200.0, 16, 50.0, 40.0); ctx.set_fill_color(color::YELLOW_5); ctx.fill(); ctx.set_stroke_color(color::GRAY_7); ctx.stroke(); false }); let action = ClickAction::new(); surface.add_action(&action); surface.set_reactive(true); action.connect_clicked(|action, actor| { let (x, y) = action.get_coords(); let (ax, ay) = actor.get_position(); println!("Clicked {}, {}: {}", action.get_button(), x - ax, y - ay); }); app } } fn main() { Application::run(); }
28.317073
80
0.527562
01632e3142ea49a34aa6cf88c35d826e5fa05831
13,100
///! Implementation of [multiaddr](https://github.com/jbenet/multiaddr) in Rust. pub use multihash; mod protocol; mod onion_addr; mod errors; #[cfg(feature = "url")] mod from_url; use serde::{ Deserialize, Deserializer, Serialize, Serializer, de::{self, Error as DeserializerError} }; use std::{ convert::TryFrom, fmt, io, iter::FromIterator, net::{IpAddr, Ipv4Addr, Ipv6Addr}, result::Result as StdResult, str::FromStr, sync::Arc }; pub use self::errors::{Result, Error}; pub use self::protocol::Protocol; pub use self::onion_addr::Onion3Addr; #[cfg(feature = "url")] pub use self::from_url::{FromUrlErr, from_url, from_url_lossy}; static_assertions::const_assert! { // This check is most certainly overkill right now, but done here // anyway to ensure the `as u64` casts in this crate are safe. std::mem::size_of::<usize>() <= std::mem::size_of::<u64>() } /// Representation of a Multiaddr. #[allow(clippy::rc_buffer)] #[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Hash)] pub struct Multiaddr { bytes: Arc<Vec<u8>> } impl Multiaddr { /// Create a new, empty multiaddress. pub fn empty() -> Self { Self { bytes: Arc::new(Vec::new()) } } /// Create a new, empty multiaddress with the given capacity. pub fn with_capacity(n: usize) -> Self { Self { bytes: Arc::new(Vec::with_capacity(n)) } } /// Return the length in bytes of this multiaddress. pub fn len(&self) -> usize { self.bytes.len() } /// Returns true if the length of this multiaddress is 0. pub fn is_empty(&self) -> bool { self.bytes.len() == 0 } /// Return a copy of this [`Multiaddr`]'s byte representation. pub fn to_vec(&self) -> Vec<u8> { Vec::from(&self.bytes[..]) } /// Adds an already-parsed address component to the end of this multiaddr. 
/// /// # Examples /// /// ``` /// use parity_multiaddr::{Multiaddr, Protocol}; /// /// let mut address: Multiaddr = "/ip4/127.0.0.1".parse().unwrap(); /// address.push(Protocol::Tcp(10000)); /// assert_eq!(address, "/ip4/127.0.0.1/tcp/10000".parse().unwrap()); /// ``` /// pub fn push(&mut self, p: Protocol<'_>) { let mut w = io::Cursor::<&mut Vec<u8>>::new(Arc::make_mut(&mut self.bytes)); w.set_position(w.get_ref().len() as u64); p.write_bytes(&mut w).expect("Writing to a `io::Cursor<&mut Vec<u8>>` never fails.") } /// Pops the last `Protocol` of this multiaddr, or `None` if the multiaddr is empty. /// ``` /// use parity_multiaddr::{Multiaddr, Protocol}; /// /// let mut address: Multiaddr = "/ip4/127.0.0.1/udt/sctp/5678".parse().unwrap(); /// /// assert_eq!(address.pop().unwrap(), Protocol::Sctp(5678)); /// assert_eq!(address.pop().unwrap(), Protocol::Udt); /// ``` /// pub fn pop<'a>(&mut self) -> Option<Protocol<'a>> { let mut slice = &self.bytes[..]; // the remaining multiaddr slice if slice.is_empty() { return None } let protocol = loop { let (p, s) = Protocol::from_bytes(slice).expect("`slice` is a valid `Protocol`."); if s.is_empty() { break p.acquire() } slice = s }; let remaining_len = self.bytes.len() - slice.len(); Arc::make_mut(&mut self.bytes).truncate(remaining_len); Some(protocol) } /// Like [`Multiaddr::push`] but consumes `self`. pub fn with(mut self, p: Protocol<'_>) -> Self { let mut w = io::Cursor::<&mut Vec<u8>>::new(Arc::make_mut(&mut self.bytes)); w.set_position(w.get_ref().len() as u64); p.write_bytes(&mut w).expect("Writing to a `io::Cursor<&mut Vec<u8>>` never fails."); self } /// Returns the components of this multiaddress. 
/// /// # Example /// /// ```rust /// use std::net::Ipv4Addr; /// use parity_multiaddr::{Multiaddr, Protocol}; /// /// let address: Multiaddr = "/ip4/127.0.0.1/udt/sctp/5678".parse().unwrap(); /// /// let components = address.iter().collect::<Vec<_>>(); /// assert_eq!(components[0], Protocol::Ip4(Ipv4Addr::new(127, 0, 0, 1))); /// assert_eq!(components[1], Protocol::Udt); /// assert_eq!(components[2], Protocol::Sctp(5678)); /// ``` /// pub fn iter(&self) -> Iter<'_> { Iter(&self.bytes) } /// Replace a [`Protocol`] at some position in this `Multiaddr`. /// /// The parameter `at` denotes the index of the protocol at which the function /// `by` will be applied to the current protocol, returning an optional replacement. /// /// If `at` is out of bounds or `by` does not yield a replacement value, /// `None` will be returned. Otherwise a copy of this `Multiaddr` with the /// updated `Protocol` at position `at` will be returned. pub fn replace<'a, F>(&self, at: usize, by: F) -> Option<Multiaddr> where F: FnOnce(&Protocol<'_>) -> Option<Protocol<'a>> { let mut address = Multiaddr::with_capacity(self.len()); let mut fun = Some(by); let mut replaced = false; for (i, p) in self.iter().enumerate() { if i == at { let f = fun.take().expect("i == at only happens once"); if let Some(q) = f(&p) { address = address.with(q); replaced = true; continue } return None } address = address.with(p) } if replaced { Some(address) } else { None } } /// Checks whether the given `Multiaddr` is a suffix of this `Multiaddr`. pub fn ends_with(&self, other: &Multiaddr) -> bool { let n = self.bytes.len(); let m = other.bytes.len(); if n < m { return false } self.bytes[(n - m) ..] == other.bytes[..] 
} } impl fmt::Debug for Multiaddr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.to_string().fmt(f) } } impl fmt::Display for Multiaddr { /// Convert a Multiaddr to a string /// /// # Example /// /// ``` /// use parity_multiaddr::Multiaddr; /// /// let address: Multiaddr = "/ip4/127.0.0.1/udt".parse().unwrap(); /// assert_eq!(address.to_string(), "/ip4/127.0.0.1/udt"); /// ``` /// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { for s in self.iter() { s.to_string().fmt(f)?; } Ok(()) } } impl AsRef<[u8]> for Multiaddr { fn as_ref(&self) -> &[u8] { self.bytes.as_ref() } } impl<'a> IntoIterator for &'a Multiaddr { type Item = Protocol<'a>; type IntoIter = Iter<'a>; fn into_iter(self) -> Iter<'a> { Iter(&self.bytes) } } impl<'a> FromIterator<Protocol<'a>> for Multiaddr { fn from_iter<T>(iter: T) -> Self where T: IntoIterator<Item = Protocol<'a>>, { let mut writer = Vec::new(); for cmp in iter { cmp.write_bytes(&mut writer).expect("Writing to a `Vec` never fails."); } Multiaddr { bytes: Arc::new(writer) } } } impl FromStr for Multiaddr { type Err = Error; fn from_str(input: &str) -> Result<Self> { let mut writer = Vec::new(); let mut parts = input.split('/').peekable(); if Some("") != parts.next() { // A multiaddr must start with `/` return Err(Error::InvalidMultiaddr) } while parts.peek().is_some() { let p = Protocol::from_str_parts(&mut parts)?; p.write_bytes(&mut writer).expect("Writing to a `Vec` never fails."); } Ok(Multiaddr { bytes: Arc::new(writer) }) } } /// Iterator over `Multiaddr` [`Protocol`]s. 
pub struct Iter<'a>(&'a [u8]); impl<'a> Iterator for Iter<'a> { type Item = Protocol<'a>; fn next(&mut self) -> Option<Self::Item> { if self.0.is_empty() { return None; } let (p, next_data) = Protocol::from_bytes(self.0).expect("`Multiaddr` is known to be valid."); self.0 = next_data; Some(p) } } impl<'a> From<Protocol<'a>> for Multiaddr { fn from(p: Protocol<'a>) -> Multiaddr { let mut w = Vec::new(); p.write_bytes(&mut w).expect("Writing to a `Vec` never fails."); Multiaddr { bytes: Arc::new(w) } } } impl From<IpAddr> for Multiaddr { fn from(v: IpAddr) -> Multiaddr { match v { IpAddr::V4(a) => a.into(), IpAddr::V6(a) => a.into() } } } impl From<Ipv4Addr> for Multiaddr { fn from(v: Ipv4Addr) -> Multiaddr { Protocol::Ip4(v).into() } } impl From<Ipv6Addr> for Multiaddr { fn from(v: Ipv6Addr) -> Multiaddr { Protocol::Ip6(v).into() } } impl TryFrom<Vec<u8>> for Multiaddr { type Error = Error; fn try_from(v: Vec<u8>) -> Result<Self> { // Check if the argument is a valid `Multiaddr` by reading its protocols. 
let mut slice = &v[..]; while !slice.is_empty() { let (_, s) = Protocol::from_bytes(slice)?; slice = s } Ok(Multiaddr { bytes: Arc::new(v) }) } } impl TryFrom<String> for Multiaddr { type Error = Error; fn try_from(s: String) -> Result<Multiaddr> { s.parse() } } impl<'a> TryFrom<&'a str> for Multiaddr { type Error = Error; fn try_from(s: &'a str) -> Result<Multiaddr> { s.parse() } } impl Serialize for Multiaddr { fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error> where S: Serializer, { if serializer.is_human_readable() { serializer.serialize_str(&self.to_string()) } else { serializer.serialize_bytes(self.as_ref()) } } } impl<'de> Deserialize<'de> for Multiaddr { fn deserialize<D>(deserializer: D) -> StdResult<Self, D::Error> where D: Deserializer<'de>, { struct Visitor { is_human_readable: bool }; impl<'de> de::Visitor<'de> for Visitor { type Value = Multiaddr; fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { formatter.write_str("multiaddress") } fn visit_seq<A: de::SeqAccess<'de>>(self, mut seq: A) -> StdResult<Self::Value, A::Error> { let mut buf: Vec<u8> = Vec::with_capacity(std::cmp::min(seq.size_hint().unwrap_or(0), 4096)); while let Some(e) = seq.next_element()? 
{ buf.push(e); } if self.is_human_readable { let s = String::from_utf8(buf).map_err(DeserializerError::custom)?; s.parse().map_err(DeserializerError::custom) } else { Multiaddr::try_from(buf).map_err(DeserializerError::custom) } } fn visit_str<E: de::Error>(self, v: &str) -> StdResult<Self::Value, E> { v.parse().map_err(DeserializerError::custom) } fn visit_borrowed_str<E: de::Error>(self, v: &'de str) -> StdResult<Self::Value, E> { self.visit_str(v) } fn visit_string<E: de::Error>(self, v: String) -> StdResult<Self::Value, E> { self.visit_str(&v) } fn visit_bytes<E: de::Error>(self, v: &[u8]) -> StdResult<Self::Value, E> { self.visit_byte_buf(v.into()) } fn visit_borrowed_bytes<E: de::Error>(self, v: &'de [u8]) -> StdResult<Self::Value, E> { self.visit_byte_buf(v.into()) } fn visit_byte_buf<E: de::Error>(self, v: Vec<u8>) -> StdResult<Self::Value, E> { Multiaddr::try_from(v).map_err(DeserializerError::custom) } } if deserializer.is_human_readable() { deserializer.deserialize_str(Visitor { is_human_readable: true }) } else { deserializer.deserialize_bytes(Visitor { is_human_readable: false }) } } } /// Easy way for a user to create a `Multiaddr`. /// /// Example: /// /// ```rust /// # use parity_multiaddr::multiaddr; /// let addr = multiaddr!(Ip4([127, 0, 0, 1]), Tcp(10500u16)); /// ``` /// /// Each element passed to `multiaddr!` should be a variant of the `Protocol` enum. The /// optional parameter is turned into the proper type with the `Into` trait. /// /// For example, `Ip4([127, 0, 0, 1])` works because `Ipv4Addr` implements `From<[u8; 4]>`. #[macro_export] macro_rules! multiaddr { ($($comp:ident $(($param:expr))*),+) => { { use std::iter; let elem = iter::empty::<$crate::Protocol>(); $( let elem = { let cmp = $crate::Protocol::$comp $(( $param.into() ))*; elem.chain(iter::once(cmp)) }; )+ elem.collect::<$crate::Multiaddr>() } } }
30.114943
109
0.545191
5640a3bda239f3327810597b04c992a89b2a6233
2,801
// Copyright 2022 Webb Technologies Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::vec;

use frame_support::traits::OnInitialize;

use crate::mock::*;

// Advances the mock runtime to `block` and triggers session rotation hooks.
fn init_block(block: u64) {
	System::set_block_number(block);
	Session::on_initialize(block);
}

// NOTE(review): the mock runtime appears to use 2 authorities per session,
// which is why `want` holds 4 ids but each assertion checks only 2 — TODO
// confirm against `crate::mock`.
#[test]
fn genesis_session_initializes_authorities() {
	let want = vec![mock_dkg_id(1), mock_dkg_id(2), mock_dkg_id(3), mock_dkg_id(4)];

	new_test_ext(vec![1, 2, 3, 4]).execute_with(|| {
		let authorities = DKGMetadata::authorities();

		// At genesis the first two validators form the active authority set.
		assert!(authorities.len() == 2);
		assert_eq!(want[0], authorities[0]);
		assert_eq!(want[1], authorities[1]);

		// The genesis authority set id is 0.
		assert!(DKGMetadata::authority_set_id() == 0);

		let next_authorities = DKGMetadata::next_authorities();

		// At genesis the next set equals the current set.
		assert!(next_authorities.len() == 2);
		assert_eq!(want[0], next_authorities[0]);
		assert_eq!(want[1], next_authorities[1]);
	});
}

#[test]
fn session_change_updates_next_authorities() {
	let want = vec![mock_dkg_id(1), mock_dkg_id(2), mock_dkg_id(3), mock_dkg_id(4)];

	new_test_ext(vec![1, 2, 3, 4]).execute_with(|| {
		init_block(1);

		// After the first session change, validators 3 and 4 are queued next.
		let next_authorities = DKGMetadata::next_authorities();

		assert!(next_authorities.len() == 2);
		assert_eq!(want[2], next_authorities[0]);
		assert_eq!(want[3], next_authorities[1]);

		init_block(2);

		// The queued set is unchanged by the following block.
		let next_authorities = DKGMetadata::next_authorities();

		assert!(next_authorities.len() == 2);
		assert_eq!(want[2], next_authorities[0]);
		assert_eq!(want[3], next_authorities[1]);
	});
}

#[test]
fn authority_set_at_genesis() {
	let want = vec![mock_dkg_id(1), mock_dkg_id(2)];

	new_test_ext(vec![1, 2, 3, 4]).execute_with(|| {
		let vs = DKGMetadata::authority_set();

		// Genesis authority set: id 0 with the first two validators.
		assert_eq!(vs.id, 0u64);
		assert_eq!(vs.authorities[0], want[0]);
		assert_eq!(vs.authorities[1], want[1]);
	});
}

#[test]
fn authority_set_updates_work() {
	let want = vec![mock_dkg_id(1), mock_dkg_id(2), mock_dkg_id(3), mock_dkg_id(4)];

	new_test_ext(vec![1, 2, 3, 4]).execute_with(|| {
		init_block(1);

		// Session 1: set id increments, validators 1 and 2 are active.
		let vs = DKGMetadata::authority_set();

		assert_eq!(vs.id, 1u64);
		assert_eq!(want[0], vs.authorities[0]);
		assert_eq!(want[1], vs.authorities[1]);

		init_block(2);

		// Session 2: set id increments again, validators 3 and 4 rotate in.
		let vs = DKGMetadata::authority_set();

		assert_eq!(vs.id, 2u64);
		assert_eq!(want[2], vs.authorities[0]);
		assert_eq!(want[3], vs.authorities[1]);
	});
}
26.67619
81
0.697608
26a9263b3c9a92ba029e8633de33c0c0938f856f
1,607
// // Copyright © 2020 Haim Gelfenbeyn // This code is licensed under MIT license (see LICENSE.txt for details) // #![windows_subsystem = "windows"] #[macro_use] extern crate log; mod configuration; mod display_control; mod logging; mod pnp_detect; mod usb_devices; fn main() { logging::init_logging().unwrap(); let config = configuration::Configuration::load().unwrap(); let mut detector = usb_devices::UsbChangeDetector::new().unwrap(); let pnp_detect = pnp_detect::PnPDetect::new(move || { let changed_devices = detector.detect_changed_devices().unwrap(); debug!("Detected device change. Added devices: {:?}", changed_devices); if changed_devices.added_devices.contains(&config.usb_device) { info!("Detected added device we're looking for {:?}", &config.usb_device); display_control::wiggle_mouse(); display_control::switch_to(config.monitor_input_added).unwrap_or_else(|err| { error!("Cannot switch monitor input: {:?}", err); }); } if changed_devices.removed_devices.contains(&config.usb_device) { info!("Detected removed device we're looking for {:?}", &config.usb_device); display_control::wiggle_mouse(); display_control::switch_to(config.monitor_input_removed).unwrap_or_else(|err| { error!("Cannot switch monitor input: {:?}", err); }); } }); display_control::log_current_source().unwrap_or_else(|err| { error!("Cannot get monitor input: {:?}", err); }); pnp_detect.detect(); }
37.372093
91
0.645924
87ac49afbafe3f63c4e3182d574c849de9a71f80
8,384
use crate::introspection_helpers::{ is_old_migration_table, is_prisma_1_or_11_list_table, is_prisma_1_point_0_join_table, is_prisma_1_point_1_or_2_join_table, is_relay_table, }; use crate::SqlFamilyTrait; use datamodel::{Datamodel, Model}; use introspection_connector::{IntrospectionContext, Version, Warning}; use native_types::{MySqlType, PostgresType}; use quaint::connector::SqlFamily; use sql_schema_describer::{Column, ForeignKey, ForeignKeyAction, PrimaryKey, SqlSchema, Table}; use tracing::debug; #[derive(Debug)] pub struct VersionChecker { sql_family: SqlFamily, has_migration_table: bool, has_relay_table: bool, has_prisma_1_join_table: bool, has_prisma_1_1_or_2_join_table: bool, uses_on_delete: bool, uses_default_values: bool, always_has_created_at_updated_at: bool, always_has_p1_or_p_1_1_compatible_id: bool, uses_non_prisma_types: bool, has_inline_relations: bool, } const SQLITE_TYPES: &[&str] = &["BOOLEAN", "DATE", "REAL", "INTEGER", "TEXT"]; const POSTGRES_TYPES: &[PostgresType] = &[ PostgresType::Boolean, PostgresType::Timestamp(Some(3)), PostgresType::Decimal(Some((65, 30))), PostgresType::Integer, PostgresType::Text, PostgresType::VarChar(Some(25)), PostgresType::VarChar(Some(36)), PostgresType::VarChar(Some(191)), ]; const MYSQL_TYPES: &[MySqlType] = &[ MySqlType::TinyInt, MySqlType::DateTime(Some(3)), MySqlType::Decimal(Some((65, 30))), MySqlType::Int, MySqlType::MediumText, MySqlType::VarChar(191), MySqlType::Char(25), MySqlType::Char(36), ]; impl VersionChecker { pub fn new(schema: &SqlSchema, ctx: &IntrospectionContext) -> VersionChecker { VersionChecker { sql_family: ctx.sql_family(), has_migration_table: schema.tables.iter().any(|table| is_old_migration_table(table)), has_relay_table: schema.tables.iter().any(|table| is_relay_table(table)), has_prisma_1_join_table: schema.tables.iter().any(|table| is_prisma_1_point_0_join_table(table)), has_prisma_1_1_or_2_join_table: schema .tables .iter() .any(|table| is_prisma_1_point_1_or_2_join_table(table)), 
uses_on_delete: false, uses_default_values: false, always_has_created_at_updated_at: true, always_has_p1_or_p_1_1_compatible_id: true, uses_non_prisma_types: false, has_inline_relations: false, } } pub fn check_column_for_type_and_default_value(&mut self, column: &Column) { match self.sql_family { SqlFamily::Postgres => { if let Some(native_type) = &column.tpe.native_type { let native_type: PostgresType = serde_json::from_value(native_type.clone()).unwrap(); if !POSTGRES_TYPES.contains(&native_type) { self.uses_non_prisma_types = true } } } SqlFamily::Mysql => { if let Some(native_type) = &column.tpe.native_type { let native_type: MySqlType = serde_json::from_value(native_type.clone()).unwrap(); if !MYSQL_TYPES.contains(&native_type) { self.uses_non_prisma_types = true } } } SqlFamily::Sqlite if !SQLITE_TYPES.contains(&&*column.tpe.full_data_type) => { self.uses_non_prisma_types = true } _ => (), } if !column.auto_increment && column.default.is_some() { self.uses_default_values = true; }; } pub fn has_inline_relations(&mut self, table: &Table) { if !is_prisma_1_or_11_list_table(table) { self.has_inline_relations = true; } } pub fn uses_on_delete(&mut self, fk: &ForeignKey, table: &Table) { if !(fk.on_delete_action == ForeignKeyAction::NoAction || fk.on_delete_action == ForeignKeyAction::SetNull) && !is_prisma_1_or_11_list_table(table) && fk.on_delete_action != ForeignKeyAction::Cascade { self.uses_on_delete = true } } pub fn always_has_created_at_updated_at(&mut self, table: &Table, model: &Model) { if !is_prisma_1_or_11_list_table(table) && !is_relay_table(table) && !model.has_created_at_and_updated_at() { self.always_has_created_at_updated_at = false } } pub fn has_p1_compatible_primary_key_column(&mut self, table: &Table) { if !is_prisma_1_or_11_list_table(table) && !is_relay_table(table) { if let Some(PrimaryKey { columns, .. 
}) = &table.primary_key { if columns.len() == 1 { let tpe = &table.column_bang(columns.first().unwrap()).tpe; if self.sql_family == SqlFamily::Postgres { if let Some(native_type) = &tpe.native_type { let native_type: PostgresType = serde_json::from_value(native_type.clone()).unwrap(); if native_type != PostgresType::VarChar(Some(25)) && native_type != PostgresType::VarChar(Some(36)) && native_type != PostgresType::Integer { self.always_has_p1_or_p_1_1_compatible_id = false } } } else if self.sql_family == SqlFamily::Mysql { if let Some(native_type) = &tpe.native_type { let native_type: MySqlType = serde_json::from_value(native_type.clone()).unwrap(); if native_type != MySqlType::Char(25) && native_type != MySqlType::Char(36) && native_type != MySqlType::Int { self.always_has_p1_or_p_1_1_compatible_id = false } } }; } } } } fn is_prisma_2(&self, warnings: &[Warning]) -> bool { !self.has_relay_table && !self.uses_on_delete && !self.uses_non_prisma_types && self.has_migration_table && warnings.is_empty() } fn is_prisma_1_1(&self, warnings: &[Warning]) -> bool { !self.has_migration_table && !self.has_relay_table && !self.uses_on_delete && !self.uses_default_values && !self.uses_non_prisma_types && !self.has_prisma_1_join_table && self.always_has_p1_or_p_1_1_compatible_id && warnings.is_empty() } fn is_prisma_1(&self, warnings: &[Warning]) -> bool { !self.has_migration_table && !self.uses_on_delete && !self.uses_default_values && !self.uses_non_prisma_types && !self.has_prisma_1_1_or_2_join_table && !self.has_inline_relations && self.has_relay_table && self.always_has_created_at_updated_at && self.always_has_p1_or_p_1_1_compatible_id && warnings.is_empty() } pub fn version(&self, warnings: &[Warning], data_model: &Datamodel) -> Version { debug!("{:?}", &self); match self.sql_family { _ if data_model.is_empty() => Version::NonPrisma, SqlFamily::Sqlite if self.is_prisma_2(warnings) => Version::Prisma2, SqlFamily::Sqlite => Version::NonPrisma, SqlFamily::Mysql if 
self.is_prisma_2(warnings) => Version::Prisma2, SqlFamily::Mysql if self.is_prisma_1(warnings) => Version::Prisma1, SqlFamily::Mysql if self.is_prisma_1_1(warnings) => Version::Prisma11, SqlFamily::Mysql => Version::NonPrisma, SqlFamily::Postgres if self.is_prisma_2(warnings) => Version::Prisma2, SqlFamily::Postgres if self.is_prisma_1(warnings) => Version::Prisma1, SqlFamily::Postgres if self.is_prisma_1_1(warnings) => Version::Prisma11, SqlFamily::Postgres => Version::NonPrisma, SqlFamily::Mssql => Version::NonPrisma, } } }
40.307692
117
0.589218
506f02dc87ccbeb98deea36c3b69f254d688a535
2,025
use std::collections::HashSet; use super::{AsyncReader, ByteRecord}; use crate::datatypes::{DataType, Field}; use crate::error::Result; use crate::io::csv::utils::merge_schema; use futures::{AsyncRead, AsyncSeek}; /// Infers the [`Field`]s of a CSV file by reading through the first n records up to `max_rows`. /// Seeks back to the begining of the file _after_ the header pub async fn infer_schema<R, F>( reader: &mut AsyncReader<R>, max_rows: Option<usize>, has_header: bool, infer: &F, ) -> Result<Vec<Field>> where R: AsyncRead + AsyncSeek + Unpin + Send + Sync, F: Fn(&[u8]) -> DataType, { // get or create header names // when has_header is false, creates default column names with column_ prefix let headers: Vec<String> = if has_header { reader .headers() .await? .iter() .map(|s| s.to_string()) .collect() } else { let first_record_count = &reader.headers().await?.len(); (0..*first_record_count) .map(|i| format!("column_{}", i + 1)) .collect() }; // save the csv reader position after reading headers let position = reader.position().clone(); let header_length = headers.len(); // keep track of inferred field types let mut column_types: Vec<HashSet<DataType>> = vec![HashSet::new(); header_length]; let mut records_count = 0; let mut record = ByteRecord::new(); let max_records = max_rows.unwrap_or(usize::MAX); while records_count < max_records { if !reader.read_byte_record(&mut record).await? { break; } records_count += 1; for (i, column) in column_types.iter_mut().enumerate() { if let Some(string) = record.get(i) { column.insert(infer(string)); } } } let fields = merge_schema(&headers, &mut column_types); // return the reader seek back to the start reader.seek(position).await?; Ok(fields) }
28.928571
96
0.607407
1ac0aee85d4d9010d91ca47258ce745d2ae99ea1
87,704
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![allow(non_uppercase_pattern_statics)] #![allow(non_camel_case_types)] #![allow(non_snake_case_functions)] #![allow(dead_code)] #![crate_name = "rustc_llvm"] #![experimental] #![license = "MIT/ASL2"] #![crate_type = "dylib"] #![crate_type = "rlib"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/")] #![feature(globs)] #![feature(link_args)] extern crate libc; use std::c_str::ToCStr; use libc::{c_uint, c_ushort, uint64_t, c_int, size_t, c_char}; use libc::{c_longlong, c_ulonglong}; use debuginfo::{DIBuilderRef, DIDescriptor, DIFile, DILexicalBlock, DISubprogram, DIType, DIBasicType, DIDerivedType, DICompositeType, DIVariable, DIGlobalVariable, DIArray, DISubrange}; pub mod archive_ro; pub type Opcode = u32; pub type Bool = c_uint; pub static True: Bool = 1 as Bool; pub static False: Bool = 0 as Bool; // Consts for the LLVM CallConv type, pre-cast to uint. #[deriving(PartialEq)] pub enum CallConv { CCallConv = 0, FastCallConv = 8, ColdCallConv = 9, X86StdcallCallConv = 64, X86FastcallCallConv = 65, X86_64_Win64 = 79, } pub enum Visibility { LLVMDefaultVisibility = 0, HiddenVisibility = 1, ProtectedVisibility = 2, } // This enum omits the obsolete (and no-op) linkage types DLLImportLinkage, // DLLExportLinkage, GhostLinkage and LinkOnceODRAutoHideLinkage. 
// LinkerPrivateLinkage and LinkerPrivateWeakLinkage are not included either; // they've been removed in upstream LLVM commit r203866. pub enum Linkage { ExternalLinkage = 0, AvailableExternallyLinkage = 1, LinkOnceAnyLinkage = 2, LinkOnceODRLinkage = 3, WeakAnyLinkage = 5, WeakODRLinkage = 6, AppendingLinkage = 7, InternalLinkage = 8, PrivateLinkage = 9, ExternalWeakLinkage = 12, CommonLinkage = 14, } #[deriving(Clone)] pub enum Attribute { ZExtAttribute = 1 << 0, SExtAttribute = 1 << 1, NoReturnAttribute = 1 << 2, InRegAttribute = 1 << 3, StructRetAttribute = 1 << 4, NoUnwindAttribute = 1 << 5, NoAliasAttribute = 1 << 6, ByValAttribute = 1 << 7, NestAttribute = 1 << 8, ReadNoneAttribute = 1 << 9, ReadOnlyAttribute = 1 << 10, NoInlineAttribute = 1 << 11, AlwaysInlineAttribute = 1 << 12, OptimizeForSizeAttribute = 1 << 13, StackProtectAttribute = 1 << 14, StackProtectReqAttribute = 1 << 15, AlignmentAttribute = 31 << 16, NoCaptureAttribute = 1 << 21, NoRedZoneAttribute = 1 << 22, NoImplicitFloatAttribute = 1 << 23, NakedAttribute = 1 << 24, InlineHintAttribute = 1 << 25, StackAttribute = 7 << 26, ReturnsTwiceAttribute = 1 << 29, UWTableAttribute = 1 << 30, NonLazyBindAttribute = 1 << 31, } #[repr(u64)] pub enum OtherAttribute { // The following are not really exposed in // the LLVM c api so instead to add these // we call a wrapper function in RustWrapper // that uses the C++ api. 
SanitizeAddressAttribute = 1 << 32, MinSizeAttribute = 1 << 33, NoDuplicateAttribute = 1 << 34, StackProtectStrongAttribute = 1 << 35, SanitizeThreadAttribute = 1 << 36, SanitizeMemoryAttribute = 1 << 37, NoBuiltinAttribute = 1 << 38, ReturnedAttribute = 1 << 39, ColdAttribute = 1 << 40, BuiltinAttribute = 1 << 41, OptimizeNoneAttribute = 1 << 42, InAllocaAttribute = 1 << 43, NonNullAttribute = 1 << 44, } pub enum SpecialAttribute { DereferenceableAttribute(u64) } #[repr(C)] pub enum AttributeSet { ReturnIndex = 0, FunctionIndex = !0 } trait AttrHelper { fn apply_llfn(&self, idx: c_uint, llfn: ValueRef); fn apply_callsite(&self, idx: c_uint, callsite: ValueRef); } impl AttrHelper for Attribute { fn apply_llfn(&self, idx: c_uint, llfn: ValueRef) { unsafe { LLVMAddFunctionAttribute(llfn, idx, *self as uint64_t); } } fn apply_callsite(&self, idx: c_uint, callsite: ValueRef) { unsafe { LLVMAddCallSiteAttribute(callsite, idx, *self as uint64_t); } } } impl AttrHelper for OtherAttribute { fn apply_llfn(&self, idx: c_uint, llfn: ValueRef) { unsafe { LLVMAddFunctionAttribute(llfn, idx, *self as uint64_t); } } fn apply_callsite(&self, idx: c_uint, callsite: ValueRef) { unsafe { LLVMAddCallSiteAttribute(callsite, idx, *self as uint64_t); } } } impl AttrHelper for SpecialAttribute { fn apply_llfn(&self, idx: c_uint, llfn: ValueRef) { match *self { DereferenceableAttribute(bytes) => unsafe { LLVMAddDereferenceableAttr(llfn, idx, bytes as uint64_t); } } } fn apply_callsite(&self, idx: c_uint, callsite: ValueRef) { match *self { DereferenceableAttribute(bytes) => unsafe { LLVMAddDereferenceableCallSiteAttr(callsite, idx, bytes as uint64_t); } } } } pub struct AttrBuilder { attrs: Vec<(uint, Box<AttrHelper>)> } impl AttrBuilder { pub fn new() -> AttrBuilder { AttrBuilder { attrs: Vec::new() } } pub fn arg<'a, T: AttrHelper + 'static>(&'a mut self, idx: uint, a: T) -> &'a mut AttrBuilder { self.attrs.push((idx, box a as Box<AttrHelper>)); self } pub fn ret<'a, T: AttrHelper + 
'static>(&'a mut self, a: T) -> &'a mut AttrBuilder { self.attrs.push((ReturnIndex as uint, box a as Box<AttrHelper>)); self } pub fn apply_llfn(&self, llfn: ValueRef) { for &(idx, ref attr) in self.attrs.iter() { attr.apply_llfn(idx as c_uint, llfn); } } pub fn apply_callsite(&self, callsite: ValueRef) { for &(idx, ref attr) in self.attrs.iter() { attr.apply_callsite(idx as c_uint, callsite); } } } // enum for the LLVM IntPredicate type pub enum IntPredicate { IntEQ = 32, IntNE = 33, IntUGT = 34, IntUGE = 35, IntULT = 36, IntULE = 37, IntSGT = 38, IntSGE = 39, IntSLT = 40, IntSLE = 41, } // enum for the LLVM RealPredicate type pub enum RealPredicate { RealPredicateFalse = 0, RealOEQ = 1, RealOGT = 2, RealOGE = 3, RealOLT = 4, RealOLE = 5, RealONE = 6, RealORD = 7, RealUNO = 8, RealUEQ = 9, RealUGT = 10, RealUGE = 11, RealULT = 12, RealULE = 13, RealUNE = 14, RealPredicateTrue = 15, } // The LLVM TypeKind type - must stay in sync with the def of // LLVMTypeKind in llvm/include/llvm-c/Core.h #[deriving(PartialEq)] #[repr(C)] pub enum TypeKind { Void = 0, Half = 1, Float = 2, Double = 3, X86_FP80 = 4, FP128 = 5, PPC_FP128 = 6, Label = 7, Integer = 8, Function = 9, Struct = 10, Array = 11, Pointer = 12, Vector = 13, Metadata = 14, X86_MMX = 15, } #[repr(C)] pub enum AtomicBinOp { Xchg = 0, Add = 1, Sub = 2, And = 3, Nand = 4, Or = 5, Xor = 6, Max = 7, Min = 8, UMax = 9, UMin = 10, } #[repr(C)] pub enum AtomicOrdering { NotAtomic = 0, Unordered = 1, Monotonic = 2, // Consume = 3, // Not specified yet. 
Acquire = 4, Release = 5, AcquireRelease = 6, SequentiallyConsistent = 7 } // Consts for the LLVMCodeGenFileType type (in include/llvm/c/TargetMachine.h) #[repr(C)] pub enum FileType { AssemblyFile = 0, ObjectFile = 1 } pub enum Metadata { MD_dbg = 0, MD_tbaa = 1, MD_prof = 2, MD_fpmath = 3, MD_range = 4, MD_tbaa_struct = 5 } // Inline Asm Dialect pub enum AsmDialect { AD_ATT = 0, AD_Intel = 1 } #[deriving(PartialEq)] #[repr(C)] pub enum CodeGenOptLevel { CodeGenLevelNone = 0, CodeGenLevelLess = 1, CodeGenLevelDefault = 2, CodeGenLevelAggressive = 3, } #[repr(C)] pub enum RelocMode { RelocDefault = 0, RelocStatic = 1, RelocPIC = 2, RelocDynamicNoPic = 3, } #[repr(C)] pub enum CodeGenModel { CodeModelDefault = 0, CodeModelJITDefault = 1, CodeModelSmall = 2, CodeModelKernel = 3, CodeModelMedium = 4, CodeModelLarge = 5, } // Opaque pointer types pub enum Module_opaque {} pub type ModuleRef = *mut Module_opaque; pub enum Context_opaque {} pub type ContextRef = *mut Context_opaque; pub enum Type_opaque {} pub type TypeRef = *mut Type_opaque; pub enum Value_opaque {} pub type ValueRef = *mut Value_opaque; pub enum BasicBlock_opaque {} pub type BasicBlockRef = *mut BasicBlock_opaque; pub enum Builder_opaque {} pub type BuilderRef = *mut Builder_opaque; pub enum ExecutionEngine_opaque {} pub type ExecutionEngineRef = *mut ExecutionEngine_opaque; pub enum MemoryBuffer_opaque {} pub type MemoryBufferRef = *mut MemoryBuffer_opaque; pub enum PassManager_opaque {} pub type PassManagerRef = *mut PassManager_opaque; pub enum PassManagerBuilder_opaque {} pub type PassManagerBuilderRef = *mut PassManagerBuilder_opaque; pub enum Use_opaque {} pub type UseRef = *mut Use_opaque; pub enum TargetData_opaque {} pub type TargetDataRef = *mut TargetData_opaque; pub enum ObjectFile_opaque {} pub type ObjectFileRef = *mut ObjectFile_opaque; pub enum SectionIterator_opaque {} pub type SectionIteratorRef = *mut SectionIterator_opaque; pub enum Pass_opaque {} pub type PassRef = *mut 
Pass_opaque; pub enum TargetMachine_opaque {} pub type TargetMachineRef = *mut TargetMachine_opaque; pub enum Archive_opaque {} pub type ArchiveRef = *mut Archive_opaque; pub mod debuginfo { use super::{ValueRef}; pub enum DIBuilder_opaque {} pub type DIBuilderRef = *mut DIBuilder_opaque; pub type DIDescriptor = ValueRef; pub type DIScope = DIDescriptor; pub type DILocation = DIDescriptor; pub type DIFile = DIScope; pub type DILexicalBlock = DIScope; pub type DISubprogram = DIScope; pub type DIType = DIDescriptor; pub type DIBasicType = DIType; pub type DIDerivedType = DIType; pub type DICompositeType = DIDerivedType; pub type DIVariable = DIDescriptor; pub type DIGlobalVariable = DIDescriptor; pub type DIArray = DIDescriptor; pub type DISubrange = DIDescriptor; pub enum DIDescriptorFlags { FlagPrivate = 1 << 0, FlagProtected = 1 << 1, FlagFwdDecl = 1 << 2, FlagAppleBlock = 1 << 3, FlagBlockByrefStruct = 1 << 4, FlagVirtual = 1 << 5, FlagArtificial = 1 << 6, FlagExplicit = 1 << 7, FlagPrototyped = 1 << 8, FlagObjcClassComplete = 1 << 9, FlagObjectPointer = 1 << 10, FlagVector = 1 << 11, FlagStaticMember = 1 << 12 } } // Link to our native llvm bindings (things that we need to use the C++ api // for) and because llvm is written in C++ we need to link against libstdc++ // // You'll probably notice that there is an omission of all LLVM libraries // from this location. This is because the set of LLVM libraries that we // link to is mostly defined by LLVM, and the `llvm-config` tool is used to // figure out the exact set of libraries. To do this, the build system // generates an llvmdeps.rs file next to this one which will be // automatically updated whenever LLVM is updated to include an up-to-date // set of the libraries we need to link to LLVM for. #[link(name = "rustllvm", kind = "static")] extern { /* Create and destroy contexts. 
*/ pub fn LLVMContextCreate() -> ContextRef; pub fn LLVMContextDispose(C: ContextRef); pub fn LLVMGetMDKindIDInContext(C: ContextRef, Name: *const c_char, SLen: c_uint) -> c_uint; /* Create and destroy modules. */ pub fn LLVMModuleCreateWithNameInContext(ModuleID: *const c_char, C: ContextRef) -> ModuleRef; pub fn LLVMGetModuleContext(M: ModuleRef) -> ContextRef; pub fn LLVMDisposeModule(M: ModuleRef); /** Data layout. See Module::getDataLayout. */ pub fn LLVMGetDataLayout(M: ModuleRef) -> *const c_char; pub fn LLVMSetDataLayout(M: ModuleRef, Triple: *const c_char); /** Target triple. See Module::getTargetTriple. */ pub fn LLVMGetTarget(M: ModuleRef) -> *const c_char; pub fn LLVMSetTarget(M: ModuleRef, Triple: *const c_char); /** See Module::dump. */ pub fn LLVMDumpModule(M: ModuleRef); /** See Module::setModuleInlineAsm. */ pub fn LLVMSetModuleInlineAsm(M: ModuleRef, Asm: *const c_char); /** See llvm::LLVMTypeKind::getTypeID. */ pub fn LLVMGetTypeKind(Ty: TypeRef) -> TypeKind; /** See llvm::LLVMType::getContext. 
*/ pub fn LLVMGetTypeContext(Ty: TypeRef) -> ContextRef; /* Operations on integer types */ pub fn LLVMInt1TypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMInt8TypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMInt16TypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMInt32TypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMInt64TypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMIntTypeInContext(C: ContextRef, NumBits: c_uint) -> TypeRef; pub fn LLVMGetIntTypeWidth(IntegerTy: TypeRef) -> c_uint; /* Operations on real types */ pub fn LLVMFloatTypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMDoubleTypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMX86FP80TypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMFP128TypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMPPCFP128TypeInContext(C: ContextRef) -> TypeRef; /* Operations on function types */ pub fn LLVMFunctionType(ReturnType: TypeRef, ParamTypes: *const TypeRef, ParamCount: c_uint, IsVarArg: Bool) -> TypeRef; pub fn LLVMIsFunctionVarArg(FunctionTy: TypeRef) -> Bool; pub fn LLVMGetReturnType(FunctionTy: TypeRef) -> TypeRef; pub fn LLVMCountParamTypes(FunctionTy: TypeRef) -> c_uint; pub fn LLVMGetParamTypes(FunctionTy: TypeRef, Dest: *const TypeRef); /* Operations on struct types */ pub fn LLVMStructTypeInContext(C: ContextRef, ElementTypes: *const TypeRef, ElementCount: c_uint, Packed: Bool) -> TypeRef; pub fn LLVMCountStructElementTypes(StructTy: TypeRef) -> c_uint; pub fn LLVMGetStructElementTypes(StructTy: TypeRef, Dest: *mut TypeRef); pub fn LLVMIsPackedStruct(StructTy: TypeRef) -> Bool; /* Operations on array, pointer, and vector types (sequence types) */ pub fn LLVMRustArrayType(ElementType: TypeRef, ElementCount: u64) -> TypeRef; pub fn LLVMPointerType(ElementType: TypeRef, AddressSpace: c_uint) -> TypeRef; pub fn LLVMVectorType(ElementType: TypeRef, ElementCount: c_uint) -> TypeRef; pub fn LLVMGetElementType(Ty: TypeRef) -> TypeRef; pub fn LLVMGetArrayLength(ArrayTy: TypeRef) -> c_uint; pub fn 
LLVMGetPointerAddressSpace(PointerTy: TypeRef) -> c_uint; pub fn LLVMGetPointerToGlobal(EE: ExecutionEngineRef, V: ValueRef) -> *const (); pub fn LLVMGetVectorSize(VectorTy: TypeRef) -> c_uint; /* Operations on other types */ pub fn LLVMVoidTypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMLabelTypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMMetadataTypeInContext(C: ContextRef) -> TypeRef; /* Operations on all values */ pub fn LLVMTypeOf(Val: ValueRef) -> TypeRef; pub fn LLVMGetValueName(Val: ValueRef) -> *const c_char; pub fn LLVMSetValueName(Val: ValueRef, Name: *const c_char); pub fn LLVMDumpValue(Val: ValueRef); pub fn LLVMReplaceAllUsesWith(OldVal: ValueRef, NewVal: ValueRef); pub fn LLVMHasMetadata(Val: ValueRef) -> c_int; pub fn LLVMGetMetadata(Val: ValueRef, KindID: c_uint) -> ValueRef; pub fn LLVMSetMetadata(Val: ValueRef, KindID: c_uint, Node: ValueRef); /* Operations on Uses */ pub fn LLVMGetFirstUse(Val: ValueRef) -> UseRef; pub fn LLVMGetNextUse(U: UseRef) -> UseRef; pub fn LLVMGetUser(U: UseRef) -> ValueRef; pub fn LLVMGetUsedValue(U: UseRef) -> ValueRef; /* Operations on Users */ pub fn LLVMGetNumOperands(Val: ValueRef) -> c_int; pub fn LLVMGetOperand(Val: ValueRef, Index: c_uint) -> ValueRef; pub fn LLVMSetOperand(Val: ValueRef, Index: c_uint, Op: ValueRef); /* Operations on constants of any type */ pub fn LLVMConstNull(Ty: TypeRef) -> ValueRef; /* all zeroes */ pub fn LLVMConstAllOnes(Ty: TypeRef) -> ValueRef; pub fn LLVMConstICmp(Pred: c_ushort, V1: ValueRef, V2: ValueRef) -> ValueRef; pub fn LLVMConstFCmp(Pred: c_ushort, V1: ValueRef, V2: ValueRef) -> ValueRef; /* only for int/vector */ pub fn LLVMGetUndef(Ty: TypeRef) -> ValueRef; pub fn LLVMIsConstant(Val: ValueRef) -> Bool; pub fn LLVMIsNull(Val: ValueRef) -> Bool; pub fn LLVMIsUndef(Val: ValueRef) -> Bool; pub fn LLVMConstPointerNull(Ty: TypeRef) -> ValueRef; /* Operations on metadata */ pub fn LLVMMDStringInContext(C: ContextRef, Str: *const c_char, SLen: c_uint) -> ValueRef; pub fn 
LLVMMDNodeInContext(C: ContextRef, Vals: *const ValueRef, Count: c_uint) -> ValueRef; pub fn LLVMAddNamedMetadataOperand(M: ModuleRef, Str: *const c_char, Val: ValueRef); /* Operations on scalar constants */ pub fn LLVMConstInt(IntTy: TypeRef, N: c_ulonglong, SignExtend: Bool) -> ValueRef; pub fn LLVMConstIntOfString(IntTy: TypeRef, Text: *const c_char, Radix: u8) -> ValueRef; pub fn LLVMConstIntOfStringAndSize(IntTy: TypeRef, Text: *const c_char, SLen: c_uint, Radix: u8) -> ValueRef; pub fn LLVMConstReal(RealTy: TypeRef, N: f64) -> ValueRef; pub fn LLVMConstRealOfString(RealTy: TypeRef, Text: *const c_char) -> ValueRef; pub fn LLVMConstRealOfStringAndSize(RealTy: TypeRef, Text: *const c_char, SLen: c_uint) -> ValueRef; pub fn LLVMConstIntGetZExtValue(ConstantVal: ValueRef) -> c_ulonglong; pub fn LLVMConstIntGetSExtValue(ConstantVal: ValueRef) -> c_longlong; /* Operations on composite constants */ pub fn LLVMConstStringInContext(C: ContextRef, Str: *const c_char, Length: c_uint, DontNullTerminate: Bool) -> ValueRef; pub fn LLVMConstStructInContext(C: ContextRef, ConstantVals: *const ValueRef, Count: c_uint, Packed: Bool) -> ValueRef; pub fn LLVMConstArray(ElementTy: TypeRef, ConstantVals: *const ValueRef, Length: c_uint) -> ValueRef; pub fn LLVMConstVector(ScalarConstantVals: *const ValueRef, Size: c_uint) -> ValueRef; /* Constant expressions */ pub fn LLVMAlignOf(Ty: TypeRef) -> ValueRef; pub fn LLVMSizeOf(Ty: TypeRef) -> ValueRef; pub fn LLVMConstNeg(ConstantVal: ValueRef) -> ValueRef; pub fn LLVMConstNSWNeg(ConstantVal: ValueRef) -> ValueRef; pub fn LLVMConstNUWNeg(ConstantVal: ValueRef) -> ValueRef; pub fn LLVMConstFNeg(ConstantVal: ValueRef) -> ValueRef; pub fn LLVMConstNot(ConstantVal: ValueRef) -> ValueRef; pub fn LLVMConstAdd(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstNSWAdd(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstNUWAdd(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn 
LLVMConstFAdd(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstSub(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstNSWSub(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstNUWSub(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstFSub(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstMul(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstNSWMul(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstNUWMul(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstFMul(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstUDiv(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstSDiv(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstExactSDiv(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstFDiv(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstURem(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstSRem(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstFRem(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstAnd(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstOr(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstXor(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstShl(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstLShr(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstAShr(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstGEP(ConstantVal: ValueRef, ConstantIndices: *const ValueRef, NumIndices: c_uint) -> ValueRef; pub fn LLVMConstInBoundsGEP(ConstantVal: ValueRef, ConstantIndices: *const ValueRef, NumIndices: c_uint) -> ValueRef; pub fn 
LLVMConstTrunc(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstSExt(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstZExt(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstFPTrunc(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstFPExt(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstUIToFP(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstSIToFP(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstFPToUI(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstFPToSI(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstPtrToInt(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstIntToPtr(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstBitCast(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstZExtOrBitCast(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstSExtOrBitCast(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstTruncOrBitCast(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstPointerCast(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstIntCast(ConstantVal: ValueRef, ToType: TypeRef, isSigned: Bool) -> ValueRef; pub fn LLVMConstFPCast(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstSelect(ConstantCondition: ValueRef, ConstantIfTrue: ValueRef, ConstantIfFalse: ValueRef) -> ValueRef; pub fn LLVMConstExtractElement(VectorConstant: ValueRef, IndexConstant: ValueRef) -> ValueRef; pub fn LLVMConstInsertElement(VectorConstant: ValueRef, ElementValueConstant: ValueRef, IndexConstant: ValueRef) -> ValueRef; pub fn LLVMConstShuffleVector(VectorAConstant: ValueRef, VectorBConstant: ValueRef, MaskConstant: ValueRef) -> ValueRef; pub fn LLVMConstExtractValue(AggConstant: ValueRef, IdxList: *const c_uint, NumIdx: c_uint) -> ValueRef; pub fn 
LLVMConstInsertValue(AggConstant: ValueRef, ElementValueConstant: ValueRef, IdxList: *const c_uint, NumIdx: c_uint) -> ValueRef; pub fn LLVMConstInlineAsm(Ty: TypeRef, AsmString: *const c_char, Constraints: *const c_char, HasSideEffects: Bool, IsAlignStack: Bool) -> ValueRef; pub fn LLVMBlockAddress(F: ValueRef, BB: BasicBlockRef) -> ValueRef; /* Operations on global variables, functions, and aliases (globals) */ pub fn LLVMGetGlobalParent(Global: ValueRef) -> ModuleRef; pub fn LLVMIsDeclaration(Global: ValueRef) -> Bool; pub fn LLVMGetLinkage(Global: ValueRef) -> c_uint; pub fn LLVMSetLinkage(Global: ValueRef, Link: c_uint); pub fn LLVMGetSection(Global: ValueRef) -> *const c_char; pub fn LLVMSetSection(Global: ValueRef, Section: *const c_char); pub fn LLVMGetVisibility(Global: ValueRef) -> c_uint; pub fn LLVMSetVisibility(Global: ValueRef, Viz: c_uint); pub fn LLVMGetAlignment(Global: ValueRef) -> c_uint; pub fn LLVMSetAlignment(Global: ValueRef, Bytes: c_uint); /* Operations on global variables */ pub fn LLVMAddGlobal(M: ModuleRef, Ty: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMAddGlobalInAddressSpace(M: ModuleRef, Ty: TypeRef, Name: *const c_char, AddressSpace: c_uint) -> ValueRef; pub fn LLVMGetNamedGlobal(M: ModuleRef, Name: *const c_char) -> ValueRef; pub fn LLVMGetFirstGlobal(M: ModuleRef) -> ValueRef; pub fn LLVMGetLastGlobal(M: ModuleRef) -> ValueRef; pub fn LLVMGetNextGlobal(GlobalVar: ValueRef) -> ValueRef; pub fn LLVMGetPreviousGlobal(GlobalVar: ValueRef) -> ValueRef; pub fn LLVMDeleteGlobal(GlobalVar: ValueRef); pub fn LLVMGetInitializer(GlobalVar: ValueRef) -> ValueRef; pub fn LLVMSetInitializer(GlobalVar: ValueRef, ConstantVal: ValueRef); pub fn LLVMIsThreadLocal(GlobalVar: ValueRef) -> Bool; pub fn LLVMSetThreadLocal(GlobalVar: ValueRef, IsThreadLocal: Bool); pub fn LLVMIsGlobalConstant(GlobalVar: ValueRef) -> Bool; pub fn LLVMSetGlobalConstant(GlobalVar: ValueRef, IsConstant: Bool); /* Operations on aliases */ pub fn LLVMAddAlias(M: 
ModuleRef, Ty: TypeRef, Aliasee: ValueRef, Name: *const c_char) -> ValueRef; /* Operations on functions */ pub fn LLVMAddFunction(M: ModuleRef, Name: *const c_char, FunctionTy: TypeRef) -> ValueRef; pub fn LLVMGetNamedFunction(M: ModuleRef, Name: *const c_char) -> ValueRef; pub fn LLVMGetFirstFunction(M: ModuleRef) -> ValueRef; pub fn LLVMGetLastFunction(M: ModuleRef) -> ValueRef; pub fn LLVMGetNextFunction(Fn: ValueRef) -> ValueRef; pub fn LLVMGetPreviousFunction(Fn: ValueRef) -> ValueRef; pub fn LLVMDeleteFunction(Fn: ValueRef); pub fn LLVMGetOrInsertFunction(M: ModuleRef, Name: *const c_char, FunctionTy: TypeRef) -> ValueRef; pub fn LLVMGetIntrinsicID(Fn: ValueRef) -> c_uint; pub fn LLVMGetFunctionCallConv(Fn: ValueRef) -> c_uint; pub fn LLVMSetFunctionCallConv(Fn: ValueRef, CC: c_uint); pub fn LLVMGetGC(Fn: ValueRef) -> *const c_char; pub fn LLVMSetGC(Fn: ValueRef, Name: *const c_char); pub fn LLVMAddDereferenceableAttr(Fn: ValueRef, index: c_uint, bytes: uint64_t); pub fn LLVMAddFunctionAttribute(Fn: ValueRef, index: c_uint, PA: uint64_t); pub fn LLVMAddFunctionAttrString(Fn: ValueRef, index: c_uint, Name: *const c_char); pub fn LLVMRemoveFunctionAttrString(Fn: ValueRef, index: c_uint, Name: *const c_char); pub fn LLVMGetFunctionAttr(Fn: ValueRef) -> c_ulonglong; /* Operations on parameters */ pub fn LLVMCountParams(Fn: ValueRef) -> c_uint; pub fn LLVMGetParams(Fn: ValueRef, Params: *const ValueRef); pub fn LLVMGetParam(Fn: ValueRef, Index: c_uint) -> ValueRef; pub fn LLVMGetParamParent(Inst: ValueRef) -> ValueRef; pub fn LLVMGetFirstParam(Fn: ValueRef) -> ValueRef; pub fn LLVMGetLastParam(Fn: ValueRef) -> ValueRef; pub fn LLVMGetNextParam(Arg: ValueRef) -> ValueRef; pub fn LLVMGetPreviousParam(Arg: ValueRef) -> ValueRef; pub fn LLVMAddAttribute(Arg: ValueRef, PA: c_uint); pub fn LLVMRemoveAttribute(Arg: ValueRef, PA: c_uint); pub fn LLVMGetAttribute(Arg: ValueRef) -> c_uint; pub fn LLVMSetParamAlignment(Arg: ValueRef, align: c_uint); /* Operations on basic 
blocks */ pub fn LLVMBasicBlockAsValue(BB: BasicBlockRef) -> ValueRef; pub fn LLVMValueIsBasicBlock(Val: ValueRef) -> Bool; pub fn LLVMValueAsBasicBlock(Val: ValueRef) -> BasicBlockRef; pub fn LLVMGetBasicBlockParent(BB: BasicBlockRef) -> ValueRef; pub fn LLVMCountBasicBlocks(Fn: ValueRef) -> c_uint; pub fn LLVMGetBasicBlocks(Fn: ValueRef, BasicBlocks: *const ValueRef); pub fn LLVMGetFirstBasicBlock(Fn: ValueRef) -> BasicBlockRef; pub fn LLVMGetLastBasicBlock(Fn: ValueRef) -> BasicBlockRef; pub fn LLVMGetNextBasicBlock(BB: BasicBlockRef) -> BasicBlockRef; pub fn LLVMGetPreviousBasicBlock(BB: BasicBlockRef) -> BasicBlockRef; pub fn LLVMGetEntryBasicBlock(Fn: ValueRef) -> BasicBlockRef; pub fn LLVMAppendBasicBlockInContext(C: ContextRef, Fn: ValueRef, Name: *const c_char) -> BasicBlockRef; pub fn LLVMInsertBasicBlockInContext(C: ContextRef, BB: BasicBlockRef, Name: *const c_char) -> BasicBlockRef; pub fn LLVMDeleteBasicBlock(BB: BasicBlockRef); pub fn LLVMMoveBasicBlockAfter(BB: BasicBlockRef, MoveAfter: BasicBlockRef); pub fn LLVMMoveBasicBlockBefore(BB: BasicBlockRef, MoveBefore: BasicBlockRef); /* Operations on instructions */ pub fn LLVMGetInstructionParent(Inst: ValueRef) -> BasicBlockRef; pub fn LLVMGetFirstInstruction(BB: BasicBlockRef) -> ValueRef; pub fn LLVMGetLastInstruction(BB: BasicBlockRef) -> ValueRef; pub fn LLVMGetNextInstruction(Inst: ValueRef) -> ValueRef; pub fn LLVMGetPreviousInstruction(Inst: ValueRef) -> ValueRef; pub fn LLVMInstructionEraseFromParent(Inst: ValueRef); /* Operations on call sites */ pub fn LLVMSetInstructionCallConv(Instr: ValueRef, CC: c_uint); pub fn LLVMGetInstructionCallConv(Instr: ValueRef) -> c_uint; pub fn LLVMAddInstrAttribute(Instr: ValueRef, index: c_uint, IA: c_uint); pub fn LLVMRemoveInstrAttribute(Instr: ValueRef, index: c_uint, IA: c_uint); pub fn LLVMSetInstrParamAlignment(Instr: ValueRef, index: c_uint, align: c_uint); pub fn LLVMAddCallSiteAttribute(Instr: ValueRef, index: c_uint, Val: uint64_t); pub fn 
LLVMAddDereferenceableCallSiteAttr(Instr: ValueRef, index: c_uint, bytes: uint64_t); /* Operations on call instructions (only) */ pub fn LLVMIsTailCall(CallInst: ValueRef) -> Bool; pub fn LLVMSetTailCall(CallInst: ValueRef, IsTailCall: Bool); /* Operations on load/store instructions (only) */ pub fn LLVMGetVolatile(MemoryAccessInst: ValueRef) -> Bool; pub fn LLVMSetVolatile(MemoryAccessInst: ValueRef, volatile: Bool); /* Operations on phi nodes */ pub fn LLVMAddIncoming(PhiNode: ValueRef, IncomingValues: *const ValueRef, IncomingBlocks: *const BasicBlockRef, Count: c_uint); pub fn LLVMCountIncoming(PhiNode: ValueRef) -> c_uint; pub fn LLVMGetIncomingValue(PhiNode: ValueRef, Index: c_uint) -> ValueRef; pub fn LLVMGetIncomingBlock(PhiNode: ValueRef, Index: c_uint) -> BasicBlockRef; /* Instruction builders */ pub fn LLVMCreateBuilderInContext(C: ContextRef) -> BuilderRef; pub fn LLVMPositionBuilder(Builder: BuilderRef, Block: BasicBlockRef, Instr: ValueRef); pub fn LLVMPositionBuilderBefore(Builder: BuilderRef, Instr: ValueRef); pub fn LLVMPositionBuilderAtEnd(Builder: BuilderRef, Block: BasicBlockRef); pub fn LLVMGetInsertBlock(Builder: BuilderRef) -> BasicBlockRef; pub fn LLVMClearInsertionPosition(Builder: BuilderRef); pub fn LLVMInsertIntoBuilder(Builder: BuilderRef, Instr: ValueRef); pub fn LLVMInsertIntoBuilderWithName(Builder: BuilderRef, Instr: ValueRef, Name: *const c_char); pub fn LLVMDisposeBuilder(Builder: BuilderRef); pub fn LLVMDisposeExecutionEngine(EE: ExecutionEngineRef); /* Metadata */ pub fn LLVMSetCurrentDebugLocation(Builder: BuilderRef, L: ValueRef); pub fn LLVMGetCurrentDebugLocation(Builder: BuilderRef) -> ValueRef; pub fn LLVMSetInstDebugLocation(Builder: BuilderRef, Inst: ValueRef); /* Terminators */ pub fn LLVMBuildRetVoid(B: BuilderRef) -> ValueRef; pub fn LLVMBuildRet(B: BuilderRef, V: ValueRef) -> ValueRef; pub fn LLVMBuildAggregateRet(B: BuilderRef, RetVals: *const ValueRef, N: c_uint) -> ValueRef; pub fn LLVMBuildBr(B: BuilderRef, Dest: 
BasicBlockRef) -> ValueRef; pub fn LLVMBuildCondBr(B: BuilderRef, If: ValueRef, Then: BasicBlockRef, Else: BasicBlockRef) -> ValueRef; pub fn LLVMBuildSwitch(B: BuilderRef, V: ValueRef, Else: BasicBlockRef, NumCases: c_uint) -> ValueRef; pub fn LLVMBuildIndirectBr(B: BuilderRef, Addr: ValueRef, NumDests: c_uint) -> ValueRef; pub fn LLVMBuildInvoke(B: BuilderRef, Fn: ValueRef, Args: *const ValueRef, NumArgs: c_uint, Then: BasicBlockRef, Catch: BasicBlockRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildLandingPad(B: BuilderRef, Ty: TypeRef, PersFn: ValueRef, NumClauses: c_uint, Name: *const c_char) -> ValueRef; pub fn LLVMBuildResume(B: BuilderRef, Exn: ValueRef) -> ValueRef; pub fn LLVMBuildUnreachable(B: BuilderRef) -> ValueRef; /* Add a case to the switch instruction */ pub fn LLVMAddCase(Switch: ValueRef, OnVal: ValueRef, Dest: BasicBlockRef); /* Add a destination to the indirectbr instruction */ pub fn LLVMAddDestination(IndirectBr: ValueRef, Dest: BasicBlockRef); /* Add a clause to the landing pad instruction */ pub fn LLVMAddClause(LandingPad: ValueRef, ClauseVal: ValueRef); /* Set the cleanup on a landing pad instruction */ pub fn LLVMSetCleanup(LandingPad: ValueRef, Val: Bool); /* Arithmetic */ pub fn LLVMBuildAdd(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildNSWAdd(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildNUWAdd(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildFAdd(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildSub(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildNSWSub(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildNUWSub(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildFSub(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, 
Name: *const c_char) -> ValueRef; pub fn LLVMBuildMul(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildNSWMul(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildNUWMul(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildFMul(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildUDiv(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildSDiv(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildExactSDiv(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildFDiv(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildURem(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildSRem(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildFRem(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildShl(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildLShr(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildAShr(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildAnd(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildOr(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildXor(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildBinOp(B: BuilderRef, Op: Opcode, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildNeg(B: BuilderRef, V: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildNSWNeg(B: BuilderRef, V: ValueRef, Name: *const 
c_char) -> ValueRef; pub fn LLVMBuildNUWNeg(B: BuilderRef, V: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildFNeg(B: BuilderRef, V: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildNot(B: BuilderRef, V: ValueRef, Name: *const c_char) -> ValueRef; /* Memory */ pub fn LLVMBuildMalloc(B: BuilderRef, Ty: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildArrayMalloc(B: BuilderRef, Ty: TypeRef, Val: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildAlloca(B: BuilderRef, Ty: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildArrayAlloca(B: BuilderRef, Ty: TypeRef, Val: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildFree(B: BuilderRef, PointerVal: ValueRef) -> ValueRef; pub fn LLVMBuildLoad(B: BuilderRef, PointerVal: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildStore(B: BuilderRef, Val: ValueRef, Ptr: ValueRef) -> ValueRef; pub fn LLVMBuildGEP(B: BuilderRef, Pointer: ValueRef, Indices: *const ValueRef, NumIndices: c_uint, Name: *const c_char) -> ValueRef; pub fn LLVMBuildInBoundsGEP(B: BuilderRef, Pointer: ValueRef, Indices: *const ValueRef, NumIndices: c_uint, Name: *const c_char) -> ValueRef; pub fn LLVMBuildStructGEP(B: BuilderRef, Pointer: ValueRef, Idx: c_uint, Name: *const c_char) -> ValueRef; pub fn LLVMBuildGlobalString(B: BuilderRef, Str: *const c_char, Name: *const c_char) -> ValueRef; pub fn LLVMBuildGlobalStringPtr(B: BuilderRef, Str: *const c_char, Name: *const c_char) -> ValueRef; /* Casts */ pub fn LLVMBuildTrunc(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildZExt(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildSExt(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildFPToUI(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildFPToSI(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const 
c_char) -> ValueRef; pub fn LLVMBuildUIToFP(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildSIToFP(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildFPTrunc(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildFPExt(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildPtrToInt(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildIntToPtr(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildBitCast(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildZExtOrBitCast(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildSExtOrBitCast(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildTruncOrBitCast(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildCast(B: BuilderRef, Op: Opcode, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildPointerCast(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildIntCast(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildFPCast(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, Name: *const c_char) -> ValueRef; /* Comparisons */ pub fn LLVMBuildICmp(B: BuilderRef, Op: c_uint, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildFCmp(B: BuilderRef, Op: c_uint, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; /* Miscellaneous instructions */ pub fn LLVMBuildPhi(B: BuilderRef, Ty: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildCall(B: BuilderRef, Fn: ValueRef, Args: *const ValueRef, NumArgs: c_uint, Name: *const c_char) -> 
ValueRef; pub fn LLVMBuildSelect(B: BuilderRef, If: ValueRef, Then: ValueRef, Else: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildVAArg(B: BuilderRef, list: ValueRef, Ty: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildExtractElement(B: BuilderRef, VecVal: ValueRef, Index: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildInsertElement(B: BuilderRef, VecVal: ValueRef, EltVal: ValueRef, Index: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildShuffleVector(B: BuilderRef, V1: ValueRef, V2: ValueRef, Mask: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildExtractValue(B: BuilderRef, AggVal: ValueRef, Index: c_uint, Name: *const c_char) -> ValueRef; pub fn LLVMBuildInsertValue(B: BuilderRef, AggVal: ValueRef, EltVal: ValueRef, Index: c_uint, Name: *const c_char) -> ValueRef; pub fn LLVMBuildIsNull(B: BuilderRef, Val: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildIsNotNull(B: BuilderRef, Val: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildPtrDiff(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; /* Atomic Operations */ pub fn LLVMBuildAtomicLoad(B: BuilderRef, PointerVal: ValueRef, Name: *const c_char, Order: AtomicOrdering, Alignment: c_uint) -> ValueRef; pub fn LLVMBuildAtomicStore(B: BuilderRef, Val: ValueRef, Ptr: ValueRef, Order: AtomicOrdering, Alignment: c_uint) -> ValueRef; pub fn LLVMBuildAtomicCmpXchg(B: BuilderRef, LHS: ValueRef, CMP: ValueRef, RHS: ValueRef, Order: AtomicOrdering, FailureOrder: AtomicOrdering) -> ValueRef; pub fn LLVMBuildAtomicRMW(B: BuilderRef, Op: AtomicBinOp, LHS: ValueRef, RHS: ValueRef, Order: AtomicOrdering, SingleThreaded: Bool) -> ValueRef; pub fn LLVMBuildAtomicFence(B: BuilderRef, Order: AtomicOrdering); /* Selected entries from the downcasts. */ pub fn LLVMIsATerminatorInst(Inst: ValueRef) -> ValueRef; pub fn LLVMIsAStoreInst(Inst: ValueRef) -> ValueRef; /** Writes a module to the specified path. Returns 0 on success. 
*/ pub fn LLVMWriteBitcodeToFile(M: ModuleRef, Path: *const c_char) -> c_int; /** Creates target data from a target layout string. */ pub fn LLVMCreateTargetData(StringRep: *const c_char) -> TargetDataRef; /// Adds the target data to the given pass manager. The pass manager /// references the target data only weakly. pub fn LLVMAddTargetData(TD: TargetDataRef, PM: PassManagerRef); /** Number of bytes clobbered when doing a Store to *T. */ pub fn LLVMStoreSizeOfType(TD: TargetDataRef, Ty: TypeRef) -> c_ulonglong; /** Number of bytes clobbered when doing a Store to *T. */ pub fn LLVMSizeOfTypeInBits(TD: TargetDataRef, Ty: TypeRef) -> c_ulonglong; /** Distance between successive elements in an array of T. Includes ABI padding. */ pub fn LLVMABISizeOfType(TD: TargetDataRef, Ty: TypeRef) -> c_uint; /** Returns the preferred alignment of a type. */ pub fn LLVMPreferredAlignmentOfType(TD: TargetDataRef, Ty: TypeRef) -> c_uint; /** Returns the minimum alignment of a type. */ pub fn LLVMABIAlignmentOfType(TD: TargetDataRef, Ty: TypeRef) -> c_uint; /// Computes the byte offset of the indexed struct element for a /// target. pub fn LLVMOffsetOfElement(TD: TargetDataRef, StructTy: TypeRef, Element: c_uint) -> c_ulonglong; /** * Returns the minimum alignment of a type when part of a call frame. */ pub fn LLVMCallFrameAlignmentOfType(TD: TargetDataRef, Ty: TypeRef) -> c_uint; /** Disposes target data. */ pub fn LLVMDisposeTargetData(TD: TargetDataRef); /** Creates a pass manager. */ pub fn LLVMCreatePassManager() -> PassManagerRef; /** Creates a function-by-function pass manager */ pub fn LLVMCreateFunctionPassManagerForModule(M: ModuleRef) -> PassManagerRef; /** Disposes a pass manager. */ pub fn LLVMDisposePassManager(PM: PassManagerRef); /** Runs a pass manager on a module. */ pub fn LLVMRunPassManager(PM: PassManagerRef, M: ModuleRef) -> Bool; /** Runs the function passes on the provided function. 
*/ pub fn LLVMRunFunctionPassManager(FPM: PassManagerRef, F: ValueRef) -> Bool; /** Initializes all the function passes scheduled in the manager */ pub fn LLVMInitializeFunctionPassManager(FPM: PassManagerRef) -> Bool; /** Finalizes all the function passes scheduled in the manager */ pub fn LLVMFinalizeFunctionPassManager(FPM: PassManagerRef) -> Bool; pub fn LLVMInitializePasses(); /** Adds a verification pass. */ pub fn LLVMAddVerifierPass(PM: PassManagerRef); pub fn LLVMAddGlobalOptimizerPass(PM: PassManagerRef); pub fn LLVMAddIPSCCPPass(PM: PassManagerRef); pub fn LLVMAddDeadArgEliminationPass(PM: PassManagerRef); pub fn LLVMAddInstructionCombiningPass(PM: PassManagerRef); pub fn LLVMAddCFGSimplificationPass(PM: PassManagerRef); pub fn LLVMAddFunctionInliningPass(PM: PassManagerRef); pub fn LLVMAddFunctionAttrsPass(PM: PassManagerRef); pub fn LLVMAddScalarReplAggregatesPass(PM: PassManagerRef); pub fn LLVMAddScalarReplAggregatesPassSSA(PM: PassManagerRef); pub fn LLVMAddJumpThreadingPass(PM: PassManagerRef); pub fn LLVMAddConstantPropagationPass(PM: PassManagerRef); pub fn LLVMAddReassociatePass(PM: PassManagerRef); pub fn LLVMAddLoopRotatePass(PM: PassManagerRef); pub fn LLVMAddLICMPass(PM: PassManagerRef); pub fn LLVMAddLoopUnswitchPass(PM: PassManagerRef); pub fn LLVMAddLoopDeletionPass(PM: PassManagerRef); pub fn LLVMAddLoopUnrollPass(PM: PassManagerRef); pub fn LLVMAddGVNPass(PM: PassManagerRef); pub fn LLVMAddMemCpyOptPass(PM: PassManagerRef); pub fn LLVMAddSCCPPass(PM: PassManagerRef); pub fn LLVMAddDeadStoreEliminationPass(PM: PassManagerRef); pub fn LLVMAddStripDeadPrototypesPass(PM: PassManagerRef); pub fn LLVMAddConstantMergePass(PM: PassManagerRef); pub fn LLVMAddArgumentPromotionPass(PM: PassManagerRef); pub fn LLVMAddTailCallEliminationPass(PM: PassManagerRef); pub fn LLVMAddIndVarSimplifyPass(PM: PassManagerRef); pub fn LLVMAddAggressiveDCEPass(PM: PassManagerRef); pub fn LLVMAddGlobalDCEPass(PM: PassManagerRef); pub fn 
LLVMAddCorrelatedValuePropagationPass(PM: PassManagerRef); pub fn LLVMAddPruneEHPass(PM: PassManagerRef); pub fn LLVMAddSimplifyLibCallsPass(PM: PassManagerRef); pub fn LLVMAddLoopIdiomPass(PM: PassManagerRef); pub fn LLVMAddEarlyCSEPass(PM: PassManagerRef); pub fn LLVMAddTypeBasedAliasAnalysisPass(PM: PassManagerRef); pub fn LLVMAddBasicAliasAnalysisPass(PM: PassManagerRef); pub fn LLVMPassManagerBuilderCreate() -> PassManagerBuilderRef; pub fn LLVMPassManagerBuilderDispose(PMB: PassManagerBuilderRef); pub fn LLVMPassManagerBuilderSetOptLevel(PMB: PassManagerBuilderRef, OptimizationLevel: c_uint); pub fn LLVMPassManagerBuilderSetSizeLevel(PMB: PassManagerBuilderRef, Value: Bool); pub fn LLVMPassManagerBuilderSetDisableUnitAtATime( PMB: PassManagerBuilderRef, Value: Bool); pub fn LLVMPassManagerBuilderSetDisableUnrollLoops( PMB: PassManagerBuilderRef, Value: Bool); pub fn LLVMPassManagerBuilderSetDisableSimplifyLibCalls( PMB: PassManagerBuilderRef, Value: Bool); pub fn LLVMPassManagerBuilderUseInlinerWithThreshold( PMB: PassManagerBuilderRef, threshold: c_uint); pub fn LLVMPassManagerBuilderPopulateModulePassManager( PMB: PassManagerBuilderRef, PM: PassManagerRef); pub fn LLVMPassManagerBuilderPopulateFunctionPassManager( PMB: PassManagerBuilderRef, PM: PassManagerRef); pub fn LLVMPassManagerBuilderPopulateLTOPassManager( PMB: PassManagerBuilderRef, PM: PassManagerRef, Internalize: Bool, RunInliner: Bool); /** Destroys a memory buffer. */ pub fn LLVMDisposeMemoryBuffer(MemBuf: MemoryBufferRef); /* Stuff that's in rustllvm/ because it's not upstream yet. */ /** Opens an object file. */ pub fn LLVMCreateObjectFile(MemBuf: MemoryBufferRef) -> ObjectFileRef; /** Closes an object file. */ pub fn LLVMDisposeObjectFile(ObjFile: ObjectFileRef); /** Enumerates the sections in an object file. */ pub fn LLVMGetSections(ObjFile: ObjectFileRef) -> SectionIteratorRef; /** Destroys a section iterator. 
*/ pub fn LLVMDisposeSectionIterator(SI: SectionIteratorRef); /** Returns true if the section iterator is at the end of the section list: */ pub fn LLVMIsSectionIteratorAtEnd(ObjFile: ObjectFileRef, SI: SectionIteratorRef) -> Bool; /** Moves the section iterator to point to the next section. */ pub fn LLVMMoveToNextSection(SI: SectionIteratorRef); /** Returns the current section size. */ pub fn LLVMGetSectionSize(SI: SectionIteratorRef) -> c_ulonglong; /** Returns the current section contents as a string buffer. */ pub fn LLVMGetSectionContents(SI: SectionIteratorRef) -> *const c_char; /** Reads the given file and returns it as a memory buffer. Use LLVMDisposeMemoryBuffer() to get rid of it. */ pub fn LLVMRustCreateMemoryBufferWithContentsOfFile(Path: *const c_char) -> MemoryBufferRef; /** Borrows the contents of the memory buffer (doesn't copy it) */ pub fn LLVMCreateMemoryBufferWithMemoryRange(InputData: *const c_char, InputDataLength: size_t, BufferName: *const c_char, RequiresNull: Bool) -> MemoryBufferRef; pub fn LLVMCreateMemoryBufferWithMemoryRangeCopy(InputData: *const c_char, InputDataLength: size_t, BufferName: *const c_char) -> MemoryBufferRef; pub fn LLVMIsMultithreaded() -> Bool; pub fn LLVMStartMultithreaded() -> Bool; /** Returns a string describing the last error caused by an LLVMRust* call. */ pub fn LLVMRustGetLastError() -> *const c_char; /// Print the pass timings since static dtors aren't picking them up. pub fn LLVMRustPrintPassTimings(); pub fn LLVMStructCreateNamed(C: ContextRef, Name: *const c_char) -> TypeRef; pub fn LLVMStructSetBody(StructTy: TypeRef, ElementTypes: *const TypeRef, ElementCount: c_uint, Packed: Bool); pub fn LLVMConstNamedStruct(S: TypeRef, ConstantVals: *const ValueRef, Count: c_uint) -> ValueRef; /** Enables LLVM debug output. */ pub fn LLVMSetDebug(Enabled: c_int); /** Prepares inline assembly. 
*/ pub fn LLVMInlineAsm(Ty: TypeRef, AsmString: *const c_char, Constraints: *const c_char, SideEffects: Bool, AlignStack: Bool, Dialect: c_uint) -> ValueRef; pub static LLVMRustDebugMetadataVersion: u32; pub fn LLVMRustAddModuleFlag(M: ModuleRef, name: *const c_char, value: u32); pub fn LLVMDIBuilderCreate(M: ModuleRef) -> DIBuilderRef; pub fn LLVMDIBuilderDispose(Builder: DIBuilderRef); pub fn LLVMDIBuilderFinalize(Builder: DIBuilderRef); pub fn LLVMDIBuilderCreateCompileUnit(Builder: DIBuilderRef, Lang: c_uint, File: *const c_char, Dir: *const c_char, Producer: *const c_char, isOptimized: bool, Flags: *const c_char, RuntimeVer: c_uint, SplitName: *const c_char); pub fn LLVMDIBuilderCreateFile(Builder: DIBuilderRef, Filename: *const c_char, Directory: *const c_char) -> DIFile; pub fn LLVMDIBuilderCreateSubroutineType(Builder: DIBuilderRef, File: DIFile, ParameterTypes: DIArray) -> DICompositeType; pub fn LLVMDIBuilderCreateFunction(Builder: DIBuilderRef, Scope: DIDescriptor, Name: *const c_char, LinkageName: *const c_char, File: DIFile, LineNo: c_uint, Ty: DIType, isLocalToUnit: bool, isDefinition: bool, ScopeLine: c_uint, Flags: c_uint, isOptimized: bool, Fn: ValueRef, TParam: ValueRef, Decl: ValueRef) -> DISubprogram; pub fn LLVMDIBuilderCreateBasicType(Builder: DIBuilderRef, Name: *const c_char, SizeInBits: c_ulonglong, AlignInBits: c_ulonglong, Encoding: c_uint) -> DIBasicType; pub fn LLVMDIBuilderCreatePointerType(Builder: DIBuilderRef, PointeeTy: DIType, SizeInBits: c_ulonglong, AlignInBits: c_ulonglong, Name: *const c_char) -> DIDerivedType; pub fn LLVMDIBuilderCreateStructType(Builder: DIBuilderRef, Scope: DIDescriptor, Name: *const c_char, File: DIFile, LineNumber: c_uint, SizeInBits: c_ulonglong, AlignInBits: c_ulonglong, Flags: c_uint, DerivedFrom: DIType, Elements: DIArray, RunTimeLang: c_uint, VTableHolder: ValueRef, UniqueId: *const c_char) -> DICompositeType; pub fn LLVMDIBuilderCreateMemberType(Builder: DIBuilderRef, Scope: DIDescriptor, Name: 
*const c_char, File: DIFile, LineNo: c_uint, SizeInBits: c_ulonglong, AlignInBits: c_ulonglong, OffsetInBits: c_ulonglong, Flags: c_uint, Ty: DIType) -> DIDerivedType; pub fn LLVMDIBuilderCreateLexicalBlock(Builder: DIBuilderRef, Scope: DIDescriptor, File: DIFile, Line: c_uint, Col: c_uint, Discriminator: c_uint) -> DILexicalBlock; pub fn LLVMDIBuilderCreateStaticVariable(Builder: DIBuilderRef, Context: DIDescriptor, Name: *const c_char, LinkageName: *const c_char, File: DIFile, LineNo: c_uint, Ty: DIType, isLocalToUnit: bool, Val: ValueRef, Decl: ValueRef) -> DIGlobalVariable; pub fn LLVMDIBuilderCreateLocalVariable(Builder: DIBuilderRef, Tag: c_uint, Scope: DIDescriptor, Name: *const c_char, File: DIFile, LineNo: c_uint, Ty: DIType, AlwaysPreserve: bool, Flags: c_uint, ArgNo: c_uint) -> DIVariable; pub fn LLVMDIBuilderCreateArrayType(Builder: DIBuilderRef, Size: c_ulonglong, AlignInBits: c_ulonglong, Ty: DIType, Subscripts: DIArray) -> DIType; pub fn LLVMDIBuilderCreateVectorType(Builder: DIBuilderRef, Size: c_ulonglong, AlignInBits: c_ulonglong, Ty: DIType, Subscripts: DIArray) -> DIType; pub fn LLVMDIBuilderGetOrCreateSubrange(Builder: DIBuilderRef, Lo: c_longlong, Count: c_longlong) -> DISubrange; pub fn LLVMDIBuilderGetOrCreateArray(Builder: DIBuilderRef, Ptr: *const DIDescriptor, Count: c_uint) -> DIArray; pub fn LLVMDIBuilderInsertDeclareAtEnd(Builder: DIBuilderRef, Val: ValueRef, VarInfo: DIVariable, InsertAtEnd: BasicBlockRef) -> ValueRef; pub fn LLVMDIBuilderInsertDeclareBefore(Builder: DIBuilderRef, Val: ValueRef, VarInfo: DIVariable, InsertBefore: ValueRef) -> ValueRef; pub fn LLVMDIBuilderCreateEnumerator(Builder: DIBuilderRef, Name: *const c_char, Val: c_ulonglong) -> ValueRef; pub fn LLVMDIBuilderCreateEnumerationType(Builder: DIBuilderRef, Scope: ValueRef, Name: *const c_char, File: ValueRef, LineNumber: c_uint, SizeInBits: c_ulonglong, AlignInBits: c_ulonglong, Elements: ValueRef, ClassType: ValueRef) -> ValueRef; pub fn 
LLVMDIBuilderCreateUnionType(Builder: DIBuilderRef, Scope: ValueRef, Name: *const c_char, File: ValueRef, LineNumber: c_uint, SizeInBits: c_ulonglong, AlignInBits: c_ulonglong, Flags: c_uint, Elements: ValueRef, RunTimeLang: c_uint, UniqueId: *const c_char) -> ValueRef; pub fn LLVMSetUnnamedAddr(GlobalVar: ValueRef, UnnamedAddr: Bool); pub fn LLVMDIBuilderCreateTemplateTypeParameter(Builder: DIBuilderRef, Scope: ValueRef, Name: *const c_char, Ty: ValueRef, File: ValueRef, LineNo: c_uint, ColumnNo: c_uint) -> ValueRef; pub fn LLVMDIBuilderCreateOpDeref(IntType: TypeRef) -> ValueRef; pub fn LLVMDIBuilderCreateOpPlus(IntType: TypeRef) -> ValueRef; pub fn LLVMDIBuilderCreateComplexVariable(Builder: DIBuilderRef, Tag: c_uint, Scope: ValueRef, Name: *const c_char, File: ValueRef, LineNo: c_uint, Ty: ValueRef, AddrOps: *const ValueRef, AddrOpsCount: c_uint, ArgNo: c_uint) -> ValueRef; pub fn LLVMDIBuilderCreateNameSpace(Builder: DIBuilderRef, Scope: ValueRef, Name: *const c_char, File: ValueRef, LineNo: c_uint) -> ValueRef; pub fn LLVMDICompositeTypeSetTypeArray(CompositeType: ValueRef, TypeArray: ValueRef); pub fn LLVMTypeToString(Type: TypeRef) -> *const c_char; pub fn LLVMValueToString(value_ref: ValueRef) -> *const c_char; pub fn LLVMIsAArgument(value_ref: ValueRef) -> ValueRef; pub fn LLVMIsAAllocaInst(value_ref: ValueRef) -> ValueRef; pub fn LLVMInitializeX86TargetInfo(); pub fn LLVMInitializeX86Target(); pub fn LLVMInitializeX86TargetMC(); pub fn LLVMInitializeX86AsmPrinter(); pub fn LLVMInitializeX86AsmParser(); pub fn LLVMInitializeARMTargetInfo(); pub fn LLVMInitializeARMTarget(); pub fn LLVMInitializeARMTargetMC(); pub fn LLVMInitializeARMAsmPrinter(); pub fn LLVMInitializeARMAsmParser(); pub fn LLVMInitializeMipsTargetInfo(); pub fn LLVMInitializeMipsTarget(); pub fn LLVMInitializeMipsTargetMC(); pub fn LLVMInitializeMipsAsmPrinter(); pub fn LLVMInitializeMipsAsmParser(); pub fn LLVMRustAddPass(PM: PassManagerRef, Pass: *const c_char) -> bool; pub fn 
LLVMRustCreateTargetMachine(Triple: *const c_char, CPU: *const c_char, Features: *const c_char, Model: CodeGenModel, Reloc: RelocMode, Level: CodeGenOptLevel, EnableSegstk: bool, UseSoftFP: bool, NoFramePointerElim: bool, FunctionSections: bool, DataSections: bool) -> TargetMachineRef; pub fn LLVMRustDisposeTargetMachine(T: TargetMachineRef); pub fn LLVMRustAddAnalysisPasses(T: TargetMachineRef, PM: PassManagerRef, M: ModuleRef); pub fn LLVMRustAddBuilderLibraryInfo(PMB: PassManagerBuilderRef, M: ModuleRef, DisableSimplifyLibCalls: bool); pub fn LLVMRustAddLibraryInfo(PM: PassManagerRef, M: ModuleRef, DisableSimplifyLibCalls: bool); pub fn LLVMRustRunFunctionPassManager(PM: PassManagerRef, M: ModuleRef); pub fn LLVMRustWriteOutputFile(T: TargetMachineRef, PM: PassManagerRef, M: ModuleRef, Output: *const c_char, FileType: FileType) -> bool; pub fn LLVMRustPrintModule(PM: PassManagerRef, M: ModuleRef, Output: *const c_char); pub fn LLVMRustSetLLVMOptions(Argc: c_int, Argv: *const *const c_char); pub fn LLVMRustPrintPasses(); pub fn LLVMRustSetNormalizedTarget(M: ModuleRef, triple: *const c_char); pub fn LLVMRustAddAlwaysInlinePass(P: PassManagerBuilderRef, AddLifetimes: bool); pub fn LLVMRustLinkInExternalBitcode(M: ModuleRef, bc: *const c_char, len: size_t) -> bool; pub fn LLVMRustRunRestrictionPass(M: ModuleRef, syms: *const *const c_char, len: size_t); pub fn LLVMRustMarkAllFunctionsNounwind(M: ModuleRef); pub fn LLVMRustOpenArchive(path: *const c_char) -> ArchiveRef; pub fn LLVMRustArchiveReadSection(AR: ArchiveRef, name: *const c_char, out_len: *mut size_t) -> *const c_char; pub fn LLVMRustDestroyArchive(AR: ArchiveRef); pub fn LLVMRustSetDLLExportStorageClass(V: ValueRef); pub fn LLVMVersionMajor() -> c_int; pub fn LLVMVersionMinor() -> c_int; pub fn LLVMRustGetSectionName(SI: SectionIteratorRef, data: *mut *const c_char) -> c_int; } pub fn SetInstructionCallConv(instr: ValueRef, cc: CallConv) { unsafe { LLVMSetInstructionCallConv(instr, cc as c_uint); } } pub 
fn SetFunctionCallConv(fn_: ValueRef, cc: CallConv) { unsafe { LLVMSetFunctionCallConv(fn_, cc as c_uint); } } pub fn SetLinkage(global: ValueRef, link: Linkage) { unsafe { LLVMSetLinkage(global, link as c_uint); } } pub fn SetUnnamedAddr(global: ValueRef, unnamed: bool) { unsafe { LLVMSetUnnamedAddr(global, unnamed as Bool); } } pub fn set_thread_local(global: ValueRef, is_thread_local: bool) { unsafe { LLVMSetThreadLocal(global, is_thread_local as Bool); } } pub fn ConstICmp(pred: IntPredicate, v1: ValueRef, v2: ValueRef) -> ValueRef { unsafe { LLVMConstICmp(pred as c_ushort, v1, v2) } } pub fn ConstFCmp(pred: RealPredicate, v1: ValueRef, v2: ValueRef) -> ValueRef { unsafe { LLVMConstFCmp(pred as c_ushort, v1, v2) } } pub fn SetFunctionAttribute(fn_: ValueRef, attr: Attribute) { unsafe { LLVMAddFunctionAttribute(fn_, FunctionIndex as c_uint, attr as uint64_t) } } /* Memory-managed interface to target data. */ pub struct TargetData { pub lltd: TargetDataRef } impl Drop for TargetData { fn drop(&mut self) { unsafe { LLVMDisposeTargetData(self.lltd); } } } pub fn mk_target_data(string_rep: &str) -> TargetData { TargetData { lltd: string_rep.with_c_str(|buf| { unsafe { LLVMCreateTargetData(buf) } }) } } /* Memory-managed interface to object files. */ pub struct ObjectFile { pub llof: ObjectFileRef, } impl ObjectFile { // This will take ownership of llmb pub fn new(llmb: MemoryBufferRef) -> Option<ObjectFile> { unsafe { let llof = LLVMCreateObjectFile(llmb); if llof as int == 0 { // LLVMCreateObjectFile took ownership of llmb return None } Some(ObjectFile { llof: llof, }) } } } impl Drop for ObjectFile { fn drop(&mut self) { unsafe { LLVMDisposeObjectFile(self.llof); } } } /* Memory-managed interface to section iterators. 
*/ pub struct SectionIter { pub llsi: SectionIteratorRef } impl Drop for SectionIter { fn drop(&mut self) { unsafe { LLVMDisposeSectionIterator(self.llsi); } } } pub fn mk_section_iter(llof: ObjectFileRef) -> SectionIter { unsafe { SectionIter { llsi: LLVMGetSections(llof) } } } /// Safe wrapper around `LLVMGetParam`, because segfaults are no fun. pub fn get_param(llfn: ValueRef, index: c_uint) -> ValueRef { unsafe { assert!(index < LLVMCountParams(llfn)); LLVMGetParam(llfn, index) } } // FIXME #15460 - create a public function that actually calls our // static LLVM symbols. Otherwise the linker will just throw llvm // away. We're just calling lots of stuff until we transitively get // all of LLVM. This is worse than anything. pub unsafe fn static_link_hack_this_sucks() { LLVMInitializePasses(); LLVMInitializeX86TargetInfo(); LLVMInitializeX86Target(); LLVMInitializeX86TargetMC(); LLVMInitializeX86AsmPrinter(); LLVMInitializeX86AsmParser(); LLVMInitializeARMTargetInfo(); LLVMInitializeARMTarget(); LLVMInitializeARMTargetMC(); LLVMInitializeARMAsmPrinter(); LLVMInitializeARMAsmParser(); LLVMInitializeMipsTargetInfo(); LLVMInitializeMipsTarget(); LLVMInitializeMipsTargetMC(); LLVMInitializeMipsAsmPrinter(); LLVMInitializeMipsAsmParser(); LLVMRustSetLLVMOptions(0 as c_int, 0 as *const _); LLVMPassManagerBuilderPopulateModulePassManager(0 as *mut _, 0 as *mut _); LLVMPassManagerBuilderPopulateLTOPassManager(0 as *mut _, 0 as *mut _, False, False); LLVMPassManagerBuilderPopulateFunctionPassManager(0 as *mut _, 0 as *mut _); LLVMPassManagerBuilderSetOptLevel(0 as *mut _, 0 as c_uint); LLVMPassManagerBuilderUseInlinerWithThreshold(0 as *mut _, 0 as c_uint); LLVMWriteBitcodeToFile(0 as *mut _, 0 as *const _); LLVMPassManagerBuilderCreate(); LLVMPassManagerBuilderDispose(0 as *mut _); LLVMRustLinkInExternalBitcode(0 as *mut _, 0 as *const _, 0 as size_t); LLVMLinkInJIT(); LLVMLinkInMCJIT(); LLVMLinkInInterpreter(); extern { fn LLVMLinkInJIT(); fn LLVMLinkInMCJIT(); fn 
LLVMLinkInInterpreter(); } } // The module containing the native LLVM dependencies, generated by the build system // Note that this must come after the rustllvm extern declaration so that // parts of LLVM that rustllvm depends on aren't thrown away by the linker. // Works to the above fix for #15460 to ensure LLVM dependencies that // are only used by rustllvm don't get stripped by the linker. mod llvmdeps;
41.704232
99
0.523545
9b89557104a38579806e752352925883719f7bc5
4,871
//! FIXME: write short doc here use crate::{Assist, AssistCtx, AssistId}; use hir::db::HirDatabase; use join_to_string::join; use ra_syntax::{ ast::{self, AstNode}, Direction, SmolStr, SyntaxKind::{IDENT, WHITESPACE}, TextRange, TextUnit, }; const DERIVE_TRAIT: &str = "derive"; // Assist: add_custom_impl // // Adds impl block for derived trait. // // ``` // #[derive(Deb<|>ug, Display)] // struct S; // ``` // -> // ``` // #[derive(Display)] // struct S; // // impl Debug for S { // // } // ``` pub(crate) fn add_custom_impl(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist> { let input = ctx.find_node_at_offset::<ast::AttrInput>()?; let attr = input.syntax().parent().and_then(ast::Attr::cast)?; let attr_name = attr .syntax() .descendants_with_tokens() .filter(|t| t.kind() == IDENT) .find_map(|i| i.into_token()) .filter(|t| *t.text() == DERIVE_TRAIT)? .text() .clone(); let trait_token = ctx.token_at_offset().filter(|t| t.kind() == IDENT && *t.text() != attr_name).next()?; let annotated = attr.syntax().siblings(Direction::Next).find_map(|s| ast::Name::cast(s))?; let annotated_name = annotated.syntax().text().to_string(); let start_offset = annotated.syntax().parent()?.text_range().end(); ctx.add_assist(AssistId("add_custom_impl"), "add custom impl", |edit| { edit.target(attr.syntax().text_range()); let new_attr_input = input .syntax() .descendants_with_tokens() .filter(|t| t.kind() == IDENT) .filter_map(|t| t.into_token().map(|t| t.text().clone())) .filter(|t| t != trait_token.text()) .collect::<Vec<SmolStr>>(); let has_more_derives = new_attr_input.len() > 0; let new_attr_input = join(new_attr_input.iter()).separator(", ").surround_with("(", ")").to_string(); let new_attr_input_len = new_attr_input.len(); let mut buf = String::new(); buf.push_str("\n\nimpl "); buf.push_str(trait_token.text().as_str()); buf.push_str(" for "); buf.push_str(annotated_name.as_str()); buf.push_str(" {\n"); let cursor_delta = if has_more_derives { edit.replace(input.syntax().text_range(), 
new_attr_input); input.syntax().text_range().len() - TextUnit::from_usize(new_attr_input_len) } else { let attr_range = attr.syntax().text_range(); edit.delete(attr_range); let line_break_range = attr .syntax() .next_sibling_or_token() .filter(|t| t.kind() == WHITESPACE) .map(|t| t.text_range()) .unwrap_or(TextRange::from_to(TextUnit::from(0), TextUnit::from(0))); edit.delete(line_break_range); attr_range.len() + line_break_range.len() }; edit.set_cursor(start_offset + TextUnit::of_str(&buf) - cursor_delta); buf.push_str("\n}"); edit.insert(start_offset, buf); }) } #[cfg(test)] mod tests { use super::*; use crate::helpers::{check_assist, check_assist_not_applicable}; #[test] fn add_custom_impl_for_unique_input() { check_assist( add_custom_impl, " #[derive(Debu<|>g)] struct Foo { bar: String, } ", " struct Foo { bar: String, } impl Debug for Foo { <|> } ", ) } #[test] fn add_custom_impl_for_with_visibility_modifier() { check_assist( add_custom_impl, " #[derive(Debug<|>)] pub struct Foo { bar: String, } ", " pub struct Foo { bar: String, } impl Debug for Foo { <|> } ", ) } #[test] fn add_custom_impl_when_multiple_inputs() { check_assist( add_custom_impl, " #[derive(Display, Debug<|>, Serialize)] struct Foo {} ", " #[derive(Display, Serialize)] struct Foo {} impl Debug for Foo { <|> } ", ) } #[test] fn test_ignore_derive_macro_without_input() { check_assist_not_applicable( add_custom_impl, " #[derive(<|>)] struct Foo {} ", ) } #[test] fn test_ignore_if_cursor_on_param() { check_assist_not_applicable( add_custom_impl, " #[derive<|>(Debug)] struct Foo {} ", ); check_assist_not_applicable( add_custom_impl, " #[derive(Debug)<|>] struct Foo {} ", ) } #[test] fn test_ignore_if_not_derive() { check_assist_not_applicable( add_custom_impl, " #[allow(non_camel_<|>case_types)] struct Foo {} ", ) } }
23.531401
94
0.54342
28da988d166549349c316f2ebb2118d13d926fb2
1,468
use futures_cpupool::CpuPool; use num_cpus; use config::{Backend, Variant, Version}; /// Returns a [`CpuPool`](https://docs.rs/futures-cpupool/0.1.8/futures_cpupool/struct.CpuPool.html) /// with threads equal to the number of logical cores on your machine #[inline(always)] pub fn default_cpu_pool() -> CpuPool { CpuPool::new(num_cpus::get()) } #[cfg(feature = "serde")] pub(crate) fn default_cpu_pool_serde() -> Option<CpuPool> { None } /// Returns the number of logical cores on your machine #[inline(always)] pub fn default_lanes() -> u32 { num_cpus::get() as u32 } /// Returns the number of logical cores on your machine #[inline(always)] pub fn default_threads() -> u32 { num_cpus::get() as u32 } /// [`Backend::C`](enum.Backend.html#variant.C) pub const DEFAULT_BACKEND: Backend = Backend::C; /// `32_u32` pub const DEFAULT_HASH_LEN: u32 = 32; /// `192_u32` pub const DEFAULT_ITERATIONS: u32 = 192; /// `4096_u32` pub const DEFAULT_MEMORY_SIZE: u32 = 4_096; /// `false` pub const DEFAULT_OPT_OUT_OF_SECRET_KEY: bool = false; /// `false` pub const DEFAULT_PASSWORD_CLEARING: bool = false; /// `32_u32` pub const DEFAULT_SALT_LEN: u32 = 32; /// `false` pub const DEFAULT_SECRET_KEY_CLEARING: bool = false; /// [`Variant::Argon2id`](enum.Variant.html#variant.Argon2id) pub const DEFAULT_VARIANT: Variant = Variant::Argon2id; /// [`Version::_0x13`](enum.Version.html#variant._0x13) pub const DEFAULT_VERSION: Version = Version::_0x13;
24.881356
100
0.711853
f4ef960e9472f1b0cddeb34cfe3597260cce2f47
90
//#![feature(trace_macros)] //trace_macros!(true); #![recursion_limit="256"] mod macros;
18
50
0.688889
9190fb59687e1cda4d8eb9b9b96de7be2d3dcf3a
14,747
// WARNING: This file was autogenerated by jni-bindgen. Any changes to this file may be lost!!! #[cfg(any(feature = "all", feature = "android-app-admin-SystemUpdatePolicy"))] __jni_bindgen! { /// public final class [SystemUpdatePolicy](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html) /// /// Required feature: android-app-admin-SystemUpdatePolicy public final class SystemUpdatePolicy ("android/app/admin/SystemUpdatePolicy") extends crate::java::lang::Object, implements crate::android::os::Parcelable { // // Not emitting: Non-public method // /// [SystemUpdatePolicy](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html#SystemUpdatePolicy()) // fn new<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<__jni_bindgen::Local<'env, crate::android::app::admin::SystemUpdatePolicy>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // // class.path == "android/app/admin/SystemUpdatePolicy", java.flags == (empty), .name == "<init>", .descriptor == "()V" // unsafe { // let __jni_args = []; // let (__jni_class, __jni_method) = __jni_env.require_class_method("android/app/admin/SystemUpdatePolicy\0", "<init>\0", "()V\0"); // __jni_env.new_object_a(__jni_class, __jni_method, __jni_args.as_ptr()) // } // } /// [createAutomaticInstallPolicy](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html#createAutomaticInstallPolicy()) /// /// Required features: "android-app-admin-SystemUpdatePolicy" #[cfg(any(feature = "all", all(feature = "android-app-admin-SystemUpdatePolicy")))] pub fn createAutomaticInstallPolicy<'env>(__jni_env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::android::app::admin::SystemUpdatePolicy>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/app/admin/SystemUpdatePolicy", java.flags == PUBLIC | STATIC, .name == 
"createAutomaticInstallPolicy", .descriptor == "()Landroid/app/admin/SystemUpdatePolicy;" unsafe { let __jni_args = []; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/app/admin/SystemUpdatePolicy\0", "createAutomaticInstallPolicy\0", "()Landroid/app/admin/SystemUpdatePolicy;\0"); __jni_env.call_static_object_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// [createWindowedInstallPolicy](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html#createWindowedInstallPolicy(int,%20int)) /// /// Required features: "android-app-admin-SystemUpdatePolicy" #[cfg(any(feature = "all", all(feature = "android-app-admin-SystemUpdatePolicy")))] pub fn createWindowedInstallPolicy<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: i32, arg1: i32) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::android::app::admin::SystemUpdatePolicy>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/app/admin/SystemUpdatePolicy", java.flags == PUBLIC | STATIC, .name == "createWindowedInstallPolicy", .descriptor == "(II)Landroid/app/admin/SystemUpdatePolicy;" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0), __jni_bindgen::AsJValue::as_jvalue(&arg1)]; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/app/admin/SystemUpdatePolicy\0", "createWindowedInstallPolicy\0", "(II)Landroid/app/admin/SystemUpdatePolicy;\0"); __jni_env.call_static_object_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// [createPostponeInstallPolicy](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html#createPostponeInstallPolicy()) /// /// Required features: "android-app-admin-SystemUpdatePolicy" #[cfg(any(feature = "all", all(feature = "android-app-admin-SystemUpdatePolicy")))] pub fn createPostponeInstallPolicy<'env>(__jni_env: &'env __jni_bindgen::Env) -> 
__jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::android::app::admin::SystemUpdatePolicy>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/app/admin/SystemUpdatePolicy", java.flags == PUBLIC | STATIC, .name == "createPostponeInstallPolicy", .descriptor == "()Landroid/app/admin/SystemUpdatePolicy;" unsafe { let __jni_args = []; let (__jni_class, __jni_method) = __jni_env.require_class_static_method("android/app/admin/SystemUpdatePolicy\0", "createPostponeInstallPolicy\0", "()Landroid/app/admin/SystemUpdatePolicy;\0"); __jni_env.call_static_object_method_a(__jni_class, __jni_method, __jni_args.as_ptr()) } } /// [getPolicyType](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html#getPolicyType()) pub fn getPolicyType<'env>(&'env self) -> __jni_bindgen::std::result::Result<i32, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/app/admin/SystemUpdatePolicy", java.flags == PUBLIC, .name == "getPolicyType", .descriptor == "()I" unsafe { let __jni_args = []; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("android/app/admin/SystemUpdatePolicy\0", "getPolicyType\0", "()I\0"); __jni_env.call_int_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [getInstallWindowStart](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html#getInstallWindowStart()) pub fn getInstallWindowStart<'env>(&'env self) -> __jni_bindgen::std::result::Result<i32, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/app/admin/SystemUpdatePolicy", java.flags == PUBLIC, .name == "getInstallWindowStart", .descriptor == "()I" unsafe { let __jni_args = []; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = 
__jni_env.require_class_method("android/app/admin/SystemUpdatePolicy\0", "getInstallWindowStart\0", "()I\0"); __jni_env.call_int_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [getInstallWindowEnd](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html#getInstallWindowEnd()) pub fn getInstallWindowEnd<'env>(&'env self) -> __jni_bindgen::std::result::Result<i32, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/app/admin/SystemUpdatePolicy", java.flags == PUBLIC, .name == "getInstallWindowEnd", .descriptor == "()I" unsafe { let __jni_args = []; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("android/app/admin/SystemUpdatePolicy\0", "getInstallWindowEnd\0", "()I\0"); __jni_env.call_int_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [setFreezePeriods](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html#setFreezePeriods(java.util.List)) /// /// Required features: "android-app-admin-SystemUpdatePolicy", "java-util-List" #[cfg(any(feature = "all", all(feature = "android-app-admin-SystemUpdatePolicy", feature = "java-util-List")))] pub fn setFreezePeriods<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::util::List>>) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::android::app::admin::SystemUpdatePolicy>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/app/admin/SystemUpdatePolicy", java.flags == PUBLIC, .name == "setFreezePeriods", .descriptor == "(Ljava/util/List;)Landroid/app/admin/SystemUpdatePolicy;" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into())]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = 
__jni_env.require_class_method("android/app/admin/SystemUpdatePolicy\0", "setFreezePeriods\0", "(Ljava/util/List;)Landroid/app/admin/SystemUpdatePolicy;\0"); __jni_env.call_object_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [getFreezePeriods](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html#getFreezePeriods()) /// /// Required features: "java-util-List" #[cfg(any(feature = "all", all(feature = "java-util-List")))] pub fn getFreezePeriods<'env>(&'env self) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::util::List>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/app/admin/SystemUpdatePolicy", java.flags == PUBLIC, .name == "getFreezePeriods", .descriptor == "()Ljava/util/List;" unsafe { let __jni_args = []; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("android/app/admin/SystemUpdatePolicy\0", "getFreezePeriods\0", "()Ljava/util/List;\0"); __jni_env.call_object_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [toString](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html#toString()) /// /// Required features: "java-lang-String" #[cfg(any(feature = "all", all(feature = "java-lang-String")))] pub fn toString<'env>(&'env self) -> __jni_bindgen::std::result::Result<__jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::java::lang::String>>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/app/admin/SystemUpdatePolicy", java.flags == PUBLIC, .name == "toString", .descriptor == "()Ljava/lang/String;" unsafe { let __jni_args = []; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("android/app/admin/SystemUpdatePolicy\0", "toString\0", "()Ljava/lang/String;\0"); 
__jni_env.call_object_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [describeContents](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html#describeContents()) pub fn describeContents<'env>(&'env self) -> __jni_bindgen::std::result::Result<i32, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/app/admin/SystemUpdatePolicy", java.flags == PUBLIC, .name == "describeContents", .descriptor == "()I" unsafe { let __jni_args = []; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("android/app/admin/SystemUpdatePolicy\0", "describeContents\0", "()I\0"); __jni_env.call_int_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// [writeToParcel](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html#writeToParcel(android.os.Parcel,%20int)) /// /// Required features: "android-os-Parcel" #[cfg(any(feature = "all", all(feature = "android-os-Parcel")))] pub fn writeToParcel<'env>(&'env self, arg0: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::android::os::Parcel>>, arg1: i32) -> __jni_bindgen::std::result::Result<(), __jni_bindgen::Local<'env, crate::java::lang::Throwable>> { // class.path == "android/app/admin/SystemUpdatePolicy", java.flags == PUBLIC, .name == "writeToParcel", .descriptor == "(Landroid/os/Parcel;I)V" unsafe { let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0.into()), __jni_bindgen::AsJValue::as_jvalue(&arg1)]; let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env); let (__jni_class, __jni_method) = __jni_env.require_class_method("android/app/admin/SystemUpdatePolicy\0", "writeToParcel\0", "(Landroid/os/Parcel;I)V\0"); __jni_env.call_void_method_a(self.0.object, __jni_method, __jni_args.as_ptr()) } } /// **get** public static final 
[CREATOR](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html#CREATOR) /// /// Required feature: android-os-Parcelable_Creator #[cfg(any(feature = "all", feature = "android-os-Parcelable_Creator"))] pub fn CREATOR<'env>(env: &'env __jni_bindgen::Env) -> __jni_bindgen::std::option::Option<__jni_bindgen::Local<'env, crate::android::os::Parcelable_Creator>> { unsafe { let (class, field) = env.require_class_static_field("android/app/admin/SystemUpdatePolicy\0", "CREATOR\0", "Landroid/os/Parcelable$Creator;\0"); env.get_static_object_field(class, field) } } /// public static final [TYPE_INSTALL_AUTOMATIC](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html#TYPE_INSTALL_AUTOMATIC) pub const TYPE_INSTALL_AUTOMATIC : i32 = 1; /// public static final [TYPE_INSTALL_WINDOWED](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html#TYPE_INSTALL_WINDOWED) pub const TYPE_INSTALL_WINDOWED : i32 = 2; /// public static final [TYPE_POSTPONE](https://developer.android.com/reference/android/app/admin/SystemUpdatePolicy.html#TYPE_POSTPONE) pub const TYPE_POSTPONE : i32 = 3; } }
81.027473
372
0.665356
146531733e923f395f03b6fdd98c33d2160b64d5
10,891
//! # **Humansize** //! //! Humansize lets you easily represent file sizes in a human-friendly format. //! You can specify your own formatting style, pick among the three defaults provided //! by the library: //! //! * Decimal (Multiples of 1000, `KB` units) //! * Binary (Multiples of 1024, `KiB` units) //! * Conventional (Multiples of 1024, `KB` units) //! //! ## How to use it //! //! Simply import the `FileSize` trait and the options module and call the //! file_size method on any positive integer, using one of the three standards //! provided by the options module. //! //! ```rust //! extern crate humansize; //! use humansize::{FileSize, file_size_opts as options}; //! //! fn main() { //! let size = 1000; //! println!("Size is {}", size.file_size(options::DECIMAL).unwrap()); //! //! println!("Size is {}", size.file_size(options::BINARY).unwrap()); //! //! println!("Size is {}", size.file_size(options::CONVENTIONAL).unwrap()); //! } //! ``` //! //! If you wish to customize the way sizes are displayed, you may create your own custom `FileSizeOpts` struct //! and pass that to the method. See the `custom_options.rs` file in the example folder. static SCALE_DECIMAL: [&str; 9] = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]; static SCALE_DECIMAL_LONG: [&str; 9] = [ "Bytes", "Kilobytes", "Megabytes", "Gigabytes", "Terabytes", "Petabytes", "Exabytes", "Zettabytes", "Yottabytes", ]; static SCALE_BINARY: [&str; 9] = ["B", "KiB", "MiB", "GiB", "TiB", "PiB", "EiB", "ZiB", "YiB"]; static SCALE_BINARY_LONG: [&str; 9] = [ "Bytes", "Kibibytes", "Mebibytes", "Gibibytes", "Tebibytes", "Pebibytes", "Exbibytes", "Zebibytes", "Yobibytes", ]; pub mod file_size_opts { //! Describes the struct that holds the options needed by the `file_size` method. //! The three most common formats are provided as constants to be used easily #[derive(Debug, PartialEq, Copy, Clone)] /// Holds the standard to use when displying the size. 
pub enum Kilo { /// The decimal scale and units Decimal, /// The binary scale and units Binary, } #[derive(Debug, Copy, Clone)] /// Forces a certain representation of the resulting file size. pub enum FixedAt { Byte, Kilo, Mega, Giga, Tera, Peta, Exa, Zetta, Yotta, No, } /// Holds the options for the `file_size` method. #[derive(Debug)] pub struct FileSizeOpts { /// The scale (binary/decimal) to divide against. pub divider: Kilo, /// The unit set to display. pub units: Kilo, /// The amount of decimal places to display if the decimal part is non-zero. pub decimal_places: usize, /// The amount of zeroes to display if the decimal part is zero. pub decimal_zeroes: usize, /// Whether to force a certain representation and if so, which one. pub fixed_at: FixedAt, /// Whether to use the full suffix or its abbreveation. pub long_units: bool, /// Whether to place a space between value and units. pub space: bool, /// An optional suffix which will be appended after the unit. pub suffix: &'static str, /// Whether to allow negative numbers as input. If `False`, negative values will return an error. pub allow_negative: bool, } impl AsRef<FileSizeOpts> for FileSizeOpts { fn as_ref(&self) -> &FileSizeOpts { self } } /// Options to display sizes in the binary format. pub const BINARY: FileSizeOpts = FileSizeOpts { divider: Kilo::Binary, units: Kilo::Binary, decimal_places: 2, decimal_zeroes: 0, fixed_at: FixedAt::No, long_units: false, space: true, suffix: "", allow_negative: false, }; /// Options to display sizes in the decimal format. pub const DECIMAL: FileSizeOpts = FileSizeOpts { divider: Kilo::Decimal, units: Kilo::Decimal, decimal_places: 2, decimal_zeroes: 0, fixed_at: FixedAt::No, long_units: false, space: true, suffix: "", allow_negative: false, }; /// Options to display sizes in the "conventional" format. /// This 1024 as the value of the `Kilo`, but displays decimal-style units (`KB`, not `KiB`). 
pub const CONVENTIONAL: FileSizeOpts = FileSizeOpts { divider: Kilo::Binary, units: Kilo::Decimal, decimal_places: 2, decimal_zeroes: 0, fixed_at: FixedAt::No, long_units: false, space: true, suffix: "", allow_negative: false, }; } /// The trait for the `file_size`method pub trait FileSize { /// Formats self according to the parameters in `opts`. `opts` can either be one of the /// three defaults providedby the `file_size_opts` module, or be custom-defined according /// to your needs /// /// # Errors /// Will fail by default if called on a negative number. Override this behavior by setting /// `allow_negative` to `True` in a custom options struct. /// /// # Examples /// ```rust /// use humansize::{FileSize, file_size_opts as options}; /// /// let size = 5128; /// println!("Size is {}", size.file_size(options::DECIMAL).unwrap()); /// ``` /// fn file_size<T: AsRef<FileSizeOpts>>(&self, opts: T) -> Result<String, String>; } fn f64_eq(left: f64, right: f64) -> bool { left == right || (left - right).abs() <= std::f64::EPSILON } use self::file_size_opts::*; macro_rules! impl_file_size_u { (for $($t:ty)*) => ($( impl FileSize for $t { fn file_size<T: AsRef<FileSizeOpts>>(&self, _opts: T) -> Result<String, String> { let opts = _opts.as_ref(); let divider = match opts.divider { Kilo::Decimal => 1000.0, Kilo::Binary => 1024.0 }; let mut size: f64 = *self as f64; let mut scale_idx = 0; match opts.fixed_at { FixedAt::No => { while size >= divider { size /= divider; scale_idx += 1; } } val => { while scale_idx != val as usize { size /= divider; scale_idx += 1; } } } let mut scale = match (opts.units, opts.long_units) { (Kilo::Decimal, false) => SCALE_DECIMAL[scale_idx], (Kilo::Decimal, true) => SCALE_DECIMAL_LONG[scale_idx], (Kilo::Binary, false) => SCALE_BINARY[scale_idx], (Kilo::Binary, true) => SCALE_BINARY_LONG[scale_idx] }; // Remove "s" from the scale if the size is 1.x if opts.long_units && f64_eq(size.trunc(), 1.0) { scale = &scale[0 .. 
scale.len()-1]; } let places = if f64_eq(size.fract(), 0.0) { opts.decimal_zeroes } else { opts.decimal_places }; let space = if opts.space {" "} else {""}; Ok(format!("{:.*}{}{}{}", places, size, space, scale, opts.suffix)) } } )*) } macro_rules! impl_file_size_i { (for $($t:ty)*) => ($( impl FileSize for $t { fn file_size<T: AsRef<FileSizeOpts>>(&self, _opts: T) -> Result<String, String> { let opts = _opts.as_ref(); if *self < 0 && !opts.allow_negative { return Err("Tried calling file_size on a negative value".to_owned()); } else { let sign = if *self < 0 { "-" } else { "" }; Ok(format!("{}{}", sign, (self.abs() as u64).file_size(opts)?)) } } } )*) } impl_file_size_u!(for usize u8 u16 u32 u64); impl_file_size_i!(for isize i8 i16 i32 i64); #[test] fn test_sizes() { assert_eq!(0.file_size(BINARY).unwrap(), "0 B"); assert_eq!(999.file_size(BINARY).unwrap(), "999 B"); assert_eq!(1000.file_size(BINARY).unwrap(), "1000 B"); assert_eq!(1000.file_size(DECIMAL).unwrap(), "1 KB"); assert_eq!(1023.file_size(BINARY).unwrap(), "1023 B"); assert_eq!(1023.file_size(DECIMAL).unwrap(), "1.02 KB"); assert_eq!(1024.file_size(BINARY).unwrap(), "1 KiB"); assert_eq!(1024.file_size(CONVENTIONAL).unwrap(), "1 KB"); let semi_custom_options = file_size_opts::FileSizeOpts { space: false, ..file_size_opts::DECIMAL }; assert_eq!(1000.file_size(semi_custom_options).unwrap(), "1KB"); let semi_custom_options2 = file_size_opts::FileSizeOpts { suffix: "/s", ..file_size_opts::BINARY }; assert_eq!(999.file_size(semi_custom_options2).unwrap(), "999 B/s"); let semi_custom_options3 = file_size_opts::FileSizeOpts { suffix: "/day", space: false, ..file_size_opts::DECIMAL }; assert_eq!(1000.file_size(semi_custom_options3).unwrap(), "1KB/day"); let semi_custom_options4 = file_size_opts::FileSizeOpts { fixed_at: file_size_opts::FixedAt::Byte, ..file_size_opts::BINARY }; assert_eq!(2048.file_size(semi_custom_options4).unwrap(), "2048 B"); let semi_custom_options5 = file_size_opts::FileSizeOpts { fixed_at: 
file_size_opts::FixedAt::Kilo, ..file_size_opts::BINARY }; assert_eq!( 16584975.file_size(semi_custom_options5).unwrap(), "16196.26 KiB" ); let semi_custom_options6 = file_size_opts::FileSizeOpts { fixed_at: file_size_opts::FixedAt::Tera, decimal_places: 10, ..file_size_opts::BINARY }; assert_eq!( 15284975.file_size(semi_custom_options6).unwrap(), "0.0000139016 TiB" ); let semi_custom_options7 = file_size_opts::FileSizeOpts { allow_negative: true, ..file_size_opts::DECIMAL }; assert_eq!( (-5500).file_size(&semi_custom_options7).unwrap(), "-5.50 KB" ); assert_eq!((5500).file_size(&semi_custom_options7).unwrap(), "5.50 KB"); }
33.407975
111
0.544119
8793c0b7ce0e9731dae74b5764830c07f7da8e4a
4,348
// Copyright (c) 2016-2017 Chef Software Inc. and/or applicable contributors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! A module containing the HTTP server and handlers for servicing client requests pub mod handlers; use std::sync::{mpsc, Arc}; use std::thread::{self, JoinHandle}; use depot; use hab_net::http::middleware::*; use hab_net::oauth::github::GitHubClient; use hab_core::event::EventLogger; use iron::prelude::*; use mount::Mount; use persistent::{self, Read}; use staticfile::Static; use config::Config; use error::Result; use self::handlers::*; // Iron defaults to a threadpool of size `8 * num_cpus`. 
// See: http://172.16.2.131:9633/iron/prelude/struct.Iron.html#method.http const HTTP_THREAD_COUNT: usize = 128; /// Create a new `iron::Chain` containing a Router and it's required middleware pub fn router(config: Arc<Config>) -> Result<Chain> { let basic = Authenticated::new(&*config); let router = router!( status: get "/status" => status, authenticate: get "/authenticate/:code" => github_authenticate, jobs: post "/jobs" => XHandler::new(job_create).before(basic.clone()), job: get "/jobs/:id" => job_show, job_log: get "/jobs/:id/log" => job_log, user_invitations: get "/user/invitations" => { XHandler::new(list_account_invitations).before(basic.clone()) }, user_origins: get "/user/origins" => XHandler::new(list_user_origins).before(basic.clone()), // NOTE: Each of the handler functions for projects currently // short-circuits processing if trying to do anything with a // non-"core" origin, since we're not enabling Builder for any // other origins at the moment. projects: post "/projects" => XHandler::new(project_create).before(basic.clone()), project: get "/projects/:origin/:name" => project_show, project_jobs: get "/projects/:origin/:name/jobs" => project_jobs, edit_project: put "/projects/:origin/:name" => { XHandler::new(project_update).before(basic.clone()) }, delete_project: delete "/projects/:origin/:name" => { XHandler::new(project_delete).before(basic.clone()) } ); let mut chain = Chain::new(router); chain.link(persistent::Read::<GitHubCli>::both( GitHubClient::new(&*config), )); chain.link(Read::<EventLog>::both( EventLogger::new(&config.log_dir, config.events_enabled), )); chain.link_before(RouteBroker); chain.link_after(Cors); Ok(chain) } /// Create a new HTTP listener and run it in a separate thread. This function will block the calling /// thread until the new listener has successfully started. 
/// /// # Errors /// /// * Depot could not be started /// * Couldn't create Router or it's middleware /// /// # Panics /// /// * Listener crashed during startup pub fn run(config: Arc<Config>) -> Result<JoinHandle<()>> { let (tx, rx) = mpsc::sync_channel(1); let depot = depot::DepotUtil::new(config.depot.clone()); let depot_chain = depot::server::router(depot)?; let mut mount = Mount::new(); if let Some(ref path) = config.ui.root { debug!("Mounting UI at filepath {}", path); mount.mount("/", Static::new(path)); } let chain = router(config.clone())?; mount.mount("/v1", chain).mount("/v1/depot", depot_chain); let handle = thread::Builder::new() .name("http-srv".to_string()) .spawn(move || { let mut server = Iron::new(mount); server.threads = HTTP_THREAD_COUNT; server.http(&config.http).unwrap(); tx.send(()).unwrap(); }) .unwrap(); match rx.recv() { Ok(()) => Ok(handle), Err(e) => panic!("http-srv thread startup error, err={}", e), } }
35.933884
100
0.644664
795b38c56e5e36d2d4901151dd70f48e8fcfb118
867
// option2.rs // Make me compile! Execute `rustlings hint option2` for hints fn main() { let optional_value = Some(String::from("rustlings")); if let Some(value) = optional_value{ println!("the value of optional value is: {}", value); } else { println!("The optional value doesn't contain anything!"); } let mut optional_values_vec: Vec<Option<i8>> = Vec::new(); for x in 1..10 { optional_values_vec.push(Some(x)); } // TODO: make this a while let statement - remember that vector.pop also adds another layer of Option<T> // You can stack `Option<T>`'s into while let and if let // value = optional_values_vec.pop() { // println!("current value: {}", value); // } while let Some(value) = optional_values_vec.pop(){ println!("current value: {}", value.unwrap()); } }
30.964286
108
0.615917
75d3fd6827afbdfc59c9cd24d653bb074f96008a
3,219
use std::*; use std::fs::File; use std::io::prelude::*; pub struct TriangularMatrix<T> { data: Vec<T>, pub number_of_lines: usize } #[allow(dead_code)] impl<T> TriangularMatrix<T> { pub fn from_file(file_name: &str) -> TriangularMatrix<usize> { let mut file = File::open(file_name).expect("Imposible Abrir el fichero de distancias"); let file_content = &mut String::new(); file.read_to_string(file_content).expect("Formato del fichero de distancias incorrecto"); return TriangularMatrix { data: file_content .trim() .split_whitespace() .map(|e| e.parse::<usize>().expect("Elmento del fichero de distancias no es entero")) .collect(), number_of_lines: file_content.trim().lines().count() + 1 }; } pub fn filled_false(number_of_lines: usize) -> TriangularMatrix<bool> { let capacity = (number_of_lines * number_of_lines - number_of_lines) / 2; let mut data = Vec::with_capacity(capacity); for _ in 0..capacity { data.push(false); } TriangularMatrix { data, number_of_lines } } fn check_index(&self, line: usize, column: usize){ if column > line {panic!("Impossible to access {},{} element", line, column)} if line > self.number_of_lines {panic!("Impossible to access {},{} element with {} columns" , line, column, self.number_of_lines)} } pub fn get(&self, line: usize, column: usize) -> &T { self.check_index(line, column); let line_jump = (line * line - line) / 2; //N * (N -1) / 2 &self.data[line_jump + column] } pub fn set(&mut self, line: usize, column: usize, value: T) { self.check_index(line, column); let line_jump = (line * line - line) / 2; //N * (N -1) / 2 self.data[line_jump + column] = value } pub fn enumerate_indexes(&self) -> TriangularMultiIndexEnumerate<T> { return TriangularMultiIndexEnumerate { index: (1, 0), number_of_lines: self.number_of_lines, matrix: &self.data }; } pub fn enumerate_from(&self, column: usize, line: usize) -> TriangularMultiIndexEnumerate<T> { return TriangularMultiIndexEnumerate { index: (column, line), number_of_lines: self.number_of_lines, matrix: 
&self.data }; } } pub struct TriangularMultiIndexEnumerate<'a, T: 'a> { index: (usize, usize), number_of_lines: usize, matrix: &'a Vec<T>, } impl<'a, T: 'a> Iterator for TriangularMultiIndexEnumerate<'a, T> { type Item = (usize, usize, &'a T); fn next(&mut self) -> Option<(usize, usize, &'a T)> { let line = self.index.0; let column = self.index.1; let next_column = self.index.1 + 1; self.index = (line + next_column / line, next_column % line); if line >= self.number_of_lines { return None; } else { let line_jump = (line * line - line) / 2; let value = &self.matrix[column + line_jump]; return Some((line, column, value)); } } }
34.98913
101
0.580304
fbff6faed02da056e6368457a2157324fa87225b
3,130
extern crate glib; extern crate gtk; extern crate cairo; extern crate gtk_sys; extern crate gdk_sys; extern crate gobject_sys; extern crate glib_sys; extern crate cairo_sys; use std::mem; use self::glib_sys::gpointer; use self::gobject_sys::GTypeInstance; use self::glib_sys::GType; use std::ptr; use std::os::raw::c_void; use gobject_utils::PrivateAccessor; use std::rc::Rc; use glib::object::Downcast; use glib::translate::*; use railway::RailwayStation; #[repr(C)] pub struct StationWrapperC(c_void); #[repr(C)] pub struct StationWrapperClass { pub parent_class: gobject_sys::GObjectClass, } pub struct StationWrapperPrivate { pub station: Option<Rc<RailwayStation>>, } // finalize: Option<unsafe extern "C" fn(*mut GObject)>, unsafe extern "C" fn finalize(instance: *mut gobject_sys::GObject) { let mut accessor = get_private_accessor(instance); let _ = accessor.get(); // drop } // pub type GClassInitFunc = Option<unsafe extern "C" fn(gpointer, gpointer)>; unsafe extern "C" fn class_init(g_class: gpointer, _class_data: gpointer) { // i32 is dummy gobject_sys::g_type_class_add_private(g_class, mem::size_of::<Box<i32>>()); let klass = g_class as *mut gobject_sys::GInitiallyUnownedClass; (*klass).finalize = Some(finalize); } // pub type GInstanceInitFunc = Option<unsafe extern "C" fn(*mut GTypeInstance, gpointer)>; unsafe extern "C" fn init(instance: *mut GTypeInstance, _g_class: gpointer) { let priv_ = Box::new(StationWrapperPrivate { station: None }); let mut accessor = PrivateAccessor::<StationWrapperPrivate>::from_instance(instance, get_type()); accessor.set(priv_); } fn get_private_accessor(obj: *mut gobject_sys::GObject) -> PrivateAccessor<StationWrapperPrivate> { unsafe { PrivateAccessor::<StationWrapperPrivate>::from_object(obj, get_type()) } } static mut TYPE: GType = 0; pub fn get_type() -> glib_sys::GType { unsafe { if TYPE == 0 { let gtype = ::gobject_utils::register_static_type::<StationWrapperClass>( "StationWrapper", gobject_sys::g_object_get_type(), 
Some(class_init), Some(init)); TYPE = gtype; } TYPE } } glib_wrapper! { pub struct StationWrapper(Object<StationWrapperC>); match fn { get_type => || get_type(), } } impl StationWrapper { pub fn new() -> StationWrapper { unsafe { let ptr = gobject_sys::g_object_new(get_type(), ptr::null()); StationWrapper::from_glib_full(ptr as *mut _).downcast_unchecked() } } pub fn get_station(&self) -> Rc<RailwayStation> { unsafe { let accessor = get_private_accessor(self.to_glib_none().0); accessor.borrow().station.as_ref().unwrap().clone() } } pub fn set_station(&mut self, station: RailwayStation) { let mut accessor = get_private_accessor(self.to_glib_none().0); unsafe { accessor.borrow_mut().station = Some(Rc::new(station)); } } }
28.981481
99
0.651757
eb66a2c661b36472aa0a1f61ec2b6dbebdd82587
17,258
//! `Object` trait, along with some implementations. References. //! //! Some of the structs are incomplete (missing fields that are in the PDF references). mod types; mod stream; mod color; mod function; pub use self::types::*; pub use self::stream::*; pub use self::color::*; pub use self::function::*; pub use crate::file::PromisedRef; use crate::primitive::*; use crate::error::*; use crate::enc::*; use std::fmt; use std::marker::PhantomData; use std::collections::HashMap; use std::rc::Rc; use std::ops::Deref; use std::hash::{Hash, Hasher}; use std::convert::TryInto; pub type ObjNr = u64; pub type GenNr = u16; pub trait Resolve: { fn resolve(&self, r: PlainRef) -> Result<Primitive>; fn get<T: Object>(&self, r: Ref<T>) -> Result<RcRef<T>>; } pub struct NoResolve; impl Resolve for NoResolve { fn resolve(&self, _: PlainRef) -> Result<Primitive> { Err(PdfError::Reference) } fn get<T: Object>(&self, _r: Ref<T>) -> Result<RcRef<T>> { Err(PdfError::Reference) } } /// A PDF Object pub trait Object: Sized + 'static { /// Convert primitive to Self fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result<Self>; } pub trait Updater { fn create<T: ObjectWrite>(&mut self, obj: T) -> Result<RcRef<T>>; fn update<T: ObjectWrite>(&mut self, old: PlainRef, obj: T) -> Result<RcRef<T>>; fn promise<T: Object>(&mut self) -> PromisedRef<T>; fn fulfill<T: ObjectWrite>(&mut self, promise: PromisedRef<T>, obj: T) -> Result<RcRef<T>>; } pub struct NoUpdate; impl Updater for NoUpdate { fn create<T: ObjectWrite>(&mut self, _obj: T) -> Result<RcRef<T>> { panic!() } fn update<T: ObjectWrite>(&mut self, _old: PlainRef, _obj: T) -> Result<RcRef<T>> { panic!() } fn promise<T: Object>(&mut self) -> PromisedRef<T> { panic!() } fn fulfill<T: ObjectWrite>(&mut self, _promise: PromisedRef<T>, _obj: T) -> Result<RcRef<T>> { panic!() } } pub trait ObjectWrite { fn to_primitive(&self, update: &mut impl Updater) -> Result<Primitive>; } pub trait FromDict: Sized { fn from_dict(dict: Dictionary, 
resolve: &impl Resolve) -> Result<Self>; } pub trait ToDict: ObjectWrite { fn to_dict(&self, update: &mut impl Updater) -> Result<Dictionary>; } pub trait SubType<T> {} pub trait Trace { fn trace(&self, _cb: &mut impl FnMut(PlainRef)) {} } /////// // Refs /////// // TODO move to primitive.rs #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] pub struct PlainRef { pub id: ObjNr, pub gen: GenNr, } impl Object for PlainRef { fn from_primitive(p: Primitive, _: &impl Resolve) -> Result<Self> { p.into_reference() } } impl ObjectWrite for PlainRef { fn to_primitive(&self, _: &mut impl Updater) -> Result<Primitive> { Ok(Primitive::Reference(*self)) } } // NOTE: Copy & Clone implemented manually ( https://github.com/rust-lang/rust/issues/26925 ) pub struct Ref<T> { inner: PlainRef, _marker: PhantomData<T> } impl<T> Clone for Ref<T> { fn clone(&self) -> Ref<T> { Ref { inner: self.inner, _marker: PhantomData } } } impl<T> Copy for Ref<T> {} impl<T> Ref<T> { pub fn new(inner: PlainRef) -> Ref<T> { Ref { inner, _marker: PhantomData::default(), } } pub fn from_id(id: ObjNr) -> Ref<T> { Ref { inner: PlainRef {id, gen: 0}, _marker: PhantomData::default(), } } pub fn get_inner(&self) -> PlainRef { self.inner } pub fn upcast<U>(self) -> Ref<U> where T: SubType<U> { Ref::new(self.inner) } } impl<T: Object> Object for Ref<T> { fn from_primitive(p: Primitive, _: &impl Resolve) -> Result<Self> { Ok(Ref::new(p.into_reference()?)) } } impl<T> ObjectWrite for Ref<T> { fn to_primitive(&self, update: &mut impl Updater) -> Result<Primitive> { self.inner.to_primitive(update) } } impl<T> Trace for Ref<T> { fn trace(&self, cb: &mut impl FnMut(PlainRef)) { cb(self.inner); } } impl<T> fmt::Debug for Ref<T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Ref({})", self.inner.id) } } impl<T> Hash for Ref<T> { fn hash<H: Hasher>(&self, state: &mut H) { self.inner.hash(state) } } impl<T> PartialEq for Ref<T> { fn eq(&self, rhs: &Self) -> bool { self.inner.eq(&rhs.inner) } } impl<T> 
Eq for Ref<T> {} #[derive(Debug)] pub struct RcRef<T> { inner: PlainRef, data: Rc<T> } impl<T> RcRef<T> { pub fn new(inner: PlainRef, data: Rc<T>) -> RcRef<T> { RcRef { inner, data } } pub fn get_ref(&self) -> Ref<T> { Ref::new(self.inner) } } impl<T: Object + std::fmt::Debug> Object for RcRef<T> { fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result<Self> { match p { Primitive::Reference(r) => resolve.get(Ref::new(r)), p => Err(PdfError::UnexpectedPrimitive {expected: "Reference", found: p.get_debug_name()}) } } } impl<T> ObjectWrite for RcRef<T> { fn to_primitive(&self, update: &mut impl Updater) -> Result<Primitive> { self.inner.to_primitive(update) } } impl<T> Deref for RcRef<T> { type Target = T; fn deref(&self) -> &T { &*self.data } } impl<T> Clone for RcRef<T> { fn clone(&self) -> RcRef<T> { RcRef { inner: self.inner, data: self.data.clone(), } } } impl<T> Trace for RcRef<T> { fn trace(&self, cb: &mut impl FnMut(PlainRef)) { cb(self.inner); } } impl<'a, T> From<&'a RcRef<T>> for Ref<T> { fn from(r: &'a RcRef<T>) -> Ref<T> { Ref::new(r.inner) } } impl<T> Hash for RcRef<T> { fn hash<H: Hasher>(&self, state: &mut H) { std::ptr::hash(&**self, state) } } impl<T> PartialEq for RcRef<T> { fn eq(&self, rhs: &Self) -> bool { std::ptr::eq(&**self, &**rhs) } } impl<T> Eq for RcRef<T> {} #[derive(Debug)] pub enum MaybeRef<T> { Direct(Rc<T>), Indirect(RcRef<T>), } impl<T> MaybeRef<T> { pub fn as_ref(&self) -> Option<Ref<T>> { match *self { MaybeRef::Indirect(ref r) => Some(r.get_ref()), _ => None } } } impl<T: Object> Object for MaybeRef<T> { fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result<Self> { Ok(match p { Primitive::Reference(r) => MaybeRef::Indirect(resolve.get(Ref::new(r))?), p => MaybeRef::Direct(Rc::new(T::from_primitive(p, resolve)?)) }) } } impl<T: ObjectWrite> ObjectWrite for MaybeRef<T> { fn to_primitive(&self, update: &mut impl Updater) -> Result<Primitive> { match self { MaybeRef::Direct(ref inner) => 
inner.to_primitive(update), MaybeRef::Indirect(r) => r.to_primitive(update) } } } impl<T> Deref for MaybeRef<T> { type Target = T; fn deref(&self) -> &T { match *self { MaybeRef::Direct(ref t) => t, MaybeRef::Indirect(ref r) => &**r } } } impl<T> Clone for MaybeRef<T> { fn clone(&self) -> Self { match *self { MaybeRef::Direct(ref rc) => MaybeRef::Direct(rc.clone()), MaybeRef::Indirect(ref r) => MaybeRef::Indirect(r.clone()) } } } impl<T> Trace for MaybeRef<T> { fn trace(&self, cb: &mut impl FnMut(PlainRef)) { match *self { MaybeRef::Indirect(ref rc) => rc.trace(cb), MaybeRef::Direct(_) => () } } } impl<T> From<Rc<T>> for MaybeRef<T> { fn from(r: Rc<T>) -> MaybeRef<T> { MaybeRef::Direct(r) } } impl<T> From<MaybeRef<T>> for Rc<T> { fn from(r: MaybeRef<T>) -> Rc<T> { match r { MaybeRef::Direct(rc) => rc, MaybeRef::Indirect(r) => r.data } } } impl<'a, T> From<&'a MaybeRef<T>> for Rc<T> { fn from(r: &'a MaybeRef<T>) -> Rc<T> { match r { MaybeRef::Direct(ref rc) => rc.clone(), MaybeRef::Indirect(ref r) => r.data.clone() } } } impl<T> From<RcRef<T>> for MaybeRef<T> { fn from(r: RcRef<T>) -> MaybeRef<T> { MaybeRef::Indirect(r) } } impl<T> Hash for MaybeRef<T> { fn hash<H: Hasher>(&self, state: &mut H) { std::ptr::hash(&**self, state) } } impl<T> PartialEq for MaybeRef<T> { fn eq(&self, rhs: &Self) -> bool { std::ptr::eq(&**self, &**rhs) } } impl<T> Eq for MaybeRef<T> {} ////////////////////////////////////// // Object for Primitives & other types ////////////////////////////////////// impl Object for i32 { fn from_primitive(p: Primitive, r: &impl Resolve) -> Result<Self> { match p { Primitive::Reference(id) => r.resolve(id)?.as_integer(), p => p.as_integer() } } } impl ObjectWrite for i32 { fn to_primitive(&self, _: &mut impl Updater) -> Result<Primitive> { Ok(Primitive::Integer(*self)) } } impl Object for u32 { fn from_primitive(p: Primitive, r: &impl Resolve) -> Result<Self> { match p { Primitive::Reference(id) => r.resolve(id)?.as_u32(), p => p.as_u32() } } } impl 
ObjectWrite for u32 { fn to_primitive(&self, _: &mut impl Updater) -> Result<Primitive> { Ok(Primitive::Integer(*self as _)) } } impl Object for usize { fn from_primitive(p: Primitive, r: &impl Resolve) -> Result<Self> { match p { Primitive::Reference(id) => Ok(r.resolve(id)?.as_u32()? as usize), p => Ok(p.as_u32()? as usize) } } } impl ObjectWrite for usize { fn to_primitive(&self, _: &mut impl Updater) -> Result<Primitive> { Ok(Primitive::Integer(*self as _)) } } impl Object for f32 { fn from_primitive(p: Primitive, r: &impl Resolve) -> Result<Self> { match p { Primitive::Reference(id) => r.resolve(id)?.as_number(), p => p.as_number() } } } impl ObjectWrite for f32 { fn to_primitive(&self, _: &mut impl Updater) -> Result<Primitive> { Ok(Primitive::Number(*self)) } } impl Object for bool { fn from_primitive(p: Primitive, r: &impl Resolve) -> Result<Self> { match p { Primitive::Reference(id) => r.resolve(id)?.as_bool(), p => p.as_bool() } } } impl ObjectWrite for bool { fn to_primitive(&self, _: &mut impl Updater) -> Result<Primitive> { Ok(Primitive::Boolean(*self)) } } impl Object for Dictionary { fn from_primitive(p: Primitive, r: &impl Resolve) -> Result<Self> { match p { Primitive::Dictionary(dict) => Ok(dict), Primitive::Reference(id) => Dictionary::from_primitive(r.resolve(id)?, r), _ => Err(PdfError::UnexpectedPrimitive {expected: "Dictionary", found: p.get_debug_name()}), } } } impl Object for String { fn from_primitive(p: Primitive, _: &impl Resolve) -> Result<Self> { p.into_name() } } impl<T: Object> Object for Vec<T> { /// Will try to convert `p` to `T` first, then try to convert `p` to Vec<T> fn from_primitive(p: Primitive, r: &impl Resolve) -> Result<Self> { Ok( match p { Primitive::Array(_) => { p.into_array(r)? .into_iter() .map(|p| T::from_primitive(p, r)) .collect::<Result<Vec<T>>>()? }, Primitive::Null => { Vec::new() } Primitive::Reference(id) => Self::from_primitive(r.resolve(id)?, r)?, _ => vec![T::from_primitive(p, r)?] 
} ) } } impl<T: ObjectWrite> ObjectWrite for Vec<T> { fn to_primitive(&self, update: &mut impl Updater) -> Result<Primitive> { Primitive::array::<T, _, _, _>(self.iter(), update) } } impl<T: Trace> Trace for Vec<T> { fn trace(&self, cb: &mut impl FnMut(PlainRef)) { for i in self.iter() { i.trace(cb); } } } /* pub struct Data(pub Vec<u8>); impl Object for Data { fn serialize<W: io::Write>(&self, out: &mut W) -> Result<()> { unimplemented!() } /// Will try to convert `p` to `T` first, then try to convert `p` to Vec<T> fn from_primitive(p: Primitive, r: &impl Resolve) -> Result<Self> { match p { Primitive::Array(_) => { p.into_array(r)? .into_iter() .map(|p| u8::from_primitive(p, r)) .collect::<Result<Vec<T>>>()? }, Primitive::Null => { Vec::new() } Primitive::Reference(id) => Self::from_primitive(r.resolve(id)?, r)?, _ => } } }*/ impl Object for Primitive { fn from_primitive(p: Primitive, _: &impl Resolve) -> Result<Self> { Ok(p) } } impl ObjectWrite for Primitive { fn to_primitive(&self, _: &mut impl Updater) -> Result<Primitive> { Ok(self.clone()) } } impl Trace for Primitive { fn trace(&self, cb: &mut impl FnMut(PlainRef)) { match *self { Primitive::Reference(r) => cb(r), Primitive::Array(ref parts) => parts.iter().for_each(|p| p.trace(cb)), Primitive::Dictionary(ref dict) => dict.values().for_each(|p| p.trace(cb)), _ => () } } } impl ObjectWrite for String { fn to_primitive(&self, _: &mut impl Updater) -> Result<Primitive> { Ok(Primitive::Name(self.clone())) } } impl<V: Object> Object for HashMap<String, V> { fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result<Self> { match p { Primitive::Null => Ok(HashMap::new()), Primitive::Dictionary (dict) => { let mut new = Self::new(); for (key, val) in dict.iter() { new.insert(key.clone(), V::from_primitive(val.clone(), resolve)?); } Ok(new) } Primitive::Reference (id) => HashMap::from_primitive(resolve.resolve(id)?, resolve), p => Err(PdfError::UnexpectedPrimitive {expected: "Dictionary", found: 
p.get_debug_name()}) } } } impl<V: ObjectWrite> ObjectWrite for HashMap<String, V> { fn to_primitive(&self, update: &mut impl Updater) -> Result<Primitive> { if self.is_empty() { Ok(Primitive::Null) } else { let mut dict = Dictionary::new(); for (k, v) in self.iter() { dict.insert(k, v.to_primitive(update)?); } Ok(Primitive::Dictionary(dict)) } } } impl<T: Object> Object for Option<T> { fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result<Self> { match p { Primitive::Null => Ok(None), p => match T::from_primitive(p, resolve) { Ok(p) => Ok(Some(p)), // References to non-existing objects ought not to be an error Err(PdfError::NullRef {..}) => Ok(None), Err(PdfError::FreeObject {..}) => Ok(None), Err(e) => Err(e), } } } } impl<T: ObjectWrite> ObjectWrite for Option<T> { fn to_primitive(&self, update: &mut impl Updater) -> Result<Primitive> { match self { None => Ok(Primitive::Null), Some(t) => t.to_primitive(update) } } } impl<T: Trace> Trace for Option<T> { fn trace(&self, cb: &mut impl FnMut(PlainRef)) { if let Some(ref t) = *self { t.trace(cb) } } } impl<T: Object> Object for Box<T> { fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result<Self> { T::from_primitive(p, resolve).map(Box::new) } } impl<T: ObjectWrite> ObjectWrite for Box<T> { fn to_primitive(&self, update: &mut impl Updater) -> Result<Primitive> { (**self).to_primitive(update) } } impl<T: Trace> Trace for Box<T> { fn trace(&self, cb: &mut impl FnMut(PlainRef)) { (**self).trace(cb) } } impl Object for () { fn from_primitive(_p: Primitive, _resolve: &impl Resolve) -> Result<Self> { Ok(()) } } impl ObjectWrite for () { fn to_primitive(&self, _: &mut impl Updater) -> Result<Primitive> { Ok(Primitive::Null) } } impl Trace for () {} impl<T, U> Object for (T, U) where T: Object, U: Object { fn from_primitive(p: Primitive, resolve: &impl Resolve) -> Result<Self> { let arr = p.into_array(resolve)?; if arr.len() != 2 { bail!("expected array of length 2 (found {})", arr.len()); } let [a, 
b]: [Primitive; 2] = arr.try_into().unwrap(); Ok((T::from_primitive(a, resolve)?, U::from_primitive(b, resolve)?)) } } impl<T, U> ObjectWrite for (T, U) where T: ObjectWrite, U: ObjectWrite { fn to_primitive(&self, update: &mut impl Updater) -> Result<Primitive> { Ok(Primitive::Array(vec![self.0.to_primitive(update)?, self.1.to_primitive(update)?])) } } impl<T: Trace, U: Trace> Trace for (T, U) { fn trace(&self, cb: &mut impl FnMut(PlainRef)) { self.0.trace(cb); self.1.trace(cb); } }
28.199346
109
0.54989
ac2e5fb19b5e119a5fa53a2ffe0bbf1c39baacca
315
struct Owner(i32); impl Owner { // Annotate lifetimes as in a standalone function. fn add_one<'a>(&'a mut self) { self.0 += 1; } fn print<'a>(&'a self) { println!("`print`: {}", self.0); } } fn main() { let mut owner = Owner(18); owner.add_one(); owner.print(); }
16.578947
54
0.51746
f59928ede7b7def6926796042796273a974526e5
2,433
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::S4par { #[doc = r" Modifies the contents of the register"] #[inline(always)] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline(always)] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline(always)] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } } #[doc = r" Value of the field"] pub struct PaR { bits: u32, } impl PaR { #[doc = r" Value of the field as raw bits"] #[inline(always)] pub fn bits(&self) -> u32 { self.bits } } #[doc = r" Proxy"] pub struct _PaW<'a> { w: &'a mut W, } impl<'a> _PaW<'a> { #[doc = r" Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, bits: u32) -> &'a mut W { const MASK: u32 = 4294967295; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((bits & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline(always)] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bits 0:31 - Peripheral address"] #[inline(always)] pub fn pa(&self) -> PaR { let bits = { const MASK: u32 = 4294967295; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) as u32 }; PaR { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline(always)] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bits 0:31 - Peripheral address"] #[inline(always)] pub fn pa(&mut self) -> _PaW { _PaW { w: self } } }
24.089109
59
0.497739
713bcc861a9d1d1bf020c5df3f1c7cde1bf91d03
2,041
use super::ray::Ray; use super::Vec3; use std::f64; // Ratio needs to change when resolution also change. // const CAMERA_SCREEN_SIZE_HEIGHT: f64 = 0.3; // const CAMERA_SCREEN_SIZE_WIDTH: f64 = 0.4; /** * viewPlane points * 0-----------1 * | | * | c | * | | * 2-----------3 */ #[derive(Debug)] pub struct Camera { pub position: Vec3, pub direction: Vec3, pub lower_top_corner: Vec3, pub horizonal: Vec3, pub vertical: Vec3, pub u: Vec3, pub v: Vec3, pub lens_radius: f64, } impl Camera { pub fn set( position: Vec3, look_at: Vec3, up: Vec3, fov: f64, ratio: f64, aperture: f64, focus_dist: f64, ) -> Camera { let theta = fov.to_radians(); let h = (theta / 2.0).tan(); let view_height = 2.0 * h; let view_width = ratio * view_height; let to = Vec3::normalize(position - look_at); let u = Vec3::normalize(Vec3::cross(up, to)); let v = Vec3::cross(to, u); let horizonal = u * view_width * focus_dist; let vertical = v * view_height * focus_dist; // + vertical because I draw from top left let lt_corner = position - (horizonal / 2.0) + (vertical / 2.0) - to * focus_dist; Camera { position, direction: to, horizonal, vertical, u, v, lower_top_corner: lt_corner, lens_radius: aperture / 2.0, } } pub fn generate_ray(&self, x: f64, y: f64) -> Ray { let rd = Vec3::rand_in_unit_disk() * self.lens_radius; let offset = self.u * rd.0 + self.v * rd.1; let fx = x / super::SCREEN_WIDTH as f64; let fy = y / super::SCREEN_HEIGHT as f64; Ray::new( self.position + offset, self.lower_top_corner + (self.horizonal * fx) - (self.vertical * fy) - self.position - offset, ) } }
24.297619
90
0.512984
4a46fc5c3723723ddaa33e12797ba8537b689aa7
367
// Take a look at the license at the top of the repository in the LICENSE file. #![doc = include_str!("../README.md")] #![cfg_attr(feature = "dox", feature(doc_cfg))] pub use ffi; pub use gdk; pub use gio; pub use glib; pub use x11; #[macro_use] mod rt; #[allow(clippy::upper_case_acronyms)] #[allow(unused_imports)] mod auto; mod x11_display; pub use auto::*;
16.681818
79
0.692098
79885d1a8a8f6d61a1fc3c3a49b64ac2693a428c
1,093
use super::{error::TomlHelper, parsable::Parse}; use log::error; use toml::Value; #[derive(Clone, Debug)] pub struct Derive { pub names: Vec<String>, pub cfg_condition: Option<String>, } impl Parse for Derive { fn parse(toml: &Value, object_name: &str) -> Option<Derive> { let names = match toml.lookup("name").and_then(Value::as_str) { Some(names) => names, None => { error!("No 'name' given for derive for object {}", object_name); return None; } }; toml.check_unwanted( &["name", "cfg_condition"], &format!("derive {}", object_name), ); let cfg_condition = toml .lookup("cfg_condition") .and_then(Value::as_str) .map(ToOwned::to_owned); let mut names_vec = Vec::new(); for name in names.split(',') { names_vec.push(name.trim().into()); } Some(Derive { names: names_vec, cfg_condition, }) } } pub type Derives = Vec<Derive>;
25.418605
80
0.524245
ac025e4069b038e99f33283be0428b34e1027a81
11,785
// Copyright 2019-2020 Parity Technologies (UK) Ltd. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use sha2::Sha512; use hmac::Hmac; use pbkdf2::pbkdf2; use schnorrkel::keys::MiniSecretKey; use zeroize::Zeroize; #[derive(Clone, Copy, PartialEq, Eq, Debug)] pub enum Error { InvalidEntropy, } /// `entropy` should be a byte array from a correctly recovered and checksumed BIP39. /// /// This function accepts slices of different length for different word lengths: /// /// + 16 bytes for 12 words. /// + 20 bytes for 15 words. /// + 24 bytes for 18 words. /// + 28 bytes for 21 words. /// + 32 bytes for 24 words. /// /// Any other length will return an error. /// /// `password` is analog to BIP39 seed generation itself, with an empty string being defalt. pub fn mini_secret_from_entropy(entropy: &[u8], password: &str) -> Result<MiniSecretKey, Error> { let seed = seed_from_entropy(entropy, password)?; Ok(MiniSecretKey::from_bytes(&seed[..32]).expect("Length is always correct; qed")) } /// Similar to `mini_secret_from_entropy`, except that it provides the 64-byte seed directly. 
pub fn seed_from_entropy(entropy: &[u8], password: &str) -> Result<[u8; 64], Error> { if entropy.len() < 16 || entropy.len() > 32 || entropy.len() % 4 != 0 { return Err(Error::InvalidEntropy); } let mut salt = String::with_capacity(8 + password.len()); salt.push_str("mnemonic"); salt.push_str(password); let mut seed = [0u8; 64]; pbkdf2::<Hmac<Sha512>>(entropy, salt.as_bytes(), 2048, &mut seed); salt.zeroize(); Ok(seed) } #[cfg(test)] mod test { use super::*; use bip39::{Mnemonic, Language}; use rustc_hex::FromHex; // phrase, entropy, seed, expanded secret_key // // ALL SEEDS GENERATED USING "Substrate" PASSWORD! static VECTORS: &[[&str; 3]] = &[ [ "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about", "00000000000000000000000000000000", "44e9d125f037ac1d51f0a7d3649689d422c2af8b1ec8e00d71db4d7bf6d127e33f50c3d5c84fa3e5399c72d6cbbbbc4a49bf76f76d952f479d74655a2ef2d453", ], [ "legal winner thank year wave sausage worth useful legal winner thank yellow", "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", "4313249608fe8ac10fd5886c92c4579007272cb77c21551ee5b8d60b780416850f1e26c1f4b8d88ece681cb058ab66d6182bc2ce5a03181f7b74c27576b5c8bf", ], [ "letter advice cage absurd amount doctor acoustic avoid letter advice cage above", "80808080808080808080808080808080", "27f3eb595928c60d5bc91a4d747da40ed236328183046892ed6cd5aa9ae38122acd1183adf09a89839acb1e6eaa7fb563cc958a3f9161248d5a036e0d0af533d", ], [ "zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo wrong", "ffffffffffffffffffffffffffffffff", "227d6256fd4f9ccaf06c45eaa4b2345969640462bbb00c5f51f43cb43418c7a753265f9b1e0c0822c155a9cabc769413ecc14553e135fe140fc50b6722c6b9df", ], [ "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon agent", "000000000000000000000000000000000000000000000000", "44e9d125f037ac1d51f0a7d3649689d422c2af8b1ec8e00d71db4d7bf6d127e33f50c3d5c84fa3e5399c72d6cbbbbc4a49bf76f76d952f479d74655a2ef2d453", ], [ 
"legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth useful legal will", "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", "cb1d50e14101024a88905a098feb1553d4306d072d7460e167a60ccb3439a6817a0afc59060f45d999ddebc05308714733c9e1e84f30feccddd4ad6f95c8a445", ], [ "letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic avoid letter always", "808080808080808080808080808080808080808080808080", "9ddecf32ce6bee77f867f3c4bb842d1f0151826a145cb4489598fe71ac29e3551b724f01052d1bc3f6d9514d6df6aa6d0291cfdf997a5afdb7b6a614c88ab36a", ], [ "zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo when", "ffffffffffffffffffffffffffffffffffffffffffffffff", "8971cb290e7117c64b63379c97ed3b5c6da488841bd9f95cdc2a5651ac89571e2c64d391d46e2475e8b043911885457cd23e99a28b5a18535fe53294dc8e1693", ], [ "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon art", "0000000000000000000000000000000000000000000000000000000000000000", "44e9d125f037ac1d51f0a7d3649689d422c2af8b1ec8e00d71db4d7bf6d127e33f50c3d5c84fa3e5399c72d6cbbbbc4a49bf76f76d952f479d74655a2ef2d453", ], [ "legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth title", "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", "3037276a5d05fcd7edf51869eb841bdde27c574dae01ac8cfb1ea476f6bea6ef57ab9afe14aea1df8a48f97ae25b37d7c8326e49289efb25af92ba5a25d09ed3", ], [ "letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic bless", "8080808080808080808080808080808080808080808080808080808080808080", 
"2c9c6144a06ae5a855453d98c3dea470e2a8ffb78179c2e9eb15208ccca7d831c97ddafe844ab933131e6eb895f675ede2f4e39837bb5769d4e2bc11df58ac42", ], [ "zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo vote", "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", "047e89ef7739cbfe30da0ad32eb1720d8f62441dd4f139b981b8e2d0bd412ed4eb14b89b5098c49db2301d4e7df4e89c21e53f345138e56a5e7d63fae21c5939", ], [ "ozone drill grab fiber curtain grace pudding thank cruise elder eight picnic", "9e885d952ad362caeb4efe34a8e91bd2", "f4956be6960bc145cdab782e649a5056598fd07cd3f32ceb73421c3da27833241324dc2c8b0a4d847eee457e6d4c5429f5e625ece22abaa6a976e82f1ec5531d", ], [ "gravity machine north sort system female filter attitude volume fold club stay feature office ecology stable narrow fog", "6610b25967cdcca9d59875f5cb50b0ea75433311869e930b", "fbcc5229ade0c0ff018cb7a329c5459f91876e4dde2a97ddf03c832eab7f26124366a543f1485479c31a9db0d421bda82d7e1fe562e57f3533cb1733b001d84d", ], [ "hamster diagram private dutch cause delay private meat slide toddler razor book happy fancy gospel tennis maple dilemma loan word shrug inflict delay length", "68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c", "7c60c555126c297deddddd59f8cdcdc9e3608944455824dd604897984b5cc369cad749803bb36eb8b786b570c9cdc8db275dbe841486676a6adf389f3be3f076", ], [ "scheme spot photo card baby mountain device kick cradle pact join borrow", "c0ba5a8e914111210f2bd131f3d5e08d", "c12157bf2506526c4bd1b79a056453b071361538e9e2c19c28ba2cfa39b5f23034b974e0164a1e8acd30f5b4c4de7d424fdb52c0116bfc6a965ba8205e6cc121", ], [ "horn tenant knee talent sponsor spell gate clip pulse soap slush warm silver nephew swap uncle crack brave", "6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3", "23766723e970e6b79dec4d5e4fdd627fd27d1ee026eb898feb9f653af01ad22080c6f306d1061656d01c4fe9a14c05f991d2c7d8af8730780de4f94cd99bd819", ], [ "panda eyebrow bullet gorilla call smoke muffin taste mesh discover soft 
ostrich alcohol speed nation flash devote level hobby quick inner drive ghost inside", "9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863", "f4c83c86617cb014d35cd87d38b5ef1c5d5c3d58a73ab779114438a7b358f457e0462c92bddab5a406fe0e6b97c71905cf19f925f356bc673ceb0e49792f4340", ], [ "cat swing flag economy stadium alone churn speed unique patch report train", "23db8160a31d3e0dca3688ed941adbf3", "719d4d4de0638a1705bf5237262458983da76933e718b2d64eb592c470f3c5d222e345cc795337bb3da393b94375ff4a56cfcd68d5ea25b577ee9384d35f4246", ], [ "light rule cinnamon wrap drastic word pride squirrel upgrade then income fatal apart sustain crack supply proud access", "8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0", "7ae1291db32d16457c248567f2b101e62c5549d2a64cd2b7605d503ec876d58707a8d663641e99663bc4f6cc9746f4852e75e7e54de5bc1bd3c299c9a113409e", ], [ "all hour make first leader extend hole alien behind guard gospel lava path output census museum junior mass reopen famous sing advance salt reform", "066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad", "a911a5f4db0940b17ecb79c4dcf9392bf47dd18acaebdd4ef48799909ebb49672947cc15f4ef7e8ef47103a1a91a6732b821bda2c667e5b1d491c54788c69391", ], [ "vessel ladder alter error federal sibling chat ability sun glass valve picture", "f30f8c1da665478f49b001d94c5fc452", "4e2314ca7d9eebac6fe5a05a5a8d3546bc891785414d82207ac987926380411e559c885190d641ff7e686ace8c57db6f6e4333c1081e3d88d7141a74cf339c8f", ], [ "scissors invite lock maple supreme raw rapid void congress muscle digital elegant little brisk hair mango congress clump", "c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05", "7a83851102849edc5d2a3ca9d8044d0d4f00e5c4a292753ed3952e40808593251b0af1dd3c9ed9932d46e8608eb0b928216a6160bd4fc775a6e6fbd493d7c6b2", ], [ "void come effort suffer camp survey warrior heavy shoot primary clutch crush open amazing screen patrol group space point ten exist slush involve unfold", 
"f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f", "938ba18c3f521f19bd4a399c8425b02c716844325b1a65106b9d1593fbafe5e0b85448f523f91c48e331995ff24ae406757cff47d11f240847352b348ff436ed", ] ]; #[test] fn vectors_are_correct() { for vector in VECTORS { let phrase = vector[0]; let expected_entropy: Vec<u8> = vector[1].from_hex().unwrap(); let expected_seed: Vec<u8> = vector[2].from_hex().unwrap(); let mnemonic = Mnemonic::from_phrase(phrase, Language::English).unwrap(); let seed = seed_from_entropy(mnemonic.entropy(), "Substrate").unwrap(); let secret = mini_secret_from_entropy(mnemonic.entropy(), "Substrate").unwrap().to_bytes(); assert_eq!(mnemonic.entropy(), &expected_entropy[..], "Entropy is incorrect for {}", phrase); assert_eq!(&seed[..], &expected_seed[..], "Seed is incorrect for {}", phrase); assert_eq!(&secret[..], &expected_seed[..32], "Secret is incorrect for {}", phrase); } } }
55.328638
202
0.732202
21515b7411f3a4a94f186823760dbbd8a5175c88
485
// variables1.rs // Make me compile! Execute the command `rustlings hint variables1` if you want a hint :) // About this `I AM NOT DONE` thing: // We sometimes encourage you to keep trying things on a given exercise, // even after you already figured it out. If you got everything working and // feel ready for the next exercise, remove the `I AM NOT DONE` comment below. // run using ``rustlings run variables1`` fn main() { let x = 5; println!("x has the value {}", x); }
32.333333
89
0.696907