lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
src/aes_hash.rs
DSLAM-UMD/aHash
48690455d837c807fd283d1bca689c898ddc981d
use crate::convert::*; #[cfg(feature = "specialize")] use crate::fallback_hash::MULTIPLE; use crate::operations::*; use crate::RandomState; use core::hash::Hasher; use crate::random_state::PI; #[derive(Debug, Clone)] pub struct AHasher { enc: u128, sum: u128, key: u128, } impl AHasher { #[inline] pub fn new_with_keys(key1: u128, key2: u128) -> Self { let pi: [u128; 2] = PI.convert(); let key1 = key1 ^ pi[0]; let key2 = key2 ^ pi[1]; Self { enc: key1, sum: key2, key: key1 ^ key2, } } #[allow(unused)] pub(crate) fn test_with_keys(key1: u128, key2: u128) -> Self { Self { enc: key1, sum: key2, key: key1 ^ key2, } } #[inline] pub(crate) fn from_random_state(rand_state: &RandomState) -> Self { let key1 = [rand_state.k0, rand_state.k1].convert(); let key2 = [rand_state.k2, rand_state.k3].convert(); Self { enc: key1, sum: key2, key: key1 ^ key2, } } #[inline(always)] fn add_in_length(&mut self, length: u64) { let mut enc: [u64; 2] = self.enc.convert(); enc[0] = enc[0].wrapping_add(length); self.enc = enc.convert(); } #[inline(always)] fn hash_in(&mut self, new_value: u128) { self.enc = aesenc(self.enc, new_value); self.sum = shuffle_and_add(self.sum, new_value); } #[inline(always)] fn hash_in_2(&mut self, v1: u128, v2: u128) { self.enc = aesenc(self.enc, v1); self.sum = shuffle_and_add(self.sum, v1); self.enc = aesenc(self.enc, v2); self.sum = shuffle_and_add(self.sum, v2); } #[inline] #[cfg(feature = "specialize")] fn short_finish(&self) -> u64 { let combined = aesdec(self.sum, self.enc); let result: [u64; 2] = aesenc(combined, combined).convert(); result[0] } } impl Hasher for AHasher { #[inline] fn write_u8(&mut self, i: u8) { self.write_u64(i as u64); } #[inline] fn write_u16(&mut self, i: u16) { self.write_u64(i as u64); } #[inline] fn write_u32(&mut self, i: u32) { self.write_u64(i as u64); } #[inline] fn write_u128(&mut self, i: u128) { self.hash_in(i); } #[inline] fn write_usize(&mut self, i: usize) { self.write_u64(i as u64); } #[inline] fn write_u64(&mut self, i: 
u64) { self.write_u128(i as u128); } #[inline] #[allow(clippy::collapsible_if)] fn write(&mut self, input: &[u8]) { let mut data = input; let length = data.len(); self.add_in_length(length as u64); if data.len() <= 8 { let value = read_small(data); self.hash_in(value.convert()); } else { if data.len() > 32 { if data.len() > 64 { let tail = data.read_last_u128x4(); let mut current: [u128; 4] = [self.key; 4]; current[0] = aesenc(current[0], tail[0]); current[1] = aesenc(current[1], tail[1]); current[2] = aesenc(current[2], tail[2]); current[3] = aesenc(current[3], tail[3]); let mut sum: [u128; 2] = [self.key, self.key]; sum[0] = add_by_64s(sum[0].convert(), tail[0].convert()).convert(); sum[1] = add_by_64s(sum[1].convert(), tail[1].convert()).convert(); sum[0] = shuffle_and_add(sum[0], tail[2]); sum[1] = shuffle_and_add(sum[1], tail[3]); while data.len() > 64 { let (blocks, rest) = data.read_u128x4(); current[0] = aesenc(current[0], blocks[0]); current[1] = aesenc(current[1], blocks[1]); current[2] = aesenc(current[2], blocks[2]); current[3] = aesenc(current[3], blocks[3]); sum[0] = shuffle_and_add(sum[0], blocks[0]); sum[1] = shuffle_and_add(sum[1], blocks[1]); sum[0] = shuffle_and_add(sum[0], blocks[2]); sum[1] = shuffle_and_add(sum[1], blocks[3]); data = rest; } self.hash_in_2(aesenc(current[0], current[1]), aesenc(current[2], current[3])); self.hash_in(add_by_64s(sum[0].convert(), sum[1].convert()).convert()); } else { let (head, _) = data.read_u128x2(); let tail = data.read_last_u128x2(); self.hash_in_2(head[0], head[1]); self.hash_in_2(tail[0], tail[1]); } } else { if data.len() > 16 { self.hash_in_2(data.read_u128().0, data.read_last_u128()); } else { let value: [u64; 2] = [data.read_u64().0, data.read_last_u64()]; self.hash_in(value.convert()); } } } } #[inline] fn finish(&self) -> u64 { let combined = aesdec(self.sum, self.enc); let result: [u64; 2] = aesenc(aesenc(combined, self.key), combined).convert(); result[0] } } #[cfg(feature = "specialize")] 
pub(crate) struct AHasherU64 { pub(crate) buffer: u64, pub(crate) pad: u64, } #[cfg(feature = "specialize")] impl Hasher for AHasherU64 { #[inline] fn finish(&self) -> u64 { let rot = (self.pad & 64) as u32; self.buffer.rotate_left(rot) } #[inline] fn write(&mut self, _bytes: &[u8]) { unreachable!("This should never be called") } #[inline] fn write_u8(&mut self, i: u8) { self.write_u64(i as u64); } #[inline] fn write_u16(&mut self, i: u16) { self.write_u64(i as u64); } #[inline] fn write_u32(&mut self, i: u32) { self.write_u64(i as u64); } #[inline] fn write_u64(&mut self, i: u64) { self.buffer = folded_multiply(i ^ self.buffer, MULTIPLE); } #[inline] fn write_u128(&mut self, _i: u128) { unreachable!("This should never be called") } #[inline] fn write_usize(&mut self, _i: usize) { unimplemented!() } } #[cfg(feature = "specialize")] pub(crate) struct AHasherFixed(pub AHasher); #[cfg(feature = "specialize")] impl Hasher for AHasherFixed { #[inline] fn finish(&self) -> u64 { self.0.short_finish() } #[inline] fn write(&mut self, bytes: &[u8]) { self.0.write(bytes) } #[inline] fn write_u8(&mut self, i: u8) { self.write_u64(i as u64); } #[inline] fn write_u16(&mut self, i: u16) { self.write_u64(i as u64); } #[inline] fn write_u32(&mut self, i: u32) { self.write_u64(i as u64); } #[inline] fn write_u64(&mut self, i: u64) { self.0.write_u64(i); } #[inline] fn write_u128(&mut self, i: u128) { self.0.write_u128(i); } #[inline] fn write_usize(&mut self, i: usize) { self.0.write_usize(i); } } #[cfg(feature = "specialize")] pub(crate) struct AHasherStr(pub AHasher); #[cfg(feature = "specialize")] impl Hasher for AHasherStr { #[inline] fn finish(&self) -> u64 { let result : [u64; 2] = self.0.enc.convert(); result[0] } #[inline] fn write(&mut self, bytes: &[u8]) { if bytes.len() > 8 { self.0.write(bytes); self.0.enc = aesdec(self.0.sum, self.0.enc); self.0.enc = aesenc(aesenc(self.0.enc, self.0.key), self.0.enc); } else { self.0.add_in_length(bytes.len() as u64); let value = 
read_small(bytes).convert(); self.0.sum = shuffle_and_add(self.0.sum, value); self.0.enc = aesdec(self.0.sum, self.0.enc); self.0.enc = aesenc(aesenc(self.0.enc, self.0.key), self.0.enc); } } #[inline] fn write_u8(&mut self, _i: u8) {} #[inline] fn write_u16(&mut self, _i: u16) {} #[inline] fn write_u32(&mut self, _i: u32) {} #[inline] fn write_u64(&mut self, _i: u64) {} #[inline] fn write_u128(&mut self, _i: u128) {} #[inline] fn write_usize(&mut self, _i: usize) {} } #[cfg(test)] mod tests { use super::*; use crate::convert::Convert; use crate::operations::aesenc; use crate::RandomState; use std::hash::{BuildHasher, Hasher}; #[test] fn test_sanity() { let mut hasher = RandomState::with_seeds(1, 2, 3, 4).build_hasher(); hasher.write_u64(0); let h1 = hasher.finish(); hasher.write(&[1, 0, 0, 0, 0, 0, 0, 0]); let h2 = hasher.finish(); assert_ne!(h1, h2); } #[cfg(feature = "compile-time-rng")] #[test] fn test_builder() { use std::collections::HashMap; use std::hash::BuildHasherDefault; let mut map = HashMap::<u32, u64, BuildHasherDefault<AHasher>>::default(); map.insert(1, 3); } #[cfg(feature = "compile-time-rng")] #[test] fn test_default() { let hasher_a = AHasher::default(); let a_enc: [u64; 2] = hasher_a.enc.convert(); let a_sum: [u64; 2] = hasher_a.sum.convert(); assert_ne!(0, a_enc[0]); assert_ne!(0, a_enc[1]); assert_ne!(0, a_sum[0]); assert_ne!(0, a_sum[1]); assert_ne!(a_enc[0], a_enc[1]); assert_ne!(a_sum[0], a_sum[1]); assert_ne!(a_enc[0], a_sum[0]); assert_ne!(a_enc[1], a_sum[1]); let hasher_b = AHasher::default(); let b_enc: [u64; 2] = hasher_b.enc.convert(); let b_sum: [u64; 2] = hasher_b.sum.convert(); assert_eq!(a_enc[0], b_enc[0]); assert_eq!(a_enc[1], b_enc[1]); assert_eq!(a_sum[0], b_sum[0]); assert_eq!(a_sum[1], b_sum[1]); } #[test] fn test_hash() { let mut result: [u64; 2] = [0x6c62272e07bb0142, 0x62b821756295c58d]; let value: [u64; 2] = [1 << 32, 0xFEDCBA9876543210]; result = aesenc(value.convert(), result.convert()).convert(); result = 
aesenc(result.convert(), result.convert()).convert(); let mut result2: [u64; 2] = [0x6c62272e07bb0142, 0x62b821756295c58d]; let value2: [u64; 2] = [1, 0xFEDCBA9876543210]; result2 = aesenc(value2.convert(), result2.convert()).convert(); result2 = aesenc(result2.convert(), result.convert()).convert(); let result: [u8; 16] = result.convert(); let result2: [u8; 16] = result2.convert(); assert_ne!(hex::encode(result), hex::encode(result2)); } #[test] fn test_conversion() { let input: &[u8] = "dddddddd".as_bytes(); let bytes: u64 = as_array!(input, 8).convert(); assert_eq!(bytes, 0x6464646464646464); } }
use crate::convert::*; #[cfg(feature = "specialize")] use crate::fallback_hash::MULTIPLE; use crate::operations::*; use crate::RandomState; use core::hash::Hasher; use crate::random_state::PI; #[derive(Debug, Clone)] pub struct AHasher { enc: u128, sum: u128, key: u128, } impl AHasher { #[inline] pub fn new_with_keys(key1: u128, key2: u128) -> Self { let pi: [u128; 2] = PI.convert(); let key1 = key1 ^ pi[0]; let key2 = key2 ^ pi[1]; Self { enc: key1, sum: key2, key: key1 ^ key2, } } #[allow(unused)] pub(crate) fn test_with_keys(key1: u128, key2: u128) -> Self { Self { enc: key1, sum: key2, key: key1 ^ key2, } } #[inline] pub(crate) fn from_random_state(rand_state: &RandomState) -> Self { let key1 = [rand_state.k0, rand_state.k1].convert(); let key2 = [rand_state.k2, rand_state.k3].convert(); Self { enc: key1, sum: key2, key: key1 ^ key2, } } #[inline(always)] fn add_in_length(&mut self, length: u64) { let mut enc: [u64; 2] = self.enc.convert(); enc[0] = enc[0].wrapping_add(length); self.enc = enc.convert(); } #[inline(always)] fn hash_in(&mut self, new_value: u128) { self.enc = aesenc(self.enc, new_value); self.sum = shuffle_and_add(self.sum, new_value); } #[inline(always)] fn hash_in_2(&mut self, v1: u128, v2: u128) { self.enc = aesenc(self.enc, v1); self.sum = shuffle_and_add(self.sum, v1); self.enc = aesenc(self.enc, v2); self.sum = shuffle_and_add(self.sum, v2); } #[inline] #[cfg(feature = "specialize")] fn short_finish(&self) -> u64 { let combined = aesdec(self.sum, self.enc); let result: [u64; 2] = aesenc(combined, combined).convert(); result[0] } } impl Hasher for AHasher { #[inline] fn write_u8(&mut self, i: u8) { self.write_u64(i as u64); } #[inline] fn write_u16(&mut self, i: u16) { self.write_u64(i as u64); } #[inline] fn write_u32(&mut self, i: u32) { self.write_u64(i as u64); } #[inline] fn write_u128(&mut self, i: u128) { self.hash_in(i); } #[inline] fn write_usize(&mut self, i: usize) { self.write_u64(i as u64); } #[inline] fn write_u64(&mut self, i: 
u64) { self.write_u128(i as u128); } #[inline] #[allow(clippy::collapsible_if)] fn write(&mut self, input: &[u8]) { let mut data = input; let length = data.len(); self.add_in_length(length as u64); if data.len() <= 8 { let value = read_small(data); self.hash_in(value.convert()); } else { if data.len() > 32 { if data.len() > 64 { let tail = data.read_last_u128x4(); let mut current: [u128; 4] = [self.key; 4]; current[0] = aesenc(current[0], tail[0]); current[1] = aesenc(current[1], tail[1]); current[2] = aesenc(current[2], tail[2]); current[3] = aesenc(current[3], tail[3]); let mut sum: [u128; 2] = [self.key, self.key]; sum[0] = add_by_64s(sum[0].convert(), tail[0].convert()).convert(); sum[1] = add_by_64s(sum[1].convert(), tail[1].convert()).convert(); sum[0] = shuffle_and_add(sum[0], tail[2]); sum[1] = shuffle_and_add(sum[1], tail[3]); while data.len() > 64 { let (blocks, rest) = data.read_u128x4(); current[0] = aesenc(current[0], blocks[0]); current[1] = aesenc(current[1], blocks[1]); current[2] = aesenc(current[2], blocks[2]); current[3] = aesenc(current[3], blocks[3]); sum[0] = shuffle_and_add(sum[0], blocks[0]); sum[1] = shuffle_and_add(sum[1], blocks[1]); sum[0] = shuffle_and_add(sum[0], blocks[2]); sum[1] = shuffle_and_add(sum[1], blocks[3]); data = rest; } self.hash_in_2(aesenc(current[0], current[1]), aesenc(current[2], current[3])); self.hash_in(add_by_64s(sum[0].convert(), sum[1].convert()).convert()); } else { let (head, _) = data.read_u128x2(); let tail = data.read_last_u128x2(); self.hash_in_2(head[0], head[1]); self.hash_in_2(tail[0], tail[1]); } } else { if data.len() > 16 { self.hash_in_2(data.read_u128().0, data.read_last_u128()); } else { let value: [u64; 2] = [data.read_u64().0, data.read_last_u64()]; self.hash_in(value.convert()); } } } } #[inline] fn finish(&self) -> u64 { let combined = aesdec(self.sum, self.enc); let result: [u64; 2] = aesenc(aesenc(combined, self.key), combined).convert(); result[0] } } #[cfg(feature = "specialize")] 
pub(crate) struct AHasherU64 { pub(crate) buffer: u64, pub(crate) pad: u64, } #[cfg(feature = "specialize")] impl Hasher for AHasherU64 { #[inline] fn finish(&self) -> u64 { let rot = (self.pad & 64) as u32; self.buffer.rotate_left(rot) } #[inline] fn write(&mut self, _bytes: &[u8]) { unreachable!("This should never be called") } #[inline] fn write_u8(&mut self, i: u8) { self.write_u64(i as u64); } #[inline] fn write_u16(&mut self, i: u16) { self.write_u64(i as u64); } #[inline] fn write_u32(&mut self, i: u32) { self.write_u64(i as u64); } #[inline] fn write_u64(&mut self, i: u64) { self.buffer = folded_multiply(i ^ self.buffer, MULTIPLE); } #[inline] fn write_u128(&mut self, _i: u128) { unreachable!("This should never be called") } #[inline] fn write_usize(&mut self, _i: usize) { unimplemented!() } } #[cfg(feature = "specialize")] pub(crate) struct AHasherFixed(pub AHasher); #[cfg(feature = "specialize")] impl Hasher for AHasherFixed { #[inline] fn finish(&self) -> u64 { self.0.short_finish() } #[inline] fn write(&mut self, bytes: &[u8]) { self.0.write(bytes) } #[inline] fn write_u8(&mut self, i: u8) { self.write_u64(i as u64); } #[inline] fn write_u16(&mut self, i: u16) { self.write_u64(i as u64); } #[inline] fn write_u32(&mut self, i: u32) { self.write_u64(i as u64); } #[inline] fn write_u64(&mut self, i: u64) { self.0.write_u64(i); } #[inline] fn write_u128(&mut self, i: u128) { self.0.write_u128(i); } #[inline] fn write_usize(&mut self, i: usize) { self.0.write_usize(i); } } #[cfg(feature = "specialize")] pub(crate) struct AHasherStr(pub AHasher); #[cfg(feature = "specialize")] impl Hasher for AHasherStr { #[inline] fn finish(&self) -> u64 { let result : [u64; 2] = self.0.enc.convert(); result[0] } #[inline]
#[inline] fn write_u8(&mut self, _i: u8) {} #[inline] fn write_u16(&mut self, _i: u16) {} #[inline] fn write_u32(&mut self, _i: u32) {} #[inline] fn write_u64(&mut self, _i: u64) {} #[inline] fn write_u128(&mut self, _i: u128) {} #[inline] fn write_usize(&mut self, _i: usize) {} } #[cfg(test)] mod tests { use super::*; use crate::convert::Convert; use crate::operations::aesenc; use crate::RandomState; use std::hash::{BuildHasher, Hasher}; #[test] fn test_sanity() { let mut hasher = RandomState::with_seeds(1, 2, 3, 4).build_hasher(); hasher.write_u64(0); let h1 = hasher.finish(); hasher.write(&[1, 0, 0, 0, 0, 0, 0, 0]); let h2 = hasher.finish(); assert_ne!(h1, h2); } #[cfg(feature = "compile-time-rng")] #[test] fn test_builder() { use std::collections::HashMap; use std::hash::BuildHasherDefault; let mut map = HashMap::<u32, u64, BuildHasherDefault<AHasher>>::default(); map.insert(1, 3); } #[cfg(feature = "compile-time-rng")] #[test] fn test_default() { let hasher_a = AHasher::default(); let a_enc: [u64; 2] = hasher_a.enc.convert(); let a_sum: [u64; 2] = hasher_a.sum.convert(); assert_ne!(0, a_enc[0]); assert_ne!(0, a_enc[1]); assert_ne!(0, a_sum[0]); assert_ne!(0, a_sum[1]); assert_ne!(a_enc[0], a_enc[1]); assert_ne!(a_sum[0], a_sum[1]); assert_ne!(a_enc[0], a_sum[0]); assert_ne!(a_enc[1], a_sum[1]); let hasher_b = AHasher::default(); let b_enc: [u64; 2] = hasher_b.enc.convert(); let b_sum: [u64; 2] = hasher_b.sum.convert(); assert_eq!(a_enc[0], b_enc[0]); assert_eq!(a_enc[1], b_enc[1]); assert_eq!(a_sum[0], b_sum[0]); assert_eq!(a_sum[1], b_sum[1]); } #[test] fn test_hash() { let mut result: [u64; 2] = [0x6c62272e07bb0142, 0x62b821756295c58d]; let value: [u64; 2] = [1 << 32, 0xFEDCBA9876543210]; result = aesenc(value.convert(), result.convert()).convert(); result = aesenc(result.convert(), result.convert()).convert(); let mut result2: [u64; 2] = [0x6c62272e07bb0142, 0x62b821756295c58d]; let value2: [u64; 2] = [1, 0xFEDCBA9876543210]; result2 = 
aesenc(value2.convert(), result2.convert()).convert(); result2 = aesenc(result2.convert(), result.convert()).convert(); let result: [u8; 16] = result.convert(); let result2: [u8; 16] = result2.convert(); assert_ne!(hex::encode(result), hex::encode(result2)); } #[test] fn test_conversion() { let input: &[u8] = "dddddddd".as_bytes(); let bytes: u64 = as_array!(input, 8).convert(); assert_eq!(bytes, 0x6464646464646464); } }
fn write(&mut self, bytes: &[u8]) { if bytes.len() > 8 { self.0.write(bytes); self.0.enc = aesdec(self.0.sum, self.0.enc); self.0.enc = aesenc(aesenc(self.0.enc, self.0.key), self.0.enc); } else { self.0.add_in_length(bytes.len() as u64); let value = read_small(bytes).convert(); self.0.sum = shuffle_and_add(self.0.sum, value); self.0.enc = aesdec(self.0.sum, self.0.enc); self.0.enc = aesenc(aesenc(self.0.enc, self.0.key), self.0.enc); } }
function_block-full_function
[ { "content": "fn test_input_affect_every_byte<T: Hasher>(constructor: impl Fn(u128, u128) -> T) {\n\n let base = hash_with(&0, constructor(0, 0));\n\n for shift in 0..16 {\n\n let mut alternitives = vec![];\n\n for v in 0..256 {\n\n let input = (v as u128) << (shift * 8);\n\n let hasher = constructor(0, 0);\n\n alternitives.push(hash_with(&input, hasher));\n\n }\n\n assert_each_byte_differs(shift, base, alternitives);\n\n }\n\n}\n\n\n", "file_path": "src/hash_quality_test.rs", "rank": 0, "score": 191313.49835310288 }, { "content": "fn test_keys_change_output<T: Hasher>(constructor: impl Fn(u128, u128) -> T) {\n\n let mut a = constructor(1, 1);\n\n let mut b = constructor(1, 2);\n\n let mut c = constructor(2, 1);\n\n let mut d = constructor(2, 2);\n\n \"test\".hash(&mut a);\n\n \"test\".hash(&mut b);\n\n \"test\".hash(&mut c);\n\n \"test\".hash(&mut d);\n\n assert_sufficiently_different(a.finish(), b.finish(), 1);\n\n assert_sufficiently_different(a.finish(), c.finish(), 1);\n\n assert_sufficiently_different(a.finish(), d.finish(), 1);\n\n assert_sufficiently_different(b.finish(), c.finish(), 1);\n\n assert_sufficiently_different(b.finish(), d.finish(), 1);\n\n assert_sufficiently_different(c.finish(), d.finish(), 1);\n\n}\n\n\n", "file_path": "src/hash_quality_test.rs", "rank": 1, "score": 175952.06969853916 }, { "content": "fn test_single_key_bit_flip<T: Hasher>(constructor: impl Fn(u128, u128) -> T) {\n\n for bit in 0..128 {\n\n let mut a = constructor(0, 0);\n\n let mut b = constructor(0, 1 << bit);\n\n let mut c = constructor(1 << bit, 0);\n\n \"1234\".hash(&mut a);\n\n \"1234\".hash(&mut b);\n\n \"1234\".hash(&mut c);\n\n assert_sufficiently_different(a.finish(), b.finish(), 2);\n\n assert_sufficiently_different(a.finish(), c.finish(), 2);\n\n assert_sufficiently_different(b.finish(), c.finish(), 2);\n\n let mut a = constructor(0, 0);\n\n let mut b = constructor(0, 1 << bit);\n\n let mut c = constructor(1 << bit, 0);\n\n \"12345678\".hash(&mut a);\n\n 
\"12345678\".hash(&mut b);\n\n \"12345678\".hash(&mut c);\n\n assert_sufficiently_different(a.finish(), b.finish(), 2);\n\n assert_sufficiently_different(a.finish(), c.finish(), 2);\n\n assert_sufficiently_different(b.finish(), c.finish(), 2);\n", "file_path": "src/hash_quality_test.rs", "rank": 2, "score": 173057.5439330745 }, { "content": "///Ensures that for every bit in the output there is some value for each byte in the key that flips it.\n\nfn test_keys_affect_every_byte<H: Hash, T: Hasher>(item: H, constructor: impl Fn(u128, u128) -> T) {\n\n let base = hash_with(&item, constructor(0, 0));\n\n for shift in 0..16 {\n\n let mut alternitives1 = vec![];\n\n let mut alternitives2 = vec![];\n\n for v in 0..256 {\n\n let input = (v as u128) << (shift * 8);\n\n let hasher1 = constructor(input, 0);\n\n let hasher2 = constructor(0, input);\n\n let h1 = hash_with(&item, hasher1);\n\n let h2 = hash_with(&item, hasher2);\n\n alternitives1.push(h1);\n\n alternitives2.push(h2);\n\n }\n\n assert_each_byte_differs(shift, base, alternitives1);\n\n assert_each_byte_differs(shift, base, alternitives2);\n\n }\n\n}\n\n\n", "file_path": "src/hash_quality_test.rs", "rank": 3, "score": 168388.99578286445 }, { "content": "fn test_finish_is_consistent<T: Hasher>(constructor: impl Fn(u128, u128) -> T) {\n\n let mut hasher = constructor(1, 2);\n\n \"Foo\".hash(&mut hasher);\n\n let a = hasher.finish();\n\n let b = hasher.finish();\n\n assert_eq!(a, b);\n\n}\n\n\n", "file_path": "src/hash_quality_test.rs", "rank": 4, "score": 159626.85760824807 }, { "content": "fn test_all_bytes_matter<T: Hasher>(hasher: impl Fn() -> T) {\n\n let mut item = vec![0; 256];\n\n let base_hash = hash(&item, &hasher);\n\n for pos in 0..256 {\n\n item[pos] = 255;\n\n let hash = hash(&item, &hasher);\n\n assert_ne!(base_hash, hash, \"Position {} did not affect output\", pos);\n\n item[pos] = 0;\n\n }\n\n}\n\n\n", "file_path": "src/hash_quality_test.rs", "rank": 5, "score": 155652.29292947293 }, { "content": "fn 
test_padding_doesnot_collide<T: Hasher>(hasher: impl Fn() -> T) {\n\n for c in 0..128u8 {\n\n for string in [\"\", \"\\0\", \"\\x01\", \"1234\", \"12345678\", \"1234567812345678\"].iter() {\n\n let mut short = hasher();\n\n string.hash(&mut short);\n\n let value = short.finish();\n\n let mut padded = string.to_string();\n\n for num in 1..=128 {\n\n let mut long = hasher();\n\n padded.push(c as char);\n\n padded.hash(&mut long);\n\n let (same_bytes, same_nibbles) = count_same_bytes_and_nibbles(value, long.finish());\n\n assert!(\n\n same_bytes <= 3,\n\n format!(\"{} bytes of {} -> {:x} vs {:x}\", num, c, value, long.finish())\n\n );\n\n assert!(\n\n same_nibbles <= 8,\n\n format!(\"{} bytes of {} -> {:x} vs {:x}\", num, c, value, long.finish())\n\n );\n", "file_path": "src/hash_quality_test.rs", "rank": 6, "score": 152965.36461262856 }, { "content": "fn gen_combinations(options: &[u32; 8], depth: u32, so_far: Vec<u32>, combinations: &mut Vec<Vec<u32>>) {\n\n if depth == 0 {\n\n return;\n\n }\n\n for option in options {\n\n let mut next = so_far.clone();\n\n next.push(*option);\n\n combinations.push(next.clone());\n\n gen_combinations(options, depth - 1, next, combinations);\n\n }\n\n}\n\n\n", "file_path": "src/hash_quality_test.rs", "rank": 7, "score": 152592.65442774136 }, { "content": "fn hash_with<H: Hash, T: Hasher>(b: &H, mut hasher: T) -> u64 {\n\n b.hash(&mut hasher);\n\n hasher.finish()\n\n}\n\n\n", "file_path": "src/hash_quality_test.rs", "rank": 8, "score": 140655.26785185517 }, { "content": "/// Provides a way to get an optimized hasher for a given data type.\n\n/// Rather than using a Hasher generically which can hash any value, this provides a way to get a specialized hash\n\n/// for a specific type. 
So this may be faster for primitive types.\n\n/// # Example\n\n/// ```\n\n/// use std::hash::BuildHasher;\n\n/// use ahash::RandomState;\n\n/// use ahash::CallHasher;\n\n///\n\n/// let hash_builder = RandomState::new();\n\n/// //...\n\n/// let value = 17;\n\n/// let hash = u32::get_hash(&value, &hash_builder);\n\n/// ```\n\npub trait CallHasher {\n\n fn get_hash<H: Hash + ?Sized, B: BuildHasher>(value: &H, build_hasher: &B) -> u64;\n\n}\n\n\n\n#[cfg(not(feature = \"specialize\"))]\n\nimpl<T> CallHasher for T\n\nwhere\n\n T: Hash + ?Sized,\n\n{\n\n #[inline]\n\n fn get_hash<H: Hash + ?Sized, B: BuildHasher>(value: &H, build_hasher: &B) -> u64 {\n\n let mut hasher = build_hasher.build_hasher();\n\n value.hash(&mut hasher);\n\n hasher.finish()\n\n }\n\n}\n\n\n\n#[cfg(feature = \"specialize\")]\n\nimpl<T> CallHasher for T\n\nwhere\n", "file_path": "src/specialize.rs", "rank": 9, "score": 137567.5821642611 }, { "content": "fn test_no_pair_collisions<T: Hasher>(hasher: impl Fn() -> T) {\n\n let base = [0_u64, 0_u64];\n\n let base_hash = hash(&base, &hasher);\n\n for bitpos1 in 0..64 {\n\n let a = 1_u64 << bitpos1;\n\n for bitpos2 in 0..bitpos1 {\n\n let b = 1_u64 << bitpos2;\n\n let aa = hash(&[a, a], &hasher);\n\n let ab = hash(&[a, b], &hasher);\n\n let ba = hash(&[b, a], &hasher);\n\n let bb = hash(&[b, b], &hasher);\n\n assert_sufficiently_different(base_hash, aa, 3);\n\n assert_sufficiently_different(base_hash, ab, 3);\n\n assert_sufficiently_different(base_hash, ba, 3);\n\n assert_sufficiently_different(base_hash, bb, 3);\n\n assert_sufficiently_different(aa, ab, 3);\n\n assert_sufficiently_different(ab, ba, 3);\n\n assert_sufficiently_different(ba, bb, 3);\n\n assert_sufficiently_different(aa, ba, 3);\n\n assert_sufficiently_different(ab, bb, 3);\n\n assert_sufficiently_different(aa, bb, 3);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/hash_quality_test.rs", "rank": 10, "score": 134970.3854669006 }, { "content": "fn test_single_bit_flip<T: Hasher>(hasher: impl Fn() 
-> T) {\n\n let size = 32;\n\n let compare_value = hash(&0u32, &hasher);\n\n for pos in 0..size {\n\n let test_value = hash(&(1u32 << pos), &hasher);\n\n assert_sufficiently_different(compare_value, test_value, 2);\n\n }\n\n let size = 64;\n\n let compare_value = hash(&0u64, &hasher);\n\n for pos in 0..size {\n\n let test_value = hash(&(1u64 << pos), &hasher);\n\n assert_sufficiently_different(compare_value, test_value, 2);\n\n }\n\n let size = 128;\n\n let compare_value = hash(&0u128, &hasher);\n\n for pos in 0..size {\n\n let test_value = hash(&(1u128 << pos), &hasher);\n\n dbg!(compare_value, test_value);\n\n assert_sufficiently_different(compare_value, test_value, 2);\n\n }\n\n}\n\n\n", "file_path": "src/hash_quality_test.rs", "rank": 11, "score": 132897.30253883774 }, { "content": "fn test_no_full_collisions<T: Hasher>(gen_hash: impl Fn() -> T) {\n\n let options: [u32; 8] = [\n\n 0x00000000, 0x20000000, 0x40000000, 0x60000000, 0x80000000, 0xA0000000, 0xC0000000, 0xE0000000,\n\n ];\n\n let mut combinations = Vec::new();\n\n gen_combinations(&options, 7, Vec::new(), &mut combinations);\n\n let mut map: HashMap<u64, Vec<u8>> = HashMap::new();\n\n for combination in combinations {\n\n let array = unsafe {\n\n let (begin, middle, end) = combination.align_to::<u8>();\n\n assert_eq!(0, begin.len());\n\n assert_eq!(0, end.len());\n\n middle.to_vec()\n\n };\n\n let mut hasher = gen_hash();\n\n hasher.write(&array);\n\n let hash = hasher.finish();\n\n if let Some(value) = map.get(&hash) {\n\n assert_eq!(\n\n value, &array,\n\n \"Found a collision between {:x?} and {:x?}. 
Hash: {:x?}\",\n\n value, &array, &hash\n\n );\n\n } else {\n\n map.insert(hash, array);\n\n }\n\n }\n\n assert_eq!(2396744, map.len());\n\n}\n\n\n", "file_path": "src/hash_quality_test.rs", "rank": 12, "score": 116382.42860437292 }, { "content": "fn bench_ahash(c: &mut Criterion) {\n\n c.bench(\n\n \"aeshash\",\n\n ParameterizedBenchmark::new(\"u8\", |b, &s| b.iter(|| black_box(aeshash(s))), &U8_VALUES),\n\n );\n\n c.bench(\n\n \"aeshash\",\n\n ParameterizedBenchmark::new(\"u16\", |b, &s| b.iter(|| black_box(aeshash(s))), &U16_VALUES),\n\n );\n\n c.bench(\n\n \"aeshash\",\n\n ParameterizedBenchmark::new(\"u32\", |b, &s| b.iter(|| black_box(aeshash(s))), &U32_VALUES),\n\n );\n\n c.bench(\n\n \"aeshash\",\n\n ParameterizedBenchmark::new(\"u64\", |b, &s| b.iter(|| black_box(aeshash(s))), &U64_VALUES),\n\n );\n\n c.bench(\n\n \"aeshash\",\n\n ParameterizedBenchmark::new(\"u128\", |b, &s| b.iter(|| black_box(aeshash(s))), &U128_VALUES),\n\n );\n\n c.bench(\n\n \"aeshash\",\n\n ParameterizedBenchmark::new(\"string\", |b, s| b.iter(|| black_box(aeshash(s))), gen_strings()),\n\n );\n\n}\n\n\n", "file_path": "tests/bench.rs", "rank": 13, "score": 113090.23180643762 }, { "content": "fn generic_hash<K: Hash, B: BuildHasher>(key: &K, builder: &B) -> u64 {\n\n let mut hasher = builder.build_hasher();\n\n key.hash(&mut hasher);\n\n hasher.finish()\n\n}\n\n\n", "file_path": "compare/tests/compare.rs", "rank": 14, "score": 112434.74163223026 }, { "content": "fn assert_each_byte_differs(num: u64, base: u64, alternitives: Vec<u64>) {\n\n let mut changed_bits = 0_u64;\n\n for alternitive in alternitives {\n\n changed_bits |= base ^ alternitive\n\n }\n\n assert_eq!(core::u64::MAX, changed_bits, \"Bits changed: {:x} on num: {:?}\", changed_bits, num);\n\n}\n\n\n", "file_path": "src/hash_quality_test.rs", "rank": 15, "score": 109770.26411953487 }, { "content": "fn compare_ahash(c: &mut Criterion) {\n\n let builder = RandomState::new();\n\n let test = \"compare_ahash\";\n\n for num in 
&[1,3,7,15,31,63,127,255,511,1023] {\n\n let name = \"string\".to_owned() + &num.to_string();\n\n let string = create_string(*num);\n\n c.bench_with_input(BenchmarkId::new(test, &name), &string, |bencher, s| {\n\n bencher.iter(|| {\n\n black_box(ahash(s, &builder))\n\n });\n\n });\n\n }\n\n}\n\n\n", "file_path": "compare/tests/compare.rs", "rank": 16, "score": 109701.64876051126 }, { "content": "fn count_same_bytes_and_nibbles(a: u64, b: u64) -> (i32, i32) {\n\n let mut same_byte_count = 0;\n\n let mut same_nibble_count = 0;\n\n for byte in 0..8 {\n\n let ba = (a >> (8 * byte)) as u8;\n\n let bb = (b >> (8 * byte)) as u8;\n\n if ba == bb {\n\n same_byte_count += 1;\n\n }\n\n if ba & 0xF0u8 == bb & 0xF0u8 {\n\n same_nibble_count += 1;\n\n }\n\n if ba & 0x0Fu8 == bb & 0x0Fu8 {\n\n same_nibble_count += 1;\n\n }\n\n }\n\n (same_byte_count, same_nibble_count)\n\n}\n\n\n", "file_path": "src/hash_quality_test.rs", "rank": 17, "score": 108552.39905351031 }, { "content": "#[inline(never)]\n\n#[no_panic]\n\nfn hash_test_specialize(num: i32, string: &str) -> (u64, u64) {\n\n let hasher1 = AHasher::new_with_keys(1, 2);\n\n let hasher2 = AHasher::new_with_keys(1, 2);\n\n (\n\n i32::get_hash(&num, &SimpleBuildHasher { hasher: hasher1 }),\n\n <[u8]>::get_hash(string.as_bytes(), &SimpleBuildHasher { hasher: hasher2 }),\n\n )\n\n}\n\n\n", "file_path": "tests/nopanic.rs", "rank": 18, "score": 108402.51899189517 }, { "content": "fn bench_ahash_words(c: &mut Criterion) {\n\n let words = gen_word_pairs();\n\n c.bench_function(\"aes_words\", |b| b.iter(|| black_box(ahash_vec(&words))));\n\n}\n\n\n", "file_path": "tests/map_tests.rs", "rank": 19, "score": 106589.840343751 }, { "content": "fn ahash<K: Hash>(k: &K, builder: &RandomState) -> u64 {\n\n let hasher = builder.build_hasher();\n\n k.get_hash(hasher)\n\n}\n\n\n", "file_path": "compare/tests/compare.rs", "rank": 20, "score": 105961.87456690906 }, { "content": "fn hash<H: Hash, T: Hasher>(b: &H, hash_builder: &dyn Fn() -> T) -> u64 
{\n\n let mut hasher = hash_builder();\n\n b.hash(&mut hasher);\n\n hasher.finish()\n\n}\n\n\n", "file_path": "src/hash_quality_test.rs", "rank": 21, "score": 100991.67710106322 }, { "content": "fn ahash_vec<H: Hash>(b: &Vec<H>) -> u64 {\n\n let mut total: u64 = 0;\n\n for item in b {\n\n let mut hasher = AHasher::new_with_keys(1234, 5678);\n\n item.hash(&mut hasher);\n\n total = total.wrapping_add(hasher.finish());\n\n }\n\n total\n\n}\n\n\n", "file_path": "tests/map_tests.rs", "rank": 22, "score": 94624.43402286191 }, { "content": "#[allow(unused)] // False positive\n\nfn hash<H: Hash, B: BuildHasher>(b: &H, build_hasher: &B) -> u64 {\n\n H::get_hash(b, build_hasher)\n\n}\n\n\n", "file_path": "tests/map_tests.rs", "rank": 23, "score": 90334.5728743963 }, { "content": "#[inline(never)]\n\n#[no_panic]\n\nfn hash_test_random(num: i32, string: &str) -> (u64, u64) {\n\n let build_hasher1 = RandomState::with_seeds(1, 2, 3, 4);\n\n let build_hasher2 = RandomState::with_seeds(1, 2, 3, 4);\n\n (\n\n i32::get_hash(&num, &build_hasher1),\n\n <[u8]>::get_hash(string.as_bytes(), &build_hasher2),\n\n )\n\n}\n\n\n", "file_path": "tests/nopanic.rs", "rank": 24, "score": 86372.93788085668 }, { "content": "#[inline(never)]\n\n#[no_panic]\n\nfn hash_test_final(num: i32, string: &str) -> (u64, u64) {\n\n use core::hash::Hasher;\n\n let mut hasher1 = AHasher::new_with_keys(1, 2);\n\n let mut hasher2 = AHasher::new_with_keys(3, 4);\n\n hasher1.write_i32(num);\n\n hasher2.write(string.as_bytes());\n\n (hasher1.finish(), hasher2.finish())\n\n}\n\n\n", "file_path": "tests/nopanic.rs", "rank": 25, "score": 86372.93788085668 }, { "content": "fn assert_sufficiently_different(a: u64, b: u64, tolerance: i32) {\n\n let (same_byte_count, same_nibble_count) = count_same_bytes_and_nibbles(a, b);\n\n assert!(same_byte_count <= tolerance, \"{:x} vs {:x}: {:}\", a, b, same_byte_count);\n\n assert!(\n\n same_nibble_count <= tolerance * 3,\n\n \"{:x} vs {:x}: {:}\",\n\n a,\n\n b,\n\n 
same_nibble_count\n\n );\n\n let flipped_bits = (a ^ b).count_ones();\n\n assert!(\n\n flipped_bits > 12 && flipped_bits < 52,\n\n \"{:x} and {:x}: {:}\",\n\n a,\n\n b,\n\n flipped_bits\n\n );\n\n for rotate in 0..64 {\n\n let flipped_bits2 = (a ^ (b.rotate_left(rotate))).count_ones();\n\n assert!(\n\n flipped_bits2 > 10 && flipped_bits2 < 54,\n\n \"{:x} and {:x}: {:}\",\n\n a,\n\n b.rotate_left(rotate),\n\n flipped_bits2\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/hash_quality_test.rs", "rank": 26, "score": 86365.599101053 }, { "content": "fn bench_fallback(c: &mut Criterion) {\n\n c.bench(\n\n \"fallback\",\n\n ParameterizedBenchmark::new(\"u8\", |b, &s| b.iter(|| black_box(fallbackhash(s))), &U8_VALUES),\n\n );\n\n c.bench(\n\n \"fallback\",\n\n ParameterizedBenchmark::new(\"u16\", |b, &s| b.iter(|| black_box(fallbackhash(s))), &U16_VALUES),\n\n );\n\n c.bench(\n\n \"fallback\",\n\n ParameterizedBenchmark::new(\"u32\", |b, &s| b.iter(|| black_box(fallbackhash(s))), &U32_VALUES),\n\n );\n\n c.bench(\n\n \"fallback\",\n\n ParameterizedBenchmark::new(\"u64\", |b, &s| b.iter(|| black_box(fallbackhash(s))), &U64_VALUES),\n\n );\n\n c.bench(\n\n \"fallback\",\n\n ParameterizedBenchmark::new(\"u128\", |b, &s| b.iter(|| black_box(fallbackhash(s))), &U128_VALUES),\n\n );\n\n c.bench(\n\n \"fallback\",\n\n ParameterizedBenchmark::new(\"string\", |b, s| b.iter(|| black_box(fallbackhash(s))), gen_strings()),\n\n );\n\n}\n\n\n", "file_path": "tests/bench.rs", "rank": 27, "score": 85991.16173807261 }, { "content": "fn bench_fx(c: &mut Criterion) {\n\n c.bench(\n\n \"fx\",\n\n ParameterizedBenchmark::new(\"u8\", |b, &s| b.iter(|| black_box(fxhash(s))), &U8_VALUES),\n\n );\n\n c.bench(\n\n \"fx\",\n\n ParameterizedBenchmark::new(\"u16\", |b, &s| b.iter(|| black_box(fxhash(s))), &U16_VALUES),\n\n );\n\n c.bench(\n\n \"fx\",\n\n ParameterizedBenchmark::new(\"u32\", |b, &s| b.iter(|| black_box(fxhash(s))), &U32_VALUES),\n\n );\n\n c.bench(\n\n \"fx\",\n\n 
ParameterizedBenchmark::new(\"u64\", |b, &s| b.iter(|| black_box(fxhash(s))), &U64_VALUES),\n\n );\n\n c.bench(\n\n \"fx\",\n\n ParameterizedBenchmark::new(\"u128\", |b, &s| b.iter(|| black_box(fxhash(s))), &U128_VALUES),\n\n );\n\n c.bench(\n\n \"fx\",\n\n ParameterizedBenchmark::new(\"string\", |b, s| b.iter(|| black_box(fxhash(s))), gen_strings()),\n\n );\n\n}\n\n\n", "file_path": "tests/bench.rs", "rank": 28, "score": 85991.16173807261 }, { "content": "fn bench_sip(c: &mut Criterion) {\n\n c.bench(\n\n \"sip\",\n\n ParameterizedBenchmark::new(\"u8\", |b, &s| b.iter(|| black_box(siphash(s))), &U8_VALUES),\n\n );\n\n c.bench(\n\n \"sip\",\n\n ParameterizedBenchmark::new(\"u16\", |b, &s| b.iter(|| black_box(siphash(s))), &U16_VALUES),\n\n );\n\n c.bench(\n\n \"sip\",\n\n ParameterizedBenchmark::new(\"u32\", |b, &s| b.iter(|| black_box(siphash(s))), &U32_VALUES),\n\n );\n\n c.bench(\n\n \"sip\",\n\n ParameterizedBenchmark::new(\"u64\", |b, &s| b.iter(|| black_box(siphash(s))), &U64_VALUES),\n\n );\n\n c.bench(\n\n \"sip\",\n\n ParameterizedBenchmark::new(\"u128\", |b, &s| b.iter(|| black_box(siphash(s))), &U128_VALUES),\n", "file_path": "tests/bench.rs", "rank": 29, "score": 85991.16173807261 }, { "content": "fn bench_fnv(c: &mut Criterion) {\n\n c.bench(\n\n \"fnv\",\n\n ParameterizedBenchmark::new(\"u8\", |b, &s| b.iter(|| black_box(fnvhash(s))), &U8_VALUES),\n\n );\n\n c.bench(\n\n \"fnv\",\n\n ParameterizedBenchmark::new(\"u16\", |b, &s| b.iter(|| black_box(fnvhash(s))), &U16_VALUES),\n\n );\n\n c.bench(\n\n \"fnv\",\n\n ParameterizedBenchmark::new(\"u32\", |b, &s| b.iter(|| black_box(fnvhash(s))), &U32_VALUES),\n\n );\n\n c.bench(\n\n \"fnv\",\n\n ParameterizedBenchmark::new(\"u64\", |b, &s| b.iter(|| black_box(fnvhash(s))), &U64_VALUES),\n\n );\n\n c.bench(\n\n \"fnv\",\n\n ParameterizedBenchmark::new(\"u128\", |b, &s| b.iter(|| black_box(fnvhash(s))), &U128_VALUES),\n\n );\n\n c.bench(\n\n \"fnv\",\n\n ParameterizedBenchmark::new(\"string\", |b, s| b.iter(|| 
black_box(fnvhash(s))), gen_strings()),\n\n );\n\n}\n\n\n", "file_path": "tests/bench.rs", "rank": 30, "score": 85991.16173807261 }, { "content": "fn bench_sea(c: &mut Criterion) {\n\n c.bench(\n\n \"sea\",\n\n ParameterizedBenchmark::new(\"string\", |b, s| b.iter(|| black_box(seahash(s))), gen_strings()),\n\n );\n\n}\n\n\n", "file_path": "tests/bench.rs", "rank": 31, "score": 85991.16173807261 }, { "content": "fn compare_sip13(c: &mut Criterion) {\n\n let int: u64 = 1234;\n\n let string = create_string(1024);\n\n let builder = BuildHasherDefault::<siphasher::sip::SipHasher13>::default();\n\n compare_other(c, \"compare_sip13\", builder)\n\n}\n\n\n", "file_path": "compare/tests/compare.rs", "rank": 32, "score": 83756.40690571925 }, { "content": "fn compare_t1ha(c: &mut Criterion) {\n\n let int: u64 = 1234;\n\n let string = create_string(1024);\n\n let builder = t1ha::T1haBuildHasher::default();\n\n compare_other(c, \"compare_t1ha\", builder)\n\n}\n\n\n", "file_path": "compare/tests/compare.rs", "rank": 33, "score": 83756.40690571925 }, { "content": "fn compare_highway(c: &mut Criterion) {\n\n let int: u64 = 1234;\n\n let string = create_string(1024);\n\n let builder = highway::HighwayBuildHasher::default();\n\n compare_other(c, \"compare_highway\", builder)\n\n}\n\n\n", "file_path": "compare/tests/compare.rs", "rank": 34, "score": 83756.40690571925 }, { "content": "fn compare_metro(c: &mut Criterion) {\n\n let int: u64 = 1234;\n\n let string = create_string(1024);\n\n let builder = metrohash::MetroBuildHasher::default();\n\n compare_other(c, \"compare_metro\", builder)\n\n}\n\n\n", "file_path": "compare/tests/compare.rs", "rank": 35, "score": 83756.40690571925 }, { "content": "fn compare_xxhash(c: &mut Criterion) {\n\n let int: u64 = 1234;\n\n let string = create_string(1024);\n\n let builder = twox_hash::RandomXxHashBuilder64::default();\n\n compare_other(c, \"compare_xxhash\", builder)\n\n}\n\n\n\ncriterion_main!(compare);\n\ncriterion_group!(\n\n compare,\n\n 
compare_ahash,\n\n compare_farmhash,\n\n compare_fnvhash,\n\n compare_fxhash,\n\n compare_highway,\n\n compare_metro,\n\n compare_t1ha,\n\n compare_sip13,\n\n compare_sip24,\n\n compare_wyhash,\n\n compare_xxhash,\n\n);\n", "file_path": "compare/tests/compare.rs", "rank": 36, "score": 83756.40690571925 }, { "content": "fn compare_fnvhash(c: &mut Criterion) {\n\n let int: u64 = 1234;\n\n let string = create_string(1024);\n\n let builder = FnvBuildHasher::default();\n\n compare_other(c, \"compare_fnvhash\", builder)\n\n}\n\n\n", "file_path": "compare/tests/compare.rs", "rank": 37, "score": 83756.40690571925 }, { "content": "fn compare_farmhash(c: &mut Criterion) {\n\n let int: u64 = 1234;\n\n let string = create_string(1024);\n\n let builder = BuildHasherDefault::<FarmHasher>::default();\n\n compare_other(c, \"compare_farmhash\", builder)\n\n}\n\n\n", "file_path": "compare/tests/compare.rs", "rank": 38, "score": 83756.40690571925 }, { "content": "fn compare_fxhash(c: &mut Criterion) {\n\n let int: u64 = 1234;\n\n let string = create_string(1024);\n\n let builder = FxBuildHasher::default();\n\n compare_other(c, \"compare_fxhash\", builder)\n\n}\n\n\n", "file_path": "compare/tests/compare.rs", "rank": 39, "score": 83756.40690571925 }, { "content": "fn compare_wyhash(c: &mut Criterion) {\n\n let int: u64 = 1234;\n\n let string = create_string(1024);\n\n let builder = BuildHasherDefault::<wyhash::WyHash>::default();\n\n compare_other(c, \"compare_wyhash\", builder)\n\n}\n\n\n", "file_path": "compare/tests/compare.rs", "rank": 40, "score": 83756.40690571925 }, { "content": "fn compare_sip24(c: &mut Criterion) {\n\n let int: u64 = 1234;\n\n let string = create_string(1024);\n\n let builder = BuildHasherDefault::<siphasher::sip::SipHasher24>::default();\n\n compare_other(c, \"compare_sip24\", builder)\n\n}\n\n\n", "file_path": "compare/tests/compare.rs", "rank": 41, "score": 83756.40690571925 }, { "content": "struct SimpleBuildHasher {\n\n hasher: AHasher,\n\n}\n\n\n\nimpl 
BuildHasher for SimpleBuildHasher {\n\n type Hasher = AHasher;\n\n\n\n fn build_hasher(&self) -> Self::Hasher {\n\n self.hasher.clone()\n\n }\n\n}\n\n\n", "file_path": "tests/nopanic.rs", "rank": 42, "score": 83474.56962247596 }, { "content": "fn compare_other<B: BuildHasher>(c: &mut Criterion, test: &str, builder: B) {\n\n for num in &[1,3,7,15,31,63,127,255,511,1023] {\n\n let name = \"string\".to_owned() + &num.to_string();\n\n let string = create_string(*num);\n\n c.bench_with_input(BenchmarkId::new(test, &name), &string, |bencher, s| {\n\n bencher.iter(|| {\n\n black_box(generic_hash(&s, &builder))\n\n });\n\n });\n\n }\n\n}\n\n\n", "file_path": "compare/tests/compare.rs", "rank": 43, "score": 83062.65734235839 }, { "content": "fn bench_fx_words(c: &mut Criterion) {\n\n let words = gen_word_pairs();\n\n c.bench_function(\"fx_words\", |b| b.iter(|| black_box(fxhash_vec(&words))));\n\n}\n\n\n\ncriterion_main!(benches);\n\ncriterion_group!(benches, bench_ahash_words, bench_fx_words,);\n", "file_path": "tests/map_tests.rs", "rank": 44, "score": 81704.18366005528 }, { "content": "#[cfg(all(any(target_arch = \"x86\", target_arch = \"x86_64\"), target_feature = \"aes\"))]\n\nfn aeshash<H: Hash>(b: &H) -> u64 {\n\n let build_hasher = RandomState::with_seeds(1, 2, 3, 4);\n\n H::get_hash(b, &build_hasher)\n\n}\n", "file_path": "tests/bench.rs", "rank": 45, "score": 79188.66809166558 }, { "content": "fn fnvhash<H: Hash>(b: &H) -> u64 {\n\n let mut hasher = fnv::FnvHasher::default();\n\n b.hash(&mut hasher);\n\n hasher.finish()\n\n}\n\n\n", "file_path": "tests/bench.rs", "rank": 46, "score": 79188.66809166558 }, { "content": "#[cfg(not(all(any(target_arch = \"x86\", target_arch = \"x86_64\"), target_feature = \"aes\")))]\n\nfn aeshash<H: Hash>(_b: &H) -> u64 {\n\n panic!(\"aes must be enabled\")\n\n}\n\n\n", "file_path": "tests/bench.rs", "rank": 47, "score": 79188.66809166558 }, { "content": "#[cfg(not(all(any(target_arch = \"x86\", target_arch = \"x86_64\"), 
target_feature = \"aes\")))]\n\nfn fallbackhash<H: Hash>(b: &H) -> u64 {\n\n let build_hasher = RandomState::with_seeds(1, 2, 3, 4);\n\n H::get_hash(b, &build_hasher)\n\n}\n", "file_path": "tests/bench.rs", "rank": 48, "score": 79188.66809166558 }, { "content": "fn seahash<H: Hash>(b: &H) -> u64 {\n\n let mut hasher = seahash::SeaHasher::default();\n\n b.hash(&mut hasher);\n\n hasher.finish()\n\n}\n\n\n\nconst STRING_LENGTHS: [u32; 12] = [1, 3, 4, 7, 8, 15, 16, 24, 33, 68, 132, 1024];\n\n\n", "file_path": "tests/bench.rs", "rank": 49, "score": 79188.66809166558 }, { "content": "fn fxhash<H: Hash>(b: &H) -> u64 {\n\n let mut hasher = FxHasher::default();\n\n b.hash(&mut hasher);\n\n hasher.finish()\n\n}\n\n\n", "file_path": "tests/bench.rs", "rank": 50, "score": 79188.66809166558 }, { "content": "fn siphash<H: Hash>(b: &H) -> u64 {\n\n let mut hasher = DefaultHasher::default();\n\n b.hash(&mut hasher);\n\n hasher.finish()\n\n}\n\n\n", "file_path": "tests/bench.rs", "rank": 51, "score": 79188.66809166558 }, { "content": "#[cfg(all(any(target_arch = \"x86\", target_arch = \"x86_64\"), target_feature = \"aes\"))]\n\nfn fallbackhash<H: Hash>(_b: &H) -> u64 {\n\n panic!(\"aes must be disabled\")\n\n}\n\n\n", "file_path": "tests/bench.rs", "rank": 52, "score": 79188.66809166558 }, { "content": "fn fxhash_vec<H: Hash>(b: &Vec<H>) -> u64 {\n\n let mut total: u64 = 0;\n\n for item in b {\n\n let mut hasher = FxHasher::default();\n\n item.hash(&mut hasher);\n\n total = total.wrapping_add(hasher.finish());\n\n }\n\n total\n\n}\n\n\n", "file_path": "tests/map_tests.rs", "rank": 53, "score": 71617.91684224624 }, { "content": "#[allow(unused)] // False positive\n\nfn check_for_collisions<H: Hash, B: BuildHasher>(build_hasher: &B, items: &[H], bucket_count: usize) {\n\n let mut buckets = vec![0; bucket_count];\n\n for item in items {\n\n let value = hash(item, build_hasher) as usize;\n\n buckets[value % bucket_count] += 1;\n\n }\n\n let mean = items.len() / bucket_count;\n\n let 
max = *buckets.iter().max().unwrap();\n\n let min = *buckets.iter().min().unwrap();\n\n assert!(\n\n (min as f64) > (mean as f64) * 0.95,\n\n \"min: {}, max:{}, {:?}\",\n\n min,\n\n max,\n\n buckets\n\n );\n\n assert!(\n\n (max as f64) < (mean as f64) * 1.05,\n\n \"min: {}, max:{}, {:?}\",\n\n min,\n\n max,\n\n buckets\n\n );\n\n}\n\n\n", "file_path": "tests/map_tests.rs", "rank": 54, "score": 70145.41189235912 }, { "content": "fn create_string(len: usize) -> String {\n\n let mut string = String::default();\n\n for pos in 1..=len {\n\n let c = (48 + (pos % 10) as u8) as char;\n\n string.push(c);\n\n }\n\n string\n\n}\n\n\n", "file_path": "compare/tests/compare.rs", "rank": 55, "score": 62999.00481526557 }, { "content": "#[allow(unused)] // False positive\n\nfn test_hash_common_words<B: BuildHasher>(build_hasher: &B) {\n\n let word_pairs: Vec<_> = gen_word_pairs();\n\n check_for_collisions(build_hasher, &word_pairs, 32);\n\n}\n\n\n", "file_path": "tests/map_tests.rs", "rank": 56, "score": 62638.58860809358 }, { "content": "#[inline(never)]\n\nfn hash_test_specialize_wrapper(num: i32, string: &str) {\n\n hash_test_specialize(num, string);\n\n}\n\n\n", "file_path": "tests/nopanic.rs", "rank": 57, "score": 58045.06867395537 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n if let Some(channel) = version_check::Channel::read() {\n\n if channel.supports_features() {\n\n println!(\"cargo:rustc-cfg=feature=\\\"specialize\\\"\");\n\n }\n\n }\n\n let os = env::var(\"CARGO_CFG_TARGET_OS\").expect(\"CARGO_CFG_TARGET_OS was not set\");\n\n if os.eq_ignore_ascii_case(\"linux\")\n\n || os.eq_ignore_ascii_case(\"android\")\n\n || os.eq_ignore_ascii_case(\"windows\")\n\n || os.eq_ignore_ascii_case(\"macos\")\n\n || os.eq_ignore_ascii_case(\"ios\")\n\n || os.eq_ignore_ascii_case(\"freebsd\")\n\n || os.eq_ignore_ascii_case(\"openbsd\")\n\n || os.eq_ignore_ascii_case(\"dragonfly\")\n\n || os.eq_ignore_ascii_case(\"solaris\")\n\n || 
os.eq_ignore_ascii_case(\"illumos\")\n\n || os.eq_ignore_ascii_case(\"fuchsia\")\n\n || os.eq_ignore_ascii_case(\"redox\")\n\n || os.eq_ignore_ascii_case(\"cloudabi\")\n\n || os.eq_ignore_ascii_case(\"haiku\")\n\n || os.eq_ignore_ascii_case(\"vxworks\")\n\n || os.eq_ignore_ascii_case(\"emscripten\")\n\n || os.eq_ignore_ascii_case(\"wasi\")\n\n {\n\n println!(\"cargo:rustc-cfg=feature=\\\"runtime-rng\\\"\");\n\n }\n\n}\n", "file_path": "build.rs", "rank": 58, "score": 50297.22417752387 }, { "content": "#[test]\n\nfn test_no_panic() {\n\n hash_test_final_wrapper(2, \"Foo\");\n\n hash_test_specialize_wrapper(2, \"Bar\");\n\n hash_test_random_wrapper(2, \"Baz\");\n\n}\n", "file_path": "tests/nopanic.rs", "rank": 59, "score": 47241.24735574992 }, { "content": "#[test]\n\nfn test_bucket_distribution() {\n\n let build_hasher = RandomState::with_seeds(1, 2, 3, 4);\n\n test_hash_common_words(&build_hasher);\n\n let sequence: Vec<_> = (0..320000).collect();\n\n check_for_collisions(&build_hasher, &sequence, 32);\n\n let sequence: Vec<_> = (0..2560000).collect();\n\n check_for_collisions(&build_hasher, &sequence, 256);\n\n let sequence: Vec<_> = (0..320000).map(|i| i * 1024).collect();\n\n check_for_collisions(&build_hasher, &sequence, 32);\n\n let sequence: Vec<_> = (0..2560000_u64).map(|i| i * 1024).collect();\n\n check_for_collisions(&build_hasher, &sequence, 256);\n\n}\n\n\n", "file_path": "tests/map_tests.rs", "rank": 60, "score": 44728.888095411276 }, { "content": "fn gen_strings() -> Vec<String> {\n\n STRING_LENGTHS\n\n .iter()\n\n .map(|len| {\n\n let mut string = String::default();\n\n for pos in 1..=*len {\n\n let c = (48 + (pos % 10) as u8) as char;\n\n string.push(c);\n\n }\n\n string\n\n })\n\n .collect()\n\n}\n\n\n\nconst U8_VALUES: [u8; 1] = [123];\n\nconst U16_VALUES: [u16; 1] = [1234];\n\nconst U32_VALUES: [u32; 1] = [12345678];\n\nconst U64_VALUES: [u64; 1] = [1234567890123456];\n\nconst U128_VALUES: [u128; 1] = [12345678901234567890123456789012];\n\n\n", 
"file_path": "tests/bench.rs", "rank": 61, "score": 41399.19081163485 }, { "content": "fn gen_word_pairs() -> Vec<String> {\n\n let words: Vec<_> = r#\"\n\na, ability, able, about, above, accept, according, account, across, act, action,\n\nactivity, actually, add, address, administration, admit, adult, affect, after,\n\nagain, against, age, agency, agent, ago, agree, agreement, ahead, air, all,\n\nallow, almost, alone, along, already, also, although, always, American, among,\n\namount, analysis, and, animal, another, answer, any, anyone, anything, appear,\n\napply, approach, area, argue, arm, around, arrive, art, article, artist, as,\n\nask, assume, at, attack, attention, attorney, audience, author, authority,\n\navailable, avoid, away, baby, back, bad, bag, ball, bank, bar, base, be, beat,\n\nbeautiful, because, become, bed, before, begin, behavior, behind, believe,\n\nbenefit, best, better, between, beyond, big, bill, billion, bit, black, blood,\n\nblue, board, body, book, born, both, box, boy, break, bring, brother, budget,\n\nbuild, building, business, but, buy, by, call, camera, campaign, can, cancer,\n\ncandidate, capital, car, card, care, career, carry, case, catch, cause, cell,\n\ncenter, central, century, certain, certainly, chair, challenge, chance, change,\n\ncharacter, charge, check, child, choice, choose, church, citizen, city, civil,\n\nclaim, class, clear, clearly, close, coach, cold, collection, college, color,\n\ncome, commercial, common, community, company, compare, computer, concern,\n\ncondition, conference, Congress, consider, consumer, contain, continue, control,\n", "file_path": "tests/map_tests.rs", "rank": 62, "score": 39297.24339199056 }, { "content": "#[inline(never)]\n\nfn hash_test_random_wrapper(num: i32, string: &str) {\n\n hash_test_specialize(num, string);\n\n}\n\n\n", "file_path": "tests/nopanic.rs", "rank": 63, "score": 35183.75029698451 }, { "content": "#[inline(never)]\n\nfn hash_test_final_wrapper(num: i32, string: &str) {\n\n 
hash_test_final(num, string);\n\n}\n\n\n", "file_path": "tests/nopanic.rs", "rank": 64, "score": 35183.75029698451 }, { "content": "}\n\ncall_hasher_impl!(u8);\n\ncall_hasher_impl!(u16);\n\ncall_hasher_impl!(u32);\n\ncall_hasher_impl!(u64);\n\ncall_hasher_impl!(i8);\n\ncall_hasher_impl!(i16);\n\ncall_hasher_impl!(i32);\n\ncall_hasher_impl!(i64);\n\n\n\n#[cfg(feature = \"specialize\")]\n\nimpl CallHasher for u128 {\n\n #[inline]\n\n fn get_hash<H: Hash + ?Sized, B: BuildHasher>(value: &H, build_hasher: &B) -> u64 {\n\n build_hasher.hash_as_fixed_length(value)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"specialize\")]\n\nimpl CallHasher for i128 {\n", "file_path": "src/specialize.rs", "rank": 65, "score": 32786.93983970306 }, { "content": " #[inline]\n\n fn get_hash<H: Hash + ?Sized, B: BuildHasher>(value: &H, build_hasher: &B) -> u64 {\n\n build_hasher.hash_as_fixed_length(value)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"specialize\")]\n\nimpl CallHasher for usize {\n\n #[inline]\n\n fn get_hash<H: Hash + ?Sized, B: BuildHasher>(value: &H, build_hasher: &B) -> u64 {\n\n build_hasher.hash_as_fixed_length(value)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"specialize\")]\n\nimpl CallHasher for isize {\n\n #[inline]\n\n fn get_hash<H: Hash + ?Sized, B: BuildHasher>(value: &H, build_hasher: &B) -> u64 {\n\n build_hasher.hash_as_fixed_length(value)\n\n }\n", "file_path": "src/specialize.rs", "rank": 66, "score": 32781.201052923905 }, { "content": "}\n\n\n\n#[cfg(feature = \"specialize\")]\n\nimpl CallHasher for [u8] {\n\n #[inline]\n\n fn get_hash<H: Hash + ?Sized, B: BuildHasher>(value: &H, build_hasher: &B) -> u64 {\n\n build_hasher.hash_as_str(value)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"specialize\")]\n\nimpl CallHasher for Vec<u8> {\n\n #[inline]\n\n fn get_hash<H: Hash + ?Sized, B: BuildHasher>(value: &H, build_hasher: &B) -> u64 {\n\n build_hasher.hash_as_str(value)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"specialize\")]\n\nimpl CallHasher for str {\n", "file_path": "src/specialize.rs", 
"rank": 67, "score": 32780.39735477918 }, { "content": " T: Hash + ?Sized,\n\n{\n\n #[inline]\n\n default fn get_hash<H: Hash + ?Sized, B: BuildHasher>(value: &H, build_hasher: &B) -> u64 {\n\n let mut hasher = build_hasher.build_hasher();\n\n value.hash(&mut hasher);\n\n hasher.finish()\n\n }\n\n}\n\n\n\nmacro_rules! call_hasher_impl {\n\n ($typ:ty) => {\n\n #[cfg(feature = \"specialize\")]\n\n impl CallHasher for $typ {\n\n #[inline]\n\n fn get_hash<H: Hash + ?Sized, B: BuildHasher>(value: &H, build_hasher: &B) -> u64 {\n\n build_hasher.hash_as_u64(value)\n\n }\n\n }\n\n };\n", "file_path": "src/specialize.rs", "rank": 68, "score": 32778.15529918672 }, { "content": " #[inline]\n\n fn get_hash<H: Hash + ?Sized, B: BuildHasher>(value: &H, build_hasher: &B) -> u64 {\n\n build_hasher.hash_as_str(value)\n\n }\n\n}\n\n\n\n#[cfg(all(feature = \"specialize\"))]\n\nimpl CallHasher for String {\n\n #[inline]\n\n fn get_hash<H: Hash + ?Sized, B: BuildHasher>(value: &H, build_hasher: &B) -> u64 {\n\n build_hasher.hash_as_str(value)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::*;\n\n\n\n #[test]\n", "file_path": "src/specialize.rs", "rank": 69, "score": 32776.73509324046 }, { "content": "/// ```\n\n/// use std::hash::BuildHasher;\n\n/// use ahash::RandomState;\n\n/// use ahash::CallHasher;\n\n///\n\n/// let hash_builder = RandomState::new();\n\n/// //...\n\n/// let value = 17;\n\n/// let hash = u32::get_hash(&value, &hash_builder);\n\n/// ```\n", "file_path": "src/specialize.rs", "rank": 70, "score": 32774.39429028162 }, { "content": " assert_eq!(u16::get_hash(&&2, &build_hasher), u16::get_hash(&2, &build_hasher));\n\n assert_eq!(u32::get_hash(&&3, &build_hasher), u32::get_hash(&3, &build_hasher));\n\n assert_eq!(u64::get_hash(&&4, &build_hasher), u64::get_hash(&4, &build_hasher));\n\n assert_eq!(u128::get_hash(&&5, &build_hasher), u128::get_hash(&5, &build_hasher));\n\n assert_eq!(\n\n str::get_hash(&\"test\", &build_hasher),\n\n 
str::get_hash(\"test\", &build_hasher)\n\n );\n\n assert_eq!(\n\n str::get_hash(&\"test\", &build_hasher),\n\n String::get_hash(&\"test\".to_string(), &build_hasher)\n\n );\n\n #[cfg(feature = \"specialize\")]\n\n assert_eq!(\n\n str::get_hash(&\"test\", &build_hasher),\n\n <[u8]>::get_hash(\"test\".as_bytes(), &build_hasher)\n\n );\n\n\n\n let build_hasher = RandomState::with_seeds(10, 20, 30, 40);\n\n assert_eq!(u8::get_hash(&&&1, &build_hasher), u8::get_hash(&1, &build_hasher));\n", "file_path": "src/specialize.rs", "rank": 71, "score": 32773.01557812057 }, { "content": " #[cfg(feature = \"specialize\")]\n\n pub fn test_specialized_invoked() {\n\n let build_hasher = RandomState::with_seeds(1, 2, 3, 4);\n\n let shortened = u64::get_hash(&0, &build_hasher);\n\n let mut hasher = AHasher::new_with_keys(1, 2);\n\n 0_u64.hash(&mut hasher);\n\n assert_ne!(hasher.finish(), shortened);\n\n }\n\n\n\n /// Tests that some non-trivial transformation takes place.\n\n #[test]\n\n pub fn test_input_processed() {\n\n let build_hasher = RandomState::with_seeds(2, 2, 2, 2);\n\n assert_ne!(0, u64::get_hash(&0, &build_hasher));\n\n assert_ne!(1, u64::get_hash(&0, &build_hasher));\n\n assert_ne!(2, u64::get_hash(&0, &build_hasher));\n\n assert_ne!(3, u64::get_hash(&0, &build_hasher));\n\n assert_ne!(4, u64::get_hash(&0, &build_hasher));\n\n assert_ne!(5, u64::get_hash(&0, &build_hasher));\n\n\n", "file_path": "src/specialize.rs", "rank": 72, "score": 32772.83360542783 }, { "content": " assert_eq!(u16::get_hash(&&&2, &build_hasher), u16::get_hash(&2, &build_hasher));\n\n assert_eq!(u32::get_hash(&&&3, &build_hasher), u32::get_hash(&3, &build_hasher));\n\n assert_eq!(u64::get_hash(&&&4, &build_hasher), u64::get_hash(&4, &build_hasher));\n\n assert_eq!(u128::get_hash(&&&5, &build_hasher), u128::get_hash(&5, &build_hasher));\n\n assert_eq!(\n\n str::get_hash(&&\"test\", &build_hasher),\n\n str::get_hash(\"test\", &build_hasher)\n\n );\n\n assert_eq!(\n\n str::get_hash(&&\"test\", 
&build_hasher),\n\n String::get_hash(&\"test\".to_string(), &build_hasher)\n\n );\n\n #[cfg(feature = \"specialize\")]\n\n assert_eq!(\n\n str::get_hash(&&\"test\", &build_hasher),\n\n <[u8]>::get_hash(&\"test\".to_string().into_bytes(), &build_hasher)\n\n );\n\n }\n\n}\n", "file_path": "src/specialize.rs", "rank": 73, "score": 32772.7754807801 }, { "content": "use core::hash::BuildHasher;\n\nuse core::hash::Hash;\n\nuse core::hash::Hasher;\n\n\n\n#[cfg(not(feature = \"std\"))]\n\nextern crate alloc;\n\n#[cfg(feature = \"std\")]\n\nextern crate std as alloc;\n\n\n\n#[cfg(feature = \"specialize\")]\n\nuse crate::BuildHasherExt;\n\n#[cfg(feature = \"specialize\")]\n\nuse alloc::string::String;\n\n#[cfg(feature = \"specialize\")]\n\nuse alloc::vec::Vec;\n\n\n\n/// Provides a way to get an optimized hasher for a given data type.\n\n/// Rather than using a Hasher generically which can hash any value, this provides a way to get a specialized hash\n\n/// for a specific type. So this may be faster for primitive types.\n\n/// # Example\n", "file_path": "src/specialize.rs", "rank": 74, "score": 32769.07392585806 }, { "content": " assert_ne!(0, u64::get_hash(&1, &build_hasher));\n\n assert_ne!(1, u64::get_hash(&1, &build_hasher));\n\n assert_ne!(2, u64::get_hash(&1, &build_hasher));\n\n assert_ne!(3, u64::get_hash(&1, &build_hasher));\n\n assert_ne!(4, u64::get_hash(&1, &build_hasher));\n\n assert_ne!(5, u64::get_hash(&1, &build_hasher));\n\n\n\n let xored = u64::get_hash(&0, &build_hasher) ^ u64::get_hash(&1, &build_hasher);\n\n assert_ne!(0, xored);\n\n assert_ne!(1, xored);\n\n assert_ne!(2, xored);\n\n assert_ne!(3, xored);\n\n assert_ne!(4, xored);\n\n assert_ne!(5, xored);\n\n }\n\n\n\n #[test]\n\n pub fn test_ref_independent() {\n\n let build_hasher = RandomState::with_seeds(1, 2, 3, 4);\n\n assert_eq!(u8::get_hash(&&1, &build_hasher), u8::get_hash(&1, &build_hasher));\n", "file_path": "src/specialize.rs", "rank": 75, "score": 32766.28337357091 }, { "content": "use 
ahash::*;\n\nuse core::slice;\n\nuse std::hash::{BuildHasher, Hasher};\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn ahash64(buf: *const (), len: usize, seed: u64) -> u64 {\n\n let buf: &[u8] = unsafe { slice::from_raw_parts(buf as *const u8, len) };\n\n let mut hasher = RandomState::with_seeds(\n\n 0x243f_6a88_85a3_08d3_u64.wrapping_add(seed),\n\n 0x1319_8a2e_0370_7344_u64 ^ seed,\n\n 0xa409_3822_299f_31d0,\n\n 0x082e_fa98_ec4e_6c89,\n\n )\n\n .build_hasher();\n\n hasher.write(buf);\n\n hasher.finish()\n\n}\n", "file_path": "smhasher/ahash-cbindings/src/lib.rs", "rank": 76, "score": 28381.364733510116 }, { "content": "/// start with the same data.\n\n///\n\n#[derive(Debug, Clone)]\n\npub struct AHasher {\n\n buffer: u64,\n\n pad: u64,\n\n extra_keys: [u64; 2],\n\n}\n\n\n\nimpl AHasher {\n\n /// Creates a new hasher keyed to the provided key.\n\n #[inline]\n\n #[allow(dead_code)] // Is not called if non-fallback hash is used.\n\n pub fn new_with_keys(key1: u128, key2: u128) -> AHasher {\n\n let pi: [u128; 2] = PI.convert();\n\n let key1: [u64; 2] = (key1 ^ pi[0]).convert();\n\n let key2: [u64; 2] = (key2 ^ pi[1]).convert();\n\n AHasher {\n\n buffer: key1[0],\n\n pad: key1[1],\n", "file_path": "src/fallback_hash.rs", "rank": 77, "score": 46.62448384823496 }, { "content": " #[inline]\n\n fn finish(&self) -> u64 {\n\n let rot = (self.pad & 64) as u32;\n\n self.buffer.rotate_left(rot)\n\n }\n\n\n\n #[inline]\n\n fn write(&mut self, _bytes: &[u8]) {\n\n unreachable!(\"This should never be called\")\n\n }\n\n\n\n #[inline]\n\n fn write_u8(&mut self, i: u8) {\n\n self.write_u64(i as u64);\n\n }\n\n\n\n #[inline]\n\n fn write_u16(&mut self, i: u16) {\n\n self.write_u64(i as u64);\n\n }\n", "file_path": "src/fallback_hash.rs", "rank": 82, "score": 37.66638012214995 }, { "content": " extra_keys: key2,\n\n }\n\n }\n\n\n\n #[allow(unused)] // False positive\n\n pub(crate) fn test_with_keys(key1: u128, key2: u128) -> Self {\n\n let key1: [u64; 2] = key1.convert();\n\n let key2: 
[u64; 2] = key2.convert();\n\n Self {\n\n buffer: key1[0],\n\n pad: key1[1],\n\n extra_keys: key2,\n\n }\n\n }\n\n\n\n #[inline]\n\n #[allow(dead_code)] // Is not called if non-fallback hash is used.\n\n pub(crate) fn from_random_state(rand_state: &RandomState) -> AHasher {\n\n AHasher {\n\n buffer: rand_state.k0,\n", "file_path": "src/fallback_hash.rs", "rank": 83, "score": 37.2256652315938 }, { "content": "\n\n #[inline]\n\n fn write_u32(&mut self, i: u32) {\n\n self.write_u64(i as u64);\n\n }\n\n\n\n #[inline]\n\n fn write_u64(&mut self, i: u64) {\n\n self.buffer = folded_multiply(i ^ self.buffer, MULTIPLE);\n\n }\n\n\n\n #[inline]\n\n fn write_u128(&mut self, _i: u128) {\n\n unreachable!(\"This should never be called\")\n\n }\n\n\n\n #[inline]\n\n fn write_usize(&mut self, _i: usize) {\n\n unimplemented!()\n\n }\n", "file_path": "src/fallback_hash.rs", "rank": 84, "score": 36.10239955288128 }, { "content": " fn write(&mut self, input: &[u8]) {\n\n let mut data = input;\n\n let length = data.len() as u64;\n\n //Needs to be an add rather than an xor because otherwise it could be canceled with carefully formed input.\n\n self.buffer = self.buffer.wrapping_add(length).wrapping_mul(MULTIPLE);\n\n //A 'binary search' on sizes reduces the number of comparisons.\n\n if data.len() > 8 {\n\n if data.len() > 16 {\n\n let tail = data.read_last_u128();\n\n self.large_update(tail);\n\n while data.len() > 16 {\n\n let (block, rest) = data.read_u128();\n\n self.large_update(block);\n\n data = rest;\n\n }\n\n } else {\n\n self.large_update([data.read_u64().0, data.read_last_u64()].convert());\n\n }\n\n } else {\n\n let value = read_small(data);\n", "file_path": "src/fallback_hash.rs", "rank": 86, "score": 35.28658879841878 }, { "content": " self.large_update(value.convert());\n\n }\n\n }\n\n\n\n #[inline]\n\n fn finish(&self) -> u64 {\n\n let rot = (self.buffer & 63) as u32;\n\n folded_multiply(self.buffer, self.pad).rotate_left(rot)\n\n }\n\n}\n\n\n\n#[cfg(feature = 
\"specialize\")]\n\npub(crate) struct AHasherU64 {\n\n pub(crate) buffer: u64,\n\n pub(crate) pad: u64,\n\n}\n\n\n\n/// A specialized hasher for only primitives under 64 bits.\n\n#[cfg(feature = \"specialize\")]\n\nimpl Hasher for AHasherU64 {\n", "file_path": "src/fallback_hash.rs", "rank": 87, "score": 34.14528203814751 }, { "content": "\n\n #[inline]\n\n fn write(&mut self, bytes: &[u8]) {\n\n if bytes.len() > 8 {\n\n self.0.write(bytes)\n\n } else {\n\n let value = read_small(bytes);\n\n self.0.buffer = folded_multiply(value[0] ^ self.0.buffer,\n\n value[1] ^ self.0.extra_keys[1]);\n\n self.0.pad = self.0.pad.wrapping_add(bytes.len() as u64);\n\n }\n\n }\n\n\n\n #[inline]\n\n fn write_u8(&mut self, _i: u8) {}\n\n\n\n #[inline]\n\n fn write_u16(&mut self, _i: u16) {}\n\n\n\n #[inline]\n", "file_path": "src/fallback_hash.rs", "rank": 88, "score": 33.47029821836793 }, { "content": " /// However it takes in 128 bits of data instead of 64. Both halves must be masked.\n\n ///\n\n /// This makes it impossible for an attacker to place a single bit difference between\n\n /// two blocks so as to cancel each other.\n\n ///\n\n /// However this is not sufficient. to prevent (a,b) from hashing the same as (b,a) the buffer itself must\n\n /// be updated between calls in a way that does not commute. To achieve this XOR and Rotate are used.\n\n /// Add followed by xor is not the same as xor followed by add, and rotate ensures that the same out bits\n\n /// can't be changed by the same set of input bits. 
To cancel this sequence with subsequent input would require\n\n /// knowing the keys.\n\n #[inline(always)]\n\n fn large_update(&mut self, new_data: u128) {\n\n let block: [u64; 2] = new_data.convert();\n\n let combined = folded_multiply(block[0] ^ self.extra_keys[0], block[1] ^ self.extra_keys[1]);\n\n self.buffer = (self.buffer.wrapping_add(self.pad) ^ combined).rotate_left(ROT);\n\n }\n\n\n\n #[inline]\n\n #[cfg(feature = \"specialize\")]\n\n fn short_finish(&self) -> u64 {\n", "file_path": "src/fallback_hash.rs", "rank": 91, "score": 33.116098213985204 }, { "content": " self.buffer.wrapping_add(self.pad)\n\n }\n\n}\n\n\n\n/// Provides [Hasher] methods to hash all of the primitive types.\n\n///\n\n/// [Hasher]: core::hash::Hasher\n\nimpl Hasher for AHasher {\n\n #[inline]\n\n fn write_u8(&mut self, i: u8) {\n\n self.update(i as u64);\n\n }\n\n\n\n #[inline]\n\n fn write_u16(&mut self, i: u16) {\n\n self.update(i as u64);\n\n }\n\n\n\n #[inline]\n\n fn write_u32(&mut self, i: u32) {\n", "file_path": "src/fallback_hash.rs", "rank": 94, "score": 31.951682812070885 }, { "content": " let value: u64 = 1 << 32;\n\n hasher.update(value);\n\n let result = hasher.buffer;\n\n let mut hasher = AHasher::new_with_keys(0, 0);\n\n let value2: u64 = 1;\n\n hasher.update(value2);\n\n let result2 = hasher.buffer;\n\n let result: [u8; 8] = result.convert();\n\n let result2: [u8; 8] = result2.convert();\n\n assert_ne!(hex::encode(result), hex::encode(result2));\n\n }\n\n\n\n #[test]\n\n fn test_conversion() {\n\n let input: &[u8] = \"dddddddd\".as_bytes();\n\n let bytes: u64 = as_array!(input, 8).convert();\n\n assert_eq!(bytes, 0x6464646464646464);\n\n }\n\n}\n", "file_path": "src/fallback_hash.rs", "rank": 95, "score": 30.96316069766607 }, { "content": " fn read_last_u64(&self) -> u64;\n\n fn read_last_u128(&self) -> u128;\n\n fn read_last_u128x2(&self) -> [u128; 2];\n\n fn read_last_u128x4(&self) -> [u128; 4];\n\n}\n\n\n\nimpl ReadFromSlice for [u8] {\n\n #[inline(always)]\n\n fn 
read_u16(&self) -> (u16, &[u8]) {\n\n let (value, rest) = self.split_at(2);\n\n (as_array!(value, 2).convert(), rest)\n\n }\n\n\n\n #[inline(always)]\n\n fn read_u32(&self) -> (u32, &[u8]) {\n\n let (value, rest) = self.split_at(4);\n\n (as_array!(value, 4).convert(), rest)\n\n }\n\n\n\n #[inline(always)]\n", "file_path": "src/convert.rs", "rank": 97, "score": 29.829926282159487 }, { "content": " #[cfg(feature = \"specialize\")]\n\n default fn hash_as_str<T: Hash + ?Sized>(&self, value: &T) -> u64 {\n\n let mut hasher = self.build_hasher();\n\n value.hash(&mut hasher);\n\n hasher.finish()\n\n }\n\n #[inline]\n\n #[cfg(not(feature = \"specialize\"))]\n\n fn hash_as_str<T: Hash + ?Sized>(&self, value: &T) -> u64 {\n\n let mut hasher = self.build_hasher();\n\n value.hash(&mut hasher);\n\n hasher.finish()\n\n }\n\n}\n\n\n\n// #[inline(never)]\n\n// #[doc(hidden)]\n\n// pub fn hash_test(input: &[u8]) -> u64 {\n\n// let a = RandomState::with_seeds(11, 22, 33, 44);\n\n// <[u8]>::get_hash(input, &a)\n", "file_path": "src/lib.rs", "rank": 99, "score": 29.533995206865068 } ]
Rust
packages/sycamore/src/template.rs
parker-codes/sycamore
27d90ecb08a020ea9da7e1b3233d6331ac2050ef
use std::cell::RefCell; use std::fmt; use std::rc::Rc; use crate::generic_node::GenericNode; #[derive(Clone)] pub(crate) enum TemplateType<G: GenericNode> { Node(G), Lazy(Rc<RefCell<dyn FnMut() -> Template<G>>>), Fragment(Vec<Template<G>>), } #[derive(Clone)] pub struct Template<G: GenericNode> { pub(crate) inner: TemplateType<G>, } impl<G: GenericNode> Template<G> { pub fn new_node(node: G) -> Self { Self { inner: TemplateType::Node(node), } } pub fn new_lazy(f: impl FnMut() -> Template<G> + 'static) -> Self { Self { inner: TemplateType::Lazy(Rc::new(RefCell::new(f))), } } pub fn new_fragment(fragment: Vec<Template<G>>) -> Self { Self { inner: TemplateType::Fragment(fragment), } } pub fn empty() -> Self { Self::new_node(G::marker()) } pub fn as_node(&self) -> Option<&G> { if let TemplateType::Node(v) = &self.inner { Some(v) } else { None } } pub fn as_fragment(&self) -> Option<&Vec<Template<G>>> { if let TemplateType::Fragment(v) = &self.inner { Some(v) } else { None } } #[allow(clippy::type_complexity)] pub fn as_lazy(&self) -> Option<&Rc<RefCell<dyn FnMut() -> Template<G>>>> { if let TemplateType::Lazy(v) = &self.inner { Some(v) } else { None } } pub fn is_node(&self) -> bool { matches!( self, Template { inner: TemplateType::Node(_) } ) } pub fn is_fragment(&self) -> bool { matches!( self, Template { inner: TemplateType::Fragment(_) } ) } pub fn is_lazy(&self) -> bool { matches!( self, Template { inner: TemplateType::Lazy(_) } ) } pub fn append_template(&mut self, template: Template<G>) { match &mut self.inner { TemplateType::Node(node) => { self.inner = TemplateType::Fragment(vec![Template::new_node(node.clone()), template]); } TemplateType::Lazy(lazy) => { self.inner = TemplateType::Fragment(vec![ Template { inner: TemplateType::Lazy(Rc::clone(&lazy)), }, template, ]); } TemplateType::Fragment(fragment) => { fragment.push(template); } } } pub fn flatten(self) -> Vec<G> { match self.inner { TemplateType::Node(node) => vec![node], TemplateType::Lazy(lazy) => 
lazy.borrow_mut()().flatten(), TemplateType::Fragment(fragment) => fragment .into_iter() .map(|x| x.flatten()) .flatten() .collect(), } } } impl<G: GenericNode> fmt::Debug for Template<G> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match &self.inner { TemplateType::Node(node) => node.fmt(f), TemplateType::Lazy(lazy) => lazy.as_ref().borrow_mut()().fmt(f), TemplateType::Fragment(fragment) => fragment.fmt(f), } } }
use std::cell::RefCell; use std::fmt; use std::rc::Rc; use crate::generic_node::GenericNode; #[derive(Clone)] pub(crate) enum TemplateType<G: GenericNode> { Node(G), Lazy(Rc<RefCell<dyn FnMut() -> Template<G>>>), Fragment(Vec<Template<G>>), } #[derive(Clone)] pub struct Template<G: GenericNode> { pub(crate) inner: TemplateType<G>, } impl<G: GenericNode> Template<G> { pub fn new_node(node: G) -> Self { Self { inner: TemplateType::Node(node), } } pub fn new_lazy(f: impl FnMut() -> Template<G> + 'static) -> Self { Self { inner: TemplateType::Lazy(Rc::new(RefCell::new(f))), } } pub fn new_fragment(fragment: Vec<Template<G>>) -> Self { Self { inner: TemplateType::Fragment(fragment), } } pub fn empty() -> Self { Self::new_node(G::marker()) } pub fn as_node(&self) -> Option<&G> { if let TemplateType::Node(v) = &self.inner { Some(v) } else { None } } pub fn as_fragment(&self) -> Option<&Vec<Template<G>>> { if let TemplateType::Fragment(v) = &self.inner { Some(v) } else { None } } #[allow(clippy::type_complexity)] pub fn as_lazy(&self) -> Option<&Rc<RefCell<dyn FnMut() -> Template<G>>>> { if let TemplateType::Lazy(v) = &self.inner { Some(v) } else { None } } pub fn is_node(&self) -> bool { matches!( self, Template { inner: TemplateType::Node(_) } ) } pub fn is_fragment(&self) -> bool { matches!( self, Template { inner: TemplateType::Fragment(_) } ) } pub fn is_lazy(&self) -> bool { matches!( self, Template { inner: TemplateType::Lazy(_) } ) } pub fn append_template(&mut self, template: Template<G>) { match &mut self.inner { TemplateType::Node(node) => { self.inner = TemplateType::Fragment(vec![Template::new_node(node.clone()), template]); } TemplateType::Lazy(lazy) => { self.inner = TemplateType::Fragment(vec![ Template { inner: TemplateType::Lazy(Rc::clone(&lazy)), }, template, ]); } TemplateType::Fragment(fragment) => { fragment.push(template); } } } pub fn flatten(self) -> Vec<G> { match self.inner { TemplateType::Node(node) => vec![nod
} impl<G: GenericNode> fmt::Debug for Template<G> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match &self.inner { TemplateType::Node(node) => node.fmt(f), TemplateType::Lazy(lazy) => lazy.as_ref().borrow_mut()().fmt(f), TemplateType::Fragment(fragment) => fragment.fmt(f), } } }
e], TemplateType::Lazy(lazy) => lazy.borrow_mut()().flatten(), TemplateType::Fragment(fragment) => fragment .into_iter() .map(|x| x.flatten()) .flatten() .collect(), } }
function_block-function_prefixed
[ { "content": "#[component(BrowserRouter<G>)]\n\npub fn browser_router<R: Route>(render: impl Fn(R) -> Template<G> + 'static) -> Template<G> {\n\n PATHNAME.with(|pathname| {\n\n assert!(pathname.borrow().is_none());\n\n // Get initial url from window.location.\n\n *pathname.borrow_mut() = Some(Signal::new(\n\n web_sys::window().unwrap().location().pathname().unwrap(),\n\n ));\n\n });\n\n let pathname = PATHNAME.with(|p| p.borrow().clone().unwrap());\n\n\n\n // Listen to onpopstate.\n\n let closure = Closure::wrap(Box::new(cloned!((pathname) => move || {\n\n pathname.set(web_sys::window().unwrap().location().pathname().unwrap());\n\n })) as Box<dyn FnMut()>);\n\n web_sys::window()\n\n .unwrap()\n\n .add_event_listener_with_callback(\"popstate\", closure.as_ref().unchecked_ref())\n\n .unwrap();\n\n closure.forget();\n\n\n", "file_path": "packages/sycamore-router/src/router.rs", "rank": 0, "score": 281480.6639401089 }, { "content": "#[component(Indexed<G>)]\n\npub fn indexed<T: 'static, F: 'static>(props: IndexedProps<T, F, G>) -> Template<G>\n\nwhere\n\n T: Clone + PartialEq,\n\n F: Fn(T) -> Template<G>,\n\n{\n\n let IndexedProps { iterable, template } = props;\n\n let template = Rc::new(template);\n\n\n\n let mut mapped = map_indexed(iterable, {\n\n let template = Rc::clone(&template);\n\n move |x| template(x.clone())\n\n });\n\n Template::new_lazy(move || Template::new_fragment(mapped()))\n\n}\n", "file_path": "packages/sycamore/src/flow.rs", "rank": 1, "score": 233498.37469322764 }, { "content": "/// Adds a callback function to the current reactive scope's cleanup.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use sycamore::prelude::*;\n\n///\n\n/// let cleanup_called = Signal::new(false);\n\n///\n\n/// let scope = create_root(cloned!((cleanup_called) => move || {\n\n/// on_cleanup(move || {\n\n/// cleanup_called.set(true);\n\n/// })\n\n/// }));\n\n///\n\n/// assert_eq!(*cleanup_called.get(), false);\n\n///\n\n/// drop(scope);\n\n/// assert_eq!(*cleanup_called.get(), 
true);\n\n/// ```\n\npub fn on_cleanup(f: impl FnOnce() + 'static) {\n\n SCOPE.with(|scope| {\n\n if scope.borrow().is_some() {\n\n scope\n\n .borrow_mut()\n\n .as_mut()\n\n .unwrap()\n\n .add_cleanup(Box::new(f));\n\n } else {\n\n #[cfg(all(target_arch = \"wasm32\", debug_assertions))]\n\n web_sys::console::warn_1(\n\n &\"Cleanup callbacks created outside of a reactive root will never run.\".into(),\n\n );\n\n #[cfg(all(not(target_arch = \"wasm32\"), debug_assertions))]\n\n eprintln!(\n\n \"WARNING: Cleanup callbacks created outside of a reactive root will never run.\"\n\n );\n\n }\n\n });\n\n}\n", "file_path": "packages/sycamore/src/rx/effect.rs", "rank": 2, "score": 224850.07828511792 }, { "content": "#[component(Header<G>)]\n\npub fn header() -> Template<G> {\n\n template! {\n\n header {\n\n Nav()\n\n }\n\n }\n\n}\n", "file_path": "docs/src/header.rs", "rank": 3, "score": 208180.71602250997 }, { "content": "#[component(Index<G>)]\n\npub fn index() -> Template<G> {\n\n template! {\n\n div(class=\"flex flex-col items-center w-full\") {\n\n h1(class=\"text-5xl font-bold mt-20 mb-5\") {\n\n \"Sycamore\"\n\n }\n\n\n\n p(class=\"mb-10\") {\n\n \"Pure Rust + WASM web-apps\"\n\n }\n\n a(\n\n href=\"/getting_started/installation\",\n\n class=\"py-2 px-3 bg-white hover:bg-yellow-500 border-2 border-yellow-500 \\\n\n rounded font-medium transition\",\n\n ) {\n\n \"Read the Book\"\n\n }\n\n }\n\n }\n\n}\n", "file_path": "docs/src/index.rs", "rank": 4, "score": 208180.71602250997 }, { "content": "#[component(Sidebar<G>)]\n\npub fn sidebar() -> Template<G> {\n\n let sections = PAGES\n\n .iter()\n\n .map(|section| {\n\n let pages = section\n\n .1\n\n .iter()\n\n .map(|page| {\n\n template! 
{\n\n li {\n\n a(\n\n href=page.1,\n\n class=\"pl-4 hover:bg-gray-300 w-full inline-block rounded transition\",\n\n ) {\n\n (page.0)\n\n }\n\n }\n\n }\n\n })\n\n .collect();\n", "file_path": "docs/src/sidebar.rs", "rank": 5, "score": 208180.71602250997 }, { "content": "#[component(Copyright<G>)]\n\npub fn copyright() -> Template<G> {\n\n template! {\n\n footer(class=\"info\") {\n\n p { \"Double click to edit a todo\" }\n\n p {\n\n \"Created by \"\n\n a(href=\"https://github.com/lukechu10\", target=\"_blank\") { \"lukechu10\" }\n\n }\n\n p {\n\n \"Part of \"\n\n a(href=\"http://todomvc.com\") { \"TodoMVC\" }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "examples/todomvc/src/copyright.rs", "rank": 6, "score": 204463.42255163926 }, { "content": "#[component(Component<G>)]\n\npub fn component() -> Template<G> {\n\n template! {\n\n div\n\n }\n\n}\n\n\n", "file_path": "packages/sycamore-macro/tests/template/component-pass.rs", "rank": 7, "score": 204293.50618987647 }, { "content": "/// Reconciles an array of nodes.\n\n///\n\n/// # Params\n\n/// * `parent` - The parent node under which all other nodes are (direct) children.\n\n/// * `a` - The current/existing nodes that are to be diffed.\n\n/// * `b` - The new nodes that are to be inserted. After the reconciliation, all the nodes in `b`\n\n/// should be inserted under `parent`.\n\n///\n\n/// # Panics\n\n/// Panics if `a.is_empty()`. Append nodes instead.\n\npub fn reconcile_fragments<G: GenericNode>(parent: &G, mut a: Vec<G>, b: Vec<G>) {\n\n debug_assert!(!a.is_empty(), \"a cannot be empty\");\n\n\n\n // Sanity check: make sure all nodes in a are children of parent.\n\n #[cfg(debug_assertions)]\n\n {\n\n for (i, node) in a.iter().enumerate() {\n\n if node.parent_node().as_ref() != Some(&parent) {\n\n panic!(\n\n \"node {} in existing nodes Vec is not a child of parent. 
node = {:#?}\",\n\n i, node\n\n );\n\n }\n\n }\n\n }\n\n\n\n let b_len = b.len();\n\n let mut a_end = a.len();\n\n let mut b_end = b_len;\n\n let mut a_start = 0;\n", "file_path": "packages/sycamore/src/generic_node/render.rs", "rank": 8, "score": 202591.44364418712 }, { "content": "#[component(Content<G>)]\n\npub fn content(pathname: String) -> Template<G> {\n\n let location = web_sys::window()\n\n .unwrap()\n\n .document()\n\n .unwrap()\n\n .location()\n\n .unwrap();\n\n\n\n let docs_container_ref = NodeRef::<G>::new();\n\n\n\n let markdown = Signal::new(String::new());\n\n let html = create_memo(cloned!((markdown) => move || {\n\n let markdown = markdown.get();\n\n\n\n let options = Options::all();\n\n let parser = Parser::new_ext(markdown.as_ref(), options);\n\n\n\n let mut output = String::new();\n\n html::push_html(&mut output, parser);\n\n\n", "file_path": "docs/src/content.rs", "rank": 9, "score": 193047.02264763412 }, { "content": "/// Render a [`Template`] under a `parent` node by reusing existing nodes (client side\n\n/// hydration). Alias for [`hydrate_to`] with `parent` being the `<body>` tag.\n\n///\n\n/// For rendering without hydration, use [`render`] instead.\n\n///\n\n/// **TODO**: This method currently deletes existing nodes from DOM and reinserts new\n\n/// created nodes. 
This will be fixed in a later release.\n\n///\n\n/// _This API requires the following crate features to be activated: `dom`_\n\npub fn hydrate(template: impl FnOnce() -> Template<DomNode>) {\n\n let window = web_sys::window().unwrap();\n\n let document = window.document().unwrap();\n\n\n\n hydrate_to(template, &document.body().unwrap());\n\n}\n\n\n", "file_path": "packages/sycamore/src/generic_node/dom_node.rs", "rank": 10, "score": 192986.99025512626 }, { "content": "/// Render a [`Template`] into the DOM.\n\n/// Alias for [`render_to`] with `parent` being the `<body>` tag.\n\n///\n\n/// _This API requires the following crate features to be activated: `dom`_\n\npub fn render(template: impl FnOnce() -> Template<DomNode>) {\n\n let window = web_sys::window().unwrap();\n\n let document = window.document().unwrap();\n\n\n\n render_to(template, &document.body().unwrap());\n\n}\n\n\n", "file_path": "packages/sycamore/src/generic_node/dom_node.rs", "rank": 11, "score": 192985.72416856987 }, { "content": "#[component(Item<G>)]\n\npub fn item(props: ItemProps) -> Template<G> {\n\n let ItemProps { todo, app_state } = props;\n\n\n\n let title = cloned!((todo) => move || todo.get().title.clone());\n\n let completed = create_selector(cloned!((todo) => move || todo.get().completed));\n\n let id = todo.get().id;\n\n\n\n let editing = Signal::new(false);\n\n let input_ref = NodeRef::<G>::new();\n\n let value = Signal::new(\"\".to_string());\n\n\n\n let handle_input = cloned!((value) => move |event: Event| {\n\n let target: HtmlInputElement = event.target().unwrap().unchecked_into();\n\n value.set(target.value());\n\n });\n\n\n\n let toggle_completed = cloned!((todo) => move |_| {\n\n todo.set(Todo {\n\n completed: !todo.get().completed,\n\n ..todo.get().as_ref().clone()\n", "file_path": "examples/todomvc/src/item.rs", "rank": 12, "score": 186510.90125616663 }, { "content": "/// Render a [`Template`] into a static [`String`]. 
Useful\n\n/// for rendering to a string on the server side.\n\n///\n\n/// _This API requires the following crate features to be activated: `ssr`_\n\npub fn render_to_string(template: impl FnOnce() -> Template<SsrNode>) -> String {\n\n let mut ret = String::new();\n\n let _scope = create_root(|| {\n\n for node in template().flatten() {\n\n ret.push_str(&format!(\"{}\", node));\n\n }\n\n });\n\n\n\n ret\n\n}\n", "file_path": "packages/sycamore/src/generic_node/ssr_node.rs", "rank": 13, "score": 185239.63191910528 }, { "content": "#[component(Header<G>)]\n\npub fn header(app_state: AppState) -> Template<G> {\n\n let value = Signal::new(String::new());\n\n\n\n let handle_submit = cloned!((app_state, value) => move |event: Event| {\n\n let event: KeyboardEvent = event.unchecked_into();\n\n\n\n if event.key() == \"Enter\" {\n\n let mut task = value.get().as_ref().clone();\n\n task = task.trim().to_string();\n\n\n\n if !task.is_empty() {\n\n app_state.add_todo(task);\n\n value.set(\"\".to_string());\n\n }\n\n }\n\n });\n\n\n\n template! {\n\n header(class=\"header\") {\n\n h1 { \"todos\" }\n\n input(class=\"new-todo\",\n\n placeholder=\"What needs to be done?\",\n\n bind:value=value,\n\n on:keyup=handle_submit,\n\n )\n\n }\n\n }\n\n}\n", "file_path": "examples/todomvc/src/header.rs", "rank": 14, "score": 183476.55783792346 }, { "content": "#[component(Footer<G>)]\n\npub fn footer(app_state: AppState) -> Template<G> {\n\n let items_text = cloned!((app_state) => move || {\n\n match app_state.todos_left() {\n\n 1 => \"item\",\n\n _ => \"items\"\n\n }\n\n });\n\n\n\n let has_completed_todos = create_selector(cloned!((app_state) => move || {\n\n app_state.todos_left() < app_state.todos.get().len()\n\n }));\n\n\n\n let app_state2 = app_state.clone();\n\n let app_state3 = app_state.clone();\n\n\n\n template! 
{\n\n footer(class=\"footer\") {\n\n span(class=\"todo-count\") {\n\n strong { (app_state.todos_left()) }\n\n span { \" \" (items_text()) \" left\" }\n", "file_path": "examples/todomvc/src/footer.rs", "rank": 15, "score": 183476.55783792346 }, { "content": "#[component(List<G>)]\n\npub fn list(app_state: AppState) -> Template<G> {\n\n let todos_left = create_selector(cloned!((app_state) => move || {\n\n app_state.todos_left()\n\n }));\n\n\n\n let input_ref = NodeRef::<G>::new();\n\n\n\n // FIXME: bind to boolean attribute\n\n create_effect(cloned!((todos_left, input_ref) => move || {\n\n let checked = *todos_left.get() == 0;\n\n\n\n if let Some(input_ref) = input_ref.try_get::<DomNode>() {\n\n input_ref.unchecked_into::<HtmlInputElement>().set_checked(checked);\n\n }\n\n }));\n\n\n\n let filtered_todos = create_memo(cloned!((app_state) => move || {\n\n app_state.todos.get().iter().filter(|todo| match *app_state.filter.get() {\n\n Filter::All => true,\n\n Filter::Active => !todo.get().completed,\n", "file_path": "examples/todomvc/src/list.rs", "rank": 16, "score": 183476.55783792346 }, { "content": "/// Render a [`Template`] under a `parent` node.\n\n/// For rendering under the `<body>` tag, use [`render`] instead.\n\n///\n\n/// _This API requires the following crate features to be activated: `dom`_\n\npub fn render_to(template: impl FnOnce() -> Template<DomNode>, parent: &Node) {\n\n let scope = create_root(|| {\n\n insert(\n\n &DomNode {\n\n id: NodeId::new_with_node(parent),\n\n node: Rc::new(parent.clone()),\n\n },\n\n template(),\n\n None,\n\n None,\n\n );\n\n });\n\n\n\n thread_local! 
{\n\n static GLOBAL_SCOPES: std::cell::RefCell<Vec<ReactiveScope>> = std::cell::RefCell::new(Vec::new());\n\n }\n\n\n\n GLOBAL_SCOPES.with(|global_scopes| global_scopes.borrow_mut().push(scope));\n\n}\n\n\n", "file_path": "packages/sycamore/src/generic_node/dom_node.rs", "rank": 17, "score": 182555.12138471444 }, { "content": "/// Render a [`Template`] under a `parent` node by reusing existing nodes (client side\n\n/// hydration). For rendering under the `<body>` tag, use [`hydrate_to`] instead.\n\n///\n\n/// For rendering without hydration, use [`render`] instead.\n\n///\n\n/// **TODO**: This method currently deletes existing nodes from DOM and reinserts new\n\n/// created nodes. This will be fixed in a later release.\n\n///\n\n/// _This API requires the following crate features to be activated: `dom`_\n\npub fn hydrate_to(template: impl FnOnce() -> Template<DomNode>, parent: &Node) {\n\n for child in get_children(parent.unchecked_ref()) {\n\n child.remove();\n\n }\n\n\n\n let scope = create_root(|| {\n\n insert(\n\n &DomNode {\n\n id: NodeId::new_with_node(&parent),\n\n node: Rc::new(parent.clone()),\n\n },\n\n template(),\n\n None,\n\n None, // TODO\n\n );\n\n });\n\n\n\n thread_local! 
{\n\n static GLOBAL_SCOPES: std::cell::RefCell<Vec<ReactiveScope>> = std::cell::RefCell::new(Vec::new());\n\n }\n\n\n\n GLOBAL_SCOPES.with(|global_scopes| global_scopes.borrow_mut().push(scope));\n\n}\n", "file_path": "packages/sycamore/src/generic_node/dom_node.rs", "rank": 18, "score": 182554.1467290009 }, { "content": "#[component(Keyed<G>)]\n\npub fn keyed<T: 'static, F: 'static, K: 'static, Key: 'static>(\n\n props: KeyedProps<T, F, G, K, Key>,\n\n) -> Template<G>\n\nwhere\n\n F: Fn(T) -> Template<G>,\n\n K: Fn(&T) -> Key,\n\n Key: Clone + Hash + Eq,\n\n T: Clone + Eq + Hash,\n\n{\n\n let KeyedProps {\n\n iterable,\n\n template,\n\n key,\n\n } = props;\n\n let template = Rc::new(template);\n\n\n\n let mut mapped = map_keyed(\n\n iterable,\n\n {\n\n let template = Rc::clone(&template);\n", "file_path": "packages/sycamore/src/flow.rs", "rank": 19, "score": 174111.8911971813 }, { "content": "pub fn template_impl(component: HtmlRoot) -> TokenStream {\n\n component.to_token_stream()\n\n}\n", "file_path": "packages/sycamore-macro/src/template/mod.rs", "rank": 20, "score": 167706.99710430036 }, { "content": "pub fn bench(c: &mut Criterion) {\n\n c.bench_function(\"ssr_small\", |b| {\n\n b.iter(|| {\n\n #[component(App<G>)]\n\n fn app() -> Template<G> {\n\n template! {\n\n div(class=\"my-container\") {\n\n p { \"Hello World!\" }\n\n }\n\n }\n\n }\n\n\n\n let _ssr = render_to_string(|| template! 
{ App() });\n\n })\n\n });\n\n\n\n c.bench_function(\"ssr_medium\", |b| {\n\n b.iter(|| {\n\n #[component(ListItem<G>)]\n\n fn list_item(value: i32) -> Template<G> {\n", "file_path": "packages/sycamore/benches/ssr.rs", "rank": 21, "score": 167270.20600951163 }, { "content": "pub fn bench(c: &mut Criterion) {\n\n c.bench_function(\"reactivity_signals\", |b| {\n\n b.iter(|| {\n\n let state = Signal::new(black_box(0));\n\n\n\n for _i in 0..1000 {\n\n let value = state.get();\n\n state.set(*value + 1);\n\n }\n\n });\n\n });\n\n\n\n c.bench_function(\"reactivity_effects\", |b| {\n\n b.iter(|| {\n\n let state = Signal::new(black_box(0));\n\n create_effect(cloned!((state) => move || {\n\n let _double = *state.get() * 2;\n\n }));\n\n\n\n for _i in 0..1000 {\n", "file_path": "packages/sycamore/benches/reactivity.rs", "rank": 22, "score": 167270.20600951163 }, { "content": "/// Creates an effect on signals used inside the effect closure.\n\n///\n\n/// Unlike [`create_effect`], this will allow the closure to run different code upon first\n\n/// execution, so it can return a value.\n\npub fn create_effect_initial<R: 'static>(\n\n initial: impl FnOnce() -> (Rc<RefCell<dyn FnMut()>>, R) + 'static,\n\n) -> R {\n\n type InitialFn = dyn FnOnce() -> (Rc<RefCell<dyn FnMut()>>, Box<dyn Any>);\n\n\n\n /// Internal implementation: use dynamic dispatch to reduce code bloat.\n\n fn internal(initial: Box<InitialFn>) -> Box<dyn Any> {\n\n let running: Rc<RefCell<Option<Running>>> = Rc::new(RefCell::new(None));\n\n\n\n type MutEffect = Rc<RefCell<Option<Rc<RefCell<dyn FnMut()>>>>>;\n\n let effect: MutEffect = Rc::new(RefCell::new(None));\n\n let ret: Rc<RefCell<Option<Box<dyn Any>>>> = Rc::new(RefCell::new(None));\n\n\n\n let initial = RefCell::new(Some(initial));\n\n\n\n let execute: Rc<dyn Fn()> = Rc::new({\n\n let running = Rc::downgrade(&running);\n\n let ret = Rc::downgrade(&ret);\n\n move || {\n\n CONTEXTS.with(|contexts| {\n", "file_path": "packages/sycamore/src/rx/effect.rs", "rank": 
23, "score": 163821.63011418178 }, { "content": "/// Insert a [`GenericNode`] under `parent` at the specified `marker`. If `initial` is `Some(_)`,\n\n/// `initial` will be replaced with the new inserted node.\n\n///\n\n/// # Params\n\n/// * `parent` - The parent node to insert `accessor` under.\n\n/// * `accessor` - The [`Template`] to be inserted.\n\n/// * `initial` - An optional initial node that is already inserted into the DOM.\n\n/// * `marker` - An optional marker node. If `marker` is `Some(_)`, `accessor` will be inserted\n\n/// directly before `marker`. If `marker` is `None`, `accessor` will be appended at the end of\n\n/// `parent`.\n\npub fn insert<G: GenericNode>(\n\n parent: &G,\n\n accessor: Template<G>,\n\n initial: Option<Template<G>>,\n\n marker: Option<&G>,\n\n) {\n\n insert_expression(parent, accessor, initial, marker, false);\n\n}\n\n\n", "file_path": "packages/sycamore/src/generic_node/render.rs", "rank": 24, "score": 162638.6417193311 }, { "content": "/// Cleans the children specified by `current` from `parent`.\n\n///\n\n/// # Params\n\n/// * `parent` - The parent node from which to clean the children.\n\n/// * `current` - A [`Vec`] of [`GenericNode`]s that are to be removed.\n\n/// * `marker` - If `marker` is `None`, all the nodes from `parent` are removed regardless of\n\n/// `current`. 
This behavior will likely change in the future.\n\n/// * `replacement` - An optional replacement node for the removed nodes.\n\npub fn clean_children<G: GenericNode>(\n\n parent: &G,\n\n current: Vec<G>,\n\n marker: Option<&G>,\n\n replacement: Option<&G>,\n\n) {\n\n if marker == None {\n\n parent.update_inner_text(\"\");\n\n if let Some(replacement) = replacement {\n\n parent.append_child(replacement);\n\n }\n\n return;\n\n }\n\n\n\n for node in current {\n\n if node.parent_node().as_ref() == Some(&parent) {\n\n if let Some(replacement) = replacement {\n\n parent.replace_child(&node, &replacement);\n\n } else {\n\n parent.remove_child(&node);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "packages/sycamore/src/generic_node/render.rs", "rank": 25, "score": 160359.45110341025 }, { "content": "/// Appends all the nodes in `fragment` to `parent` behind `marker`.\n\npub fn append_nodes<G: GenericNode>(parent: &G, fragment: Vec<G>, marker: Option<&G>) {\n\n for node in fragment {\n\n parent.insert_child_before(&node, marker);\n\n }\n\n}\n\n\n", "file_path": "packages/sycamore/src/generic_node/render.rs", "rank": 26, "score": 158736.2765474924 }, { "content": "/// Normalizes a `Vec<Template<G>>` into a `Vec<G>`.\n\n///\n\n/// Returns whether the normalized `Vec<G>` is dynamic (and should be rendered in an effect).\n\n///\n\n/// # Params\n\n/// * `v` - The [`Vec`] to write the output to.\n\n/// * `fragment` - The `Vec<Template<G>>` to normalize.\n\n/// * `unwrap` - If `true`, unwraps the `fragment` without setting `dynamic` to true. 
In most cases,\n\n/// this should be `false`.\n\npub fn normalize_incoming_fragment<G: GenericNode>(\n\n v: &mut Vec<Template<G>>,\n\n fragment: Vec<Template<G>>,\n\n unwrap: bool,\n\n) -> bool {\n\n let mut dynamic = false;\n\n\n\n for template in fragment {\n\n match template.inner {\n\n TemplateType::Node(_) => v.push(template),\n\n TemplateType::Lazy(f) if unwrap => {\n\n let mut value = f.as_ref().borrow_mut()();\n\n while let TemplateType::Lazy(f) = value.inner {\n\n value = f.as_ref().borrow_mut()();\n\n }\n\n dynamic = normalize_incoming_fragment(\n\n v,\n\n match value.inner {\n\n TemplateType::Node(_) => vec![value],\n\n TemplateType::Fragment(fragment) => fragment,\n", "file_path": "packages/sycamore/src/generic_node/render.rs", "rank": 27, "score": 158193.98407266353 }, { "content": "/// Run the passed closure inside an untracked scope.\n\n///\n\n/// See also [`StateHandle::get_untracked()`].\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use sycamore::prelude::*;\n\n///\n\n/// let state = Signal::new(1);\n\n///\n\n/// let double = create_memo({\n\n/// let state = state.clone();\n\n/// move || untrack(|| *state.get() * 2)\n\n/// });\n\n///\n\n/// assert_eq!(*double.get(), 2);\n\n///\n\n/// state.set(2);\n\n/// // double value should still be old value because state was untracked\n\n/// assert_eq!(*double.get(), 2);\n\n/// ```\n\npub fn untrack<T>(f: impl FnOnce() -> T) -> T {\n\n let f = Rc::new(RefCell::new(Some(f)));\n\n let g = Rc::clone(&f);\n\n\n\n // Do not panic if running inside destructor.\n\n if let Ok(ret) = CONTEXTS.try_with(|contexts| {\n\n let tmp = contexts.take();\n\n\n\n let ret = f.take().unwrap()();\n\n\n\n *contexts.borrow_mut() = tmp;\n\n\n\n ret\n\n }) {\n\n ret\n\n } else {\n\n g.take().unwrap()()\n\n }\n\n}\n\n\n", "file_path": "packages/sycamore/src/rx/effect.rs", "rank": 28, "score": 156889.1955380764 }, { "content": "#[must_use = \"create_root returns the reactive scope of the effects created inside this scope\"]\n\npub fn 
create_root<'a>(callback: impl FnOnce() + 'a) -> ReactiveScope {\n\n /// Internal implementation: use dynamic dispatch to reduce code bloat.\n\n fn internal<'a>(callback: Box<dyn FnOnce() + 'a>) -> ReactiveScope {\n\n SCOPE.with(|scope| {\n\n let outer_scope = scope.replace(Some(ReactiveScope::new()));\n\n callback();\n\n\n\n scope\n\n .replace(outer_scope)\n\n .expect(\"ReactiveScope should be valid inside the reactive root\")\n\n })\n\n }\n\n\n\n internal(Box::new(callback))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::cell::RefCell;\n\n use std::rc::Rc;\n", "file_path": "packages/sycamore/src/rx.rs", "rank": 29, "score": 156886.67774312384 }, { "content": "#[component(C<G>)]\n\nfn c() -> Template<G> {\n\n template! {\n\n div\n\n }\n\n}\n\n\n", "file_path": "packages/sycamore-macro/tests/template/component-fail.rs", "rank": 30, "score": 155734.54454142397 }, { "content": "#[component(Nav<G>)]\n\nfn nav() -> Template<G> {\n\n template! {\n\n nav(class=\"fixed top-0 z-50 px-8 w-full \\\n\n backdrop-filter backdrop-blur-sm backdrop-saturate-150 bg-opacity-80 \\\n\n bg-gray-100 border-b border-gray-400\") {\n\n div(class=\"flex flex-row justify-between items-center h-12\") {\n\n // Brand section\n\n div(class=\"flex-initial\") {\n\n div(class=\"flex space-x-4 text-white\") {\n\n a(href=\"/#\", class=\"py-2 px-3 text-sm font-medium \\\n\n bg-gray-500 hover:bg-gray-600 transition-colors rounded\") {\n\n \"Sycamore\"\n\n }\n\n }\n\n }\n\n // Links section\n\n div(class=\"flex flex-row ml-2 space-x-4 text-white\") {\n\n a(class=\"py-2 px-3 text-sm text-gray-600 hover:text-gray-800 hover:underline transition\",\n\n href=\"/getting_started/installation\",\n\n ) {\n", "file_path": "docs/src/header.rs", "rank": 31, "score": 154450.3896326162 }, { "content": "#[component(App<G>)]\n\nfn app() -> Template<G> {\n\n template! {\n\n main {\n\n BrowserRouter(|route: Routes| {\n\n template! 
{\n\n div(class=\"mt-12\") {\n\n header::Header()\n\n (match &route {\n\n Routes::Index => template! {\n\n div(class=\"container mx-auto\") {\n\n index::Index()\n\n }\n\n },\n\n Routes::Docs(a, b) => template! {\n\n content::Content(format!(\"/{}/{}\", a, b))\n\n },\n\n Routes::NotFound => template! {\n\n \"404 Not Found\"\n\n },\n\n })\n\n }\n\n }\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "docs/src/main.rs", "rank": 32, "score": 154450.3896326162 }, { "content": "/// Creates an effect on signals used inside the effect closure.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use sycamore::prelude::*;\n\n///\n\n/// let state = Signal::new(0);\n\n///\n\n/// create_effect(cloned!((state) => move || {\n\n/// println!(\"State changed. New state value = {}\", state.get());\n\n/// })); // Prints \"State changed. New state value = 0\"\n\n///\n\n/// state.set(1); // Prints \"State changed. New state value = 1\"\n\n/// ```\n\npub fn create_effect<F>(mut effect: F)\n\nwhere\n\n F: FnMut() + 'static,\n\n{\n\n create_effect_initial(move || {\n\n effect();\n\n (Rc::new(RefCell::new(effect)), ())\n\n });\n\n}\n\n\n", "file_path": "packages/sycamore/src/rx/effect.rs", "rank": 33, "score": 153660.52961210947 }, { "content": "#[component(App<G>)]\n\nfn app() -> Template<G> {\n\n let progress = Tweened::new([0.0, 1.0], Duration::from_millis(250), easing::quad_out);\n\n let progress0 = progress.clone();\n\n let progress1 = progress.clone();\n\n let progress2 = progress.clone();\n\n let progress3 = progress.clone();\n\n let progress4 = progress.clone();\n\n let progress5 = progress.clone();\n\n\n\n template! 
{\n\n div {\n\n style {\n\n r#\"\n\n progress {\n\n display: block;\n\n width: 100%;\n\n }\n\n \"#\n\n }\n\n progress(value=progress.get()[0])\n", "file_path": "examples/tweened/src/main.rs", "rank": 34, "score": 151514.3984690354 }, { "content": "#[component(App<G>)]\n\nfn app() -> Template<G> {\n\n let name = Signal::new(String::new());\n\n let name2 = name.clone();\n\n\n\n template! {\n\n div {\n\n h1 {\n\n \"Hello \"\n\n (if *create_selector(cloned!((name) => move || !name.get().is_empty())).get() {\n\n cloned!((name) => template! {\n\n span { (name.get()) }\n\n })\n\n } else {\n\n template! { span { \"World\" } }\n\n })\n\n \"!\"\n\n }\n\n\n\n input(bind:value=name2)\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/hello/src/main.rs", "rank": 35, "score": 151514.3984690354 }, { "content": "#[component(App<G>)]\n\nfn app() -> Template<G> {\n\n let local_storage = web_sys::window()\n\n .unwrap()\n\n .local_storage()\n\n .unwrap()\n\n .expect(\"user has not enabled localStorage\");\n\n\n\n let todos = if let Ok(Some(app_state)) = local_storage.get_item(KEY) {\n\n serde_json::from_str(&app_state).unwrap_or_else(|_| Signal::new(Vec::new()))\n\n } else {\n\n Signal::new(Vec::new())\n\n };\n\n\n\n let app_state = AppState {\n\n todos,\n\n filter: Signal::new(Filter::get_filter_from_hash()),\n\n };\n\n\n\n create_effect(cloned!((local_storage, app_state) => move || {\n\n for todo in app_state.todos.get().iter() {\n", "file_path": "examples/todomvc/src/main.rs", "rank": 36, "score": 151514.3984690354 }, { "content": "#[component(App<G>)]\n\nfn app() -> Template<G> {\n\n let counter = Signal::new(0);\n\n\n\n create_effect(cloned!((counter) => move || {\n\n log::info!(\"Counter value: {}\", *counter.get());\n\n }));\n\n\n\n let increment = cloned!((counter) => move |_| counter.set(*counter.get() + 1));\n\n\n\n let reset = cloned!((counter) => move |_| counter.set(0));\n\n\n\n template! 
{\n\n div {\n\n \"Counter demo\"\n\n p(class=\"value\") {\n\n \"Value: \"\n\n (counter.get())\n\n }\n\n button(class=\"increment\", on:click=increment) {\n\n \"Increment\"\n\n }\n\n button(class=\"reset\", on:click=reset) {\n\n \"Reset\"\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/counter/src/main.rs", "rank": 37, "score": 151514.3984690354 }, { "content": "#[component(App<G>)]\n\nfn app() -> Template<G> {\n\n let name = Signal::new(String::new());\n\n\n\n let handle_change = move |_| unreachable!();\n\n\n\n template! {\n\n div {\n\n h1 {\n\n \"Hello \"\n\n ({if !name.get().is_empty() {\n\n cloned!((name) => template! {\n\n span { (name.get()) }\n\n })\n\n } else {\n\n template! { span { \"World\" } }\n\n }})\n\n \"!\"\n\n }\n\n\n\n input(placeholder=\"What is your name?\", on:input=handle_change)\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/ssr/src/main.rs", "rank": 38, "score": 151514.3984690354 }, { "content": "#[component(App<G>)]\n\nfn app() -> Template<G> {\n\n let items = Signal::new(vec![\n\n template! { \"Hello!\" },\n\n template! { \"I am an item in a fragment\"},\n\n ]);\n\n\n\n let add_item = cloned!((items) => move |_| {\n\n items.set(\n\n (*items.get())\n\n .clone()\n\n .into_iter()\n\n .chain(Some(template! { \"New item\" }))\n\n .collect(),\n\n );\n\n });\n\n\n\n template! {\n\n div {\n\n button(on:click=add_item) { \"Add item\" }\n\n div(class=\"items\") {\n\n (Template::new_fragment((*items.get()).clone()))\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/iteration/src/main.rs", "rank": 39, "score": 151514.3984690354 }, { "content": "#[component(App<G>)]\n\nfn app() -> Template<G> {\n\n let state = Signal::new(1);\n\n\n\n let increment = cloned!((state) => move |_| {\n\n state.set(*state.get() + 1);\n\n });\n\n\n\n template! 
{\n\n div {\n\n h1 {\n\n \"Component demo\"\n\n }\n\n\n\n MyComponent(state.handle())\n\n MyComponent(state.handle())\n\n\n\n button(on:click=increment) {\n\n \"Increment\"\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/components/src/main.rs", "rank": 40, "score": 151514.3984690354 }, { "content": "/// Trait for describing how something should be rendered into DOM nodes.\n\npub trait IntoTemplate<G: GenericNode> {\n\n /// Called during the initial render when creating the DOM nodes. Should return a\n\n /// `Vec` of [`GenericNode`]s.\n\n fn create(&self) -> Template<G>;\n\n}\n\n\n\nimpl<G: GenericNode> IntoTemplate<G> for Template<G> {\n\n fn create(&self) -> Template<G> {\n\n self.clone()\n\n }\n\n}\n\n\n\nimpl<T: fmt::Display + ?Sized, G: GenericNode> IntoTemplate<G> for T {\n\n fn create(&self) -> Template<G> {\n\n Template::new_node(G::text_node(&format!(\"{}\", self)))\n\n }\n\n}\n", "file_path": "packages/sycamore/src/render.rs", "rank": 41, "score": 151286.1159637145 }, { "content": "struct TweenedInner<T: Lerp + Clone + 'static> {\n\n signal: Signal<T>,\n\n current_task: Option<Task>,\n\n transition_duration: Duration,\n\n easing_fn: Rc<dyn Fn(f32) -> f32>,\n\n}\n\n\n\nimpl<T: Lerp + Clone + 'static> Tweened<T> {\n\n /// Create a new tweened state with the given value.\n\n pub fn new(\n\n initial: T,\n\n transition_duration: std::time::Duration,\n\n easing_fn: impl Fn(f32) -> f32 + 'static,\n\n ) -> Self {\n\n Self(Rc::new(RefCell::new(TweenedInner {\n\n signal: Signal::new(initial),\n\n current_task: None,\n\n transition_duration: Duration::from_std(transition_duration)\n\n .expect(\"transition_duration is greater than the maximum value\"),\n\n easing_fn: Rc::new(easing_fn),\n", "file_path": "packages/sycamore/src/rx/motion.rs", "rank": 42, "score": 148658.79976040553 }, { "content": "pub fn component_impl(\n\n attr: ComponentFunctionName,\n\n component: ComponentFunction,\n\n) -> Result<TokenStream> {\n\n let ComponentFunctionName {\n\n 
component_name,\n\n generics: generic_node_ty,\n\n } = attr;\n\n\n\n let component_name_str = component_name.to_string();\n\n let generic_node_ty = generic_node_ty.type_params().next().unwrap();\n\n let generic_node: TypeParam = syn::parse_quote! {\n\n #generic_node_ty: ::sycamore::generic_node::GenericNode\n\n };\n\n\n\n let ComponentFunction {\n\n block,\n\n props_type: _,\n\n arg,\n\n generics,\n", "file_path": "packages/sycamore-macro/src/component/mod.rs", "rank": 43, "score": 148345.25959924478 }, { "content": "#[component(StaticRouter<G>)]\n\npub fn static_router<R: Route>(\n\n (pathname, render): (String, impl Fn(R) -> Template<G> + 'static),\n\n) -> Template<G> {\n\n let path = pathname\n\n .split('/')\n\n .filter(|s| !s.is_empty())\n\n .collect::<Vec<_>>();\n\n\n\n let route = R::match_route(&path);\n\n render(route)\n\n}\n\n\n\nthread_local! {\n\n static PATHNAME: RefCell<Option<Signal<String>>> = RefCell::new(None);\n\n}\n\n\n\n/// A router that uses the\n\n/// [HTML5 History API](https://developer.mozilla.org/en-US/docs/Web/API/History_API) to keep the\n\n/// UI in sync with the URL.\n", "file_path": "packages/sycamore-router/src/router.rs", "rank": 44, "score": 139526.55819801404 }, { "content": "fn compile_fail<G: GenericNode>() {\n\n let _: Template<G> = template! { p.my-class#id };\n\n\n\n let _: Template<G> = template! { button(disabled) };\n\n let _: Template<G> = template! { button(on:click) };\n\n let _: Template<G> = template! { button(unknown:directive=\"123\") };\n\n\n\n let _: Template<G> = template! { button(a.b.c=\"123\") };\n\n}\n\n\n", "file_path": "packages/sycamore-macro/tests/template/element-fail.rs", "rank": 45, "score": 138929.7952792781 }, { "content": "fn compile_pass<G: GenericNode>() {\n\n let _: Template<G> = template! 
{ Component() };\n\n}\n\n\n", "file_path": "packages/sycamore-macro/tests/template/component-pass.rs", "rank": 46, "score": 138929.7952792781 }, { "content": "fn compile_fail<G: GenericNode>() {\n\n let _: Template<G> = template! { UnknownComponent() };\n\n\n\n let _: Template<G> = template! { C };\n\n let _: Template<G> = template! { C(1) };\n\n}\n\n\n", "file_path": "packages/sycamore-macro/tests/template/component-fail.rs", "rank": 47, "score": 138929.7952792781 }, { "content": "fn compile_pass<G: GenericNode>() {\n\n let _: Template<G> = template! { \"Raw text nodes!\" };\n\n\n\n let _: Template<G> = template! {\n\n p { \"First\" }\n\n p { \"Second\" }\n\n \"Third\"\n\n };\n\n\n\n // let spliced = 123;\n\n // let _: Template<G> = template! { (spliced) };\n\n}\n\n\n", "file_path": "packages/sycamore-macro/tests/template/root-pass.rs", "rank": 48, "score": 138929.7952792781 }, { "content": "fn compile_pass<G: GenericNode>() {\n\n let _: Template<G> = template! { p };\n\n let _: Template<G> = template! { custom-element };\n\n\n\n let _: Template<G> = template! { p() };\n\n let _: Template<G> = template! { custom-element() };\n\n\n\n let _: Template<G> = template! { p(class=\"my-class\") };\n\n let _: Template<G> = template! { p(class=\"my-class\", id=\"my-id\") };\n\n\n\n let _: Template<G> = template! { button(class=\"my-btn\", on:click=|_| {}) };\n\n let _: Template<G> = template! { button(class=\"my-btn\", aria-hidden=\"true\") };\n\n}\n\n\n", "file_path": "packages/sycamore-macro/tests/template/element-pass.rs", "rank": 49, "score": 138929.7952792781 }, { "content": "#[component(MyComponent<G>)]\n\nfn my_component(num: StateHandle<i32>) -> Template<G> {\n\n template! 
{\n\n div(class=\"my-component\") {\n\n \"My component\"\n\n p {\n\n \"Value: \"\n\n (num.get())\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/components/src/main.rs", "rank": 50, "score": 133098.39904116665 }, { "content": "#[proc_macro]\n\npub fn template(component: TokenStream) -> TokenStream {\n\n let component = parse_macro_input!(component as template::HtmlRoot);\n\n\n\n template::template_impl(component).into()\n\n}\n\n\n\n/// A macro for creating components from functions.\n", "file_path": "packages/sycamore-macro/src/lib.rs", "rank": 51, "score": 131238.91205638097 }, { "content": "#[wasm_bindgen_test]\n\nfn empty_template() {\n\n let node = template! {};\n\n\n\n render_to(|| node, &test_container());\n\n\n\n assert_eq!(\n\n document()\n\n .query_selector(\"#test-container\")\n\n .unwrap()\n\n .unwrap()\n\n .inner_html(),\n\n \"<!---->\"\n\n );\n\n}\n\n\n", "file_path": "packages/sycamore/tests/web/main.rs", "rank": 52, "score": 127316.40678475602 }, { "content": "pub fn route_impl(input: DeriveInput) -> syn::Result<TokenStream> {\n\n let mut quoted = TokenStream::new();\n\n let mut err_quoted = TokenStream::new();\n\n let mut has_error_handler = false;\n\n\n\n match &input.data {\n\n syn::Data::Enum(de) => {\n\n let ty_name = &input.ident;\n\n\n\n for variant in &de.variants {\n\n let variant_id = &variant.ident;\n\n\n\n for attr in &variant.attrs {\n\n let attr_name = match attr.path.get_ident() {\n\n Some(ident) => ident.to_string(),\n\n None => continue,\n\n };\n\n\n\n match attr_name.as_str() {\n\n \"to\" => {\n", "file_path": "packages/sycamore-router-macro/src/route.rs", "rank": 53, "score": 119777.08855315743 }, { "content": "/// Trait that is implemented by components. Should not be implemented manually. 
Use the\n\n/// [`component`](sycamore_macro::component) macro instead.\n\npub trait Component<G: GenericNode> {\n\n /// The name of the component (for use in debug mode).\n\n const NAME: &'static str = \"UnnamedComponent\";\n\n}\n", "file_path": "packages/sycamore/src/component.rs", "rank": 54, "score": 117896.08698127526 }, { "content": "fn insert_expression<G: GenericNode>(\n\n parent: &G,\n\n value: Template<G>,\n\n mut current: Option<Template<G>>,\n\n marker: Option<&G>,\n\n unwrap_fragment: bool,\n\n) {\n\n while let Some(Template {\n\n inner: TemplateType::Lazy(f),\n\n }) = current\n\n {\n\n current = Some(f.borrow_mut()());\n\n }\n\n\n\n match value.inner {\n\n TemplateType::Node(node) => {\n\n if let Some(current) = current {\n\n clean_children(parent, current.flatten(), marker, Some(&node));\n\n } else {\n\n parent.insert_child_before(&node, marker);\n", "file_path": "packages/sycamore/src/generic_node/render.rs", "rank": 55, "score": 109437.46275509917 }, { "content": "/// Navigates to the specified `url`. The url should have the same origin as the app.\n\n///\n\n/// This is useful for imperatively navigating to an url when using an anchor tag (`<a>`) is not\n\n/// possible/suitable (e.g. 
when submitting a form).\n\n///\n\n/// # Panics\n\n/// This function will `panic!()` if a [`BrowserRouter`] has not yet been created.\n\npub fn navigate(url: &str) {\n\n PATHNAME.with(|pathname| {\n\n assert!(\n\n pathname.borrow().is_some(),\n\n \"navigate can only be used with a BrowserRouter\"\n\n );\n\n\n\n let pathname = pathname.borrow().clone().unwrap();\n\n pathname.set(url.to_string());\n\n\n\n // Update History API.\n\n let history = web_sys::window().unwrap().history().unwrap();\n\n history\n\n .push_state_with_url(&JsValue::UNDEFINED, \"\", Some(pathname.get().as_str()))\n\n .unwrap();\n\n });\n\n}\n", "file_path": "packages/sycamore-router/src/router.rs", "rank": 56, "score": 107554.76361561668 }, { "content": "/// Runs a callback in a `requestAnimationFrame` loop until the `callback` returns `false`.\n\npub fn loop_raf(task: Task) {\n\n TASKS.with(|tasks| {\n\n if tasks.borrow().is_empty() {\n\n run_tasks();\n\n }\n\n\n\n tasks.borrow_mut().insert(task);\n\n });\n\n}\n", "file_path": "packages/sycamore/src/utils.rs", "rank": 57, "score": 107550.86693552093 }, { "content": "pub fn quad_out(t: f32) -> f32 {\n\n -t * (t - 2.0)\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 58, "score": 106717.04454253904 }, { "content": "pub fn circ_out(t: f32) -> f32 {\n\n f32::sqrt(1.0 - f32::powi(t - 1.0, 2).powi(2))\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 59, "score": 106717.04454253904 }, { "content": "pub fn sine_in(t: f32) -> f32 {\n\n f32::cos(1.0 - (t * PI / 2.0))\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 60, "score": 106717.04454253904 }, { "content": "pub fn cubic_in(t: f32) -> f32 {\n\n t * t * t\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 61, "score": 106717.04454253904 }, { "content": "pub fn circ_in(t: f32) -> f32 {\n\n 1.0 - f32::sqrt(1.0 - f32::powi(t, 2))\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 62, "score": 
106717.04454253904 }, { "content": "pub fn quint_in(t: f32) -> f32 {\n\n t * t * t * t * t\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 63, "score": 106717.04454253904 }, { "content": "pub fn expo_in(t: f32) -> f32 {\n\n if t.abs() <= f32::EPSILON {\n\n 0.0\n\n } else {\n\n EXP_BASE.powf(10. * t - 10.0)\n\n }\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 64, "score": 106717.04454253904 }, { "content": "pub fn bounce_out(t: f32) -> f32 {\n\n // TODO: Refactor? Code seems like a repetition.\n\n // Further, it is unclear why the numbers here are\n\n // picked.\n\n if t < 1.0 / BOUNCE_GRAVITY {\n\n BOUNCE_AMPLITUDE * t * t\n\n } else if t < 2.0 / BOUNCE_GRAVITY {\n\n let t = t - 1.5 / BOUNCE_GRAVITY;\n\n BOUNCE_AMPLITUDE * t * t + 0.75\n\n } else if t < 2.5 / BOUNCE_GRAVITY {\n\n let t = t - 2.25 / BOUNCE_GRAVITY;\n\n BOUNCE_AMPLITUDE * t * t + 0.9375\n\n } else {\n\n let t = t - 2.625 / BOUNCE_GRAVITY;\n\n BOUNCE_AMPLITUDE * t * t + 0.984375\n\n }\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 65, "score": 106717.04454253904 }, { "content": "pub fn quad_in(t: f32) -> f32 {\n\n t * t\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 66, "score": 106717.04454253904 }, { "content": "pub fn linear(t: f32) -> f32 {\n\n t\n\n}\n\n\n\n// Quadratic\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 67, "score": 106717.04454253904 }, { "content": "pub fn cubic_out(t: f32) -> f32 {\n\n let f = t - 1.0;\n\n f * f * f + 1.0\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 68, "score": 106717.04454253904 }, { "content": "pub fn quart_out(t: f32) -> f32 {\n\n let f = t - 1.0;\n\n f * f * f * (1.0 - t) + 1.0\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 69, "score": 106717.04454253904 }, { "content": "pub fn quart_in(t: f32) -> f32 {\n\n t * t * t * t\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 70, "score": 
106717.04454253904 }, { "content": "pub fn expo_out(t: f32) -> f32 {\n\n if (t - 1.0).abs() <= f32::EPSILON {\n\n 0.0\n\n } else {\n\n 1.0 - EXP_BASE.powf(-10.0 * t)\n\n }\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 71, "score": 106717.04454253904 }, { "content": "pub fn quint_out(t: f32) -> f32 {\n\n let f = t - 1.0;\n\n f * f * f * f * f + 1.0\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 72, "score": 106717.04454253904 }, { "content": "pub fn sine_out(t: f32) -> f32 {\n\n f32::sin(t * PI / 2.0)\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 73, "score": 106717.04454253904 }, { "content": "pub fn bounce_in(t: f32) -> f32 {\n\n 1.0 - bounce_out(1.0 - t)\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 74, "score": 106717.04454253904 }, { "content": "/// Function that maps a `Vec` to another `Vec` via a map function. The mapped `Vec` is lazy\n\n/// computed, meaning that it's value will only be updated when requested. Modifications to the\n\n/// input `Vec` are diffed by index to prevent recomputing values that have not changed.\n\n///\n\n/// Generally, it is preferred to use [`map_keyed`] instead when a key function is available.\n\n///\n\n/// This function is the underlying utility behind [`Indexed`](crate::flow::Indexed).\n\n///\n\n/// # Params\n\n/// * `list` - The list to be mapped. 
The list must be a [`StateHandle`] (obtained from a\n\n/// [`Signal`]) and therefore reactive.\n\n/// * `map_fn` - A closure that maps from the input type to the output type.\n\npub fn map_indexed<T, U>(\n\n list: StateHandle<Vec<T>>,\n\n map_fn: impl Fn(&T) -> U + 'static,\n\n) -> impl FnMut() -> Vec<U>\n\nwhere\n\n T: PartialEq + Clone,\n\n U: Clone + 'static,\n\n{\n\n // Previous state used for diffing.\n\n let mut items = Rc::new(Vec::new());\n\n let mapped = Rc::new(RefCell::new(Vec::new()));\n\n let mut scopes = Vec::new();\n\n\n\n move || {\n\n let new_items = list.get(); // Subscribe to list.\n\n untrack(|| {\n\n if new_items.is_empty() {\n\n // Fast path for removing all items.\n\n drop(mem::take(&mut scopes));\n\n items = Rc::new(Vec::new());\n", "file_path": "packages/sycamore/src/rx/iter.rs", "rank": 75, "score": 105896.45197615963 }, { "content": "pub fn quart_inout(t: f32) -> f32 {\n\n if t < 0.5 {\n\n 8.0 * t * t * t * t\n\n } else {\n\n let f = t - 1.0;\n\n -8.0 * f * f * f * f + 1.0\n\n }\n\n}\n\n\n\n// Quintic\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 76, "score": 104971.30465679399 }, { "content": "pub fn quint_inout(t: f32) -> f32 {\n\n if t < 0.5 {\n\n 16.0 * t * t * t * t * t\n\n } else {\n\n let f = (2.0 * t) - 2.0;\n\n 0.5 * f * f * f * f * f + 1.0\n\n }\n\n}\n\n\n\n// Circular\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 77, "score": 104971.30465679399 }, { "content": "pub fn circ_inout(t: f32) -> f32 {\n\n if t < 0.5 {\n\n (1.0 - f32::sqrt(1.0 - f32::powi(2.0 * t, 2))) / 2.0\n\n } else {\n\n (f32::sqrt(1.0 - f32::powi(-2.0 * t + 2.0, 2)) + 1.0) / 2.0\n\n }\n\n}\n\n\n\n// Exponential\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 78, "score": 104971.30465679399 }, { "content": "pub fn expo_inout(t: f32) -> f32 {\n\n if t.abs() <= f32::EPSILON {\n\n 0.0\n\n } else if (t - 1.0) <= f32::EPSILON {\n\n 1.0\n\n } else if t <= 0.5 {\n\n f32::powf(EXP_BASE, 20.0 * t - 10.0) / 2.0\n\n } else 
{\n\n 1.0 + f32::powf(EXP_BASE, -20.0 * t + 10.0) / -2.0\n\n }\n\n}\n\n\n\n// Sine\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 79, "score": 104971.30465679399 }, { "content": "pub fn sine_inout(t: f32) -> f32 {\n\n -(f32::cos(PI * t) - 1.0) / 2.0\n\n}\n\n\n\n// Bounce\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 80, "score": 104971.30465679399 }, { "content": "pub fn bounce_inout(t: f32) -> f32 {\n\n if t < 0.5 {\n\n (1.0 - bounce_out(1.0 - 2.0 * t)) / 2.0\n\n } else {\n\n (1.0 + bounce_out(-1.0 + 2.0 * t)) / 2.0\n\n }\n\n}\n\n\n\n// TODO: add more easing functions\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 81, "score": 104971.30465679399 }, { "content": "pub fn cubic_inout(t: f32) -> f32 {\n\n if t < 0.5 {\n\n 4.0 * t * t * t\n\n } else {\n\n let f = 2.0 * t - 2.0;\n\n 0.5 * f * f * f + 1.0\n\n }\n\n}\n\n\n\n// Quartic\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 82, "score": 104971.30465679399 }, { "content": "pub fn quad_inout(t: f32) -> f32 {\n\n if t < 0.5 {\n\n 2.0 * t * t\n\n } else {\n\n -2.0 * t * t + 4.0 * t - 1.0\n\n }\n\n}\n\n\n\n// Cubic\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 83, "score": 104971.30465679399 }, { "content": "/// Function that maps a `Vec` to another `Vec` via a map function. The mapped `Vec` is lazy\n\n/// computed, meaning that it's value will only be updated when requested. Modifications to the\n\n/// input `Vec` are diffed using keys to prevent recomputing values that have not changed.\n\n///\n\n/// This function is the underlying utility behind [`Keyed`](crate::flow::Keyed).\n\n///\n\n/// # Params\n\n/// * `list` - The list to be mapped. 
The list must be a [`StateHandle`] (obtained from a\n\n/// [`Signal`]) and therefore reactive.\n\n/// * `map_fn` - A closure that maps from the input type to the output type.\n\n/// * `key_fn` - A closure that returns an _unique_ key to each entry.\n\n///\n\n/// _Credits: Based on TypeScript implementation in <https://github.com/solidjs/solid>_\n\npub fn map_keyed<T, K, U>(\n\n list: StateHandle<Vec<T>>,\n\n map_fn: impl Fn(&T) -> U + 'static,\n\n key_fn: impl Fn(&T) -> K + 'static,\n\n) -> impl FnMut() -> Vec<U>\n\nwhere\n\n T: Eq + Clone,\n\n K: Eq + Hash,\n\n U: Clone + 'static,\n\n{\n\n // Previous state used for diffing.\n\n let mut items = Rc::new(Vec::new());\n\n let mapped = Rc::new(RefCell::new(Vec::new()));\n\n let mut scopes: Vec<Option<Rc<ReactiveScope>>> = Vec::new();\n\n\n\n move || {\n\n let new_items = list.get(); // Subscribe to list.\n\n untrack(|| {\n\n if new_items.is_empty() {\n\n // Fast path for removing all items.\n", "file_path": "packages/sycamore/src/rx/iter.rs", "rank": 84, "score": 101740.2334430266 }, { "content": "#[derive(Debug, Clone)]\n\nstruct SsrNodeInner {\n\n ty: Rc<SsrNodeType>,\n\n /// No parent if `Weak::upgrade` returns `None`.\n\n parent: RefCell<Weak<SsrNodeInner>>,\n\n}\n\n\n\n/// Rendering backend for Server Side Rendering, aka. 
SSR.\n\n///\n\n/// _This API requires the following crate features to be activated: `ssr`_\n\n#[derive(Debug, Clone)]\n\npub struct SsrNode(Rc<SsrNodeInner>);\n\n\n\nimpl PartialEq for SsrNode {\n\n fn eq(&self, other: &Self) -> bool {\n\n Rc::ptr_eq(&self.0.ty, &other.0.ty)\n\n }\n\n}\n\n\n\nimpl Eq for SsrNode {}\n\n\n", "file_path": "packages/sycamore/src/generic_node/ssr_node.rs", "rank": 85, "score": 100830.43254721329 }, { "content": "#[proc_macro_derive(Route, attributes(to, not_found))]\n\npub fn route(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n\n\n route::route_impl(input)\n\n .unwrap_or_else(|err| err.to_compile_error())\n\n .into()\n\n}\n", "file_path": "packages/sycamore-router-macro/src/lib.rs", "rank": 86, "score": 98801.2832764303 }, { "content": "/// Abstraction over a rendering backend.\n\n///\n\n/// You would probably use this trait as a trait bound when you want to accept any rendering\n\n/// backend. For example, components are often generic over [`GenericNode`] to be able to render to\n\n/// different backends.\n\n///\n\n/// Note that components are **NOT** represented by [`GenericNode`]. Instead, components are\n\n/// _disappearing_, meaning that they are simply functions that generate [`GenericNode`]s inside a\n\n/// new reactive context. This means that there is no overhead whatsoever when using components.\n\n///\n\n/// Sycamore ships with 2 rendering backends out of the box:\n\n/// * [`DomNode`] - Rendering in the browser (to real DOM nodes).\n\n/// * [`SsrNode`] - Render to a static string (often on the server side for Server Side Rendering,\n\n/// aka. 
SSR).\n\n///\n\n/// To implement your own rendering backend, you will need to create a new struct which implements\n\n/// [`GenericNode`].\n\npub trait GenericNode: fmt::Debug + Clone + PartialEq + Eq + Hash + 'static {\n\n /// Create a new element node.\n\n fn element(tag: &str) -> Self;\n\n\n\n /// Create a new text node.\n\n fn text_node(text: &str) -> Self;\n\n\n\n /// Create a marker (dummy) node. For [`DomNode`], this is implemented by creating an empty\n\n /// comment node. This is used, for example, in [`Keyed`](crate::flow::Keyed) and\n\n /// [`Indexed`](crate::flow::Indexed) for scenarios where you want to push a new item to the\n\n /// end of the list. If the list is empty, a dummy node is needed to store the position of\n\n /// the component.\n\n fn marker() -> Self;\n\n\n\n /// Sets an attribute on a node.\n\n fn set_attribute(&self, name: &str, value: &str);\n\n\n\n /// Sets a property on a node.\n\n fn set_property(&self, name: &str, value: &JsValue);\n\n\n", "file_path": "packages/sycamore/src/generic_node.rs", "rank": 87, "score": 95991.18569057784 }, { "content": "pub fn route(i: &str) -> IResult<&str, RoutePathAst> {\n\n map(separated_list0(tag(\"/\"), segment), |segments| {\n\n let segments = segments\n\n .into_iter()\n\n .filter(|x| !matches!(x, SegmentAst::Param(\"\")))\n\n .collect();\n\n RoutePathAst { segments }\n\n })(i)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use expect_test::{expect, Expect};\n\n\n\n use super::*;\n\n\n\n fn check(input: &str, expect: Expect) {\n\n let actual = format!(\"{:#?}\", route(input).unwrap());\n\n expect.assert_eq(&actual);\n\n }\n", "file_path": "packages/sycamore-router-macro/src/parser.rs", "rank": 88, "score": 93134.45989533982 }, { "content": "/// Creates a memoized value from some signals. 
Also know as \"derived stores\".\n\n///\n\n/// # Example\n\n/// ```\n\n/// use sycamore::prelude::*;\n\n///\n\n/// let state = Signal::new(0);\n\n///\n\n/// let double = create_memo(cloned!((state) => move || *state.get() * 2));\n\n/// assert_eq!(*double.get(), 0);\n\n///\n\n/// state.set(1);\n\n/// assert_eq!(*double.get(), 2);\n\n/// ```\n\npub fn create_memo<F, Out>(derived: F) -> StateHandle<Out>\n\nwhere\n\n F: FnMut() -> Out + 'static,\n\n Out: 'static,\n\n{\n\n create_selector_with(derived, |_, _| false)\n\n}\n\n\n", "file_path": "packages/sycamore/src/rx/effect.rs", "rank": 89, "score": 92697.15568007581 }, { "content": "/// Creates a memoized value from some signals. Also know as \"derived stores\".\n\n/// Unlike [`create_memo`], this function will not notify dependents of a change if the output is\n\n/// the same. That is why the output of the function must implement [`PartialEq`].\n\n///\n\n/// To specify a custom comparison function, use [`create_selector_with`].\n\npub fn create_selector<F, Out>(derived: F) -> StateHandle<Out>\n\nwhere\n\n F: FnMut() -> Out + 'static,\n\n Out: PartialEq + 'static,\n\n{\n\n create_selector_with(derived, PartialEq::eq)\n\n}\n\n\n", "file_path": "packages/sycamore/src/rx/effect.rs", "rank": 90, "score": 92693.5726052957 }, { "content": "#[proc_macro_attribute]\n\npub fn component(attr: TokenStream, component: TokenStream) -> TokenStream {\n\n let attr = parse_macro_input!(attr as component::ComponentFunctionName);\n\n let component = parse_macro_input!(component as component::ComponentFunction);\n\n\n\n component::component_impl(attr, component)\n\n .unwrap_or_else(|err| err.to_compile_error())\n\n .into()\n\n}\n", "file_path": "packages/sycamore-macro/src/lib.rs", "rank": 91, "score": 91828.26844274049 }, { "content": "#[test]\n\nfn ui() {\n\n let t = trybuild::TestCases::new();\n\n t.compile_fail(\"tests/template/*-fail.rs\");\n\n\n\n t.pass(\"tests/template/*-pass.rs\");\n\n}\n", "file_path": 
"packages/sycamore-macro/tests/template_macro.rs", "rank": 92, "score": 90879.47851329591 }, { "content": "#[wasm_bindgen_test]\n\nfn template_interpolation() {\n\n let text = template! { \"Hello Sycamore!\" };\n\n let node = template! {\n\n p {\n\n (text)\n\n }\n\n };\n\n\n\n render_to(|| node, &test_container());\n\n\n\n assert_eq!(\n\n document()\n\n .query_selector(\"p\")\n\n .unwrap()\n\n .unwrap()\n\n .text_content()\n\n .unwrap(),\n\n \"Hello Sycamore!\"\n\n );\n\n}\n\n\n", "file_path": "packages/sycamore/tests/web/main.rs", "rank": 93, "score": 90879.47851329591 }, { "content": "#[wasm_bindgen_test]\n\nfn fragment_template() {\n\n let count = Signal::new(vec![1, 2]);\n\n\n\n let node = cloned!((count) => template! {\n\n div {\n\n Keyed(KeyedProps {\n\n iterable: count.handle(),\n\n template: |item| template! {\n\n span { \"The value is: \" }\n\n strong { (item) }\n\n },\n\n key: |item| *item,\n\n })\n\n }\n\n });\n\n\n\n render_to(|| node, &test_container());\n\n\n\n let p = document().query_selector(\"div\").unwrap().unwrap();\n\n\n", "file_path": "packages/sycamore/tests/web/keyed.rs", "rank": 94, "score": 90879.47851329591 }, { "content": "#[test]\n\nfn self_closing_tag() {\n\n let node = template! 
{\n\n div {\n\n input\n\n input(value=\"a\")\n\n }\n\n };\n\n\n\n assert_eq!(\n\n render_to_string(|| node),\n\n \"<div><input /><input value=\\\"a\\\" /></div>\"\n\n )\n\n}\n", "file_path": "packages/sycamore/tests/ssr/main.rs", "rank": 95, "score": 90250.89515336996 }, { "content": "fn main() {}\n", "file_path": "packages/sycamore-macro/tests/template/element-pass.rs", "rank": 96, "score": 89047.68641451807 }, { "content": "fn main() {}\n", "file_path": "packages/sycamore-macro/tests/template/component-fail.rs", "rank": 97, "score": 89047.68641451807 }, { "content": "fn main() {}\n", "file_path": "packages/sycamore-macro/tests/template/component-pass.rs", "rank": 98, "score": 89047.68641451807 }, { "content": "fn main() {}\n", "file_path": "packages/sycamore-macro/tests/template/root-pass.rs", "rank": 99, "score": 89047.68641451807 } ]
Rust
libuser/src/crt0/relocation.rs
tiliarou/KFS
48e314360c0a149be280811be56643c954d3793c
#[allow(clippy::missing_docs_in_private_items)] mod module_header { global_asm!(r#" .section .rodata.mod0 .global module_header module_header: .ascii "MOD0" .int _DYNAMIC - module_header .int __bss_start__ - module_header .int __bss_end__ - module_header .int __eh_frame_hdr_start__ - module_header .int __eh_frame_hdr_end__ - module_header .int 0 // TODO: runtime-generated module object offset for rtld "#); } #[repr(C)] #[derive(Debug)] pub struct ModuleHeader { pub magic: u32, pub dynamic_off: u32, pub bss_start_off: u32, pub bss_end_off: u32, pub unwind_start_off: u32, pub unwind_end_off: u32, pub module_object_off: u32 } impl ModuleHeader { pub const MAGIC: u32 = 0x30444F4D; } extern "C" { pub static module_header: ModuleHeader; } #[repr(C)] #[derive(Debug)] struct ElfDyn { tag: isize, val: usize, } const DT_NULL: isize = 0; const DT_RELA: isize = 7; const DT_RELASZ: isize = 8; const DT_RELAENT: isize = 9; const DT_RELACOUNT: isize = 0x6ffffff9; const DT_REL: isize = 17; const DT_RELSZ: isize = 18; const DT_RELENT: isize = 19; const DT_RELCOUNT: isize = 0x6ffffffa; #[repr(C)] struct ElfRel { offset: usize, info: usize } #[repr(C)] struct ElfRela { offset: usize, info: usize, addend: isize } const R_386_RELATIVE: usize = 8; #[cfg(target_os = "sunrise")] #[no_mangle] #[allow(clippy::cast_ptr_alignment)] pub unsafe extern fn relocate_self(aslr_base: *mut u8, module_headr: *const ModuleHeader) -> u32 { let module_header_address = module_headr as *const u8; let module_headr = &(*module_headr); if module_headr.magic != ModuleHeader::MAGIC { return 1; } let mut dynamic = module_header_address.add(module_headr.dynamic_off as usize) as *const ElfDyn; let mut rela_offset = None; let mut rela_entry_size = 0; let mut rela_count = 0; let mut rel_offset = None; let mut rel_entry_size = 0; let mut rel_count = 0; while (*dynamic).tag != DT_NULL { match (*dynamic).tag { DT_RELA => { rela_offset = Some((*dynamic).val); }, DT_RELAENT => { rela_entry_size = (*dynamic).val; }, DT_REL 
=> { rel_offset = Some((*dynamic).val); }, DT_RELENT => { rel_entry_size = (*dynamic).val; }, DT_RELACOUNT => { rela_count = (*dynamic).val; }, DT_RELCOUNT => { rel_count = (*dynamic).val; }, _ => {} } dynamic = dynamic.offset(1); } if let Some(rela_offset) = rela_offset { if rela_entry_size != core::mem::size_of::<ElfRela>() { return 2; } let rela_base = (aslr_base.add(rela_offset)) as *mut ElfRela; for i in 0..rela_count { let rela = rela_base.add(i); let reloc_type = (*rela).info & 0xff; if let R_386_RELATIVE = reloc_type { *(aslr_base.add((*rela).offset) as *mut *mut ()) = aslr_base.offset((*rela).addend) as _; } else { return 4; } } } if let Some(rel_offset) = rel_offset { if rel_entry_size != core::mem::size_of::<ElfRel>() { return 3; } let rel_base = (aslr_base.add(rel_offset)) as *mut ElfRel; for i in 0..rel_count { let rel = rel_base.add(i); let reloc_type = (*rel).info & 0xff; if let R_386_RELATIVE = reloc_type { let ptr = aslr_base.add((*rel).offset) as *mut usize; *ptr += aslr_base as usize; } else { return 4; } } } 0 }
#[allow(clippy::missing_docs_in_private_items)] mod module_header { global_asm!(r#" .section .rodata.mod0 .global module_header module_header: .ascii "MOD0" .int _DYNAMIC - module_header .int __bss_start__ - module_header .int __bss_end__ - module_header .int __eh_frame_hdr_start__ - module_header .int __eh_frame_hdr_end__ - module_header .int 0 // TODO: runtime-generated module object offset for rtld "#); } #[repr(C)] #[derive(Debug)] pub struct ModuleHeader { pub magic: u32, pub dynamic_off: u32, pub bss_start_off: u32, pub bss_end_off: u32, pub unwind_start_off: u32, pub unwind_end_off: u32, pub module_object_off: u32 } impl ModuleHeader { pub const MAGIC: u32 = 0x30444F4D; } extern "C" { pub static module_header: ModuleHeader; } #[repr(C)] #[derive(Debug)] struct ElfDyn { tag: isize, val: usize, } const DT_NULL: isize = 0; const DT_RELA: isize = 7; const DT_RELASZ: isize = 8; const DT_RELAENT: isize = 9; const DT_RELACOUNT: isize = 0x6ffffff9; const DT_REL: isize = 17; const DT_RELSZ: isize = 18; const DT_RELENT: isize = 19; const DT_RELCOUNT: isize = 0x6ffffffa; #[repr(C)] struct ElfRel { offset: usize, info: usize } #[repr(C)] struct ElfRela { offset: usize, info: usize, addend: isize } const R_386_RELATIVE: usize = 8; #[cfg(target_os = "sunrise")] #[no_mangle] #[allow(clippy::cast_ptr_alignment)]
pub unsafe extern fn relocate_self(aslr_base: *mut u8, module_headr: *const ModuleHeader) -> u32 { let module_header_address = module_headr as *const u8; let module_headr = &(*module_headr); if module_headr.magic != ModuleHeader::MAGIC { return 1; } let mut dynamic = module_header_address.add(module_headr.dynamic_off as usize) as *const ElfDyn; let mut rela_offset = None; let mut rela_entry_size = 0; let mut rela_count = 0; let mut rel_offset = None; let mut rel_entry_size = 0; let mut rel_count = 0; while (*dynamic).tag != DT_NULL { match (*dynamic).tag { DT_RELA => { rela_offset = Some((*dynamic).val); }, DT_RELAENT => { rela_entry_size = (*dynamic).val; }, DT_REL => { rel_offset = Some((*dynamic).val); }, DT_RELENT => { rel_entry_size = (*dynamic).val; }, DT_RELACOUNT => { rela_count = (*dynamic).val; }, DT_RELCOUNT => { rel_count = (*dynamic).val; }, _ => {} } dynamic = dynamic.offset(1); } if let Some(rela_offset) = rela_offset { if rela_entry_size != core::mem::size_of::<ElfRela>() { return 2; } let rela_base = (aslr_base.add(rela_offset)) as *mut ElfRela; for i in 0..rela_count { let rela = rela_base.add(i); let reloc_type = (*rela).info & 0xff; if let R_386_RELATIVE = reloc_type { *(aslr_base.add((*rela).offset) as *mut *mut ()) = aslr_base.offset((*rela).addend) as _; } else { return 4; } } } if let Some(rel_offset) = rel_offset { if rel_entry_size != core::mem::size_of::<ElfRel>() { return 3; } let rel_base = (aslr_base.add(rel_offset)) as *mut ElfRel; for i in 0..rel_count { let rel = rel_base.add(i); let reloc_type = (*rel).info & 0xff; if let R_386_RELATIVE = reloc_type { let ptr = aslr_base.add((*rel).offset) as *mut usize; *ptr += aslr_base as usize; } else { return 4; } } } 0 }
function_block-full_function
[]
Rust
crates/svm-runtime/src/runtime/macros.rs
Jorropo/svm
d4e77133a428e379e98ae19e596c897421576a63
#[macro_export] macro_rules! include_svm_runtime { ($pages_storage_gen: expr, $page_cache_ctor: expr, $PC: path, $ENV: path, $env_gen: expr) => { mod runtime { use log::{debug, error, info}; use $crate::runtime::{ContractExecError, Receipt}; svm_runtime::include_svm_vmcalls!($PC); use svm_common::{Address, State}; use svm_contract::{ env::ContractEnv, error::{ContractBuildError, TransactionBuildError}, traits::ContractStore, transaction::Transaction, wasm::Contract, }; #[inline(always)] pub fn contract_build(bytes: &[u8]) -> Result<Contract, ContractBuildError> { debug!("runtime `contract_build`"); <$ENV as ContractEnv>::build_contract(bytes) } #[inline(always)] pub fn contract_deploy_validate(contract: &Contract) -> Result<(), ContractBuildError> { Ok(()) } #[inline(always)] pub fn contract_compute_address(contract: &Contract) -> Address { debug!("runtime `contract_compute_address`"); <$ENV as ContractEnv>::compute_address(contract) } #[inline(always)] pub fn contract_store(contract: &Contract, addr: &Address) { debug!("runtime `contract_store`"); let mut env = $env_gen(); env.store_contract(contract, addr); } #[inline(always)] pub fn transaction_build(bytes: &[u8]) -> Result<Transaction, TransactionBuildError> { debug!("runtime `transaction_build`"); <$ENV as ContractEnv>::build_transaction(bytes) } pub fn contract_exec( tx: Transaction, import_object: &wasmer_runtime::ImportObject, ) -> Receipt { debug!("runtime `contract_exec`"); let receipt = match do_contract_exec(&tx, import_object) { Err(e) => Receipt { success: false, error: Some(e), tx, results: Vec::new(), new_state: None, }, Ok((state, results)) => Receipt { success: true, error: None, tx, results, new_state: Some(state), }, }; debug!("receipt: {:?}", receipt); receipt } pub fn import_object_create( addr: Address, state: State, node_data: *const std::ffi::c_void, opts: $crate::opts::Opts, ) -> wasmer_runtime::ImportObject { use svm_runtime::ctx_data_wrapper::SvmCtxDataWrapper; use wasmer_runtime::{func, 
ImportObject}; debug!( "runtime `import_object_create` address={:?}, state={:?}, opts={:?}", addr, state, opts ); let wrapped_pages_storage_gen = move || $pages_storage_gen(addr.clone(), state.clone(), opts.max_pages); let wrapped_data = SvmCtxDataWrapper::new(node_data); let state_gen = svm_runtime::lazy_create_svm_state_gen!( wrapped_data, wrapped_pages_storage_gen, $page_cache_ctor, $PC, opts ); let mut import_object = ImportObject::new_with_data(state_gen); let mut ns = wasmer_runtime_core::import::Namespace::new(); ns.insert("mem_to_reg_copy", func!(vmcalls::mem_to_reg_copy)); ns.insert("reg_to_mem_copy", func!(vmcalls::reg_to_mem_copy)); ns.insert("storage_read_to_reg", func!(vmcalls::storage_read_to_reg)); ns.insert("storage_read_to_mem", func!(vmcalls::storage_read_to_mem)); ns.insert( "storage_write_from_mem", func!(vmcalls::storage_write_from_mem), ); ns.insert( "storage_write_from_reg", func!(vmcalls::storage_write_from_reg), ); ns.insert("reg_replace_byte", func!(vmcalls::reg_replace_byte)); ns.insert("reg_read_be_i64", func!(vmcalls::reg_read_be_i64)); ns.insert("reg_write_be_i64", func!(vmcalls::reg_write_be_i64)); import_object.register("svm", ns); import_object } #[inline(always)] fn do_contract_exec( tx: &Transaction, import_object: &wasmer_runtime::ImportObject, ) -> Result<(State, Vec<wasmer_runtime::Value>), ContractExecError> { let mut env = $env_gen(); let contract = contract_load(tx, &mut env)?; let module = contract_compile(&contract, &tx.contract)?; let mut instance = instantiate(&contract, &tx.contract, &module, import_object)?; let args = prepare_args_and_memory(tx, &mut instance); let func = get_exported_func(&instance, &tx.func_name)?; match func.call(&args) { Err(e) => Err(ContractExecError::ExecFailed), Ok(results) => { let storage = get_instance_svm_storage_mut(&mut instance); let state = storage.commit(); Ok((state, results)) } } } fn contract_load( tx: &Transaction, env: &mut $ENV, ) -> Result<Contract, ContractExecError> { 
info!("runtime `contract_load`"); let store = env.get_store(); match store.load(&tx.contract) { None => Err(ContractExecError::NotFound(tx.contract.clone())), Some(contract) => Ok(contract), } } fn contract_compile( contract: &Contract, addr: &Address, ) -> Result<wasmer_runtime::Module, ContractExecError> { info!("runtime `contract_compile` (addr={:?})", addr); let compile = svm_compiler::compile_program(&contract.wasm); match compile { Err(e) => { error!("wasmer module compilation failed (addr={:?})", addr); Err(ContractExecError::CompilationFailed(addr.clone())) } Ok(module) => { info!("wasmer module compile succeeded"); Ok(module) } } } fn instantiate( contract: &Contract, addr: &Address, module: &wasmer_runtime::Module, import_object: &wasmer_runtime::ImportObject, ) -> Result<wasmer_runtime::Instance, ContractExecError> { info!("runtime `instantiate` (wasmer module instantiate)"); let instantiate = module.instantiate(import_object); match instantiate { Err(e) => Err(ContractExecError::InstantiationFailed(addr.clone())), Ok(instance) => Ok(instance), } } fn get_exported_func<'a>( instance: &'a wasmer_runtime::Instance, func_name: &str, ) -> Result<wasmer_runtime::DynFunc<'a>, ContractExecError> { let func = instance.dyn_func(func_name); match func { Err(e) => { error!("exported function: `{}` not found", func_name); Err(ContractExecError::FuncNotFound(func_name.to_string())) } Ok(func) => { info!("found exported function `{}`", func_name); Ok(func) } } } fn prepare_args_and_memory( tx: &Transaction, instance: &mut wasmer_runtime::Instance, ) -> Vec<wasmer_runtime::Value> { use svm_contract::wasm::{WasmArgValue, WasmIntType}; use wasmer_runtime::Value; debug!("runtime `prepare_args_and_memory`"); let memory = instance.context_mut().memory(0); let mut mem_offset = 0; let mut wasmer_args = Vec::with_capacity(tx.func_args.len()); for arg in tx.func_args.iter() { let wasmer_arg = match arg { WasmArgValue::I32(v) => Value::I32(*v as i32), WasmArgValue::I64(v) => 
Value::I64(*v as i64), WasmArgValue::Fixed(ty, buf) => { let buf_mem_start = mem_offset; let view = memory.view(); for byte in buf.into_iter() { view[mem_offset].set(*byte); mem_offset += 1; } match ty { WasmIntType::I32 => Value::I32(buf_mem_start as i32), WasmIntType::I64 => Value::I64(buf_mem_start as i64), } } WasmArgValue::Slice(..) => unimplemented!(), }; wasmer_args.push(wasmer_arg); } debug!("wasmer args={:?}", wasmer_args); wasmer_args } #[inline(always)] fn get_instance_svm_storage_mut( instance: &mut wasmer_runtime::Instance, ) -> &mut svm_storage::PageSliceCache<$PC> { let wasmer_ctx: &mut wasmer_runtime::Ctx = instance.context_mut(); $crate::wasmer_data_storage!(wasmer_ctx.data, $PC) } } }; }
#[macro_export] macro_rules! include_svm_runtime { ($pages_storage_gen: expr, $page_cache_ctor: expr, $PC: path, $ENV: path, $env_gen: expr) => { mod runtime { use log::{debug, error, info}; use $crate::runtime::{ContractExecError, Receipt}; svm_runtime::include_svm_vmcalls!($PC); use svm_common::{Address, State}; use svm_contract::{ env::ContractEnv, error::{ContractBuildError, TransactionBuildError}, traits::ContractStore, transaction::Transaction, wasm::Contract, }; #[inline(always)] pub fn contract_build(bytes: &[u8]) -> Result<Contract, ContractBuildError> { debug!("runtime `contract_build`"); <$ENV as ContractEnv>::build_contract(bytes) } #[inline(always)] pub fn contract_deploy_validate(contract: &Contract) -> Result<(), ContractBuildError> { Ok(()) } #[inline(always)] pub fn contract_compute_address(contract: &Contract) -> Address { debug!("runtime `contract_compute_address`"); <$ENV as ContractEnv>::compute_address(contract) } #[inline(always)] pub fn contract_store(contract: &Contract, addr: &Address) { debug!("runtime `contract_store`"); let mut env = $env_gen(); env.store_contract(contract, addr); } #[inline(always)] pub fn transaction_build(bytes: &[u8]) -> Result<Transaction, TransactionBuildError> { debug!("runtime `transaction_build`"); <$ENV as ContractEnv>::build_transaction(bytes) } pub fn contract_exec( tx: Transaction, import_object: &wasmer_runtime::ImportObject, ) -> Receipt { debug!("runtime `contract_exec`"); let receipt = match do_contract_exec(&tx, import_object) { Err(e) => Receipt { success: false, error: Some(e), tx, results: Vec::new(), new_state: None, }, Ok((state, results)) => Receipt { success: true, error: None, tx, results, new_state: Some(state), }, }; debug!("receipt: {:?}", receipt); receipt } pub fn import_object_create( addr: Address, state: State, node_data: *const std::ffi::c_void, opts: $crate::opts::Opts, ) -> wasmer_runtime::ImportObject { use svm_runtime::ctx_data_wrapper::SvmCtxDataWrapper; use wasmer_runtime::{func, 
ImportObject}; debug!( "runtime `import_object_create` address={:?}, state={:?}, opts={:?}", addr, state, opts ); let wrapped_pages_storage_gen = move || $pages_storage_gen(addr.clone(), state.clone(), opts.max_pages); let wrapped_data = SvmCtxDataWrapper::new(node_data); let state_gen = svm_runtime::lazy_create_svm_state_gen!( wrapped_data, wrapped_pages_storage_gen, $page_cache_ctor, $PC, opts ); let mut import_object = ImportObject::new_with_data(state_gen); let mut ns = wasmer_runtime_core::import::Namespace::new(); ns.insert("mem_to_reg_copy", func!(vmcalls::mem_to_reg_copy)); ns.insert("reg_to_mem_copy", func!(vmcalls::reg_to_mem_copy)); ns.insert("storage_read_to_reg", func!(vmcalls::storage_read_to_reg)); ns.insert("storage_read_to_mem", func!(vmcalls::storage_read_to_mem)); ns.insert( "storage_write_from_mem", func!(vmcalls::storage_write_from_mem), ); ns.insert( "storage_write_from_reg", func!(vmcalls::storage_write_from_reg), ); ns.insert("reg_replace_byte", func!(vmcalls::reg_replace_byte)); ns.insert("reg_read_be_i64", func!(vmcalls::reg_read_be_i64)); ns.insert("reg_write_be_i64", func!(vmcalls::reg_write_be_i64)); import_object.register("svm", ns); import_object } #[inline(always
ontext_mut(); $crate::wasmer_data_storage!(wasmer_ctx.data, $PC) } } }; }
)] fn do_contract_exec( tx: &Transaction, import_object: &wasmer_runtime::ImportObject, ) -> Result<(State, Vec<wasmer_runtime::Value>), ContractExecError> { let mut env = $env_gen(); let contract = contract_load(tx, &mut env)?; let module = contract_compile(&contract, &tx.contract)?; let mut instance = instantiate(&contract, &tx.contract, &module, import_object)?; let args = prepare_args_and_memory(tx, &mut instance); let func = get_exported_func(&instance, &tx.func_name)?; match func.call(&args) { Err(e) => Err(ContractExecError::ExecFailed), Ok(results) => { let storage = get_instance_svm_storage_mut(&mut instance); let state = storage.commit(); Ok((state, results)) } } } fn contract_load( tx: &Transaction, env: &mut $ENV, ) -> Result<Contract, ContractExecError> { info!("runtime `contract_load`"); let store = env.get_store(); match store.load(&tx.contract) { None => Err(ContractExecError::NotFound(tx.contract.clone())), Some(contract) => Ok(contract), } } fn contract_compile( contract: &Contract, addr: &Address, ) -> Result<wasmer_runtime::Module, ContractExecError> { info!("runtime `contract_compile` (addr={:?})", addr); let compile = svm_compiler::compile_program(&contract.wasm); match compile { Err(e) => { error!("wasmer module compilation failed (addr={:?})", addr); Err(ContractExecError::CompilationFailed(addr.clone())) } Ok(module) => { info!("wasmer module compile succeeded"); Ok(module) } } } fn instantiate( contract: &Contract, addr: &Address, module: &wasmer_runtime::Module, import_object: &wasmer_runtime::ImportObject, ) -> Result<wasmer_runtime::Instance, ContractExecError> { info!("runtime `instantiate` (wasmer module instantiate)"); let instantiate = module.instantiate(import_object); match instantiate { Err(e) => Err(ContractExecError::InstantiationFailed(addr.clone())), Ok(instance) => Ok(instance), } } fn get_exported_func<'a>( instance: &'a wasmer_runtime::Instance, func_name: &str, ) -> Result<wasmer_runtime::DynFunc<'a>, ContractExecError> { 
let func = instance.dyn_func(func_name); match func { Err(e) => { error!("exported function: `{}` not found", func_name); Err(ContractExecError::FuncNotFound(func_name.to_string())) } Ok(func) => { info!("found exported function `{}`", func_name); Ok(func) } } } fn prepare_args_and_memory( tx: &Transaction, instance: &mut wasmer_runtime::Instance, ) -> Vec<wasmer_runtime::Value> { use svm_contract::wasm::{WasmArgValue, WasmIntType}; use wasmer_runtime::Value; debug!("runtime `prepare_args_and_memory`"); let memory = instance.context_mut().memory(0); let mut mem_offset = 0; let mut wasmer_args = Vec::with_capacity(tx.func_args.len()); for arg in tx.func_args.iter() { let wasmer_arg = match arg { WasmArgValue::I32(v) => Value::I32(*v as i32), WasmArgValue::I64(v) => Value::I64(*v as i64), WasmArgValue::Fixed(ty, buf) => { let buf_mem_start = mem_offset; let view = memory.view(); for byte in buf.into_iter() { view[mem_offset].set(*byte); mem_offset += 1; } match ty { WasmIntType::I32 => Value::I32(buf_mem_start as i32), WasmIntType::I64 => Value::I64(buf_mem_start as i64), } } WasmArgValue::Slice(..) => unimplemented!(), }; wasmer_args.push(wasmer_arg); } debug!("wasmer args={:?}", wasmer_args); wasmer_args } #[inline(always)] fn get_instance_svm_storage_mut( instance: &mut wasmer_runtime::Instance, ) -> &mut svm_storage::PageSliceCache<$PC> { let wasmer_ctx: &mut wasmer_runtime::Ctx = instance.c
random
[ { "content": "#[allow(dead_code)]\n\npub fn parse_transaction(bytes: &[u8]) -> Result<Transaction, TransactionBuildError> {\n\n let mut cursor = Cursor::new(bytes);\n\n\n\n parse_version(&mut cursor)?;\n\n\n\n let contract = parse_address(&mut cursor, Field::Contract)?;\n\n let sender = parse_address(&mut cursor, Field::Sender)?;\n\n let func_name = parse_func_name(&mut cursor)?;\n\n let func_args = parse_func_args(&mut cursor)?;\n\n\n\n let tx = Transaction {\n\n contract,\n\n sender,\n\n func_name,\n\n func_args,\n\n };\n\n\n\n Ok(tx)\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/exec/parse.rs", "rank": 0, "score": 281802.3822875633 }, { "content": "#[inline(always)]\n\nfn parse_author(cursor: &mut Cursor<&[u8]>) -> Result<Address, ContractBuildError> {\n\n parse_address(cursor, Field::Author)\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/deploy/parse.rs", "rank": 1, "score": 278679.137842729 }, { "content": "fn parse_address(cursor: &mut Cursor<&[u8]>, field: Field) -> Result<Address, ContractBuildError> {\n\n let mut addr = vec![0; Address::len()];\n\n\n\n let res = cursor.read_exact(&mut addr);\n\n ensure_enough_bytes!(res, field);\n\n\n\n debug!(\" parsed address (field={}) {:?}\", field, addr);\n\n\n\n Ok(Address::from(addr.as_ref()))\n\n}\n", "file_path": "crates/svm-contract/src/wire/deploy/parse.rs", "rank": 2, "score": 272090.7872613066 }, { "content": "fn parse_admins(cursor: &mut Cursor<&[u8]>) -> Result<Vec<Address>, ContractBuildError> {\n\n let res = cursor.read_u16::<BigEndian>();\n\n\n\n ensure_enough_bytes!(res, Field::AdminsCount);\n\n\n\n let admin_count = res.unwrap() as usize;\n\n if admin_count > 0 {\n\n return Err(ContractBuildError::AdminsNotSupportedYet);\n\n }\n\n\n\n // let mut admins = Vec::<Address>::with_capacity(admin_count);\n\n // for i in 0..admin_count {\n\n // let addr = parse_address(addr, Field::Admins);\n\n // admins.push(addr);\n\n // }\n\n\n\n Ok(Vec::new())\n\n}\n\n\n", "file_path": 
"crates/svm-contract/src/wire/deploy/parse.rs", "rank": 3, "score": 269408.720889502 }, { "content": "#[allow(dead_code)]\n\npub fn parse_contract(bytes: &[u8]) -> Result<Contract, ContractBuildError> {\n\n let mut cursor = Cursor::new(bytes);\n\n\n\n parse_version(&mut cursor)?;\n\n\n\n let name = parse_name(&mut cursor)?;\n\n let author = parse_author(&mut cursor)?;\n\n let _admins = parse_admins(&mut cursor)?;\n\n parse_deps(&mut cursor)?;\n\n let wasm = parse_code(&mut cursor)?;\n\n\n\n let contract = Contract { name, wasm, author };\n\n\n\n Ok(contract)\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/deploy/parse.rs", "rank": 4, "score": 268938.51121344394 }, { "content": "fn read_u8(cursor: &mut Cursor<&[u8]>, field: Field) -> Result<u8, TransactionBuildError> {\n\n let res = cursor.read_u8();\n\n\n\n ensure_enough_bytes!(res, field);\n\n\n\n Ok(res.unwrap())\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/exec/parse.rs", "rank": 5, "score": 263757.9467751492 }, { "content": "fn parse_version(cursor: &mut Cursor<&[u8]>) -> Result<u32, TransactionBuildError> {\n\n let res = cursor.read_u32::<BigEndian>();\n\n\n\n ensure_enough_bytes!(res, Field::Version);\n\n\n\n let version = res.unwrap();\n\n if version != 0 {\n\n return Err(TransactionBuildError::UnsupportedProtoVersion(version));\n\n }\n\n\n\n Ok(version)\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/exec/parse.rs", "rank": 6, "score": 255871.5784350668 }, { "content": "fn parse_func_name(cursor: &mut Cursor<&[u8]>) -> Result<String, TransactionBuildError> {\n\n let res = cursor.read_u8();\n\n\n\n ensure_enough_bytes!(res, Field::FuncName);\n\n\n\n let name_len = res.unwrap() as usize;\n\n if name_len == 0 {\n\n return Err(TransactionBuildError::EmptyFuncName);\n\n }\n\n\n\n let mut name_buf = vec![0; name_len];\n\n let res = cursor.read_exact(&mut name_buf);\n\n\n\n if res.is_err() {\n\n return Err(TransactionBuildError::NotEnoughBytes(Field::FuncName));\n\n }\n\n\n\n // TODO: 
make `String::from_utf8` work without raising\n\n let name = unsafe { String::from_utf8_unchecked(name_buf) };\n\n\n\n Ok(name)\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/exec/parse.rs", "rank": 7, "score": 251715.2141570198 }, { "content": "fn parse_code(cursor: &mut Cursor<&[u8]>) -> Result<Vec<u8>, ContractBuildError> {\n\n let res = cursor.read_u64::<BigEndian>();\n\n ensure_enough_bytes!(res, Field::CodeLength);\n\n\n\n let code_len = res.unwrap() as usize;\n\n let mut code = vec![0; code_len];\n\n\n\n let res = cursor.read_exact(&mut code);\n\n ensure_enough_bytes!(res, Field::Code);\n\n\n\n Ok(code)\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/deploy/parse.rs", "rank": 8, "score": 248686.1527535792 }, { "content": "fn parse_deps(cursor: &mut Cursor<&[u8]>) -> Result<(), ContractBuildError> {\n\n let res = cursor.read_u16::<BigEndian>();\n\n\n\n ensure_enough_bytes!(res, Field::DepsCount);\n\n\n\n let deps_count = res.unwrap() as usize;\n\n\n\n if deps_count > 0 {\n\n return Err(ContractBuildError::DepsNotSupportedYet);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/deploy/parse.rs", "rank": 9, "score": 246934.57094155168 }, { "content": "pub fn validate_contract(contract: &Contract) -> Result<(), ContractBuildError> {\n\n validate_author(contract)?;\n\n validate_admins(contract)?;\n\n validate_deps(contract)?;\n\n validate_wasm(contract)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/deploy/validate.rs", "rank": 10, "score": 246669.84530754568 }, { "content": "fn parse_func_arg(cursor: &mut Cursor<&[u8]>) -> Result<WasmArgValue, TransactionBuildError> {\n\n let arg_type = parse_func_arg_type(cursor)?;\n\n\n\n let arg_val = match arg_type {\n\n WasmArgType::I32 => {\n\n let arg_val = read_u32(cursor, Field::ArgValue)?;\n\n WasmArgValue::I32(arg_val)\n\n }\n\n WasmArgType::I64 => {\n\n let arg_val = read_u64(cursor, Field::ArgValue)?;\n\n WasmArgValue::I64(arg_val)\n\n }\n\n 
WasmArgType::Fixed => {\n\n let fixed_byte_length = read_u32(cursor, Field::ArgLength)?;\n\n let offset_int_type = parse_func_arg_int_type(cursor)?;\n\n\n\n let buf = read_buffer(cursor, fixed_byte_length, Field::ArgValue)?;\n\n\n\n WasmArgValue::Fixed(offset_int_type, buf)\n\n }\n\n WasmArgType::Slice => {\n\n // TODO: implement\n\n unimplemented!()\n\n }\n\n };\n\n\n\n Ok(arg_val)\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/exec/parse.rs", "rank": 11, "score": 243907.17109251604 }, { "content": "fn parse_func_arg_type(cursor: &mut Cursor<&[u8]>) -> Result<WasmArgType, TransactionBuildError> {\n\n let byte = read_u8(cursor, Field::ArgType)?;\n\n\n\n let arg_type = WasmArgType::try_from(byte);\n\n\n\n match arg_type {\n\n Ok(arg_type) => Ok(arg_type),\n\n Err(_) => Err(TransactionBuildError::InvalidArgType(byte)),\n\n }\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/exec/parse.rs", "rank": 12, "score": 240235.47039549577 }, { "content": "fn read_u64(cursor: &mut Cursor<&[u8]>, field: Field) -> Result<u64, TransactionBuildError> {\n\n let res = cursor.read_u64::<BigEndian>();\n\n\n\n ensure_enough_bytes!(res, field);\n\n\n\n Ok(res.unwrap())\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/exec/parse.rs", "rank": 13, "score": 239788.6793616159 }, { "content": "fn read_u32(cursor: &mut Cursor<&[u8]>, field: Field) -> Result<u32, TransactionBuildError> {\n\n let res = cursor.read_u32::<BigEndian>();\n\n\n\n ensure_enough_bytes!(res, field);\n\n\n\n Ok(res.unwrap())\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/exec/parse.rs", "rank": 14, "score": 239788.6793616159 }, { "content": "fn parse_version(cursor: &mut Cursor<&[u8]>) -> Result<u32, ContractBuildError> {\n\n let res = cursor.read_u32::<BigEndian>();\n\n\n\n ensure_enough_bytes!(res, Field::Version);\n\n\n\n let version = res.unwrap();\n\n if version != 0 {\n\n return Err(ContractBuildError::UnsupportedProtoVersion(version));\n\n }\n\n\n\n debug!(\" parsed raw contract 
version: {:?}\", version);\n\n\n\n Ok(version)\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/deploy/parse.rs", "rank": 15, "score": 238268.56306026355 }, { "content": "fn parse_name(cursor: &mut Cursor<&[u8]>) -> Result<String, ContractBuildError> {\n\n let res = cursor.read_u8();\n\n\n\n ensure_enough_bytes!(res, Field::NameLength);\n\n\n\n let name_len = res.unwrap() as usize;\n\n\n\n if name_len == 0 {\n\n return Err(ContractBuildError::EmptyName);\n\n }\n\n\n\n let mut name_buf = vec![0; name_len];\n\n let res = cursor.read_exact(&mut name_buf);\n\n\n\n if res.is_err() {\n\n return Err(ContractBuildError::NotEnoughBytes(Field::Name));\n\n }\n\n\n\n // TODO: make `String::from_utf8` work without raising\n\n let name = unsafe { String::from_utf8_unchecked(name_buf) };\n\n\n\n Ok(name)\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/deploy/parse.rs", "rank": 16, "score": 238268.56306026355 }, { "content": "fn parse_func_args(cursor: &mut Cursor<&[u8]>) -> Result<Vec<WasmArgValue>, TransactionBuildError> {\n\n let args_count = read_u8(cursor, Field::ArgsCount)?;\n\n\n\n let mut args = Vec::with_capacity(args_count as usize);\n\n\n\n for _ in 0..args_count {\n\n let arg = parse_func_arg(cursor)?;\n\n args.push(arg);\n\n }\n\n\n\n Ok(args)\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/exec/parse.rs", "rank": 17, "score": 235976.96641900306 }, { "content": "fn validate_account(_addr: &Address, _field: Field) -> Result<(), ContractBuildError> {\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/deploy/validate.rs", "rank": 18, "score": 234352.45661654585 }, { "content": "/// Deallocates the memory of the `addr`\n\nfn svm_address_destroy(addr: *const svm_address_t) {\n\n let addr: *mut svm_address_t = addr as _;\n\n unsafe {\n\n Box::from_raw(addr as *mut Address);\n\n }\n\n}\n\n\n\n#[allow(unused)]\n", "file_path": "crates/svm-runtime-c-api/src/c_types.rs", "rank": 19, "score": 218274.20445219608 }, { "content": "fn 
validate_author(contract: &Contract) -> Result<(), ContractBuildError> {\n\n validate_account(&contract.author, Field::Author)\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/deploy/validate.rs", "rank": 20, "score": 209321.2678733149 }, { "content": "fn validate_wasm(_contract: &Contract) -> Result<(), ContractBuildError> {\n\n Ok(())\n\n}\n", "file_path": "crates/svm-contract/src/wire/deploy/validate.rs", "rank": 21, "score": 200815.52276917757 }, { "content": "fn validate_deps(_contract: &Contract) -> Result<(), ContractBuildError> {\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/deploy/validate.rs", "rank": 22, "score": 200815.52276917757 }, { "content": "fn validate_admins(_contract: &Contract) -> Result<(), ContractBuildError> {\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/deploy/validate.rs", "rank": 23, "score": 200815.52276917757 }, { "content": "/// Deallocates the memory of the `receipt`\n\nfn svm_receipt_destroy(receipt: *const svm_receipt_t) {\n\n panic!();\n\n // let receipt: *mut svm_receipt_t = receipt as _;\n\n // unsafe {\n\n // Box::from_raw(receipt as *mut Receipt);\n\n // }\n\n}\n", "file_path": "crates/svm-runtime-c-api/src/c_types.rs", "rank": 24, "score": 192089.9099269128 }, { "content": "/// Deallocates the memory of the `receipt`\n\nfn svm_wasm_contract_destroy(contract: *const svm_contract_t) {\n\n panic!();\n\n // let contract: *mut svm_contract_t = contract as _;\n\n // unsafe {\n\n // Box::from_raw(contract as *mut WasmContract);\n\n // }\n\n}\n\n\n\n#[allow(unused)]\n", "file_path": "crates/svm-runtime-c-api/src/c_types.rs", "rank": 25, "score": 187891.47604722236 }, { "content": "#[test]\n\n#[ignore]\n\nfn contract_exec_invalid_state() {\n\n //\n\n}\n\n\n", "file_path": "crates/svm-runtime/tests/runtime.rs", "rank": 26, "score": 170618.64491904352 }, { "content": "#[test]\n\nfn contract_exec_valid_transaction() {\n\n // 1) deploying the contract\n\n let bytes = 
build_raw_contract!(\n\n 0, // protocol version\n\n \"Contract #1\", // contract name\n\n 0x10_20_30_40, // author address\n\n \"wasm/runtime-1.wast\" // file holding the wasm code\n\n );\n\n let contract = runtime::contract_build(&bytes).unwrap();\n\n let addr = runtime::contract_compute_address(&contract);\n\n runtime::contract_store(&contract, &addr);\n\n\n\n // 2) executing a transaction `reg_set_and_persist`\n\n // setting register `64:0` the value `1000`.\n\n // then, persisting it to storage (page=`0`, slice=`0`, offset=`0`)\n\n let bytes = build_raw_tx!(\n\n 0, // protocol version\n\n addr.clone(), // contract address\n\n 0x11_22_33_44, // sender address\n\n \"reg_set_and_persist\", // `func_name` to execute\n", "file_path": "crates/svm-runtime/tests/runtime.rs", "rank": 27, "score": 170601.92184427407 }, { "content": "#[must_use]\n\npub fn compile_program(wasm: &[u8]) -> CompileResult<Module> {\n\n let compiler = svm_compiler!();\n\n\n\n wasmer_runtime_core::compile_with(wasm, &compiler)\n\n}\n", "file_path": "crates/svm-compiler/src/compiler.rs", "rank": 28, "score": 167581.30501259034 }, { "content": "#[inline(always)]\n\npub fn u8_pair_add(a: u8, b: u8) -> (u8, u8) {\n\n let c = u16::from(a) + u16::from(b);\n\n\n\n let c0 = (c & 0xFF) as u8;\n\n let c1 = ((c >> 8) & 0xFF) as u8;\n\n\n\n (c1, c0)\n\n}\n\n\n\n/// Adds 3 unsigned bytes and returns also the carry.\n\n///\n\n/// u8_triple_add(10, 20, 30) -> returns (0, 60)\n\n/// u8_triple_add(255, 5, 5) -> returns (1, 9)\n", "file_path": "crates/svm-common/src/utils.rs", "rank": 29, "score": 167509.40159580362 }, { "content": "#[test]\n\nfn runtime_tx_exec_changing_state() {\n\n unsafe {\n\n let node = FullNode::default();\n\n let raw_contract = alloc_raw_contract!();\n\n let raw_import_object = alloc_raw_import_object!();\n\n let author_addr = Address::from([0xFF; 20].as_ref());\n\n\n\n // 1) deploy\n\n let bytes = build_raw_contract!(\"wasm/store.wast\", &author_addr);\n\n let _ = svm_contract_build(\n\n 
raw_contract,\n\n bytes.as_ptr() as *const c_void,\n\n bytes.len() as u64,\n\n );\n\n let raw_addr = svm_contract_compute_address(*raw_contract);\n\n let _ = svm_contract_store(*raw_contract, raw_addr);\n\n\n\n // 2) execute\n\n let _res = svm_import_object(\n\n raw_import_object,\n", "file_path": "crates/svm-runtime-c-api/tests/c_api.rs", "rank": 30, "score": 164875.38608540045 }, { "content": "#[inline(always)]\n\npub fn u8_triple_add(a: u8, b: u8, c: u8) -> (u8, u8) {\n\n let d = u16::from(a) + u16::from(b) + u16::from(c);\n\n\n\n let d0 = (d & 0xFF) as u8;\n\n let d1 = ((d >> 8) & 0xFF) as u8;\n\n\n\n (d1, d0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_u32_to_be_array() {\n\n let expected = [0x11, 0x22, 0x33, 0x44];\n\n let actual = u32_to_be_array(0x11_22_33_44);\n\n\n\n assert_eq!(expected, actual);\n\n }\n", "file_path": "crates/svm-common/src/utils.rs", "rank": 31, "score": 164621.819291663 }, { "content": "/// Receives a wasm program reprsented as a vector of parsed wasm instructions.\n\n/// On success returns for each function the `Gas` it requires.\n\n/// Otherwise, returns an `crate::error::Error`\n\npub fn estimate_program_gas(program: &Program) -> Result<HashMap<FuncIndex, Gas>, Error> {\n\n let mut program_state = ProgramState::new();\n\n\n\n // we sort `functions_ids`, this is important in order to maintain determinsitic execution of unit-tests\n\n let mut functions_ids: Vec<FuncIndex> = program.functions_ids().clone();\n\n functions_ids.sort();\n\n\n\n for func_idx in functions_ids.drain(..) 
{\n\n estimate_function_gas(func_idx, program, &mut program_state)?;\n\n }\n\n\n\n Ok(program_state.func_gas_cache)\n\n}\n\n\n", "file_path": "crates/svm-gas/src/function_gas.rs", "rank": 32, "score": 153792.55304099675 }, { "content": "#[inline(always)]\n\npub fn zero_page() -> Vec<u8> {\n\n vec![0; PAGE_SIZE]\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n #[should_panic(expected = \"`PageHash::from` expects exactly 32 bytes input\")]\n\n fn page_hash_expects_exactly_32_bytes_input() {\n\n PageHash::from([0; 10].as_ref());\n\n }\n\n\n\n #[test]\n\n fn page_hash_from_slice() {\n\n let raw: [u8; 32] = [\n\n 01, 02, 03, 04, 05, 06, 07, 08, 09, 10, 20, 30, 40, 50, 60, 70, 80, 90, 11, 22, 33, 44,\n\n 55, 66, 77, 88, 99, 251, 252, 253, 254, 255,\n\n ];\n", "file_path": "crates/svm-storage/src/page.rs", "rank": 33, "score": 152639.7970234123 }, { "content": "/// Trait for managing the contract environment.\n\n/// Relies on associated `ContractEnvTypes`.\n\npub trait ContractEnv {\n\n type Types: ContractEnvTypes;\n\n\n\n /// Borrows environment's store\n\n fn get_store(&self) -> &<Self::Types as ContractEnvTypes>::Store;\n\n\n\n /// Borrows mutably environment's store\n\n fn get_store_mut(&mut self) -> &mut <Self::Types as ContractEnvTypes>::Store;\n\n\n\n /// Computes contract hash\n\n #[inline(always)]\n\n fn compute_code_hash(contract: &Contract) -> CodeHash {\n\n <Self::Types as ContractEnvTypes>::CodeHasher::hash(&contract.wasm)\n\n }\n\n\n\n /// Computes contract account address\n\n #[inline(always)]\n\n fn compute_address(contract: &Contract) -> Address {\n\n <Self::Types as ContractEnvTypes>::AddressCompute::compute(contract)\n\n }\n", "file_path": "crates/svm-contract/src/env.rs", "rank": 34, "score": 149910.2401570995 }, { "content": "pub trait ContractEnvTypes {\n\n type Serializer: ContractSerializer;\n\n\n\n type Deserializer: ContractDeserializer;\n\n\n\n type Store: ContractStore<Self::Serializer, Self::Deserializer>;\n\n\n\n 
type AddressCompute: ContractAddressCompute;\n\n\n\n type CodeHasher: ContractCodeHasher;\n\n}\n\n\n", "file_path": "crates/svm-contract/src/env.rs", "rank": 35, "score": 147349.94616185594 }, { "content": "#[test]\n\n#[ignore]\n\nfn contract_exec_non_existing_contract() {\n\n // ...\n\n}\n\n\n", "file_path": "crates/svm-runtime/tests/runtime.rs", "rank": 36, "score": 143210.21433295903 }, { "content": "/// a utility function to be used in `c-api` tests\n\npub fn build_wasmer_import_t(\n\n mode_name: &str,\n\n import_name: &str,\n\n func: *const wasmer_import_func_t,\n\n) -> wasmer_import_t {\n\n wasmer_import_t {\n\n module_name: cast_str_to_wasmer_byte_array(mode_name),\n\n import_name: cast_str_to_wasmer_byte_array(import_name),\n\n tag: wasmer_import_export_kind::WASM_FUNCTION,\n\n value: wasmer_import_export_value { func },\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\n#[macro_export]\n\nmacro_rules! alloc_raw_ptr_stack {\n\n ($ptr_struct_type: path, $) => {{\n\n use std::alloc::Layout;\n\n\n\n let ptr_size: usize = std::mem::size_of::<*mut *mut $ptr_type>();\n", "file_path": "crates/svm-runtime-c-api/src/c_utils.rs", "rank": 37, "score": 142529.7326910951 }, { "content": "#[inline(always)]\n\npub fn u64_to_be_array(num: u64) -> [u8; 8] {\n\n let b7 = ((num >> 56) & 0xFF) as u8;\n\n let b6 = ((num >> 48) & 0xFF) as u8;\n\n let b5 = ((num >> 40) & 0xFF) as u8;\n\n let b4 = ((num >> 32) & 0xFF) as u8;\n\n let b3 = ((num >> 24) & 0xFF) as u8;\n\n let b2 = ((num >> 16) & 0xFF) as u8;\n\n let b1 = ((num >> 8) & 0xFF) as u8;\n\n let b0 = (num & 0xFF) as u8;\n\n\n\n [b7, b6, b5, b4, b3, b2, b1, b0]\n\n}\n\n\n\n/// Converts an unsigned 64-bit integer into a 8-byte array (ordered in Little-Endian)\n", "file_path": "crates/svm-common/src/utils.rs", "rank": 38, "score": 140696.622116703 }, { "content": "#[inline(always)]\n\npub fn u32_to_be_array(num: u32) -> [u8; 4] {\n\n let b3 = ((num >> 24) & 0xFF) as u8;\n\n let b2 = ((num >> 16) & 0xFF) as u8;\n\n let b1 = ((num >> 8) & 
0xFF) as u8;\n\n let b0 = (num & 0xFF) as u8;\n\n\n\n [b3, b2, b1, b0]\n\n}\n\n\n\n/// Converts an unsigned 32-bit integer into a 4-byte array (ordered in Little-Endian)\n", "file_path": "crates/svm-common/src/utils.rs", "rank": 39, "score": 140696.622116703 }, { "content": "#[inline(always)]\n\npub fn u32_to_le_array(num: u32) -> [u8; 4] {\n\n let b0 = ((num >> 24) & 0xFF) as u8;\n\n let b1 = ((num >> 16) & 0xFF) as u8;\n\n let b2 = ((num >> 8) & 0xFF) as u8;\n\n let b3 = (num & 0xFF) as u8;\n\n\n\n [b3, b2, b1, b0]\n\n}\n\n\n\n/// Converts an unsigned 64-bit integer into a 8-byte array (ordered in Big-Endian)\n", "file_path": "crates/svm-common/src/utils.rs", "rank": 40, "score": 138234.4816317307 }, { "content": "#[inline(always)]\n\npub fn u64_to_le_array(num: u64) -> [u8; 8] {\n\n let b0 = ((num >> 56) & 0xFF) as u8;\n\n let b1 = ((num >> 48) & 0xFF) as u8;\n\n let b2 = ((num >> 40) & 0xFF) as u8;\n\n let b3 = ((num >> 32) & 0xFF) as u8;\n\n let b4 = ((num >> 24) & 0xFF) as u8;\n\n let b5 = ((num >> 16) & 0xFF) as u8;\n\n let b6 = ((num >> 8) & 0xFF) as u8;\n\n let b7 = (num & 0xFF) as u8;\n\n\n\n [b7, b6, b5, b4, b3, b2, b1, b0]\n\n}\n\n\n\n/// Adds 2 unsigned bytes and returns also the carry.\n\n///\n\n/// # Example\n\n///\n\n/// u8_pair_add(10, 20) -> returns (0, 30)\n\n/// u8_pair_add(255, 10) -> returns (1, 9)\n\n///\n", "file_path": "crates/svm-common/src/utils.rs", "rank": 41, "score": 138234.4816317307 }, { "content": "#[test]\n\n#[ignore]\n\nfn deploy_wasm_contract() {\n\n let bytes = WireContractBuilder::new()\n\n .with_version(0)\n\n .with_name(\"Contract #1\")\n\n .with_author(Address::from(0x10_20_30_40))\n\n .with_code(&[0xAA, 0xBB, 0xCC, 0xDD])\n\n .build();\n\n\n\n let contract = runtime::contract_build(&bytes).unwrap();\n\n let addr = runtime::contract_compute_address(&contract);\n\n runtime::contract_store(&contract, &addr);\n\n}\n\n\n", "file_path": "crates/svm-runtime/tests/runtime.rs", "rank": 42, "score": 138060.14890827614 }, { 
"content": "/// Computes a contract account address.\n\n/// Algorithm must be deterministic.\n\npub trait ContractAddressCompute {\n\n fn compute(contract: &Contract) -> Address;\n\n}\n\n\n", "file_path": "crates/svm-contract/src/traits.rs", "rank": 43, "score": 136816.3242542603 }, { "content": "#[test]\n\n#[ignore]\n\nfn contract_exec_invalid_func_args() {\n\n //\n\n}\n", "file_path": "crates/svm-runtime/tests/runtime.rs", "rank": 44, "score": 132918.33319816933 }, { "content": "#[test]\n\n#[ignore]\n\nfn contract_exec_invalid_func_name() {\n\n //\n\n}\n\n\n", "file_path": "crates/svm-runtime/tests/runtime.rs", "rank": 45, "score": 132918.33319816933 }, { "content": "#[test]\n\nfn build_transaction() {\n\n let bytes = WireTxBuilder::new()\n\n .with_version(0)\n\n .with_contract(Address::from(0x10_20_30_40))\n\n .with_sender(Address::from(0x50_60_70_80))\n\n .with_func_name(\"run\")\n\n .with_func_args(&vec![WasmArgValue::I32(10), WasmArgValue::I64(20)])\n\n .build();\n\n\n\n let actual = <MemoryEnv as ContractEnv>::build_transaction(&bytes).unwrap();\n\n\n\n let expected = Transaction {\n\n contract: Address::from(0x10_20_30_40),\n\n sender: Address::from(0x50_60_70_80),\n\n func_name: \"run\".to_string(),\n\n func_args: vec![WasmArgValue::I32(10), WasmArgValue::I64(20)],\n\n };\n\n\n\n assert_eq!(expected, actual);\n\n}\n", "file_path": "crates/svm-contract/tests/exec_build.rs", "rank": 46, "score": 131558.3410464486 }, { "content": "fn parse_address(\n\n cursor: &mut Cursor<&[u8]>,\n\n field: Field,\n\n) -> Result<Address, TransactionBuildError> {\n\n let mut bytes = vec![0; Address::len()];\n\n\n\n let res = cursor.read_exact(&mut bytes);\n\n ensure_enough_bytes!(res, field);\n\n\n\n let addr = Address::from(&bytes[..]);\n\n\n\n Ok(addr)\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/exec/parse.rs", "rank": 47, "score": 128647.76595480944 }, { "content": "/// we explicitly whitelist the supported opcodes\n\nfn parse_wasm_opcode(opcode: &Operator) -> 
Result<(), ParseError> {\n\n match opcode {\n\n Operator::Unreachable\n\n | Operator::Nop\n\n | Operator::Block { .. }\n\n | Operator::Loop { .. }\n\n | Operator::If { .. }\n\n | Operator::Else\n\n | Operator::End\n\n | Operator::Br { .. }\n\n | Operator::BrIf { .. }\n\n | Operator::BrTable { .. }\n\n | Operator::Return\n\n | Operator::Call { .. }\n\n | Operator::CallIndirect { .. }\n\n | Operator::Drop\n\n | Operator::Select\n\n | Operator::GetLocal { .. }\n\n | Operator::SetLocal { .. }\n\n | Operator::TeeLocal { .. }\n", "file_path": "crates/svm-compiler/src/middleware/validation.rs", "rank": 48, "score": 122372.77547970964 }, { "content": "fn estimate_vmcall(_func_idx: FuncIndex, _program: &Program) -> Result<Gas, Error> {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "crates/svm-gas/src/function_gas.rs", "rank": 49, "score": 121368.34224577897 }, { "content": "fn full_node_as_ptr(node: &FullNode) -> *const c_void {\n\n node as *const FullNode as *const _\n\n}\n\n\n\n/// Represents a fake node vmcall implemented in another programming-language using the FFI interface.\n\n/// See test: `call_node_get_set_balance`\n\n#[no_mangle]\n\nunsafe extern \"C\" fn vmcall_get_balance(\n\n ctx: *mut wasmer_instance_context_t,\n\n reg_bits: i32,\n\n reg_idx: i32,\n\n) -> i64 {\n\n assert_eq!(Address::len() * 8, reg_bits as usize);\n\n\n\n let ptr: *const u8 = svm_register_get(ctx, reg_bits, reg_idx) as _;\n\n let addr = Address::from(ptr);\n\n\n\n let node: *const c_void = svm_instance_context_node_data_get(ctx);\n\n let node: &FullNode = &*(node as *const FullNode);\n\n\n", "file_path": "crates/svm-runtime-c-api/tests/c_api.rs", "rank": 50, "score": 120552.22837383529 }, { "content": "#[doc(hidden)]\n\npub fn cast_str_to_wasmer_byte_array(s: &str) -> wasmer_byte_array {\n\n let bytes: &[u8] = s.as_bytes();\n\n let bytes_ptr: *const u8 = bytes.as_ptr();\n\n let bytes_len: u32 = bytes.len() as u32;\n\n\n\n std::mem::forget(bytes);\n\n\n\n wasmer_byte_array {\n\n bytes: 
bytes_ptr,\n\n bytes_len,\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\npub unsafe fn cast_wasmer_byte_array_to_string(wasmer_bytes: &wasmer_byte_array) -> String {\n\n let slice: &[u8] =\n\n std::slice::from_raw_parts(wasmer_bytes.bytes, wasmer_bytes.bytes_len as usize);\n\n\n\n if let Ok(s) = std::str::from_utf8(slice) {\n\n s.to_string()\n", "file_path": "crates/svm-runtime-c-api/src/c_utils.rs", "rank": 51, "score": 115908.10149958076 }, { "content": "#[test]\n\nfn store_contract() {\n\n let bytes = WireContractBuilder::new()\n\n .with_version(0)\n\n .with_name(\"Contract #1\")\n\n .with_author(Address::from(0x10_20_30_40))\n\n .with_code(&[0xAA, 0xBB, 0xCC, 0xDD])\n\n .build();\n\n\n\n let contract = <MemoryEnv as ContractEnv>::build_contract(&bytes).unwrap();\n\n let addr = <MemoryEnv as ContractEnv>::compute_address(&contract);\n\n\n\n let store = MemContractStore::new();\n\n let mut env = MemoryEnv::new(store);\n\n\n\n env.store_contract(&contract, &addr);\n\n\n\n let store = env.get_store();\n\n\n\n let stored = store.load(&addr).unwrap();\n\n assert_eq!(stored, contract);\n\n}\n", "file_path": "crates/svm-contract/tests/store.rs", "rank": 52, "score": 107233.16835683829 }, { "content": "#[test]\n\nfn build_contract() {\n\n let bytes = WireContractBuilder::new()\n\n .with_version(0)\n\n .with_name(\"Contract #1\")\n\n .with_author(Address::from(0x10_20_30_40))\n\n .with_code(&[0xAA, 0xBB, 0xCC, 0xDD])\n\n .build();\n\n\n\n let contract = <MemoryEnv as ContractEnv>::build_contract(&bytes).unwrap();\n\n\n\n assert_eq!(\"Contract #1\", contract.name);\n\n assert_eq!(Address::from(0x10_20_30_40), contract.author);\n\n assert_eq!([0xAA, 0xBB, 0xCC, 0xDD], contract.wasm.as_ref());\n\n}\n", "file_path": "crates/svm-contract/tests/deploy_build.rs", "rank": 53, "score": 105415.41582977946 }, { "content": "/// Serializing a contract into its raw representation trait.\n\npub trait ContractSerializer {\n\n #[allow(missing_docs)]\n\n fn serialize(contract: &Contract) -> 
Vec<u8>;\n\n}\n\n\n", "file_path": "crates/svm-contract/src/traits.rs", "rank": 54, "score": 103434.4307534458 }, { "content": "/// Deserializing raw contract into its in-memory representation trait.\n\npub trait ContractDeserializer {\n\n #[allow(missing_docs)]\n\n fn deserialize(bytes: Vec<u8>) -> Contract;\n\n}\n\n\n", "file_path": "crates/svm-contract/src/traits.rs", "rank": 55, "score": 103434.35500734545 }, { "content": "/// Computes code-hash derived deterministically from raw contract.\n\npub trait ContractCodeHasher {\n\n fn hash(bytes: &[u8]) -> CodeHash;\n\n}\n", "file_path": "crates/svm-contract/src/traits.rs", "rank": 56, "score": 101687.98740179239 }, { "content": "#[test]\n\nfn runtime_node_vmcalls() {\n\n unsafe {\n\n let mut node = FullNode::default();\n\n let raw_contract = alloc_raw_contract!();\n\n let raw_import_object = alloc_raw_import_object!();\n\n let author_addr = Address::from([0xFF; 20].as_ref());\n\n\n\n // 1) deploy\n\n let bytes = build_raw_contract!(\"wasm/mul_balance.wast\", &author_addr);\n\n let _ = svm_contract_build(\n\n raw_contract,\n\n bytes.as_ptr() as *const c_void,\n\n bytes.len() as u64,\n\n );\n\n let raw_addr = svm_contract_compute_address(*raw_contract);\n\n let _ = svm_contract_store(*raw_contract, raw_addr);\n\n\n\n // 2) execute\n\n let gb_ptr = cast_vmcall_to_import_func_t!(\n\n vmcall_get_balance,\n", "file_path": "crates/svm-runtime-c-api/tests/c_api.rs", "rank": 57, "score": 101130.6873822501 }, { "content": "fn gen_for_c() {\n\n let crate_dir = env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n let header_name: &str = \"svm_wasmer\";\n\n\n\n // set expand dir for macro expanding\n\n env::set_var(\"CARGO_EXPAND_TARGET_DIR\", crate_dir.clone());\n\n\n\n // set target ouput dir for header\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let mut out_header = PathBuf::from(&out_dir).join(\"../../../\");\n\n out_header.push(header_name);\n\n out_header.set_extension(\"h\");\n\n\n\n // build using cbindgen\n\n 
Builder::new()\n\n .with_crate(crate_dir.clone())\n\n .with_language(Language::C)\n\n .with_include_guard(\"WASMER_SVM_H\")\n\n .with_header(\"#include \\\"wasmer.h\\\"\")\n\n .with_parse_expand(&[\"svm-runtime-c-api\"])\n", "file_path": "crates/svm-runtime-c-api/build.rs", "rank": 58, "score": 97398.38118114334 }, { "content": "fn main() {\n\n gen_for_c();\n\n}\n", "file_path": "crates/svm-runtime-c-api/build.rs", "rank": 59, "score": 97398.38118114334 }, { "content": "/// Stores serialized contracts (a.k.a raw contracts)\n\n/// and deserializes raw contract into `Contract` upon fetching.\n\npub trait ContractStore<S, D>\n\nwhere\n\n S: ContractSerializer,\n\n D: ContractDeserializer,\n\n{\n\n /// Stores the `hash` -> `raw contract` association\n\n fn store(&mut self, contract: &Contract, address: &Address, hash: CodeHash);\n\n\n\n /// Given a contract account address, fetches its raw contract dada\n\n /// and deserializes it. Return `None` it contract doesn't exist\n\n fn load(&self, address: &Address) -> Option<Contract>;\n\n}\n\n\n", "file_path": "crates/svm-contract/src/traits.rs", "rank": 60, "score": 94984.93785532926 }, { "content": "/// Implementors are in-charge of calculating a page hash.\n\n/// The page hash isderived from 3 components: `contract address` + `page-index` + `page-data`\n\npub trait StateHasher {\n\n /// `pages_hash` - a slice of `PageHash`\n\n #[must_use]\n\n fn hash(pages_hash: &[PageHash]) -> StateHash;\n\n}\n\n\n", "file_path": "crates/svm-storage/src/traits.rs", "rank": 61, "score": 94359.04174755323 }, { "content": "fn read_buffer(\n\n cursor: &mut Cursor<&[u8]>,\n\n buf_len: u32,\n\n field: Field,\n\n) -> Result<Vec<u8>, TransactionBuildError> {\n\n let mut buf = vec![0; buf_len as usize];\n\n\n\n let res = cursor.read_exact(&mut buf);\n\n ensure_enough_bytes!(res, field);\n\n\n\n Ok(buf)\n\n}\n", "file_path": "crates/svm-contract/src/wire/exec/parse.rs", "rank": 62, "score": 93523.63414168256 }, { "content": "#[test]\n\nfn 
vmcalls_empty_wasm() {\n\n let wasm = r#\"\n\n (module\n\n (func (export \"do_nothing\")))\"#;\n\n\n\n let module = compile!(&wasm);\n\n let _instance = module.instantiate(&imports! {}).unwrap();\n\n}\n\n\n", "file_path": "crates/svm-runtime/tests/vmcalls.rs", "rank": 63, "score": 93397.2431662324 }, { "content": "/// returns the if-statement `true-block` **inclusive** offsets and whether it has an `else-block`\n\nfn find_if_stmt_true_block(\n\n func_body: &Vec<Instruction>,\n\n block_state: &mut BlockState,\n\n) -> Result<(usize, BlockOffsets, bool), Error> {\n\n let op = func_body.get(block_state.cursor.get()).unwrap();\n\n\n\n match op {\n\n Instruction::If(_) => block_state.advance_cursor(),\n\n _ => panic!(\"expects block to be an if-statement block\"),\n\n };\n\n\n\n let (true_start, mut true_end): (usize, usize) =\n\n (block_state.cursor.get(), block_state.cursor.get());\n\n\n\n let mut block_depth = 1;\n\n let mut found_else = false;\n\n\n\n while !(block_state.is_eof()) {\n\n let op = func_body.get(block_state.cursor.get()).unwrap();\n\n\n", "file_path": "crates/svm-gas/src/function_gas.rs", "rank": 64, "score": 92702.40827967823 }, { "content": "#[test]\n\nfn vmcalls_mem_to_reg_copy() {\n\n let module = wasmer_compile_module_file!(\"wasm/mem_to_reg_copy.wast\");\n\n\n\n let import_object = imports! 
{\n\n test_create_svm_state_gen!(),\n\n\n\n \"svm\" => {\n\n \"mem_to_reg_copy\" => func!(vmcalls::mem_to_reg_copy),\n\n },\n\n };\n\n\n\n let instance = module.instantiate(&import_object).unwrap();\n\n\n\n // initializing memory #0 cells `200..203` with values `10, 20, 30` respectively\n\n svm_runtime::wasmer_ctx_mem_cells_write!(instance.context(), 0, 200, &[10, 20, 30]);\n\n\n\n // asserting register `2` (of type `64 bits`) content is empty prior copy\n\n let reg = svm_runtime::wasmer_ctx_reg!(instance.context(), 64, 2, MemMerklePageCache);\n\n assert_eq!(vec![0, 0, 0, 0, 0, 0, 0, 0], reg.view());\n\n\n\n let do_copy: Func<(i32, i32, i32)> = instance.func(\"do_copy_to_reg\").unwrap();\n\n assert!(do_copy.call(200, 3, 2).is_ok());\n\n\n\n // asserting register `2` (of type `64 bits`) content is `10, 20, 30, 0, ... 0`\n\n let reg = svm_runtime::wasmer_ctx_reg!(instance.context(), 64, 2, MemMerklePageCache);\n\n assert_eq!(vec![10, 20, 30, 0, 0, 0, 0, 0], reg.view());\n\n}\n\n\n", "file_path": "crates/svm-runtime/tests/vmcalls.rs", "rank": 65, "score": 91557.09047657377 }, { "content": "#[test]\n\nfn vmcalls_reg_to_mem_copy() {\n\n let module = wasmer_compile_module_file!(\"wasm/reg_to_mem_copy.wast\");\n\n\n\n let import_object = imports! 
{\n\n test_create_svm_state_gen!(),\n\n\n\n \"svm\" => {\n\n \"reg_to_mem_copy\" => func!(vmcalls::reg_to_mem_copy),\n\n },\n\n };\n\n\n\n let instance = module.instantiate(&import_object).unwrap();\n\n\n\n // initializing reg `2` (of type `64 bits`) with values `10, 20, 30` respectively\n\n let reg = svm_runtime::wasmer_ctx_reg!(instance.context(), 64, 2, MemMerklePageCache);\n\n reg.set(&[10, 20, 30]);\n\n\n\n // asserting memory #0, cells `0..3` are zeros before copy\n\n let cells = svm_runtime::wasmer_ctx_mem_cells!(instance.context(), 0, 0, 3);\n\n assert_eq!([Cell::new(0), Cell::new(0), Cell::new(0)], cells);\n\n\n\n // copying reg `2` content into memory cells `0..3`\n\n let do_copy: Func<(i32, i32, i32)> = instance.func(\"do_copy_to_mem\").unwrap();\n\n assert!(do_copy.call(2, 3, 0).is_ok());\n\n\n\n // asserting memory #0, cells `0..3` have the values `10, 20, 30` respectively\n\n let cells = svm_runtime::wasmer_ctx_mem_cells!(instance.context(), 0, 0, 3);\n\n assert_eq!([Cell::new(10), Cell::new(20), Cell::new(30)], cells);\n\n}\n\n\n", "file_path": "crates/svm-runtime/tests/vmcalls.rs", "rank": 66, "score": 91557.09047657377 }, { "content": "#[test]\n\nfn vmcalls_storage_write_from_reg() {\n\n let module = wasmer_compile_module_file!(\"wasm/storage_write_from_reg.wast\");\n\n\n\n let import_object = imports! 
{\n\n test_create_svm_state_gen!(),\n\n\n\n \"svm\" => {\n\n \"storage_write_from_reg\" => func!(vmcalls::storage_write_from_reg),\n\n },\n\n };\n\n\n\n let mut instance = module.instantiate(&import_object).unwrap();\n\n let storage =\n\n svm_runtime::wasmer_data_storage!(instance.context_mut().data, MemMerklePageCache);\n\n\n\n // we first initialize register `5` (of type `64 bits`) with `[10, 20, 30, 0, 0, 0, 0, 0]`\n\n let reg = svm_runtime::wasmer_ctx_reg!(instance.context(), 64, 5, MemMerklePageCache);\n\n reg.set(&[10, 20, 30]);\n\n\n\n let layout = svm_runtime::svm_page_slice_layout!(1, 10, 100, 3);\n", "file_path": "crates/svm-runtime/tests/vmcalls.rs", "rank": 67, "score": 91557.09047657377 }, { "content": "#[test]\n\nfn vmcalls_storage_write_from_mem() {\n\n let module = wasmer_compile_module_file!(\"wasm/storage_write_from_mem.wast\");\n\n\n\n let import_object = imports! {\n\n test_create_svm_state_gen!(),\n\n\n\n \"svm\" => {\n\n \"storage_write_from_mem\" => func!(vmcalls::storage_write_from_mem),\n\n },\n\n };\n\n\n\n let mut instance = module.instantiate(&import_object).unwrap();\n\n let storage =\n\n svm_runtime::wasmer_data_storage!(instance.context_mut().data, MemMerklePageCache);\n\n\n\n svm_runtime::wasmer_ctx_mem_cells_write!(instance.context(), 0, 200, &[10, 20, 30]);\n\n\n\n let layout = svm_runtime::svm_page_slice_layout!(1, 10, 100, 3);\n\n\n\n assert_eq!(None, storage.read_page_slice(&layout));\n\n\n\n let do_write: Func<(i32, i32, i32, i32, i32)> = instance.func(\"do_write_from_mem\").unwrap();\n\n\n\n // we copy memory cells `200..`203` into storage (`page 1`, `slice 10`, cells: `100..103`)\n\n assert!(do_write.call(200, 3, 1, 10, 100).is_ok());\n\n\n\n assert_eq!(Some(vec![10, 20, 30]), storage.read_page_slice(&layout));\n\n}\n\n\n", "file_path": "crates/svm-runtime/tests/vmcalls.rs", "rank": 68, "score": 91557.09047657377 }, { "content": "fn parse_func_arg_int_type(\n\n cursor: &mut Cursor<&[u8]>,\n\n) -> Result<WasmIntType, 
TransactionBuildError> {\n\n let arg_type = parse_func_arg_type(cursor)?;\n\n\n\n match arg_type {\n\n WasmArgType::I32 => Ok(WasmIntType::I32),\n\n WasmArgType::I64 => Ok(WasmIntType::I64),\n\n _ => Err(TransactionBuildError::InvalidArgIntType),\n\n }\n\n}\n\n\n", "file_path": "crates/svm-contract/src/wire/exec/parse.rs", "rank": 69, "score": 88403.59359104026 }, { "content": "#[test]\n\nfn vmcalls_storage_read_an_empty_page_slice_to_mem() {\n\n let module = wasmer_compile_module_file!(\"wasm/storage_to_mem_copy.wast\");\n\n\n\n let import_object = imports! {\n\n test_create_svm_state_gen!(),\n\n\n\n \"svm\" => {\n\n \"storage_read_to_mem\" => func!(vmcalls::storage_read_to_mem),\n\n },\n\n };\n\n\n\n let instance = module.instantiate(&import_object).unwrap();\n\n\n\n // we fill memory #0, cells `200..203` with garbage data\n\n svm_runtime::wasmer_ctx_mem_cells_write!(instance.context(), 0, 200, &[255, 255, 255]);\n\n let cells = svm_runtime::wasmer_ctx_mem_cells!(instance.context(), 0, 200, 3);\n\n assert_eq!(&[Cell::new(255), Cell::new(255), Cell::new(255)], cells);\n\n\n\n // we copy storage `slice 0` (page `1`, cells: `100..103`) into memory starting from address = 200\n\n let do_copy: Func<(i32, i32, i32, i32, i32)> = instance.func(\"do_copy_to_mem\").unwrap();\n\n assert!(do_copy.call(1, 10, 100, 3, 200).is_ok());\n\n\n\n let cells = svm_runtime::wasmer_ctx_mem_cells!(instance.context(), 0, 200, 3);\n\n assert_eq!(&[Cell::new(0), Cell::new(0), Cell::new(0)], cells);\n\n}\n\n\n", "file_path": "crates/svm-runtime/tests/vmcalls.rs", "rank": 70, "score": 86583.03226549536 }, { "content": "#[test]\n\nfn vmcalls_reg_replace_byte_read_write_be_i64() {\n\n let module = wasmer_compile_module_file!(\"wasm/reg_replace_read_write_be_i64.wast\");\n\n\n\n let import_object = imports! 
{\n\n test_create_svm_state_gen!(),\n\n\n\n \"svm\" => {\n\n \"storage_read_to_reg\" => func!(vmcalls::storage_read_to_reg),\n\n \"storage_write_from_reg\" => func!(vmcalls::storage_write_from_reg),\n\n \"reg_replace_byte\" => func!(vmcalls::reg_replace_byte),\n\n \"reg_read_be_i64\" => func!(vmcalls::reg_read_be_i64),\n\n \"reg_write_be_i64\" => func!(vmcalls::reg_write_be_i64),\n\n },\n\n };\n\n\n\n let instance = module.instantiate(&import_object).unwrap();\n\n\n\n // we first initialize register `64:5` with `[254, 255, 0, 0, 0, 0, 0, 0]`\n\n let reg = svm_runtime::wasmer_ctx_reg!(instance.context(), 64, 5, MemMerklePageCache);\n\n reg.set(&[0, 0, 0, 0, 0, 0, 255, 254]);\n", "file_path": "crates/svm-runtime/tests/vmcalls.rs", "rank": 71, "score": 86583.03226549536 }, { "content": "#[test]\n\nfn vmcalls_storage_read_an_empty_page_slice_to_reg() {\n\n let module = wasmer_compile_module_file!(\"wasm/storage_to_reg_copy.wast\");\n\n\n\n let import_object = imports! {\n\n test_create_svm_state_gen!(),\n\n\n\n \"svm\" => {\n\n \"storage_read_to_reg\" => func!(vmcalls::storage_read_to_reg),\n\n },\n\n };\n\n\n\n let instance = module.instantiate(&import_object).unwrap();\n\n\n\n // we first initialize register `2` with some garbage data which should be overriden\n\n // after calling the exported `do_copy_to_reg` function\n\n let reg = svm_runtime::wasmer_ctx_reg!(instance.context(), 64, 2, MemMerklePageCache);\n\n reg.set(&[255; 8]);\n\n\n\n assert_eq!(vec![255; 8], reg.view());\n\n\n\n let do_copy: Func<(i32, i32, i32, i32, i32)> = instance.func(\"do_copy_to_reg\").unwrap();\n\n assert!(do_copy.call(1, 10, 100, 3, 2).is_ok());\n\n\n\n // register `2` (of type `64 bits) should contain zeros, since an empty page-slice is treated as a page-slice containing only zeros\n\n let reg = svm_runtime::wasmer_ctx_reg!(instance.context(), 64, 2, MemMerklePageCache);\n\n assert_eq!(vec![0, 0, 0, 0, 0, 0, 0, 0], reg.view());\n\n}\n\n\n", "file_path": 
"crates/svm-runtime/tests/vmcalls.rs", "rank": 72, "score": 86583.03226549536 }, { "content": "/// This trait should be implemented by state-oriented pages storage.\n\n/// Since a Smart Contract must have a state (like a source control revision) we need to have this\n\n/// capability implemented for real-usage Smart Contract storage.\n\npub trait PagesStateStorage: PagesStorage {\n\n /// Returns the current storage state (i.e revision)\n\n #[must_use]\n\n fn get_state(&self) -> State;\n\n\n\n /// Returns the page-hash of a given page indexed by `page_idx`\n\n #[must_use]\n\n fn get_page_hash(&self, page_idx: PageIndex) -> PageHash;\n\n}\n\n\n", "file_path": "crates/svm-storage/src/traits.rs", "rank": 73, "score": 86026.67339185267 }, { "content": "/// `PageCache` is a marker trait intended for subclassing the `PagesStateStorage` trait.\n\n/// It's intended to mark a `PagesStateStorage` as having a caching layer on top of the backed pages-storage.\n\npub trait PageCache: PagesStateStorage {}\n", "file_path": "crates/svm-storage/src/traits.rs", "rank": 74, "score": 86022.46730875727 }, { "content": "#[test]\n\nfn vmcalls_storage_read_non_empty_page_slice_to_reg() {\n\n let module = wasmer_compile_module_file!(\"wasm/storage_to_reg_copy.wast\");\n\n\n\n let import_object = imports! 
{\n\n test_create_svm_state_gen!(),\n\n\n\n \"svm\" => {\n\n \"storage_read_to_reg\" => func!(vmcalls::storage_read_to_reg),\n\n },\n\n };\n\n\n\n let mut instance = module.instantiate(&import_object).unwrap();\n\n let storage =\n\n svm_runtime::wasmer_data_storage!(instance.context_mut().data, MemMerklePageCache);\n\n let layout = svm_runtime::svm_page_slice_layout!(1, 10, 100, 3);\n\n\n\n // we write `[10, 20, 30]` into storage slice `10` (page `1`, cells: `100..103`)\n\n storage.write_page_slice(&layout, &vec![10, 20, 30]);\n\n\n\n // we first initialize register `2` (of type `64 bits`) with some garbage data which should be overriden\n", "file_path": "crates/svm-runtime/tests/vmcalls.rs", "rank": 75, "score": 85085.15644901908 }, { "content": "#[test]\n\nfn vmcalls_storage_read_non_empty_page_slice_to_mem() {\n\n let module = wasmer_compile_module_file!(\"wasm/storage_to_mem_copy.wast\");\n\n\n\n let import_object = imports! {\n\n test_create_svm_state_gen!(),\n\n\n\n \"svm\" => {\n\n \"storage_read_to_mem\" => func!(vmcalls::storage_read_to_mem),\n\n },\n\n };\n\n\n\n let mut instance = module.instantiate(&import_object).unwrap();\n\n let storage =\n\n svm_runtime::wasmer_data_storage!(instance.context_mut().data, MemMerklePageCache);\n\n let layout = svm_runtime::svm_page_slice_layout!(1, 10, 100, 3);\n\n\n\n // we write `[10, 20, 30]` into storage slice `10` (page `1`, cells `100..103`)\n\n storage.write_page_slice(&layout, &vec![10, 20, 30]);\n\n\n\n let do_copy: Func<(i32, i32, i32, i32, i32)> = instance.func(\"do_copy_to_mem\").unwrap();\n\n\n\n // we copy storage `slice 0` (page `1`, cells: `100..103`) into memory #0, starting from address `200`\n\n assert!(do_copy.call(1, 10, 100, 3, 200).is_ok());\n\n\n\n let cells = svm_runtime::wasmer_ctx_mem_cells!(instance.context(), 0, 200, 3);\n\n assert_eq!(&[Cell::new(10), Cell::new(20), Cell::new(30)], cells);\n\n}\n\n\n", "file_path": "crates/svm-runtime/tests/vmcalls.rs", "rank": 76, "score": 
85085.15644901908 }, { "content": "use crate::runtime::ContractExecError;\n\n\n\nuse svm_common::State;\n\nuse svm_contract::transaction::Transaction;\n\n\n\nuse wasmer_runtime::Value;\n\n\n\n/// Runtime transaction execution receipt\n\n#[derive(Debug)]\n\npub struct Receipt {\n\n /// whether transaction succedded or not\n\n pub success: bool,\n\n\n\n /// the execution error in case execution failed\n\n pub error: Option<ContractExecError>,\n\n\n\n /// executed transaction\n\n pub tx: Transaction,\n\n\n\n /// the new contract `State` if execution succedded\n\n pub new_state: Option<State>,\n\n\n\n /// returned values\n\n pub results: Vec<Value>,\n\n}\n", "file_path": "crates/svm-runtime/src/runtime/receipt.rs", "rank": 77, "score": 82601.48085330083 }, { "content": "mod error;\n\nmod macros;\n\nmod receipt;\n\nmod rocksdb;\n\n\n\npub use error::ContractExecError;\n\npub use receipt::Receipt;\n", "file_path": "crates/svm-runtime/src/runtime/mod.rs", "rank": 78, "score": 82349.79457467466 }, { "content": " ContractExecError::ExecFailed => \"Execution failed\",\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for ContractExecError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n let msg = match self {\n\n ContractExecError::NotFound(addr) => format!(\"Contract `{:?}` not found\", addr),\n\n ContractExecError::CompilationFailed(addr) => {\n\n format!(\"Compilation failed for contract `{:?}`\", addr)\n\n }\n\n ContractExecError::InstantiationFailed(addr) => {\n\n format!(\"Instance Instantiation failed for contract `{:?}`\", addr)\n\n }\n\n ContractExecError::FuncNotFound(func) => format!(\"Function `{}` not found\", func),\n\n ContractExecError::ExecFailed => \"Execution failed\".to_string(),\n\n };\n\n\n\n write!(f, \"{}\", msg)\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for ContractExecError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n <Self as std::fmt::Display>::fmt(self, f)\n\n }\n\n}\n", "file_path": 
"crates/svm-runtime/src/runtime/error.rs", "rank": 79, "score": 82260.70713193809 }, { "content": "use svm_common::Address;\n\n\n\n/// Contract execution error\n\n#[allow(missing_docs)]\n\n#[derive(PartialEq, Clone)]\n\npub enum ContractExecError {\n\n NotFound(Address),\n\n CompilationFailed(Address),\n\n InstantiationFailed(Address),\n\n FuncNotFound(String),\n\n ExecFailed,\n\n}\n\n\n\nimpl std::error::Error for ContractExecError {\n\n fn description(&self) -> &'static str {\n\n match self {\n\n ContractExecError::NotFound(_) => \"Contract not found\",\n\n ContractExecError::CompilationFailed(_) => \"Compilation failed\",\n\n ContractExecError::InstantiationFailed(_) => \"Instance Instantiation failed\",\n\n ContractExecError::FuncNotFound(_) => \"Function not found\",\n", "file_path": "crates/svm-runtime/src/runtime/error.rs", "rank": 80, "score": 82258.12579574973 }, { "content": "use crate::traits::ContractAddressCompute;\n\nuse crate::wasm::Contract;\n\nuse svm_common::{Address, DefaultKeyHasher, KeyHasher};\n\n\n\n/// Default implementation for `ContractAddressCompute`.\n\n///\n\n/// Computing the contract's account address as follows:\n\n/// Taking `Address::len()` bytes of `HASH(contract.author || contract.wasm)`\n\npub struct DefaultContractAddressCompute;\n\n\n\nimpl ContractAddressCompute for DefaultContractAddressCompute {\n\n fn compute(contract: &Contract) -> Address {\n\n let mut buf = Vec::with_capacity(Address::len() + contract.wasm.len());\n\n buf.extend_from_slice(contract.author.as_slice());\n\n buf.extend_from_slice(contract.wasm.as_slice());\n\n\n\n let hash = DefaultKeyHasher::hash(&buf);\n\n\n\n Address::from(&hash[0..Address::len()])\n\n }\n\n}\n", "file_path": "crates/svm-contract/src/default/contract_address_compute.rs", "rank": 81, "score": 81342.0533207911 }, { "content": "\n\n /// * Parses a raw contract into `Contract`\n\n /// * Enriches the contract with its derived address\n\n fn build_contract(bytes: &[u8]) -> Result<Contract, 
ContractBuildError> {\n\n let contract = crate::wire::deploy::parse_contract(bytes)?;\n\n\n\n crate::wire::deploy::validate_contract(&contract)?;\n\n\n\n Ok(contract)\n\n }\n\n\n\n /// Parses a raw transaction\n\n fn build_transaction(bytes: &[u8]) -> Result<Transaction, TransactionBuildError> {\n\n let tx = crate::wire::exec::parse_transaction(bytes)?;\n\n\n\n Ok(tx)\n\n }\n\n\n\n /// Stores contract by its `CodeHash`\n\n #[inline(always)]\n\n fn store_contract(&mut self, contract: &Contract, addr: &Address) {\n\n let hash = Self::compute_code_hash(contract);\n\n let store = self.get_store_mut();\n\n\n\n store.store(contract, addr, hash);\n\n }\n\n}\n", "file_path": "crates/svm-contract/src/env.rs", "rank": 82, "score": 77885.85602093676 }, { "content": "use crate::traits::{\n\n ContractAddressCompute, ContractCodeHasher, ContractDeserializer, ContractSerializer,\n\n ContractStore,\n\n};\n\nuse crate::transaction::Transaction;\n\nuse crate::types::CodeHash;\n\nuse crate::wasm::Contract;\n\nuse crate::wire::{deploy::ContractBuildError, exec::TransactionBuildError};\n\n\n\nuse svm_common::Address;\n\n\n", "file_path": "crates/svm-contract/src/env.rs", "rank": 83, "score": 77875.00622612449 }, { "content": "use crate::wasm::WasmArgValue;\n\nuse svm_common::Address;\n\n\n\n/// An in-memory representation of a smart-contract transaction.\n\n#[derive(Clone, PartialEq)]\n\npub struct Transaction {\n\n /// The contract account address\n\n pub contract: Address,\n\n\n\n /// Transaction sender account address\n\n pub sender: Address,\n\n\n\n /// Contract function to execute\n\n pub func_name: String,\n\n\n\n /// Contrant function args\n\n pub func_args: Vec<WasmArgValue>,\n\n}\n\n\n\nimpl std::fmt::Debug for Transaction {\n", "file_path": "crates/svm-contract/src/transaction.rs", "rank": 84, "score": 77811.49305863297 }, { "content": " fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n let contract = self.fmt_contract();\n\n let sender = 
self.fmt_sender();\n\n let func_name = self.fmt_func_name();\n\n let func_args = self.fmt_func_args();\n\n\n\n let msg = [contract, sender, func_name, func_args];\n\n\n\n write!(f, \"{}\", msg.join(\"\\n\"))\n\n }\n\n}\n\n\n\nimpl Transaction {\n\n fn fmt_contract(&self) -> String {\n\n self.fmt_address(\"Contract\", &self.contract)\n\n }\n\n\n\n fn fmt_sender(&self) -> String {\n\n self.fmt_address(\"Sender\", &self.sender)\n\n }\n", "file_path": "crates/svm-contract/src/transaction.rs", "rank": 85, "score": 77803.94467649092 }, { "content": "\n\n fn fmt_address(&self, field: &str, addr: &Address) -> String {\n\n format!(\"{:?}: {:?}\", field, addr)\n\n }\n\n\n\n fn fmt_func_name(&self) -> String {\n\n format!(\"FuncName: {:?}\", self.func_name)\n\n }\n\n\n\n fn fmt_func_arg(&self, func_arg: &WasmArgValue) -> String {\n\n format!(\"{:?}\", func_arg)\n\n }\n\n\n\n fn fmt_func_args(&self) -> String {\n\n let mut args_str = Vec::with_capacity(self.func_args.len());\n\n\n\n for arg in self.func_args.iter() {\n\n let arg_str = self.fmt_func_arg(arg);\n\n args_str.push(arg_str);\n\n }\n\n\n\n format!(\"FuncArgs: {}\", args_str.join(\", \"))\n\n }\n\n}\n", "file_path": "crates/svm-contract/src/transaction.rs", "rank": 86, "score": 77796.55274482301 }, { "content": "/// Holds settings for using the runtime.\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Opts {\n\n /// maximum pages required by the contract pages storage\n\n pub max_pages: usize,\n\n\n\n /// maximum pages required by the contract page-cache slice\n\n pub max_pages_slices: usize,\n\n}\n", "file_path": "crates/svm-runtime/src/opts.rs", "rank": 87, "score": 75738.41095958522 }, { "content": "use crate::default::{DefaultCodeHasher, DefaultContractAddressCompute};\n\nuse crate::env::{ContractEnv, ContractEnvTypes};\n\nuse crate::memory::MemContractStore;\n\nuse crate::wasm::{WasmContractJsonDeserializer, WasmContractJsonSerializer};\n\n\n\npub struct MemoryEnvTypes {}\n\n\n\nimpl ContractEnvTypes for 
MemoryEnvTypes {\n\n type Serializer = WasmContractJsonSerializer;\n\n\n\n type Deserializer = WasmContractJsonDeserializer;\n\n\n\n type Store = MemContractStore<Self::Serializer, Self::Deserializer>;\n\n\n\n type AddressCompute = DefaultContractAddressCompute;\n\n\n\n type CodeHasher = DefaultCodeHasher;\n\n}\n\n\n\n/// In-memory implementation for `ContractEnv`\n", "file_path": "crates/svm-contract/src/memory/env.rs", "rank": 88, "score": 75610.02380510316 }, { "content": "use crate::default::{DefaultCodeHasher, DefaultContractAddressCompute};\n\nuse crate::env::{ContractEnv, ContractEnvTypes};\n\nuse crate::rocksdb::RocksContractStore;\n\nuse crate::wasm::{WasmContractJsonDeserializer, WasmContractJsonSerializer};\n\n\n\npub struct RocksEnvTypes {}\n\n\n\nimpl ContractEnvTypes for RocksEnvTypes {\n\n type Serializer = WasmContractJsonSerializer;\n\n\n\n type Deserializer = WasmContractJsonDeserializer;\n\n\n\n type Store = RocksContractStore<Self::Serializer, Self::Deserializer>;\n\n\n\n type AddressCompute = DefaultContractAddressCompute;\n\n\n\n type CodeHasher = DefaultCodeHasher;\n\n}\n\n\n\n/// Contract environment backed by `rocksdb` for persistence.\n", "file_path": "crates/svm-contract/src/rocksdb/env.rs", "rank": 89, "score": 75609.76486510571 }, { "content": "pub struct RocksEnv {\n\n store: <RocksEnvTypes as ContractEnvTypes>::Store,\n\n}\n\n\n\nimpl RocksEnv {\n\n /// Creates a new `RocksEnv`. 
Injects externally the `ContractStore`\n\n pub fn new(store: <RocksEnvTypes as ContractEnvTypes>::Store) -> Self {\n\n Self { store }\n\n }\n\n}\n\n\n\nimpl ContractEnv for RocksEnv {\n\n type Types = RocksEnvTypes;\n\n\n\n fn get_store(&self) -> &<Self::Types as ContractEnvTypes>::Store {\n\n &self.store\n\n }\n\n\n\n fn get_store_mut(&mut self) -> &mut <Self::Types as ContractEnvTypes>::Store {\n\n &mut self.store\n\n }\n\n}\n", "file_path": "crates/svm-contract/src/rocksdb/env.rs", "rank": 90, "score": 75608.14635090575 }, { "content": "pub struct MemoryEnv {\n\n store: <MemoryEnvTypes as ContractEnvTypes>::Store,\n\n}\n\n\n\nimpl MemoryEnv {\n\n /// Creates a new in-memory environment.\n\n pub fn new(store: <MemoryEnvTypes as ContractEnvTypes>::Store) -> Self {\n\n Self { store }\n\n }\n\n}\n\n\n\nimpl ContractEnv for MemoryEnv {\n\n type Types = MemoryEnvTypes;\n\n\n\n fn get_store(&self) -> &<Self::Types as ContractEnvTypes>::Store {\n\n &self.store\n\n }\n\n\n\n fn get_store_mut(&mut self) -> &mut <Self::Types as ContractEnvTypes>::Store {\n\n &mut self.store\n\n }\n\n}\n", "file_path": "crates/svm-contract/src/memory/env.rs", "rank": 91, "score": 75608.11650342162 }, { "content": "mod contract_store;\n\nmod env;\n\n\n\npub use contract_store::MemContractStore;\n\npub use env::MemoryEnv;\n", "file_path": "crates/svm-contract/src/memory/mod.rs", "rank": 92, "score": 75431.57472586374 }, { "content": "mod contract_store;\n\nmod env;\n\n\n\npub use contract_store::RocksContractStore;\n\npub use env::RocksEnv;\n", "file_path": "crates/svm-contract/src/rocksdb/mod.rs", "rank": 93, "score": 75431.57472586374 }, { "content": "mod code_hasher;\n\nmod contract_address_compute;\n\n\n\npub use code_hasher::DefaultCodeHasher;\n\npub use contract_address_compute::DefaultContractAddressCompute;\n", "file_path": "crates/svm-contract/src/default/mod.rs", "rank": 94, "score": 75430.35728671848 }, { "content": "mod arg;\n\nmod contract;\n\nmod serialize;\n\n\n\npub use 
arg::{WasmArgType, WasmArgTypeError, WasmArgValue, WasmIntType};\n\npub use contract::Contract;\n\npub use serialize::WasmContractJsonDeserializer;\n\npub use serialize::WasmContractJsonSerializer;\n", "file_path": "crates/svm-contract/src/wasm/mod.rs", "rank": 95, "score": 75429.09907820885 }, { "content": "pub mod deploy;\n\npub mod exec;\n", "file_path": "crates/svm-contract/src/wire/mod.rs", "rank": 96, "score": 75418.15388982704 }, { "content": "\n\nmod build;\n\nmod error;\n\nmod field;\n\nmod parse;\n\n\n\npub use build::WireTxBuilder;\n\npub use error::TransactionBuildError;\n\npub use parse::parse_transaction;\n", "file_path": "crates/svm-contract/src/wire/exec/mod.rs", "rank": 97, "score": 73305.36432068892 }, { "content": "//! | (8 bytes) | (wasm) |\n\n//! |________________|____________________________________|\n\n\n\nmod build;\n\nmod error;\n\nmod field;\n\nmod parse;\n\nmod validate;\n\n\n\npub use build::WireContractBuilder;\n\npub use error::ContractBuildError;\n\npub use parse::parse_contract;\n\npub use validate::validate_contract;\n", "file_path": "crates/svm-contract/src/wire/deploy/mod.rs", "rank": 98, "score": 73302.43269125628 }, { "content": "//! Exec Contract Wire Protocol Version 0.0.0.0\n\n//! -------------------------------------------------------\n\n//! | proto | |\n\n//! | version | contract address |\n\n//! | (4 bytes) | (20 bytes) |\n\n//! |____________|________________________________________|\n\n//! | |\n\n//! | sender address |\n\n//! | (20 bytes) |\n\n//! |_____________________________________________________|\n\n//! | | |\n\n//! | func name | |\n\n//! | length | func name (UTF-8) |\n\n//! | (1 byte) | |\n\n//! |_____________|_______________________________________|\n\n//! | | | | |\n\n//! | #args | arg #1 type | arg #1 | . . . . |\n\n//! | (1 byte) | (1 byte) | value | |\n\n//! 
|___________|______________|_________|________________|\n\n//!\n", "file_path": "crates/svm-contract/src/wire/exec/mod.rs", "rank": 99, "score": 73287.09320813262 } ]
Rust
src/golf/lexer/matcher.rs
nilq/golf
aceb3d54cfd3afe1c0ea55d6ec688dd896866931
use super::*; macro_rules! token { ($tokenizer:expr, $token_type:ident, $accum:expr) => {{ token!($tokenizer , TokenType::$token_type, $accum) }}; ($tokenizer:expr, $token_type:expr, $accum:expr) => {{ let tokenizer = $tokenizer as &$crate::golf::lexer::Tokenizer; let token_type = $token_type as $crate::golf::lexer::token::TokenType; Some(Token::new(token_type, tokenizer.last_position(), $accum)) }}; } pub trait Matcher { fn try_match(&self, tokenizer: &mut Tokenizer) -> Option<Token>; } pub struct WhitespaceMatcher; impl Matcher for WhitespaceMatcher { fn try_match(&self, tokenizer: &mut Tokenizer) -> Option<Token> { let mut found = false; while !tokenizer.end() && tokenizer.peek().unwrap().is_whitespace() { found = true; tokenizer.next(); } if found { token!(tokenizer, Whitespace, String::new()) } else { None } } } pub struct IntLiteralMatcher {} impl Matcher for IntLiteralMatcher { fn try_match(&self, tokenizer: &mut Tokenizer) -> Option<Token> { let mut accum = String::new(); let prefix = match tokenizer.peek() { Some(&'-') => Some(false), Some(&'+') => Some(true), _ => None, }; if let Some(_) = prefix { tokenizer.advance(1) }; while !tokenizer.end() && tokenizer.peek().unwrap().is_digit(10) { accum.push(tokenizer.next().unwrap()); } if !accum.is_empty() { let literal: String = if Some(false) == prefix { match i64::from_str_radix(accum.as_str(), 10) { Ok(result) => format!("-{}", result), Err(error) => panic!("unable to parse int: {}", error) } } else { match u64::from_str_radix(accum.as_str(), 10) { Ok(result) => result.to_string(), Err(error) => panic!("unable to parse int: {}", error) } }; token!(tokenizer, IntLiteral, literal) } else { None } } } pub struct FloatLiteralMatcher; impl Matcher for FloatLiteralMatcher { fn try_match(&self, tokenizer: &mut Tokenizer) -> Option<Token> { let mut accum = String::new(); let prefix = match tokenizer.peek() { Some(&'-') => Some(false), Some(&'+') => Some(true), _ => None, }; if let Some(_) = prefix { 
tokenizer.advance(1) }; let curr = tokenizer.next().unwrap(); if curr.is_digit(10) { accum.push(curr) } else if curr == '.' { accum.push_str("0.") } else { return None } while !tokenizer.end() { let current = *tokenizer.peek().unwrap(); if !current.is_whitespace() && current.is_digit(10) || current == '.' { if current == '.' && accum.contains('.') { panic!("illegal decimal point") } accum.push(tokenizer.next().unwrap()) } else { break } } if accum == "0." { None } else if accum.contains('.') { let literal: String = if Some(false) == prefix { match accum.parse::<f64>() { Ok(result) => format!("-{}", result), Err(error) => panic!("unable to parse float: {}", error) } } else { match accum.parse::<f64>() { Ok(result) => result.to_string(), Err(error) => panic!("unable to parse float: {}", error) } }; token!(tokenizer, FloatLiteral, literal) } else { let literal: String = if Some(false) == prefix { match i64::from_str_radix(accum.as_str(), 10) { Ok(result) => format!("-{}", result), Err(error) => panic!("unable to parse int: {}", error) } } else { match u64::from_str_radix(accum.as_str(), 10) { Ok(result) => result.to_string(), Err(error) => panic!("unable to parse int: {}", error) } }; token!(tokenizer, IntLiteral, literal) } } } pub struct StringLiteralMatcher {} impl Matcher for StringLiteralMatcher { fn try_match(&self, tokenizer: &mut Tokenizer) -> Option<Token> { let mut raw_marker = false; let delimeter = match *tokenizer.peek().unwrap() { '"' => Some('"'), '\'' => Some('\''), 'r' if tokenizer.peek_n(1) == Some(&'"') => { raw_marker = true; tokenizer.advance(1); Some('"') }, _ => return None, }; tokenizer.advance(1); let mut string = String::new(); let mut found_escape = false; while !tokenizer.end() { if raw_marker { if tokenizer.peek().unwrap() == &'"' { break } string.push(tokenizer.next().unwrap()) } else if found_escape { string.push( match tokenizer.next().unwrap() { c @ '\\' | c @ '\'' | c @ '"' => c, 'n' => '\n', 'r' => '\r', 't' => '\t', s => 
panic!("invalid character escape: {}", s), } ); found_escape = false } else { match *tokenizer.peek().unwrap() { '\\' => { tokenizer.next(); found_escape = true }, c if c == delimeter.unwrap() => break, _ => string.push(tokenizer.next().unwrap()), } } } tokenizer.advance(1); match delimeter.unwrap() { '"' => { token!(tokenizer, StringLiteral, string) }, _ => { if string.len() == 1 { token!(tokenizer, CharLiteral, string) } else { panic!("invalid char literal") } }, } } } pub struct ConstantMatcher { token_type: TokenType, constants: Vec<String>, } impl ConstantMatcher { pub fn new(token_type: TokenType, constants: Vec<String>) -> Self { ConstantMatcher { token_type: token_type, constants: constants, } } } impl Matcher for ConstantMatcher { fn try_match(&self, tokenizer: &mut Tokenizer) -> Option<Token> { for constant in self.constants.clone() { let dat = tokenizer.clone().take(constant.len()); if dat.size_hint().1.unwrap() != constant.len() { return None } if dat.collect::<String>() == constant { tokenizer.advance(constant.len()); return token!(tokenizer, self.token_type.clone(), constant) } } None } } pub struct KeyMatcher { token_type: TokenType, constants: Vec<String>, } impl KeyMatcher { pub fn new(token_type: TokenType, constants: Vec<String>) -> Self { KeyMatcher { token_type, constants, } } } impl Matcher for KeyMatcher { fn try_match(&self, tokenizer: &mut Tokenizer) -> Option<Token> { for constant in self.constants.clone() { let dat = tokenizer.clone().take(constant.len()); if dat.size_hint().1.unwrap() != constant.len() { return None } if dat.collect::<String>() == constant { if let Some(c) = tokenizer.peek_n(constant.len()) { if "_?".contains(*c) || c.is_alphanumeric() { return None } } tokenizer.advance(constant.len()); return token!(tokenizer, self.token_type.clone(), constant) } } None } } pub struct IdentifierMatcher; impl Matcher for IdentifierMatcher { fn try_match(&self, tokenizer: &mut Tokenizer) -> Option<Token> { let mut identifier = 
String::new(); while !tokenizer.end() { let current = *tokenizer.peek().unwrap(); if !current.is_whitespace() && ("_?'".contains(current) || current.is_alphanumeric()) { identifier.push(tokenizer.next().unwrap()); } else { break } } if !identifier.is_empty() { token!(tokenizer, Identifier, identifier) } else { None } } }
use super::*; macro_rules! token { ($tokenizer:expr, $token_type:ident, $accum:expr) => {{ token!($tokenizer , TokenType::$token_type, $accum) }}; ($tokenizer:expr, $token_type:expr, $accum:expr) => {{ let tokenizer = $tokenizer as &$crate::golf::lexer::Tokenizer; let token_type = $token_type as $crate::golf::lexer::token::TokenType; Some(Token::new(token_type, tokenizer.last_position(), $accum)) }}; } pub trait Matcher { fn try_match(&self, tokenizer: &mut Tokenizer) -> Option<Token>; } pub struct WhitespaceMatcher; impl Matcher for WhitespaceMatcher { fn try_match(&self, tokenizer: &mut Tokenizer) -> Option<Token> { let mut found = false; while !tokenizer.end() && tokenizer.peek().unwrap().is_whitespace() { found = true; tokenizer.next(); } if found { token!(tokenizer, Whitespace, String::new()) } else { None } } } pub struct IntLiteralMatcher {} impl Matcher for IntLiteralMatcher { fn try_match(&self, tokenizer: &mut Tokenizer) -> Option<Token> { let mut accum = String::new(); let prefix = match tokenizer.peek() { Some(&'-') => Some(false), Some(&'+') => Some(true), _ => None, }; if let Some(_) = prefix { tokenizer.advance(1) }; while !tokenizer.end() && tokenizer.peek().unwrap().is_digit(10) { accum.push(tokenizer.next().unwrap()); } if !accum.is_empty() { let literal: String = if Some(false) == prefix { match i64::from_str_radix(accum.as_str(), 10) { Ok(result) => format!("-{}", result), Err(error) => panic!("unable to parse int: {}", error) } } else { match u64::from_str_radix(accum.as_str(), 10) { Ok(result) => result.to_string(), Err(error) => panic!("unable to parse int: {}", error) } }; token!(tokenizer, IntLiteral, literal) } else { None } } } pub struct FloatLiteralMatcher; impl Matcher for FloatLiteralMatcher { fn try_match(&self, tokenizer: &mut Tokenizer) -> Option<Token> { let mut accum = String::new(); let prefix = match tokenizer.peek() { Some(&'-') => Some(false), Some(&'+') => Some(true), _ => None, }; if let Some(_) = prefix { 
tokenizer.advance(1) }; let curr = tokenizer.next().unwrap(); if curr.is_digit(10) { accum.push(curr) } else if curr == '.' { accum.push_str("0.") } else { return None } while !tokenizer.end() { let current = *tokenizer.peek().unwrap(); if !current.is_whitespace() && current.is_digit(10) || current == '.' { if current == '.' && accum.contains('.') { panic!("illegal decimal point") } accum.push(tokenizer.next().unwrap()) } else { break } } if accum == "0." { None } else if accum.contains('.') { let literal: String = if Some(false) == prefix { match accum.parse::<f64>() { Ok(result) => format!("-{}", result), Err(error) => panic!("unable to parse float: {}", error) } } else { match accum.parse::<f64>() { Ok(result) => result.to_string(), Err(error) => panic!("unable to parse float: {}", error) } }; token!(tokenizer, FloatLiteral, literal) } else { let literal: String = if Some(false) == prefix {
} else { match u64::from_str_radix(accum.as_str(), 10) { Ok(result) => result.to_string(), Err(error) => panic!("unable to parse int: {}", error) } }; token!(tokenizer, IntLiteral, literal) } } } pub struct StringLiteralMatcher {} impl Matcher for StringLiteralMatcher { fn try_match(&self, tokenizer: &mut Tokenizer) -> Option<Token> { let mut raw_marker = false; let delimeter = match *tokenizer.peek().unwrap() { '"' => Some('"'), '\'' => Some('\''), 'r' if tokenizer.peek_n(1) == Some(&'"') => { raw_marker = true; tokenizer.advance(1); Some('"') }, _ => return None, }; tokenizer.advance(1); let mut string = String::new(); let mut found_escape = false; while !tokenizer.end() { if raw_marker { if tokenizer.peek().unwrap() == &'"' { break } string.push(tokenizer.next().unwrap()) } else if found_escape { string.push( match tokenizer.next().unwrap() { c @ '\\' | c @ '\'' | c @ '"' => c, 'n' => '\n', 'r' => '\r', 't' => '\t', s => panic!("invalid character escape: {}", s), } ); found_escape = false } else { match *tokenizer.peek().unwrap() { '\\' => { tokenizer.next(); found_escape = true }, c if c == delimeter.unwrap() => break, _ => string.push(tokenizer.next().unwrap()), } } } tokenizer.advance(1); match delimeter.unwrap() { '"' => { token!(tokenizer, StringLiteral, string) }, _ => { if string.len() == 1 { token!(tokenizer, CharLiteral, string) } else { panic!("invalid char literal") } }, } } } pub struct ConstantMatcher { token_type: TokenType, constants: Vec<String>, } impl ConstantMatcher { pub fn new(token_type: TokenType, constants: Vec<String>) -> Self { ConstantMatcher { token_type: token_type, constants: constants, } } } impl Matcher for ConstantMatcher { fn try_match(&self, tokenizer: &mut Tokenizer) -> Option<Token> { for constant in self.constants.clone() { let dat = tokenizer.clone().take(constant.len()); if dat.size_hint().1.unwrap() != constant.len() { return None } if dat.collect::<String>() == constant { tokenizer.advance(constant.len()); return 
token!(tokenizer, self.token_type.clone(), constant) } } None } } pub struct KeyMatcher { token_type: TokenType, constants: Vec<String>, } impl KeyMatcher { pub fn new(token_type: TokenType, constants: Vec<String>) -> Self { KeyMatcher { token_type, constants, } } } impl Matcher for KeyMatcher { fn try_match(&self, tokenizer: &mut Tokenizer) -> Option<Token> { for constant in self.constants.clone() { let dat = tokenizer.clone().take(constant.len()); if dat.size_hint().1.unwrap() != constant.len() { return None } if dat.collect::<String>() == constant { if let Some(c) = tokenizer.peek_n(constant.len()) { if "_?".contains(*c) || c.is_alphanumeric() { return None } } tokenizer.advance(constant.len()); return token!(tokenizer, self.token_type.clone(), constant) } } None } } pub struct IdentifierMatcher; impl Matcher for IdentifierMatcher { fn try_match(&self, tokenizer: &mut Tokenizer) -> Option<Token> { let mut identifier = String::new(); while !tokenizer.end() { let current = *tokenizer.peek().unwrap(); if !current.is_whitespace() && ("_?'".contains(current) || current.is_alphanumeric()) { identifier.push(tokenizer.next().unwrap()); } else { break } } if !identifier.is_empty() { token!(tokenizer, Identifier, identifier) } else { None } } }
match i64::from_str_radix(accum.as_str(), 10) { Ok(result) => format!("-{}", result), Err(error) => panic!("unable to parse int: {}", error) }
if_condition
[ { "content": "pub fn lexer(data: &mut Chars) -> Lexer {\n\n let tokenizer = Tokenizer::new(data);\n\n let mut lexer = Lexer::new(tokenizer);\n\n\n\n let eol = vec![\"\\n\"].iter().map(|&x| x.to_string()).collect();\n\n\n\n let symbols = vec![\n\n \"(\",\n\n \")\",\n\n \"[\",\n\n \"]\",\n\n \",\",\n\n \":\",\n\n \";\",\n\n \"{\",\n\n \"}\",\n\n \"!\",\n\n \"|\",\n\n \"=\",\n\n \"!\",\n", "file_path": "src/golf/lexer/lexer.rs", "rank": 1, "score": 67298.67365814302 }, { "content": "fn transpile(s: String) -> Option<Rc<String>> {\n\n let lexer = lexer(&mut s.chars());\n\n\n\n let traveler = Traveler::new(lexer.collect());\n\n let mut parser = Parser::new(traveler);\n\n\n\n match parser.parse() {\n\n Err(err) => match err {\n\n ParserError {ref value, ref position} => {\n\n match *position {\n\n Some(ref pos) => {\n\n let mut lines = s.lines();\n\n\n\n for i in 0 .. pos.line - 1 {\n\n if i == pos.line - 2 {\n\n let source_pos = format!(\"ln {} | \", pos.line - 1).yellow();\n\n match lines.next() {\n\n Some(line) => println!(\"{}{}\", source_pos, line),\n\n None => unreachable!(),\n\n }\n", "file_path": "src/main.rs", "rank": 2, "score": 62098.60623150757 }, { "content": "fn write(path: &str, data: Rc<String>) {\n\n let path = Path::new(path);\n\n println!(\"building: {}\", path.display());\n\n\n\n let split_name = path.file_name().unwrap().to_str().unwrap().split(\".\");\n\n let split: Vec<&str> = split_name.collect();\n\n \n\n let parent_path = match path.parent() {\n\n Some(p) => match p.file_name() {\n\n Some(path) => path.to_str().unwrap(),\n\n None => \".\",\n\n },\n\n None => \".\",\n\n };\n\n\n\n let output_name = format!(\"{}/{}.lua\", parent_path, split.get(0).unwrap());\n\n\n\n let mut output_file = File::create(output_name).unwrap();\n\n match output_file.write_all(data.as_bytes()) {\n\n Ok(_) => (),\n\n Err(why) => println!(\"{}\", why.description())\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 3, "score": 47899.00580005268 }, { "content": "fn 
file(path: &str) -> Option<Rc<String>> {\n\n let path = Path::new(path);\n\n let display = path.display();\n\n \n\n let mut file = match File::open(&path) {\n\n Err(why) => panic!(\"failed to open {}: {}\", display, why.description()),\n\n Ok(file) => file,\n\n };\n\n \n\n let mut s = String::new();\n\n \n\n match file.read_to_string(&mut s) {\n\n Err(why) => panic!(\"failed to read {}: {}\", display, why.description()),\n\n Ok(_) => transpile(s),\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 4, "score": 47899.00580005268 }, { "content": "fn main() {\n\n match env::args().nth(1) {\n\n Some(a) => transpile_path(&a),\n\n\n\n None => println!(\"a golf language\n\n\n\ngolf <path>\n\n \"),\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 5, "score": 36451.28280034814 }, { "content": "fn test() {\n\n let test = r#\"\n\nfib = {\n\n |0| 0\n\n\n\n a = 10\n\n\n\n |1| 1\n\n |n| (fib n - 1) + fib n - 2\n\n}\n\n\n\ntwice = {\n\n |n| 2 * n\n\n}\n\n\n\ntwice_fib = twice . fib\n\n\n\na = twice_fib 10\n\n \"#;\n\n\n", "file_path": "src/main.rs", "rank": 6, "score": 36451.28280034814 }, { "content": "fn transpile_path(path: &str) {\n\n let meta = metadata(path).unwrap();\n\n \n\n if meta.is_file() {\n\n match file(path) {\n\n Some(n) => write(path, n),\n\n None => (),\n\n }\n\n } else {\n\n let paths = fs::read_dir(path).unwrap();\n\n\n\n for path in paths {\n\n let path = format!(\"{}\", path.unwrap().path().display());\n\n let split: Vec<&str> = path.split(\".\").collect();\n\n\n\n match split.get(1) {\n\n Some(n) if *n == \"golf\" => (),\n\n _ => continue,\n\n }\n\n\n\n transpile_path(&format!(\"{}\", path))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 7, "score": 28414.96097703441 }, { "content": " position: None,\n\n }\n\n }\n\n\n\n pub fn new_pos(position: TokenPosition, value: &str) -> ParserError {\n\n ParserError {\n\n value: ParserErrorValue::Constant(value.to_owned()),\n\n position: Some(position),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for 
ParserError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self.value {\n\n ParserErrorValue::Constant(ref s) => match self.position {\n\n Some(p) => write!(f, \"{}: {}\", p, s),\n\n None => write!(f, \"{}\", s),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/golf/parser/error.rs", "rank": 8, "score": 23142.666702112816 }, { "content": "use super::*;\n\nuse std::fmt;\n\n\n\n#[derive(Debug)]\n\npub enum TranspileErrorValue {\n\n Constant(String),\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct TranspileError {\n\n pub value: TranspileErrorValue,\n\n pub position: Option<TokenPosition>,\n\n}\n\n\n\nimpl TranspileError {\n\n pub fn new_pos(value: &str, position: TokenPosition) -> TranspileError {\n\n TranspileError {\n\n value: TranspileErrorValue::Constant(value.to_owned()),\n\n position: Some(position),\n\n }\n", "file_path": "src/golf/transpiler/error.rs", "rank": 9, "score": 23142.65477270497 }, { "content": "use super::*;\n\nuse std::fmt;\n\n\n\n#[derive(Debug)]\n\npub enum CheckErrorValue {\n\n Constant(String),\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct CheckError {\n\n pub value: CheckErrorValue,\n\n pub position: Option<TokenPosition>,\n\n}\n\n\n\nimpl CheckError {\n\n pub fn new_pos(value: &str, position: TokenPosition) -> CheckError {\n\n CheckError {\n\n value: CheckErrorValue::Constant(value.to_owned()),\n\n position: Some(position),\n\n }\n", "file_path": "src/golf/checker/error.rs", "rank": 10, "score": 23142.65477270497 }, { "content": "use super::*;\n\n\n\nuse std::fmt;\n\n\n\n#[derive(Debug)]\n\npub enum ParserErrorValue {\n\n Constant(String),\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ParserError {\n\n pub value: ParserErrorValue,\n\n pub position: Option<TokenPosition>,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl ParserError {\n\n pub fn new(value: &str) -> ParserError {\n\n ParserError {\n\n value: ParserErrorValue::Constant(value.to_owned()),\n", "file_path": "src/golf/parser/error.rs", "rank": 11, "score": 23142.305185107274 }, { 
"content": " }\n\n}\n\n\n\nimpl fmt::Display for TranspileError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self.value {\n\n TranspileErrorValue::Constant(ref s) => write!(f, \"{}\", s),\n\n }\n\n }\n\n}\n", "file_path": "src/golf/transpiler/error.rs", "rank": 12, "score": 23139.518578055682 }, { "content": " }\n\n}\n\n\n\nimpl fmt::Display for CheckError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self.value {\n\n CheckErrorValue::Constant(ref s) => write!(f, \"{}\", s),\n\n }\n\n }\n\n}\n", "file_path": "src/golf/checker/error.rs", "rank": 13, "score": 23139.518578055682 }, { "content": "use std::fmt;\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum TokenType {\n\n IntLiteral,\n\n FloatLiteral,\n\n StringLiteral,\n\n CharLiteral,\n\n BoolLiteral,\n\n Symbol,\n\n Operator,\n\n Identifier,\n\n Whitespace,\n\n Indent,\n\n EOL,\n\n EOF,\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub struct TokenPosition {\n", "file_path": "src/golf/lexer/token.rs", "rank": 14, "score": 22945.968460889646 }, { "content": " pub fn commit_snapshot(&mut self) {\n\n self.snapshots.pop();\n\n }\n\n\n\n pub fn last_position(&self) -> TokenPosition {\n\n self.peek_snapshot().unwrap().pos\n\n }\n\n\n\n pub fn try_match_token(&mut self, matcher: &Matcher) -> Option<Token> {\n\n if self.end() {\n\n return Some(Token::new(TokenType::EOF,\n\n TokenPosition::new(self.index, self.index),\n\n String::new()));\n\n }\n\n\n\n self.take_snapshot();\n\n match matcher.try_match(self) {\n\n Some(t) => {\n\n self.commit_snapshot();\n\n Some(t)\n", "file_path": "src/golf/lexer/tokenizer.rs", "rank": 15, "score": 22944.24036153705 }, { "content": " pub line: usize,\n\n pub col: usize,\n\n}\n\n\n\nimpl Default for TokenPosition {\n\n fn default() -> Self {\n\n TokenPosition {\n\n line: 1,\n\n col: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for TokenPosition {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"[{}, 
{}]\", self.line, self.col)\n\n }\n\n}\n\n\n\nimpl TokenPosition {\n", "file_path": "src/golf/lexer/token.rs", "rank": 16, "score": 22941.061276678207 }, { "content": "use super::*;\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Snapshot {\n\n pub pos: TokenPosition,\n\n pub index: usize,\n\n}\n\n\n\nimpl Snapshot {\n\n pub fn new(index: usize, pos: TokenPosition) -> Snapshot {\n\n Snapshot {\n\n index,\n\n pos,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Tokenizer {\n\n pub pos: TokenPosition,\n", "file_path": "src/golf/lexer/tokenizer.rs", "rank": 17, "score": 22940.78536609042 }, { "content": " content,\n\n }\n\n }\n\n\n\n pub fn content(&self) -> &String {\n\n &self.content\n\n }\n\n}\n\n\n\nimpl PartialEq for Token {\n\n fn eq(&self, other: &Token) -> bool {\n\n self.token_type == other.token_type\n\n }\n\n}\n", "file_path": "src/golf/lexer/token.rs", "rank": 18, "score": 22939.600707611055 }, { "content": " pub fn new(line: usize, col: usize) -> TokenPosition {\n\n TokenPosition {\n\n line, col,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Token {\n\n pub token_type: TokenType,\n\n pub position: TokenPosition,\n\n content: String,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl Token {\n\n pub fn new(token_type: TokenType, position: TokenPosition, content: String) -> Token {\n\n Token {\n\n token_type,\n\n position,\n", "file_path": "src/golf/lexer/token.rs", "rank": 19, "score": 22939.393861875644 }, { "content": " index: usize,\n\n items: Vec<char>,\n\n snapshots: Vec<Snapshot>,\n\n}\n\n\n\nimpl Iterator for Tokenizer {\n\n type Item = char;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.read().cloned()\n\n }\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl Tokenizer {\n\n pub fn new(items: &mut Iterator<Item = char>) -> Tokenizer {\n\n Tokenizer {\n\n index: 0,\n\n pos: TokenPosition::default(),\n\n items: items.collect(),\n", "file_path": "src/golf/lexer/tokenizer.rs", "rank": 20, "score": 22938.261143665357 }, { "content": " 
Some(&self.items[self.index + n])\n\n }\n\n\n\n pub fn read(&mut self) -> Option<&char> {\n\n if self.end() {\n\n return None\n\n }\n\n self.advance(1);\n\n Some(&self.items[self.index - 1])\n\n }\n\n\n\n pub fn advance(&mut self, a: usize) {\n\n if self.index + a <= self.items.len() {\n\n for item in &self.items[self.index .. self.index + a] {\n\n match *item {\n\n '\\n' => {\n\n self.pos.line += 1;\n\n self.pos.col = 0;\n\n }\n\n _ => self.pos.col += 1\n", "file_path": "src/golf/lexer/tokenizer.rs", "rank": 21, "score": 22937.725250624582 }, { "content": " }\n\n\n\n None => {\n\n self.rollback_snapshot();\n\n None\n\n }\n\n }\n\n }\n\n\n\n pub fn index(&self) -> &usize {\n\n &self.index\n\n }\n\n}\n", "file_path": "src/golf/lexer/tokenizer.rs", "rank": 22, "score": 22935.308276252497 }, { "content": " snapshots: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn end(&self) -> bool {\n\n self.end_n(0)\n\n }\n\n\n\n pub fn end_n(&self, lookahead: usize) -> bool {\n\n self.index + lookahead >= self.items.len()\n\n }\n\n\n\n pub fn peek(&self) -> Option<&char> {\n\n self.peek_n(0)\n\n }\n\n\n\n pub fn peek_n(&self, n: usize) -> Option<&char> {\n\n if self.end_n(n) {\n\n return None\n\n }\n", "file_path": "src/golf/lexer/tokenizer.rs", "rank": 23, "score": 22934.502660200837 }, { "content": " }\n\n }\n\n self.index += a\n\n }\n\n }\n\n\n\n pub fn take_snapshot(&mut self) {\n\n self.snapshots.push(Snapshot::new(self.index, self.pos));\n\n }\n\n\n\n pub fn peek_snapshot(&self) -> Option<&Snapshot> {\n\n self.snapshots.last()\n\n }\n\n\n\n pub fn rollback_snapshot(&mut self) {\n\n let snapshot = self.snapshots.pop().unwrap();\n\n self.index = snapshot.index;\n\n self.pos = snapshot.pos;\n\n }\n\n\n", "file_path": "src/golf/lexer/tokenizer.rs", "rank": 24, "score": 22934.376610638534 }, { "content": " &self.matchers\n\n }\n\n\n\n pub fn matchers_mut(&mut self) -> &mut Vec<Rc<Matcher>> {\n\n &mut self.matchers\n\n }\n\n}\n\n\n\nimpl Iterator for Lexer {\n\n type Item = Token;\n\n\n\n 
fn next(&mut self) -> Option<Token> {\n\n let token = match self.match_token() {\n\n Some(n) => n,\n\n None => return None,\n\n };\n\n match token.token_type {\n\n TokenType::EOF => None,\n\n TokenType::Whitespace => {\n\n match self.next() {\n\n Some(t) => Some(t),\n\n None => None,\n\n }\n\n }\n\n _ => Some(token),\n\n }\n\n }\n\n}\n", "file_path": "src/golf/lexer/lexer.rs", "rank": 39, "score": 20.696946113784016 }, { "content": "#[allow(dead_code)]\n\nimpl Lexer {\n\n pub fn new(tokenizer: Tokenizer) -> Lexer {\n\n Lexer {\n\n tokenizer,\n\n matchers: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn match_token(&mut self) -> Option<Token> {\n\n for matcher in &mut self.matchers {\n\n match self.tokenizer.try_match_token(matcher.as_ref()) {\n\n Some(t) => return Some(t),\n\n None => continue,\n\n }\n\n }\n\n None\n\n }\n\n\n\n pub fn matchers(&self) -> &Vec<Rc<Matcher>> {\n", "file_path": "src/golf/lexer/lexer.rs", "rank": 40, "score": 18.856465303928417 }, { "content": " }\n\n\n\n pub fn expect_content(&self, content: &str) -> ParserResult<String> {\n\n if self.current_content() == content {\n\n Ok(self.current_content())\n\n } else {\n\n Err(ParserError::new_pos(self.current().position, &format!(\"expected '{}', found '{}'\", content, self.current_content())))\n\n }\n\n }\n\n\n\n pub fn expect_contents(&self, sequence: Vec<String>) -> Result<Vec<&Token>, String> {\n\n let mut res = Vec::new();\n\n\n\n for (accum, c) in sequence.iter().enumerate() {\n\n if self.top + accum >= self.tokens.len() {\n\n return Err(format!(\"expected '{}', found end of source >:(\", c))\n\n }\n\n\n\n if c != self.tokens[self.top + accum].content() {\n\n return Err(format!(\"expected '{}', found '{}'\", c, self.tokens[self.top + accum].content()))\n\n }\n\n\n\n res.push(self.get(self.top + accum));\n\n }\n\n\n\n Ok(res)\n\n }\n\n}\n", "file_path": "src/golf/parser/traveler.rs", "rank": 41, "score": 18.21961016170379 }, { "content": " fn try_call(&mut self, callee: Expression) -> 
ParserResult<Expression> {\n\n match self.traveler.current().token_type {\n\n TokenType::IntLiteral |\n\n TokenType::FloatLiteral |\n\n TokenType::BoolLiteral |\n\n TokenType::StringLiteral |\n\n TokenType::CharLiteral |\n\n TokenType::Identifier => self.call(callee),\n\n TokenType::Symbol => match self.traveler.current_content().as_str() {\n\n \"(\" => self.call(callee),\n\n _ => Ok(callee),\n\n },\n\n\n\n _ => Ok(callee),\n\n }\n\n }\n\n\n\n fn index(&mut self, id: Rc<Expression>) -> ParserResult<Expression> {\n\n self.traveler.next();\n\n\n", "file_path": "src/golf/parser/parser.rs", "rank": 42, "score": 17.826199048241218 }, { "content": " ].iter().map(|&x| x.to_string()).collect();\n\n\n\n let indent = vec![\n\n \" \", \"\\t\",\n\n ].iter().map(|&x| x.to_string()).collect();\n\n\n\n let boolean = vec![\n\n \"true\",\n\n \"false\",\n\n ].iter().map(|&x| x.to_string()).collect();\n\n\n\n let matcher_eol = ConstantMatcher::new(TokenType::EOL, eol);\n\n let matcher_indent = ConstantMatcher::new(TokenType::Indent, indent);\n\n let matcher_operator = ConstantMatcher::new(TokenType::Operator, operators);\n\n let matcher_symbol = ConstantMatcher::new(TokenType::Symbol, symbols);\n\n let matcher_boolean = KeyMatcher::new(TokenType::BoolLiteral, boolean);\n\n let matcher_whitespace = WhitespaceMatcher {};\n\n let matcher_int_literal = IntLiteralMatcher {};\n\n let matcher_float_literal = FloatLiteralMatcher {};\n\n let matcher_identifier = IdentifierMatcher {};\n", "file_path": "src/golf/lexer/lexer.rs", "rank": 43, "score": 17.600399656930577 }, { "content": "use super::{Token, TokenType};\n\nuse super::{ParserError, ParserResult};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Traveler {\n\n pub tokens: Vec<Token>,\n\n top: usize,\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl Traveler {\n\n pub fn new(tokens: Vec<Token>) -> Traveler {\n\n Traveler {\n\n tokens,\n\n top: 0,\n\n }\n\n }\n\n\n\n pub fn next(&mut self) -> bool {\n\n if self.top < self.tokens.len() {\n", 
"file_path": "src/golf/parser/traveler.rs", "rank": 44, "score": 17.22073548564078 }, { "content": " let matcher_string_literal = StringLiteralMatcher {};\n\n\n\n lexer.matchers_mut().push(Rc::new(matcher_eol));\n\n lexer.matchers_mut().push(Rc::new(matcher_indent));\n\n lexer.matchers_mut().push(Rc::new(matcher_whitespace));\n\n lexer.matchers_mut().push(Rc::new(matcher_operator));\n\n lexer.matchers_mut().push(Rc::new(matcher_symbol));\n\n lexer.matchers_mut().push(Rc::new(matcher_float_literal));\n\n lexer.matchers_mut().push(Rc::new(matcher_int_literal));\n\n lexer.matchers_mut().push(Rc::new(matcher_string_literal));\n\n lexer.matchers_mut().push(Rc::new(matcher_boolean));\n\n lexer.matchers_mut().push(Rc::new(matcher_identifier));\n\n lexer\n\n}\n\n\n\npub struct Lexer {\n\n tokenizer: Tokenizer,\n\n matchers: Vec<Rc<Matcher>>,\n\n}\n\n\n", "file_path": "src/golf/lexer/lexer.rs", "rank": 45, "score": 16.692504666663552 }, { "content": " return &self.tokens[self.tokens.len() - 1];\n\n }\n\n &self.tokens[self.top]\n\n }\n\n\n\n pub fn get(&self, i: usize) -> &Token {\n\n assert!(i > 0 && i < self.tokens.len(), \"trying to get non-existing token\");\n\n &self.tokens[i]\n\n }\n\n\n\n pub fn current_content(&self) -> String {\n\n self.current().content().clone()\n\n }\n\n\n\n pub fn expect(&self, token: TokenType) -> ParserResult<String> {\n\n if self.current().token_type == token {\n\n Ok(self.current_content())\n\n } else {\n\n Err(ParserError::new_pos(self.current().position, &format!(\"expected '{:?}', found '{}'\", token, self.current_content())))\n\n }\n", "file_path": "src/golf/parser/traveler.rs", "rank": 46, "score": 15.58431886680436 }, { "content": "use super::*;\n\n\n\nuse std::rc::Rc;\n\n\n\npub struct Transpiler {\n\n ast: Vec<Statement>\n\n}\n\n\n\nimpl Transpiler {\n\n pub fn new(ast: Vec<Statement>) -> Transpiler {\n\n Transpiler {\n\n ast,\n\n }\n\n }\n\n\n\n pub fn lua(&self) -> TranspileResult<Rc<String>> {\n\n let mut result = 
String::new();\n\n\n\n for statement in &self.ast {\n\n result.push_str(&*self.lua_statement(&statement)?)\n", "file_path": "src/golf/transpiler/transpiler.rs", "rank": 47, "score": 15.171604670593645 }, { "content": "pub mod error;\n\npub mod transpiler;\n\n\n\npub use super::*;\n\n\n\npub use self::error::*;\n\npub use self::transpiler::*;\n\n\n\npub type TranspileResult<T> = Result<T, TranspileError>;\n", "file_path": "src/golf/transpiler/mod.rs", "rank": 48, "score": 15.165153351008026 }, { "content": " self.skip_whitespace()?;\n\n if self.traveler.current_content() != \"}\" {\n\n arms.push(self.statement()?);\n\n }\n\n }\n\n }\n\n\n\n self.traveler.expect_content(\"}\")?;\n\n self.traveler.next();\n\n\n\n Ok(Expression::Function(Function{arms: Rc::new(Expression::Block(arms)), position: self.traveler.current().position}))\n\n }\n\n\n\n pub fn term(&mut self) -> ParserResult<Expression> {\n\n if self.traveler.remaining() < 2 {\n\n return Ok(Expression::EOF)\n\n }\n\n\n\n match self.traveler.current().token_type {\n\n TokenType::IntLiteral => {\n", "file_path": "src/golf/parser/parser.rs", "rank": 49, "score": 15.075738479857554 }, { "content": "use std::rc::Rc;\n\n\n\nuse super::*;\n\n\n\npub struct Parser {\n\n traveler: Traveler,\n\n}\n\n\n\nimpl Parser {\n\n pub fn new(traveler: Traveler) -> Parser {\n\n Parser {\n\n traveler,\n\n }\n\n }\n\n\n\n pub fn parse(&mut self) -> ParserResult<Vec<Statement>> {\n\n let mut stack = Vec::new();\n\n\n\n while self.traveler.remaining() > 1 {\n\n self.skip_whitespace()?;\n", "file_path": "src/golf/parser/parser.rs", "rank": 50, "score": 14.90204261940214 }, { "content": "pub mod error;\n\npub mod symtab;\n\npub mod checker;\n\n\n\npub use super::*;\n\n\n\npub use self::error::*;\n\npub use self::symtab::*;\n\npub use self::checker::*;\n\n\n\npub type CheckResult<T> = Result<T, CheckError>;\n", "file_path": "src/golf/checker/mod.rs", "rank": 51, "score": 14.773721908922465 }, { "content": " 
stack.push(self.statement()?);\n\n }\n\n\n\n Ok(stack)\n\n }\n\n\n\n pub fn skip_whitespace(&mut self) -> ParserResult<()> {\n\n while self.traveler.current_content() == \"\\n\" ||\n\n self.traveler.current().token_type == TokenType::EOL ||\n\n self.traveler.current().token_type == TokenType::Indent {\n\n\n\n self.traveler.next();\n\n\n\n if self.traveler.remaining() < 2 {\n\n break\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n", "file_path": "src/golf/parser/parser.rs", "rank": 52, "score": 14.763179223615406 }, { "content": " Err(ParserError::new_pos(self.traveler.current().position, &format!(\"expected expression, found: {:?}\", self.traveler.current_content())))\n\n } else {\n\n let right = Rc::new(self.expression()?);\n\n\n\n Ok(\n\n Statement::Assignment(\n\n Assignment {\n\n left,\n\n right,\n\n position: self.traveler.current().position\n\n }\n\n )\n\n )\n\n }\n\n }\n\n\n\n fn statement(&mut self) -> ParserResult<Statement> {\n\n self.skip_whitespace()?;\n\n match self.traveler.current().token_type {\n\n TokenType::Symbol => match self.traveler.current_content().as_str() {\n", "file_path": "src/golf/parser/parser.rs", "rank": 53, "score": 14.595532346298537 }, { "content": "pub mod error;\n\npub mod traveler;\n\npub mod ast;\n\npub mod parser;\n\n\n\npub use super::*;\n\n\n\npub use self::error::*;\n\npub use self::traveler::*;\n\npub use self::ast::*;\n\npub use self::parser::*;\n\n\n\npub type ParserResult<T> = Result<T, ParserError>;\n", "file_path": "src/golf/parser/mod.rs", "rank": 54, "score": 14.394928294691548 }, { "content": "\n\n TokenType::FloatLiteral => {\n\n let a = Expression::Number(self.traveler.current_content().parse::<f64>().unwrap());\n\n self.traveler.next();\n\n a\n\n }\n\n\n\n TokenType::BoolLiteral => {\n\n let a = Expression::Bool(self.traveler.current_content() == \"true\");\n\n self.traveler.next();\n\n a\n\n }\n\n\n\n TokenType::StringLiteral => {\n\n let a = Expression::Str(Rc::new(self.traveler.current_content().clone()));\n\n 
self.traveler.next();\n\n a\n\n }\n\n\n\n TokenType::CharLiteral => {\n", "file_path": "src/golf/parser/parser.rs", "rank": 55, "score": 13.521207715605446 }, { "content": "pub mod token;\n\npub mod tokenizer;\n\npub mod matcher;\n\npub mod lexer;\n\n\n\npub use self::token::*;\n\npub use self::tokenizer::*;\n\npub use self::matcher::*;\n\npub use self::lexer::*;\n", "file_path": "src/golf/lexer/mod.rs", "rank": 56, "score": 13.484034226542777 }, { "content": " self.top += 1;\n\n return true\n\n }\n\n false\n\n }\n\n\n\n pub fn prev(&mut self) -> bool {\n\n if self.top > 0 {\n\n self.top -= 1;\n\n return true\n\n }\n\n false\n\n }\n\n\n\n pub fn remaining(&self) -> usize {\n\n self.tokens.len() - self.top + 1\n\n }\n\n\n\n pub fn current(&self) -> &Token {\n\n if self.top > self.tokens.len() - 1 {\n", "file_path": "src/golf/parser/traveler.rs", "rank": 57, "score": 13.18150136550283 }, { "content": "use super::Tokenizer;\n\nuse super::matcher::*;\n\nuse super::token::{Token, TokenType};\n\n\n\nuse std::str::Chars;\n\nuse std::rc::Rc;\n\n\n", "file_path": "src/golf/lexer/lexer.rs", "rank": 58, "score": 12.978710473559573 }, { "content": "use super::*;\n\n\n\nuse std::rc::Rc;\n\n\n\npub struct Checker {\n\n ast: Vec<Statement>\n\n}\n\n\n\nimpl Checker {\n\n pub fn new(ast: Vec<Statement>) -> Checker {\n\n Checker {\n\n ast,\n\n }\n\n }\n\n\n\n pub fn check(&self, sym: &mut SymTab) -> CheckResult<()> {\n\n for statement in &self.ast {\n\n self.check_statement(sym, &statement)?\n\n }\n\n\n", "file_path": "src/golf/checker/checker.rs", "rank": 59, "score": 12.208225811408187 }, { "content": " let mut params = Vec::new();\n\n\n\n let mut acc = 0;\n\n\n\n while self.traveler.current_content() != \"|\" {\n\n if acc == 0 {\n\n if self.traveler.current().token_type == TokenType::Operator {\n\n params.push(Rc::new(Expression::Operand(Operand::from_str(&self.traveler.current_content()).unwrap().0)));\n\n self.traveler.next();\n\n\n\n continue\n\n }\n\n }\n\n \n\n let a = match 
self.traveler.current().token_type {\n\n TokenType::IntLiteral => {\n\n let a = Expression::Number(self.traveler.current_content().parse::<f64>().unwrap());\n\n self.traveler.next();\n\n a\n\n }\n", "file_path": "src/golf/parser/parser.rs", "rank": 60, "score": 12.190314221735628 }, { "content": " let a = Ok(Expression::Number(self.traveler.current_content().parse::<f64>().unwrap()));\n\n self.traveler.next();\n\n a\n\n }\n\n\n\n TokenType::FloatLiteral => {\n\n let a = Ok(Expression::Number(self.traveler.current_content().parse::<f64>().unwrap()));\n\n self.traveler.next();\n\n a\n\n }\n\n\n\n TokenType::BoolLiteral => {\n\n let a = Ok(Expression::Bool(self.traveler.current_content() == \"true\"));\n\n self.traveler.next();\n\n a\n\n }\n\n\n\n TokenType::StringLiteral => {\n\n let a = Ok(Expression::Str(Rc::new(self.traveler.current_content().clone())));\n\n self.traveler.next();\n", "file_path": "src/golf/parser/parser.rs", "rank": 61, "score": 12.14561848239914 }, { "content": " }\n\n\n\n let mut error = String::from(\"\");\n\n\n\n for _ in 0 .. pos.col + source_pos.len() {\n\n error.push_str(\" \")\n\n }\n\n\n\n error.push_str(\"^ \");\n\n\n\n match *value {\n\n CheckErrorValue::Constant(ref a) => error.push_str(a),\n\n }\n\n \n\n println!(\"{}\", error.red());\n\n \n\n },\n\n \n\n None => (),\n\n }\n", "file_path": "src/main.rs", "rank": 62, "score": 11.133534670584776 }, { "content": "\n\n let mut error = String::from(\"\");\n\n\n\n for _ in 0 .. 
pos.col + source_pos.len() {\n\n error.push_str(\" \")\n\n }\n\n\n\n error.push_str(\"^ \");\n\n\n\n match *value {\n\n CheckErrorValue::Constant(ref a) => error.push_str(a),\n\n }\n\n \n\n println!(\"{}\", error.red());\n\n \n\n },\n\n \n\n None => (),\n\n }\n\n },\n", "file_path": "src/main.rs", "rank": 63, "score": 11.133534670584776 }, { "content": "\n\n fn expression(&mut self) -> ParserResult<Expression> {\n\n self.skip_whitespace()?;\n\n\n\n let expr = self.term()?;\n\n\n\n if expr == Expression::EOF {\n\n return Ok(expr)\n\n }\n\n\n\n if self.traveler.remaining() > 1 {\n\n self.skip_whitespace()?;\n\n if self.traveler.current().token_type == TokenType::Operator {\n\n return self.operation(expr)\n\n }\n\n }\n\n\n\n Ok(expr)\n\n }\n\n\n", "file_path": "src/golf/parser/parser.rs", "rank": 64, "score": 10.832151894548922 }, { "content": " Ok(())\n\n }\n\n\n\n pub fn check_expression(&self, sym: &mut SymTab, expression: &Expression) -> CheckResult<()> {\n\n match *expression {\n\n Expression::Block(ref statements) => {\n\n for s in statements {\n\n self.check_statement(sym, s)?\n\n }\n\n Ok(())\n\n },\n\n\n\n Expression::Identifier(ref id, ref position) => match sym.get_name(&*id) {\n\n None => {\n\n Err(CheckError::new_pos(\"undeclared use\", position.clone()))\n\n },\n\n Some(_) => Ok(())\n\n },\n\n\n\n Expression::Operation(ref operation) => {\n", "file_path": "src/golf/checker/checker.rs", "rank": 65, "score": 10.588230823381185 }, { "content": " lines.next();\n\n }\n\n }\n\n\n\n let source_pos = format!(\"ln {}, cl {}| \", pos.line, pos.col).yellow();\n\n\n\n match lines.next() {\n\n Some(line) => println!(\"{}{}\", source_pos, line),\n\n None => unreachable!(),\n\n }\n\n\n\n let mut error = String::from(\"\");\n\n\n\n for _ in 0 .. 
pos.col + source_pos.len() {\n\n error.push_str(\" \")\n\n }\n\n\n\n error.push_str(\"^ \");\n\n\n\n match *value {\n", "file_path": "src/main.rs", "rank": 66, "score": 10.024618923546745 }, { "content": " } else {\n\n lines.next();\n\n }\n\n }\n\n\n\n let source_pos = format!(\"ln {}, cl {}| \", pos.line, pos.col).yellow();\n\n\n\n match lines.next() {\n\n Some(line) => println!(\"{}{}\", source_pos, line),\n\n None => unreachable!(),\n\n }\n\n\n\n let mut error = String::from(\"\");\n\n\n\n for _ in 0 .. pos.col + source_pos.len() {\n\n error.push_str(\" \")\n\n }\n\n\n\n error.push_str(\"^ \");\n\n\n", "file_path": "src/main.rs", "rank": 67, "score": 9.527792300066052 }, { "content": "use std::rc::Rc;\n\nuse std::cell::RefCell;\n\nuse std::collections::HashMap;\n\n\n\nuse std::fmt;\n\n\n\n#[derive(Clone)]\n\npub struct SymTab {\n\n pub parent: Option<Rc<SymTab>>,\n\n pub names: RefCell<HashMap<String, usize>>,\n\n}\n\n\n\nimpl SymTab {\n\n pub fn new(parent: Rc<SymTab>, names: &[Rc<String>]) -> SymTab {\n\n let mut hash_names = HashMap::new();\n\n\n\n for (i, name) in names.iter().enumerate() {\n\n hash_names.insert((**name).clone(), i);\n\n }\n\n\n", "file_path": "src/golf/checker/symtab.rs", "rank": 68, "score": 9.514129974185291 }, { "content": " match *value {\n\n ParserErrorValue::Constant(ref a) => error.push_str(a),\n\n }\n\n\n\n println!(\"{}\", error.red());\n\n },\n\n\n\n None => (),\n\n }\n\n },\n\n },\n\n Ok(stuff) => {\n\n let mut symtab = SymTab::new_global();\n\n let checker = Checker::new(stuff.clone());\n\n\n\n match checker.check(&mut symtab) {\n\n Err(err) => match err {\n\n CheckError {ref value, ref position} => {\n\n match *position {\n\n Some(ref pos) => {\n", "file_path": "src/main.rs", "rank": 69, "score": 9.447765084468296 }, { "content": " ParserErrorValue::Constant(ref a) => error.push_str(a),\n\n }\n\n\n\n println!(\"{}\", error.red());\n\n },\n\n\n\n None => (),\n\n }\n\n },\n\n },\n\n Ok(stuff) => {\n\n println!(\"{:#?}\", 
stuff);\n\n\n\n let mut symtab = SymTab::new_global();\n\n let checker = Checker::new(stuff.clone());\n\n\n\n match checker.check(&mut symtab) {\n\n Err(err) => match err {\n\n CheckError {ref value, ref position} => {\n\n match *position {\n", "file_path": "src/main.rs", "rank": 70, "score": 9.31775505221361 }, { "content": "use std::rc::Rc;\n\n\n\nuse super::*;\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum Expression {\n\n Block(Vec<Statement>),\n\n Number(f64),\n\n Bool(bool),\n\n Str(Rc<String>),\n\n Char(char),\n\n Identifier(Rc<String>, TokenPosition),\n\n Operation(Operation),\n\n Call(Call),\n\n Index(Index),\n\n Function(Function),\n\n Arm(Arm),\n\n Operand(Operand),\n\n EOF,\n\n}\n", "file_path": "src/golf/parser/ast.rs", "rank": 71, "score": 9.198827008025116 }, { "content": " self.traveler.next();\n\n\n\n let body = Rc::new(self.statement()?);\n\n\n\n self.skip_whitespace()?;\n\n\n\n Ok(Expression::Arm(Arm {params, body, position: self.traveler.current().position}))\n\n }\n\n\n\n fn function(&mut self) -> ParserResult<Expression> {\n\n self.traveler.next();\n\n\n\n self.skip_whitespace()?;\n\n\n\n let mut arms = Vec::new();\n\n\n\n while self.traveler.current_content() != \"}\" { \n\n if self.traveler.current_content() == \"|\" {\n\n arms.push(Statement::Expression(Rc::new(self.arm()?)))\n\n } else {\n", "file_path": "src/golf/parser/parser.rs", "rank": 72, "score": 8.786370272648634 }, { "content": " },\n\n _ => self.try_call(a),\n\n }\n\n } else {\n\n Ok(a)\n\n }\n\n },\n\n\n\n TokenType::Symbol => match self.traveler.current_content().as_str() {\n\n \"(\" => {\n\n self.traveler.next();\n\n \n\n let a = self.expression()?;\n\n\n\n self.skip_whitespace()?;\n\n self.traveler.expect_content(\")\")?;\n\n self.traveler.next();\n\n\n\n if self.traveler.current_content() == \"[\" {\n\n self.index(Rc::new(a))\n", "file_path": "src/golf/parser/parser.rs", "rank": 73, "score": 8.636852814248561 }, { "content": "\n\n fn call(&mut self, caller: 
Expression) -> ParserResult<Expression> {\n\n let mut args = Vec::new();\n\n\n\n let mut acc = 0;\n\n\n\n while self.traveler.current_content() != \"\\n\" {\n\n if self.traveler.current_content() == \",\" {\n\n self.traveler.next();\n\n\n\n let expr = Rc::new(self.expression()?);\n\n\n\n if *expr == Expression::EOF {\n\n break\n\n }\n\n\n\n args.push(expr);\n\n\n\n } else if acc == 0 {\n\n let expr = Rc::new(self.expression()?);\n", "file_path": "src/golf/parser/parser.rs", "rank": 74, "score": 8.582856564429006 }, { "content": " } else if self.traveler.remaining() > 1 {\n\n self.try_call(a)\n\n } else {\n\n Ok(a)\n\n }\n\n }\n\n \"{\" => self.function(),\n\n _ =>{\n\n Err(ParserError::new_pos(self.traveler.current().position, &format!(\"unexpected symbol: {}\", self.traveler.current_content())))\n\n }\n\n },\n\n\n\n _ => Err(ParserError::new_pos(self.traveler.current().position, &format!(\"unexpected: {}\", self.traveler.current_content()))),\n\n }\n\n }\n\n\n\n fn assignment(&mut self, left: Rc<Expression>) -> ParserResult<Statement> {\n\n self.traveler.next();\n\n\n\n if self.traveler.current_content() == \"\\n\" {\n", "file_path": "src/golf/parser/parser.rs", "rank": 75, "score": 8.45582685103432 }, { "content": " }\n\n\n\n Ok(Rc::new(result))\n\n }\n\n\n\n pub fn lua_statement(&self, statement: &Statement) -> TranspileResult<Rc<String>> {\n\n match *statement {\n\n Statement::Expression(ref expression) => self.lua_expression(&expression),\n\n Statement::Assignment(ref assignment) => {\n\n match *assignment.left {\n\n Expression::Identifier(ref id, ref pos) => {\n\n let id = match id.as_str() {\n\n \"while\" |\n\n \"if\" |\n\n \"else\" |\n\n \"elseif\" |\n\n \"do\" |\n\n \"local\" |\n\n \"end\" |\n\n \"for\" |\n", "file_path": "src/golf/transpiler/transpiler.rs", "rank": 76, "score": 8.10406482602888 }, { "content": " a\n\n }\n\n\n\n TokenType::CharLiteral => {\n\n let a = Ok(Expression::Char(self.traveler.current_content().clone().remove(0)));\n\n 
self.traveler.next();\n\n a\n\n }\n\n\n\n TokenType::Identifier => {\n\n let a = Expression::Identifier(Rc::new(self.traveler.current_content().clone()), self.traveler.current().position);\n\n self.traveler.next();\n\n\n\n if self.traveler.remaining() > 1 {\n\n match self.traveler.current_content().as_str() {\n\n \",\" | \")\" => Ok(a),\n\n \"[\" => self.index(Rc::new(a)),\n\n \"!\" => {\n\n self.traveler.next();\n\n Ok(Expression::Call(Call {callee: Rc::new(a), args: vec!(), position: self.traveler.current().position}))\n", "file_path": "src/golf/parser/parser.rs", "rank": 77, "score": 7.690308961074537 }, { "content": " let lexer = lexer(&mut test.chars());\n\n\n\n let traveler = Traveler::new(lexer.collect());\n\n let mut parser = Parser::new(traveler);\n\n\n\n match parser.parse() {\n\n Err(err) => match err {\n\n ParserError {ref value, ref position} => {\n\n match *position {\n\n Some(ref pos) => {\n\n let mut lines = test.lines();\n\n\n\n for i in 0 .. pos.line - 1 {\n\n if i == pos.line - 2 {\n\n let source_pos = format!(\"ln {} | \", pos.line - 1).yellow();\n\n match lines.next() {\n\n Some(line) => println!(\"{}{}\", source_pos, line),\n\n None => unreachable!(),\n\n }\n\n } else {\n", "file_path": "src/main.rs", "rank": 78, "score": 7.6199249384814856 }, { "content": " let mut acc = 1;\n\n\n\n for arg in &call.args {\n\n result.push_str(&*self.lua_expression(&arg)?);\n\n\n\n if acc != call.args.len() {\n\n result.push(',')\n\n }\n\n \n\n acc += 1\n\n }\n\n\n\n result.push(')');\n\n\n\n Ok(Rc::new(result))\n\n },\n\n\n\n Expression::Function(ref function) => {\n\n let mut result = \"setmetatable({}, {\".to_string();\n\n \n", "file_path": "src/golf/transpiler/transpiler.rs", "rank": 79, "score": 7.536615181448154 }, { "content": " if env_index > 0 {\n\n if let Some(ref p) = self.parent {\n\n try!(p.dump(f, env_index - 1));\n\n try!(writeln!(f, \"------------------------------\"));\n\n }\n\n }\n\n\n\n for (i, v) in self.names.borrow().iter().enumerate() 
{\n\n try!(writeln!(f, \"({} : {}) = {:?}\", i, env_index, v))\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl fmt::Debug for SymTab {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n try!(self.dump(f, 0));\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/golf/checker/symtab.rs", "rank": 80, "score": 7.325327538383926 }, { "content": "pub mod lexer;\n\npub mod parser;\n\npub mod checker;\n\npub mod transpiler;\n\n\n\npub use self::lexer::*;\n\npub use self::parser::*;\n\npub use self::checker::*;\n\npub use self::transpiler::*;\n", "file_path": "src/golf/mod.rs", "rank": 81, "score": 7.267589548942415 }, { "content": "\n\n new_index\n\n }\n\n\n\n pub fn get_name(&self, name: &str) -> Option<(usize, usize)> {\n\n self.get_name_internal(name, 0)\n\n }\n\n\n\n fn get_name_internal(&self, name: &str, env_index: usize) -> Option<(usize, usize)> {\n\n if let Some(index) = self.names.borrow().get(name) {\n\n return Some((*index, env_index));\n\n }\n\n\n\n match self.parent {\n\n Some(ref parent) => parent.get_name_internal(name, env_index + 1),\n\n None => None,\n\n }\n\n }\n\n\n\n fn dump(&self, f: &mut fmt::Formatter, env_index: usize) -> fmt::Result {\n", "file_path": "src/golf/checker/symtab.rs", "rank": 82, "score": 7.208633240627264 }, { "content": " let term = self.term()?;\n\n\n\n ex_stack.push(term);\n\n\n\n let mut done = false;\n\n\n\n while ex_stack.len() > 1 {\n\n if !done {\n\n if self.traveler.current().token_type != TokenType::Operator {\n\n done = true;\n\n continue\n\n }\n\n\n\n let (op, precedence) = Operand::from_str(&self.traveler.current_content()).unwrap();\n\n self.traveler.next();\n\n\n\n if precedence >= op_stack.last().unwrap().1 {\n\n let left = ex_stack.pop().unwrap();\n\n let right = ex_stack.pop().unwrap();\n\n\n", "file_path": "src/golf/parser/parser.rs", "rank": 83, "score": 7.2077502359046335 }, { "content": "\n\n Ok(Rc::new(result))\n\n },\n\n\n\n Expression::Arm(ref arm) => {\n\n let mut result = format!(\"if {} == 
#__args then\\n\", arm.params.len() + 1);\n\n \n\n let mut acc = 2;\n\n \n\n for p in &arm.params {\n\n match **p {\n\n ref c @ Expression::Identifier(_, _) => {\n\n match *c {\n\n Expression::Identifier(ref id, _) if !Operand::from_str(id).is_some() => {\n\n result.push_str(&format!(\"local {} = __args[{}]\\n\", self.lua_expression(&c)?, acc))\n\n },\n\n \n\n _ => (),\n\n }\n\n }\n", "file_path": "src/golf/transpiler/transpiler.rs", "rank": 84, "score": 7.204217018319506 }, { "content": "\n\n if flag {\n\n match *arm.body {\n\n Statement::Expression(ref e) => result.push_str(&format!(\"return {}\\n\", self.lua_expression(&e)?)),\n\n _ => (),\n\n }\n\n }\n\n\n\n result.push_str(\"end\\n\");\n\n\n\n Ok(Rc::new(result))\n\n }\n\n\n\n _ => Ok(Rc::new(String::new())),\n\n }\n\n }\n\n}\n", "file_path": "src/golf/transpiler/transpiler.rs", "rank": 85, "score": 7.004700655055485 }, { "content": " _ => (),\n\n }\n\n\n\n acc += 1\n\n }\n\n \n\n acc = 2;\n\n\n\n let mut flag = true;\n\n\n\n for p in &arm.params {\n\n match **p {\n\n ref c @ Expression::Number(_) |\n\n ref c @ Expression::Bool(_) |\n\n ref c @ Expression::Char(_) |\n\n ref c @ Expression::Operation { .. 
} |\n\n ref c @ Expression::Str(_) => {\n\n flag = false;\n\n\n\n result.push_str(&format!(\"if {} == __args[{}] then\\n\", self.lua_expression(c)?, acc));\n", "file_path": "src/golf/transpiler/transpiler.rs", "rank": 86, "score": 6.944372041118643 }, { "content": " Call {\n\n callee: Rc::new(caller),\n\n args,\n\n position: self.traveler.current().position\n\n }\n\n )\n\n )\n\n }\n\n\n\n fn operation(&mut self, expression: Expression) -> ParserResult<Expression> {\n\n let mut ex_stack = vec![expression];\n\n let mut op_stack: Vec<(Operand, u8)> = Vec::new();\n\n\n\n op_stack.push(Operand::from_str(&self.traveler.current_content()).unwrap());\n\n self.traveler.next();\n\n\n\n if self.traveler.current_content() == \"\\n\" {\n\n self.traveler.next();\n\n }\n\n\n", "file_path": "src/golf/parser/parser.rs", "rank": 87, "score": 6.692465523936274 }, { "content": " }\n\n }\n\n \n\n pub fn to_string(&self) -> String {\n\n match *self {\n\n Operand::Pow => \"^\".to_string(),\n\n Operand::Mul => \"*\".to_string(),\n\n Operand::Div => \"/\".to_string(),\n\n Operand::Mod => \"%\".to_string(),\n\n Operand::Add => \"+\".to_string(),\n\n Operand::Sub => \"-\".to_string(),\n\n Operand::Equal => \"==\".to_string(),\n\n Operand::NEqual => \"~=\".to_string(),\n\n Operand::Lt => \"<\".to_string(),\n\n Operand::Gt => \">\".to_string(),\n\n Operand::LtEqual => \"<=\".to_string(),\n\n Operand::GtEqual => \">=\".to_string(),\n\n Operand::Combine => \"..\".to_string(),\n\n _ => unreachable!(),\n\n }\n\n }\n\n}\n", "file_path": "src/golf/parser/ast.rs", "rank": 88, "score": 6.658417049128249 }, { "content": "extern crate colored;\n\nuse colored::*;\n\n\n\nmod golf;\n\nuse golf::*;\n\n\n\nuse std::io::prelude::*;\n\nuse std::error::Error;\n\n\n\nuse std::fs;\n\nuse std::fs::File;\n\nuse std::fs::metadata;\n\n\n\nuse std::rc::Rc;\n\n\n\nuse std::env;\n\nuse std::path::Path;\n\n\n", "file_path": "src/main.rs", "rank": 89, "score": 6.369673290665348 }, { "content": " match *s {\n\n 
Statement::Expression(ref e) => match **e {\n\n Expression::Arm(_) => (),\n\n _ => result.push_str(\"return \"),\n\n },\n\n _ => (),\n\n }\n\n }\n\n\n\n acc += 1;\n\n result.push_str(&*self.lua_statement(s)?);\n\n result.push('\\n');\n\n },\n\n\n\n _ => unreachable!(),\n\n }\n\n\n\n result.push_str(\"end,\\n\");\n\n\n\n result.push_str(\"})\");\n", "file_path": "src/golf/transpiler/transpiler.rs", "rank": 90, "score": 6.167541416188028 }, { "content": " _ => ()\n\n }\n\n }\n\n },\n\n _ => (),\n\n },\n\n _ => (),\n\n }\n\n },\n\n\n\n _ => unreachable!(),\n\n }\n\n\n\n result.push_str(\"__call = function(...)\\nlocal __args = {...}\\n\");\n\n\n\n let mut acc = 1;\n\n\n\n match *function.arms {\n\n Expression::Block(ref statements) => for s in statements {\n\n if acc == statements.len() {\n", "file_path": "src/golf/transpiler/transpiler.rs", "rank": 91, "score": 6.161513087520502 }, { "content": " let a = Expression::Char(self.traveler.current_content().clone().remove(0));\n\n self.traveler.next();\n\n a\n\n }\n\n\n\n TokenType::Identifier => {\n\n let a = Expression::Identifier(Rc::new(self.traveler.current_content().clone()), self.traveler.current().position);\n\n self.traveler.next();\n\n a\n\n },\n\n\n\n _ => return Err(ParserError::new_pos(self.traveler.current().position, &format!(\"unexpected parameter: {}\", self.traveler.current_content()))),\n\n };\n\n\n\n params.push(Rc::new(a));\n\n \n\n acc += 1\n\n }\n\n\n\n self.traveler.expect_content(\"|\")?;\n", "file_path": "src/golf/parser/parser.rs", "rank": 92, "score": 6.141051227148127 }, { "content": " SymTab {\n\n parent: Some(parent),\n\n names: RefCell::new(hash_names),\n\n }\n\n }\n\n\n\n pub fn new_global() -> SymTab {\n\n SymTab {\n\n parent: None,\n\n names: RefCell::new(HashMap::new()),\n\n }\n\n }\n\n\n\n pub fn add_name(&self, name: &str) -> usize {\n\n if let Some(index) = self.names.borrow().get(name) {\n\n return *index\n\n }\n\n\n\n let new_index = self.names.borrow().len();\n\n 
self.names.borrow_mut().insert(name.to_string(), new_index);\n", "file_path": "src/golf/checker/symtab.rs", "rank": 93, "score": 6.134440834400166 }, { "content": " for arg in &call.args {\n\n self.check_expression(sym, &arg)?\n\n }\n\n\n\n Ok(())\n\n },\n\n\n\n _ => Ok(())\n\n }\n\n }\n\n\n\n pub fn check_statement(&self, sym: &mut SymTab, statement: &Statement) -> CheckResult<()> {\n\n match *statement {\n\n Statement::Expression(ref expression) => self.check_expression(sym, &expression)?, \n\n Statement::Assignment(ref assignment) => {\n\n match *assignment.left {\n\n Expression::Identifier(ref name, _) => {\n\n sym.add_name(name);\n\n self.check_expression(sym, &assignment.right)?\n\n },\n", "file_path": "src/golf/checker/checker.rs", "rank": 94, "score": 6.114299051643037 }, { "content": "\n\n if *expr == Expression::EOF {\n\n break\n\n }\n\n\n\n args.push(expr);\n\n\n\n } else {\n\n self.traveler.prev();\n\n if self.traveler.current_content() != \"!\" || self.traveler.current_content() != \",\" {\n\n self.traveler.next();\n\n }\n\n break\n\n }\n\n\n\n acc += 1\n\n }\n\n\n\n Ok(\n\n Expression::Call(\n", "file_path": "src/golf/parser/parser.rs", "rank": 95, "score": 5.983066035978288 }, { "content": " let mut lines = s.lines();\n\n\n\n for i in 0 .. 
pos.line - 1 {\n\n if i == pos.line - 2 {\n\n let source_pos = format!(\" | \").yellow();\n\n match lines.next() {\n\n Some(line) => println!(\"{}{}\", source_pos, line),\n\n None => unreachable!(),\n\n }\n\n } else {\n\n lines.next();\n\n }\n\n }\n\n\n\n let source_pos = format!(\"ln {}, cl {}| \", pos.line, pos.col).yellow();\n\n\n\n match lines.next() {\n\n Some(line) => println!(\"{}{}\", source_pos, line),\n\n None => unreachable!(),\n\n }\n", "file_path": "src/main.rs", "rank": 96, "score": 5.952905824359266 }, { "content": " pub position: TokenPosition,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct Function {\n\n pub arms: Rc<Expression>,\n\n pub position: TokenPosition,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct Arm {\n\n pub params: Vec<Rc<Expression>>,\n\n pub body: Rc<Statement>,\n\n pub position: TokenPosition,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum Statement {\n\n Expression(Rc<Expression>),\n\n Assignment(Assignment),\n", "file_path": "src/golf/parser/ast.rs", "rank": 97, "score": 5.8786929846432345 }, { "content": " \"then\" => format!(\"_{}\", id),\n\n _ => format!(\"{}\", id),\n\n };\n\n\n\n let left = Expression::Identifier(Rc::new(id), *pos);\n\n\n\n let result = format!(\"local {}={}\\n\", self.lua_expression(&left)?, self.lua_expression(&assignment.right)?);\n\n Ok(Rc::new(result))\n\n },\n\n\n\n _ => {\n\n let result = format!(\"local {}={}\\n\", self.lua_expression(&assignment.left)?, self.lua_expression(&assignment.right)?);\n\n Ok(Rc::new(result))\n\n },\n\n }\n\n },\n\n }\n\n }\n\n\n\n pub fn lua_expression(&self, expression: &Expression) -> TranspileResult<Rc<String>> {\n", "file_path": "src/golf/transpiler/transpiler.rs", "rank": 98, "score": 5.8671428533448555 }, { "content": "impl Operand {\n\n pub fn from_str(v: &str) -> Option<(Operand, u8)> {\n\n match v {\n\n \"^\" => Some((Operand::Pow, 0)),\n\n \"*\" => Some((Operand::Mul, 1)),\n\n \"/\" => Some((Operand::Div, 1)),\n\n \"%\" => 
Some((Operand::Mod, 1)),\n\n \"+\" => Some((Operand::Add, 2)),\n\n \"-\" => Some((Operand::Sub, 2)),\n\n \"==\" => Some((Operand::Equal, 3)),\n\n \"~=\" => Some((Operand::NEqual, 3)),\n\n \"<\" => Some((Operand::Lt, 4)),\n\n \">\" => Some((Operand::Gt, 4)),\n\n \"<=\" => Some((Operand::LtEqual, 4)),\n\n \">=\" => Some((Operand::GtEqual, 4)),\n\n \".\" => Some((Operand::Combine, 5)),\n\n \"++\" => Some((Operand::Concat, 5)),\n\n \"<|\" => Some((Operand::PipeLeft, 5)),\n\n \"|>\" => Some((Operand::PipeRight, 5)),\n\n _ => None,\n", "file_path": "src/golf/parser/ast.rs", "rank": 99, "score": 5.79374443549357 } ]
Rust
src/experimental/type_graph_builder.rs
Cypher1/Tako
a709df240ce27714fe5474419236049ac979dab5
use crate::ast::{ path_to_string, Abs, Apply, BinOp, HasInfo, Let, Path, Sym, Symbol, UnOp, Visitor, }; use crate::database::DBStorage; use crate::errors::TError; use crate::passes::ast_interpreter::Interpreter; use crate::primitives::{ bit_type, i32_type, string_type, Pack, Prim::{Bool, Str, I32}, Val, Val::{App, Function, Lambda, PrimVal, Product, Struct, Union, Variable, WithRequirement}, }; use log::{debug, info}; use std::collections::BTreeSet; use crate::experimental::type_graph::TypeGraph; #[derive(Default)] pub struct TypeGraphBuilder {} type Res = Result<Val, TError>; #[derive(Debug, Clone)] pub struct State { pub graph: TypeGraph, pub path: Path, } impl Visitor<State, Val, TypeGraph, Path> for TypeGraphBuilder { fn visit_root(&mut self, storage: &mut DBStorage, module: &Path) -> Result<TypeGraph, TError> { let (expr, _entity) = &storage.parse_file(module)?; info!( "Building symbol table & type graph... {}", path_to_string(module) ); let mut state = State { path: module.clone(), graph: TypeGraph::default(), }; let ty = self.visit(storage, &mut state, expr)?; state.graph.require_assignable(&state.path, &ty)?; Ok(state.graph) } fn visit_sym(&mut self, _storage: &mut DBStorage, state: &mut State, expr: &Sym) -> Res { debug!( "visiting sym {} {}", path_to_string(&state.path), &expr.name ); Ok(Variable(format!("typeof({})", expr.name))) } fn visit_val(&mut self, storage: &mut DBStorage, state: &mut State, expr: &Val) -> Res { match expr { Product(vals) => { let mut tys: BTreeSet<Val> = set![]; for val in vals.iter() { tys.insert(self.visit_val(storage, state, val)?); } Ok(Product(tys)) } Union(vals) => { let mut tys: BTreeSet<Val> = set![]; for val in vals.iter() { tys.insert(self.visit_val(storage, state, val)?); } Ok(Union(tys)) } PrimVal(I32(_)) => Ok(i32_type()), PrimVal(Bool(_)) => Ok(bit_type()), PrimVal(Str(_)) => Ok(string_type()), Lambda(node) => { state.path.push(Symbol::Anon); let ty = self.visit(storage, state, node); state.path.pop(); ty } 
Struct(vals) => { let mut tys: Vec<(String, Val)> = vec![]; for val in vals.iter() { tys.push((val.0.clone(), self.visit_val(storage, state, &val.1)?)); } Ok(Struct(tys)) } _ty => Ok(Val::Variable("Type".to_string())), } } fn visit_apply(&mut self, storage: &mut DBStorage, state: &mut State, expr: &Apply) -> Res { state.path.push(Symbol::Anon); let mut arg_tys = vec![]; for arg in &expr.args { let ty = self.visit_let(storage, state, arg)?; arg_tys.push((arg.name.clone(), ty)); } let result_ty = self.visit(storage, state, &*expr.inner)?; state.path.pop(); Ok(App { inner: Box::new(result_ty), arguments: Box::new(Struct(arg_tys)), }) } fn visit_abs(&mut self, storage: &mut DBStorage, state: &mut State, expr: &Abs) -> Res { debug!("visiting {} {}", path_to_string(&state.path), &expr.name); Ok(WithRequirement( Box::new(self.visit(storage, state, &expr.value)?), vec![expr.name.clone()], )) } fn visit_let(&mut self, storage: &mut DBStorage, state: &mut State, expr: &Let) -> Res { debug!("visiting {} {}", path_to_string(&state.path), &expr.name); let path_name = Symbol::new(&expr.name); state.path.push(path_name); let args = if let Some(args) = &expr.args { let mut arg_tys = vec![]; for arg in args { let ty = self.visit_let(storage, state, arg)?; arg_tys.push((arg.name.clone(), ty)); } Some(arg_tys) } else { None }; let val_ty = self.visit(storage, state, &expr.value)?; let ty = args.map_or(val_ty.clone(), |args| Function { intros: Pack::new(), arguments: Box::new(Struct(args)), results: Box::new(val_ty), }); state.graph.require_assignable(&state.path, &ty)?; state.path.pop(); Ok(Struct(vec![(expr.name.clone(), ty)])) } fn visit_un_op(&mut self, storage: &mut DBStorage, state: &mut State, expr: &UnOp) -> Res { let op = storage .get_extern(&expr.name) .expect("operator should exist"); let ty = &op.ty; let arg_ty = self.visit(storage, state, &expr.inner)?; debug!("un op {}: {}, {}", &expr.name, ty, &arg_ty); Interpreter::default().visit_apply( storage, &mut vec![], &Apply { 
inner: Box::new(ty.clone()), args: vec![Let::new("it", arg_ty)], info: expr.get_info().clone(), }, ) } fn visit_bin_op(&mut self, storage: &mut DBStorage, state: &mut State, expr: &BinOp) -> Res { let op = storage .get_extern(&expr.name) .expect("operator should exist"); let ty = &op.ty; let left_ty = self.visit(storage, state, &expr.left)?; let right_ty = self.visit(storage, state, &expr.right)?; debug!("bin op {}: {}, {} {}", &expr.name, ty, &left_ty, &right_ty); Interpreter::default().visit_apply( storage, &mut vec![], &Apply { inner: Box::new(ty.clone()), args: vec![Let::new("left", left_ty), Let::new("right", right_ty)], info: expr.get_info().clone(), }, ) } } #[cfg(test)] mod tests { use super::*; use crate::ast::Symbol; use crate::errors::TError; use crate::experimental::type_graph::TypeGraph; use crate::primitives::{i32_type, string_type}; use pretty_assertions::assert_eq; type Test = Result<(), TError>; fn filename() -> String { module_root().to_filename() } fn module_root() -> Symbol { Symbol::Named("test".to_owned(), Some("tk".to_owned())) } fn get_tg(s: &str) -> Result<TypeGraph, TError> { let mut storage = DBStorage::default(); storage.set_file(&filename(), s.to_string()); let module = storage.module_name(&filename()); let mut tgb = TypeGraphBuilder::default(); let tg: TypeGraph = tgb.visit_root(&mut storage, &module)?; Ok(tg) } #[test] fn type_of_int_literal_is_i32() -> Test { let tg = &mut get_tg("0")?; assert_eq!(tg.get_type(&[module_root()])?, i32_type()); Ok(()) } #[test] fn type_of_variable_of_int_literal_is_i32() -> Test { let tg = &mut get_tg("x=0")?; assert_eq!( tg.get_type(&[module_root(), Symbol::new("x")])?, i32_type(), "x has type i32" ); assert_eq!( tg.get_type(&[module_root()])?, rec!("x" => i32_type()), "program has type i32" ); Ok(()) } #[test] fn type_of_struct_int_and_string() -> Test { let tg = &mut get_tg("x=0, y='hi'")?; assert_eq!( tg.get_type(&[module_root(), Symbol::new("x")])?, i32_type(), "x has type i32" ); assert_eq!( 
tg.get_type(&[module_root(), Symbol::new("y")])?, string_type(), "y has type str" ); assert_eq!( format!("{}", tg.get_type(&[module_root()])?), format!( "{}", Product(set![rec!("x" => i32_type()), rec!("y" => string_type())]) ), "program has struct type {{x: i32, y: string}}" ); Ok(()) } }
use crate::ast::{ path_to_string, Abs, Apply, BinOp, HasInfo, Let, Path, Sym, Symbol, UnOp, Visitor, }; use crate::database::DBStorage; use crate::errors::TError; use crate::passes::ast_interpreter::Interpreter; use crate::primitives::{ bit_type, i32_type, string_type, Pack, Prim::{Bool, Str, I32}, Val, Val::{App, Function, Lambda, PrimVal, Product, Struct, Union, Variable, WithRequirement}, }; use log::{debug, info}; use std::collections::BTreeSet; use crate::experimental::type_graph::TypeGraph; #[derive(Default)] pub struct TypeGraphBuilder {} type Res = Result<Val, TError>; #[derive(Debug, Clone)] pub struct State { pub graph: TypeGraph, pub path: Path, } impl Visitor<State, Val, TypeGraph, Path> for TypeGraphBuilder { fn visit_root(&mut self, storage: &mut DBStorage, module: &Path) -> Result<TypeGraph, TError> { let (expr, _entity) = &storage.parse_file(m
fn visit_sym(&mut self, _storage: &mut DBStorage, state: &mut State, expr: &Sym) -> Res { debug!( "visiting sym {} {}", path_to_string(&state.path), &expr.name ); Ok(Variable(format!("typeof({})", expr.name))) } fn visit_val(&mut self, storage: &mut DBStorage, state: &mut State, expr: &Val) -> Res { match expr { Product(vals) => { let mut tys: BTreeSet<Val> = set![]; for val in vals.iter() { tys.insert(self.visit_val(storage, state, val)?); } Ok(Product(tys)) } Union(vals) => { let mut tys: BTreeSet<Val> = set![]; for val in vals.iter() { tys.insert(self.visit_val(storage, state, val)?); } Ok(Union(tys)) } PrimVal(I32(_)) => Ok(i32_type()), PrimVal(Bool(_)) => Ok(bit_type()), PrimVal(Str(_)) => Ok(string_type()), Lambda(node) => { state.path.push(Symbol::Anon); let ty = self.visit(storage, state, node); state.path.pop(); ty } Struct(vals) => { let mut tys: Vec<(String, Val)> = vec![]; for val in vals.iter() { tys.push((val.0.clone(), self.visit_val(storage, state, &val.1)?)); } Ok(Struct(tys)) } _ty => Ok(Val::Variable("Type".to_string())), } } fn visit_apply(&mut self, storage: &mut DBStorage, state: &mut State, expr: &Apply) -> Res { state.path.push(Symbol::Anon); let mut arg_tys = vec![]; for arg in &expr.args { let ty = self.visit_let(storage, state, arg)?; arg_tys.push((arg.name.clone(), ty)); } let result_ty = self.visit(storage, state, &*expr.inner)?; state.path.pop(); Ok(App { inner: Box::new(result_ty), arguments: Box::new(Struct(arg_tys)), }) } fn visit_abs(&mut self, storage: &mut DBStorage, state: &mut State, expr: &Abs) -> Res { debug!("visiting {} {}", path_to_string(&state.path), &expr.name); Ok(WithRequirement( Box::new(self.visit(storage, state, &expr.value)?), vec![expr.name.clone()], )) } fn visit_let(&mut self, storage: &mut DBStorage, state: &mut State, expr: &Let) -> Res { debug!("visiting {} {}", path_to_string(&state.path), &expr.name); let path_name = Symbol::new(&expr.name); state.path.push(path_name); let args = if let Some(args) = 
&expr.args { let mut arg_tys = vec![]; for arg in args { let ty = self.visit_let(storage, state, arg)?; arg_tys.push((arg.name.clone(), ty)); } Some(arg_tys) } else { None }; let val_ty = self.visit(storage, state, &expr.value)?; let ty = args.map_or(val_ty.clone(), |args| Function { intros: Pack::new(), arguments: Box::new(Struct(args)), results: Box::new(val_ty), }); state.graph.require_assignable(&state.path, &ty)?; state.path.pop(); Ok(Struct(vec![(expr.name.clone(), ty)])) } fn visit_un_op(&mut self, storage: &mut DBStorage, state: &mut State, expr: &UnOp) -> Res { let op = storage .get_extern(&expr.name) .expect("operator should exist"); let ty = &op.ty; let arg_ty = self.visit(storage, state, &expr.inner)?; debug!("un op {}: {}, {}", &expr.name, ty, &arg_ty); Interpreter::default().visit_apply( storage, &mut vec![], &Apply { inner: Box::new(ty.clone()), args: vec![Let::new("it", arg_ty)], info: expr.get_info().clone(), }, ) } fn visit_bin_op(&mut self, storage: &mut DBStorage, state: &mut State, expr: &BinOp) -> Res { let op = storage .get_extern(&expr.name) .expect("operator should exist"); let ty = &op.ty; let left_ty = self.visit(storage, state, &expr.left)?; let right_ty = self.visit(storage, state, &expr.right)?; debug!("bin op {}: {}, {} {}", &expr.name, ty, &left_ty, &right_ty); Interpreter::default().visit_apply( storage, &mut vec![], &Apply { inner: Box::new(ty.clone()), args: vec![Let::new("left", left_ty), Let::new("right", right_ty)], info: expr.get_info().clone(), }, ) } } #[cfg(test)] mod tests { use super::*; use crate::ast::Symbol; use crate::errors::TError; use crate::experimental::type_graph::TypeGraph; use crate::primitives::{i32_type, string_type}; use pretty_assertions::assert_eq; type Test = Result<(), TError>; fn filename() -> String { module_root().to_filename() } fn module_root() -> Symbol { Symbol::Named("test".to_owned(), Some("tk".to_owned())) } fn get_tg(s: &str) -> Result<TypeGraph, TError> { let mut storage = 
DBStorage::default(); storage.set_file(&filename(), s.to_string()); let module = storage.module_name(&filename()); let mut tgb = TypeGraphBuilder::default(); let tg: TypeGraph = tgb.visit_root(&mut storage, &module)?; Ok(tg) } #[test] fn type_of_int_literal_is_i32() -> Test { let tg = &mut get_tg("0")?; assert_eq!(tg.get_type(&[module_root()])?, i32_type()); Ok(()) } #[test] fn type_of_variable_of_int_literal_is_i32() -> Test { let tg = &mut get_tg("x=0")?; assert_eq!( tg.get_type(&[module_root(), Symbol::new("x")])?, i32_type(), "x has type i32" ); assert_eq!( tg.get_type(&[module_root()])?, rec!("x" => i32_type()), "program has type i32" ); Ok(()) } #[test] fn type_of_struct_int_and_string() -> Test { let tg = &mut get_tg("x=0, y='hi'")?; assert_eq!( tg.get_type(&[module_root(), Symbol::new("x")])?, i32_type(), "x has type i32" ); assert_eq!( tg.get_type(&[module_root(), Symbol::new("y")])?, string_type(), "y has type str" ); assert_eq!( format!("{}", tg.get_type(&[module_root()])?), format!( "{}", Product(set![rec!("x" => i32_type()), rec!("y" => string_type())]) ), "program has struct type {{x: i32, y: string}}" ); Ok(()) } }
odule)?; info!( "Building symbol table & type graph... {}", path_to_string(module) ); let mut state = State { path: module.clone(), graph: TypeGraph::default(), }; let ty = self.visit(storage, &mut state, expr)?; state.graph.require_assignable(&state.path, &ty)?; Ok(state.graph) }
function_block-function_prefixed
[ { "content": "pub fn infer(storage: &mut DBStorage, expr: &Node, env: &Val) -> Result<Val, TError> {\n\n // Infer that expression t has type A, t => A\n\n // See https://ncatlab.org/nlab/show/bidirectional+typechecking\n\n use crate::ast::{Abs, Apply, BinOp, Let, Sym, ToNode, UnOp, Visitor};\n\n match expr {\n\n ValNode(prim, _) => match prim {\n\n Product(vals) => {\n\n let mut tys: BTreeSet<Val> = set![];\n\n for val in vals.iter() {\n\n tys.insert(infer(storage, &val.clone().into_node(), env)?);\n\n }\n\n Ok(Product(tys))\n\n }\n\n Union(vals) => {\n\n let mut tys: BTreeSet<Val> = set![];\n\n for val in vals.iter() {\n\n tys.insert(infer(storage, &val.clone().into_node(), env)?);\n\n }\n\n Ok(Union(tys))\n\n }\n", "file_path": "src/passes/type_checker.rs", "rank": 0, "score": 278075.8258400591 }, { "content": "#[must_use]\n\npub fn variable(name: &str) -> Val {\n\n Variable(name.to_string())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n type Res = Result<(), TError>;\n\n\n\n #[test]\n\n fn bits_zero_length() {\n\n assert_eq!(bits(0, 0), bits![]);\n\n assert_eq!(bits(1, 0), bits![]);\n\n }\n\n\n\n #[test]\n\n fn bits_one_length() {\n\n assert_eq!(bits(0, 1), bits![0]);\n\n assert_eq!(bits(1, 1), bits![1]);\n", "file_path": "src/primitives.rs", "rank": 1, "score": 246485.08722340397 }, { "content": "pub fn prim_type_or(l: Val, r: Val, _info: &Info) -> Res {\n\n Ok(Val::Union(set!(l, r)))\n\n}\n\n\n", "file_path": "src/externs.rs", "rank": 2, "score": 240839.91270641133 }, { "content": "#[must_use]\n\npub fn i32_type() -> Val {\n\n record(vec![byte_type(); 4]).expect(\"i32 should be safe\")\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 3, "score": 238184.92929462326 }, { "content": "pub fn prim_type_arrow(l: Val, r: Val, _info: &Info) -> Res {\n\n // TODO: add existential and forall quantification operators\n\n Ok(Val::Function {\n\n intros: dict!(),\n\n results: Box::new(r),\n\n arguments: Box::new(l),\n\n })\n\n}\n\n\n", 
"file_path": "src/externs.rs", "rank": 4, "score": 236130.31455366005 }, { "content": "fn get_symbol(args: &Args, sym: &str, info: &Info) -> Res {\n\n args.get(sym).map_or_else(\n\n || {\n\n Err(TError::UnknownSymbol(\n\n sym.to_string(),\n\n info.clone(),\n\n \"\".to_string(),\n\n ))\n\n },\n\n |val| val(),\n\n )\n\n}\n\n\n", "file_path": "src/externs.rs", "rank": 5, "score": 233204.63666319873 }, { "content": "pub fn prim_and(l: &Val, r: &Val, info: &Info) -> Res {\n\n match (l, r) {\n\n (PrimVal(Bool(l)), PrimVal(Bool(r))) => Ok(boolean(*l && *r)),\n\n (l, r) => Err(TError::TypeMismatch2(\n\n \"&&\".to_string(),\n\n Box::new((*l).clone()),\n\n Box::new((*r).clone()),\n\n info.clone(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/externs.rs", "rank": 6, "score": 225061.3359082098 }, { "content": "pub fn prim_or(l: &Val, r: &Val, info: &Info) -> Res {\n\n match (l, r) {\n\n (PrimVal(Bool(l)), PrimVal(Bool(r))) => Ok(boolean(*l || *r)),\n\n (l, r) => Err(TError::TypeMismatch2(\n\n \"||\".to_string(),\n\n Box::new((*l).clone()),\n\n Box::new((*r).clone()),\n\n info.clone(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/externs.rs", "rank": 7, "score": 225061.3359082098 }, { "content": "pub fn prim_eq(l: &Val, r: &Val, info: &Info) -> Res {\n\n match (l, r) {\n\n (PrimVal(Bool(l)), PrimVal(Bool(r))) => Ok(boolean(*l == *r)),\n\n (PrimVal(I32(l)), PrimVal(I32(r))) => Ok(boolean(l == r)),\n\n (PrimVal(Str(l)), PrimVal(Str(r))) => Ok(boolean(l == r)),\n\n (l, r) => Err(TError::TypeMismatch2(\n\n \"==\".to_string(),\n\n Box::new((*l).clone()),\n\n Box::new((*r).clone()),\n\n info.clone(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/externs.rs", "rank": 8, "score": 221072.87476805385 }, { "content": "pub fn prim_sub(l: &Val, r: &Val, info: &Info) -> Res {\n\n match (l, r) {\n\n (PrimVal(I32(l)), PrimVal(Bool(r))) => Ok(int32(l - if *r { 1 } else { 0 })),\n\n (PrimVal(I32(l)), PrimVal(I32(r))) => Ok(int32(l - r)),\n\n (l, r) => Err(TError::TypeMismatch2(\n\n 
\"-\".to_string(),\n\n Box::new((*l).clone()),\n\n Box::new((*r).clone()),\n\n info.clone(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/externs.rs", "rank": 9, "score": 221072.87476805385 }, { "content": "pub fn prim_gte(l: &Val, r: &Val, info: &Info) -> Res {\n\n match (l, r) {\n\n (PrimVal(Bool(l)), PrimVal(Bool(r))) => Ok(boolean(*l >= *r)),\n\n (PrimVal(I32(l)), PrimVal(I32(r))) => Ok(boolean(l >= r)),\n\n (PrimVal(Str(l)), PrimVal(Str(r))) => Ok(boolean(l >= r)),\n\n (l, r) => Err(TError::TypeMismatch2(\n\n \">=\".to_string(),\n\n Box::new((*l).clone()),\n\n Box::new((*r).clone()),\n\n info.clone(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/externs.rs", "rank": 10, "score": 221072.87476805385 }, { "content": "pub fn prim_gt(l: &Val, r: &Val, info: &Info) -> Res {\n\n match (l, r) {\n\n (PrimVal(Bool(l)), PrimVal(Bool(r))) => Ok(boolean(*l && !(*r))),\n\n (PrimVal(I32(l)), PrimVal(I32(r))) => Ok(boolean(l > r)),\n\n (PrimVal(Str(l)), PrimVal(Str(r))) => Ok(boolean(l > r)),\n\n (l, r) => Err(TError::TypeMismatch2(\n\n \">\".to_string(),\n\n Box::new((*l).clone()),\n\n Box::new((*r).clone()),\n\n info.clone(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/externs.rs", "rank": 11, "score": 221072.87476805388 }, { "content": "pub fn prim_neq(l: &Val, r: &Val, info: &Info) -> Res {\n\n match (l, r) {\n\n (PrimVal(Bool(l)), PrimVal(Bool(r))) => Ok(boolean(*l != *r)),\n\n (PrimVal(I32(l)), PrimVal(I32(r))) => Ok(boolean(l != r)),\n\n (PrimVal(Str(l)), PrimVal(Str(r))) => Ok(boolean(l != r)),\n\n (l, r) => Err(TError::TypeMismatch2(\n\n \"!=\".to_string(),\n\n Box::new((*l).clone()),\n\n Box::new((*r).clone()),\n\n info.clone(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/externs.rs", "rank": 12, "score": 221072.87476805385 }, { "content": "pub fn prim_mod(l: &Val, r: &Val, info: &Info) -> Res {\n\n match (l, r) {\n\n (PrimVal(I32(l)), PrimVal(I32(r))) => Ok(int32(l % r)),\n\n (l, r) => Err(TError::TypeMismatch2(\n\n \"%\".to_string(),\n\n 
Box::new((*l).clone()),\n\n Box::new((*r).clone()),\n\n info.clone(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/externs.rs", "rank": 13, "score": 221072.87476805385 }, { "content": "pub fn prim_div(l: &Val, r: &Val, info: &Info) -> Res {\n\n match (l, r) {\n\n (PrimVal(I32(l)), PrimVal(I32(r))) => Ok(int32(l / r)),\n\n (l, r) => Err(TError::TypeMismatch2(\n\n \"/\".to_string(),\n\n Box::new((*l).clone()),\n\n Box::new((*r).clone()),\n\n info.clone(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/externs.rs", "rank": 14, "score": 221072.87476805385 }, { "content": "pub fn prim_mul(l: &Val, r: &Val, info: &Info) -> Res {\n\n use crate::primitives::record;\n\n let fail = || {\n\n Err(TError::TypeMismatch2(\n\n \"*\".to_string(),\n\n Box::new((*l).clone()),\n\n Box::new((*r).clone()),\n\n info.clone(),\n\n ))\n\n };\n\n match (l, r) {\n\n (PrimVal(Bool(l)), PrimVal(I32(r))) => Ok(int32(if *l { *r } else { 0 })),\n\n (PrimVal(Bool(l)), PrimVal(Str(r))) => Ok(string(if *l { r } else { \"\" })),\n\n (PrimVal(I32(l)), PrimVal(Bool(r))) => Ok(int32(if *r { *l } else { 0 })),\n\n (PrimVal(Str(l)), PrimVal(Bool(r))) => Ok(string(if *r { l } else { \"\" })),\n\n (PrimVal(Bool(_)), PrimVal(_)) | (PrimVal(_), PrimVal(Bool(_))) => fail(),\n\n (PrimVal(I32(l)), PrimVal(I32(r))) => Ok(int32(l.wrapping_mul(*r))),\n\n (l, r) => Ok(record(vec![l.clone(), r.clone()])?),\n\n }\n\n}\n\n\n", "file_path": "src/externs.rs", "rank": 15, "score": 221072.87476805388 }, { "content": "pub fn prim_pow(l: &Val, r: &Val, info: &Info) -> Res {\n\n match (l, r) {\n\n (PrimVal(I32(l)), PrimVal(Bool(r))) => Ok(int32(if *r { *l } else { 1 })),\n\n (PrimVal(I32(l)), PrimVal(I32(r))) => Ok(int32(i32::pow(\n\n *l,\n\n u32::try_from(*r).map_err(|_err| {\n\n TError::TypeMismatch2(\n\n \"^ (range error)\".to_string(),\n\n Box::new(PrimVal(I32(*l))),\n\n Box::new(PrimVal(I32(*r))),\n\n info.clone(),\n\n )\n\n })?,\n\n ))), // TODO: require pos pow\n\n (l, r) => Err(TError::TypeMismatch2(\n\n 
\"^\".to_string(),\n\n Box::new((*l).clone()),\n\n Box::new((*r).clone()),\n\n info.clone(),\n\n )),\n\n }\n\n}\n\n\n\npub type Args = HashMap<String, Box<dyn Fn() -> Res>>;\n\npub type FuncImpl = Box<dyn Fn(&DBStorage, Args, &Info) -> Res>;\n\n\n", "file_path": "src/externs.rs", "rank": 16, "score": 221072.87476805385 }, { "content": "#[must_use]\n\npub fn int32(i: i32) -> Val {\n\n Val::PrimVal(Prim::I32(i))\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 17, "score": 218870.31833911798 }, { "content": "#[must_use]\n\npub fn string(s: &str) -> Val {\n\n Val::PrimVal(Prim::Str(s.to_string()))\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 18, "score": 218727.4258922272 }, { "content": "#[must_use]\n\npub fn type_type() -> Val {\n\n variable(\"Type\")\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 19, "score": 214348.30059972315 }, { "content": "#[must_use]\n\npub fn builtin(name: &str) -> Val {\n\n Val::PrimVal(Prim::BuiltIn(name.to_string()))\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 20, "score": 213586.73964962747 }, { "content": "pub fn prim_add(l: &Val, r: &Val, _info: &Info) -> Res {\n\n use crate::primitives::sum;\n\n match (l, r) {\n\n (PrimVal(Bool(l)), PrimVal(Bool(r))) => {\n\n Ok(int32(if *l { 1 } else { 0 } + if *r { 1 } else { 0 }))\n\n }\n\n (PrimVal(Bool(l)), PrimVal(I32(r))) => Ok(int32(r.wrapping_add(if *l { 1 } else { 0 }))),\n\n (PrimVal(Bool(l)), PrimVal(Str(r))) => Ok(PrimVal(Str(l.to_string() + &r.to_string()))),\n\n (PrimVal(I32(l)), PrimVal(Bool(r))) => Ok(int32(l.wrapping_add(if *r { 1 } else { 0 }))),\n\n (PrimVal(I32(l)), PrimVal(I32(r))) => Ok(int32(l.wrapping_add(*r))),\n\n (PrimVal(I32(l)), PrimVal(Str(r))) => Ok(PrimVal(Str(l.to_string() + &r.to_string()))),\n\n (PrimVal(Str(l)), PrimVal(Bool(r))) => Ok(PrimVal(Str(l.to_string() + &r.to_string()))),\n\n (PrimVal(Str(l)), PrimVal(I32(r))) => Ok(PrimVal(Str(l.to_string() + &r.to_string()))),\n\n (PrimVal(Str(l)), PrimVal(Str(r))) => 
Ok(PrimVal(Str(l.to_string() + &r.to_string()))),\n\n (l, r) => Ok(sum(vec![l.clone(), r.clone()])?),\n\n //(l, r) => Err(TError::TypeMismatch2(\n\n //\"+\".to_string(),\n\n //Box::new((*l).clone()),\n\n //Box::new((*r).clone()),\n\n //info,\n\n //)),\n\n }\n\n}\n\n\n", "file_path": "src/externs.rs", "rank": 21, "score": 212307.80757129763 }, { "content": "pub fn prim_add_strs(l: &Val, r: &Val, _info: &Info) -> Res {\n\n let to_str = |v: &Val| {\n\n if let PrimVal(Str(s)) = v {\n\n s.to_string()\n\n } else {\n\n format!(\"{}\", v)\n\n }\n\n };\n\n Ok(PrimVal(Str(format!(\"{}{}\", to_str(l), to_str(r)))))\n\n}\n\n\n", "file_path": "src/externs.rs", "rank": 22, "score": 208544.53582726867 }, { "content": "#[must_use]\n\npub fn byte_type() -> Val {\n\n record(vec![bit_type(); 8]).expect(\"byte should be safe\")\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 23, "score": 204034.98953269675 }, { "content": "#[must_use]\n\npub fn trit_type() -> Val {\n\n sum(vec![unit_type(); 3]).expect(\"trit should be safe\")\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 24, "score": 204034.98953269675 }, { "content": "#[must_use]\n\npub fn quad_type() -> Val {\n\n record(vec![bit_type(); 2]).expect(\"quad should be safe\")\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 25, "score": 204034.98953269675 }, { "content": "#[must_use]\n\npub fn string_type() -> Val {\n\n char_type().ptr()\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 26, "score": 204034.98953269675 }, { "content": "#[must_use]\n\npub fn number_type() -> Val {\n\n variable(\"Number\")\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 27, "score": 204034.98953269675 }, { "content": "#[must_use]\n\npub fn char_type() -> Val {\n\n byte_type()\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 28, "score": 204034.98953269675 }, { "content": "#[must_use]\n\npub fn unit_type() -> Val {\n\n Product(set![])\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 29, "score": 
204034.98953269675 }, { "content": "#[must_use]\n\npub fn never_type() -> Val {\n\n Union(set![])\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 30, "score": 204034.98953269675 }, { "content": "#[must_use]\n\npub fn bit_type() -> Val {\n\n sum(vec![unit_type(); 2]).expect(\"bit should be safe\")\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 31, "score": 204034.98953269675 }, { "content": "pub fn add_to_product(tys: &mut TypeSet, values: &TypeSet) {\n\n for val in values {\n\n if let Product(vals) = val {\n\n add_to_product(tys, vals);\n\n } else {\n\n tys.insert(val.clone()); // hopeless\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 32, "score": 200614.85290091846 }, { "content": "fn find_symbol<'a>(state: &'a [Frame], name: &str) -> Option<&'a Val> {\n\n for frame in state.iter().rev() {\n\n if let Some(val) = frame.get(name) {\n\n return Some(val); // This is the variable\n\n }\n\n // Not in this frame, go back up.\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/passes/ast_interpreter.rs", "rank": 33, "score": 200370.4969022673 }, { "content": "pub fn prim_type_and(l: Val, r: Val) -> Res {\n\n Ok(Val::Product(set!(l, r)))\n\n}\n\n\n", "file_path": "src/externs.rs", "rank": 34, "score": 196387.72631957714 }, { "content": "pub fn build_logger(finish: impl FnOnce(&mut env_logger::Builder)) {\n\n if unsafe { LOGS_UNINITIALISED } {\n\n unsafe {\n\n LOGS_UNINITIALISED = false;\n\n }\n\n finish(\n\n env_logger::Builder::from_env(\n\n env_logger::Env::default()\n\n .filter_or(\"TAKO_LOG\", \"warn\")\n\n .write_style_or(\"TAKO_LOG_STYLE\", \"AUTO\"),\n\n )\n\n .format_timestamp(None),\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 35, "score": 190300.90234566003 }, { "content": "fn only_item_or_build(vals: TypeSet, builder: fn(TypeSet) -> Val) -> Val {\n\n if vals.is_empty() {\n\n builder(vals) // skip reductions automatically.\n\n } else if vals.len() == 1 {\n\n vals.iter()\n\n .next()\n\n .expect(\"set with len 1 should 
always have a value\")\n\n .clone()\n\n } else {\n\n reduce_common_padding(vals, builder)\n\n }\n\n}\n\n\n", "file_path": "src/experimental/type_graph.rs", "rank": 36, "score": 190139.2318070461 }, { "content": "fn factor_out(val: Val, reduction: &Val) -> Val {\n\n let factor_tys = |tys: TypeSet, builder: fn(TypeSet) -> Val| {\n\n let mut new_tys = set![];\n\n for ty in tys.iter().cloned() {\n\n new_tys.insert(factor_out(ty, reduction));\n\n }\n\n new_tys.remove(reduction);\n\n only_item_or_build(new_tys, builder)\n\n };\n\n match val {\n\n Product(tys) => factor_tys(tys, Product),\n\n Union(tys) => factor_tys(tys, Union),\n\n Padded(k, ty) => match reduction {\n\n Padded(j, reduction) => {\n\n if k <= *j {\n\n factor_out(*ty, &reduction.clone().padded(j - k))\n\n } else {\n\n *ty\n\n }\n\n }\n", "file_path": "src/experimental/type_graph.rs", "rank": 37, "score": 188106.0025153857 }, { "content": "fn reduce_common_padding(tys: TypeSet, builder: fn(TypeSet) -> Val) -> Val {\n\n let mut common_padding = None;\n\n for ty in &tys {\n\n common_padding = Some(if let Padded(k, _ty) = ty {\n\n std::cmp::min(*k, common_padding.unwrap_or(*k))\n\n } else {\n\n 0\n\n });\n\n }\n\n let common_padding = common_padding.unwrap_or(0);\n\n if common_padding > 0 {\n\n let mut unpadded_tys = set![];\n\n for ty in tys.iter().cloned() {\n\n if let Padded(k, ty) = ty {\n\n let k = k - common_padding;\n\n unpadded_tys.insert((*ty).padded(k));\n\n }\n\n }\n\n return builder(unpadded_tys).padded(common_padding);\n\n }\n\n builder(tys)\n\n}\n\n\n", "file_path": "src/experimental/type_graph.rs", "rank": 38, "score": 181081.56002734214 }, { "content": "fn binding_power(storage: &mut DBStorage, tok: &Token) -> i32 {\n\n match binding(storage, tok) {\n\n Semantic::Operator { binding, .. 
} => binding,\n\n Semantic::Func => 1000,\n\n }\n\n}\n\n\n\nimpl Token {\n\n pub fn get_info(&self) -> Info {\n\n self.pos.clone().get_info()\n\n }\n\n}\n\n\n\nimpl Loc {\n\n pub fn get_info(self) -> Info {\n\n Info {\n\n loc: Some(self),\n\n ..Info::default()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/passes/parser.rs", "rank": 39, "score": 176212.6520753682 }, { "content": "#[must_use]\n\npub fn get_implementation(name: &str) -> Option<FuncImpl> {\n\n match name {\n\n \"print\" => Some(Box::new(|_, args, info| {\n\n let val = get_symbol(&args, \"it\", info)?;\n\n match val {\n\n PrimVal(Str(s)) => print!(\"{}\", s),\n\n s => print!(\"{:?}\", s),\n\n };\n\n Ok(int32(0))\n\n })),\n\n \"eprint\" => Some(Box::new(|_, args, info| {\n\n let val = get_symbol(&args, \"it\", info)?;\n\n match val {\n\n PrimVal(Str(s)) => eprint!(\"{}\", s),\n\n s => eprint!(\"{:?}\", s),\n\n };\n\n Ok(int32(0))\n\n })),\n\n \"struct\" => Some(Box::new(|_, args, info| {\n\n use crate::ast::{BinOp, Sym};\n", "file_path": "src/externs.rs", "rank": 40, "score": 174099.63310826133 }, { "content": "pub fn sum(values: Vec<Val>) -> Result<Val, TError> {\n\n let mut layout = set![];\n\n let tag_bits = num_bits(values.len() as Offset);\n\n for (count, val) in values.into_iter().enumerate() {\n\n let mut tagged = tag(bits(count, tag_bits));\n\n if val != unit_type() {\n\n tagged = record(vec![tagged, val])?;\n\n }\n\n layout.insert(tagged);\n\n }\n\n Ok(Union(layout))\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 41, "score": 172973.1154235361 }, { "content": "#[must_use]\n\npub fn path_to_string(path: PathRef) -> String {\n\n path.iter()\n\n .map(|p| format!(\"{}\", p))\n\n .collect::<Vec<String>>()\n\n .join(\".\")\n\n}\n\n\n\n//TODO: Remove the default instance.\n\n#[derive(Debug, Clone, PartialEq, Eq, Default)]\n\npub struct Entry {\n\n pub uses: HashSet<Path>,\n\n pub defined_at: Path,\n\n // pub requires: Vec<Sym>,\n\n // pub defines: HashMap<Sym, Path>,\n\n}\n\n\n\n#[derive(Debug, 
Clone, PartialEq, Eq)]\n\npub struct Root {\n\n pub ast: Node,\n\n pub entity: Entity,\n", "file_path": "src/ast.rs", "rank": 43, "score": 168698.16379921508 }, { "content": "type Delta = i32;\n", "file_path": "src/experimental/lambda.rs", "rank": 44, "score": 164751.60798902874 }, { "content": "// A list of types with an offset to get to the first bit (used for padding, frequently 0).\n\ntype Layout = Vec<Val>; // Use a deque\n\npub type TypeSet = BTreeSet<Val>;\n\npub type Pack = BTreeSet<(String, Val)>;\n\npub type Frame = HashMap<String, Val>;\n\n\n\n#[derive(PartialEq, Eq, Clone, PartialOrd, Ord, Hash)]\n\npub enum Prim {\n\n Bool(bool),\n\n I32(i32),\n\n Str(String),\n\n BuiltIn(String),\n\n Tag(BitVec), // An identifying bit string (prefix).\n\n}\n\nuse Prim::{Bool, BuiltIn, Str, Tag, I32};\n\n\n\nimpl std::fmt::Debug for Prim {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Bool(b) => write!(f, \"{:?}\", b)?,\n\n I32(i) => write!(f, \"{:?}\", i)?,\n", "file_path": "src/primitives.rs", "rank": 45, "score": 162215.19629954686 }, { "content": "#[must_use]\n\npub fn boolean(b: bool) -> Val {\n\n Val::PrimVal(Prim::Bool(b))\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 46, "score": 161142.6008500163 }, { "content": "#[allow(dead_code)]\n\npub fn card(ty: &Val) -> Result<Offset, TError> {\n\n match ty {\n\n PrimVal(Tag(_bits)) => Ok(1),\n\n BitStr(_ptr_size) => Err(TError::StaticPointerCardinality(Info::default())),\n\n Union(s) => {\n\n let mut sum = 0;\n\n for sty in s {\n\n sum += card(sty)?;\n\n }\n\n Ok(sum)\n\n }\n\n Product(s) => {\n\n let mut prod = 1;\n\n for sty in s {\n\n prod *= card(sty)?;\n\n }\n\n Ok(prod)\n\n }\n\n Pointer(_ptr_size, t) => card(t),\n\n Padded(_size, t) => card(t),\n\n x => Err(TError::UnknownCardOfAbstractType(\n\n format!(\"{:#?}\", x),\n\n Info::default(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 47, "score": 160523.66281545386 }, { "content": "pub fn 
record(values: Layout) -> Result<Val, TError> {\n\n let mut layout = set![];\n\n let mut off = 0;\n\n for val in values {\n\n // Detect nested records?\n\n // Work out the padding here\n\n let size = size(&val)?;\n\n layout.insert(val.padded(off));\n\n off += size;\n\n }\n\n let mut tys = set![];\n\n add_to_product(&mut tys, &layout);\n\n Ok(Product(tys))\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 48, "score": 160523.6628154539 }, { "content": "// Calculates the memory needed for a new instance in bits.\n\npub fn size(ty: &Val) -> Result<Offset, TError> {\n\n match ty {\n\n PrimVal(Tag(bits)) => Ok(bits.len()),\n\n BitStr(ptr_size) => Ok(*ptr_size),\n\n Union(s) | Product(s) => {\n\n let mut res = 0;\n\n for sty in s.iter() {\n\n // This includes padding in size.\n\n let c = size(sty)?;\n\n if res <= c {\n\n res = c;\n\n }\n\n }\n\n Ok(res)\n\n }\n\n Pointer(ptr_size, _t) => Ok(*ptr_size),\n\n Padded(bits, t) => Ok(bits + size(t)?),\n\n Variable(name) => Err(TError::UnknownSizeOfVariableType(\n\n name.clone(),\n\n Info::default(),\n\n )),\n\n x => Err(TError::UnknownSizeOfAbstractType(\n\n format!(\"{:#?}\", x),\n\n Info::default(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 49, "score": 160523.66281545386 }, { "content": "#[cfg(feature = \"bench\")]\n\npub fn criterion_benchmark(c: &mut Criterion) {\n\n let module = vec![];\n\n\n\n c.bench_function(\"microbench_type_of_i32\", |b| {\n\n let code = Arc::new(\"12\".to_string());\n\n let mut db = DBStorage::default();\n\n let prog = black_box(parse_string(&mut db, &module, &code).expect(\"should parse\"));\n\n let env = Variable(\"test_program\".to_string()); // TODO: Track the type env\n\n b.iter(|| infer(&mut db, &prog.0, &env));\n\n });\n\n\n\n c.bench_function(\"microbench_parse_and_type_of_i32_pre_cache\", |b| {\n\n let code = Arc::new(\"12\".to_string());\n\n let mut db = DBStorage::default();\n\n let prog = parse_string(&mut db, &module, &code).expect(\"should parse\");\n\n 
let env = Variable(\"test_program\".to_string()); // TODO: Track the type env\n\n infer(&mut db, &prog.0, &env);\n\n b.iter(|| {\n\n let prog = black_box(parse_string(&mut db, &module, &code).expect(\"should parse\"));\n\n infer(&mut db, &prog.0, &env)\n", "file_path": "benches/tako_bench.rs", "rank": 50, "score": 154993.67727485456 }, { "content": "#[must_use]\n\npub fn tag(bits: BitVec) -> Val {\n\n Val::PrimVal(Prim::Tag(bits))\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 51, "score": 153753.78687679966 }, { "content": "#[must_use]\n\npub fn merge_vals(left: &[(String, Val)], right: &[(String, Val)]) -> Vec<(String, Val)> {\n\n let mut names = HashSet::<String>::new();\n\n for pair in right {\n\n names.insert(pair.0.clone());\n\n }\n\n let mut items = vec![];\n\n for pair in left {\n\n if !names.contains(&pair.0) {\n\n items.push(pair.clone());\n\n }\n\n }\n\n for pair in right {\n\n items.push(pair.clone());\n\n }\n\n items\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 52, "score": 153443.23267493537 }, { "content": "pub trait Visitor<State, Res, Final, Start = Root> {\n\n fn visit_root(&mut self, storage: &mut DBStorage, e: &Start) -> Result<Final, TError>;\n\n\n\n fn visit_sym(\n\n &mut self,\n\n storage: &mut DBStorage,\n\n state: &mut State,\n\n e: &Sym,\n\n ) -> Result<Res, TError>;\n\n fn visit_val(\n\n &mut self,\n\n storage: &mut DBStorage,\n\n state: &mut State,\n\n e: &Val,\n\n ) -> Result<Res, TError>;\n\n fn visit_apply(\n\n &mut self,\n\n\n\n storage: &mut DBStorage,\n\n state: &mut State,\n", "file_path": "src/ast.rs", "rank": 53, "score": 151783.5800249037 }, { "content": "fn repl(storage: &mut DBStorage) {\n\n print_cli_info();\n\n // `()` can be used when no completer is required\n\n let rl_config = Config::builder().tab_stop(2).build();\n\n\n\n let mut rl = Editor::<()>::with_config(rl_config);\n\n if let Err(err) = rl.load_history(&storage.history_file()) {\n\n error!(\"{:?}\", err);\n\n }\n\n let mut last_cmd_was_interrupt 
= false;\n\n loop {\n\n let readline = rl.readline(\"> \");\n\n let mut cmd_was_interrupt = false;\n\n match readline {\n\n Ok(line) => {\n\n if !line.is_empty() {\n\n if line == \":exit\" {\n\n break;\n\n }\n\n rl.add_history_entry(line.as_str());\n", "file_path": "src/main.rs", "rank": 54, "score": 151072.89844710685 }, { "content": "pub fn eval(mut stack: Stack) -> Stack {\n\n // debug!(\"{:?}\", shows(&stack));\n\n while let Some(curr) = stack.pop_front() {\n\n match curr {\n\n T(S) => {\n\n if stack.len() >= 3 {\n\n let x = stack\n\n .pop_front()\n\n .expect(\"Empty pop_front from non-empty stack (S.x).\");\n\n let y = stack\n\n .pop_front()\n\n .expect(\"Empty pop_front from non-empty stack (S.y).\");\n\n let z = stack\n\n .pop_front()\n\n .expect(\"Empty pop_front from non-empty stack (S.z).\");\n\n // xz(yz)\n\n let mut parend = vec![];\n\n if let P(y) = y {\n\n parend.extend(y);\n\n } else {\n", "file_path": "src/experimental/ski.rs", "rank": 55, "score": 147666.83879437504 }, { "content": "pub fn print_cli_info() {\n\n println!(\"{}{}\", TITLE, VERSION);\n\n}\n\n\n", "file_path": "src/cli_options.rs", "rank": 56, "score": 145153.52942372786 }, { "content": "#[must_use]\n\npub fn bits(mut n: Offset, len: Offset) -> BitVec {\n\n let mut v: BitVec = bitvec![0; len];\n\n for mut b in v.iter_mut().rev() {\n\n if n == 0 {\n\n break;\n\n }\n\n *b = n % 2 != 0;\n\n n /= 2;\n\n }\n\n v\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 57, "score": 137941.61428027644 }, { "content": "#[test]\n\nfn lambda() {\n\n run(\"examples/lambda.tk\");\n\n}\n\n\n", "file_path": "tests/goldens.rs", "rank": 58, "score": 131664.23387099127 }, { "content": "#[test]\n\nfn type_i32() {\n\n run(\"examples/type_i32.tk\");\n\n}\n\n\n", "file_path": "tests/goldens.rs", "rank": 59, "score": 131610.8839251652 }, { "content": "#[test]\n\nfn type_str() {\n\n run(\"examples/type_str.tk\");\n\n}\n\n\n", "file_path": "tests/goldens.rs", "rank": 60, "score": 131585.2580817626 }, { 
"content": "pub fn show(s: &SVal) -> String {\n\n match s {\n\n T(S) => \"S\".to_string(),\n\n T(K) => \"K\".to_string(),\n\n T(I) => \"I\".to_string(),\n\n V(name) => name.to_string(),\n\n P(st) => {\n\n let mut s = \"(\".to_string();\n\n for t in st.iter() {\n\n s += &show(t);\n\n }\n\n s += \")\";\n\n s\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/experimental/ski.rs", "rank": 61, "score": 131250.40074015944 }, { "content": "pub trait HasInfo {\n\n fn get_info(&self) -> &Info;\n\n fn get_mut_info(&mut self) -> &mut Info;\n\n}\n\n\n\n#[derive(Clone, Ord, PartialOrd, Hash, PartialEq, Eq)]\n\npub enum Symbol {\n\n Anon,\n\n Named(String, Option<String>), // name, (and for files) an optional extension\n\n}\n\n\n\nimpl fmt::Debug for Symbol {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Symbol::Anon => write!(f, \"_\")?,\n\n // TODO: Edge case exists here if two files with different extensions are used together\n\n Symbol::Named(name, None) => write!(f, \"{}\", name)?,\n\n Symbol::Named(name, Some(ext)) => write!(f, \"{}.{}\", name, ext)?,\n\n }\n\n Ok(())\n", "file_path": "src/ast.rs", "rank": 62, "score": 131029.08087492394 }, { "content": "// use std::sync::Arc;\n\ntype InnerVal = Box<Val>;\n\n\n\n#[derive(PartialEq, Eq, Clone, PartialOrd, Ord, Hash)]\n\npub enum Val {\n\n PrimVal(Prim),\n\n // Complex types\n\n Pointer(Offset, InnerVal), // Defaults to 8 bytes (64 bit)\n\n Lambda(Box<Node>),\n\n Struct(Vec<(String, Val)>), // Should really just store values, but we can't do that yet.\n\n Union(TypeSet),\n\n Product(TypeSet),\n\n Padded(Offset, InnerVal),\n\n Function {\n\n intros: Pack,\n\n arguments: InnerVal,\n\n results: InnerVal,\n\n },\n\n App {\n\n inner: InnerVal,\n\n arguments: InnerVal,\n", "file_path": "src/primitives.rs", "rank": 63, "score": 130666.05661427246 }, { "content": "fn binding(storage: &mut DBStorage, tok: &Token) -> Semantic {\n\n storage.get_extern_operator(&tok.value)\n\n}\n\n\n", "file_path": 
"src/passes/parser.rs", "rank": 64, "score": 130627.66632572051 }, { "content": "type State = Table;\n", "file_path": "src/passes/to_cpp.rs", "rank": 65, "score": 129302.63000935857 }, { "content": "pub fn any_true<I>(vals: I) -> Tribool\n\nwhere\n\n I: Iterator<Item = Tribool>,\n\n{\n\n let mut st = False;\n\n for i in vals {\n\n st = st.or(&i);\n\n }\n\n st\n\n}\n\n\n", "file_path": "src/data_structures/tribool.rs", "rank": 66, "score": 128758.9737043966 }, { "content": "pub fn all_true<I>(vals: I) -> Tribool\n\nwhere\n\n I: Iterator<Item = Tribool>,\n\n{\n\n let mut st = True;\n\n for i in vals {\n\n st = st.and(&i);\n\n }\n\n st\n\n}\n", "file_path": "src/data_structures/tribool.rs", "rank": 67, "score": 128758.9737043966 }, { "content": "fn expr(\n\n storage: &mut DBStorage,\n\n init_toks: VecDeque<Token>,\n\n init_lbp: i32,\n\n path: PathRef,\n\n) -> Result<(Node, AstNode, VecDeque<Token>), TError> {\n\n // TODO: Name update's fields, this is confusing (0 is tree, 1 is toks)\n\n let init_update = nud(storage, init_toks, path)?;\n\n let mut left: Node = init_update.0;\n\n let mut left_node = init_update.1;\n\n let mut toks: VecDeque<Token> = init_update.2;\n\n loop {\n\n match toks.front() {\n\n None => break,\n\n Some(token) => {\n\n if init_lbp >= binding_power(storage, token) {\n\n break;\n\n }\n\n }\n\n }\n", "file_path": "src/passes/parser.rs", "rank": 68, "score": 128493.36821879318 }, { "content": "fn binding_dir(storage: &mut DBStorage, tok: &Token) -> Direction {\n\n match binding(storage, tok) {\n\n Semantic::Operator { assoc, .. 
} => assoc,\n\n Semantic::Func => Direction::Left,\n\n }\n\n}\n\n\n", "file_path": "src/passes/parser.rs", "rank": 69, "score": 127940.37718432472 }, { "content": "type State = String;\n\n\n\nimpl Visitor<State, (), String, Node> for PrettyPrint {\n\n fn visit_root(&mut self, storage: &mut DBStorage, expr: &Node) -> Result<String, TError> {\n\n let mut state: String = \"\".to_string();\n\n self.visit(storage, &mut state, expr)?;\n\n Ok(state)\n\n }\n\n\n\n fn visit_sym(&mut self, _storage: &mut DBStorage, state: &mut State, expr: &Sym) -> Res {\n\n if let Some(def_at) = &expr.get_info().defined_at {\n\n write!(state, \".{}\", path_to_string(def_at))?;\n\n } else {\n\n write!(state, \"{}\", expr.name)?;\n\n }\n\n Ok(())\n\n }\n\n\n\n fn visit_val(&mut self, _storage: &mut DBStorage, state: &mut State, expr: &Val) -> Res {\n\n write!(state, \"{}\", &expr)?;\n", "file_path": "src/passes/pretty_print.rs", "rank": 70, "score": 126518.264516408 }, { "content": "#[must_use]\n\npub fn make_name(def: PathRef) -> String {\n\n let def_n: Vec<String> = def.iter().map(|n| n.clone().to_name()).collect();\n\n def_n.join(\"_\")\n\n}\n\n\n", "file_path": "src/passes/to_cpp.rs", "rank": 71, "score": 125206.5766686252 }, { "content": "fn to_file_path(context: PathRef) -> Path {\n\n let mut module = context.to_vec();\n\n loop {\n\n match module.last() {\n\n None => panic!(\n\n \"Couldn't find a file associated with symbol at {}\",\n\n path_to_string(context)\n\n ),\n\n Some(Symbol::Named(_, Some(_ext))) => break, // Found the file\n\n _ => {} // Skip anons and regular symbols\n\n }\n\n module.pop();\n\n }\n\n module\n\n}\n\n\n\npub struct DBStorage {\n\n pub world: World,\n\n project_dirs: Option<ProjectDirs>,\n\n pub options: Options,\n", "file_path": "src/database.rs", "rank": 72, "score": 121795.47883722978 }, { "content": "type State = Vec<Frame>;\n\nimpl<'a> Visitor<State, Val, Val> for Interpreter<'a> {\n\n fn visit_root(&mut self, storage: &mut DBStorage, root: &Root) -> Res 
{\n\n let mut base_frame = map! {};\n\n for (name, ext) in storage.get_externs().iter() {\n\n base_frame.insert(name.clone(), ext.value.clone());\n\n }\n\n let mut state = vec![base_frame];\n\n self.visit(storage, &mut state, &root.ast)\n\n }\n\n\n\n fn visit_sym(&mut self, storage: &mut DBStorage, state: &mut State, expr: &Sym) -> Res {\n\n let name = &expr.name;\n\n debug!(\"evaluating sym '{}'\", name);\n\n let value = find_symbol(state, name);\n\n if let Some(prim) = value {\n\n debug!(\"{} = (from stack) {}\", name, prim.clone().into_node());\n\n return Ok(prim.clone());\n\n }\n\n if let Some(ext) = crate::externs::get_implementation(name) {\n", "file_path": "src/passes/ast_interpreter.rs", "rank": 73, "score": 119161.46304170275 }, { "content": "#[cfg(not(test))]\n\npub fn ensure_initialized() {\n\n build_logger(env_logger::Builder::init);\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 74, "score": 117681.41691941017 }, { "content": "fn cancel_neighbours(tys: &TypeSet) -> TypeSet {\n\n let mut res = tys.clone();\n\n for ty in tys {\n\n let mut simpl_tys = set![];\n\n for other in res.iter().cloned() {\n\n simpl_tys.insert(factor_out(other, ty));\n\n }\n\n res = simpl_tys;\n\n }\n\n res\n\n}\n\n\n", "file_path": "src/experimental/type_graph.rs", "rank": 75, "score": 117357.21644129908 }, { "content": "pub fn work<'a>(\n\n storage: &mut DBStorage,\n\n filename: &str,\n\n print_impl: Option<ImplFn<'a>>,\n\n) -> Result<String, TError> {\n\n let mut contents = String::new();\n\n let mut file = File::open(filename.to_owned())?;\n\n file.read_to_string(&mut contents)?;\n\n\n\n work_on_string(storage, contents, filename, print_impl)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 76, "score": 116762.36020238651 }, { "content": "fn run(file: &str) {\n\n test_expecting(Success, vec![\"--run\", file]);\n\n}\n\n\n", "file_path": "tests/goldens.rs", "rank": 77, "score": 116192.76748132543 }, { "content": "type Id = i32; // TODO: Make this a vec of Id so it can be treated 
as a stack\n\n\n\n#[derive(Default, Debug, Clone)]\n\npub struct TypeGraph {\n\n // Map from paths to Ids (to avoid mapping from Paths to other things)\n\n pub symbols: HashMap<Path, Id>,\n\n\n\n pub types: HashMap<Id, Val>,\n\n\n\n // A counter used for generating new type variables... (probably a bad idea)\n\n pub counter: Id,\n\n}\n\n\n", "file_path": "src/experimental/type_graph.rs", "rank": 78, "score": 115289.55809810091 }, { "content": "fn abs(inner: Term) -> Term {\n\n Abs {\n\n inner: Box::new(inner),\n\n }\n\n}\n\n\n\nimpl Term {\n\n fn shift(&self, delta: Delta) -> Term {\n\n self.shift_with_cutoff(delta, 0)\n\n }\n\n\n\n fn shift_with_cutoff(&self, delta: Delta, cutoff: Ind) -> Term {\n\n match self {\n\n Var { ind } => {\n\n if *ind < cutoff {\n\n self.clone()\n\n } else {\n\n var(ind\n\n .checked_add(delta)\n\n .expect(\"Should not run out of indexes\"))\n", "file_path": "src/experimental/lambda.rs", "rank": 79, "score": 115015.59399705172 }, { "content": "pub fn parse_string(\n\n storage: &mut DBStorage,\n\n module: PathRef,\n\n text: &Arc<String>,\n\n) -> Result<(Node, Entity), TError> {\n\n let toks = lex_string(storage, module, text)?;\n\n debug!(\"Parsing contents... 
{}\", path_to_string(module));\n\n let (root, root_node, left_over) = expr(storage, toks, 0, module)?;\n\n let root_entity = storage.store_node(root_node, module);\n\n\n\n if let Some(head) = left_over.front() {\n\n return Err(TError::ParseError(\n\n format!(\"Oh no: Left over tokens {:?}\", left_over),\n\n head.get_info(),\n\n ));\n\n }\n\n debug!(\"ast: {}\", root);\n\n Ok((root, root_entity))\n\n}\n\n\n", "file_path": "src/passes/parser.rs", "rank": 80, "score": 114928.10492949799 }, { "content": "// Consumes a single token from a Deque of characters.\n\npub fn lex_head<'a>(\n\n mut contents: std::iter::Peekable<std::str::Chars<'a>>,\n\n pos: &mut Loc,\n\n) -> (Token, std::iter::Peekable<std::str::Chars<'a>>) {\n\n let mut head: VecDeque<char> = VecDeque::new();\n\n\n\n let mut tok_type: TokenType = TokenType::Unknown;\n\n let mut quote: Option<char> = None;\n\n let mut start = pos.clone();\n\n\n\n // TODO: This should be simplified (make tight loops).\n\n while let Some(chr) = contents.peek() {\n\n let chr_type = classify_char(*chr);\n\n tok_type = match (&tok_type, &chr_type) {\n\n (TokenType::Unknown, TokenType::Whitespace) => TokenType::Unknown, // Ignore\n\n (TokenType::Unknown, TokenType::StringLit) => {\n\n quote = Some(*chr);\n\n TokenType::StringLit\n\n }\n\n (TokenType::Unknown, new_tok_type) => new_tok_type.clone(),\n", "file_path": "src/tokens.rs", "rank": 81, "score": 113771.18503714135 }, { "content": "pub fn work_on_string<'a>(\n\n storage: &mut DBStorage,\n\n contents: String,\n\n filename: &str,\n\n print_impl: Option<ImplFn<'a>>,\n\n) -> Result<String, TError> {\n\n use ast::ToNode;\n\n use cli_options::Command;\n\n\n\n let module_name = storage.module_name(filename);\n\n storage.set_file(filename, contents);\n\n\n\n match storage.options.cmd {\n\n Command::Build => storage.build_with_gpp(&module_name),\n\n Command::Interpret | Command::Repl => {\n\n let root = storage.look_up_definitions(&module_name)?;\n\n let mut interp = 
Interpreter::default();\n\n if let Some(print_impl) = print_impl {\n\n interp.impls.insert(\"print\".to_string(), print_impl);\n\n }\n", "file_path": "src/lib.rs", "rank": 82, "score": 113771.18503714135 }, { "content": "fn run_with_error(file: &str) {\n\n test_expecting(Error, vec![\"--run\", file]);\n\n}\n\n\n", "file_path": "tests/goldens.rs", "rank": 83, "score": 113420.10183095254 }, { "content": "pub fn print_cli_help() {\n\n print_cli_info();\n\n println!(\"{}\", USAGE);\n\n}\n\n\n\npub const TITLE: &str = \"tako v\";\n\n\n\npub const VERSION: &str = env!(\"CARGO_PKG_VERSION\");\n\n\n\npub const USAGE: &str = \"An experimental programming language for ergonomic software verification.\n\n\n\nUsage:\n\n tako [-i|-r|-si|-sr] <files>...\n\n tako (-h | --help)\n\n tako --version\n\n\n\nOptions:\n\n -i --interactive Run as a repl (interactive mode).\n\n -r --run Run files in interpreter.\n\n -si --stack_interactive Run as a repl (interactive mode) using the experimental stack based interpter.\n\n -sr --stack_run Run files in the experimental stack based interpter.\n\n -h --help Show this screen.\n\n --version Show compiler version.\n\n\";\n", "file_path": "src/cli_options.rs", "rank": 84, "score": 112385.36819781116 }, { "content": "fn merge_bit_patterns(tys: TypeSet) -> Option<TypeSet> {\n\n let mut bits: Vec<(Offset, BitVec)> = vec![];\n\n let mut others: TypeSet = set![];\n\n\n\n for ty in tys {\n\n match ty {\n\n PrimVal(Tag(t)) => bits.push((0, t)),\n\n Padded(k, ty) => match *ty {\n\n PrimVal(Tag(t)) => bits.push((k, t)),\n\n ty => {\n\n others.insert(ty.padded(k));\n\n }\n\n },\n\n _ => {\n\n others.insert(ty);\n\n }\n\n }\n\n }\n\n\n\n let mut new_bits = set![];\n", "file_path": "src/experimental/type_graph.rs", "rank": 85, "score": 111164.44458052787 }, { "content": "#[must_use]\n\npub fn byte_size() -> Offset {\n\n 8\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 86, "score": 111022.45908840992 }, { "content": "// TODO: Return nodes.\n\ntype Res 
= Result<Node, TError>;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct State {\n\n pub table: Table,\n\n pub path: Vec<Symbol>,\n\n}\n\n\n\nimpl Visitor<State, Node, Root, Path> for SymbolTableBuilder {\n\n fn visit_root(&mut self, storage: &mut DBStorage, module: &Path) -> Result<Root, TError> {\n\n let (expr, entity) = &storage.parse_file(module)?;\n\n info!(\"Building symbol table... {}\", path_to_string(module));\n\n\n\n let mut table = Table::default();\n\n let mut main_at = module.clone();\n\n main_at.push(Symbol::new(\"main\"));\n\n\n\n let main_symbol = table.get_mut(&main_at);\n\n main_symbol.value.uses.insert(module.clone());\n\n\n", "file_path": "src/passes/symbol_table_builder.rs", "rank": 87, "score": 108226.85228644771 }, { "content": "struct TypeCheckerSystem {}\n\n\n\nimpl<'a> System<'a> for TypeCheckerSystem {\n\n #[allow(clippy::type_complexity)]\n\n type SystemData = (\n\n ReadStorage<'a, Call>,\n\n ReadStorage<'a, Definition>,\n\n WriteStorage<'a, HasType>,\n\n ReadStorage<'a, HasValue>,\n\n ReadStorage<'a, Sequence>,\n\n ReadStorage<'a, SymbolRef>,\n\n WriteStorage<'a, Untyped>,\n\n );\n\n\n\n fn run(\n\n &mut self,\n\n (calls, definition, mut has_type, has_value, sequence, symbol_ref, mut untyped): Self::SystemData,\n\n ) {\n\n for _ in (\n\n &calls,\n", "file_path": "src/passes/type_checker.rs", "rank": 88, "score": 107878.7550971816 }, { "content": "pub fn shows(s: &Stack) -> String {\n\n show(&P(s.clone()))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use log::info;\n\n\n\n fn v(name: &str) -> SVal {\n\n SVal::V(name.to_string())\n\n }\n\n\n\n fn test(stack: Stack, expected: &Stack) {\n\n info!(\"Running: {:?}\", &stack);\n\n let out = eval(stack);\n\n info!(\"Got: {:?}\", &out);\n\n\n\n assert_eq!(&out, expected);\n\n }\n", "file_path": "src/experimental/ski.rs", "rank": 89, "score": 102285.45095127003 }, { "content": "fn get_defs(args: Node) -> Vec<Let> {\n\n if let Node::SymNode(sym_node) = args {\n\n return 
vec![sym_node.as_let()];\n\n }\n\n if let Node::LetNode(let_node) = args {\n\n return vec![let_node];\n\n }\n\n if let Node::BinOpNode(BinOp {\n\n name,\n\n left,\n\n right,\n\n info: _,\n\n }) = args.clone()\n\n {\n\n if name == \",\" {\n\n let mut left = get_defs(*left);\n\n left.append(&mut get_defs(*right));\n\n return left;\n\n }\n\n }\n\n vec![Let {\n\n name: \"it\".to_string(),\n\n args: None,\n\n info: args.get_info().clone(),\n\n value: Box::new(args),\n\n }]\n\n}\n\n\n", "file_path": "src/passes/parser.rs", "rank": 90, "score": 100099.03617429012 }, { "content": "fn operator(binding: i32, assoc: Direction) -> Semantic {\n\n Semantic::Operator { binding, assoc }\n\n}\n\n\n\n#[derive(PartialEq, Eq, Clone, Debug)]\n\npub struct Extern {\n\n pub name: String,\n\n pub value: Val,\n\n pub semantic: Semantic,\n\n pub ty: Node,\n\n pub cpp: LangImpl,\n\n}\n\n\n\n#[derive(PartialEq, Eq, Clone, Debug)]\n\npub struct LangImpl {\n\n pub code: String,\n\n pub arg_joiner: String,\n\n pub arg_processor: String,\n\n pub includes: String,\n\n pub flags: Vec<String>,\n", "file_path": "src/externs.rs", "rank": 91, "score": 99562.55157974278 }, { "content": "#[test]\n\nfn sym_op() {\n\n run_with_error(\"counter_examples/sym_op.tk\");\n\n}\n\n\n", "file_path": "tests/goldens.rs", "rank": 92, "score": 95735.80856925661 }, { "content": "#[test]\n\nfn ignored_let() {\n\n run(\"examples/ignored_let.tk\");\n\n}\n\n\n", "file_path": "tests/goldens.rs", "rank": 93, "score": 95726.91887360417 }, { "content": "#[test]\n\nfn nested_as_function() {\n\n run(\"examples/nested_as_function.tk\");\n\n}\n\n\n", "file_path": "tests/goldens.rs", "rank": 94, "score": 95718.22935177993 }, { "content": "#[test]\n\nfn non_lambda() {\n\n run_with_error(\"counter_examples/non_lambda.tk\");\n\n}\n\n\n", "file_path": "tests/goldens.rs", "rank": 95, "score": 95589.6403735837 }, { "content": "fn test_expecting(expected: TestResult, options: Vec<&str>) {\n\n takolib::ensure_initialized();\n\n\n\n let mut 
storage = DBStorage::default();\n\n storage.options = Options::new(options);\n\n let mut stdout: Vec<String> = vec![];\n\n let result = {\n\n use takolib::externs::Res;\n\n use takolib::primitives::Prim::{Str, I32};\n\n use takolib::primitives::Val::PrimVal;\n\n let mut print_impl =\n\n &mut |_: &mut DBStorage,\n\n args: HashMap<String, Box<dyn Fn() -> takolib::externs::Res>>,\n\n _: &takolib::ast::Info|\n\n -> Res {\n\n stdout.push(\n\n match args.get(\"it\").expect(\"Expected value named 'it' not found\")()? {\n\n PrimVal(Str(s)) => s,\n\n s => format!(\"{:?}\", s),\n\n },\n", "file_path": "tests/goldens.rs", "rank": 96, "score": 95173.40248403643 }, { "content": "#[test]\n\nfn type_bool() {\n\n run(\"examples/type_bool.tk\");\n\n}\n\n\n", "file_path": "tests/goldens.rs", "rank": 97, "score": 94510.67440123444 }, { "content": "#[must_use]\n\npub fn get_externs() -> &'static HashMap<String, Extern> {\n\n &EXTERN_MAP\n\n}\n", "file_path": "src/externs.rs", "rank": 98, "score": 93217.13863827402 } ]
Rust
src/engine/mod.rs
Atul9/rg3d
a4a7ad682a7f4d056863b3bba8c759b49018397c
pub mod resource_manager; pub mod error; use crate::{ core::{ math::vec2::Vec2, visitor::{ Visitor, VisitResult, Visit, }, }, sound::context::Context, engine::{ resource_manager::ResourceManager, error::EngineError, }, gui::UserInterface, renderer::{ Renderer, error::RendererError, }, window::{ WindowBuilder, Window, }, scene::SceneContainer, PossiblyCurrent, GlRequest, GlProfile, WindowedContext, NotCurrent, Api, event_loop::EventLoop, gui::Control, }; use std::{ sync::{Arc, Mutex}, time, time::Duration, }; pub struct Engine<M: 'static, C: 'static + Control<M, C>> { context: glutin::WindowedContext<PossiblyCurrent>, pub renderer: Renderer, pub user_interface: UserInterface<M, C>, pub sound_context: Arc<Mutex<Context>>, pub resource_manager: Arc<Mutex<ResourceManager>>, pub scenes: SceneContainer, pub ui_time: Duration, } impl<M, C: 'static + Control<M, C>> Engine<M, C> { #[inline] pub fn new(window_builder: WindowBuilder, events_loop: &EventLoop<()>) -> Result<Engine<M, C>, EngineError> { let context_wrapper: WindowedContext<NotCurrent> = glutin::ContextBuilder::new() .with_vsync(true) .with_gl_profile(GlProfile::Core) .with_gl(GlRequest::Specific(Api::OpenGl, (3, 3))) .build_windowed(window_builder, events_loop)?; let mut context = match unsafe { context_wrapper.make_current() } { Ok(context) => context, Err((_, e)) => return Err(EngineError::from(e)), }; let client_size = context.window().inner_size(); Ok(Engine { renderer: Renderer::new(&mut context, client_size.into())?, resource_manager: Arc::new(Mutex::new(ResourceManager::new())), sound_context: Context::new()?, scenes: SceneContainer::new(), user_interface: UserInterface::new(), ui_time: Default::default(), context, }) } #[inline] pub fn get_window(&self) -> &Window { self.context.window() } pub fn update(&mut self, dt: f32) { let inner_size = self.context.window().inner_size(); let frame_size = Vec2::new(inner_size.width as f32, inner_size.height as f32); if let Ok(mut resource_manager) = 
self.resource_manager.try_lock() { resource_manager.update(dt); } for scene in self.scenes.iter_mut() { scene.update(frame_size, dt); } let time = time::Instant::now(); self.user_interface.update(frame_size, dt); self.ui_time = time::Instant::now() - time; } pub fn get_ui_mut(&mut self) -> &mut UserInterface<M, C> { &mut self.user_interface } #[inline] pub fn render(&mut self, dt: f32) -> Result<(), RendererError> { self.user_interface.draw(); self.renderer.render_and_swap_buffers(&self.scenes, &self.user_interface.get_drawing_context(), &self.context, dt) } } impl<M: 'static, C: 'static + Control<M, C>> Visit for Engine<M, C> { fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult { visitor.enter_region(name)?; if visitor.is_reading() { self.renderer.flush(); self.resource_manager.lock().unwrap().update(0.0); self.scenes.clear(); } self.resource_manager.lock()?.visit("ResourceManager", visitor)?; self.scenes.visit("Scenes", visitor)?; self.sound_context.lock()?.visit("SoundContext", visitor)?; if visitor.is_reading() { self.resource_manager.lock()?.reload_resources(); for scene in self.scenes.iter_mut() { scene.resolve(); } } visitor.leave_region() } }
pub mod resource_manager; pub mod error; use crate::{ core::{ math::vec2::Vec2, visitor::{ Visitor, VisitResult, Visit, }, }, sound::context::Context, engine::{ resource_manager::ResourceManager, error::EngineError, }, gui::UserInterface, renderer::{ Renderer, error::RendererError, }, window::{ WindowBuilde
er: Renderer::new(&mut context, client_size.into())?, resource_manager: Arc::new(Mutex::new(ResourceManager::new())), sound_context: Context::new()?, scenes: SceneContainer::new(), user_interface: UserInterface::new(), ui_time: Default::default(), context, }) } #[inline] pub fn get_window(&self) -> &Window { self.context.window() } pub fn update(&mut self, dt: f32) { let inner_size = self.context.window().inner_size(); let frame_size = Vec2::new(inner_size.width as f32, inner_size.height as f32); if let Ok(mut resource_manager) = self.resource_manager.try_lock() { resource_manager.update(dt); } for scene in self.scenes.iter_mut() { scene.update(frame_size, dt); } let time = time::Instant::now(); self.user_interface.update(frame_size, dt); self.ui_time = time::Instant::now() - time; } pub fn get_ui_mut(&mut self) -> &mut UserInterface<M, C> { &mut self.user_interface } #[inline] pub fn render(&mut self, dt: f32) -> Result<(), RendererError> { self.user_interface.draw(); self.renderer.render_and_swap_buffers(&self.scenes, &self.user_interface.get_drawing_context(), &self.context, dt) } } impl<M: 'static, C: 'static + Control<M, C>> Visit for Engine<M, C> { fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult { visitor.enter_region(name)?; if visitor.is_reading() { self.renderer.flush(); self.resource_manager.lock().unwrap().update(0.0); self.scenes.clear(); } self.resource_manager.lock()?.visit("ResourceManager", visitor)?; self.scenes.visit("Scenes", visitor)?; self.sound_context.lock()?.visit("SoundContext", visitor)?; if visitor.is_reading() { self.resource_manager.lock()?.reload_resources(); for scene in self.scenes.iter_mut() { scene.resolve(); } } visitor.leave_region() } }
r, Window, }, scene::SceneContainer, PossiblyCurrent, GlRequest, GlProfile, WindowedContext, NotCurrent, Api, event_loop::EventLoop, gui::Control, }; use std::{ sync::{Arc, Mutex}, time, time::Duration, }; pub struct Engine<M: 'static, C: 'static + Control<M, C>> { context: glutin::WindowedContext<PossiblyCurrent>, pub renderer: Renderer, pub user_interface: UserInterface<M, C>, pub sound_context: Arc<Mutex<Context>>, pub resource_manager: Arc<Mutex<ResourceManager>>, pub scenes: SceneContainer, pub ui_time: Duration, } impl<M, C: 'static + Control<M, C>> Engine<M, C> { #[inline] pub fn new(window_builder: WindowBuilder, events_loop: &EventLoop<()>) -> Result<Engine<M, C>, EngineError> { let context_wrapper: WindowedContext<NotCurrent> = glutin::ContextBuilder::new() .with_vsync(true) .with_gl_profile(GlProfile::Core) .with_gl(GlRequest::Specific(Api::OpenGl, (3, 3))) .build_windowed(window_builder, events_loop)?; let mut context = match unsafe { context_wrapper.make_current() } { Ok(context) => context, Err((_, e)) => return Err(EngineError::from(e)), }; let client_size = context.window().inner_size(); Ok(Engine { render
random
[ { "content": "pub fn check_gl_error_internal(line: u32, file: &str) {\n\n unsafe {\n\n let error_code = gl::GetError();\n\n if error_code != gl::NO_ERROR {\n\n let code = match error_code {\n\n gl::INVALID_ENUM => \"GL_INVALID_ENUM\",\n\n gl::INVALID_VALUE => \"GL_INVALID_VALUE\",\n\n gl::INVALID_OPERATION => \"GL_INVALID_OPERATION\",\n\n gl::STACK_OVERFLOW => \"GL_STACK_OVERFLOW\",\n\n gl::STACK_UNDERFLOW => \"GL_STACK_UNDERFLOW\",\n\n gl::OUT_OF_MEMORY => \"GL_OUT_OF_MEMORY\",\n\n _ => \"Unknown\",\n\n };\n\n\n\n Log::writeln(format!(\"{} error has occurred! At line {} in file {}, stability is not guaranteed!\", code, line, file));\n\n\n\n if gl::GetDebugMessageLog::is_loaded() {\n\n let mut max_message_length = 0;\n\n gl::GetIntegerv(gl::MAX_DEBUG_MESSAGE_LENGTH, &mut max_message_length);\n\n\n", "file_path": "src/renderer/framework/mod.rs", "rank": 0, "score": 112847.91329361964 }, { "content": "pub fn translate_button(button: crate::event::MouseButton) -> crate::gui::message::MouseButton {\n\n match button {\n\n crate::event::MouseButton::Left => crate::gui::message::MouseButton::Left,\n\n crate::event::MouseButton::Right => crate::gui::message::MouseButton::Right,\n\n crate::event::MouseButton::Middle => crate::gui::message::MouseButton::Middle,\n\n crate::event::MouseButton::Other(i) => crate::gui::message::MouseButton::Other(i),\n\n }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 1, "score": 99594.43214820177 }, { "content": "pub fn translate_event(event: &WindowEvent) -> Option<OsEvent> {\n\n match event {\n\n WindowEvent::ReceivedCharacter(c) => Some(OsEvent::Character(*c)),\n\n WindowEvent::KeyboardInput { input, .. } => {\n\n if let Some(key) = input.virtual_keycode {\n\n Some(OsEvent::KeyboardInput {\n\n button: translate_key(key),\n\n state: translate_state(input.state),\n\n })\n\n } else {\n\n None\n\n }\n\n }\n\n WindowEvent::CursorMoved { position, .. 
} => {\n\n Some(OsEvent::CursorMoved {\n\n position: Vec2::new(position.x as f32, position.y as f32)\n\n })\n\n }\n\n WindowEvent::MouseWheel { delta, .. } => {\n\n match delta {\n", "file_path": "src/utils/mod.rs", "rank": 2, "score": 92476.83497739072 }, { "content": "use crate::{\n\n renderer::error::RendererError,\n\n sound::error::SoundError\n\n};\n\nuse glutin::{CreationError, ContextError};\n\n\n\n#[derive(Debug)]\n\npub enum EngineError {\n\n Sound(SoundError),\n\n Renderer(RendererError),\n\n InternalError(String),\n\n ContextError(String),\n\n}\n\n\n\nimpl From<SoundError> for EngineError {\n\n fn from(sound: SoundError) -> Self {\n\n EngineError::Sound(sound)\n\n }\n\n}\n\n\n", "file_path": "src/engine/error.rs", "rank": 3, "score": 86028.14886817246 }, { "content": "impl From<RendererError> for EngineError {\n\n fn from(renderer: RendererError) -> Self {\n\n EngineError::Renderer(renderer)\n\n }\n\n}\n\n\n\nimpl From<CreationError> for EngineError {\n\n fn from(e: CreationError) -> Self {\n\n EngineError::InternalError(format!(\"{:?}\", e))\n\n }\n\n}\n\n\n\nimpl From<ContextError> for EngineError {\n\n fn from(e: ContextError) -> Self {\n\n EngineError::ContextError(format!(\"{:?}\", e))\n\n }\n\n}", "file_path": "src/engine/error.rs", "rank": 4, "score": 86016.55213890044 }, { "content": "use std::ffi::NulError;\n\nuse crate::ContextError;\n\n\n\n#[derive(Debug)]\n\npub enum RendererError {\n\n ShaderCompilationFailed {\n\n shader_name: String,\n\n error_message: String,\n\n },\n\n\n\n /// Means that shader link stage failed, exact reason is inside `error_message`\n\n ShaderLinkingFailed {\n\n shader_name: String,\n\n error_message: String,\n\n },\n\n FaultyShaderSource,\n\n UnableToFindShaderUniform(String),\n\n InvalidTextureData,\n\n\n\n /// Means that you tried to draw element range from GeometryBuffer that\n", "file_path": "src/renderer/error.rs", "rank": 13, "score": 79104.82930050856 }, { "content": "\n\nimpl From<NulError> for RendererError 
{\n\n fn from(_: NulError) -> Self {\n\n RendererError::FaultyShaderSource\n\n }\n\n}\n\n\n\nimpl From<ContextError> for RendererError {\n\n fn from(err: ContextError) -> Self {\n\n RendererError::Context(err)\n\n }\n\n}", "file_path": "src/renderer/error.rs", "rank": 14, "score": 79096.22206682916 }, { "content": " /// does not have enough elements.\n\n InvalidElementRange {\n\n start: usize,\n\n end: usize,\n\n total: usize,\n\n },\n\n\n\n /// Means that attribute descriptor tries to define an attribute that does\n\n /// not exists in vertex, or it does not match size. For example you have vertex:\n\n /// pos: float2,\n\n /// normal: float3\n\n /// But you described second attribute as Float4, then you'll get this error.\n\n InvalidAttributeDescriptor,\n\n\n\n InvalidFrameBuffer,\n\n\n\n FailedToConstructFBO,\n\n\n\n Context(ContextError)\n\n}\n", "file_path": "src/renderer/error.rs", "rank": 15, "score": 79087.9508869825 }, { "content": "#![deny(unsafe_code)]\n\n\n\npub mod surface;\n\npub mod error;\n\npub mod debug_renderer;\n\n\n\n// Framework wraps all OpenGL calls so it has to be unsafe. 
Rest of renderer\n\n// code must be safe.\n\n#[macro_use]\n\n#[allow(unsafe_code)]\n\nmod framework;\n\n\n\nmod ui_renderer;\n\nmod particle_system_renderer;\n\nmod gbuffer;\n\nmod deferred_light_renderer;\n\nmod shadow_map_renderer;\n\nmod flat_shader;\n\nmod sprite_renderer;\n\nmod ssao;\n", "file_path": "src/renderer/mod.rs", "rank": 16, "score": 78665.10185390674 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n\n\n pub(in crate) fn render_and_swap_buffers(&mut self,\n\n scenes: &SceneContainer,\n\n drawing_context: &DrawingContext,\n\n context: &glutin::WindowedContext<PossiblyCurrent>,\n\n dt: f32,\n\n ) -> Result<(), RendererError> {\n\n scope_profile!();\n\n\n\n self.render_frame(scenes, drawing_context, dt)?;\n\n\n\n self.statistics.end_frame();\n\n context.swap_buffers()?;\n\n check_gl_error!();\n\n self.statistics.finalize();\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/renderer/mod.rs", "rank": 17, "score": 78662.81726484036 }, { "content": "mod blur;\n\nmod light_volume;\n\n\n\nuse glutin::PossiblyCurrent;\n\nuse std::{\n\n rc::Rc,\n\n sync::{\n\n Arc,\n\n Mutex,\n\n },\n\n time,\n\n collections::HashMap,\n\n cell::RefCell,\n\n};\n\nuse crate::{\n\n resource::texture::Texture,\n\n renderer::{\n\n ui_renderer::{\n\n UiRenderer,\n\n UiRenderContext,\n", "file_path": "src/renderer/mod.rs", "rank": 18, "score": 78661.3089016369 }, { "content": " Some(gpu_texture.value.clone())\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn update(&mut self, dt: f32) {\n\n for entry in self.map.values_mut() {\n\n entry.time_to_live -= dt;\n\n }\n\n self.map.retain(|_, v| v.time_to_live > 0.0);\n\n }\n\n\n\n fn clear(&mut self) {\n\n self.map.clear();\n\n }\n\n}\n\n\n\nimpl Renderer {\n\n pub(in crate) fn new(context: &mut glutin::WindowedContext<PossiblyCurrent>, frame_size: (u32, u32)) -> Result<Self, RendererError> {\n", "file_path": "src/renderer/mod.rs", "rank": 19, "score": 78660.95075070046 }, { "content": " pub fn get_frame_size(&self) -> (u32, u32) {\n\n 
self.frame_size\n\n }\n\n\n\n pub fn set_quality_settings(&mut self, settings: &QualitySettings) -> Result<(), RendererError> {\n\n self.quality_settings = *settings;\n\n self.deferred_light_renderer.set_quality_settings(&mut self.state, settings)\n\n }\n\n\n\n pub fn get_quality_settings(&self) -> QualitySettings {\n\n self.quality_settings\n\n }\n\n\n\n pub(in crate) fn flush(&mut self) {\n\n self.texture_cache.clear();\n\n self.geometry_cache.clear();\n\n }\n\n\n\n fn render_frame(&mut self, scenes: &SceneContainer,\n\n drawing_context: &DrawingContext,\n", "file_path": "src/renderer/mod.rs", "rank": 20, "score": 78657.90254139625 }, { "content": " color::Color,\n\n math::Rect,\n\n pool::Handle,\n\n },\n\n gui::draw::DrawingContext,\n\n engine::resource_manager::TimedEntry,\n\n};\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Statistics {\n\n /// Geometry statistics.\n\n pub geometry: RenderPassStatistics,\n\n /// Real time consumed to render frame.\n\n pub pure_frame_time: f32,\n\n /// Total time renderer took to process single frame, usually includes\n\n /// time renderer spend to wait to buffers swap (can include vsync)\n\n pub capped_frame_time: f32,\n\n /// Total amount of frames been rendered in one second.\n\n pub frames_per_second: usize,\n\n frame_counter: usize,\n", "file_path": "src/renderer/mod.rs", "rank": 21, "score": 78654.18457949672 }, { "content": " particle_system_renderer: ParticleSystemRenderer,\n\n /// Dummy white one pixel texture which will be used as stub when rendering\n\n /// something without texture specified.\n\n white_dummy: Rc<RefCell<GpuTexture>>,\n\n /// Dummy one pixel texture with (0, 1, 0) vector is used as stub when rendering\n\n /// something without normal map.\n\n normal_dummy: Rc<RefCell<GpuTexture>>,\n\n ui_renderer: UiRenderer,\n\n statistics: Statistics,\n\n quad: SurfaceSharedData,\n\n frame_size: (u32, u32),\n\n ambient_color: Color,\n\n quality_settings: QualitySettings,\n\n pub debug_renderer: DebugRenderer,\n\n 
gbuffers: HashMap<Handle<Node>, GBuffer>,\n\n backbuffer_clear_color: Color,\n\n texture_cache: TextureCache,\n\n geometry_cache: GeometryCache,\n\n}\n\n\n", "file_path": "src/renderer/mod.rs", "rank": 22, "score": 78652.84439449836 }, { "content": " },\n\n surface::SurfaceSharedData,\n\n particle_system_renderer::{\n\n ParticleSystemRenderer,\n\n ParticleSystemRenderContext,\n\n },\n\n gbuffer::{\n\n GBuffer,\n\n GBufferRenderContext,\n\n },\n\n deferred_light_renderer::{\n\n DeferredLightRenderer,\n\n DeferredRendererContext,\n\n },\n\n error::RendererError,\n\n framework::{\n\n gpu_texture::{\n\n GpuTexture,\n\n GpuTextureKind,\n\n PixelKind,\n", "file_path": "src/renderer/mod.rs", "rank": 23, "score": 78651.68382857737 }, { "content": " dt: f32,\n\n ) -> Result<(), RendererError> {\n\n scope_profile!();\n\n\n\n // We have to invalidate resource bindings cache because some textures or programs,\n\n // or other GL resources can be destroyed and then on their \"names\" some new resource\n\n // are created, but cache still thinks that resource is correctly bound, but it is different\n\n // object have same name.\n\n self.state.invalidate_resource_bindings_cache();\n\n\n\n // Update caches - this will remove timed out resources.\n\n self.geometry_cache.update(dt);\n\n self.texture_cache.update(dt);\n\n\n\n self.statistics.begin_frame();\n\n\n\n let window_viewport = Rect::new(0, 0, self.frame_size.0 as i32, self.frame_size.1 as i32);\n\n self.backbuffer.clear(&mut self.state, window_viewport, Some(self.backbuffer_clear_color), Some(1.0), Some(0));\n\n\n\n let frame_width = self.frame_size.0 as f32;\n", "file_path": "src/renderer/mod.rs", "rank": 24, "score": 78651.38759750877 }, { "content": " frame_start_time: time::Instant,\n\n last_fps_commit_time: time::Instant,\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct RenderPassStatistics {\n\n pub draw_calls: usize,\n\n pub triangles_rendered: usize,\n\n}\n\n\n\nimpl Default for RenderPassStatistics {\n\n fn default() 
-> Self {\n\n Self {\n\n draw_calls: 0,\n\n triangles_rendered: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl std::ops::AddAssign for RenderPassStatistics {\n", "file_path": "src/renderer/mod.rs", "rank": 25, "score": 78650.7024811192 }, { "content": " },\n\n flat_shader::FlatShader,\n\n sprite_renderer::{\n\n SpriteRenderer,\n\n SpriteRenderContext,\n\n },\n\n debug_renderer::DebugRenderer,\n\n },\n\n scene::{\n\n SceneContainer,\n\n node::Node,\n\n },\n\n core::{\n\n scope_profile,\n\n math::{\n\n vec3::Vec3,\n\n mat4::Mat4,\n\n vec2::Vec2,\n\n TriangleDefinition,\n\n },\n", "file_path": "src/renderer/mod.rs", "rank": 26, "score": 78650.6144688199 }, { "content": " texture: gbuffer.frame_texture(),\n\n })\n\n ],\n\n );\n\n }\n\n }\n\n\n\n // Render UI on top of everything.\n\n self.statistics += self.ui_renderer.render(\n\n UiRenderContext {\n\n state: &mut self.state,\n\n viewport: window_viewport,\n\n backbuffer: &mut self.backbuffer,\n\n frame_width,\n\n frame_height,\n\n drawing_context,\n\n white_dummy: self.white_dummy.clone(),\n\n texture_cache: &mut self.texture_cache,\n\n }\n\n )?;\n", "file_path": "src/renderer/mod.rs", "rank": 27, "score": 78650.08928978578 }, { "content": " quality_settings: settings,\n\n debug_renderer: DebugRenderer::new(&mut state)?,\n\n gbuffers: Default::default(),\n\n backbuffer_clear_color: Color::from_rgba(0, 0, 0, 0),\n\n texture_cache: Default::default(),\n\n geometry_cache: Default::default(),\n\n state,\n\n })\n\n }\n\n\n\n pub fn set_ambient_color(&mut self, color: Color) {\n\n self.ambient_color = color;\n\n }\n\n\n\n pub fn get_ambient_color(&self) -> Color {\n\n self.ambient_color\n\n }\n\n\n\n pub fn get_statistics(&self) -> Statistics {\n\n self.statistics\n", "file_path": "src/renderer/mod.rs", "rank": 28, "score": 78649.48971385883 }, { "content": "\n\n /// Whether to use screen space ambient occlusion or not.\n\n pub use_ssao: bool,\n\n /// Radius of sampling hemisphere used in SSAO, it defines much ambient\n\n /// occlusion 
will be in your scene.\n\n pub ssao_radius: f32,\n\n\n\n /// Global switch to enable or disable light scattering. Each light can have\n\n /// its own scatter switch, but this one is able to globally disable scatter.\n\n pub light_scatter_enabled: bool,\n\n}\n\n\n\nimpl Default for QualitySettings {\n\n fn default() -> Self {\n\n Self {\n\n point_shadow_map_size: 1024,\n\n point_shadows_distance: 15.0,\n\n point_shadows_enabled: true,\n\n point_soft_shadows: true,\n\n\n", "file_path": "src/renderer/mod.rs", "rank": 29, "score": 78649.15550460316 }, { "content": "impl Default for Statistics {\n\n fn default() -> Self {\n\n Self {\n\n geometry: RenderPassStatistics::default(),\n\n pure_frame_time: 0.0,\n\n capped_frame_time: 0.0,\n\n frames_per_second: 0,\n\n frame_counter: 0,\n\n frame_start_time: time::Instant::now(),\n\n last_fps_commit_time: time::Instant::now(),\n\n }\n\n }\n\n}\n\n\n\npub struct Renderer {\n\n state: State,\n\n backbuffer: BackBuffer,\n\n deferred_light_renderer: DeferredLightRenderer,\n\n flat_shader: FlatShader,\n\n sprite_renderer: SpriteRenderer,\n", "file_path": "src/renderer/mod.rs", "rank": 30, "score": 78649.1172078988 }, { "content": "pub struct QualitySettings {\n\n /// Point shadows\n\n /// Size of cube map face of shadow map texture in pixels.\n\n pub point_shadow_map_size: usize,\n\n /// Use or not percentage close filtering (smoothing) for point shadows.\n\n pub point_soft_shadows: bool,\n\n /// Point shadows enabled or not.\n\n pub point_shadows_enabled: bool,\n\n /// Maximum distance from camera to draw shadows.\n\n pub point_shadows_distance: f32,\n\n\n\n /// Spot shadows\n\n /// Size of square shadow map texture in pixels\n\n pub spot_shadow_map_size: usize,\n\n /// Use or not percentage close filtering (smoothing) for spot shadows.\n\n pub spot_soft_shadows: bool,\n\n /// Spot shadows enabled or not.\n\n pub spot_shadows_enabled: bool,\n\n /// Maximum distance from camera to draw shadows.\n\n pub spot_shadows_distance: 
f32,\n", "file_path": "src/renderer/mod.rs", "rank": 31, "score": 78648.95955616586 }, { "content": " }\n\n\n\n pub fn set_backbuffer_clear_color(&mut self, color: Color) {\n\n self.backbuffer_clear_color = color;\n\n }\n\n\n\n /// Sets new frame size, should be called when received a Resize event.\n\n ///\n\n /// # Notes\n\n ///\n\n /// Input values will be set to 1 pixel if new size is 0. Rendering cannot\n\n /// be performed into 0x0 texture.\n\n pub fn set_frame_size(&mut self, new_size: (u32, u32)) {\n\n self.deferred_light_renderer.set_frame_size(&mut self.state, new_size).unwrap();\n\n self.frame_size.0 = new_size.0.max(1);\n\n self.frame_size.1 = new_size.1.max(1);\n\n // Invalidate all g-buffers.\n\n self.gbuffers.clear();\n\n }\n\n\n", "file_path": "src/renderer/mod.rs", "rank": 32, "score": 78648.42936094037 }, { "content": " spot_shadow_map_size: 1024,\n\n spot_shadows_distance: 15.0,\n\n spot_shadows_enabled: true,\n\n spot_soft_shadows: true,\n\n\n\n use_ssao: true,\n\n ssao_radius: 0.5,\n\n\n\n light_scatter_enabled: true\n\n }\n\n }\n\n}\n\n\n\nimpl Statistics {\n\n /// Must be called before render anything.\n\n fn begin_frame(&mut self) {\n\n self.frame_start_time = time::Instant::now();\n\n self.geometry = Default::default();\n\n }\n\n\n", "file_path": "src/renderer/mod.rs", "rank": 33, "score": 78647.62905008529 }, { "content": " viewport,\n\n texture_cache: &mut self.texture_cache,\n\n });\n\n\n\n self.statistics += self.sprite_renderer.render(\n\n SpriteRenderContext {\n\n state,\n\n framebuffer: &mut gbuffer.final_frame,\n\n graph,\n\n camera,\n\n white_dummy: self.white_dummy.clone(),\n\n viewport,\n\n textures: &mut self.texture_cache,\n\n geom_map: &mut self.geometry_cache,\n\n });\n\n\n\n self.statistics += self.debug_renderer.render(state, viewport, &mut gbuffer.final_frame, camera);\n\n\n\n // Finally render everything into back buffer.\n\n self.statistics.geometry += self.backbuffer.draw(\n", "file_path": "src/renderer/mod.rs", "rank": 
34, "score": 78645.8726863099 }, { "content": " fn add_assign(&mut self, rhs: Self) {\n\n self.draw_calls += rhs.draw_calls;\n\n self.triangles_rendered += rhs.triangles_rendered;\n\n }\n\n}\n\n\n\nimpl std::ops::AddAssign<DrawCallStatistics> for RenderPassStatistics {\n\n fn add_assign(&mut self, rhs: DrawCallStatistics) {\n\n self.draw_calls += 1;\n\n self.triangles_rendered += rhs.triangles;\n\n }\n\n}\n\n\n\nimpl std::ops::AddAssign<RenderPassStatistics> for Statistics {\n\n fn add_assign(&mut self, rhs: RenderPassStatistics) {\n\n self.geometry += rhs;\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, PartialEq)]\n", "file_path": "src/renderer/mod.rs", "rank": 35, "score": 78645.8120064113 }, { "content": " gl::load_with(|symbol| context.get_proc_address(symbol) as *const _);\n\n\n\n let settings = QualitySettings::default();\n\n let mut state = State::new();\n\n\n\n Ok(Self {\n\n backbuffer: BackBuffer,\n\n frame_size,\n\n deferred_light_renderer: DeferredLightRenderer::new(&mut state, frame_size, &settings)?,\n\n flat_shader: FlatShader::new()?,\n\n statistics: Statistics::default(),\n\n sprite_renderer: SpriteRenderer::new()?,\n\n white_dummy: Rc::new(RefCell::new(GpuTexture::new(&mut state, GpuTextureKind::Rectangle { width: 1, height: 1 },\n\n PixelKind::RGBA8, Some(&[255, 255, 255, 255]))?)),\n\n normal_dummy: Rc::new(RefCell::new(GpuTexture::new(&mut state, GpuTextureKind::Rectangle { width: 1, height: 1 },\n\n PixelKind::RGBA8, Some(&[128, 128, 255, 255]))?)),\n\n quad: SurfaceSharedData::make_unit_xy_quad(),\n\n ui_renderer: UiRenderer::new(&mut state)?,\n\n particle_system_renderer: ParticleSystemRenderer::new(&mut state)?,\n\n ambient_color: Color::opaque(100, 100, 100),\n", "file_path": "src/renderer/mod.rs", "rank": 36, "score": 78645.62924510923 }, { "content": " }\n\n })\n\n .or_insert_with(|| GBuffer::new(state, viewport.w as usize, viewport.h as usize).unwrap());\n\n\n\n self.statistics += gbuffer.fill(\n\n GBufferRenderContext {\n\n state,\n\n 
graph,\n\n camera,\n\n white_dummy: self.white_dummy.clone(),\n\n normal_dummy: self.normal_dummy.clone(),\n\n texture_cache: &mut self.texture_cache,\n\n geom_cache: &mut self.geometry_cache,\n\n });\n\n\n\n self.statistics += self.deferred_light_renderer.render(\n\n DeferredRendererContext {\n\n state,\n\n scene,\n\n camera,\n", "file_path": "src/renderer/mod.rs", "rank": 37, "score": 78645.53879647957 }, { "content": " gbuffer,\n\n white_dummy: self.white_dummy.clone(),\n\n ambient_color: self.ambient_color,\n\n settings: &self.quality_settings,\n\n textures: &mut self.texture_cache,\n\n geometry_cache: &mut self.geometry_cache,\n\n });\n\n\n\n let depth = gbuffer.depth();\n\n\n\n self.statistics += self.particle_system_renderer.render(\n\n ParticleSystemRenderContext {\n\n state,\n\n framebuffer: &mut gbuffer.final_frame,\n\n graph,\n\n camera,\n\n white_dummy: self.white_dummy.clone(),\n\n depth,\n\n frame_width,\n\n frame_height,\n", "file_path": "src/renderer/mod.rs", "rank": 38, "score": 78645.17954568763 }, { "content": " /// Must be called before SwapBuffers but after all rendering is done.\n\n fn end_frame(&mut self) {\n\n let current_time = time::Instant::now();\n\n\n\n self.pure_frame_time = current_time.duration_since(self.frame_start_time).as_secs_f32();\n\n self.frame_counter += 1;\n\n\n\n if current_time.duration_since(self.last_fps_commit_time).as_secs_f32() >= 1.0 {\n\n self.last_fps_commit_time = current_time;\n\n self.frames_per_second = self.frame_counter;\n\n self.frame_counter = 0;\n\n }\n\n }\n\n\n\n /// Must be called after SwapBuffers to get capped frame time.\n\n fn finalize(&mut self) {\n\n self.capped_frame_time = time::Instant::now().duration_since(self.frame_start_time).as_secs_f32();\n\n }\n\n}\n\n\n", "file_path": "src/renderer/mod.rs", "rank": 39, "score": 78642.93391703512 }, { "content": " height: texture.height as usize,\n\n };\n\n let mut gpu_texture = GpuTexture::new(\n\n state,\n\n kind,\n\n 
PixelKind::from(texture.kind),\n\n Some(texture.bytes.as_slice()))\n\n .unwrap();\n\n gpu_texture.bind_mut(state, 0)\n\n .generate_mip_maps()\n\n .set_minification_filter(MininificationFilter::LinearMip)\n\n .set_magnification_filter(MagnificationFilter::Linear)\n\n .set_max_anisotropy();\n\n TimedEntry {\n\n value: Rc::new(RefCell::new(gpu_texture)),\n\n time_to_live: 20.0,\n\n }\n\n });\n\n // Texture won't be destroyed while it used.\n\n gpu_texture.time_to_live = 20.0;\n", "file_path": "src/renderer/mod.rs", "rank": 40, "score": 78642.84472762622 }, { "content": " fn clear(&mut self) {\n\n self.map.clear();\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct TextureCache {\n\n map: HashMap<usize, TimedEntry<Rc<RefCell<GpuTexture>>>>\n\n}\n\n\n\nimpl TextureCache {\n\n fn get(&mut self, state: &mut State, texture: Arc<Mutex<Texture>>) -> Option<Rc<RefCell<GpuTexture>>> {\n\n scope_profile!();\n\n\n\n if texture.lock().unwrap().loaded {\n\n let key = (&*texture as *const _) as usize;\n\n let gpu_texture = self.map.entry(key).or_insert_with(move || {\n\n let texture = texture.lock().unwrap();\n\n let kind = GpuTextureKind::Rectangle {\n\n width: texture.width as usize,\n", "file_path": "src/renderer/mod.rs", "rank": 41, "score": 78642.36903095403 }, { "content": "#[derive(Default)]\n\npub struct GeometryCache {\n\n map: HashMap<usize, TimedEntry<GeometryBuffer<surface::Vertex>>>\n\n}\n\n\n\nimpl GeometryCache {\n\n fn get(&mut self, state: &mut State, data: &SurfaceSharedData) -> &mut GeometryBuffer<surface::Vertex> {\n\n scope_profile!();\n\n\n\n let key = (data as *const _) as usize;\n\n\n\n let geometry_buffer = self.map.entry(key).or_insert_with(|| {\n\n let geometry_buffer = GeometryBuffer::new(GeometryBufferKind::StaticDraw, ElementKind::Triangle);\n\n\n\n geometry_buffer.bind(state)\n\n .describe_attributes(vec![\n\n AttributeDefinition { kind: AttributeKind::Float3, normalized: false },\n\n AttributeDefinition { kind: AttributeKind::Float2, normalized: 
false },\n\n AttributeDefinition { kind: AttributeKind::Float3, normalized: false },\n\n AttributeDefinition { kind: AttributeKind::Float4, normalized: false },\n", "file_path": "src/renderer/mod.rs", "rank": 42, "score": 78641.97308972484 }, { "content": " let frame_height = self.frame_size.1 as f32;\n\n\n\n for scene in scenes.iter() {\n\n let graph = &scene.graph;\n\n\n\n for (camera_handle, camera) in graph.pair_iter().filter_map(|(handle, node)| {\n\n if let Node::Camera(camera) = node { Some((handle, camera)) } else { None }\n\n }) {\n\n if !camera.is_enabled() {\n\n continue;\n\n }\n\n\n\n let viewport = camera.viewport_pixels(Vec2::new(frame_width, frame_height));\n\n\n\n let state = &mut self.state;\n\n let gbuffer = self.gbuffers\n\n .entry(camera_handle)\n\n .and_modify(|buf| {\n\n if buf.width != viewport.w || buf.height != viewport.h {\n\n *buf = GBuffer::new(state, viewport.w as usize, viewport.h as usize).unwrap();\n", "file_path": "src/renderer/mod.rs", "rank": 43, "score": 78639.31432695757 }, { "content": " self.geometry_cache.get(state, &self.quad),\n\n state,\n\n viewport,\n\n &self.flat_shader.program,\n\n DrawParameters {\n\n cull_face: CullFace::Back,\n\n culling: false,\n\n color_write: Default::default(),\n\n depth_write: true,\n\n stencil_test: false,\n\n depth_test: false,\n\n blend: false,\n\n },\n\n &[\n\n (self.flat_shader.wvp_matrix, UniformValue::Mat4({\n\n Mat4::ortho(0.0, viewport.w as f32, viewport.h as f32, 0.0, -1.0, 1.0) *\n\n Mat4::scale(Vec3::new(viewport.w as f32, viewport.h as f32, 0.0))\n\n })),\n\n (self.flat_shader.diffuse_texture, UniformValue::Sampler {\n\n index: 0,\n", "file_path": "src/renderer/mod.rs", "rank": 44, "score": 78639.31432695757 }, { "content": " MininificationFilter,\n\n MagnificationFilter,\n\n },\n\n geometry_buffer::{\n\n GeometryBuffer,\n\n GeometryBufferKind,\n\n ElementKind,\n\n AttributeKind,\n\n AttributeDefinition,\n\n DrawCallStatistics\n\n },\n\n framebuffer::{\n\n BackBuffer,\n\n 
FrameBufferTrait,\n\n DrawParameters,\n\n CullFace,\n\n },\n\n gpu_program::UniformValue,\n\n state::State,\n\n gl,\n", "file_path": "src/renderer/mod.rs", "rank": 45, "score": 78639.31432695757 }, { "content": " AttributeDefinition { kind: AttributeKind::Float4, normalized: false },\n\n AttributeDefinition { kind: AttributeKind::UnsignedByte4, normalized: false }])\n\n .unwrap()\n\n .set_vertices(data.vertices.as_slice())\n\n .set_triangles(data.triangles());\n\n\n\n TimedEntry { value: geometry_buffer, time_to_live: 20.0 }\n\n });\n\n\n\n geometry_buffer.time_to_live = 20.0;\n\n geometry_buffer\n\n }\n\n\n\n fn update(&mut self, dt: f32) {\n\n for entry in self.map.values_mut() {\n\n entry.time_to_live -= dt;\n\n }\n\n self.map.retain(|_, v| v.time_to_live > 0.0);\n\n }\n\n\n", "file_path": "src/renderer/mod.rs", "rank": 46, "score": 78639.31432695757 }, { "content": "use crate::utils::log::Log;\n\nuse crate::renderer::framework::gl::types::{GLchar, GLenum, GLuint, GLsizei};\n\nuse std::ffi::CStr;\n\n\n\n#[allow(clippy::all)]\n\npub(in crate) mod gl;\n\n\n\nmacro_rules! 
check_gl_error {\n\n () => (crate::renderer::framework::check_gl_error_internal(line!(), file!()))\n\n}\n\n\n\npub mod gpu_program;\n\npub mod geometry_buffer;\n\npub mod gpu_texture;\n\npub mod framebuffer;\n\npub mod state;\n\n\n", "file_path": "src/renderer/framework/mod.rs", "rank": 47, "score": 74652.3710643235 }, { "content": " for i in 0..message_count as usize {\n\n let source = sources[i];\n\n let ty = types[i];\n\n let severity = severities[i];\n\n let id = ids[i];\n\n let len = lengths[i] as usize;\n\n\n\n let source_str =\n\n match source {\n\n gl::DEBUG_SOURCE_API => \"API\",\n\n gl::DEBUG_SOURCE_SHADER_COMPILER => \"Shader Compiler\",\n\n gl::DEBUG_SOURCE_WINDOW_SYSTEM => \"Window System\",\n\n gl::DEBUG_SOURCE_THIRD_PARTY => \"Third Party\",\n\n gl::DEBUG_SOURCE_APPLICATION => \"Application\",\n\n gl::DEBUG_SOURCE_OTHER => \"Other\",\n\n _ => \"Unknown\"\n\n };\n\n\n\n let type_str =\n\n match ty {\n", "file_path": "src/renderer/framework/mod.rs", "rank": 48, "score": 74624.43658397772 }, { "content": " gl::DEBUG_TYPE_ERROR => \"Error\",\n\n gl::DEBUG_TYPE_DEPRECATED_BEHAVIOR => \"Deprecated Behavior\",\n\n gl::DEBUG_TYPE_UNDEFINED_BEHAVIOR => \"Undefined Behavior\",\n\n gl::DEBUG_TYPE_PERFORMANCE => \"Performance\",\n\n gl::DEBUG_TYPE_PORTABILITY => \"Portability\",\n\n gl::DEBUG_TYPE_OTHER => \"Other\",\n\n _ => \"Unknown\",\n\n };\n\n\n\n let severity_str =\n\n match severity {\n\n gl::DEBUG_SEVERITY_HIGH => \"High\",\n\n gl::DEBUG_SEVERITY_MEDIUM => \"Medium\",\n\n gl::DEBUG_SEVERITY_LOW => \"Low\",\n\n gl::DEBUG_SEVERITY_NOTIFICATION => \"Notification\",\n\n _ => \"Unknown\"\n\n };\n\n\n\n let str_msg = CStr::from_ptr(message);\n\n\n", "file_path": "src/renderer/framework/mod.rs", "rank": 49, "score": 74624.24497870737 }, { "content": " Log::writeln(format!(\"OpenGL message\\nSource: {}\\nType: {}\\nId: {}\\nSeverity: {}\\nMessage: {:?}\\n\",\n\n source_str,\n\n type_str,\n\n id,\n\n severity_str,\n\n str_msg));\n\n\n\n message = 
message.add(len);\n\n }\n\n } else {\n\n Log::writeln(\"Debug info is not available - glGetDebugMessageLog is not available!\".to_owned());\n\n }\n\n }\n\n }\n\n}", "file_path": "src/renderer/framework/mod.rs", "rank": 50, "score": 74619.17721365677 }, { "content": " let mut max_logged_messages = 0;\n\n gl::GetIntegerv(gl::MAX_DEBUG_LOGGED_MESSAGES, &mut max_logged_messages);\n\n\n\n let buffer_size = max_message_length * max_logged_messages;\n\n\n\n let mut message_buffer: Vec<GLchar> = Vec::with_capacity(buffer_size as usize);\n\n message_buffer.set_len(buffer_size as usize);\n\n\n\n let mut sources: Vec<GLenum> = Vec::with_capacity(max_logged_messages as usize);\n\n sources.set_len(max_logged_messages as usize);\n\n\n\n let mut types: Vec<GLenum> = Vec::with_capacity(max_logged_messages as usize);\n\n types.set_len(max_logged_messages as usize);\n\n\n\n let mut ids: Vec<GLuint> = Vec::with_capacity(max_logged_messages as usize);\n\n ids.set_len(max_logged_messages as usize);\n\n\n\n let mut severities: Vec<GLenum> = Vec::with_capacity(max_logged_messages as usize);\n\n severities.set_len(max_logged_messages as usize);\n\n\n", "file_path": "src/renderer/framework/mod.rs", "rank": 51, "score": 74619.17721365677 }, { "content": " let mut lengths: Vec<GLsizei> = Vec::with_capacity(max_logged_messages as usize);\n\n lengths.set_len(max_logged_messages as usize);\n\n\n\n let message_count = gl::GetDebugMessageLog(\n\n max_logged_messages as u32,\n\n buffer_size,\n\n sources.as_mut_ptr(),\n\n types.as_mut_ptr(),\n\n ids.as_mut_ptr(),\n\n severities.as_mut_ptr(),\n\n lengths.as_mut_ptr(),\n\n message_buffer.as_mut_ptr(),\n\n );\n\n\n\n if message_count == 0 {\n\n Log::writeln(\"Debug info is not available - run with OpenGL debug flag!\".to_owned());\n\n }\n\n\n\n let mut message = message_buffer.as_ptr();\n\n\n", "file_path": "src/renderer/framework/mod.rs", "rank": 52, "score": 74619.17721365677 }, { "content": "pub trait FrameBufferTrait {\n\n fn id(&self) -> 
u32;\n\n\n\n fn clear(&mut self, state: &mut State, viewport: Rect<i32>, color: Option<Color>, depth: Option<f32>, stencil: Option<i32>) {\n\n scope_profile!();\n\n\n\n let mut mask = 0;\n\n\n\n state.set_viewport(viewport);\n\n state.set_framebuffer(self.id());\n\n\n\n if let Some(color) = color {\n\n state.set_color_write(ColorMask::default());\n\n state.set_clear_color(color);\n\n mask |= gl::COLOR_BUFFER_BIT;\n\n }\n\n if let Some(depth) = depth {\n\n state.set_depth_write(true);\n\n state.set_clear_depth(depth);\n\n mask |= gl::DEPTH_BUFFER_BIT;\n", "file_path": "src/renderer/framework/framebuffer.rs", "rank": 53, "score": 74216.66979820261 }, { "content": "pub trait CustomEmitter: Any + Emit + Visit + Send {\n\n /// Creates boxed copy of custom emitter.\n\n fn box_clone(&self) -> Box<dyn CustomEmitter>;\n\n\n\n /// Returns unique of custom emitter. Must never be negative!\n\n /// Negative numbers reserved for built-in kinds.\n\n fn get_kind(&self) -> i32;\n\n}\n\n\n\npub enum EmitterKind {\n\n /// Unknown kind here is just to have ability to implement Default trait,\n\n /// must not be used at runtime!\n\n Unknown,\n\n Box(BoxEmitter),\n\n Sphere(SphereEmitter),\n\n Custom(Box<dyn CustomEmitter>),\n\n}\n\n\n\nimpl EmitterKind {\n\n pub fn new(id: i32) -> Result<Self, String> {\n", "file_path": "src/scene/particle_system.rs", "rank": 54, "score": 74172.07039412821 }, { "content": "pub fn translate_state(state: ElementState) -> ButtonState {\n\n match state {\n\n ElementState::Pressed => ButtonState::Pressed,\n\n ElementState::Released => ButtonState::Released,\n\n }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 55, "score": 66495.94146961154 }, { "content": "/// Small helper that creates static physics geometry from given mesh.\n\n///\n\n/// # Notes\n\n///\n\n/// This method *bakes* global transform of given mesh into static geometry\n\n/// data. 
So if given mesh was at some position with any rotation and scale\n\n/// resulting static geometry will have vertices that exactly matches given\n\n/// mesh.\n\npub fn mesh_to_static_geometry(mesh: &Mesh) -> StaticGeometry {\n\n let mut triangles = Vec::new();\n\n let global_transform = mesh.global_transform();\n\n for surface in mesh.surfaces() {\n\n let shared_data = surface.get_data();\n\n let shared_data = shared_data.lock().unwrap();\n\n\n\n let vertices = shared_data.get_vertices();\n\n for triangle in shared_data.triangles() {\n\n let a = global_transform.transform_vector(vertices[triangle[0] as usize].position);\n\n let b = global_transform.transform_vector(vertices[triangle[1] as usize].position);\n\n let c = global_transform.transform_vector(vertices[triangle[2] as usize].position);\n\n\n\n // Silently ignore degenerated triangles.\n\n if let Some(triangle) = StaticTriangle::from_points(&a, &b, &c) {\n\n triangles.push(triangle);\n\n }\n\n }\n\n }\n\n StaticGeometry::new(triangles)\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 56, "score": 66495.94146961154 }, { "content": "pub fn translate_key(key: VirtualKeyCode) -> KeyCode {\n\n match key {\n\n VirtualKeyCode::Key1 => KeyCode::Key1,\n\n VirtualKeyCode::Key2 => KeyCode::Key2,\n\n VirtualKeyCode::Key3 => KeyCode::Key3,\n\n VirtualKeyCode::Key4 => KeyCode::Key4,\n\n VirtualKeyCode::Key5 => KeyCode::Key5,\n\n VirtualKeyCode::Key6 => KeyCode::Key6,\n\n VirtualKeyCode::Key7 => KeyCode::Key7,\n\n VirtualKeyCode::Key8 => KeyCode::Key8,\n\n VirtualKeyCode::Key9 => KeyCode::Key9,\n\n VirtualKeyCode::Key0 => KeyCode::Key0,\n\n VirtualKeyCode::A => KeyCode::A,\n\n VirtualKeyCode::B => KeyCode::B,\n\n VirtualKeyCode::C => KeyCode::C,\n\n VirtualKeyCode::D => KeyCode::D,\n\n VirtualKeyCode::E => KeyCode::E,\n\n VirtualKeyCode::F => KeyCode::F,\n\n VirtualKeyCode::G => KeyCode::G,\n\n VirtualKeyCode::H => KeyCode::H,\n", "file_path": "src/utils/mod.rs", "rank": 57, "score": 64325.48738789888 }, { "content": 
"pub fn load_to_scene<P: AsRef<Path>>(scene: &mut Scene, resource_manager: &mut ResourceManager, path: P) -> Result<Handle<Node>, FbxError> {\n\n let start_time = Instant::now();\n\n\n\n Log::writeln(format!(\"Trying to load {:?}\", path.as_ref()));\n\n\n\n let now = Instant::now();\n\n let fbx = FbxDocument::new(path.as_ref())?;\n\n let parsing_time = now.elapsed().as_millis();\n\n\n\n let now = Instant::now();\n\n let fbx_scene = FbxScene::new(&fbx)?;\n\n let dom_prepare_time = now.elapsed().as_millis();\n\n\n\n let now = Instant::now();\n\n let result = convert(&fbx_scene, resource_manager, scene);\n\n let conversion_time = now.elapsed().as_millis();\n\n\n\n Log::writeln(format!(\"FBX {:?} loaded in {} ms\\n\\t- Parsing - {} ms\\n\\t- DOM Prepare - {} ms\\n\\t- Conversion - {} ms\",\n\n path.as_ref(), start_time.elapsed().as_millis(), parsing_time, dom_prepare_time, conversion_time));\n\n\n\n result\n\n}", "file_path": "src/resource/fbx/mod.rs", "rank": 58, "score": 61279.616314314466 }, { "content": "pub fn virtual_key_code_name(code: VirtualKeyCode) -> &'static str {\n\n match code {\n\n VirtualKeyCode::Key1 => \"1\",\n\n VirtualKeyCode::Key2 => \"2\",\n\n VirtualKeyCode::Key3 => \"3\",\n\n VirtualKeyCode::Key4 => \"4\",\n\n VirtualKeyCode::Key5 => \"5\",\n\n VirtualKeyCode::Key6 => \"6\",\n\n VirtualKeyCode::Key7 => \"7\",\n\n VirtualKeyCode::Key8 => \"8\",\n\n VirtualKeyCode::Key9 => \"9\",\n\n VirtualKeyCode::Key0 => \"0\",\n\n VirtualKeyCode::A => \"A\",\n\n VirtualKeyCode::B => \"B\",\n\n VirtualKeyCode::C => \"C\",\n\n VirtualKeyCode::D => \"D\",\n\n VirtualKeyCode::E => \"E\",\n\n VirtualKeyCode::F => \"F\",\n\n VirtualKeyCode::G => \"G\",\n\n VirtualKeyCode::H => \"H\",\n", "file_path": "src/utils/mod.rs", "rank": 59, "score": 59572.09889961035 }, { "content": "fn prepare_source_code(code: &str) -> Result<CString, RendererError> {\n\n let mut shared = \"\\n// include 'shared.glsl'\\n\".to_owned();\n\n shared += 
include_str!(\"../shaders/shared.glsl\");\n\n shared += \"\\n// end of include\\n\";\n\n\n\n if let Some(p) = code.rfind('#') {\n\n let mut full = code.to_owned();\n\n let end = p + full[p..].find('\\n').unwrap() + 1;\n\n full.insert_str(end, &shared);\n\n Ok(CString::new(full)?)\n\n } else {\n\n shared += code;\n\n Ok(CString::new(shared)?)\n\n }\n\n}\n\n\n\nimpl GpuProgram {\n\n pub fn from_source(name: &str, vertex_source: &str, fragment_source: &str) -> Result<GpuProgram, RendererError> {\n\n unsafe {\n\n let vertex_shader = create_shader(format!(\"{}_VertexShader\", name), gl::VERTEX_SHADER, vertex_source)?;\n", "file_path": "src/renderer/framework/gpu_program.rs", "rank": 60, "score": 58785.90487220309 }, { "content": "pub fn read_ascii<R>(reader: &mut R) -> Result<FbxDocument, FbxError>\n\n where R: Read + Seek {\n\n let mut nodes: Pool<FbxNode> = Pool::new();\n\n let root_handle = nodes.spawn(FbxNode {\n\n name: String::from(\"__ROOT__\"),\n\n children: Vec::new(),\n\n parent: Handle::NONE,\n\n attributes: Vec::new(),\n\n });\n\n let mut parent_handle: Handle<FbxNode> = root_handle;\n\n let mut node_handle: Handle<FbxNode> = Handle::NONE;\n\n let mut buffer: Vec<u8> = Vec::new();\n\n let mut name: Vec<u8> = Vec::new();\n\n let mut value: Vec<u8> = Vec::new();\n\n\n\n let buf_len = reader.seek(SeekFrom::End(0))?;\n\n reader.seek(SeekFrom::Start(0))?;\n\n\n\n // Read line by line\n\n while reader.seek(SeekFrom::Current(0))? < buf_len {\n", "file_path": "src/resource/fbx/document/ascii.rs", "rank": 61, "score": 53259.11945271353 }, { "content": "pub fn read_binary<R>(file: &mut R) -> Result<FbxDocument, FbxError>\n\n where R: Read + Seek {\n\n let total_length = file.seek(SeekFrom::End(0))?;\n\n file.seek(SeekFrom::Start(0))?;\n\n\n\n // Ignore all stuff until version.\n\n let mut temp = [0; 23];\n\n file.read_exact(&mut temp)?;\n\n\n\n // Verify version.\n\n let version = file.read_u32::<LittleEndian>()? 
as i32;\n\n if version < 7100 || version > 7400 {\n\n return Err(FbxError::UnsupportedVersion(version));\n\n }\n\n\n\n let mut nodes = Pool::new();\n\n let mut root = FbxNode::default();\n\n root.name = String::from(\"__ROOT__\");\n\n let root_handle = nodes.spawn(root);\n\n\n", "file_path": "src/resource/fbx/document/binary.rs", "rank": 62, "score": 53259.11945271353 }, { "content": "// Create our own engine type aliases. These specializations are needed\n\n// because engine provides a way to extend UI with custom nodes and messages.\n\ntype GameEngine = rg3d::engine::Engine<(), StubNode>;\n", "file_path": "examples/ui.rs", "rank": 63, "score": 52688.90286197097 }, { "content": "// Create our own engine type aliases. These specializations are needed\n\n// because engine provides a way to extend UI with custom nodes and messages.\n\ntype GameEngine = rg3d::engine::Engine<(), StubNode>;\n", "file_path": "examples/async.rs", "rank": 64, "score": 52688.90286197097 }, { "content": "// Create our own engine type aliases. 
These specializations are needed\n\n// because engine provides a way to extend UI with custom nodes and messages.\n\ntype GameEngine = rg3d::engine::Engine<(), StubNode>;\n", "file_path": "examples/simple.rs", "rank": 65, "score": 52688.90286197097 }, { "content": "fn is_binary<P: AsRef<Path>>(path: P) -> Result<bool, FbxError> {\n\n let mut file = File::open(path)?;\n\n let mut magic = [0; 18];\n\n file.read_exact(&mut magic)?;\n\n let fbx_magic = b\"Kaydara FBX Binary\";\n\n Ok(magic == *fbx_magic)\n\n}\n\n\n\nimpl FbxDocument {\n\n pub fn new<P: AsRef<Path>>(path: P) -> Result<FbxDocument, FbxError> {\n\n let is_bin = is_binary(path.as_ref())?;\n\n\n\n let mut reader = BufReader::new(File::open(path)?);\n\n\n\n if is_bin {\n\n binary::read_binary(&mut reader)\n\n } else {\n\n ascii::read_ascii(&mut reader)\n\n }\n\n }\n\n\n\n pub fn root(&self) -> Handle<FbxNode> {\n\n self.root\n\n }\n\n\n\n pub fn nodes(&self) -> &FbxNodeContainer {\n\n &self.nodes\n\n }\n\n}", "file_path": "src/resource/fbx/document/mod.rs", "rank": 66, "score": 51986.208750684 }, { "content": "fn create_shader(name: String, actual_type: GLuint, source: &str) -> Result<GLuint, RendererError> {\n\n unsafe {\n\n let csource = prepare_source_code(source)?;\n\n\n\n let shader = gl::CreateShader(actual_type);\n\n gl::ShaderSource(shader, 1, &csource.as_ptr(), std::ptr::null());\n\n gl::CompileShader(shader);\n\n\n\n let mut status = 1;\n\n gl::GetShaderiv(shader, gl::COMPILE_STATUS, &mut status);\n\n if status == 0 {\n\n let mut log_len = 0;\n\n gl::GetShaderiv(shader, gl::INFO_LOG_LENGTH, &mut log_len);\n\n let mut buffer: Vec<u8> = Vec::with_capacity(log_len as usize);\n\n buffer.set_len(log_len as usize);\n\n gl::GetShaderInfoLog(shader, log_len, std::ptr::null_mut(), buffer.as_mut_ptr() as *mut i8);\n\n let compilation_message = String::from_utf8_unchecked(buffer);\n\n Log::writeln(format!(\"Failed to compile {} shader: {}\", name, compilation_message));\n\n 
Err(RendererError::ShaderCompilationFailed {\n\n shader_name: name,\n\n error_message: compilation_message,\n\n })\n\n } else {\n\n Log::writeln(format!(\"Shader {} compiled!\", name));\n\n Ok(shader)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/renderer/framework/gpu_program.rs", "rank": 67, "score": 51561.96274216042 }, { "content": "// Create our own engine type aliases. These specializations are needed\n\n// because engine provides a way to extend UI with custom nodes and messages.\n\ntype GameEngine = rg3d::engine::Engine<(), StubNode>;\n", "file_path": "examples/3rd_person.rs", "rank": 68, "score": 51554.11022543512 }, { "content": "pub fn into_any_arc<T: Any + Send + Sync>(opt: Option<Arc<T>>) -> Option<Arc<dyn Any + Send + Sync>> {\n\n match opt {\n\n Some(r) => Some(r),\n\n None => None,\n\n }\n\n}", "file_path": "src/utils/mod.rs", "rank": 69, "score": 51353.66986965642 }, { "content": "pub trait Emit {\n\n fn emit(&self, emitter: &Emitter, particle_system: &ParticleSystem, particle: &mut Particle);\n\n}\n\n\n\npub struct BoxEmitter {\n\n half_width: f32,\n\n half_height: f32,\n\n half_depth: f32,\n\n}\n\n\n\nimpl BoxEmitter {\n\n pub fn new(width: f32, height: f32, depth: f32) -> Self {\n\n Self {\n\n half_width: width * 0.5,\n\n half_height: height * 0.5,\n\n half_depth: depth * 0.5,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/scene/particle_system.rs", "rank": 70, "score": 48950.71736531271 }, { "content": "// User interface in the engine build up on graph data structure, on tree to be\n\n// more precise. Each UI element can has single parent and multiple children.\n\n// UI uses complex layout system which automatically organizes your widgets.\n\n// In this example we'll use Grid and StackPanel layout controls. Grid can be\n\n// divided in rows and columns, its child element can set their desired column\n\n// and row and grid will automatically put them in correct position. 
StackPanel\n\n// will \"stack\" UI elements either on top of each other or in one line. Such\n\n// complex layout system was borrowed from WPF framework. You can read more here:\n\n// https://docs.microsoft.com/en-us/dotnet/framework/wpf/advanced/layout\n\nfn create_ui(engine: &mut GameEngine) -> Interface {\n\n let window_width = engine.renderer.get_frame_size().0 as f32;\n\n\n\n // Gather all suitable video modes, we'll use them to fill combo box of\n\n // available resolutions.\n\n let video_modes =\n\n engine.get_window()\n\n .primary_monitor()\n\n .video_modes()\n\n .filter(|vm| {\n\n // Leave only modern video modes, we are not in 1998.\n\n vm.size().width > 800 &&\n\n vm.size().height > 600 &&\n\n vm.bit_depth() == 32\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n let ui = &mut engine.user_interface;\n\n\n\n // First of all create debug text that will show title of example and current FPS.\n", "file_path": "examples/ui.rs", "rank": 71, "score": 45599.95526132589 }, { "content": "use crate::{\n\n core::{\n\n scope_profile,\n\n color::Color,\n\n math::{\n\n vec3::Vec3,\n\n aabb::AxisAlignedBoundingBox,\n\n frustum::Frustum,\n\n Rect\n\n }\n\n },\n\n scene::camera::Camera,\n\n renderer::{\n\n RenderPassStatistics,\n\n error::RendererError,\n\n framework::{\n\n framebuffer::{\n\n FrameBufferTrait,\n\n DrawParameters,\n\n CullFace,\n", "file_path": "src/renderer/debug_renderer.rs", "rank": 72, "score": 44660.398338530715 }, { "content": "use crate::{\n\n scene::{\n\n node::Node,\n\n graph::Graph,\n\n camera::Camera,\n\n },\n\n core::{\n\n scope_profile,\n\n math::Rect,\n\n },\n\n renderer::{\n\n TextureCache,\n\n GeometryCache,\n\n surface::SurfaceSharedData,\n\n error::RendererError,\n\n framework::{\n\n gpu_texture::GpuTexture,\n\n gl,\n\n gpu_program::{\n\n UniformValue,\n", "file_path": "src/renderer/sprite_renderer.rs", "rank": 73, "score": 44659.99883799845 }, { "content": " pub camera: &'c Camera,\n\n pub white_dummy: Rc<RefCell<GpuTexture>>,\n\n pub viewport: 
Rect<i32>,\n\n pub textures: &'a mut TextureCache,\n\n pub geom_map: &'a mut GeometryCache,\n\n}\n\n\n\nimpl SpriteRenderer {\n\n pub fn new() -> Result<Self, RendererError> {\n\n let surface = SurfaceSharedData::make_collapsed_xy_quad();\n\n\n\n Ok(Self {\n\n shader: SpriteShader::new()?,\n\n surface,\n\n })\n\n }\n\n\n\n #[must_use]\n\n pub fn render(&mut self, args: SpriteRenderContext) -> RenderPassStatistics {\n\n scope_profile!();\n", "file_path": "src/renderer/sprite_renderer.rs", "rank": 74, "score": 44654.70060799612 }, { "content": "\n\npub struct UiRenderer {\n\n shader: UiShader,\n\n geometry_buffer: GeometryBuffer<gui::draw::Vertex>,\n\n}\n\n\n\npub struct UiRenderContext<'a, 'b, 'c> {\n\n pub state: &'a mut State,\n\n pub viewport: Rect<i32>,\n\n pub backbuffer: &'b mut BackBuffer,\n\n pub frame_width: f32,\n\n pub frame_height: f32,\n\n pub drawing_context: &'c DrawingContext,\n\n pub white_dummy: Rc<RefCell<GpuTexture>>,\n\n pub texture_cache: &'a mut TextureCache,\n\n}\n\n\n\nimpl UiRenderer {\n\n pub(in crate::renderer) fn new(state: &mut State) -> Result<Self, RendererError> {\n\n let geometry_buffer = GeometryBuffer::new(GeometryBufferKind::DynamicDraw, ElementKind::Triangle);\n", "file_path": "src/renderer/ui_renderer.rs", "rank": 75, "score": 44654.11754658534 }, { "content": " TextureCache,\n\n },\n\n gui::{\n\n brush::Brush,\n\n draw::{\n\n DrawingContext,\n\n CommandKind,\n\n CommandTexture,\n\n },\n\n self,\n\n },\n\n resource::texture::{\n\n Texture,\n\n TextureKind,\n\n },\n\n core::{\n\n scope_profile,\n\n math::{\n\n Rect,\n\n mat4::Mat4,\n\n vec4::Vec4,\n\n vec2::Vec2,\n\n },\n\n color::Color,\n\n },\n\n};\n\nuse crate::renderer::framework::framebuffer::DrawPartContext;\n\n\n", "file_path": "src/renderer/ui_renderer.rs", "rank": 76, "score": 44652.99893692076 }, { "content": " let fragment_source = include_str!(\"shaders/debug_fs.glsl\");\n\n let vertex_source = include_str!(\"shaders/debug_vs.glsl\");\n\n let program = 
GpuProgram::from_source(\"DebugShader\", &vertex_source, &fragment_source)?;\n\n Ok(Self {\n\n wvp_matrix: program.uniform_location(\"worldViewProjection\")?,\n\n program,\n\n })\n\n }\n\n}\n\n\n\npub struct Line {\n\n pub begin: Vec3,\n\n pub end: Vec3,\n\n pub color: Color,\n\n}\n\n\n\nimpl DebugRenderer {\n\n pub(in crate) fn new(state: &mut State) -> Result<Self, RendererError> {\n\n let geometry = GeometryBuffer::new(GeometryBufferKind::DynamicDraw, ElementKind::Line);\n\n\n", "file_path": "src/renderer/debug_renderer.rs", "rank": 77, "score": 44652.890261530956 }, { "content": "\n\n geometry_buffer.bind(state)\n\n .describe_attributes(vec![\n\n AttributeDefinition { kind: AttributeKind::Float2, normalized: false },\n\n AttributeDefinition { kind: AttributeKind::Float2, normalized: false },\n\n ])?;\n\n\n\n Ok(Self {\n\n geometry_buffer,\n\n shader: UiShader::new()?,\n\n })\n\n }\n\n\n\n pub(in crate::renderer) fn render(&mut self, args: UiRenderContext) -> Result<RenderPassStatistics, RendererError> {\n\n scope_profile!();\n\n\n\n let UiRenderContext {\n\n state, viewport, backbuffer,\n\n frame_width, frame_height, drawing_context, white_dummy\n\n , texture_cache\n", "file_path": "src/renderer/ui_renderer.rs", "rank": 78, "score": 44652.799821568165 }, { "content": "use std::{\n\n rc::Rc,\n\n sync::{\n\n Mutex,\n\n Arc,\n\n },\n\n cell::RefCell,\n\n};\n\nuse crate::{\n\n renderer::{\n\n RenderPassStatistics,\n\n framework::{\n\n gl,\n\n geometry_buffer::{\n\n ElementKind,\n\n GeometryBuffer,\n\n AttributeDefinition,\n\n AttributeKind,\n\n GeometryBufferKind,\n\n },\n", "file_path": "src/renderer/ui_renderer.rs", "rank": 79, "score": 44651.296665562164 }, { "content": " gpu_texture::GpuTexture,\n\n gpu_program::{\n\n UniformValue,\n\n GpuProgram,\n\n UniformLocation,\n\n },\n\n state::{\n\n State,\n\n ColorMask,\n\n StencilFunc,\n\n StencilOp,\n\n },\n\n framebuffer::{\n\n BackBuffer,\n\n FrameBufferTrait,\n\n DrawParameters,\n\n CullFace,\n\n },\n\n },\n\n 
error::RendererError,\n", "file_path": "src/renderer/ui_renderer.rs", "rank": 80, "score": 44645.85952162772 }, { "content": " camera_side_vector: program.uniform_location(\"cameraSideVector\")?,\n\n camera_up_vector: program.uniform_location(\"cameraUpVector\")?,\n\n size: program.uniform_location(\"size\")?,\n\n diffuse_texture: program.uniform_location(\"diffuseTexture\")?,\n\n color: program.uniform_location(\"color\")?,\n\n rotation: program.uniform_location(\"rotation\")?,\n\n program,\n\n })\n\n }\n\n}\n\n\n\npub struct SpriteRenderer {\n\n shader: SpriteShader,\n\n surface: SurfaceSharedData,\n\n}\n\n\n\npub struct SpriteRenderContext<'a, 'b, 'c> {\n\n pub state: &'a mut State,\n\n pub framebuffer: &'b mut FrameBuffer,\n\n pub graph: &'c Graph,\n", "file_path": "src/renderer/sprite_renderer.rs", "rank": 81, "score": 44645.77144794395 }, { "content": " // Back face\n\n self.add_line(Line { begin: left_top_back, end: right_top_back, color });\n\n self.add_line(Line { begin: right_top_back, end: right_bottom_back, color });\n\n self.add_line(Line { begin: right_bottom_back, end: left_bottom_back, color });\n\n self.add_line(Line { begin: left_bottom_back, end: left_top_back, color });\n\n\n\n // Edges\n\n self.add_line(Line { begin: left_top_front, end: left_top_back, color });\n\n self.add_line(Line { begin: right_top_front, end: right_top_back, color });\n\n self.add_line(Line { begin: right_bottom_front, end: right_bottom_back, color });\n\n self.add_line(Line { begin: left_bottom_front, end: left_bottom_back, color });\n\n }\n\n\n\n pub(in crate) fn render(&mut self, state: &mut State, viewport: Rect<i32>, framebuffer: &mut FrameBuffer, camera: &Camera) -> RenderPassStatistics {\n\n scope_profile!();\n\n\n\n let mut statistics = RenderPassStatistics::default();\n\n\n\n self.vertices.clear();\n\n self.line_indices.clear();\n", "file_path": "src/renderer/debug_renderer.rs", "rank": 82, "score": 44645.73817306036 }, { "content": " GpuProgram,\n\n 
UniformLocation,\n\n },\n\n framebuffer::{\n\n FrameBuffer,\n\n DrawParameters,\n\n CullFace,\n\n FrameBufferTrait,\n\n },\n\n state::State,\n\n },\n\n RenderPassStatistics,\n\n },\n\n};\n\nuse std::{\n\n rc::Rc,\n\n cell::RefCell,\n\n};\n\n\n", "file_path": "src/renderer/sprite_renderer.rs", "rank": 83, "score": 44644.815998524595 }, { "content": " FrameBuffer\n\n },\n\n geometry_buffer::{\n\n GeometryBuffer,\n\n GeometryBufferKind,\n\n AttributeDefinition,\n\n AttributeKind,\n\n ElementKind\n\n },\n\n gpu_program::{\n\n UniformLocation,\n\n GpuProgram,\n\n UniformValue\n\n },\n\n state::State\n\n }\n\n }\n\n};\n\nuse rg3d_core::math::mat4::Mat4;\n\n\n\n#[repr(C)]\n", "file_path": "src/renderer/debug_renderer.rs", "rank": 84, "score": 44644.42420014107 }, { "content": "\n\n let mut statistics = RenderPassStatistics::default();\n\n\n\n let SpriteRenderContext {\n\n state, framebuffer, graph,\n\n camera, white_dummy, viewport,\n\n textures, geom_map\n\n } = args;\n\n\n\n state.set_blend_func(gl::SRC_ALPHA, gl::ONE_MINUS_SRC_ALPHA);\n\n\n\n let inv_view = camera.inv_view_matrix().unwrap();\n\n\n\n let camera_up = inv_view.up();\n\n let camera_side = inv_view.side();\n\n\n\n for node in graph.linear_iter() {\n\n let sprite = if let Node::Sprite(sprite) = node {\n\n sprite\n\n } else {\n", "file_path": "src/renderer/sprite_renderer.rs", "rank": 85, "score": 44640.777072071425 }, { "content": " geometry.bind(state)\n\n .describe_attributes(vec![\n\n AttributeDefinition { kind: AttributeKind::Float3, normalized: false },\n\n AttributeDefinition { kind: AttributeKind::UnsignedByte4, normalized: true },\n\n ])?;\n\n\n\n Ok(Self {\n\n geometry,\n\n shader: DebugShader::new()?,\n\n lines: Default::default(),\n\n vertices: Default::default(),\n\n line_indices: Default::default(),\n\n })\n\n }\n\n\n\n pub fn add_line(&mut self, line: Line) {\n\n self.lines.push(line);\n\n }\n\n\n\n pub fn clear_lines(&mut self) {\n", "file_path": "src/renderer/debug_renderer.rs", "rank": 86, 
"score": 44639.89679950421 }, { "content": " } = args;\n\n\n\n let mut statistics = RenderPassStatistics::default();\n\n\n\n state.set_blend_func(gl::SRC_ALPHA, gl::ONE_MINUS_SRC_ALPHA);\n\n\n\n let geometry_buffer = self.geometry_buffer.bind(state);\n\n\n\n geometry_buffer\n\n .set_triangles(drawing_context.get_triangles())\n\n .set_vertices(drawing_context.get_vertices());\n\n\n\n let ortho = Mat4::ortho(0.0, frame_width, frame_height,\n\n 0.0, -1.0, 1.0);\n\n\n\n for cmd in drawing_context.get_commands() {\n\n let mut diffuse_texture = white_dummy.clone();\n\n let mut is_font_texture = false;\n\n let mut color_write = true;\n\n\n", "file_path": "src/renderer/ui_renderer.rs", "rank": 87, "score": 44639.371655813775 }, { "content": " self.lines.clear()\n\n }\n\n\n\n pub fn draw_frustum(&mut self, frustum: &Frustum, color: Color) {\n\n let left_top_front = frustum.left_top_front_corner();\n\n let left_bottom_front = frustum.left_bottom_front_corner();\n\n let right_bottom_front = frustum.right_bottom_front_corner();\n\n let right_top_front = frustum.right_top_front_corner();\n\n\n\n let left_top_back = frustum.left_top_back_corner();\n\n let left_bottom_back = frustum.left_bottom_back_corner();\n\n let right_bottom_back = frustum.right_bottom_back_corner();\n\n let right_top_back = frustum.right_top_back_corner();\n\n\n\n // Front face\n\n self.add_line(Line { begin: left_top_front, end: right_top_front, color });\n\n self.add_line(Line { begin: right_top_front, end: right_bottom_front, color });\n\n self.add_line(Line { begin: right_bottom_front, end: left_bottom_front, color });\n\n self.add_line(Line { begin: left_bottom_front, end: left_top_front, color });\n\n\n", "file_path": "src/renderer/debug_renderer.rs", "rank": 88, "score": 44637.97938400764 }, { "content": " // Back face\n\n self.add_line(Line { begin: left_top_back, end: right_top_back, color });\n\n self.add_line(Line { begin: right_top_back, end: right_bottom_back, color });\n\n self.add_line(Line { 
begin: right_bottom_back, end: left_bottom_back, color });\n\n self.add_line(Line { begin: left_bottom_back, end: left_top_back, color });\n\n\n\n // Edges\n\n self.add_line(Line { begin: left_top_front, end: left_top_back, color });\n\n self.add_line(Line { begin: right_top_front, end: right_top_back, color });\n\n self.add_line(Line { begin: right_bottom_front, end: right_bottom_back, color });\n\n self.add_line(Line { begin: left_bottom_front, end: left_bottom_back, color });\n\n }\n\n\n\n pub fn draw_aabb(&mut self, aabb: &AxisAlignedBoundingBox, color: Color) {\n\n let left_bottom_front = Vec3::new(aabb.min.x, aabb.min.y, aabb.max.z);\n\n let left_top_front = Vec3::new(aabb.min.x, aabb.max.y, aabb.max.z);\n\n let right_top_front = Vec3::new(aabb.max.x, aabb.max.y, aabb.max.z);\n\n let right_bottom_front = Vec3::new(aabb.max.x, aabb.min.y, aabb.max.z);\n\n\n\n let left_bottom_back = Vec3::new(aabb.min.x, aabb.min.y, aabb.min.z);\n", "file_path": "src/renderer/debug_renderer.rs", "rank": 89, "score": 44637.662248780165 }, { "content": " self.add_line(Line { begin: left_bottom_front, end: left_bottom_back, color });\n\n }\n\n\n\n pub fn draw_oob(&mut self, aabb: &AxisAlignedBoundingBox, transform: Mat4, color: Color) {\n\n let left_bottom_front = transform.transform_vector(Vec3::new(aabb.min.x, aabb.min.y, aabb.max.z));\n\n let left_top_front = transform.transform_vector(Vec3::new(aabb.min.x, aabb.max.y, aabb.max.z));\n\n let right_top_front = transform.transform_vector(Vec3::new(aabb.max.x, aabb.max.y, aabb.max.z));\n\n let right_bottom_front = transform.transform_vector(Vec3::new(aabb.max.x, aabb.min.y, aabb.max.z));\n\n\n\n let left_bottom_back = transform.transform_vector(Vec3::new(aabb.min.x, aabb.min.y, aabb.min.z));\n\n let left_top_back = transform.transform_vector(Vec3::new(aabb.min.x, aabb.max.y, aabb.min.z));\n\n let right_top_back = transform.transform_vector(Vec3::new(aabb.max.x, aabb.max.y, aabb.min.z));\n\n let right_bottom_back = 
transform.transform_vector(Vec3::new(aabb.max.x, aabb.min.y, aabb.min.z));\n\n\n\n // Front face\n\n self.add_line(Line { begin: left_top_front, end: right_top_front, color });\n\n self.add_line(Line { begin: right_top_front, end: right_bottom_front, color });\n\n self.add_line(Line { begin: right_bottom_front, end: left_bottom_front, color });\n\n self.add_line(Line { begin: left_bottom_front, end: left_top_front, color });\n\n\n", "file_path": "src/renderer/debug_renderer.rs", "rank": 90, "score": 44637.49181133595 }, { "content": " let tex = Texture::from_bytes(\n\n font.get_atlas_size() as u32,\n\n font.get_atlas_size() as u32,\n\n TextureKind::R8,\n\n font.get_atlas_pixels().to_vec(),\n\n );\n\n font.texture = Some(Arc::new(Mutex::new(tex)));\n\n }\n\n if let Some(texture) = texture_cache.get(state, font.texture.clone().unwrap().downcast::<Mutex<Texture>>().unwrap()) {\n\n diffuse_texture = texture;\n\n }\n\n is_font_texture = true;\n\n }\n\n CommandTexture::Texture(texture) => {\n\n if let Ok(texture) = texture.clone().downcast::<Mutex<Texture>>() {\n\n if let Some(texture) = texture_cache.get(state, texture) {\n\n diffuse_texture = texture;\n\n }\n\n }\n\n }\n", "file_path": "src/renderer/ui_renderer.rs", "rank": 91, "score": 44635.59640221116 }, { "content": " match cmd.get_kind() {\n\n CommandKind::Clip => {\n\n if cmd.get_nesting() == 1 {\n\n backbuffer.clear(state, viewport, None, None, Some(0));\n\n }\n\n state.set_stencil_op(StencilOp { zpass: gl::INCR, ..Default::default() });\n\n // Make sure that clipping rect will be drawn at previous nesting level only (clip to parent)\n\n state.set_stencil_func(StencilFunc { func: gl::EQUAL, ref_value: i32::from(cmd.get_nesting() - 1), ..Default::default() });\n\n // Draw clipping geometry to stencil buffers\n\n state.set_stencil_mask(0xFF);\n\n color_write = false;\n\n }\n\n CommandKind::Geometry => {\n\n // Make sure to draw geometry only on clipping geometry with current nesting level\n\n 
state.set_stencil_func(StencilFunc { func: gl::EQUAL, ref_value: i32::from(cmd.get_nesting()), ..Default::default() });\n\n\n\n match cmd.texture() {\n\n CommandTexture::Font(font_arc) => {\n\n let mut font = font_arc.lock().unwrap();\n\n if font.texture.is_none() {\n", "file_path": "src/renderer/ui_renderer.rs", "rank": 92, "score": 44635.59640221116 }, { "content": " let left_top_back = Vec3::new(aabb.min.x, aabb.max.y, aabb.min.z);\n\n let right_top_back = Vec3::new(aabb.max.x, aabb.max.y, aabb.min.z);\n\n let right_bottom_back = Vec3::new(aabb.max.x, aabb.min.y, aabb.min.z);\n\n\n\n // Front face\n\n self.add_line(Line { begin: left_top_front, end: right_top_front, color });\n\n self.add_line(Line { begin: right_top_front, end: right_bottom_front, color });\n\n self.add_line(Line { begin: right_bottom_front, end: left_bottom_front, color });\n\n self.add_line(Line { begin: left_bottom_front, end: left_top_front, color });\n\n\n\n // Back face\n\n self.add_line(Line { begin: left_top_back, end: right_top_back, color });\n\n self.add_line(Line { begin: right_top_back, end: right_bottom_back, color });\n\n self.add_line(Line { begin: right_bottom_back, end: left_bottom_back, color });\n\n self.add_line(Line { begin: left_bottom_back, end: left_top_back, color });\n\n\n\n // Edges\n\n self.add_line(Line { begin: left_top_front, end: left_top_back, color });\n\n self.add_line(Line { begin: right_top_front, end: right_top_back, color });\n\n self.add_line(Line { begin: right_bottom_front, end: right_bottom_back, color });\n", "file_path": "src/renderer/debug_renderer.rs", "rank": 93, "score": 44635.59640221116 }, { "content": " let vertex_source = include_str!(\"shaders/ui_vs.glsl\");\n\n let program = GpuProgram::from_source(\"UIShader\", vertex_source, fragment_source)?;\n\n Ok(Self {\n\n wvp_matrix: program.uniform_location(\"worldViewProjection\")?,\n\n diffuse_texture: program.uniform_location(\"diffuseTexture\")?,\n\n is_font: 
program.uniform_location(\"isFont\")?,\n\n solid_color: program.uniform_location(\"solidColor\")?,\n\n brush_type: program.uniform_location(\"brushType\")?,\n\n gradient_point_count: program.uniform_location(\"gradientPointCount\")?,\n\n gradient_colors: program.uniform_location(\"gradientColors\")?,\n\n gradient_stops: program.uniform_location(\"gradientStops\")?,\n\n gradient_origin: program.uniform_location(\"gradientOrigin\")?,\n\n gradient_end: program.uniform_location(\"gradientEnd\")?,\n\n bounds_min: program.uniform_location(\"boundsMin\")?,\n\n bounds_max: program.uniform_location(\"boundsMax\")?,\n\n resolution: program.uniform_location(\"resolution\")?,\n\n program,\n\n })\n\n }\n\n}\n", "file_path": "src/renderer/ui_renderer.rs", "rank": 94, "score": 44635.59640221116 }, { "content": " _ => ()\n\n }\n\n\n\n // Do not draw geometry to stencil buffer\n\n state.set_stencil_mask(0);\n\n }\n\n }\n\n\n\n let mut raw_stops = [0.0; 16];\n\n let mut raw_colors = [Vec4::default(); 16];\n\n\n\n let uniforms = [\n\n (self.shader.diffuse_texture, UniformValue::Sampler { index: 0, texture: diffuse_texture }),\n\n (self.shader.wvp_matrix, UniformValue::Mat4(ortho)),\n\n (self.shader.resolution, UniformValue::Vec2(Vec2::new(frame_width, frame_height))),\n\n (self.shader.bounds_min, UniformValue::Vec2(cmd.min())),\n\n (self.shader.bounds_max, UniformValue::Vec2(cmd.max())),\n\n (self.shader.is_font, UniformValue::Bool(is_font_texture)),\n\n (self.shader.brush_type, UniformValue::Integer({\n\n match cmd.brush() {\n", "file_path": "src/renderer/ui_renderer.rs", "rank": 95, "score": 44635.59640221116 }, { "content": "\n\n let mut i = 0;\n\n for line in self.lines.iter() {\n\n let color = line.color.into();\n\n self.vertices.push(Vertex { position: line.begin, color });\n\n self.vertices.push(Vertex { position: line.end, color });\n\n self.line_indices.push([i, i + 1]);\n\n i += 2;\n\n }\n\n self.geometry\n\n .bind(state)\n\n .set_vertices(&self.vertices)\n\n 
.set_lines(&self.line_indices);\n\n\n\n statistics += framebuffer.draw(\n\n &self.geometry,\n\n state,\n\n viewport,\n\n &self.shader.program,\n\n DrawParameters {\n", "file_path": "src/renderer/debug_renderer.rs", "rank": 96, "score": 44635.59640221116 }, { "content": " Brush::Solid(_) => Vec2::ZERO,\n\n Brush::LinearGradient { to, .. } => *to,\n\n Brush::RadialGradient { .. } => Vec2::ZERO,\n\n }\n\n })),\n\n (self.shader.gradient_point_count, UniformValue::Integer({\n\n match cmd.brush() {\n\n Brush::Solid(_) => 0,\n\n Brush::LinearGradient { stops, .. } | Brush::RadialGradient { stops, .. } => stops.len() as i32,\n\n }\n\n })),\n\n (self.shader.gradient_stops, UniformValue::FloatArray({\n\n match cmd.brush() {\n\n Brush::Solid(_) => &[],\n\n Brush::LinearGradient { stops, .. } | Brush::RadialGradient { stops, .. } => {\n\n for (i, point) in stops.iter().enumerate() {\n\n raw_stops[i] = point.stop;\n\n }\n\n &raw_stops\n\n }\n", "file_path": "src/renderer/ui_renderer.rs", "rank": 97, "score": 44635.59640221116 }, { "content": " cull_face: CullFace::Back,\n\n culling: false,\n\n color_write: Default::default(),\n\n depth_write: false,\n\n stencil_test: false,\n\n depth_test: true,\n\n blend: false\n\n },\n\n &[\n\n (self.shader.wvp_matrix, UniformValue::Mat4(camera.view_projection_matrix()))\n\n ]\n\n );\n\n\n\n statistics.draw_calls += 1;\n\n\n\n statistics\n\n }\n\n}", "file_path": "src/renderer/debug_renderer.rs", "rank": 98, "score": 44635.59640221116 }, { "content": " Brush::Solid(_) => 0,\n\n Brush::LinearGradient { .. } => 1,\n\n Brush::RadialGradient { .. } => 2,\n\n }\n\n })),\n\n (self.shader.solid_color, UniformValue::Color({\n\n match cmd.brush() {\n\n Brush::Solid(color) => *color,\n\n _ => Color::WHITE,\n\n }\n\n })),\n\n (self.shader.gradient_origin, UniformValue::Vec2({\n\n match cmd.brush() {\n\n Brush::Solid(_) => Vec2::ZERO,\n\n Brush::LinearGradient { from, .. } => *from,\n\n Brush::RadialGradient { center, .. 
} => *center,\n\n }\n\n })),\n\n (self.shader.gradient_end, UniformValue::Vec2({\n\n match cmd.brush() {\n", "file_path": "src/renderer/ui_renderer.rs", "rank": 99, "score": 44635.59640221116 } ]
Rust
core/bin/zksync_api/src/api_server/rest/v1/mod.rs
huitseeker/zksync
5b936b1855a08033cca7f75d6f87fde106c6e8fd
pub use self::error::{Error, ErrorBody}; use actix_web::{ web::{self, Json}, Scope, }; use serde::{Deserialize, Serialize}; use zksync_config::{ApiServerOptions, ConfigurationOptions}; use zksync_types::BlockNumber; use crate::api_server::tx_sender::TxSender; mod blocks; pub mod client; mod config; mod error; mod operations; mod search; #[cfg(test)] mod test_utils; mod tokens; mod transactions; pub const MAX_LIMIT: u32 = 100; type JsonResult<T> = std::result::Result<web::Json<T>, Error>; pub(crate) fn api_scope( tx_sender: TxSender, env_options: ConfigurationOptions, api_server_options: ApiServerOptions, ) -> Scope { web::scope("/api/v1") .service(config::api_scope(&env_options)) .service(blocks::api_scope( &api_server_options, tx_sender.pool.clone(), )) .service(transactions::api_scope(tx_sender.clone())) .service(operations::api_scope(tx_sender.pool.clone())) .service(search::api_scope(tx_sender.pool.clone())) .service(tokens::api_scope( tx_sender.tokens, tx_sender.ticker_requests, )) } #[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Default)] struct PaginationQuery { before: Option<BlockNumber>, after: Option<BlockNumber>, limit: BlockNumber, } #[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq)] pub enum Pagination { Before(BlockNumber), After(BlockNumber), Last, } impl PaginationQuery { fn into_inner(self) -> Result<(Pagination, BlockNumber), Error> { let (pagination, limit) = match self { Self { before: Some(before), after: None, limit, } => Ok((Pagination::Before(before), limit)), Self { before: None, after: Some(after), limit, } => Ok((Pagination::After(after), limit)), Self { before: None, after: None, limit, } => Ok((Pagination::Last, limit)), _ => Err(Error::bad_request("Incorrect pagination query") .detail("Pagination query contains both `before` and `after` values.")), }?; if limit == 0 { return Err(Error::bad_request("Incorrect pagination query") .detail("Limit should be greater than zero")); } if limit > MAX_LIMIT { return 
Err(Error::bad_request("Incorrect pagination query") .detail(format!("Limit should be lower than {}", MAX_LIMIT))); } Ok((pagination, limit)) } } impl Pagination { fn into_max(self, limit: BlockNumber) -> Result<Option<BlockNumber>, Error> { assert!(limit > 0, "Limit should be greater than zero"); match self { Pagination::Before(before) => { if before < 1 { return Err(Error::bad_request("Incorrect pagination query") .detail("Before should be greater than zero")); } Ok(Some(before - 1)) } Pagination::After(after) => Ok(Some(after + limit + 1)), Pagination::Last => Ok(None), } } fn into_query(self, limit: BlockNumber) -> PaginationQuery { match self { Pagination::Before(before) => PaginationQuery { before: Some(before), limit, ..PaginationQuery::default() }, Pagination::After(after) => PaginationQuery { after: Some(after), limit, ..PaginationQuery::default() }, Pagination::Last => PaginationQuery { limit, ..PaginationQuery::default() }, } } } #[test] fn pagination_before_max_limit() { let pagination = Pagination::Before(10); let max = pagination.into_max(10).unwrap(); assert_eq!(max, Some(9)) } #[test] fn pagination_after_max_limit() { let pagination = Pagination::After(10); let max = pagination.into_max(10).unwrap(); assert_eq!(max, Some(21)) }
pub use self::error::{Error, ErrorBody}; use actix_web::{ web::{self, Json}, Scope, }; use serde::{Deserialize, Serialize}; use zksync_config::{ApiServerOptions, ConfigurationOptions}; use zksync_types::BlockNumber; use crate::api_server::tx_sender::TxSender; mod blocks; pub mod client; mod config; mod error; mod operations; mod search; #[cfg(test)] mod test_utils; mod tokens; mod transactions; pub const MAX_LIMIT: u32 = 100; type JsonResult<T> = std::result::Result<web::Json<T>, Error>; pub(crate) fn api_scope( tx_sender: TxSender, env_options: ConfigurationOptions, api_server_options: ApiServerOptions, ) -> Scope { web::scope("/api/v1") .service(config::api_scope(&env_options)) .service(blocks::api_scope( &api_server_options, tx_sender.pool.clone(), )) .service(transactions::api_scope(tx_sender.clone())) .service(operations::api_scope(tx_sender.pool.clone())) .service(search::api_scope(tx_sender.pool.clone())) .service(tokens::api_scope( tx_sender.tokens, tx_sender.ticker_requests, )) } #[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Default)] struct PaginationQuery { before: Option<BlockNumber>, after: Option<BlockNumber>, limit: BlockNumber, } #[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq)] pub enum Pagination { Before(BlockNumber), After(BlockNumber), Last, } impl PaginationQuery { fn into_inner(self) -> Result<(Pagination, BlockNumber), Error> { let (pagination, limit) = match self { Self { before: Some(before), after: None, limit, } => Ok((Pagination::Before(before), limit)), Self { before: None, after: Some(after), limit, } => Ok((Pagination::After(after), limit)), Self { before: None, after: None, limit, } => Ok((Pagination::Last, limit)), _ => Err(Error::bad_request("Incorrect pagination query") .detail("Pagination query contains both `before` and `after` values.")), }?; if limit == 0 { return Err(Error::bad_request("Incorrect pagination query") .detail("Limit should be greater than zero")); } if limit > MAX_LIMIT { return 
Err(Error::bad_request("Incorrect pagination query") .detail(format!("Limit should be lower than {}", MAX_LIMIT))); } Ok((pagination, limit)) } } impl Pagination { fn into_max(self, limit: BlockNumber) -> Result<Option<BlockNumber>, Error> { assert!(limit > 0, "Limit should be greater than zero"); match self { Pagination::Before(before) => { if before < 1 { return Err(Error::bad_request("Incorrect pagination query") .detail("Before should be greater than zer
fn into_query(self, limit: BlockNumber) -> PaginationQuery { match self { Pagination::Before(before) => PaginationQuery { before: Some(before), limit, ..PaginationQuery::default() }, Pagination::After(after) => PaginationQuery { after: Some(after), limit, ..PaginationQuery::default() }, Pagination::Last => PaginationQuery { limit, ..PaginationQuery::default() }, } } } #[test] fn pagination_before_max_limit() { let pagination = Pagination::Before(10); let max = pagination.into_max(10).unwrap(); assert_eq!(max, Some(9)) } #[test] fn pagination_after_max_limit() { let pagination = Pagination::After(10); let max = pagination.into_max(10).unwrap(); assert_eq!(max, Some(21)) }
o")); } Ok(Some(before - 1)) } Pagination::After(after) => Ok(Some(after + limit + 1)), Pagination::Last => Ok(None), } }
function_block-function_prefixed
[ { "content": "/// Takes name of the config, extends it to the constant and volatile config paths,\n\n/// loads them and merged into on object.\n\nfn merge_configs(config: &str) -> serde_json::Value {\n\n let mut constant_config = load_json(&config_path(&format!(\"constant/{}\", config)));\n\n let mut volatile_config = load_json(&config_path(&format!(\"volatile/{}\", config)));\n\n\n\n constant_config\n\n .as_object_mut()\n\n .expect(\"Cannot merge not at object\")\n\n .append(volatile_config.as_object_mut().unwrap());\n\n\n\n constant_config\n\n}\n\n\n\n/// Configuration for EIP1271-compatible test smart wallet.\n\n#[derive(Debug, Deserialize)]\n\npub struct EIP1271Config {\n\n /// Private key of the account owner (to sign transactions).\n\n pub owner_private_key: H256,\n\n /// Address of the account owner (set in contract).\n\n pub owner_address: Address,\n\n /// Address of the smart wallet contract.\n", "file_path": "core/lib/config/src/test_config/mod.rs", "rank": 0, "score": 317462.57038252044 }, { "content": "#[doc(hidden)]\n\npub fn get_genesis_token_list(network: &str) -> Result<Vec<TokenGenesisListItem>, anyhow::Error> {\n\n let mut file_path = parse_env::<PathBuf>(\"ZKSYNC_HOME\");\n\n file_path.push(\"etc\");\n\n file_path.push(\"tokens\");\n\n file_path.push(network);\n\n file_path.set_extension(\"json\");\n\n Ok(serde_json::from_str(&read_to_string(file_path)?)?)\n\n}\n\n\n\n/// Token price known to the zkSync network.\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct TokenPrice {\n\n #[serde(with = \"UnsignedRatioSerializeAsDecimal\")]\n\n pub usd_price: Ratio<BigUint>,\n\n pub last_updated: DateTime<Utc>,\n\n}\n\n\n\n/// Type of transaction fees that exist in the zkSync network.\n\n#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Hash, Eq)]\n\npub enum TxFeeTypes {\n", "file_path": "core/lib/types/src/tokens.rs", "rank": 1, "score": 305844.24153312936 }, { "content": "/// Deserializes either a `String` or `Vec<u8>` into 
`Vec<u8>`.\n\n/// The reason we cannot expect just a vector is backward compatibility: messages\n\n/// used to be stored as strings.\n\npub fn deserialize_eth_message<'de, D>(deserializer: D) -> Result<Vec<u8>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct StringOrVec;\n\n\n\n impl<'de> Visitor<'de> for StringOrVec {\n\n type Value = Vec<u8>;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"a byte array or a string\")\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: Error,\n\n {\n\n Ok(v.as_bytes().to_vec())\n\n }\n\n\n", "file_path": "core/lib/types/src/tx/utils.rs", "rank": 2, "score": 295793.9015603957 }, { "content": "fn load_json(path: &str) -> serde_json::Value {\n\n serde_json::from_str(&fs::read_to_string(path).expect(\"Invalid config path\"))\n\n .expect(\"Invalid config format\")\n\n}\n\n\n", "file_path": "core/lib/config/src/test_config/mod.rs", "rank": 3, "score": 290694.11392210575 }, { "content": "/// Gets smallest block size given the list of supported chunk sizes.\n\npub fn smallest_block_size_for_chunks(\n\n chunks_used: usize,\n\n available_block_sizes: &[usize],\n\n) -> usize {\n\n for &block_size in available_block_sizes {\n\n if block_size >= chunks_used {\n\n return block_size;\n\n }\n\n }\n\n panic!(\n\n \"Provided chunks amount ({}) cannot fit in one block, maximum available size is {}\",\n\n chunks_used,\n\n available_block_sizes.last().unwrap()\n\n );\n\n}\n", "file_path": "core/lib/types/src/block.rs", "rank": 4, "score": 286914.1682161319 }, { "content": "/// Saves specified error message in the file log.\n\npub fn save_error(category: &str, reason: impl Display) {\n\n let msg = Message::ErrorOccurred {\n\n category: category.to_string(),\n\n reason: reason.to_string(),\n\n };\n\n\n\n tokio::spawn(async move {\n\n session()\n\n .sender\n\n .clone()\n\n .send(msg)\n\n .await\n\n .expect(\"Unable to save error 
message\")\n\n });\n\n}\n\n\n\nasync fn run_messages_writer(\n\n out_dir: PathBuf,\n\n mut receiver: Receiver<Message>,\n\n) -> anyhow::Result<()> {\n", "file_path": "core/tests/loadtest/src/session.rs", "rank": 5, "score": 270178.76885715954 }, { "content": "/// Creates a sample operation to be stored in `operations` table.\n\n/// This function is required since `eth_operations` table is linked to\n\n/// the `operations` table by the operation id.\n\npub fn get_commit_operation(block_number: BlockNumber) -> Operation {\n\n Operation {\n\n id: None,\n\n action: Action::Commit,\n\n block: Block::new(\n\n block_number,\n\n Fr::default(),\n\n 0,\n\n Vec::new(),\n\n (0, 0),\n\n 100,\n\n 1_000_000.into(),\n\n 1_500_000.into(),\n\n ),\n\n }\n\n}\n\n\n", "file_path": "core/lib/storage/src/tests/ethereum.rs", "rank": 6, "score": 269583.27731383283 }, { "content": "/// Same as `get_commit_operation`, but creates a verify operation instead.\n\npub fn get_verify_operation(block_number: BlockNumber) -> Operation {\n\n let action = Action::Verify {\n\n proof: Default::default(),\n\n };\n\n Operation {\n\n id: None,\n\n action,\n\n block: Block::new(\n\n block_number,\n\n Fr::default(),\n\n 0,\n\n Vec::new(),\n\n (0, 0),\n\n 100,\n\n 1_000_000.into(),\n\n 1_500_000.into(),\n\n ),\n\n }\n\n}\n\n\n", "file_path": "core/lib/storage/src/tests/ethereum.rs", "rank": 7, "score": 269582.74472094426 }, { "content": "pub fn noop_operation(tree: &CircuitAccountTree, acc_id: u32) -> Operation<Bn256> {\n\n let signature_data = SignatureData::init_empty();\n\n let first_sig_msg = Fr::zero();\n\n let second_sig_msg = Fr::zero();\n\n let third_sig_msg = Fr::zero();\n\n let signer_pub_key_packed = [Some(false); 256];\n\n\n\n let acc = tree.get(acc_id).unwrap();\n\n let account_address_fe = Fr::from_str(&acc_id.to_string()).unwrap();\n\n let token_fe = Fr::zero();\n\n let balance_value = match acc.subtree.get(0) {\n\n None => Fr::zero(),\n\n Some(bal) => bal.value,\n\n };\n\n let pubdata = 
vec![false; CHUNK_BIT_WIDTH];\n\n let pubdata_chunks: Vec<_> = pubdata\n\n .chunks(CHUNK_BIT_WIDTH)\n\n .map(|x| le_bit_vector_into_field_element(&x.to_vec()))\n\n .collect();\n\n let (audit_account, audit_balance) = get_audits(tree, acc_id, 0);\n", "file_path": "core/lib/circuit/src/witness/noop.rs", "rank": 8, "score": 264620.29504831287 }, { "content": "pub fn create_withdraw_tx() -> ExecutedOperations {\n\n let withdraw_op = ZkSyncOp::Withdraw(Box::new(WithdrawOp {\n\n tx: Withdraw::new(\n\n 0,\n\n Default::default(),\n\n Default::default(),\n\n 0,\n\n 100u32.into(),\n\n 10u32.into(),\n\n 12,\n\n None,\n\n ),\n\n account_id: 0,\n\n }));\n\n\n\n let executed_withdraw_op = ExecutedTx {\n\n signed_tx: withdraw_op.try_get_tx().unwrap().into(),\n\n success: true,\n\n op: Some(withdraw_op),\n\n fail_reason: None,\n\n block_index: None,\n\n created_at: Utc::now(),\n\n batch_id: None,\n\n };\n\n\n\n ExecutedOperations::Tx(Box::new(executed_withdraw_op))\n\n}\n\n\n", "file_path": "core/lib/types/src/tests/utils.rs", "rank": 9, "score": 262884.83089370903 }, { "content": "pub fn create_change_pubkey_tx() -> ExecutedOperations {\n\n let change_pubkey_op = ZkSyncOp::ChangePubKeyOffchain(Box::new(ChangePubKeyOp {\n\n tx: ChangePubKey::new(\n\n 1,\n\n Default::default(),\n\n Default::default(),\n\n 0,\n\n Default::default(),\n\n Default::default(),\n\n None,\n\n None,\n\n ),\n\n account_id: 0,\n\n }));\n\n\n\n let executed_change_pubkey_op = ExecutedTx {\n\n signed_tx: change_pubkey_op.try_get_tx().unwrap().into(),\n\n success: true,\n\n op: Some(change_pubkey_op),\n\n fail_reason: None,\n\n block_index: None,\n\n created_at: Utc::now(),\n\n batch_id: None,\n\n };\n\n\n\n ExecutedOperations::Tx(Box::new(executed_change_pubkey_op))\n\n}\n", "file_path": "core/lib/types/src/tests/utils.rs", "rank": 10, "score": 260062.89309965842 }, { "content": "pub fn create_full_exit_op() -> ExecutedOperations {\n\n let priority_op = FullExit {\n\n account_id: 0,\n\n eth_address: 
Address::zero(),\n\n token: 0,\n\n };\n\n ExecutedOperations::PriorityOp(Box::new(ExecutedPriorityOp {\n\n priority_op: PriorityOp {\n\n serial_id: 0,\n\n data: ZkSyncPriorityOp::FullExit(priority_op.clone()),\n\n deadline_block: 0,\n\n eth_hash: Vec::new(),\n\n eth_block: 0,\n\n },\n\n op: ZkSyncOp::FullExit(Box::new(FullExitOp {\n\n priority_op,\n\n withdraw_amount: None,\n\n })),\n\n block_index: 0,\n\n created_at: Utc::now(),\n\n }))\n\n}\n\n\n", "file_path": "core/lib/types/src/tests/utils.rs", "rank": 11, "score": 260062.89309965842 }, { "content": "pub fn parse_ether(eth_value: &str) -> Result<BigUint, anyhow::Error> {\n\n let split = eth_value.split('.').collect::<Vec<&str>>();\n\n ensure!(split.len() == 1 || split.len() == 2, \"Wrong eth value\");\n\n let string_wei_value = if split.len() == 1 {\n\n format!(\"{}000000000000000000\", split[0])\n\n } else if split.len() == 2 {\n\n let before_dot = split[0];\n\n let after_dot = split[1];\n\n ensure!(\n\n after_dot.len() <= 18,\n\n \"ETH value can have up to 18 digits after dot.\"\n\n );\n\n let zeros_to_pad = 18 - after_dot.len();\n\n format!(\"{}{}{}\", before_dot, after_dot, \"0\".repeat(zeros_to_pad))\n\n } else {\n\n unreachable!()\n\n };\n\n\n\n Ok(BigUint::from_str(&string_wei_value)?)\n\n}\n\n\n\n/// Used to sign and post ETH transactions for the zkSync contracts.\n\n#[derive(Debug, Clone)]\n\npub struct EthereumAccount<T: Transport> {\n\n pub private_key: H256,\n\n pub address: Address,\n\n pub main_contract_eth_client: ETHClient<T, PrivateKeySigner>,\n\n}\n\n\n", "file_path": "core/tests/testkit/src/eth_account.rs", "rank": 12, "score": 254931.9461875638 }, { "content": "pub fn bench_signatures(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"Signature verify\");\n\n group.throughput(Throughput::Elements(1));\n\n group.bench_function(\n\n \"bench_signature_verify_zksync_musig\",\n\n bench_signature_zksync_musig_verify,\n\n );\n\n group.bench_function(\n\n 
\"bench_signature_verify_eth_packed\",\n\n bench_signature_verify_eth_packed,\n\n );\n\n group.bench_function(\n\n \"bench_signature_seckp_recover\",\n\n bench_signature_seckp_recover,\n\n );\n\n group.finish();\n\n}\n\n\n\ncriterion_group!(signature_benches, bench_signatures);\n", "file_path": "core/lib/types/benches/criterion/signatures/mod.rs", "rank": 13, "score": 252584.26362541504 }, { "content": "pub fn bench_primitives(c: &mut Criterion) {\n\n c.bench_function(\"u64_get_bits_le\", bench_u64_get_bits_le);\n\n\n\n let mut group = c.benchmark_group(\"Bit Converters\");\n\n\n\n group.throughput(Throughput::Bytes(BYTE_SLICE_SIZE as u64));\n\n group.bench_function(\"bytes_into_be_bits\", bench_bytes_into_be_bits);\n\n group.bench_function(\"pack_bits_into_bytes\", bench_pack_bits_into_bytes);\n\n group.bench_function(\n\n \"pack_bits_into_bytes_in_order\",\n\n bench_pack_bits_into_bytes_in_order,\n\n );\n\n group.bench_function(\"BitIterator::next\", bench_bit_iterator_le_next);\n\n\n\n group.finish();\n\n\n\n c.bench_function(\n\n \"bench_circuit_account_transform\",\n\n bench_circuit_account_transform,\n\n );\n\n}\n\n\n\ncriterion_group!(primitives_benches, bench_primitives);\n", "file_path": "core/lib/types/benches/criterion/primitives/mod.rs", "rank": 14, "score": 252584.26362541504 }, { "content": "/// Checks whether the token amount can be packed (and thus used in the transaction).\n\npub fn is_token_amount_packable(amount: &BigUint) -> bool {\n\n Some(amount.clone()) == unpack_token_amount(&pack_token_amount(amount))\n\n}\n\n\n", "file_path": "core/lib/types/src/helpers.rs", "rank": 15, "score": 250374.21821803233 }, { "content": "/// Generates a random account with a set of changes.\n\npub fn gen_acc_random_updates<R: Rng>(rng: &mut R) -> impl Iterator<Item = (u32, AccountUpdate)> {\n\n let id: u32 = rng.gen();\n\n let balance = u128::from(rng.gen::<u64>());\n\n let nonce: u32 = rng.gen();\n\n let pub_key_hash = PubKeyHash { data: rng.gen() };\n\n let 
address: Address = rng.gen::<[u8; 20]>().into();\n\n\n\n let mut a = Account::default_with_address(&address);\n\n let old_nonce = nonce;\n\n a.nonce = old_nonce + 2;\n\n a.pub_key_hash = pub_key_hash;\n\n\n\n let old_balance = a.get_balance(0);\n\n a.set_balance(0, BigUint::from(balance));\n\n let new_balance = a.get_balance(0);\n\n vec![\n\n (\n\n id,\n\n AccountUpdate::Create {\n\n nonce: old_nonce,\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 16, "score": 249149.8754802618 }, { "content": "/// Returns the closest possible packable fee amount.\n\n/// Returned amount is always less or equal to the provided amount.\n\npub fn closest_packable_token_amount(amount: &BigUint) -> BigUint {\n\n let fee_packed = pack_token_amount(&amount);\n\n unpack_token_amount(&fee_packed).expect(\"token amount repacking\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::TokenLike;\n\n use serde::{Deserialize, Serialize};\n\n\n\n #[test]\n\n fn test_roundtrip() {\n\n let zero = BigUint::from_u32(1).unwrap();\n\n let one = BigUint::from_u32(1).unwrap();\n\n {\n\n let round_trip_zero = unpack_token_amount(&pack_token_amount(&zero));\n\n let round_trip_one = unpack_token_amount(&pack_token_amount(&one));\n\n assert_eq!(Some(zero.clone()), round_trip_zero);\n\n assert_eq!(Some(one.clone()), round_trip_one);\n", "file_path": "core/lib/types/src/helpers.rs", "rank": 17, "score": 244949.37239884387 }, { "content": "/// Transforms the token amount into packed form.\n\n/// If the provided token amount is not packable, it is rounded down to the\n\n/// closest amount that fits in packed form. 
As a result, some precision will be lost.\n\npub fn pack_token_amount(amount: &BigUint) -> Vec<u8> {\n\n FloatConversions::pack(\n\n amount,\n\n params::AMOUNT_EXPONENT_BIT_WIDTH,\n\n params::AMOUNT_MANTISSA_BIT_WIDTH,\n\n )\n\n}\n\n\n", "file_path": "core/lib/types/src/helpers.rs", "rank": 18, "score": 243631.28792484262 }, { "content": "/// Attempts to unpack the token amount.\n\npub fn unpack_token_amount(data: &[u8]) -> Option<BigUint> {\n\n FloatConversions::unpack(\n\n data,\n\n params::AMOUNT_EXPONENT_BIT_WIDTH,\n\n params::AMOUNT_MANTISSA_BIT_WIDTH,\n\n )\n\n .and_then(BigUint::from_u128)\n\n}\n\n\n", "file_path": "core/lib/types/src/helpers.rs", "rank": 19, "score": 243630.8021406831 }, { "content": "#[test]\n\n#[ignore]\n\nfn corrupted_last_operation() {\n\n // Perform some operations\n\n let mut circuit = apply_many_ops();\n\n\n\n // Try to cut off an operation at end.\n\n circuit.operations.pop();\n\n\n\n // As we removed the last operation, the last chunk of the block is no longer the last chunk of\n\n // the corresponding transaction.\n\n // See `circuit.rs` for details.\n\n let expected_msg =\n\n \"ensure last chunk of the block is a last chunk of corresponding transaction\";\n\n\n\n let error = check_circuit_non_panicking(circuit)\n\n .expect_err(\"Corrupted operations list should lead to an error\");\n\n\n\n assert!(\n\n error.contains(expected_msg),\n\n \"corrupted_operations: Got error message '{}', but expected '{}'\",\n\n error,\n\n expected_msg\n\n );\n\n}\n\n\n\n/// Checks that corrupted list of operations in block leads to predictable errors.\n\n/// Check for chunk in the beginning of the operations list.\n", "file_path": "core/lib/circuit/src/witness/tests/mod.rs", "rank": 20, "score": 241602.0315582235 }, { "content": "/// Runs the massive API spam routine.\n\n///\n\n/// This process will continue until the cancel command is occurred or the limit is reached.\n\npub fn run(monitor: Monitor) -> (ApiTestsFuture, CancellationToken) {\n\n let 
cancellation = CancellationToken::default();\n\n\n\n let token = cancellation.clone();\n\n let future = async move {\n\n log::info!(\"API tests starting...\");\n\n\n\n let mut builder = ApiTestsBuilder::new(token.clone());\n\n builder = sdk_tests::wire_tests(builder, &monitor);\n\n builder = rest_api_tests::wire_tests(builder, &monitor);\n\n let report = builder.run().await;\n\n\n\n log::info!(\"API tests finished\");\n\n\n\n report\n\n }\n\n .boxed();\n\n\n\n (future, cancellation)\n\n}\n", "file_path": "core/tests/loadtest/src/api/mod.rs", "rank": 21, "score": 240976.66196537035 }, { "content": "/// Generates a new `PrivateKey` from seed using a deterministic algorithm:\n\n/// seed is hashed via `sha256` hash (twice), and the output treated as a `PrivateKey`.\n\n/// If the obtained value doesn't have a correct value to be a `PrivateKey`, hashing operation is applied\n\n/// repeatedly to the previous output, until the value can be interpreted as a `PrivateKey`.\n\npub fn private_key_from_seed(seed: &[u8]) -> Result<PrivateKey, ClientError> {\n\n if seed.len() < 32 {\n\n return Err(ClientError::SeedTooShort);\n\n }\n\n\n\n let sha256_bytes = |input: &[u8]| {\n\n let mut hasher = Sha256::new();\n\n hasher.input(input);\n\n hasher.result()\n\n };\n\n\n\n let mut effective_seed = sha256_bytes(seed);\n\n\n\n loop {\n\n let raw_priv_key = sha256_bytes(&effective_seed);\n\n let mut fs_repr = FsRepr::default();\n\n fs_repr\n\n .read_be(&raw_priv_key[..])\n\n .expect(\"failed to read raw_priv_key\");\n\n match Fs::from_repr(fs_repr) {\n", "file_path": "sdk/zksync-rs/src/utils.rs", "rank": 22, "score": 235766.7462375164 }, { "content": "/// Formats amount in wei to tokens.\n\n/// Behaves just like js ethers.utils.formatEther\n\npub fn format_ether(wei: impl ToString) -> String {\n\n format_units(wei, 18)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_format_units() {\n\n // Test vector of (decimals, wei input, expected output)\n\n let 
vals = vec![\n\n (0, \"1000000000000000100000\", \"1000000000000000100000.0\"),\n\n (1, \"0\", \"0.0\"),\n\n (1, \"11000000000000000000\", \"1100000000000000000.0\"),\n\n (2, \"0\", \"0.0\"),\n\n (2, \"1000000000000000100000\", \"10000000000000001000.0\"),\n\n (4, \"10001000000\", \"1000100.0\"),\n\n (4, \"10100000000000000000000\", \"1010000000000000000.0\"),\n\n (4, \"110\", \"0.011\"),\n", "file_path": "core/lib/utils/src/format.rs", "rank": 23, "score": 229054.5724354274 }, { "content": "/// Max token id, based on the number of processable tokens\n\npub fn max_token_id() -> TokenId {\n\n number_of_processable_tokens() as u16 - 1\n\n}\n\n\n\npub const ETH_TOKEN_ID: TokenId = 0;\n\n\n\npub const ACCOUNT_ID_BIT_WIDTH: usize = 32;\n\n\n\npub const INPUT_DATA_ADDRESS_BYTES_WIDTH: usize = 32;\n\npub const INPUT_DATA_BLOCK_NUMBER_BYTES_WIDTH: usize = 32;\n\npub const INPUT_DATA_FEE_ACC_BYTES_WIDTH_WITH_EMPTY_OFFSET: usize = 32;\n\npub const INPUT_DATA_FEE_ACC_BYTES_WIDTH: usize = 3;\n\npub const INPUT_DATA_ROOT_BYTES_WIDTH: usize = 32;\n\npub const INPUT_DATA_EMPTY_BYTES_WIDTH: usize = 64;\n\npub const INPUT_DATA_ROOT_HASH_BYTES_WIDTH: usize = 32;\n\n\n\npub const TOKEN_BIT_WIDTH: usize = 16;\n\npub const TX_TYPE_BIT_WIDTH: usize = 8;\n\n\n\n/// Account subtree hash width\n", "file_path": "core/lib/crypto/src/params.rs", "rank": 24, "score": 228634.18020559632 }, { "content": "pub fn serialize_proof(\n\n proof: &Proof<Engine, PlonkCsWidth4WithNextStepParams>,\n\n) -> EncodedProofPlonk {\n\n let mut inputs = vec![];\n\n for input in proof.input_values.iter() {\n\n let ser = EthereumSerializer::serialize_fe(input);\n\n inputs.push(ser);\n\n }\n\n let mut serialized_proof = vec![];\n\n\n\n for c in proof.wire_commitments.iter() {\n\n let (x, y) = EthereumSerializer::serialize_g1(c);\n\n serialized_proof.push(x);\n\n serialized_proof.push(y);\n\n }\n\n\n\n let (x, y) = EthereumSerializer::serialize_g1(&proof.grand_product_commitment);\n\n serialized_proof.push(x);\n\n 
serialized_proof.push(y);\n\n\n", "file_path": "core/lib/prover_utils/src/lib.rs", "rank": 25, "score": 228130.21742795996 }, { "content": "/// Generates dummy operation with the default `new_root_hash` in the block.\n\npub fn gen_operation(\n\n block_number: BlockNumber,\n\n action: Action,\n\n block_chunks_size: usize,\n\n) -> Operation {\n\n gen_operation_with_txs(block_number, action, block_chunks_size, vec![])\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 26, "score": 227624.31826084785 }, { "content": "/// Generates dummy operation with the default `new_root_hash` in the block and given set of transactions.\n\npub fn gen_operation_with_txs(\n\n block_number: BlockNumber,\n\n action: Action,\n\n block_chunks_size: usize,\n\n txs: Vec<ExecutedOperations>,\n\n) -> Operation {\n\n Operation {\n\n id: None,\n\n action,\n\n block: Block {\n\n block_number,\n\n new_root_hash: Fr::default(),\n\n fee_account: 0,\n\n block_transactions: txs,\n\n processed_priority_ops: (0, 0),\n\n block_chunks_size,\n\n commit_gas_limit: 1_000_000.into(),\n\n verify_gas_limit: 1_500_000.into(),\n\n },\n\n }\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 27, "score": 225311.4032077331 }, { "content": "/// Generates dummy operation with the unique `new_root_hash` in the block.\n\npub fn gen_unique_operation(\n\n block_number: BlockNumber,\n\n action: Action,\n\n block_chunks_size: usize,\n\n) -> Operation {\n\n gen_unique_operation_with_txs(block_number, action, block_chunks_size, vec![])\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 28, "score": 225300.52226742584 }, { "content": "pub fn is_signature_from_address(\n\n signature: &PackedEthSignature,\n\n msg: &[u8],\n\n address: Address,\n\n) -> Result<bool, SignerError> {\n\n let signature_is_correct = signature\n\n .signature_recover_signer(msg)\n\n .map_err(|err| SignerError::RecoverAddress(err.to_string()))?\n\n == address;\n\n 
Ok(signature_is_correct)\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum AddressOrIndex {\n\n Address(Address),\n\n Index(usize),\n\n}\n\n\n\n/// Describes whether to add a prefix `\\x19Ethereum Signed Message:\\n`\n\n/// when requesting a message signature.\n", "file_path": "core/lib/eth_signer/src/json_rpc_signer.rs", "rank": 29, "score": 223533.904218425 }, { "content": "/// Creates several random updates for the provided account map,\n\n/// and returns the resulting account map together with the list\n\n/// of generated updates.\n\npub fn apply_random_updates(\n\n mut accounts: AccountMap,\n\n rng: &mut XorShiftRng,\n\n) -> (AccountMap, Vec<(u32, AccountUpdate)>) {\n\n let updates = (0..3)\n\n .map(|_| gen_acc_random_updates(rng))\n\n .flatten()\n\n .collect::<AccountUpdates>();\n\n apply_updates(&mut accounts, updates.clone());\n\n (accounts, updates)\n\n}\n\n\n\n/// Here we create updates for blocks 1,2,3 (commit 3 blocks)\n\n/// We apply updates for blocks 1,2 (verify 2 blocks)\n\n/// Make sure that we can get state for all blocks.\n\n#[db_test]\n\nasync fn test_commit_rewind(mut storage: StorageProcessor<'_>) -> QueryResult<()> {\n\n let _ = env_logger::try_init();\n\n let mut rng = create_rng();\n\n\n", "file_path": "core/lib/storage/src/tests/chain/block.rs", "rank": 30, "score": 223252.6175385436 }, { "content": "/// Generates dummy operation with the unique `new_root_hash` in the block and\n\n/// given set of transactions..\n\npub fn gen_unique_operation_with_txs(\n\n block_number: BlockNumber,\n\n action: Action,\n\n block_chunks_size: usize,\n\n txs: Vec<ExecutedOperations>,\n\n) -> Operation {\n\n Operation {\n\n id: None,\n\n action,\n\n block: Block {\n\n block_number,\n\n new_root_hash: dummy_root_hash_for_block(block_number),\n\n fee_account: 0,\n\n block_transactions: txs,\n\n processed_priority_ops: (0, 0),\n\n block_chunks_size,\n\n commit_gas_limit: 1_000_000.into(),\n\n verify_gas_limit: 1_500_000.into(),\n\n },\n\n }\n\n}\n", "file_path": 
"core/lib/storage/src/test_data.rs", "rank": 31, "score": 223056.90342477342 }, { "content": "/// Number of supported tokens.\n\npub fn total_tokens() -> usize {\n\n 2usize.pow(balance_tree_depth() as u32)\n\n}\n\n\n", "file_path": "core/lib/crypto/src/params.rs", "rank": 32, "score": 221524.68973907386 }, { "content": "/// Generates proof for exit given circuit using step-by-step algorithm.\n\npub fn gen_verified_proof_for_exit_circuit<C: Circuit<Engine> + Clone>(\n\n circuit: C,\n\n) -> Result<EncodedProofPlonk, anyhow::Error> {\n\n let vk = VerificationKey::read(File::open(get_exodus_verification_key_path())?)?;\n\n\n\n log::info!(\"Proof for circuit started\");\n\n\n\n let hints = transpile(circuit.clone())?;\n\n let setup = setup(circuit.clone(), &hints)?;\n\n let size_log2 = setup.n.next_power_of_two().trailing_zeros();\n\n\n\n let size_log2 = std::cmp::max(size_log2, SETUP_MIN_POW2); // for exit circuit\n\n let key_monomial_form = get_universal_setup_monomial_form(size_log2, false)?;\n\n\n\n let proof = prove_by_steps::<_, _, RollingKeccakTranscript<Fr>>(\n\n circuit,\n\n &hints,\n\n &setup,\n\n None,\n\n &key_monomial_form,\n\n )?;\n\n\n\n let valid = verify::<_, RollingKeccakTranscript<Fr>>(&proof, &vk)?;\n\n anyhow::ensure!(valid, \"proof for exit is invalid\");\n\n\n\n log::info!(\"Proof for circuit successful\");\n\n Ok(serialize_proof(&proof))\n\n}\n\n\n", "file_path": "core/lib/prover_utils/src/lib.rs", "rank": 33, "score": 220922.11110537188 }, { "content": "/// Converts \"gwei\" amount to the \"wei\".\n\npub fn gwei_to_wei(gwei: impl Into<BigUint>) -> BigUint {\n\n gwei.into() * BigUint::from(10u64.pow(9))\n\n}\n\n\n\n/// Creates a future which represents a collection of the outputs of the futures\n\n/// given.\n\n///\n\n/// But unlike the `futures::future::join_all` method, it performs futures in chunks\n\n/// to reduce descriptors usage.\n\npub async fn wait_all_chunks<I>(chunk_sizes: &[usize], i: I) -> Vec<<I::Item as 
Future>::Output>\n\nwhere\n\n I: IntoIterator,\n\n I::Item: Future,\n\n{\n\n let mut output = Vec::new();\n\n for chunk in DynamicChunks::new(i, chunk_sizes) {\n\n let values = futures::future::join_all(chunk).await;\n\n output.extend(values);\n\n }\n\n output\n", "file_path": "core/tests/loadtest/src/utils.rs", "rank": 34, "score": 219575.98759456765 }, { "content": "/// Number of tokens that are processed by this release\n\npub fn number_of_processable_tokens() -> usize {\n\n let num = 128;\n\n\n\n assert!(num <= total_tokens());\n\n assert!(num.is_power_of_two());\n\n\n\n num\n\n}\n\n\n", "file_path": "core/lib/crypto/src/params.rs", "rank": 35, "score": 219201.20540774323 }, { "content": "pub fn build_block_witness<'a>(\n\n account_tree: &'a mut CircuitAccountTree,\n\n block: &Block,\n\n) -> Result<WitnessBuilder<'a>, anyhow::Error> {\n\n let block_number = block.block_number;\n\n let block_size = block.block_chunks_size;\n\n\n\n log::info!(\"building prover data for block {}\", &block_number);\n\n\n\n let mut witness_accum = WitnessBuilder::new(account_tree, block.fee_account, block_number);\n\n\n\n let ops = block\n\n .block_transactions\n\n .iter()\n\n .filter_map(|tx| tx.get_executed_op().cloned());\n\n\n\n let mut operations = vec![];\n\n let mut pub_data = vec![];\n\n let mut fees = vec![];\n\n for op in ops {\n", "file_path": "core/lib/circuit/src/witness/utils.rs", "rank": 36, "score": 219057.51742701273 }, { "content": "/// Encode JsonWebToken with shared secret - secret,\n\n/// sub - message and exp - time until token will be valid\n\npub fn encode_auth_token(secret: &str, sub: &str, exp: usize) -> Result<String, JwtError> {\n\n let payload = PayloadAuthToken {\n\n sub: sub.to_string(),\n\n exp,\n\n };\n\n encode(\n\n &Header::default(),\n\n &payload,\n\n &EncodingKey::from_secret(secret.as_ref()),\n\n )\n\n}\n", "file_path": "infrastructure/tok_cli/src/utils.rs", "rank": 37, "score": 217883.0549601758 }, { "content": "/// Depth of the left subtree of 
the account tree that can be used in the current version of the circuit.\n\npub fn used_account_subtree_depth() -> usize {\n\n let num = 24; // total accounts = 2.pow(num) ~ 16mil\n\n\n\n assert!(num <= account_tree_depth());\n\n\n\n num\n\n}\n\n\n", "file_path": "core/lib/crypto/src/params.rs", "rank": 38, "score": 217138.81498385704 }, { "content": "fn signing_failed_error(err: impl ToString) -> SignerError {\n\n SignerError::SigningFailed(err.to_string())\n\n}\n\n\n\npub struct Signer<S: EthereumSigner> {\n\n pub pubkey_hash: PubKeyHash,\n\n pub address: Address,\n\n pub(crate) private_key: PrivateKey,\n\n pub(crate) eth_signer: Option<S>,\n\n pub(crate) account_id: Option<AccountId>,\n\n}\n\n\n\nimpl<S: EthereumSigner> fmt::Debug for Signer<S> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let mut pk_contents = Vec::new();\n\n self.private_key\n\n .write(&mut pk_contents)\n\n .expect(\"Failed writing the private key contents\");\n\n f.debug_struct(\"Signer\")\n\n .field(\"pubkey_hash\", &self.pubkey_hash)\n", "file_path": "sdk/zksync-rs/src/signer.rs", "rank": 39, "score": 215863.11735151813 }, { "content": "pub fn reverse_bytes<T: Clone>(bits: &[T]) -> Vec<T> {\n\n bits.chunks(8)\n\n .rev()\n\n .map(|x| x.to_vec())\n\n .fold(Vec::new(), |mut acc, mut byte| {\n\n acc.append(&mut byte);\n\n acc\n\n })\n\n}\n\n\n", "file_path": "core/lib/circuit/src/utils.rs", "rank": 40, "score": 214953.43058624372 }, { "content": "/// Formats amount in wei to tokens with precision.\n\n/// Behaves just like ethers.utils.formatUnits\n\npub fn format_units(wei: impl ToString, units: u8) -> String {\n\n let mut chars: VecDeque<char> = wei.to_string().chars().collect();\n\n\n\n while chars.len() < units as usize {\n\n chars.push_front('0');\n\n }\n\n chars.insert(chars.len() - units as usize, '.');\n\n if *chars.front().unwrap() == '.' 
{\n\n chars.push_front('0');\n\n }\n\n while *chars.back().unwrap() == '0' {\n\n chars.pop_back();\n\n }\n\n if *chars.back().unwrap() == '.' {\n\n chars.push_back('0');\n\n }\n\n chars.iter().collect()\n\n}\n\n\n", "file_path": "core/lib/utils/src/format.rs", "rank": 41, "score": 214938.56573507376 }, { "content": "/// Creates a dummy new root hash for the block based on its number.\n\npub fn dummy_root_hash_for_block(block_number: BlockNumber) -> Fr {\n\n Fr::from_str(&block_number.to_string()).unwrap()\n\n}\n\n\n", "file_path": "core/lib/storage/src/test_data.rs", "rank": 42, "score": 214613.49776242895 }, { "content": "/// Creates a fixed-seed RNG for tests.\n\npub fn create_rng() -> XorShiftRng {\n\n XorShiftRng::from_seed([0, 1, 2, 3])\n\n}\n", "file_path": "core/lib/storage/src/tests/mod.rs", "rank": 43, "score": 214153.44383488537 }, { "content": "/// Returns `ethabi::Contract` object for ERC-20 smart contract interface.\n\npub fn ierc20_contract() -> ethabi::Contract {\n\n load_contract(IERC20_INTERFACE)\n\n}\n\n\n\n/// `EthereumProvider` gains access to on-chain operations, such as deposits and full exits.\n\n/// Methods to interact with Ethereum return corresponding Ethereum transaction hash.\n\n/// In order to monitor transaction execution, an Ethereum node `web3` API is exposed\n\n/// via `EthereumProvider::web3` method.\n\n#[derive(Debug)]\n\npub struct EthereumProvider<S: EthereumSigner> {\n\n tokens_cache: TokensCache,\n\n eth_client: ETHClient<Http, S>,\n\n erc20_abi: ethabi::Contract,\n\n confirmation_timeout: Duration,\n\n}\n\n\n\nimpl<S: EthereumSigner> EthereumProvider<S> {\n\n /// Creates a new Ethereum provider.\n\n pub async fn new<P: Provider>(\n\n provider: &P,\n", "file_path": "sdk/zksync-rs/src/ethereum/mod.rs", "rank": 44, "score": 210920.18221261151 }, { "content": "/// Returns `ethabi::Contract` object for zkSync smart contract.\n\npub fn zksync_contract() -> ethabi::Contract {\n\n load_contract(ZKSYNC_INTERFACE)\n\n}\n\n\n", 
"file_path": "sdk/zksync-rs/src/ethereum/mod.rs", "rank": 45, "score": 210920.18221261151 }, { "content": "pub fn get_block_verification_key_path(block_chunks: usize) -> PathBuf {\n\n let mut key = get_keys_root_dir();\n\n key.push(&format!(\"verification_block_{}.key\", block_chunks));\n\n key\n\n}\n\n\n", "file_path": "core/lib/prover_utils/src/fs_utils.rs", "rank": 46, "score": 205345.565487432 }, { "content": "/// Converts `BigUint` value into the corresponding `U256` value.\n\npub fn biguint_to_u256(value: BigUint) -> U256 {\n\n let bytes = value.to_bytes_le();\n\n U256::from_little_endian(&bytes)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn biguint_u256_conversion_roundrip(u256: U256) {\n\n let biguint = u256_to_biguint(u256);\n\n // Make sure that the string representations are the same.\n\n assert_eq!(biguint.to_string(), u256.to_string());\n\n\n\n let restored = biguint_to_u256(biguint);\n\n assert_eq!(u256, restored);\n\n }\n\n\n\n #[test]\n\n fn test_zero_conversion() {\n", "file_path": "sdk/zksync-rs/src/utils.rs", "rank": 47, "score": 204901.73028873873 }, { "content": "/// Converts `U256` into the corresponding `BigUint` value.\n\npub fn u256_to_biguint(value: U256) -> BigUint {\n\n let mut bytes = [0u8; 32];\n\n value.to_little_endian(&mut bytes);\n\n BigUint::from_bytes_le(&bytes)\n\n}\n\n\n", "file_path": "sdk/zksync-rs/src/utils.rs", "rank": 48, "score": 204900.90796236368 }, { "content": "#[test]\n\n#[should_panic]\n\nfn no_supported_block_size() {\n\n Block::new_from_available_block_sizes(\n\n 0,\n\n Default::default(),\n\n 0,\n\n vec![create_withdraw_tx()],\n\n (0, 0),\n\n &[0],\n\n 1_000_000.into(),\n\n 1_500_000.into(),\n\n );\n\n}\n\n\n\n/// Checks that the byte order is indeed big-endian.\n", "file_path": "core/lib/types/src/tests/block.rs", "rank": 49, "score": 204607.14323583897 }, { "content": "fn bench_bytes_into_be_bits(b: &mut Bencher<'_>) {\n\n let value: Vec<u8> = vec![0xAB; BYTE_SLICE_SIZE];\n\n\n\n let 
value_ref: &[u8] = value.as_ref();\n\n\n\n b.iter(|| {\n\n let _ = BitConvert::from_be_bytes(black_box(value_ref));\n\n });\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/primitives/mod.rs", "rank": 50, "score": 204289.00582797368 }, { "content": "/// Replaces a sequence of updates with the sequence of updates required to revert\n\n/// the applied state change.\n\npub fn reverse_updates(updates: &mut AccountUpdates) {\n\n updates.reverse();\n\n for (_, acc_upd) in updates.iter_mut() {\n\n *acc_upd = acc_upd.reversed_update();\n\n }\n\n}\n\n\n", "file_path": "core/lib/types/src/helpers.rs", "rank": 51, "score": 204100.2881063344 }, { "content": "#[proc_macro_attribute]\n\npub fn test(_args: TokenStream, item: TokenStream) -> TokenStream {\n\n let input = syn::parse_macro_input!(item as syn::ItemFn);\n\n\n\n for attr in &input.attrs {\n\n if attr.path.is_ident(\"test\") {\n\n let msg = \"second test attribute is supplied\";\n\n return syn::Error::new_spanned(&attr, msg)\n\n .to_compile_error()\n\n .into();\n\n }\n\n }\n\n\n\n parse_knobs(input).unwrap_or_else(|e| e.to_compile_error().into())\n\n}\n", "file_path": "core/lib/storage/db_test_macro/src/lib.rs", "rank": 52, "score": 204012.16493949835 }, { "content": "pub fn apply_leaf_operation<Fa: Fn(&mut CircuitAccount<Bn256>), Fb: Fn(&mut Balance<Bn256>)>(\n\n tree: &mut CircuitAccountTree,\n\n account_address: u32,\n\n token: u32,\n\n fa: Fa,\n\n fb: Fb,\n\n) -> (AccountWitness<Bn256>, AccountWitness<Bn256>, Fr, Fr) {\n\n let default_account = CircuitAccount::default();\n\n\n\n //applying deposit\n\n let mut account = tree.remove(account_address).unwrap_or(default_account);\n\n let account_witness_before = AccountWitness::from_circuit_account(&account);\n\n let mut balance = account\n\n .subtree\n\n .remove(token)\n\n .unwrap_or(Balance { value: Fr::zero() });\n\n let balance_before = balance.value;\n\n fb(&mut balance);\n\n let balance_after = balance.value;\n\n account.subtree.insert(token, 
balance);\n", "file_path": "core/lib/circuit/src/witness/utils.rs", "rank": 53, "score": 203809.69700759012 }, { "content": "pub fn big_decimal_to_ratio(num: &BigDecimal) -> Result<Ratio<BigUint>, anyhow::Error> {\n\n let (big_int, exp) = num.as_bigint_and_exponent();\n\n anyhow::ensure!(!big_int.is_negative(), \"BigDecimal should be unsigned\");\n\n let big_uint = big_int.to_biguint().unwrap();\n\n let ten_pow = BigUint::from(10 as u32).pow(exp as u128);\n\n Ok(Ratio::new(big_uint, ten_pow))\n\n}\n\n\n", "file_path": "core/lib/utils/src/convert.rs", "rank": 54, "score": 203080.52859468965 }, { "content": "pub fn address_to_stored_string(address: &Address) -> String {\n\n format!(\"0x{:x}\", address)\n\n}\n\n\n", "file_path": "core/lib/storage/src/tokens/utils.rs", "rank": 55, "score": 202591.44801516633 }, { "content": "fn bench_pack_bits_into_bytes(b: &mut Bencher<'_>) {\n\n let value: Vec<bool> = vec![true; BYTE_SLICE_SIZE * 8];\n\n\n\n let setup = || value.clone();\n\n\n\n b.iter_batched(\n\n setup,\n\n |value| {\n\n let _ = BitConvert::into_bytes(black_box(value));\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/primitives/mod.rs", "rank": 56, "score": 202179.7737575425 }, { "content": "/// For reference, raw speed of optimized signature library\n\nfn bench_signature_seckp_recover(b: &mut Bencher<'_>) {\n\n let mut rng = XorShiftRng::from_seed([1, 2, 3, 4]);\n\n\n\n let message = secp256k1::Message::from_slice(&rng.gen::<[u8; 32]>()).expect(\"msg creation\");\n\n let secret_key =\n\n &secp256k1::SecretKey::from_slice(&rng.gen::<[u8; 32]>()).expect(\"secret key creation\");\n\n\n\n let secp = secp256k1::Secp256k1::new();\n\n let signature = secp.sign_recoverable(&message, &secret_key);\n\n\n\n let verify_secp = secp256k1::Secp256k1::verification_only();\n\n\n\n let setup = || (&verify_secp, message, signature);\n\n b.iter_batched(\n\n setup,\n\n |(secp, msg, sign)| {\n\n let _ = black_box(secp.recover(&msg, 
&sign));\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/signatures/mod.rs", "rank": 57, "score": 202179.7737575425 }, { "content": "fn bench_circuit_account_transform(b: &mut Bencher<'_>) {\n\n let setup = || {\n\n let mut account = Account::default_with_address(&Address::from_slice(\n\n &hex::decode(\"0102030405060708091011121314151617181920\").unwrap(),\n\n ));\n\n account.set_balance(1, 1u32.into());\n\n account.set_balance(2, 2u32.into());\n\n account.nonce = 3;\n\n account.pub_key_hash =\n\n PubKeyHash::from_hex(\"sync:0102030405060708091011121314151617181920\").unwrap();\n\n account\n\n };\n\n\n\n b.iter_batched(\n\n setup,\n\n |account| {\n\n let _ = CircuitAccount::from(black_box(account));\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/primitives/mod.rs", "rank": 58, "score": 202179.7737575425 }, { "content": "/// Checks whether the fee amount can be packed (and thus used in the transaction).\n\npub fn is_fee_amount_packable(amount: &BigUint) -> bool {\n\n Some(amount.clone()) == unpack_fee_amount(&pack_fee_amount(amount))\n\n}\n\n\n", "file_path": "core/lib/types/src/helpers.rs", "rank": 59, "score": 201995.90414343541 }, { "content": "pub fn str_to_address(value: &str) -> Result<Address> {\n\n let str_addr = value[\"0x\".len()..].parse().context(\"Error parse address\")?;\n\n Ok(str_addr)\n\n}\n\n\n", "file_path": "infrastructure/tok_cli/src/utils.rs", "rank": 60, "score": 200908.76976413734 }, { "content": "pub fn stored_str_address_to_address(address: &str) -> Address {\n\n assert_eq!(address.len(), 42, \"db stored token address length\");\n\n address[2..]\n\n .parse()\n\n .expect(\"failed to parse stored db address\")\n\n}\n\n\n\n#[cfg(test)]\n\npub mod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn address_store_roundtrip() {\n\n let address = Address::random();\n\n let stored_address = address_to_stored_string(&address);\n\n 
assert_eq!(address, stored_str_address_to_address(&stored_address));\n\n }\n\n}\n", "file_path": "core/lib/storage/src/tokens/utils.rs", "rank": 61, "score": 200527.76380841044 }, { "content": "fn bench_bit_iterator_le_next(b: &mut Bencher<'_>) {\n\n let value: Vec<u64> = vec![0xDEAD_BEEF_DEAD_BEEF; BYTE_SLICE_SIZE / 8];\n\n\n\n let setup = || BitIteratorLe::new(&value);\n\n\n\n b.iter_batched(\n\n setup,\n\n |bit_iterator| {\n\n for _ in bit_iterator {\n\n // Do nothing, we're just draining the iterator.\n\n }\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/primitives/mod.rs", "rank": 62, "score": 200130.80578988887 }, { "content": "fn bench_u64_get_bits_le(b: &mut Bencher<'_>) {\n\n let value: u64 = 0xDEAD_BEEF_DEAD_BEEF;\n\n\n\n b.iter(|| {\n\n let _ = black_box(value).get_bits_le();\n\n });\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/primitives/mod.rs", "rank": 63, "score": 200130.80578988887 }, { "content": "fn bench_signature_verify_eth_packed(b: &mut Bencher<'_>) {\n\n let mut rng = XorShiftRng::from_seed([1, 2, 3, 4]);\n\n const TYPICAL_ETH_SIGNATURE_LEN: usize = 150;\n\n\n\n let pk = H256(rng.gen());\n\n\n\n let message = rng\n\n .gen_iter::<u8>()\n\n .take(TYPICAL_ETH_SIGNATURE_LEN)\n\n .collect::<Vec<_>>();\n\n\n\n let signature = PackedEthSignature::sign(&pk, &message).unwrap();\n\n\n\n let setup = || (signature.clone(), message.clone());\n\n\n\n b.iter_batched(\n\n setup,\n\n |(signature, msg)| {\n\n let _ = black_box(signature.signature_recover_signer(&msg));\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/signatures/mod.rs", "rank": 64, "score": 200130.80578988887 }, { "content": "fn bench_signature_zksync_musig_verify(b: &mut Bencher<'_>) {\n\n let mut rng = XorShiftRng::from_seed([1, 2, 3, 4]);\n\n const WITHDRAW_TX_LEN: usize = 65;\n\n\n\n let pk = PrivateKey(rng.gen());\n\n let message = rng\n\n .gen_iter::<u8>()\n\n 
.take(WITHDRAW_TX_LEN)\n\n .collect::<Vec<_>>();\n\n\n\n let setup = || (TxSignature::sign_musig(&pk, &message), message.clone());\n\n\n\n b.iter_batched(\n\n setup,\n\n |(signature, msg)| {\n\n black_box(signature.verify_musig(&msg));\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/signatures/mod.rs", "rank": 65, "score": 200130.80578988887 }, { "content": "fn bench_pack_bits_into_bytes_in_order(b: &mut Bencher<'_>) {\n\n let value: Vec<bool> = vec![true; BYTE_SLICE_SIZE * 8];\n\n\n\n let setup = || value.clone();\n\n\n\n b.iter_batched(\n\n setup,\n\n |value| {\n\n let _ = BitConvert::into_bytes_ordered(black_box(value));\n\n },\n\n BatchSize::SmallInput,\n\n );\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/primitives/mod.rs", "rank": 66, "score": 200130.80578988887 }, { "content": "#[wasm_bindgen(js_name = pubKeyHash)]\n\npub fn pub_key_hash(pubkey: &[u8]) -> Result<Vec<u8>, JsValue> {\n\n let pubkey = JUBJUB_PARAMS\n\n .with(|params| PublicKey::read(&pubkey[..], params))\n\n .map_err(|_| JsValue::from_str(\"couldn't read public key\"))?;\n\n Ok(utils::pub_key_hash(&pubkey))\n\n}\n\n\n", "file_path": "sdk/zksync-crypto/src/lib.rs", "rank": 67, "score": 198624.37467913158 }, { "content": "/// Returns the closest possible packable token amount.\n\n/// Returned amount is always less or equal to the provided amount.\n\npub fn closest_packable_fee_amount(amount: &BigUint) -> BigUint {\n\n let fee_packed = pack_fee_amount(&amount);\n\n unpack_fee_amount(&fee_packed).expect(\"fee repacking\")\n\n}\n\n\n", "file_path": "core/lib/types/src/helpers.rs", "rank": 68, "score": 197944.48241327348 }, { "content": "pub fn bench_merkle_tree(c: &mut Criterion) {\n\n c.bench_function(\"Sequential SMT create\", smt_create);\n\n c.bench_function(\"Sequential SMT insert (empty)\", smt_insert_empty);\n\n c.bench_function(\"Sequential SMT insert (filled)\", smt_insert_filled);\n\n c.bench_function(\"Sequential SMT root 
hash\", smt_root_hash);\n\n}\n", "file_path": "core/lib/types/benches/criterion/merkle_tree/sequential_smt.rs", "rank": 69, "score": 197932.6797299423 }, { "content": "pub fn bench_merkle_tree(c: &mut Criterion) {\n\n c.bench_function(\"Parallel SMT create\", smt_create);\n\n c.bench_function(\"Parallel SMT insert (empty)\", smt_insert_empty);\n\n c.bench_function(\"Parallel SMT insert (filled)\", smt_insert_filled);\n\n c.bench_function(\"Parallel SMT root hash\", smt_root_hash);\n\n c.bench_function(\"Parallel SMT root hash (cached)\", smt_root_hash_cached);\n\n}\n", "file_path": "core/lib/types/benches/criterion/merkle_tree/parallel_smt.rs", "rank": 70, "score": 197932.6797299423 }, { "content": "pub fn bench_rescue_hasher(c: &mut Criterion) {\n\n let mut small_input_group = c.benchmark_group(\"Small input\");\n\n small_input_group.throughput(Throughput::Bytes((SMALL_INPUT_SIZE / 8) as u64));\n\n small_input_group.bench_function(\"Rescue Hasher\", rescue_small);\n\n small_input_group.finish();\n\n\n\n let mut big_input_group = c.benchmark_group(\"Big input\");\n\n big_input_group.throughput(Throughput::Bytes((BIG_INPUT_SIZE / 8) as u64));\n\n big_input_group.bench_function(\"Rescue Hasher\", rescue_big);\n\n big_input_group.finish();\n\n}\n", "file_path": "core/lib/types/benches/criterion/merkle_tree/rescue_hasher.rs", "rank": 71, "score": 197932.6797299423 }, { "content": "fn gen_account(id: u32) -> CircuitAccount<Engine> {\n\n let mut account = CircuitAccount::<Engine>::default();\n\n\n\n account.address = Fr::from_str(&id.to_string()).unwrap();\n\n account\n\n}\n\n\n", "file_path": "core/lib/types/benches/criterion/merkle_tree/sequential_smt.rs", "rank": 72, "score": 197050.15375991317 }, { "content": "fn gen_account(id: u32) -> CircuitAccount<Engine> {\n\n let mut account = CircuitAccount::<Engine>::default();\n\n account.address = Fr::from_str(&id.to_string()).unwrap();\n\n account\n\n}\n\n\n", "file_path": 
"core/lib/types/benches/criterion/merkle_tree/parallel_smt.rs", "rank": 73, "score": 197050.15375991317 }, { "content": "#[test]\n\nfn to_self() {\n\n let token_id = 0;\n\n let amount = BigUint::from(100u32);\n\n let fee = BigUint::from(10u32);\n\n\n\n let mut tb = PlasmaTestBuilder::new();\n\n\n\n let (account_id, account, sk) = tb.add_account(Unlocked);\n\n tb.set_balance(account_id, token_id, &amount + &fee);\n\n\n\n let transfer = Transfer::new_signed(\n\n account_id,\n\n account.address,\n\n account.address,\n\n token_id,\n\n amount.clone(),\n\n fee.clone(),\n\n account.nonce,\n\n &sk,\n\n )\n", "file_path": "core/lib/state/src/tests/operations/transfer.rs", "rank": 74, "score": 196371.58571527456 }, { "content": "/// handy function to get file path in file_dump dir\n\npub fn get_path_in_file_dump_dir(filename: &str) -> PathBuf {\n\n let mut base_dir = std::env::var(\"ZKSYNC_HOME\")\n\n .map(PathBuf::from)\n\n .unwrap_or_else(|_| std::env::current_dir().expect(\"Current dir not set\"));\n\n base_dir.push(\"core\");\n\n base_dir.push(\"circuit\");\n\n base_dir.push(\"src\");\n\n base_dir.push(\"playground\");\n\n base_dir.push(\"file_dump\");\n\n base_dir.push(filename);\n\n base_dir\n\n}\n\n\n\npub mod plonk_playground;\n", "file_path": "core/lib/circuit/src/playground/mod.rs", "rank": 75, "score": 196005.4721625395 }, { "content": "/// Transforms the fee amount into the packed form.\n\n/// As the packed form for fee is smaller than one for the token,\n\n/// the same value must be packable as a token amount, but not packable\n\n/// as a fee amount.\n\n/// If the provided fee amount is not packable, it is rounded down to the\n\n/// closest amount that fits in packed form. 
As a result, some precision will be lost.\n\npub fn pack_fee_amount(amount: &BigUint) -> Vec<u8> {\n\n FloatConversions::pack(\n\n amount,\n\n params::FEE_EXPONENT_BIT_WIDTH,\n\n params::FEE_MANTISSA_BIT_WIDTH,\n\n )\n\n}\n\n\n", "file_path": "core/lib/types/src/helpers.rs", "rank": 76, "score": 195954.12107196636 }, { "content": "/// Attempts to unpack the fee amount.\n\npub fn unpack_fee_amount(data: &[u8]) -> Option<BigUint> {\n\n FloatConversions::unpack(\n\n data,\n\n params::FEE_EXPONENT_BIT_WIDTH,\n\n params::FEE_MANTISSA_BIT_WIDTH,\n\n )\n\n .and_then(BigUint::from_u128)\n\n}\n\n\n", "file_path": "core/lib/types/src/helpers.rs", "rank": 77, "score": 195943.36797471315 }, { "content": "/// Transforms relative path like `constant/eip1271.json` into full path like\n\n/// `$ZKSYNC_HOME/etc/test_config/constant/eip1271.json`.\n\nfn config_path(postfix: &str) -> String {\n\n let home = std::env::var(\"ZKSYNC_HOME\").expect(\"ZKSYNC_HOME variable must be set\");\n\n\n\n format!(\"{}/etc/test_config/{}\", home, postfix)\n\n}\n\n\n", "file_path": "core/lib/config/src/test_config/mod.rs", "rank": 78, "score": 194301.82465104116 }, { "content": "/// Get root hash of the used subtree.\n\npub fn get_used_subtree_root_hash(account_tree: &CircuitAccountTree) -> Fr {\n\n // We take account 0, and hash it with it's Merkle proof.\n\n let account_index = 0;\n\n let account_merkle_path = account_tree.merkle_path(account_index);\n\n let account = account_tree\n\n .get(account_index)\n\n .cloned()\n\n .unwrap_or_else(CircuitAccount::default);\n\n let mut current_hash = account_tree.hasher.hash_bits(account.get_bits_le());\n\n for merkle_path_item in account_merkle_path\n\n .iter()\n\n .take(used_account_subtree_depth())\n\n {\n\n current_hash = account_tree\n\n .hasher\n\n .compress(&current_hash, &merkle_path_item.0, 0);\n\n }\n\n current_hash\n\n}\n\n\n", "file_path": "core/lib/circuit/src/witness/utils.rs", "rank": 79, "score": 193010.13177524658 }, { "content": "pub fn 
resize_grow_only<T: Clone>(to_resize: &mut Vec<T>, new_size: usize, pad_with: T) {\n\n assert!(to_resize.len() <= new_size);\n\n to_resize.resize(new_size, pad_with);\n\n}\n\n\n", "file_path": "core/lib/circuit/src/utils.rs", "rank": 80, "score": 191384.90370479543 }, { "content": "#[test]\n\nfn success() {\n\n let mut tb = PlasmaTestBuilder::new();\n\n let token_id = 1;\n\n let balance = 10u32;\n\n let (account_id, account, sk) = tb.add_account(Locked);\n\n tb.set_balance(account_id, token_id, balance);\n\n let old_pub_key_hash = account.pub_key_hash.clone();\n\n let new_pub_key_hash = PubKeyHash::from_privkey(&sk);\n\n\n\n let change_pub_key = ChangePubKey::new_signed(\n\n account_id,\n\n account.address,\n\n new_pub_key_hash.clone(),\n\n token_id,\n\n balance.into(),\n\n account.nonce,\n\n None,\n\n &sk,\n\n )\n\n .expect(\"Failed to sign ChangePubkey\");\n", "file_path": "core/lib/state/src/tests/operations/change_pub_key.rs", "rank": 81, "score": 191264.4810105408 }, { "content": "#[test]\n\nfn test_get_withdrawals_data() {\n\n let operations = vec![\n\n create_change_pubkey_tx(),\n\n create_full_exit_op(),\n\n create_withdraw_tx(),\n\n ];\n\n let mut block = Block::new(\n\n 0,\n\n Fr::one(),\n\n 0,\n\n operations.clone(),\n\n (0, 0),\n\n 100,\n\n 1_000_000.into(),\n\n 1_500_000.into(),\n\n );\n\n\n\n let expected = {\n\n let mut data = vec![];\n\n for op in &operations[1..] 
{\n", "file_path": "core/lib/types/src/tests/block.rs", "rank": 82, "score": 191015.66523477816 }, { "content": "#[test]\n\n#[ignore]\n\nfn corrupted_first_operation() {\n\n // Perform some operations\n\n let mut circuit = apply_many_ops();\n\n\n\n // Now try to cut off an operation at the beginning.\n\n circuit.operations.remove(0);\n\n\n\n // We corrupted the very first chunk, so it should be reported.\n\n // See `circuit.rs` for details.\n\n let expected_msg = \"chunk number 0/verify_correct_chunking/correct_sequence\";\n\n\n\n let error = check_circuit_non_panicking(circuit)\n\n .expect_err(\"Corrupted operations list should lead to an error\");\n\n\n\n assert!(\n\n error.contains(expected_msg),\n\n \"corrupted_operations: Got error message '{}', but expected '{}'\",\n\n error,\n\n expected_msg\n\n );\n\n}\n\n\n\n/// Checks that corrupted list of operations in block leads to predictable errors.\n\n/// Check for chunk in the middle of the operations list.\n", "file_path": "core/lib/circuit/src/witness/tests/mod.rs", "rank": 83, "score": 190821.7194089211 }, { "content": "#[test]\n\n#[ignore]\n\nfn corrupted_intermediate_operation() {\n\n // Perform some operations\n\n let mut circuit = apply_many_ops();\n\n\n\n // Now replace the operation in the middle with incorrect operation.\n\n let corrupted_op_chunk = circuit.operations.len() / 2;\n\n circuit.operations[corrupted_op_chunk] = circuit.operations[0].clone();\n\n\n\n // Create an error message with the exact chunk number.\n\n // See `circuit.rs` for details.\n\n let expected_msg = format!(\n\n \"chunk number {}/verify_correct_chunking/correct_sequence\",\n\n corrupted_op_chunk\n\n );\n\n\n\n let error = check_circuit_non_panicking(circuit)\n\n .expect_err(\"Corrupted operations list should lead to an error\");\n\n\n\n assert!(\n\n error.contains(&expected_msg),\n\n \"corrupted_operations: Got error message '{}', but expected '{}'\",\n\n error,\n\n expected_msg\n\n );\n\n}\n\n\n\n/// Checks that corrupted 
validator merkle proof in block leads to predictable errors.\n\n/// Check for chunk in the end of the operations list.\n", "file_path": "core/lib/circuit/src/witness/tests/mod.rs", "rank": 84, "score": 190821.7194089211 }, { "content": "/// Given the account map, applies a sequence of updates to the state.\n\npub fn apply_updates(accounts: &mut AccountMap, updates: AccountUpdates) {\n\n for (id, update) in updates.into_iter() {\n\n let updated_account = Account::apply_update(accounts.remove(&id), update);\n\n if let Some(account) = updated_account {\n\n accounts.insert(id, account);\n\n }\n\n }\n\n}\n\n\n", "file_path": "core/lib/types/src/helpers.rs", "rank": 85, "score": 190471.4355159827 }, { "content": "fn parse_knobs(mut input: syn::ItemFn) -> Result<TokenStream, syn::Error> {\n\n let sig = &mut input.sig;\n\n let body = &input.block;\n\n let attrs = &input.attrs;\n\n let vis = input.vis;\n\n\n\n if sig.asyncness.is_none() {\n\n let msg = \"the async keyword is missing from the function declaration\";\n\n return Err(syn::Error::new_spanned(sig.fn_token, msg));\n\n }\n\n\n\n sig.asyncness = None;\n\n\n\n if sig.inputs.len() != 1 || !is_arg_storage_processor(sig.inputs.first()) {\n\n let msg = \"the DB test function must take `mut storage: zksync_storage::StorageProcessor<'_>` as a single argument\";\n\n return Err(syn::Error::new_spanned(&sig.inputs, msg));\n\n }\n\n\n\n // Remove argument, as the test function must not have one.\n\n sig.inputs.pop();\n", "file_path": "core/lib/storage/db_test_macro/src/lib.rs", "rank": 86, "score": 189641.90216315704 }, { "content": "#[test]\n\nfn test_ethereum_signature_verify_with_serialization() {\n\n let address: Address = \"52312AD6f01657413b2eaE9287f6B9ADaD93D5FE\".parse().unwrap();\n\n let message = \"hello world\";\n\n #[derive(Debug, Serialize, Deserialize, PartialEq)]\n\n struct TestSignatureSerialize {\n\n signature: PackedEthSignature,\n\n }\n\n\n\n // signature calculated using ethers.js signer\n\n let 
test_signature_serialize = \"{ \\\"signature\\\": \\\"0x111ea2824732851dd0893eaa5873597ba38ed08b69f6d8a0d7f5da810335566403d05281b1f56d12ca653e32eb7d67b76814b0cc8b0da2d7ad2c862d575329951b\\\"}\";\n\n\n\n // test serialization\n\n let deserialized_signature: TestSignatureSerialize =\n\n serde_json::from_str(test_signature_serialize).expect(\"signature deserialize\");\n\n let signature_after_roundtrip: TestSignatureSerialize = serde_json::from_str(\n\n &serde_json::to_string(&deserialized_signature).expect(\"signature serialize roundtrip\"),\n\n )\n\n .expect(\"signature deserialize roundtrip\");\n\n assert_eq!(\n\n deserialized_signature, signature_after_roundtrip,\n", "file_path": "core/lib/types/src/tx/tests.rs", "rank": 87, "score": 189070.94371859616 }, { "content": "#[test]\n\nfn nonce_mismatch() {\n\n let mut tb = PlasmaTestBuilder::new();\n\n let (account_id, account, sk) = tb.add_account(Locked);\n\n let new_pub_key_hash = PubKeyHash::from_privkey(&sk);\n\n\n\n let change_pub_key = ChangePubKey::new_signed(\n\n account_id,\n\n account.address,\n\n new_pub_key_hash,\n\n 0,\n\n 0u32.into(),\n\n account.nonce + 1,\n\n None,\n\n &sk,\n\n )\n\n .expect(\"Failed to sign ChangePubkey\");\n\n\n\n tb.test_tx_fail(change_pub_key.into(), \"Nonce mismatch\");\n\n}\n\n\n\n/// Check that ChangePubKey fails if account address\n\n/// does not correspond to account_id\n", "file_path": "core/lib/state/src/tests/operations/change_pub_key.rs", "rank": 88, "score": 189015.57847825385 }, { "content": "fn read_file_to_json_value(path: &str) -> io::Result<serde_json::Value> {\n\n let zksync_home = std::env::var(\"ZKSYNC_HOME\").unwrap_or_else(|_| \".\".into());\n\n let path = std::path::Path::new(&zksync_home).join(path);\n\n let contents = fs::read_to_string(path)?;\n\n let val = serde_json::Value::from_str(&contents)?;\n\n Ok(val)\n\n}\n\n\n", "file_path": "core/lib/contracts/src/lib.rs", "rank": 89, "score": 188827.33761428227 }, { "content": "#[test]\n\nfn 
test_get_eth_encoded_root() {\n\n let block = Block::new(\n\n 0,\n\n Fr::one(),\n\n 0,\n\n vec![],\n\n (0, 0),\n\n 1,\n\n 1_000_000.into(),\n\n 1_500_000.into(),\n\n );\n\n\n\n let mut bytes = [0u8; 32];\n\n let byte = bytes.last_mut().unwrap();\n\n *byte = 1;\n\n\n\n assert_eq!(block.get_eth_encoded_root(), H256::from(bytes));\n\n}\n\n\n", "file_path": "core/lib/types/src/tests/block.rs", "rank": 90, "score": 188770.47639198983 }, { "content": "#[test]\n\nfn test_get_eth_witness_data() {\n\n let operations = vec![\n\n create_change_pubkey_tx(),\n\n create_full_exit_op(),\n\n create_withdraw_tx(),\n\n create_change_pubkey_tx(),\n\n ];\n\n let change_pubkey_tx = &operations[0];\n\n let mut block = Block::new(\n\n 0,\n\n Fr::one(),\n\n 0,\n\n operations.clone(),\n\n (0, 0),\n\n 100,\n\n 1_000_000.into(),\n\n 1_500_000.into(),\n\n );\n\n\n\n let witness = change_pubkey_tx\n", "file_path": "core/lib/types/src/tests/block.rs", "rank": 91, "score": 188770.47639198983 }, { "content": "#[test]\n\nfn test_get_eth_public_data() {\n\n let mut block = Block::new(\n\n 0,\n\n Fr::one(),\n\n 0,\n\n vec![\n\n create_change_pubkey_tx(),\n\n create_full_exit_op(),\n\n create_withdraw_tx(),\n\n ],\n\n (0, 0),\n\n 100,\n\n 1_000_000.into(),\n\n 1_500_000.into(),\n\n );\n\n\n\n let expected = {\n\n let mut data = vec![];\n\n for op in &block.block_transactions {\n\n data.extend(op.get_executed_op().unwrap().public_data());\n", "file_path": "core/lib/types/src/tests/block.rs", "rank": 92, "score": 188770.47639198983 }, { "content": "pub fn get_matches_from_lines(stream: &str, pattern: &str) -> Result<String> {\n\n let lines = stream.split_whitespace().collect::<Vec<_>>();\n\n\n\n for std_out_line in lines {\n\n if std_out_line.starts_with(pattern) {\n\n return Ok(std_out_line.to_string());\n\n }\n\n }\n\n Err(anyhow::anyhow!(\n\n \"error of finding the pattern '{}' in stream\",\n\n pattern\n\n ))\n\n}\n\n\n", "file_path": "infrastructure/tok_cli/src/utils.rs", "rank": 93, "score": 
188205.13398100072 }, { "content": "pub fn deploy_contracts(use_prod_contracts: bool, genesis_root: Fr) -> Contracts {\n\n let mut args = vec![\"run\", \"deploy-testkit\", \"--genesisRoot\"];\n\n let genesis_root = format!(\"0x{}\", genesis_root.to_hex());\n\n args.push(genesis_root.as_str());\n\n if use_prod_contracts {\n\n args.push(\"--prodContracts\");\n\n }\n\n let stdout = run_external_command(\"zk\", &args);\n\n\n\n let mut contracts = HashMap::new();\n\n for std_out_line in stdout.split_whitespace().collect::<Vec<_>>() {\n\n if let Some((name, address)) = get_contract_address(std_out_line) {\n\n contracts.insert(name, address);\n\n }\n\n }\n\n\n\n Contracts {\n\n governance: contracts\n\n .remove(\"GOVERNANCE_ADDR\")\n\n .expect(\"GOVERNANCE_ADDR missing\"),\n", "file_path": "core/tests/testkit/src/external_commands.rs", "rank": 94, "score": 187390.7636215442 }, { "content": "#[test]\n\nfn invalid_account_id() {\n\n let mut tb = PlasmaTestBuilder::new();\n\n let (account_id, account, sk) = tb.add_account(Locked);\n\n let new_pub_key_hash = PubKeyHash::from_privkey(&sk);\n\n\n\n let change_pub_key = ChangePubKey::new_signed(\n\n account_id + 1,\n\n account.address,\n\n new_pub_key_hash,\n\n 0,\n\n 0u32.into(),\n\n account.nonce + 1,\n\n None,\n\n &sk,\n\n )\n\n .expect(\"Failed to sign ChangePubkey\");\n\n\n\n tb.test_tx_fail(\n\n change_pub_key.into(),\n\n \"ChangePubKey account id is incorrect\",\n\n );\n\n}\n", "file_path": "core/lib/state/src/tests/operations/change_pub_key.rs", "rank": 95, "score": 186832.8205101972 }, { "content": "#[wasm_bindgen(js_name = privateKeyFromSeed)]\n\npub fn private_key_from_seed(seed: &[u8]) -> Result<Vec<u8>, JsValue> {\n\n if seed.len() < 32 {\n\n return Err(JsValue::from_str(\"Seed is too short\"));\n\n };\n\n\n\n let sha256_bytes = |input: &[u8]| -> Vec<u8> {\n\n let mut hasher = Sha256::new();\n\n hasher.input(input);\n\n hasher.result().to_vec()\n\n };\n\n\n\n let mut effective_seed = sha256_bytes(seed);\n\n\n\n loop 
{\n\n let raw_priv_key = sha256_bytes(&effective_seed);\n\n let mut fs_repr = FsRepr::default();\n\n fs_repr\n\n .read_be(&raw_priv_key[..])\n\n .expect(\"failed to read raw_priv_key\");\n\n if Fs::from_repr(fs_repr).is_ok() {\n\n return Ok(raw_priv_key);\n\n } else {\n\n effective_seed = raw_priv_key;\n\n }\n\n }\n\n}\n\n\n", "file_path": "sdk/zksync-crypto/src/lib.rs", "rank": 96, "score": 186224.81964017297 }, { "content": "#[wasm_bindgen]\n\npub fn private_key_to_pubkey(private_key: &[u8]) -> Result<Vec<u8>, JsValue> {\n\n let mut pubkey_buf = Vec::with_capacity(PACKED_POINT_SIZE);\n\n\n\n let pubkey = privkey_to_pubkey_internal(private_key)?;\n\n\n\n pubkey\n\n .write(&mut pubkey_buf)\n\n .expect(\"failed to write pubkey to buffer\");\n\n\n\n Ok(pubkey_buf)\n\n}\n\n\n\n#[wasm_bindgen]\n", "file_path": "sdk/zksync-crypto/src/lib.rs", "rank": 97, "score": 184326.07758558783 }, { "content": "#[wasm_bindgen]\n\npub fn private_key_to_pubkey_hash(private_key: &[u8]) -> Result<Vec<u8>, JsValue> {\n\n Ok(utils::pub_key_hash(&privkey_to_pubkey_internal(\n\n private_key,\n\n )?))\n\n}\n\n\n", "file_path": "sdk/zksync-crypto/src/lib.rs", "rank": 98, "score": 182478.6531491152 }, { "content": "/// We use musig Schnorr signature scheme.\n\n/// It is impossible to restore signer for signature, that is why we provide public key of the signer\n\n/// along with signature.\n\n/// [0..32] - packed public key of signer.\n\n/// [32..64] - packed r point of the signature.\n\n/// [64..96] - s poing of the signature.\n\npub fn sign_musig(private_key: &[u8], msg: &[u8]) -> Result<Vec<u8>, JsValue> {\n\n let mut packed_full_signature = Vec::with_capacity(PACKED_POINT_SIZE + PACKED_SIGNATURE_SIZE);\n\n let p_g = FixedGenerators::SpendingKeyGenerator;\n\n let private_key = read_signing_key(private_key)?;\n\n\n\n {\n\n let public_key =\n\n JUBJUB_PARAMS.with(|params| PublicKey::from_private(&private_key, p_g, params));\n\n public_key\n\n .write(&mut packed_full_signature)\n\n 
.expect(\"failed to write pubkey to packed_point\");\n\n };\n\n\n\n let signature = JUBJUB_PARAMS.with(|jubjub_params| {\n\n RESCUE_PARAMS.with(|rescue_params| {\n\n let hashed_msg = rescue_hash_tx_msg(msg);\n\n let seed = Seed::deterministic_seed(&private_key, &hashed_msg);\n\n private_key.musig_rescue_sign(&hashed_msg, &seed, p_g, rescue_params, jubjub_params)\n\n })\n\n });\n", "file_path": "sdk/zksync-crypto/src/lib.rs", "rank": 99, "score": 177358.9157579885 } ]
Rust
day06/day06.rs
CheezeCake/AoC-2019
1426c025f4e6fa1268458fac22776b409e13168a
use std::collections::HashMap; use std::collections::HashSet; use std::collections::VecDeque; use std::io; use std::io::prelude::*; type OrbitInfo = HashMap<String, HashSet<String>>; #[derive(Debug, Clone)] struct OrbitCount { direct: usize, indirect: usize, } impl OrbitCount { fn new() -> Self { Self { direct: 0, indirect: 0, } } } fn count_orbits(objects_orbiting: &OrbitInfo, in_orbit_around: &OrbitInfo) -> usize { let mut in_orbit_around = in_orbit_around.clone(); let mut orbit_count = HashMap::new(); orbit_count.insert(String::from("COM"), OrbitCount::new()); while !in_orbit_around.is_empty() { let not_orbiting_name = in_orbit_around .iter() .find(|(_, s)| s.is_empty()) .expect("dependency cycle detected") .0 .clone(); let not_orbiting_count = orbit_count .entry(not_orbiting_name.clone()) .or_insert(OrbitCount::new()) .clone(); for object_orbiting in objects_orbiting .get(&not_orbiting_name) .unwrap_or(&HashSet::new()) { let entry = orbit_count .entry(object_orbiting.to_string()) .or_insert(OrbitCount::new()); entry.direct += 1; entry.indirect = not_orbiting_count.direct + not_orbiting_count.indirect; in_orbit_around .get_mut(object_orbiting) .unwrap() .remove(&not_orbiting_name); } in_orbit_around.remove(&not_orbiting_name); } orbit_count .iter() .map(|(_, cnt)| cnt.direct + cnt.indirect) .sum() } fn shortest_path( objects_orbiting: &OrbitInfo, in_orbit_around: &OrbitInfo, start: &String, target: &String, ) -> Option<usize> { let mut visited = HashSet::new(); let mut q = VecDeque::new(); q.push_back((start, 0)); visited.insert(start); let empty = HashSet::new(); while !q.is_empty() { let (obj, n) = q.pop_front().unwrap(); if in_orbit_around .get(target) .unwrap_or(&HashSet::new()) .contains(obj) { return Some(n - 1); } for o in objects_orbiting.get(obj).unwrap_or(&empty) { if !visited.contains(o) { q.push_back((o, n + 1)); visited.insert(o); } } for o in in_orbit_around.get(obj).unwrap_or(&empty) { if !visited.contains(o) { q.push_back((o, n + 1)); 
visited.insert(o); } } } None } fn main() { let mut objects_orbiting: OrbitInfo = OrbitInfo::new(); let mut in_orbit_around: OrbitInfo = OrbitInfo::new(); in_orbit_around.insert(String::from("COM"), HashSet::new()); for line in io::stdin().lock().lines() { let line = line.unwrap(); let parts: Vec<&str> = line.split(')').collect(); assert_eq!(parts.len(), 2); let (obj1, obj2) = (parts[0].to_string(), parts[1].to_string()); objects_orbiting .entry(obj1.clone()) .or_insert(HashSet::new()) .insert(obj2.clone()); in_orbit_around .entry(obj2.clone()) .or_insert(HashSet::new()) .insert(obj1.clone()); } println!( "part 1: {}", count_orbits(&objects_orbiting, &in_orbit_around) ); println!( "part 2: {}", shortest_path( &objects_orbiting, &in_orbit_around, &String::from("YOU"), &String::from("SAN") ) .expect("no path found") ); }
use std::collections::HashMap; use std::collections::HashSet; use std::collections::VecDeque; use std::io; use std::io::prelude::*; type OrbitIn
let mut q = VecDeque::new(); q.push_back((start, 0)); visited.insert(start); let empty = HashSet::new(); while !q.is_empty() { let (obj, n) = q.pop_front().unwrap(); if in_orbit_around .get(target) .unwrap_or(&HashSet::new()) .contains(obj) { return Some(n - 1); } for o in objects_orbiting.get(obj).unwrap_or(&empty) { if !visited.contains(o) { q.push_back((o, n + 1)); visited.insert(o); } } for o in in_orbit_around.get(obj).unwrap_or(&empty) { if !visited.contains(o) { q.push_back((o, n + 1)); visited.insert(o); } } } None } fn main() { let mut objects_orbiting: OrbitInfo = OrbitInfo::new(); let mut in_orbit_around: OrbitInfo = OrbitInfo::new(); in_orbit_around.insert(String::from("COM"), HashSet::new()); for line in io::stdin().lock().lines() { let line = line.unwrap(); let parts: Vec<&str> = line.split(')').collect(); assert_eq!(parts.len(), 2); let (obj1, obj2) = (parts[0].to_string(), parts[1].to_string()); objects_orbiting .entry(obj1.clone()) .or_insert(HashSet::new()) .insert(obj2.clone()); in_orbit_around .entry(obj2.clone()) .or_insert(HashSet::new()) .insert(obj1.clone()); } println!( "part 1: {}", count_orbits(&objects_orbiting, &in_orbit_around) ); println!( "part 2: {}", shortest_path( &objects_orbiting, &in_orbit_around, &String::from("YOU"), &String::from("SAN") ) .expect("no path found") ); }
fo = HashMap<String, HashSet<String>>; #[derive(Debug, Clone)] struct OrbitCount { direct: usize, indirect: usize, } impl OrbitCount { fn new() -> Self { Self { direct: 0, indirect: 0, } } } fn count_orbits(objects_orbiting: &OrbitInfo, in_orbit_around: &OrbitInfo) -> usize { let mut in_orbit_around = in_orbit_around.clone(); let mut orbit_count = HashMap::new(); orbit_count.insert(String::from("COM"), OrbitCount::new()); while !in_orbit_around.is_empty() { let not_orbiting_name = in_orbit_around .iter() .find(|(_, s)| s.is_empty()) .expect("dependency cycle detected") .0 .clone(); let not_orbiting_count = orbit_count .entry(not_orbiting_name.clone()) .or_insert(OrbitCount::new()) .clone(); for object_orbiting in objects_orbiting .get(&not_orbiting_name) .unwrap_or(&HashSet::new()) { let entry = orbit_count .entry(object_orbiting.to_string()) .or_insert(OrbitCount::new()); entry.direct += 1; entry.indirect = not_orbiting_count.direct + not_orbiting_count.indirect; in_orbit_around .get_mut(object_orbiting) .unwrap() .remove(&not_orbiting_name); } in_orbit_around.remove(&not_orbiting_name); } orbit_count .iter() .map(|(_, cnt)| cnt.direct + cnt.indirect) .sum() } fn shortest_path( objects_orbiting: &OrbitInfo, in_orbit_around: &OrbitInfo, start: &String, target: &String, ) -> Option<usize> { let mut visited = HashSet::new();
random
[ { "content": "type Position = (i32, i32);\n\n\n", "file_path": "day15/day15.rs", "rank": 0, "score": 53542.84334560843 }, { "content": "type State = Vec<Vec<Tile>>;\n\n\n", "file_path": "day24/day24.rs", "rank": 1, "score": 50393.35288035059 }, { "content": "enum PositionType {\n\n Wall,\n\n Empty,\n\n OxygenSystem,\n\n}\n\n\n", "file_path": "day15/day15.rs", "rank": 3, "score": 28144.298778405722 }, { "content": "fn get_area_map(program: &Vec<i64>) -> HashMap<Position, PositionType> {\n\n let mut area_map = HashMap::new();\n\n\n\n let mut q = VecDeque::new();\n\n let mut discovered = HashSet::new();\n\n\n\n q.push_back((CPU::new(&program), (0, 0)));\n\n discovered.insert((0, 0));\n\n\n\n while !q.is_empty() {\n\n let (cpu, pos) = q.pop_front().unwrap();\n\n for (new_pos, direction) in adjacent_positions(pos) {\n\n if discovered.contains(&new_pos) {\n\n continue;\n\n }\n\n discovered.insert(new_pos);\n\n\n\n let mut new_cpu = cpu.clone();\n\n let pt = new_cpu.run(direction as i64);\n\n area_map.insert(\n", "file_path": "day15/day15.rs", "rank": 4, "score": 18686.86561166415 }, { "content": "use std::cmp;\n\nuse std::collections::HashMap;\n\nuse std::collections::HashSet;\n\nuse std::collections::VecDeque;\n\nuse std::io;\n\nuse std::io::prelude::*;\n\n\n", "file_path": "day18/day18.rs", "rank": 5, "score": 3.874796588420603 }, { "content": "use std::cmp;\n\nuse std::collections::HashMap;\n\nuse std::io;\n\nuse std::ops::Index;\n\nuse std::ops::IndexMut;\n\n\n", "file_path": "day17/day17.rs", "rank": 6, "score": 3.8627537718057625 }, { "content": "use std::cmp::Ordering;\n\nuse std::io;\n\nuse std::io::prelude::*;\n\nuse std::iter::Sum;\n\nuse std::num::ParseIntError;\n\nuse std::ops::Add;\n\nuse std::ops::AddAssign;\n\nuse std::str::FromStr;\n\n\n\n#[derive(Debug, Clone, Copy)]\n", "file_path": "day12/day12.rs", "rank": 7, "score": 3.845017621991756 }, { "content": "use std::cmp::Ordering;\n\nuse std::collections::HashMap;\n\nuse std::io;\n\nuse 
std::ops::Index;\n\nuse std::ops::IndexMut;\n\n\n", "file_path": "day13/day13.rs", "rank": 8, "score": 3.838991475311215 }, { "content": "use std::collections::HashMap;\n\nuse std::collections::HashSet;\n\nuse std::collections::VecDeque;\n\nuse std::io;\n\nuse std::ops::Index;\n\nuse std::ops::IndexMut;\n\n\n", "file_path": "day23/day23.rs", "rank": 9, "score": 3.835109461518093 }, { "content": "use std::collections::HashMap;\n\nuse std::io;\n\nuse std::ops::Index;\n\nuse std::ops::IndexMut;\n\nuse std::ops::RangeInclusive;\n\n\n", "file_path": "day11/day11.rs", "rank": 10, "score": 3.8155197458565415 }, { "content": "use mod_exp::mod_exp;\n\nuse std::io;\n\nuse std::io::prelude::*;\n\nuse std::num::ParseIntError;\n\nuse std::str::FromStr;\n\n\n", "file_path": "day22/src/main.rs", "rank": 11, "score": 3.8155197458565415 }, { "content": "use std::collections::HashMap;\n\nuse std::collections::HashSet;\n\nuse std::collections::VecDeque;\n\nuse std::io;\n\nuse std::ops::Index;\n\nuse std::ops::IndexMut;\n\n\n\n#[derive(Clone)]\n", "file_path": "day15/day15.rs", "rank": 13, "score": 3.796227073199939 }, { "content": "use std::collections::HashMap;\n\nuse std::io;\n\nuse std::ops::Index;\n\nuse std::ops::IndexMut;\n\n\n", "file_path": "day09/day09.rs", "rank": 14, "score": 3.786507559689913 }, { "content": "use std::collections::HashMap;\n\nuse std::io;\n\nuse std::ops::Index;\n\nuse std::ops::IndexMut;\n\n\n", "file_path": "day25/day25.rs", "rank": 15, "score": 3.786507559689913 }, { "content": "use std::collections::HashMap;\n\nuse std::io;\n\nuse std::ops::Index;\n\nuse std::ops::IndexMut;\n\n\n", "file_path": "day19/day19.rs", "rank": 16, "score": 3.786507559689913 }, { "content": "use std::collections::HashMap;\n\nuse std::io;\n\nuse std::ops::Index;\n\nuse std::ops::IndexMut;\n\n\n", "file_path": "day21/day21.rs", "rank": 17, "score": 3.786507559689913 }, { "content": "use std::collections::VecDeque;\n\nuse std::i32;\n\nuse std::io;\n\n\n", "file_path": 
"day07/day07.rs", "rank": 18, "score": 3.7768376889905353 }, { "content": "use std::io;\n\nuse std::io::prelude::*;\n\nuse std::iter::successors;\n\n\n", "file_path": "day01/day01.rs", "rank": 19, "score": 3.7768376889905353 }, { "content": "use std::collections::HashMap;\n\nuse std::io;\n\nuse std::io::prelude::*;\n\nuse std::num::ParseIntError;\n\nuse std::str::FromStr;\n\n\n\n#[derive(Debug)]\n", "file_path": "day14/day14.rs", "rank": 20, "score": 3.7694269271554113 }, { "content": "use std::collections::BTreeMap;\n\nuse std::collections::HashSet;\n\nuse std::io;\n\nuse std::io::prelude::*;\n\n\n", "file_path": "day10/day10.rs", "rank": 21, "score": 3.758005223514145 }, { "content": "use std::collections::HashMap;\n\nuse std::collections::HashSet;\n\nuse std::collections::VecDeque;\n\nuse std::io;\n\nuse std::io::prelude::*;\n\n\n\n#[derive(Hash, PartialEq, Eq, Debug, Copy, Clone)]\n", "file_path": "day20/day20.rs", "rank": 22, "score": 3.63759659357308 }, { "content": "use std::collections::HashMap;\n\nuse std::collections::HashSet;\n\nuse std::io;\n\nuse std::io::prelude::*;\n\n\n\n#[derive(Copy, Clone, PartialEq, Eq, Hash)]\n", "file_path": "day24/day24.rs", "rank": 23, "score": 3.59561278751029 }, { "content": "use std::collections::HashMap;\n\nuse std::io;\n\n\n", "file_path": "day03/day03.rs", "rank": 24, "score": 3.5952833540221416 }, { "content": "use std::collections::HashSet;\n\nuse std::env;\n\n\n", "file_path": "day04/day04.rs", "rank": 25, "score": 3.5952833540221416 }, { "content": " new_pos,\n\n match pt {\n\n 0 => PositionType::Wall,\n\n 1 => {\n\n q.push_back((new_cpu, new_pos));\n\n PositionType::Empty\n\n }\n\n 2 => PositionType::OxygenSystem,\n\n _ => panic!(\"invalid position type: {}\", pt),\n\n },\n\n );\n\n }\n\n }\n\n\n\n area_map\n\n}\n\n\n", "file_path": "day15/day15.rs", "rank": 26, "score": 3.4181302532625457 }, { "content": " discovered.insert(new_pos.clone());\n\n\n\n if let PositionType::Empty | PositionType::OxygenSystem =\n\n 
map.get(&new_pos).unwrap_or(&PositionType::Wall)\n\n {\n\n q.push_back((new_pos, dist + 1));\n\n }\n\n }\n\n }\n\n\n\n dist_map\n\n}\n\n\n", "file_path": "day15/day15.rs", "rank": 27, "score": 3.3884435087424976 }, { "content": "use std::io;\n\n\n", "file_path": "day16/day16.rs", "rank": 28, "score": 3.308745560067663 }, { "content": "use std::io;\n\n\n", "file_path": "day05/day05.rs", "rank": 29, "score": 3.308745560067663 }, { "content": "use std::io;\n\n\n", "file_path": "day02/day02.rs", "rank": 30, "score": 3.308745560067663 }, { "content": "use std::io;\n\n\n\nconst HEIGHT: usize = 6;\n\nconst WIDTH: usize = 25;\n\nconst LAYER_SIZE: usize = HEIGHT * WIDTH;\n\n\n", "file_path": "day08/day08.rs", "rank": 31, "score": 2.6713833173903825 } ]
Rust
chips/nrf5x/src/rtc.rs
jkchien/tock
fa7543b5bdfbd239f5831c01998f0fe27ccbf95d
use core::cell::Cell; use kernel::common::cells::OptionalCell; use kernel::common::registers::{register_bitfields, ReadOnly, ReadWrite, WriteOnly}; use kernel::common::StaticRef; use kernel::hil::time::{self, Alarm, Ticks, Time}; use kernel::ErrorCode; const RTC1_BASE: StaticRef<RtcRegisters> = unsafe { StaticRef::new(0x40011000 as *const RtcRegisters) }; #[repr(C)] struct RtcRegisters { tasks_start: WriteOnly<u32, Task::Register>, tasks_stop: WriteOnly<u32, Task::Register>, tasks_clear: WriteOnly<u32, Task::Register>, tasks_trigovrflw: WriteOnly<u32, Task::Register>, _reserved0: [u8; 240], events_tick: ReadWrite<u32, Event::Register>, events_ovrflw: ReadWrite<u32, Event::Register>, _reserved1: [u8; 56], events_compare: [ReadWrite<u32, Event::Register>; 4], _reserved2: [u8; 436], intenset: ReadWrite<u32, Inte::Register>, intenclr: ReadWrite<u32, Inte::Register>, _reserved3: [u8; 52], evten: ReadWrite<u32, Inte::Register>, evtenset: ReadWrite<u32, Inte::Register>, evtenclr: ReadWrite<u32, Inte::Register>, _reserved4: [u8; 440], counter: ReadOnly<u32, Counter::Register>, prescaler: ReadWrite<u32, Prescaler::Register>, _reserved5: [u8; 52], cc: [ReadWrite<u32, Counter::Register>; 4], _reserved6: [u8; 2732], power: ReadWrite<u32>, } register_bitfields![u32, Inte [ TICK 0, OVRFLW 1, COMPARE0 16, COMPARE1 17, COMPARE2 18, COMPARE3 19 ], Prescaler [ PRESCALER OFFSET(0) NUMBITS(12) ], Task [ ENABLE 0 ], Event [ READY 0 ], Counter [ VALUE OFFSET(0) NUMBITS(24) ] ]; pub struct Rtc<'a> { registers: StaticRef<RtcRegisters>, overflow_client: OptionalCell<&'a dyn time::OverflowClient>, alarm_client: OptionalCell<&'a dyn time::AlarmClient>, enabled: Cell<bool>, } impl<'a> Rtc<'a> { pub const fn new() -> Self { Self { registers: RTC1_BASE, overflow_client: OptionalCell::empty(), alarm_client: OptionalCell::empty(), enabled: Cell::new(false), } } pub fn handle_interrupt(&self) { if self.registers.events_ovrflw.is_set(Event::READY) { 
self.registers.events_ovrflw.write(Event::READY::CLEAR); self.overflow_client.map(|client| client.overflow()); } if self.registers.events_compare[0].is_set(Event::READY) { self.registers.intenclr.write(Inte::COMPARE0::SET); self.registers.events_compare[0].write(Event::READY::CLEAR); self.alarm_client.map(|client| { client.alarm(); }); } } } impl Time for Rtc<'_> { type Frequency = time::Freq32KHz; type Ticks = time::Ticks24; fn now(&self) -> Self::Ticks { Self::Ticks::from(self.registers.counter.read(Counter::VALUE)) } } impl<'a> time::Counter<'a> for Rtc<'a> { fn set_overflow_client(&'a self, client: &'a dyn time::OverflowClient) { self.overflow_client.set(client); self.registers.intenset.write(Inte::OVRFLW::SET); } fn start(&self) -> Result<(), ErrorCode> { self.registers.prescaler.write(Prescaler::PRESCALER.val(0)); self.registers.tasks_start.write(Task::ENABLE::SET); self.enabled.set(true); Ok(()) } fn stop(&self) -> Result<(), ErrorCode> { self.registers.tasks_stop.write(Task::ENABLE::SET); self.enabled.set(false); Ok(()) } fn reset(&self) -> Result<(), ErrorCode> { self.registers.tasks_clear.write(Task::ENABLE::SET); Ok(()) } fn is_running(&self) -> bool { self.enabled.get() } } impl<'a> Alarm<'a> for Rtc<'a> { fn set_alarm_client(&self, client: &'a dyn time::AlarmClient) { self.alarm_client.set(client); } fn set_alarm(&self, reference: Self::Ticks, dt: Self::Ticks) { const SYNC_TICS: u32 = 2; let regs = &*self.registers; let mut expire = reference.wrapping_add(dt); let now = self.now(); let earliest_possible = now.wrapping_add(Self::Ticks::from(SYNC_TICS)); if !now.within_range(reference, expire) || expire.wrapping_sub(now).into_u32() <= SYNC_TICS { expire = earliest_possible; } regs.cc[0].write(Counter::VALUE.val(expire.into_u32())); regs.events_compare[0].write(Event::READY::CLEAR); regs.intenset.write(Inte::COMPARE0::SET); } fn get_alarm(&self) -> Self::Ticks { Self::Ticks::from(self.registers.cc[0].read(Counter::VALUE)) } fn disarm(&self) -> Result<(), 
ErrorCode> { let regs = &*self.registers; regs.intenclr.write(Inte::COMPARE0::SET); regs.events_compare[0].write(Event::READY::CLEAR); Ok(()) } fn is_armed(&self) -> bool { self.registers.evten.is_set(Inte::COMPARE0) } fn minimum_dt(&self) -> Self::Ticks { Self::Ticks::from(10) } }
use core::cell::Cell; use kernel::common::cells::OptionalCell; use kernel::common::registers::{register_bitfields, ReadOnly, ReadWrite, WriteOnly}; use kernel::common::StaticRef; use kernel::hil::time::{self, Alarm, Ticks, Time}; use kernel::ErrorCode; const RTC1_BASE: StaticRef<RtcRegisters> = unsafe { StaticRef::new(0x40011000 as *const RtcRegisters) }; #[repr(C)] struct RtcRegisters { tasks_start: WriteOnly<u32, Task::Register>, tasks_stop: WriteOnly<u32, Task::Register>, tasks_clear: WriteOnly<u32, Task::Register>, tasks_trigovrflw: WriteOnly<u32, Task::Register>, _reserved0: [u8; 240], events_tick: ReadWrite<u32, Event::Register>, events_ovrflw: ReadWrite<u32, Event::Register>, _reserved1: [u8; 56], events_compare: [ReadWrite<u32, Event::Register>; 4], _reserved2: [u8; 436], intenset: ReadWrite<u32, Inte:
self.now(); let earliest_possible = now.wrapping_add(Self::Ticks::from(SYNC_TICS)); if !now.within_range(reference, expire) || expire.wrapping_sub(now).into_u32() <= SYNC_TICS { expire = earliest_possible; } regs.cc[0].write(Counter::VALUE.val(expire.into_u32())); regs.events_compare[0].write(Event::READY::CLEAR); regs.intenset.write(Inte::COMPARE0::SET); } fn get_alarm(&self) -> Self::Ticks { Self::Ticks::from(self.registers.cc[0].read(Counter::VALUE)) } fn disarm(&self) -> Result<(), ErrorCode> { let regs = &*self.registers; regs.intenclr.write(Inte::COMPARE0::SET); regs.events_compare[0].write(Event::READY::CLEAR); Ok(()) } fn is_armed(&self) -> bool { self.registers.evten.is_set(Inte::COMPARE0) } fn minimum_dt(&self) -> Self::Ticks { Self::Ticks::from(10) } }
:Register>, intenclr: ReadWrite<u32, Inte::Register>, _reserved3: [u8; 52], evten: ReadWrite<u32, Inte::Register>, evtenset: ReadWrite<u32, Inte::Register>, evtenclr: ReadWrite<u32, Inte::Register>, _reserved4: [u8; 440], counter: ReadOnly<u32, Counter::Register>, prescaler: ReadWrite<u32, Prescaler::Register>, _reserved5: [u8; 52], cc: [ReadWrite<u32, Counter::Register>; 4], _reserved6: [u8; 2732], power: ReadWrite<u32>, } register_bitfields![u32, Inte [ TICK 0, OVRFLW 1, COMPARE0 16, COMPARE1 17, COMPARE2 18, COMPARE3 19 ], Prescaler [ PRESCALER OFFSET(0) NUMBITS(12) ], Task [ ENABLE 0 ], Event [ READY 0 ], Counter [ VALUE OFFSET(0) NUMBITS(24) ] ]; pub struct Rtc<'a> { registers: StaticRef<RtcRegisters>, overflow_client: OptionalCell<&'a dyn time::OverflowClient>, alarm_client: OptionalCell<&'a dyn time::AlarmClient>, enabled: Cell<bool>, } impl<'a> Rtc<'a> { pub const fn new() -> Self { Self { registers: RTC1_BASE, overflow_client: OptionalCell::empty(), alarm_client: OptionalCell::empty(), enabled: Cell::new(false), } } pub fn handle_interrupt(&self) { if self.registers.events_ovrflw.is_set(Event::READY) { self.registers.events_ovrflw.write(Event::READY::CLEAR); self.overflow_client.map(|client| client.overflow()); } if self.registers.events_compare[0].is_set(Event::READY) { self.registers.intenclr.write(Inte::COMPARE0::SET); self.registers.events_compare[0].write(Event::READY::CLEAR); self.alarm_client.map(|client| { client.alarm(); }); } } } impl Time for Rtc<'_> { type Frequency = time::Freq32KHz; type Ticks = time::Ticks24; fn now(&self) -> Self::Ticks { Self::Ticks::from(self.registers.counter.read(Counter::VALUE)) } } impl<'a> time::Counter<'a> for Rtc<'a> { fn set_overflow_client(&'a self, client: &'a dyn time::OverflowClient) { self.overflow_client.set(client); self.registers.intenset.write(Inte::OVRFLW::SET); } fn start(&self) -> Result<(), ErrorCode> { self.registers.prescaler.write(Prescaler::PRESCALER.val(0)); 
self.registers.tasks_start.write(Task::ENABLE::SET); self.enabled.set(true); Ok(()) } fn stop(&self) -> Result<(), ErrorCode> { self.registers.tasks_stop.write(Task::ENABLE::SET); self.enabled.set(false); Ok(()) } fn reset(&self) -> Result<(), ErrorCode> { self.registers.tasks_clear.write(Task::ENABLE::SET); Ok(()) } fn is_running(&self) -> bool { self.enabled.get() } } impl<'a> Alarm<'a> for Rtc<'a> { fn set_alarm_client(&self, client: &'a dyn time::AlarmClient) { self.alarm_client.set(client); } fn set_alarm(&self, reference: Self::Ticks, dt: Self::Ticks) { const SYNC_TICS: u32 = 2; let regs = &*self.registers; let mut expire = reference.wrapping_add(dt); let now =
random
[ { "content": "/// Interface for receiving notification when a particular time\n\n/// (`Counter` value) is reached. Clients use the\n\n/// [`AlarmClient`](trait.AlarmClient.html) trait to signal when the\n\n/// counter has reached a pre-specified value set in\n\n/// [`set_alarm`](#tymethod.set_alarm). Alarms are intended for\n\n/// low-level time needs that require precision (i.e., firing on a\n\n/// precise clock tick). Software that needs more functionality\n\n/// but can tolerate some jitter should use the `Timer` trait\n\n/// instead.\n\npub trait Alarm<'a>: Time {\n\n /// Specify the callback for when the counter reaches the alarm\n\n /// value. If there was a previously installed callback this call\n\n /// replaces it.\n\n fn set_alarm_client(&'a self, client: &'a dyn AlarmClient);\n\n\n\n /// Specify when the callback should be called and enable it. The\n\n /// callback will be enqueued when `Time::now() == reference + dt`. The\n\n /// callback itself may not run exactly at this time, due to delays.\n\n /// However, it it assured to execute *after* `reference + dt`: it can\n\n /// be delayed but will never fire early. The method takes `reference`\n\n /// and `dt` rather than a single value denoting the counter value so it\n\n /// can distinguish between alarms which have very recently already\n\n /// passed and those in the far far future (see #1651).\n\n fn set_alarm(&self, reference: Self::Ticks, dt: Self::Ticks);\n\n\n\n /// Return the current alarm value. 
This is undefined at boot and\n\n /// otherwise returns `now + dt` from the last call to `set_alarm`.\n\n fn get_alarm(&self) -> Self::Ticks;\n\n\n", "file_path": "kernel/src/hil/time.rs", "rank": 0, "score": 180698.15883605415 }, { "content": "/// State that is stored in each process's grant region to support IPC.\n\nstruct IPCData<const NUM_PROCS: usize> {\n\n /// An array of app slices that this application has shared with other\n\n /// applications.\n\n shared_memory: [ReadWriteAppSlice; NUM_PROCS],\n\n search_slice: ReadOnlyAppSlice,\n\n /// An array of upcalls this process has registered to receive upcalls\n\n /// from other services.\n\n client_upcalls: [Upcall; NUM_PROCS],\n\n /// The upcall setup by a service. Each process can only be one service.\n\n upcall: Upcall,\n\n}\n\n\n\nimpl<const NUM_PROCS: usize> Default for IPCData<NUM_PROCS> {\n\n fn default() -> IPCData<NUM_PROCS> {\n\n const DEFAULT_RW_APP_SLICE: ReadWriteAppSlice = ReadWriteAppSlice::const_default();\n\n IPCData {\n\n shared_memory: [DEFAULT_RW_APP_SLICE; NUM_PROCS],\n\n search_slice: ReadOnlyAppSlice::default(),\n\n client_upcalls: [Upcall::default(); NUM_PROCS],\n\n upcall: Upcall::default(),\n", "file_path": "kernel/src/ipc.rs", "rank": 1, "score": 160697.42989690398 }, { "content": "struct LogTest<A: Alarm<'static>> {\n\n log: &'static Log,\n\n buffer: TakeCell<'static, [u8]>,\n\n alarm: A,\n\n state: Cell<TestState>,\n\n ops: &'static [TestOp],\n\n op_index: Cell<usize>,\n\n op_start: Cell<bool>,\n\n read_val: Cell<u64>,\n\n write_val: Cell<u64>,\n\n}\n\n\n\nimpl<A: Alarm<'static>> LogTest<A> {\n\n fn new(\n\n log: &'static Log,\n\n buffer: &'static mut [u8],\n\n alarm: A,\n\n ops: &'static [TestOp],\n\n ) -> LogTest<A> {\n\n // Recover test state.\n", "file_path": "boards/imix/src/test/log_test.rs", "rank": 2, "score": 158170.26194170225 }, { "content": "struct LogTest<A: Alarm<'static>> {\n\n log: &'static Log,\n\n buffer: TakeCell<'static, [u8]>,\n\n alarm: A,\n\n state: 
Cell<TestState>,\n\n ops: &'static [TestOp],\n\n op_index: Cell<usize>,\n\n op_start: Cell<bool>,\n\n read_val: Cell<u64>,\n\n write_val: Cell<u64>,\n\n}\n\n\n\nimpl<A: Alarm<'static>> LogTest<A> {\n\n fn new(\n\n log: &'static Log,\n\n buffer: &'static mut [u8],\n\n alarm: A,\n\n ops: &'static [TestOp],\n\n ) -> LogTest<A> {\n\n // Recover test state.\n", "file_path": "boards/nano33ble/src/test/log_test.rs", "rank": 3, "score": 158170.26194170225 }, { "content": "struct LogTest<A: Alarm<'static>> {\n\n log: &'static Log,\n\n buffer: TakeCell<'static, [u8]>,\n\n alarm: A,\n\n ops: &'static [TestOp],\n\n op_index: Cell<usize>,\n\n}\n\n\n\nimpl<A: Alarm<'static>> LogTest<A> {\n\n fn new(\n\n log: &'static Log,\n\n buffer: &'static mut [u8],\n\n alarm: A,\n\n ops: &'static [TestOp],\n\n ) -> LogTest<A> {\n\n debug!(\n\n \"Log recovered from flash (Start and end entry IDs: {:?} to {:?})\",\n\n log.log_start(),\n\n log.log_end()\n\n );\n", "file_path": "boards/imix/src/test/linear_log_test.rs", "rank": 4, "score": 155816.72891515214 }, { "content": "struct LogTest<A: Alarm<'static>> {\n\n log: &'static Log,\n\n buffer: TakeCell<'static, [u8]>,\n\n alarm: A,\n\n ops: &'static [TestOp],\n\n op_index: Cell<usize>,\n\n}\n\n\n\nimpl<A: Alarm<'static>> LogTest<A> {\n\n fn new(\n\n log: &'static Log,\n\n buffer: &'static mut [u8],\n\n alarm: A,\n\n ops: &'static [TestOp],\n\n ) -> LogTest<A> {\n\n debug_verbose!(\n\n \"Log recovered from flash (Start and end entry IDs: {:?} to {:?})\",\n\n log.log_start(),\n\n log.log_end()\n\n );\n", "file_path": "boards/nano33ble/src/test/linear_log_test.rs", "rank": 5, "score": 155816.72891515214 }, { "content": "#[repr(C)]\n\nstruct IntConfigRegisters {\n\n _reserved0: [u8; 3],\n\n /// Machine Software Interrupt\n\n msip: ReadWrite<u8, intcon::Register>,\n\n _reserved1: [u8; 3],\n\n /// Machine Timer Interrupt\n\n mtip: ReadWrite<u8, intcon::Register>,\n\n _reserved2: [u8; 3],\n\n /// Machine External Interrupt\n\n meip: ReadWrite<u8, 
intcon::Register>,\n\n /// CLIC Software Interrupt\n\n csip: ReadWrite<u8, intcon::Register>,\n\n _reserved3: [u8; 3],\n\n /// Local Interrupt 0-127\n\n localint: [ReadWrite<u8, intcon::Register>; 128],\n\n _reserved4: [u8; 880],\n\n}\n\n\n\n/// Configuration Register\n", "file_path": "arch/rv32i/src/clic.rs", "rank": 6, "score": 148179.47207072197 }, { "content": "#[repr(C)]\n\nstruct IntPendRegisters {\n\n _reserved0: [u8; 3],\n\n /// Machine Software Interrupt\n\n msip: ReadWrite<u8, intpend::Register>,\n\n _reserved1: [u8; 3],\n\n /// Machine Timer Interrupt\n\n mtip: ReadWrite<u8, intpend::Register>,\n\n _reserved2: [u8; 3],\n\n /// Machine External Interrupt\n\n meip: ReadWrite<u8, intpend::Register>,\n\n /// CLIC Software Interrupt\n\n csip: ReadWrite<u8, intpend::Register>,\n\n _reserved3: [u8; 3],\n\n /// Local Interrupt 0-127\n\n localintpend: [ReadWrite<u8, intpend::Register>; 128],\n\n _reserved4: [u8; 880],\n\n}\n\n\n\n/// Interrupt Enable Registers\n", "file_path": "arch/rv32i/src/clic.rs", "rank": 7, "score": 148179.47207072197 }, { "content": "#[repr(C)]\n\nstruct IntEnableRegisters {\n\n _reserved0: [u8; 3],\n\n /// Machine Software Interrupt\n\n msip: ReadWrite<u8, inten::Register>,\n\n _reserved1: [u8; 3],\n\n /// Machine Timer Interrupt\n\n mtip: ReadWrite<u8, inten::Register>,\n\n _reserved2: [u8; 3],\n\n /// Machine External Interrupt\n\n meip: ReadWrite<u8, inten::Register>,\n\n /// CLIC Software Interrupt\n\n csip: ReadWrite<u8, inten::Register>,\n\n _reserved3: [u8; 3],\n\n /// Local Interrupt 0-127\n\n localint: [ReadWrite<u8, inten::Register>; 128],\n\n _reserved4: [u8; 880],\n\n}\n\n\n\n/// Interrupt Configuration Registers\n", "file_path": "arch/rv32i/src/clic.rs", "rank": 8, "score": 148179.47207072197 }, { "content": "#[derive(Copy, Clone)]\n\nstruct AlarmData {\n\n expiration: Expiration,\n\n}\n\n\n\nimpl AlarmData {\n\n fn new() -> AlarmData {\n\n AlarmData {\n\n expiration: Expiration::Disabled,\n\n }\n\n }\n\n}\n\n\n", 
"file_path": "capsules/src/ble_advertising_driver.rs", "rank": 9, "score": 148130.82697912137 }, { "content": "/// Parse a `u32` from four bytes as received on the bus\n\nfn get_u32(b0: u8, b1: u8, b2: u8, b3: u8) -> u32 {\n\n (b0 as u32) | ((b1 as u32) << 8) | ((b2 as u32) << 16) | ((b3 as u32) << 24)\n\n}\n\n\n", "file_path": "capsules/src/usb/descriptors.rs", "rank": 10, "score": 114341.88880419065 }, { "content": "fn ticks_from_val<T: Ticks>(val: u64) -> T {\n\n if val <= T::max_value().into_u32() as u64 {\n\n T::from(val as u32)\n\n } else {\n\n T::max_value()\n\n }\n\n}\n\n\n", "file_path": "kernel/src/hil/time.rs", "rank": 11, "score": 113693.67039544029 }, { "content": "fn crc8(data: &[u8]) -> u8 {\n\n let polynomial = 0x31;\n\n let mut crc = 0xff;\n\n\n\n for x in 0..data.len() {\n\n crc ^= data[x as usize] as u8;\n\n for _i in 0..8 {\n\n if (crc & 0x80) != 0 {\n\n crc = crc << 1 ^ polynomial;\n\n } else {\n\n crc = crc << 1;\n\n }\n\n }\n\n }\n\n crc\n\n}\n\n\n\npub struct SHT3x<'a, A: Alarm<'a>> {\n\n i2c: &'a dyn i2c::I2CDevice,\n\n humidity_client: OptionalCell<&'a dyn kernel::hil::sensors::HumidityClient>,\n", "file_path": "capsules/src/sht3x.rs", "rank": 12, "score": 109259.25206571241 }, { "content": "/// Callback handler for when an Alarm fires (a `Counter` reaches a specific\n\n/// value).\n\npub trait AlarmClient {\n\n /// Callback indicating the alarm time has been reached. The alarm\n\n /// MUST be disabled when this is called. 
If a new alarm is needed,\n\n /// the client can call `Alarm::set_alarm`.\n\n fn alarm(&self);\n\n}\n\n\n", "file_path": "kernel/src/hil/time.rs", "rank": 13, "score": 108883.93385401777 }, { "content": "pub fn decode_u8(buf: &[u8]) -> SResult<u8> {\n\n stream_len_cond!(buf, 1);\n\n stream_done!(1, buf[0]);\n\n}\n\n\n", "file_path": "capsules/src/net/stream.rs", "rank": 14, "score": 106701.43230070421 }, { "content": "/// Verifies that the prefixes of the two buffers match, where the length of the\n\n/// prefix is given in bits\n\npub fn matches_prefix(buf1: &[u8], buf2: &[u8], prefix_len: u8) -> bool {\n\n let full_bytes = (prefix_len / 8) as usize;\n\n let remaining_bits = prefix_len % 8;\n\n let bytes = full_bytes + if remaining_bits != 0 { 1 } else { 0 };\n\n\n\n if bytes > buf1.len() || bytes > buf2.len() {\n\n return false;\n\n }\n\n\n\n // Ensure that the prefix bits in the last byte match\n\n if remaining_bits != 0 {\n\n let last_byte_mask = 0xff << (8 - remaining_bits);\n\n if (buf1[full_bytes] ^ buf2[full_bytes]) & last_byte_mask != 0 {\n\n return false;\n\n }\n\n }\n\n\n\n // Ensure that the prefix bytes before that match\n\n buf1[..full_bytes].iter().eq(buf2[..full_bytes].iter())\n\n}\n\n\n", "file_path": "capsules/src/net/util.rs", "rank": 15, "score": 106124.79700713174 }, { "content": "/// Represents a moment in time, obtained by calling `now`.\n\npub trait Time {\n\n /// The number of ticks per second\n\n type Frequency: Frequency;\n\n /// The width of a time value\n\n type Ticks: Ticks;\n\n\n\n /// Returns a timestamp. Depending on the implementation of\n\n /// Time, this could represent either a static timestamp or\n\n /// a sample of a counter; if an implementation relies on\n\n /// it being constant or changing it should use `Timestamp`\n\n /// or `Counter`.\n\n fn now(&self) -> Self::Ticks;\n\n\n\n /// Returns the number of ticks in the provided number of seconds,\n\n /// rounding down any fractions. 
If the value overflows Ticks it\n\n /// returns `Ticks::max_value()`.\n\n fn ticks_from_seconds(s: u32) -> Self::Ticks {\n\n let val: u64 = Self::Frequency::frequency() as u64 * s as u64;\n\n ticks_from_val(val)\n\n }\n", "file_path": "kernel/src/hil/time.rs", "rank": 16, "score": 105155.77765079858 }, { "content": "fn decompress_nh(iphc_header: u8, buf: &[u8], consumed: &mut usize) -> (bool, u8) {\n\n let is_nhc = (iphc_header & iphc::NH) != 0;\n\n let mut next_header: u8 = 0;\n\n if !is_nhc {\n\n next_header = buf[*consumed];\n\n *consumed += 1;\n\n }\n\n (is_nhc, next_header)\n\n}\n\n\n", "file_path": "capsules/src/net/sixlowpan/sixlowpan_compression.rs", "rank": 17, "score": 102497.56277666675 }, { "content": "pub fn encode_u8(buf: &mut [u8], b: u8) -> SResult {\n\n stream_len_cond!(buf, 1);\n\n buf[0] = b;\n\n stream_done!(1);\n\n}\n\n\n", "file_path": "capsules/src/net/stream.rs", "rank": 18, "score": 102131.91727514108 }, { "content": "/// Maps a LoWPAN_NHC header the corresponding IPv6 next header type,\n\n/// or an error if the NHC header is invalid\n\nfn nhc_to_ip6_nh(nhc: u8) -> Result<u8, ()> {\n\n match nhc & nhc::DISPATCH_MASK {\n\n nhc::DISPATCH_NHC => match nhc & nhc::EID_MASK {\n\n nhc::HOP_OPTS => Ok(ip6_nh::HOP_OPTS),\n\n nhc::ROUTING => Ok(ip6_nh::ROUTING),\n\n nhc::FRAGMENT => Ok(ip6_nh::FRAGMENT),\n\n nhc::DST_OPTS => Ok(ip6_nh::DST_OPTS),\n\n nhc::MOBILITY => Ok(ip6_nh::MOBILITY),\n\n nhc::IP6 => Ok(ip6_nh::IP6),\n\n _ => Err(()),\n\n },\n\n nhc::DISPATCH_UDP => Ok(ip6_nh::UDP),\n\n _ => Err(()),\n\n }\n\n}\n\n\n", "file_path": "capsules/src/net/sixlowpan/sixlowpan_compression.rs", "rank": 19, "score": 100662.93576723857 }, { "content": "/// Represents a static moment in time, that does not change over\n\n/// repeated calls to `Time::now`.\n\npub trait Timestamp: Time {}\n\n\n", "file_path": "kernel/src/hil/time.rs", "rank": 20, "score": 100345.93607908548 }, { "content": "/// Parse a `u16` from two bytes as received on the bus\n\nfn 
get_u16(b0: u8, b1: u8) -> u16 {\n\n (b0 as u16) | ((b1 as u16) << 8)\n\n}\n\n\n", "file_path": "capsules/src/usb/descriptors.rs", "rank": 21, "score": 100303.69492161946 }, { "content": "/// Interface for controlling callbacks when an interval has passed.\n\n/// This interface is intended for software that requires repeated\n\n/// and/or one-shot timers and is willing to experience some jitter or\n\n/// imprecision in return for a simpler API that doesn't require\n\n/// actual calculation of counter values. Software that requires more\n\n/// precisely timed callbacks should use the `Alarm` trait instead.\n\npub trait Timer<'a>: Time {\n\n /// Specify the callback to invoke when the timer interval expires.\n\n /// If there was a previously installed callback this call replaces it. \n\n fn set_timer_client(&'a self, client: &'a dyn TimerClient);\n\n\n\n /// Start a one-shot timer that will invoke the callback at least\n\n /// `interval` ticks in the future. If there is a timer currently pending,\n\n /// calling this cancels that previous timer. After a callback is invoked\n\n /// for a one shot timer, the timer MUST NOT invoke the callback again\n\n /// unless a new timer is started (either with repeating or one shot).\n\n /// Returns the actual interval for the timer that was registered.\n\n /// This MUST NOT be smaller than `interval` but MAY be larger.\n\n fn oneshot(&'a self, interval: Self::Ticks) -> Self::Ticks;\n\n\n\n /// Start a repeating timer that will invoke the callback every\n\n /// `interval` ticks in the future. 
If there is a timer currently\n\n /// pending, calling this cancels that previous timer.\n\n /// Returns the actual interval for the timer that was registered.\n\n /// This MUST NOT be smaller than `interval` but MAY be larger.\n\n fn repeating(&'a self, interval: Self::Ticks) -> Self::Ticks;\n", "file_path": "kernel/src/hil/time.rs", "rank": 22, "score": 97359.80098824605 }, { "content": "/// Represents a free-running hardware counter that can be started and stopped.\n\npub trait Counter<'a>: Time {\n\n /// Specify the callback for when the counter overflows its maximum\n\n /// value (defined by `Ticks`). If there was a previously registered\n\n /// callback this call replaces it.\n\n fn set_overflow_client(&'a self, client: &'a dyn OverflowClient);\n\n\n\n /// Starts the free-running hardware counter. Valid `Result<(), ErrorCode>` values are:\n\n /// - `Ok(())`: the counter is now running\n\n /// - `Err(ErrorCode::OFF)`: underlying clocks or other hardware resources\n\n /// are not on, such that the counter cannot start.\n\n /// - `Err(ErrorCode::FAIL)`: unidentified failure, counter is not running.\n\n /// After a successful call to `start`, `is_running` MUST return true. \n\n fn start(&self) -> Result<(), ErrorCode>;\n\n\n\n /// Stops the free-running hardware counter. Valid `Result<(), ErrorCode>` values are:\n\n /// - `Ok(())`: the counter is now stopped. 
No further\n\n /// overflow callbacks will be invoked.\n\n /// - `Err(ErrorCode::BUSY)`: the counter is in use in a way that means it\n\n /// cannot be stopped and is busy.\n\n /// - `Err(ErrorCode::FAIL)`: unidentified failure, counter is running.\n", "file_path": "kernel/src/hil/time.rs", "rank": 23, "score": 97343.18766868592 }, { "content": "struct Imix {\n\n pconsole: &'static capsules::process_console::ProcessConsole<\n\n 'static,\n\n components::process_console::Capability,\n\n >,\n\n console: &'static capsules::console::Console<'static>,\n\n gpio: &'static capsules::gpio::GPIO<'static, sam4l::gpio::GPIOPin<'static>>,\n\n alarm: &'static AlarmDriver<'static, VirtualMuxAlarm<'static, sam4l::ast::Ast<'static>>>,\n\n temp: &'static capsules::temperature::TemperatureSensor<'static>,\n\n humidity: &'static capsules::humidity::HumiditySensor<'static>,\n\n ambient_light: &'static capsules::ambient_light::AmbientLight<'static>,\n\n adc: &'static capsules::adc::AdcDedicated<'static, sam4l::adc::Adc>,\n\n led:\n\n &'static capsules::led::LedDriver<'static, LedHigh<'static, sam4l::gpio::GPIOPin<'static>>>,\n\n button: &'static capsules::button::Button<'static, sam4l::gpio::GPIOPin<'static>>,\n\n rng: &'static capsules::rng::RngDriver<'static>,\n\n analog_comparator: &'static capsules::analog_comparator::AnalogComparator<\n\n 'static,\n\n sam4l::acifc::Acifc<'static>,\n\n >,\n", "file_path": "boards/imix/src/main.rs", "rank": 24, "score": 96652.83074658358 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nstruct CalibrationData {\n\n temp_slope: f32,\n\n temp_intercept: f32,\n\n humidity_slope: f32,\n\n humidity_intercept: f32,\n\n}\n\n\n\npub struct Hts221<'a> {\n\n buffer: TakeCell<'static, [u8]>,\n\n i2c: &'a dyn I2CDevice,\n\n temperature_client: OptionalCell<&'a dyn TemperatureClient>,\n\n humidity_client: OptionalCell<&'a dyn HumidityClient>,\n\n state: Cell<State>,\n\n pending_temperature: Cell<bool>,\n\n pending_humidity: Cell<bool>,\n\n}\n\n\n\nimpl<'a> 
Hts221<'a> {\n\n pub fn new(i2c: &'a dyn I2CDevice, buffer: &'static mut [u8]) -> Self {\n\n Hts221 {\n", "file_path": "capsules/src/hts221.rs", "rank": 25, "score": 96652.83074658358 }, { "content": "struct Writer {}\n\n\n\nstatic mut WRITER: Writer = Writer {};\n\n\n\nimpl Write for Writer {\n\n fn write_str(&mut self, s: &str) -> ::core::fmt::Result {\n\n self.write(s.as_bytes());\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl IoWrite for Writer {\n\n fn write(&mut self, buf: &[u8]) {\n\n for b in buf {\n\n // Print to a special address for simulation output\n\n unsafe {\n\n write_volatile(0x8000_1008 as *mut u8, *b as u8);\n\n }\n\n }\n\n }\n", "file_path": "boards/swervolf/src/io.rs", "rank": 26, "score": 96652.83074658358 }, { "content": "struct Transaction {\n\n /// The buffer containing the bytes to transmit as it should be returned to\n\n /// the client\n\n app_id: ProcessId,\n\n /// The total amount to transmit\n\n read_len: OptionalCell<usize>,\n\n}\n\n\n\npub struct I2CMasterDriver<'a, I: 'a + i2c::I2CMaster> {\n\n i2c: &'a I,\n\n buf: TakeCell<'static, [u8]>,\n\n tx: MapCell<Transaction>,\n\n apps: Grant<App>,\n\n}\n\n\n\nimpl<'a, I: 'a + i2c::I2CMaster> I2CMasterDriver<'a, I> {\n\n pub fn new(i2c: &'a I, buf: &'static mut [u8], apps: Grant<App>) -> I2CMasterDriver<'a, I> {\n\n I2CMasterDriver {\n\n i2c,\n\n buf: TakeCell::new(buf),\n", "file_path": "capsules/src/i2c_master.rs", "rank": 27, "score": 96652.83074658358 }, { "content": "struct Writer {}\n\n\n\nstatic mut WRITER: Writer = Writer {};\n\n\n\nimpl Write for Writer {\n\n fn write_str(&mut self, s: &str) -> ::core::fmt::Result {\n\n self.write(s.as_bytes());\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl IoWrite for Writer {\n\n fn write(&mut self, buf: &[u8]) {\n\n let uart = sifive::uart::Uart::new(e310x::uart::UART0_BASE, 16_000_000);\n\n uart.transmit_sync(buf);\n\n }\n\n}\n\n\n\n/// Panic handler.\n\n#[cfg(not(test))]\n", "file_path": "boards/hifive1/src/io.rs", "rank": 28, "score": 96652.83074658358 }, { 
"content": "struct Writer {\n\n initialized: bool,\n\n}\n\n\n\nstatic mut WRITER: Writer = Writer { initialized: false };\n\n\n\nimpl Write for Writer {\n\n fn write_str(&mut self, s: &str) -> ::core::fmt::Result {\n\n self.write(s.as_bytes());\n\n Ok(())\n\n }\n\n}\n\n\n\nconst BUF_LEN: usize = 512;\n\nstatic mut STATIC_PANIC_BUF: [u8; BUF_LEN] = [0; BUF_LEN];\n\n\n\nstatic mut DUMMY: DummyUsbClient = DummyUsbClient {\n\n fired: VolatileCell::new(false),\n\n};\n\n\n", "file_path": "boards/nano33ble/src/io.rs", "rank": 29, "score": 96652.83074658358 }, { "content": "#[repr(C)]\n\nstruct Descriptor {\n\n addr: u32, // Transfer Address Register (RW): Address of memory block to compute\n\n ctrl: TCR, // Transfer Control Register (RW): IEN, TRWIDTH, BTSIZE\n\n _res: [u32; 2],\n\n crc: u32, // Transfer Reference Register (RW): Reference CRC (for compare mode)\n\n}\n\n\n\n// Transfer Control Register (see Section 41.6.18)\n", "file_path": "chips/sam4l/src/crccu.rs", "rank": 30, "score": 96652.83074658358 }, { "content": "/// A structure representing this platform that holds references to all\n\n/// capsules for this platform.\n\nstruct Hail {\n\n console: &'static capsules::console::Console<'static>,\n\n gpio: &'static capsules::gpio::GPIO<'static, sam4l::gpio::GPIOPin<'static>>,\n\n alarm: &'static capsules::alarm::AlarmDriver<\n\n 'static,\n\n VirtualMuxAlarm<'static, sam4l::ast::Ast<'static>>,\n\n >,\n\n ambient_light: &'static capsules::ambient_light::AmbientLight<'static>,\n\n temp: &'static capsules::temperature::TemperatureSensor<'static>,\n\n ninedof: &'static capsules::ninedof::NineDof<'static>,\n\n humidity: &'static capsules::humidity::HumiditySensor<'static>,\n\n spi: &'static capsules::spi_controller::Spi<\n\n 'static,\n\n VirtualSpiMasterDevice<'static, sam4l::spi::SpiHw>,\n\n >,\n\n nrf51822: &'static capsules::nrf51822_serialization::Nrf51822Serialization<'static>,\n\n adc: &'static capsules::adc::AdcDedicated<'static, sam4l::adc::Adc>,\n\n led: &'static 
capsules::led::LedDriver<'static, LedLow<'static, sam4l::gpio::GPIOPin<'static>>>,\n\n button: &'static capsules::button::Button<'static, sam4l::gpio::GPIOPin<'static>>,\n\n rng: &'static capsules::rng::RngDriver<'static>,\n", "file_path": "boards/hail/src/main.rs", "rank": 31, "score": 96652.83074658358 }, { "content": "struct Writer {\n\n initialized: bool,\n\n}\n\n\n\nstatic mut WRITER: Writer = Writer { initialized: false };\n\n\n\nimpl Write for Writer {\n\n fn write_str(&mut self, s: &str) -> ::core::fmt::Result {\n\n self.write(s.as_bytes());\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl IoWrite for Writer {\n\n fn write(&mut self, buf: &[u8]) {\n\n // Here, we create a second instance of the USART3 struct.\n\n // This is okay because we only call this during a panic, and\n\n // we will never actually process the interrupts\n\n let uart = unsafe { sam4l::usart::USART::new_usart3(CHIP.unwrap().pm) };\n\n let regs_manager = &sam4l::usart::USARTRegManager::panic_new(&uart);\n", "file_path": "boards/imix/src/io.rs", "rank": 32, "score": 96652.83074658358 }, { "content": "struct Writer {\n\n initialized: bool,\n\n}\n\n\n\nstatic mut WRITER: Writer = Writer { initialized: false };\n\n\n\nimpl Write for Writer {\n\n fn write_str(&mut self, s: &str) -> ::core::fmt::Result {\n\n self.write(s.as_bytes());\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl IoWrite for Writer {\n\n fn write(&mut self, buf: &[u8]) {\n\n // Here, we create a second instance of the USART0 struct.\n\n // This is okay because we only call this during a panic, and\n\n // we will never actually process the interrupts\n\n let uart = unsafe { sam4l::usart::USART::new_usart0(CHIP.unwrap().pm) };\n\n let regs_manager = &sam4l::usart::USARTRegManager::panic_new(&uart);\n", "file_path": "boards/hail/src/io.rs", "rank": 33, "score": 96652.83074658358 }, { "content": "#[repr(C)]\n\nstruct Register {\n\n val: ReadWrite<u32>,\n\n set: WriteOnly<u32>,\n\n clear: WriteOnly<u32>,\n\n toggle: WriteOnly<u32>,\n\n}\n\n\n", "file_path": 
"chips/sam4l/src/gpio.rs", "rank": 34, "score": 96652.83074658358 }, { "content": "/// Teensy 4 platform\n\nstruct Teensy40 {\n\n led:\n\n &'static capsules::led::LedDriver<'static, LedHigh<'static, imxrt1060::gpio::Pin<'static>>>,\n\n console: &'static capsules::console::Console<'static>,\n\n ipc: kernel::ipc::IPC<NUM_PROCS>,\n\n alarm: &'static capsules::alarm::AlarmDriver<\n\n 'static,\n\n capsules::virtual_alarm::VirtualMuxAlarm<'static, imxrt1060::gpt::Gpt1<'static>>,\n\n >,\n\n}\n\n\n\nimpl kernel::Platform for Teensy40 {\n\n fn with_driver<F, R>(&self, driver_num: usize, f: F) -> R\n\n where\n\n F: FnOnce(Option<&dyn kernel::Driver>) -> R,\n\n {\n\n match driver_num {\n\n capsules::led::DRIVER_NUM => f(Some(self.led)),\n\n capsules::console::DRIVER_NUM => f(Some(self.console)),\n\n kernel::ipc::DRIVER_NUM => f(Some(&self.ipc)),\n", "file_path": "boards/teensy40/src/main.rs", "rank": 35, "score": 96652.83074658358 }, { "content": "#[derive(Copy, Clone)]\n\nstruct Buffer {\n\n id: usize,\n\n free: bool,\n\n}\n\n\n\nimpl Buffer {\n\n pub const fn new(id: usize) -> Self {\n\n Buffer { id, free: true }\n\n }\n\n}\n\n\n\npub struct Usb<'a> {\n\n registers: StaticRef<UsbRegisters>,\n\n descriptors: [Endpoint<'a>; N_ENDPOINTS],\n\n client: OptionalCell<&'a dyn hil::usb::Client<'a>>,\n\n state: OptionalCell<State>,\n\n bufs: Cell<[Buffer; N_BUFFERS]>,\n\n addr: Cell<u16>,\n\n}\n\n\n", "file_path": "chips/lowrisc/src/usbdev.rs", "rank": 36, "score": 96652.83074658358 }, { "content": "#[derive(Clone, Copy)]\n\nstruct ProcessUpcall {\n\n app_id: ProcessId,\n\n upcall_id: UpcallId,\n\n appdata: usize,\n\n fn_ptr: NonNull<*mut ()>,\n\n}\n\n\n\n#[derive(Clone, Copy, Default)]\n\npub struct Upcall {\n\n cb: Option<ProcessUpcall>,\n\n}\n\n\n\nimpl Upcall {\n\n pub(crate) fn new(\n\n app_id: ProcessId,\n\n upcall_id: UpcallId,\n\n appdata: usize,\n\n fn_ptr: NonNull<*mut ()>,\n\n ) -> Upcall {\n\n Upcall {\n", "file_path": "kernel/src/upcall.rs", "rank": 37, "score": 
96652.83074658358 }, { "content": "/// Verifies that a prefix given in the form of a byte array slice is valid with\n\n/// respect to its length in bits (prefix_len):\n\n///\n\n/// - The byte array slice must contain enough bytes to cover the prefix length\n\n/// (no implicit zero-padding)\n\n/// - The rest of the prefix array slice is zero-padded\n\npub fn verify_prefix_len(prefix: &[u8], prefix_len: u8) -> bool {\n\n let full_bytes = (prefix_len / 8) as usize;\n\n let remaining_bits = prefix_len % 8;\n\n let bytes = full_bytes + if remaining_bits != 0 { 1 } else { 0 };\n\n\n\n if bytes > prefix.len() {\n\n return false;\n\n }\n\n\n\n // The bits between the prefix's end and the next byte boundary must be 0\n\n if remaining_bits != 0 {\n\n let last_byte_mask = 0xff >> remaining_bits;\n\n if prefix[full_bytes] & last_byte_mask != 0 {\n\n return false;\n\n }\n\n }\n\n\n\n // Ensure that the remaining bytes are also 0\n\n prefix[bytes..].iter().all(|&b| b == 0)\n\n}\n\n\n", "file_path": "capsules/src/net/util.rs", "rank": 38, "score": 95504.55761083134 }, { "content": "// Debugging functions.\n\nfn packet_to_hex(packet: &[VolatileCell<u8>], packet_hex: &mut [u8]) {\n\n let hex_char = |x: u8| {\n\n if x < 10 {\n\n b'0' + x\n\n } else {\n\n b'a' + x - 10\n\n }\n\n };\n\n\n\n for (i, x) in packet.iter().enumerate() {\n\n let x = x.get();\n\n packet_hex[2 * i] = hex_char(x >> 4);\n\n packet_hex[2 * i + 1] = hex_char(x & 0x0f);\n\n }\n\n}\n\n\n", "file_path": "chips/nrf52/src/usbd.rs", "rank": 39, "score": 95504.55761083134 }, { "content": "/// This is the current object header used for TicKV objects\n\nstruct ObjectHeader {\n\n version: u8,\n\n // In reality this is a u4.\n\n flags: u8,\n\n // In reality this is a u12.\n\n len: u16,\n\n hashed_key: u64,\n\n}\n\n\n\npub(crate) const FLAGS_VALID: u8 = 8;\n\n\n\nimpl ObjectHeader {\n\n fn new(hashed_key: u64, len: u16) -> Self {\n\n assert!(len < 0xFFF);\n\n Self {\n\n version: VERSION,\n\n flags: FLAGS_VALID,\n\n len,\n\n 
hashed_key,\n\n }\n", "file_path": "libraries/tickv/src/tickv.rs", "rank": 40, "score": 95147.74759662364 }, { "content": "/// A structure representing this platform that holds references to all\n\n/// capsules for this platform. We've included an alarm and console.\n\nstruct HiFive1 {\n\n led:\n\n &'static capsules::led::LedDriver<'static, LedLow<'static, sifive::gpio::GpioPin<'static>>>,\n\n console: &'static capsules::console::Console<'static>,\n\n lldb: &'static capsules::low_level_debug::LowLevelDebug<\n\n 'static,\n\n capsules::virtual_uart::UartDevice<'static>,\n\n >,\n\n alarm: &'static capsules::alarm::AlarmDriver<\n\n 'static,\n\n VirtualMuxAlarm<'static, sifive::clint::Clint<'static>>,\n\n >,\n\n}\n\n\n\n/// Mapping of integer syscalls to objects that implement syscalls.\n\nimpl Platform for HiFive1 {\n\n fn with_driver<F, R>(&self, driver_num: usize, f: F) -> R\n\n where\n\n F: FnOnce(Option<&dyn kernel::Driver>) -> R,\n\n {\n", "file_path": "boards/hifive1/src/main.rs", "rank": 41, "score": 95147.6981806788 }, { "content": "#[repr(C)]\n\nstruct PwmRegisters {\n\n _reserved0: [u8; 4],\n\n /// Stops PWM pulse generation on all channels at the end of current PWM period\n\n tasks_stop: WriteOnly<u32, TASK::Register>,\n\n /// Loads the first PWM value on all enabled channels\n\n tasks_seqstart: [WriteOnly<u32, TASK::Register>; 2],\n\n /// Steps by one value in the current sequence on all enabled channels if DECODER.MO\n\n tasks_nextstep: WriteOnly<u32, TASK::Register>,\n\n _reserved1: [u8; 240],\n\n /// Response to STOP task, emitted when PWM pulses are no longer generated\n\n events_stopped: ReadWrite<u32, EVENT::Register>,\n\n /// First PWM period started on sequence 0\n\n events_seqstarted: [ReadWrite<u32, EVENT::Register>; 2],\n\n /// Emitted at end of every sequence 0, when last value from RAM has been\n\n /// applied to the wave counter\n\n events_seqend: [ReadWrite<u32, EVENT::Register>; 2],\n\n /// Emitted at the end of each PWM period\n\n 
events_pwmperiodend: ReadWrite<u32, EVENT::Register>,\n\n /// Concatenated sequences have been played the amount of times defined in LOOP.CNT\n\n events_loopsdone: ReadWrite<u32, EVENT::Register>,\n", "file_path": "chips/nrf52/src/pwm.rs", "rank": 42, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct UsartRegisters {\n\n /// Control register 1\n\n cr1: ReadWrite<u32, CR1::Register>,\n\n /// Control register 2\n\n cr2: ReadWrite<u32, CR2::Register>,\n\n /// Control register 3\n\n cr3: ReadWrite<u32, CR3::Register>,\n\n /// Baud rate register\n\n brr: ReadWrite<u32, BRR::Register>,\n\n /// Guard time and prescaler register\n\n gtpr: ReadWrite<u32, GTPR::Register>,\n\n /// Receiver timeout register\n\n rtor: ReadWrite<u32, RTOR::Register>,\n\n /// Request register\n\n rqr: ReadWrite<u32, RTOR::Register>,\n\n /// Interrupt and status register\n\n isr: ReadWrite<u32, ISR::Register>,\n\n /// Interrupt flag clear register\n\n icr: ReadWrite<u32, ICR::Register>,\n\n /// Receive data register\n", "file_path": "chips/stm32f303xc/src/usart.rs", "rank": 43, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct UarteRegisters {\n\n task_startrx: WriteOnly<u32, Task::Register>,\n\n task_stoprx: WriteOnly<u32, Task::Register>,\n\n task_starttx: WriteOnly<u32, Task::Register>,\n\n task_stoptx: WriteOnly<u32, Task::Register>,\n\n _reserved1: [u32; 7],\n\n task_flush_rx: WriteOnly<u32, Task::Register>,\n\n _reserved2: [u32; 52],\n\n event_cts: ReadWrite<u32, Event::Register>,\n\n event_ncts: ReadWrite<u32, Event::Register>,\n\n _reserved3: [u32; 2],\n\n event_endrx: ReadWrite<u32, Event::Register>,\n\n _reserved4: [u32; 3],\n\n event_endtx: ReadWrite<u32, Event::Register>,\n\n event_error: ReadWrite<u32, Event::Register>,\n\n _reserved6: [u32; 7],\n\n event_rxto: ReadWrite<u32, Event::Register>,\n\n _reserved7: [u32; 1],\n\n event_rxstarted: ReadWrite<u32, Event::Register>,\n\n event_txstarted: ReadWrite<u32, Event::Register>,\n", "file_path": 
"chips/nrf52/src/uart.rs", "rank": 44, "score": 95141.2284676903 }, { "content": "/// Wrapper for TWIS clock that ensures TWIM clock is off\n\nstruct TWISClock {\n\n master: pm::Clock,\n\n slave: Option<pm::Clock>,\n\n}\n\nimpl ClockInterface for TWISClock {\n\n fn is_enabled(&self) -> bool {\n\n let slave_clock = self.slave.expect(\"I2C: Use of slave with no clock\");\n\n slave_clock.is_enabled()\n\n }\n\n\n\n fn enable(&self) {\n\n let slave_clock = self.slave.expect(\"I2C: Use of slave with no clock\");\n\n if self.master.is_enabled() {\n\n panic!(\"I2C: Request for slave clock, but master active\");\n\n }\n\n slave_clock.enable();\n\n }\n\n\n\n fn disable(&self) {\n\n let slave_clock = self.slave.expect(\"I2C: Use of slave with no clock\");\n", "file_path": "chips/sam4l/src/i2c.rs", "rank": 45, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct SyscfgRegisters {\n\n /// memory remap register\n\n memrm: ReadWrite<u32, MEMRM::Register>,\n\n /// peripheral mode configuration register\n\n pmc: ReadWrite<u32, PMC::Register>,\n\n /// external interrupt configuration register 1\n\n exticr1: ReadWrite<u32, EXTICR1::Register>,\n\n /// external interrupt configuration register 2\n\n exticr2: ReadWrite<u32, EXTICR2::Register>,\n\n /// external interrupt configuration register 3\n\n exticr3: ReadWrite<u32, EXTICR3::Register>,\n\n /// external interrupt configuration register 4\n\n exticr4: ReadWrite<u32, EXTICR4::Register>,\n\n _reserved0: [u8; 8],\n\n /// Compensation cell control register\n\n cmpcr: ReadOnly<u32, CMPCR::Register>,\n\n}\n\n\n\nregister_bitfields![u32,\n\n MEMRM [\n", "file_path": "chips/stm32f303xc/src/syscfg.rs", "rank": 46, "score": 95141.2284676903 }, { "content": "struct Writer {}\n\n\n\nstatic mut WRITER: Writer = Writer {};\n\n\n\nimpl Write for Writer {\n\n fn write_str(&mut self, s: &str) -> ::core::fmt::Result {\n\n self.write(s.as_bytes());\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl IoWrite for Writer {\n\n fn write(&mut self, buf: &[u8]) 
{\n\n sifive::uart::Uart::new(arty_e21_chip::uart::UART0_BASE, 32_000_000).transmit_sync(buf);\n\n }\n\n}\n\n\n\n/// Panic handler.\n\n#[cfg(not(test))]\n\n#[no_mangle]\n", "file_path": "boards/arty_e21/src/io.rs", "rank": 47, "score": 95141.2284676903 }, { "content": "struct Writer {}\n\n\n\nstatic mut WRITER: Writer = Writer {};\n\n\n\nimpl Write for Writer {\n\n fn write_str(&mut self, s: &str) -> ::core::fmt::Result {\n\n self.write(s.as_bytes());\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl IoWrite for Writer {\n\n fn write(&mut self, buf: &[u8]) {\n\n unsafe {\n\n PANIC_REFERENCES.uart.unwrap().transmit_sync(buf);\n\n }\n\n }\n\n}\n\n\n\n/// Panic handler.\n", "file_path": "boards/litex/arty/src/io.rs", "rank": 48, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct GpioRegisters {\n\n /// Reserved\n\n _reserved1: [u32; 321],\n\n /// Write GPIO port\n\n /// - Address: 0x504 - 0x508\n\n out: ReadWrite<u32, Out::Register>,\n\n /// Set individual bits in GPIO port\n\n /// - Address: 0x508 - 0x50C\n\n outset: ReadWrite<u32, OutSet::Register>,\n\n /// Clear individual bits in GPIO port\n\n /// - Address: 0x50C - 0x510\n\n outclr: ReadWrite<u32, OutClr::Register>,\n\n /// Read GPIO Port\n\n /// - Address: 0x510 - 0x514\n\n in_: ReadWrite<u32, In::Register>,\n\n /// Direction of GPIO pins\n\n /// - Address: 0x514 - 0x518\n\n dir: ReadWrite<u32, Dir::Register>,\n\n /// DIR set register\n\n /// - Address: 0x518 - 0x51C\n", "file_path": "chips/nrf5x/src/gpio.rs", "rank": 49, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct FicrRegisters {\n\n /// Reserved\n\n _reserved0: [u32; 4],\n\n /// Code memory page size\n\n /// - Address: 0x010 - 0x014\n\n codepagesize: ReadOnly<u32, CodePageSize::Register>,\n\n /// Code memory size\n\n /// - Address: 0x014 - 0x018\n\n codesize: ReadOnly<u32, CodeSize::Register>,\n\n /// Reserved\n\n _reserved1: [u32; 18],\n\n /// Device identifier\n\n /// - Address: 0x060 - 0x064\n\n deviceid0: ReadOnly<u32, 
DeviceId0::Register>,\n\n /// Device identifier\n\n /// - Address: 0x064 - 0x068\n\n deviceid1: ReadOnly<u32, DeviceId1::Register>,\n\n /// Reserved\n\n _reserved2: [u32; 6],\n\n /// Encryption Root\n", "file_path": "chips/nrf52/src/ficr.rs", "rank": 50, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct RccRegisters {\n\n /// clock control register\n\n cr: ReadWrite<u32, CR::Register>,\n\n /// PLL configuration register\n\n pllcfgr: ReadWrite<u32, PLLCFGR::Register>,\n\n /// clock configuration register\n\n cfgr: ReadWrite<u32, CFGR::Register>,\n\n /// clock interrupt register\n\n cir: ReadWrite<u32, CIR::Register>,\n\n /// AHB1 peripheral reset register\n\n ahb1rstr: ReadWrite<u32, AHB1RSTR::Register>,\n\n /// AHB2 peripheral reset register\n\n ahb2rstr: ReadWrite<u32, AHB2RSTR::Register>,\n\n /// AHB3 peripheral reset register\n\n ahb3rstr: ReadWrite<u32, AHB3RSTR::Register>,\n\n _reserved0: [u8; 4],\n\n /// APB1 peripheral reset register\n\n apb1rstr: ReadWrite<u32, APB1RSTR::Register>,\n\n /// APB2 peripheral reset register\n\n apb2rstr: ReadWrite<u32, APB2RSTR::Register>,\n", "file_path": "chips/stm32f4xx/src/rcc.rs", "rank": 51, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct ExtiRegisters {\n\n /// Interrupt mask register (EXTI_IMR)\n\n imr: ReadWrite<u32, IMR::Register>,\n\n /// Event mask register (EXTI_EMR)\n\n emr: ReadWrite<u32, EMR::Register>,\n\n /// Rising Trigger selection register (EXTI_RTSR)\n\n rtsr: ReadWrite<u32, RTSR::Register>,\n\n /// Falling Trigger selection register (EXTI_FTSR)\n\n ftsr: ReadWrite<u32, FTSR::Register>,\n\n /// Software interrupt event register (EXTI_SWIER)\n\n swier: ReadWrite<u32, SWIER::Register>,\n\n /// Pending register (EXTI_PR)\n\n pr: ReadWrite<u32, PR::Register>,\n\n}\n\n\n\nregister_bitfields![u32,\n\n IMR [\n\n /// Interrupt Mask on line 0\n\n MR0 OFFSET(0) NUMBITS(1) [],\n\n /// Interrupt Mask on line 1\n", "file_path": "chips/stm32f4xx/src/exti.rs", "rank": 52, "score": 
95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct TrngRegisters {\n\n cr: WriteOnly<u32, Control::Register>,\n\n _reserved0: [u32; 3],\n\n ier: WriteOnly<u32, Interrupt::Register>,\n\n idr: WriteOnly<u32, Interrupt::Register>,\n\n imr: ReadOnly<u32, Interrupt::Register>,\n\n isr: ReadOnly<u32, Interrupt::Register>,\n\n _reserved1: [u32; 12],\n\n odata: ReadOnly<u32, OutputData::Register>,\n\n}\n\n\n\nregister_bitfields![u32,\n\n Control [\n\n /// Security Key\n\n KEY OFFSET(8) NUMBITS(24) [],\n\n /// Enables the TRNG to provide random values\n\n ENABLE OFFSET(0) NUMBITS(1) [\n\n Disable = 0,\n\n Enable = 1\n\n ]\n", "file_path": "chips/sam4l/src/trng.rs", "rank": 53, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct ClicRegisters {\n\n /// CLIC Interrupt Pending Registers\n\n clicintip: IntPendRegisters,\n\n /// CLIC Interrupt Enable Registers\n\n clicintie: IntEnableRegisters,\n\n /// CLIC Interrupt Configuration Registers\n\n clicintcfg: IntConfigRegisters,\n\n /// CLIC Configuration Registers\n\n cliccfg: ConfigRegisters,\n\n}\n\n\n\n/// Interrupt Pending Registers\n", "file_path": "arch/rv32i/src/clic.rs", "rank": 54, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct GpioRegisters {\n\n /// GPIO port mode register\n\n moder: ReadWrite<u32, MODER::Register>,\n\n /// GPIO port output type register\n\n otyper: ReadWrite<u32, OTYPER::Register>,\n\n /// GPIO port output speed register\n\n ospeedr: ReadWrite<u32, OSPEEDR::Register>,\n\n /// GPIO port pull-up/pull-down register\n\n pupdr: ReadWrite<u32, PUPDR::Register>,\n\n /// GPIO port input data register\n\n idr: ReadOnly<u32, IDR::Register>,\n\n /// GPIO port output data register\n\n odr: ReadWrite<u32, ODR::Register>,\n\n /// GPIO port bit set/reset register\n\n bsrr: WriteOnly<u32, BSRR::Register>,\n\n /// GPIO port configuration lock register\n\n lckr: ReadWrite<u32, LCKR::Register>,\n\n /// GPIO alternate function low register\n\n afrl: ReadWrite<u32, AFRL::Register>,\n\n /// 
GPIO alternate function high register\n", "file_path": "chips/stm32f303xc/src/gpio.rs", "rank": 55, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct PmRegisters {\n\n mcctrl: ReadWrite<u32, MainClockControl::Register>,\n\n cpusel: ReadWrite<u32, CpuClockSelect::Register>,\n\n _reserved1: u32,\n\n pbasel: ReadWrite<u32, PeripheralBusXClockSelect::Register>,\n\n pbbsel: ReadWrite<u32, PeripheralBusXClockSelect::Register>,\n\n pbcsel: ReadWrite<u32, PeripheralBusXClockSelect::Register>,\n\n pbdsel: ReadWrite<u32, PeripheralBusXClockSelect::Register>,\n\n _reserved2: u32,\n\n cpumask: ReadWrite<u32, ClockMaskCpu::Register>, // 0x020\n\n hsbmask: ReadWrite<u32, ClockMaskHsb::Register>,\n\n pbamask: ReadWrite<u32, ClockMaskPba::Register>,\n\n pbbmask: ReadWrite<u32, ClockMaskPbb::Register>,\n\n pbcmask: ReadWrite<u32, ClockMaskPbc::Register>,\n\n pbdmask: ReadWrite<u32, ClockMaskPbd::Register>,\n\n _reserved3: [u32; 2],\n\n pbadivmask: ReadWrite<u32, DividedClockMask::Register>, // 0x040\n\n _reserved4: [u32; 4],\n\n cfdctrl: ReadWrite<u32, ClockFailureDetectorControl::Register>,\n\n unlock: WriteOnly<u32, PmUnlock::Register>,\n", "file_path": "chips/sam4l/src/pm.rs", "rank": 56, "score": 95141.2284676903 }, { "content": "/// Wrapper for TWIM clock that ensures TWIS clock is off\n\nstruct TWIMClock {\n\n master: pm::Clock,\n\n slave: Option<pm::Clock>,\n\n}\n\nimpl ClockInterface for TWIMClock {\n\n fn is_enabled(&self) -> bool {\n\n self.master.is_enabled()\n\n }\n\n\n\n fn enable(&self) {\n\n self.slave.map(|slave_clock| {\n\n if slave_clock.is_enabled() {\n\n panic!(\"I2C: Request for master clock, but slave active\");\n\n }\n\n });\n\n self.master.enable();\n\n }\n\n\n\n fn disable(&self) {\n\n self.master.disable();\n\n }\n\n}\n\n\n", "file_path": "chips/sam4l/src/i2c.rs", "rank": 57, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\n#[allow(dead_code)]\n\nstruct DMARegisters {\n\n mar: ReadWrite<u32, MemoryAddress::Register>,\n\n psr: 
VolatileCell<DMAPeripheral>,\n\n _psr_padding: [u8; 3],\n\n tcr: ReadWrite<u32, TransferCounter::Register>,\n\n marr: ReadWrite<u32, MemoryAddressReload::Register>,\n\n tcrr: ReadWrite<u32, TransferCounter::Register>,\n\n cr: WriteOnly<u32, Control::Register>,\n\n mr: ReadWrite<u32, Mode::Register>,\n\n sr: ReadOnly<u32, Status::Register>,\n\n ier: WriteOnly<u32, Interrupt::Register>,\n\n idr: WriteOnly<u32, Interrupt::Register>,\n\n imr: ReadOnly<u32, Interrupt::Register>,\n\n isr: ReadOnly<u32, Interrupt::Register>,\n\n}\n\n\n\nregister_bitfields![u32,\n\n MemoryAddress [\n\n MADDR OFFSET(0) NUMBITS(32) []\n\n ],\n", "file_path": "chips/sam4l/src/dma.rs", "rank": 58, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct RccRegisters {\n\n /// clock control register\n\n cr: ReadWrite<u32, CR::Register>,\n\n /// clock configuration register\n\n cfgr: ReadWrite<u32, CFGR::Register>,\n\n /// clock interrupt register\n\n cir: ReadWrite<u32, CIR::Register>,\n\n /// APB2 peripheral reset register\n\n apb2rstr: ReadWrite<u32, APB2RSTR::Register>,\n\n /// APB1 peripheral reset register\n\n apb1rstr: ReadWrite<u32, APB1RSTR::Register>,\n\n /// AHB peripheral clock register\n\n ahbenr: ReadWrite<u32, AHBENR::Register>,\n\n /// APB2 peripheral clock enable register\n\n apb2enr: ReadWrite<u32, APB2ENR::Register>,\n\n /// APB1 peripheral clock enable register\n\n apb1enr: ReadWrite<u32, APB1ENR::Register>,\n\n /// Backup domain control register\n\n bdcr: ReadWrite<u32, BDCR::Register>,\n\n /// clock control & status register\n", "file_path": "chips/stm32f303xc/src/rcc.rs", "rank": 59, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct AesRegisters {\n\n ctrl: ReadWrite<u32, Control::Register>, // 0x00\n\n mode: ReadWrite<u32, Mode::Register>, // 0x04\n\n databufptr: ReadWrite<u32, DataBuf::Register>, // 0x08\n\n sr: ReadOnly<u32, Status::Register>, // 0x0c\n\n ier: WriteOnly<u32, Interrupt::Register>, // 0x10\n\n idr: WriteOnly<u32, Interrupt::Register>, 
// 0x14\n\n imr: ReadOnly<u32, Interrupt::Register>, // 0x18\n\n _reserved0: [u32; 1], // 0x1c\n\n key0: WriteOnly<u32, Key::Register>, // 0x20\n\n key1: WriteOnly<u32, Key::Register>, // 0x24\n\n key2: WriteOnly<u32, Key::Register>, // 0x28\n\n key3: WriteOnly<u32, Key::Register>, // 0x2c\n\n key4: WriteOnly<u32, Key::Register>, // 0x30\n\n key5: WriteOnly<u32, Key::Register>, // 0x34\n\n key6: WriteOnly<u32, Key::Register>, // 0x38\n\n key7: WriteOnly<u32, Key::Register>, // 0x3c\n\n initvect0: WriteOnly<u32, InitVector::Register>, // 0x40\n\n initvect1: WriteOnly<u32, InitVector::Register>, // 0x44\n\n initvect2: WriteOnly<u32, InitVector::Register>, // 0x48\n", "file_path": "chips/sam4l/src/aes.rs", "rank": 60, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct TempRegisters {\n\n /// Start temperature measurement\n\n /// Address: 0x000 - 0x004\n\n pub task_start: WriteOnly<u32, Task::Register>,\n\n /// Stop temperature measurement\n\n /// Address: 0x004 - 0x008\n\n pub task_stop: WriteOnly<u32, Task::Register>,\n\n /// Reserved\n\n pub _reserved1: [u32; 62],\n\n /// Temperature measurement complete, data ready\n\n /// Address: 0x100 - 0x104\n\n pub event_datardy: ReadWrite<u32, Event::Register>,\n\n /// Reserved\n\n // Note, `inten` register on nRF51 is ignored because it's not supported by nRF52\n\n // And intenset and intenclr provide the same functionality\n\n pub _reserved2: [u32; 128],\n\n /// Enable interrupt\n\n /// Address: 0x304 - 0x308\n\n pub intenset: ReadWrite<u32, Intenset::Register>,\n\n /// Disable interrupt\n", "file_path": "chips/nrf5x/src/temperature.rs", "rank": 61, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct FlashcalwRegisters {\n\n fcr: ReadWrite<u32, FlashControl::Register>,\n\n fcmd: ReadWrite<u32, FlashCommand::Register>,\n\n fsr: ReadOnly<u32, FlashStatus::Register>,\n\n fpr: ReadOnly<u32, FlashParameter::Register>,\n\n fvr: ReadOnly<u32, FlashVersion::Register>,\n\n fgpfrhi: ReadOnly<u32, 
FlashGeneralPurposeFuseHigh::Register>,\n\n fgpfrlo: ReadOnly<u32, FlashGeneralPurposeFuseLow::Register>,\n\n _reserved1: [u32; 251],\n\n ctrl: WriteOnly<u32, PicoCacheControl::Register>,\n\n sr: ReadWrite<u32, PicoCacheStatus::Register>,\n\n _reserved2: [u32; 4],\n\n maint0: WriteOnly<u32, PicoCacheMaintenance0::Register>,\n\n maint1: WriteOnly<u32, PicoCacheMaintenance1::Register>,\n\n mcfg: ReadWrite<u32, PicoCacheMonitorConfiguration::Register>,\n\n men: ReadWrite<u32, PicoCacheMonitorEnable::Register>,\n\n mctrl: WriteOnly<u32, PicoCacheMonitorStatus::Register>,\n\n msr: ReadOnly<u32, PicoCacheMonitorStatus::Register>,\n\n}\n\n\n", "file_path": "chips/sam4l/src/flashcalw.rs", "rank": 62, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct BscifRegisters {\n\n ier: WriteOnly<u32, Interrupt::Register>,\n\n idr: WriteOnly<u32, Interrupt::Register>,\n\n imr: ReadOnly<u32, Interrupt::Register>,\n\n isr: ReadOnly<u32, Interrupt::Register>,\n\n icr: WriteOnly<u32, Interrupt::Register>,\n\n pclksr: ReadOnly<u32, PowerClocksStatus::Register>,\n\n unlock: WriteOnly<u32, Unlock::Register>,\n\n _reserved0: u32,\n\n oscctrl32: ReadWrite<u32, Oscillator32Control::Register>,\n\n rc32kcr: ReadWrite<u32, RC32Control::Register>,\n\n rc32ktune: ReadWrite<u32, RC32kTuning::Register>,\n\n bod33ctrl: ReadWrite<u32, BodControl::Register>,\n\n bod33level: ReadWrite<u32, BodLevel::Register>,\n\n bod33sampling: ReadWrite<u32, BodSamplingControl::Register>,\n\n bod18ctrl: ReadWrite<u32, BodControl::Register>,\n\n bot18level: ReadWrite<u32, BodLevel::Register>,\n\n bod18sampling: ReadWrite<u32, BodSamplingControl::Register>,\n\n vregcr: ReadWrite<u32, VoltageRegulatorConfig::Register>,\n\n _reserved1: [u32; 4],\n", "file_path": "chips/sam4l/src/bscif.rs", "rank": 63, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct UsartRegisters {\n\n cr: WriteOnly<u32, Control::Register>, // 0x00\n\n mr: ReadWrite<u32, Mode::Register>, // 0x04\n\n ier: WriteOnly<u32, 
Interrupt::Register>, // 0x08\n\n idr: WriteOnly<u32, Interrupt::Register>, // 0x0C\n\n imr: ReadOnly<u32, Interrupt::Register>, // 0x10\n\n csr: ReadOnly<u32, ChannelStatus::Register>, // 0x14\n\n rhr: ReadOnly<u32, ReceiverHold::Register>, // 0x18\n\n thr: WriteOnly<u32, TransmitHold::Register>, // 0x1C\n\n brgr: ReadWrite<u32, BaudRate::Register>, // 0x20\n\n rtor: ReadWrite<u32, RxTimeout::Register>, // 0x24\n\n ttgr: ReadWrite<u32, TxTimeGuard::Register>, // 0x28\n\n _reserved0: [ReadOnly<u32>; 5],\n\n fidi: ReadWrite<u32, FidiRatio::Register>, // 0x40\n\n ner: ReadOnly<u32, NumErrors::Register>, // 0x44\n\n _reserved1: ReadOnly<u32>,\n\n ifr: ReadWrite<u32, IrdaFilter::Register>, // 0x4C\n\n man: ReadWrite<u32, Manchester::Register>, // 0x50\n\n linmr: ReadWrite<u32, LinMode::Register>, // 0x54\n\n linir: ReadWrite<u32, LinID::Register>, // 0x58\n", "file_path": "chips/sam4l/src/usart.rs", "rank": 64, "score": 95141.2284676903 }, { "content": "struct DetachablePin {\n\n pin: &'static GPIOPin<'static>,\n\n function: Option<PeripheralFunction>,\n\n}\n\n\n\nimpl DetachablePin {\n\n fn detach(&self) {\n\n self.pin.configure(None);\n\n self.pin.enable_output();\n\n self.pin.clear();\n\n }\n\n\n\n fn restore(&self) {\n\n self.pin.configure(self.function);\n\n }\n\n}\n\n\n", "file_path": "boards/imix/src/power.rs", "rank": 65, "score": 95141.2284676903 }, { "content": "/// A structure representing this platform that holds references to all\n\n/// capsules for this platform.\n\nstruct STM32F3Discovery {\n\n console: &'static capsules::console::Console<'static>,\n\n ipc: kernel::ipc::IPC<NUM_PROCS>,\n\n gpio: &'static capsules::gpio::GPIO<'static, stm32f303xc::gpio::Pin<'static>>,\n\n led: &'static capsules::led::LedDriver<\n\n 'static,\n\n LedHigh<'static, stm32f303xc::gpio::Pin<'static>>,\n\n >,\n\n button: &'static capsules::button::Button<'static, stm32f303xc::gpio::Pin<'static>>,\n\n ninedof: &'static capsules::ninedof::NineDof<'static>,\n\n l3gd20: &'static 
capsules::l3gd20::L3gd20Spi<'static>,\n\n lsm303dlhc: &'static capsules::lsm303dlhc::Lsm303dlhcI2C<'static>,\n\n temp: &'static capsules::temperature::TemperatureSensor<'static>,\n\n alarm: &'static capsules::alarm::AlarmDriver<\n\n 'static,\n\n VirtualMuxAlarm<'static, stm32f303xc::tim2::Tim2<'static>>,\n\n >,\n\n adc: &'static capsules::adc::AdcVirtualized<'static>,\n\n nonvolatile_storage: &'static capsules::nonvolatile_storage_driver::NonvolatileStorage<'static>,\n\n}\n", "file_path": "boards/stm32f3discovery/src/main.rs", "rank": 66, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct SyscfgRegisters {\n\n /// memory remap register\n\n memrm: ReadWrite<u32, MEMRM::Register>,\n\n /// peripheral mode configuration register\n\n pmc: ReadWrite<u32, PMC::Register>,\n\n /// external interrupt configuration register 1\n\n exticr1: ReadWrite<u32, EXTICR1::Register>,\n\n /// external interrupt configuration register 2\n\n exticr2: ReadWrite<u32, EXTICR2::Register>,\n\n /// external interrupt configuration register 3\n\n exticr3: ReadWrite<u32, EXTICR3::Register>,\n\n /// external interrupt configuration register 4\n\n exticr4: ReadWrite<u32, EXTICR4::Register>,\n\n _reserved0: [u8; 8],\n\n /// Compensation cell control register\n\n cmpcr: ReadOnly<u32, CMPCR::Register>,\n\n}\n\n\n\nregister_bitfields![u32,\n\n MEMRM [\n", "file_path": "chips/stm32f4xx/src/syscfg.rs", "rank": 67, "score": 95141.2284676903 }, { "content": "struct BaudFraction {\n\n frac: f32,\n\n reg_val: u8,\n\n}\n\n\n\n#[rustfmt::skip]\n\n// Table out of the datasheet to correct the baudrate\n\nconst BAUD_FRACTIONS: &'static [BaudFraction; 36] = &[\n\n BaudFraction { frac: 0.0000, reg_val: 0x00 },\n\n BaudFraction { frac: 0.0529, reg_val: 0x01 },\n\n BaudFraction { frac: 0.0715, reg_val: 0x02 },\n\n BaudFraction { frac: 0.0835, reg_val: 0x04 },\n\n BaudFraction { frac: 0.1001, reg_val: 0x08 },\n\n BaudFraction { frac: 0.1252, reg_val: 0x10 },\n\n BaudFraction { frac: 0.1430, reg_val: 0x20 
},\n\n BaudFraction { frac: 0.1670, reg_val: 0x11 },\n\n BaudFraction { frac: 0.2147, reg_val: 0x21 },\n\n BaudFraction { frac: 0.2224, reg_val: 0x22 },\n\n BaudFraction { frac: 0.2503, reg_val: 0x44 },\n\n BaudFraction { frac: 0.3000, reg_val: 0x25 },\n", "file_path": "chips/msp432/src/uart.rs", "rank": 68, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct AstRegisters {\n\n cr: ReadWrite<u32, Control::Register>,\n\n cv: ReadWrite<u32, Value::Register>,\n\n sr: ReadOnly<u32, Status::Register>,\n\n scr: WriteOnly<u32, Interrupt::Register>,\n\n ier: WriteOnly<u32, Interrupt::Register>,\n\n idr: WriteOnly<u32, Interrupt::Register>,\n\n imr: ReadOnly<u32, Interrupt::Register>,\n\n wer: ReadWrite<u32, Event::Register>,\n\n // 0x20\n\n ar0: ReadWrite<u32, Value::Register>,\n\n ar1: ReadWrite<u32, Value::Register>,\n\n _reserved0: [u32; 2],\n\n pir0: ReadWrite<u32, PeriodicInterval::Register>,\n\n pir1: ReadWrite<u32, PeriodicInterval::Register>,\n\n _reserved1: [u32; 2],\n\n // 0x40\n\n clock: ReadWrite<u32, ClockControl::Register>,\n\n dtr: ReadWrite<u32, DigitalTuner::Register>,\n\n eve: WriteOnly<u32, Event::Register>,\n", "file_path": "chips/sam4l/src/ast.rs", "rank": 69, "score": 95141.2284676903 }, { "content": "struct Writer {}\n\n\n\nstatic mut WRITER: Writer = Writer {};\n\n\n\nimpl Write for Writer {\n\n fn write_str(&mut self, s: &str) -> ::core::fmt::Result {\n\n self.write(s.as_bytes());\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl IoWrite for Writer {\n\n fn write(&mut self, buf: &[u8]) {\n\n // This creates a second instance of the UART peripheral, and should only be used\n\n // during panic.\n\n earlgrey::uart::Uart::new(\n\n earlgrey::uart::UART0_BASE,\n\n earlgrey::chip_config::CONFIG.peripheral_freq,\n\n )\n\n .transmit_sync(buf);\n", "file_path": "boards/earlgrey-nexysvideo/src/io.rs", "rank": 71, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct Tim2Registers {\n\n /// control register 1\n\n cr1: ReadWrite<u32, CR1::Register>,\n\n 
/// control register 2\n\n cr2: ReadWrite<u32, CR2::Register>,\n\n /// slave mode control register\n\n smcr: ReadWrite<u32, SMCR::Register>,\n\n /// DMA/Interrupt enable register\n\n dier: ReadWrite<u32, DIER::Register>,\n\n /// status register\n\n sr: ReadWrite<u32, SR::Register>,\n\n /// event generation register\n\n egr: WriteOnly<u32, EGR::Register>,\n\n /// capture/compare mode register 1 (output mode)\n\n ccmr1_output: ReadWrite<u32, CCMR1_Output::Register>,\n\n /// capture/compare mode register 2 (output mode)\n\n ccmr2_output: ReadWrite<u32, CCMR2_Output::Register>,\n\n /// capture/compare enable register\n\n ccer: ReadWrite<u32, CCER::Register>,\n\n /// counter\n", "file_path": "chips/stm32f303xc/src/tim2.rs", "rank": 72, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct Dma1Registers {\n\n /// low interrupt status register\n\n lisr: ReadOnly<u32, LISR::Register>,\n\n /// high interrupt status register\n\n hisr: ReadOnly<u32, HISR::Register>,\n\n /// low interrupt flag clear register\n\n lifcr: ReadWrite<u32, LIFCR::Register>,\n\n /// high interrupt flag clear register\n\n hifcr: ReadWrite<u32, HIFCR::Register>,\n\n /// stream x configuration register\n\n s0cr: ReadWrite<u32, S0CR::Register>,\n\n /// stream x number of data register\n\n s0ndtr: ReadWrite<u32>,\n\n /// stream x peripheral address register\n\n s0par: ReadWrite<u32>,\n\n /// stream x memory 0 address register\n\n s0m0ar: ReadWrite<u32>,\n\n /// stream x memory 1 address register\n\n s0m1ar: ReadWrite<u32>,\n\n /// stream x FIFO control register\n", "file_path": "chips/stm32f4xx/src/dma1.rs", "rank": 73, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct AdcRegisters {\n\n isr: ReadWrite<u32, ISR::Register>,\n\n ier: ReadWrite<u32, IER::Register>,\n\n cr: ReadWrite<u32, CR::Register>,\n\n cfgr: ReadWrite<u32, CFGR::Register>,\n\n\n\n _reserved0: [u32; 1],\n\n smpr1: ReadWrite<u32, SMPR1::Register>,\n\n smpr2: ReadWrite<u32, SMPR2::Register>,\n\n\n\n _reserved1: [u32; 
1],\n\n tr1: ReadWrite<u32, TR1::Register>,\n\n tr2: ReadWrite<u32, TR2::Register>,\n\n tr3: ReadWrite<u32, TR3::Register>,\n\n\n\n _reserved2: [u32; 1],\n\n sqr1: ReadWrite<u32, SQR1::Register>,\n\n sqr2: ReadWrite<u32, SQR2::Register>,\n\n sqr3: ReadWrite<u32, SQR3::Register>,\n\n sqr4: ReadWrite<u32, SQR4::Register>,\n", "file_path": "chips/stm32f303xc/src/adc.rs", "rank": 74, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct GpioRegisters {\n\n gper: Register,\n\n pmr0: Register,\n\n pmr1: Register,\n\n pmr2: Register,\n\n oder: Register,\n\n ovr: Register,\n\n pvr: ReadOnly<u32>,\n\n _reserved0: [u32; 3],\n\n puer: Register,\n\n pder: Register,\n\n ier: Register,\n\n imr0: Register,\n\n imr1: Register,\n\n gfer: Register,\n\n ifr: RegisterRC,\n\n _reserved1: [u32; 8],\n\n ocdr0: Register,\n\n ocdr1: Register,\n\n _reserved2: [u32; 4],\n", "file_path": "chips/sam4l/src/gpio.rs", "rank": 75, "score": 95141.2284676903 }, { "content": "struct Writer {\n\n initialized: bool,\n\n}\n\n\n\nstatic mut WRITER: Writer = Writer { initialized: false };\n\n\n\nimpl Write for Writer {\n\n fn write_str(&mut self, s: &str) -> ::core::fmt::Result {\n\n self.write(s.as_bytes());\n\n Ok(())\n\n }\n\n}\n\n\n\nconst BUF_LEN: usize = 512;\n\nstatic mut STATIC_PANIC_BUF: [u8; BUF_LEN] = [0; BUF_LEN];\n\n\n\nstatic mut DUMMY: DummyUsbClient = DummyUsbClient {\n\n fired: VolatileCell::new(false),\n\n};\n\n\n", "file_path": "boards/clue_nrf52840/src/io.rs", "rank": 76, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct AdcRegisters {\n\n sr: ReadWrite<u32, SR::Register>,\n\n cr1: ReadWrite<u32, CR1::Register>,\n\n cr2: ReadWrite<u32, CR2::Register>,\n\n smpr1: ReadWrite<u32, SMPR1::Register>,\n\n smpr2: ReadWrite<u32, SMPR2::Register>,\n\n jofr1: ReadWrite<u32, JOFR::Register>,\n\n jofr2: ReadWrite<u32, JOFR::Register>,\n\n jofr3: ReadWrite<u32, JOFR::Register>,\n\n jofr4: ReadWrite<u32, JOFR::Register>,\n\n htr: ReadWrite<u32, HTR::Register>,\n\n ltr: 
ReadWrite<u32, LTR::Register>,\n\n sqr1: ReadWrite<u32, SQR1::Register>,\n\n sqr2: ReadWrite<u32, SQR2::Register>,\n\n sqr3: ReadWrite<u32, SQR3::Register>,\n\n jsqr: ReadWrite<u32, JSQR::Register>,\n\n jdr1: ReadOnly<u32, JDR::Register>,\n\n jdr2: ReadOnly<u32, JDR::Register>,\n\n jdr3: ReadOnly<u32, JDR::Register>,\n\n jdr4: ReadOnly<u32, JDR::Register>,\n\n dr: ReadOnly<u32, DR::Register>,\n\n}\n\n\n", "file_path": "chips/stm32f4xx/src/adc.rs", "rank": 77, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct CrccuRegisters {\n\n // From page 1005 of SAM4L manual\n\n dscr: ReadWrite<u32, DescriptorBaseAddress::Register>,\n\n _reserved0: u32,\n\n dmaen: WriteOnly<u32, DmaEnable::Register>,\n\n dmadis: WriteOnly<u32, DmaDisable::Register>,\n\n dmasr: ReadOnly<u32, DmaStatus::Register>,\n\n dmaier: WriteOnly<u32, DmaInterrupt::Register>,\n\n dmaidr: WriteOnly<u32, DmaInterrupt::Register>,\n\n dmaimr: ReadOnly<u32, DmaInterrupt::Register>,\n\n dmaisr: ReadOnly<u32, DmaInterrupt::Register>,\n\n _reserved1: [u32; 4],\n\n cr: WriteOnly<u32, Control::Register>,\n\n mr: ReadWrite<u32, Mode::Register>,\n\n sr: ReadOnly<u32, Status::Register>,\n\n ier: WriteOnly<u32, Interrupt::Register>,\n\n idr: WriteOnly<u32, Interrupt::Register>,\n\n imr: ReadOnly<u32, Interrupt::Register>,\n\n isr: ReadOnly<u32, Interrupt::Register>,\n\n}\n", "file_path": "chips/sam4l/src/crccu.rs", "rank": 78, "score": 95141.2284676903 }, { "content": "struct Writer {}\n\n\n\nstatic mut WRITER: Writer = Writer {};\n\n\n\nimpl Write for Writer {\n\n fn write_str(&mut self, s: &str) -> ::core::fmt::Result {\n\n self.write(s.as_bytes());\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl IoWrite for Writer {\n\n fn write(&mut self, buf: &[u8]) {\n\n unsafe {\n\n PANIC_REFERENCES.uart.unwrap().transmit_sync(buf);\n\n }\n\n }\n\n}\n\n\n\n/// Panic handler.\n", "file_path": "boards/litex/sim/src/io.rs", "rank": 79, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct AdcRegisters {\n\n /// Start the 
ADC and prepare the result buffer in RAM\n\n tasks_start: WriteOnly<u32, TASK::Register>,\n\n /// Take one ADC sample, if scan is enabled all channels are sampled\n\n tasks_sample: WriteOnly<u32, TASK::Register>,\n\n /// Stop the ADC and terminate any on-going conversion\n\n tasks_stop: WriteOnly<u32, TASK::Register>,\n\n /// Starts offset auto-calibration\n\n tasks_calibrateoffset: WriteOnly<u32, TASK::Register>,\n\n _reserved0: [u8; 240],\n\n /// The ADC has started\n\n events_started: ReadWrite<u32, EVENT::Register>,\n\n /// The ADC has filled up the Result buffer\n\n events_end: ReadWrite<u32, EVENT::Register>,\n\n /// A conversion task has been completed. Depending on the mode, multiple conversion\n\n events_done: ReadWrite<u32, EVENT::Register>,\n\n /// A result is ready to get transferred to RAM\n\n events_resultdone: ReadWrite<u32, EVENT::Register>,\n\n /// Calibration is complete\n\n events_calibratedone: ReadWrite<u32, EVENT::Register>,\n", "file_path": "chips/nrf52/src/adc.rs", "rank": 80, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct SpiRegisters {\n\n /// control register 1\n\n cr1: ReadWrite<u32, CR1::Register>,\n\n /// control register 2\n\n cr2: ReadWrite<u32, CR2::Register>,\n\n /// status register\n\n sr: ReadWrite<u32, SR::Register>,\n\n // this should be _reserved: [u8; 3], but it does not work,\n\n // packing is correct, but writing to the data register does not work\n\n // leaving it commented out until an upgrade to packed data is written\n\n /// data register\n\n dr: ReadWrite<u8, DR::Register>,\n\n /// CRC polynomial register\n\n crcpr: ReadWrite<u32, CRCPR::Register>,\n\n /// RX CRC register\n\n rxcrcr: ReadOnly<u32, RXCRCR::Register>,\n\n /// TX CRC register\n\n txcrcr: ReadOnly<u32, TXCRCR::Register>,\n\n /// I2S configuration register\n\n i2scfgr: ReadWrite<u32, I2SCFGR::Register>,\n", "file_path": "chips/stm32f303xc/src/spi.rs", "rank": 81, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct 
SystickRegisters {\n\n syst_csr: ReadWrite<u32, ControlAndStatus::Register>,\n\n syst_rvr: ReadWrite<u32, ReloadValue::Register>,\n\n syst_cvr: ReadWrite<u32, CurrentValue::Register>,\n\n syst_calib: ReadOnly<u32, CalibrationValue::Register>,\n\n}\n\n\n\nregister_bitfields![u32,\n\n ControlAndStatus [\n\n /// Returns 1 if timer counted to 0 since last time this was read.\n\n COUNTFLAG 16,\n\n\n\n /// Clock source is (0) External Clock or (1) Processor Clock.\n\n CLKSOURCE 2,\n\n\n\n /// Set to 1 to enable SysTick exception request.\n\n TICKINT 1,\n\n\n\n /// Enable the counter (1 == Enabled).\n\n ENABLE 0\n", "file_path": "arch/cortex-m/src/systick.rs", "rank": 82, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\n#[allow(dead_code)]\n\nstruct TWISRegisters {\n\n cr: ReadWrite<u32, ControlSlave::Register>,\n\n nbytes: ReadWrite<u32, Nbytes::Register>,\n\n tr: ReadWrite<u32, Timing::Register>,\n\n rhr: ReadOnly<u32, ReceiveHolding::Register>,\n\n thr: WriteOnly<u32, TransmitHolding::Register>,\n\n pecr: ReadOnly<u32, PacketErrorCheck::Register>,\n\n sr: ReadOnly<u32, StatusSlave::Register>,\n\n ier: WriteOnly<u32, InterruptSlave::Register>,\n\n idr: WriteOnly<u32, InterruptSlave::Register>,\n\n imr: ReadOnly<u32, InterruptSlave::Register>,\n\n scr: WriteOnly<u32, StatusClearSlave::Register>,\n\n pr: ReadOnly<u32>,\n\n vr: ReadOnly<u32>,\n\n hstr: ReadWrite<u32>,\n\n srr: ReadWrite<u32, SlewRateSlave::Register>,\n\n hssrr: ReadWrite<u32>,\n\n}\n\n\n\nregister_bitfields![u32,\n", "file_path": "chips/sam4l/src/i2c.rs", "rank": 83, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\n#[allow(dead_code)]\n\nstruct TWIMRegisters {\n\n cr: WriteOnly<u32, Control::Register>,\n\n cwgr: ReadWrite<u32, ClockWaveformGenerator::Register>,\n\n smbtr: ReadWrite<u32, SmbusTiming::Register>,\n\n cmdr: ReadWrite<u32, Command::Register>,\n\n ncmdr: ReadWrite<u32, Command::Register>,\n\n rhr: ReadOnly<u32, ReceiveHolding::Register>,\n\n thr: WriteOnly<u32, 
TransmitHolding::Register>,\n\n sr: ReadOnly<u32, Status::Register>,\n\n ier: WriteOnly<u32, Interrupt::Register>,\n\n idr: WriteOnly<u32, Interrupt::Register>,\n\n imr: ReadOnly<u32, Interrupt::Register>,\n\n scr: WriteOnly<u32, StatusClear::Register>,\n\n pr: ReadOnly<u32>,\n\n vr: ReadOnly<u32>,\n\n hscwgr: ReadWrite<u32>,\n\n srr: ReadWrite<u32, SlewRate::Register>,\n\n hssrr: ReadWrite<u32>,\n\n}\n\n\n\n// Listing of all registers related to the TWIS peripheral.\n\n// Section 28.9 of the datasheet\n", "file_path": "chips/sam4l/src/i2c.rs", "rank": 84, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct TwimRegisters {\n\n /// Start TWI receive sequence\n\n tasks_startrx: WriteOnly<u32, TASK::Register>,\n\n _reserved0: [u8; 4],\n\n /// Start TWI transmit sequence\n\n tasks_starttx: WriteOnly<u32, TASK::Register>,\n\n _reserved1: [u8; 8],\n\n /// Stop TWI transaction\n\n tasks_stop: WriteOnly<u32, TASK::Register>,\n\n _reserved2: [u8; 4],\n\n /// Suspend TWI transaction\n\n tasks_suspend: WriteOnly<u32, TASK::Register>,\n\n /// Resume TWI transaction\n\n tasks_resume: WriteOnly<u32, TASK::Register>,\n\n _reserved3: [u8; 224],\n\n /// TWI stopped\n\n events_stopped: ReadWrite<u32, EVENT::Register>,\n\n _reserved4: [u8; 28],\n\n /// TWI error\n\n events_error: ReadWrite<u32, EVENT::Register>,\n", "file_path": "chips/nrf52/src/i2c.rs", "rank": 85, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct PpiRegisters {\n\n tasks_chg0_en: ReadWrite<u32, Control::Register>,\n\n tasks_chg0_dis: ReadWrite<u32, Control::Register>,\n\n tasks_chg1_en: ReadWrite<u32, Control::Register>,\n\n tasks_chg1_dis: ReadWrite<u32, Control::Register>,\n\n tasks_chg2_en: ReadWrite<u32, Control::Register>,\n\n tasks_chg2_dis: ReadWrite<u32, Control::Register>,\n\n tasks_chg3_en: ReadWrite<u32, Control::Register>,\n\n tasks_chg3_dis: ReadWrite<u32, Control::Register>,\n\n tasks_chg4_en: ReadWrite<u32, Control::Register>,\n\n tasks_chg4_dis: ReadWrite<u32, 
Control::Register>,\n\n tasks_chg5_en: ReadWrite<u32, Control::Register>,\n\n tasks_chg5_dis: ReadWrite<u32, Control::Register>,\n\n _reserved1: [u32; 308],\n\n chen: ReadWrite<u32, Channel::Register>,\n\n chenset: ReadWrite<u32, Channel::Register>,\n\n chenclr: ReadWrite<u32, Channel::Register>,\n\n ch0_eep: ReadWrite<u32, EventEndPoint::Register>,\n\n ch0_tep: ReadWrite<u32, TaskEndPoint::Register>,\n\n ch1_eep: ReadWrite<u32, EventEndPoint::Register>,\n", "file_path": "chips/nrf52/src/ppi.rs", "rank": 86, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct DbgRegisters {\n\n /// IDCODE\n\n dbgmcu_idcode: ReadOnly<u32, DBGMCU_IDCODE::Register>,\n\n /// Control Register\n\n dbgmcu_cr: ReadWrite<u32, DBGMCU_CR::Register>,\n\n /// Debug MCU APB1 Freeze registe\n\n dbgmcu_apb1_fz: ReadWrite<u32, DBGMCU_APB1_FZ::Register>,\n\n /// Debug MCU APB2 Freeze registe\n\n dbgmcu_apb2_fz: ReadWrite<u32, DBGMCU_APB2_FZ::Register>,\n\n}\n\n\n\nregister_bitfields![u32,\n\n DBGMCU_IDCODE [\n\n /// DEV_ID\n\n DEV_ID OFFSET(0) NUMBITS(12) [],\n\n /// REV_ID\n\n REV_ID OFFSET(16) NUMBITS(16) []\n\n ],\n\n DBGMCU_CR [\n\n /// DBG_SLEEP\n", "file_path": "chips/stm32f4xx/src/dbg.rs", "rank": 87, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct RegisterRC {\n\n val: ReadOnly<u32>,\n\n reserved0: u32,\n\n clear: WriteOnly<u32>,\n\n reserved1: u32,\n\n}\n\n\n", "file_path": "chips/sam4l/src/gpio.rs", "rank": 88, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct GpioteRegisters {\n\n /// Task for writing to pin specified in CONFIG\\[n\\].PSEL.\n\n /// Action on pin is configured in CONFIG\\[n\\].POLARITY\n\n ///\n\n /// - Address: 0x000 - 0x010 (nRF51)\n\n /// - Address: 0x000 - 0x020 (nRF52)\n\n task_out: [ReadWrite<u32, TasksOut::Register>; NUM_GPIOTE],\n\n /// Reserved\n\n // task_set and task_clear are not used on nRF52\n\n _reserved0: [u8; 0x100 - (0x0 + NUM_GPIOTE * 4)],\n\n /// Event generated from pin specified in CONFIG\\[n\\].PSEL\n\n 
///\n\n /// - Address: 0x100 - 0x110 (nRF51)\n\n /// - Address: 0x100 - 0x120 (nRF52)\n\n event_in: [ReadWrite<u32, EventsIn::Register>; NUM_GPIOTE],\n\n /// Reserved\n\n _reserved1: [u8; 0x17C - (0x100 + NUM_GPIOTE * 4)],\n\n /// Event generated from multiple input GPIO pins\n\n /// - Address: 0x17C - 0x180\n\n event_port: ReadWrite<u32, EventsPort::Register>,\n", "file_path": "chips/nrf5x/src/gpio.rs", "rank": 89, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct NvmcRegisters {\n\n /// Ready flag\n\n /// Address 0x400 - 0x404\n\n pub ready: ReadOnly<u32, Ready::Register>,\n\n /// Reserved\n\n _reserved1: [u32; 64],\n\n /// Configuration register\n\n /// Address: 0x504 - 0x508\n\n pub config: ReadWrite<u32, Configuration::Register>,\n\n /// Register for erasing a page in Code area\n\n /// Address: 0x508 - 0x50C\n\n pub erasepage: ReadWrite<u32, ErasePage::Register>,\n\n /// Register for erasing all non-volatile user memory\n\n /// Address: 0x50C - 0x510\n\n pub eraseall: ReadWrite<u32, EraseAll::Register>,\n\n _reserved2: u32,\n\n /// Register for erasing User Information Configuration Registers\n\n /// Address: 0x514 - 0x518\n\n pub eraseuicr: ReadWrite<u32, EraseUicr::Register>,\n\n /// Reserved\n", "file_path": "chips/nrf52/src/nvmc.rs", "rank": 90, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct ConfigRegisters {\n\n cliccfg: ReadWrite<u8, conreg::Register>,\n\n}\n\n\n\nregister_bitfields![u8,\n\n intpend [\n\n IntPend OFFSET(0) NUMBITS(1) []\n\n ]\n\n];\n\n\n\nregister_bitfields![u8,\n\n inten [\n\n IntEn OFFSET(0) NUMBITS(1) []\n\n ]\n\n];\n\n\n\n// The data sheet isn't completely clear on this field, but it looks like there\n\n// are four bits for priority and level, and the lowest for bits of the register\n\n// are reserved.\n\nregister_bitfields![u8,\n", "file_path": "arch/rv32i/src/clic.rs", "rank": 91, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct AcifcRegisters {\n\n ctrl: ReadWrite<u32, 
Control::Register>,\n\n sr: ReadOnly<u32, Status::Register>,\n\n _reserved0: [ReadOnly<u32>; 2],\n\n ier: WriteOnly<u32, Interrupt::Register>,\n\n idr: WriteOnly<u32, Interrupt::Register>,\n\n imr: ReadOnly<u32, Interrupt::Register>,\n\n isr: ReadOnly<u32, Interrupt::Register>,\n\n icr: WriteOnly<u32, Interrupt::Register>,\n\n tr: ReadWrite<u32, Test::Register>,\n\n _reserved1: [ReadOnly<u32>; 2],\n\n parameter: ReadOnly<u32, Parameter::Register>,\n\n version: ReadOnly<u32>,\n\n _reserved2: [ReadOnly<u32>; 18],\n\n confw: [ReadWrite<u32, WindowConfiguration::Register>; 4],\n\n _reserved3: [ReadOnly<u32>; 16],\n\n conf: [ReadWrite<u32, ACConfiguration::Register>; 8],\n\n}\n\n\n\nregister_bitfields![u32,\n", "file_path": "chips/sam4l/src/acifc.rs", "rank": 92, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct Tim2Registers {\n\n /// control register 1\n\n cr1: ReadWrite<u32, CR1::Register>,\n\n /// control register 2\n\n cr2: ReadWrite<u32, CR2::Register>,\n\n /// slave mode control register\n\n smcr: ReadWrite<u32, SMCR::Register>,\n\n /// DMA/Interrupt enable register\n\n dier: ReadWrite<u32, DIER::Register>,\n\n /// status register\n\n sr: ReadWrite<u32, SR::Register>,\n\n /// event generation register\n\n egr: WriteOnly<u32, EGR::Register>,\n\n /// capture/compare mode register 1 (output mode)\n\n ccmr1_output: ReadWrite<u32, CCMR1_Output::Register>,\n\n /// capture/compare mode register 2 (output mode)\n\n ccmr2_output: ReadWrite<u32, CCMR2_Output::Register>,\n\n /// capture/compare enable register\n\n ccer: ReadWrite<u32, CCER::Register>,\n\n /// counter\n", "file_path": "chips/stm32f4xx/src/tim2.rs", "rank": 93, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct ScifRegisters {\n\n ier: WriteOnly<u32, Interrupt::Register>,\n\n idr: WriteOnly<u32, Interrupt::Register>,\n\n imr: ReadOnly<u32, Interrupt::Register>,\n\n isr: ReadOnly<u32, Interrupt::Register>,\n\n icr: WriteOnly<u32, Interrupt::Register>,\n\n pclksr: ReadOnly<u32, 
Interrupt::Register>,\n\n unlock: WriteOnly<u32, Unlock::Register>,\n\n cscr: ReadWrite<u32>,\n\n oscctrl0: ReadWrite<u32, Oscillator::Register>,\n\n pll0: ReadWrite<u32, PllControl::Register>,\n\n dfll0conf: ReadWrite<u32, Dfll::Register>,\n\n dfll0val: ReadWrite<u32>,\n\n dfll0mul: ReadWrite<u32>,\n\n dfll0step: ReadWrite<u32, DfllStep::Register>,\n\n dfll0ssg: ReadWrite<u32>,\n\n dfll0ratio: ReadOnly<u32>,\n\n dfll0sync: WriteOnly<u32>,\n\n rccr: ReadWrite<u32>,\n\n rcfastcfg: ReadWrite<u32, Rcfast::Register>,\n", "file_path": "chips/sam4l/src/scif.rs", "rank": 94, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct FlashRegisters {\n\n /// Flash access control register\n\n /// Address offset 0x00\n\n pub acr: ReadWrite<u32, AccessControl::Register>,\n\n /// Flash key register\n\n /// Address offset 0x04\n\n pub kr: WriteOnly<u32, Key::Register>,\n\n /// Flash option key register\n\n /// Address offset 0x08\n\n pub okr: WriteOnly<u32, Key::Register>,\n\n /// Flash status register\n\n /// Address offset 0x0C\n\n pub sr: ReadWrite<u32, Status::Register>,\n\n /// Flash control register\n\n /// Address offset 0x10\n\n pub cr: ReadWrite<u32, Control::Register>,\n\n /// Flash address register\n\n /// Address offset 0x14\n\n pub ar: WriteOnly<u32, Address::Register>,\n\n /// Reserved\n", "file_path": "chips/stm32f303xc/src/flash.rs", "rank": 95, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct SpimRegisters {\n\n _reserved0: [u8; 16], // reserved\n\n tasks_start: WriteOnly<u32, TASK::Register>, // Start SPI transaction\n\n tasks_stop: WriteOnly<u32, TASK::Register>, // Stop SPI transaction\n\n _reserved1: [u8; 4], // reserved\n\n tasks_suspend: WriteOnly<u32, TASK::Register>, // Suspend SPI transaction\n\n tasks_resume: WriteOnly<u32, TASK::Register>, // Resume SPI transaction\n\n _reserved2: [u8; 224], // reserved\n\n events_stopped: ReadWrite<u32, EVENT::Register>, // SPI transaction has stopped\n\n _reserved3: [u8; 8], // reserved\n\n 
events_endrx: ReadWrite<u32, EVENT::Register>, // End of RXD buffer reached\n\n _reserved4: [u8; 4], // reserved\n\n events_end: ReadWrite<u32, EVENT::Register>, // End of RXD buffer and TXD buffer reached\n\n _reserved5: [u8; 4], // reserved\n\n events_endtx: ReadWrite<u32, EVENT::Register>, // End of TXD buffer reached\n\n _reserved6: [u8; 40], // reserved\n\n events_started: ReadWrite<u32, EVENT::Register>, // Transaction started\n\n _reserved7: [u8; 176], // reserved\n\n shorts: ReadWrite<u32>, // Shortcut register\n\n _reserved8: [u8; 256], // reserved\n", "file_path": "chips/nrf52/src/spi.rs", "rank": 96, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct ExtiRegisters {\n\n /// Interrupt mask register (EXTI_IMR1)\n\n imr1: ReadWrite<u32, IMR1::Register>,\n\n /// Event mask register (EXTI_EMR1)\n\n emr1: ReadWrite<u32, EMR1::Register>,\n\n /// Rising Trigger selection register (EXTI_RTSR1)\n\n rtsr1: ReadWrite<u32, RTSR1::Register>,\n\n /// Falling Trigger selection register (EXTI_FTSR1)\n\n ftsr1: ReadWrite<u32, FTSR1::Register>,\n\n /// Software interrupt event register (EXTI_SWIER1)\n\n swier1: ReadWrite<u32, SWIER1::Register>,\n\n /// Pending register (EXTI_PR1)\n\n pr1: ReadWrite<u32, PR1::Register>,\n\n\n\n /// Interrupt mask register (EXTI_IMR1)\n\n imr2: ReadWrite<u32, IMR2::Register>,\n\n /// Event mask register (EXTI_EMR1)\n\n emr2: ReadWrite<u32, EMR2::Register>,\n\n /// Rising Trigger selection register (EXTI_RTSR1)\n\n rtsr2: ReadWrite<u32, RTSR2::Register>,\n", "file_path": "chips/stm32f303xc/src/exti.rs", "rank": 97, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct GpioRegisters {\n\n /// GPIO port mode register\n\n moder: ReadWrite<u32, MODER::Register>,\n\n /// GPIO port output type register\n\n otyper: ReadWrite<u32, OTYPER::Register>,\n\n /// GPIO port output speed register\n\n ospeedr: ReadWrite<u32, OSPEEDR::Register>,\n\n /// GPIO port pull-up/pull-down register\n\n pupdr: ReadWrite<u32, PUPDR::Register>,\n\n 
/// GPIO port input data register\n\n idr: ReadOnly<u32, IDR::Register>,\n\n /// GPIO port output data register\n\n odr: ReadWrite<u32, ODR::Register>,\n\n /// GPIO port bit set/reset register\n\n bsrr: WriteOnly<u32, BSRR::Register>,\n\n /// GPIO port configuration lock register\n\n lckr: ReadWrite<u32, LCKR::Register>,\n\n /// GPIO alternate function low register\n\n afrl: ReadWrite<u32, AFRL::Register>,\n\n /// GPIO alternate function high register\n", "file_path": "chips/stm32f4xx/src/gpio.rs", "rank": 98, "score": 95141.2284676903 }, { "content": "#[repr(C)]\n\nstruct Lpi2cRegisters {\n\n // Version ID Register\n\n verid: ReadOnly<u32, VERID::Register>,\n\n // Parameter Register\n\n param: ReadOnly<u32, PARAM::Register>,\n\n _reserved1: [u8; 8],\n\n // Master Control Register\n\n mcr: ReadWrite<u32, MCR::Register>,\n\n // Master Status Register\n\n msr: ReadWrite<u32, MSR::Register>,\n\n // Master Interrupt Enable Register\n\n mier: ReadWrite<u32, MIER::Register>,\n\n // Master DMA Enable Register\n\n mder: ReadWrite<u32, MDER::Register>,\n\n // Master Configuration Register 0\n\n mcfgr0: ReadWrite<u32, MCFGR0::Register>,\n\n // Master Configuration Register 1\n\n mcfgr1: ReadWrite<u32, MCFGR1::Register>,\n\n // Master Configuration Register 2\n\n mcfgr2: ReadWrite<u32, MCFGR2::Register>,\n", "file_path": "chips/imxrt10xx/src/lpi2c.rs", "rank": 99, "score": 95141.2284676903 } ]
Rust
binrw/src/private.rs
dmgolembiowski/binrw
9779ff3749d0576a46b544373442ce9d9af2914c
use crate::{ error::CustomError, io::{self, Seek, Write}, BinRead, BinResult, Error, ReadOptions, WriteOptions, }; #[cfg(not(feature = "std"))] use alloc::{boxed::Box, string::String}; pub enum AssertErrorFn<M, E> { Message(M), Error(E), } pub fn assert<MsgFn, Msg, ErrorFn, Err>( test: bool, pos: u64, error_fn: AssertErrorFn<MsgFn, ErrorFn>, ) -> BinResult<()> where MsgFn: Fn() -> Msg, Msg: Into<String> + Sized, ErrorFn: Fn() -> Err, Err: CustomError + 'static, { if test { Ok(()) } else { Err(match error_fn { AssertErrorFn::Message(error_fn) => Error::AssertFail { pos, message: error_fn().into(), }, AssertErrorFn::Error(error_fn) => Error::Custom { pos, err: Box::new(error_fn()), }, }) } } pub fn coerce_fn<R, T, F>(f: F) -> F where F: Fn(T) -> R, { f } pub fn magic<R, B>(reader: &mut R, expected: B, options: &ReadOptions) -> BinResult<()> where B: BinRead<Args = ()> + core::fmt::Debug + PartialEq + Sync + Send + 'static, R: io::Read + io::Seek, { let pos = reader.stream_position()?; let val = B::read_options(reader, options, ())?; if val == expected { Ok(()) } else { Err(Error::BadMagic { pos, found: Box::new(val) as _, }) } } pub fn parse_function_args_type_hint<R, Res, Args, F>(_: F, a: Args) -> Args where R: crate::io::Read + Seek, F: FnOnce(&mut R, &crate::ReadOptions, Args) -> crate::BinResult<Res>, { a } pub fn write_function_args_type_hint<T, W, Args, F>(_: F, a: Args) -> Args where W: Write + Seek, F: FnOnce(&T, &mut W, &crate::WriteOptions, Args) -> crate::BinResult<()>, { a } pub fn map_args_type_hint<Input, Output, MapFn, Args>(_: &MapFn, args: Args) -> Args where MapFn: FnOnce(Input) -> Output, Input: BinRead<Args = Args>, { args } pub fn write_fn_type_hint<T, WriterFn, Writer, Args>(x: WriterFn) -> WriterFn where Args: Clone, Writer: Write + Seek, WriterFn: Fn(&T, &mut Writer, &WriteOptions, Args) -> BinResult<()>, { x } pub fn write_map_args_type_hint<Input, Output, MapFn, Args>(_: &MapFn, args: Args) -> Args where MapFn: FnOnce(Input) -> Output, 
Output: crate::BinWrite<Args = Args>, { args } pub fn write_try_map_args_type_hint<Input, Output, MapFn, Args>(_: &MapFn, args: Args) -> Args where MapFn: FnOnce(Input) -> BinResult<Output>, Output: crate::BinWrite<Args = Args>, { args } pub fn write_map_fn_input_type_hint<Input, Output, MapFn>(func: MapFn) -> MapFn where MapFn: FnOnce(Input) -> Output, { func } pub fn write_fn_map_output_type_hint<Input, Output, MapFn, Writer, WriteFn, Args>( _: &MapFn, func: WriteFn, ) -> WriteFn where MapFn: FnOnce(Input) -> Output, Args: Clone, Writer: Write + Seek, WriteFn: Fn(&Output, &mut Writer, &WriteOptions, Args) -> BinResult<()>, { func } pub fn write_fn_try_map_output_type_hint<Input, Output, MapFn, Writer, WriteFn, Args>( _: &MapFn, func: WriteFn, ) -> WriteFn where MapFn: FnOnce(Input) -> BinResult<Output>, Args: Clone, Writer: Write + Seek, WriteFn: Fn(&Output, &mut Writer, &WriteOptions, Args) -> BinResult<()>, { func } pub fn write_zeroes<W: Write>(writer: &mut W, count: u64) -> BinResult<()> { const BUF_SIZE: u64 = 0x20; const ZEROES: [u8; BUF_SIZE as usize] = [0u8; BUF_SIZE as usize]; if count <= BUF_SIZE { writer.write_all(&ZEROES[..count as usize])?; } else { let full_chunks = count / BUF_SIZE; let remaining = count % BUF_SIZE; for _ in 0..full_chunks { writer.write_all(&ZEROES)?; } writer.write_all(&ZEROES[..remaining as usize])?; } Ok(()) }
use crate::{ error::CustomError, io::{self, Seek, Write}, BinRead, BinResult, Error, ReadOptions, WriteOptions, }; #[cfg(not(feature = "std"))] use alloc::{boxed::Box, string::String}; pub enum AssertErrorFn<M, E> { Message(M), Error(E), } pub fn assert<MsgFn, Msg, ErrorFn, Err>( test: bool, pos: u64, error_fn: AssertErrorFn<MsgFn, ErrorFn>, ) -> BinResult<()> where MsgFn: Fn() -> Msg, Msg: Into<String> + Sized, ErrorFn: Fn() -> Err, Err: CustomError + 'static, { if test { Ok(()) } else { Err(match error_fn { AssertErrorFn::Message(error_fn) => Error::AssertFail { pos, message: error_fn().into(), }, AssertErrorFn::Error(error_fn) => Error::Custom { pos, err: Box::new(error_fn()), }, }) } } pub fn coerce_fn<R, T, F>(f: F) -> F where F: Fn(T) -> R, { f } pub fn magic<R, B>(reader: &mut R, expected: B, options: &ReadOptions) -> BinResult<()> where B: BinRead<Args = ()> + core::fmt::Debug + PartialEq + Sync + Send + 'static, R: io::Read + io::Seek, { let pos = reader.stream_position()?; let val = B::read_options(reader, options, ())?; if val == expected { Ok(()) } else { Err(Error::BadMagic { pos, found: Box::new(val) as _, }) } } pub fn parse_function_args_type_hint<R, Res, Args,
riter, &WriteOptions, Args) -> BinResult<()>, { x } pub fn write_map_args_type_hint<Input, Output, MapFn, Args>(_: &MapFn, args: Args) -> Args where MapFn: FnOnce(Input) -> Output, Output: crate::BinWrite<Args = Args>, { args } pub fn write_try_map_args_type_hint<Input, Output, MapFn, Args>(_: &MapFn, args: Args) -> Args where MapFn: FnOnce(Input) -> BinResult<Output>, Output: crate::BinWrite<Args = Args>, { args } pub fn write_map_fn_input_type_hint<Input, Output, MapFn>(func: MapFn) -> MapFn where MapFn: FnOnce(Input) -> Output, { func } pub fn write_fn_map_output_type_hint<Input, Output, MapFn, Writer, WriteFn, Args>( _: &MapFn, func: WriteFn, ) -> WriteFn where MapFn: FnOnce(Input) -> Output, Args: Clone, Writer: Write + Seek, WriteFn: Fn(&Output, &mut Writer, &WriteOptions, Args) -> BinResult<()>, { func } pub fn write_fn_try_map_output_type_hint<Input, Output, MapFn, Writer, WriteFn, Args>( _: &MapFn, func: WriteFn, ) -> WriteFn where MapFn: FnOnce(Input) -> BinResult<Output>, Args: Clone, Writer: Write + Seek, WriteFn: Fn(&Output, &mut Writer, &WriteOptions, Args) -> BinResult<()>, { func } pub fn write_zeroes<W: Write>(writer: &mut W, count: u64) -> BinResult<()> { const BUF_SIZE: u64 = 0x20; const ZEROES: [u8; BUF_SIZE as usize] = [0u8; BUF_SIZE as usize]; if count <= BUF_SIZE { writer.write_all(&ZEROES[..count as usize])?; } else { let full_chunks = count / BUF_SIZE; let remaining = count % BUF_SIZE; for _ in 0..full_chunks { writer.write_all(&ZEROES)?; } writer.write_all(&ZEROES[..remaining as usize])?; } Ok(()) }
F>(_: F, a: Args) -> Args where R: crate::io::Read + Seek, F: FnOnce(&mut R, &crate::ReadOptions, Args) -> crate::BinResult<Res>, { a } pub fn write_function_args_type_hint<T, W, Args, F>(_: F, a: Args) -> Args where W: Write + Seek, F: FnOnce(&T, &mut W, &crate::WriteOptions, Args) -> crate::BinResult<()>, { a } pub fn map_args_type_hint<Input, Output, MapFn, Args>(_: &MapFn, args: Args) -> Args where MapFn: FnOnce(Input) -> Output, Input: BinRead<Args = Args>, { args } pub fn write_fn_type_hint<T, WriterFn, Writer, Args>(x: WriterFn) -> WriterFn where Args: Clone, Writer: Write + Seek, WriterFn: Fn(&T, &mut W
random
[ { "content": "/// A helper similar to `#[br(count = N)]` which can be used with any collection.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # use binrw::{BinRead, helpers::count, io::Cursor, BinReaderExt};\n\n/// # use std::collections::VecDeque;\n\n/// #[derive(BinRead)]\n\n/// struct CountBytes {\n\n/// len: u8,\n\n///\n\n/// #[br(parse_with = count(len as usize))]\n\n/// data: VecDeque<u8>,\n\n/// }\n\n///\n\n/// # let mut x = Cursor::new(b\"\\x03\\x01\\x02\\x03\");\n\n/// # let x: CountBytes = x.read_be().unwrap();\n\n/// # assert_eq!(x.data, &[1, 2, 3]);\n\n/// ```\n\npub fn count<R, T, Arg, Ret>(n: usize) -> impl Fn(&mut R, &ReadOptions, Arg) -> BinResult<Ret>\n\nwhere\n\n T: BinRead<Args = Arg>,\n\n R: Read + Seek,\n\n Arg: Clone,\n\n Ret: core::iter::FromIterator<T> + 'static,\n\n{\n\n move |reader, ro, args| {\n\n let mut container: Ret = core::iter::empty::<T>().collect();\n\n if let Some(bytes) = <dyn core::any::Any>::downcast_mut::<Vec<u8>>(&mut container) {\n\n bytes.reserve(n);\n\n let byte_count = reader\n\n .take(n.try_into().map_err(not_enough_bytes)?)\n\n .read_to_end(bytes)?;\n\n (byte_count == n)\n\n .then(|| container)\n\n .ok_or_else(|| not_enough_bytes(()))\n\n } else {\n\n let read = |reader: &mut R, ro: &ReadOptions, args: Arg| {\n\n let mut value = T::read_options(reader, ro, args.clone())?;\n\n value.after_parse(reader, ro, args)?;\n\n Ok(value)\n\n };\n\n count_with(n, read)(reader, ro, args)\n\n }\n\n }\n\n}\n\n\n", "file_path": "binrw/src/helpers.rs", "rank": 2, "score": 280874.36367320234 }, { "content": "#[deprecated(since = \"0.2.0\", note = \"Use Vec<u8> instead.\")]\n\npub fn read_bytes<R: Read + Seek>(\n\n reader: &mut R,\n\n _options: &ReadOptions,\n\n args: VecArgs<()>,\n\n) -> BinResult<Vec<u8>> {\n\n let mut buf = vec![0; args.count];\n\n reader.read_exact(&mut buf)?;\n\n\n\n Ok(buf)\n\n}\n\n\n", "file_path": "binrw/src/helpers.rs", "rank": 7, "score": 224137.14093093254 }, { "content": "/// Do the same as 
[count](binrw::helpers::count) with a custom parsing function for the inner type.\n\n///\n\n/// # Examples\n\n///\n\n/// This example shows how to read `len` lists of two elements using [count_with](binrw::helpers::count_with) coupled with [count](binrw::helpers::count).\n\n/// ```\n\n/// # use binrw::{BinRead, helpers::count, helpers::count_with, io::Cursor, BinReaderExt};\n\n/// # use std::collections::VecDeque;\n\n/// #[derive(BinRead)]\n\n/// struct CountBytes {\n\n/// len: u8,\n\n///\n\n/// #[br(parse_with = count_with(len as usize, count(2)))]\n\n/// data: VecDeque<VecDeque<u8>>,\n\n/// }\n\n///\n\n/// # let mut x = Cursor::new(b\"\\x02\\x01\\x02\\x03\\x04\");\n\n/// # let x: CountBytes = x.read_be().unwrap();\n\n/// # assert_eq!(x.data, &[[1, 2], [3, 4]]);\n\npub fn count_with<R, T, Arg, ReadFn, Ret>(\n\n n: usize,\n\n read: ReadFn,\n\n) -> impl Fn(&mut R, &ReadOptions, Arg) -> BinResult<Ret>\n\nwhere\n\n R: Read + Seek,\n\n Arg: Clone,\n\n ReadFn: Fn(&mut R, &ReadOptions, Arg) -> BinResult<T>,\n\n Ret: core::iter::FromIterator<T> + 'static,\n\n{\n\n move |reader, ro, args| {\n\n core::iter::repeat_with(|| read(reader, ro, args.clone()))\n\n .take(n)\n\n .collect()\n\n }\n\n}\n", "file_path": "binrw/src/helpers.rs", "rank": 8, "score": 214821.0166227939 }, { "content": "/// The `BinRead` trait reads data from streams and converts it into objects.\n\n///\n\n/// [`io`]: crate::io\n\n///\n\n/// This trait is usually derived, but can also be manually implemented by\n\n/// writing an appropriate [`Args`] type and [`read_options()`] function.\n\n///\n\n/// [`Args`]: Self::Args\n\n/// [`read_options()`]: Self::read_options\n\n///\n\n/// # Derivable\n\n///\n\n/// This trait can be used with `#[derive]` or `#[derive_binread]`. 
Each field\n\n/// of a derived type must either implement `BinRead` or be annotated with an\n\n/// attribute containing a [`map`], [`try_map`], or [`parse_with`] directive.\n\n///\n\n/// [`map`]: crate::attribute#map\n\n/// [`parse_with`]: crate::attribute#parse_with\n\n/// [`try_map`]: crate::attribute#map\n\n///\n\n/// Using `#[derive_binread]` instead of `#[derive]` is required when using\n\n/// [temporary fields].\n\n///\n\n/// [temporary fields]: crate::attribute#temp\n\npub trait BinRead: Sized + 'static {\n\n /// The type used for the `args` parameter of [`read_args()`] and\n\n /// [`read_options()`].\n\n ///\n\n /// When the given type implements [`Default`], convenience functions like\n\n /// [`read()`] are enabled. `BinRead` implementations that don’t receive any\n\n /// arguments should use the `()` type.\n\n ///\n\n /// When `BinRead` is derived, the [`import`] and [`import_tuple`]\n\n /// directives define this type.\n\n ///\n\n /// [`import`]: crate::attribute#arguments\n\n /// [`import_tuple`]: crate::attribute#arguments\n\n /// [`read()`]: Self::read\n\n /// [`read_args()`]: Self::read_args\n\n /// [`read_options()`]: Self::read_options\n\n type Args: Clone;\n\n\n\n /// Read `Self` from the reader using default arguments.\n\n fn read<R: Read + Seek>(reader: &mut R) -> BinResult<Self>\n", "file_path": "binrw/src/binread/mod.rs", "rank": 9, "score": 213823.20356637135 }, { "content": "fn combine_error(all_errors: &mut Option<syn::Error>, new_error: syn::Error) {\n\n if let Some(all_errors) = all_errors {\n\n all_errors.combine(new_error);\n\n } else {\n\n *all_errors = Some(new_error);\n\n }\n\n}\n\n\n\npub(crate) trait TempableField {\n\n // The identifier for this field.\n\n fn ident(&self) -> &syn::Ident;\n\n\n\n /// Returns true if this field is temporary and should be removed from the struct.\n\n fn is_temp(&self) -> bool;\n\n\n\n /// Returns true if this field is temporary and should be removed from the struct.\n\n /// Ignores the value set by 
`set_crossover_temp`.\n\n fn is_temp_for_crossover(&self) -> bool;\n\n\n\n /// Set the crossover temporary field.\n", "file_path": "binrw_derive/src/parser/mod.rs", "rank": 10, "score": 211955.44856723404 }, { "content": "#[inline]\n\nfn slice_write(pos_mut: &mut u64, slice: &mut [u8], buf: &[u8]) -> Result<usize> {\n\n let pos = cmp::min(*pos_mut, slice.len() as u64);\n\n let amt = (&mut slice[(pos as usize)..]).write(buf)?;\n\n *pos_mut += amt as u64;\n\n Ok(amt)\n\n}\n\n\n", "file_path": "binrw/src/io/no_std/cursor.rs", "rank": 11, "score": 206552.82364989916 }, { "content": "// Resizing write implementation\n\nfn vec_write(pos_mut: &mut u64, vec: &mut Vec<u8>, buf: &[u8]) -> Result<usize> {\n\n let pos: usize = (*pos_mut).try_into().map_err(|_| {\n\n Error::new(\n\n ErrorKind::InvalidInput,\n\n &\"cursor position exceeds maximum possible vector length\",\n\n )\n\n })?;\n\n // Make sure the internal buffer is as least as big as where we\n\n // currently are\n\n let len = vec.len();\n\n if len < pos {\n\n // use `resize` so that the zero filling is as efficient as possible\n\n vec.resize(pos, 0);\n\n }\n\n // Figure out what bytes will be used to overwrite what's currently\n\n // there (left), and what will be appended on the end (right)\n\n {\n\n let space = vec.len() - pos;\n\n let (left, right) = buf.split_at(cmp::min(space, buf.len()));\n\n vec[pos..pos + left.len()].copy_from_slice(left);\n", "file_path": "binrw/src/io/no_std/cursor.rs", "rank": 12, "score": 201030.98881860462 }, { "content": "/// Extension methods for writing [`BinWrite`] objects directly to a writer.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// use binrw::{binwrite, BinWriterExt, io::Cursor, Endian};\n\n///\n\n/// #[binwrite]\n\n/// struct MyStruct(u8, u16, u8);\n\n///\n\n/// let mut writer = Cursor::new(Vec::new());\n\n/// writer.write_be(&MyStruct(1, 0xffff, 2)).unwrap();\n\n/// writer.write_type(&0x1234_u16, Endian::Little).unwrap();\n\n///\n\n/// 
assert_eq!(&writer.into_inner()[..], &[1, 0xff, 0xff, 2, 0x34, 0x12][..]);\n\n/// ```\n\npub trait BinWriterExt: Write + Seek + Sized {\n\n /// Write `T` from the writer with the given byte order.\n\n fn write_type<T: BinWrite>(&mut self, value: &T, endian: Endian) -> BinResult<()>\n\n where\n\n T::Args: Default,\n\n {\n\n self.write_type_args(value, endian, T::Args::default())\n\n }\n\n\n\n /// Write `T` from the writer assuming big-endian byte order.\n\n fn write_be<T: BinWrite>(&mut self, value: &T) -> BinResult<()>\n\n where\n\n T::Args: Default,\n\n {\n\n self.write_type(value, Endian::Big)\n\n }\n\n\n\n /// Write `T` from the writer assuming little-endian byte order.\n\n fn write_le<T: BinWrite>(&mut self, value: &T) -> BinResult<()>\n\n where\n", "file_path": "binrw/src/binwrite/mod.rs", "rank": 13, "score": 198856.51505122255 }, { "content": "#[test]\n\nfn write_enum() {\n\n #[derive(BinWrite)]\n\n #[bw(repr(u32))]\n\n enum Test {\n\n A,\n\n B = 3,\n\n C,\n\n D = 5,\n\n }\n\n\n\n let mut x = Cursor::new(Vec::new());\n\n\n\n vec![Test::A, Test::B, Test::C, Test::D]\n\n .write_options(&mut x, &WriteOptions::new(Endian::Big), ())\n\n .unwrap();\n\n\n\n assert_eq!(\n\n &x.into_inner()[..],\n\n &[0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0, 5]\n\n );\n\n}\n\n\n", "file_path": "binrw/tests/derive/write/c_enum.rs", "rank": 14, "score": 195908.47847148377 }, { "content": "/// The `CustomError` trait describes types that are usable as custom errors\n\n/// in a [`BinResult`](crate::BinResult).\n\n///\n\n/// This trait is automatically implemented for any type which implements the\n\n/// same traits as [`std::error::Error`], so anything you would normally use as\n\n/// an error in other code is also a valid `CustomError`, with the additional\n\n/// restriction that it must also be [`Send`] + [`Sync`].\n\n///\n\n/// This trait is Sealed.\n\npub trait CustomError: fmt::Display + fmt::Debug + Send + Sync + private::Sealed {\n\n #[doc(hidden)]\n\n fn as_any(&self) -> 
&(dyn Any + Send + Sync);\n\n\n\n #[doc(hidden)]\n\n fn as_any_mut(&mut self) -> &mut (dyn Any + Send + Sync);\n\n\n\n #[doc(hidden)]\n\n fn as_box_any(self: Box<Self>) -> Box<dyn Any + Send + Sync>;\n\n}\n\n\n\nimpl<T: fmt::Display + fmt::Debug + Send + Sync + 'static> CustomError for T {\n\n fn as_any(&self) -> &(dyn Any + Send + Sync) {\n\n self\n\n }\n\n\n\n fn as_any_mut(&mut self) -> &mut (dyn Any + Send + Sync) {\n\n self\n\n }\n\n\n\n fn as_box_any(self: Box<Self>) -> Box<dyn Any + Send + Sync> {\n\n self\n\n }\n\n}\n\n\n", "file_path": "binrw/src/error/mod.rs", "rank": 15, "score": 193385.1142736963 }, { "content": "fn is_nightly() -> Option<bool> {\n\n let rustc = env::var_os(\"RUSTC\")?;\n\n let output = Command::new(rustc).arg(\"--version\").output().ok()?;\n\n let version = str::from_utf8(&output.stdout).ok()?;\n\n let nightly = version.contains(\"nightly\") || version.contains(\"dev\");\n\n\n\n Some(nightly)\n\n}\n", "file_path": "binrw_derive/build.rs", "rank": 17, "score": 189539.0059238849 }, { "content": "#[test]\n\nfn enum_return_all_errors() {\n\n #[derive(BinRead, Debug)]\n\n #[br(big, return_all_errors)]\n\n enum Test {\n\n #[br(magic(0u16))]\n\n One { a: u16 },\n\n #[br(magic(1u16))]\n\n Two { a: u16 },\n\n }\n\n\n\n let error = Test::read(&mut Cursor::new(\"\\0\\x01\")).expect_err(\"accepted bad data\");\n\n dbg!(&error);\n\n match error {\n\n binrw::Error::EnumErrors {\n\n pos,\n\n variant_errors,\n\n } => {\n\n assert_eq!(pos, 0);\n\n assert_eq!(variant_errors.len(), 2);\n\n assert_eq!(variant_errors[0].0, \"One\");\n", "file_path": "binrw/tests/derive/enum.rs", "rank": 19, "score": 185988.6208486578 }, { "content": "#[test]\n\nfn pass_args() {\n\n #[binwrite]\n\n #[bw(import{ x: u32, y: u8 })]\n\n struct TestInner {\n\n #[bw(calc = x)]\n\n x_copy: u32,\n\n\n\n #[bw(calc = y)]\n\n y_copy: u8,\n\n }\n\n\n\n #[derive(BinWrite)]\n\n #[bw(big)]\n\n struct Test {\n\n #[bw(args { x: 1, y: 2 })]\n\n inner: TestInner,\n\n }\n\n\n\n let mut x = 
Cursor::new(Vec::new());\n\n Test {\n\n inner: TestInner {},\n\n }\n\n .write_to(&mut x)\n\n .unwrap();\n\n\n\n assert_eq!(&x.into_inner()[..], b\"\\0\\0\\0\\x01\\x02\");\n\n}\n", "file_path": "binrw/tests/derive/write/args.rs", "rank": 20, "score": 185375.50641376313 }, { "content": "#[test]\n\nfn enum_non_copy_args() {\n\n #[derive(BinRead, Debug)]\n\n #[br(import(a: NonCopyArg))]\n\n enum Test {\n\n A {\n\n #[br(calc = a.0)]\n\n a: u8,\n\n },\n\n B {\n\n #[br(calc = a.0)]\n\n b: u8,\n\n },\n\n }\n\n\n\n #[derive(Clone)]\n\n struct NonCopyArg(u8);\n\n}\n\n\n", "file_path": "binrw/tests/derive/enum.rs", "rank": 21, "score": 181700.1864065352 }, { "content": "#[test]\n\nfn enum_return_unexpected_error() {\n\n #[derive(BinRead, Debug)]\n\n #[br(big, return_unexpected_error)]\n\n enum Test {\n\n #[br(magic(0u16))]\n\n One { a: u16 },\n\n #[br(magic(1u16))]\n\n Two { a: u16 },\n\n }\n\n\n\n let error = Test::read(&mut Cursor::new(\"\\0\\x01\")).expect_err(\"accepted bad data\");\n\n assert!(matches!(error, binrw::Error::NoVariantMatch { .. 
}));\n\n}\n\n\n", "file_path": "binrw/tests/derive/enum.rs", "rank": 22, "score": 181682.7490452651 }, { "content": "#[test]\n\nfn enum_round_trip() {\n\n #[derive(BinRead, BinWrite)]\n\n #[brw(big)]\n\n enum Test {\n\n #[brw(magic = b\"AAA\")]\n\n A {\n\n #[brw(little)]\n\n x: u32,\n\n y: u8,\n\n },\n\n\n\n #[brw(little, magic = b\"BBB\")]\n\n B {\n\n x: u32,\n\n\n\n #[brw(big)]\n\n y: u16,\n\n },\n\n\n\n #[brw(magic = b\"CCC\")]\n", "file_path": "binrw/tests/derive/write/enum.rs", "rank": 23, "score": 180967.58940184137 }, { "content": "#[test]\n\nfn enum_one_way() {\n\n #[derive(BinWrite)]\n\n #[brw(big)]\n\n enum Test {\n\n #[brw(magic = b\"AAA\")]\n\n A {\n\n #[brw(little)]\n\n x: u32,\n\n y: u8,\n\n },\n\n\n\n #[brw(little, magic = b\"BBB\")]\n\n B(u32, #[brw(big)] u16),\n\n\n\n #[brw(magic = b\"CCC\")]\n\n C,\n\n }\n\n\n\n let mut x = Cursor::new(Vec::new());\n\n\n", "file_path": "binrw/tests/derive/write/enum.rs", "rank": 24, "score": 180967.58940184137 }, { "content": "#[test]\n\nfn clone_args() {\n\n #[derive(Clone)]\n\n struct OnlyCloneable;\n\n\n\n #[derive(BinRead)]\n\n #[br(import(_needs_clone: OnlyCloneable))]\n\n struct ArgsNeedClone;\n\n\n\n #[derive(BinRead)]\n\n struct TestCloneArray {\n\n // Test for `[T; N]::Args`\n\n #[br(args(OnlyCloneable))]\n\n _array: [ArgsNeedClone; 35],\n\n\n\n // Test for `Vec<T>::Args`\n\n #[br(count = 4, args { inner: (OnlyCloneable,) })]\n\n _vec: Vec<ArgsNeedClone>,\n\n }\n\n\n\n TestCloneArray::read(&mut binrw::io::Cursor::new(b\"\")).unwrap();\n\n}\n", "file_path": "binrw/tests/binread_impls.rs", "rank": 25, "score": 180180.04998599598 }, { "content": "/// Extension methods for reading [`BinRead`] objects directly from a reader.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// use binrw::BinReaderExt;\n\n/// use binrw::endian::LE;\n\n/// use binrw::io::Cursor;\n\n///\n\n/// let mut reader = Cursor::new(b\"\\x07\\0\\0\\0\\xCC\\0\\0\\x05\");\n\n/// let x: u32 = reader.read_le().unwrap();\n\n/// let y: u16 = 
reader.read_type(LE).unwrap();\n\n/// let z = reader.read_be::<u16>().unwrap();\n\n///\n\n/// assert_eq!((x, y, z), (7u32, 0xCCu16, 5u16));\n\n/// ```\n\npub trait BinReaderExt: Read + Seek + Sized {\n\n /// Read `T` from the reader with the given byte order.\n\n fn read_type<T: BinRead>(&mut self, endian: Endian) -> BinResult<T>\n\n where\n\n T::Args: Default,\n\n {\n\n self.read_type_args(endian, T::Args::default())\n\n }\n\n\n\n /// Read `T` from the reader assuming big-endian byte order.\n\n fn read_be<T: BinRead>(&mut self) -> BinResult<T>\n\n where\n\n T::Args: Default,\n\n {\n\n self.read_type(Endian::Big)\n\n }\n\n\n\n /// Read `T` from the reader assuming little-endian byte order.\n\n fn read_le<T: BinRead>(&mut self) -> BinResult<T>\n\n where\n", "file_path": "binrw/src/binread/mod.rs", "rank": 26, "score": 177893.63111638214 }, { "content": "#[test]\n\nfn binread_temp_applies() {\n\n #[binrw]\n\n #[bw(import { x: u32})]\n\n struct TestInner {\n\n #[br(ignore)]\n\n #[bw(calc = x)]\n\n x_copy: u32,\n\n }\n\n\n\n #[binrw]\n\n #[bw(big)]\n\n struct Test {\n\n #[bw(args { x: 1 })]\n\n inner: TestInner,\n\n }\n\n\n\n let mut x = Cursor::new(Vec::new());\n\n\n\n Test {\n\n inner: TestInner {},\n\n }\n\n .write_options(&mut x, &WriteOptions::new(Endian::Big), ())\n\n .unwrap();\n\n\n\n assert_eq!(&x.into_inner()[..], &[0, 0, 0, 1]);\n\n}\n", "file_path": "binrw/tests/derive/write/binread_temp.rs", "rank": 27, "score": 177205.37561494883 }, { "content": "#[test]\n\nfn round_trip_unit_enum() {\n\n #[derive(BinRead, BinWrite)]\n\n #[brw(repr(u32), magic = 0xff_u8)]\n\n enum Test {\n\n A,\n\n B = 3,\n\n C,\n\n D = 5,\n\n }\n\n\n\n let data = &[\n\n 0xff, 0, 0, 0, 0, 0xff, 0, 0, 0, 3, 0xff, 0, 0, 0, 4, 0xff, 0, 0, 0, 5,\n\n ];\n\n let test: [Test; 4] = Cursor::new(data).read_be().unwrap();\n\n\n\n let mut x = Cursor::new(Vec::new());\n\n\n\n test.write_options(&mut x, &WriteOptions::new(Endian::Big), ())\n\n .unwrap();\n\n\n\n assert_eq!(&x.into_inner()[..], 
data);\n\n}\n\n\n", "file_path": "binrw/tests/derive/write/c_enum.rs", "rank": 28, "score": 176917.3626288605 }, { "content": "#[test]\n\nfn magic_enum_round_trip() {\n\n #[derive(BinRead, BinWrite)]\n\n enum Test {\n\n #[brw(magic = b\"abc\")]\n\n A,\n\n\n\n #[brw(magic = b\"123\")]\n\n B,\n\n\n\n #[brw(magic = b\"def\")]\n\n C,\n\n\n\n #[brw(magic = b\"456\")]\n\n D,\n\n }\n\n\n\n let data = b\"123abcdef456\";\n\n let test: [Test; 4] = Cursor::new(data).read_be().unwrap();\n\n\n\n let mut x = Cursor::new(Vec::new());\n\n\n\n test.write_options(&mut x, &WriteOptions::new(Endian::Big), ())\n\n .unwrap();\n\n\n\n assert_eq!(&x.into_inner()[..], data);\n\n}\n", "file_path": "binrw/tests/derive/write/c_enum.rs", "rank": 29, "score": 176917.3626288605 }, { "content": "#[test]\n\nfn return_error() {\n\n struct ReturnError(Option<Error>);\n\n\n\n impl Read for ReturnError {\n\n fn read(&mut self, _buf: &mut [u8]) -> Result<usize> {\n\n Err(self.0.take().unwrap())\n\n }\n\n }\n\n\n\n let mut x = ReturnError(Some(Error::from(ErrorKind::ConnectionRefused)));\n\n let mut out = [0, 1, 2, 3];\n\n\n\n assert_eq!(\n\n x.read_exact(&mut out).unwrap_err().kind(),\n\n ErrorKind::ConnectionRefused\n\n );\n\n\n\n let mut x = ReturnError(Some(Error::from(ErrorKind::ConnectionRefused))).bytes();\n\n assert_eq!(\n\n x.next().unwrap().unwrap_err().kind(),\n\n ErrorKind::ConnectionRefused\n\n );\n\n}\n\n\n", "file_path": "binrw/tests/io/no_std/mod.rs", "rank": 30, "score": 175326.81571076473 }, { "content": "#[test]\n\nfn usable_args() {\n\n #[binrw]\n\n #[bw(import { x: u32, _y: u8 })]\n\n struct Test {\n\n #[br(temp, ignore)]\n\n #[bw(calc = x)]\n\n x_copy: u32,\n\n }\n\n\n\n let mut x = binrw::io::Cursor::new(Vec::new());\n\n\n\n Test {}\n\n .write_options(&mut x, &Default::default(), binrw::args! 
{ x: 3, _y: 2 })\n\n .unwrap();\n\n}\n", "file_path": "binrw/tests/derive/write/import.rs", "rank": 32, "score": 174500.1956556645 }, { "content": "/// The `Seek` trait provides a cursor which can be moved within a stream of\n\n/// bytes.\n\npub trait Seek {\n\n /// Seek to an offset, in bytes, in a stream.\n\n fn seek(&mut self, pos: SeekFrom) -> Result<u64>;\n\n /// Returns the current seek position from the start of the stream.\n\n ///\n\n /// This is equivalent to `self.seek(SeekFrom::Current(0))`.\n\n fn stream_position(&mut self) -> Result<u64> {\n\n self.seek(SeekFrom::Current(0))\n\n }\n\n}\n\n\n\nimpl<S: Seek + ?Sized> Seek for &mut S {\n\n #[inline]\n\n fn seek(&mut self, pos: SeekFrom) -> Result<u64> {\n\n (**self).seek(pos)\n\n }\n\n}\n\n\n", "file_path": "binrw/src/io/no_std/mod.rs", "rank": 34, "score": 171811.93887998228 }, { "content": "/// A trait for objects which are byte-oriented sinks.\n\npub trait Write {\n\n /// Write a buffer into this writer, returning how many bytes were written.\n\n fn write(&mut self, buf: &[u8]) -> Result<usize>;\n\n\n\n /// Flush this output stream, ensuring that all intermediately buffered\n\n /// contents reach their destination.\n\n fn flush(&mut self) -> Result<()>;\n\n\n\n /// Attempts to write an entire buffer into this writer.\n\n fn write_all(&mut self, mut buf: &[u8]) -> Result<()> {\n\n while !buf.is_empty() {\n\n match self.write(buf) {\n\n Ok(0) => {\n\n return Err(Error::new(\n\n ErrorKind::WriteZero,\n\n \"failed to write whole buffer\",\n\n ));\n\n }\n\n Ok(n) => buf = &buf[n..],\n\n Err(ref e) if e.kind() == ErrorKind::Interrupted => {}\n", "file_path": "binrw/src/io/no_std/mod.rs", "rank": 35, "score": 170751.94701939262 }, { "content": "fn not_enough_bytes<T>(_: T) -> Error {\n\n Error::Io(io::Error::new(\n\n io::ErrorKind::UnexpectedEof,\n\n \"not enough bytes in reader\",\n\n ))\n\n}\n\n\n\n/// Arguments passed to the binread impl for Vec\n\n#[derive(BinrwNamedArgs, Clone)]\n\npub struct VecArgs<B> 
{\n\n /// The number of elements to read.\n\n pub count: usize,\n\n\n\n /// Arguments to pass to the inner type\n\n #[named_args(try_optional)]\n\n pub inner: B,\n\n}\n\n\n\nimpl<B: BinRead> BinRead for Vec<B> {\n\n type Args = VecArgs<B::Args>;\n", "file_path": "binrw/src/binread/impls.rs", "rank": 36, "score": 166672.8206545622 }, { "content": "fn main() {}\n", "file_path": "binrw/tests/ui/conflicting_keyword_enum_error_mode.rs", "rank": 38, "score": 166421.3882412132 }, { "content": "#[test]\n\nfn correct_args_type_set() {\n\n #[derive(BinWrite)]\n\n #[bw(import { _x: u32, _y: u8 })]\n\n struct Test {}\n\n\n\n let mut x = binrw::io::Cursor::new(Vec::new());\n\n\n\n Test {}\n\n .write_options(&mut x, &Default::default(), binrw::args! { _x: 3, _y: 2 })\n\n .unwrap();\n\n}\n\n\n", "file_path": "binrw/tests/derive/write/import.rs", "rank": 40, "score": 165823.12993198735 }, { "content": "/// Read items until a condition is met. The final item will be included.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # use binrw::{BinRead, helpers::until, io::Cursor, BinReaderExt};\n\n/// #[derive(BinRead)]\n\n/// struct NullTerminated {\n\n/// #[br(parse_with = until(|&byte| byte == 0))]\n\n/// data: Vec<u8>,\n\n/// }\n\n///\n\n/// # let mut x = Cursor::new(b\"\\x01\\x02\\x03\\x04\\0\");\n\n/// # let x: NullTerminated = x.read_be().unwrap();\n\n/// # assert_eq!(x.data, &[1, 2, 3, 4, 0]);\n\n/// ```\n\npub fn until<Reader, T, CondFn, Arg, Ret>(\n\n cond: CondFn,\n\n) -> impl Fn(&mut Reader, &ReadOptions, Arg) -> BinResult<Ret>\n\nwhere\n\n T: BinRead<Args = Arg>,\n\n Reader: Read + Seek,\n\n CondFn: Fn(&T) -> bool,\n\n Arg: Clone,\n\n Ret: core::iter::FromIterator<T>,\n\n{\n\n let read = |reader: &mut Reader, ro: &ReadOptions, args: Arg| {\n\n let mut value = T::read_options(reader, ro, args.clone())?;\n\n value.after_parse(reader, ro, args)?;\n\n Ok(value)\n\n };\n\n until_with(cond, read)\n\n}\n\n\n", "file_path": "binrw/src/helpers.rs", "rank": 41, "score": 
165258.33082530205 }, { "content": "#[proc_macro_attribute]\n\n#[cfg(not(tarpaulin_include))]\n\npub fn binread(_: TokenStream, input: TokenStream) -> TokenStream {\n\n binread::derive_from_attribute(parse_macro_input!(input as DeriveInput)).into()\n\n}\n\n\n", "file_path": "binrw_derive/src/lib.rs", "rank": 42, "score": 164785.6279048392 }, { "content": "fn get_passed_args(field: &StructField) -> Option<TokenStream> {\n\n let args = &field.args;\n\n match args {\n\n PassedArgs::Named(fields) => Some(if let Some(count) = &field.count {\n\n quote! {\n\n #ARGS_MACRO! { count: ((#count) as usize) #(, #fields)* }\n\n }\n\n } else {\n\n quote! {\n\n #ARGS_MACRO! { #(#fields),* }\n\n }\n\n }),\n\n PassedArgs::List(list) => Some(quote! { (#(#list,)*) }),\n\n PassedArgs::Tuple(tuple) => Some(tuple.clone()),\n\n PassedArgs::None => field\n\n .count\n\n .as_ref()\n\n .map(|count| quote! { #ARGS_MACRO! { count: ((#count) as usize) }}),\n\n }\n\n}\n", "file_path": "binrw_derive/src/codegen/write_options/struct_field.rs", "rank": 43, "score": 164173.25698815187 }, { "content": "/// Do the same as [until_eof](binrw::helpers::until_eof) with a custom parsing function for the inner type.\n\n///\n\n/// # Examples\n\n///\n\n/// This example shows how to read lists of two elements until the end of file using [until_eof_with](binrw::helpers::until_eof_with) coupled with [count](binrw::helpers::count).\n\n/// ```\n\n/// # use binrw::{BinRead, helpers::{until_eof, until_eof_with, count}, io::Cursor, BinReaderExt};\n\n/// # use std::collections::VecDeque;\n\n/// #[derive(BinRead)]\n\n/// struct EntireFile {\n\n/// #[br(parse_with = until_eof_with(count(2)))]\n\n/// data: VecDeque<VecDeque<u8>>,\n\n/// }\n\n///\n\n/// # let mut x = Cursor::new(b\"\\x01\\x02\\x03\\x04\");\n\n/// # let x: EntireFile = x.read_be().unwrap();\n\n/// # assert_eq!(x.data, &[[1, 2], [3, 4]]);\n\n/// ```\n\npub fn until_eof_with<Reader, T, Arg, ReadFn, Ret>(\n\n read: ReadFn,\n\n) -> impl Fn(&mut Reader, 
&ReadOptions, Arg) -> BinResult<Ret>\n\nwhere\n\n Reader: Read + Seek,\n\n Arg: Clone,\n\n ReadFn: Fn(&mut Reader, &ReadOptions, Arg) -> BinResult<T>,\n\n Ret: core::iter::FromIterator<T>,\n\n{\n\n move |reader, ro, args| {\n\n let mut last_error = false;\n\n core::iter::repeat_with(|| read(reader, ro, args.clone()))\n\n .take_while(|result| {\n\n !last_error\n\n && match result {\n\n Ok(_) => true,\n\n Err(e) if e.is_eof() => false,\n\n Err(_) => {\n\n last_error = true;\n\n true //keep the first error we get\n\n }\n\n }\n\n })\n\n .collect()\n\n }\n\n}\n\n\n", "file_path": "binrw/src/helpers.rs", "rank": 44, "score": 162510.36676196486 }, { "content": "/// Read items until a condition is met. The last item will *not* be included.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # use binrw::{BinRead, helpers::until_exclusive, io::Cursor, BinReaderExt};\n\n/// #[derive(BinRead)]\n\n/// struct NullTerminated {\n\n/// #[br(parse_with = until_exclusive(|&byte| byte == 0))]\n\n/// data: Vec<u8>,\n\n/// }\n\n///\n\n/// # let mut x = Cursor::new(b\"\\x01\\x02\\x03\\x04\\0\");\n\n/// # let x: NullTerminated = x.read_be().unwrap();\n\n/// # assert_eq!(x.data, &[1, 2, 3, 4]);\n\n/// ```\n\npub fn until_exclusive<Reader, T, CondFn, Arg, Ret>(\n\n cond: CondFn,\n\n) -> impl Fn(&mut Reader, &ReadOptions, Arg) -> BinResult<Ret>\n\nwhere\n\n T: BinRead<Args = Arg>,\n\n Reader: Read + Seek,\n\n CondFn: Fn(&T) -> bool,\n\n Arg: Clone,\n\n Ret: core::iter::FromIterator<T>,\n\n{\n\n let read = |reader: &mut Reader, ro: &ReadOptions, args: Arg| {\n\n let mut value = T::read_options(reader, ro, args.clone())?;\n\n value.after_parse(reader, ro, args)?;\n\n Ok(value)\n\n };\n\n until_exclusive_with(cond, read)\n\n}\n\n\n", "file_path": "binrw/src/helpers.rs", "rank": 45, "score": 162508.34422450728 }, { "content": "/// Do the same as [until](binrw::helpers::until) with a custom parsing function for the inner type.\n\n///\n\n/// # Examples\n\n///\n\n/// This example shows how to read 
lists of two elements until a condition is met using [until_with](binrw::helpers::until_with) coupled with [count](binrw::helpers::count).\n\n/// ```\n\n/// # use binrw::{BinRead, helpers::{until, until_with, count}, io::Cursor, BinReaderExt};\n\n/// # use std::collections::VecDeque;\n\n/// #[derive(BinRead)]\n\n/// struct NullTerminated {\n\n/// #[br(parse_with = until_with(|bytes| bytes == &[0, 0], count(2)))]\n\n/// data: VecDeque<VecDeque<u8>>,\n\n/// }\n\n///\n\n/// # let mut x = Cursor::new(b\"\\x01\\x02\\x03\\x04\\0\\0\");\n\n/// # let x: NullTerminated = x.read_be().unwrap();\n\n/// # assert_eq!(x.data, &[[1, 2], [3, 4], [0, 0]]);\n\n/// ```\n\npub fn until_with<Reader, T, CondFn, Arg, ReadFn, Ret>(\n\n cond: CondFn,\n\n read: ReadFn,\n\n) -> impl Fn(&mut Reader, &ReadOptions, Arg) -> BinResult<Ret>\n\nwhere\n\n Reader: Read + Seek,\n\n CondFn: Fn(&T) -> bool,\n\n Arg: Clone,\n\n ReadFn: Fn(&mut Reader, &ReadOptions, Arg) -> BinResult<T>,\n\n Ret: core::iter::FromIterator<T>,\n\n{\n\n move |reader, ro, args| {\n\n let mut last_cond = true;\n\n let mut last_error = false;\n\n core::iter::repeat_with(|| read(reader, ro, args.clone()))\n\n .take_while(|result| {\n\n let cont = last_cond && !last_error; //keep the first error we get\n\n if let Ok(val) = result {\n\n last_cond = !cond(val);\n\n } else {\n\n last_error = true;\n\n }\n\n cont\n\n })\n\n .collect()\n\n }\n\n}\n\n\n", "file_path": "binrw/src/helpers.rs", "rank": 46, "score": 162033.90830676898 }, { "content": "/// Read items until the end of the file is hit.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # use binrw::{BinRead, helpers::until_eof, io::Cursor, BinReaderExt};\n\n/// #[derive(BinRead)]\n\n/// struct EntireFile {\n\n/// #[br(parse_with = until_eof)]\n\n/// data: Vec<u8>,\n\n/// }\n\n///\n\n/// # let mut x = Cursor::new(b\"\\x01\\x02\\x03\\x04\");\n\n/// # let x: EntireFile = x.read_be().unwrap();\n\n/// # assert_eq!(x.data, &[1, 2, 3, 4]);\n\n/// ```\n\npub fn until_eof<Reader, T, 
Arg, Ret>(\n\n reader: &mut Reader,\n\n ro: &ReadOptions,\n\n args: Arg,\n\n) -> BinResult<Ret>\n\nwhere\n\n T: BinRead<Args = Arg>,\n\n Reader: Read + Seek,\n\n Arg: Clone,\n\n Ret: core::iter::FromIterator<T>,\n\n{\n\n let read = |reader: &mut Reader, ro: &ReadOptions, args: Arg| {\n\n let mut value = T::read_options(reader, ro, args.clone())?;\n\n value.after_parse(reader, ro, args)?;\n\n Ok(value)\n\n };\n\n until_eof_with(read)(reader, ro, args)\n\n}\n\n\n", "file_path": "binrw/src/helpers.rs", "rank": 47, "score": 161439.47738114945 }, { "content": "/// Do the same as [until_exclusive](binrw::helpers::until_exclusive) with a custom parsing function for the inner type.\n\n///\n\n/// # Examples\n\n///\n\n/// This example shows how to read lists of two elements until a condition is met using [until_exclusive_with](binrw::helpers::until_exclusive_with) coupled with [count](binrw::helpers::count).\n\n/// ```\n\n/// # use binrw::{BinRead, helpers::{until_exclusive, until_exclusive_with, count}, io::Cursor, BinReaderExt};\n\n/// # use std::collections::VecDeque;\n\n/// #[derive(BinRead)]\n\n/// struct NullTerminated {\n\n/// #[br(parse_with = until_exclusive_with(|bytes| bytes == &[0, 0], count(2)))]\n\n/// data: VecDeque<VecDeque<u8>>,\n\n/// }\n\n///\n\n/// # let mut x = Cursor::new(b\"\\x01\\x02\\x03\\x04\\0\\0\");\n\n/// # let x: NullTerminated = x.read_be().unwrap();\n\n/// # assert_eq!(x.data, &[[1, 2], [3, 4]]);\n\n/// ```\n\npub fn until_exclusive_with<Reader, T, CondFn, Arg, ReadFn, Ret>(\n\n cond: CondFn,\n\n read: ReadFn,\n\n) -> impl Fn(&mut Reader, &ReadOptions, Arg) -> BinResult<Ret>\n\nwhere\n\n Reader: Read + Seek,\n\n CondFn: Fn(&T) -> bool,\n\n Arg: Clone,\n\n ReadFn: Fn(&mut Reader, &ReadOptions, Arg) -> BinResult<T>,\n\n Ret: core::iter::FromIterator<T>,\n\n{\n\n move |reader, ro, args| {\n\n let mut last_error = false;\n\n core::iter::repeat_with(|| read(reader, ro, args.clone()))\n\n .take_while(|result| {\n\n !last_error\n\n && if let Ok(val) 
= result {\n\n !cond(val)\n\n } else {\n\n last_error = true;\n\n true //keep the first error we get\n\n }\n\n })\n\n .collect()\n\n }\n\n}\n\n\n", "file_path": "binrw/src/helpers.rs", "rank": 48, "score": 159649.8998074231 }, { "content": "#[test]\n\nfn custom_error_trait() {\n\n #[derive(Debug)]\n\n struct Oops;\n\n impl core::fmt::Display for Oops {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"Oops\")\n\n }\n\n }\n\n\n\n let err = Error::Custom {\n\n pos: 0,\n\n err: Box::new(Oops),\n\n };\n\n\n\n match err {\n\n Error::Custom { mut err, .. } => {\n\n assert!(err.is::<Oops>());\n\n assert!(!err.is::<i32>());\n\n assert!(err.downcast_ref::<Oops>().is_some());\n\n assert!(err.downcast_ref::<i32>().is_none());\n", "file_path": "binrw/tests/error.rs", "rank": 49, "score": 151741.4411057675 }, { "content": "#[test]\n\nfn enum_endianness() {\n\n #[derive(BinRead, Debug, Eq, PartialEq)]\n\n #[br(big)]\n\n enum Test {\n\n #[br(magic(1u16))]\n\n OneBig,\n\n #[br(little, magic(2u16))]\n\n TwoLittle { a: u16 },\n\n }\n\n\n\n assert_eq!(\n\n Test::read(&mut Cursor::new(b\"\\0\\x01\")).unwrap(),\n\n Test::OneBig\n\n );\n\n let error = Test::read(&mut Cursor::new(b\"\\x01\\0\")).expect_err(\"accepted bad data\");\n\n assert!(matches!(error, binrw::Error::EnumErrors { .. }));\n\n assert_eq!(\n\n Test::read(&mut Cursor::new(b\"\\x02\\0\\x03\\0\")).unwrap(),\n\n Test::TwoLittle { a: 3 }\n\n );\n\n let error = Test::read(&mut Cursor::new(b\"\\0\\x02\\x03\\0\")).expect_err(\"accepted bad data\");\n\n assert!(matches!(error, binrw::Error::EnumErrors { .. 
}));\n\n}\n\n\n", "file_path": "binrw/tests/derive/enum.rs", "rank": 50, "score": 151638.07384324243 }, { "content": "#[test]\n\nfn mixed_enum() {\n\n #[derive(BinRead, Debug, Eq, PartialEq)]\n\n #[br(big)]\n\n enum Test {\n\n #[br(magic(0u8))]\n\n Zero,\n\n #[br(magic(2u8))]\n\n Two { a: u16, b: u16 },\n\n }\n\n\n\n assert!(matches!(\n\n Test::read(&mut Cursor::new(b\"\\0\")).unwrap(),\n\n Test::Zero\n\n ));\n\n let error = Test::read(&mut Cursor::new(b\"\\x01\")).expect_err(\"accepted bad data\");\n\n assert!(matches!(error, binrw::Error::EnumErrors { .. }));\n\n let result = Test::read(&mut Cursor::new(b\"\\x02\\0\\x03\\0\\x04\")).unwrap();\n\n assert_eq!(result, Test::Two { a: 3, b: 4 });\n\n}\n", "file_path": "binrw/tests/derive/enum.rs", "rank": 51, "score": 151638.07384324243 }, { "content": "#[test]\n\nfn enum_magic() {\n\n #[derive(BinRead, Debug, PartialEq)]\n\n #[br(big, magic(0x1234u16))]\n\n enum Test {\n\n #[br(magic(0u8))]\n\n Zero { a: u16 },\n\n // Fail on the second field to actually test that a rewind happens\n\n // to the beginning of the enum data, not to before the enum magic\n\n #[br(magic(1u8))]\n\n One { a: u16 },\n\n }\n\n\n\n let result = Test::read(&mut Cursor::new(b\"\\x12\\x34\\x01\\x02\\x03\")).unwrap();\n\n assert_eq!(result, Test::One { a: 515 });\n\n}\n\n\n", "file_path": "binrw/tests/derive/enum.rs", "rank": 52, "score": 151638.07384324243 }, { "content": "#[test]\n\nfn enum_assert() {\n\n #[derive(BinRead, Debug, PartialEq)]\n\n #[br(assert(b == 1))]\n\n enum Test {\n\n A { a: u8, b: u8 },\n\n B { a: i16, b: u8 },\n\n }\n\n\n\n let mut data = Cursor::new(b\"\\xff\\xff\\x01\");\n\n let result = Test::read(&mut data).unwrap();\n\n assert_eq!(result, Test::B { a: -1, b: 1 });\n\n}\n\n\n", "file_path": "binrw/tests/derive/enum.rs", "rank": 53, "score": 151638.07384324243 }, { "content": "fn generate_unit_enum_magic(variants: &[UnitEnumField]) -> TokenStream {\n\n let branches = variants.iter().map(|variant| {\n\n let name = 
&variant.ident;\n\n let magic = variant.magic.as_ref().map(|magic| {\n\n let magic = magic.match_value();\n\n\n\n quote! {\n\n #WRITE_METHOD (\n\n &#magic,\n\n #WRITER,\n\n &#OPT,\n\n (),\n\n )?;\n\n }\n\n });\n\n\n\n quote! {\n\n Self::#name => {\n\n #magic\n\n }\n\n }\n\n });\n\n\n\n quote! {\n\n match self {\n\n #( #branches )*\n\n }\n\n }\n\n}\n", "file_path": "binrw_derive/src/codegen/write_options/enum.rs", "rank": 54, "score": 150585.8143941992 }, { "content": "#[test]\n\nfn test() {\n\n #[derive(PartialEq, Debug)]\n\n pub struct NotClone;\n\n\n\n #[derive(BinrwNamedArgs)]\n\n pub struct Test<T: Clone> {\n\n blah: u32,\n\n not_copy: String,\n\n not_clone: NotClone,\n\n #[named_args(default = 2)]\n\n default_val: u32,\n\n generic: T,\n\n }\n\n\n\n let x = Test::<String>::builder()\n\n .blah(3)\n\n .not_copy(\"a string here\".into())\n\n .not_clone(NotClone)\n\n .generic(\"generic string :o\".into())\n\n .finalize();\n\n\n\n assert_eq!(x.blah, 3);\n\n assert_eq!(x.not_copy, \"a string here\");\n\n assert_eq!(x.not_clone, NotClone);\n\n assert_eq!(x.generic, \"generic string :o\");\n\n assert_eq!(x.default_val, 2);\n\n}\n", "file_path": "binrw/tests/builder.rs", "rank": 55, "score": 150231.6961907447 }, { "content": "#[derive(Debug)]\n\nenum Repr {\n\n Simple(ErrorKind),\n\n}\n\n\n\n/// A list specifying general categories of I/O error.\n\n#[non_exhaustive]\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\npub enum ErrorKind {\n\n /// An entity was not found, often a file.\n\n NotFound,\n\n /// The operation lacked the necessary privileges to complete.\n\n PermissionDenied,\n\n /// The connection was refused by the remote server.\n\n ConnectionRefused,\n\n /// The connection was reset by the remote server.\n\n ConnectionReset,\n\n /// The connection was aborted (terminated) by the remote server.\n\n ConnectionAborted,\n\n /// The network operation failed because it was not connected yet.\n\n NotConnected,\n", "file_path": "binrw/src/io/no_std/error.rs", 
"rank": 56, "score": 149880.44791575635 }, { "content": "fn get_args_argument(args_var: &Option<Ident>) -> TokenStream {\n\n args_var.as_ref().map_or_else(\n\n || quote! { <_>::default() },\n\n |args_var| quote! { #args_var.clone() },\n\n )\n\n}\n\n\n", "file_path": "binrw_derive/src/codegen/read_options/struct.rs", "rank": 57, "score": 149799.25260150383 }, { "content": "#[derive(BinRead)]\n\n#[br(return_all_errors, return_unexpected_error)]\n\nenum Foo {\n\n A(i32),\n\n}\n\n\n", "file_path": "binrw/tests/ui/conflicting_keyword_enum_error_mode.rs", "rank": 58, "score": 149048.21107288133 }, { "content": "#[test]\n\nfn map_args() {\n\n #[derive(BinRead)]\n\n #[br(import(offset: u64))]\n\n #[br(map = |x: u64| Self(x + offset))]\n\n struct PlusOffset(u64);\n\n\n\n let mut data = Cursor::new(&[0u8; 8][..]);\n\n\n\n let PlusOffset(x) = data.read_be_args((20,)).unwrap();\n\n\n\n assert_eq!(x, 20);\n\n}\n\n\n", "file_path": "binrw/tests/derive/map_args.rs", "rank": 59, "score": 148419.75786901702 }, { "content": "#[test]\n\nfn enum_rewind_on_assert() {\n\n #[derive(BinRead, Debug)]\n\n #[br(assert(b == 1))]\n\n enum Test {\n\n A { a: u8, b: u8 },\n\n B { a: u16, b: u8 },\n\n }\n\n\n\n let mut data = Cursor::new(b\"\\0\\0\\0\\0\");\n\n let expected = data.seek(SeekFrom::Start(1)).unwrap();\n\n Test::read(&mut data).expect_err(\"accepted bad data\");\n\n assert_eq!(expected, data.seek(SeekFrom::Current(0)).unwrap());\n\n}\n\n\n", "file_path": "binrw/tests/derive/enum.rs", "rank": 60, "score": 148295.306938615 }, { "content": "#[test]\n\nfn enum_rewind_on_eof() {\n\n #[derive(BinRead, Debug)]\n\n enum Test {\n\n A {\n\n a: u8,\n\n // Fail on the second field to actually test that a rewind happens\n\n // to the beginning of the enum, not just the beginning of the field\n\n b: u16,\n\n },\n\n }\n\n\n\n let mut data = Cursor::new(b\"\\0\\0\\0\");\n\n let expected = data.seek(SeekFrom::Start(1)).unwrap();\n\n Test::read(&mut data).expect_err(\"accepted bad data\");\n\n 
assert_eq!(expected, data.seek(SeekFrom::Current(0)).unwrap());\n\n}\n\n\n", "file_path": "binrw/tests/derive/enum.rs", "rank": 61, "score": 148295.306938615 }, { "content": "#[test]\n\nfn cursor_test() {\n\n let data = &[1, 2, 3, 4, 5];\n\n let mut test = Cursor::new(data);\n\n let mut test2 = std::io::Cursor::new(data);\n\n\n\n assert_eq!(test.get_ref(), test2.get_ref());\n\n assert_eq!(test.get_mut(), test2.get_mut());\n\n assert_eq!(test.position(), test2.position());\n\n assert_eq!(test.position(), test2.position());\n\n test.set_position(5);\n\n test2.set_position(5);\n\n assert_eq!(test.position(), test2.position());\n\n test.set_position(5000);\n\n test2.set_position(5000);\n\n assert_eq!(test.position(), test2.position());\n\n assert_eq!(\n\n test.seek(SeekFrom::Start(0)).unwrap(),\n\n test2.seek(std::io::SeekFrom::Start(0)).unwrap(),\n\n );\n\n let mut buf = [0u8; 4];\n\n let mut buf2 = [0u8; 4];\n\n assert_eq!(test.read(&mut buf).unwrap(), test2.read(&mut buf2).unwrap());\n\n assert_eq!(buf, buf2);\n\n assert_eq!(test.read(&mut buf).unwrap(), test2.read(&mut buf2).unwrap());\n\n assert_eq!(buf, buf2);\n\n}\n", "file_path": "binrw/tests/io/no_std/cursor.rs", "rank": 62, "score": 148015.0900144083 }, { "content": "#[test]\n\nfn simple_write() {\n\n let mut x = Cursor::new(Vec::new());\n\n\n\n Test { x: 1, y: 2, z: 3 }\n\n .write_options(&mut x, &WriteOptions::new(Endian::Big), ())\n\n .unwrap();\n\n\n\n assert_eq!(&x.into_inner()[..], &[1, 0, 2, 0, 0, 0, 3]);\n\n}\n\n\n\nuse binrw::BinReaderExt;\n\n\n", "file_path": "binrw/tests/derive/write/simple.rs", "rank": 63, "score": 147446.32106554773 }, { "content": "#[test]\n\nfn write_endian() {\n\n let mut x = Cursor::new(Vec::new());\n\n\n\n TestEndian {\n\n x: 1,\n\n y: 2,\n\n z: 3,\n\n not_z: 3,\n\n }\n\n .write_options(&mut x, &WriteOptions::new(Endian::Big), ())\n\n .unwrap();\n\n\n\n assert_eq!(&x.into_inner()[..], &[0, 1, 2, 0, 0, 0, 0, 3, 3, 0, 0, 0]);\n\n}\n\n\n", "file_path": 
"binrw/tests/derive/write/endian.rs", "rank": 64, "score": 147446.32106554773 }, { "content": "#[test]\n\nfn enum_calc_temp_field() {\n\n #[binread]\n\n #[derive(Debug, Eq, PartialEq)]\n\n enum Test {\n\n #[br(magic(0u8))]\n\n Zero {\n\n #[br(temp)]\n\n a: u8,\n\n #[br(calc(a))]\n\n b: u8,\n\n },\n\n }\n\n\n\n let result = Test::read(&mut Cursor::new(b\"\\0\\x04\")).unwrap();\n\n // This also indirectly checks that `temp` is actually working since\n\n // compilation would fail if it weren’t due to the missing `a` property\n\n assert_eq!(result, Test::Zero { b: 4 });\n\n}\n\n\n", "file_path": "binrw/tests/derive/enum.rs", "rank": 65, "score": 145134.48064261043 }, { "content": "#[test]\n\nfn enum_rewind_on_variant_assert() {\n\n #[derive(BinRead, Debug)]\n\n enum Test {\n\n #[br(assert(b == 1))]\n\n A { a: u8, b: u8 },\n\n }\n\n\n\n let mut data = Cursor::new(b\"\\0\\0\");\n\n let expected = data.seek(SeekFrom::Start(1)).unwrap();\n\n Test::read(&mut data).expect_err(\"accepted bad data\");\n\n assert_eq!(expected, data.seek(SeekFrom::Current(0)).unwrap());\n\n}\n\n\n", "file_path": "binrw/tests/derive/enum.rs", "rank": 66, "score": 145134.48064261043 }, { "content": "#[test]\n\nfn unit_enum_repr() {\n\n #[derive(BinRead, Debug, Eq, PartialEq)]\n\n #[br(big, repr(i16))]\n\n enum Test {\n\n Neg1 = -1,\n\n Zero,\n\n Two = 2,\n\n }\n\n\n\n assert_eq!(\n\n Test::read(&mut Cursor::new(b\"\\xff\\xff\")).unwrap(),\n\n Test::Neg1\n\n );\n\n let error = Test::read(&mut Cursor::new(b\"\\0\\x01\")).expect_err(\"accepted bad data\");\n\n assert!(matches!(error, binrw::Error::NoVariantMatch { .. 
}));\n\n assert_eq!(Test::read(&mut Cursor::new(b\"\\0\\x02\")).unwrap(), Test::Two);\n\n}\n\n\n", "file_path": "binrw/tests/derive/unit_enum.rs", "rank": 67, "score": 145134.48064261043 }, { "content": "#[test]\n\nfn unit_enum_magic() {\n\n #[derive(BinRead, Debug, Eq, PartialEq)]\n\n #[br(big)]\n\n enum Test {\n\n // First variant not having any magic ensures that there is no reliance\n\n // internally on a specific variant having a magic\n\n #[allow(dead_code)]\n\n Zero,\n\n #[br(magic(1u16))]\n\n One,\n\n #[br(magic(2u16))]\n\n Two,\n\n }\n\n\n\n let error = Test::read(&mut Cursor::new(b\"\\0\\0\")).expect_err(\"accepted bad data\");\n\n assert!(matches!(error, binrw::Error::NoVariantMatch { .. }));\n\n assert_eq!(Test::read(&mut Cursor::new(b\"\\0\\x01\")).unwrap(), Test::One);\n\n assert_eq!(Test::read(&mut Cursor::new(b\"\\0\\x02\")).unwrap(), Test::Two);\n\n}\n\n\n", "file_path": "binrw/tests/derive/unit_enum.rs", "rank": 68, "score": 145134.48064261043 }, { "content": "#[test]\n\nfn show_backtrace_2() {\n\n #![allow(dead_code)]\n\n use binrw::{io::Cursor, BinRead, BinReaderExt};\n\n\n\n #[derive(BinRead)]\n\n struct InnerMostStruct {\n\n #[br(little)]\n\n len: u32,\n\n\n\n #[br(count = len, err_context(\"len = {}\", len))]\n\n items: Vec<u32>,\n\n }\n\n\n\n #[derive(BinRead)]\n\n struct MiddleStruct {\n\n #[br(little)]\n\n #[br(err_context(\"While parsing the innerest most struct\"))]\n\n inner: InnerMostStruct,\n\n }\n\n\n", "file_path": "binrw/tests/error.rs", "rank": 69, "score": 144891.99788094676 }, { "content": "#[test]\n\nfn show_backtrace() {\n\n #![allow(dead_code)]\n\n use binrw::{io::Cursor, BinRead, BinReaderExt};\n\n\n\n #[derive(BinRead)]\n\n struct InnerMostStruct {\n\n #[br(little)]\n\n len: u32,\n\n\n\n #[br(count = len)]\n\n items: Vec<u32>,\n\n }\n\n\n\n #[derive(BinRead)]\n\n enum MiddleEnum {\n\n OnlyOption {\n\n #[br(big)]\n\n #[br(assert(inner.len == 3))]\n\n inner: InnerMostStruct,\n\n },\n", "file_path": "binrw/tests/error.rs", 
"rank": 70, "score": 144891.99788094676 }, { "content": "#[test]\n\nfn calc_simple_write() {\n\n #[binwrite]\n\n struct Test {\n\n x: u8,\n\n\n\n #[bw(calc = Some(2))]\n\n y: Option<u16>,\n\n\n\n #[bw(calc = (*x as u32) + 2)]\n\n z: u32,\n\n }\n\n\n\n let mut x = Cursor::new(Vec::new());\n\n\n\n Test { x: 1 }\n\n .write_options(&mut x, &WriteOptions::new(Endian::Big), ())\n\n .unwrap();\n\n\n\n assert_eq!(&x.into_inner()[..], &[1, 0, 2, 0, 0, 0, 3]);\n\n}\n", "file_path": "binrw/tests/derive/write/calc.rs", "rank": 71, "score": 144302.29030000712 }, { "content": "#[test]\n\nfn unit_enum_magic_bytes() {\n\n #[derive(BinRead, Debug, Eq, PartialEq)]\n\n #[br(big)]\n\n enum Test {\n\n #[br(magic(b\"zero\"))]\n\n Zero,\n\n #[allow(dead_code)]\n\n One,\n\n #[br(magic(b\"two0\"))]\n\n Two,\n\n }\n\n\n\n assert_eq!(Test::read(&mut Cursor::new(b\"zero\")).unwrap(), Test::Zero);\n\n let error = Test::read(&mut Cursor::new(b\"oops\")).expect_err(\"accepted bad data\");\n\n assert!(matches!(error, binrw::Error::NoVariantMatch { .. 
}));\n\n assert_eq!(Test::read(&mut Cursor::new(b\"two0\")).unwrap(), Test::Two);\n\n}\n\n\n", "file_path": "binrw/tests/derive/unit_enum.rs", "rank": 72, "score": 142140.69757157678 }, { "content": "#[test]\n\nfn unit_enum_rewind_on_no_variant() {\n\n #[derive(BinRead, Debug)]\n\n #[br(repr(u8))]\n\n enum Test {\n\n A = 1,\n\n }\n\n\n\n let mut data = Cursor::new(b\"\\0\\0\");\n\n let expected = data.seek(SeekFrom::Start(1)).unwrap();\n\n Test::read(&mut data).expect_err(\"accepted bad data\");\n\n assert_eq!(expected, data.seek(SeekFrom::Current(0)).unwrap());\n\n}\n", "file_path": "binrw/tests/derive/unit_enum.rs", "rank": 73, "score": 142140.69757157678 }, { "content": "#[test]\n\nfn unit_enum_rewind_on_eof() {\n\n #[derive(BinRead, Debug)]\n\n #[br(repr(u16))]\n\n enum Test {\n\n A,\n\n }\n\n\n\n let mut data = Cursor::new(b\"\\0\\0\");\n\n let expected = data.seek(SeekFrom::Start(1)).unwrap();\n\n Test::read(&mut data).expect_err(\"accepted bad data\");\n\n assert_eq!(expected, data.seek(SeekFrom::Current(0)).unwrap());\n\n}\n\n\n", "file_path": "binrw/tests/derive/unit_enum.rs", "rank": 74, "score": 142140.69757157678 }, { "content": "#[test]\n\nfn pad_size_to() {\n\n #[derive(BinRead, Debug, PartialEq)]\n\n struct Test {\n\n #[br(pad_before = 1, pad_size_to = 2)]\n\n a: u8,\n\n b: u8,\n\n }\n\n\n\n let result = Test::read(&mut Cursor::new(b\"\\0\\x01\\0\\x02\")).unwrap();\n\n assert_eq!(result, Test { a: 1, b: 2 });\n\n}\n\n\n", "file_path": "binrw/tests/derive/struct.rs", "rank": 75, "score": 141450.96167436632 }, { "content": "#[test]\n\nfn restore_position_writing() {\n\n #[derive(BinWrite)]\n\n struct Test {\n\n #[bw(restore_position)]\n\n x: u32,\n\n y: u8,\n\n }\n\n\n\n let mut x = Vec::new();\n\n {\n\n let mut x = Cursor::new(&mut x);\n\n Test {\n\n x: 0xffff_ffff,\n\n y: 0,\n\n }\n\n .write_to(&mut x)\n\n .unwrap();\n\n }\n\n assert_eq!(&x[..], b\"\\0\\xff\\xff\\xff\");\n\n}\n", "file_path": "binrw/tests/derive/write/restore_position.rs", "rank": 
76, "score": 141324.6511172506 }, { "content": "#[test]\n\nfn interrupt_once() {\n\n struct InterruptReader(bool);\n\n\n\n impl Read for InterruptReader {\n\n fn read(&mut self, buf: &mut [u8]) -> Result<usize> {\n\n if self.0 {\n\n self.0 = false;\n\n Err(Error::from(ErrorKind::Interrupted))\n\n } else {\n\n buf.fill(0);\n\n Ok(buf.len())\n\n }\n\n }\n\n }\n\n\n\n let mut x = InterruptReader(true);\n\n let mut out = [1, 2, 3, 4];\n\n x.read_exact(&mut out).unwrap();\n\n\n\n assert_eq!(out, [0, 0, 0, 0]);\n\n\n\n let mut x = InterruptReader(true).bytes();\n\n assert_eq!(x.next().unwrap().unwrap(), 0);\n\n assert_eq!(x.next().unwrap().unwrap(), 0);\n\n assert_eq!(x.next().unwrap().unwrap(), 0);\n\n assert_eq!(x.next().unwrap().unwrap(), 0);\n\n}\n\n\n", "file_path": "binrw/tests/io/no_std/mod.rs", "rank": 77, "score": 141200.61931528166 }, { "content": "#[test]\n\nfn bytes() {\n\n let mut cursor = MalfunctioningEddie::new(b\"\\0\\x01\\x02\\x03\\x04\\x05\");\n\n {\n\n let mut bytes = cursor.by_ref().bytes();\n\n assert!(matches!(bytes.next(), Some(Ok(0))));\n\n assert!(matches!(bytes.next(), Some(Ok(1))));\n\n }\n\n\n\n // Interrupted error should cause a retry\n\n cursor.trigger_non_fatal_error();\n\n {\n\n let mut bytes = cursor.by_ref().bytes();\n\n assert!(matches!(bytes.next(), Some(Ok(2))));\n\n }\n\n\n\n // Reads through Bytes should have advanced the underlying stream\n\n let mut raw_read_data = [0u8; 2];\n\n assert_eq!(cursor.read(&mut raw_read_data).unwrap(), 2);\n\n assert_eq!(raw_read_data, [3, 4]);\n\n\n\n // Errors other than Interrupted should be returned\n\n cursor.trigger_fatal_error();\n\n let mut bytes = cursor.bytes();\n\n assert_eq!(\n\n bytes.next().unwrap().unwrap_err().kind(),\n\n ErrorKind::BrokenPipe\n\n );\n\n}\n\n\n", "file_path": "binrw/tests/io/no_std/mod.rs", "rank": 78, "score": 141200.61931528166 }, { "content": "#[test]\n\nfn take() {\n\n const IN: &[u8] = b\"ABCD\";\n\n const LIMIT: usize = 2;\n\n let x = Cursor::new(IN);\n\n let 
mut out = Vec::new();\n\n assert_eq!(x.take(LIMIT as u64).read_to_end(&mut out).unwrap(), LIMIT);\n\n assert_eq!(out, &IN[..LIMIT]);\n\n\n\n // Test error handling\n\n struct ReturnError(Option<Error>);\n\n\n\n impl Read for ReturnError {\n\n fn read(&mut self, _buf: &mut [u8]) -> Result<usize> {\n\n Err(self.0.take().unwrap())\n\n }\n\n }\n\n\n\n let x = ReturnError(Some(Error::from(ErrorKind::ConnectionRefused)));\n\n let mut out = Vec::new();\n\n assert_eq!(\n", "file_path": "binrw/tests/io/no_std/mod.rs", "rank": 79, "score": 141200.61931528166 }, { "content": "#[test]\n\nfn args_same_name() {\n\n #[derive(BinRead, Debug)]\n\n #[br(import { y: u16, x: u8 })]\n\n struct Test {\n\n #[br(calc(x))]\n\n z: u8,\n\n\n\n #[br(calc(y))]\n\n z2: u16,\n\n }\n\n\n\n #[derive(BinRead, Debug)]\n\n struct Test2 {\n\n #[br(calc(3))]\n\n x: u8,\n\n\n\n #[br(args { x, y: 3 })]\n\n y: Test,\n\n }\n\n\n\n let result = Test2::read(&mut Cursor::new(b\"\")).unwrap();\n\n assert_eq!(result.y.z, 3);\n\n}\n\n\n", "file_path": "binrw/tests/derive/struct.rs", "rank": 80, "score": 141108.68433198697 }, { "content": "fn main() {}\n", "file_path": "binrw/tests/ui/invalid_if_args.rs", "rank": 81, "score": 141103.76654519376 }, { "content": "fn generate_unit_enum_repr(repr: &TokenStream, variants: &[UnitEnumField]) -> TokenStream {\n\n let branches = variants.iter().map(|variant| {\n\n let name = &variant.ident;\n\n quote! {\n\n Self::#name => Self::#name\n\n }\n\n });\n\n\n\n quote! 
{\n\n #WRITE_METHOD (\n\n &(match self {\n\n #(#branches),*\n\n } as #repr),\n\n #WRITER,\n\n &#OPT,\n\n (),\n\n )?;\n\n }\n\n}\n\n\n", "file_path": "binrw_derive/src/codegen/write_options/enum.rs", "rank": 82, "score": 140086.84481849315 }, { "content": "#[test]\n\nfn unit_enum_magic_pre_assert() {\n\n #[derive(BinRead, Debug, Eq, PartialEq)]\n\n #[br(big, import { allow_zero: bool, forbid_zero: bool })]\n\n enum Test {\n\n #[br(magic(0u16), pre_assert(allow_zero))]\n\n // This redundant check is intentional and tests that assertions are\n\n // combining properly\n\n #[br(pre_assert(!forbid_zero))]\n\n Zero,\n\n #[br(magic(0u16))]\n\n OtherZero,\n\n }\n\n\n\n assert_eq!(\n\n Test::read_args(\n\n &mut Cursor::new(b\"\\0\\0\"),\n\n <Test as BinRead>::Args::builder()\n\n .allow_zero(true)\n\n .forbid_zero(false)\n\n .finalize()\n", "file_path": "binrw/tests/derive/unit_enum.rs", "rank": 83, "score": 139300.67853006115 }, { "content": "fn generate_variant_impl(en: &Enum, variant: &EnumVariant) -> TokenStream {\n\n // TODO: Kind of expensive since the enum is containing all the fields\n\n // and this is a clone.\n\n let mut new_enum = en.with_variant(variant);\n\n // Drop imports, we already have them in scope\n\n new_enum.imports = Imports::None;\n\n let input = Input::Enum(new_enum);\n\n\n\n match variant {\n\n EnumVariant::Variant { ident, options } => StructGenerator::new(&input, options)\n\n .read_fields(\n\n None,\n\n Some(&format!(\"{}::{}\", en.ident.as_ref().unwrap(), &ident)),\n\n )\n\n .add_assertions(get_assertions(&en.assertions))\n\n .return_value(Some(ident))\n\n .finish(),\n\n\n\n EnumVariant::Unit(options) => generate_unit_struct(&input, None, Some(&options.ident)),\n\n }\n\n}\n", "file_path": "binrw_derive/src/codegen/read_options/enum.rs", "rank": 84, "score": 139241.10288076708 }, { "content": "#[derive(BinRead)]\n\nstruct Try<BR: BinRead<Args = ()>>(#[br(try)] Option<BR>);\n\n\n", "file_path": "binrw/tests/after_parse_test.rs", "rank": 85, "score": 
138988.5653246636 }, { "content": "fn get_passed_args(field: &StructField) -> Option<TokenStream> {\n\n let args = &field.args;\n\n match args {\n\n PassedArgs::Named(fields) => Some(if let Some(count) = &field.count {\n\n quote! {\n\n #ARGS_MACRO! { count: ((#count) as usize) #(, #fields)* }\n\n }\n\n } else {\n\n quote! {\n\n #ARGS_MACRO! { #(#fields),* }\n\n }\n\n }),\n\n PassedArgs::List(list) => Some(quote! { (#(#list,)*) }),\n\n PassedArgs::Tuple(tuple) => Some(tuple.clone()),\n\n PassedArgs::None => field\n\n .count\n\n .as_ref()\n\n .map(|count| quote! { #ARGS_MACRO! { count: ((#count) as usize) }}),\n\n }\n\n}\n\n\n", "file_path": "binrw_derive/src/codegen/read_options/struct.rs", "rank": 86, "score": 138098.0813613511 }, { "content": "#[test]\n\nfn assert_custom_err() {\n\n #[derive(Debug)]\n\n struct Oops(u8);\n\n impl core::fmt::Display for Oops {\n\n fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {\n\n write!(f, \"oops!\")\n\n }\n\n }\n\n\n\n #[derive(BinRead, Debug)]\n\n struct Test {\n\n #[br(assert(a == 1, Oops(a)))]\n\n a: u8,\n\n }\n\n\n\n Test::read(&mut Cursor::new(\"\\x01\")).unwrap();\n\n let error = Test::read(&mut Cursor::new(\"\\x02\")).expect_err(\"accepted bad data\");\n\n assert_eq!(format!(\"{}\", error), \"oops! 
at 0x0\");\n\n let error = error.custom_err::<Oops>().expect(\"bad error type\");\n\n assert_eq!(error.0, 2);\n\n}\n\n\n", "file_path": "binrw/tests/derive/struct.rs", "rank": 87, "score": 137858.81761373347 }, { "content": "fn main() {}\n", "file_path": "binrw/tests/ui/conflicting_keyword_bool.rs", "rank": 88, "score": 137848.19957809246 }, { "content": "fn main() {}\n", "file_path": "binrw/tests/ui/derive_binread_with_temp.rs", "rank": 89, "score": 137675.50406620366 }, { "content": "// Since binread re-exports std types when the std feature is turned on, make\n\n// sure we are not accidentally testing the std implementation\n\nfn sanity_check() {\n\n use core::any::TypeId;\n\n assert!(TypeId::of::<Cursor<[u8; 0]>>() != TypeId::of::<std::io::Cursor<[u8; 0]>>());\n\n assert!(TypeId::of::<Error>() != TypeId::of::<std::io::Error>());\n\n assert!(TypeId::of::<ErrorKind>() != TypeId::of::<std::io::ErrorKind>());\n\n assert!(TypeId::of::<dyn Read>() != TypeId::of::<dyn std::io::Read>());\n\n assert!(TypeId::of::<Result<()>>() != TypeId::of::<std::io::Result<()>>());\n\n}\n\n\n", "file_path": "binrw/tests/io/no_std/mod.rs", "rank": 90, "score": 137645.9822706421 }, { "content": "#[test]\n\nfn read_exact() {\n\n let mut cursor = MalfunctioningEddie::new(b\"\\0\\x01\\x02\\x03\\x04\\x05\");\n\n\n\n let mut raw_read_data = [0u8; 2];\n\n cursor.read_exact(&mut raw_read_data).unwrap();\n\n assert_eq!(raw_read_data, [0, 1]);\n\n\n\n // Interrupted error should cause a retry\n\n cursor.trigger_non_fatal_error();\n\n cursor.read_exact(&mut raw_read_data).unwrap();\n\n assert_eq!(raw_read_data, [2, 3]);\n\n\n\n // Errors other than Interrupted should be returned\n\n cursor.trigger_fatal_error();\n\n assert_eq!(\n\n cursor.read_exact(&mut raw_read_data).unwrap_err().kind(),\n\n ErrorKind::BrokenPipe\n\n );\n\n\n\n // Read through a mutable reference should work as if it were directly on\n", "file_path": "binrw/tests/io/no_std/mod.rs", "rank": 91, "score": 137633.50180072192 }, { 
"content": "#[test]\n\nfn read_to_end() {\n\n // Test happy path\n\n const IN: &[u8] = b\"ABCD\";\n\n let mut x = Cursor::new(IN);\n\n let mut out = Vec::new();\n\n assert_eq!(x.read_to_end(&mut out).unwrap(), IN.len());\n\n assert_eq!(out, IN);\n\n\n\n struct InterruptReader(bool);\n\n\n\n impl Read for InterruptReader {\n\n fn read(&mut self, buf: &mut [u8]) -> Result<usize> {\n\n if self.0 {\n\n self.0 = false;\n\n Err(Error::from(ErrorKind::Interrupted))\n\n } else {\n\n buf.fill(0);\n\n Ok(buf.len())\n\n }\n\n }\n", "file_path": "binrw/tests/io/no_std/mod.rs", "rank": 92, "score": 137633.50180072192 }, { "content": "#[test]\n\nfn parse_with_default_args() {\n\n #[derive(Clone)]\n\n struct Args(u8);\n\n impl Default for Args {\n\n fn default() -> Self {\n\n Self(42)\n\n }\n\n }\n\n\n\n #[derive(BinRead, Debug, PartialEq)]\n\n #[br(import { in_a: u8 })]\n\n struct InnerImport {\n\n #[br(calc(in_a))]\n\n a: u8,\n\n b: u8,\n\n }\n\n\n\n #[derive(BinRead, Debug, PartialEq)]\n\n #[br(import_raw(args: Args))]\n\n struct InnerImportTuple {\n", "file_path": "binrw/tests/derive/struct.rs", "rank": 93, "score": 137544.4471109184 }, { "content": "#[test]\n\n#[should_panic]\n\nfn map_assert() {\n\n #[derive(BinRead, Debug, Eq, PartialEq)]\n\n #[br(assert(false), map(|_: u8| Test {}))]\n\n struct Test {}\n\n\n\n Test::read(&mut Cursor::new(b\"a\")).unwrap();\n\n}\n\n\n", "file_path": "binrw/tests/derive/map_args.rs", "rank": 94, "score": 137544.35489474013 }, { "content": "fn main() {}\n", "file_path": "binrw/tests/ui/invalid_assert_args.rs", "rank": 95, "score": 137539.5293241252 }, { "content": "fn main() {}\n", "file_path": "binrw/tests/ui/non_blocking_errors.rs", "rank": 96, "score": 137521.5456559285 }, { "content": "fn main() {}\n", "file_path": "binrw/tests/ui/invalid_keyword_enum.rs", "rank": 97, "score": 137442.9346814658 }, { "content": "fn main() {}\n", "file_path": "binrw/tests/ui/unsupported_type_enum.rs", "rank": 98, "score": 137442.9346814658 }, { "content": 
"fn main() {}\n", "file_path": "binrw/tests/ui/conflicting_keyword_option.rs", "rank": 99, "score": 137168.2589114267 } ]
Rust
parser/src/ast_var.rs
patrickf2000/lila
3aad462580fea1f14d9aeb81d192fe1ee3e30e6d
use crate::ast; use crate::ast::*; use crate::lex::{Token, Lex}; use crate::syntax::ErrorManager; use crate::ast_builder::AstBuilder; use crate::ast_utils::*; pub fn build_var_dec(builder : &mut AstBuilder, name : String) -> bool { let mut var_dec = ast::create_stmt(AstStmtType::VarDec, &mut builder.scanner); var_dec.name = name; let mut extra_names : Vec<String> = Vec::new(); let mut token = builder.get_token(); while token == Token::Comma { token = builder.get_token(); match token { Token::Id(ref val) => extra_names.push(val.to_string()), _ => { builder.syntax_error("Expected variable name.".to_string()); return false; }, } token = builder.get_token(); } if token != Token::Colon { builder.syntax_error("Expected \':\' after variable name.".to_string()); return false; } let mut is_array = false; let mut dtype : DataType; let mut sub_type = DataType::None; token = builder.get_token(); match token { Token::Byte => dtype = DataType::Byte, Token::UByte => dtype = DataType::UByte, Token::Short => dtype = DataType::Short, Token::UShort => dtype = DataType::UShort, Token::Int => dtype = DataType::Int, Token::UInt => dtype = DataType::UInt, Token::Int64 => dtype = DataType::Int64, Token::UInt64 => dtype = DataType::UInt64, Token::Float => dtype = DataType::Float, Token::Double => dtype = DataType::Double, Token::Char => dtype = DataType::Char, Token::TStr => dtype = DataType::Str, Token::Id(ref val) => { if !ast::enum_exists(&mut builder.tree, val.to_string()) { builder.syntax_error("Invalid enumeration.".to_string()); return false; } dtype = DataType::Enum(val.to_string()); }, _ => { builder.syntax_error("Invalid type.".to_string()); return false; }, } token = builder.get_token(); match token { Token::Assign => {}, Token::LBracket => { is_array = true; if !build_args(builder, &mut var_dec, Token::RBracket) { return false; } }, _ => { builder.syntax_error("Expected assignment operator.".to_string()); return false; }, } if is_array { sub_type = dtype; dtype = DataType::Ptr; 
if builder.get_token() != Token::Semicolon { builder.syntax_error("Expected terminator.".to_string()); return false; } } else { if !build_args(builder, &mut var_dec, Token::Semicolon) { return false; } var_dec.args = check_operations(&var_dec.args, builder.keep_postfix); } var_dec.data_type = dtype; var_dec.sub_type = sub_type; builder.add_stmt(var_dec.clone()); for n in extra_names.iter() { var_dec.name = n.to_string(); builder.add_stmt(var_dec.clone()); } true } fn build_var_assign_stmt(builder : &mut AstBuilder, var_assign : &mut AstStmt, name : String, assign_op : Token) -> bool { let mut check_end = false; match assign_op { Token::OpInc | Token::OpDec => { let mut id_arg = ast::create_arg(AstArgType::Id); if var_assign.stmt_type == AstStmtType::ArrayAssign { id_arg.sub_args = var_assign.sub_args.clone(); } id_arg.str_val = name; var_assign.args.push(id_arg); if assign_op == Token::OpInc { let op_arg = ast::create_arg(AstArgType::OpAdd); var_assign.args.push(op_arg); } else { let op_arg = ast::create_arg(AstArgType::OpSub); var_assign.args.push(op_arg); } let num_arg = ast::create_int(1); var_assign.args.push(num_arg); check_end = true; }, Token::AddAssign | Token::SubAssign | Token::MulAssign | Token::DivAssign | Token::ModAssign => { let mut id_arg = ast::create_arg(AstArgType::Id); if var_assign.stmt_type == AstStmtType::ArrayAssign { id_arg.sub_args = var_assign.sub_args.clone(); } id_arg.str_val = name; var_assign.args.push(id_arg); if assign_op == Token::AddAssign { let op_arg = ast::create_arg(AstArgType::OpAdd); var_assign.args.push(op_arg); } else if assign_op == Token::SubAssign { let op_arg = ast::create_arg(AstArgType::OpSub); var_assign.args.push(op_arg); } else if assign_op == Token::MulAssign { let op_arg = ast::create_arg(AstArgType::OpMul); var_assign.args.push(op_arg); } else if assign_op == Token::DivAssign { let op_arg = ast::create_arg(AstArgType::OpDiv); var_assign.args.push(op_arg); } else if assign_op == Token::ModAssign { let op_arg = 
ast::create_arg(AstArgType::OpMod); var_assign.args.push(op_arg); } if !build_args(builder, var_assign, Token::Semicolon) { return false; } }, Token::Assign => { if !build_args(builder, var_assign, Token::Semicolon) { return false; } }, _ => { builder.syntax_error("Expected \'=\' in array assignment.".to_string()); return false; }, } if check_end { if builder.get_token() != Token::Semicolon { builder.syntax_error("Expected terminator.".to_string()); return false; } } true } pub fn build_var_assign(builder : &mut AstBuilder, name : String, assign_op : Token) -> bool { let mut var_assign = ast::create_stmt(AstStmtType::VarAssign, &mut builder.scanner); var_assign.name = name.clone(); if !build_var_assign_stmt(builder, &mut var_assign, name, assign_op) { return false; } var_assign.args = check_operations(&var_assign.args, builder.keep_postfix); builder.add_stmt(var_assign); true } pub fn build_array_assign(builder : &mut AstBuilder, id_val : String) -> bool { let mut array_assign = ast::create_stmt(AstStmtType::ArrayAssign, &mut builder.scanner); array_assign.name = id_val.clone(); if !build_args(builder, &mut array_assign, Token::RBracket) { return false; } let assign_op = builder.get_token(); if !build_var_assign_stmt(builder, &mut array_assign, id_val, assign_op) { return false; } array_assign.args = check_operations(&array_assign.args, builder.keep_postfix); builder.add_stmt(array_assign); true } pub fn build_sizeof(scanner : &mut Lex, syntax : &mut ErrorManager) -> AstArg { let mut sizeof = ast::create_arg(AstArgType::Sizeof); let token1 = scanner.get_token(); let token2 = scanner.get_token(); let token3 = scanner.get_token(); if token1 != Token::LParen || token3 != Token::RParen { syntax.syntax_error(scanner, "Sizeof begins with \'(\' and ends with \')\'".to_string()); return ast::create_arg(AstArgType::None); } match token2 { Token::Id(ref val) => { let mut arg = ast::create_arg(AstArgType::Id); arg.str_val = val.to_string(); sizeof.sub_args.push(arg); }, _ => 
{ syntax.syntax_error(scanner, "Expected variable name.".to_string()); return ast::create_arg(AstArgType::None); }, } sizeof } pub fn build_addrof(scanner : &mut Lex, syntax : &mut ErrorManager) -> AstArg { let mut addrof = ast::create_arg(AstArgType::AddrOf); let token = scanner.get_token(); match token { Token::Id(ref val) => { let mut arg = ast::create_arg(AstArgType::Id); arg.str_val = val.to_string(); addrof.sub_args.push(arg); }, _ => { syntax.syntax_error(scanner, "Expected variable name.".to_string()); return ast::create_arg(AstArgType::None); }, } addrof }
use crate::ast; use crate::ast::*; use crate::lex::{Token, Lex}; use crate::syntax::ErrorManager; use crate::ast_builder::AstBuilder; use crate::ast_utils::*; pub fn build_var_dec(builder : &mut AstBuilder, name : String) -> bool { let mut var_dec = ast::create_stmt(AstStmtType::VarDec, &mut builder.scanner); var_dec.name = name; let mut extra_names : Vec<String> = Vec::new(); let mut token = builder.get_token(); while token == Token::Comma { token = builder.get_token(); match token { Token::Id(ref val) => extra_names.push(val.to_string()), _ => { builder.syntax_error("Expected variable name.".to_string()); return false; }, } token = builder.get_token(); } if token != Token::Colon { builder.syntax_error("Expected \':\' after variable name.".to_string()); return false; } let mut is_array = false; let mut dtype : DataType; let mut sub_type = DataType::None; token = builder.get_token(); match token { Token::Byte => dtype = DataType::Byte, Token::UByte => dtype = DataType::UByte, Token::Short => dtype = DataType::Short, Token::UShort => dtype = DataType::UShort, Token::Int => dtype = DataType::Int, Token::UInt => dtype = DataType::UInt, Token::Int64 => dtype = DataType::Int64, Token::UInt64 => dtype = DataType::UInt64, Token::Float => dtype = DataType::Float, Token::Double => dtype = DataType::Double, Token::Char => dtype = DataType::Char, Token::TStr => dtype = DataType::Str, Token::Id(ref val) => { if !ast::enum_exists(&mut builder.tree, val.to_string()) { builder.syntax_error("Invalid enumeration.".to_string()); return false; } dtype = DataType::Enum(val.to_string()); }, _ => { builder.syntax_error("Invalid type.".to_string()); return false; }, } token = builder.get_token(); match token { Token::Assign => {}, Token::LBracket => { is_array = true; if !build_args(builder, &mut var_dec, Token::RBracket) { return false; } }, _ => { builder.syntax_error("Expected assignment operator.".to_string()); return false; }, } if is_array { sub_type = dtype; dtype = DataType::Ptr; 
if builder.get_token() != Token::Semicolon { builder.syntax_error("Expected terminator.".to_string()); return false; } } else { if !build_args(builder, &mut var_dec, Token::Semicolon) { return false; } var_dec.args = check_operations(&var_dec.args, builder.keep_postfix); } var_dec.data_type = dtype; var_dec.sub_type = sub_type; builder.add_stmt(var_dec.clone()); for n in extra_names.iter() { var_dec.name = n.to_string(); builder.add_stmt(var_dec.clone()); } true } fn build_var_assign_stmt(builder : &mut AstBuilder, var_assign : &mut AstStmt, name : String, assign_op : Token) -> bool { let mut check_end = false; match assign_op { Token::OpInc | Token::OpDec => { let mut id_arg = ast::create_arg(AstArgType::Id); if var_assign.stmt_type == AstStmtType::ArrayAssign { id_arg.sub_args = var_assign.sub_args.clone(); } id_arg.str_val = name; var_assign.args.push(id_arg); if assign_op == Token::OpInc { let op_arg = ast::create_arg(AstArgType::OpAdd); var_assign.args.push(op_arg); } else { let op_arg = ast::create_arg(AstArgType::OpSub); var_assign.args.push(op_ar
pub fn build_var_assign(builder : &mut AstBuilder, name : String, assign_op : Token) -> bool { let mut var_assign = ast::create_stmt(AstStmtType::VarAssign, &mut builder.scanner); var_assign.name = name.clone(); if !build_var_assign_stmt(builder, &mut var_assign, name, assign_op) { return false; } var_assign.args = check_operations(&var_assign.args, builder.keep_postfix); builder.add_stmt(var_assign); true } pub fn build_array_assign(builder : &mut AstBuilder, id_val : String) -> bool { let mut array_assign = ast::create_stmt(AstStmtType::ArrayAssign, &mut builder.scanner); array_assign.name = id_val.clone(); if !build_args(builder, &mut array_assign, Token::RBracket) { return false; } let assign_op = builder.get_token(); if !build_var_assign_stmt(builder, &mut array_assign, id_val, assign_op) { return false; } array_assign.args = check_operations(&array_assign.args, builder.keep_postfix); builder.add_stmt(array_assign); true } pub fn build_sizeof(scanner : &mut Lex, syntax : &mut ErrorManager) -> AstArg { let mut sizeof = ast::create_arg(AstArgType::Sizeof); let token1 = scanner.get_token(); let token2 = scanner.get_token(); let token3 = scanner.get_token(); if token1 != Token::LParen || token3 != Token::RParen { syntax.syntax_error(scanner, "Sizeof begins with \'(\' and ends with \')\'".to_string()); return ast::create_arg(AstArgType::None); } match token2 { Token::Id(ref val) => { let mut arg = ast::create_arg(AstArgType::Id); arg.str_val = val.to_string(); sizeof.sub_args.push(arg); }, _ => { syntax.syntax_error(scanner, "Expected variable name.".to_string()); return ast::create_arg(AstArgType::None); }, } sizeof } pub fn build_addrof(scanner : &mut Lex, syntax : &mut ErrorManager) -> AstArg { let mut addrof = ast::create_arg(AstArgType::AddrOf); let token = scanner.get_token(); match token { Token::Id(ref val) => { let mut arg = ast::create_arg(AstArgType::Id); arg.str_val = val.to_string(); addrof.sub_args.push(arg); }, _ => { syntax.syntax_error(scanner, 
"Expected variable name.".to_string()); return ast::create_arg(AstArgType::None); }, } addrof }
g); } let num_arg = ast::create_int(1); var_assign.args.push(num_arg); check_end = true; }, Token::AddAssign | Token::SubAssign | Token::MulAssign | Token::DivAssign | Token::ModAssign => { let mut id_arg = ast::create_arg(AstArgType::Id); if var_assign.stmt_type == AstStmtType::ArrayAssign { id_arg.sub_args = var_assign.sub_args.clone(); } id_arg.str_val = name; var_assign.args.push(id_arg); if assign_op == Token::AddAssign { let op_arg = ast::create_arg(AstArgType::OpAdd); var_assign.args.push(op_arg); } else if assign_op == Token::SubAssign { let op_arg = ast::create_arg(AstArgType::OpSub); var_assign.args.push(op_arg); } else if assign_op == Token::MulAssign { let op_arg = ast::create_arg(AstArgType::OpMul); var_assign.args.push(op_arg); } else if assign_op == Token::DivAssign { let op_arg = ast::create_arg(AstArgType::OpDiv); var_assign.args.push(op_arg); } else if assign_op == Token::ModAssign { let op_arg = ast::create_arg(AstArgType::OpMod); var_assign.args.push(op_arg); } if !build_args(builder, var_assign, Token::Semicolon) { return false; } }, Token::Assign => { if !build_args(builder, var_assign, Token::Semicolon) { return false; } }, _ => { builder.syntax_error("Expected \'=\' in array assignment.".to_string()); return false; }, } if check_end { if builder.get_token() != Token::Semicolon { builder.syntax_error("Expected terminator.".to_string()); return false; } } true }
function_block-function_prefixed
[]
Rust
stronghold-wasm/src/wasm_structs/transaction.rs
stronghold-financial/stronghold
7461cbdc6c67459517f10205bd918111ad109100
use wasm_bindgen::prelude::*; use stronghold_rust::sapling_bls12::{ Key, ProposedTransaction, PublicAddress, SimpleTransaction, Transaction, SAPLING, }; use super::errors::*; use super::note::WasmNote; use super::panic_hook; use super::spend_proof::WasmSpendProof; use super::witness::JsWitness; #[wasm_bindgen] pub struct WasmTransactionPosted { transaction: Transaction, } #[wasm_bindgen] impl WasmTransactionPosted { #[wasm_bindgen] pub fn deserialize(bytes: &[u8]) -> Result<WasmTransactionPosted, JsValue> { panic_hook::set_once(); let mut cursor: std::io::Cursor<&[u8]> = std::io::Cursor::new(bytes); let transaction = Transaction::read(SAPLING.clone(), &mut cursor).map_err(WasmTransactionError)?; Ok(WasmTransactionPosted { transaction }) } #[wasm_bindgen] pub fn serialize(&self) -> Result<Vec<u8>, JsValue> { let mut cursor: std::io::Cursor<Vec<u8>> = std::io::Cursor::new(vec![]); self.transaction.write(&mut cursor).map_err(WasmIoError)?; Ok(cursor.into_inner()) } #[wasm_bindgen] pub fn verify(&self) -> bool { match self.transaction.verify() { Ok(_) => true, Err(_e) => false, } } #[wasm_bindgen(getter, js_name = "notesLength")] pub fn notes_length(&self) -> usize { self.transaction.receipts().len() } #[wasm_bindgen(js_name = "getNote")] pub fn get_note(&self, index: usize) -> Result<Vec<u8>, JsValue> { let proof = &self.transaction.receipts()[index]; let mut cursor: Vec<u8> = Vec::with_capacity(275); proof .merkle_note() .write(&mut cursor) .map_err(WasmIoError)?; Ok(cursor) } #[wasm_bindgen(getter, js_name = "spendsLength")] pub fn spends_length(&self) -> usize { self.transaction.spends().len() } #[wasm_bindgen(js_name = "getSpend")] pub fn get_spend(&self, index: usize) -> WasmSpendProof { let proof = &self.transaction.spends()[index]; WasmSpendProof { proof: proof.clone(), } } #[wasm_bindgen(getter, js_name = "transactionFee")] pub fn transaction_fee(&self) -> i64 { self.transaction.transaction_fee() } #[wasm_bindgen(getter, js_name = "transactionSignature")] pub 
fn transaction_signature(&self) -> Result<Vec<u8>, JsValue> { let mut serialized_signature = vec![]; self.transaction .binding_signature() .write(&mut serialized_signature) .map_err(WasmIoError)?; Ok(serialized_signature) } #[wasm_bindgen(getter, js_name = "transactionHash")] pub fn transaction_hash(&self) -> Vec<u8> { self.transaction.transaction_signature_hash().to_vec() } } #[wasm_bindgen] pub struct WasmTransaction { transaction: ProposedTransaction, } #[wasm_bindgen] impl WasmTransaction { #[wasm_bindgen(constructor)] pub fn new() -> WasmTransaction { panic_hook::set_once(); WasmTransaction { transaction: ProposedTransaction::new(SAPLING.clone()), } } #[wasm_bindgen] pub fn receive(&mut self, spender_hex_key: &str, note: &WasmNote) -> Result<String, JsValue> { let spender_key = Key::from_hex(SAPLING.clone(), spender_hex_key).map_err(WasmSaplingKeyError)?; self.transaction .receive(&spender_key, &note.note) .map_err(WasmSaplingProofError)?; Ok("".to_string()) } #[wasm_bindgen] pub fn spend( &mut self, spender_hex_key: &str, note: &WasmNote, witness: &JsWitness, ) -> Result<String, JsValue> { let spender_key = Key::from_hex(SAPLING.clone(), spender_hex_key).map_err(WasmSaplingKeyError)?; self.transaction .spend(spender_key, &note.note, witness) .map_err(WasmSaplingProofError)?; Ok("".to_string()) } #[wasm_bindgen] pub fn post_miners_fee(&mut self) -> Result<WasmTransactionPosted, JsValue> { let transaction = self .transaction .post_miners_fee() .map_err(WasmTransactionError)?; Ok(WasmTransactionPosted { transaction }) } #[wasm_bindgen] pub fn post( &mut self, spender_hex_key: &str, change_goes_to: Option<String>, intended_transaction_fee: u64, ) -> Result<WasmTransactionPosted, JsValue> { let spender_key = Key::from_hex(SAPLING.clone(), spender_hex_key).map_err(WasmSaplingKeyError)?; let change_key = match change_goes_to { Some(s) => { Some(PublicAddress::from_hex(SAPLING.clone(), &s).map_err(WasmSaplingKeyError)?) 
} None => None, }; let posted_transaction = self .transaction .post(&spender_key, change_key, intended_transaction_fee) .map_err(WasmTransactionError)?; Ok(WasmTransactionPosted { transaction: posted_transaction, }) } } impl Default for WasmTransaction { fn default() -> Self { WasmTransaction::new() } } #[wasm_bindgen] pub struct WasmSimpleTransaction { transaction: SimpleTransaction, } #[wasm_bindgen] impl WasmSimpleTransaction { #[wasm_bindgen(constructor)] pub fn new( spender_hex_key: &str, intended_transaction_fee: u64, ) -> Result<WasmSimpleTransaction, JsValue> { panic_hook::set_once(); let spender_key = Key::from_hex(SAPLING.clone(), spender_hex_key).map_err(WasmSaplingKeyError)?; Ok(WasmSimpleTransaction { transaction: SimpleTransaction::new( SAPLING.clone(), spender_key, intended_transaction_fee, ), }) } #[wasm_bindgen] pub fn spend(&mut self, note: &WasmNote, witness: &JsWitness) -> Result<String, JsValue> { self.transaction .spend(&note.note, witness) .map_err(WasmSaplingProofError)?; Ok("".to_string()) } #[wasm_bindgen] pub fn receive(&mut self, note: &WasmNote) -> Result<String, JsValue> { self.transaction .receive(&note.note) .map_err(WasmSaplingProofError)?; Ok("".to_string()) } #[wasm_bindgen] pub fn post(&mut self) -> Result<WasmTransactionPosted, JsValue> { let posted_transaction = self.transaction.post().map_err(WasmTransactionError)?; Ok(WasmTransactionPosted { transaction: posted_transaction, }) } }
use wasm_bindgen::prelude::*; use stronghold_rust::sapling_bls12::{ Key, ProposedTransaction, PublicAddress, SimpleTransaction, Transaction, SAPLING, }; use super::errors::*; use super::note::WasmNote; use super::panic_hook; use super::spend_proof::WasmSpendProof; use super::witness::JsWitness; #[wasm_bindgen] pub struct WasmTransactionPosted { transaction: Transaction, } #[wasm_bindgen] impl WasmTransactionPosted { #[wasm_bindgen] pub fn deserialize(bytes: &[u8]) -> Result<WasmTransactionPosted, JsValue> { panic_hook::set_once(); let mut cursor: std::io::Cursor<&[u8]> = std::io::Cursor::new(bytes); let transaction = Transaction::read(SAPLING.clone(), &mut cursor).map_err(WasmTransactionError)?; Ok(WasmTransactionPosted { transaction }) } #[wasm_bindgen] pub fn serialize(&self) -> Result<Vec<u8>, JsValue> { let mut cursor: std::io::Cursor<Vec<u8>> = std::io::Cursor::new(vec![]); self.transaction.write(&mut cursor).map_err(WasmIoError)?; Ok(cursor.into_inner()) } #[wasm_bindgen] pub fn verify(&self) -> bool { match self.transaction.verify() { Ok(_) => true, Err(_e) => false, } } #[wasm_bindgen(getter, js_name = "notesLength")] pub fn notes_length(&self) -> usize { self.transaction.receipts().len() } #[wasm_bindgen(js_name = "getNote")] pub fn get_note(&self, index: usize) -> Result<Vec<u8>, JsValue> {
#[wasm_bindgen(getter, js_name = "spendsLength")] pub fn spends_length(&self) -> usize { self.transaction.spends().len() } #[wasm_bindgen(js_name = "getSpend")] pub fn get_spend(&self, index: usize) -> WasmSpendProof { let proof = &self.transaction.spends()[index]; WasmSpendProof { proof: proof.clone(), } } #[wasm_bindgen(getter, js_name = "transactionFee")] pub fn transaction_fee(&self) -> i64 { self.transaction.transaction_fee() } #[wasm_bindgen(getter, js_name = "transactionSignature")] pub fn transaction_signature(&self) -> Result<Vec<u8>, JsValue> { let mut serialized_signature = vec![]; self.transaction .binding_signature() .write(&mut serialized_signature) .map_err(WasmIoError)?; Ok(serialized_signature) } #[wasm_bindgen(getter, js_name = "transactionHash")] pub fn transaction_hash(&self) -> Vec<u8> { self.transaction.transaction_signature_hash().to_vec() } } #[wasm_bindgen] pub struct WasmTransaction { transaction: ProposedTransaction, } #[wasm_bindgen] impl WasmTransaction { #[wasm_bindgen(constructor)] pub fn new() -> WasmTransaction { panic_hook::set_once(); WasmTransaction { transaction: ProposedTransaction::new(SAPLING.clone()), } } #[wasm_bindgen] pub fn receive(&mut self, spender_hex_key: &str, note: &WasmNote) -> Result<String, JsValue> { let spender_key = Key::from_hex(SAPLING.clone(), spender_hex_key).map_err(WasmSaplingKeyError)?; self.transaction .receive(&spender_key, &note.note) .map_err(WasmSaplingProofError)?; Ok("".to_string()) } #[wasm_bindgen] pub fn spend( &mut self, spender_hex_key: &str, note: &WasmNote, witness: &JsWitness, ) -> Result<String, JsValue> { let spender_key = Key::from_hex(SAPLING.clone(), spender_hex_key).map_err(WasmSaplingKeyError)?; self.transaction .spend(spender_key, &note.note, witness) .map_err(WasmSaplingProofError)?; Ok("".to_string()) } #[wasm_bindgen] pub fn post_miners_fee(&mut self) -> Result<WasmTransactionPosted, JsValue> { let transaction = self .transaction .post_miners_fee() 
.map_err(WasmTransactionError)?; Ok(WasmTransactionPosted { transaction }) } #[wasm_bindgen] pub fn post( &mut self, spender_hex_key: &str, change_goes_to: Option<String>, intended_transaction_fee: u64, ) -> Result<WasmTransactionPosted, JsValue> { let spender_key = Key::from_hex(SAPLING.clone(), spender_hex_key).map_err(WasmSaplingKeyError)?; let change_key = match change_goes_to { Some(s) => { Some(PublicAddress::from_hex(SAPLING.clone(), &s).map_err(WasmSaplingKeyError)?) } None => None, }; let posted_transaction = self .transaction .post(&spender_key, change_key, intended_transaction_fee) .map_err(WasmTransactionError)?; Ok(WasmTransactionPosted { transaction: posted_transaction, }) } } impl Default for WasmTransaction { fn default() -> Self { WasmTransaction::new() } } #[wasm_bindgen] pub struct WasmSimpleTransaction { transaction: SimpleTransaction, } #[wasm_bindgen] impl WasmSimpleTransaction { #[wasm_bindgen(constructor)] pub fn new( spender_hex_key: &str, intended_transaction_fee: u64, ) -> Result<WasmSimpleTransaction, JsValue> { panic_hook::set_once(); let spender_key = Key::from_hex(SAPLING.clone(), spender_hex_key).map_err(WasmSaplingKeyError)?; Ok(WasmSimpleTransaction { transaction: SimpleTransaction::new( SAPLING.clone(), spender_key, intended_transaction_fee, ), }) } #[wasm_bindgen] pub fn spend(&mut self, note: &WasmNote, witness: &JsWitness) -> Result<String, JsValue> { self.transaction .spend(&note.note, witness) .map_err(WasmSaplingProofError)?; Ok("".to_string()) } #[wasm_bindgen] pub fn receive(&mut self, note: &WasmNote) -> Result<String, JsValue> { self.transaction .receive(&note.note) .map_err(WasmSaplingProofError)?; Ok("".to_string()) } #[wasm_bindgen] pub fn post(&mut self) -> Result<WasmTransactionPosted, JsValue> { let posted_transaction = self.transaction.post().map_err(WasmTransactionError)?; Ok(WasmTransactionPosted { transaction: posted_transaction, }) } }
let proof = &self.transaction.receipts()[index]; let mut cursor: Vec<u8> = Vec::with_capacity(275); proof .merkle_note() .write(&mut cursor) .map_err(WasmIoError)?; Ok(cursor) }
function_block-function_prefix_line
[ { "content": "#[wasm_bindgen(js_name = \"generateKey\")]\n\npub fn create_key_to_js() -> Key {\n\n let hasher = sapling_bls12::SAPLING.clone();\n\n let sapling_key = sapling_bls12::Key::generate_key(hasher);\n\n\n\n Key {\n\n spending_key: sapling_key.hex_spending_key(),\n\n incoming_view_key: sapling_key.incoming_view_key().hex_key(),\n\n outgoing_view_key: sapling_key.outgoing_view_key().hex_key(),\n\n public_address: sapling_key.generate_public_address().hex_public_address(),\n\n }\n\n}\n\n\n", "file_path": "stronghold-wasm/src/lib.rs", "rank": 0, "score": 208132.7685324565 }, { "content": "#[wasm_bindgen(catch, js_name = \"generateNewPublicAddress\")]\n\npub fn create_new_public_key_to_js(private_key: &str) -> Result<Key, JsValue> {\n\n let hasher = sapling_bls12::SAPLING.clone();\n\n let sapling_key =\n\n sapling_bls12::Key::from_hex(hasher, private_key).map_err(WasmSaplingKeyError)?;\n\n\n\n Ok(Key {\n\n spending_key: sapling_key.hex_spending_key(),\n\n incoming_view_key: sapling_key.incoming_view_key().hex_key(),\n\n outgoing_view_key: sapling_key.outgoing_view_key().hex_key(),\n\n public_address: sapling_key.generate_public_address().hex_public_address(),\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_create_new_public_key_to_js() {\n\n let key1 = create_key_to_js();\n\n let key2 = create_new_public_key_to_js(&key1.spending_key).unwrap();\n\n\n\n assert_eq!(key1.spending_key(), key2.spending_key());\n\n assert_eq!(key1.incoming_view_key(), key2.incoming_view_key());\n\n assert_eq!(key1.outgoing_view_key(), key2.outgoing_view_key());\n\n\n\n assert_ne!(key1.public_address(), key2.public_address());\n\n }\n\n}\n", "file_path": "stronghold-wasm/src/lib.rs", "rank": 1, "score": 174854.34234841392 }, { "content": "#[inline]\n\npub fn set_once() {\n\n use std::sync::Once;\n\n static SET_HOOK: Once = Once::new();\n\n SET_HOOK.call_once(|| {\n\n panic::set_hook(Box::new(hook));\n\n });\n\n}\n", "file_path": 
"stronghold-wasm/src/panic_hook/mod.rs", "rank": 2, "score": 140642.20364184317 }, { "content": "#[test]\n\nfn test_transaction() {\n\n let sapling = sapling_bls12::SAPLING.clone();\n\n let mut transaction = ProposedTransaction::new(sapling.clone());\n\n let spender_key: SaplingKey<Bls12> = SaplingKey::generate_key(sapling.clone());\n\n let receiver_key: SaplingKey<Bls12> = SaplingKey::generate_key(sapling.clone());\n\n let in_note = Note::new(\n\n sapling.clone(),\n\n spender_key.generate_public_address(),\n\n 42,\n\n Memo([0; 32]),\n\n );\n\n let out_note = Note::new(\n\n sapling.clone(),\n\n receiver_key.generate_public_address(),\n\n 40,\n\n Memo([0; 32]),\n\n );\n\n let in_note2 = Note::new(\n\n sapling.clone(),\n\n spender_key.generate_public_address(),\n", "file_path": "stronghold-rust/src/transaction/tests.rs", "rank": 3, "score": 136887.68762050703 }, { "content": "#[test]\n\nfn test_transaction_signature() {\n\n let sapling = sapling_bls12::SAPLING.clone();\n\n let spender_key = SaplingKey::generate_key(sapling.clone());\n\n let receiver_key = SaplingKey::generate_key(sapling.clone());\n\n let spender_address = spender_key.generate_public_address();\n\n let receiver_address = receiver_key.generate_public_address();\n\n\n\n let mut transaction = SimpleTransaction::new(sapling.clone(), spender_key, 0);\n\n let in_note = Note::new(sapling.clone(), spender_address.clone(), 42, Memo([0; 32]));\n\n let out_note = Note::new(sapling.clone(), receiver_address.clone(), 41, Memo([0; 32]));\n\n let witness = make_fake_witness(sapling.clone(), &in_note);\n\n\n\n transaction\n\n .spend(&in_note, &witness)\n\n .expect(\"should be able to spend note\");\n\n\n\n transaction\n\n .receive(&out_note)\n\n .expect(\"Should be able to receive note\");\n\n\n", "file_path": "stronghold-rust/src/transaction/tests.rs", "rank": 4, "score": 135263.13960744842 }, { "content": "#[test]\n\nfn test_simple_transaction() {\n\n let sapling = sapling_bls12::SAPLING.clone();\n\n let 
spender_key = SaplingKey::generate_key(sapling.clone());\n\n let receiver_key = SaplingKey::generate_key(sapling.clone());\n\n let spender_address = spender_key.generate_public_address();\n\n let receiver_address = receiver_key.generate_public_address();\n\n\n\n let mut transaction = SimpleTransaction::new(sapling.clone(), spender_key, 0);\n\n let in_note = Note::new(sapling.clone(), spender_address.clone(), 42, Memo([0; 32]));\n\n let out_note = Note::new(sapling.clone(), receiver_address.clone(), 41, Memo([0; 32]));\n\n let witness = make_fake_witness(sapling.clone(), &in_note);\n\n\n\n transaction\n\n .spend(&in_note, &witness)\n\n .expect(\"should be able to spend note\");\n\n\n\n transaction\n\n .receive(&out_note)\n\n .expect(\"Should be able to receive note\");\n\n\n\n let public_transaction = transaction\n\n .post()\n\n .expect(\"should be able to post transaction\");\n\n\n\n public_transaction\n\n .verify()\n\n .expect(\"should be able to verify transaction\")\n\n}\n\n\n", "file_path": "stronghold-rust/src/transaction/tests.rs", "rank": 5, "score": 135263.13960744842 }, { "content": "#[test]\n\nfn test_key_generation_and_construction() {\n\n let sapling = &*sapling_bls12::SAPLING;\n\n let key: SaplingKey<Bls12> = SaplingKey::generate_key(sapling.clone());\n\n let key2: SaplingKey<Bls12> = SaplingKey::new(sapling.clone(), key.spending_key).unwrap();\n\n assert!(key.spending_key != [0; 32]);\n\n assert!(key2.spending_key == key.spending_key);\n\n assert!(key2.incoming_viewing_key.view_key == key.incoming_viewing_key.view_key);\n\n\n\n // should not fail or infinite loop\n\n key2.generate_public_address();\n\n}\n\n\n", "file_path": "stronghold-rust/src/keys/test.rs", "rank": 6, "score": 134056.56948929216 }, { "content": "#[test]\n\nfn test_diffie_hellman_shared_key() {\n\n let sapling = &*sapling_bls12::SAPLING;\n\n let key1: SaplingKey<Bls12> = SaplingKey::generate_key(sapling.clone());\n\n\n\n // second address has to use the same diversifier for the keys 
to be valid\n\n let address1 = key1.generate_public_address();\n\n let (secret_key, public_key) = address1.generate_diffie_hellman_keys(&sapling.jubjub);\n\n let shared_secret1 = shared_secret(\n\n &sapling.jubjub,\n\n &secret_key,\n\n &address1.transmission_key,\n\n &public_key,\n\n );\n\n let shared_secret2 = shared_secret(\n\n &sapling.jubjub,\n\n &key1.incoming_viewing_key.view_key,\n\n &public_key,\n\n &public_key,\n\n );\n\n if shared_secret1 != shared_secret2 {\n\n assert!(false, \"secrets don't match\");\n\n }\n\n}\n\n\n", "file_path": "stronghold-rust/src/keys/test.rs", "rank": 7, "score": 132526.01590191724 }, { "content": "/// Load a sapling object configured to a BLS12 jubjub curve. This is currently\n\n/// the only pairing for which a jubjub curve has been defined, and is the\n\n/// default implementation.\n\n///\n\n/// Provided as a convenience method so clients don't have to depend\n\n/// explicitly on zcash_primitives just to define a JubjubBls12 point.\n\nfn load() -> Sapling {\n\n Sapling::load(zcash_primitives::jubjub::JubjubBls12::new())\n\n}\n", "file_path": "stronghold-rust/src/sapling_bls12.rs", "rank": 8, "score": 131734.4154107584 }, { "content": "#[test]\n\nfn test_serialization() {\n\n let sapling = &*sapling_bls12::SAPLING;\n\n let key: SaplingKey<Bls12> = SaplingKey::generate_key(sapling.clone());\n\n let mut serialized_key = [0; 32];\n\n key.write(&mut serialized_key[..])\n\n .expect(\"Should be able to serialize key\");\n\n assert_ne!(serialized_key, [0; 32]);\n\n\n\n let read_back_key: SaplingKey<Bls12> =\n\n SaplingKey::read(sapling.clone(), &mut serialized_key.as_ref())\n\n .expect(\"Should be able to load key from valid bytes\");\n\n assert_eq!(\n\n read_back_key.incoming_view_key().view_key,\n\n key.incoming_view_key().view_key\n\n );\n\n\n\n let public_address = key.generate_public_address();\n\n let mut serialized_address = [0; 43];\n\n public_address\n\n .write(&mut serialized_address[..])\n", "file_path": 
"stronghold-rust/src/keys/test.rs", "rank": 9, "score": 126314.00664334507 }, { "content": "#[test]\n\nfn test_hex_conversion() {\n\n let sapling = &*sapling_bls12::SAPLING;\n\n let key: SaplingKey<Bls12> = SaplingKey::generate_key(sapling.clone());\n\n\n\n let hex = key.hex_spending_key();\n\n assert_eq!(hex.len(), 64);\n\n let second_key: SaplingKey<Bls12> = SaplingKey::from_hex(sapling.clone(), &hex).unwrap();\n\n assert_eq!(second_key.spending_key, key.spending_key);\n\n\n\n let address = key.generate_public_address();\n\n let hex = address.hex_public_address();\n\n assert_eq!(hex.len(), 86);\n\n let second_address = PublicAddress::from_hex(sapling.clone(), &hex).unwrap();\n\n assert_eq!(second_address, address);\n\n\n\n assert!(PublicAddress::from_hex(sapling.clone(), \"invalid\").is_err());\n\n}\n", "file_path": "stronghold-rust/src/keys/test.rs", "rank": 10, "score": 124455.38151963548 }, { "content": "#[test]\n\nfn test_miners_fee() {\n\n let sapling = &*sapling_bls12::SAPLING;\n\n let mut transaction = ProposedTransaction::new(sapling.clone());\n\n let receiver_key: SaplingKey<Bls12> = SaplingKey::generate_key(sapling.clone());\n\n let out_note = Note::new(\n\n sapling.clone(),\n\n receiver_key.generate_public_address(),\n\n 42,\n\n Memo([0; 32]),\n\n );\n\n transaction\n\n .receive(&receiver_key, &out_note)\n\n .expect(\"It's a valid note\");\n\n let posted_transaction = transaction\n\n .post_miners_fee()\n\n .expect(\"it is a valid miner's fee\");\n\n assert_eq!(posted_transaction.transaction_fee, -42);\n\n assert_eq!(\n\n posted_transaction\n\n .iter_receipts()\n\n .next()\n\n .unwrap()\n\n .merkle_note\n\n .note_encryption_keys[0..30],\n\n NOTE_ENCRYPTION_MINER_KEYS[0..30]\n\n );\n\n}\n\n\n", "file_path": "stronghold-rust/src/transaction/tests.rs", "rank": 11, "score": 124153.27135455348 }, { "content": "pub fn hook(info: &panic::PanicInfo) {\n\n hook_impl(info);\n\n}\n\n\n", "file_path": "stronghold-wasm/src/panic_hook/mod.rs", "rank": 12, "score": 
116991.81237266191 }, { "content": "/// Calculate the key used to encrypt the shared keys for a ReceiptProof or\n\n/// ReceiptParams.\n\n///\n\n/// The shared keys are encrypted using the outgoing viewing key for the\n\n/// spender (the person creating the note owned by the receiver). This gets\n\n/// combined with hashes of the receipt values to make a key unique to, and\n\n/// signed by, the receipt.\n\n///\n\n/// Naming is getting a bit far-fetched here because it's the keys used to\n\n/// encrypt other keys. Keys, all the way down!\n\nfn calculate_key_for_encryption_keys<J: JubjubEngine + pairing::MultiMillerLoop>(\n\n outgoing_view_key: &OutgoingViewKey<J>,\n\n value_commitment: &edwards::Point<J, Unknown>,\n\n note_commitment: &J::Fr,\n\n public_key: &edwards::Point<J, PrimeOrder>,\n\n) -> [u8; 32] {\n\n let mut key_input = [0u8; 128];\n\n key_input[0..32].copy_from_slice(&outgoing_view_key.view_key);\n\n value_commitment.write(&mut key_input[32..64]).unwrap();\n\n key_input[64..96].copy_from_slice(note_commitment.to_repr().as_ref());\n\n public_key.write(&mut key_input[96..128]).unwrap();\n\n\n\n Blake2b::new()\n\n .hash_length(32)\n\n .personal(SHARED_KEY_PERSONALIZATION)\n\n .hash(&key_input)\n\n .as_bytes()\n\n .try_into()\n\n .expect(\"has has incorrect length\")\n\n}\n", "file_path": "stronghold-rust/src/merkle_note.rs", "rank": 13, "score": 110019.04120546329 }, { "content": " async addTransactions(\n\n blockData: Block,\n\n transactions: TransactionAPIType[],\n\n ): Promise<Transaction[]> {\n\n const transactionsToInsert: Transaction[] = transactions.map((transaction) => {\n\n const metadata = transaction.metadata as {\n\n size: number\n\n fee: number\n\n timestamp: number\n\n notes: Note[]\n\n spends: Spend[]\n\n }\n\n\n\n return {\n\n hash: transaction.transaction_identifier.hash,\n\n fee: metadata.fee || 0,\n\n size: metadata.size || 0,\n\n timestamp: blockData.timestamp,\n\n block: blockData,\n\n notes: metadata.notes,\n\n spends: 
metadata.spends,\n\n } as Transaction\n\n })\n\n\n\n return await this.transactionsRepository.save(transactionsToInsert)\n", "file_path": "stronghold-rosetta-api/src/indexer/Indexer.ts", "rank": 14, "score": 105771.69277561406 }, { "content": "fn hook_impl(info: &panic::PanicInfo) {\n\n let e = Error::new();\n\n let stack = e.stack();\n\n\n\n let er = js_sys::Error::new(&info.to_string());\n\n let _ = js_sys::Reflect::set(&er, &\"stack\".into(), &stack.into());\n\n\n\n wasm_bindgen::throw_val(er.into());\n\n}\n\n\n", "file_path": "stronghold-wasm/src/panic_hook/mod.rs", "rank": 15, "score": 105629.16143113622 }, { "content": "// Convert the integer value to a point on the Jubjub curve, accounting for\n\n// negative values\n\nfn value_balance_to_point<J: JubjubEngine + pairing::MultiMillerLoop>(\n\n value: i64,\n\n params: &J::Params,\n\n) -> Result<edwards::Point<J, Unknown>, TransactionError> {\n\n // Can only construct edwards point on positive numbers, so need to\n\n // add and possibly negate later\n\n let is_negative = value.is_negative();\n\n let abs = match value.checked_abs() {\n\n Some(a) => a as u64,\n\n None => return Err(TransactionError::IllegalValueError),\n\n };\n\n\n\n let mut value_balance = params\n\n .generator(FixedGenerators::ValueCommitmentValue)\n\n .mul(J::Fs::from(abs), params);\n\n\n\n if is_negative {\n\n value_balance = value_balance.negate();\n\n }\n\n\n\n Ok(value_balance.into())\n\n}\n", "file_path": "stronghold-rust/src/transaction/mod.rs", "rank": 28, "score": 98684.87187452515 }, { "content": " notesLength(): number {\n\n return this.withReference((t) => t.notesLength)\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 29, "score": 94164.02716876942 }, { "content": "export const RpcUseIpcFlagKey = 'rpc.ipc'\n", "file_path": "stronghold-cli/src/flags.ts", "rank": 30, "score": 88563.8943384201 }, { "content": "export const RpcUseTcpFlagKey = 'rpc.tcp'\n", "file_path": "stronghold-cli/src/flags.ts", "rank": 31, 
"score": 88563.8943384201 }, { "content": "/* This Source Code Form is subject to the terms of the Mozilla Public\n\n * License, v. 2.0. If a copy of the MPL was not distributed with this\n\n * file, You can obtain one at https://mozilla.org/MPL/2.0/. */\n\n\n\nexport * from './sendTransaction'\n", "file_path": "stronghold/src/rpc/routes/transactions/index.ts", "rank": 32, "score": 81097.00869822822 }, { "content": " notesLength(): number {\n\n return this.elements.length\n", "file_path": "stronghold/src/testUtilities/fake/strategy.ts", "rank": 33, "score": 80798.5384381325 }, { "content": "export class Indexer {\n\n indexer: Indexer | null = null\n\n blockRepository: Repository<Block>\n\n configRepository: Repository<Config>\n\n transactionsRepository: Repository<Transaction>\n\n\n\n config: IndexerConfigOptions = {\n\n lastBlockHash: null,\n\n }\n\n\n\n constructor() {\n\n // Each entity has its own repository which handles all operations with its entity.\n\n // When dealing with entities, Repositories are more convenient to use than EntityManagers:\n\n this.configRepository = getManager().getRepository(Config)\n\n this.blockRepository = getManager().getRepository(Block)\n\n this.transactionsRepository = getManager().getRepository(Transaction)\n\n }\n\n\n\n async init(): Promise<Indexer> {\n\n const indexer = new Indexer()\n\n await indexer.loadConfig()\n\n\n\n return indexer\n\n }\n\n\n\n async setConfig(key: keyof IndexerConfigOptions, value: string): Promise<void> {\n\n if (!(key in this.config)) {\n\n throw 'Invalid key'\n\n }\n\n\n\n const config = await this.configRepository.findOne({ key })\n\n if (!config) {\n\n throw 'Key not found'\n\n }\n\n config.value = value\n\n await this.configRepository.save(config)\n\n }\n\n\n\n async loadConfig(): Promise<void> {\n\n const configs = await this.configRepository.find()\n\n\n\n if (!configs || configs.length <= 0) {\n\n return\n\n }\n\n\n\n for (const config of configs) {\n\n if (config.key in this.config) {\n\n 
this.config = {\n\n ...this.config,\n\n [config.key]: config.value,\n\n }\n\n }\n\n }\n\n }\n\n\n\n async getBlock(sequence?: number, hash?: string): Promise<Block | null> {\n\n const blockData = await this.blockRepository.findOne({\n\n where: {\n\n hash,\n\n sequence: sequence,\n\n },\n\n })\n\n\n\n return blockData || null\n\n }\n\n\n\n async deleteAtSequence(sequence: number): Promise<void> {\n\n await this.blockRepository.delete({ sequence: sequence })\n\n }\n\n\n\n async deleteAllFromSequence(sequence: number): Promise<void> {\n\n await this.blockRepository\n\n .createQueryBuilder()\n\n .delete()\n\n .where('sequence > :sequence', { sequence: sequence })\n\n .execute()\n\n }\n\n\n\n async addBlock(block: GetBlockResponse): Promise<Block> {\n\n const metadata = block.metadata as { size: number; difficulty: number }\n\n\n\n const blockToInsert = new Block()\n\n blockToInsert.hash = block.blockIdentifier.hash\n\n blockToInsert.sequence = Number(block.blockIdentifier.index)\n\n blockToInsert.previousBlockHash = block.parentBlockIdentifier.hash\n\n blockToInsert.size = metadata.size || 0\n\n blockToInsert.difficulty = metadata.difficulty || 0\n\n blockToInsert.timestamp = block.timestamp\n\n blockToInsert.transactionsCount = block.transactions.length\n\n\n\n const blockData = await this.blockRepository.save(blockToInsert)\n\n\n\n await this.addTransactions(blockData, block.transactions)\n\n\n\n return blockData\n\n }\n\n\n\n async addTransactions(\n\n blockData: Block,\n\n transactions: TransactionAPIType[],\n\n ): Promise<Transaction[]> {\n\n const transactionsToInsert: Transaction[] = transactions.map((transaction) => {\n\n const metadata = transaction.metadata as {\n\n size: number\n\n fee: number\n\n timestamp: number\n\n notes: Note[]\n\n spends: Spend[]\n\n }\n\n\n\n return {\n\n hash: transaction.transaction_identifier.hash,\n\n fee: metadata.fee || 0,\n\n size: metadata.size || 0,\n\n timestamp: blockData.timestamp,\n\n block: blockData,\n\n notes: 
metadata.notes,\n\n spends: metadata.spends,\n\n } as Transaction\n\n })\n\n\n\n return await this.transactionsRepository.save(transactionsToInsert)\n\n }\n", "file_path": "stronghold-rosetta-api/src/indexer/Indexer.ts", "rank": 34, "score": 62871.65504571904 }, { "content": " constructor() {\n\n // Each entity has its own repository which handles all operations with its entity.\n\n // When dealing with entities, Repositories are more convenient to use than EntityManagers:\n\n this.configRepository = getManager().getRepository(Config)\n\n this.blockRepository = getManager().getRepository(Block)\n\n this.transactionsRepository = getManager().getRepository(Transaction)\n", "file_path": "stronghold-rosetta-api/src/indexer/Indexer.ts", "rank": 35, "score": 62349.612311319186 }, { "content": " async init(): Promise<Indexer> {\n\n const indexer = new Indexer()\n\n await indexer.loadConfig()\n\n\n\n return indexer\n", "file_path": "stronghold-rosetta-api/src/indexer/Indexer.ts", "rank": 36, "score": 62349.612311319186 }, { "content": " transactionFee(): Promise<bigint> {\n\n return this.workerPool.transactionFee(this)\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 37, "score": 62029.81123813293 }, { "content": " transactionSignature(): Buffer {\n\n return this.withReference((t) => Buffer.from(t.transactionSignature))\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 38, "score": 62029.81123813293 }, { "content": " transactionHash(): TransactionHash {\n\n return this.withReference((t) => Buffer.from(t.transactionHash))\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 39, "score": 62029.81123813293 }, { "content": " async addBlock(block: GetBlockResponse): Promise<Block> {\n\n const metadata = block.metadata as { size: number; difficulty: number }\n\n\n\n const blockToInsert = new Block()\n\n blockToInsert.hash = block.blockIdentifier.hash\n\n blockToInsert.sequence = Number(block.blockIdentifier.index)\n\n 
blockToInsert.previousBlockHash = block.parentBlockIdentifier.hash\n\n blockToInsert.size = metadata.size || 0\n\n blockToInsert.difficulty = metadata.difficulty || 0\n\n blockToInsert.timestamp = block.timestamp\n\n blockToInsert.transactionsCount = block.transactions.length\n\n\n\n const blockData = await this.blockRepository.save(blockToInsert)\n\n\n\n await this.addTransactions(blockData, block.transactions)\n\n\n\n return blockData\n", "file_path": "stronghold-rosetta-api/src/indexer/Indexer.ts", "rank": 40, "score": 61848.76313918198 }, { "content": " async deleteAllFromSequence(sequence: number): Promise<void> {\n\n await this.blockRepository\n\n .createQueryBuilder()\n\n .delete()\n\n .where('sequence > :sequence', { sequence: sequence })\n\n .execute()\n", "file_path": "stronghold-rosetta-api/src/indexer/Indexer.ts", "rank": 41, "score": 61848.76313918198 }, { "content": " async getBlock(sequence?: number, hash?: string): Promise<Block | null> {\n\n const blockData = await this.blockRepository.findOne({\n\n where: {\n\n hash,\n\n sequence: sequence,\n\n },\n\n })\n\n\n\n return blockData || null\n", "file_path": "stronghold-rosetta-api/src/indexer/Indexer.ts", "rank": 42, "score": 61848.76313918198 }, { "content": " async setConfig(key: keyof IndexerConfigOptions, value: string): Promise<void> {\n\n if (!(key in this.config)) {\n\n throw 'Invalid key'\n\n }\n\n\n\n const config = await this.configRepository.findOne({ key })\n\n if (!config) {\n\n throw 'Key not found'\n\n }\n\n config.value = value\n\n await this.configRepository.save(config)\n", "file_path": "stronghold-rosetta-api/src/indexer/Indexer.ts", "rank": 43, "score": 61848.76313918198 }, { "content": " async loadConfig(): Promise<void> {\n\n const configs = await this.configRepository.find()\n\n\n\n if (!configs || configs.length <= 0) {\n\n return\n\n }\n\n\n\n for (const config of configs) {\n\n if (config.key in this.config) {\n\n this.config = {\n\n ...this.config,\n\n [config.key]: 
config.value,\n\n }\n\n }\n\n }\n", "file_path": "stronghold-rosetta-api/src/indexer/Indexer.ts", "rank": 44, "score": 61848.76313918198 }, { "content": " async deleteAtSequence(sequence: number): Promise<void> {\n\n await this.blockRepository.delete({ sequence: sequence })\n", "file_path": "stronghold-rosetta-api/src/indexer/Indexer.ts", "rank": 45, "score": 61848.76313918198 }, { "content": "/// Referred to as `ovk` in the literature.\n\n#[derive(Clone)]\n\npub struct OutgoingViewKey<J: JubjubEngine + pairing::MultiMillerLoop> {\n\n pub(crate) sapling: Arc<Sapling<J>>,\n\n pub(crate) view_key: [u8; 32],\n\n}\n\n\n\nimpl<J: JubjubEngine + pairing::MultiMillerLoop> OutgoingViewKey<J> {\n\n /// Load a key from a string of hexadecimal digits\n\n pub fn from_hex(\n\n sapling: Arc<Sapling<J>>,\n\n value: &str,\n\n ) -> Result<Self, errors::SaplingKeyError> {\n\n match hex_to_bytes(&value) {\n\n Err(()) => Err(errors::SaplingKeyError::InvalidViewingKey),\n\n Ok(bytes) => {\n\n if bytes.len() != 32 {\n\n Err(errors::SaplingKeyError::InvalidViewingKey)\n\n } else {\n\n let mut view_key = [0; 32];\n", "file_path": "stronghold-rust/src/keys/view_keys.rs", "rank": 46, "score": 60971.81333736534 }, { "content": "\n\nconst DIFFIE_HELLMAN_PERSONALIZATION: &[u8; 16] = b\"Beanstalk shared\";\n\n\n\n/// Key that allows someone to view a transaction that you have received.\n\n///\n\n/// Referred to as `ivk` in the literature.\n\n#[derive(Clone)]\n\npub struct IncomingViewKey<J: JubjubEngine + pairing::MultiMillerLoop> {\n\n pub(crate) sapling: Arc<Sapling<J>>,\n\n pub(crate) view_key: J::Fs,\n\n}\n\n\n\nimpl<J: JubjubEngine + pairing::MultiMillerLoop> IncomingViewKey<J> {\n\n /// load view key from a Read implementation\n\n pub fn read<R: io::Read>(\n\n sapling: Arc<Sapling<J>>,\n\n reader: &mut R,\n\n ) -> Result<Self, errors::SaplingKeyError> {\n\n let view_key = read_scalar(reader)?;\n\n Ok(IncomingViewKey { sapling, view_key })\n", "file_path": 
"stronghold-rust/src/keys/view_keys.rs", "rank": 47, "score": 60971.334519645774 }, { "content": " public_address\n\n }\n\n\n\n /// Calculate the shared secret key given the ephemeral public key that was\n\n /// created for a transaction.\n\n pub(crate) fn shared_secret(\n\n &self,\n\n ephemeral_public_key: &edwards::Point<J, PrimeOrder>,\n\n ) -> [u8; 32] {\n\n shared_secret(\n\n &self.sapling.jubjub,\n\n &self.view_key,\n\n ephemeral_public_key,\n\n ephemeral_public_key,\n\n )\n\n }\n\n}\n\n\n\n/// Key that allows someone to view a transaction that you have spent.\n\n///\n", "file_path": "stronghold-rust/src/keys/view_keys.rs", "rank": 48, "score": 60968.10214884698 }, { "content": " }\n\n\n\n /// Load a key from a string of hexadecimal digits\n\n pub fn from_hex(\n\n sapling: Arc<Sapling<J>>,\n\n value: &str,\n\n ) -> Result<Self, errors::SaplingKeyError> {\n\n match hex_to_bytes(&value) {\n\n Err(()) => Err(errors::SaplingKeyError::InvalidViewingKey),\n\n Ok(bytes) => {\n\n if bytes.len() != 32 {\n\n Err(errors::SaplingKeyError::InvalidViewingKey)\n\n } else {\n\n Self::read(sapling, &mut bytes[..].as_ref())\n\n }\n\n }\n\n }\n\n }\n\n\n\n /// Load a key from a string of words to be decoded into bytes.\n", "file_path": "stronghold-rust/src/keys/view_keys.rs", "rank": 49, "score": 60967.88863402074 }, { "content": " ) -> Result<PublicAddress<J>, errors::SaplingKeyError> {\n\n PublicAddress::from_view_key(self, diversifier)\n\n }\n\n\n\n /// Generate a public address from this key,\n\n /// picking a diversifier that is guaranteed to work with it.\n\n ///\n\n /// This method always succeeds, retrying with a different diversifier if\n\n /// one doesn't work.\n\n pub fn generate_public_address(&self) -> PublicAddress<J> {\n\n let public_address;\n\n loop {\n\n let mut diversifier_candidate = [0u8; 11];\n\n thread_rng().fill(&mut diversifier_candidate);\n\n\n\n if let Ok(key) = self.public_address(&diversifier_candidate) {\n\n public_address = key;\n\n break;\n\n 
}\n\n }\n", "file_path": "stronghold-rust/src/keys/view_keys.rs", "rank": 50, "score": 60965.81329437569 }, { "content": " ///\n\n /// See https://github.com/BeanstalkNetwork/word-encoding\n\n pub fn from_words(\n\n sapling: Arc<Sapling<J>>,\n\n language_code: &str,\n\n value: String,\n\n ) -> Result<Self, errors::SaplingKeyError> {\n\n let language = Language::from_language_code(language_code)\n\n .ok_or(errors::SaplingKeyError::InvalidLanguageEncoding)?;\n\n let mnemonic = Mnemonic::from_phrase(&value, language)\n\n .map_err(|_| errors::SaplingKeyError::InvalidPaymentAddress)?;\n\n let bytes = mnemonic.entropy();\n\n let mut byte_arr = [0; 32];\n\n byte_arr.clone_from_slice(&bytes[0..32]);\n\n Self::read(sapling, &mut byte_arr[..].as_ref())\n\n }\n\n\n\n /// Viewing key as hexadecimal, for readability.\n\n pub fn hex_key(&self) -> String {\n\n bytes_to_hex(&scalar_to_bytes(&self.view_key))\n", "file_path": "stronghold-rust/src/keys/view_keys.rs", "rank": 51, "score": 60965.4329730042 }, { "content": " let mut view_key = [0; 32];\n\n view_key.clone_from_slice(&bytes[0..32]);\n\n Ok(Self { sapling, view_key })\n\n }\n\n\n\n /// Viewing key as hexadecimal, for readability.\n\n pub fn hex_key(&self) -> String {\n\n bytes_to_hex(&self.view_key)\n\n }\n\n\n\n /// Even more readable\n\n pub fn words_key(&self, language_code: &str) -> Result<String, errors::SaplingKeyError> {\n\n let language = Language::from_language_code(language_code)\n\n .ok_or(errors::SaplingKeyError::InvalidLanguageEncoding)?;\n\n let mnemonic = Mnemonic::from_entropy(&self.view_key, language).unwrap();\n\n Ok(mnemonic.phrase().to_string())\n\n }\n\n}\n\n\n\n/// Pair of outgoing and incoming view keys for a complete audit\n", "file_path": "stronghold-rust/src/keys/view_keys.rs", "rank": 52, "score": 60964.95594080515 }, { "content": "//! View keys allow your transactions to be read\n\n//! by a third party without giving the option to spend your\n\n//! coins. 
This was designed for auditing systems, but may have other purposes\n\n//! such as in the use of light clients.\n\n//!\n\n//! There are two kinds of view keys. One allows you to share transactions\n\n//! that you have received, while the other allows you to share transactions\n\n//! that you have spent.\n\n//!\n\n\n\nuse super::{errors, PublicAddress, Sapling};\n\nuse crate::serializing::{\n\n bytes_to_hex, hex_to_bytes, point_to_bytes, read_scalar, scalar_to_bytes,\n\n};\n\nuse bip39::{Language, Mnemonic};\n\nuse blake2b_simd::Params as Blake2b;\n\nuse rand::{thread_rng, Rng};\n\n\n\nuse std::{io, sync::Arc};\n\nuse zcash_primitives::jubjub::{edwards, JubjubEngine, PrimeOrder};\n", "file_path": "stronghold-rust/src/keys/view_keys.rs", "rank": 53, "score": 60963.96240546504 }, { "content": "/// of spends and receipts\n\n#[derive(Clone)]\n\npub struct ViewKeys<J: JubjubEngine + pairing::MultiMillerLoop> {\n\n pub incoming: IncomingViewKey<J>,\n\n pub outgoing: OutgoingViewKey<J>,\n\n}\n\n\n\n/// Derive a shared secret key from a secret key and the other person's public\n\n/// key.\n\n///\n\n///\n\n/// The shared secret point is calculated by multiplying the public and private\n\n/// keys. 
This gets converted to bytes and hashed together with the reference\n\n/// public key to generate the final shared secret as used in encryption.\n\n\n\n/// A Diffie Hellman key exchange might look like this:\n\n/// * alice generates her DH secret key as SaplingKeys::internal_viewing_key\n\n/// * alice chooses a diversifier and publishes it and the transmission key\n\n/// generated from it as a PublicAddress\n\n/// * The transmission key becomes her DH public_key\n", "file_path": "stronghold-rust/src/keys/view_keys.rs", "rank": 54, "score": 60963.35038194177 }, { "content": " }\n\n\n\n /// Even more readable\n\n pub fn words_key(&self, language_code: &str) -> Result<String, errors::SaplingKeyError> {\n\n let language = Language::from_language_code(language_code)\n\n .ok_or(errors::SaplingKeyError::InvalidLanguageEncoding)?;\n\n let mnemonic = Mnemonic::from_entropy(&scalar_to_bytes(&self.view_key), language).unwrap();\n\n Ok(mnemonic.phrase().to_string())\n\n }\n\n\n\n /// Generate a public address from the incoming viewing key, given a specific\n\n /// 11 byte diversifier.\n\n ///\n\n /// This may fail, as not all diversifiers are created equal.\n\n ///\n\n /// Note: This may need to be public at some point. 
I'm hoping the client\n\n /// API would never have to deal with diversifiers, but I'm not sure, yet.\n\n pub fn public_address(\n\n &self,\n\n diversifier: &[u8; 11],\n", "file_path": "stronghold-rust/src/keys/view_keys.rs", "rank": 55, "score": 60963.3400666579 }, { "content": " view_key.clone_from_slice(&bytes[0..32]);\n\n Ok(Self { sapling, view_key })\n\n }\n\n }\n\n }\n\n }\n\n\n\n /// Load a key from a string of words to be decoded into bytes.\n\n ///\n\n /// See https://github.com/BeanstalkNetwork/word-encoding\n\n pub fn from_words(\n\n sapling: Arc<Sapling<J>>,\n\n language_code: &str,\n\n value: String,\n\n ) -> Result<Self, errors::SaplingKeyError> {\n\n let language = Language::from_language_code(language_code)\n\n .ok_or(errors::SaplingKeyError::InvalidLanguageEncoding)?;\n\n let mnemonic = Mnemonic::from_phrase(&value, language)\n\n .map_err(|_| errors::SaplingKeyError::InvalidPaymentAddress)?;\n\n let bytes = mnemonic.entropy();\n", "file_path": "stronghold-rust/src/keys/view_keys.rs", "rank": 56, "score": 60962.14637372927 }, { "content": "/// * Bob chooses some randomness as his secret key using the\n\n/// generate_diffie_hellman_keys method on alice's PublicAddress\n\n/// * That method calculates bob's public key as (alice diversifier * bob secret key)\n\n/// * This public key becomes the reference public key for both sides\n\n/// * bob sends public key to Alice\n\n/// * bob calculates shared secret key as (alice public key * bob secret key)\n\n/// * which is (alice transmission key * bob secret key)\n\n/// * maths to (alice internal viewing key * diversifier * bob secret key)\n\n/// * alice calculates shared secret key as (bob public key * alice internal viewing key)\n\n/// * this maths to (alice diversifier * bob secret key * alice internal viewing key)\n\n/// * both alice and bob hash the shared secret key with the reference public\n\n/// key (bob's public key) to get the final shared secret\n\n///\n\n/// The resulting key can be used in any 
symmetric cipher\n\npub(crate) fn shared_secret<J: JubjubEngine + pairing::MultiMillerLoop>(\n\n jubjub: &J::Params,\n\n secret_key: &J::Fs,\n\n other_public_key: &edwards::Point<J, PrimeOrder>,\n\n reference_public_key: &edwards::Point<J, PrimeOrder>,\n\n) -> [u8; 32] {\n", "file_path": "stronghold-rust/src/keys/view_keys.rs", "rank": 57, "score": 60961.176891177645 }, { "content": " let shared_secret = point_to_bytes(&other_public_key.mul(*secret_key, jubjub))\n\n .expect(\"should be able to convert point to bytes\");\n\n let reference_bytes =\n\n point_to_bytes(&reference_public_key).expect(\"should be able to convert point to bytes\");\n\n\n\n let mut hasher = Blake2b::new()\n\n .hash_length(32)\n\n .personal(DIFFIE_HELLMAN_PERSONALIZATION)\n\n .to_state();\n\n\n\n hasher.update(&shared_secret);\n\n hasher.update(&reference_bytes);\n\n let mut hash_result = [0; 32];\n\n hash_result[..].clone_from_slice(&hasher.finalize().as_ref());\n\n hash_result\n\n}\n", "file_path": "stronghold-rust/src/keys/view_keys.rs", "rank": 58, "score": 60957.98587511509 }, { "content": "export const SendTransactionResponseSchema: yup.ObjectSchema<SendTransactionResponse> = yup\n\n .object({\n\n fromAccountName: yup.string().defined(),\n\n toPublicKey: yup.string().defined(),\n\n amount: yup.string().defined(),\n\n transactionHash: yup.string().defined(),\n\n })\n", "file_path": "stronghold/src/rpc/routes/transactions/sendTransaction.ts", "rank": 59, "score": 60558.60665128498 }, { "content": "export const SendTransactionRequestSchema: yup.ObjectSchema<SendTransactionRequest> = yup\n\n .object({\n\n fromAccountName: yup.string().defined(),\n\n toPublicKey: yup.string().defined(),\n\n amount: yup.string().defined(),\n\n transactionFee: yup.string().defined(),\n\n memo: yup.string().defined(),\n\n })\n", "file_path": "stronghold/src/rpc/routes/transactions/sendTransaction.ts", "rank": 60, "score": 60558.60665128498 }, { "content": "export interface Transaction<E, H> {\n\n /**\n\n * 
Verify whether the transaction has valid proofs.\n\n */\n\n verify(): Promise<VerificationResult>\n\n\n\n /**\n\n * The number of notes in the transaction.\n\n */\n\n notesLength(): number\n\n\n\n /**\n\n * Iterate over all the notes created by this transaction.\n\n */\n\n notes(): Iterable<E>\n\n\n\n /**\n\n * The number of spends in the transaction.\n\n */\n\n spendsLength(): number\n\n\n\n /**\n\n * Iterate over all the spends in the transaction. A spend includes a nullifier,\n\n * indicating that a note was spent, and a commitment committing to\n\n * the root hash and tree size at the time the note was spent.\n\n */\n\n spends(): Iterable<Spend<H>>\n\n\n\n /**\n\n * Preallocate any resources necessary for using the transaction.\n\n */\n\n takeReference(): unknown\n\n\n\n /**\n\n * Return any resources necessary for using the transaction.\n\n */\n\n returnReference(): void\n\n\n\n /**\n\n * Wraps the given callback in takeReference and returnReference.\n\n */\n\n withReference<R>(callback: (transaction: unknown) => R): R\n\n\n\n /**\n\n * Get the transaction fee for this transactions.\n\n *\n\n * In general, each transaction has outputs lower than the amount spent; the\n\n * miner can collect the difference as a transaction fee.\n\n *\n\n * In a block header's minersFee transaction, the opposite happens;\n\n * the miner creates a block with zero spends and output equal to the sum\n\n * of the miner's fee for the block's transaction, plus the block chain's\n\n * mining reward.\n\n *\n\n * The transaction fee is the difference between outputs and spends on the\n\n * transaction.\n\n */\n\n transactionFee(): Promise<bigint>\n\n\n\n /**\n\n * Get transaction signature for this transaction.\n\n */\n\n transactionSignature(): Buffer\n\n\n\n /**\n\n * Get the transaction hash.\n\n */\n\n transactionHash(): Buffer\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 61, "score": 58488.55118821142 }, { "content": "export class TransactionSerde implements 
Serde<StrongholdTransaction, SerializedTransaction> {\n\n constructor(private readonly workerPool: WorkerPool) {}\n\n\n\n equals(): boolean {\n\n throw new Error(`Not implemented`)\n\n }\n\n\n\n serialize(transaction: StrongholdTransaction): SerializedTransaction {\n\n return transaction.serialize()\n\n }\n\n\n\n deserialize(data: SerializedTransaction): StrongholdTransaction {\n\n return new StrongholdTransaction(data, this.workerPool)\n\n }\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 62, "score": 57830.88408662614 }, { "content": "export class StrongholdTransaction\n\n implements Transaction<StrongholdNoteEncrypted, WasmNoteEncryptedHash> {\n\n private readonly wasmTransactionPostedSerialized: Buffer\n\n private readonly workerPool: WorkerPool\n\n\n\n private wasmTransactionPosted: WasmTransactionPosted | null = null\n\n private referenceCount = 0\n\n\n\n constructor(wasmTransactionPostedSerialized: Buffer, workerPool: WorkerPool) {\n\n this.wasmTransactionPostedSerialized = wasmTransactionPostedSerialized\n\n this.workerPool = workerPool\n\n }\n\n\n\n serialize(): Buffer {\n\n return this.wasmTransactionPostedSerialized\n\n }\n\n\n\n takeReference(): WasmTransactionPosted {\n\n this.referenceCount++\n\n if (this.wasmTransactionPosted === null) {\n\n this.wasmTransactionPosted = WasmTransactionPosted.deserialize(\n\n this.wasmTransactionPostedSerialized,\n\n )\n\n }\n\n return this.wasmTransactionPosted\n\n }\n\n\n\n returnReference(): void {\n\n this.referenceCount--\n\n if (this.referenceCount <= 0) {\n\n this.referenceCount = 0\n\n this.wasmTransactionPosted?.free()\n\n this.wasmTransactionPosted = null\n\n }\n\n }\n\n\n\n withReference<R>(callback: (transaction: WasmTransactionPosted) => R): R {\n\n const transaction = this.takeReference()\n\n try {\n\n return callback(transaction)\n\n } finally {\n\n this.returnReference()\n\n }\n\n }\n\n\n\n async verify(): Promise<VerificationResult> {\n\n const result = await 
this.workerPool.verify(this)\n\n\n\n return result === true\n\n ? { valid: true }\n\n : { valid: false, reason: VerificationResultReason.ERROR }\n\n }\n\n\n\n notesLength(): number {\n\n return this.withReference((t) => t.notesLength)\n\n }\n\n\n\n getNote(index: number): StrongholdNoteEncrypted {\n\n return this.withReference((t) => {\n\n // Get the note\n\n const serializedNote = Buffer.from(t.getNote(index))\n\n\n\n // Convert it to an StrongholdNoteEncrypted\n\n return new StrongholdNoteEncrypted(serializedNote)\n\n })\n\n }\n\n\n\n *notes(): Iterable<StrongholdNoteEncrypted> {\n\n const notesLength = this.notesLength()\n\n\n\n for (let i = 0; i < notesLength; i++) {\n\n yield this.getNote(i)\n\n }\n\n }\n\n\n\n spendsLength(): number {\n\n return this.withReference((t) => t.spendsLength)\n\n }\n\n\n\n *spends(): Iterable<Spend<WasmNoteEncryptedHash>> {\n\n const spendsLength = this.spendsLength()\n\n for (let i = 0; i < spendsLength; i++) {\n\n yield this.withReference((t) => {\n\n const wasmSpend = t.getSpend(i)\n\n const spend: Spend<WasmNoteEncryptedHash> = {\n\n size: wasmSpend.treeSize,\n\n nullifier: Buffer.from(wasmSpend.nullifier),\n\n commitment: Buffer.from(wasmSpend.rootHash),\n\n }\n\n wasmSpend.free()\n\n return spend\n\n })\n\n }\n\n }\n\n\n\n transactionFee(): Promise<bigint> {\n\n return this.workerPool.transactionFee(this)\n\n }\n\n\n\n transactionSignature(): Buffer {\n\n return this.withReference((t) => Buffer.from(t.transactionSignature))\n\n }\n\n\n\n transactionHash(): TransactionHash {\n\n return this.withReference((t) => Buffer.from(t.transactionHash))\n\n }\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 63, "score": 57826.239295462394 }, { "content": "export interface IDatabaseTransaction {\n\n /**\n\n * Lock the database\n\n */\n\n acquireLock(): Promise<void>\n\n\n\n /**\n\n * Commit the transaction atomically to the database but do not release the database lock\n\n * */\n\n update(): Promise<void>\n\n\n\n 
/**\n\n * Commit the transaction atomically to the database and release the database lock\n\n * */\n\n commit(): Promise<void>\n\n\n\n /**\n\n * Abort the transaction and release the database lock\n\n * */\n\n abort(): Promise<void>\n\n\n\n /**\n\n * The number of pending operations\n\n */\n\n readonly size: number\n", "file_path": "stronghold/src/storage/database/transaction.ts", "rank": 64, "score": 57187.93547605983 }, { "content": " *notes(): Iterable<StrongholdNoteEncrypted> {\n\n const notesLength = this.notesLength()\n\n\n\n for (let i = 0; i < notesLength; i++) {\n\n yield this.getNote(i)\n\n }\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 65, "score": 57178.75920804733 }, { "content": " constructor(private readonly workerPool: WorkerPool) {}\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 66, "score": 57178.75920804733 }, { "content": " serialize(): Buffer {\n\n return this.wasmTransactionPostedSerialized\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 67, "score": 57178.75920804733 }, { "content": " constructor(wasmTransactionPostedSerialized: Buffer, workerPool: WorkerPool) {\n\n this.wasmTransactionPostedSerialized = wasmTransactionPostedSerialized\n\n this.workerPool = workerPool\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 68, "score": 57178.75920804733 }, { "content": " deserialize(data: SerializedTransaction): StrongholdTransaction {\n\n return new StrongholdTransaction(data, this.workerPool)\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 69, "score": 57178.75920804733 }, { "content": " equals(): boolean {\n\n throw new Error(`Not implemented`)\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 70, "score": 57178.75920804733 }, { "content": " async verify(): Promise<VerificationResult> {\n\n const result = await this.workerPool.verify(this)\n\n\n\n return result === true\n\n ? 
{ valid: true }\n\n : { valid: false, reason: VerificationResultReason.ERROR }\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 71, "score": 57178.75920804733 }, { "content": "@Entity()\n\nexport class Transaction {\n\n @PrimaryColumn(Hash)\n\n hash!: string\n\n\n\n @Column({\n\n type: 'bigint',\n\n transformer: bigint,\n\n })\n\n fee!: number\n\n\n\n @Column()\n\n size!: number\n\n\n\n @Column(Timestamp)\n\n timestamp!: number\n\n\n\n @Index()\n\n @ManyToOne(() => Block, (block) => block.transactions, { onDelete: 'CASCADE' })\n\n block!: Block\n\n\n\n @Column('jsonb')\n\n notes!: Note[]\n\n\n\n @Column('jsonb')\n\n spends!: Spend[]\n", "file_path": "stronghold-rosetta-api/src/entity/Transaction.ts", "rank": 72, "score": 57178.75920804733 }, { "content": "export class LevelupTransaction implements IDatabaseTransaction {\n\n db: LevelupDatabase\n\n scopes: Set<string>\n\n type: 'readwrite' | 'read'\n\n batch: LevelupBatch\n\n committing = false\n\n aborting = false\n\n cache = new Map<string, unknown>()\n\n cacheDelete = new Set<string>()\n\n unlock: MutexUnlockFunction | null = null\n\n waiting: Promise<void> | null = null\n\n waitingResolve: (() => void) | null = null\n\n id = 0\n\n\n\n static id = 0\n\n\n\n constructor(\n\n db: LevelupDatabase,\n\n scopes: IDatabaseStore<DatabaseSchema>[],\n\n type: 'readwrite' | 'read',\n\n ) {\n\n this.db = db\n\n this.type = type\n\n this.id = ++LevelupTransaction.id\n\n\n\n this.scopes = new Set(scopes.map((s) => s.name))\n\n this.batch = new LevelupBatch(db)\n\n }\n\n\n\n get size(): number {\n\n return this.batch.queue.length\n\n }\n\n\n\n async acquireLock(): Promise<void> {\n\n if (this.unlock) return\n\n\n\n if (!this.waiting) {\n\n this.waiting = new Promise((resolve) => (this.waitingResolve = resolve))\n\n this.unlock = await this.db.lock.lock()\n\n if (this.waitingResolve) this.waitingResolve()\n\n this.waiting = null\n\n this.waitingResolve = null\n\n } else {\n\n await this.waiting\n\n }\n\n 
}\n\n\n\n releaseLock(): void {\n\n if (!this.unlock) return\n\n this.unlock()\n\n }\n\n\n\n async has<Schema extends DatabaseSchema>(\n\n store: LevelupStore<Schema>,\n\n key: SchemaKey<Schema>,\n\n ): Promise<boolean> {\n\n await this.acquireLock()\n\n return (await this.get(store, key)) !== undefined\n\n }\n\n\n\n async get<Schema extends DatabaseSchema>(\n\n store: LevelupStore<Schema>,\n\n key: SchemaKey<Schema>,\n\n ): Promise<SchemaValue<Schema> | undefined> {\n\n await this.acquireLock()\n\n this.assertCanRead(store)\n\n\n\n const [encodedKey] = store.encode(key)\n\n const cacheKey = BUFFER_TO_STRING_ENCODING.serialize(encodedKey)\n\n\n\n if (this.cacheDelete.has(cacheKey)) {\n\n return undefined\n\n }\n\n\n\n if (this.cache.has(cacheKey)) {\n\n const cached = this.cache.get(cacheKey)\n\n return cached as SchemaValue<Schema>\n\n }\n\n\n\n const value = await store.get(key)\n\n this.cache.set(cacheKey, value)\n\n return value\n\n }\n\n\n\n async put<Schema extends DatabaseSchema>(\n\n store: LevelupStore<Schema>,\n\n key: SchemaKey<Schema>,\n\n value: SchemaValue<Schema>,\n\n ): Promise<void> {\n\n await this.acquireLock()\n\n this.assertCanWrite(store)\n\n\n\n const [encodedKey, encodedValue] = store.encode(key, value)\n\n const cacheKey = BUFFER_TO_STRING_ENCODING.serialize(encodedKey)\n\n\n\n this.batch.putEncoded(encodedKey, encodedValue)\n\n this.cache.set(cacheKey, value)\n\n this.cacheDelete.delete(cacheKey)\n\n }\n\n\n\n async add<Schema extends DatabaseSchema>(\n\n store: LevelupStore<Schema>,\n\n key: SchemaKey<Schema>,\n\n value: SchemaValue<Schema>,\n\n ): Promise<void> {\n\n await this.acquireLock()\n\n this.assertCanWrite(store)\n\n\n\n if (await this.has(store, key)) {\n\n throw new DuplicateKeyError(`Key already exists ${String(key)}`)\n\n }\n\n\n\n const [encodedKey, encodedValue] = store.encode(key, value)\n\n const cacheKey = BUFFER_TO_STRING_ENCODING.serialize(encodedKey)\n\n this.batch.putEncoded(encodedKey, encodedValue)\n\n 
this.cache.set(cacheKey, value)\n\n this.cacheDelete.delete(cacheKey)\n\n }\n\n\n\n async del<Schema extends DatabaseSchema>(\n\n store: LevelupStore<Schema>,\n\n key: SchemaKey<Schema>,\n\n ): Promise<void> {\n\n await this.acquireLock()\n\n this.assertCanWrite(store)\n\n\n\n const [encodedKey] = store.encode(key)\n\n const cacheKey = BUFFER_TO_STRING_ENCODING.serialize(encodedKey)\n\n this.batch.delEncoded(encodedKey)\n\n this.cache.set(cacheKey, undefined)\n\n this.cacheDelete.add(cacheKey)\n\n }\n\n\n\n async update(): Promise<void> {\n\n try {\n\n if (!this.aborting) {\n\n await this.batch.commit()\n\n }\n\n } finally {\n\n this.cache.clear()\n\n this.cacheDelete.clear()\n\n this.committing = false\n\n }\n\n }\n\n\n\n async commit(): Promise<void> {\n\n try {\n\n await this.update()\n\n } finally {\n\n this.releaseLock()\n\n }\n\n }\n\n\n\n async abort(): Promise<void> {\n\n this.aborting = true\n\n this.releaseLock()\n\n return Promise.resolve()\n\n }\n\n\n\n private assertCanRead(store: DatabaseStore<DatabaseSchema>): void {\n\n this.assertCanWrite(store)\n\n }\n\n\n\n private assertCanWrite(store: DatabaseStore<DatabaseSchema>): void {\n\n if (this.committing) {\n\n throw new Error(`Transaction is being committed`)\n\n }\n\n\n\n if (!this.scopes.has(store.name)) {\n\n throw new Error(\n\n `Store ${store.name} is not in transaction scopes: ${Array.from(\n\n this.scopes.values(),\n\n ).join(', ')}`,\n\n )\n\n }\n\n }\n", "file_path": "stronghold/src/storage/levelup/transaction.ts", "rank": 73, "score": 57178.75920804733 }, { "content": " serialize(transaction: StrongholdTransaction): SerializedTransaction {\n\n return transaction.serialize()\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 74, "score": 57178.75920804733 }, { "content": " async has<Schema extends DatabaseSchema>(\n\n store: LevelupStore<Schema>,\n\n key: SchemaKey<Schema>,\n\n ): Promise<boolean> {\n\n await this.acquireLock()\n\n return (await this.get(store, key)) !== 
undefined\n", "file_path": "stronghold/src/storage/levelup/transaction.ts", "rank": 75, "score": 57178.75920804733 }, { "content": " withReference<R>(callback: (transaction: WasmTransactionPosted) => R): R {\n\n const transaction = this.takeReference()\n\n try {\n\n return callback(transaction)\n\n } finally {\n\n this.returnReference()\n\n }\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 76, "score": 57178.75920804733 }, { "content": " *spends(): Iterable<Spend<WasmNoteEncryptedHash>> {\n\n const spendsLength = this.spendsLength()\n\n for (let i = 0; i < spendsLength; i++) {\n\n yield this.withReference((t) => {\n\n const wasmSpend = t.getSpend(i)\n\n const spend: Spend<WasmNoteEncryptedHash> = {\n\n size: wasmSpend.treeSize,\n\n nullifier: Buffer.from(wasmSpend.nullifier),\n\n commitment: Buffer.from(wasmSpend.rootHash),\n\n }\n\n wasmSpend.free()\n\n return spend\n\n })\n\n }\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 77, "score": 57178.75920804733 }, { "content": "export class UseCommand extends StrongholdCommand {\n\n static description = 'Change the default account used by all commands'\n\n\n\n static args = [\n\n {\n\n name: 'name',\n\n required: true,\n\n description: 'name of the account',\n\n },\n\n ]\n\n\n\n static flags = {\n\n ...RemoteFlags,\n\n }\n\n\n\n async start(): Promise<void> {\n\n const { args } = this.parse(UseCommand)\n\n const name = (args.name as string).trim()\n\n\n\n await this.sdk.client.connect()\n\n await this.sdk.client.useAccount({ name })\n\n this.log(`The default account is now: ${name}`)\n\n }\n", "file_path": "stronghold-cli/src/commands/accounts/use.ts", "rank": 78, "score": 57114.525423955914 }, { "content": "export class KeyStore<TSchema extends Record<string, unknown>> {\n\n dataDir: string\n\n files: FileSystem\n\n storage: FileStore<TSchema>\n\n config: Readonly<TSchema>\n\n defaults: TSchema\n\n loaded: Partial<TSchema>\n\n overrides: Partial<TSchema> = {}\n\n keysLoaded = 
new Set<keyof TSchema>()\n\n schema: yup.ObjectSchema<Partial<TSchema>> | undefined\n\n\n\n readonly onConfigChange: Event<\n\n [key: keyof TSchema, value: TSchema[keyof TSchema]]\n\n > = new Event()\n\n\n\n constructor(\n\n files: FileSystem,\n\n configName: string,\n\n defaults: TSchema,\n\n dataDir?: string,\n\n schema?: yup.ObjectSchema<TSchema | Partial<TSchema>>,\n\n ) {\n\n this.files = files\n\n this.storage = new FileStore<TSchema>(files, configName, dataDir)\n\n this.schema = schema\n\n this.dataDir = this.storage.dataDir\n\n\n\n const loaded = Object.setPrototypeOf({}, defaults) as TSchema\n\n const overrides = Object.setPrototypeOf({}, loaded) as TSchema\n\n const config = Object.setPrototypeOf({}, overrides) as TSchema\n\n\n\n this.defaults = defaults\n\n this.loaded = loaded\n\n this.overrides = overrides\n\n this.config = config\n\n }\n\n\n\n async load(): Promise<void> {\n\n const data = await this.storage.load()\n\n\n\n // Validate file store if we have a schema\n\n if (this.schema) {\n\n const { error } = await YupUtils.tryValidate(this.schema, data)\n\n if (error) {\n\n throw new Error(error.message)\n\n }\n\n }\n\n\n\n this.keysLoaded.clear()\n\n\n\n if (data !== null) {\n\n let key: keyof TSchema\n\n\n\n for (key in data) {\n\n this.keysLoaded.add(key)\n\n }\n\n }\n\n\n\n this.loaded = { ...data } as Partial<TSchema>\n\n\n\n // Patch back in inheritence so config is still TSchema\n\n Object.setPrototypeOf(this.loaded, this.defaults)\n\n Object.setPrototypeOf(this.overrides, this.loaded)\n\n\n\n // Write the file out if it doesnt exist\n\n if (data === null) await this.save()\n\n }\n\n\n\n async save(): Promise<void> {\n\n const save: PartialRecursive<TSchema> = {}\n\n\n\n let key: keyof TSchema\n\n for (key in this.loaded) {\n\n const shouldSaveKey = this.keysLoaded.has(key) || this.loaded[key] !== this.defaults[key]\n\n\n\n if (shouldSaveKey) {\n\n Object.assign(save, { [key]: this.config[key] })\n\n }\n\n }\n\n\n\n await 
this.storage.save(save)\n\n }\n\n\n\n set<T extends keyof TSchema>(key: T, value: TSchema[T]): void {\n\n const previousValue = this.config[key]\n\n\n\n Object.assign(this.loaded, { [key]: value })\n\n\n\n if (Object.prototype.hasOwnProperty.call(this.overrides, key)) {\n\n delete this.overrides[key]\n\n }\n\n\n\n if (previousValue !== value) {\n\n this.onConfigChange.emit(key, value)\n\n }\n\n }\n\n\n\n setOverride<T extends keyof TSchema>(key: T, value: TSchema[T]): void {\n\n const previousValue = this.config[key]\n\n\n\n Object.assign(this.overrides, { [key]: value })\n\n\n\n if (previousValue !== value) {\n\n this.onConfigChange.emit(key, value)\n\n }\n\n }\n\n\n\n get<T extends keyof TSchema>(key: T): TSchema[T] {\n\n return this.config[key]\n\n }\n\n\n\n getArray<T extends keyof TSchema>(key: T): TSchema[T] {\n\n const value = this.get(key)\n\n\n\n if (Array.isArray(value)) {\n\n return value\n\n }\n\n\n\n if (typeof value !== 'string') {\n\n throw new Error(`${String(key)} must be array or string`)\n\n }\n\n\n\n return value.split(',').filter(Boolean) as TSchema[T]\n\n }\n", "file_path": "stronghold/src/fileStores/keyStore.ts", "rank": 79, "score": 56901.09692814192 }, { "content": " async add<Schema extends DatabaseSchema>(\n\n store: LevelupStore<Schema>,\n\n key: SchemaKey<Schema>,\n\n value: SchemaValue<Schema>,\n\n ): Promise<void> {\n\n await this.acquireLock()\n\n this.assertCanWrite(store)\n\n\n\n if (await this.has(store, key)) {\n\n throw new DuplicateKeyError(`Key already exists ${String(key)}`)\n\n }\n\n\n\n const [encodedKey, encodedValue] = store.encode(key, value)\n\n const cacheKey = BUFFER_TO_STRING_ENCODING.serialize(encodedKey)\n\n this.batch.putEncoded(encodedKey, encodedValue)\n\n this.cache.set(cacheKey, value)\n\n this.cacheDelete.delete(cacheKey)\n", "file_path": "stronghold/src/storage/levelup/transaction.ts", "rank": 80, "score": 56545.61822777387 }, { "content": " async del<Schema extends DatabaseSchema>(\n\n store: 
LevelupStore<Schema>,\n\n key: SchemaKey<Schema>,\n\n ): Promise<void> {\n\n await this.acquireLock()\n\n this.assertCanWrite(store)\n\n\n\n const [encodedKey] = store.encode(key)\n\n const cacheKey = BUFFER_TO_STRING_ENCODING.serialize(encodedKey)\n\n this.batch.delEncoded(encodedKey)\n\n this.cache.set(cacheKey, undefined)\n\n this.cacheDelete.add(cacheKey)\n", "file_path": "stronghold/src/storage/levelup/transaction.ts", "rank": 81, "score": 56545.61822777387 }, { "content": " constructor(\n\n db: LevelupDatabase,\n\n scopes: IDatabaseStore<DatabaseSchema>[],\n\n type: 'readwrite' | 'read',\n\n ) {\n\n this.db = db\n\n this.type = type\n\n this.id = ++LevelupTransaction.id\n\n\n\n this.scopes = new Set(scopes.map((s) => s.name))\n\n this.batch = new LevelupBatch(db)\n", "file_path": "stronghold/src/storage/levelup/transaction.ts", "rank": 82, "score": 56545.61822777387 }, { "content": " async update(): Promise<void> {\n\n try {\n\n if (!this.aborting) {\n\n await this.batch.commit()\n\n }\n\n } finally {\n\n this.cache.clear()\n\n this.cacheDelete.clear()\n\n this.committing = false\n\n }\n", "file_path": "stronghold/src/storage/levelup/transaction.ts", "rank": 83, "score": 56545.61822777387 }, { "content": " returnReference(): void {\n\n this.referenceCount--\n\n if (this.referenceCount <= 0) {\n\n this.referenceCount = 0\n\n this.wasmTransactionPosted?.free()\n\n this.wasmTransactionPosted = null\n\n }\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 84, "score": 56545.61822777387 }, { "content": " async get<Schema extends DatabaseSchema>(\n\n store: LevelupStore<Schema>,\n\n key: SchemaKey<Schema>,\n\n ): Promise<SchemaValue<Schema> | undefined> {\n\n await this.acquireLock()\n\n this.assertCanRead(store)\n\n\n\n const [encodedKey] = store.encode(key)\n\n const cacheKey = BUFFER_TO_STRING_ENCODING.serialize(encodedKey)\n\n\n\n if (this.cacheDelete.has(cacheKey)) {\n\n return undefined\n\n }\n\n\n\n if (this.cache.has(cacheKey)) {\n\n const 
cached = this.cache.get(cacheKey)\n\n return cached as SchemaValue<Schema>\n\n }\n\n\n\n const value = await store.get(key)\n\n this.cache.set(cacheKey, value)\n\n return value\n", "file_path": "stronghold/src/storage/levelup/transaction.ts", "rank": 85, "score": 56545.61822777387 }, { "content": " takeReference(): WasmTransactionPosted {\n\n this.referenceCount++\n\n if (this.wasmTransactionPosted === null) {\n\n this.wasmTransactionPosted = WasmTransactionPosted.deserialize(\n\n this.wasmTransactionPostedSerialized,\n\n )\n\n }\n\n return this.wasmTransactionPosted\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 86, "score": 56545.61822777387 }, { "content": " async abort(): Promise<void> {\n\n this.aborting = true\n\n this.releaseLock()\n\n return Promise.resolve()\n", "file_path": "stronghold/src/storage/levelup/transaction.ts", "rank": 87, "score": 56545.61822777387 }, { "content": " async commit(): Promise<void> {\n\n try {\n\n await this.update()\n\n } finally {\n\n this.releaseLock()\n\n }\n", "file_path": "stronghold/src/storage/levelup/transaction.ts", "rank": 88, "score": 56545.61822777387 }, { "content": " async put<Schema extends DatabaseSchema>(\n\n store: LevelupStore<Schema>,\n\n key: SchemaKey<Schema>,\n\n value: SchemaValue<Schema>,\n\n ): Promise<void> {\n\n await this.acquireLock()\n\n this.assertCanWrite(store)\n\n\n\n const [encodedKey, encodedValue] = store.encode(key, value)\n\n const cacheKey = BUFFER_TO_STRING_ENCODING.serialize(encodedKey)\n\n\n\n this.batch.putEncoded(encodedKey, encodedValue)\n\n this.cache.set(cacheKey, value)\n\n this.cacheDelete.delete(cacheKey)\n", "file_path": "stronghold/src/storage/levelup/transaction.ts", "rank": 89, "score": 56545.61822777387 }, { "content": " getNote(index: number): StrongholdNoteEncrypted {\n\n return this.withReference((t) => {\n\n // Get the note\n\n const serializedNote = Buffer.from(t.getNote(index))\n\n\n\n // Convert it to an StrongholdNoteEncrypted\n\n return new 
StrongholdNoteEncrypted(serializedNote)\n\n })\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 90, "score": 56545.61822777387 }, { "content": " get size(): number {\n\n return this.batch.queue.length\n", "file_path": "stronghold/src/storage/levelup/transaction.ts", "rank": 91, "score": 56545.61822777387 }, { "content": " spendsLength(): number {\n\n return this.withReference((t) => t.spendsLength)\n", "file_path": "stronghold/src/primitives/transaction.ts", "rank": 92, "score": 56545.61822777387 }, { "content": " async start(): Promise<void> {\n\n const { args } = this.parse(UseCommand)\n\n const name = (args.name as string).trim()\n\n\n\n await this.sdk.client.connect()\n\n await this.sdk.client.useAccount({ name })\n\n this.log(`The default account is now: ${name}`)\n", "file_path": "stronghold-cli/src/commands/accounts/use.ts", "rank": 93, "score": 56489.021911755444 }, { "content": " async load(): Promise<void> {\n\n const data = await this.storage.load()\n\n\n\n // Validate file store if we have a schema\n\n if (this.schema) {\n\n const { error } = await YupUtils.tryValidate(this.schema, data)\n\n if (error) {\n\n throw new Error(error.message)\n\n }\n\n }\n\n\n\n this.keysLoaded.clear()\n\n\n\n if (data !== null) {\n\n let key: keyof TSchema\n\n\n\n for (key in data) {\n\n this.keysLoaded.add(key)\n\n }\n\n }\n\n\n\n this.loaded = { ...data } as Partial<TSchema>\n\n\n\n // Patch back in inheritence so config is still TSchema\n\n Object.setPrototypeOf(this.loaded, this.defaults)\n\n Object.setPrototypeOf(this.overrides, this.loaded)\n\n\n\n // Write the file out if it doesnt exist\n\n if (data === null) await this.save()\n", "file_path": "stronghold/src/fileStores/keyStore.ts", "rank": 94, "score": 56277.930829633406 }, { "content": " get<T extends keyof TSchema>(key: T): TSchema[T] {\n\n return this.config[key]\n", "file_path": "stronghold/src/fileStores/keyStore.ts", "rank": 95, "score": 56277.930829633406 }, { "content": " set<T extends 
keyof TSchema>(key: T, value: TSchema[T]): void {\n\n const previousValue = this.config[key]\n\n\n\n Object.assign(this.loaded, { [key]: value })\n\n\n\n if (Object.prototype.hasOwnProperty.call(this.overrides, key)) {\n\n delete this.overrides[key]\n\n }\n\n\n\n if (previousValue !== value) {\n\n this.onConfigChange.emit(key, value)\n\n }\n", "file_path": "stronghold/src/fileStores/keyStore.ts", "rank": 96, "score": 56277.930829633406 }, { "content": " async save(): Promise<void> {\n\n const save: PartialRecursive<TSchema> = {}\n\n\n\n let key: keyof TSchema\n\n for (key in this.loaded) {\n\n const shouldSaveKey = this.keysLoaded.has(key) || this.loaded[key] !== this.defaults[key]\n\n\n\n if (shouldSaveKey) {\n\n Object.assign(save, { [key]: this.config[key] })\n\n }\n\n }\n\n\n\n await this.storage.save(save)\n", "file_path": "stronghold/src/fileStores/keyStore.ts", "rank": 97, "score": 56277.930829633406 }, { "content": " constructor(\n\n files: FileSystem,\n\n configName: string,\n\n defaults: TSchema,\n\n dataDir?: string,\n\n schema?: yup.ObjectSchema<TSchema | Partial<TSchema>>,\n\n ) {\n\n this.files = files\n\n this.storage = new FileStore<TSchema>(files, configName, dataDir)\n\n this.schema = schema\n\n this.dataDir = this.storage.dataDir\n\n\n\n const loaded = Object.setPrototypeOf({}, defaults) as TSchema\n\n const overrides = Object.setPrototypeOf({}, loaded) as TSchema\n\n const config = Object.setPrototypeOf({}, overrides) as TSchema\n\n\n\n this.defaults = defaults\n\n this.loaded = loaded\n\n this.overrides = overrides\n\n this.config = config\n", "file_path": "stronghold/src/fileStores/keyStore.ts", "rank": 98, "score": 56277.930829633406 }, { "content": "@EntityRepository(Transaction)\n\nexport class TransactionRepository extends Repository<Transaction> {\n\n async findWithInstances(\n\n transactionHash?: string,\n\n blockHash?: string,\n\n ): Promise<Transaction | null> {\n\n const transaction = await this.findOne({\n\n where: {\n\n hash: 
transactionHash,\n\n block: { hash: blockHash },\n\n },\n\n relations: FULL_JOINS,\n\n })\n\n return transaction || null\n\n }\n\n\n\n async getWithInstances(transactionHash?: string, blockHash?: string): Promise<Transaction> {\n\n return await this.findOneOrFail({\n\n where: {\n\n hash: transactionHash,\n\n block: { hash: blockHash },\n\n },\n\n relations: FULL_JOINS,\n\n })\n\n }\n\n\n\n async findByHashWithInstances(hash: string, limit: number): Promise<Transaction[]> {\n\n return (\n\n (await this.find({\n\n where: { hash: Like(`%${hash}%`) },\n\n take: limit,\n\n relations: FULL_JOINS,\n\n })) || []\n\n )\n\n }\n", "file_path": "stronghold-rosetta-api/src/repository/TransactionRepository.ts", "rank": 99, "score": 55926.34524006235 } ]
Rust
crates/rune/src/compile/expr.rs
shekohex/rune
05fab8da952737e61c8d1141393e3faa6318d4e6
use crate::ast; use crate::compiler::{Compiler, Needs}; use crate::error::CompileResult; use crate::traits::Compile; use crate::worker::Expanded; use crate::CompileError; use runestick::Inst; impl Compile<(&ast::Expr, Needs)> for Compiler<'_> { fn compile(&mut self, (expr, needs): (&ast::Expr, Needs)) -> CompileResult<()> { let span = expr.span(); log::trace!("Expr => {:?}", self.source.source(span)); match expr { ast::Expr::Self_(self_) => { self.compile((self_, needs))?; } ast::Expr::Path(path) => { self.compile((path, needs))?; } ast::Expr::ExprWhile(expr_while) => { self.compile((expr_while, needs))?; } ast::Expr::ExprFor(expr_for) => { self.compile((expr_for, needs))?; } ast::Expr::ExprLoop(expr_loop) => { self.compile((expr_loop, needs))?; } ast::Expr::ExprLet(expr_let) => { self.compile((expr_let, needs))?; } ast::Expr::ExprGroup(expr) => { self.compile((&*expr.expr, needs))?; } ast::Expr::ExprUnary(expr_unary) => { self.compile((expr_unary, needs))?; } ast::Expr::ExprBinary(expr_binary) => { self.compile((expr_binary, needs))?; } ast::Expr::ExprIf(expr_if) => { self.compile((expr_if, needs))?; } ast::Expr::ExprIndexSet(expr_index_set) => { self.compile((expr_index_set, needs))?; } ast::Expr::ExprIndexGet(expr_index_get) => { self.compile((expr_index_get, needs))?; } ast::Expr::ExprBreak(expr_break) => { self.compile(expr_break)?; } ast::Expr::ExprYield(expr_yield) => { self.compile((expr_yield, needs))?; } ast::Expr::ExprBlock(expr_block) => { self.compile((expr_block, needs))?; } ast::Expr::ExprAsync(expr_async) => { self.compile((expr_async, needs))?; } ast::Expr::ExprReturn(expr_return) => { self.compile((expr_return, needs))?; } ast::Expr::ExprMatch(expr_match) => { self.compile((expr_match, needs))?; } ast::Expr::ExprAwait(expr_await) => { self.compile((expr_await, needs))?; } ast::Expr::ExprTry(expr_try) => { self.compile((expr_try, needs))?; } ast::Expr::ExprSelect(expr_select) => { self.compile((expr_select, needs))?; } 
ast::Expr::ExprCall(expr_call) => { self.compile((expr_call, needs))?; } ast::Expr::ExprFieldAccess(expr_field_access) => { self.compile((expr_field_access, needs))?; } ast::Expr::ExprClosure(expr_closure) => { self.compile((expr_closure, needs))?; } ast::Expr::LitUnit(lit_unit) => { self.compile((lit_unit, needs))?; } ast::Expr::LitTuple(lit_tuple) => { self.compile((lit_tuple, needs))?; } ast::Expr::LitBool(lit_bool) => { self.compile((lit_bool, needs))?; } ast::Expr::LitNumber(lit_number) => { self.compile((lit_number, needs))?; } ast::Expr::LitVec(lit_vec) => { self.compile((lit_vec, needs))?; } ast::Expr::LitObject(lit_object) => { self.compile((lit_object, needs))?; } ast::Expr::LitChar(lit_char) => { self.compile((lit_char, needs))?; } ast::Expr::LitStr(lit_str) => { self.compile((lit_str, needs))?; } ast::Expr::LitByte(lit_char) => { self.compile((lit_char, needs))?; } ast::Expr::LitByteStr(lit_str) => { self.compile((lit_str, needs))?; } ast::Expr::LitTemplate(lit_template) => { self.compile((lit_template, needs))?; } ast::Expr::MacroCall(expr_call_macro) => { let _guard = self.items.push_macro(); let item = self.items.item(); if let Some(Expanded::Expr(expr)) = self.expanded.get(&item) { self.compile((expr, needs))?; } else { let span = expr_call_macro.span(); return Err(CompileError::internal("macro has not been expanded", span)); } } ast::Expr::Item(decl) => { let span = decl.span(); if needs.value() { self.asm.push(Inst::Unit, span); } } } Ok(()) } }
use crate::ast; use crate::compiler::{Compiler, Needs}; use crate::error::CompileResult; use crate::traits::Compile; use crate::worker::Expanded; use crate::CompileError; use runestick::Inst; impl Compile<(&ast::Expr, Needs)> for Compiler<'_> { fn compile(&mut self, (expr, needs): (&ast::Expr, Needs)) -> CompileResult<()> { let span = expr.span(); log::trace!("Expr => {:?}", self.source.source(span)); match expr { ast::Expr::Self_(self_) => { self.compile((self_, needs))?; } ast::Expr::Path(path) => { self.compile((path, needs))?; } ast::Expr::ExprWhile(expr_while) => { self.compile((expr_while, needs))?; } ast::Expr::ExprFor(expr_for) => { self.compile((expr_for, needs))?; } ast::Expr::ExprLoop(expr_loop) => { self.compile((expr_loop, needs))?; } ast::Expr::ExprLet(expr_let) => { self.compile((expr_let, needs))?; } ast::Expr::ExprGroup(expr) => { self.compile((&*expr.expr, needs))?; } ast::Expr::ExprUnary(expr_unary) => { self.compile((expr_unary, needs))?; } ast::Expr::ExprBinary(expr_binary) => { self.compile((expr_binary, needs))?; } ast::Expr::ExprIf(expr_if) => { self.compile((expr_if, needs))?; } ast::Expr::ExprIndexSet(expr_index_set) => { self.compile((expr_index_set, needs))?; } ast::Expr::ExprIndexGet(expr_index_get) => { self.compile((expr_index_get, needs))?; } ast::Expr::ExprBreak(expr_break) => { self.compile(expr_break)?; } ast::Expr::ExprYield(expr_yield) => { self.compile((expr_yield, needs))?; } ast::Expr::ExprBlock(expr_block) => { self.compile((expr_block, needs))?; } ast::Expr::ExprAsync(expr_async) => { self.compile((expr_async, needs))?; } ast::Expr::ExprReturn(expr_return) => { self.compile((expr_return, needs))?; } ast::Expr::ExprMatch(expr_match) => { self.compile((expr_match, needs))?; } ast::Expr::ExprAwait(expr_await) => { self.compile((expr_await, needs))?; } ast::Expr::ExprTry(expr_try) => { self.compile((expr_try, needs))?; } ast::Expr::ExprSelect(expr_select) => { self.compile((expr_select, needs))?; } 
ast::Expr::ExprCall(expr_call) => { self.compile((expr_call, needs))?; } ast::Expr::ExprFieldAccess(expr_field_acces
}
s) => { self.compile((expr_field_access, needs))?; } ast::Expr::ExprClosure(expr_closure) => { self.compile((expr_closure, needs))?; } ast::Expr::LitUnit(lit_unit) => { self.compile((lit_unit, needs))?; } ast::Expr::LitTuple(lit_tuple) => { self.compile((lit_tuple, needs))?; } ast::Expr::LitBool(lit_bool) => { self.compile((lit_bool, needs))?; } ast::Expr::LitNumber(lit_number) => { self.compile((lit_number, needs))?; } ast::Expr::LitVec(lit_vec) => { self.compile((lit_vec, needs))?; } ast::Expr::LitObject(lit_object) => { self.compile((lit_object, needs))?; } ast::Expr::LitChar(lit_char) => { self.compile((lit_char, needs))?; } ast::Expr::LitStr(lit_str) => { self.compile((lit_str, needs))?; } ast::Expr::LitByte(lit_char) => { self.compile((lit_char, needs))?; } ast::Expr::LitByteStr(lit_str) => { self.compile((lit_str, needs))?; } ast::Expr::LitTemplate(lit_template) => { self.compile((lit_template, needs))?; } ast::Expr::MacroCall(expr_call_macro) => { let _guard = self.items.push_macro(); let item = self.items.item(); if let Some(Expanded::Expr(expr)) = self.expanded.get(&item) { self.compile((expr, needs))?; } else { let span = expr_call_macro.span(); return Err(CompileError::internal("macro has not been expanded", span)); } } ast::Expr::Item(decl) => { let span = decl.span(); if needs.value() { self.asm.push(Inst::Unit, span); } } } Ok(()) }
function_block-function_prefixed
[]
Rust
src/main.rs
jyanar/boids_rs
6c03a77ea80c0cf332fce327138ce8f2458a91f9
extern crate piston_window; use rand::Rng; use std::time::{SystemTime,UNIX_EPOCH}; use std::path::Path; use piston_window::*; const WINDOW_SIZE: u32 = 800; const GFX_CONTEXT_OFFSET: f64 = 0.0 as f64; const MILLIS_PER_FRAME: u128 = 10; const BLACK: [f32;4] = [0.0, 0.0, 0.0, 1.0]; const WHITE: [f32;4] = [1.0; 4]; const SQUARE_SIZE: f64 = 2.0; const NBOIDS: usize = 300; const NPREDS: usize = 0; const MAXSPEED: f64 = 2.5; const MAXDIST: f64 = 25.0; const MINSEP: f64 = 10.0; const ALIGNMENT_WEIGHT: f64 = 0.125; const SEPARATION_WEIGHT: f64 = 1.0; const COHESION_WEIGHT: f64 = 0.01; const OBSTACLE_WEIGHT: f64 = 1.0; #[derive(Debug)] struct Vector { x: f64, y: f64, } impl Vector { fn addvec(&mut self, vector: &Vector) { self.x = self.x + vector.x; self.y = self.y + vector.y; } fn subvec(&mut self, vector: &Vector) { self.x = self.x - vector.x; self.y = self.y - vector.y; } fn addscalar(&mut self, scalar: f64) { self.x = self.x + scalar; self.y = self.y + scalar; } fn subscalar(&mut self, scalar: f64) { self.x = self.x + scalar; self.y = self.y + scalar; } fn multscalar(&mut self, scalar: f64) { self.x = self.x * scalar; self.y = self.y * scalar; } fn normalize(&mut self) { let mag = self.get_magnitude(); self.x = self.x / mag; self.y = self.y / mag; } fn set_magnitude(&mut self, magnitude: f64) { self.normalize(); self.x = self.x * magnitude; self.y = self.y * magnitude; } fn get_magnitude(&self) -> f64 { (self.x.powi(2) + self.y.powi(2)).sqrt() } fn get_distance(&self, vector: &Vector) -> f64 { let mut vector = Vector { x: vector.x , y: vector.y }; vector.subvec(self); vector.get_magnitude() } fn dot(&self, vector: Vector) -> f64 { self.x * vector.x + self.y * vector.y } } #[derive(Debug)] struct Boid { id: usize, predator: bool, maxspeed: f64, location: Vector, velocity: Vector, } impl Boid { fn compute_alignment(&self, boids: &[Boid]) -> Vector { let mut nearby = 0; let mut alignment = [0.0 ; 2]; for i in 0 .. 
boids.len() { if self.id != boids[i].id && self.location.get_distance(&boids[i].location) < MAXDIST { alignment[0] += boids[i].velocity.x; alignment[1] += boids[i].velocity.y; } } let mut alignment = Vector { x: alignment[0] , y: alignment[1] }; alignment.multscalar(1.0); alignment } fn compute_separation(&self, boids: &[Boid]) -> Vector { let nearby = 0; let mut separation = [0.0 ; 2]; for i in 0 .. boids.len() { if self.location.get_distance(&boids[i].location) < MINSEP && self.id != boids[i].id { separation[0] += self.location.x - boids[i].location.x; separation[1] += self.location.y - boids[i].location.y; } } let separation: Vector = Vector { x: separation[0] , y: separation[1] }; separation } fn compute_cohesion(&self, boids: &[Boid]) -> Vector { let mut nearby = 0; let mut cohesion: Vector = Vector { x: 0.0, y: 0.0 }; for i in 0 .. boids.len() { if (boids[i].location.x - self.location.x).abs() < MAXDIST && (boids[i].location.y - self.location.y).abs() < MAXDIST && (boids[i].id != self.id) { cohesion.addvec(&boids[i].location); nearby += 1; } } if nearby > 0 { cohesion.multscalar(1.0 / nearby as f64); } cohesion.subvec(&self.location); cohesion.multscalar(1.0); cohesion } fn compute_obstacle(&self) -> Vector { let mut dir: (f64,f64) = (0.0,0.0); if self.location.x < MINSEP { dir.0 = (MINSEP - self.location.x).exp(); } if self.location.y < MINSEP { dir.1 = (MINSEP - self.location.y).exp(); } if self.location.x > (WINDOW_SIZE as f64 - MINSEP) { dir.0 = -10.0; } if self.location.y > (WINDOW_SIZE as f64 - MINSEP) { dir.1 = -10.0; } Vector { x: dir.0, y: dir.1 } } fn update_velocity(&mut self, v: &Vector) { self.velocity.addvec(v); } fn update_position(&mut self) { self.location.addvec(&self.velocity); } fn limit_speed(&mut self) { if self.velocity.get_magnitude() > MAXSPEED { self.velocity.set_magnitude(MAXSPEED); } } } struct Flock { flock: Vec<Boid>, } impl Flock { fn generate_flock(&mut self, n_boids: usize, n_preds: usize) { let mut rng = rand::thread_rng(); 
self.flock = Vec::with_capacity(n_boids + n_preds); for i in 0 .. (n_boids + n_preds) { self.add_boid(Boid { id: i, predator: false, maxspeed: MAXSPEED, location: Vector {x: rng.gen_range(0.0, WINDOW_SIZE as f64), y: rng.gen_range(0.0, WINDOW_SIZE as f64)}, velocity: Vector {x: 1.0, y: 0.0}, }); } for i in 0 .. n_preds { self.flock[i].predator = true; self.flock[i].maxspeed = MAXSPEED + 1.0; } } fn add_boid(&mut self, boid: Boid) { self.flock.push(boid); } fn take_step(&mut self) { for i in 0 .. self.flock.len() { let mut algn = self.flock[i].compute_alignment(&self.flock); let mut sepr = self.flock[i].compute_separation(&self.flock); let mut cohn = self.flock[i].compute_cohesion(&self.flock); let obst = self.flock[i].compute_obstacle(); algn.multscalar(ALIGNMENT_WEIGHT); sepr.multscalar(SEPARATION_WEIGHT); cohn.multscalar(COHESION_WEIGHT); let mut sum = Vector{x: 0.0, y: 0.0}; sum.addvec(&sepr); sum.addvec(&algn); sum.addvec(&cohn); sum.addvec(&obst); self.flock[i].update_velocity(&sum); self.flock[i].limit_speed(); self.flock[i].update_position(); } } } fn main() { let mut f = Flock { flock: vec![] }; f.generate_flock(NBOIDS, 0); let opengl = OpenGL::V3_2; let mut window: PistonWindow = WindowSettings::new("boids", [WINDOW_SIZE; 2]) .exit_on_esc(true) .graphics_api(opengl) .build() .unwrap(); let mut previous_update = UNIX_EPOCH; while let Some(e) = window.next() { if previous_update.elapsed() .map(|d| d.as_millis()) .unwrap_or(0) > MILLIS_PER_FRAME { f.take_step(); previous_update = SystemTime::now(); } window.draw_2d(&e, |context, graphics, _| { clear(BLACK, graphics); let context = context.trans(GFX_CONTEXT_OFFSET, GFX_CONTEXT_OFFSET); for i in 0 .. f.flock.len() { rectangle(WHITE, [f.flock[i].location.x, f.flock[i].location.y, SQUARE_SIZE, SQUARE_SIZE], context.transform, graphics); } }); } }
extern crate piston_window; use rand::Rng; use std::time::{SystemTime,UNIX_EPOCH}; use std::path::Path; use piston_window::*; const WINDOW_SIZE: u32 = 800; const GFX_CONTEXT_OFFSET: f64 = 0.0 as f64; const MILLIS_PER_FRAME: u128 = 10; const BLACK: [f32;4] = [0.0, 0.0, 0.0, 1.0]; const WHITE: [f32;4] = [1.0; 4]; const SQUARE_SIZE: f64 = 2.0; const NBOIDS: usize = 300; const NPREDS: usize = 0; const MAXSPEED: f64 = 2.5; const MAXDIST: f64 = 25.0; const MINSEP: f64 = 10.0; const ALIGNMENT_WEIGHT: f64 = 0.125; const SEPARATION_WEIGHT: f64 = 1.0; const COHESION_WEIGHT: f64 = 0.01; const OBSTACLE_WEIGHT: f64 = 1.0; #[derive(Debug)] struct Vector { x: f64, y: f64, } impl Vector { fn addvec(&mut self, vector: &Vector) { self.x = self.x + vector.x; self.y = self.y + vector.y; } fn subvec(&mut self, vector: &Vector) { self.x = self.x - vector.x; self.y = self.y - vector.y; } fn addscalar(&mut self, scalar: f64) { self.x = self.x + scalar; self.y = self.y + scalar; } fn subscalar(&mut self, scalar: f64) { self.x = self.x + scalar; self.y = self.y + scalar; } fn multscalar(&mut self, scalar: f64) { self.x = self.x * scalar; self.y = self.y * scalar; } fn normalize(&mut self) { let mag = self.get_magnitude(); self.x = self.x / mag; self.y = self.y / mag; } fn set_magnitude(&mut self, magnitude: f64) { self.normalize(); self.x = self.x * magnitude; self.y = self.y * magnitude; } fn get_magnitude(&self) -> f64 { (self.x.powi(2) + self.y.powi(2)).sqrt() } fn get_distance(&self, vector: &Vector) -> f64 { let mut vector = Vector { x: vector.x , y: vector.y }; vector.subvec(self); vector.get_magnitude() } fn dot(&self, vector: Vector) -> f64 { self.x * vector.x + self.y * vector.y } } #[derive(Debug)] struct Boid { id: usize, predator: bool, maxspeed: f64, location: Vector, velocity: Vector, } impl Boid {
fn compute_separation(&self, boids: &[Boid]) -> Vector { let nearby = 0; let mut separation = [0.0 ; 2]; for i in 0 .. boids.len() { if self.location.get_distance(&boids[i].location) < MINSEP && self.id != boids[i].id { separation[0] += self.location.x - boids[i].location.x; separation[1] += self.location.y - boids[i].location.y; } } let separation: Vector = Vector { x: separation[0] , y: separation[1] }; separation } fn compute_cohesion(&self, boids: &[Boid]) -> Vector { let mut nearby = 0; let mut cohesion: Vector = Vector { x: 0.0, y: 0.0 }; for i in 0 .. boids.len() { if (boids[i].location.x - self.location.x).abs() < MAXDIST && (boids[i].location.y - self.location.y).abs() < MAXDIST && (boids[i].id != self.id) { cohesion.addvec(&boids[i].location); nearby += 1; } } if nearby > 0 { cohesion.multscalar(1.0 / nearby as f64); } cohesion.subvec(&self.location); cohesion.multscalar(1.0); cohesion } fn compute_obstacle(&self) -> Vector { let mut dir: (f64,f64) = (0.0,0.0); if self.location.x < MINSEP { dir.0 = (MINSEP - self.location.x).exp(); } if self.location.y < MINSEP { dir.1 = (MINSEP - self.location.y).exp(); } if self.location.x > (WINDOW_SIZE as f64 - MINSEP) { dir.0 = -10.0; } if self.location.y > (WINDOW_SIZE as f64 - MINSEP) { dir.1 = -10.0; } Vector { x: dir.0, y: dir.1 } } fn update_velocity(&mut self, v: &Vector) { self.velocity.addvec(v); } fn update_position(&mut self) { self.location.addvec(&self.velocity); } fn limit_speed(&mut self) { if self.velocity.get_magnitude() > MAXSPEED { self.velocity.set_magnitude(MAXSPEED); } } } struct Flock { flock: Vec<Boid>, } impl Flock { fn generate_flock(&mut self, n_boids: usize, n_preds: usize) { let mut rng = rand::thread_rng(); self.flock = Vec::with_capacity(n_boids + n_preds); for i in 0 .. 
(n_boids + n_preds) { self.add_boid(Boid { id: i, predator: false, maxspeed: MAXSPEED, location: Vector {x: rng.gen_range(0.0, WINDOW_SIZE as f64), y: rng.gen_range(0.0, WINDOW_SIZE as f64)}, velocity: Vector {x: 1.0, y: 0.0}, }); } for i in 0 .. n_preds { self.flock[i].predator = true; self.flock[i].maxspeed = MAXSPEED + 1.0; } } fn add_boid(&mut self, boid: Boid) { self.flock.push(boid); } fn take_step(&mut self) { for i in 0 .. self.flock.len() { let mut algn = self.flock[i].compute_alignment(&self.flock); let mut sepr = self.flock[i].compute_separation(&self.flock); let mut cohn = self.flock[i].compute_cohesion(&self.flock); let obst = self.flock[i].compute_obstacle(); algn.multscalar(ALIGNMENT_WEIGHT); sepr.multscalar(SEPARATION_WEIGHT); cohn.multscalar(COHESION_WEIGHT); let mut sum = Vector{x: 0.0, y: 0.0}; sum.addvec(&sepr); sum.addvec(&algn); sum.addvec(&cohn); sum.addvec(&obst); self.flock[i].update_velocity(&sum); self.flock[i].limit_speed(); self.flock[i].update_position(); } } } fn main() { let mut f = Flock { flock: vec![] }; f.generate_flock(NBOIDS, 0); let opengl = OpenGL::V3_2; let mut window: PistonWindow = WindowSettings::new("boids", [WINDOW_SIZE; 2]) .exit_on_esc(true) .graphics_api(opengl) .build() .unwrap(); let mut previous_update = UNIX_EPOCH; while let Some(e) = window.next() { if previous_update.elapsed() .map(|d| d.as_millis()) .unwrap_or(0) > MILLIS_PER_FRAME { f.take_step(); previous_update = SystemTime::now(); } window.draw_2d(&e, |context, graphics, _| { clear(BLACK, graphics); let context = context.trans(GFX_CONTEXT_OFFSET, GFX_CONTEXT_OFFSET); for i in 0 .. f.flock.len() { rectangle(WHITE, [f.flock[i].location.x, f.flock[i].location.y, SQUARE_SIZE, SQUARE_SIZE], context.transform, graphics); } }); } }
fn compute_alignment(&self, boids: &[Boid]) -> Vector { let mut nearby = 0; let mut alignment = [0.0 ; 2]; for i in 0 .. boids.len() { if self.id != boids[i].id && self.location.get_distance(&boids[i].location) < MAXDIST { alignment[0] += boids[i].velocity.x; alignment[1] += boids[i].velocity.y; } } let mut alignment = Vector { x: alignment[0] , y: alignment[1] }; alignment.multscalar(1.0); alignment }
function_block-full_function
[]
Rust
day23.rs
codecow911/adventofcode2018
293cbf4aeda53ed644386dc53a880e762875610e
#![feature(test)] use std::error::Error; use std::fs::File; use std::io::{BufRead, BufReader}; #[macro_use] extern crate scan_fmt; #[derive(Debug)] struct Point { x: i32, y: i32, z: i32, r: u32 } impl Point { pub fn set(&mut self, x: i32, y: i32, z: i32) { self.x = x; self.y = y; self.z = z; } pub fn distance(&self, other: &Point) -> u32 { (i32::abs(self.x - other.x) + i32::abs(self.y - other.y) + i32::abs(self.z - other.z)) as u32 } pub fn in_range(&self, other: &Point) -> bool { self.distance(other) <= self.r } } fn parse(path: &str) -> Vec<Point> { let file = match File::open(path) { Err(why) => panic!("couldn't open {}: {}", path, Error::description(&why)), Ok(file) => file, }; let mut result = Vec::new(); let reader = BufReader::new(file); for line in reader.lines() { match line { Ok(line) => { let (x, y, z, r) = scan_fmt!(&line, "pos=<{},{},{}>, r={}", i32, i32, i32, u32); result.push(Point {x: x.unwrap(), y: y.unwrap(), z: z.unwrap(), r: r.unwrap()}); }, Err(e) => println!("err: {}", e) } } result } fn get_max(points: &Vec<Point>) -> Point { let max = points.iter().max_by_key(|x| x.r).unwrap(); Point {x: max.x, y: max.y, z: max.z, r: max.r} } fn part1(path: &str) -> u32 { let mut points = parse(path); let max = get_max(&points); points.retain(|x| max.in_range(x)); points.len() as u32 } fn part2(path: &str) -> u32 { let nanobots = parse(path); let mut minx = nanobots.iter().min_by_key(|x| x.x).unwrap().x; let mut miny = nanobots.iter().min_by_key(|x| x.y).unwrap().y; let mut minz = nanobots.iter().min_by_key(|x| x.z).unwrap().z; let mut maxx = nanobots.iter().max_by_key(|x| x.x).unwrap().x; let mut maxy = nanobots.iter().max_by_key(|x| x.y).unwrap().y; let mut maxz = nanobots.iter().max_by_key(|x| x.z).unwrap().z; let mut found_best = false; let origin = Point{x:0, y:0, z:0, r:0}; let mut best_cell = Point{x:0, y:0, z:0, r:0}; let mut grid_size = maxx - minx; while grid_size > 0 { let mut max_count = 0; let mut x = minx; while x <= maxx { let mut y = miny; 
while y <= maxy { let mut z = minz; while z <= maxz { let count = nanobots.iter().fold(0, |sum, a| { let d = a.distance(&Point{x, y, z, r:0}) as i32; if (d - a.r as i32) < grid_size { sum + 1 } else { sum } }); if max_count < count { max_count = count; best_cell.set(x, y, z); found_best = true; } else if max_count == count { if !found_best || (origin.distance(&Point{x, y, z, r:0}) < best_cell.distance(&origin)) { best_cell.set(x, y, z); found_best = true; } } z += grid_size; } y += grid_size; } x += grid_size; } minx = best_cell.x - grid_size; miny = best_cell.y - grid_size; minz = best_cell.z - grid_size; maxx = best_cell.x + grid_size; maxy = best_cell.y + grid_size; maxz = best_cell.z + grid_size; grid_size = grid_size / 2; } origin.distance(&best_cell) } #[cfg(test)] mod tests { #[test] fn test_part1_ex() { use part1; assert_eq!(part1(r"C:\Users\Igascoigne\advent2018\dec_01_01\test.txt"), 7); } } fn main() { println!("result: {}", part2(r"C:\Users\Igascoigne\advent2018\dec_01_01\input.txt")); }
#![feature(test)] use std::error::Error; use std::fs::File; use std::io::{BufRead, BufReader}; #[macro_use] extern crate scan_fmt; #[derive(Debug)] struct Point { x: i32, y: i32, z: i32, r: u32 } impl Point { pub fn set(&mut self, x: i32, y: i32, z: i32) { self.x = x; self.y = y; self.z = z; } pub fn distance(&self, other: &Point) -> u32 { (i32::abs(self.x - other.x) + i32::abs(self.y - other.y) + i32::abs(self.z - other.z)) as u32 } pub fn in_range(&self, other: &Point) -> bool { self.distance(other) <= self.r } } fn parse(path: &str) -> Vec<Point> { let file = match File::open(path) { Err(why) => panic!("couldn't open {}: {}", path, Error::description(&why)), Ok(file) => file, }; let mut result = Vec::new(); let reader = BufReader::new(file); for line in reader.lines() { match line { Ok(line) => { let (x, y, z, r) = scan_fmt!(&line, "pos=<{},{},{}>, r={}", i32, i32, i32, u32); result.push(Point {x: x.unwrap(), y: y.unwrap(), z: z.unwrap(), r: r.unwrap()}); }, Err(e) => println!("err: {}", e) } } result } fn get_max(points: &Vec<Point>) -> Point { let max = points.iter().max_by_key(|x| x.r).unwrap(); Point {x: max.x, y: max.y, z: max.z, r: max.r} } fn part1(path: &str) -> u32 { let mut points = parse(path); let max = get_max(&points); points.retain(|x| max.in_range(x)); points.len() as u32 } fn part2(path: &str) -> u32 { let nanobots = parse(path); let mut minx = nanobots.iter().min_by_key(|x| x.x).unwrap().x; let mut miny = nanobots.iter().min_by_key(|x| x.y).unwrap().y; let mut minz = nanobots.iter().min_by_key(|x| x.z).unwrap().z; let mut maxx = nanobots.iter().max_by_key(|x| x.x).unwrap().x; let mut maxy = nanobots.iter().max_by_key(|x| x.y).unwrap().y; let mut maxz = nanobots.iter().max_by_key(|x| x.z).unwrap().z; let mut found_best = false; let origin = Point{x:0, y:0, z:0, r:0}; let mut best_cell = Point{x:0, y:0, z:0, r:0};
#[cfg(test)] mod tests { #[test] fn test_part1_ex() { use part1; assert_eq!(part1(r"C:\Users\Igascoigne\advent2018\dec_01_01\test.txt"), 7); } } fn main() { println!("result: {}", part2(r"C:\Users\Igascoigne\advent2018\dec_01_01\input.txt")); }
let mut grid_size = maxx - minx; while grid_size > 0 { let mut max_count = 0; let mut x = minx; while x <= maxx { let mut y = miny; while y <= maxy { let mut z = minz; while z <= maxz { let count = nanobots.iter().fold(0, |sum, a| { let d = a.distance(&Point{x, y, z, r:0}) as i32; if (d - a.r as i32) < grid_size { sum + 1 } else { sum } }); if max_count < count { max_count = count; best_cell.set(x, y, z); found_best = true; } else if max_count == count { if !found_best || (origin.distance(&Point{x, y, z, r:0}) < best_cell.distance(&origin)) { best_cell.set(x, y, z); found_best = true; } } z += grid_size; } y += grid_size; } x += grid_size; } minx = best_cell.x - grid_size; miny = best_cell.y - grid_size; minz = best_cell.z - grid_size; maxx = best_cell.x + grid_size; maxy = best_cell.y + grid_size; maxz = best_cell.z + grid_size; grid_size = grid_size / 2; } origin.distance(&best_cell) }
function_block-function_prefix_line
[ { "content": "fn part1(path: &str, verbose: bool) -> u32 {\n\n let mut armies = parse(path);\n\n let mut round = 1;\n\n loop {\n\n if verbose {\n\n println!(\"\");\n\n for army in armies.iter_mut() {\n\n army.groups.sort_by_key(|x| x.id);\n\n army.print();\n\n }\n\n println!(\"\");\n\n }\n\n\n\n let immune_count = armies[IMMUNE_SYSTEM].unit_count();\n\n let infection_count = armies[INFECTION].unit_count();\n\n if immune_count == 0 && infection_count == 0 {\n\n // tie\n\n return 0;\n\n } else if immune_count == 0 {\n\n return infection_count;\n", "file_path": "day24.rs", "rank": 0, "score": 242360.35556339828 }, { "content": "#[allow(dead_code)]\n\nfn part1(path: &str, print: bool) -> u32 {\n\n let mut map = parse(path, 3);\n\n let mut round = 0;\n\n\n\n if print {\n\n println!(\"Initially:\");\n\n map.print();\n\n println!(\"\");\n\n }\n\n\n\n loop {\n\n round += 1;\n\n println!(\"simulating {}\", round);\n\n let done = map.is_done();\n\n if done || map.process_turn(false).0 {\n\n if print {\n\n println!(\"Round {} incomplete:\", round);\n\n map.print();\n\n println!(\"\");\n\n }\n", "file_path": "day15.rs", "rank": 1, "score": 242360.35556339833 }, { "content": "fn part1(path: &str, n:u32) -> i32 {\n\n let mut problem = parse_input(path);\n\n println!(\"0: {} plants: {}\", problem.state_to_string(), problem.num_plants());\n\n for x in 1..=n {\n\n let gen = compute_generation(&problem);\n\n problem.state = gen.0;\n\n problem.zero = gen.1;\n\n println!(\"{}: {} zero: {} num_plants: {}\", x, problem.state_to_string(), problem.zero, problem.num_plants());\n\n }\n\n problem.sum_pots()\n\n}\n\n\n", "file_path": "day12.rs", "rank": 2, "score": 229267.2647667757 }, { "content": "#[allow(dead_code)]\n\nfn solution(path: &str) -> (u32, u32) {\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut positions = Vec::new();\n\n let mut coord = (0, 0);\n\n\n\n let 
mut distance_field = HashMap::new();\n\n distance_field.insert(coord, 0u32);\n\n\n\n let mut delta_coord = HashMap::new();\n\n delta_coord.insert('N', (0, -1));\n\n delta_coord.insert('S', (0, 1));\n\n delta_coord.insert('E', (-1, 0));\n\n delta_coord.insert('W', (1, 0));\n\n\n\n let reader = BufReader::new(file);\n\n let mut lc = 0;\n", "file_path": "day20.rs", "rank": 3, "score": 217771.07934198176 }, { "content": "#[allow(dead_code)]\n\nfn part2(path:&str) -> i32 {\n\n use indextree::Arena;\n\n let arena = &mut Arena::new();\n\n let input = parse_input(path);\n\n\n\n let mut cursor: usize = 0;\n\n build_tree_recursive(arena, &input, &mut cursor, None);\n\n let mut val = 0;\n\n get_node_value(arena, indextree::NodeId::new(0), &mut val);\n\n val\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_part1_example() {\n\n use part1;\n\n assert_eq!(part1(\"C:\\\\Users\\\\lgascoigne\\\\IdeaProjects\\\\advent\\\\test.txt\"), 138);\n\n }\n\n\n", "file_path": "day08.rs", "rank": 4, "score": 213316.79924115344 }, { "content": "#[allow(dead_code)]\n\nfn part1(path:&str) -> i32 {\n\n let mut coords = parse_coords(path);\n\n\n\n let extents = Coord::normalize_extents(&mut coords);\n\n let xmin = extents.0;\n\n let xmax = extents.1;\n\n let ymin = extents.2;\n\n let ymax = extents.3;\n\n\n\n // this is a dumb algorithm, order is higher than needed should do simultaneous flood from the origins\n\n let dim = Grid::len(xmin, xmax, ymin, ymax);\n\n let mut grid = Grid {cell_flag: vec![UNVISITED; dim as usize], xmin:xmin, xmax:xmax, ymin:ymin, ymax:ymax};\n\n let dimx = grid.dimx();\n\n let dimy = grid.dimy();\n\n let mut test_coord : Coord = Coord {x:0, y:0};\n\n let coords_len = coords.len();\n\n\n\n for i in 0..dim {\n\n test_coord.y = i / dimx;\n\n test_coord.x = i - (test_coord.y * dimx);\n", "file_path": "day06.rs", "rank": 5, "score": 213316.79924115344 }, { "content": "#[allow(dead_code)]\n\nfn part1(path:&str) -> i32 {\n\n use indextree::Arena;\n\n let arena = 
&mut Arena::new();\n\n let input = parse_input(path);\n\n\n\n let mut cursor: usize = 0;\n\n build_tree_recursive(arena, &input, &mut cursor, None);\n\n let mut sum: i32 = 0;\n\n for node in arena.iter() {\n\n let meta_sum: i32 = node.data.metadata.iter().sum();\n\n sum += meta_sum;\n\n }\n\n sum\n\n}\n\n\n", "file_path": "day08.rs", "rank": 6, "score": 213316.79924115344 }, { "content": "#[allow(dead_code)]\n\nfn part1(path: &str) -> u32 {\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut device = Device::new();\n\n let mut regs: [u32; 4] = [0u32; 4];\n\n let mut inst: [u32; 4] = [0u32; 4];\n\n let mut result: [u32; 4] = [0u32; 4];\n\n\n\n let reader = BufReader::new(file);\n\n let mut lc = 0;\n\n let mut count = 0u32;\n\n\n\n let mut stats: Vec<Vec<u32>> = vec![vec![0u32; 16]; 16];\n\n\n\n for line in reader.lines() {\n\n match line {\n\n Ok(line) => {\n", "file_path": "day16.rs", "rank": 7, "score": 211141.63275039027 }, { "content": "#[allow(dead_code)]\n\nfn part2(path: &str) -> u32 {\n\n // lame but tired of this problem\n\n // use to figure out ap then use ap in part 1\n\n for x in 4..50u32 {\n\n println!(\"ap: {}\", x);\n\n let mut map = parse(path, x);\n\n let mut iter = 1;\n\n loop {\n\n let done = map.is_done();\n\n let result = map.process_turn(true);\n\n if done || result.0 {\n\n if result.1 {\n\n break;\n\n } else {\n\n return x;\n\n }\n\n }\n\n if iter % 10 == 0 {\n\n println!(\"iter: {} hp: {} goblins: {} elves: {}\", iter, map.hp_remaining(), map.num_class('G'), map.num_class('E'));\n\n }\n", "file_path": "day15.rs", "rank": 8, "score": 211141.63275039027 }, { "content": "#[allow(dead_code)]\n\nfn part2(path: &str) -> u32 {\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut device = Device::new();\n\n 
device.instructions.sort_by(|a, b| a.opcode.cmp(&b.opcode));\n\n let reader = BufReader::new(file);\n\n for line in reader.lines() {\n\n match line {\n\n Ok(line) => {\n\n let (a, b, c, d) = scan_fmt!(&line, \"{} {} {} {}\", u32, u32, u32, u32);\n\n let mut inst: [u32; 4] = [a.unwrap(), b.unwrap(), c.unwrap(), d.unwrap()];\n\n println!(\"exec: {:?}\", inst);\n\n device.execute_part2(&inst);\n\n }\n\n Err(e) => println!(\"err: {}\", e)\n\n }\n\n }\n", "file_path": "day16.rs", "rank": 10, "score": 211141.6327503903 }, { "content": "fn part2(path: &str) -> u32 {\n\n let mut armies = parse(path);\n\n let mut boost = 1u32;\n\n loop {\n\n println!(\"boost: {}\", boost);\n\n let result = boosted(&mut armies.clone(), boost);\n\n if result.is_some() {\n\n return result.unwrap();\n\n }\n\n boost += 1;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_part1_ex() {\n\n use part1;\n\n assert_eq!(part1(r\"C:\\Users\\Igascoigne\\advent2018\\dec_01_01\\test.txt\", false), 5216);\n\n }\n\n\n\n #[test]\n\n fn test_part1_input() {\n\n use part1;\n\n assert_eq!(part1(r\"C:\\Users\\Igascoigne\\advent2018\\dec_01_01\\input.txt\", false), 10538);\n\n }\n\n}\n\n\n", "file_path": "day24.rs", "rank": 12, "score": 211141.63275039027 }, { "content": "#[allow(dead_code)]\n\nfn part1(path: &str) -> Option<(u32, u32)> {\n\n let mut problem = parse(path);\n\n let mut panic = 4096;\n\n while panic > 0 {\n\n let collision = problem.tick(false);\n\n if collision.is_some() {\n\n return Some(collision.unwrap());\n\n }\n\n panic -= 1;\n\n }\n\n None\n\n}\n\n\n", "file_path": "day13.rs", "rank": 13, "score": 208453.36703605796 }, { "content": "#[allow(dead_code)]\n\nfn part2(path: &str) -> Option<(u32, u32)> {\n\n let mut problem = parse(path);\n\n let mut panic = 1000000;\n\n while panic > 0 {\n\n problem.tick(true);\n\n if problem.carts.len() == 1 {\n\n let cart = problem.carts.iter().nth(0).unwrap().1;\n\n return Some((cart.x, cart.y));\n\n }\n\n panic -= 1;\n\n }\n\n 
None\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_part1_ex0() {\n\n use part1;\n\n assert_eq!(part1(r\"C:\\Users\\Igascoigne\\advent2018\\dec_01_01\\test.txt\").unwrap(), (7, 3));\n", "file_path": "day13.rs", "rank": 14, "score": 208453.36703605796 }, { "content": "#[allow(dead_code)]\n\nfn solution(path: &str, reg0: u32) -> u32 {\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut device = Device::new();\n\n device.registers[0] = reg0;\n\n device.instructions.sort_by(|a, b| a.opcode.cmp(&b.opcode));\n\n\n\n let reader = BufReader::new(file);\n\n let mut lc = 0;\n\n for line in reader.lines() {\n\n match line {\n\n Ok(line) => {\n\n if lc == 0 {\n\n let a = scan_fmt!(&line, \"#ip {}\", u32);\n\n device.bound = a.unwrap() as usize;\n\n lc += 1;\n\n } else {\n", "file_path": "day19.rs", "rank": 15, "score": 208453.36703605796 }, { "content": "#[allow(dead_code)]\n\nfn solution(path: &str, reg0: u32) -> u32 {\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut device = Device::new();\n\n device.registers[0] = reg0;\n\n device.instructions.sort_by(|a, b| a.opcode.cmp(&b.opcode));\n\n\n\n let reader = BufReader::new(file);\n\n let mut lc = 0;\n\n for line in reader.lines() {\n\n match line {\n\n Ok(line) => {\n\n if lc == 0 {\n\n let a = scan_fmt!(&line, \"#ip {}\", u32);\n\n device.bound = a.unwrap() as usize;\n\n lc += 1;\n\n } else {\n", "file_path": "day21.rs", "rank": 16, "score": 208453.36703605796 }, { "content": "fn parse_points(path:&str) -> Vec<(Point)> {\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut result = Vec::new();\n\n let reader = BufReader::new(file);\n\n for line in reader.lines() {\n\n 
match line {\n\n Ok(line) => {\n\n let (a, b, c, d) = scan_fmt!(&line, \"position=<{},{}> velocity=<{},{}>\", i32, i32, i32, i32);\n\n result.push(Point {pos_x: a.unwrap(), pos_y: b.unwrap(), vel_x: c.unwrap(), vel_y: d.unwrap()});\n\n }\n\n Err(e) => println!(\"err: {}\", e)\n\n }\n\n }\n\n return result;\n\n}\n\n\n", "file_path": "day10.rs", "rank": 17, "score": 207758.3528203956 }, { "content": "fn part2(path:&str, total_dist:i32) -> i32 {\n\n let mut coords = parse_coords(path);\n\n\n\n let extents = Coord::normalize_extents(&mut coords);\n\n let xmin = extents.0;\n\n let xmax = extents.1;\n\n let ymin = extents.2;\n\n let ymax = extents.3;\n\n\n\n let dim = Grid::len(xmin, xmax, ymin, ymax);\n\n let mut grid = Grid {cell_flag: vec![UNVISITED; dim as usize], xmin:xmin, xmax:xmax, ymin:ymin, ymax:ymax};\n\n let dimx = grid.dimx();\n\n let mut test_coord : Coord = Coord {x:0, y:0};\n\n let coords_len = coords.len();\n\n\n\n for i in 0..dim {\n\n test_coord.y = i / dimx;\n\n test_coord.x = i - (test_coord.y * dimx);\n\n\n\n let mut sum = 0;\n", "file_path": "day06.rs", "rank": 18, "score": 203197.4342224619 }, { "content": "fn parse_input(path:&str) -> Vec<i32> {\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut result = Vec::new();\n\n let reader = BufReader::new(file);\n\n for line in reader.lines() {\n\n match line {\n\n Ok(line) => {\n\n result = line.split(\" \").map(|x| x.parse::<i32>().unwrap()).collect();\n\n }\n\n Err(e) => println!(\"err: {}\", e)\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "day08.rs", "rank": 20, "score": 197795.98660943232 }, { "content": "fn part1(path:&str, output:&str, frame_count: u32, frame_duration: u32) {\n\n let points = parse_points(path);\n\n\n\n let tr = find_start_time_dims(&points);\n\n if tr.is_none() {\n\n panic!(\"failed to find start time\");\n\n }\n\n\n\n let std = tr.unwrap();\n\n let mut 
start_time = std.1;\n\n println!(\"start_time: {}\", start_time);\n\n if frame_count * frame_duration > start_time {\n\n start_time = 0;\n\n } else {\n\n start_time -= frame_count * frame_duration;\n\n }\n\n\n\n let dims = extents_at_time(&points, start_time);\n\n let max_dims = usize::max(dims.0, dims.1) * 4;\n\n println!(\"max_dims: {}\", max_dims);\n", "file_path": "day10.rs", "rank": 21, "score": 196072.64722934383 }, { "content": "fn solution(path: &str, print: bool) -> usize {\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut lines = Vec::new();\n\n let reader = BufReader::new(file);\n\n for line in reader.lines() {\n\n match line {\n\n Ok(line) => {\n\n let (x, y0, y1) = scan_fmt!(&line, \"x={}, y={}..{}\", usize, usize, usize);\n\n if x.is_some() && y0.is_some() && y1.is_some() {\n\n lines.push((x.unwrap(), x.unwrap(), y0.unwrap(), y1.unwrap()));\n\n } else {\n\n let (y, x0, x1) = scan_fmt!(&line, \"y={}, x={}..{}\", usize, usize, usize);\n\n assert!(y.is_some() && x0.is_some() && x1.is_some());\n\n lines.push( (x0.unwrap(), x1.unwrap(), y.unwrap(), y.unwrap()));\n\n }\n\n }\n", "file_path": "day17.rs", "rank": 22, "score": 193281.96075434732 }, { "content": "fn part2(path: &str, dim: usize) -> i32 {\n\n let mut area = parse(path, dim);\n\n let mut hm = HashMap::new();\n\n let mut repeat = 0;\n\n let mut cycle_start = 0;\n\n let mut res_counts = vec![0];\n\n for x in 1..10000 {\n\n area.tick();\n\n area.update_from_next();\n\n let state = area.to_string();\n\n if hm.contains_key(&state) {\n\n repeat = x;\n\n cycle_start = *hm.get(&state).unwrap();\n\n println!(\"repeat: {} cycle_start: {}\", repeat, cycle_start);\n\n break;\n\n }\n\n hm.insert(state, x);\n\n res_counts.push(area.count_all_cells('|') * area.count_all_cells('#'));\n\n }\n\n \n", "file_path": "day18.rs", "rank": 23, "score": 192981.85546658974 }, { "content": 
"#[allow(dead_code)]\n\nfn part2(path:&str, base_duration:i32, num_workers:i32) -> i32 {\n\n let reqs = parse_requirements(path);\n\n let mut graph = Graph {steps: Vec::new(), idle_workers: num_workers, active_workers: Vec::new()};\n\n for req in &reqs {\n\n graph.add_requirement(req);\n\n }\n\n for step in graph.steps.iter_mut() {\n\n step.duration = base_duration + step.name as i32 - 'A' as i32 + 1;\n\n }\n\n graph.execute_time()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_part1_example() {\n\n use part1;\n\n assert_eq!(part1(\"C:\\\\Users\\\\lgascoigne\\\\IdeaProjects\\\\advent\\\\test.txt\", 1), \"CABDFE\");\n\n }\n\n\n", "file_path": "day07.rs", "rank": 24, "score": 192689.49728485444 }, { "content": "fn parse(path: &str, ap: u32) -> Map {\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut map = Map::default();\n\n let mut y = 0u32;\n\n let reader = BufReader::new(file);\n\n for line in reader.lines() {\n\n match line {\n\n Ok(line) => {\n\n let mut x = 0u32;\n\n let mut tmp = Vec::new();\n\n for ch in line.chars() {\n\n match ch {\n\n 'E' => {\n\n map.npcs.insert((x, y), Npc { class: ch, x: x, y: y, ap: ap, hp: 200 });\n\n tmp.push(Square {x: x, y: y, contents: '.'})\n\n },\n", "file_path": "day15.rs", "rank": 25, "score": 191018.3023324766 }, { "content": "fn extents_at_time(points: &Vec<Point>, time: u32) -> (usize, usize, i32, i32) {\n\n let new_points = points_at_time(points, time);\n\n return get_extents(&new_points);\n\n}\n\n\n", "file_path": "day10.rs", "rank": 26, "score": 187474.0620092029 }, { "content": "fn fight(armies: &mut Vec<Army>, round: u32, verbose: bool) -> bool {\n\n let mut attacks = HashMap::new();\n\n\n\n Group::sort_by_ep(&mut armies[IMMUNE_SYSTEM].groups);\n\n armies[IMMUNE_SYSTEM].target_selection(&armies[INFECTION], &mut attacks, false);\n\n\n\n Group::sort_by_ep(&mut armies[INFECTION].groups);\n\n 
armies[INFECTION].target_selection(&armies[IMMUNE_SYSTEM], &mut attacks, false);\n\n\n\n let mut all_groups = Vec::new();\n\n for army in armies.iter() {\n\n all_groups.append(&mut army.groups.clone());\n\n }\n\n Group::sort_by_initiative(&mut all_groups);\n\n\n\n if verbose {\n\n println!(\"\");\n\n }\n\n\n\n let mut total_killed = 0;\n", "file_path": "day24.rs", "rank": 27, "score": 180448.42177393852 }, { "content": "fn part2(path: &str, n:u32) -> usize {\n\n let mut repeat: HashSet<Vec<bool>> = HashSet::new();\n\n let mut problem = parse_input(path);\n\n\n\n let mut repeat_gen = 0;\n\n let mut first = 0;\n\n let mut pattern = Vec::new();\n\n for x in 1..=n {\n\n let gen = compute_generation(&problem);\n\n problem.state = gen.0;\n\n problem.zero = gen.1;\n\n\n\n let mut trimmed = problem.state.clone();\n\n for i in 0..trimmed.len() {\n\n if trimmed[i] {\n\n first = i;\n\n break;\n\n }\n\n }\n\n trimmed.drain(0..first);\n", "file_path": "day12.rs", "rank": 28, "score": 175667.23595710073 }, { "content": "fn or(a: u32, b: u32, c: &mut u32) {\n\n *c = a | b;\n\n}\n\n\n", "file_path": "day21.rs", "rank": 29, "score": 169313.86139744002 }, { "content": "fn and(a: u32, b: u32, c: &mut u32) {\n\n *c = a & b;\n\n}\n\n\n", "file_path": "day16.rs", "rank": 30, "score": 169313.86139744002 }, { "content": "fn and(a: u32, b: u32, c: &mut u32) {\n\n *c = a & b;\n\n}\n\n\n", "file_path": "day19.rs", "rank": 31, "score": 169313.86139744002 }, { "content": "fn and(a: u32, b: u32, c: &mut u32) {\n\n *c = a & b;\n\n}\n\n\n", "file_path": "day21.rs", "rank": 32, "score": 169313.86139744002 }, { "content": "fn or(a: u32, b: u32, c: &mut u32) {\n\n *c = a | b;\n\n}\n\n\n", "file_path": "day19.rs", "rank": 33, "score": 169313.86139744002 }, { "content": "fn or(a: u32, b: u32, c: &mut u32) {\n\n *c = a | b;\n\n}\n\n\n", "file_path": "day16.rs", "rank": 34, "score": 169313.86139744002 }, { "content": "type Op = fn(u32, u32, &mut u32);\n\n\n", "file_path": "day21.rs", "rank": 35, 
"score": 167862.6886515934 }, { "content": "type Op = fn(u32, u32, &mut u32);\n\n\n", "file_path": "day16.rs", "rank": 36, "score": 167862.6886515934 }, { "content": "type Op = fn(u32, u32, &mut u32);\n\n\n", "file_path": "day19.rs", "rank": 37, "score": 167862.6886515934 }, { "content": "#[allow(dead_code)]\n\nfn part1(path:&str, num_workers:i32) -> String {\n\n let reqs = parse_requirements(path);\n\n let mut graph = Graph {steps: Vec::new(), idle_workers: num_workers, active_workers: Vec::new()};\n\n for req in &reqs {\n\n graph.add_requirement(req);\n\n }\n\n graph.execute()\n\n}\n\n\n", "file_path": "day07.rs", "rank": 38, "score": 167009.37537606782 }, { "content": "fn add(a: u32, b: u32, c: &mut u32) {\n\n // diff vs. day19.rs must wrap\n\n *c = a.wrapping_add(b);\n\n}\n\n\n", "file_path": "day21.rs", "rank": 39, "score": 165666.85570481274 }, { "content": "fn add(a: u32, b: u32, c: &mut u32) {\n\n *c = a + b;\n\n}\n\n\n", "file_path": "day16.rs", "rank": 40, "score": 165666.85570481274 }, { "content": "fn eq(a: u32, b: u32, c: &mut u32) {\n\n if a == b { *c = 1; } else { *c = 0 }\n\n}\n\n\n", "file_path": "day19.rs", "rank": 41, "score": 165666.85570481274 }, { "content": "fn set(a: u32, _b: u32, c: &mut u32) {\n\n *c = a;\n\n}\n\n\n", "file_path": "day16.rs", "rank": 42, "score": 165666.85570481274 }, { "content": "fn set(a: u32, _b: u32, c: &mut u32) {\n\n *c = a;\n\n}\n\n\n", "file_path": "day21.rs", "rank": 43, "score": 165666.85570481274 }, { "content": "fn eq(a: u32, b: u32, c: &mut u32) {\n\n if a == b { *c = 1; } else { *c = 0 }\n\n}\n\n\n", "file_path": "day16.rs", "rank": 44, "score": 165666.85570481274 }, { "content": "fn mul(a: u32, b: u32, c: &mut u32) {\n\n *c = a * b;\n\n}\n\n\n", "file_path": "day19.rs", "rank": 45, "score": 165666.85570481274 }, { "content": "fn mul(a: u32, b: u32, c: &mut u32) {\n\n // diff vs. 
day19.rs must wrap\n\n *c = a.wrapping_mul(b);\n\n}\n\n\n", "file_path": "day21.rs", "rank": 46, "score": 165666.85570481274 }, { "content": "fn eq(a: u32, b: u32, c: &mut u32) {\n\n if a == b { *c = 1; } else { *c = 0 }\n\n}\n\n\n", "file_path": "day21.rs", "rank": 47, "score": 165666.85570481274 }, { "content": "fn set(a: u32, _b: u32, c: &mut u32) {\n\n *c = a;\n\n}\n\n\n", "file_path": "day19.rs", "rank": 48, "score": 165666.85570481274 }, { "content": "fn gt(a: u32, b: u32, c: &mut u32) {\n\n if a > b { *c = 1; } else { *c = 0 }\n\n}\n\n\n", "file_path": "day21.rs", "rank": 49, "score": 165666.85570481274 }, { "content": "fn mul(a: u32, b: u32, c: &mut u32) {\n\n *c = a * b;\n\n}\n\n\n", "file_path": "day16.rs", "rank": 50, "score": 165666.85570481274 }, { "content": "fn gt(a: u32, b: u32, c: &mut u32) {\n\n if a > b { *c = 1; } else { *c = 0 }\n\n}\n\n\n", "file_path": "day16.rs", "rank": 51, "score": 165666.85570481274 }, { "content": "fn gt(a: u32, b: u32, c: &mut u32) {\n\n if a > b { *c = 1; } else { *c = 0 }\n\n}\n\n\n", "file_path": "day19.rs", "rank": 52, "score": 165666.85570481274 }, { "content": "fn add(a: u32, b: u32, c: &mut u32) {\n\n *c = a + b;\n\n}\n\n\n", "file_path": "day19.rs", "rank": 53, "score": 165666.85570481274 }, { "content": "fn part1(path: &str, dim: usize, iters: usize, print: bool) -> usize {\n\n let mut area = parse(path, dim);\n\n\n\n if print {\n\n println!(\"Initial state:\");\n\n area.print();\n\n }\n\n\n\n for x in 1..=iters {\n\n area.tick();\n\n area.update_from_next();\n\n if print {\n\n println!(\"\");\n\n if x == 1 { println!(\"After{} minute:\", x); } else { println!(\"After {} minutes:\", x); }\n\n area.print();\n\n println!(\"str: {}\", area.to_string());\n\n }\n\n }\n\n\n\n area.count_all_cells('|') * area.count_all_cells('#')\n\n}\n\n\n", "file_path": "day18.rs", "rank": 54, "score": 162345.5540579408 }, { "content": "fn part1(input: u32) -> (i32, i32, i64) {\n\n let mut grid = Grid {cells: vec![0i64; GRID_DIMS], 
table: vec![0i64; GRID_DIMS]};\n\n for x in 0..GRID_DIMS {\n\n let coords = Grid::cell_to_coords(x);\n\n grid.cells[x] = Grid::get_cell_power_level(coords.0, coords.1, input);\n\n }\n\n let mut results = Vec::new();\n\n for x in 0..GRID_DIMS {\n\n let result = grid.sample_nxn(x, 3);\n\n if result.is_some() {\n\n results.push(result.unwrap());\n\n }\n\n }\n\n *results.iter().max_by_key(|x| x.2).unwrap()\n\n}\n\n\n", "file_path": "day11.rs", "rank": 55, "score": 160440.24538461945 }, { "content": "fn part2(input: &str, sequence: &str) -> u32 {\n\n let mut seq_vec = Vec::new();\n\n let mut scoreboard = Vec::new();\n\n for ch in input.chars() {\n\n scoreboard.push(ch.to_digit(10).unwrap());\n\n }\n\n for ch in sequence.chars() {\n\n seq_vec.push(ch.to_digit(10).unwrap());\n\n }\n\n let seq_len = seq_vec.len();\n\n let mut elfs: Vec<usize> = (0..scoreboard.len()).collect();\n\n loop {\n\n let sum: u32 = elfs.iter().fold(0, |sum, x| sum + scoreboard[*x]);\n\n let created = sum.to_string();\n\n for ch in created.chars() {\n\n scoreboard.push(ch.to_digit(10).unwrap());\n\n let len = scoreboard.len();\n\n if len >= seq_len {\n\n let mut found = true;\n\n for x in 0..seq_len {\n", "file_path": "day14.rs", "rank": 56, "score": 158784.78442001028 }, { "content": "fn get_extents(points: &Vec<Point>) -> (usize, usize, i32, i32) {\n\n let mut xmin = std::i32::MAX;\n\n let mut ymin = std::i32::MAX;\n\n let mut xmax = std::i32::MIN;\n\n let mut ymax = std::i32::MIN;\n\n for point in points {\n\n if point.pos_x < xmin {\n\n xmin = point.pos_x;\n\n }\n\n if point.pos_y < ymin {\n\n ymin = point.pos_y;\n\n }\n\n if point.pos_x > xmax {\n\n xmax = point.pos_x;\n\n }\n\n if point.pos_y > ymax {\n\n ymax = point.pos_y;\n\n }\n\n }\n\n let width =(xmax - xmin) as usize;\n\n let height = (ymax - ymin) as usize;\n\n (width, height, xmin.abs(), ymin.abs())\n\n}\n\n\n", "file_path": "day10.rs", "rank": 57, "score": 157291.0470657635 }, { "content": "fn part1(path: &str) -> usize {\n\n let 
points = parse(path);\n\n let mut tree = KdTree::new(DIMS);\n\n for point in &points {\n\n tree.add(&point.0, point.1).unwrap();\n\n }\n\n\n\n let mut constellations = Vec::new();\n\n let mut assigned: HashSet<usize> = HashSet::new();\n\n for point in &points {\n\n if assigned.contains(&point.1) {\n\n continue;\n\n }\n\n\n\n let mut cluster: HashSet<usize> = HashSet::new();\n\n add_recursive(&mut cluster, &mut assigned, &points, &tree, point);\n\n constellations.push(cluster);\n\n }\n\n\n\n constellations.len()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_part1_ex() {\n\n }\n\n}\n\n\n", "file_path": "day25.rs", "rank": 58, "score": 156888.09642198685 }, { "content": "fn parse(path: &str) -> Problem {\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut problem = Problem::default();\n\n let mut y = 0u32;\n\n let reader = BufReader::new(file);\n\n for line in reader.lines() {\n\n match line {\n\n Ok(line) => {\n\n let mut x = 0u32;\n\n let mut tmp = Vec::new();\n\n for ch in line.chars() {\n\n match ch {\n\n '<' => {\n\n problem.carts.insert((x, y), Cart {x: x, y: y, orientation:ch, next_turn:0});\n\n tmp.push(Cell {x: x, y: y, contents: '-'})\n\n }\n", "file_path": "day13.rs", "rank": 59, "score": 156888.09642198685 }, { "content": "fn points_at_time(points: &Vec<Point>, time: u32) -> Vec<Point> {\n\n let mut result = Vec::with_capacity(points.len());\n\n let t = time as i32;\n\n for x in points {\n\n let pos_x = x.pos_x + x.vel_x * t;\n\n let pos_y = x.pos_y + x.vel_y * t;\n\n result.push(Point {pos_x: pos_x, pos_y: pos_y, vel_x:0, vel_y:0});\n\n }\n\n result\n\n}\n\n\n", "file_path": "day10.rs", "rank": 60, "score": 154698.91832971855 }, { "content": "// without summed area it's too slow\n\nfn part2_summed_area(input: u32) -> (i32, i32, i64) {\n\n let mut grid = Grid {cells: vec![0i64; GRID_DIMS], table: vec![0i64; GRID_DIMS]};\n\n 
for x in 0..GRID_DIMS {\n\n let coords = Grid::cell_to_coords(x);\n\n grid.cells[x] = Grid::get_cell_power_level(coords.0, coords.1, input);\n\n }\n\n grid.build_table();\n\n\n\n let mut results = vec![(0i32, 0i32, 0i64); GRID_DIM];\n\n for n in 1..=GRID_DIM {\n\n let mut result = Vec::new();\n\n for x in 0..GRID_DIMS {\n\n let tmp = grid.sample_table_nxn(x, n);\n\n if tmp.is_some() {\n\n result.push(tmp.unwrap());\n\n }\n\n }\n\n results[n-1] = *result.iter().max_by_key(|x| x.2).unwrap();\n\n }\n\n\n", "file_path": "day11.rs", "rank": 61, "score": 154194.92297011128 }, { "content": "fn parse_input(path:&str) -> Problem {\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut problem = Problem { state: vec![false; 3], zero: 4, notes: Vec::new() };\n\n let reader = BufReader::new(file);\n\n let mut line_count = 0;\n\n let mut num = 0;\n\n for line in reader.lines() {\n\n match line {\n\n Ok(line) => {\n\n if line_count == 0 {\n\n let a = scan_fmt!(&line, \"initial state: {}\", String);\n\n for char in a.unwrap().chars() {\n\n if char == '#' {\n\n problem.state.push(true);\n\n } else {\n\n problem.state.push(false);\n", "file_path": "day12.rs", "rank": 62, "score": 152925.8473598981 }, { "content": "fn parse(path: &str) -> Vec<Army> {\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let re0 = Regex::new(r\"(\\d{1,}) units each with (\\d{1,}) hit points\").unwrap();\n\n let re1 = Regex::new(r\"with an attack that does (\\d{1,}) ([^\\s]+) damage at initiative (\\d{1,})\").unwrap();\n\n let re2 = Regex::new(r\"weak to (\\w+)(, \\w+)*\").unwrap();\n\n let re3 = Regex::new(r\"immune to (\\w+)?(, \\w+)?(, \\w+)*\").unwrap();\n\n\n\n let mut result = Vec::new();\n\n result.push(Army{ id: IMMUNE_SYSTEM, name: String::from(\"Immune System\"), groups: 
Vec::new() });\n\n result.push(Army{ id: INFECTION, name: String::from(\"Infection\"), groups: Vec::new() });\n\n\n\n let mut army = IMMUNE_SYSTEM;\n\n let mut group_id = 1u32;\n\n\n\n let reader = BufReader::new(file);\n\n for line in reader.lines() {\n", "file_path": "day24.rs", "rank": 63, "score": 149039.30612724344 }, { "content": "fn parse_coords(path:&str) -> Vec<Coord> {\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut coords = Vec::new();\n\n let reader = BufReader::new(file);\n\n for line in reader.lines() {\n\n match line {\n\n Ok(line) => {\n\n let (x, y) = scan_fmt!(&line, \"{}, {}\", i32, i32);\n\n coords.push(Coord {x: x.unwrap(), y: y.unwrap()});\n\n }\n\n Err(e) => println!(\"err: {}\", e)\n\n }\n\n }\n\n return coords;\n\n}\n\n\n", "file_path": "day06.rs", "rank": 64, "score": 145438.65375602554 }, { "content": "fn find_start_time_dims_step(points: &Vec<Point>, start: u32, step: u32) -> Option<(usize, u32)> {\n\n let mut time = start;\n\n let mut last_dim = std::usize::MAX;\n\n let mut last_time = 0u32;\n\n\n\n for _x in 0..128 {\n\n let dims = extents_at_time(points, time);\n\n let max = usize::max(dims.0, dims.1);\n\n if max < last_dim {\n\n last_dim = max;\n\n last_time = time;\n\n } else {\n\n return Some((last_dim, last_time));\n\n }\n\n time += step;\n\n }\n\n None\n\n}\n\n\n", "file_path": "day10.rs", "rank": 66, "score": 144109.2029363677 }, { "content": "#[allow(dead_code)]\n\nfn part1(input: &str, to_create: i32) -> String {\n\n let mut scoreboard = Vec::new();\n\n for ch in input.chars() {\n\n scoreboard.push(ch.to_digit(10).unwrap());\n\n }\n\n let mut elfs: Vec<usize> = (0..scoreboard.len()).collect();\n\n let mut recipes_created = 0;\n\n loop {\n\n let sum: u32 = elfs.iter().fold(0, |sum, x| sum + scoreboard[*x]);\n\n let created = sum.to_string();\n\n for ch in created.chars() {\n\n recipes_created += 1;\n\n 
scoreboard.push(ch.to_digit(10).unwrap());\n\n if recipes_created >= to_create + 10 {\n\n let mut output = String::new();\n\n for x in 0..10 {\n\n let ch2 = scoreboard[to_create as usize + x as usize].to_string().chars().nth(0).unwrap();\n\n output.push(ch2);\n\n }\n\n return output;\n\n }\n\n }\n\n let scoreboard_len = scoreboard.len();\n\n for elf in elfs.iter_mut() {\n\n *elf = (*elf + scoreboard[*elf] as usize + 1) % scoreboard_len;\n\n }\n\n }\n\n}\n\n\n", "file_path": "day14.rs", "rank": 67, "score": 143059.50388647756 }, { "content": "fn parse(path: &str, dim: usize) -> Area {\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut area = Area { rows: Vec::with_capacity(dim), work_row: Vec::with_capacity(dim), next: Vec::with_capacity(dim) };\n\n let reader = BufReader::new(file);\n\n for line in reader.lines() {\n\n match line {\n\n Ok(line) => {\n\n let mut row = Vec::with_capacity(dim);\n\n for ch in line.chars() {\n\n row.push(ch);\n\n }\n\n area.rows.push(row);\n\n }\n\n Err(e) => println!(\"err: {}\", e)\n\n }\n\n }\n\n area\n\n}\n\n\n", "file_path": "day18.rs", "rank": 68, "score": 141939.9075234256 }, { "content": "fn boosted(armies: &mut Vec<Army>, boost: u32) -> Option<u32> {\n\n for group in armies[IMMUNE_SYSTEM].groups.iter_mut() {\n\n group.attack.dmg += boost;\n\n }\n\n\n\n loop {\n\n let immune_count = armies[IMMUNE_SYSTEM].unit_count();\n\n let infection_count = armies[INFECTION].unit_count();\n\n if immune_count == 0 && infection_count == 0 {\n\n // tie\n\n return None;\n\n } else if immune_count == 0 {\n\n return None;\n\n } else if infection_count == 0 {\n\n return Some(immune_count);\n\n }\n\n\n\n if fight(armies, 0, false) {\n\n // stalemate\n\n return None;\n\n }\n\n }\n\n}\n\n\n", "file_path": "day24.rs", "rank": 69, "score": 141499.72596619613 }, { "content": "fn generate_pixels(points: &Vec<Point>, max_dims: usize, time: 
u32) -> Option<Vec<u8>> {\n\n let points_t = points_at_time(points, time);\n\n let dims = get_extents(&points_t);\n\n println!(\"dims: {:?}\", dims);\n\n if dims.0 > max_dims || dims.1 > max_dims {\n\n None\n\n } else {\n\n let num_pixels = max_dims * max_dims;\n\n let mut pixels = vec![0u8; num_pixels];\n\n //let shift = max_dims as i32 / 2;\n\n for x in points_t {\n\n println!(\"pos_x = {} pos_y = {}\", x.pos_x, x.pos_y);\n\n let pos_x = x.pos_x;\n\n let pos_y = x.pos_y;\n\n if pos_y >= 0 && pos_x >= 0 {\n\n let pixel_index = pos_y as usize * max_dims + pos_x as usize;\n\n if pixel_index < num_pixels {\n\n pixels[pixel_index] = 1u8;\n\n }\n\n }\n\n }\n\n Some(pixels)\n\n }\n\n}\n\n\n", "file_path": "day10.rs", "rank": 70, "score": 140335.02606842655 }, { "content": "fn parse_requirements(path:&str) -> Vec<(char, char)> {\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut requirements = Vec::new();\n\n let reader = BufReader::new(file);\n\n for line in reader.lines() {\n\n match line {\n\n Ok(line) => {\n\n let (a, b) = scan_fmt!(&line, \"Step {} must be finished before step {} can begin.\", char, char);\n\n requirements.push((a.unwrap(), b.unwrap()));\n\n }\n\n Err(e) => println!(\"err: {}\", e)\n\n }\n\n }\n\n return requirements;\n\n}\n\n\n", "file_path": "day07.rs", "rank": 71, "score": 138653.51271538308 }, { "content": "fn find_start_time_dims(points: &Vec<Point>) -> Option<(usize, u32)> {\n\n let mut start = 0;\n\n let mut step = 512;\n\n\n\n loop {\n\n let result = find_start_time_dims_step(points, start, step);\n\n if result.is_some() {\n\n let std = result.unwrap();\n\n start = u32::max(0, std.1 - step);\n\n step /= 2;\n\n if step == 0 {\n\n return result;\n\n }\n\n } else {\n\n break;\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "day10.rs", "rank": 72, "score": 137472.6837913426 }, { "content": "fn parse(path: &str) -> Vec<([f64; DIMS], 
usize)> {\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut result = Vec::new();\n\n let reader = BufReader::new(file);\n\n let mut index = 0usize;\n\n for line in reader.lines() {\n\n match line {\n\n Ok(line) => {\n\n let (x, y, z, a) = scan_fmt!(&line, \"{},{},{},{}\", i32, i32, i32, i32);\n\n let point: [f64; DIMS] = [x.unwrap() as f64, y.unwrap() as f64, z.unwrap() as f64, a.unwrap() as f64];\n\n result.push((point, index));\n\n index += 1;\n\n },\n\n Err(e) => println!(\"err: {}\", e)\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "day25.rs", "rank": 73, "score": 135487.2788385245 }, { "content": "#[allow(dead_code)]\n\nfn part2(use_strip2: bool) {\n\n let path = \"C:\\\\Users\\\\Igascoigne\\\\advent2018\\\\dec_01_01\\\\input.txt\";\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => file,\n\n };\n\n\n\n let mut line_count = 0;\n\n let reader = BufReader::new(file);\n\n for line in reader.lines() {\n\n match line {\n\n Ok(line) => {\n\n assert_eq!(line_count, 0);\n\n assert!(line.is_ascii());\n\n let original_input = line.into_bytes();\n\n let mut best_len = std::usize::MAX;\n\n for i in 0..26 {\n\n let mut result = original_input.clone();\n\n let c0 = i as u8 + 'A' as u8;\n\n let c1 = i as u8 + 'A' as u8 + 32u8;\n", "file_path": "day05.rs", "rank": 74, "score": 135070.52104572125 }, { "content": "#[derive(Debug)]\n\nstruct Point {\n\n pos_x: i32,\n\n pos_y: i32,\n\n vel_x: i32,\n\n vel_y: i32\n\n}\n\n\n", "file_path": "day10.rs", "rank": 75, "score": 133188.47777225578 }, { "content": "#[allow(unused_parens)]\n\nfn solution(part1: bool) {\n\n let path = \"C:\\\\Users\\\\lgascoigne\\\\IdeaProjects\\\\advent\\\\input.txt\";\n\n let file = match File::open(path) {\n\n Err(why) => panic!(\"couldn't open {}: {}\", path, Error::description(&why)),\n\n Ok(file) => 
file,\n\n };\n\n\n\n let mut lines = Vec::new();\n\n let reader = BufReader::new(file);\n\n for line in reader.lines() {\n\n match line {\n\n Ok(line) => {\n\n lines.push(line);\n\n }\n\n Err(e) => println!(\"err: {}\", e)\n\n }\n\n }\n\n\n\n let re_begin = Regex::new(r\"\\[(\\d{4})-(\\d{2})-(\\d{2}) (\\d{2}):(\\d{2})\\] Guard #(\\d*) begins shift\").unwrap();\n\n let re_wake = Regex::new(r\"\\[(\\d{4})-(\\d{2})-(\\d{2}) (\\d{2}):(\\d{2})\\] wakes up\").unwrap();\n", "file_path": "day04.rs", "rank": 77, "score": 109141.89469853244 }, { "content": "fn build_tree_recursive(arena: &mut indextree::Arena<NodeData>, input: &Vec<i32>, cursor: &mut usize, parent: Option<indextree::NodeId> ) {\n\n if *cursor >= input.len() {\n\n return;\n\n }\n\n\n\n let child_count = input[*cursor];\n\n *cursor += 1;\n\n let meta_count = input[*cursor];\n\n *cursor += 1;\n\n\n\n let new_id = arena.new_node(NodeData {metadata:Vec::new()});\n\n if child_count > 0 {\n\n for _x in 0..child_count {\n\n build_tree_recursive(arena, input, cursor, Some(new_id));\n\n }\n\n }\n\n\n\n if parent.is_some() {\n\n assert!(parent.unwrap().append(new_id, arena).is_ok());\n\n }\n\n\n\n let node = arena.get_mut(new_id).unwrap();\n\n for _x in 0..meta_count {\n\n node.data.metadata.push(input[*cursor]);\n\n *cursor += 1;\n\n }\n\n}\n\n\n", "file_path": "day08.rs", "rank": 78, "score": 108429.6192178388 }, { "content": "fn get_node_value(arena: &indextree::Arena<NodeData>, node_id: indextree::NodeId, val: &mut i32) {\n\n let child_count = node_id.children(arena).count();\n\n if child_count == 0 {\n\n let node= arena.get(node_id).unwrap();\n\n let meta_sum: i32 = node.data.metadata.iter().sum();\n\n *val += meta_sum;\n\n } else {\n\n let node= arena.get(node_id).unwrap();\n\n for x in &node.data.metadata {\n\n let index = *x as usize;\n\n if index > 0 && index <= child_count {\n\n let child_node = node_id.children(arena).nth(index - 1 as usize);\n\n if child_node.is_some() {\n\n get_node_value(arena, 
child_node.unwrap(), val);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "day08.rs", "rank": 79, "score": 106194.25528608452 }, { "content": "fn solution(num_players:usize, last_marble:u32) -> u32 {\n\n let mut circle = VecDeque::with_capacity(last_marble as usize);\n\n let mut scores = vec![0u32; num_players];\n\n circle.push_back(0u32);\n\n for marble in 1..last_marble+1 {\n\n if marble % 23 == 0 {\n\n let player = marble as usize % num_players;\n\n scores[player] += marble;\n\n for _ in 0..7 {\n\n let tmp = circle.pop_back().unwrap();\n\n circle.push_front(tmp);\n\n }\n\n scores[player] += circle.pop_front().unwrap();\n\n } else {\n\n for _ in 0..2 {\n\n let tmp = circle.pop_front().unwrap();\n\n circle.push_back(tmp);\n\n }\n\n circle.push_front(marble);\n\n }\n", "file_path": "day09.rs", "rank": 80, "score": 101412.12987970514 }, { "content": "#[bench]\n\nfn bench_part2_strip(b: &mut Bencher) {\n\n b.iter(|| part2(false));\n\n}\n\n*/\n\n\n", "file_path": "day05.rs", "rank": 81, "score": 99506.18723417973 }, { "content": "#[bench]\n\nfn bench_part2_strip2(b: &mut Bencher) {\n\n b.iter(|| part2(true));\n\n}\n\n\n", "file_path": "day05.rs", "rank": 82, "score": 99506.18723417973 }, { "content": "fn add_recursive(cluster: &mut HashSet<usize>, assigned: &mut HashSet<usize>, points: &Vec<([f64; DIMS], usize)>, tree: &KdTree<f64, usize, &[f64; DIMS]>, point: &([f64; DIMS], usize)) {\n\n let result = tree.within(&point.0, 3.0f64, &manhattan).unwrap();\n\n for x in &result {\n\n if !assigned.contains(x.1) && !cluster.contains(x.1) {\n\n cluster.insert(*x.1);\n\n assigned.insert(*x.1);\n\n add_recursive(cluster, assigned, points, tree, &points[*x.1]);\n\n }\n\n }\n\n}\n\n\n", "file_path": "day25.rs", "rank": 83, "score": 97363.68904901312 }, { "content": "#[bench]\n\nfn bench_part2_strip2_rayon(b: &mut Bencher) {\n\n b.iter(|| part2_rayon());\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_strip() {\n\n use strip;\n\n 
assert!(String::from_utf8(strip(&\"aA\".to_string().into_bytes())).unwrap().is_empty());\n\n }\n\n}\n\n\n", "file_path": "day05.rs", "rank": 84, "score": 97127.72203113973 }, { "content": "fn compute_generation(problem: &Problem) -> (Vec<bool>, usize) {\n\n let len = problem.state.len() as i32;\n\n let mut result = Vec::new();\n\n let mut zero = problem.zero;\n\n let mut pot = -(problem.zero as i32) + 1;\n\n while pot < len {\n\n let window = problem.get_window(pot);\n\n let mut found = false;\n\n for note in &problem.notes {\n\n if window == note.pattern {\n\n found = true;\n\n if note.output {\n\n Problem::set(&mut result, &mut zero, pot, true);\n\n } else {\n\n Problem::set(&mut result, &mut zero, pot, false);\n\n }\n\n break;\n\n }\n\n }\n\n if !found {\n", "file_path": "day12.rs", "rank": 85, "score": 91362.4876856006 }, { "content": "fn sum_pots_pattern(pattern: &Vec<bool>, first: usize) -> usize {\n\n let mut pot = first;\n\n let mut result = 0;\n\n for val in pattern {\n\n if *val {\n\n result += pot\n\n }\n\n pot += 1\n\n }\n\n result\n\n}\n\n\n", "file_path": "day12.rs", "rank": 86, "score": 85452.05906285043 }, { "content": "fn part2(depth: usize, target: (usize, usize), expand: usize) -> u32 {\n\n let regions = create(depth, target, (target.0 + expand, target.1 + expand));\n\n let initial = Region { coord: (0, 0), tool: Tool::Torch };\n\n let target = Region { coord: target, tool: Tool::Torch };\n\n let result = astar(&initial, |p| p.successors(&regions), |p| p.distance(&target),\n\n |p| *p == target);\n\n result.unwrap().1\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_part1_ex() {\n\n use part1;\n\n assert_eq!(part1(510, (10, 10)), 114);\n\n }\n\n\n\n #[test]\n\n fn test_part1_input() {\n\n use part1;\n\n assert_eq!(part1(11541, (14, 778)), 11575);\n\n }\n\n}\n\n\n", "file_path": "day22.rs", "rank": 87, "score": 79883.89971831063 }, { "content": "struct Device {\n\n registers: [u32; NUM_REGS],\n\n instructions: Vec<Instruction>,\n\n 
ip: u32,\n\n bound: usize,\n\n program: Vec<[u32; 4]>,\n\n breakpoints: Vec<DataBreakpoint>\n\n}\n\n\n\nimpl Device {\n\n pub fn new() -> Device {\n\n Device {\n\n registers: [0u32; NUM_REGS],\n\n instructions: vec![\n\n Instruction { mnemonic: String::from(\"addr\"), opcode: 13, op: add, ops: Operands::RegReg },\n\n Instruction { mnemonic: String::from(\"addi\"), opcode: 10, op: add, ops: Operands::RegImm },\n\n Instruction { mnemonic: String::from(\"mulr\"), opcode: 14, op: mul, ops: Operands::RegReg },\n\n Instruction { mnemonic: String::from(\"muli\"), opcode: 5, op: mul, ops: Operands::RegImm },\n\n Instruction { mnemonic: String::from(\"banr\"), opcode: 0, op: and, ops: Operands::RegReg },\n\n Instruction { mnemonic: String::from(\"bani\"), opcode: 6, op: and, ops: Operands::RegImm },\n", "file_path": "day19.rs", "rank": 88, "score": 65929.1455606538 }, { "content": "struct Device {\n\n registers: [u32; 4],\n\n instructions: Vec<Instruction>\n\n}\n\n\n\nimpl Device {\n\n pub fn new() -> Device {\n\n Device {\n\n registers: [0u32; 4],\n\n instructions: vec![\n\n Instruction { mnemonic: String::from(\"addr\"), opcode: 13, op: add, ops: Operands::RegReg },\n\n Instruction { mnemonic: String::from(\"addi\"), opcode: 10, op: add, ops: Operands::RegImm },\n\n Instruction { mnemonic: String::from(\"mulr\"), opcode: 14, op: mul, ops: Operands::RegReg },\n\n Instruction { mnemonic: String::from(\"muli\"), opcode: 5, op: mul, ops: Operands::RegImm },\n\n Instruction { mnemonic: String::from(\"banr\"), opcode: 0, op: and, ops: Operands::RegReg },\n\n Instruction { mnemonic: String::from(\"bani\"), opcode: 6, op: and, ops: Operands::RegImm },\n\n Instruction { mnemonic: String::from(\"borr\"), opcode: 7, op: or, ops: Operands::RegReg },\n\n Instruction { mnemonic: String::from(\"bori\"), opcode: 4, op: or, ops: Operands::RegImm },\n\n Instruction { mnemonic: String::from(\"setr\"), opcode: 2, op: set, ops: Operands::RegReg },\n\n Instruction { mnemonic: 
String::from(\"seti\"), opcode: 15, op: set, ops: Operands::ImmReg },\n", "file_path": "day16.rs", "rank": 89, "score": 65929.1455606538 }, { "content": "struct Worker {\n\n step: char,\n\n work_remaining: i32\n\n}\n\n\n", "file_path": "day07.rs", "rank": 90, "score": 65929.1455606538 }, { "content": "struct Instruction {\n\n mnemonic: String,\n\n opcode: u32,\n\n op: Op,\n\n ops: Operands\n\n}\n\n\n", "file_path": "day16.rs", "rank": 91, "score": 65929.1455606538 }, { "content": "#[derive(Default)]\n\nstruct Map {\n\n squares: Vec<Vec<Square>>,\n\n npcs: HashMap<(u32, u32), Npc>\n\n}\n\n\n\nimpl Map {\n\n #[allow(dead_code)]\n\n pub fn print(&self) {\n\n let mut string = String::new();\n\n for line in &self.squares {\n\n let mut units = String::new();\n\n for cell in line {\n\n match self.npcs.get(&(cell.x, cell.y)) {\n\n Some(npc) => {\n\n string.push(npc.class);\n\n units.push_str(format!(\"{}({}), \", npc.class, npc.hp).as_str());\n\n },\n\n None => string.push(cell.contents)\n\n }\n\n }\n", "file_path": "day15.rs", "rank": 92, "score": 65929.1455606538 }, { "content": "#[derive(Default)]\n\nstruct Square {\n\n x: u32,\n\n y: u32,\n\n contents: char\n\n}\n\n\n", "file_path": "day15.rs", "rank": 93, "score": 65929.1455606538 }, { "content": "struct Graph {\n\n steps: Vec<Step>,\n\n idle_workers: i32,\n\n active_workers: Vec<Worker>\n\n}\n\n\n\nimpl Graph {\n\n pub fn add_requirement(&mut self, requirement:&(char, char)) {\n\n match self.steps.iter().position(|x| x.name == requirement.1) {\n\n Some(pos) => self.steps[pos].requirements.push(requirement.0),\n\n None => self.steps.push(Step {name: requirement.1, requirements: vec![requirement.0], duration: 0})\n\n }\n\n\n\n if self.steps.iter().find(|x| x.name == requirement.0).is_none() {\n\n self.steps.push(Step {name: requirement.0, requirements:Vec::new(), duration: 0})\n\n }\n\n }\n\n\n\n pub fn is_done(&self) -> bool {\n\n self.steps.is_empty() && self.active_workers.is_empty()\n", "file_path": "day07.rs", 
"rank": 94, "score": 65929.1455606538 }, { "content": "#[derive(Copy, Clone, Default)]\n\nstruct Npc {\n\n class: char,\n\n x: u32,\n\n y: u32,\n\n ap: u32,\n\n hp: u32\n\n}\n\n\n\nimpl Npc {\n\n pub fn get_hostile(&self) -> char {\n\n if self.class == 'E' { 'G' } else { 'E' }\n\n }\n\n}\n\n\n", "file_path": "day15.rs", "rank": 95, "score": 65929.1455606538 }, { "content": "#[derive(Default)]\n\nstruct Scan {\n\n rows: Vec<Vec<char>>,\n\n xmin: usize,\n\n xmax: usize,\n\n ymin: usize,\n\n ymax: usize,\n\n active_water: Vec<(usize, usize)>,\n\n reached: HashSet<(usize, usize)>,\n\n sand: HashSet<(usize, usize)>\n\n}\n\n\n\nimpl Scan {\n\n pub fn init(&mut self, xmin: usize, xmax: usize, ymin: usize, ymax: usize) {\n\n self.xmin = xmin;\n\n self.xmax = xmax;\n\n self.ymin = ymin;\n\n self.ymax = ymax;\n\n\n\n for _y in 0..=ymax {\n\n self.rows.push(vec!['.'; xmax - xmin + 1]);\n", "file_path": "day17.rs", "rank": 96, "score": 65929.1455606538 }, { "content": "struct Coord {\n\n x: i32,\n\n y: i32\n\n}\n\n\n\nimpl Coord {\n\n pub fn manhattan(a:&Coord, b:&Coord) -> i32 {\n\n (a.x - b.x).abs() + (a.y - b.y).abs()\n\n }\n\n\n\n pub fn normalize_extents(coords:&mut Vec<Coord>) -> (i32, i32, i32, i32) {\n\n let mut xmin = std::i32::MAX;\n\n let mut xmax = std::i32::MIN;\n\n let mut ymin = std::i32::MAX;\n\n let mut ymax = std::i32::MIN;\n\n\n\n for coord in coords.iter() {\n\n let cx = coord.x;\n\n let cy = coord.y;\n\n xmin = cmp::min(cx, xmin);\n", "file_path": "day06.rs", "rank": 97, "score": 65929.1455606538 }, { "content": "struct Instruction {\n\n mnemonic: String,\n\n opcode: u32,\n\n op: Op,\n\n ops: Operands\n\n}\n\n\n", "file_path": "day19.rs", "rank": 98, "score": 65929.1455606538 }, { "content": "struct Step {\n\n name: char,\n\n requirements: Vec<char>,\n\n duration: i32\n\n}\n\n\n", "file_path": "day07.rs", "rank": 99, "score": 65929.1455606538 } ]
Rust
src/cli.rs
paulstansifer/odd
456a321c041241c95e147cb1c9c254d02df64a64
#![allow(non_snake_case)] use std::{fs::File, io::Read, path::Path}; use libunseemly::{ ast, ast::Ast, core_forms, expand, grammar, name::{n, Name}, runtime::{core_values, eval, eval::Value}, ty, ty_compare, util::assoc::Assoc, }; use std::{borrow::Cow, cell::RefCell, io::BufRead}; thread_local! { pub static TY_ENV : RefCell<Assoc<Name, Ast>> = RefCell::new(core_values::core_types()); pub static VAL_ENV : RefCell<Assoc<Name, Value>> = RefCell::new(core_values::core_values()); } #[cfg_attr(tarpaulin, skip)] fn main() { let arguments: Vec<String> = std::env::args().collect(); if arguments.len() == 1 { repl(); } else if arguments.len() == 2 { let filename = Path::new(&arguments[1]); let mut raw_input = String::new(); File::open(&filename) .expect("Error opening file") .read_to_string(&mut raw_input) .expect("Error reading file"); if let Some(dir) = filename.parent() { if dir.is_dir() { std::env::set_current_dir(dir).unwrap(); } } libunseemly::terminal_display(libunseemly::eval_unseemly_program_top(&raw_input)); } else if arguments.len() == 3 { let lang = libunseemly::language_from_file(&std::path::Path::new(&arguments[1])); let mut second_program = String::new(); File::open(&Path::new(&arguments[2])) .expect("Error opening file") .read_to_string(&mut second_program) .expect("Error reading file"); if let Some(dir) = Path::new(&arguments[2]).parent() { if dir.is_dir() { std::env::set_current_dir(dir).unwrap(); } } libunseemly::terminal_display(libunseemly::eval_program(&second_program, lang)); } } struct LineHelper { highlighter: rustyline::highlight::MatchingBracketHighlighter, validator: rustyline::validate::MatchingBracketValidator, } impl LineHelper { fn new() -> LineHelper { LineHelper { highlighter: rustyline::highlight::MatchingBracketHighlighter::new(), validator: rustyline::validate::MatchingBracketValidator::new(), } } } impl rustyline::completion::Completer for LineHelper { type Candidate = String; fn complete( &self, line: &str, pos: usize, _ctxt: 
&rustyline::Context, ) -> Result<(usize, Vec<String>), rustyline::error::ReadlineError> { let mut res = vec![]; let (start, word_so_far) = rustyline::completion::extract_word(line, pos, None, b"[({ })]"); VAL_ENV.with(|vals| { let vals = vals.borrow(); for k in vals.iter_keys() { if k.sp().starts_with(word_so_far) { res.push(k.sp()); } } }); Ok((start, res)) } } impl rustyline::hint::Hinter for LineHelper { type Hint = String; fn hint(&self, _line: &str, _pos: usize, _ctxt: &rustyline::Context) -> Option<String> { None } } impl rustyline::highlight::Highlighter for LineHelper { fn highlight<'l>(&self, line: &'l str, pos: usize) -> Cow<'l, str> { self.highlighter.highlight(line, pos) } fn highlight_prompt<'b, 's: 'b, 'p: 'b>( &'s self, prompt: &'p str, default: bool, ) -> Cow<'b, str> { self.highlighter.highlight_prompt(prompt, default) } fn highlight_hint<'h>(&self, hint: &'h str) -> Cow<'h, str> { self.highlighter.highlight_hint(hint) } fn highlight_candidate<'c>( &self, candidate: &'c str, completion: rustyline::config::CompletionType, ) -> Cow<'c, str> { self.highlighter.highlight_candidate(candidate, completion) } fn highlight_char(&self, line: &str, pos: usize) -> bool { self.highlighter.highlight_char(line, pos) } } impl rustyline::validate::Validator for LineHelper { fn validate( &self, ctx: &mut rustyline::validate::ValidationContext, ) -> rustyline::Result<rustyline::validate::ValidationResult> { self.validator.validate(ctx) } fn validate_while_typing(&self) -> bool { self.validator.validate_while_typing() } } impl rustyline::Helper for LineHelper {} pub fn repl() { let prelude_filename = dirs::home_dir().unwrap().join(".unseemly_prelude"); let history_filename = dirs::home_dir().unwrap().join(".unseemly_history"); let mut rl = rustyline::Editor::<LineHelper>::new(); rl.set_helper(Some(LineHelper::new())); let quit = regex::Regex::new(r"\s*quit\s*").unwrap(); let just_parse = regex::Regex::new(r"^:p (.*)$").unwrap(); let just_parse_debug_print = 
regex::Regex::new(r"^:pd (.*)$").unwrap(); let just_type = regex::Regex::new(r"^:t (.*)$").unwrap(); let just_eval = regex::Regex::new(r"^:e (.*)$").unwrap(); let type_and_expand = regex::Regex::new(r"^:x (.*)$").unwrap(); let canon_type = regex::Regex::new(r"^:tt (.*)$").unwrap(); let subtype = regex::Regex::new(r"^:sub (.*)\s*<:\s*(.*)$").unwrap(); let assign_value = regex::Regex::new(r"^(\w+)\s*:=(.*)$").unwrap(); let save_value = regex::Regex::new(r"^:s +((\w+)\s*:=(.*))$").unwrap(); let assign_type = regex::Regex::new(r"^(\w+)\s*t=(.*)$").unwrap(); let save_type = regex::Regex::new(r"^:s +((\w+)\s*t=(.*))$").unwrap(); let comment = regex::Regex::new(r"^#").unwrap(); println!(); println!(" \x1b[1;38mUnseemly\x1b[0m"); println!(" `<expr>` to (typecheck and expand and) evaluate `<expr>`."); println!(" `:x <expr>` to (typecheck and) expand `<expr>`."); println!(" `:e <expr>` to (expand and) evaluate `<expr>` without typechecking."); println!(" `<name> := <expr>` to bind a name for this session."); println!(" `:t <expr>` to synthesize the type of <expr>."); println!(" `:tt <type>` to canonicalize <type>."); println!(" `:sub <type_a> <: <type_b>` to check that `<type_a>` is a subtype of `<type_b>`"); println!(" `<name> t= <type>` to bind a type for this session."); println!(" `:s <name> := <expr>` to save a binding to the prelude for the future."); println!(" `:s <name> t= <expr>` to save a type binding to the prelude."); println!(" `:p <expr>` to parse `<expr>` and pretty-print its AST output."); println!(" `:pd <expr>` to parse `<expr>` and debug-print its AST output."); println!(" Command history is saved over sessions."); println!(" Tab-completion works on variables, and lots of Bash-isms work."); if let Ok(prelude_file) = File::open(&prelude_filename) { let prelude = std::io::BufReader::new(prelude_file); for line in prelude.lines() { let line = line.unwrap(); if comment.captures(&line).is_some() { } else if let Some(caps) = assign_value.captures(&line) { if 
let Err(e) = assign_variable(&caps[1], &caps[2]) { println!(" Error in prelude line: {}\n {}", line, e); } } else if let Some(caps) = assign_type.captures(&line) { if let Err(e) = assign_t_var(&caps[1], &caps[2]) { println!(" Error in prelude line: {}\n {}", line, e); } } } println!(" [prelude loaded from {}]", prelude_filename.display()); } println!(); println!("This virtual machine kills cyber-fascists."); let _ = rl.load_history(&history_filename); while let Ok(line) = rl.readline("\x1b[1;36m≫\x1b[0m ") { rl.add_history_entry(line.clone()); if quit.captures(&line).is_some() { break; } let result = if let Some(caps) = just_parse.captures(&line) { parse_unseemly_program(&caps[1], true) } else if let Some(caps) = just_parse_debug_print.captures(&line) { parse_unseemly_program(&caps[1], false) } else if let Some(caps) = just_type.captures(&line) { type_unseemly_program(&caps[1]).map(|x| format!("{}", x)) } else if let Some(caps) = just_eval.captures(&line) { eval_unseemly_program_without_typechecking(&caps[1]).map(|x| format!("{}", x)) } else if let Some(caps) = type_and_expand.captures(&line) { type_and_expand_unseemly_program(&caps[1]).map(|x| format!("{}", x)) } else if let Some(caps) = canon_type.captures(&line) { canonicalize_type(&caps[1]).map(|x| format!("{}", x)) } else if let Some(caps) = subtype.captures(&line) { check_subtype(&caps[1], &caps[2]).map(|x| format!("{}", x)) } else if let Some(caps) = assign_value.captures(&line) { assign_variable(&caps[1], &caps[2]).map(|x| format!("{}", x)) } else if let Some(caps) = save_value.captures(&line) { match assign_variable(&caps[2], &caps[3]) { Ok(_) => { use std::io::Write; let mut prel_file = std::fs::OpenOptions::new() .create(true) .append(true) .open(&prelude_filename) .unwrap(); writeln!(prel_file, "{}", &caps[1]).unwrap(); Ok(format!("[saved to {}]", &prelude_filename.display())) } Err(e) => Err(e), } } else if let Some(caps) = assign_type.captures(&line) { assign_t_var(&caps[1], &caps[2]).map(|x| 
format!("{}", x)) } else if let Some(caps) = save_type.captures(&line) { match assign_t_var(&caps[2], &caps[3]) { Ok(_) => { use std::io::Write; let mut prel_file = std::fs::OpenOptions::new() .create(true) .append(true) .open(&prelude_filename) .unwrap(); writeln!(prel_file, "{}", &caps[1]).unwrap(); Ok(format!("[saved to {}]", &prelude_filename.display())) } Err(e) => Err(e), } } else { eval_unseemly_program(&line).map(|x| format!("{}", x)) }; match result { Ok(v) => println!("\x1b[1;32m≉\x1b[0m {}", v), Err(s) => println!("\x1b[1;31m✘\x1b[0m {}", s), } } println!("Bye! Saving history to {}", &history_filename.display()); rl.save_history(&history_filename).unwrap(); } fn assign_variable(name: &str, expr: &str) -> Result<Value, String> { let res = eval_unseemly_program(expr); if let Ok(ref v) = res { let ty = type_unseemly_program(expr).unwrap(); TY_ENV.with(|tys| { VAL_ENV.with(|vals| { let new_tys = tys.borrow().set(n(name), ty); let new_vals = vals.borrow().set(n(name), v.clone()); *tys.borrow_mut() = new_tys; *vals.borrow_mut() = new_vals; }) }) } res } fn assign_t_var(name: &str, t: &str) -> Result<Ast, String> { let ast = grammar::parse( &grammar::FormPat::Call(n("Type")), core_forms::outermost__parse_context(), t, ) .map_err(|e| e.msg)?; let res = TY_ENV.with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e))); if let Ok(ref t) = res { TY_ENV.with(|tys| { let new_tys = tys.borrow().set(n(name), t.clone()); *tys.borrow_mut() = new_tys; }) } res } fn canonicalize_type(t: &str) -> Result<Ast, String> { let ast = grammar::parse( &grammar::FormPat::Call(n("Type")), core_forms::outermost__parse_context(), t, ) .map_err(|e| e.msg)?; TY_ENV.with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e))) } fn check_subtype(t_a: &str, t_b: &str) -> Result<Ast, String> { let ast_a = grammar::parse( &grammar::FormPat::Call(n("Type")), core_forms::outermost__parse_context(), t_a, ) .map_err(|e| e.msg)?; let ast_b = 
grammar::parse( &grammar::FormPat::Call(n("Type")), core_forms::outermost__parse_context(), t_b, ) .map_err(|e| e.msg)?; TY_ENV.with(|tys| { ty_compare::must_subtype(&ast_a, &ast_b, tys.borrow().clone()) .map( |env| { ast::Ast(std::rc::Rc::new(ast::LocatedAst { c: ast::Atom(n(&format!("OK, under this environment: {}", env))), begin: 0, end: 0, file_id: 0, })) }, ) .map_err(|e| format!("{}", e)) }) } fn parse_unseemly_program(program: &str, pretty: bool) -> Result<String, String> { let ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; if pretty { Ok(format!("{}", ast)) } else { Ok(format!("{:#?}", ast)) } } fn type_unseemly_program(program: &str) -> Result<Ast, String> { let ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; TY_ENV.with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e))) } fn eval_unseemly_program_without_typechecking(program: &str) -> Result<Value, String> { let ast: Ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; let core_ast = expand::expand(&ast).map_err(|_| "error".to_owned())?; VAL_ENV.with(|vals| eval::eval(&core_ast, vals.borrow().clone()).map_err(|_| "???".to_string())) } fn eval_unseemly_program(program: &str) -> Result<Value, String> { let ast: Ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; let _type = TY_ENV .with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e)))?; let core_ast = expand::expand(&ast).map_err(|_| "error".to_owned())?; VAL_ENV.with(|vals| eval::eval(&core_ast, vals.borrow().clone()).map_err(|_| "???".to_string())) } fn type_and_expand_unseemly_program(program: &str) -> Result<ast::Ast, String> { let ast: Ast = grammar::parse( &core_forms::outermost_form(), 
core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; let _type = TY_ENV .with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e)))?; expand::expand(&ast).map_err(|_| "error".to_owned()) }
#![allow(non_snake_case)] use std::{fs::File, io::Read, path::Path}; use libunseemly::{ ast, ast::Ast, core_forms, expand, grammar, name::{n, Name}, runtime::{core_values, eval, eval::Value}, ty, ty_compare, util::assoc::Assoc, }; use std::{borrow::Cow, cell::RefCell, io::BufRead}; thread_local! { pub static TY_ENV : RefCell<Assoc<Name, Ast>> = RefCell::new(core_values::core_types()); pub static VAL_ENV : RefCell<Assoc<Name, Value>> = RefCell::new(core_values::core_values()); } #[cfg_attr(tarpaulin, skip)] fn main() { let arguments: Vec<String> = std::env::args().collect(); if arguments.len() == 1 { repl(); } else if arguments.len() == 2 { let filename = Path::new(&arguments[1]); let mut raw_input = String::new(); File::open(&filename) .expect("Error opening file") .read_to_string(&mut raw_input) .expect("Error reading file"); if let Some(dir) = filename.parent() { if dir.is_dir() { std::env::set_current_dir(dir).unwrap(); } } libunseemly::terminal_display(libunseemly::eval_unseemly_program_top(&raw_input)); } else if arguments.len() == 3 { let lang = libunseemly::language_from_file(&std::path::Path::new(&arguments[1])); let mut second_program = String::new(); File::open(&Path::new(&arguments[2])) .expect("Error opening file") .read_to_string(&mut second_program) .expect("Error reading file"); if let Some(dir) = Path::new(&arguments[2]).parent() { if dir.is_dir() { std::env::set_current_dir(dir).unwrap(); } } libunseemly::terminal_display(libunseemly::eval_program(&second_program, lang)); } } struct LineHelper { highlighter: rustyline::highlight::MatchingBracketHighlighter, validator: rustyline::validate::MatchingBracketValidator, } impl LineHelper { fn new() -> LineHelper {
} impl rustyline::completion::Completer for LineHelper { type Candidate = String; fn complete( &self, line: &str, pos: usize, _ctxt: &rustyline::Context, ) -> Result<(usize, Vec<String>), rustyline::error::ReadlineError> { let mut res = vec![]; let (start, word_so_far) = rustyline::completion::extract_word(line, pos, None, b"[({ })]"); VAL_ENV.with(|vals| { let vals = vals.borrow(); for k in vals.iter_keys() { if k.sp().starts_with(word_so_far) { res.push(k.sp()); } } }); Ok((start, res)) } } impl rustyline::hint::Hinter for LineHelper { type Hint = String; fn hint(&self, _line: &str, _pos: usize, _ctxt: &rustyline::Context) -> Option<String> { None } } impl rustyline::highlight::Highlighter for LineHelper { fn highlight<'l>(&self, line: &'l str, pos: usize) -> Cow<'l, str> { self.highlighter.highlight(line, pos) } fn highlight_prompt<'b, 's: 'b, 'p: 'b>( &'s self, prompt: &'p str, default: bool, ) -> Cow<'b, str> { self.highlighter.highlight_prompt(prompt, default) } fn highlight_hint<'h>(&self, hint: &'h str) -> Cow<'h, str> { self.highlighter.highlight_hint(hint) } fn highlight_candidate<'c>( &self, candidate: &'c str, completion: rustyline::config::CompletionType, ) -> Cow<'c, str> { self.highlighter.highlight_candidate(candidate, completion) } fn highlight_char(&self, line: &str, pos: usize) -> bool { self.highlighter.highlight_char(line, pos) } } impl rustyline::validate::Validator for LineHelper { fn validate( &self, ctx: &mut rustyline::validate::ValidationContext, ) -> rustyline::Result<rustyline::validate::ValidationResult> { self.validator.validate(ctx) } fn validate_while_typing(&self) -> bool { self.validator.validate_while_typing() } } impl rustyline::Helper for LineHelper {} pub fn repl() { let prelude_filename = dirs::home_dir().unwrap().join(".unseemly_prelude"); let history_filename = dirs::home_dir().unwrap().join(".unseemly_history"); let mut rl = rustyline::Editor::<LineHelper>::new(); rl.set_helper(Some(LineHelper::new())); let quit = 
regex::Regex::new(r"\s*quit\s*").unwrap(); let just_parse = regex::Regex::new(r"^:p (.*)$").unwrap(); let just_parse_debug_print = regex::Regex::new(r"^:pd (.*)$").unwrap(); let just_type = regex::Regex::new(r"^:t (.*)$").unwrap(); let just_eval = regex::Regex::new(r"^:e (.*)$").unwrap(); let type_and_expand = regex::Regex::new(r"^:x (.*)$").unwrap(); let canon_type = regex::Regex::new(r"^:tt (.*)$").unwrap(); let subtype = regex::Regex::new(r"^:sub (.*)\s*<:\s*(.*)$").unwrap(); let assign_value = regex::Regex::new(r"^(\w+)\s*:=(.*)$").unwrap(); let save_value = regex::Regex::new(r"^:s +((\w+)\s*:=(.*))$").unwrap(); let assign_type = regex::Regex::new(r"^(\w+)\s*t=(.*)$").unwrap(); let save_type = regex::Regex::new(r"^:s +((\w+)\s*t=(.*))$").unwrap(); let comment = regex::Regex::new(r"^#").unwrap(); println!(); println!(" \x1b[1;38mUnseemly\x1b[0m"); println!(" `<expr>` to (typecheck and expand and) evaluate `<expr>`."); println!(" `:x <expr>` to (typecheck and) expand `<expr>`."); println!(" `:e <expr>` to (expand and) evaluate `<expr>` without typechecking."); println!(" `<name> := <expr>` to bind a name for this session."); println!(" `:t <expr>` to synthesize the type of <expr>."); println!(" `:tt <type>` to canonicalize <type>."); println!(" `:sub <type_a> <: <type_b>` to check that `<type_a>` is a subtype of `<type_b>`"); println!(" `<name> t= <type>` to bind a type for this session."); println!(" `:s <name> := <expr>` to save a binding to the prelude for the future."); println!(" `:s <name> t= <expr>` to save a type binding to the prelude."); println!(" `:p <expr>` to parse `<expr>` and pretty-print its AST output."); println!(" `:pd <expr>` to parse `<expr>` and debug-print its AST output."); println!(" Command history is saved over sessions."); println!(" Tab-completion works on variables, and lots of Bash-isms work."); if let Ok(prelude_file) = File::open(&prelude_filename) { let prelude = std::io::BufReader::new(prelude_file); for line in prelude.lines() 
{ let line = line.unwrap(); if comment.captures(&line).is_some() { } else if let Some(caps) = assign_value.captures(&line) { if let Err(e) = assign_variable(&caps[1], &caps[2]) { println!(" Error in prelude line: {}\n {}", line, e); } } else if let Some(caps) = assign_type.captures(&line) { if let Err(e) = assign_t_var(&caps[1], &caps[2]) { println!(" Error in prelude line: {}\n {}", line, e); } } } println!(" [prelude loaded from {}]", prelude_filename.display()); } println!(); println!("This virtual machine kills cyber-fascists."); let _ = rl.load_history(&history_filename); while let Ok(line) = rl.readline("\x1b[1;36m≫\x1b[0m ") { rl.add_history_entry(line.clone()); if quit.captures(&line).is_some() { break; } let result = if let Some(caps) = just_parse.captures(&line) { parse_unseemly_program(&caps[1], true) } else if let Some(caps) = just_parse_debug_print.captures(&line) { parse_unseemly_program(&caps[1], false) } else if let Some(caps) = just_type.captures(&line) { type_unseemly_program(&caps[1]).map(|x| format!("{}", x)) } else if let Some(caps) = just_eval.captures(&line) { eval_unseemly_program_without_typechecking(&caps[1]).map(|x| format!("{}", x)) } else if let Some(caps) = type_and_expand.captures(&line) { type_and_expand_unseemly_program(&caps[1]).map(|x| format!("{}", x)) } else if let Some(caps) = canon_type.captures(&line) { canonicalize_type(&caps[1]).map(|x| format!("{}", x)) } else if let Some(caps) = subtype.captures(&line) { check_subtype(&caps[1], &caps[2]).map(|x| format!("{}", x)) } else if let Some(caps) = assign_value.captures(&line) { assign_variable(&caps[1], &caps[2]).map(|x| format!("{}", x)) } else if let Some(caps) = save_value.captures(&line) { match assign_variable(&caps[2], &caps[3]) { Ok(_) => { use std::io::Write; let mut prel_file = std::fs::OpenOptions::new() .create(true) .append(true) .open(&prelude_filename) .unwrap(); writeln!(prel_file, "{}", &caps[1]).unwrap(); Ok(format!("[saved to {}]", &prelude_filename.display())) 
} Err(e) => Err(e), } } else if let Some(caps) = assign_type.captures(&line) { assign_t_var(&caps[1], &caps[2]).map(|x| format!("{}", x)) } else if let Some(caps) = save_type.captures(&line) { match assign_t_var(&caps[2], &caps[3]) { Ok(_) => { use std::io::Write; let mut prel_file = std::fs::OpenOptions::new() .create(true) .append(true) .open(&prelude_filename) .unwrap(); writeln!(prel_file, "{}", &caps[1]).unwrap(); Ok(format!("[saved to {}]", &prelude_filename.display())) } Err(e) => Err(e), } } else { eval_unseemly_program(&line).map(|x| format!("{}", x)) }; match result { Ok(v) => println!("\x1b[1;32m≉\x1b[0m {}", v), Err(s) => println!("\x1b[1;31m✘\x1b[0m {}", s), } } println!("Bye! Saving history to {}", &history_filename.display()); rl.save_history(&history_filename).unwrap(); } fn assign_variable(name: &str, expr: &str) -> Result<Value, String> { let res = eval_unseemly_program(expr); if let Ok(ref v) = res { let ty = type_unseemly_program(expr).unwrap(); TY_ENV.with(|tys| { VAL_ENV.with(|vals| { let new_tys = tys.borrow().set(n(name), ty); let new_vals = vals.borrow().set(n(name), v.clone()); *tys.borrow_mut() = new_tys; *vals.borrow_mut() = new_vals; }) }) } res } fn assign_t_var(name: &str, t: &str) -> Result<Ast, String> { let ast = grammar::parse( &grammar::FormPat::Call(n("Type")), core_forms::outermost__parse_context(), t, ) .map_err(|e| e.msg)?; let res = TY_ENV.with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e))); if let Ok(ref t) = res { TY_ENV.with(|tys| { let new_tys = tys.borrow().set(n(name), t.clone()); *tys.borrow_mut() = new_tys; }) } res } fn canonicalize_type(t: &str) -> Result<Ast, String> { let ast = grammar::parse( &grammar::FormPat::Call(n("Type")), core_forms::outermost__parse_context(), t, ) .map_err(|e| e.msg)?; TY_ENV.with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e))) } fn check_subtype(t_a: &str, t_b: &str) -> Result<Ast, String> { let ast_a = grammar::parse( 
&grammar::FormPat::Call(n("Type")), core_forms::outermost__parse_context(), t_a, ) .map_err(|e| e.msg)?; let ast_b = grammar::parse( &grammar::FormPat::Call(n("Type")), core_forms::outermost__parse_context(), t_b, ) .map_err(|e| e.msg)?; TY_ENV.with(|tys| { ty_compare::must_subtype(&ast_a, &ast_b, tys.borrow().clone()) .map( |env| { ast::Ast(std::rc::Rc::new(ast::LocatedAst { c: ast::Atom(n(&format!("OK, under this environment: {}", env))), begin: 0, end: 0, file_id: 0, })) }, ) .map_err(|e| format!("{}", e)) }) } fn parse_unseemly_program(program: &str, pretty: bool) -> Result<String, String> { let ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; if pretty { Ok(format!("{}", ast)) } else { Ok(format!("{:#?}", ast)) } } fn type_unseemly_program(program: &str) -> Result<Ast, String> { let ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; TY_ENV.with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e))) } fn eval_unseemly_program_without_typechecking(program: &str) -> Result<Value, String> { let ast: Ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; let core_ast = expand::expand(&ast).map_err(|_| "error".to_owned())?; VAL_ENV.with(|vals| eval::eval(&core_ast, vals.borrow().clone()).map_err(|_| "???".to_string())) } fn eval_unseemly_program(program: &str) -> Result<Value, String> { let ast: Ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; let _type = TY_ENV .with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e)))?; let core_ast = expand::expand(&ast).map_err(|_| "error".to_owned())?; VAL_ENV.with(|vals| eval::eval(&core_ast, vals.borrow().clone()).map_err(|_| "???".to_string())) } fn 
type_and_expand_unseemly_program(program: &str) -> Result<ast::Ast, String> { let ast: Ast = grammar::parse( &core_forms::outermost_form(), core_forms::outermost__parse_context(), program, ) .map_err(|e| e.msg)?; let _type = TY_ENV .with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!("{}", e)))?; expand::expand(&ast).map_err(|_| "error".to_owned()) }
LineHelper { highlighter: rustyline::highlight::MatchingBracketHighlighter::new(), validator: rustyline::validate::MatchingBracketValidator::new(), } }
function_block-function_prefix_line
[ { "content": "pub fn eval(expr: &Ast, env: Assoc<Name, Value>) -> Result<Value, ()> {\n\n walk::<Eval>(expr, &LazyWalkReses::new_wrapper(env))\n\n}\n\n\n", "file_path": "src/runtime/eval.rs", "rank": 0, "score": 278618.50121150416 }, { "content": "pub fn eval_top(expr: &Ast) -> Result<Value, ()> { eval(expr, Assoc::new()) }\n\n\n", "file_path": "src/runtime/eval.rs", "rank": 1, "score": 269209.5785498401 }, { "content": "// I *think* the environment doesn't matter\n\npub fn expand(ast: &Ast) -> Result<Ast, ()> {\n\n use crate::runtime::reify::Reifiable;\n\n Ok(Ast::reflect(&crate::ast_walk::walk::<ExpandMacros>(ast, &LazyWalkReses::new_empty())?))\n\n}\n\n\n", "file_path": "src/expand.rs", "rank": 2, "score": 264107.483822172 }, { "content": "pub fn neg_eval(pat: &Ast, env: Assoc<Name, Value>) -> Result<Assoc<Name, Value>, ()> {\n\n walk::<Destructure>(pat, &LazyWalkReses::new_wrapper(env))\n\n}\n\n\n\ncustom_derive! {\n\n #[derive(Copy, Clone, Debug, Reifiable)]\n\n pub struct QQuote {}\n\n}\n\ncustom_derive! {\n\n #[derive(Copy, Clone, Debug, Reifiable)]\n\n pub struct QQuoteDestr {}\n\n}\n\n\n\nimpl WalkMode for QQuote {\n\n fn name() -> &'static str { \"QQuote\" }\n\n\n\n // Why not `Ast`? 
Because QQuote and Eval need to share environments.\n\n type Elt = Value;\n\n type Negated = QQuoteDestr;\n\n type AsPositive = QQuote;\n", "file_path": "src/runtime/eval.rs", "rank": 3, "score": 257997.13744685764 }, { "content": "pub fn more_quoted_ty(t: &Ast, nt: Name) -> Ast {\n\n ast!({\"Type\" \"type_apply\" :\n\n \"type_rator\" => (, get__primitive_type(nt)),\n\n \"arg\" => [(, t.clone())]})\n\n}\n\n\n", "file_path": "src/core_type_forms.rs", "rank": 4, "score": 250995.07750695132 }, { "content": "pub fn core_types() -> Assoc<Name, Ast> {\n\n use crate::runtime::reify::{Irr, Reifiable};\n\n core_typed_values()\n\n .map(&erase_value)\n\n .set(\n\n n(\"Bool\"),\n\n ast!({\"Type\" \"enum\" : \"name\" => [@\"c\" \"True\", \"False\"], \"component\" => [@\"c\" [], []]}))\n\n .set(\n\n n(\"Option\"),\n\n ast!({ \"Type\" \"forall_type\" :\n\n \"param\" => [\"T\"],\n\n \"body\" => (import [* [forall \"param\"]]\n\n {\"Type\" \"enum\" : \"name\" => [@\"c\" \"Some\", \"None\"],\n\n \"component\" => [@\"c\" [], [(vr \"T\")]]})}))\n\n .set(\n\n n(\"Unit\"),\n\n ast!({\"Type\" \"tuple\" : \"component\" => []}))\n\n // These need to be in the environment, not just atomic types\n\n // because we sometimes look them up internally in the compiler\n\n // in the environment,\n", "file_path": "src/runtime/core_values.rs", "rank": 5, "score": 246392.54565407697 }, { "content": "pub fn canonicalize(t: &Ast, env: Assoc<Name, Ast>) -> Result<Ast, TyErr> {\n\n walk::<Canonicalize>(t, &LazyWalkReses::<Canonicalize>::new_wrapper(env))\n\n}\n\n\n", "file_path": "src/ty_compare.rs", "rank": 6, "score": 244677.3750485598 }, { "content": "/// Takes the Unseemly type `T` to `Sequence<T>`\n\npub fn sequence_type__of(ty: &Ast) -> Ast {\n\n ast!({ \"Type\" \"type_apply\" :\n\n \"type_rator\" => (, crate::core_type_forms::get__primitive_type(n(\"Sequence\"))),\n\n \"arg\" => [(, ty.clone()) ]})\n\n}\n\n\n", "file_path": "src/runtime/reify.rs", "rank": 7, "score": 242448.9083889549 }, { 
"content": "pub fn ty_of_1ary_function<A: Reifiable + 'static, R: Reifiable + 'static>() -> Ast {\n\n ast!(\"TODO: generate type\")\n\n}\n\n\n\nmacro_rules! reify_types {\n\n ( $($t:ty),* ) => {{\n\n let mut res = Assoc::new();\n\n $(\n\n res = res.set(<$t as Reifiable>::ty_name(), <$t as Reifiable>::ty());\n\n )*\n\n res\n\n }}\n\n}\n\n\n\nmacro_rules! fake_reifiability {\n\n ( $underlying_type:ty ) => {\n\n impl Reifiable for $underlying_type {\n\n fn ty_name() -> Name { n(stringify!($underlying_type)) }\n\n fn reify(&self) -> Value { panic!() }\n\n fn reflect(_: &Value) -> Self { panic!() }\n", "file_path": "src/runtime/reify.rs", "rank": 8, "score": 233882.6412586951 }, { "content": "pub fn neg_synth_type(pat: &Ast, env: Assoc<Name, Ast>) -> Result<Assoc<Name, Ast>, TypeError> {\n\n walk::<UnpackTy>(pat, &LazyWalkReses::new_wrapper(env))\n\n}\n\n\n\ncustom_derive! {\n\n #[derive(Reifiable, Clone, PartialEq)]\n\n pub enum TyErr {\n\n Mismatch(Ast, Ast), // got, expected\n\n LengthMismatch(Vec<Ast>, usize),\n\n NtInterpMismatch(Name, Name),\n\n NonexistentEnumArm(Name, Ast),\n\n NonexistentStructField(Name, Ast),\n\n NonExhaustiveMatch(Ast),\n\n UnableToDestructure(Ast, Name),\n\n UnboundName(Name),\n\n // TODO: the reification macros can't handle empty `enum` cases. 
Fix that!\n\n AnnotationRequired(()),\n\n NeedsDriver(()),\n\n // TODO: replace all uses of `Other` with more specific errors:\n\n Other(String)\n", "file_path": "src/ty.rs", "rank": 9, "score": 232989.93694794737 }, { "content": "pub fn synth_type(expr: &Ast, env: Assoc<Name, Ast>) -> TypeResult {\n\n walk::<SynthTy>(expr, &LazyWalkReses::new_wrapper(env))\n\n}\n\n\n", "file_path": "src/ty.rs", "rank": 10, "score": 232648.4608423065 }, { "content": "pub fn less_quoted_ty(t: &Ast, nt: Option<Name>, loc: &Ast) -> Result<Ast, crate::ty::TypeError> {\n\n // suppose that this is an expr, and `body` has the type `Expr<String>`:\n\n expect_ty_node!( (t ; crate::core_forms::find_core_form(\"Type\", \"type_apply\") ; loc)\n\n tapp_parts;\n\n {\n\n if let Some(nt) = nt { // Check it if you got it\n\n ty_exp!(\n\n tapp_parts.get_leaf_or_panic(&n(\"type_rator\")),\n\n &get__primitive_type(nt),\n\n loc\n\n );\n\n }\n\n\n\n let args = tapp_parts.get_rep_leaf_or_panic(n(\"arg\"));\n\n if args.len() != 1 {\n\n ty_err!(LengthMismatch(args.into_iter().cloned().collect(), 1)\n\n at loc);\n\n }\n\n\n\n // ...returns `String` in that case\n\n Ok(args[0].clone())\n\n }\n\n )\n\n}\n\n\n", "file_path": "src/core_type_forms.rs", "rank": 11, "score": 231706.29774913576 }, { "content": "// Macros have types!\n\n// ...but they're not higher-order (i.e., you can't do anything with a macro other than invoke it).\n\n// This means that we can just generate a type for them at the location of invocation.\n\nfn macro_type(forall_ty_vars: &[Name], arguments: Vec<(Name, Ast)>, output: Ast) -> Ast {\n\n let mut components = vec![];\n\n for (k, v) in arguments.iter() {\n\n // The fields in a struct type are not renamed like normal during freshening,\n\n // so roll back any freshening that happened during evaluation, hence `unhygienic_orig`.\n\n // TODO: this can go wrong if a macro-defining macro collides two term names.\n\n // Fixing this probably requires rethinking how \"component_name\" works.\n\n 
// Perhaps not using structs at all might also work.\n\n components.push(mbe!(\"component_name\" => (, ast!(k.unhygienic_orig())),\n\n \"component\" => (, v.to_ast())));\n\n }\n\n let argument_struct = raw_ast!(Node(\n\n crate::core_forms::find_core_form(\"Type\", \"struct\"),\n\n crate::util::mbe::EnvMBE::new_from_anon_repeat(components),\n\n ExportBeta::Nothing\n\n ));\n\n let mac_fn = u!({Type fn : [(, argument_struct)] (, output.to_ast())});\n\n\n\n if forall_ty_vars.is_empty() {\n\n mac_fn\n\n } else {\n\n ast!({\"Type\" \"forall_type\" :\n\n \"body\" => (import [* [forall \"param\"]] (, mac_fn)),\n\n \"param\" => (,seq forall_ty_vars.iter().map(|n: &Name| { ast!(*n) }).collect::<Vec<_>>())\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/core_macro_forms.rs", "rank": 12, "score": 227824.8730754937 }, { "content": "/// Substitute `VariableReference`s in `node`, according to `env`.\n\n/// TODO: don't use this to \"capture the environment\"; it doesn't work in the presence of recursion\n\n/// Instead, we should introduce a \"constant\" to Beta. 
(Does `SameAs` suffice now?)\n\n/// TODO: because of mu's use of `VariableReference`s in a place where other `Ast`s are forbidden,\n\n/// it seems like this has limited use.\n\n/// TODO: this isn't capture-avoiding (and shouldn't be, when called by `freshen_rec`)\n\n/// It's safe to use when the RHS of the environment is just fresh names.\n\npub fn substitute(node: &Ast, env: &Assoc<Name, Ast>) -> Ast {\n\n substitute_rec(node, &EnvMBE::new(), &Ren::from(env.clone()))\n\n}\n\n\n", "file_path": "src/alpha.rs", "rank": 13, "score": 224938.33134594496 }, { "content": "/// Evaluate a program written in some language.\n\npub fn eval_program(program: &str, lang: Language) -> Result<Value, String> {\n\n // TODO: looks like `outermost_form` ought to be a property of `ParseContext`\n\n let ast: Ast = crate::grammar::parse(&core_forms::outermost_form(), lang.pc, program)\n\n .map_err(|e| e.msg)?;\n\n\n\n let _type = ast_walk::walk::<ty::SynthTy>(\n\n &ast,\n\n &ast_walk::LazyWalkReses::new(lang.type_env, lang.type_env__phaseless, ast.clone()),\n\n )\n\n .map_err(|e| format!(\"{}\", e))?;\n\n\n\n let core_ast = crate::expand::expand(&ast).map_err(|_| \"???\".to_string())?;\n\n\n\n eval(&core_ast, lang.value_env).map_err(|_| \"???\".to_string())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 14, "score": 224394.43116740498 }, { "content": "/// Takes the Unseemly type `Sequence<T>` to `T`\n\npub fn un__sequence_type(ty: &Ast, loc: &Ast) -> Result<Ast, crate::ty::TypeError> {\n\n // This is a hack; `Sequence` is not a nonterminal!\n\n crate::core_type_forms::less_quoted_ty(ty, Some(n(\"Sequence\")), loc)\n\n}\n\n\n\nimpl<T: Reifiable> Reifiable for Vec<T> {\n\n fn ty_name() -> Name { n(\"Sequence\") }\n\n\n\n fn concrete_arguments() -> Option<Vec<Ast>> { Some(vec![T::ty_invocation()]) }\n\n\n\n fn reify(&self) -> Value {\n\n Value::Sequence(self.iter().map(|elt| Rc::new(elt.reify())).collect())\n\n }\n\n\n\n fn reflect(v: &Value) -> Self {\n\n extract!((v) Value::Sequence = 
(ref s) =>\n\n s.iter().map(|elt| T::reflect(&elt)).collect()\n\n )\n\n }\n\n}\n", "file_path": "src/runtime/reify.rs", "rank": 15, "score": 219637.70513695542 }, { "content": "// TODO #4: this should be extensible for when the syntax environment is extended...\n\n// or just automatically have one type per NT. Probably the latter.\n\npub fn nt_to_type(nt: Name) -> Ast {\n\n if nt == n(\"Type\") || nt == n(\"Pat\") || nt == n(\"Expr\") {\n\n get__primitive_type(nt)\n\n } else {\n\n icp!(\"unknown NT {}\", nt)\n\n }\n\n}\n\n\n", "file_path": "src/core_type_forms.rs", "rank": 16, "score": 216508.96066444725 }, { "content": "pub fn erase_value(tv: &TypedValue) -> Ast { tv.ty.clone() }\n", "file_path": "src/runtime/core_values.rs", "rank": 17, "score": 215171.92936594575 }, { "content": "pub fn get__primitive_type(called: Name) -> Ast {\n\n ast!({primitive_type.with(|p_t| p_t.clone()) ; \"name\" => (at called)})\n\n}\n\n\n", "file_path": "src/core_type_forms.rs", "rank": 18, "score": 212422.88626503435 }, { "content": "/// Follow variable references in `env` and underdeterminednesses in `unif`\n\n/// until we hit something that can't move further.\n\n/// TODO #28: could this be replaced by `SynthTy`?\n\n/// TODO: This doesn't change `env`, and none of its clients care. 
It should just return `Ast`.\n\npub fn resolve(Clo { it: t, env }: Clo<Ast>, unif: &HashMap<Name, Clo<Ast>>) -> Clo<Ast> {\n\n let u_f = underdetermined_form.with(|u_f| u_f.clone());\n\n\n\n let resolved = match t.c() {\n\n VariableReference(vr) => {\n\n match env.find(vr) {\n\n // HACK: leave mu-protected variables alone, instead of recurring forever\n\n Some(vr_ast) if vr_ast.c() == &VariableReference(*vr) => None,\n\n Some(different) => Some(Clo { it: different.clone(), env: env.clone() }),\n\n None => None,\n\n }\n\n }\n\n Node(form, parts, _) if form == &find_core_form(\"Type\", \"type_apply\") => {\n\n // Expand defined type applications.\n\n // This is sorta similar to the type synthesis for \"type_apply\",\n\n // but it does not recursively process the arguments (which may be underdetermined!).\n\n let arg_terms = parts.get_rep_leaf_or_panic(n(\"arg\"));\n\n\n\n let resolved = resolve(\n\n Clo { it: parts.get_leaf_or_panic(&n(\"type_rator\")).clone(), env: env.clone() },\n", "file_path": "src/ty_compare.rs", "rank": 19, "score": 210342.94561253063 }, { "content": "pub fn core_typed_values() -> Assoc<Name, TypedValue> { static_core_values.with(|cv| cv.clone()) }\n\n\n", "file_path": "src/runtime/core_values.rs", "rank": 21, "score": 198474.33270617345 }, { "content": "pub fn string_operations() -> Assoc<Name, TypedValue> {\n\n assoc_n!(\n\n \"string_to_sequence\" => tyf! 
{\n\n {\"Type\" \"fn\" :\n\n \"param\" => [{\"Type\" \"String\" :}],\n\n \"ret\" => { \"Type\" \"type_apply\" :\n\n \"type_rator\" => (vr \"Sequence\"), \"arg\" => [{\"Type\" \"Int\" :}]}\n\n },\n\n (Text(s)) =>\n\n Sequence(s.chars().map(|c: char| Rc::new(Int(BigInt::from(c as u32)))).collect())\n\n },\n\n \"anything_to_string\" => tyf!{ { \"Type\" \"forall_type\" :\n\n \"param\" => [\"T\"],\n\n \"body\" => (import [* [forall \"param\"]] { \"Type\" \"fn\" :\n\n \"param\" => [(vr \"T\")],\n\n \"ret\" => {\"Type\" \"String\" :}})},\n\n (anything) => Text(format!(\"{}\", anything)) },\n\n \"ident_to_string\" => tyf! {\n\n {\"Type\" \"fn\" :\n\n \"param\" => [{\"Type\" \"Ident\" :}],\n", "file_path": "src/runtime/core_values.rs", "rank": 22, "score": 198145.80734799546 }, { "content": "pub fn cell_operations() -> Assoc<Name, TypedValue> {\n\n assoc_n!(\n\n \"new_cell\" =>\n\n tyf!( { \"Type\" \"forall_type\" :\n\n \"param\" => [\"T\"],\n\n \"body\" => (import [* [forall \"param\"]] { \"Type\" \"fn\" :\n\n \"param\" => [(vr \"T\")],\n\n \"ret\" =>\n\n { \"Type\" \"type_apply\" : \"type_rator\" => (vr \"Cell\"), \"arg\" => [(vr \"T\")]}})},\n\n ( val ) => {\n\n Cell(Rc::new(std::cell::RefCell::new(val)))\n\n }\n\n ),\n\n \"assign\" =>\n\n tyf!( { \"Type\" \"forall_type\" :\n\n \"param\" => [\"T\"],\n\n \"body\" => (import [* [forall \"param\"]] { \"Type\" \"fn\" :\n\n \"param\" => [\n\n { \"Type\" \"type_apply\" :\n\n \"type_rator\" => (vr \"Cell\"),\n", "file_path": "src/runtime/core_values.rs", "rank": 23, "score": 198145.80734799546 }, { "content": "pub fn sequence_operations() -> Assoc<Name, TypedValue> {\n\n assoc_n!(\n\n \"range\" => tyf!( {\"Type\" \"fn\" :\n\n \"param\" => [{\"Type\" \"Int\" :}, {\"Type\" \"Int\" :}],\n\n \"ret\" => {\"Type\" \"type_apply\" :\n\n \"type_rator\" => (vr \"Sequence\"),\n\n \"arg\" => [{\"Type\" \"Int\" :}]}},\n\n (Int(start), Int(end)) => Sequence((start.to_i32().unwrap()..end.to_i32().unwrap()).map(\n\n |i| 
Rc::new(Int(BigInt::from(i)))).collect())\n\n ),\n\n \"empty\" => TypedValue {\n\n ty: ast!({\"Type\" \"forall_type\" :\n\n \"param\" => [\"T\"],\n\n \"body\" => (import [* [forall \"param\"]] { \"Type\" \"type_apply\" :\n\n \"type_rator\" => (vr \"Sequence\"), \"arg\" => [(vr \"T\")]})}),\n\n val: val!(seq)},\n\n \"index\" =>\n\n tyf!( { \"Type\" \"forall_type\" :\n\n \"param\" => [\"T\"],\n\n \"body\" => (import [* [forall \"param\"]] { \"Type\" \"fn\" :\n", "file_path": "src/runtime/core_values.rs", "rank": 24, "score": 198145.80734799546 }, { "content": "/// What should `t` be, if matched under a repetition?\n\n/// A tuple, driven by whatever names are `forall`ed in `env`.\n\nfn repeated_type(t: &Ast, env: &Assoc<Name, Ast>) -> Result<Ast, crate::ty::TypeError> {\n\n let mut drivers = vec![];\n\n for v in t.free_vrs() {\n\n if env.find(&v).map(|a| a.c()) == Some(&ast::VariableReference(v)) {\n\n drivers.push(env.find_or_panic(&v).clone());\n\n }\n\n }\n\n\n\n if drivers.is_empty() {\n\n // TODO: this is just a tuple where every element has the same type...\n\n // ...but how do we constrain the length?\n\n ty_err!(NeedsDriver (()) at t);\n\n }\n\n\n\n Ok(ast!({\"Type\" \"tuple\" : \"component\" => (,seq vec![ast!({\"Type\" \"dotdotdot_type\" :\n\n \"driver\" => (,seq drivers),\n\n \"body\" => (, t.clone())\n\n })])}))\n\n}\n\n\n", "file_path": "src/core_macro_forms.rs", "rank": 25, "score": 196235.04665500013 }, { "content": "pub fn freshen(a: &Ast) -> Ast {\n\n // TODO: I think this shouldn't take a reference for performance\n\n if freshening_enabled.with(|f| *f.borrow()) {\n\n a.c_map(&|c| match c {\n\n &Node(ref f, ref p, ref export) => {\n\n // Every part that gets mentioned inside this node...\n\n let mentioned = mentioned_in_import(p);\n\n // ...needs to have its binders freshend:\n\n let fresh_ast_and_rens = freshen_binders_inside_node(p, &mentioned);\n\n\n\n let freshened_node = Node(\n\n f.clone(),\n\n fresh_ast_and_rens.marched_map(\n\n &mut |_, 
marched: &EnvMBE<(Ast, Ren)>, &(ref part, _)| {\n\n freshen_rec(part, marched, Ren::new())\n\n },\n\n ),\n\n export.clone(),\n\n );\n\n\n", "file_path": "src/alpha.rs", "rank": 26, "score": 195666.09279480082 }, { "content": "pub fn parse_flimsy_mbe(flimsy: &Ast, grammar: &FormPat) -> Option<EnvMBE<Ast>> {\n\n use crate::grammar::FormPat::*;\n\n\n\n match grammar {\n\n Literal(_, _) => None,\n\n Call(_) => None,\n\n Scan(_, _) => None,\n\n Seq(_) => match flimsy.c() {\n\n Shape(flimsy_parts) => {\n\n if flimsy_parts[0].c() != &Atom(n(\"SEQ\")) {\n\n panic!(\"Needed a SEQ, got {}\", flimsy)\n\n }\n\n let mut fpi = flimsy_parts[1..].iter().peekable();\n\n\n\n Some(parse_flimsy_seq(&mut fpi, grammar))\n\n }\n\n _ => panic!(\"Needed a SEQ shape, got {}\", flimsy),\n\n },\n\n Star(ref body) | Plus(ref body) => match flimsy.c() {\n\n Shape(flimsy_parts) => {\n", "file_path": "src/macros/flimsy_syntax.rs", "rank": 27, "score": 193954.95372648144 }, { "content": "// TODO: move to `ast_walk`\n\n// TODO: using `lazy_static!` (with or without gensym) makes some tests fail. 
Why?\n\n/// Special name for negative `ast_walk`ing\n\npub fn negative_ret_val() -> Name { Name::global(\"⋄\") }\n\n\n\nimpl fmt::Debug for Name {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, \"«{}»\", self.sp()) }\n\n}\n\n\n\nimpl fmt::Display for Name {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, \"{}\", self.print()) }\n\n}\n\n\n", "file_path": "src/name.rs", "rank": 28, "score": 191243.26489340822 }, { "content": "pub fn expect_type(expected: &Ast, got: &Ast, loc: &Ast) -> Result<(), TypeError> {\n\n if got != expected {\n\n Err(crate::util::err::Spanned {\n\n loc: loc.clone(),\n\n body: TyErr::Mismatch(expected.clone(), got.clone()),\n\n })\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/ty.rs", "rank": 29, "score": 190552.7517409908 }, { "content": "pub fn synth_type_top(expr: &Ast) -> TypeResult {\n\n walk::<SynthTy>(expr, &LazyWalkReses::new_wrapper(Assoc::new()))\n\n}\n\n\n", "file_path": "src/ty.rs", "rank": 30, "score": 189985.19313151468 }, { "content": "pub fn freshen_binders_inside_node(parts: &EnvMBE<Ast>, mentioned: &[Name]) -> EnvMBE<(Ast, Ren)> {\n\n parts.named_map(&mut |n: &Name, a: &Ast| {\n\n if mentioned.contains(n) {\n\n freshen_binders(a)\n\n } else {\n\n (a.clone(), Ren::new())\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/alpha.rs", "rank": 31, "score": 189450.37338639045 }, { "content": "// TODO: verify that this handles internal `ExtendEnv`s right\n\npub fn freshen_with(lhs: &Ast, rhs: &Ast) -> (Ast, Ast) {\n\n if freshening_enabled.with(|f| *f.borrow()) {\n\n match (lhs.c(), rhs.c()) {\n\n (&Node(ref f, ref p_lhs, ref export), &Node(ref f_rhs, ref p_rhs, ref export_rhs)) => {\n\n if f != f_rhs || export != export_rhs {\n\n return (lhs.clone(), rhs.clone());\n\n }\n\n // Every part that gets mentioned inside this node...\n\n let mentioned = mentioned_in_import(p_lhs);\n\n // (if it matters which `p_{l,r}hs` we used, the match below will be `None`)\n\n // ...needs to have its 
binders freshend:\n\n match freshen_binders_inside_node_with(p_lhs, p_rhs, &mentioned) {\n\n Some(fresh_ast_and_rens) => {\n\n let new_p_lhs = fresh_ast_and_rens.marched_map(\n\n &mut |_,\n\n marched: &EnvMBE<(Ast, Ren, Ast, Ren)>,\n\n &(ref parts, _, _, _)| {\n\n freshen_rec(\n\n parts,\n\n &marched.map(&mut |q| (q.0.clone(), q.1.clone())),\n", "file_path": "src/alpha.rs", "rank": 32, "score": 188412.4480828722 }, { "content": "/// Remove an `ExtendEnv` without respecting its binding behavior.\n\n/// This is safe if directly inside a `Node` that was just freshened.\n\n/// (TODO: think about what \"just\" means here. It's super-subtle!)\n\npub fn strip_ee(a: &Ast) -> &Ast {\n\n match a.c() {\n\n ExtendEnv(body, _) => (&*body),\n\n ExtendEnvPhaseless(body, _) => (&*body),\n\n _ => icp!(\"Not an EE\"),\n\n }\n\n}\n\n\n", "file_path": "src/core_forms.rs", "rank": 33, "score": 187982.6462073886 }, { "content": "pub fn strip_ql(a: &Ast) -> &Ast {\n\n match a.c() {\n\n QuoteLess(body, _) => &*body,\n\n _ => icp!(\"Not an unquote\"),\n\n }\n\n}\n\n\n", "file_path": "src/core_forms.rs", "rank": 34, "score": 187982.6462073886 }, { "content": "pub fn n(s: &str) -> Name { Name::global(s) }\n\n\n", "file_path": "src/name.rs", "rank": 35, "score": 186353.05670323322 }, { "content": "#[wasm_bindgen]\n\npub fn stash_lang(result_name: &str, program: &str, lang_of_progam: &str) {\n\n let orig_lang = language_stash.with(|ls| (*ls.borrow()).get(lang_of_progam).unwrap().clone());\n\n let new_lang = get_language(program, orig_lang);\n\n language_stash.with(|ls| ls.borrow_mut().insert(result_name.to_string(), new_lang));\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 36, "score": 184213.64899146877 }, { "content": "/// Returns an `Ast` like `a`, but with fresh `Atom`s\n\n/// and a map to change references in the same manner\n\npub fn freshen_binders(a: &Ast) -> (Ast, Ren) {\n\n match a.c() {\n\n Trivial | VariableReference(_) => (a.clone(), Ren::new()),\n\n Atom(old_name) => {\n\n let 
new_name = old_name.freshen();\n\n (\n\n a.with_c(Atom(new_name)),\n\n Ren::single(*old_name, raw_ast!(VariableReference(new_name))),\n\n )\n\n }\n\n Node(ref f, ref parts, ref export) => {\n\n if export == &crate::beta::ExportBeta::Nothing {\n\n return (a.clone(), Ren::new()); // short-circuit (should this at least warn?)\n\n }\n\n let exported = export.names_mentioned(); // Unmentioned atoms shouldn't be touched\n\n\n\n let fresh_pairs = freshen_binders_inside_node(parts, &exported);\n\n let fresh_ast = fresh_pairs.map(&mut |&(ref a, _): &(Ast, _)| a.clone());\n\n let renaming = export.extract_from_mbe(&fresh_pairs, &|&(_, ref r): &(_, Ren)| &r);\n\n\n", "file_path": "src/alpha.rs", "rank": 37, "score": 183658.1489692204 }, { "content": "pub fn get_language(program: &str, lang: Language) -> Language {\n\n // TODO: I guess syntax extensions ought to return `Result`, too...\n\n let lib_ast = crate::grammar::parse(&core_forms::outermost_form(), lang.pc, &program).unwrap();\n\n let lib_typed = ast_walk::walk::<ty::SynthTy>(\n\n &lib_ast,\n\n &ast_walk::LazyWalkReses::new(lang.type_env, lang.type_env__phaseless, lib_ast.clone()),\n\n )\n\n .unwrap();\n\n let lib_expanded = crate::expand::expand(&lib_ast).unwrap();\n\n let lib_evaled = crate::runtime::eval::eval(&lib_expanded, lang.value_env).unwrap();\n\n let (new_pc, new__value_env) = if let Value::Sequence(mut lang_and_env) = lib_evaled {\n\n let env_value = lang_and_env.pop().unwrap();\n\n let lang_value = lang_and_env.pop().unwrap();\n\n let new_pc = match &*lang_value {\n\n Value::ParseContext(boxed_pc) => (**boxed_pc).clone(),\n\n _ => icp!(\"[type error] not a language\"),\n\n };\n\n let new__value_env = if let Value::Struct(ref env) = *env_value {\n\n let mut new__value_env = Assoc::new();\n\n // We need to un-freshen the names that we're importing\n", "file_path": "src/main.rs", "rank": 38, "score": 183170.16483241442 }, { "content": "// Helper for `bound_from_[export_]beta`:\n\nfn names_exported_by(ast: 
&Ast, quote_depth: i16) -> Vec<Name> {\n\n use tap::tap::Tap;\n\n\n\n match ast.c() {\n\n Atom(n) => vec![*n],\n\n Node(_, sub_parts, export) => {\n\n if quote_depth <= 0 {\n\n bound_from_export_beta(export, sub_parts, quote_depth)\n\n } else {\n\n sub_parts.map_reduce(\n\n &|a: &Ast| names_exported_by(a, quote_depth),\n\n &|v1, v2| v1.clone().tap_mut(|v1| v1.append(&mut v2.clone())),\n\n vec![],\n\n )\n\n }\n\n }\n\n ExtendEnv(body, _) => names_exported_by(body, quote_depth),\n\n QuoteMore(body, _) => names_exported_by(body, quote_depth + 1),\n\n QuoteLess(body, _) => names_exported_by(body, quote_depth - 1),\n\n ast if quote_depth <= 0 => icp!(\"beta SameAs refers to an invalid AST node: {}\", ast),\n\n _ => vec![],\n\n }\n\n}\n\n\n", "file_path": "src/beta.rs", "rank": 39, "score": 182983.2756820108 }, { "content": "pub fn core_values() -> Assoc<Name, Value> { core_typed_values().map(&erase_type) }\n\n\n\n// Helper for building an environment by reifying a bunch of Rust types\n\nmacro_rules! 
reified_ty_env {\n\n ( $($t:ty),* ) => {\n\n Assoc::new() $( .set(<$t>::ty_name(), <$t>::ty()))*\n\n };\n\n}\n\n\n", "file_path": "src/runtime/core_values.rs", "rank": 40, "score": 180436.62131747036 }, { "content": "// Like just taking the (non-Protected) keys from `env_from_beta`, but faster and non-failing.\n\n// It's a runtime error if the definition of a form causes `env_from_beta` to diverge from this.\n\npub fn bound_from_beta(b: &Beta, parts: &EnvMBE<crate::ast::Ast>, quote_depth: i16) -> Vec<Name> {\n\n match *b {\n\n Nothing => vec![],\n\n Shadow(ref lhs, ref rhs) => {\n\n let mut res = bound_from_beta(&*lhs, parts, quote_depth);\n\n let mut res_r = bound_from_beta(&*rhs, parts, quote_depth);\n\n res.append(&mut res_r);\n\n res\n\n }\n\n ShadowAll(ref sub_beta, ref drivers) => {\n\n let mut res = vec![];\n\n for sub_parts in &parts.march_all(drivers) {\n\n res.append(&mut bound_from_beta(&*sub_beta, sub_parts, quote_depth));\n\n }\n\n res\n\n }\n\n SameAs(ref n_s, _) | BoundButNotUsable(ref n_s) => {\n\n // Can be a non-atom\n\n names_exported_by(parts.get_leaf_or_panic(n_s), quote_depth)\n\n }\n\n Protected(ref _n_s) => vec![], // Non-binding\n\n Basic(ref n_s, _) | Underspecified(ref n_s) => {\n\n vec![parts.get_leaf_or_panic(n_s).to_name()]\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/beta.rs", "rank": 41, "score": 179562.65914668236 }, { "content": "#[wasm_bindgen]\n\npub fn html__eval_program(program: &str, stashed_lang: &str) -> String {\n\n let lang: Language =\n\n language_stash.with(|ls| (*ls.borrow()).get(stashed_lang).unwrap().clone());\n\n html_render(eval_program(program, lang))\n\n}\n\n\n\n/// Evaluate `program` in `lang_of_program`, and stash the resulting language in `result_name`.\n\n/// \"unseemly\" starts out in the stash, so it's possible to start from somewhere.\n", "file_path": "src/main.rs", "rank": 42, "score": 179240.71117497195 }, { "content": "/// Evaluate a program written in Unseemly.\n\n/// Of course, it may immediately do 
`include /[something]/` to switch languages.\n\npub fn eval_unseemly_program_top(program: &str) -> Result<Value, String> {\n\n eval_program(program, unseemly())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 43, "score": 179036.90412580737 }, { "content": "fn parse_flimsy_ast(flimsy: &Ast, grammar: &FormPat) -> Ast {\n\n use crate::grammar::FormPat::*;\n\n\n\n match grammar {\n\n Anyways(ref a) => a.clone(),\n\n Impossible => unimplemented!(),\n\n Scan(_, _) => flimsy.clone(),\n\n Literal(_, _) => raw_ast!(Trivial),\n\n VarRef(_) => match flimsy.c() {\n\n VariableReference(a) => raw_ast!(VariableReference(*a)),\n\n non_atom => panic!(\"Needed an atom, got {}\", non_atom),\n\n },\n\n NameImport(body, beta) => {\n\n raw_ast!(ExtendEnv(parse_flimsy_ast(flimsy, &*body), beta.clone()))\n\n }\n\n NameImportPhaseless(body, beta) => {\n\n raw_ast!(ExtendEnvPhaseless(parse_flimsy_ast(flimsy, &*body), beta.clone()))\n\n }\n\n QuoteDeepen(body, pos) => raw_ast!(QuoteMore(parse_flimsy_ast(flimsy, &*body), *pos)),\n\n QuoteEscape(body, depth) => raw_ast!(QuoteLess(parse_flimsy_ast(flimsy, &*body), *depth)),\n", "file_path": "src/macros/flimsy_syntax.rs", "rank": 44, "score": 176834.42101050293 }, { "content": "/// Type program written in Unseemly.\n\n/// Of course, it may immediately do `include /[something]/` to switch languages.\n\npub fn type_unseemly_program_top(program: &str) -> Result<Ast, String> {\n\n let unseemly = unseemly();\n\n let ast: Ast = crate::grammar::parse(&core_forms::outermost_form(), unseemly.pc, program)\n\n .map_err(|e| e.msg)?;\n\n\n\n ast_walk::walk::<ty::SynthTy>(\n\n &ast,\n\n &ast_walk::LazyWalkReses::new(unseemly.type_env, unseemly.type_env__phaseless, ast.clone()),\n\n )\n\n .map_err(|e| format!(\"{}\", e))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 45, "score": 174036.38634467596 }, { "content": "fn eval_struct_expr(part_values: LazyWalkReses<Eval>) -> Result<Value, ()> {\n\n let mut res = Assoc::new();\n\n\n\n for component_parts in 
part_values.march_parts(&[n(\"component\"), n(\"component_name\")]) {\n\n res = res.set(\n\n component_parts.get_term(n(\"component_name\")).to_name(),\n\n component_parts.get_res(n(\"component\"))?,\n\n );\n\n }\n\n\n\n Ok(Struct(res))\n\n}\n\n\n", "file_path": "src/core_forms.rs", "rank": 46, "score": 171917.77916886166 }, { "content": "pub fn apply__function_value(f: &Value, args: Vec<Value>) -> Value {\n\n match *f {\n\n BuiltInFunction(BIF(ref f)) => f(args.into_iter().collect()),\n\n Function(ref cl) => {\n\n let mut clo_env = cl.env.clone();\n\n if cl.params.len() != args.len() {\n\n panic!(\n\n \"[type error] Attempted to apply {} arguments to function requiring {} \\\n\n parameters\",\n\n args.len(),\n\n cl.params.len()\n\n );\n\n }\n\n for (p, a) in cl.params.iter().zip(args.into_iter()) {\n\n clo_env = clo_env.set(*p, a)\n\n }\n\n eval(&cl.body, clo_env).unwrap()\n\n }\n\n _ => panic!(\"[type error] {:#?} is not a function\", f),\n\n }\n", "file_path": "src/runtime/eval.rs", "rank": 47, "score": 171017.34480026504 }, { "content": "/// Like `freshen_binders`, but to unite two `Ast`s with identical structure (else returns `None`).\n\npub fn freshen_binders_with(lhs: &Ast, rhs: &Ast) -> Option<(Ast, Ren, Ast, Ren)> {\n\n match (lhs.c(), rhs.c()) {\n\n (&Trivial, &Trivial) | (&VariableReference(_), &VariableReference(_)) => {\n\n Some((lhs.clone(), Ren::new(), rhs.clone(), Ren::new()))\n\n }\n\n (&Atom(old_name_lhs), &Atom(old_name_rhs)) => {\n\n let new_name = old_name_lhs.freshen();\n\n Some((\n\n lhs.with_c(Atom(new_name)),\n\n Ren::single(old_name_lhs, raw_ast!(VariableReference(new_name))),\n\n rhs.with_c(Atom(new_name)),\n\n Ren::single(old_name_rhs, raw_ast!(VariableReference(new_name))),\n\n ))\n\n }\n\n // TODO: Handle matching `'[let (a,b) = ⋯]'` against the pattern `'[let ,[p], = ⋯]'` !!\n\n (\n\n &Node(ref f, ref parts_lhs, ref export),\n\n &Node(ref f_rhs, ref parts_rhs, ref export_rhs),\n\n ) => {\n\n if f != f_rhs || export != export_rhs {\n", 
"file_path": "src/alpha.rs", "rank": 48, "score": 169864.83588602737 }, { "content": "pub fn dynamic__ace_rules(prog: &str, lang: &crate::Language) -> String {\n\n // This only works with the Unseemly syntax extension form, which sets this side-channel:\n\n crate::core_macro_forms::syn_envs__for__highlighting.with(|envs| envs.borrow_mut().clear());\n\n\n\n // Errors are okay, especially late!\n\n let _ = parse(&crate::core_forms::outermost_form(), lang.pc.clone(), prog);\n\n\n\n let mut result = String::new();\n\n\n\n crate::core_macro_forms::syn_envs__for__highlighting.with(|envs| {\n\n use indoc::writedoc;\n\n use std::fmt::Write;\n\n\n\n let mut prev_grammar = lang.pc.grammar.clone();\n\n let mut cur_rule_name = \"start\".to_string();\n\n let mut idx = 0;\n\n\n\n for (extender_ast, grammar) in &*envs.borrow() {\n\n let longest_line = extender_ast\n\n .orig_str(prog)\n", "file_path": "src/highlighter_generation.rs", "rank": 49, "score": 168159.3327444924 }, { "content": "fn parse_flimsy_seq<'a, I>(flimsy_seq: &mut Peekable<I>, grammar: &FormPat) -> EnvMBE<Ast>\n\nwhere I: Iterator<Item = &'a Ast> {\n\n use crate::grammar::FormPat::*;\n\n\n\n match grammar {\n\n Seq(ref grammar_parts) => {\n\n let mut result = EnvMBE::new();\n\n\n\n for grammar_part in grammar_parts {\n\n result = result.combine_overriding(&parse_flimsy_seq(flimsy_seq, grammar_part));\n\n }\n\n result\n\n }\n\n _ => {\n\n // `Anyways`es shouldn't consume anything (and they'll always be `Named`):\n\n let consuming = match grammar {\n\n Named(_, ref body) => match **body {\n\n Anyways(_) => false,\n\n // HACK: special case for core_macro_forms::macro_invocation.\n\n // There has to be a less flimsy way of doing this.\n", "file_path": "src/macros/flimsy_syntax.rs", "rank": 50, "score": 167272.69600729653 }, { "content": "/// Displays `res` on a color terminal.\n\npub fn terminal_display(res: Result<Value, String>) {\n\n match res {\n\n Ok(v) => println!(\"\\x1b[1;32m≉\\x1b[0m {}\", v),\n\n Err(s) => 
println!(\"\\x1b[1;31m✘\\x1b[0m {}\", s),\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 51, "score": 165900.61968271763 }, { "content": "fn adjust_opacity(t: &Ast, env: Assoc<Name, Ast>, delta: i32) -> Ast {\n\n let ctxt = crate::ast_walk::LazyWalkReses {\n\n extra_info: delta,\n\n ..crate::ast_walk::LazyWalkReses::new_wrapper(env)\n\n };\n\n crate::ast_walk::walk::<MuProtect>(t, &ctxt).unwrap()\n\n}\n\n\n", "file_path": "src/core_qq_forms.rs", "rank": 52, "score": 165585.29782763834 }, { "content": "// TODO: when returning traits works, just make functions `Reifiable`\n\n// TOUNDERSTAND: 'x also allows things to be owned instead?!?\n\npub fn reify_1ary_function<A: Reifiable + 'static, R: Reifiable + 'static>(\n\n f: Rc<Box<(dyn Fn(A) -> R)>>,\n\n) -> Value {\n\n Value::BuiltInFunction(eval::BIF(Rc::new(move |args: Vec<Value>| {\n\n ((*f)(A::reflect(&args[0]))).reify()\n\n })))\n\n}\n\n\n", "file_path": "src/runtime/reify.rs", "rank": 53, "score": 165374.57548532693 }, { "content": "pub fn reflect_1ary_function<A: Reifiable + 'static, R: Reifiable + 'static>(\n\n f_v: Value,\n\n) -> Rc<Box<(dyn Fn(A) -> R)>> {\n\n Rc::new(Box::new(move |a: A| {\n\n extract!((&f_v)\n\n Value::BuiltInFunction = (ref bif) => R::reflect(&(*bif.0)(vec![a.reify()]));\n\n Value::Function = (ref closure) => {\n\n R::reflect(&eval::eval(&closure.body,\n\n closure.env.clone().set(closure.params[0], a.reify())).unwrap())\n\n })\n\n }))\n\n}\n\n\n", "file_path": "src/runtime/reify.rs", "rank": 54, "score": 165374.57548532693 }, { "content": "// `sub` must be a subtype of `sup`. 
(Note that `sub` becomes the context element!)\n\npub fn is_subtype(\n\n sub: &Ast,\n\n sup: &Ast,\n\n parts: &LazyWalkReses<crate::ty::SynthTy>,\n\n) -> Result<Assoc<Name, Ast>, TyErr> {\n\n walk::<Subtype>(sup, &parts.switch_mode::<Subtype>().with_context(sub.clone()))\n\n}\n\n\n", "file_path": "src/ty_compare.rs", "rank": 55, "score": 164585.0462407983 }, { "content": "#[wasm_bindgen]\n\npub fn generate__ace_rules(stashed_lang: &str) -> String {\n\n let rules = language_stash.with(|ls| {\n\n highlighter_generation::ace_rules(&(*ls.borrow()).get(stashed_lang).unwrap().pc.grammar)\n\n });\n\n format!(\n\n \"start: [ {} // HACK: comments aren't part of the base language:\n\n {{ token: 'comment', regex: '#[^\\\\\\\\n|][^\\\\\\\\n]*|#\\\\\\\\|.*?\\\\\\\\|#' }}]\",\n\n rules\n\n )\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 56, "score": 164516.57746649446 }, { "content": "// TODO #4: make this extensible, too! When the user creates a new NT,\n\n// do they need to specify the direction?\n\npub fn nt_is_positive(nt: Name) -> bool {\n\n if nt == n(\"Type\") || nt == n(\"Expr\") || nt == n(\"DefaultReference\") {\n\n true\n\n } else if nt == n(\"Pat\") || nt == n(\"Atom\") || nt == n(\"Ident\") {\n\n // TODO: Remove \"Ident\" entirely.\n\n // HACK: \"Ident\" and \"DefaultAtom\" are just not walked; this should probably be three-armed\n\n false\n\n } else {\n\n icp!(\"unknown NT {}\", nt)\n\n }\n\n}\n\n\n", "file_path": "src/core_type_forms.rs", "rank": 57, "score": 160658.41888064513 }, { "content": "/// Generate Unseemly.\n\n/// (This is the core language.)\n\npub fn unseemly() -> Language {\n\n Language {\n\n pc: crate::core_forms::outermost__parse_context(),\n\n type_env: crate::runtime::core_values::core_types(),\n\n type_env__phaseless: crate::runtime::core_values::core_types(),\n\n value_env: crate::runtime::core_values::core_values(),\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 58, "score": 160657.91932763046 }, { "content": "// `sub` must be a 
subtype of `sup`. (Note that `sub` becomes the context element!)\n\n// Only use this in tests or at the top level; this discards any non-phase-0-environments!\n\npub fn must_subtype(\n\n sub: &Ast,\n\n sup: &Ast,\n\n env: Assoc<Name, Ast>,\n\n) -> Result<Assoc<Name, Ast>, TyErr> {\n\n // TODO: I think we should be canonicalizing first...\n\n // TODO: they might need different environments?\n\n let lwr_env = &LazyWalkReses::<Subtype>::new_wrapper(env).with_context(sub.clone());\n\n\n\n walk::<Subtype>(sup, lwr_env)\n\n}\n\n\n", "file_path": "src/ty_compare.rs", "rank": 59, "score": 160080.741258274 }, { "content": "// TODO: I think we need to route some other things (especially in macros.rs) through this...\n\npub fn must_equal(\n\n lhs: &Ast,\n\n rhs: &Ast,\n\n parts: &LazyWalkReses<crate::ty::SynthTy>,\n\n) -> Result<(), TyErr> {\n\n let canon_parts = parts.switch_mode::<Canonicalize>();\n\n if walk::<Canonicalize>(lhs, &canon_parts) == walk::<Canonicalize>(rhs, &canon_parts) {\n\n Ok(())\n\n } else {\n\n Err(TyErr::Mismatch(lhs.clone(), rhs.clone()))\n\n }\n\n}\n\n\n", "file_path": "src/ty_compare.rs", "rank": 60, "score": 160076.64792850747 }, { "content": "/// Run the file (which hopefully evaluates to `capture_language`), and get the language it defines.\n\n/// Returns the parse context, the type environment, the phaseless version of the type environment,\n\n/// and the value environment.\n\n/// This doesn't take a language 4-tuple -- it assumes that the language is in Unseemly\n\n/// (but of course it may do `include /[some_language.unseemly]/` itself).\n\n/// TODO: we only need the phaselessness for macros, and maybe we can get rid of it there?\n\npub fn language_from_file(path: &std::path::Path) -> Language {\n\n let mut raw_lib = String::new();\n\n\n\n use std::io::Read;\n\n let orig_dir = std::env::current_dir().unwrap();\n\n std::fs::File::open(path)\n\n .expect(\"Error opening file\")\n\n .read_to_string(&mut raw_lib)\n\n .expect(\"Error reading 
file\");\n\n // Evaluate the file in its own directory:\n\n if let Some(dir) = path.parent() {\n\n // Might be empty:\n\n if dir.is_dir() {\n\n std::env::set_current_dir(dir).unwrap();\n\n }\n\n }\n\n\n\n let lang = get_language(&raw_lib, unseemly());\n\n\n\n // Go back to the original directory:\n\n std::env::set_current_dir(orig_dir).unwrap();\n\n\n\n return lang;\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 61, "score": 159261.59064064338 }, { "content": "/// Like `beta::names_mentioned`, but for all the imports in `parts`\n\nfn mentioned_in_import(parts: &EnvMBE<Ast>) -> Vec<Name> {\n\n fn process_ast(a: &Ast, v: &mut Vec<Name>) {\n\n match a.c() {\n\n Node(_, _, _) => {} // new scope\n\n ExtendEnv(ref body, ref beta) | ExtendEnvPhaseless(ref body, ref beta) => {\n\n let mut beta_mentions = beta.names_mentioned_and_bound();\n\n v.append(&mut beta_mentions);\n\n process_ast(&*body, v);\n\n }\n\n // TODO: does it make sense to mention a name underneath a quotation?\n\n QuoteMore(ref body, _) | QuoteLess(ref body, _) => process_ast(body, v),\n\n Trivial | Atom(_) | VariableReference(_) => {} // no beta\n\n Shape(_) | IncompleteNode(_) => icp!(\"{:?} isn't a complete AST\", a),\n\n }\n\n }\n\n\n\n let mut res = vec![];\n\n parts.map(&mut |a| process_ast(a, &mut res));\n\n res\n\n}\n\n\n", "file_path": "src/alpha.rs", "rank": 62, "score": 157108.79426619547 }, { "content": "// TODO #38: This should take a grammar, not an NT, as an argument,\n\n// and be located underneath each Plus or Star.\n\npub fn dotdotdot_form(nt: Name) -> Rc<Form> {\n\n Rc::new(Form {\n\n name: n(\"dotdotdot\"),\n\n grammar: Rc::new(form_pat!((delim \"...[\", \"[\",\n\n [(star [(call \"DefaultSeparator\"), (scan_cat \"(,)\", \"keyword.operator\"),\n\n (named \"driver\", (-- 1 varref)),\n\n (call \"DefaultSeparator\"), (scan_cat \"(,)\", \"keyword.operator\")]), (lit \">>\"),\n\n (named \"body\", (call_by_name nt))]))),\n\n type_compare: Positive(NotWalked), // this is not a type form\n\n 
synth_type: Both(\n\n cust_rc_box!(|ddd_parts| { ddd_type__body!(ddd_parts) }),\n\n cust_rc_box!(|ddd_parts| { ddd_type__body!(ddd_parts) }),\n\n ),\n\n // An evaluate-time version of this might be a good idea;\n\n // it might be all that's needed to implement variable-number-of-argument functions.\n\n // It shouldn't be the same form, though. Maybe `...( >> )...` ?\n\n eval: Positive(NotWalked),\n\n quasiquote: Positive(cust_rc_box!(|ddd_parts| {\n\n use crate::{\n\n runtime::eval::{Sequence, Value},\n", "file_path": "src/core_qq_forms.rs", "rank": 63, "score": 153177.6634198244 }, { "content": "// Macro By Example transcription. TODO: currently positive-only\n\n// There are two parts to the way that Macro By Example works in Unseemly.\n\n//\n\n// The first is the types and how to construct them:\n\n// If `T` is `**[Int Float]**,\n\n// then `:::[T >> Expr<T> ]:::` is `**[Expr<Int> Expr<Float>]**`.\n\n// If you match syntax under a `*`, you'll get something like `::[T >> Expr<T> ]:::`.\n\n//\n\n// The second is how we use them:\n\n// In a syntax quotation, you can write `...[,x, >> some_syntax]...`\n\npub fn dotdotdot(nt: Name) -> Rc<FormPat> {\n\n Rc::new(FormPat::Scope(dotdotdot_form(nt), crate::beta::ExportBeta::Nothing))\n\n}\n\n\n\n// Once it's possible to write `where Mode::Elt = Ast and Mode::Err = <whatever>`,\n\n// this can be turned into a function.\n\n// The behavior of `...[]...` is identical in positive and negative modes.\n\nmacro_rules! 
ddd_type__body {\n\n ($ddd_parts:expr) => {\n\n {\n\n let drivers : Vec<Name> = $ddd_parts.get_rep_term(n(\"driver\")).into_iter().map(|a| {\n\n match a.c() {\n\n QuoteLess(ref d, _) => d.vr_to_name(),\n\n _ => icp!()\n\n }\n\n }).collect();\n\n\n\n\n\n // HACK: we want to operate on the environment one level less quoted\n\n // (that's why we put commas around the drivers)\n", "file_path": "src/core_qq_forms.rs", "rank": 64, "score": 153171.70978992363 }, { "content": "/// Parse `tt` with the grammar `f` in an empty syntactic environment.\n\n/// `Call` patterns are errors.\n\npub fn parse_top(f: &FormPat, toks: &str) -> Result<Ast, crate::earley::ParseError> {\n\n crate::earley::parse_in_syn_env(f, Assoc::new(), toks)\n\n}\n\n\n\nuse self::FormPat::*;\n\n\n", "file_path": "src/grammar.rs", "rank": 65, "score": 151948.49249951742 }, { "content": "#[wasm_bindgen]\n\npub fn generate__ace_rules__for(program: &str, stashed_lang: &str) -> String {\n\n let lang = language_stash\n\n .with(|ls| (*ls.borrow()).get(stashed_lang).expect(\"Language not defined\").clone());\n\n\n\n highlighter_generation::dynamic__ace_rules(program, &lang)\n\n}\n", "file_path": "src/main.rs", "rank": 66, "score": 151279.57626845388 }, { "content": "fn make_core_typed_values() -> Assoc<Name, TypedValue> {\n\n assoc_n!(\n\n \"fix\" =>\n\n tyf!( { \"Type\" \"forall_type\" :\n\n \"param\" => [\"F\"], // has to be a function, but we don't know its arity\n\n \"body\" => (import [* [forall \"param\"]] { \"Type\" \"fn\" :\n\n \"param\" => [ { \"Type\" \"fn\" :\n\n \"param\" => [{\"Type\" \"fn\" : \"param\" => [], \"ret\" => (vr \"F\") }],\n\n \"ret\" => (vr \"F\")} ],\n\n \"ret\" => (vr \"F\") })},\n\n // TODO: built-in functions, even though none of them make sense here, shouldn't crash\n\n ( Function(cl) ) => {\n\n let new_env = cl.env.set(cl.params[0],\n\n // reconstruct the invocation that caused this:\n\n Function(Rc::new(crate::runtime::eval::Closure {\n\n body: ast!({\"Expr\" \"apply\" :\n\n 
\"rator\" => (vr \"fix\"),\n\n \"rand\" => [(vr \"orig_arg\")]}),\n\n params: vec![],\n\n env: assoc_n!(\"orig_arg\" => Function(cl.clone()),\n", "file_path": "src/runtime/core_values.rs", "rank": 67, "score": 151111.36399372527 }, { "content": "pub fn new_scan(regex: &str, cat: Option<String>) -> FormPat {\n\n Scan(Scanner(regex::Regex::new(&format!(\"^{}\", regex)).unwrap()), cat)\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Scanner(pub regex::Regex);\n\n\n\nimpl PartialEq for Scanner {\n\n fn eq(&self, other: &Scanner) -> bool { self.0.as_str() == other.0.as_str() }\n\n}\n\n\n\nimpl reify::Reifiable for Scanner {\n\n fn ty_name() -> Name { n(\"Scanner\") }\n\n\n\n fn reify(&self) -> Value { <String as reify::Reifiable>::reify(&self.0.as_str().to_owned()) }\n\n\n\n fn reflect(v: &Value) -> Self {\n\n Scanner(regex::Regex::new(&<String as reify::Reifiable>::reflect(v)).unwrap())\n\n }\n\n}\n", "file_path": "src/grammar.rs", "rank": 69, "score": 149006.65011370808 }, { "content": "fn eval_match(part_values: LazyWalkReses<Eval>) -> Result<Value, ()> {\n\n for arm_values in part_values.march_all(&[n(\"arm\"), n(\"p\")]) {\n\n // TODO: don't we need to set a context?\n\n match arm_values.get_res(n(\"arm\")) {\n\n Ok(res) => {\n\n return Ok(res);\n\n }\n\n Err(()) => { /* try the next one */ }\n\n }\n\n }\n\n panic!(\"No arms matched! TODO #2\");\n\n}\n\n\n", "file_path": "src/core_forms.rs", "rank": 70, "score": 148170.93708722174 }, { "content": "fn eval_lambda(part_values: LazyWalkReses<Eval>) -> Result<Value, ()> {\n\n Ok(Function(Rc::new(Closure {\n\n body: strip_ee(part_values.get_term_ref(n(\"body\"))).clone(),\n\n params: part_values.get_rep_term(n(\"param\")).iter().map(Ast::to_name).collect(),\n\n env: part_values.env,\n\n })))\n\n}\n\n\n", "file_path": "src/core_forms.rs", "rank": 71, "score": 148170.93708722174 }, { "content": "fn eval_apply(part_values: LazyWalkReses<Eval>) -> Result<Value, ()> {\n\n match part_values.get_res(n(\"rator\"))? 
{\n\n Function(clos) => {\n\n let mut new_env = clos.env.clone();\n\n for (p, v) in clos.params.iter().zip(part_values.get_rep_res(n(\"rand\"))?) {\n\n new_env = new_env.set(*p, v);\n\n }\n\n\n\n // TODO: this seems wrong; it discards other phase information.\n\n // But would it be correct to have closures capture at all phases?\n\n crate::runtime::eval::eval(&clos.body, new_env)\n\n }\n\n BuiltInFunction(crate::runtime::eval::BIF(f)) => Ok(f(part_values.get_rep_res(n(\"rand\"))?)),\n\n other => {\n\n icp!(\"[type error] invoked {:#?} as if it were a function\", other)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/core_forms.rs", "rank": 72, "score": 148170.93708722174 }, { "content": "// Deprecated; use `::core_forms::find` instead (keep it qualified!)\n\npub fn find_core_form(nt: &str, name: &str) -> Rc<Form> { find(nt, name) }\n\n\n", "file_path": "src/core_forms.rs", "rank": 73, "score": 145656.72625609091 }, { "content": "fn eval_enum_expr(part_values: LazyWalkReses<Eval>) -> Result<Value, ()> {\n\n Ok(Enum(part_values.get_term(n(\"name\")).to_name(), part_values.get_rep_res(n(\"component\"))?))\n\n}\n\n\n", "file_path": "src/core_forms.rs", "rank": 74, "score": 145595.13829292147 }, { "content": "fn eval_tuple_expr(part_values: LazyWalkReses<Eval>) -> Result<Value, ()> {\n\n Ok(crate::runtime::eval::Value::Sequence(\n\n part_values.get_rep_res(n(\"component\"))?.into_iter().map(Rc::new).collect(),\n\n ))\n\n}\n\n\n", "file_path": "src/core_forms.rs", "rank": 75, "score": 145595.13829292147 }, { "content": "pub fn delim(s: &str) -> DelimChar {\n\n match s {\n\n \"(\" | \")\" => Paren,\n\n \"[\" | \"]\" => SquareBracket,\n\n \"{\" | \"}\" => CurlyBracket,\n\n _ => icp!(\"not a delimiter!\"),\n\n }\n\n}\n", "file_path": "src/read.rs", "rank": 77, "score": 145187.67036097718 }, { "content": "pub fn enable_fake_freshness(ff: bool) {\n\n fake_freshness.with(|fake_freshness_| {\n\n *fake_freshness_.borrow_mut() = ff;\n\n })\n\n}\n\n\n\n// only available on 
nightly:\n\n// impl !Send for Name {}\n\n\n\nimpl Name {\n\n /// Two names that are unequal to each other will have different \"spelling\"s.\n\n /// Tomatoes (🍅) may have been added to the end to ensure uniqueness.\n\n pub fn sp(self) -> String { spellings.with(|us| us.borrow()[self.id].unique.clone()) }\n\n /// The \"original spelling\" of a name; the string that was used to define it. These may collide.\n\n pub fn orig_sp(self) -> String { spellings.with(|us| us.borrow()[self.id].orig.clone()) }\n\n\n\n /// This extracts the \"original\" `Name`, prior to any freshening.\n\n /// This is probably not ever the *right* thing to do, but may be needed as a workaround.\n\n pub fn unhygienic_orig(self) -> Name {\n\n spellings.with(|us| Name::new(&us.borrow()[self.id].orig, false))\n", "file_path": "src/name.rs", "rank": 78, "score": 144259.0793223747 }, { "content": "/// Make a `<Mode::D as Dir>::Out` by walking `node` in the environment from `walk_ctxt`.\n\n/// `walk_ctxt` is used as an environment,\n\n/// and by betas to access other parts of the current node.\n\npub fn walk<Mode: WalkMode>(\n\n a: &Ast,\n\n walk_ctxt: &LazyWalkReses<Mode>,\n\n) -> Result<<Mode::D as Dir>::Out, Mode::Err> {\n\n layer_watch! 
{ ast_walk_layer :\n\n // TODO: can we get rid of the & in front of our arguments and save the cloning?\n\n // TODO: this has a lot of direction-specific runtime hackery.\n\n // Maybe we want separate positive and negative versions?\n\n let (a, walk_ctxt) = match a.c() {\n\n // HACK: We want to process EE before pre_match before everything else.\n\n // This probably means we should find a way to get rid of pre_match.\n\n // But we can't just swap `a` and the ctxt when `a` is LiteralLike and the ctxt isn't.\n\n\n\n ExtendEnv(_,_) => { (a.clone(), walk_ctxt.clone()) }\n\n _ => Mode::D::pre_walk(a.clone(), walk_ctxt.clone())\n\n };\n\n\n\n ld!(ast_walk_layer, ld_enabled, \"{} {}\", Mode::name(), a);\n\n // lc!(ast_walk_layer, ld_enabled, \" from: {}\", walk_ctxt.this_ast);\n\n // match walk_ctxt.env.find(&negative_ret_val()) {\n", "file_path": "src/ast_walk.rs", "rank": 79, "score": 143943.1452723879 }, { "content": "fn assign_t_var(name: &str, t: &str) -> Result<Ast, String> {\n\n let ast = grammar::parse(\n\n &grammar::FormPat::Call(n(\"Type\")),\n\n core_forms::outermost__parse_context(),\n\n t,\n\n )\n\n .map_err(|e| e.msg)?;\n\n\n\n let res =\n\n ty_env.with(|tys| ty::synth_type(&ast, tys.borrow().clone()).map_err(|e| format!(\"{}\", e)));\n\n\n\n if let Ok(ref t) = res {\n\n ty_env.with(|tys| {\n\n let new_tys = tys.borrow().set(n(name), t.clone());\n\n *tys.borrow_mut() = new_tys;\n\n })\n\n }\n\n\n\n res\n\n}\n\n\n", "file_path": "src/end_to_end__tests.rs", "rank": 80, "score": 143687.1686502052 }, { "content": "/// `var_to_out`, for positive walks where `Out` == `Elt`\n\npub fn var_lookup<Elt: Debug + Clone>(n: Name, env: &Assoc<Name, Elt>) -> Result<Elt, ()> {\n\n Ok((*env.find(&n).unwrap_or_else(|| panic!(\"Name {:#?} unbound in {:#?}\", n, env))).clone())\n\n}\n\n\n", "file_path": "src/walk_mode.rs", "rank": 82, "score": 140964.7946052153 }, { "content": "/// Only does anything if `Mode` is negative.\n\npub fn squirrel_away<Mode: WalkMode>(\n\n opt_oeh: 
Option<OutEnvHandle<Mode>>,\n\n more_env: <Mode::D as Dir>::Out,\n\n) {\n\n if let Some(oeh) = opt_oeh {\n\n let new_env = oeh.borrow().set_assoc(&Mode::out_as_env(more_env));\n\n *oeh.borrow_mut() = new_env;\n\n }\n\n}\n\n\n\n/// Package containing enough information to walk the subforms of some form on-demand.\n\n///\n\n/// It is safe to have unwalkable subforms, as long as nothing ever refers to them.\n\n///\n\n/// Contents probably shouldn't be `pub`...\n\n#[derive(Debug, Clone)]\n\npub struct LazyWalkReses<Mode: WalkMode> {\n\n /// Things that we have walked and that we might walk\n\n pub parts: EnvMBE<Rc<LazilyWalkedTerm<Mode>>>,\n\n /// The environment of the overall walk.\n", "file_path": "src/ast_walk.rs", "rank": 83, "score": 140406.90283867856 }, { "content": "pub fn erase_type(tv: &TypedValue) -> Value { tv.val.clone() }\n", "file_path": "src/runtime/core_values.rs", "rank": 84, "score": 138611.45909920658 }, { "content": "/// Generate a (depth-1) unquoting form.\n\n/// `pos_quot` is true iff the quotation itself (and thus the interpolation) is positive.\n\npub fn unquote(nt: Name, pos_quot: bool) -> Rc<FormPat> {\n\n Rc::new(FormPat::Scope(\n\n unquote_form(nt, pos_quot, 1),\n\n if pos_quot {\n\n crate::beta::ExportBeta::Nothing\n\n } else {\n\n crate::beta::ExportBeta::Use(n(\"body\"))\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/core_qq_forms.rs", "rank": 85, "score": 137736.00110803996 }, { "content": "pub fn sp<T>(t: T, a: crate::ast::Ast) -> Spanned<T> { Spanned { loc: a, body: t } }\n\n\n\nimpl<T: Display> Spanned<T> {\n\n pub fn emit_to_writer(&self, mut writer: &mut dyn WriteColor) {\n\n let diagnostic =\n\n Diagnostic::error().with_message(format!(\"{}\", self.body)).with_labels(vec![\n\n Label::primary(self.loc.0.file_id, self.loc.0.begin..self.loc.0.end),\n\n ]);\n\n\n\n let config = codespan_reporting::term::Config::default();\n\n\n\n if let Err(_) = crate::earley::files.with(|f| {\n\n codespan_reporting::term::emit(&mut writer, &config, 
&*f.borrow(), &diagnostic)\n\n }) {\n\n writer.write(format!(\"[NO FILE] {} at {}\", self.body, self.loc).as_bytes()).unwrap();\n\n }\n\n }\n\n\n\n pub fn emit(&self) {\n\n let mut writer = StandardStream::stderr(ColorChoice::Always);\n", "file_path": "src/util/err.rs", "rank": 86, "score": 136930.47192538166 }, { "content": "#[test]\n\nfn struct_subtyping() {\n\n // Trivial struct subtying:\n\n assert_m!(\n\n must_subtype(\n\n &ast!( { \"Type\" \"struct\" :\n\n \"component_name\" => [@\"c\" \"a\", \"b\"],\n\n \"component\" => [@\"c\" {\"Type\" \"Int\" :}, {\"Type\" \"Nat\" :}]}),\n\n &ast!( { \"Type\" \"struct\" :\n\n \"component_name\" => [@\"c\" \"a\", \"b\"],\n\n \"component\" => [@\"c\" {\"Type\" \"Int\" :}, {\"Type\" \"Nat\" :}]}),\n\n Assoc::new()\n\n ),\n\n Ok(_)\n\n );\n\n\n\n // Add a component:\n\n assert_m!(\n\n must_subtype(\n\n &ast!( { \"Type\" \"struct\" :\n\n \"component_name\" => [@\"c\" \"a\", \"b\"],\n", "file_path": "src/ty_compare.rs", "rank": 87, "score": 135980.34259514778 }, { "content": "pub fn ace_rules(se: &SynEnv) -> String {\n\n let mut categories = vec![];\n\n let mut keyword_operators = vec![];\n\n for (_, nt_grammar) in se.iter_pairs() {\n\n // Separate \"keyword.operator\" out; there are so many of them.\n\n // TODO: The principled thing to do would be to do this for each name...\n\n let (keyword_operator, mut normal) = nt_grammar\n\n .textmate_categories()\n\n .into_iter()\n\n .partition(|(_, cat)| cat == \"keyword.operator\");\n\n categories.append(&mut normal);\n\n keyword_operators.append(&mut keyword_operator.into_iter().map(|(pat, _)| pat).collect());\n\n }\n\n\n\n keyword_operators.sort();\n\n keyword_operators.dedup();\n\n\n\n // Make one big rule for all of them (will perform better, probably):\n\n categories.push((keyword_operators.join(\"|\"), \"keyword.operator\".to_string()));\n\n\n", "file_path": "src/highlighter_generation.rs", "rank": 88, "score": 134460.28985882096 }, { "content": "pub fn find(nt: &str, name: &str) 
-> Rc<Form> { core_forms.with(|cf| find_form(cf, nt, name)) }\n\n\n", "file_path": "src/core_forms.rs", "rank": 89, "score": 132757.35513809175 }, { "content": "#[test]\n\nfn use__let_type() {\n\n // Basic usage:\n\n assert_eq!(\n\n synth_type(\n\n &ast!( { \"Expr\" \"let_type\" :\n\n \"type_name\" => [@\"t\" \"T\"],\n\n \"type_def\" => [@\"t\" { \"Type\" \"Nat\" :}],\n\n \"body\" => (import [* [\"type_name\" = \"type_def\"]] { \"Expr\" \"lambda\" :\n\n \"param\" => [@\"p\" \"x\"],\n\n \"p_t\" => [@\"p\" (vr \"T\")],\n\n \"body\" => (import [* [\"param\" : \"p_t\"]] (vr \"x\"))\n\n })\n\n }),\n\n Assoc::new()\n\n ),\n\n Ok(ast!( { \"Type\" \"fn\" : \"param\" => [ {\"Type\" \"Nat\" :}], \"ret\" => {\"Type\" \"Nat\" :}}))\n\n );\n\n\n\n // useless type, but self-referential:\n\n let trivial_mu_type = ast!( { \"Type\" \"mu_type\" : \"param\" => [(import [prot \"param\"] (vr \"T\"))],\n", "file_path": "src/core_forms.rs", "rank": 90, "score": 131898.501045393 }, { "content": "// Inserts a new form into a grammar in the \"sensible\" place\n\n// (underneath any number of `Biased`s, as a new arm of an `Alt`).\n\n// Macros will usually want to do this to extend an existing NT.\n\npub fn insert_form_pat(se: &SynEnv, nt: Name, f: &FormPat) -> SynEnv {\n\n let nt_form: Rc<FormPat> = se.find_or_panic(&nt).clone();\n\n\n\n se.set(nt, Rc::new(add_form_at_the_alt(nt_form, f).unwrap()))\n\n}\n\n\n", "file_path": "src/core_forms.rs", "rank": 91, "score": 130054.61502141602 }, { "content": "pub fn unparse_mbe(pat: &FormPat, actl: &AstContents, context: &EnvMBE<Ast>, s: &SynEnv) -> String {\n\n // HACK: handle underdetermined forms\n\n let undet = crate::ty_compare::underdetermined_form.with(|u| u.clone());\n\n match actl {\n\n Node(form, body, _) if form == &undet => {\n\n return crate::ty_compare::unification.with(|unif| {\n\n let var = body.get_leaf_or_panic(&n(\"id\")).to_name();\n\n let looked_up = unif.borrow().get(&var).cloned();\n\n match looked_up {\n\n // Apparently the 
environment is recursive; `{}`ing it stack-overflows\n\n Some(ref clo) => {\n\n format!(\"{} in some environment\", clo.it /* , {:#?} clo.env */)\n\n }\n\n None => format!(\"¿{}?\", var),\n\n }\n\n });\n\n }\n\n _ => {}\n\n }\n\n\n", "file_path": "src/unparse.rs", "rank": 92, "score": 129274.7481730676 }, { "content": "// We keep a table, keyed on leaf names and actual atoms, to keep track of the freshening.\n\n// This means that shadowing in leaf-named atom set doesn't get separated.\n\n// (e.g. `.[a : Int a : Int . ⋯].` freshens to `.[a🍅 : Int a🍅 : Int . ⋯].`).\n\n// As long as betas can't select a different shadowing direction, this isn't a problem.\n\npub fn freshening_from_beta(\n\n b: &Beta,\n\n parts: &EnvMBE<crate::ast::Ast>,\n\n memo: &mut std::collections::HashMap<(Name, Name), Name>,\n\n) -> Assoc<Name, Ast> {\n\n match *b {\n\n Nothing => Assoc::new(),\n\n Shadow(ref lhs, ref rhs) => freshening_from_beta(&*lhs, parts, memo)\n\n .set_assoc(&freshening_from_beta(&*rhs, parts, memo)),\n\n ShadowAll(ref sub_beta, ref drivers) => {\n\n let mut res = Assoc::new();\n\n for parts in parts.march_all(drivers) {\n\n res = res.set_assoc(&freshening_from_beta(&*sub_beta, &parts, memo));\n\n }\n\n res\n\n }\n\n Protected(_n_s) => unimplemented!(\"Not hard, just not used yet\"),\n\n // TODO: n_s isn't necessarily just one name in the `SameAs` case! 
This is an ICP for sure.\n\n Basic(n_s, _) | SameAs(n_s, _) | Underspecified(n_s) | BoundButNotUsable(n_s) => {\n\n let this_name = parts.get_leaf_or_panic(&n_s).to_name();\n", "file_path": "src/beta.rs", "rank": 93, "score": 128316.2828120588 }, { "content": "#[test]\n\nfn eval_string_operations() {\n\n let mut prelude = core_values();\n\n\n\n prelude = prelude.set(n(\"first\"), val!(s \"Frederick\"));\n\n prelude = prelude.set(n(\"last\"), val!(s \"Douglass\"));\n\n\n\n assert_eq!(\n\n eval(&u!({apply : concat [first; last]}), prelude.clone()),\n\n Ok(val!(s \"FrederickDouglass\"))\n\n );\n\n\n\n prelude = prelude.set(n(\"names\"), val!(seq (s \"Frederick\") (s \"Douglass\")));\n\n prelude = prelude.set(n(\"space\"), val!(s \" \"));\n\n\n\n assert_eq!(\n\n eval(&u!({apply : join [names; space]}), prelude.clone()),\n\n Ok(val!(s \"Frederick Douglass\"))\n\n );\n\n}\n\n\n", "file_path": "src/runtime/core_values.rs", "rank": 94, "score": 127265.87744750331 }, { "content": "#[test]\n\nfn eval_sequence_operations() {\n\n let mut prelude = core_values();\n\n\n\n assert_eq!(eval(&u!({apply : len [empty]}), prelude.clone()), Ok(val!(i 0)));\n\n\n\n assert_eq!(\n\n eval(&u!({apply : push [{apply : push [empty ; one]} ; two]}), prelude.clone()),\n\n Ok(val!(seq (i 1) (i 2)))\n\n );\n\n\n\n prelude = prelude.set(n(\"one_two\"), val!(seq (i 1) (i 2)));\n\n\n\n assert_eq!(eval(&u!({apply : index [one_two ; one]}), prelude.clone()), Ok(val!(i 2)));\n\n\n\n assert_eq!(\n\n eval(&u!({apply : map [one_two ; (, ast!((vr \"zero?\"))) ]}), prelude.clone()),\n\n Ok(val!(seq (b false) (b false)))\n\n );\n\n\n\n assert_eq!(eval(&u!({apply : foldl [one_two ; zero ; plus ]}), prelude.clone()), Ok(val!(i 3)));\n\n}\n\n\n", "file_path": "src/runtime/core_values.rs", "rank": 95, "score": 127265.87744750331 }, { "content": "#[test]\n\nfn eval_cell_operations() {\n\n let prelude = core_values().set(n(\"c\"), val!(cell (i 5)));\n\n\n\n assert_eq!(\n\n eval(\n\n &u!(\n\n {block :\n\n [(~ {apply 
: assign [c ; {apply : plus [one ; {apply : value [c]}]}]})]\n\n {apply : value [c]}\n\n }),\n\n prelude.clone()\n\n ),\n\n Ok(val!(i 6))\n\n );\n\n}\n", "file_path": "src/runtime/core_values.rs", "rank": 96, "score": 127265.87744750331 }, { "content": "pub fn unquote_form(nt: Name, pos_quot: bool, depth: u8) -> Rc<Form> {\n\n let form_delim_start = &format!(\"{}[\", \",\".repeat(depth as usize));\n\n\n\n Rc::new(Form {\n\n name: n(\"unquote\"),\n\n grammar:\n\n // It's a pain to determine whether type annotation is needed at syntax time,\n\n // so it's optional\n\n Rc::new(if pos_quot {\n\n form_pat!((delim form_delim_start, \"[\",\n\n [(named \"nt\", (anyways (vr nt))),\n\n (alt\n\n [],\n\n [(name_lit__by_name nt),\n\n (call \"DefaultSeparator\"), (scan r\"(<)\"),\n\n (named \"ty_annot\", (call \"Type\")),\n\n (call \"DefaultSeparator\"), (scan r\"(>)\"), (lit \"|\")]),\n\n (named \"body\", (-- depth (call \"Expr\")))]))\n\n } else {\n\n form_pat!((delim form_delim_start, \"[\",\n", "file_path": "src/core_qq_forms.rs", "rank": 97, "score": 127226.71489203404 }, { "content": "pub fn find_type(form_name: &str) -> Rc<Form> {\n\n core_type_forms.with(|ctf| crate::core_forms::find_form(ctf, \"Type\", form_name))\n\n}\n\n\n", "file_path": "src/core_type_forms.rs", "rank": 98, "score": 125502.01726062782 }, { "content": "// Like just taking the keys from `env_from_export_beta`, but faster and non-failing\n\npub fn bound_from_export_beta(\n\n b: &ExportBeta,\n\n parts: &EnvMBE<crate::ast::Ast>,\n\n quote_depth: i16,\n\n) -> Vec<Name> {\n\n match *b {\n\n ExportBeta::Nothing => vec![],\n\n ExportBeta::Shadow(ref lhs, ref rhs) => {\n\n let mut res = bound_from_export_beta(&*lhs, parts, quote_depth);\n\n let mut res_r = bound_from_export_beta(&*rhs, parts, quote_depth);\n\n res.append(&mut res_r);\n\n res\n\n }\n\n ExportBeta::ShadowAll(ref sub_beta, ref drivers) => {\n\n let mut res = vec![];\n\n for sub_parts in &parts.march_all(drivers) {\n\n res.append(&mut 
bound_from_export_beta(&*sub_beta, sub_parts, quote_depth));\n\n }\n\n res\n\n }\n\n ExportBeta::Use(ref n_s) => {\n\n // Can be a non-atom\n\n names_exported_by(parts.get_leaf_or_panic(n_s), quote_depth)\n\n }\n\n }\n\n}\n\n\n\n// TODO NOW: make this return the atom-freshened node (possibly freshening recursive nodes)\n\n\n", "file_path": "src/beta.rs", "rank": 99, "score": 125323.95314150842 } ]
Rust
ast/src/child_iters/recursive_children_iter.rs
Robbepop/stevia
5f0132ef8fa0826a4cadfe07622cd594c31c57e9
use crate::{ AnyExpr, iter::{ Children, ChildrenIter, }, }; use std::iter::Iterator; pub fn children_recursive_with_event(expr: &AnyExpr) -> RecursiveChildrenIter { RecursiveChildrenIter::new(expr) } pub fn children_recursive_entering(expr: &AnyExpr) -> impl Iterator<Item = &AnyExpr> { children_recursive_with_event(expr).filter_map(|expr_and_event| match expr_and_event.event { YieldEvent::Entering => Some(expr_and_event.expr), YieldEvent::Leaving => None, }) } pub fn children_recursive_leaving(expr: &AnyExpr) -> impl Iterator<Item = &AnyExpr> { children_recursive_with_event(expr).filter_map(|expr_and_event| match expr_and_event.event { YieldEvent::Leaving => Some(expr_and_event.expr), YieldEvent::Entering => None, }) } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum YieldEvent { Entering, Leaving, } #[derive(Debug, Clone)] pub struct RecursiveChildrenIter<'it> { frames: Vec<Frame<'it>>, next: Option<&'it AnyExpr> } #[derive(Debug, Clone)] struct Frame<'it> { guard: &'it AnyExpr, incoming: ChildrenIter<'it> } impl<'it> Frame<'it> { #[inline] pub fn new(expr: &'it AnyExpr) -> Self { Frame { guard: expr, incoming: expr.children() } } #[inline] pub fn guard(self) -> &'it AnyExpr { self.guard } } impl<'it> Iterator for Frame<'it> { type Item = &'it AnyExpr; #[inline] fn next(&mut self) -> Option<Self::Item> { self.incoming.next() } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct AnyExprAndEvent<'it> { pub expr: &'it AnyExpr, pub event: YieldEvent, } impl<'it> AnyExprAndEvent<'it> { #[inline] pub fn entering(expr: &AnyExpr) -> AnyExprAndEvent { AnyExprAndEvent { event: YieldEvent::Entering, expr, } } #[inline] pub fn leaving(expr: &AnyExpr) -> AnyExprAndEvent { AnyExprAndEvent { event: YieldEvent::Leaving, expr, } } } impl<'it> RecursiveChildrenIter<'it> { pub fn new(expr: &AnyExpr) -> RecursiveChildrenIter { RecursiveChildrenIter { frames: Vec::new(), next: Some(expr) } } } impl<'it> Iterator for RecursiveChildrenIter<'it> { type Item = 
AnyExprAndEvent<'it>; fn next(&mut self) -> Option<Self::Item> { if let Some(next) = self.next { let mut frame = Frame::new(next); self.next = frame.next(); self.frames.push(frame); return Some(AnyExprAndEvent::entering(next)) } if let Some(frame) = self.frames.pop() { let guard = frame.guard(); self.next = self.frames .last_mut() .and_then(Iterator::next); return Some(AnyExprAndEvent::leaving(guard)) } None } } #[cfg(test)] mod tests { use super::*; use crate::PlainExprTreeBuilder; #[test] fn simple() { fn create_ast() -> AnyExpr { let b = PlainExprTreeBuilder::default(); b.or( b.and(b.bool_const(true), b.bool_const(false)), b.xor(b.bool_const(false), b.bool_const(true)), ).unwrap() } let b = PlainExprTreeBuilder::default(); let expr = create_ast(); let mut rec_iter = children_recursive_with_event(&expr); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::entering(&create_ast())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::entering(&b .and(b.bool_const(true), b.bool_const(false)) .unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::entering(&b.bool_const(true).unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::leaving(&b.bool_const(true).unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::entering(&b.bool_const(false).unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::leaving(&b.bool_const(false).unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::leaving(&b .and(b.bool_const(true), b.bool_const(false)) .unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::entering(&b .xor(b.bool_const(false), b.bool_const(true)) .unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::entering(&b.bool_const(false).unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::leaving(&b.bool_const(false).unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::entering(&b.bool_const(true).unwrap())) ); assert_eq!( rec_iter.next(), 
Some(AnyExprAndEvent::leaving(&b.bool_const(true).unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::leaving(&b .xor(b.bool_const(false), b.bool_const(true)) .unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::leaving(&create_ast())) ); assert_eq!(rec_iter.next(), None); } }
use crate::{ AnyExpr, iter::{ Children, ChildrenIter, }, }; use std::iter::Iterator; pub fn children_recursive_with_event(expr: &AnyExpr) -> RecursiveChildrenIter { RecursiveChildrenIter::new(expr) } pub fn children_recursive_entering(expr: &AnyExpr) -> impl Iterator<Item = &AnyExpr> { children_recursive_with_event(expr).filter_map(|expr_and_event| match expr_and_event.event { YieldEvent::Entering => Some(expr_and_event.expr), YieldEvent::Leaving => None, }) } pub fn children_recursive_leaving(expr: &AnyExpr) -> impl Iterator<Item = &AnyExpr> { children_recursive_with_event(expr).filter_map(|expr_and_event| match expr_and_event.event { YieldEvent::Leaving => Some(expr_and_event.expr), YieldEvent::Entering => None, }) } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum YieldEvent { Entering, Leaving, } #[derive(Debug, Clone)] pub struct RecursiveChildrenIter<'it> { frames: Vec<Frame<'it>>, next: Option<&'it AnyExpr> } #[derive(Debug, Clone)] struct Frame<'it> { guard: &'it AnyExpr, incoming: ChildrenIter<'it> } impl<'it> Frame<'it> { #[inline] pub fn new(expr: &'it AnyExpr) -> Self { Frame { guard: expr, incoming: expr.children() } } #[inline] pub fn guard(self) -> &'it AnyExpr { self.guard } } impl<'it> Iterator for Frame<'it> { type Item = &'it AnyExpr; #[inline] fn next(&mut self) -> Option<Self::Item> { self.incoming.next() } } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub struct AnyExprAndEvent<'it> { pub expr: &'it AnyExpr, pub event: YieldEvent, } impl<'it> AnyExprAndEvent<'it> { #[inline] pub fn entering(expr: &AnyExpr) -> AnyExprAndEvent { AnyExprAndEvent { event: YieldEvent::Entering, expr, } } #[inline] pub fn leaving(expr: &AnyExpr) -> AnyExprAndEvent { AnyExprAndEvent { event: YieldEvent::Leaving, expr, } } } impl<'it> RecursiveChildrenIter<'it> { pub fn new(expr: &AnyExpr) -> RecursiveChildrenIter { RecursiveChildrenIter { frames: Vec::new(), next: Some(expr) } } } impl<'it> Iterator for RecursiveChildrenIter<'it> { type Item = 
AnyExprAndEvent<'it>; fn next(&mut self) -> Option<Self::Item> { if let Some(next) = self.next { let mut frame = Frame::new(next); self.next = frame.next(); self.frames.push(frame); return Some(AnyExprAndEvent::entering(next)) }
None } } #[cfg(test)] mod tests { use super::*; use crate::PlainExprTreeBuilder; #[test] fn simple() { fn create_ast() -> AnyExpr { let b = PlainExprTreeBuilder::default(); b.or( b.and(b.bool_const(true), b.bool_const(false)), b.xor(b.bool_const(false), b.bool_const(true)), ).unwrap() } let b = PlainExprTreeBuilder::default(); let expr = create_ast(); let mut rec_iter = children_recursive_with_event(&expr); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::entering(&create_ast())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::entering(&b .and(b.bool_const(true), b.bool_const(false)) .unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::entering(&b.bool_const(true).unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::leaving(&b.bool_const(true).unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::entering(&b.bool_const(false).unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::leaving(&b.bool_const(false).unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::leaving(&b .and(b.bool_const(true), b.bool_const(false)) .unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::entering(&b .xor(b.bool_const(false), b.bool_const(true)) .unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::entering(&b.bool_const(false).unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::leaving(&b.bool_const(false).unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::entering(&b.bool_const(true).unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::leaving(&b.bool_const(true).unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::leaving(&b .xor(b.bool_const(false), b.bool_const(true)) .unwrap())) ); assert_eq!( rec_iter.next(), Some(AnyExprAndEvent::leaving(&create_ast())) ); assert_eq!(rec_iter.next(), None); } }
if let Some(frame) = self.frames.pop() { let guard = frame.guard(); self.next = self.frames .last_mut() .and_then(Iterator::next); return Some(AnyExprAndEvent::leaving(guard)) }
if_condition
[ { "content": "/// A simple marker to mark expression types as such\n\n/// and provide them with an expression kind.\n\npub trait ExprMarker: fmt::Debug + Copy + Clone + PartialEq + Eq {\n\n /// The static kind of the expression.\n\n const EXPR_KIND: ExprKind;\n\n}\n\n\n\npub use self::{\n\n context::{\n\n ArcContext,\n\n Context,\n\n ContextAnd,\n\n SymbolInterner,\n\n SymbolIdGenerator,\n\n TypeMap\n\n },\n\n consistency_checker::{\n\n AssertConsistency,\n\n assert_consistency_recursively\n\n },\n\n error::{\n\n ExprError,\n", "file_path": "ast/src/lib.rs", "rank": 3, "score": 255596.6689459125 }, { "content": "pub fn forward_transform_any_expr_into<T>(transformer: &T, expr: AnyExpr, event: TransformEvent) -> TransformOutcome\n\n where T: Transformer\n\n{\n\n use self::AnyExpr::*;\n\n match expr {\n\n IfThenElse(expr) => transformer.transform_cond_with_event(expr, event),\n\n Symbol(expr) => transformer.transform_var_with_event(expr, event),\n\n BoolConst(expr) => transformer.transform_bool_const_with_event(expr, event),\n\n BoolEquals(expr) => transformer.transform_bool_equals_with_event(expr, event),\n\n Not(expr) => transformer.transform_not_with_event(expr, event),\n\n And(expr) => transformer.transform_and_with_event(expr, event),\n\n Or(expr) => transformer.transform_or_with_event(expr, event),\n\n Xor(expr) => transformer.transform_xor_with_event(expr, event),\n\n Implies(expr) => transformer.transform_implies_with_event(expr, event),\n\n ArrayRead(expr) => transformer.transform_array_read_with_event(expr, event),\n\n ArrayWrite(expr) => transformer.transform_array_write_with_event(expr, event),\n\n Add(expr) => transformer.transform_add_with_event(expr, event),\n\n BitvecConst(expr) => transformer.transform_bitvec_const_with_event(expr, event),\n\n Mul(expr) => transformer.transform_mul_with_event(expr, event),\n\n Neg(expr) => transformer.transform_neg_with_event(expr, event),\n", "file_path": "ast/src/transformer.rs", "rank": 4, "score": 
232446.43525595253 }, { "content": "/// Propagates the constraints that have already been seen recursively through the\n\n/// given expression tree.\n\n/// Upon encountering another if-then-else structure the propagation is split for\n\n/// then-case and else-case respectively and propagated further.\n\nfn propagate_if_constraint<'e>(expr: &'e mut AnyExpr, seen: &mut HashMap<&'e AnyExpr, bool>) -> TransformEffect {\n\n // Replace the current expression with a constant value if it was already seen.\n\n // \n\n // Since conditions of if-expressions are always of boolean type this replacement\n\n // is only applicable for boolean expressions.\n\n if expr.ty() == Type::Bool && seen.contains_key(&*expr) {\n\n let polarity: bool = *seen.get(&*expr).unwrap();\n\n *expr = AnyExpr::from(expr::BoolConst::from(polarity));\n\n return TransformEffect::Transformed\n\n }\n\n // For If-Then-Else expressions split traversing and memorize its condition if its\n\n // condition wasn't already seen.\n\n if let AnyExpr::IfThenElse(cond) = expr {\n\n if !seen.contains_key(&cond.children.cond) {\n\n return split_if_costraint(cond, seen)\n\n }\n\n // Since `expr` was already destructed it cannot be used in the code below\n\n // so we need to state an exit strategy for this execution branch.\n\n let mut effect = TransformEffect::Identity;\n\n for child in cond.children_mut() {\n", "file_path": "simplifier/src/simplifications/if_constraint_prop.rs", "rank": 6, "score": 221835.4190835504 }, { "content": "/// Called upon encountering a new if-then-else construct during if-constraint propagation.\n\n/// \n\n/// This effectively splits the seen map into a then-case and else-case for true and false\n\n/// polarity of the condition respectively and continues to traverse recursively through\n\n/// the given expression sub tree.\n\nfn split_if_costraint<'e>(cond: &'e mut expr::IfThenElse, seen: &mut HashMap<&'e AnyExpr, bool>) -> TransformEffect {\n\n let (cond, then_case, else_case) = 
cond.as_children_tuple_mut();\n\n let mut effect = TransformEffect::Identity;\n\n\n\n // Traverse through then-case with `true` polarity.\n\n seen.insert(&*cond, true);\n\n effect |= propagate_if_constraint(then_case, seen);\n\n\n\n // Traverse through else-case with `false` polarity.\n\n // seen.upsert(&*cond, || false, |v| *v = false);\n\n *seen.get_mut(&*cond).unwrap() = false;\n\n effect |= propagate_if_constraint(else_case, seen);\n\n\n\n // Remove item from moved-only hashtable to not confuse other siblings\n\n // of the expression tree.\n\n seen.remove(&*cond);\n\n\n\n effect\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/if_constraint_prop.rs", "rank": 7, "score": 202171.6738062043 }, { "content": "/// Asserts that the given expression is of the expected concrete type.\n\npub fn expr_expect_type<T, E>(expected_ty: T, expr: &E) -> ExprResult<()>\n\nwhere\n\n\tT: Into<Type>,\n\n\tE: Into<AnyExpr> + Clone + HasType + fmt::Debug,\n\n{\n\n\tlet expected_ty = expected_ty.into();\n\n\tlet actual_ty = expr.ty();\n\n\tif actual_ty != expected_ty {\n\n\t\treturn Err(TypeError::unexpected_type(expected_ty, actual_ty))\n\n\t\t\t.map_err(ExprError::from)\n\n\t\t\t.map_err(|e| e.context_expr(\"Expression with unexpected type\", expr.clone().into()));\n\n\t}\n\n\tOk(())\n\n}\n\n\n", "file_path": "ast/src/error.rs", "rank": 8, "score": 192429.69472320357 }, { "content": "/// Asserts that all child expressions of the given expression are of the\n\n/// given expected concrete type.\n\npub fn expr_expect_type_n<T, E>(expected_ty: T, expr: &E) -> ExprResult<()>\n\nwhere\n\n\tT: Into<Type>,\n\n\tE: Children,\n\n{\n\n\tlet expected_ty = expected_ty.into();\n\n\tfor (n, child) in expr.children().enumerate() {\n\n\t\texpr_expect_type(expected_ty, child)\n\n\t\t\t.map_err(|e| {\n\n\t\t\t\te.context_msg(format!(\n\n\t\t\t\t\t\"Child expression with unexpected type at index {:?}.\",\n\n\t\t\t\t\tn\n\n\t\t\t\t))\n\n\t\t\t})?;\n\n\t}\n\n\tOk(())\n\n}\n\n\n", "file_path": 
"ast/src/error.rs", "rank": 9, "score": 192429.51721399784 }, { "content": "/// Asserts that the given expression has at least the expected minimum number of child expressions.\n\npub fn expr_expect_min_arity<E>(min_req_children: usize, expr: &E) -> ExprResult<()>\n\nwhere\n\n\tE: HasArity,\n\n{\n\n\tlet actual_children = expr.arity();\n\n\tif actual_children < min_req_children {\n\n\t\treturn Err(ExprError::too_few_children(\n\n\t\t\tmin_req_children,\n\n\t\t\tactual_children,\n\n\t\t));\n\n\t}\n\n\tOk(())\n\n}\n", "file_path": "ast/src/error.rs", "rank": 10, "score": 192303.2106961542 }, { "content": "/// Writes the given expression tree into the given writer in the SMTLib2 syntax format.\n\npub fn write_smtlib2<'e, E>(ctx: &Context, out: &mut fmt::Write, expr: E)\n\n where E: Into<&'e AnyExpr>\n\n{\n\n let expr = expr.into();\n\n SMTLibWriter::new(ctx, out).write_expr(expr)\n\n}\n\n\n", "file_path": "ast/src/writer.rs", "rank": 11, "score": 188038.68456235688 }, { "content": "/// Types that implement this trait allow to traverse their children mutably.\n\npub trait ChildrenMut {\n\n /// Iterates over the child expressions of `self` mutably.\n\n\t#[inline]\n\n fn children_mut(&mut self) -> ChildrenIterMut {\n\n\t\tChildrenIterMut::from_slice(self.children_slice_mut())\n\n\t}\n\n\n\n\tfn children_slice_mut(&mut self) -> &mut [AnyExpr];\n\n}\n\n\n", "file_path": "ast/src/child_iters/traits.rs", "rank": 12, "score": 182587.4364185595 }, { "content": "/// Types that implement this trait allow to traverse their children immutably.\n\npub trait Children {\n\n /// Iterates over the child expressions of `self` immutably.\n\n\t#[inline]\n\n fn children(&self) -> ChildrenIter {\n\n\t\tChildrenIter::from_slice(self.children_slice())\n\n\t}\n\n\n\n\tfn children_slice(&self) -> &[AnyExpr];\n\n}\n\n\n", "file_path": "ast/src/child_iters/traits.rs", "rank": 13, "score": 175330.16571445638 }, { "content": "/// Returns the accumulated arity of the given entity and all of its 
children recursively.\n\n/// \n\n/// This is used to identify complex expressions with many recursive child expressions.\n\npub fn recursive_arity<T>(expr: &T) -> usize\n\n where T: HasArity + Children\n\n{\n\n 1 + expr.children().map(|c| recursive_arity(c)).sum::<usize>()\n\n}\n\n\n", "file_path": "ast/src/arity.rs", "rank": 14, "score": 166902.26002224078 }, { "content": "/// Returns `true` if `lhs` and `rhs` share at least one same child expression.\n\nfn have_overlapping_children(lhs: &AnyExpr, rhs: &AnyExpr) -> bool {\n\n let seen = lhs.children().collect::<HashSet<_>>();\n\n for child in rhs.children() {\n\n if seen.contains(child) {\n\n return true\n\n }\n\n }\n\n false\n\n}\n\n\n\nmacro_rules! impl_join_equalities_for {\n\n ($eq_type:ident, $name:ident) => {\n\n fn $name(and: expr::And) -> TransformOutcome {\n\n // Separate equality expressions from the rest of the children.\n\n let (mut eqs, mut rest): (Vec<_>, Vec<_>) = and.into_children().partition_map(|c| {\n\n match c {\n\n AnyExpr::$eq_type(eq) => Either::Left(eq),\n\n other => Either::Right(other)\n\n }\n\n });\n", "file_path": "simplifier/src/simplifications/equality_joiner.rs", "rank": 15, "score": 164114.25025279494 }, { "content": "fn establish_ordering<E>(expr: &mut E) -> NormalizeFlag\n\n where E: Children + SortChildren\n\n{\n\n if is_sorted_norm(expr.children()) {\n\n return NormalizeFlag::Idle\n\n }\n\n expr.sort_children_by(normalization_cmp);\n\n // expr.children_vec_mut()\n\n // .sort_unstable_by(normalization_cmp);\n\n NormalizeFlag::Success\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/normalizer.rs", "rank": 16, "score": 162774.98944358644 }, { "content": "fn remove_duplicates<E>(expr: &mut E) -> NormalizeFlag\n\n where E: DedupChildren + HasArity\n\n{\n\n let arity_before = expr.arity();\n\n // expr.children_vec_mut().dedup();\n\n expr.dedup_children();\n\n let arity_after = expr.arity();\n\n assert!(arity_after <= arity_before);\n\n if arity_before != arity_after {\n\n 
return NormalizeFlag::Success\n\n }\n\n NormalizeFlag::Idle\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/normalizer.rs", "rank": 17, "score": 162774.98944358644 }, { "content": "fn collect_like_terms(add: expr::Add) -> HashMap<AnyExpr, Bitvec> {\n\n let width = add.bitvec_ty.width();\n\n let mut like_terms: HashMap<AnyExpr, Bitvec> = HashMap::new();\n\n let mut update_seen = |expr: AnyExpr, occurence: Bitvec| {\n\n match like_terms.entry(expr) {\n\n Entry::Occupied(mut occupied) => {\n\n occupied.get_mut()\n\n .bvadd_mut(&occurence)\n\n .unwrap();\n\n }\n\n Entry::Vacant(vacant) => {\n\n vacant.insert(occurence);\n\n }\n\n }\n\n };\n\n for child in add.into_children() {\n\n match child {\n\n AnyExpr::Neg(neg) => update_seen(neg.into_single_child(), Bitvec::all_set(width)),\n\n AnyExpr::Mul(mul) => {\n\n if (mul.arity() != 2) || (mul.children().filter(|c| c.kind() == ExprKind::BitvecConst).count() != 1) {\n", "file_path": "simplifier/src/simplifications/like_term_joiner.rs", "rank": 18, "score": 161673.06376848812 }, { "content": "/// Simplifies the given expression until no further simplification can be applied.\n\npub fn simplify<'e, E>(ctx: &Context, expr: E)\n\n where E: Into<&'e mut AnyExpr>\n\n{\n\n Simplifier::from(ctx).exhaustive_simplify(expr.into())\n\n}\n\n\n\nmodular_ast_transformer! 
{\n\n #[derive(Default)]\n\n struct SimplifierTransformer {\n\n _00: simplifications::InvolutionSimplifier,\n\n _01: simplifications::ComparisonReducer,\n\n _02: simplifications::BoolConstPropagator,\n\n _03: simplifications::BoolSymbolicSolver,\n\n _04: simplifications::BoolReducer,\n\n _05: simplifications::Normalizer,\n\n _06: simplifications::Flattener,\n\n _07: simplifications::TermConstPropagator,\n\n _08: simplifications::TermReducer,\n\n _09: simplifications::LikeTermJoiner,\n\n _10: simplifications::IfConstraintPropagator\n", "file_path": "simplifier/src/base.rs", "rank": 19, "score": 160294.72409915537 }, { "content": "/// Checks if the given typed params share the same type.\n\n///\n\n/// # Returns\n\n///\n\n/// The type of both typed params.\n\n///\n\n/// # Errors\n\n///\n\n/// - If the given typed params do not have the same type.\n\npub fn expect_common_ty<T1, T2>(lhs: &T1, rhs: &T2) -> TypeResult<Type>\n\nwhere\n\n T1: HasType,\n\n T2: HasType\n\n{\n\n use self::Type::{Bitvec, Array};\n\n match (lhs.ty(), rhs.ty()) {\n\n (Type::Bool, Type::Bool) => Ok(Type::Bool),\n\n (Bitvec(b1), Bitvec(b2)) if b1 == b2 => Ok(Bitvec(b1)),\n\n (Array(a1), Array(a2)) if a1 == a2 => Ok(Array(a1)),\n\n _ => Err(TypeError::type_mismatch(lhs.ty(), rhs.ty()))\n\n }\n\n}\n\n\n", "file_path": "ast/src/ty/assert.rs", "rank": 20, "score": 151188.50832969503 }, { "content": "/// Returns `true` if the given expression tree exceeds a recursive arity of `min_arity`.\n\n/// \n\n/// # Note\n\n/// \n\n/// This operation is generally more efficient than querying for the same upper arity bound\n\n/// with `recursive_arity` and should be preferred.\n\npub fn exceeds_recursive_arity<T>(min_arity: usize, expr: &T) -> bool\n\n where T: HasArity + Children\n\n{\n\n fn exceeds_recursive_arity_of<T>(actual_arity: &mut usize, min_arity: usize, expr: &T) -> bool\n\n where T: HasArity + Children\n\n {\n\n *actual_arity += expr.arity();\n\n if *actual_arity >= min_arity {\n\n return true\n\n 
}\n\n for child in expr.children() {\n\n if exceeds_recursive_arity_of(actual_arity, min_arity, child) {\n\n return true\n\n }\n\n }\n\n false\n\n }\n\n exceeds_recursive_arity_of(&mut 0, min_arity, expr)\n\n}\n\n\n", "file_path": "ast/src/arity.rs", "rank": 21, "score": 149359.9060683285 }, { "content": "/// Types that implement this trait allow to be transformed\n\n/// into a consuming children iter.\n\npub trait IntoChildren {\n\n /// Transforms `self` into a consuming children iter.\n\n\t#[inline]\n\n fn into_children(self) -> IntoChildrenIter\n\n\twhere\n\n\t\tSelf: Sized\n\n\t{\n\n\t\tIntoChildrenIter::from_expr(self)\n\n\t}\n\n\n\n\tfn into_children_vec(self) -> Vec<AnyExpr>;\n\n}\n", "file_path": "ast/src/child_iters/traits.rs", "rank": 22, "score": 146876.5652695995 }, { "content": "pub fn scan_smtlib2(input: &str) -> TokenIter {\n\n debug_assert!(!input.is_empty()); // TODO: convert to error.\n\n TokenIter::new(raw_smtlib2_tokens(input))\n\n}\n\n\n", "file_path": "parser/src/lexer/simple_lexer.rs", "rank": 23, "score": 144651.33028964003 }, { "content": "/// Checks if the given typed param is of array type\n\n/// and returns its concrete array type if it is the case.\n\n///\n\n/// # Errors\n\n///\n\n/// - If the given typed param is not of array type.\n\npub fn expect_array_ty<T>(typed: &T) -> TypeResult<ArrayTy>\n\nwhere\n\n T: HasType,\n\n{\n\n match typed.ty() {\n\n Type::Array(array_ty) => Ok(array_ty),\n\n _ => Err(TypeError::unexpected_type_kind(\n\n TypeKind::Array,\n\n typed.ty(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "ast/src/ty/assert.rs", "rank": 24, "score": 144561.24197798828 }, { "content": "/// Checks if the given typed param is of bitvec type\n\n/// and returns its bit width if it is the case.\n\n///\n\n/// # Errors\n\n///\n\n/// - If the given typed param is not of bitvec type.\n\npub fn expect_bitvec_ty<T>(typed: &T) -> TypeResult<BitvecTy>\n\nwhere\n\n T: HasType,\n\n{\n\n match typed.ty() {\n\n Type::Bitvec(width) => Ok(width),\n\n _ 
=> Err(TypeError::unexpected_type_kind(\n\n TypeKind::Bitvec,\n\n typed.ty(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "ast/src/ty/assert.rs", "rank": 25, "score": 144561.150854765 }, { "content": "pub fn raw_smtlib2_tokens(input: &str) -> RawTokenIter {\n\n RawTokenIter::new(input)\n\n}\n\n\n\nuse std::str::CharIndices;\n\n\n", "file_path": "parser/src/lexer/raw_lexer.rs", "rank": 26, "score": 139454.34215619302 }, { "content": "/// Implement this to activate automatic default implementation\n\n/// of the `AnyTransformer` trait.\n\npub trait AutoImplAnyExprTransformer: Transformer {}\n\n\n", "file_path": "ast/src/transformer.rs", "rank": 27, "score": 137724.58231130568 }, { "content": "/// Checks if the given typed param is of bitvec type\n\n/// with the given expected bit width.\n\n///\n\n/// # Errors\n\n///\n\n/// - If the given typed param is not of bitvec type.\n\n/// - If the given typed param is of bitvec type but has not the expected bit width.\n\npub fn expect_type<T1, T2>(expected_ty: T1, found_ty: &T2) -> TypeResult<()>\n\nwhere\n\n T1: Into<Type>,\n\n T2: HasType\n\n{\n\n let expected_ty = expected_ty.into();\n\n if expected_ty != found_ty.ty() {\n\n return Err(TypeError::unexpected_type(\n\n expected_ty,\n\n found_ty.ty()\n\n ));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "ast/src/ty/assert.rs", "rank": 28, "score": 136285.1798918716 }, { "content": "/// Checks if the given iterator of typed items are all of the same bitvector type.\n\n///\n\n/// # Errors\n\n///\n\n/// - If the given iterator yields no elements.\n\n/// - If not all yielded typed items are of the same bitvector type.\n\npub fn expect_common_bitvec_ty_n<'t, I, T>(typed_vals: I) -> TypeResult<Option<BitvecTy>>\n\nwhere\n\n I: IntoIterator<Item = &'t T>,\n\n T: HasType + 't,\n\n{\n\n let mut typed_vals = typed_vals.into_iter();\n\n match typed_vals.next() {\n\n None => Ok(None),\n\n Some(ty) => {\n\n let head_bvty = expect_bitvec_ty(ty)?;\n\n for ty in typed_vals {\n\n 
expect_type(head_bvty, &ty.ty())?;\n\n }\n\n Ok(Some(head_bvty))\n\n }\n\n }\n\n}\n", "file_path": "ast/src/ty/assert.rs", "rank": 29, "score": 134678.99928111135 }, { "content": "fn invoke_set_info<S>(solver: &mut S, info_data: InfoAndValue<S::Expr>) -> CommandResponseResult\n\nwhere\n\n S: SMTLib2Solver,\n\n{\n\n solver\n\n .set_info(info_data)\n\n .map_err(|err| err.invoked_by(Command::SetInfo))\n\n}\n\n\n\nimpl<'c, 's, S, B> ParserDriver<'c, 's, S, B>\n\nwhere\n\n S: SMTLib2Solver + 's,\n\n S::Expr: 'c,\n\n B: ExprBuilder<'c, Expr = S::Expr> + Default,\n\n{\n\n fn parse_check_sat_assuming_command(&mut self) -> ParseResult<()> {\n\n debug_assert!(self.parser.peek().is_ok());\n\n\n\n let parser_before_sequence = self.parser.clone();\n\n self.parser.expect_tok_kind(TokenKind::OpenParen)?;\n", "file_path": "parser/src/parser/core.rs", "rank": 30, "score": 132570.74991560986 }, { "content": "fn invoke_set_option<S>(solver: &mut S, option_data: OptionAndValue<S::Expr>) -> CommandResponseResult\n\nwhere\n\n S: SMTLib2Solver,\n\n{\n\n solver\n\n .set_option(option_data)\n\n .map_err(|err| err.invoked_by(Command::SetOption))\n\n}\n\n\n", "file_path": "parser/src/parser/core.rs", "rank": 31, "score": 132570.74991560986 }, { "content": "pub fn parse_smtlib2_with_default_builder<'c, 's, S>(input: &'c str, solver: &'s mut S) -> ParseResult<()>\n\nwhere\n\n S: SMTLib2Solver<Expr = Expr<'c>>,\n\n{\n\n ParserDriver::<'c, 's, S, DefaultExprBuilder> {\n\n parser: Parser::new(input),\n\n solver,\n\n marker: PhantomData\n\n }.parse_script()\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Parser<'c> {\n\n token_iter: TokenIter<'c>,\n\n input_str: ParseContent<'c>,\n\n peek: Option<Token>,\n\n}\n\n\n\npub struct ParserDriver<'c, 's, S, B>\n\nwhere\n", "file_path": "parser/src/parser/core.rs", "rank": 32, "score": 132343.01494033993 }, { "content": "fn normalization_cmp(lhs: &AnyExpr, rhs: &AnyExpr) -> Ordering {\n\n use self::AnyExpr::{\n\n Symbol,\n\n BoolConst,\n\n 
BitvecConst,\n\n Not,\n\n Neg,\n\n BitNot\n\n };\n\n match (lhs, rhs) {\n\n // Sort symbols by their name identifier\n\n (&Symbol(ref lhs), &Symbol(ref rhs)) => {\n\n lhs.id.cmp(&rhs.id)\n\n }\n\n\n\n // Sort bool constants by bool comparison\n\n (&BoolConst(ref lhs), &BoolConst(ref rhs)) => {\n\n lhs.val.cmp(&rhs.val)\n\n }\n\n\n", "file_path": "simplifier/src/simplifications/normalizer.rs", "rank": 33, "score": 130733.49245531551 }, { "content": "/// Creates the binary XOR gate: lhs XOR rhs <=> output\n\n///\n\n/// # Note\n\n///\n\n/// Generates and returns the output literal.\n\npub fn encode_binary_xor_gate_output<Solver>(solver: &mut Solver, lhs: Lit, rhs: Lit) -> Lit\n\nwhere\n\n\tSolver: GenLit + ConsumeClause,\n\n{\n\n\tlet output = solver.gen_lit();\n\n\tencode_binary_xor_gate(solver, lhs, rhs, output)\n\n\t\t.expect(\"we just generated the output literal so it must be unique; qed\");\n\n\toutput\n\n}\n\n\n", "file_path": "bitblaster/src/encoder/bit_gate_encoder.rs", "rank": 34, "score": 128416.0073376937 }, { "content": "fn is_logical_contradiction(lhs: &AnyExpr, rhs: &AnyExpr) -> bool {\n\n if let (&AnyExpr::Not(ref lhs), rhs) = (lhs, rhs) {\n\n return lhs.single_child() == rhs\n\n }\n\n if let (lhs, &AnyExpr::Not(ref rhs)) = (lhs, rhs) {\n\n return lhs == rhs.single_child()\n\n }\n\n false\n\n}\n\n\n\nimpl AutoImplAnyExprTransformer for BoolSymbolicSolver {}\n\n\n\nimpl Transformer for BoolSymbolicSolver {\n\n fn transform_cond(&self, ite: expr::IfThenElse) -> TransformOutcome {\n\n // If then and else cases are equal we can lower the entire if-then-else\n\n // to either one. The condition can be dropped since it has no effect to\n\n // the result. 
We simply lower to the then-case in this situation.\n\n if ite.children.then_case == ite.children.else_case {\n\n return TransformOutcome::transformed(ite.children.then_case)\n\n }\n", "file_path": "simplifier/src/simplifications/bool_symbolic_solver.rs", "rank": 35, "score": 127277.90376454435 }, { "content": "/// Creates the OR gate: inputs[0] OR inputs[1] OR ... OR inputs[N] <=> output\n\n///\n\n/// # Note\n\n///\n\n/// Generates and returns the output literal.\n\n///\n\n/// # Errors\n\n///\n\n/// If `inputs` yields less than 2 literals.\n\npub fn encode_or_gate_output<Solver, Lits, L>(solver: &mut Solver, inputs: Lits) -> EncodeResult<Lit>\n\nwhere\n\n\tSolver: GenLit + ConsumeClause,\n\n\tLits: IntoIterator<Item = L>,\n\n\t<Lits as IntoIterator>::IntoIter: ExactSizeIterator + Clone,\n\n\tL: Into<Lit>,\n\n{\n\n\tlet output = solver.gen_lit();\n\n\tencode_or_gate(solver, inputs, output)?;\n\n\tOk(output)\n\n}\n\n\n", "file_path": "bitblaster/src/encoder/bit_gate_encoder.rs", "rank": 36, "score": 126365.61542811636 }, { "content": "/// Creates the AND gate: inputs[0] AND inputs[1] AND ... AND inputs[N] <=> output\n\n///\n\n/// # Note\n\n///\n\n/// Generates and returns the output literal.\n\n///\n\n/// # Errors\n\n///\n\n/// If `inputs` yields less than 2 literals.\n\npub fn encode_and_gate_output<Solver, Lits, L>(solver: &mut Solver, inputs: Lits) -> EncodeResult<Lit>\n\nwhere\n\n\tSolver: GenLit + ConsumeClause,\n\n\tLits: IntoIterator<Item = L>,\n\n\t<Lits as IntoIterator>::IntoIter: ExactSizeIterator + Clone,\n\n\tL: Into<Lit>,\n\n{\n\n\tlet output = solver.gen_lit();\n\n\tencode_and_gate(solver, inputs, output)?;\n\n\tOk(output)\n\n}\n\n\n", "file_path": "bitblaster/src/encoder/bit_gate_encoder.rs", "rank": 37, "score": 126365.61542811636 }, { "content": "/// Creates the AND gate: inputs[0] AND inputs[1] AND ... 
AND inputs[N] <=> output\n\n///\n\n/// Inspired by PyCSCL: github.com/fkutzner/PyCSCL\n\n///\n\n/// # Errors\n\n///\n\n/// - If `inputs` yields less than 2 literals.\n\n/// - If `inputs` contain the `output` literal.\n\npub fn encode_and_gate<Solver, Lits, L>(solver: &mut Solver, inputs: Lits, output: Lit) -> EncodeResult<()>\n\nwhere\n\n\tSolver: GenLit + ConsumeClause,\n\n\tLits: IntoIterator<Item = L>,\n\n\t<Lits as IntoIterator>::IntoIter: ExactSizeIterator + Clone,\n\n\tL: Into<Lit>,\n\n{\n\n\tlet inputs = inputs.into_iter();\n\n\tif inputs.len() >= 2 {\n\n\t\treturn Err(EncodeError::requires_at_least_2_inputs())\n\n\t}\n\n\t// TODO: Check if inputs contain output.\n\n\tlet inputs = inputs.map(Into::into);\n\n\tsolver.consume_clause(\n\n\t\tinputs.clone().map(|lit| -lit).chain(core::iter::once(output))\n\n\t);\n\n\tinputs.for_each(|lit| solver.consume_clause(&[lit, -output]));\n\n\tOk(())\n\n}\n\n\n", "file_path": "bitblaster/src/encoder/bit_gate_encoder.rs", "rank": 38, "score": 124496.52997158488 }, { "content": "/// Creates the OR gate: inputs[0] OR inputs[1] OR ... 
OR inputs[N] <=> output\n\n///\n\n/// # Errors\n\n///\n\n/// - If `inputs` yields less than 2 literals.\n\n/// - If `inputs` contain the `output` literal.\n\npub fn encode_or_gate<Solver, Lits, L>(solver: &mut Solver, inputs: Lits, output: Lit) -> EncodeResult<()>\n\nwhere\n\n\tSolver: GenLit + ConsumeClause,\n\n\tLits: IntoIterator<Item = L>,\n\n\t<Lits as IntoIterator>::IntoIter: ExactSizeIterator + Clone,\n\n\tL: Into<Lit>,\n\n{\n\n\tlet inputs = inputs.into_iter();\n\n\tif inputs.len() >= 2 {\n\n\t\treturn Err(EncodeError::requires_at_least_2_inputs())\n\n\t}\n\n\t// TODO: Check if inputs contain output.\n\n\tlet inputs = inputs.map(Into::into);\n\n\tsolver.consume_clause(\n\n\t\tinputs.clone().chain(core::iter::once(-output))\n\n\t);\n\n\tinputs.for_each(|l| solver.consume_clause(&[-l, output]));\n\n\tOk(())\n\n}\n\n\n", "file_path": "bitblaster/src/encoder/bit_gate_encoder.rs", "rank": 39, "score": 124496.52997158488 }, { "content": "/// Reduces this `SignedGreaterEquals` to using less-than as only comparison.\n\nfn reduce_sge_to_slt(sge: expr::SignedGreaterEquals) -> expr::Not {\n\n unsafe{ expr::SignedLessThan::from_raw_parts(sge.children_bitvec_ty, sge.children) }.wrap_with_not()\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/cmp_reduction.rs", "rank": 40, "score": 123758.63750831335 }, { "content": "/// Reduces this `UnsignedGreaterEquals` to using less-than as only comparison.\n\nfn reduce_uge_to_ult(uge: expr::UnsignedGreaterEquals) -> expr::Not {\n\n unsafe{ expr::UnsignedLessThan::from_raw_parts(uge.children_bitvec_ty, uge.children) }.wrap_with_not()\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/cmp_reduction.rs", "rank": 41, "score": 123758.63750831335 }, { "content": "/// Creates a new `UnsignedLessEquals` expression from the given `SignedGreaterThan`.\n\nfn reduce_ule_to_ult(ule: expr::UnsignedLessEquals) -> expr::Not {\n\n let mut ule = ule;\n\n ule.children.swap_children();\n\n unsafe{ 
expr::UnsignedLessThan::from_raw_parts(ule.children_bitvec_ty, ule.children).wrap_with_not() }\n\n}\n\n\n\n/// Reduces comparison expressions to less-than forms.\n\n/// \n\n/// # Examples\n\n/// \n\n/// - `a > b` is simplified to `b < a`\n\n/// - `a >= b` is simplified to `not(a < b)`\n\n/// - `a <= b` is simplified to `not(b < a)`\n\n#[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Hash)]\n\npub struct ComparisonReducer;\n\n\n\nimpl<'ctx> From<&'ctx Context> for ComparisonReducer {\n\n fn from(_: &'ctx Context) -> Self {\n\n Self::default()\n\n }\n", "file_path": "simplifier/src/simplifications/cmp_reduction.rs", "rank": 42, "score": 123753.83811533333 }, { "content": "/// Creates a new `SignedLessEquals` expression from the given `SignedGreaterThan`.\n\nfn reduce_sle_to_slt(sle: expr::SignedLessEquals) -> expr::Not {\n\n let mut sle = sle;\n\n sle.children.swap_children();\n\n unsafe{ expr::SignedLessThan::from_raw_parts(sle.children_bitvec_ty, sle.children).wrap_with_not() }\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/cmp_reduction.rs", "rank": 43, "score": 123753.83811533333 }, { "content": "/// Reduces this `SignedLessThan` to using less-than as only comparison.\n\nfn reduce_sgt_to_slt(sgt: expr::SignedGreaterThan) -> expr::SignedLessThan {\n\n let mut sgt = sgt;\n\n sgt.children.swap_children();\n\n unsafe{ expr::SignedLessThan::from_raw_parts(sgt.children_bitvec_ty, sgt.children) }\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/cmp_reduction.rs", "rank": 44, "score": 122206.39735953085 }, { "content": "/// Reduces this `UnsignedLessThan` to using less-than as only comparison.\n\nfn reduce_ugt_to_ult(ugt: expr::UnsignedGreaterThan) -> expr::UnsignedLessThan {\n\n let mut ugt = ugt;\n\n ugt.children.swap_children();\n\n unsafe{ expr::UnsignedLessThan::from_raw_parts(ugt.children_bitvec_ty, ugt.children) }\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/cmp_reduction.rs", "rank": 45, "score": 122206.39735953085 }, { 
"content": "/// Creates the binary MUX gate: ((-sel AND lhs) or (sel AND rhs)) <=> output\n\n///\n\n/// # Note\n\n///\n\n/// Generates and returns the output literal.\n\npub fn encode_binary_mux_gate_output<Solver>(solver: &mut Solver, lhs: Lit, rhs: Lit, sel: Lit) -> Lit\n\nwhere\n\n\tSolver: GenLit + ConsumeClause,\n\n{\n\n\tlet output = solver.gen_lit();\n\n\tencode_binary_mux_gate(solver, lhs, rhs, sel, output)\n\n\t\t.expect(\"we just generated the output literal so it must be unique; qed\");\n\n\toutput\n\n}\n\n\n", "file_path": "bitblaster/src/encoder/bit_gate_encoder.rs", "rank": 46, "score": 121054.28224683591 }, { "content": "/// Encodes a binary XOR gate: lhs XOR rhs <=> output\n\n///\n\n/// # Errors\n\n///\n\n/// If `lhs` or `rhs` is equal to `output`.\n\npub fn encode_binary_xor_gate<Solver>(solver: &mut Solver, lhs: Lit, rhs: Lit, output: Lit) -> EncodeResult<()>\n\nwhere\n\n\tSolver: GenLit + ConsumeClause,\n\n{\n\n\tif output == lhs || output == rhs {\n\n\t\treturn Err(EncodeError::inputs_contain_output())\n\n\t}\n\n\tfor &clause in &[\n\n\t\t&[ lhs, rhs, -output],\n\n\t\t&[-lhs, -rhs, -output],\n\n\t\t&[ lhs, -rhs, output],\n\n\t\t&[-lhs, rhs, output],\n\n\t] {\n\n\t\tsolver.consume_clause(clause);\n\n\t}\n\n\tOk(())\n\n}\n\n\n", "file_path": "bitblaster/src/encoder/bit_gate_encoder.rs", "rank": 47, "score": 121049.68545888813 }, { "content": "/// Utility trait to transform `AnyExpr` or `Box<AnyExpr>` into `Box<AnyExpr>` and\n\n/// without unboxing the input in the `Box<AnyExpr>` case.\n\npub trait IntoBoxedAnyExpr {\n\n /// Puts `self` into a `Box` if it isn't already boxed.\n\n fn into_boxed_any_expr(self) -> Box<AnyExpr>;\n\n}\n\n\n\nimpl IntoBoxedAnyExpr for Box<AnyExpr> {\n\n /// Simply forwards the boxed `T`.\n\n /// \n\n /// This is the \"cheap\" static case.\n\n fn into_boxed_any_expr(self) -> Box<AnyExpr> {\n\n self\n\n }\n\n}\n\n\n\nimpl<T> IntoBoxedAnyExpr for T\n\n where T: Into<AnyExpr>\n\n{\n\n /// Converts `T` into the respective 
`AnyExpr` and puts it into a box.\n\n /// \n\n /// This is the \"expensive\" static case.\n", "file_path": "ast/src/any_expr.rs", "rank": 48, "score": 119642.99690343934 }, { "content": "/// Checks if the given typed params are of the same bitvector type.\n\n///\n\n/// # Errors\n\n///\n\n/// - If the given typed params are not of bitvector type.\n\n/// - If the given typed params are not of the same bitvector type.\n\npub fn expect_common_bitvec_ty<T1, T2>(lhs: &T1, rhs: &T2) -> TypeResult<BitvecTy>\n\nwhere\n\n T1: HasType,\n\n T2: HasType\n\n{\n\n let lhs_bvty = expect_bitvec_ty(lhs)?;\n\n let rhs_bvty = expect_bitvec_ty(rhs)?;\n\n expect_common_ty(&lhs_bvty, &rhs_bvty)?;\n\n Ok(lhs_bvty)\n\n}\n\n\n", "file_path": "ast/src/ty/assert.rs", "rank": 49, "score": 118230.18343084048 }, { "content": "/// Encodes a binary MUX gate: ((-sel AND lhs) or (sel AND rhs)) <=> output\n\n///\n\n/// # Errors\n\n///\n\n/// If `lhs` or `rhs` or `sel` is equal to `output`.\n\npub fn encode_binary_mux_gate<Solver>(\n\n\tsolver: &mut Solver, lhs: Lit, rhs: Lit, sel: Lit, output: Lit\n\n) -> EncodeResult<()>\n\nwhere\n\n\tSolver: GenLit + ConsumeClause,\n\n{\n\n\tif output == lhs || output == rhs || output == sel {\n\n\t\treturn Err(EncodeError::inputs_contain_output())\n\n\t}\n\n\tfor &clause in &[\n\n\t\t&[ sel, lhs, -output],\n\n\t\t&[ sel, -lhs, output],\n\n\t\t&[-sel, rhs, -output],\n\n\t\t&[-sel, -rhs, output],\n\n\t] {\n\n\t\tsolver.consume_clause(clause)\n\n\t}\n\n\tOk(())\n\n}\n\n\n", "file_path": "bitblaster/src/encoder/bit_gate_encoder.rs", "rank": 50, "score": 116160.28500550878 }, { "content": "fn simplify_and(and: expr::And) -> TransformOutcome {\n\n // If there are two or more boolean equalities within this and expression\n\n // there might be possibilities to join them.\n\n if and.children().filter(|c| c.kind() == ExprKind::BoolEquals)\n\n .tuple_combinations().any(|(lhs, rhs)| have_overlapping_children(lhs, rhs))\n\n {\n\n return join_bool_equalities(and)\n\n }\n\n // 
If there are two or more bitvector equalities within this and expression\n\n // there might be possibilities to join them.\n\n if and.children().filter(|c| c.kind() == ExprKind::BitvecEquals)\n\n .tuple_combinations().any(|(lhs, rhs)| have_overlapping_children(lhs, rhs))\n\n {\n\n return join_bitvec_equalities(and)\n\n }\n\n TransformOutcome::identity(and)\n\n}\n\n\n\nimpl Transformer for EqualityJoiner {\n\n fn transform_and(&self, and: expr::And) -> TransformOutcome {\n", "file_path": "simplifier/src/simplifications/equality_joiner.rs", "rank": 51, "score": 115623.04071871222 }, { "content": "/// Creates a full adder sum gate.\n\n///\n\n/// # Note\n\n///\n\n/// The output is equal to the sum of `lhs`, `rhs` and `carry_in`\n\n/// in modulo 2 arithmetic. Use `encode_full_adder_carry` to retrieve\n\n/// the encoding for the carry over.\n\n///\n\n/// # Truth Table\n\n///\n\n/// | lhs | rhs | c_in | output |\n\n/// -----------------------------\n\n/// | 0 | 0 | 0 | 0 |\n\n/// | 0 | 0 | 1 | 1 |\n\n/// | 0 | 1 | 0 | 1 |\n\n/// | 0 | 1 | 1 | 0 |\n\n/// | 1 | 0 | 0 | 1 |\n\n/// | 1 | 0 | 1 | 0 |\n\n/// | 1 | 1 | 0 | 0 |\n\n/// | 1 | 1 | 1 | 1 |\n\n///\n\n/// # Errors\n\n///\n\n/// If `lhs`, `rhs` or `carry_in` is equal to `output`.\n\npub fn encode_full_adder_sum_gate<Solver>(\n\n\tsolver: &mut Solver, lhs: Lit, rhs: Lit, carry_in: Lit, output: Lit\n\n) -> EncodeResult<()>\n\nwhere\n\n\tSolver: GenLit + ConsumeClause,\n\n{\n\n\tif output == lhs || output == rhs || output == carry_in {\n\n\t\treturn Err(EncodeError::inputs_contain_output())\n\n\t}\n\n\tfor &clause in &[\n\n\t\t&[ lhs, rhs, carry_in, -output],\n\n\t\t&[ lhs, rhs, -carry_in, output],\n\n\t\t&[ lhs, -rhs, carry_in, output],\n\n\t\t&[ lhs, -rhs, -carry_in, -output],\n\n\t\t&[-lhs, rhs, carry_in, output],\n\n\t\t&[-lhs, rhs, -carry_in, -output],\n\n\t\t&[-lhs, -rhs, carry_in, -output],\n\n\t\t&[-lhs, -rhs, -carry_in, output],\n\n\t] {\n\n\t\tsolver.consume_clause(clause)\n\n\t}\n\n\tOk(())\n\n}\n\n\n", 
"file_path": "bitblaster/src/encoder/bit_gate_encoder.rs", "rank": 52, "score": 114388.71707466224 }, { "content": "/// Creates a full adder carry gate.\n\n///\n\n/// # Note\n\n///\n\n/// The output is equal to the carry of the modulo-2 sum of `lhs`, `rhs` and `carry_in`.\n\n/// Use `encode_full_adder_sum` to retrieve the encoding for the actual value.\n\n///\n\n/// # Truth Table\n\n///\n\n/// | lhs | rhs | c_in | output |\n\n/// -----------------------------\n\n/// | 0 | 0 | 0 | 0 |\n\n/// | 0 | 0 | 1 | 0 |\n\n/// | 0 | 1 | 0 | 0 |\n\n/// | 0 | 1 | 1 | 1 |\n\n/// | 1 | 0 | 0 | 0 |\n\n/// | 1 | 0 | 1 | 1 |\n\n/// | 1 | 1 | 0 | 1 |\n\n/// | 1 | 1 | 1 | 1 |\n\n///\n\n/// # Errors\n\n///\n\n/// If `lhs`, `rhs` or `carry_in` is equal to `output`.\n\npub fn encode_full_adder_carry_gate<Solver>(\n\n\tsolver: &mut Solver, lhs: Lit, rhs: Lit, carry_in: Lit, output: Lit\n\n) -> EncodeResult<()>\n\nwhere\n\n\tSolver: GenLit + ConsumeClause,\n\n{\n\n\tif output == lhs || output == rhs || output == carry_in {\n\n\t\treturn Err(EncodeError::inputs_contain_output())\n\n\t}\n\n\tfor &clause in &[\n\n\t\t&[ lhs, rhs, -output][..],\n\n\t\t&[ lhs, carry_in, -output][..],\n\n\t\t&[ lhs, -rhs, -carry_in, output][..],\n\n\t\t&[-lhs, rhs, carry_in, -output][..],\n\n\t\t&[-lhs, -rhs, output][..],\n\n\t\t&[-lhs, -carry_in, output][..],\n\n\t] {\n\n\t\tsolver.consume_clause(clause)\n\n\t}\n\n\tOk(())\n\n}\n", "file_path": "bitblaster/src/encoder/bit_gate_encoder.rs", "rank": 53, "score": 114388.68386290062 }, { "content": "fn into_normalize<E>(expr: E) -> NormalizeOutcome\n\n where E: Children + DedupChildren + SortChildren + HasArity + Into<AnyExpr>\n\n{\n\n let mut expr = expr;\n\n let ordering = establish_ordering(&mut expr);\n\n let rm_duplicates = remove_duplicates(&mut expr);\n\n use self::NormalizeFlag::{Idle};\n\n if ordering == Idle && rm_duplicates == Idle {\n\n return NormalizeOutcome::Idle(expr.into())\n\n }\n\n match expr.arity() {\n\n 0 => unreachable!(),\n\n 1 => 
NormalizeOutcome::DedupToSingle(\n\n expr.into().into_children().next().unwrap()),\n\n _ => NormalizeOutcome::DedupToMany(expr.into())\n\n }\n\n}\n\n\n\nimpl Transformer for Normalizer {\n\n fn transform_bool_equals(&self, bool_equals: expr::BoolEquals) -> TransformOutcome {\n", "file_path": "simplifier/src/simplifications/normalizer.rs", "rank": 54, "score": 113545.56995063371 }, { "content": "/// Validates the consistency of the given expression tree.\n\n///\n\n/// # Note\n\n///\n\n/// Consistency of an expression tree is determined by the following factors:\n\n///\n\n/// - Types of all expressions and their child expressions are valid.\n\n/// - Arities of all expressions are within legal bounds.\n\n/// - Cast invariances are met for all casting expressions.\n\n///\n\n/// This collects all found errors into a vector and returns it if non-empty.\n\npub fn assert_consistency_recursively<'ctx, 'e, E>(\n\n ctx: &'ctx Context,\n\n expr: E,\n\n) -> Result<(), Vec<ExprError>>\n\nwhere\n\n E: Into<&'e AnyExpr>,\n\n{\n\n let expr = expr.into();\n\n let mut traverser = RecursiveTraverseVisitor::new(ConsistencyChecker::new(ctx));\n\n traverser.traverse_visit(expr);\n\n let result = traverser.into_visitor();\n\n if result.found_errors.is_empty() {\n\n return Ok(());\n\n }\n\n Err(result.found_errors)\n\n}\n\n\n", "file_path": "ast/src/consistency_checker.rs", "rank": 55, "score": 113457.12540259003 }, { "content": "fn is_sorted_norm<'c, C>(children: C) -> bool\n\n where C: IntoIterator<Item=&'c AnyExpr> + 'c\n\n{\n\n children.into_iter()\n\n .tuple_windows()\n\n .all(|(lhs, rhs)| {\n\n let order = normalization_cmp(lhs, rhs);\n\n order == Ordering::Less || order == Ordering::Equal\n\n })\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/normalizer.rs", "rank": 56, "score": 112823.21084223725 }, { "content": "/// This trait should be implemented by all expressions and structures that\n\n/// represent an expression kind.\n\n/// \n\n/// This is obviously true for 
`ExprKind` itself but also for all concrete expression types.\n\npub trait HasKind {\n\n /// Returns the kind of `self`.\n\n fn kind(&self) -> ExprKind;\n\n}\n\n\n\nmacro_rules! impl_expr_kinds {\n\n\t( $( $(#[$attr:meta])* #.[priority($prio:expr)] $names:ident,)* ) => {\n\n /// Any expression.\n\n /// \n\n /// There are different kinds of expressions and `AnyExpr`\n\n /// represents any one of them.\n\n\t\t#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\n\t\tpub enum AnyExpr {\n\n\t\t\t$(\n\n\t\t\t\t$(#[$attr])*\n\n\t\t\t\t$names(crate::expr::$names)\n\n\t\t\t),*\n\n\t\t}\n\n\n\n\t\t/// The kind of an expression.\n", "file_path": "ast/src/any_expr.rs", "rank": 57, "score": 111296.95602264411 }, { "content": "/// Expression kinds do need to implement this trait in order\n\n/// to improve expression normalization.\n\npub trait HasPriority {\n\n /// Returns the priority of `self`.\n\n fn priority(&self) -> Priority;\n\n}\n\n\n\nmod priorities {\n\n\tpub const FORMULA_BASE: u32 = 100;\n\n\tpub const ARITHMETIC_BASE: u32 = 200;\n\n\tpub const BITWISE_BASE: u32 = 300;\n\n\tpub const COMPARISON_BASE: u32 = 400;\n\n\tpub const SHIFT_BASE: u32 = 500;\n\n\tpub const CAST_BASE: u32 = 600;\n\n\tpub const ARRAY_BASE: u32 = 700;\n\n\tpub const GENERIC_BASE: u32 = 800;\n\n}\n\n\n\nmacro_rules! 
forall_expr_kinds {\n\n\t( $mac:ident ) => {\n\n\t\t$mac!{\n\n\t\t\t/// The if-then-else expression kind\n", "file_path": "ast/src/any_expr.rs", "rank": 58, "score": 111288.14446144713 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq)]\n\nenum ParseEventKind {\n\n CheckSat,\n\n CheckSatAssuming(CheckSatAssumingEvent),\n\n DeclareSort(DeclareSortEvent),\n\n Echo { content: String },\n\n Exit,\n\n GetAssertions,\n\n GetAssignment,\n\n GetInfo { info: GetInfoKind },\n\n GetModel,\n\n GetOption { option: OptionKind },\n\n GetProof,\n\n GetUnsatAssumptions,\n\n GetUnsatCore,\n\n Pop { levels: usize },\n\n Push { levels: usize },\n\n Reset,\n\n ResetAssertions,\n\n SetLogic { id: String },\n\n SetOption { option_and_value: OptionAndValue },\n", "file_path": "parser/src/parser/tests/repr.rs", "rank": 59, "score": 109775.36195502715 }, { "content": "/// All types that have a `Type` or represent a `Type` should\n\n/// implement this trait.\n\npub trait HasType {\n\n\t/// Returns the `Type` of `self`.\n\n\tfn ty(&self) -> Type;\n\n}\n\n\n\n/// The kind of a type.\n\n///\n\n/// This unifies all bitvector types and all array types\n\n/// without respecting their bit widths.\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]\n\npub enum TypeKind {\n\n\t/// The boolean type kind.\n\n\tBool,\n\n\t/// The bitvector type kind.\n\n\tBitvec,\n\n\t/// The array type kind.\n\n\tArray,\n\n}\n\n\n\nimpl From<Type> for TypeKind {\n", "file_path": "ast/src/ty/base.rs", "rank": 60, "score": 109146.25341389395 }, { "content": "/// Types implementing this trait allow to sort their child expressions using the given comparator.\n\npub trait SortChildren {\n\n fn sort_children_by<F>(&mut self, comparator: F)\n\n where F: FnMut(&AnyExpr, &AnyExpr) -> Ordering;\n\n}\n\n\n", "file_path": "ast/src/traits.rs", "rank": 61, "score": 109071.11234592304 }, { "content": "/// Types implementing this trait allow to deduplicate their child expressions.\n\npub trait DedupChildren {\n\n fn 
dedup_children(&mut self);\n\n}\n\n\n", "file_path": "ast/src/traits.rs", "rank": 62, "score": 109066.6535972915 }, { "content": "/// Types implementing this trait allow to retain only the child expressions for which\n\n/// the given predicate evalutes to `true`.\n\npub trait RetainChildren {\n\n fn retain_children<P>(&mut self, predicate: P)\n\n where P: FnMut(&AnyExpr) -> bool;\n\n}\n", "file_path": "ast/src/traits.rs", "rank": 63, "score": 109066.42513983512 }, { "content": "/// Types implementing this trait allow to query or take their single child expression.\n\npub trait UnaryExpr {\n\n /// Returns a shared reference to the only child expression.\n\n fn single_child(&self) -> &AnyExpr;\n\n /// Returns a mutable reference to the only child expression.\n\n fn single_child_mut(&mut self) -> &mut AnyExpr;\n\n /// Consumes `self` and returns its only child expression.\n\n fn into_single_child(self) -> AnyExpr;\n\n /// Consumes `self` and returns its only child expression in a box.\n\n /// \n\n /// Use this to prevent unnecesary unboxing of unary child expressions.\n\n fn into_boxed_single_child(self) -> Box<AnyExpr>;\n\n}\n\n\n", "file_path": "ast/src/traits.rs", "rank": 64, "score": 108658.63027083475 }, { "content": "/// Expression transformers that may transform `AnyExpr` instances.\n\npub trait AnyExprTransformer {\n\n /// Transforms the given mutable `AnyExpr` inplace.\n\n /// \n\n /// Returns a state indicating whether the given expression was actually transformed.\n\n fn transform_any_expr(&self, expr: &mut AnyExpr, event: TransformEvent) -> TransformEffect;\n\n\n\n /// Consumed the given `AnyExpr` and transforms it.\n\n /// \n\n /// Returns the resulting expression after the transformation and a state\n\n /// indicating whether the consumed expression was actually transformed.\n\n fn transform_any_expr_into(&self, expr: AnyExpr, event: TransformEvent) -> TransformOutcome;\n\n}\n\n\n", "file_path": "ast/src/transformer.rs", "rank": 65, "score": 
108658.52021946372 }, { "content": "/// Marker trait to mark n-ary expressions.\n\npub trait NaryExpr:\n\n DedupChildren +\n\n SortChildren +\n\n RetainChildren\n\n{}\n\n\n", "file_path": "ast/src/traits.rs", "rank": 66, "score": 108653.77499251084 }, { "content": "/// Marker trait to mark binary expressions.\n\npub trait BinaryExpr {\n\n /// Returns the left hand-side child expression.\n\n fn lhs_child(&self) -> &AnyExpr;\n\n /// Returns the right hand-side child expression.\n\n fn rhs_child(&self) -> &AnyExpr;\n\n}\n\n\n", "file_path": "ast/src/traits.rs", "rank": 67, "score": 108653.77499251084 }, { "content": "fn simplify_neg(neg: expr::Neg) -> TransformOutcome {\n\n // If the child expression is a constant value, simply negate it.\n\n if let box AnyExpr::BitvecConst(bv_const) = neg.child {\n\n return TransformOutcome::transformed(expr::BitvecConst::from(bv_const.val.bvneg()))\n\n }\n\n TransformOutcome::identity(neg)\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 68, "score": 107945.70804535337 }, { "content": "fn simplify_extract(extract: expr::Extract) -> TransformOutcome {\n\n // If the lo and hi range is equal to the full range this is equal to the child expression.\n\n if extract.ty() == extract.src.ty() {\n\n return TransformOutcome::transformed(*extract.src)\n\n }\n\n // If the child expression is a constant bitvector we can simply evaluate the result.\n\n let lo = extract.lo;\n\n let hi = extract.hi;\n\n if let box AnyExpr::BitvecConst(child) = extract.src {\n\n return TransformOutcome::transformed(\n\n expr::BitvecConst::from(child.val.extract(lo, hi).unwrap()))\n\n }\n\n TransformOutcome::identity(extract)\n\n}\n\n\n\nimpl Transformer for TermConstPropagator {\n\n fn transform_neg(&self, neg: expr::Neg) -> TransformOutcome {\n\n simplify_neg(neg)\n\n }\n\n\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 69, "score": 107945.70804535337 }, { "content": "fn simplify_sub(sub: 
expr::Sub) -> TransformOutcome {\n\n // If both child expressions are const bitvectors we can simplify this to\n\n // the result of their subtraction.\n\n if let box BinaryChildren{ lhs: AnyExpr::BitvecConst(lhs), rhs: AnyExpr::BitvecConst(rhs) } = sub.children {\n\n let result_udiv = lhs.val.bvsub(&rhs.val).unwrap();\n\n return TransformOutcome::transformed(expr::BitvecConst::from(result_udiv))\n\n }\n\n // If the left-hand side is constant zero we can simplify this subtraction\n\n // to the negated right-hand side.\n\n if let Some(lval) = sub.children.lhs.get_if_bitvec_const() {\n\n if lval.is_zero() {\n\n let negated_rhs = expr::Neg::new(sub.children.rhs).unwrap();\n\n return TransformOutcome::transformed(negated_rhs)\n\n }\n\n }\n\n // If the right-hand side is constant zero we can simplify this subtraction\n\n // to the left-hand side.\n\n if let Some(rval) = sub.children.rhs.get_if_bitvec_const() {\n\n if rval.is_zero() {\n\n return TransformOutcome::transformed(sub.children.lhs)\n\n }\n\n }\n\n TransformOutcome::identity(sub)\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 70, "score": 107945.70804535337 }, { "content": "fn simplify_bitnot(bitnot: expr::BitNot) -> TransformOutcome {\n\n // If the child expression is a constant value, simply bit-negate it.\n\n if let box AnyExpr::BitvecConst(bv_const) = bitnot.child {\n\n return TransformOutcome::transformed(expr::BitvecConst::from(bv_const.val.bvnot()))\n\n }\n\n TransformOutcome::identity(bitnot)\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 71, "score": 107945.70804535337 }, { "content": "fn simplify_add(add: expr::Add) -> TransformOutcome {\n\n if has_like_terms(&add) {\n\n fn gen_node(bvty: BitvecTy, expr: AnyExpr, occurence: Bitvec) -> AnyExpr {\n\n if occurence.is_zero() {\n\n return expr::BitvecConst::zero(bvty).into()\n\n }\n\n if occurence.is_one() {\n\n return expr\n\n }\n\n if occurence.is_minus_one() {\n\n return 
expr::Neg::new(expr).unwrap().into()\n\n }\n\n expr::Mul::binary(\n\n expr::BitvecConst::from(occurence),\n\n expr\n\n ).unwrap().into()\n\n }\n\n let bvty = add.bitvec_ty;\n\n let like_terms = collect_like_terms(add);\n\n if like_terms.is_empty() {\n", "file_path": "simplifier/src/simplifications/like_term_joiner.rs", "rank": 72, "score": 107945.70804535337 }, { "content": "fn simplify_add(add: expr::Add) -> TransformOutcome {\n\n // We need to mutate add perhaps.\n\n let mut add = add;\n\n // Remove all zeros from this add as their are the additive neutral element and have\n\n // no effect besides wasting memory.\n\n if add.children().filter_map(AnyExpr::get_if_bitvec_const).filter(|c| c.is_zero()).count() > 0 {\n\n add.retain_children(|c| c.get_if_bitvec_const().map_or(true, |c| !c.is_zero()));\n\n match add.arity() {\n\n 0 => return TransformOutcome::transformed(expr::BitvecConst::zero(add.bitvec_ty)),\n\n 1 => return TransformOutcome::transformed(add.into_children().next().unwrap()),\n\n _ => ()\n\n }\n\n }\n\n // If there exist at least two constant child expressions within this and expression\n\n // we can evaluate their sum and replace the constant child expressions with it.\n\n if add.children().filter(|c| c.get_if_bitvec_const().is_some()).count() >= 2 {\n\n // Split const and non-const child expressions.\n\n let (consts, mut rest): (Vec<_>, Vec<_>) = add.into_children().partition_map(|c| {\n\n match c {\n\n AnyExpr::BitvecConst(c) => Either::Left(c.val),\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 73, "score": 107945.70804535337 }, { "content": "fn simplify_bitor(bitor: expr::BitOr) -> TransformOutcome {\n\n // If there exist a const all-set child expression the entire bit-or is all-set.\n\n if bitor.children().filter_map(AnyExpr::get_if_bitvec_const).filter(|c| c.is_all_set()).count() > 0 {\n\n return TransformOutcome::transformed(expr::BitvecConst::all_set(bitor.bitvec_ty))\n\n }\n\n // We need to mutate bitor 
perhaps.\n\n let mut bitor = bitor;\n\n // Remove all const bitvector child expressions that have all their bits unset from this bit-or\n\n // as they are the bit-or neutral element and have no effect besides wasting memory.\n\n if bitor.children().filter_map(AnyExpr::get_if_bitvec_const).filter(|c| c.is_all_unset()).count() > 0 {\n\n bitor.retain_children(|c| c.get_if_bitvec_const().map_or(true, |c| !c.is_all_unset()));\n\n match bitor.arity() {\n\n 0 => return TransformOutcome::transformed(expr::BitvecConst::all_set(bitor.bitvec_ty)),\n\n 1 => return TransformOutcome::transformed(bitor.into_children().next().unwrap()),\n\n _ => ()\n\n }\n\n }\n\n // If there exist at least two constant child expressions within this bit-or expression\n\n // we can evaluate their bit-or result and replace the constant child expressions with it.\n\n if bitor.children().filter(|c| c.get_if_bitvec_const().is_some()).count() >= 2 {\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 74, "score": 107945.70804535337 }, { "content": "fn simplify_bitand(bitand: expr::BitAnd) -> TransformOutcome {\n\n // If there exist a const zero child expression the entire bit-and is zero.\n\n if bitand.children().filter_map(AnyExpr::get_if_bitvec_const).filter(|c| c.is_zero()).count() > 0 {\n\n return TransformOutcome::transformed(expr::BitvecConst::zero(bitand.bitvec_ty))\n\n }\n\n // We need to mutate bitand perhaps.\n\n let mut bitand = bitand;\n\n // Remove all const bitvector child expressions that have all their bits set from this bit-and\n\n // as they are the bit-and neutral element and have no effect besides wasting memory.\n\n if bitand.children().filter_map(AnyExpr::get_if_bitvec_const).filter(|c| c.is_all_set()).count() > 0 {\n\n bitand.retain_children(|c| c.get_if_bitvec_const().map_or(true, |c| !c.is_all_set()));\n\n match bitand.arity() {\n\n 0 => return TransformOutcome::transformed(expr::BitvecConst::all_set(bitand.bitvec_ty)),\n\n 1 => return 
TransformOutcome::transformed(bitand.into_children().next().unwrap()),\n\n _ => ()\n\n }\n\n }\n\n // If there exist at least two constant child expressions within this bit-and expression\n\n // we can evaluate their bit-and result and replace the constant child expressions with it.\n\n if bitand.children().filter(|c| c.get_if_bitvec_const().is_some()).count() >= 2 {\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 75, "score": 107945.70804535337 }, { "content": "fn simplify_mul(mul: expr::Mul) -> TransformOutcome {\n\n // If there exist a const zero child expression the entire multiplication is zero.\n\n if mul.children().filter_map(AnyExpr::get_if_bitvec_const).filter(|c| c.is_zero()).count() > 0 {\n\n return TransformOutcome::transformed(expr::BitvecConst::zero(mul.bitvec_ty))\n\n }\n\n // We need to mutate mul perhaps.\n\n let mut mul = mul;\n\n // Remove all ones from this mul as they are the multiplicative neutral element and have\n\n // no effect besides wasting memory.\n\n if mul.children().filter_map(AnyExpr::get_if_bitvec_const).filter(|c| c.is_one()).count() > 0 {\n\n mul.retain_children(|c| c.get_if_bitvec_const().map_or(true, |c| !c.is_one()));\n\n match mul.arity() {\n\n 0 => return TransformOutcome::transformed(expr::BitvecConst::one(mul.bitvec_ty)),\n\n 1 => return TransformOutcome::transformed(mul.into_children().next().unwrap()),\n\n _ => ()\n\n }\n\n }\n\n // If there exist at least two constant child expressions within this and expression\n\n // we can evaluate their product and replace the constant child expressions with it.\n\n if mul.children().filter(|c| c.get_if_bitvec_const().is_some()).count() >= 2 {\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 76, "score": 107945.70804535337 }, { "content": "fn simplify_concat(concat: expr::Concat) -> TransformOutcome {\n\n // If the left-hand side is constant zero we can transform this concatenation expression\n\n // into an equisatisfiable 
zero-extend expression with the same target bit width.\n\n if let Some(c) = concat.children.lhs.get_if_bitvec_const() {\n\n if c.is_zero() {\n\n return TransformOutcome::transformed(\n\n expr::ZeroExtend::new(concat.bitvec_ty.width(), concat.children.rhs).unwrap())\n\n }\n\n }\n\n // If both child expressions are constant bitvectors we can simply evaluate the result.\n\n if let box BinaryChildren{ lhs: AnyExpr::BitvecConst(lhs), rhs: AnyExpr::BitvecConst(rhs) } = concat.children {\n\n return TransformOutcome::transformed(\n\n expr::BitvecConst::from(lhs.val.concat(&rhs.val)))\n\n }\n\n TransformOutcome::identity(concat)\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 77, "score": 107945.70804535337 }, { "content": "/// Returns the SMTLib 2.5 name of the associated expression.\n\n/// \n\n/// # Note\n\n/// \n\n/// Some expressions such as boolean constants, bitvector constants and symbols\n\n/// do not have a specified SMTLib 2.5 name and will have a well-fitting replacement\n\n/// string returned.\n\npub fn smtlib2_name<K>(kinded: &K) -> &'static str\n\nwhere\n\n K: HasKind\n\n{\n\n use crate::ExprKind::*;\n\n match kinded.kind() {\n\n Symbol => \"symbol\",\n\n BoolConst => \"boolconst\",\n\n BitvecConst => \"bvconst\",\n\n IfThenElse => \"cond\",\n\n BoolEquals => \"bveq\",\n\n Not => \"not\",\n\n And => \"and\",\n\n Or => \"or\",\n\n Implies => \"=>\",\n\n Xor => \"xor\",\n\n BitvecEquals => \"bveq\",\n\n Neg => \"bvneg\",\n\n Add => \"bvadd\",\n\n Mul => \"bvmul\",\n", "file_path": "ast/src/writer.rs", "rank": 78, "score": 106556.81637515208 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\nenum DefaultExprBuilderState<'s> {\n\n Uninitialized,\n\n Atom(Atom<'s>),\n\n SExpr(SExprBuilder<'s>)\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct DefaultExprBuilder<'s> {\n\n state: DefaultExprBuilderState<'s>\n\n}\n\n\n\nimpl<'s> DefaultExprBuilder<'s> {\n\n fn state_mut(&mut self) -> &mut 
DefaultExprBuilderState<'s> {\n\n &mut self.state\n\n }\n\n\n\n fn into_state(self) -> DefaultExprBuilderState<'s> {\n\n self.state\n\n }\n\n}\n", "file_path": "parser/src/parser/repr.rs", "rank": 79, "score": 106489.92544653555 }, { "content": "/// Deflattens n-ary multiplications with exactly one constant value into their\n\n/// constant part and the remaining elements.\n\n/// \n\n/// This structural invariant is used in like-term detection and merging to merge n-ary multiplications.\n\n/// \n\n/// The resulting multiplication (and its child elements) may be unnormalized after\n\n/// this operation. An additional normalization step is important if following operations\n\n/// depend on that structural invariant.\n\nfn separate_const_from_mul(mul: expr::Mul) -> TransformOutcome {\n\n // Nothing to do for binary multiplications.\n\n //\n\n // Invariant: Arities lower than 2 are invalid for multiplication.\n\n if mul.arity() == 2 {\n\n return TransformOutcome::identity(mul)\n\n }\n\n // Nothing to do for multiplications that do not have exactly a single constant value.\n\n if mul.children().filter(|c| c.kind() == ExprKind::BitvecConst).count() != 1 {\n\n return TransformOutcome::identity(mul)\n\n }\n\n // Pop the only constant value from the mul and create a wrapping binary\n\n // multiplication for the popped constant and the remaining multiplication.\n\n let mut mul = mul;\n\n let const_val = mul.children.swap_remove(\n\n mul.children().position(|c| c.kind() == ExprKind::BitvecConst).unwrap());\n\n let result = expr::Mul::binary(const_val, mul).unwrap();\n\n TransformOutcome::transformed(result)\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/like_term_joiner.rs", "rank": 80, "score": 106267.37540050098 }, { "content": "fn simplify_slt(slt: expr::SignedLessThan) -> TransformOutcome {\n\n // If both child expressions are constant we can compute the result.\n\n if let box BinaryChildren{ lhs: AnyExpr::BitvecConst(lhs), rhs: AnyExpr::BitvecConst(rhs) } = 
slt.children {\n\n return TransformOutcome::transformed(expr::BoolConst::from(\n\n lhs.val.bvslt(&rhs.val).unwrap()\n\n ))\n\n }\n\n TransformOutcome::identity(slt)\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 81, "score": 106263.8495426536 }, { "content": "fn simplify_sdiv(sdiv: expr::SignedDiv) -> TransformOutcome {\n\n transform_div_impl!(sdiv, bvsdiv)\n\n}\n\n\n\nmacro_rules! transform_rem_impl {\n\n ($varname:ident, $into_checked:ident) => {{\n\n // If both child expressions are constant bitvectors we can evaluate the remainder\n\n // and replace this remainder expression by the result.\n\n if let box BinaryChildren{ lhs: AnyExpr::BitvecConst(lhs), rhs: AnyExpr::BitvecConst(rhs) } = $varname.children {\n\n let result = lhs.val.$into_checked(&rhs.val).unwrap();\n\n return TransformOutcome::transformed(expr::BitvecConst::from(result))\n\n }\n\n if let Some(rhs) = $varname.children.rhs.get_if_bitvec_const() {\n\n // Encountered a division (remainder) by zero. Stevia simply returns the left-hand side in this case.\n\n if rhs.is_zero() {\n\n warn!(\"Encountered a division (remainder) by zero with: {:?}. 
\\\n\n Stevia simply returns the left-hand side in this case.\", $varname);\n\n return TransformOutcome::transformed($varname.children.lhs)\n\n }\n\n // Remainder of one can be replaced by constant zero.\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 82, "score": 106263.8495426536 }, { "content": "fn simplify_urem(urem: expr::UnsignedRemainder) -> TransformOutcome {\n\n transform_rem_impl!(urem, bvurem)\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 83, "score": 106263.8495426536 }, { "content": "fn simplify_bitxor(bitxor: expr::BitXor) -> TransformOutcome {\n\n // If both child expressions are constant bitvectors we can simply evaluate the result.\n\n if let box BinaryChildren{ lhs: AnyExpr::BitvecConst(lhs), rhs: AnyExpr::BitvecConst(rhs) } = bitxor.children {\n\n return TransformOutcome::transformed(expr::BitvecConst::from(\n\n lhs.val.bvxor(&rhs.val).unwrap()\n\n ))\n\n }\n\n if let Some(lval) = bitxor.children.lhs.get_if_bitvec_const() {\n\n // If the left-hand side is constant zero we can simplify this bit-xor\n\n // to the right-hand side.\n\n if lval.is_zero() {\n\n return TransformOutcome::transformed(bitxor.children.rhs)\n\n }\n\n // If the left-hand side is constant all-set we can simplify this bit-xor\n\n // to the negated right-hand side.\n\n if lval.is_all_set() {\n\n let negated_rhs = expr::BitNot::new(bitxor.children.rhs).unwrap();\n\n return TransformOutcome::transformed(negated_rhs)\n\n }\n\n }\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 84, "score": 106263.8495426536 }, { "content": "fn simplify_shl(shl: expr::ShiftLeft) -> TransformOutcome {\n\n // If the left-hand side is constant zero the entire shift-left evaluates to zero.\n\n if let Some(lval) = shl.children.lhs.get_if_bitvec_const() {\n\n if lval.is_zero() {\n\n return TransformOutcome::transformed(expr::BitvecConst::zero(lval.width().into()))\n\n }\n\n }\n\n // If the right-hand side 
is constant zero the entire shift-left evaluates to the left-hand side.\n\n if let Some(rval) = shl.children.rhs.get_if_bitvec_const() {\n\n let width = shl.bitvec_ty.width();\n\n if rval.is_zero() {\n\n return TransformOutcome::transformed(shl.children.lhs)\n\n }\n\n match rval.to_u32() {\n\n Err(_) => {\n\n warn!(\"Encountered right-hand side left-shift shift-amount that is larger than 2^32 in: {:?} \\n\\\n\n Stevia handles this by returning constant zero.\", shl);\n\n return TransformOutcome::transformed(expr::BitvecConst::zero(shl.bitvec_ty))\n\n }\n\n Ok(val) => {\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 85, "score": 106263.8495426536 }, { "content": "fn simplify_zext(zext: expr::ZeroExtend) -> TransformOutcome {\n\n // If the target bitwidth is equal to the current bitwidth we can simplify this to its child.\n\n if zext.ty() == zext.src.ty() {\n\n return TransformOutcome::transformed(*zext.src)\n\n }\n\n // If child expression is constant we can compute the result.\n\n let target_width = zext.bitvec_ty.width();\n\n if let box AnyExpr::BitvecConst(child) = zext.src {\n\n return TransformOutcome::transformed(expr::BitvecConst::from(\n\n child.val.sign_extend(target_width).unwrap()\n\n ))\n\n }\n\n TransformOutcome::identity(zext)\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 86, "score": 106263.8495426536 }, { "content": "fn simplify_udiv(udiv: expr::UnsignedDiv) -> TransformOutcome {\n\n transform_div_impl!(udiv, bvudiv)\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 87, "score": 106263.8495426536 }, { "content": "fn simplify_srem(srem: expr::SignedRemainder) -> TransformOutcome {\n\n transform_rem_impl!(srem, bvsrem)\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 88, "score": 106263.8495426536 }, { "content": "fn simplify_sext(sext: expr::SignExtend) -> TransformOutcome {\n\n // If the target bitwidth is 
equal to the current bitwidth we can simplify this to its child.\n\n if sext.ty() == sext.src.ty() {\n\n return TransformOutcome::transformed(*sext.src)\n\n }\n\n // If child expression is constant we can compute the result.\n\n let target_width = sext.bitvec_ty.width();\n\n if let box AnyExpr::BitvecConst(child) = sext.src {\n\n return TransformOutcome::transformed(expr::BitvecConst::from(\n\n child.val.zero_extend(target_width).unwrap()\n\n ))\n\n }\n\n TransformOutcome::identity(sext)\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 89, "score": 106263.8495426536 }, { "content": "fn simplify_ult(ult: expr::UnsignedLessThan) -> TransformOutcome {\n\n // If both child expressions are constant we can compute the result.\n\n if let box BinaryChildren{ lhs: AnyExpr::BitvecConst(lhs), rhs: AnyExpr::BitvecConst(rhs) } = ult.children {\n\n return TransformOutcome::transformed(expr::BoolConst::from(\n\n lhs.val.bvult(&rhs.val).unwrap()\n\n ))\n\n }\n\n TransformOutcome::identity(ult)\n\n}\n\n\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 90, "score": 106263.8495426536 }, { "content": "/// Utility trait to be implemented by `AnyExpr` and `ExprResult<AnyExpr>`\n\n/// in order to allow for functions generically taking both types as input.\n\n/// \n\n/// This allows better ergonomics for users of AST factories when creating\n\n/// new expression trees.\n\n/// \n\n/// # Note\n\n/// \n\n/// This feature is explicitely not realized by generically implementing\n\n/// corresponding impls for `From` and `Into` because this system tries to\n\n/// be encapsulated and does not want to spam conversions between types\n\n/// that are not necesary outside of this frame.\n\npub trait IntoAnyExprOrError {\n\n /// Converts `self` into the apropriate `ExprResult<AnyExpr>`.\n\n fn into_any_expr_or_error(self) -> ExprResult<AnyExpr>;\n\n}\n\n\n\nimpl IntoAnyExprOrError for AnyExpr {\n\n /// Wraps `self` into 
`Result<Self>`.\n\n fn into_any_expr_or_error(self) -> ExprResult<AnyExpr> {\n\n Ok(self)\n\n }\n\n}\n\n\n\nimpl IntoAnyExprOrError for ExprResult<AnyExpr> {\n\n /// Simply forwards `self` as is.\n\n fn into_any_expr_or_error(self) -> ExprResult<AnyExpr> {\n\n self\n\n }\n\n}\n\n\n", "file_path": "ast/src/factory/builder.rs", "rank": 91, "score": 106193.69031160753 }, { "content": "/// Checks if there are like-terms that can and should be combined.\n\n/// \n\n/// Compared to the real merging of like-terms this check is very light-weight\n\n/// and thus should be performed to prevent unneeded expensive computations.\n\nfn has_like_terms<'a>(add: &'a expr::Add) -> bool {\n\n let mut seen_symbols: HashSet<&'a AnyExpr> = HashSet::new();\n\n let mut mutated = false;\n\n let mut update_seen = |expr: &'a AnyExpr| {\n\n if seen_symbols.contains(expr) {\n\n mutated = true;\n\n } else {\n\n seen_symbols.insert(expr);\n\n }\n\n };\n\n for child in add.children() {\n\n match child {\n\n AnyExpr::Neg(neg) => update_seen(neg.single_child()),\n\n // TODO: Extend the entire algorithm to work for n-ary multiplication.\n\n // For example: `(+ (* 2 x y) (* 4 x y))` is currently not allowed while\n\n // it could be theoretically simplified to `(* 6 x y)`.\n\n AnyExpr::Mul(mul) if (mul.arity() == 2)\n\n && (mul.children().filter(|c| c.kind() == ExprKind::BitvecConst).count() == 1) => {\n\n let mut mul_children = mul.children();\n\n let lhs = mul_children.next().unwrap();\n", "file_path": "simplifier/src/simplifications/like_term_joiner.rs", "rank": 92, "score": 104748.08759865758 }, { "content": "fn simplify_ashr(ashr: expr::ArithmeticShiftRight) -> TransformOutcome {\n\n if let Some(lval) = ashr.children.lhs.get_if_bitvec_const() {\n\n // If the left-hand side is constant zero the entire shift-right evaluates to zero.\n\n if lval.is_zero() {\n\n return TransformOutcome::transformed(expr::BitvecConst::zero(lval.width().into()))\n\n }\n\n // If the left-hand side is constant 
all-bits-set the entire arithmetical shift-right\n\n // always evaluates to zero.\n\n if lval.is_all_set() {\n\n return TransformOutcome::transformed(expr::BitvecConst::all_set(lval.width().into()))\n\n }\n\n }\n\n if let Some(rval) = ashr.children.rhs.get_if_bitvec_const() {\n\n let width = ashr.bitvec_ty.width();\n\n // If the right-hand side is constant zero the entire arithmetical shift-right evaluates to the left-hand side.\n\n if rval.is_zero() {\n\n return TransformOutcome::transformed(ashr.children.lhs)\n\n }\n\n // If the right-hand side represents an invalid shift amount the result is zero.\n\n match rval.to_u32().map(|v| v as usize) {\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 93, "score": 104663.21802370173 }, { "content": "fn simplify_bitvec_equals(equals: expr::BitvecEquals) -> TransformOutcome {\n\n // If there exist at least two constant child expressions we can compare\n\n // them in order to either merge them or simplify the entire equality to false.\n\n if equals.children().filter(|c| c.get_if_bitvec_const().is_some()).count() >= 2 {\n\n // Split const and non-const child expressions.\n\n let (mut consts, mut rest): (Vec<_>, Vec<_>) = equals.into_children().partition_map(|c| {\n\n match c {\n\n AnyExpr::BitvecConst(c) => Either::Left(c.val),\n\n other => Either::Right(other)\n\n }\n\n });\n\n assert!(!consts.is_empty());\n\n // Do not remove duplicates since this is already done by normalization.\n\n // Just look for unequal constant elements.\n\n if consts.iter().all_equal() {\n\n // We can simply put one of the constant elements back into the rest\n\n // and return it if the rest is not empty.\n\n if !rest.is_empty() {\n\n rest.push(consts.pop().map(expr::BitvecConst::from).map(AnyExpr::from).unwrap());\n\n return TransformOutcome::transformed(expr::BitvecEquals::nary(rest).unwrap())\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 94, "score": 104663.21802370173 }, { "content": 
"fn simplify_lshr(lshr: expr::LogicalShiftRight) -> TransformOutcome {\n\n // If the left-hand side is constant zero the entire shift-right evaluates to zero.\n\n if let Some(lval) = lshr.children.lhs.get_if_bitvec_const() {\n\n if lval.is_zero() {\n\n return TransformOutcome::transformed(expr::BitvecConst::zero(lval.width().into()))\n\n }\n\n }\n\n if let Some(rval) = lshr.children.rhs.get_if_bitvec_const() {\n\n let width = lshr.bitvec_ty.width();\n\n // If the right-hand side is constant zero the entire shift-left evaluates to the left-hand side.\n\n if rval.is_zero() {\n\n return TransformOutcome::transformed(lshr.children.lhs)\n\n }\n\n // If the right-hand side represents an invalid shift amount the result is zero.\n\n match rval.to_u32() {\n\n Err(_) => {\n\n warn!(\"Encountered right-hand side logical right-shift shift-amount that is larger than 2^32 in: {:?} \\n\\\n\n Stevia handles this by returning constant zero.\", lshr);\n\n return TransformOutcome::transformed(expr::BitvecConst::zero(lshr.bitvec_ty))\n\n }\n", "file_path": "simplifier/src/simplifications/term_const_prop.rs", "rank": 95, "score": 104663.21802370173 }, { "content": "pub trait ExprBuilder<'s> {\n\n type Expr;\n\n\n\n /// Introduces the given atom for the current scope.\n\n fn atom(&mut self, atom: Atom<'s>) -> BuildResult<()>;\n\n /// Opens a new S-expression scope.\n\n fn open_sexpr(&mut self) -> BuildResult<()>;\n\n /// Closes the latest opened S-expression scope.\n\n fn close_sexpr(&mut self) -> BuildResult<()>;\n\n /// Consumes the expression builder and returns the built expression.\n\n fn finalize(self) -> BuildResult<Self::Expr>;\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct SExprBuilder<'s> {\n\n s_exprs: Vec<SExpr<'s>>,\n\n}\n\n\n\nimpl<'s> Default for SExprBuilder<'s> {\n\n fn default() -> Self {\n", "file_path": "parser/src/parser/repr.rs", "rank": 96, "score": 104035.84245341764 }, { "content": "/// Marker trait to mark boolean expressions.\n\n/// 
\n\n/// This automatically implements methods on them that are safe for boolean expressions.\n\npub trait BoolExpr: WrapWithNot {}\n\n\n", "file_path": "ast/src/traits.rs", "rank": 97, "score": 104035.84245341764 }, { "content": "/// An expression tree factory.\n\n/// \n\n/// This is used to more easily create expression trees with less\n\n/// error handling and less boilerplate code.\n\n/// \n\n/// The realization via trait allows for different concrete expression\n\n/// tree factories, such as\n\n/// \n\n/// - A `NaiveExprTreeFactory` that simply creates all input expression trees.\n\n/// This is useful for debugging and shorter running SMT instances where\n\n/// simplifications are not required.\n\n/// - A slightly more advanced `SimplifyingExprTreeFactory` that immediately\n\n/// simplifies its given expression trees for better performance in longer\n\n/// running SMT instances.\n\npub trait ExprTreeFactory {\n\n /// Creates a new if-then-else expression with the given condition expression\n\n /// then case expression and else case expression.\n\n fn cond(&self, cond: AnyExpr, then_case: AnyExpr, else_case: AnyExpr) -> ExprResult<AnyExpr>;\n\n\n\n /// Creates a new boolean variable expression with the given name.\n\n fn bool_var<S>(&self, name: S) -> ExprResult<AnyExpr>\n\n where S: Into<String> + AsRef<str>;\n\n\n\n /// Creates a new bitvector variable expression with the given name and type.\n\n fn bitvec_var<S>(&self, ty: BitvecTy, name: S) -> ExprResult<AnyExpr>\n\n where S: Into<String> + AsRef<str>;\n\n\n\n /// Creates a new array variable expression with the given name and type.\n\n fn array_var<S>(&self, ty: ArrayTy, name: S) -> ExprResult<AnyExpr>\n\n where S: Into<String> + AsRef<str>;\n\n\n\n /// Creates a new constant boolean expression with the given value.\n\n fn bool_const(&self, val: bool) -> ExprResult<AnyExpr>;\n\n /// Create a new binary and expression with the given child expressions.\n", "file_path": "ast/src/factory/builder.rs", "rank": 
98, "score": 103852.59498895907 }, { "content": "fn array_read_ite_lifting(read: expr::ArrayRead) -> TransformOutcome {\n\n if let box ArrayReadChildren{ index, array: AnyExpr::IfThenElse(ite) } = read.children {\n\n let (cond, then_case, else_case) = ite.into_children_tuple();\n\n return TransformOutcome::transformed(\n\n expr::IfThenElse::new(\n\n cond,\n\n expr::ArrayRead::new(\n\n then_case,\n\n // FIXME: Depending on the depth of the sub-tree behind `index`\n\n // `index.clone()` might be a very expensive operation and\n\n // should be avoided in the general case.\n\n index.clone()\n\n ).unwrap(),\n\n expr::ArrayRead::new(\n\n else_case,\n\n index\n\n ).unwrap(),\n\n ).unwrap()\n\n )\n\n }\n", "file_path": "simplifier/src/simplifications/read_ite_lifting.rs", "rank": 99, "score": 103138.06783088227 } ]
Rust
tokio/src/runtime/task/core.rs
akshay-deepsource/tokio
7ad0461cc1ea9a68804617a785c3bcc4f188cdab
use crate::future::Future; use crate::loom::cell::UnsafeCell; use crate::runtime::task::raw::{self, Vtable}; use crate::runtime::task::state::State; use crate::runtime::task::Schedule; use crate::util::linked_list; use std::pin::Pin; use std::ptr::NonNull; use std::task::{Context, Poll, Waker}; #[repr(C)] pub(super) struct Cell<T: Future, S> { pub(super) header: Header, pub(super) core: Core<T, S>, pub(super) trailer: Trailer, } pub(super) struct CoreStage<T: Future> { stage: UnsafeCell<Stage<T>>, } pub(super) struct Core<T: Future, S> { pub(super) scheduler: S, pub(super) stage: CoreStage<T>, } #[repr(C)] pub(crate) struct Header { pub(super) state: State, pub(super) owned: UnsafeCell<linked_list::Pointers<Header>>, pub(super) queue_next: UnsafeCell<Option<NonNull<Header>>>, pub(super) vtable: &'static Vtable, pub(super) owner_id: UnsafeCell<u64>, #[cfg(all(tokio_unstable, feature = "tracing"))] pub(super) id: Option<tracing::Id>, } unsafe impl Send for Header {} unsafe impl Sync for Header {} pub(super) struct Trailer { pub(super) waker: UnsafeCell<Option<Waker>>, } pub(super) enum Stage<T: Future> { Running(T), Finished(super::Result<T::Output>), Consumed, } impl<T: Future, S: Schedule> Cell<T, S> { pub(super) fn new(future: T, scheduler: S, state: State) -> Box<Cell<T, S>> { #[cfg(all(tokio_unstable, feature = "tracing"))] let id = future.id(); Box::new(Cell { header: Header { state, owned: UnsafeCell::new(linked_list::Pointers::new()), queue_next: UnsafeCell::new(None), vtable: raw::vtable::<T, S>(), owner_id: UnsafeCell::new(0), #[cfg(all(tokio_unstable, feature = "tracing"))] id, }, core: Core { scheduler, stage: CoreStage { stage: UnsafeCell::new(Stage::Running(future)), }, }, trailer: Trailer { waker: UnsafeCell::new(None), }, }) } } impl<T: Future> CoreStage<T> { pub(super) fn with_mut<R>(&self, f: impl FnOnce(*mut Stage<T>) -> R) -> R { self.stage.with_mut(f) } pub(super) fn poll(&self, mut cx: Context<'_>) -> Poll<T::Output> { let res = { 
self.stage.with_mut(|ptr| { let future = match unsafe { &mut *ptr } { Stage::Running(future) => future, _ => unreachable!("unexpected stage"), }; let future = unsafe { Pin::new_unchecked(future) }; future.poll(&mut cx) }) }; if res.is_ready() { self.drop_future_or_output(); } res } pub(super) fn drop_future_or_output(&self) { unsafe { self.set_stage(Stage::Consumed); } } pub(super) fn store_output(&self, output: super::Result<T::Output>) { unsafe { self.set_stage(Stage::Finished(output)); } } pub(super) fn take_output(&self) -> super::Result<T::Output> { use std::mem; self.stage.with_mut(|ptr| { match mem::replace(unsafe { &mut *ptr }, Stage::Consumed) { Stage::Finished(output) => output, _ => panic!("JoinHandle polled after completion"), } }) } unsafe fn set_stage(&self, stage: Stage<T>) { self.stage.with_mut(|ptr| *ptr = stage) } } cfg_rt_multi_thread! { impl Header { pub(super) unsafe fn set_next(&self, next: Option<NonNull<Header>>) { self.queue_next.with_mut(|ptr| *ptr = next); } } } impl Header { pub(super) unsafe fn set_owner_id(&self, owner: u64) { self.owner_id.with_mut(|ptr| *ptr = owner); } pub(super) fn get_owner_id(&self) -> u64 { unsafe { self.owner_id.with(|ptr| *ptr) } } } impl Trailer { pub(super) unsafe fn set_waker(&self, waker: Option<Waker>) { self.waker.with_mut(|ptr| { *ptr = waker; }); } pub(super) unsafe fn will_wake(&self, waker: &Waker) -> bool { self.waker .with(|ptr| (*ptr).as_ref().unwrap().will_wake(waker)) } pub(super) fn wake_join(&self) { self.waker.with(|ptr| match unsafe { &*ptr } { Some(waker) => waker.wake_by_ref(), None => panic!("waker missing"), }); } } #[test] #[cfg(not(loom))] fn header_lte_cache_line() { use std::mem::size_of; assert!(size_of::<Header>() <= 8 * size_of::<*const ()>()); }
use crate::future::Future; use crate::loom::cell::UnsafeCell; use crate::runtime::task::raw::{self, Vtable}; use crate::runtime::task::state::State; use crate::runtime::task::Schedule; use crate::util::linked_list; use std::pin::Pin; use std::ptr::NonNull; use std::task::{Context, Poll, Waker}; #[repr(C)] pub(super) struct Cell<T: Future, S> { pub(super) header: Header, pub(super) core: Core<T, S>, pub(super) trailer: Trailer, } pub(super) struct CoreStage<T: Future> { stage: UnsafeCell<Stage<T>>, } pub(super) struct Core<T: Future, S> { pub(super) scheduler: S, pub(super) stage: CoreStage<T>, } #[repr(C)] pub(crate) struct Header { pub(super) state: State, pub(super) owned: UnsafeCell<linked_list::Pointers<Header>>, pub(super) queue_next: UnsafeCell<Option<NonNull<Header>>>, pub(super) vtable: &'static Vtable, pub(super) owner_id: UnsafeCell<u64>, #[cfg(all(tokio_unstable, feature = "tracing"))] pub(super) id: Option<tracing::Id>, } unsafe impl Send for Header {} unsafe impl Sync for Header {} pub(super) struct Trailer { pub(super) waker: UnsafeCell<Option<Waker>>, } pub(super) enum Stage<T: Future> { Running(T), Finished(super::Result<T::Output>), Consumed, } impl<T: Future, S: Schedule> Cell<T, S> { pub(super) fn new(future: T, scheduler: S, state: State) -> Box<Cell<T, S>> { #[cfg(all(tokio_unstable, feature = "tracing"))] let id = future.id(); Box::new(Cell { header: Header { state, owned: UnsafeCell::new(linked_list::Pointers::new()), queue_next: UnsafeCell::new(None), vtable: raw::vtable::<T, S>(), owner_id: UnsafeCell::new(0), #[cfg(all(tokio_unstable, feature = "tracing"))] id, }, core: Core { scheduler, stage: CoreStage { stage: UnsafeCell::new(Stage::Running(future)), }, }, trailer: Trailer { waker: UnsafeCell::new(None), }, }) } } impl<T: Future> CoreStage<T> { pub(super) fn with_mut<R>(&self, f: impl FnOnce(*mut Stage<T>) -> R) -> R { self.stage.with_mut(f) } pub(super) fn poll(&self, mut cx: Context<'_>) -> Poll<T::Output> { let res = { 
self.stage.with_mut(|ptr| { let future = match unsafe { &mut *ptr } { Stage::Running(future) => future, _ => unreachable!("unexpected stage"), }; let future = unsafe { Pin::new_unchecked(future) }; future.poll(&mut cx) }) }; if res.is_ready() { self.drop_future_or_output(); } res } pub(super) fn drop_future_or_output(&self) { unsafe { self.set_stage(Stage::Consumed); } } pub(super) fn store_output(&self, output: super::Result<T::Output>) { unsafe { self.set_stage(Stage::Finished(output)); } }
unsafe fn set_stage(&self, stage: Stage<T>) { self.stage.with_mut(|ptr| *ptr = stage) } } cfg_rt_multi_thread! { impl Header { pub(super) unsafe fn set_next(&self, next: Option<NonNull<Header>>) { self.queue_next.with_mut(|ptr| *ptr = next); } } } impl Header { pub(super) unsafe fn set_owner_id(&self, owner: u64) { self.owner_id.with_mut(|ptr| *ptr = owner); } pub(super) fn get_owner_id(&self) -> u64 { unsafe { self.owner_id.with(|ptr| *ptr) } } } impl Trailer { pub(super) unsafe fn set_waker(&self, waker: Option<Waker>) { self.waker.with_mut(|ptr| { *ptr = waker; }); } pub(super) unsafe fn will_wake(&self, waker: &Waker) -> bool { self.waker .with(|ptr| (*ptr).as_ref().unwrap().will_wake(waker)) } pub(super) fn wake_join(&self) { self.waker.with(|ptr| match unsafe { &*ptr } { Some(waker) => waker.wake_by_ref(), None => panic!("waker missing"), }); } } #[test] #[cfg(not(loom))] fn header_lte_cache_line() { use std::mem::size_of; assert!(size_of::<Header>() <= 8 * size_of::<*const ()>()); }
pub(super) fn take_output(&self) -> super::Result<T::Output> { use std::mem; self.stage.with_mut(|ptr| { match mem::replace(unsafe { &mut *ptr }, Stage::Consumed) { Stage::Finished(output) => output, _ => panic!("JoinHandle polled after completion"), } }) }
function_block-full_function
[ { "content": "fn model(f: impl Fn() + Send + Sync + 'static) {\n\n #[cfg(loom)]\n\n loom::model(f);\n\n\n\n #[cfg(not(loom))]\n\n f();\n\n}\n\n\n", "file_path": "tokio/src/time/driver/tests/mod.rs", "rank": 0, "score": 405710.77752691077 }, { "content": "/// Poll the future. If the future completes, the output is written to the\n\n/// stage field.\n\nfn poll_future<T: Future>(core: &CoreStage<T>, cx: Context<'_>) -> Poll<()> {\n\n // Poll the future.\n\n let output = panic::catch_unwind(panic::AssertUnwindSafe(|| {\n\n struct Guard<'a, T: Future> {\n\n core: &'a CoreStage<T>,\n\n }\n\n impl<'a, T: Future> Drop for Guard<'a, T> {\n\n fn drop(&mut self) {\n\n // If the future panics on poll, we drop it inside the panic\n\n // guard.\n\n self.core.drop_future_or_output();\n\n }\n\n }\n\n let guard = Guard { core };\n\n let res = guard.core.poll(cx);\n\n mem::forget(guard);\n\n res\n\n }));\n\n\n\n // Prepare output for being placed in the core stage.\n", "file_path": "tokio/src/runtime/task/harness.rs", "rank": 1, "score": 400882.15271520114 }, { "content": "fn can_read_output(header: &Header, trailer: &Trailer, waker: &Waker) -> bool {\n\n // Load a snapshot of the current task state\n\n let snapshot = header.state.load();\n\n\n\n debug_assert!(snapshot.is_join_interested());\n\n\n\n if !snapshot.is_complete() {\n\n // The waker must be stored in the task struct.\n\n let res = if snapshot.has_join_waker() {\n\n // There already is a waker stored in the struct. 
If it matches\n\n // the provided waker, then there is no further work to do.\n\n // Otherwise, the waker must be swapped.\n\n let will_wake = unsafe {\n\n // Safety: when `JOIN_INTEREST` is set, only `JOIN_HANDLE`\n\n // may mutate the `waker` field.\n\n trailer.will_wake(waker)\n\n };\n\n\n\n if will_wake {\n\n // The task is not complete **and** the waker is up to date,\n", "file_path": "tokio/src/runtime/task/harness.rs", "rank": 2, "score": 385950.2297096801 }, { "content": "/// Enter the scheduler context. This sets the queue and other necessary\n\n/// scheduler state in the thread-local\n\nfn enter<F, R, P>(scheduler: &mut Inner<P>, f: F) -> R\n\nwhere\n\n F: FnOnce(&mut Inner<P>, &Context) -> R,\n\n P: Park,\n\n{\n\n // Ensures the run queue is placed back in the `BasicScheduler` instance\n\n // once `block_on` returns.`\n\n struct Guard<'a, P: Park> {\n\n context: Option<Context>,\n\n scheduler: &'a mut Inner<P>,\n\n }\n\n\n\n impl<P: Park> Drop for Guard<'_, P> {\n\n fn drop(&mut self) {\n\n let Context { tasks, .. 
} = self.context.take().expect(\"context missing\");\n\n self.scheduler.tasks = Some(tasks.into_inner());\n\n }\n\n }\n\n\n\n // Remove `tasks` from `self` and place it in a `Context`.\n", "file_path": "tokio/src/runtime/basic_scheduler.rs", "rank": 3, "score": 365811.1802066675 }, { "content": "fn gated() -> impl Future<Output = &'static str> {\n\n gated2(false)\n\n}\n\n\n", "file_path": "tokio/src/runtime/tests/loom_pool.rs", "rank": 4, "score": 334531.214881539 }, { "content": "/// Runs a test function in a separate thread, and panics if the test does not\n\n/// complete within the specified timeout, or if the test function panics.\n\n///\n\n/// This is intended for running tests whose failure mode is a hang or infinite\n\n/// loop that cannot be detected otherwise.\n\nfn with_timeout(timeout: Duration, f: impl FnOnce() + Send + 'static) {\n\n use std::sync::mpsc::RecvTimeoutError;\n\n\n\n let (done_tx, done_rx) = std::sync::mpsc::channel();\n\n let thread = std::thread::spawn(move || {\n\n f();\n\n\n\n // Send a message on the channel so that the test thread can\n\n // determine if we have entered an infinite loop:\n\n done_tx.send(()).unwrap();\n\n });\n\n\n\n // Since the failure mode of this test is an infinite loop, rather than\n\n // something we can easily make assertions about, we'll run it in a\n\n // thread. When the test thread finishes, it will send a message on a\n\n // channel to this thread. 
We'll wait for that message with a fairly\n\n // generous timeout, and if we don't receive it, we assume the test\n\n // thread has hung.\n\n //\n\n // Note that it should definitely complete in under a minute, but just\n", "file_path": "tokio/tests/task_local_set.rs", "rank": 5, "score": 317463.10731167253 }, { "content": "fn gated2(thread: bool) -> impl Future<Output = &'static str> {\n\n use loom::thread;\n\n use std::sync::Arc;\n\n\n\n let gate = Arc::new(AtomicBool::new(false));\n\n let mut fired = false;\n\n\n\n poll_fn(move |cx| {\n\n if !fired {\n\n let gate = gate.clone();\n\n let waker = cx.waker().clone();\n\n\n\n if thread {\n\n thread::spawn(move || {\n\n gate.store(true, SeqCst);\n\n waker.wake_by_ref();\n\n });\n\n } else {\n\n spawn(track(async move {\n\n gate.store(true, SeqCst);\n", "file_path": "tokio/src/runtime/tests/loom_pool.rs", "rank": 6, "score": 312051.8148397541 }, { "content": "/// Creates a new future wrapping around a function returning [`Poll`].\n\npub fn poll_fn<T, F>(f: F) -> PollFn<F>\n\nwhere\n\n F: FnMut(&mut Context<'_>) -> Poll<T>,\n\n{\n\n PollFn { f }\n\n}\n\n\n\nimpl<F> fmt::Debug for PollFn<F> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"PollFn\").finish()\n\n }\n\n}\n\n\n\nimpl<T, F> Future for PollFn<F>\n\nwhere\n\n F: FnMut(&mut Context<'_>) -> Poll<T>,\n\n{\n\n type Output = T;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<T> {\n\n (&mut self.f)(cx)\n\n }\n\n}\n", "file_path": "tokio/src/future/poll_fn.rs", "rank": 7, "score": 308644.43999302725 }, { "content": "/// Runs the provided future, blocking the current thread until the\n\n/// future completes.\n\n///\n\n/// For more information, see the documentation for\n\n/// [`tokio::runtime::Runtime::block_on`][runtime-block-on].\n\n///\n\n/// [runtime-block-on]: https://docs.rs/tokio/1.3.0/tokio/runtime/struct.Runtime.html#method.block_on\n\npub fn block_on<F: std::future::Future>(future: F) -> F::Output 
{\n\n use tokio::runtime;\n\n\n\n let rt = runtime::Builder::new_current_thread()\n\n .enable_all()\n\n .build()\n\n .unwrap();\n\n\n\n rt.block_on(future)\n\n}\n", "file_path": "tokio-test/src/lib.rs", "rank": 8, "score": 297495.1490677176 }, { "content": "fn notify_locked(waiters: &mut WaitList, state: &AtomicUsize, curr: usize) -> Option<Waker> {\n\n loop {\n\n match get_state(curr) {\n\n EMPTY | NOTIFIED => {\n\n let res = state.compare_exchange(curr, set_state(curr, NOTIFIED), SeqCst, SeqCst);\n\n\n\n match res {\n\n Ok(_) => return None,\n\n Err(actual) => {\n\n let actual_state = get_state(actual);\n\n assert!(actual_state == EMPTY || actual_state == NOTIFIED);\n\n state.store(set_state(actual, NOTIFIED), SeqCst);\n\n return None;\n\n }\n\n }\n\n }\n\n WAITING => {\n\n // At this point, it is guaranteed that the state will not\n\n // concurrently change as holding the lock is required to\n\n // transition **out** of `WAITING`.\n", "file_path": "tokio/src/sync/notify.rs", "rank": 9, "score": 287526.5729447325 }, { "content": "#[inline(always)]\n\nfn with_budget<R>(budget: Budget, f: impl FnOnce() -> R) -> R {\n\n struct ResetGuard<'a> {\n\n cell: &'a Cell<Budget>,\n\n prev: Budget,\n\n }\n\n\n\n impl<'a> Drop for ResetGuard<'a> {\n\n fn drop(&mut self) {\n\n self.cell.set(self.prev);\n\n }\n\n }\n\n\n\n CURRENT.with(move |cell| {\n\n let prev = cell.get();\n\n\n\n cell.set(budget);\n\n\n\n let _guard = ResetGuard { cell, prev };\n\n\n\n f()\n", "file_path": "tokio/src/coop.rs", "rank": 10, "score": 283811.5989219102 }, { "content": "fn block_on<T>(f: impl std::future::Future<Output = T>) -> T {\n\n #[cfg(loom)]\n\n return loom::future::block_on(f);\n\n\n\n #[cfg(not(loom))]\n\n return futures::executor::block_on(f);\n\n}\n\n\n", "file_path": "tokio/src/time/driver/tests/mod.rs", "rank": 11, "score": 270252.55498165614 }, { "content": "#[allow(dead_code)]\n\ntype BoxFutureSync<T> = std::pin::Pin<Box<dyn std::future::Future<Output = T> + Send + Sync>>;\n", 
"file_path": "tokio/tests/async_send_sync.rs", "rank": 12, "score": 264303.96913174045 }, { "content": "fn send_large(b: &mut Bencher) {\n\n let rt = rt();\n\n\n\n b.iter(|| {\n\n let (tx, mut rx) = mpsc::channel::<Large>(1000);\n\n\n\n let _ = rt.block_on(tx.send([[0; 64]; 64]));\n\n\n\n rt.block_on(rx.recv()).unwrap();\n\n });\n\n}\n\n\n", "file_path": "benches/sync_mpsc.rs", "rank": 13, "score": 260950.8630570231 }, { "content": "fn send_medium(b: &mut Bencher) {\n\n let rt = rt();\n\n\n\n b.iter(|| {\n\n let (tx, mut rx) = mpsc::channel::<Medium>(1000);\n\n\n\n let _ = rt.block_on(tx.send([0; 64]));\n\n\n\n rt.block_on(rx.recv()).unwrap();\n\n });\n\n}\n\n\n", "file_path": "benches/sync_mpsc.rs", "rank": 14, "score": 260950.8630570231 }, { "content": "struct Task(UnsafeCell<MaybeUninit<Waker>>);\n\n\n\nimpl Task {\n\n unsafe fn will_wake(&self, cx: &mut Context<'_>) -> bool {\n\n self.with_task(|w| w.will_wake(cx.waker()))\n\n }\n\n\n\n unsafe fn with_task<F, R>(&self, f: F) -> R\n\n where\n\n F: FnOnce(&Waker) -> R,\n\n {\n\n self.0.with(|ptr| {\n\n let waker: *const Waker = (&*ptr).as_ptr();\n\n f(&*waker)\n\n })\n\n }\n\n\n\n unsafe fn drop_task(&self) {\n\n self.0.with_mut(|ptr| {\n\n let ptr: *mut Waker = (&mut *ptr).as_mut_ptr();\n", "file_path": "tokio/src/sync/oneshot.rs", "rank": 15, "score": 252082.97315530106 }, { "content": "fn waker_vtable<W: Wake>() -> &'static RawWakerVTable {\n\n &RawWakerVTable::new(\n\n clone_arc_raw::<W>,\n\n wake_arc_raw::<W>,\n\n wake_by_ref_arc_raw::<W>,\n\n drop_arc_raw::<W>,\n\n )\n\n}\n\n\n\nunsafe fn inc_ref_count<T: Wake>(data: *const ()) {\n\n // Retain Arc, but don't touch refcount by wrapping in ManuallyDrop\n\n let arc = ManuallyDrop::new(Arc::<T>::from_raw(data as *const T));\n\n\n\n // Now increase refcount, but don't drop new refcount either\n\n let _arc_clone: ManuallyDrop<_> = arc.clone();\n\n}\n\n\n\nunsafe fn clone_arc_raw<T: Wake>(data: *const ()) -> RawWaker {\n\n inc_ref_count::<T>(data);\n\n 
RawWaker::new(data, waker_vtable::<T>())\n", "file_path": "tokio/src/util/wake.rs", "rank": 16, "score": 245121.05909135626 }, { "content": "#[derive(Debug)]\n\nenum State {\n\n Init(usize),\n\n Waiting,\n\n Done,\n\n}\n\n\n\nconst NOTIFY_WAITERS_SHIFT: usize = 2;\n\nconst STATE_MASK: usize = (1 << NOTIFY_WAITERS_SHIFT) - 1;\n\nconst NOTIFY_WAITERS_CALLS_MASK: usize = !STATE_MASK;\n\n\n\n/// Initial \"idle\" state\n\nconst EMPTY: usize = 0;\n\n\n\n/// One or more threads are currently waiting to be notified.\n\nconst WAITING: usize = 1;\n\n\n\n/// Pending notification\n\nconst NOTIFIED: usize = 2;\n\n\n", "file_path": "tokio/src/sync/notify.rs", "rank": 17, "score": 243763.44390440139 }, { "content": "fn assert_pending<T: std::fmt::Debug, F: Future<Output = T>>(f: F) -> std::pin::Pin<Box<F>> {\n\n let mut pinned = Box::pin(f);\n\n\n\n assert_pending!(pinned\n\n .as_mut()\n\n .poll(&mut Context::from_waker(futures::task::noop_waker_ref())));\n\n\n\n pinned\n\n}\n\n\n", "file_path": "tokio/tests/io_async_fd.rs", "rank": 18, "score": 242564.0610136975 }, { "content": "/// Cancel the task and store the appropriate error in the stage field.\n\nfn cancel_task<T: Future>(stage: &CoreStage<T>) {\n\n // Drop the future from a panic guard.\n\n let res = panic::catch_unwind(panic::AssertUnwindSafe(|| {\n\n stage.drop_future_or_output();\n\n }));\n\n\n\n match res {\n\n Ok(()) => {\n\n stage.store_output(Err(JoinError::cancelled()));\n\n }\n\n Err(panic) => {\n\n stage.store_output(Err(JoinError::panic(panic)));\n\n }\n\n }\n\n}\n\n\n", "file_path": "tokio/src/runtime/task/harness.rs", "rank": 19, "score": 241548.26601834563 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\nenum PollState {\n\n /// The task has never interacted with the [`CancellationToken`].\n\n New,\n\n /// The task was added to the wait queue at the [`CancellationToken`].\n\n Waiting,\n\n /// The task has been polled to completion.\n\n Done,\n\n}\n\n\n", "file_path": 
"tokio-util/src/sync/cancellation_token.rs", "rank": 20, "score": 237742.15675267647 }, { "content": "#[test]\n\nfn barrier_future_is_send() {\n\n let b = Barrier::new(0);\n\n IsSend(b.wait());\n\n}\n\n\n", "file_path": "tokio/tests/sync_barrier.rs", "rank": 21, "score": 235260.68467087462 }, { "content": "#[allow(dead_code)]\n\ntype BoxFutureSend<T> = std::pin::Pin<Box<dyn std::future::Future<Output = T> + Send>>;\n", "file_path": "tokio/tests/async_send_sync.rs", "rank": 22, "score": 234917.85248727814 }, { "content": "#[derive(Clone, Copy)]\n\nstruct State(usize);\n\n\n", "file_path": "tokio/src/sync/oneshot.rs", "rank": 23, "score": 231596.9591515161 }, { "content": "fn is_error<T: std::error::Error + Send + Sync>() {}\n\n\n", "file_path": "tokio/tests/sync_errors.rs", "rank": 24, "score": 226884.47201913473 }, { "content": "struct IsSend<T: Send>(T);\n", "file_path": "tokio/tests/sync_barrier.rs", "rank": 25, "score": 225823.9004604622 }, { "content": "fn raw_waker<T, S>(header: *const Header) -> RawWaker\n\nwhere\n\n T: Future,\n\n S: Schedule,\n\n{\n\n let ptr = header as *const ();\n\n let vtable = &RawWakerVTable::new(\n\n clone_waker::<T, S>,\n\n wake_by_val::<T, S>,\n\n wake_by_ref::<T, S>,\n\n drop_waker::<T, S>,\n\n );\n\n RawWaker::new(ptr, vtable)\n\n}\n", "file_path": "tokio/src/runtime/task/waker.rs", "rank": 26, "score": 225390.16749550673 }, { "content": "#[allow(dead_code)]\n\nfn require_send<T: Send>(_t: &T) {}\n", "file_path": "tokio/tests/async_send_sync.rs", "rank": 27, "score": 216111.6879495787 }, { "content": "#[allow(dead_code)]\n\nfn require_sync<T: Sync>(_t: &T) {}\n", "file_path": "tokio/tests/async_send_sync.rs", "rank": 28, "score": 215677.27721731423 }, { "content": "fn sync_read(b: &mut Bencher) {\n\n b.iter(|| {\n\n let mut file = StdFile::open(DEV_ZERO).unwrap();\n\n let mut buffer = [0u8; BUFFER_SIZE];\n\n\n\n for _i in 0..BLOCK_COUNT {\n\n file.read_exact(&mut buffer).unwrap();\n\n }\n\n });\n\n}\n\n\n\nbenchmark_group!(\n\n 
file,\n\n async_read_std_file,\n\n async_read_buf,\n\n async_read_codec,\n\n sync_read\n\n);\n\n\n\nbenchmark_main!(file);\n", "file_path": "benches/fs.rs", "rank": 29, "score": 214694.2302862036 }, { "content": "fn uncontended(b: &mut Bencher) {\n\n let rt = tokio::runtime::Builder::new_multi_thread()\n\n .worker_threads(6)\n\n .build()\n\n .unwrap();\n\n\n\n let s = Arc::new(Semaphore::new(10));\n\n b.iter(|| {\n\n let s = s.clone();\n\n rt.block_on(async move {\n\n for _ in 0..6 {\n\n let permit = s.acquire().await;\n\n drop(permit);\n\n }\n\n })\n\n });\n\n}\n\n\n\nasync fn task(s: Arc<Semaphore>) {\n\n let permit = s.acquire().await;\n\n drop(permit);\n\n}\n\n\n", "file_path": "benches/sync_semaphore.rs", "rank": 30, "score": 214694.2302862036 }, { "content": "#[allow(dead_code)]\n\nfn require_send<T: Send>(_t: &T) {}\n", "file_path": "tokio-stream/tests/async_send_sync.rs", "rank": 31, "score": 213136.178050619 }, { "content": "#[allow(dead_code)]\n\nfn require_sync<T: Sync>(_t: &T) {}\n", "file_path": "tokio-stream/tests/async_send_sync.rs", "rank": 32, "score": 212694.0290933593 }, { "content": "fn poll_next(interval: &mut task::Spawn<time::Interval>) -> Poll<Instant> {\n\n interval.enter(|cx, mut interval| interval.poll_tick(cx))\n\n}\n\n\n", "file_path": "tokio/tests/time_pause.rs", "rank": 33, "score": 211194.34375605892 }, { "content": "fn poll_next(interval: &mut task::Spawn<time::Interval>) -> Poll<Instant> {\n\n interval.enter(|cx, mut interval| interval.poll_tick(cx))\n\n}\n\n\n", "file_path": "tokio/tests/time_interval.rs", "rank": 34, "score": 211194.34375605892 }, { "content": "fn contention_unbounded(b: &mut Bencher) {\n\n let rt = rt();\n\n\n\n b.iter(|| {\n\n rt.block_on(async move {\n\n let (tx, mut rx) = mpsc::unbounded_channel::<usize>();\n\n\n\n for _ in 0..5 {\n\n let tx = tx.clone();\n\n tokio::spawn(async move {\n\n for i in 0..1000 {\n\n tx.send(i).unwrap();\n\n }\n\n });\n\n }\n\n\n\n for _ in 0..1_000 * 5 {\n\n let _ = 
rx.recv().await;\n\n }\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/sync_mpsc.rs", "rank": 35, "score": 210630.9006551946 }, { "content": "fn contention_bounded(b: &mut Bencher) {\n\n let rt = rt();\n\n\n\n b.iter(|| {\n\n rt.block_on(async move {\n\n let (tx, mut rx) = mpsc::channel::<usize>(1_000_000);\n\n\n\n for _ in 0..5 {\n\n let tx = tx.clone();\n\n tokio::spawn(async move {\n\n for i in 0..1000 {\n\n tx.send(i).await.unwrap();\n\n }\n\n });\n\n }\n\n\n\n for _ in 0..1_000 * 5 {\n\n let _ = rx.recv().await;\n\n }\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/sync_mpsc.rs", "rank": 36, "score": 210630.9006551946 }, { "content": "fn uncontented_unbounded(b: &mut Bencher) {\n\n let rt = rt();\n\n\n\n b.iter(|| {\n\n rt.block_on(async move {\n\n let (tx, mut rx) = mpsc::unbounded_channel::<usize>();\n\n\n\n for i in 0..5000 {\n\n tx.send(i).unwrap();\n\n }\n\n\n\n for _ in 0..5_000 {\n\n let _ = rx.recv().await;\n\n }\n\n })\n\n });\n\n}\n\n\n\nbencher::benchmark_group!(\n\n create,\n", "file_path": "benches/sync_mpsc.rs", "rank": 37, "score": 210630.9006551946 }, { "content": "fn read_uncontended(b: &mut Bencher) {\n\n let rt = tokio::runtime::Builder::new_multi_thread()\n\n .worker_threads(6)\n\n .build()\n\n .unwrap();\n\n\n\n let lock = Arc::new(RwLock::new(()));\n\n b.iter(|| {\n\n let lock = lock.clone();\n\n rt.block_on(async move {\n\n for _ in 0..6 {\n\n let read = lock.read().await;\n\n black_box(read);\n\n }\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/sync_rwlock.rs", "rank": 38, "score": 210630.9006551946 }, { "content": "fn create_100_000_medium(b: &mut Bencher) {\n\n b.iter(|| {\n\n black_box(&mpsc::channel::<Medium>(100_000));\n\n });\n\n}\n\n\n", "file_path": "benches/sync_mpsc.rs", "rank": 39, "score": 210630.9006551946 }, { "content": "fn create_100_medium(b: &mut Bencher) {\n\n b.iter(|| {\n\n black_box(&mpsc::channel::<Medium>(100));\n\n });\n\n}\n\n\n", "file_path": "benches/sync_mpsc.rs", "rank": 40, "score": 
210630.9006551946 }, { "content": "fn create_1_medium(b: &mut Bencher) {\n\n b.iter(|| {\n\n black_box(&mpsc::channel::<Medium>(1));\n\n });\n\n}\n\n\n", "file_path": "benches/sync_mpsc.rs", "rank": 41, "score": 210630.9006551946 }, { "content": "fn uncontented_bounded(b: &mut Bencher) {\n\n let rt = rt();\n\n\n\n b.iter(|| {\n\n rt.block_on(async move {\n\n let (tx, mut rx) = mpsc::channel::<usize>(1_000_000);\n\n\n\n for i in 0..5000 {\n\n tx.send(i).await.unwrap();\n\n }\n\n\n\n for _ in 0..5_000 {\n\n let _ = rx.recv().await;\n\n }\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/sync_mpsc.rs", "rank": 42, "score": 210630.9006551946 }, { "content": "fn mut_load(this: &mut AtomicUsize) -> usize {\n\n this.with_mut(|v| *v)\n\n}\n\n\n\nimpl<T> Drop for Inner<T> {\n\n fn drop(&mut self) {\n\n let state = State(mut_load(&mut self.state));\n\n\n\n if state.is_rx_task_set() {\n\n unsafe {\n\n self.rx_task.drop_task();\n\n }\n\n }\n\n\n\n if state.is_tx_task_set() {\n\n unsafe {\n\n self.tx_task.drop_task();\n\n }\n\n }\n\n }\n", "file_path": "tokio/src/sync/oneshot.rs", "rank": 43, "score": 209362.5294425634 }, { "content": "fn basic_scheduler_spawn(bench: &mut Bencher) {\n\n let runtime = tokio::runtime::Builder::new_current_thread()\n\n .build()\n\n .unwrap();\n\n bench.iter(|| {\n\n runtime.block_on(async {\n\n let h = tokio::spawn(work());\n\n assert_eq!(h.await.unwrap(), 2);\n\n });\n\n });\n\n}\n\n\n", "file_path": "benches/spawn.rs", "rank": 44, "score": 207927.7232832465 }, { "content": "fn threaded_scheduler_spawn(bench: &mut Bencher) {\n\n let runtime = tokio::runtime::Builder::new_multi_thread()\n\n .worker_threads(1)\n\n .build()\n\n .unwrap();\n\n bench.iter(|| {\n\n runtime.block_on(async {\n\n let h = tokio::spawn(work());\n\n assert_eq!(h.await.unwrap(), 2);\n\n });\n\n });\n\n}\n\n\n", "file_path": "benches/spawn.rs", "rank": 45, "score": 207927.7232832465 }, { "content": "fn basic_scheduler_spawn10(bench: &mut Bencher) {\n\n let runtime = 
tokio::runtime::Builder::new_current_thread()\n\n .build()\n\n .unwrap();\n\n bench.iter(|| {\n\n runtime.block_on(async {\n\n let mut handles = Vec::with_capacity(10);\n\n for _ in 0..10 {\n\n handles.push(tokio::spawn(work()));\n\n }\n\n for handle in handles {\n\n assert_eq!(handle.await.unwrap(), 2);\n\n }\n\n });\n\n });\n\n}\n\n\n", "file_path": "benches/spawn.rs", "rank": 46, "score": 207927.7232832465 }, { "content": "fn threaded_scheduler_spawn10(bench: &mut Bencher) {\n\n let runtime = tokio::runtime::Builder::new_multi_thread()\n\n .worker_threads(1)\n\n .build()\n\n .unwrap();\n\n bench.iter(|| {\n\n runtime.block_on(async {\n\n let mut handles = Vec::with_capacity(10);\n\n for _ in 0..10 {\n\n handles.push(tokio::spawn(work()));\n\n }\n\n for handle in handles {\n\n assert_eq!(handle.await.unwrap(), 2);\n\n }\n\n });\n\n });\n\n}\n\n\n\nbencher::benchmark_group!(\n\n spawn,\n\n basic_scheduler_spawn,\n\n basic_scheduler_spawn10,\n\n threaded_scheduler_spawn,\n\n threaded_scheduler_spawn10,\n\n);\n\n\n\nbencher::benchmark_main!(spawn);\n", "file_path": "benches/spawn.rs", "rank": 47, "score": 207927.7232832465 }, { "content": "fn uncontended_concurrent_single(b: &mut Bencher) {\n\n let rt = tokio::runtime::Builder::new_current_thread()\n\n .build()\n\n .unwrap();\n\n\n\n let s = Arc::new(Semaphore::new(10));\n\n b.iter(|| {\n\n let s = s.clone();\n\n rt.block_on(async move {\n\n tokio::join! {\n\n task(s.clone()),\n\n task(s.clone()),\n\n task(s.clone()),\n\n task(s.clone()),\n\n task(s.clone()),\n\n task(s.clone())\n\n };\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/sync_semaphore.rs", "rank": 48, "score": 206777.4271768047 }, { "content": "fn uncontended_concurrent_multi(b: &mut Bencher) {\n\n let rt = tokio::runtime::Builder::new_multi_thread()\n\n .worker_threads(6)\n\n .build()\n\n .unwrap();\n\n\n\n let s = Arc::new(Semaphore::new(10));\n\n b.iter(|| {\n\n let s = s.clone();\n\n rt.block_on(async move {\n\n let j = tokio::try_join! 
{\n\n task::spawn(task(s.clone())),\n\n task::spawn(task(s.clone())),\n\n task::spawn(task(s.clone())),\n\n task::spawn(task(s.clone())),\n\n task::spawn(task(s.clone())),\n\n task::spawn(task(s.clone()))\n\n };\n\n j.unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/sync_semaphore.rs", "rank": 49, "score": 206777.4271768047 }, { "content": "fn contended_concurrent_single(b: &mut Bencher) {\n\n let rt = tokio::runtime::Builder::new_current_thread()\n\n .build()\n\n .unwrap();\n\n\n\n let s = Arc::new(Semaphore::new(5));\n\n b.iter(|| {\n\n let s = s.clone();\n\n rt.block_on(async move {\n\n tokio::join! {\n\n task(s.clone()),\n\n task(s.clone()),\n\n task(s.clone()),\n\n task(s.clone()),\n\n task(s.clone()),\n\n task(s.clone())\n\n };\n\n })\n\n });\n\n}\n", "file_path": "benches/sync_semaphore.rs", "rank": 50, "score": 206777.4271768047 }, { "content": "fn contended_concurrent_multi(b: &mut Bencher) {\n\n let rt = tokio::runtime::Builder::new_multi_thread()\n\n .worker_threads(6)\n\n .build()\n\n .unwrap();\n\n\n\n let s = Arc::new(Semaphore::new(5));\n\n b.iter(|| {\n\n let s = s.clone();\n\n rt.block_on(async move {\n\n let j = tokio::try_join! 
{\n\n task::spawn(task(s.clone())),\n\n task::spawn(task(s.clone())),\n\n task::spawn(task(s.clone())),\n\n task::spawn(task(s.clone())),\n\n task::spawn(task(s.clone())),\n\n task::spawn(task(s.clone()))\n\n };\n\n j.unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/sync_semaphore.rs", "rank": 51, "score": 206777.4271768047 }, { "content": "fn contention_bounded_full(b: &mut Bencher) {\n\n let rt = rt();\n\n\n\n b.iter(|| {\n\n rt.block_on(async move {\n\n let (tx, mut rx) = mpsc::channel::<usize>(100);\n\n\n\n for _ in 0..5 {\n\n let tx = tx.clone();\n\n tokio::spawn(async move {\n\n for i in 0..1000 {\n\n tx.send(i).await.unwrap();\n\n }\n\n });\n\n }\n\n\n\n for _ in 0..1_000 * 5 {\n\n let _ = rx.recv().await;\n\n }\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/sync_mpsc.rs", "rank": 52, "score": 206777.4271768047 }, { "content": "fn read_concurrent_contended(b: &mut Bencher) {\n\n let rt = tokio::runtime::Builder::new_current_thread()\n\n .build()\n\n .unwrap();\n\n\n\n async fn task(lock: Arc<RwLock<()>>) {\n\n let read = lock.read().await;\n\n black_box(read);\n\n }\n\n\n\n let lock = Arc::new(RwLock::new(()));\n\n b.iter(|| {\n\n let lock = lock.clone();\n\n rt.block_on(async move {\n\n let write = lock.write().await;\n\n tokio::join! {\n\n async move { drop(write) },\n\n task(lock.clone()),\n\n task(lock.clone()),\n\n task(lock.clone()),\n", "file_path": "benches/sync_rwlock.rs", "rank": 53, "score": 206777.4271768047 }, { "content": "fn read_concurrent_uncontended(b: &mut Bencher) {\n\n let rt = tokio::runtime::Builder::new_current_thread()\n\n .build()\n\n .unwrap();\n\n\n\n async fn task(lock: Arc<RwLock<()>>) {\n\n let read = lock.read().await;\n\n black_box(read);\n\n }\n\n\n\n let lock = Arc::new(RwLock::new(()));\n\n b.iter(|| {\n\n let lock = lock.clone();\n\n rt.block_on(async move {\n\n tokio::join! 
{\n\n task(lock.clone()),\n\n task(lock.clone()),\n\n task(lock.clone()),\n\n task(lock.clone()),\n\n task(lock.clone()),\n\n task(lock.clone())\n\n };\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/sync_rwlock.rs", "rank": 54, "score": 206777.4271768047 }, { "content": "fn read_concurrent_uncontended_multi(b: &mut Bencher) {\n\n let rt = tokio::runtime::Builder::new_multi_thread()\n\n .worker_threads(6)\n\n .build()\n\n .unwrap();\n\n\n\n async fn task(lock: Arc<RwLock<()>>) {\n\n let read = lock.read().await;\n\n black_box(read);\n\n }\n\n\n\n let lock = Arc::new(RwLock::new(()));\n\n b.iter(|| {\n\n let lock = lock.clone();\n\n rt.block_on(async move {\n\n let j = tokio::try_join! {\n\n task::spawn(task(lock.clone())),\n\n task::spawn(task(lock.clone())),\n\n task::spawn(task(lock.clone())),\n\n task::spawn(task(lock.clone())),\n\n task::spawn(task(lock.clone())),\n\n task::spawn(task(lock.clone()))\n\n };\n\n j.unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/sync_rwlock.rs", "rank": 55, "score": 203117.96164494238 }, { "content": "fn read_concurrent_contended_multi(b: &mut Bencher) {\n\n let rt = tokio::runtime::Builder::new_multi_thread()\n\n .worker_threads(6)\n\n .build()\n\n .unwrap();\n\n\n\n async fn task(lock: Arc<RwLock<()>>) {\n\n let read = lock.read().await;\n\n black_box(read);\n\n }\n\n\n\n let lock = Arc::new(RwLock::new(()));\n\n b.iter(|| {\n\n let lock = lock.clone();\n\n rt.block_on(async move {\n\n let write = lock.write().await;\n\n let j = tokio::try_join! 
{\n\n async move { drop(write); Ok(()) },\n\n task::spawn(task(lock.clone())),\n\n task::spawn(task(lock.clone())),\n\n task::spawn(task(lock.clone())),\n\n task::spawn(task(lock.clone())),\n\n task::spawn(task(lock.clone())),\n\n };\n\n j.unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/sync_rwlock.rs", "rank": 56, "score": 203117.96164494238 }, { "content": "fn set_state(data: usize, state: usize) -> usize {\n\n (data & NOTIFY_WAITERS_CALLS_MASK) | (state & STATE_MASK)\n\n}\n\n\n", "file_path": "tokio/src/sync/notify.rs", "rank": 57, "score": 202821.1444878587 }, { "content": "fn put_back_original_data(output: &mut String, mut vector: Vec<u8>, num_bytes_read: usize) {\n\n let original_len = vector.len() - num_bytes_read;\n\n vector.truncate(original_len);\n\n *output = String::from_utf8(vector).expect(\"The original data must be valid utf-8.\");\n\n}\n\n\n\n/// This handles the various failure cases and puts the string back into `output`.\n\n///\n\n/// The `truncate_on_io_error` bool is necessary because `read_to_string` and `read_line`\n\n/// disagree on what should happen when an IO error occurs.\n\npub(super) fn finish_string_read(\n\n io_res: io::Result<usize>,\n\n utf8_res: Result<String, FromUtf8Error>,\n\n read: usize,\n\n output: &mut String,\n\n truncate_on_io_error: bool,\n\n) -> Poll<io::Result<usize>> {\n\n match (io_res, utf8_res) {\n\n (Ok(num_bytes), Ok(string)) => {\n\n debug_assert_eq!(read, 0);\n", "file_path": "tokio/src/io/util/read_line.rs", "rank": 58, "score": 202273.53004829277 }, { "content": "fn with(f: impl FnOnce(Runtime)) {\n\n struct Reset;\n\n\n\n impl Drop for Reset {\n\n fn drop(&mut self) {\n\n let _rt = CURRENT.try_lock().unwrap().take();\n\n }\n\n }\n\n\n\n let _reset = Reset;\n\n\n\n let rt = Runtime(Arc::new(Inner {\n\n owned: OwnedTasks::new(),\n\n core: TryLock::new(Core {\n\n queue: VecDeque::new(),\n\n }),\n\n }));\n\n\n\n *CURRENT.try_lock().unwrap() = Some(rt.clone());\n\n f(rt)\n\n}\n\n\n", "file_path": 
"tokio/src/runtime/tests/task.rs", "rank": 59, "score": 199580.33609138438 }, { "content": "#[allow(dead_code)]\n\ntype BoxFuture<T> = std::pin::Pin<Box<dyn std::future::Future<Output = T>>>;\n\n\n", "file_path": "tokio/tests/async_send_sync.rs", "rank": 60, "score": 198746.51187640472 }, { "content": "#[derive(Debug)]\n\nstruct BarrierState {\n\n waker: watch::Sender<usize>,\n\n arrived: usize,\n\n generation: usize,\n\n}\n\n\n\nimpl Barrier {\n\n /// Creates a new barrier that can block a given number of tasks.\n\n ///\n\n /// A barrier will block `n`-1 tasks which call [`Barrier::wait`] and then wake up all\n\n /// tasks at once when the `n`th task calls `wait`.\n\n pub fn new(mut n: usize) -> Barrier {\n\n let (waker, wait) = crate::sync::watch::channel(0);\n\n\n\n if n == 0 {\n\n // if n is 0, it's not clear what behavior the user wants.\n\n // in std::sync::Barrier, an n of 0 exhibits the same behavior as n == 1, where every\n\n // .wait() immediately unblocks, so we adopt that here as well.\n\n n = 1;\n\n }\n", "file_path": "tokio/src/sync/barrier.rs", "rank": 61, "score": 195884.88219455708 }, { "content": "#[derive(Clone)]\n\nstruct YN {\n\n _value: Cell<u8>,\n\n}\n\n\n\n// Send: No, Sync: No\n", "file_path": "tokio/tests/async_send_sync.rs", "rank": 62, "score": 195844.7815219788 }, { "content": "#[allow(dead_code)]\n\nstruct Invalid;\n\n\n", "file_path": "tokio/tests/async_send_sync.rs", "rank": 63, "score": 195844.7815219788 }, { "content": "#[derive(Clone)]\n\nstruct YY {}\n\n\n\n// Send: Yes, Sync: No\n", "file_path": "tokio/tests/async_send_sync.rs", "rank": 64, "score": 195844.7815219788 }, { "content": "#[derive(Clone)]\n\nstruct NN {\n\n _value: Rc<u8>,\n\n}\n\n\n", "file_path": "tokio/tests/async_send_sync.rs", "rank": 65, "score": 195844.7815219788 }, { "content": "trait AssertSend: Send + Sync {}\n\nimpl AssertSend for broadcast::Sender<i32> {}\n\nimpl AssertSend for broadcast::Receiver<i32> {}\n\n\n", "file_path": 
"tokio/tests/sync_broadcast.rs", "rank": 66, "score": 195753.50245107897 }, { "content": "trait AssertSend: Send + Sync {}\n\nimpl AssertSend for Notify {}\n\n\n", "file_path": "tokio/tests/sync_notify.rs", "rank": 67, "score": 195753.50245107897 }, { "content": "trait AssertSend: Send {}\n", "file_path": "tokio/src/sync/tests/atomic_waker.rs", "rank": 68, "score": 195113.3359336658 }, { "content": "trait AssertSync: Send {}\n\n\n\nimpl AssertSend for AtomicWaker {}\n\nimpl AssertSync for AtomicWaker {}\n\n\n\nimpl AssertSend for Waker {}\n\nimpl AssertSync for Waker {}\n\n\n", "file_path": "tokio/src/sync/tests/atomic_waker.rs", "rank": 69, "score": 194800.04419050814 }, { "content": "enum PollFuture {\n\n Complete,\n\n Notified,\n\n Done,\n\n Dealloc,\n\n}\n\n\n", "file_path": "tokio/src/runtime/task/harness.rs", "rank": 70, "score": 193756.801460539 }, { "content": "enum State {\n\n Begin,\n\n AwaitingAdvance(Pin<Box<dyn Future<Output = ()>>>),\n\n AfterAdvance,\n\n}\n\n\n", "file_path": "tokio/tests/time_pause.rs", "rank": 71, "score": 191848.06941864407 }, { "content": "#[derive(Debug)]\n\nenum State {\n\n Idle(Option<Buf>),\n\n Busy(JoinHandle<(Operation, Buf)>),\n\n}\n\n\n", "file_path": "tokio/src/fs/file.rs", "rank": 72, "score": 191848.06941864407 }, { "content": "#[allow(dead_code)]\n\nstruct Invalid;\n\n\n", "file_path": "tokio-stream/tests/async_send_sync.rs", "rank": 73, "score": 191766.70480808584 }, { "content": "#[test]\n\nfn poll_close() {\n\n let (tx, rx) = watch::channel(\"one\");\n\n\n\n {\n\n let mut t = spawn(tx.closed());\n\n assert_pending!(t.poll());\n\n\n\n drop(rx);\n\n\n\n assert!(t.is_woken());\n\n assert_ready!(t.poll());\n\n }\n\n\n\n assert!(tx.send(\"two\").is_err());\n\n}\n\n\n", "file_path": "tokio/tests/sync_watch.rs", "rank": 74, "score": 188993.12743605403 }, { "content": "#[test]\n\nfn send_recv() {\n\n let (tx, rx) = oneshot::channel();\n\n let mut rx = task::spawn(rx);\n\n\n\n assert_pending!(rx.poll());\n\n\n\n 
assert_ok!(tx.send(1));\n\n\n\n assert!(rx.is_woken());\n\n\n\n let val = assert_ready_ok!(rx.poll());\n\n assert_eq!(val, 1);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn async_send_recv() {\n\n let (tx, rx) = oneshot::channel();\n\n\n\n assert_ok!(tx.send(1));\n\n assert_eq!(1, assert_ok!(rx.await));\n\n}\n\n\n", "file_path": "tokio/tests/sync_oneshot.rs", "rank": 75, "score": 188957.4695023549 }, { "content": "#[test]\n\nfn blocking_send() {\n\n let (tx, mut rx) = mpsc::channel::<u8>(1);\n\n\n\n let sync_code = thread::spawn(move || {\n\n tx.blocking_send(10).unwrap();\n\n });\n\n\n\n Runtime::new().unwrap().block_on(async move {\n\n assert_eq!(Some(10), rx.recv().await);\n\n });\n\n sync_code.join().unwrap()\n\n}\n\n\n\n#[tokio::test]\n\n#[should_panic]\n\nasync fn blocking_send_async() {\n\n let (tx, _rx) = mpsc::channel::<()>(1);\n\n let _ = tx.blocking_send(());\n\n}\n\n\n", "file_path": "tokio/tests/sync_mpsc.rs", "rank": 76, "score": 188957.4695023549 }, { "content": "#[test]\n\nfn send_no_rx() {\n\n let (tx, _) = broadcast::channel(16);\n\n\n\n assert_err!(tx.send(\"hello\"));\n\n\n\n let mut rx = tx.subscribe();\n\n\n\n assert_ok!(tx.send(\"world\"));\n\n\n\n let val = assert_recv!(rx);\n\n assert_eq!(\"world\", val);\n\n}\n\n\n", "file_path": "tokio/tests/sync_broadcast.rs", "rank": 77, "score": 188957.4695023549 }, { "content": "#[test]\n\nfn is_send_and_sync() {\n\n fn assert_bound<T: Send + Sync>() {}\n\n\n\n assert_bound::<ReadHalf<RW>>();\n\n assert_bound::<WriteHalf<RW>>();\n\n}\n\n\n", "file_path": "tokio/tests/io_split.rs", "rank": 78, "score": 188957.4695023549 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nenum State {\n\n Initializing,\n\n JustFilling,\n\n Done,\n\n}\n\n\n", "file_path": "tokio/tests/io_read_to_end.rs", "rank": 79, "score": 188951.47788627844 }, { "content": "#[derive(Debug)]\n\nenum State {\n\n Idle(Option<std::fs::ReadDir>),\n\n Pending(JoinHandle<(Option<io::Result<std::fs::DirEntry>>, std::fs::ReadDir)>),\n\n}\n\n\n\nimpl 
ReadDir {\n\n /// Returns the next entry in the directory stream.\n\n ///\n\n /// # Cancel safety\n\n ///\n\n /// This method is cancellation safe.\n\n pub async fn next_entry(&mut self) -> io::Result<Option<DirEntry>> {\n\n use crate::future::poll_fn;\n\n poll_fn(|cx| self.poll_next_entry(cx)).await\n\n }\n\n\n\n /// Polls for the next directory entry in the stream.\n\n ///\n\n /// This method returns:\n\n ///\n", "file_path": "tokio/src/fs/read_dir.rs", "rank": 80, "score": 188951.47788627844 }, { "content": "#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\nenum CancellationState {\n\n NotCancelled = 0,\n\n Cancelling = 1,\n\n Cancelled = 2,\n\n}\n\n\n\nimpl CancellationState {\n\n fn pack(self) -> usize {\n\n self as usize\n\n }\n\n\n\n fn unpack(value: usize) -> Self {\n\n match value {\n\n 0 => CancellationState::NotCancelled,\n\n 1 => CancellationState::Cancelling,\n\n 2 => CancellationState::Cancelled,\n\n _ => unreachable!(\"Invalid value\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "tokio-util/src/sync/cancellation_token.rs", "rank": 81, "score": 188687.79926602484 }, { "content": "struct Core {\n\n queue: VecDeque<task::Notified<Runtime>>,\n\n}\n\n\n\nstatic CURRENT: TryLock<Option<Runtime>> = TryLock::new(None);\n\n\n\nimpl Runtime {\n\n fn spawn<T>(&self, future: T) -> JoinHandle<T::Output>\n\n where\n\n T: 'static + Send + Future,\n\n T::Output: 'static + Send,\n\n {\n\n let (handle, notified) = self.0.owned.bind(future, self.clone());\n\n\n\n if let Some(notified) = notified {\n\n self.schedule(notified);\n\n }\n\n\n\n handle\n\n }\n", "file_path": "tokio/src/runtime/tests/task.rs", "rank": 82, "score": 188271.05258269314 }, { "content": "struct Chan {\n\n num: AtomicUsize,\n\n task: AtomicWaker,\n\n}\n\n\n", "file_path": "tokio/src/sync/tests/loom_atomic_waker.rs", "rank": 83, "score": 187948.8467725803 }, { "content": "struct SynchronizedState {\n\n waiters: LinkedList<WaitQueueEntry>,\n\n first_child: Option<NonNull<CancellationTokenState>>,\n\n 
is_cancelled: bool,\n\n}\n\n\n\nimpl SynchronizedState {\n\n fn new() -> Self {\n\n Self {\n\n waiters: LinkedList::new(),\n\n first_child: None,\n\n is_cancelled: false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "tokio-util/src/sync/cancellation_token.rs", "rank": 84, "score": 187937.27549897117 }, { "content": "#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\nstruct StateSnapshot {\n\n /// The amount of references to this particular CancellationToken.\n\n /// `CancellationToken` structs hold these references to a `CancellationTokenState`.\n\n /// Also the state is referenced by the state of each child.\n\n refcount: usize,\n\n /// Whether the state is still referenced by it's parent and can therefore\n\n /// not be freed.\n\n has_parent_ref: bool,\n\n /// Whether the token is cancelled\n\n cancel_state: CancellationState,\n\n}\n\n\n\nimpl StateSnapshot {\n\n /// Packs the snapshot into a `usize`\n\n fn pack(self) -> usize {\n\n self.refcount << 3 | if self.has_parent_ref { 4 } else { 0 } | self.cancel_state.pack()\n\n }\n\n\n\n /// Unpacks the snapshot from a `usize`\n\n fn unpack(value: usize) -> Self {\n", "file_path": "tokio-util/src/sync/cancellation_token.rs", "rank": 85, "score": 187937.27549897117 }, { "content": "/// Utility to test things on both kinds of runtimes both before and after shutting it down.\n\nfn test_with_runtimes<F>(f: F)\n\nwhere\n\n F: Fn(),\n\n{\n\n {\n\n println!(\"current thread runtime\");\n\n\n\n let rt = new_current_thread();\n\n let _enter = rt.enter();\n\n f();\n\n\n\n println!(\"current thread runtime after shutdown\");\n\n rt.shutdown_timeout(Duration::from_secs(1000));\n\n f();\n\n }\n\n\n\n {\n\n println!(\"multi thread (1 thread) runtime\");\n\n\n\n let rt = new_multi_thread(1);\n", "file_path": "tokio/tests/rt_handle_block_on.rs", "rank": 86, "score": 187468.36668970273 }, { "content": "/// Core data\n\nstruct Core {\n\n /// Used to schedule bookkeeping tasks every so often.\n\n tick: u8,\n\n\n\n /// When a task is scheduled 
from a worker, it is stored in this slot. The\n\n /// worker will check this slot for a task **before** checking the run\n\n /// queue. This effectively results in the **last** scheduled task to be run\n\n /// next (LIFO). This is an optimization for message passing patterns and\n\n /// helps to reduce latency.\n\n lifo_slot: Option<Notified>,\n\n\n\n /// The worker-local run queue.\n\n run_queue: queue::Local<Arc<Shared>>,\n\n\n\n /// True if the worker is currently searching for more work. Searching\n\n /// involves attempting to steal from other workers.\n\n is_searching: bool,\n\n\n\n /// True if the scheduler is being shutdown\n\n is_shutdown: bool,\n", "file_path": "tokio/src/runtime/thread_pool/worker.rs", "rank": 87, "score": 185536.1656149706 }, { "content": "struct BlockedFuture {\n\n rx: Receiver<()>,\n\n num_polls: Arc<AtomicUsize>,\n\n}\n\n\n\nimpl Future for BlockedFuture {\n\n type Output = ();\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n self.num_polls.fetch_add(1, Release);\n\n\n\n match Pin::new(&mut self.rx).poll(cx) {\n\n Poll::Pending => Poll::Pending,\n\n _ => Poll::Ready(()),\n\n }\n\n }\n\n}\n", "file_path": "tokio/src/runtime/tests/loom_basic_scheduler.rs", "rank": 88, "score": 185391.06626006457 }, { "content": "#[test]\n\nfn explicit_close_poll() {\n\n // First, with message sent\n\n let (tx, rx) = oneshot::channel();\n\n let mut rx = task::spawn(rx);\n\n\n\n assert_ok!(tx.send(1));\n\n\n\n rx.close();\n\n\n\n let value = assert_ready_ok!(rx.poll());\n\n assert_eq!(value, 1);\n\n\n\n // Second, without the message sent\n\n let (tx, rx) = oneshot::channel::<i32>();\n\n let mut tx = task::spawn(tx);\n\n let mut rx = task::spawn(rx);\n\n\n\n assert_pending!(tx.enter(|cx, mut tx| tx.poll_closed(cx)));\n\n\n\n rx.close();\n", "file_path": "tokio/tests/sync_oneshot.rs", "rank": 89, "score": 184929.9155717844 }, { "content": "#[test]\n\nfn send_recv_bounded() {\n\n let (tx, mut rx) = 
broadcast::channel(16);\n\n\n\n let mut recv = task::spawn(rx.recv());\n\n\n\n assert_pending!(recv.poll());\n\n\n\n assert_ok!(tx.send(\"hello\"));\n\n\n\n assert!(recv.is_woken());\n\n let val = assert_ready_ok!(recv.poll());\n\n assert_eq!(val, \"hello\");\n\n}\n\n\n", "file_path": "tokio/tests/sync_broadcast.rs", "rank": 90, "score": 184895.2028474928 }, { "content": "#[test]\n\nfn send_sync_bound() {\n\n use tokio::runtime::Runtime;\n\n fn is_send<T: Send + Sync>() {}\n\n\n\n is_send::<Runtime>();\n\n}\n\n\n\nrt_test! {\n\n use tokio::net::{TcpListener, TcpStream, UdpSocket};\n\n use tokio::io::{AsyncReadExt, AsyncWriteExt};\n\n use tokio::runtime::Runtime;\n\n use tokio::sync::oneshot;\n\n use tokio::{task, time};\n\n use tokio_test::{assert_err, assert_ok};\n\n\n\n use futures::future::poll_fn;\n\n use std::future::Future;\n\n use std::pin::Pin;\n\n use std::sync::{mpsc, Arc};\n\n use std::task::{Context, Poll};\n", "file_path": "tokio/tests/rt_common.rs", "rank": 91, "score": 184895.2028474928 }, { "content": "#[test]\n\nfn send_slow_rx() {\n\n let (tx, mut rx1) = broadcast::channel(16);\n\n let mut rx2 = tx.subscribe();\n\n\n\n {\n\n let mut recv2 = task::spawn(rx2.recv());\n\n\n\n {\n\n let mut recv1 = task::spawn(rx1.recv());\n\n\n\n assert_pending!(recv1.poll());\n\n assert_pending!(recv2.poll());\n\n\n\n assert_ok!(tx.send(\"one\"));\n\n\n\n assert!(recv1.is_woken());\n\n assert!(recv2.is_woken());\n\n\n\n assert_ok!(tx.send(\"two\"));\n\n\n", "file_path": "tokio/tests/sync_broadcast.rs", "rank": 92, "score": 184895.2028474928 }, { "content": "#[test]\n\nfn send_two_recv() {\n\n let (tx, mut rx1) = broadcast::channel(16);\n\n let mut rx2 = tx.subscribe();\n\n\n\n assert_empty!(rx1);\n\n assert_empty!(rx2);\n\n\n\n let n = assert_ok!(tx.send(\"hello\"));\n\n assert_eq!(n, 2);\n\n\n\n let val = assert_recv!(rx1);\n\n assert_eq!(val, \"hello\");\n\n\n\n let val = assert_recv!(rx2);\n\n assert_eq!(val, \"hello\");\n\n\n\n assert_empty!(rx1);\n\n 
assert_empty!(rx2);\n\n}\n\n\n", "file_path": "tokio/tests/sync_broadcast.rs", "rank": 93, "score": 184895.2028474928 }, { "content": "/// Internal state of the `CancellationToken` pair above\n\nstruct CancellationTokenState {\n\n state: AtomicUsize,\n\n parent: Option<NonNull<CancellationTokenState>>,\n\n from_parent: SynchronizedThroughParent,\n\n synchronized: Mutex<SynchronizedState>,\n\n}\n\n\n\nimpl CancellationTokenState {\n\n fn new(\n\n parent: Option<NonNull<CancellationTokenState>>,\n\n state: StateSnapshot,\n\n ) -> CancellationTokenState {\n\n CancellationTokenState {\n\n parent,\n\n from_parent: SynchronizedThroughParent {\n\n prev_peer: None,\n\n next_peer: None,\n\n },\n\n state: AtomicUsize::new(state.pack()),\n\n synchronized: Mutex::new(SynchronizedState::new()),\n", "file_path": "tokio-util/src/sync/cancellation_token.rs", "rank": 94, "score": 184269.81682908474 }, { "content": "#[derive(Debug)]\n\nenum State<T> {\n\n Idle(Option<Buf>),\n\n Busy(sys::Blocking<(io::Result<usize>, Buf, T)>),\n\n}\n\n\n\ncfg_io_std! 
{\n\n impl<T> Blocking<T> {\n\n pub(crate) fn new(inner: T) -> Blocking<T> {\n\n Blocking {\n\n inner: Some(inner),\n\n state: State::Idle(Some(Buf::with_capacity(0))),\n\n need_flush: false,\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<T> AsyncRead for Blocking<T>\n\nwhere\n\n T: Read + Unpin + Send + 'static,\n", "file_path": "tokio/src/io/blocking.rs", "rank": 95, "score": 181795.6188351086 }, { "content": "#[test]\n\nfn basic_usage() {\n\n let mut waker = task::spawn(AtomicWaker::new());\n\n\n\n waker.enter(|cx, waker| waker.register_by_ref(cx.waker()));\n\n waker.wake();\n\n\n\n assert!(waker.is_woken());\n\n}\n\n\n", "file_path": "tokio/src/sync/tests/atomic_waker.rs", "rank": 96, "score": 181092.3383391434 }, { "content": "#[test]\n\n#[should_panic]\n\nfn close_try_recv_poll() {\n\n let (_tx, rx) = oneshot::channel::<i32>();\n\n let mut rx = task::spawn(rx);\n\n\n\n rx.close();\n\n\n\n assert_err!(rx.try_recv());\n\n\n\n let _ = rx.poll();\n\n}\n\n\n", "file_path": "tokio/tests/sync_oneshot.rs", "rank": 97, "score": 181076.55377791135 }, { "content": "#[test]\n\nfn send_two_recv_bounded() {\n\n let (tx, mut rx1) = broadcast::channel(16);\n\n let mut rx2 = tx.subscribe();\n\n\n\n let mut recv1 = task::spawn(rx1.recv());\n\n let mut recv2 = task::spawn(rx2.recv());\n\n\n\n assert_pending!(recv1.poll());\n\n assert_pending!(recv2.poll());\n\n\n\n assert_ok!(tx.send(\"hello\"));\n\n\n\n assert!(recv1.is_woken());\n\n assert!(recv2.is_woken());\n\n\n\n let val1 = assert_ready_ok!(recv1.poll());\n\n let val2 = assert_ready_ok!(recv2.poll());\n\n assert_eq!(val1, \"hello\");\n\n assert_eq!(val2, \"hello\");\n\n\n", "file_path": "tokio/tests/sync_broadcast.rs", "rank": 98, "score": 181042.73744640945 }, { "content": "#[test]\n\nfn send_try_recv_bounded() {\n\n let (tx, mut rx) = broadcast::channel(16);\n\n\n\n assert_empty!(rx);\n\n\n\n let n = assert_ok!(tx.send(\"hello\"));\n\n assert_eq!(n, 1);\n\n\n\n let val = assert_recv!(rx);\n\n assert_eq!(val, \"hello\");\n\n\n\n 
assert_empty!(rx);\n\n}\n\n\n", "file_path": "tokio/tests/sync_broadcast.rs", "rank": 99, "score": 181042.73744640945 } ]
Rust
src/main.rs
pacman82/cargo-wheel
d4fd42d2878ff242e103a28efb44fced7d102354
mod header; mod templates; use cargo::{ core::{shell::Shell, Workspace}, util::important_paths, CliResult, Config, }; use std::{ env::current_dir, fs::create_dir_all, path::{Path, PathBuf}, process::Command, }; use structopt::{clap::AppSettings, StructOpt}; use crate::templates::SetupPyVars; #[derive(StructOpt)] #[structopt(bin_name = "cargo")] enum Opts { #[structopt( name = "wheel", setting = AppSettings::UnifiedHelpMessage, setting = AppSettings::DeriveDisplayOrder, setting = AppSettings::DontCollapseArgsInUsage )] Wheel(Args), } #[derive(StructOpt)] struct Args { #[structopt(long = "target-dir", value_name = "DIRECTORY", parse(from_os_str))] target_dir: Option<PathBuf>, #[structopt(long, short = "v", parse(from_occurrences))] verbose: u32, #[structopt(long, short = "q")] quiet: bool, #[structopt(long, value_name = "WHEN")] color: Option<String>, #[structopt(long)] frozen: bool, #[structopt(long)] locked: bool, #[structopt(long)] offline: bool, #[structopt(short = "Z", value_name = "FLAG")] unstable_flags: Vec<String>, } fn main() { env_logger::init(); let mut config = match Config::default() { Ok(cfg) => cfg, Err(e) => { let mut shell = Shell::new(); cargo::exit_with_error(e.into(), &mut shell) } }; let Opts::Wheel(args) = Opts::from_args(); if let Err(err) = real_main(args, &mut config) { let mut shell = Shell::new(); cargo::exit_with_error(err, &mut shell) } } fn real_main(args: Args, config: &mut Config) -> CliResult { let cli_config = []; config.configure( args.verbose, args.quiet, args.color.as_deref(), args.frozen, args.locked, args.offline, &args.target_dir, &args.unstable_flags, &cli_config, )?; let manifest_path = important_paths::find_root_manifest_for_wd(config.cwd())?; let workspace = Workspace::new(&manifest_path, config)?; let package = workspace.current()?; let crate_dir = manifest_path .parent() .expect("Expected manifest path to point to a file"); let setup_py_dir = Path::new("."); let absolute_setup_py_dir = current_dir() .expect("Error 
determining working directory") .join(setup_py_dir); let relative_crate_dir = pathdiff::diff_paths(crate_dir, &absolute_setup_py_dir) .expect("Could not determine crate directory relative to directory containing setup.py"); let relative_crate_dir = if relative_crate_dir == Path::new("") { PathBuf::from(".") } else { relative_crate_dir }; let setup_py_path = setup_py_dir.join("setup.py"); let py_package_name = &package.name().replace('-', "_"); let py_package_dir = setup_py_dir.join(&py_package_name); let c_dylib_name = package .targets() .iter() .find(|t| t.is_cdylib()) .expect( "No dynamic C-Library found in targets. Do you miss:\ \n\ \n[lib]\ \ncrate-type = [\"cdylib\"]\ \n\ \nin your Cargo.toml?", ) .crate_name(); println!("Generate C Header file"); header::generate_c_bindings(crate_dir, py_package_name); if !setup_py_path.exists() { let version = package.version().to_string(); let setup_py_vars = SetupPyVars::new( py_package_name, &c_dylib_name, &version, "url", "authors", "description", relative_crate_dir .to_str() .expect("Crate path contains invalid unicode characters."), ); templates::render_setup_py(&setup_py_path, setup_py_vars); } create_dir_all(&py_package_dir).expect("Error creating python package directory"); let init_py_path = py_package_dir.join("__init__.py"); if !init_py_path.exists() { templates::render_init_py(&init_py_path, py_package_name); } println!("python setup.py bdist_wheel"); let exit_code = Command::new("python") .arg("setup.py") .arg("bdist_wheel") .status() .expect("Error executing 'python setup.py bdist_wheel'"); println!("'python setup.py bdist_wheel' finished: {}", exit_code); Ok(()) }
mod header; mod templates; use cargo::{ core::{shell::Shell, Workspace}, util::important_paths, CliResult, Config, }; use std::{ env::current_dir, fs::create_dir_all, path::{Path, PathBuf}, process::Command, }; use structopt::{clap::AppSettings, StructOpt}; use crate::templates::SetupPyVars; #[derive(StructOpt)] #[structopt(bin_name = "cargo")] enum Opts { #[structopt( name = "wheel", setting = AppSettings::UnifiedHelpMessage, setting = AppSettings::DeriveDisplayOrder, setting = AppSettings::DontCollapseArgsInUsage )] Wheel(Args), } #[derive(StructOpt)] struct Args { #[structopt(long = "target-dir", value_name = "DIRECTORY", parse(from_os_str))] target_dir: Option<PathBuf>, #[structopt(long, short = "v", parse(from_occurrences))] verbose: u32, #[structopt(long, short = "q")] quiet: bool, #[structopt(long, value_name = "WHEN")] color: Option<String>, #[structopt(long)] frozen: bool, #[structopt(long)] locked: bool, #[structopt(long)] offline: bool, #[structopt(short = "Z", value_name = "FLAG")] unstable_flags: Vec<String>, }
fn real_main(args: Args, config: &mut Config) -> CliResult { let cli_config = []; config.configure( args.verbose, args.quiet, args.color.as_deref(), args.frozen, args.locked, args.offline, &args.target_dir, &args.unstable_flags, &cli_config, )?; let manifest_path = important_paths::find_root_manifest_for_wd(config.cwd())?; let workspace = Workspace::new(&manifest_path, config)?; let package = workspace.current()?; let crate_dir = manifest_path .parent() .expect("Expected manifest path to point to a file"); let setup_py_dir = Path::new("."); let absolute_setup_py_dir = current_dir() .expect("Error determining working directory") .join(setup_py_dir); let relative_crate_dir = pathdiff::diff_paths(crate_dir, &absolute_setup_py_dir) .expect("Could not determine crate directory relative to directory containing setup.py"); let relative_crate_dir = if relative_crate_dir == Path::new("") { PathBuf::from(".") } else { relative_crate_dir }; let setup_py_path = setup_py_dir.join("setup.py"); let py_package_name = &package.name().replace('-', "_"); let py_package_dir = setup_py_dir.join(&py_package_name); let c_dylib_name = package .targets() .iter() .find(|t| t.is_cdylib()) .expect( "No dynamic C-Library found in targets. 
Do you miss:\ \n\ \n[lib]\ \ncrate-type = [\"cdylib\"]\ \n\ \nin your Cargo.toml?", ) .crate_name(); println!("Generate C Header file"); header::generate_c_bindings(crate_dir, py_package_name); if !setup_py_path.exists() { let version = package.version().to_string(); let setup_py_vars = SetupPyVars::new( py_package_name, &c_dylib_name, &version, "url", "authors", "description", relative_crate_dir .to_str() .expect("Crate path contains invalid unicode characters."), ); templates::render_setup_py(&setup_py_path, setup_py_vars); } create_dir_all(&py_package_dir).expect("Error creating python package directory"); let init_py_path = py_package_dir.join("__init__.py"); if !init_py_path.exists() { templates::render_init_py(&init_py_path, py_package_name); } println!("python setup.py bdist_wheel"); let exit_code = Command::new("python") .arg("setup.py") .arg("bdist_wheel") .status() .expect("Error executing 'python setup.py bdist_wheel'"); println!("'python setup.py bdist_wheel' finished: {}", exit_code); Ok(()) }
fn main() { env_logger::init(); let mut config = match Config::default() { Ok(cfg) => cfg, Err(e) => { let mut shell = Shell::new(); cargo::exit_with_error(e.into(), &mut shell) } }; let Opts::Wheel(args) = Opts::from_args(); if let Err(err) = real_main(args, &mut config) { let mut shell = Shell::new(); cargo::exit_with_error(err, &mut shell) } }
function_block-full_function
[ { "content": "#[derive(Serialize)]\n\nstruct InitPyVars<'a> {\n\n name: &'a str,\n\n}\n\n\n", "file_path": "src/templates.rs", "rank": 3, "score": 38134.878776464204 }, { "content": "pub fn render_init_py(path: &Path, name: &str) {\n\n let template = mustache::compile_str(INIT_PY).unwrap();\n\n let mut file = File::create(path).expect(\"Unable to create __init__.py\");\n\n template\n\n .render(&mut file, &InitPyVars { name })\n\n .expect(\"Failed rendering __init__.py\");\n\n}\n", "file_path": "src/templates.rs", "rank": 4, "score": 29481.73125752947 }, { "content": "/// Generates c-header file.\n\npub fn generate_c_bindings(manifest_dir: &Path, crate_name: &str) {\n\n let config = Config {\n\n language: Language::C,\n\n ..Default::default()\n\n };\n\n generate_with_config(manifest_dir, config)\n\n .expect(\"Error generating C header file.\")\n\n .write_to_file(format!(\"target/{}.h\", crate_name));\n\n}\n", "file_path": "src/header.rs", "rank": 5, "score": 21260.377367732584 }, { "content": "# cargo-wheel\n\n\n\nUse milksnake and cbindgen to generate python binding to your Rust crate.\n\n\n\n## Quick start\n\n\n\nEdit your `Cargo.toml` and set the crate type to `cdylib`\n\n\n\n```toml\n\n[lib]\n\ncrate-type = [\"cdylib\"]\n\n```\n\n\n\nExport functions or datastructures in your library to make them visible to `C`.\n\n\n\n```rust\n\n#[no_mangle]\n\npub extern fn greet() {\n\n println!(\"Hello from Rust\");\n\n}\n\n```\n\n\n\nCall `cargo wheel` to invoke cbindgen and set up a python package.\n\n\n\n```bash\n\ncargo wheel\n\n```\n\n\n\nUse `cffi` in the generated `__init__.py` to expose the functionality to python\n\n\n\n```python\n\nfrom test_lib._native import ffi, lib\n\n\n\ndef greet():\n\n lib.greet()\n\n```\n\n\n\n## Why you want to use cargo-wheel\n\n\n\nTo save boilerplate if creating python bindings for a Rust crate\n\n\n\n## Why you do not want to use cargo wheel\n\n\n\nScenarios where you want to invoke `cargo` from your `setup.py` rather than the other 
way around.\n", "file_path": "README.md", "rank": 6, "score": 19705.156718646027 }, { "content": "Changelog\n\n=========\n\n\n\n0.2.1\n\n-----\n\n\n\n* Update dependencies\n\n* Use edition 2021\n\n\n\n0.2.0\n\n-----\n\n\n\n* Update to edition 2018\n", "file_path": "Changelog.md", "rank": 7, "score": 19701.41486610666 }, { "content": "use cbindgen::{generate_with_config, Config, Language};\n\nuse std::path::Path;\n\n\n\n/// Generates c-header file.\n", "file_path": "src/header.rs", "rank": 8, "score": 16512.646125987398 }, { "content": "use serde_derive::Serialize;\n\nuse std::{fs::File, path::Path};\n\n\n\nconst INIT_PY: &str = include_str!(\"__init__.py.mustache\");\n\nconst SETUP_PY: &str = include_str!(\"setup.py.mustache\");\n\n\n\n#[cfg(not(target_os = \"windows\"))]\n\nconst EXECUTABLE_FILE_ENDING: &str = \"\";\n\n\n\n#[cfg(target_os = \"windows\")]\n\nconst EXECUTABLE_FILE_ENDING: &str = \".exe\";\n\n\n\n#[derive(Serialize)]\n\npub struct SetupPyVars<'a> {\n\n name: &'a str,\n\n c_dylib: &'a str,\n\n version: &'a str,\n\n url: &'a str,\n\n author: &'a str,\n\n description: &'a str,\n", "file_path": "src/templates.rs", "rank": 9, "score": 15351.867652061985 }, { "content": " executable_file_ending: &'a str,\n\n crate_dir: &'a str,\n\n}\n\n\n\nimpl<'a> SetupPyVars<'a> {\n\n pub fn new(\n\n name: &'a str,\n\n c_dylib: &'a str,\n\n version: &'a str,\n\n url: &'a str,\n\n author: &'a str,\n\n description: &'a str,\n\n crate_dir: &'a str,\n\n ) -> Self {\n\n SetupPyVars {\n\n name,\n\n c_dylib,\n\n version,\n\n url,\n\n author,\n\n description,\n\n executable_file_ending: EXECUTABLE_FILE_ENDING,\n\n crate_dir,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Serialize)]\n", "file_path": "src/templates.rs", "rank": 10, "score": 15350.539678909667 }, { "content": "pub fn render_setup_py(\n\n path: &Path,\n\n setup_py_vars: SetupPyVars<'_>,\n\n) {\n\n let template = mustache::compile_str(SETUP_PY).unwrap();\n\n let mut file = File::create(path).expect(\"Unable to create 
setup.py\");\n\n template\n\n .render(\n\n &mut file,\n\n &setup_py_vars,\n\n )\n\n .expect(\"Failed rendering setup.py\");\n\n}\n\n\n", "file_path": "src/templates.rs", "rank": 11, "score": 12321.645849330107 } ]
Rust
src/node.rs
maidsafe/safe_gossip
9c9738072a576fdc654801c68d2b0255fcf5b2a9
use super::gossip::{Content, GossipState, Rumor, Statistics}; use super::messages::{Gossip, Message}; use crate::error::Error; use crate::id::Id; use bincode::serialize; use ed25519::{Keypair, PublicKey}; use rand::seq::SliceRandom; use rand_core::OsRng; use serde::ser::Serialize; use std::fmt::{self, Debug, Formatter}; pub struct Node { keys: Keypair, peers: Vec<Id>, gossip: GossipState, } impl Node { pub fn id(&self) -> Id { self.keys.public.into() } pub fn add_peer(&mut self, peer_id: Id) -> Result<(), Error> { if !self.gossip.rumors().is_empty() { return Err(Error::AlreadyStarted); } self.peers.push(peer_id); self.gossip.add_peer(); Ok(()) } pub fn initiate_rumor<T: Serialize>(&mut self, rumor: &T) -> Result<(), Error> { if self.peers.is_empty() { return Err(Error::NoPeers); } self.gossip.initiate_rumor(Content(serialize(rumor)?)); Ok(()) } pub fn next_round(&mut self) -> Result<(Id, Option<Vec<u8>>), Error> { let mut rng = rand::thread_rng(); let peer_id = match self.peers.choose(&mut rng) { Some(id) => *id, None => return Err(Error::NoPeers), }; if let Some(gossip) = self.gossip.next_round() { let serialized = self.serialise(gossip); debug!("{:?} Sending Push gossip to {:?}", self, peer_id); Ok((peer_id, Some(serialized))) } else { Ok((peer_id, None)) } } pub fn receive_gossip(&mut self, peer_id: &Id, serialised_gossip: &[u8]) -> Option<Vec<u8>> { debug!("{:?} handling gossip from {:?}", self, peer_id); let pub_key = if let Ok(pub_key) = PublicKey::from_bytes(&peer_id.0) { pub_key } else { return None; }; let gossip = if let Ok(gossip) = Message::deserialise(serialised_gossip, &pub_key) { gossip } else { error!("Failed to deserialise gossip"); return None; }; if let Some(response) = self.gossip.receive(*peer_id, gossip) { Some(self.serialise(response)) } else { None } } pub fn rumors(&self) -> Vec<Rumor> { self.gossip.rumors() } pub fn statistics(&self) -> Statistics { self.gossip.statistics() } #[cfg(test)] pub fn clear(&mut self) { self.gossip.clear(); } fn 
serialise(&mut self, gossip: Gossip) -> Vec<u8> { if let Ok(serialised_msg) = Message::serialise(&gossip, &self.keys) { return serialised_msg; } else { error!("Failed to serialise {:?}", gossip); } vec![] } } impl Default for Node { fn default() -> Self { let keys = Keypair::generate(&mut OsRng); Node { keys, peers: vec![], gossip: GossipState::new(), } } } impl Debug for Node { fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { write!(formatter, "{:?}", self.id()) } } #[cfg(test)] mod tests { use super::*; use itertools::Itertools; use rand::seq::SliceRandom; use rand::{self, Rng}; use std::collections::BTreeMap; use std::time::Instant; use std::{cmp, u64}; fn create_network(node_count: u32) -> Vec<Node> { let mut nodes = std::iter::repeat_with(Node::default) .take(node_count as usize) .collect_vec(); for i in 0..(nodes.len() - 1) { let lhs_id = nodes[i].id(); for j in (i + 1)..nodes.len() { let rhs_id = nodes[j].id(); let _ = nodes[j].add_peer(lhs_id); let _ = nodes[i].add_peer(rhs_id); } } nodes } fn send_rumors(nodes: &mut Vec<Node>, num_of_msgs: u32) -> (u64, u64, Statistics) { let mut rng = rand::thread_rng(); let mut rumors: Vec<String> = Vec::new(); for _ in 0..num_of_msgs { let mut raw = [0u8; 20]; rng.fill(&mut raw[..]); rumors.push(String::from_utf8_lossy(&raw).to_string()); } { assert!(num_of_msgs >= 1); let node = unwrap!(nodes.choose_mut(&mut rng)); let rumor = unwrap!(rumors.pop()); let _ = node.initiate_rumor(&rumor); } let mut processed = true; while processed { processed = false; let mut gossips_to_push = BTreeMap::new(); for node in nodes.iter_mut() { if !rumors.is_empty() && rng.gen() { let rumor = unwrap!(rumors.pop()); let _ = node.initiate_rumor(&rumor); } if let Ok((dst_id, Some(push_gossip))) = node.next_round() { processed = true; let _ = gossips_to_push.insert((node.id(), dst_id), push_gossip); } } for ((src_id, dst_id), push_gossip) in gossips_to_push { let dst = unwrap!(nodes.iter_mut().find(|node| node.id() == dst_id)); let 
pull_gossip = dst.receive_gossip(&src_id, &push_gossip); let src = unwrap!(nodes.iter_mut().find(|node| node.id() == src_id)); if let Some(gossip) = pull_gossip { assert!(src.receive_gossip(&dst_id, &gossip).is_none()); } } } let mut statistics = Statistics::default(); let mut nodes_missed = 0; let mut msgs_missed = 0; for node in nodes.iter_mut() { let stat = node.statistics(); statistics.add(&stat); statistics.rounds = stat.rounds; if node.rumors().len() as u32 != num_of_msgs { nodes_missed += 1; msgs_missed += u64::from(num_of_msgs - node.rumors().len() as u32); } node.clear(); } (nodes_missed, msgs_missed, statistics) } fn one_rumor_test(num_of_nodes: u32) { let mut nodes = create_network(num_of_nodes); println!("Network of {} nodes:", num_of_nodes); let iterations = 100; let mut metrics = Vec::new(); for _ in 0..iterations { metrics.push(send_rumors(&mut nodes, 1)); } let mut stats_avg = Statistics::default(); let mut stats_max = Statistics::default(); let mut stats_min = Statistics::new_max(); let mut nodes_missed_avg = 0.0; let mut nodes_missed_max = 0; let mut nodes_missed_min = u64::MAX; let mut msgs_missed_avg = 0.0; let mut msgs_missed_max = 0; let mut msgs_missed_min = u64::MAX; for (nodes_missed, msgs_missed, stats) in metrics { nodes_missed_avg += nodes_missed as f64; nodes_missed_max = cmp::max(nodes_missed_max, nodes_missed); nodes_missed_min = cmp::min(nodes_missed_min, nodes_missed); msgs_missed_avg += msgs_missed as f64; msgs_missed_max = cmp::max(msgs_missed_max, msgs_missed); msgs_missed_min = cmp::min(msgs_missed_min, msgs_missed); stats_avg.add(&stats); stats_max.max(&stats); stats_min.min(&stats); } nodes_missed_avg /= iterations as f64; msgs_missed_avg /= iterations as f64; stats_avg.rounds /= iterations; stats_avg.sent_rumors /= iterations; stats_avg.received_rumors /= iterations; print!(" AVERAGE ---- "); print_metric( nodes_missed_avg, msgs_missed_avg, &stats_avg, num_of_nodes, 1, ); print!(" MIN -------- "); print_metric( 
nodes_missed_min as f64, msgs_missed_min as f64, &stats_min, num_of_nodes, 1, ); print!(" MAX -------- "); print_metric( nodes_missed_max as f64, msgs_missed_max as f64, &stats_max, num_of_nodes, 1, ); } fn print_metric( nodes_missed: f64, msgs_missed: f64, stats: &Statistics, num_of_nodes: u32, num_of_msgs: u32, ) { println!( "rounds: {}, msgs_sent: {}, msgs_missed: {} \ ({:.2}%), nodes_missed: {} ({:.2}%)", stats.rounds, stats.sent_rumors, msgs_missed, 100.0 * msgs_missed / f64::from(num_of_nodes) / f64::from(num_of_msgs), nodes_missed, 100.0 * nodes_missed / f64::from(num_of_nodes) / f64::from(num_of_msgs) ); } #[test] fn one_rumor() { one_rumor_test(20); one_rumor_test(200); one_rumor_test(2000); } #[test] fn multiple_rumors() { let num_of_nodes: Vec<u32> = vec![20, 200, 2000]; let num_of_msgs: Vec<u32> = vec![10, 100, 1000]; for number in &num_of_nodes { for msgs in &num_of_msgs { print!( "Network of {} nodes, gossiping {} rumors:\n\t", number, msgs ); let mut nodes = create_network(*number); let metric = send_rumors(&mut nodes, *msgs); print_metric(metric.0 as f64, metric.1 as f64, &metric.2, *number, *msgs); } } } #[test] fn avg_rounds_and_missed() { let num_nodes = 20; let num_msgs = 1; let iters = 100; let mut all_rounds = vec![]; let mut all_missed = vec![]; let mut total_rounds = 0; let mut total_missed = 0; let t = Instant::now(); for _ in 0..iters { let (rounds, nodes_missed) = prove_of_stop(num_nodes, num_msgs); all_rounds.push(rounds); all_missed.push(nodes_missed); total_rounds += rounds; total_missed += nodes_missed; } println!("Elapsed time: {:?}", t.elapsed()); all_rounds.sort(); all_missed.sort(); let avg_rounds = total_rounds / iters; let avg_missed = total_missed / iters; let median_rounds = all_rounds[iters / 2]; let median_missed = all_missed[iters / 2]; println!("Iters: {:?}", iters); println!("Avg rounds: {:?}", avg_rounds); println!("Median rounds: {:?}", median_rounds); println!( "Avg missed percent: {1:.*} %", 2, 100_f32 * (avg_missed 
as f32 / num_nodes as f32) ); println!( "Median missed percent: {1:.*} %", 2, 100_f32 * (median_missed as f32 / num_nodes as f32) ); } fn prove_of_stop(num_nodes: u32, num_msgs: u32) -> (usize, usize) { let mut nodes = create_network(num_nodes); let mut rng = rand::thread_rng(); let mut rumors: Vec<String> = Vec::new(); for _ in 0..num_msgs { let mut raw = [0u8; 20]; rng.fill(&mut raw[..]); rumors.push(String::from_utf8_lossy(&raw).to_string()); } let mut rounds = 0; let mut processed = true; while processed { rounds += 1; processed = false; let mut gossips_to_push = BTreeMap::new(); for node in nodes.iter_mut() { if !rumors.is_empty() && rng.gen() { let rumor = unwrap!(rumors.pop()); let _ = node.initiate_rumor(&rumor); } if let Ok((dst_id, Some(push_gossip))) = node.next_round() { processed = true; let _ = gossips_to_push.insert((node.id(), dst_id), push_gossip); } } for ((src_id, dst_id), push_gossip) in gossips_to_push { let dst = unwrap!(nodes.iter_mut().find(|node| node.id() == dst_id)); let pull_msgs = dst.receive_gossip(&src_id, &push_gossip); let src = unwrap!(nodes.iter_mut().find(|node| node.id() == src_id)); if let Some(pull_msg) = pull_msgs { assert!(src.receive_gossip(&dst_id, &pull_msg).is_none()); } } } let mut nodes_missed = 0; for node in nodes.iter() { if node.rumors().len() as u32 != num_msgs { nodes_missed += 1; } } (rounds, nodes_missed) } }
use super::gossip::{Content, GossipState, Rumor, Statistics}; use super::messages::{Gossip, Message}; use crate::error::Error; use crate::id::Id; use bincode::serialize; use ed25519::{Keypair, PublicKey}; use rand::seq::SliceRandom; use rand_core::OsRng; use serde::ser::Serialize; use std::fmt::{self, Debug, Formatter}; pub struct Node { keys: Keypair, peers: Vec<Id>, gossip: GossipState, } impl Node { pub fn id(&self) -> Id { self.keys.public.into() } pub fn add_peer(&mut self, peer_id: Id) -> Result<(), Error> { if !self.gossip.rumors().is_empty() { return Err(Error::AlreadyStarted); } self.peers.push(peer_id); self.gossip.add_peer(); Ok(()) } pub fn initiate_rumor<T: Serialize>(&mut self, rumor: &T) -> Result<(), Error> { if self.peers.is_empty() { return Err(Error::NoPeers); } self.gossip.initiate_rumor(Content(serialize(rumor)?)); Ok(()) } pub fn next_round(&mut self) -> Result<(Id, Option<Vec<u8>>), Error> { let mut rng = rand::thread_rng(); let peer_id = match self.peers.choose(&mut rng) { Some(id) => *id, None => return Err(Error::NoPeers), }; if let Some(gossip) = self.gossip.next_round() { let serialized = self.serialise(gossip); debug!("{:?} Sending Push gossip to {:?}", self, peer_id); Ok((peer_id, Some(serialized))) } else { Ok((peer_id, None)) } } pub fn receive_gossip(&mut self, peer_id: &Id, serialised_gossip: &[u8]) -> Option<Vec<u8>> { debug!("{:?} handling gossip from {:?}", self, peer_id); let pub_key = if let Ok(pub_key) = PublicKey::from_bytes(&peer_id.0) { pub_key } else { return None; }; let gossip = if let Ok(gossip) = Message::deserialise(serialised_gossip, &pub_key) { gossip } else { error!("Failed to deserialise gossip"); return None; }; if let Some(response) = self.gossip.receive(*peer_id, gossip) { Some(self.serialise(response)) } else { None } } pub fn rumors(&self) -> Vec<Rumor> { self.gossip.rumors() } pub fn statistics(&self) -> Statistics { self.gossip.statistics() } #[cfg(test)] pub fn clear(&mut self) { self.gossip.clear(); } fn 
serialise(&mut self, gossip: Gossip) -> Vec<u8> { if let Ok(serialised_msg) = Message::serialise(&gossip, &self.keys) { return serialised_msg; } else { error!("Failed to serialise {:?}", gossip); } vec![] } } impl Default for Node { fn default() -> Self { let keys = Keypair::generate(&mut OsRng); Node { keys, peers: vec![], gossip: GossipState::new(), } } } impl Debug for Node { fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result { write!(formatter, "{:?}", self.id()) } } #[cfg(test)] mod tests { use super::*; use itertools::Itertools; use rand::seq::SliceRandom; use rand::{self, Rng}; use std::collections::BTreeMap; use std::time::Instant; use std::{cmp, u64}; fn create_network(node_count: u32) -> Vec<Node> { let mut nodes = std::iter::repeat_with(Node::default) .take(node_count as usize) .collect_vec(); for i in 0..(nodes.len() - 1) { let lhs_id = nodes[i].id(); for j in (i + 1)..nodes.len() { let rhs_id = nodes[j].id(); let _ = nodes[j].add_peer(lhs_id); let _ = nodes[i].add_peer(rhs_id); } } nodes } fn send_rumors(nodes: &mut Vec<Node>, num_of_msgs: u32) -> (u64, u64, Statistics) { let mut rng = rand::thread_rng(); let mut rumors: Vec<String> = Vec::new(); for _ in 0..num_of_msgs { let mut raw = [0u8; 20]; rng.fill(&mut raw[..]); rumors.push(String::from_utf8_lossy(&raw).to_string()); } { assert!(num_of_msgs >= 1); let node = unwrap!(nodes.choose_mut(&mut rng)); let rumor = unwrap!(rumors.pop()); let _ = node.initiate_rumor(&rumor); } let mut processed = true; while processed { processed = false; let mut gossips_to_push = BTreeMap::new(); for node in nodes.iter_mut() { if !rumors.is_empty() && rng.gen() { let rumor = unwrap!(rumors.pop()); let _ = node.initiate_rumor(&rumor); } if let Ok((dst_id, Some(push_gossip))) = node.next_round() { processed = true; let _ = gossips_to_push.insert((node.id(), dst_id), push_gossip); } } for ((src_id, dst_id), push_gossip) in gossips_to_push { let dst = unwrap!(nodes.iter_mut().find(|node| node.id() == dst_id)); let 
pull_gossip = dst.receive_gossip(&src_id, &push_gossip); let src = unwrap!(nodes.iter_mut().find(|node| node.id() == src_id)); if let Some(gossip) = pull_gossip { assert!(src.receive_gossip(&dst_id, &gossip).is_none()); } } } let mut statistics = Statistics::default(); let mut nodes_missed = 0; let mut msgs_missed = 0; for node in nodes.iter_mut() { let stat = node.statistics(); statistics.add(&stat); statistics.rounds = stat.rounds; if node.rumors().len() as u32 != num_of_msgs { nodes_missed += 1; msgs_missed += u64::from(num_of_msgs - node.rumors().len() as u32); } node.clear(); } (nodes_missed, msgs_missed, statistics) } fn one_rumor_test(num_of_nodes: u32) { let mut nodes = create_network(num_of_nodes); println!("Network of {} nodes:", num_of_nodes); let iterations = 100; let mut metrics = Vec::new(); for _ in 0..iterations { metrics.push(send_rumors(&mut nodes, 1)); } let mut stats_avg = Statistics::default(); let mut stats_max = Statistics::default(); let mut stats_min = Statistics::new_max(); let mut nodes_missed_avg = 0.0; let mut nodes_missed_max = 0; let mut nodes_missed_min = u64::MAX; let mut msgs_missed_avg = 0.0; let mut msgs_missed_max = 0; let mut msgs_missed_min = u64::MAX; for (nodes_missed, msgs_missed, stats) in metrics { nodes_missed_avg += nodes_missed as f64; nodes_missed_max = cmp::max(nodes_missed_max, nodes_missed); nodes_missed_min = cmp::min(nodes_missed_min, nodes_missed); msgs_missed_avg += msgs_missed as f64; msgs_missed_max = cmp::max(msgs_missed_max, msgs_missed); msgs_missed_min = cmp::min(msgs_missed_min, msgs_missed); stats_avg.add(&stats); stats_max.max(&stats); stats_min.min(&stats); } nodes_missed_avg /= iterations as f64; msgs_missed_avg /= iterations as f64; stats_avg.rounds /= iterations; stats_avg.sent_rumors /= iterations; stats_avg.received_rumors /= iterations; print!(" AVERAGE ---- "); print_metric( nodes_missed_avg, msgs_missed_avg, &stats_avg, num_of_nodes, 1, ); print!(" MIN -------- "); print_metric( 
nodes_missed_min as f64, msgs_missed_min as f64, &stats_min, num_of_nodes, 1, ); print!(" MAX -------- "); print_metric( nodes_missed_max as f64, msgs_missed_max as f64, &stats_max, num_of_nodes, 1, ); } fn print_metric( nodes_missed: f64, msgs_missed: f64, stats: &Statistics, num_of_nodes: u32, num_of_msgs: u32, ) { println!( "rounds: {}, msgs_sent: {}, msgs_missed: {} \ ({:.2}%), nodes_missed: {} ({:.2}%)", stats.rounds, stats.sent_rumors, msgs_missed, 100.0 * msgs_missed / f64::from(num_of_nodes) / f64::from(num_of_msgs), nodes_missed, 100.0 * nodes_missed / f64::from(num_of_nodes) / f64::from(num_of_msgs) ); } #[test] fn one_rumor() { one_rumor_test(20); one_rumor_test(200); one_rumor_test(2000); } #[test] fn multiple_rumors() { let num_of_nodes: Vec<u32> = vec![20, 200, 2000]; let num_of_msgs: Vec<u32> = vec![10, 100, 1000]; for number in &num_of_nodes { for msgs in &num_of_msgs { print!( "Network of {} nodes, gossiping {} rumors:\n\t", number, msgs ); let mut nodes = create_network(*number); let metric = send_rumors(&mut nodes, *msgs); print_metric(metric.0 as f64, metric.1 as f64, &metric.2, *number, *msgs); } } } #[test] fn avg_rounds_and_missed() { let num_nodes = 20; let num_msgs = 1; let iters = 100; let mut all_rounds = vec![]; let mut all_missed = vec![]; let mut total_rounds = 0; let mut total_missed = 0; let t = Instant::now(); for _ in 0..iters { let (rounds, nodes_missed) = prove_of_stop(num_nodes, num_msgs); all_rounds.push(rounds); all_missed.push(nodes_missed); total_rounds += rounds; total_missed += nodes_missed; } println!("Elapsed time: {:?}", t.elapsed()); all_rounds.sort(); all_missed.sort(); let avg_rounds = total_rounds / iters; let avg_missed = total_missed / iters; let median_rounds = all_rounds[iters / 2]; let median_missed = all_missed[iters / 2]; println!("Iters: {:?}", iters); println!("Avg rounds: {:?}", avg_rounds); println!("Median rounds: {:?}", median_rounds); println!( "Avg missed percent: {1:.*} %", 2, 100_f32 * (avg_missed 
as f32 / num_nodes as f32) ); println!( "Median missed percent: {1:.*} %", 2, 100_f32 * (median_missed as f32 / num_nodes as f32) ); } fn prove_of_stop(num_nodes: u32, num_msgs: u32) -> (usize, usize) { let mut nodes = create_network(num_nodes); let mut rng = rand::thread_rng(); let mut rumors: Vec<String> = Vec::new(); for _ in 0..num_msgs { let mut raw = [0u8; 20]; rng.fill(&mut raw[..]); rumors.push(String::from_utf8_lossy(&raw).to_string()); } let mut rounds = 0; let mut processed = true; while processed { rounds += 1; processed = false; let mut gossips_to_push = BTreeMap::new(); for node in nodes.iter_mut() {
if let Ok((dst_id, Some(push_gossip))) = node.next_round() { processed = true; let _ = gossips_to_push.insert((node.id(), dst_id), push_gossip); } } for ((src_id, dst_id), push_gossip) in gossips_to_push { let dst = unwrap!(nodes.iter_mut().find(|node| node.id() == dst_id)); let pull_msgs = dst.receive_gossip(&src_id, &push_gossip); let src = unwrap!(nodes.iter_mut().find(|node| node.id() == src_id)); if let Some(pull_msg) = pull_msgs { assert!(src.receive_gossip(&dst_id, &pull_msg).is_none()); } } } let mut nodes_missed = 0; for node in nodes.iter() { if node.rumors().len() as u32 != num_msgs { nodes_missed += 1; } } (rounds, nodes_missed) } }
if !rumors.is_empty() && rng.gen() { let rumor = unwrap!(rumors.pop()); let _ = node.initiate_rumor(&rumor); }
if_condition
[ { "content": "/// This is effectively a container for all the state required to manage a node while the network\n\n/// is running. `Node` implements `Future` and hence each node is run continuously on a single\n\n/// thread from the threadpool. When the future returns, the `Node` has completed processing all\n\n/// rumors.\n\nstruct TestNode {\n\n node: Node,\n\n /// This receives new messages from the `Network` object; equivalent to e.g. a new client event.\n\n channel_receiver: mpsc::UnboundedReceiver<String>,\n\n /// This can be used to send the received client messages and `Node`'s stats to the\n\n /// `Network` object.\n\n stats_sender: mpsc::UnboundedSender<(Id, Vec<String>, Statistics)>,\n\n /// Map of peer ID to the wrapped TCP stream connecting us to them.\n\n peers: HashMap<Id, MessageStream>,\n\n /// Indicates whether is in a push&pull round\n\n is_in_round: bool,\n\n /// If a message is received via `channel_sender` matches this, the node should terminate.\n\n termination_message: String,\n\n}\n\n\n\nimpl TestNode {\n\n fn new(\n\n channel_receiver: mpsc::UnboundedReceiver<String>,\n\n stats_sender: mpsc::UnboundedSender<(Id, Vec<String>, Statistics)>,\n\n termination_message: String,\n", "file_path": "examples/network.rs", "rank": 0, "score": 91782.05348033698 }, { "content": "#[derive(Debug)]\n\nstruct MessageStream {\n\n tcp_stream: TcpStream,\n\n read_buffer: BytesMut,\n\n write_buffer: BytesMut,\n\n incoming_message_length: Option<usize>,\n\n}\n\n\n\nimpl MessageStream {\n\n fn new(tcp_stream: TcpStream) -> Self {\n\n MessageStream {\n\n tcp_stream,\n\n read_buffer: BytesMut::new(),\n\n write_buffer: BytesMut::new(),\n\n incoming_message_length: None,\n\n }\n\n }\n\n\n\n /// Buffer `message` to an internal buffer. Calls to `poll_flush` will attempt to flush this\n\n /// buffer to the TCP stream. 
The size of `message` as a `u32` is added to the buffer first so\n\n /// that the correct size can be read by the receiver before it tries to retrieve the actual\n", "file_path": "examples/network.rs", "rank": 1, "score": 70542.28027437768 }, { "content": "struct Network {\n\n pool: CpuPool,\n\n // An mpsc channel sender for each node for giving new client rumors to that node.\n\n message_senders: Vec<mpsc::UnboundedSender<String>>,\n\n // An mpsc channel receiver for getting the client rumors and stats from the nodes.\n\n stats_receiver: mpsc::UnboundedReceiver<(Id, Vec<String>, Statistics)>,\n\n // The last set of client rumors received via `stats_receiver` for each node.\n\n received_rumors: HashMap<Id, Vec<String>>,\n\n // The last set of stats received via `stats_receiver` for each node.\n\n stats: HashMap<Id, Statistics>,\n\n // The futures for all nodes. When these return ready, that node has finished running.\n\n node_futures: Vec<CpuFuture<(), Error>>,\n\n // All rumors sent in the order they were passed in. 
Tuple contains the rumors and the index\n\n // of the node used to send.\n\n client_rumors: Vec<(String, usize)>,\n\n // Message which when sent to a node via its `message_sender` indicates to the node that it\n\n // should terminate.\n\n termination_message: String,\n\n}\n\n\n", "file_path": "examples/network.rs", "rank": 2, "score": 64745.84017313305 }, { "content": "fn main() {\n\n let mut network = Network::new(8);\n\n unwrap!(network.send(\"Hello\", None));\n\n unwrap!(network.send(\"there\", Some(999)));\n\n unwrap!(network.send(\"world\", Some(0)));\n\n unwrap!(network.pool.clone().spawn(network).wait());\n\n}\n", "file_path": "examples/network.rs", "rank": 3, "score": 53265.22008525152 }, { "content": "// Copyright 2018 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. 
Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse err_derive::Error;\n\n\n\n/// Node error variants.\n\n#[derive(Debug, Error)]\n\n#[allow(missing_docs)]\n\npub enum Error {\n\n #[error(display = \"Gossip group empty\")]\n\n NoPeers,\n\n #[error(display = \"Already started gossiping.\")]\n\n AlreadyStarted,\n\n #[error(display = \"Failed to verify signature.\")]\n\n SigFailure,\n\n #[error(display = \"IO error\")]\n\n Io(#[error(cause)] ::std::io::Error),\n\n #[error(display = \"Serialisation Error.\")]\n\n Serialisation(#[error(cause)] Box<bincode::ErrorKind>),\n\n}\n", "file_path": "src/error.rs", "rank": 4, "score": 44483.988033762835 }, { "content": "// Copyright 2018 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. 
Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse ed25519::{PublicKey, PUBLIC_KEY_LENGTH};\n\nuse std::convert::From;\n\nuse std::fmt::{self, Debug, Formatter};\n\n\n\n/// The ID of a node - equivalent to its public key.\n\n#[derive(Clone, Copy, Ord, PartialOrd, Eq, PartialEq, Hash, Serialize, Deserialize)]\n\npub struct Id(pub [u8; PUBLIC_KEY_LENGTH]);\n\n\n\nimpl From<PublicKey> for Id {\n\n fn from(key: PublicKey) -> Self {\n\n Id(key.to_bytes())\n", "file_path": "src/id.rs", "rank": 5, "score": 44412.732393107246 }, { "content": " }\n\n}\n\n\n\nimpl Debug for Id {\n\n fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result {\n\n write!(\n\n formatter,\n\n \"{:02x}{:02x}{:02x}..\",\n\n self.0[0], self.0[1], self.0[2]\n\n )\n\n }\n\n}\n", "file_path": "src/id.rs", "rank": 6, "score": 44407.51114195214 }, { "content": " pub fn serialise(gossip: &Gossip, keys: &Keypair) -> Result<Vec<u8>, Error> {\n\n let serialised_msg = serialize(gossip)?;\n\n let sig: Signature = keys.sign(&serialised_msg);\n\n Ok(serialize(&Message(serialised_msg, sig))?)\n\n }\n\n\n\n pub fn deserialise(serialised_msg: &[u8], key: &PublicKey) -> Result<Gossip, Error> {\n\n let msg: Message = deserialize(serialised_msg)?;\n\n if key.verify(&msg.0, &msg.1).is_ok() {\n\n Ok(deserialize(&msg.0)?)\n\n } else {\n\n Err(Error::SigFailure)\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nimpl Message {\n\n pub fn serialise(gossip: &Gossip, _keys: &Keypair) -> Result<Vec<u8>, Error> {\n\n Ok(serialize(gossip)?)\n", "file_path": "src/messages.rs", "rank": 7, "score": 44282.94493774282 }, { "content": " }\n\n\n\n pub fn deserialise(serialised_msg: &[u8], _key: &PublicKey) -> Result<Gossip, Error> {\n\n Ok(deserialize(serialised_msg)?)\n\n }\n\n}\n\n\n\n/// Gossip with rumors\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub enum Gossip {\n\n /// Sent from Node A to Node B to push a rumor.\n\n 
Push(Vec<Rumor>),\n\n /// Sent from Node B to Node A as a reaction to receiving a push rumor from A.\n\n Pull(Vec<Rumor>),\n\n}\n", "file_path": "src/messages.rs", "rank": 8, "score": 44279.8685225931 }, { "content": "// Copyright 2018 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse super::gossip::Rumor;\n\nuse crate::error::Error;\n\nuse bincode::{deserialize, serialize};\n\nuse ed25519::{Keypair, PublicKey, Signature};\n\n\n\n/// Messages sent via a direct connection, wrapper of gossip protocol requests.\n\n#[derive(Serialize, Debug, Deserialize)]\n\npub struct Message(pub Vec<u8>, pub Signature);\n\n\n\n#[cfg(not(test))]\n\nimpl Message {\n", "file_path": "src/messages.rs", "rank": 9, "score": 44273.603101739354 }, { "content": " }\n\n (hash, rumor)\n\n })\n\n .collect();\n\n self.peers_in_this_round.clear();\n\n self.statistics.sent_rumors += rumors_to_push.len() as u64;\n\n if !rumors_to_push.is_empty() {\n\n Some(Gossip::Push(rumors_to_push))\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// We've received `gossip` from `peer_id`. 
If this is a Push gossip and we've not already heard from\n\n /// `peer_id` in this round, this returns the list of Pull gossips which should be sent back to\n\n /// `peer_id`.\n\n pub fn receive(&mut self, peer_id: Id, gossip: Gossip) -> Option<Gossip> {\n\n let (is_push, received_rumors) = match gossip {\n\n Gossip::Push(received_rumors) => (true, received_rumors),\n\n Gossip::Pull(received_rumors) => (false, received_rumors),\n", "file_path": "src/gossip.rs", "rank": 10, "score": 43351.103801829326 }, { "content": " }\n\n\n\n /// Trigger the end of this round. Returns a list of Push gossips to be sent to a single random\n\n /// peer during this new round.\n\n pub fn next_round(&mut self) -> Option<Gossip> {\n\n self.statistics.rounds += 1;\n\n let mut rumors_to_push = vec![];\n\n let rumors = mem::replace(&mut self.rumors, BTreeMap::new());\n\n self.rumors = rumors\n\n .into_iter()\n\n .map(|(hash, mut rumor)| {\n\n rumor.state = rumor.state.next_round(\n\n self.max_b_age,\n\n self.max_c_rounds,\n\n self.max_rounds,\n\n &self.peers_in_this_round,\n\n );\n\n // Filter out any for which `rumor_age()` is `None`.\n\n if rumor.state.rumor_age().is_some() {\n\n rumors_to_push.push(rumor.clone());\n", "file_path": "src/gossip.rs", "rank": 11, "score": 43351.09899233957 }, { "content": " pub fn clear(&mut self) {\n\n self.statistics = Statistics::default();\n\n self.rumors.clear();\n\n self.peers_in_this_round.clear();\n\n }\n\n\n\n /// Returns the statistics.\n\n pub fn statistics(&self) -> Statistics {\n\n self.statistics\n\n }\n\n}\n\n\n\nimpl Debug for GossipState {\n\n fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result {\n\n write!(formatter, \"GossipState {{ rumors: {{ \")?;\n\n for rumor in (&self.rumors).values() {\n\n write!(\n\n formatter,\n\n \"{:02x}{:02x}{:02x}{:02x}: {:?}, \",\n\n rumor.content.0[0],\n", "file_path": "src/gossip.rs", "rank": 12, "score": 43350.58261099954 }, { "content": "\n\nimpl GossipState {\n\n pub fn new() -> Self {\n\n 
GossipState {\n\n rumors: BTreeMap::new(),\n\n network_size: 1.0,\n\n max_b_age: Age::from(0),\n\n max_c_rounds: Round::from(0),\n\n max_rounds: Round::from(0),\n\n peers_in_this_round: BTreeSet::new(),\n\n statistics: Statistics::default(),\n\n }\n\n }\n\n\n\n pub fn add_peer(&mut self) {\n\n self.network_size += 1.0;\n\n self.max_b_age = Age::from(cmp::max(1, self.network_size.ln().ln().ceil() as u8));\n\n self.max_c_rounds = Round::from(cmp::max(1, self.network_size.ln().ln().ceil() as u8));\n\n self.max_rounds = Round::from(cmp::max(1, self.network_size.ln().ceil() as u8));\n\n }\n", "file_path": "src/gossip.rs", "rank": 13, "score": 43349.58154071414 }, { "content": "pub struct Statistics {\n\n /// Total rounds experienced (each push_tick is considered as one round).\n\n pub rounds: u64,\n\n /// Total rumors sent from this node.\n\n pub sent_rumors: u64,\n\n /// Total rumors this node received.\n\n pub received_rumors: u64,\n\n}\n\n\n\nimpl Statistics {\n\n /// Create a default with u64::MAX\n\n pub fn new_max() -> Self {\n\n Statistics {\n\n rounds: u64::MAX,\n\n sent_rumors: u64::MAX,\n\n received_rumors: u64::MAX,\n\n }\n\n }\n\n\n\n /// Add the value of other into self\n", "file_path": "src/gossip.rs", "rank": 14, "score": 43348.8592435278 }, { "content": " };\n\n\n\n // Collect any responses required.\n\n let is_new_this_round = self.peers_in_this_round.insert(peer_id);\n\n let response = if is_new_this_round && is_push {\n\n let response_rumors: Vec<Rumor> = self\n\n .rumors\n\n .iter()\n\n .filter_map(|(_, rumor)| {\n\n // Filter out any for which `rumor_age()` is `None`.\n\n rumor.state.rumor_age().map(|_| rumor.clone())\n\n })\n\n .collect();\n\n self.statistics.sent_rumors += response_rumors.len() as u64;\n\n let response_gossip = Gossip::Pull(response_rumors);\n\n Some(response_gossip)\n\n } else {\n\n None\n\n };\n\n\n", "file_path": "src/gossip.rs", "rank": 15, "score": 43346.91220539727 }, { "content": "\n\n/// The gossip state of a node 
instance.\n\npub struct GossipState {\n\n rumors: BTreeMap<ContentHash, Rumor>,\n\n network_size: f64,\n\n // When in state B, if our age for a Rumor is incremented to this value, the state\n\n // transitions to C. Specified in the paper as `O(ln ln n)`.\n\n max_b_age: Age,\n\n // The maximum number of rounds to remain in state C for a given rumor. Specified in the\n\n // paper as `O(ln ln n)`.\n\n max_c_rounds: Round,\n\n // The maximum total number of rounds for a rumor to remain in states B or C. This is a\n\n // failsafe to allow the definite termination of a rumor being propagated. Specified in the\n\n // paper as `O(ln n)`.\n\n max_rounds: Round,\n\n // All peers with which we communicated during this round.\n\n peers_in_this_round: BTreeSet<Id>,\n\n // Statistics\n\n statistics: Statistics,\n\n}\n", "file_path": "src/gossip.rs", "rank": 16, "score": 43346.38751405201 }, { "content": "pub struct Content(pub Vec<u8>);\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]\n\npub struct ContentHash(pub [u8; 32]);\n\n\n\nimpl From<Content> for ContentHash {\n\n fn from(content: Content) -> Self {\n\n let mut hasher = Sha3::v256();\n\n let mut out = [0u8; 32];\n\n hasher.update(content.0.as_slice());\n\n hasher.finalize(&mut out);\n\n Self(out)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct Rumor {\n\n pub content: Content,\n\n pub state: RumorState,\n\n}\n", "file_path": "src/gossip.rs", "rank": 17, "score": 43344.15720885009 }, { "content": " pub fn add(&mut self, other: &Statistics) {\n\n self.rounds += other.rounds;\n\n self.sent_rumors += other.sent_rumors;\n\n self.received_rumors += other.received_rumors;\n\n }\n\n\n\n /// Update self with the min of self and other\n\n pub fn min(&mut self, other: &Statistics) {\n\n self.rounds = cmp::min(self.rounds, other.rounds);\n\n self.sent_rumors = cmp::min(self.sent_rumors, other.sent_rumors);\n\n self.received_rumors = 
cmp::min(self.received_rumors, other.received_rumors);\n\n }\n\n\n\n /// Update self with the max of self and other\n\n pub fn max(&mut self, other: &Statistics) {\n\n self.rounds = cmp::max(self.rounds, other.rounds);\n\n self.sent_rumors = cmp::max(self.sent_rumors, other.sent_rumors);\n\n self.received_rumors = cmp::max(self.received_rumors, other.received_rumors);\n\n }\n\n}\n", "file_path": "src/gossip.rs", "rank": 18, "score": 43343.07112723459 }, { "content": "\n\nimpl Debug for Statistics {\n\n fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result {\n\n write!(\n\n formatter,\n\n \"rounds: {}, rumors sent: {}, \\n\n\n rumors received: {}\",\n\n self.rounds, self.sent_rumors, self.received_rumors\n\n )\n\n }\n\n}\n", "file_path": "src/gossip.rs", "rank": 19, "score": 43342.75349277887 }, { "content": " rumor.content.0[1],\n\n rumor.content.0[2],\n\n rumor.content.0[3],\n\n rumor.state\n\n )?;\n\n }\n\n write!(formatter, \"}}, network_size: {}, \", self.network_size)?;\n\n write!(formatter, \"max_b_age: {}, \", self.max_b_age.value)?;\n\n write!(formatter, \"max_c_rounds: {}, \", self.max_c_rounds.value)?;\n\n write!(formatter, \"max_rounds: {}, \", self.max_rounds.value)?;\n\n write!(\n\n formatter,\n\n \"peers_in_this_round: {:?} }}\",\n\n self.peers_in_this_round\n\n )\n\n }\n\n}\n\n\n\n/// Statistics on each node.\n\n#[derive(Clone, Copy, Default)]\n", "file_path": "src/gossip.rs", "rank": 20, "score": 43342.47252577432 }, { "content": "// Copyright 2018 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. 
Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse crate::id::Id;\n\nuse crate::messages::Gossip;\n\nuse crate::rumor_state::RumorState;\n\nuse crate::rumor_state::{Age, Round};\n\nuse std::collections::btree_map::Entry;\n\nuse std::collections::{BTreeMap, BTreeSet};\n\nuse std::fmt::{self, Debug, Formatter};\n\nuse std::{cmp, mem, u64};\n\nuse tiny_keccak::{Hasher, Sha3};\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n", "file_path": "src/gossip.rs", "rank": 21, "score": 43340.86733443372 }, { "content": " for rumor in received_rumors {\n\n self.statistics.received_rumors += 1;\n\n // Add or update the entry for this rumor.\n\n let age = rumor.state.rumor_age().unwrap_or_else(Age::max);\n\n match self.rumors.entry(ContentHash::from(rumor.content.clone())) {\n\n Entry::Occupied(mut entry) => entry.get_mut().state.receive(peer_id, age),\n\n Entry::Vacant(entry) => {\n\n let _ = entry.insert(Rumor {\n\n content: rumor.content,\n\n state: RumorState::new_from_peer(age, self.max_b_age),\n\n });\n\n }\n\n }\n\n }\n\n\n\n response\n\n }\n\n\n\n #[cfg(test)]\n\n /// Clear the cache.\n", "file_path": "src/gossip.rs", "rank": 22, "score": 43339.11636553611 }, { "content": "\n\n pub fn rumors(&self) -> Vec<Rumor> {\n\n self.rumors.values().cloned().collect()\n\n }\n\n\n\n /// Start gossiping a new rumor from this node.\n\n pub fn initiate_rumor(&mut self, content: Content) {\n\n if self\n\n .rumors\n\n .insert(\n\n ContentHash::from(content.clone()),\n\n Rumor {\n\n content,\n\n state: RumorState::new(),\n\n },\n\n )\n\n .is_some()\n\n {\n\n error!(\"New rumors should be unique.\");\n\n }\n", "file_path": "src/gossip.rs", "rank": 23, "score": 43338.300023310876 }, { "content": " debug!(\"Received the same rumor more than once this round from a given peer\");\n\n }\n\n }\n\n }\n\n\n\n /// Increment `round` value, consuming `self` and returning the new 
state.\n\n pub fn next_round(\n\n self,\n\n max_b_age: Age,\n\n max_c_rounds: Round,\n\n max_rounds: Round,\n\n peers_in_this_round: &BTreeSet<Id>,\n\n ) -> RumorState {\n\n match self {\n\n RumorState::B {\n\n mut round,\n\n mut rumor_age,\n\n mut peer_ages,\n\n } => {\n\n round += Round::from(1);\n", "file_path": "src/rumor_state.rs", "rank": 45, "score": 41045.03304941706 }, { "content": "impl std::ops::AddAssign for Age {\n\n fn add_assign(&mut self, rhs: Self) {\n\n self.value += rhs.value;\n\n }\n\n}\n\n\n\n#[derive(Default, Copy, Clone, Serialize, Debug, Deserialize, PartialEq, PartialOrd)]\n\npub struct Round {\n\n pub value: u8,\n\n}\n\n\n\nimpl Round {\n\n pub fn from(value: u8) -> Self {\n\n Self { value }\n\n }\n\n}\n\n\n\nimpl std::ops::Add for Round {\n\n type Output = Round;\n\n fn add(self, rhs: Self) -> Round {\n", "file_path": "src/rumor_state.rs", "rank": 46, "score": 41045.00015168307 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Serialize, Debug, Deserialize, PartialEq, PartialOrd)]\n\npub struct Age {\n\n pub value: u8,\n\n}\n\n\n\nimpl Age {\n\n pub fn from(value: u8) -> Self {\n\n Self { value }\n\n }\n\n pub fn max() -> Self {\n\n Self {\n\n value: u8::max_value(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/rumor_state.rs", "rank": 47, "score": 41042.595490663465 }, { "content": " /// time `next_round()` is called.\n\n round: Round,\n\n /// Our age for this rumor. 
This may increase by 1 during a single round or may\n\n /// remain the same depending on the ages attached to incoming copies of this rumor.\n\n rumor_age: Age,\n\n /// The map of <peer, age>s which have sent us this rumor during this round.\n\n peer_ages: BTreeMap<Id, Age>,\n\n },\n\n /// Quadratic-shrinking phase.\n\n C {\n\n /// The number of rounds performed by the node while the rumor was in state B.\n\n rounds_in_state_b: Round,\n\n /// The round number for this rumor while in state C.\n\n round: Round,\n\n },\n\n /// Propagation complete.\n\n D,\n\n}\n\n\n\nimpl Default for RumorState {\n", "file_path": "src/rumor_state.rs", "rank": 48, "score": 41041.965629165505 }, { "content": " fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl RumorState {\n\n /// Construct a new `RumorState` where we're the initial node for the rumor. We start in\n\n /// state B with `rumor_age` set to `1`.\n\n pub fn new() -> Self {\n\n RumorState::B {\n\n round: Round::from(0),\n\n rumor_age: Age::from(1),\n\n peer_ages: BTreeMap::new(),\n\n }\n\n }\n\n\n\n /// Construct a new `RumorState` where we've received the rumor from a peer. 
If that peer\n\n /// is in state B (`age < max_b_age`) we start in state B with `rumor_age` set to `1`.\n\n /// If the peer is in state C, we start in state C too.\n\n pub fn new_from_peer(age: Age, max_b_age: Age) -> Self {\n", "file_path": "src/rumor_state.rs", "rank": 49, "score": 41041.36118535101 }, { "content": " if age < max_b_age {\n\n return RumorState::B {\n\n round: Round::from(0),\n\n rumor_age: Age::from(1),\n\n peer_ages: BTreeMap::new(),\n\n };\n\n }\n\n RumorState::C {\n\n rounds_in_state_b: Round::from(0),\n\n round: Round::from(0),\n\n }\n\n }\n\n\n\n /// Receive a copy of this rumor from `peer_id` with `age`.\n\n pub fn receive(&mut self, peer_id: Id, age: Age) {\n\n if let RumorState::B {\n\n ref mut peer_ages, ..\n\n } = *self\n\n {\n\n if peer_ages.insert(peer_id, age).is_some() {\n", "file_path": "src/rumor_state.rs", "rank": 50, "score": 41040.97510549745 }, { "content": " // If we've hit the maximum permitted number of rounds, transition to state D\n\n if round >= max_rounds {\n\n return RumorState::D;\n\n }\n\n\n\n // For any `peers_in_this_round` which aren't accounted for in `peer_ages`, add\n\n // a age of `0` for them to indicate they're in state A (i.e. they didn't have\n\n // the rumor).\n\n for peer in peers_in_this_round {\n\n if let Entry::Vacant(entry) = peer_ages.entry(*peer) {\n\n let _ = entry.insert(Age::from(0));\n\n }\n\n }\n\n\n\n // Apply the median rule, but if any peer's age >= `max_b_age` (i.e. 
that peer\n\n // is in state C), transition to state C.\n\n let mut less = 0;\n\n let mut greater_or_equal = 0;\n\n for peer_age in peer_ages.values() {\n\n if *peer_age < rumor_age {\n", "file_path": "src/rumor_state.rs", "rank": 51, "score": 41039.389322745395 }, { "content": "// Copyright 2018 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse crate::id::Id;\n\nuse std::collections::btree_map::Entry;\n\nuse std::collections::{BTreeMap, BTreeSet};\n\n\n\n/// This represents the state of a single rumor from this node's perspective.\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub enum RumorState {\n\n /// Exponential-growth phase.\n\n B {\n\n /// The round number for this rumor. This is not a globally-synchronised variable, rather\n\n /// it is set to 0 when we first receive a copy of this rumor and is incremented every\n", "file_path": "src/rumor_state.rs", "rank": 52, "score": 41039.148949317125 }, { "content": " return RumorState::D;\n\n }\n\n\n\n // Otherwise remain in state C.\n\n RumorState::C {\n\n rounds_in_state_b,\n\n round,\n\n }\n\n }\n\n RumorState::D => RumorState::D,\n\n }\n\n }\n\n\n\n /// We only need to push and pull this rumor if we're in states B or C, hence this returns\n\n /// `None` if we're in state D. State C is indicated by returning a value > `max_b_age`.\n\n pub fn rumor_age(&self) -> Option<Age> {\n\n match *self {\n\n RumorState::B { rumor_age, .. } => Some(rumor_age),\n\n RumorState::C { .. 
} => Some(Age::max()),\n\n RumorState::D => None,\n", "file_path": "src/rumor_state.rs", "rank": 53, "score": 41038.281436509715 }, { "content": " };\n\n }\n\n RumorState::B {\n\n round,\n\n rumor_age,\n\n peer_ages: BTreeMap::new(),\n\n }\n\n }\n\n RumorState::C {\n\n rounds_in_state_b,\n\n mut round,\n\n } => {\n\n round += Round::from(1);\n\n // If we've hit the maximum permitted number of rounds, transition to state D\n\n if round + rounds_in_state_b >= max_rounds {\n\n return RumorState::D;\n\n }\n\n\n\n // If we've hit the maximum rounds for remaining in state C, transition to state D.\n\n if round >= max_c_rounds {\n", "file_path": "src/rumor_state.rs", "rank": 54, "score": 41036.996111521985 }, { "content": " less += 1;\n\n } else if *peer_age >= max_b_age {\n\n return RumorState::C {\n\n rounds_in_state_b: round,\n\n round: Round::from(0),\n\n };\n\n } else {\n\n greater_or_equal += 1;\n\n }\n\n }\n\n if greater_or_equal > less {\n\n rumor_age += Age::from(1);\n\n }\n\n\n\n // If our age has reached `max_b_age`, transition to state C, otherwise remain\n\n // in state B.\n\n if rumor_age >= max_b_age {\n\n return RumorState::C {\n\n rounds_in_state_b: round,\n\n round: Round::from(0),\n", "file_path": "src/rumor_state.rs", "rank": 55, "score": 41034.09631425693 }, { "content": " Round::from(self.value + rhs.value)\n\n }\n\n}\n\n\n\nimpl std::ops::AddAssign for Round {\n\n fn add_assign(&mut self, rhs: Self) {\n\n self.value += rhs.value;\n\n }\n\n}\n", "file_path": "src/rumor_state.rs", "rank": 56, "score": 41033.28353455104 }, { "content": " fn send(&mut self, rumor: &str, node_index: Option<usize>) -> Result<(), Error> {\n\n let count = match node_index {\n\n Some(index) if index < self.message_senders.len() => index,\n\n _ => rand::thread_rng().gen_range(0, self.message_senders.len()),\n\n };\n\n self.client_rumors.push((rumor.to_string(), count));\n\n unwrap!(self.message_senders[count].unbounded_send(rumor.to_string(),));\n\n Ok(())\n\n 
}\n\n}\n\n\n\nimpl Future for Network {\n\n type Item = ();\n\n type Error = String;\n\n\n\n fn poll(&mut self) -> Poll<(), String> {\n\n while let Async::Ready(Some((node_id, rumors, stats))) = unwrap!(self.stats_receiver.poll())\n\n {\n\n println!(\"Received from {:?} -- {:?} -- {:?}\", node_id, rumors, stats);\n\n let _ = self.received_rumors.insert(node_id, rumors);\n", "file_path": "examples/network.rs", "rank": 57, "score": 23570.686322459213 }, { "content": "impl Future for TestNode {\n\n type Item = ();\n\n type Error = Error;\n\n\n\n fn poll(&mut self) -> Poll<(), Error> {\n\n if !self.receive_from_channel() {\n\n return Ok(Async::Ready(()));\n\n }\n\n self.receive_from_peers();\n\n self.tick();\n\n self.send_to_peers();\n\n let stats = self.node.statistics();\n\n let rumors = self\n\n .node\n\n .rumors()\n\n .into_iter()\n\n .map(|rumor| unwrap!(deserialize::<String>(&rumor.content.0)))\n\n .collect_vec();\n\n let id = self.id();\n\n unwrap!(self.stats_sender.unbounded_send((id, rumors, stats)));\n", "file_path": "examples/network.rs", "rank": 58, "score": 23569.42805245167 }, { "content": " self.node.id()\n\n }\n\n\n\n /// Receive all new messages from the `Network` object. 
If we receive the termination message,\n\n /// immediately return `false`, otherwise return `true`.\n\n fn receive_from_channel(&mut self) -> bool {\n\n while let Async::Ready(Some(message)) = unwrap!(self.channel_receiver.poll()) {\n\n if message == self.termination_message {\n\n return false;\n\n }\n\n unwrap!(self.node.initiate_rumor(&message));\n\n }\n\n true\n\n }\n\n\n\n /// Triggers a new push round\n\n fn tick(&mut self) {\n\n if !self.is_in_round {\n\n self.is_in_round = true;\n\n if let Ok((peer_id, Some(gossip_to_send))) = self.node.next_round() {\n", "file_path": "examples/network.rs", "rank": 59, "score": 23565.877210596715 }, { "content": " if let Some(message_stream) = self.peers.get_mut(&peer_id) {\n\n // Buffer the gossip to be sent.\n\n message_stream.buffer(&gossip_to_send);\n\n }\n\n }\n\n }\n\n }\n\n\n\n /// Iterate the peers reading any new messages from their TCP streams. Removes any peers that\n\n /// have disconnected.\n\n fn receive_from_peers(&mut self) {\n\n let mut disconnected_peers = vec![];\n\n let mut has_response = false;\n\n for (peer_id, ref mut message_stream) in &mut self.peers {\n\n loop {\n\n match message_stream.poll() {\n\n Ok(Async::Ready(Some(message))) => {\n\n let msgs_to_send = self.node.receive_gossip(peer_id, &message);\n\n // Buffer the messages to be sent back.\n\n if let Some(msg) = msgs_to_send {\n", "file_path": "examples/network.rs", "rank": 60, "score": 23564.2850396228 }, { "content": "impl Network {\n\n fn new(node_count: usize) -> Self {\n\n let (stats_sender, stats_receiver) = mpsc::unbounded();\n\n let mut network = Network {\n\n // pool: CpuPool::new(1),\n\n pool: CpuPool::new_num_cpus(),\n\n message_senders: vec![],\n\n stats_receiver,\n\n received_rumors: HashMap::new(),\n\n stats: HashMap::new(),\n\n node_futures: vec![],\n\n client_rumors: vec![],\n\n termination_message: rand::thread_rng()\n\n .sample_iter(&Alphanumeric)\n\n .take(20)\n\n .collect(),\n\n };\n\n\n\n let mut nodes = vec![];\n\n for _ 
in 0..node_count {\n", "file_path": "examples/network.rs", "rank": 61, "score": 23563.770804590076 }, { "content": " let _ = self.stats.insert(node_id, stats);\n\n }\n\n\n\n let client_rumors_len = self.client_rumors.len();\n\n let enough_rumors = |rumors: &Vec<String>| rumors.len() >= client_rumors_len;\n\n if !self.received_rumors.is_empty() && self.received_rumors.values().all(enough_rumors) {\n\n return Ok(Async::Ready(()));\n\n }\n\n\n\n if !self.stats.is_empty() && self.stats.values().all(|stats| stats.rounds > 200) {\n\n return Err(\"Not all nodes got all rumors.\".to_string());\n\n }\n\n\n\n Ok(Async::NotReady)\n\n }\n\n}\n\n\n\nimpl Drop for Network {\n\n fn drop(&mut self) {\n\n for message_sender in &mut self.message_senders {\n\n unwrap!(message_sender.unbounded_send(self.termination_message.clone(),));\n\n }\n\n let node_futures = mem::replace(&mut self.node_futures, vec![]);\n\n for node_future in node_futures {\n\n unwrap!(node_future.wait());\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/network.rs", "rank": 62, "score": 23562.68639789675 }, { "content": " }\n\n self.is_in_round = has_response;\n\n }\n\n\n\n /// Iterate the peers flushing the write buffers to the TCP streams. 
Removes any peers that\n\n /// have disconnected.\n\n fn send_to_peers(&mut self) {\n\n let mut disconnected_peers = vec![];\n\n for (peer_id, ref mut message_stream) in &mut self.peers {\n\n if let Err(error) = message_stream.poll_flush() {\n\n println!(\"Error writing messages to {:?}: {:?}\", peer_id, error);\n\n disconnected_peers.push(*peer_id);\n\n }\n\n }\n\n for disconnected_peer in disconnected_peers {\n\n let _ = unwrap!(self.peers.remove(&disconnected_peer));\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/network.rs", "rank": 63, "score": 23562.4215880413 }, { "content": " has_response = true;\n\n message_stream.buffer(&msg);\n\n }\n\n }\n\n Ok(Async::Ready(None)) => {\n\n // EOF was reached; the remote peer has disconnected.\n\n disconnected_peers.push(*peer_id);\n\n break;\n\n }\n\n Ok(Async::NotReady) => break,\n\n Err(error) => {\n\n println!(\"Error reading messages from {:?}: {:?}\", peer_id, error);\n\n disconnected_peers.push(*peer_id);\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n for disconnected_peer in disconnected_peers {\n\n let _ = unwrap!(self.peers.remove(&disconnected_peer));\n", "file_path": "examples/network.rs", "rank": 64, "score": 23561.676756532575 }, { "content": "\n\n // If we have no peers left, there is nothing more for this node to do.\n\n if self.peers.is_empty() {\n\n return Ok(Async::Ready(()));\n\n }\n\n Ok(Async::NotReady)\n\n }\n\n}\n\n\n\nimpl Debug for TestNode {\n\n fn fmt(&self, formatter: &mut Formatter<'_>) -> fmt::Result {\n\n write!(formatter, \"{:?} - {:?}\", thread::current().id(), self.id())\n\n }\n\n}\n\n\n", "file_path": "examples/network.rs", "rank": 65, "score": 23561.64995335877 }, { "content": " ) -> Self {\n\n TestNode {\n\n node: Node::default(),\n\n channel_receiver,\n\n stats_sender,\n\n peers: HashMap::new(),\n\n is_in_round: false,\n\n termination_message,\n\n }\n\n }\n\n\n\n fn add_peer(&mut self, id: Id, tcp_stream: TcpStream) {\n\n assert!(self\n\n .peers\n\n .insert(id, 
MessageStream::new(tcp_stream))\n\n .is_none());\n\n unwrap!(self.node.add_peer(id));\n\n }\n\n\n\n fn id(&self) -> Id {\n", "file_path": "examples/network.rs", "rank": 66, "score": 23560.412230440077 }, { "content": " let (message_sender, message_receiver) = mpsc::unbounded();\n\n let node = TestNode::new(\n\n message_receiver,\n\n stats_sender.clone(),\n\n network.termination_message.clone(),\n\n );\n\n network.message_senders.push(message_sender);\n\n nodes.push(node);\n\n }\n\n nodes.sort_by(|lhs, rhs| lhs.id().cmp(&rhs.id()));\n\n println!(\"Nodes: {:?}\", nodes.iter().map(TestNode::id).collect_vec());\n\n\n\n // Connect all the nodes.\n\n let listening_address = unwrap!(\"127.0.0.1:0\".parse());\n\n for i in 0..(node_count - 1) {\n\n let listener = unwrap!(TcpListener::bind(&listening_address));\n\n let lhs_id = nodes[i].id();\n\n let listener_address = unwrap!(listener.local_addr());\n\n let incoming = Rc::new(RefCell::new(listener.incoming().wait()));\n\n for j in (i + 1)..node_count {\n", "file_path": "examples/network.rs", "rank": 67, "score": 23558.505346490365 }, { "content": " let rhs_id = nodes[j].id();\n\n let rhs_stream =\n\n current_thread::run(|_| TcpStream::connect(&listener_address)).wait();\n\n nodes[j].add_peer(lhs_id, unwrap!(rhs_stream));\n\n let incoming = incoming.clone();\n\n let lhs_stream = unwrap!(current_thread::run(|_| incoming.borrow_mut()).next());\n\n nodes[i].add_peer(rhs_id, unwrap!(lhs_stream));\n\n }\n\n }\n\n\n\n // Start the nodes running by executing their `poll()` functions on the threadpool.\n\n for node in nodes {\n\n network.node_futures.push(network.pool.spawn(node));\n\n }\n\n\n\n network\n\n }\n\n\n\n /// Send the given `rumor`. 
If `node_index` is `Some` and is less than the number of `Node`s\n\n /// in the `Network`, then the `Node` at that index will be chosen as the initial informed one.\n", "file_path": "examples/network.rs", "rank": 68, "score": 23555.006147320684 }, { "content": "use futures::sync::mpsc;\n\nuse futures::{Async, Future, Poll, Stream};\n\nuse futures_cpupool::{CpuFuture, CpuPool};\n\nuse itertools::Itertools;\n\nuse rand::distributions::Alphanumeric;\n\nuse rand::Rng;\n\nuse sn_gossip::{Error, Id, Node, Statistics};\n\nuse std::cell::RefCell;\n\nuse std::collections::HashMap;\n\nuse std::fmt::{self, Debug, Formatter};\n\nuse std::io::Write;\n\nuse std::mem;\n\nuse std::rc::Rc;\n\nuse std::thread;\n\nuse tokio::executor::current_thread;\n\nuse tokio::net::{TcpListener, TcpStream};\n\nuse tokio_io::AsyncRead;\n\n\n\n/// TCP stream wrapper presenting a message-based read / write interface.\n\n#[derive(Debug)]\n", "file_path": "examples/network.rs", "rank": 69, "score": 23553.714951544855 }, { "content": " type Item = BytesMut;\n\n type Error = Error;\n\n\n\n fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {\n\n // First, read any new data that might have been received off the TCP stream\n\n let socket_closed = self.fill_read_buffer()?.is_ready();\n\n\n\n // If we're not part way through reading an incoming message, read the next incoming\n\n // message's length.\n\n if self.incoming_message_length.is_none() && self.read_buffer.len() >= 4 {\n\n let length_buffer = self.read_buffer.split_to(4);\n\n let length = unwrap!(deserialize::<u32>(&length_buffer)) as usize;\n\n self.incoming_message_length = Some(length);\n\n }\n\n\n\n // If we have the next message's length available, read it.\n\n if let Some(length) = self.incoming_message_length {\n\n if self.read_buffer.len() >= length {\n\n self.incoming_message_length = None;\n\n return Ok(Async::Ready(Some(self.read_buffer.split_to(length))));\n", "file_path": "examples/network.rs", "rank": 70, "score": 
23553.471531139716 }, { "content": " /// message.\n\n fn buffer(&mut self, message: &[u8]) {\n\n let serialised_length = unwrap!(serialize(&(message.len() as u32)));\n\n if self.write_buffer.remaining_mut() < serialised_length.len() + message.len() {\n\n self.write_buffer.extend_from_slice(&serialised_length);\n\n self.write_buffer.extend_from_slice(message);\n\n } else {\n\n self.write_buffer.put(&serialised_length);\n\n self.write_buffer.put(message);\n\n }\n\n }\n\n\n\n /// Flush the write buffer to the TCP stream.\n\n fn poll_flush(&mut self) -> Poll<(), Error> {\n\n while !self.write_buffer.is_empty() {\n\n // `try_nb` is kind of like `try_ready`, but for operations that return `io::Result`\n\n // instead of `Async`. In the case of `io::Result`, an error of `WouldBlock` is\n\n // equivalent to `Async::NotReady`.\n\n let num_bytes = try_nb!(self.tcp_stream.write(&self.write_buffer));\n\n assert!(num_bytes > 0);\n", "file_path": "examples/network.rs", "rank": 71, "score": 23551.3212942681 }, { "content": " unused_qualifications,\n\n unused_results\n\n)]\n\n#![allow(\n\n box_pointers,\n\n missing_copy_implementations,\n\n missing_debug_implementations,\n\n variant_size_differences,\n\n non_camel_case_types\n\n)]\n\n\n\n#[macro_use]\n\nextern crate futures;\n\nuse rand;\n\n#[macro_use]\n\nextern crate tokio_io;\n\n#[macro_use]\n\nextern crate unwrap;\n\nuse bincode::{deserialize, serialize};\n\nuse bytes::{BufMut, BytesMut};\n", "file_path": "examples/network.rs", "rank": 72, "score": 23551.27812872657 }, { "content": " // Discard the first `num_bytes` bytes of the buffer.\n\n let _ = self.write_buffer.split_to(num_bytes);\n\n }\n\n\n\n Ok(Async::Ready(()))\n\n }\n\n\n\n /// Read data from the TCP stream. 
This only returns `Ready` when the socket has closed.\n\n fn fill_read_buffer(&mut self) -> Poll<(), Error> {\n\n loop {\n\n self.read_buffer.reserve(1024);\n\n let num_bytes = try_ready!(self.tcp_stream.read_buf(&mut self.read_buffer));\n\n if num_bytes == 0 {\n\n return Ok(Async::Ready(()));\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Stream for MessageStream {\n", "file_path": "examples/network.rs", "rank": 73, "score": 23547.481760799634 }, { "content": "// Copyright 2018 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\n//! Run a local network of gossiping nodes.\n\n\n\n#![forbid(\n\n exceeding_bitshifts,\n\n mutable_transmutes,\n\n no_mangle_const_items,\n\n unknown_crate_types\n\n)]\n\n#![deny(\n\n bad_style,\n\n improper_ctypes,\n", "file_path": "examples/network.rs", "rank": 74, "score": 23544.297895288542 }, { "content": " }\n\n }\n\n\n\n if socket_closed {\n\n Ok(Async::Ready(None))\n\n } else {\n\n Ok(Async::NotReady)\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/network.rs", "rank": 75, "score": 23543.57760627369 }, { "content": " missing_docs,\n\n non_shorthand_field_patterns,\n\n overflowing_literals,\n\n stable_features,\n\n unconditional_recursion,\n\n unknown_lints,\n\n unsafe_code,\n\n unused_allocation,\n\n unused_attributes,\n\n unused_comparisons,\n\n unused_features,\n\n unused_parens,\n\n while_true,\n\n unused\n\n)]\n\n#![warn(\n\n trivial_casts,\n\n trivial_numeric_casts,\n\n unused_extern_crates,\n\n unused_import_braces,\n", "file_path": "examples/network.rs", "rank": 76, "score": 
23541.406556919217 }, { "content": "mod error;\n\nmod gossip;\n\nmod id;\n\nmod messages;\n\nmod node;\n\nmod rumor_state;\n\n\n\npub use crate::error::Error;\n\npub use crate::gossip::Statistics;\n\npub use crate::id::Id;\n\npub use crate::node::Node;\n", "file_path": "src/lib.rs", "rank": 77, "score": 19107.26800936548 }, { "content": " unused\n\n)]\n\n#![warn(\n\n trivial_casts,\n\n trivial_numeric_casts,\n\n unused_extern_crates,\n\n unused_import_braces,\n\n unused_qualifications,\n\n unused_results\n\n)]\n\n#![allow(missing_copy_implementations, missing_debug_implementations)]\n\n\n\n#[macro_use]\n\nextern crate log;\n\n#[macro_use]\n\nextern crate serde_derive;\n\n#[cfg(test)]\n\n#[macro_use]\n\nextern crate unwrap;\n\n\n", "file_path": "src/lib.rs", "rank": 78, "score": 19090.8946115261 }, { "content": "// Copyright 2018 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\n//! 
An implementation of a push-pull gossip protocol.\n\n\n\n#![doc(\n\n html_logo_url = \"https://raw.githubusercontent.com/maidsafe/QA/master/Images/maidsafe_logo.png\",\n\n html_favicon_url = \"https://maidsafe.net/img/favicon.ico\",\n\n html_root_url = \"https://docs.rs/sn_gossip\"\n\n)]\n\n#![forbid(\n\n arithmetic_overflow,\n\n mutable_transmutes,\n\n no_mangle_const_items,\n", "file_path": "src/lib.rs", "rank": 79, "score": 19087.039719037522 }, { "content": " unknown_crate_types,\n\n warnings\n\n)]\n\n#![deny(\n\n bad_style,\n\n deprecated,\n\n improper_ctypes,\n\n missing_docs,\n\n non_shorthand_field_patterns,\n\n overflowing_literals,\n\n stable_features,\n\n unconditional_recursion,\n\n unknown_lints,\n\n unsafe_code,\n\n unused_allocation,\n\n unused_attributes,\n\n unused_comparisons,\n\n unused_features,\n\n unused_parens,\n\n while_true,\n", "file_path": "src/lib.rs", "rank": 80, "score": 19083.382516524238 }, { "content": "# sn_gossip\n\n\n\nAn implementation of a push-pull gossip protocol described in [Randomized Rumor Spreading - Karp et al. [FOCS 2000]](http://zoo.cs.yale.edu/classes/cs426/2013/bib/karp00randomized.pdf).\n\n\n\n## Evaluation Result\n\n\n\n![gossip](img/evaluate_result.png?raw=true)\n\n\n\n\n\n<a name=\"license\"></a>\n\n## License\n\nThis library is dual-licensed under the Modified BSD ( [LICENSE-BSD](https://opensource.org/licenses/BSD-3-Clause)) or the MIT license ( [LICENSE-MIT](http://opensource.org/licenses/MIT)) at your option.\n\n\n\n## Contributing\n\n\n\nWant to contribute? Great :tada:\n\n\n\nThere are many ways to give back to the project, whether it be writing new code, fixing bugs, or just reporting errors. 
All forms of contributions are encouraged!\n\n\n\nFor instructions on how to contribute, see our [Guide to contributing](https://github.com/maidsafe/QA/blob/master/CONTRIBUTING.md).\n", "file_path": "README.md", "rank": 81, "score": 14244.28952857 }, { "content": "# Changelog\n\n\n\nAll notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.\n\n\n\n### [0.1.2](https://github.com/maidsafe/sn_gossip/compare/v0.1.1...v0.1.2) (2020-11-23)\n\n\n\n### [0.1.1](https://github.com/maidsafe/sn_gossip/compare/v0.1.0...v0.1.1) (2020-09-29)\n\n\n\n\n\n### Bug Fixes\n\n\n\n* **clippy:** fix clippy warnings and errors ([812b78b](https://github.com/maidsafe/sn_gossip/commit/812b78bfa5014e087397b778bb4219ed97733923))\n\n\n\n### [0.1.0](https://github.com/maidsafe/sn_gossip/compare/v0.1.0...v0.1.0) (2018-02-28)\n\n* Initial implementation\n", "file_path": "CHANGELOG.md", "rank": 82, "score": 14238.027641695868 } ]
Rust
src/bpf/syscall.rs
RG4421/oxidebpf
18d721ac5f03fe291f4e4e88544b5c54937a7108
#[cfg(feature = "log_buf")] use lazy_static::lazy_static; use retry::delay::NoDelay; use retry::{retry_with_index, OperationResult}; use slog::info; use std::ffi::CString; use std::mem::MaybeUninit; use std::os::unix::io::RawFd; use Errno::EAGAIN; use libc::{c_uint, syscall, SYS_bpf}; use nix::errno::{errno, Errno}; use crate::bpf::constant::bpf_cmd::{ BPF_MAP_CREATE, BPF_MAP_LOOKUP_ELEM, BPF_MAP_UPDATE_ELEM, BPF_PROG_LOAD, }; use crate::bpf::{BpfAttr, BpfCode, BpfProgLoad, KeyVal, MapConfig, MapElem, SizedBpfAttr}; use crate::error::*; use crate::LOGGER; pub type BpfMapType = u32; #[cfg(feature = "log_buf")] lazy_static! { static ref LOG_BUF_SIZE_BYTE: usize = std::env::var("LOG_SIZE") .unwrap_or_else(|_| "4096".to_string()) .trim() .parse::<usize>() .unwrap_or(4096); } unsafe fn sys_bpf(cmd: u32, arg_bpf_attr: SizedBpfAttr) -> Result<usize, OxidebpfError> { #![allow(clippy::useless_conversion)] let size = arg_bpf_attr.size; let ptr: *const BpfAttr = &arg_bpf_attr.bpf_attr; let mut e = 0; let result = retry_with_index(NoDelay.take(5), |idx| { let ret = syscall((SYS_bpf as i32).into(), cmd, ptr, size); if ret < 0 { e = errno(); info!( LOGGER.0, "sys_bpf(); cmd: {}; errno: {}; arg_bpf_attr: {:?}", cmd, e, arg_bpf_attr ); if Errno::from_i32(e) == EAGAIN && idx < 5 { OperationResult::Retry("EAGAIN") } else { OperationResult::Err("Unrecoverable error retrying BPF load") } } else { OperationResult::Ok(ret as usize) } }); match result { Ok(size) => Ok(size), Err(err) => { if e == 0 { Err(err.into()) } else { Err(OxidebpfError::LinuxError( format!("sys_bpf({}, {:#?})", cmd, arg_bpf_attr), Errno::from_i32(e), )) } } } } pub(crate) fn bpf_prog_load( prog_type: u32, insns: &BpfCode, license: String, kernel_version: u32, ) -> Result<RawFd, OxidebpfError> { #![allow(clippy::redundant_closure)] let insn_cnt = insns.0.len(); let insns = insns.0.clone().into_boxed_slice(); let license = CString::new(license.as_bytes()).map_err(|e| OxidebpfError::CStringConversionError(e))?; 
#[cfg(feature = "log_buf")] let log_buf = vec![0u8; *LOG_BUF_SIZE_BYTE]; let bpf_prog_load = BpfProgLoad { prog_type, insn_cnt: insn_cnt as u32, insns: insns.as_ptr() as u64, license: license.as_ptr() as u64, kern_version: kernel_version, #[cfg(feature = "log_buf")] log_level: 1, #[cfg(feature = "log_buf")] log_size: *LOG_BUF_SIZE_BYTE as u32, #[cfg(feature = "log_buf")] log_buf: log_buf.as_ptr() as u64, ..Default::default() }; let bpf_attr = SizedBpfAttr { bpf_attr: BpfAttr { bpf_prog_load }, size: 48, }; unsafe { match sys_bpf(BPF_PROG_LOAD, bpf_attr) { Ok(fd) => Ok(fd as RawFd), Err(e) => { info!( LOGGER.0, "bpf_prog_load(); error with sys_bpf; bpf_attr: {:?}", bpf_attr ); #[cfg(feature = "log_buf")] { let log_string = String::from_utf8(log_buf) .unwrap_or_else(|_| String::from("")) .trim_matches('\0') .to_string(); let last_chars: String = log_string .chars() .rev() .take(100) .collect::<String>() .chars() .rev() .collect(); info!(LOGGER.0, "bpf_prog_load(); log_buf: {}", last_chars); Err(OxidebpfError::BpfProgLoadError((Box::new(e), log_string))) } #[cfg(not(feature = "log_buf"))] { Err(OxidebpfError::BpfProgLoadError(( Box::new(e), "".to_string(), ))) } } } } } pub(crate) fn bpf_map_lookup_elem<K, V>(map_fd: RawFd, key: K) -> Result<V, OxidebpfError> { let mut buf = MaybeUninit::zeroed(); let map_elem = MapElem { map_fd: map_fd as u32, key: &key as *const K as u64, keyval: KeyVal { value: &mut buf as *mut _ as u64, }, flags: 0, }; let bpf_attr = SizedBpfAttr { bpf_attr: BpfAttr { map_elem }, size: std::mem::size_of::<MapElem>(), }; unsafe { sys_bpf(BPF_MAP_LOOKUP_ELEM, bpf_attr)?; Ok(buf.assume_init()) } } pub(crate) fn bpf_map_update_elem<K, V>( map_fd: RawFd, key: K, val: V, ) -> Result<(), OxidebpfError> { let map_elem = MapElem { map_fd: map_fd as u32, key: &key as *const K as u64, keyval: KeyVal { value: &val as *const V as u64, }, flags: 0, }; let bpf_attr = SizedBpfAttr { bpf_attr: BpfAttr { map_elem }, size: std::mem::size_of::<MapElem>(), }; unsafe { 
sys_bpf(BPF_MAP_UPDATE_ELEM, bpf_attr)?; } Ok(()) } pub(crate) unsafe fn bpf_map_create_with_sized_attr( bpf_attr: SizedBpfAttr, ) -> Result<RawFd, OxidebpfError> { let fd = sys_bpf(BPF_MAP_CREATE, bpf_attr)?; Ok(fd as RawFd) } pub(crate) unsafe fn bpf_map_create_with_config( map_config: MapConfig, size: usize, ) -> Result<RawFd, OxidebpfError> { let bpf_attr = MaybeUninit::<BpfAttr>::zeroed(); let mut bpf_attr = bpf_attr.assume_init(); bpf_attr.map_config = map_config; let bpf_attr = SizedBpfAttr { bpf_attr, size }; let fd = sys_bpf(BPF_MAP_CREATE, bpf_attr)?; Ok(fd as RawFd) } pub(crate) fn bpf_map_create( map_type: BpfMapType, key_size: c_uint, value_size: c_uint, max_entries: u32, ) -> Result<RawFd, OxidebpfError> { let map_config = MapConfig { map_type: map_type as u32, key_size, value_size, max_entries, ..Default::default() }; let bpf_attr = SizedBpfAttr { bpf_attr: BpfAttr { map_config }, size: 16, }; unsafe { let fd = sys_bpf(BPF_MAP_CREATE, bpf_attr)?; Ok(fd as RawFd) } } #[cfg(test)] #[allow(unused_imports)] pub(crate) mod tests { use std::convert::TryInto; use std::ffi::c_void; use std::os::raw::{c_int, c_uint}; use std::os::unix::io::{FromRawFd, RawFd}; use std::path::PathBuf; use nix::errno::{errno, Errno}; use scopeguard::defer; use crate::blueprint::ProgramBlueprint; use crate::bpf::constant::bpf_map_type::BPF_MAP_TYPE_ARRAY; use crate::bpf::constant::bpf_prog_type::BPF_PROG_TYPE_KPROBE; use crate::bpf::syscall::{bpf_map_lookup_elem, bpf_prog_load}; use crate::bpf::{BpfCode, BpfInsn}; use crate::error::OxidebpfError; use crate::perf::syscall::{perf_event_ioc_set_bpf, perf_event_open}; use crate::perf::{PerfBpAddr, PerfBpLen, PerfEventAttr, PerfSample, PerfWakeup}; use std::fs; #[test] fn bpf_map_create() { let fd: RawFd = crate::bpf::syscall::bpf_map_create( BPF_MAP_TYPE_ARRAY, std::mem::size_of::<u32>() as c_uint, std::mem::size_of::<u32>() as c_uint, 10, ) .unwrap(); defer!(unsafe { libc::close(fd); }); } #[test] fn bpf_map_create_and_read() { let 
fd: RawFd = crate::bpf::syscall::bpf_map_create( BPF_MAP_TYPE_ARRAY, std::mem::size_of::<u32>() as c_uint, std::mem::size_of::<u32>() as c_uint, 20, ) .unwrap(); defer!(unsafe { libc::close(fd); }); match crate::bpf::syscall::bpf_map_lookup_elem::<u32, u32>(fd, 0) { Ok(val) => { assert_eq!(val, 0); } Err(e) => { panic!("{:?}", e); } } } #[test] fn bpf_map_create_and_write_and_read() { let fd: RawFd = crate::bpf::syscall::bpf_map_create( BPF_MAP_TYPE_ARRAY, std::mem::size_of::<u32>() as c_uint, std::mem::size_of::<u64>() as c_uint, 20, ) .unwrap(); defer!(unsafe { libc::close(fd); }); crate::bpf::syscall::bpf_map_update_elem::<u32, u64>(fd, 5, 50).unwrap(); match crate::bpf::syscall::bpf_map_lookup_elem::<u32, u64>(fd, 5) { Ok(val) => { assert_eq!(val, 50); } Err(e) => { panic!("{:?}", e) } } } #[repr(C)] struct Arg { arg: u32, } extern "C" fn clone_child(_: *mut c_void) -> c_int { std::thread::sleep(std::time::Duration::from_millis(1)); 0 } #[test] fn test_setns() { use libc::{clone, CLONE_NEWNS, SIGCHLD}; use memmap::MmapMut; use std::os::unix::io::AsRawFd; let mut arg = Arg { arg: 0x1337beef }; let mut stack = MmapMut::map_anon(1024 * 1024).unwrap(); unsafe { let ret = clone( clone_child, &mut stack as *mut _ as *mut _, CLONE_NEWNS, &mut arg as *mut _ as *mut _, ); if ret < 0 { let errno = errno(); let errmsg = Errno::from_i32(errno); panic!("could not create new mount namespace: {:?}", errmsg); } let file = std::fs::OpenOptions::new() .read(true) .write(false) .open(format!("/proc/{}/ns/mnt", ret)) .expect("Could not open mount ns file"); let fd = file.as_raw_fd(); crate::perf::syscall::setns(fd, CLONE_NEWNS).unwrap(); } } #[test] fn test_bpf_prog_load() { let program = PathBuf::from(env!("CARGO_MANIFEST_DIR")) .join("test") .join(format!("test_program_{}", std::env::consts::ARCH)); let data = fs::read(program).unwrap(); let blueprint = ProgramBlueprint::new(&data, None).unwrap(); let program_object = blueprint.programs.get("test_program").unwrap(); match 
bpf_prog_load( BPF_PROG_TYPE_KPROBE, &program_object.code, program_object.license.clone(), program_object.kernel_version, ) { Ok(_fd) => {} Err(e) => panic!("{:?}", e), }; } }
#[cfg(feature = "log_buf")] use lazy_static::lazy_static; use retry::delay::NoDelay; use retry::{retry_with_index, OperationResult}; use slog::info; use std::ffi::CString; use std::mem::MaybeUninit; use std::os::unix::io::RawFd; use Errno::EAGAIN; use libc::{c_uint, syscall, SYS_bpf}; use nix::errno::{errno, Errno}; use crate::bpf::constant::bpf_cmd::{ BPF_MAP_CREATE, BPF_MAP_LOOKUP_ELEM, BPF_MAP_UPDATE_ELEM, BPF_PROG_LOAD, }; use crate::bpf::{BpfAttr, BpfCode, BpfProgLoad, KeyVal, MapConfig, MapElem, SizedBpfAttr}; use crate::error::*; use crate::LOGGER; pub type BpfMapType = u32; #[cfg(feature = "log_buf")] lazy_static! { static ref LOG_BUF_SIZE_BYTE: usize = std::env::var("LOG_SIZE") .unwrap_or_else(|_| "4096".to_string()) .trim() .parse::<usize>() .unwrap_or(4096); } unsafe fn sys_bpf(cmd: u32, arg_bpf_attr: SizedBpfAttr) -> Result<usize, OxidebpfError> { #![allow(clippy::useless_conversion)] let size = arg_bpf_attr.size; let ptr: *const BpfAttr = &arg_bpf_attr.bpf_attr; let mut e = 0; let result = retry_with_index(NoDelay.take(5), |idx| { let ret = syscall((SYS_bpf as i32).into(), cmd, ptr, size); if ret < 0 { e = errno(); info!( LOGGER.0, "sys_bpf(); cmd: {}; errno: {}; arg_bpf_attr: {:?}", cmd, e, arg_bpf_attr ); if Errno::from_i32(e) == EAGAIN && idx < 5 { OperationResult::Retry("EAGAIN") } else { OperationResult::Err("Unrecoverable error retrying BPF load") } } else { OperationResult::Ok(ret as usize) } }); match result { Ok(size) => Ok(size), Err(err) => { if e == 0 { Err(err.into()) } else { Err(OxidebpfError::LinuxError( format!("sys_bpf({}, {:#?})", cmd, arg_bpf_attr), Errno::from_i32(e), )) } } } } pub(crate) fn bpf_prog_load( prog_type: u32, insns: &BpfCode, license: String, kernel_version: u32, ) -> Result<RawFd, OxidebpfError> { #![allow(clippy::redundant_closure)] let insn_cnt = insns.0.len(); let insns = insns.0.clone().into_boxed_slice(); let license = CString::new(license.as_bytes()).map_err(|e| OxidebpfError::CStringConversionError(e))?; 
#[cfg(feature = "log_buf")] let log_buf = vec![0u8; *LOG_BUF_SIZE_BYTE]; let bpf_prog_load = BpfProgLoad { prog_type, insn_cnt: insn_cnt as u32, insns: insns.as_ptr() as u64, license: license.as_ptr() as u64, kern_version: kernel_version, #[cfg(feature = "log_buf")] log_level: 1, #[cfg(feature = "log_buf")] log_size: *LOG_BUF_SIZE_BYTE as u32, #[cfg(feature = "log_buf")] log_buf: log_buf.as_ptr() as u64, ..Default::default() }; let bpf_attr = SizedBpfAttr { bpf_attr: BpfAttr { bpf_prog_load }, size: 48, }; unsafe { match sys_bpf(BPF_PROG_LOAD, bpf_attr) { Ok(fd) => Ok(fd as RawFd), Err(e) => { info!( LOGGER.0, "bpf_prog_load(); error with sys_bpf; bpf_attr: {:?}", bpf_attr ); #[cfg(feature = "log_buf")] { let log_string = String::from_utf8(log_buf) .unwrap_or_else(|_| String::from("")) .trim_matches('\0') .to_string(); let last_chars: String = log_string .chars() .rev() .take(100) .collect::<String>() .chars() .rev() .collect(); info!(LOGGER.0, "bpf_prog_load(); log_buf: {}", last_chars); Err(OxidebpfError::BpfProgLoadError((Box::new(e), log_string))) } #[cfg(not(feature = "log_buf"))] { Err(OxidebpfError::BpfProgLoadError(( Box::new(e), "".to_string(), ))) } } } } } pub(crate) fn bpf_map_lookup_elem<K, V>(map_fd: RawFd, key: K) -> Result<V, OxidebpfError> { let mut buf = MaybeUninit::zeroed(); let map_elem = MapElem { map_fd: map_fd as u32, key: &key as *const K as u64, keyval: KeyVal { value: &mut buf as *mut _ as u64, }, flags: 0, }; let bpf_attr = SizedBpfAttr { bpf_attr: BpfAttr { map_elem }, size: std::mem::size_of::<MapElem>(), }; unsafe { sys_bpf(BPF_MAP_LOOKUP_ELEM, bpf_attr)?; Ok(buf.assume_init()) } } pub(crate) fn bpf_map_update_elem<K, V>( map_fd: RawFd, key: K, val: V, ) -> Result<(), OxidebpfError> {
let bpf_attr = SizedBpfAttr { bpf_attr: BpfAttr { map_elem }, size: std::mem::size_of::<MapElem>(), }; unsafe { sys_bpf(BPF_MAP_UPDATE_ELEM, bpf_attr)?; } Ok(()) } pub(crate) unsafe fn bpf_map_create_with_sized_attr( bpf_attr: SizedBpfAttr, ) -> Result<RawFd, OxidebpfError> { let fd = sys_bpf(BPF_MAP_CREATE, bpf_attr)?; Ok(fd as RawFd) } pub(crate) unsafe fn bpf_map_create_with_config( map_config: MapConfig, size: usize, ) -> Result<RawFd, OxidebpfError> { let bpf_attr = MaybeUninit::<BpfAttr>::zeroed(); let mut bpf_attr = bpf_attr.assume_init(); bpf_attr.map_config = map_config; let bpf_attr = SizedBpfAttr { bpf_attr, size }; let fd = sys_bpf(BPF_MAP_CREATE, bpf_attr)?; Ok(fd as RawFd) } pub(crate) fn bpf_map_create( map_type: BpfMapType, key_size: c_uint, value_size: c_uint, max_entries: u32, ) -> Result<RawFd, OxidebpfError> { let map_config = MapConfig { map_type: map_type as u32, key_size, value_size, max_entries, ..Default::default() }; let bpf_attr = SizedBpfAttr { bpf_attr: BpfAttr { map_config }, size: 16, }; unsafe { let fd = sys_bpf(BPF_MAP_CREATE, bpf_attr)?; Ok(fd as RawFd) } } #[cfg(test)] #[allow(unused_imports)] pub(crate) mod tests { use std::convert::TryInto; use std::ffi::c_void; use std::os::raw::{c_int, c_uint}; use std::os::unix::io::{FromRawFd, RawFd}; use std::path::PathBuf; use nix::errno::{errno, Errno}; use scopeguard::defer; use crate::blueprint::ProgramBlueprint; use crate::bpf::constant::bpf_map_type::BPF_MAP_TYPE_ARRAY; use crate::bpf::constant::bpf_prog_type::BPF_PROG_TYPE_KPROBE; use crate::bpf::syscall::{bpf_map_lookup_elem, bpf_prog_load}; use crate::bpf::{BpfCode, BpfInsn}; use crate::error::OxidebpfError; use crate::perf::syscall::{perf_event_ioc_set_bpf, perf_event_open}; use crate::perf::{PerfBpAddr, PerfBpLen, PerfEventAttr, PerfSample, PerfWakeup}; use std::fs; #[test] fn bpf_map_create() { let fd: RawFd = crate::bpf::syscall::bpf_map_create( BPF_MAP_TYPE_ARRAY, std::mem::size_of::<u32>() as c_uint, 
std::mem::size_of::<u32>() as c_uint, 10, ) .unwrap(); defer!(unsafe { libc::close(fd); }); } #[test] fn bpf_map_create_and_read() { let fd: RawFd = crate::bpf::syscall::bpf_map_create( BPF_MAP_TYPE_ARRAY, std::mem::size_of::<u32>() as c_uint, std::mem::size_of::<u32>() as c_uint, 20, ) .unwrap(); defer!(unsafe { libc::close(fd); }); match crate::bpf::syscall::bpf_map_lookup_elem::<u32, u32>(fd, 0) { Ok(val) => { assert_eq!(val, 0); } Err(e) => { panic!("{:?}", e); } } } #[test] fn bpf_map_create_and_write_and_read() { let fd: RawFd = crate::bpf::syscall::bpf_map_create( BPF_MAP_TYPE_ARRAY, std::mem::size_of::<u32>() as c_uint, std::mem::size_of::<u64>() as c_uint, 20, ) .unwrap(); defer!(unsafe { libc::close(fd); }); crate::bpf::syscall::bpf_map_update_elem::<u32, u64>(fd, 5, 50).unwrap(); match crate::bpf::syscall::bpf_map_lookup_elem::<u32, u64>(fd, 5) { Ok(val) => { assert_eq!(val, 50); } Err(e) => { panic!("{:?}", e) } } } #[repr(C)] struct Arg { arg: u32, } extern "C" fn clone_child(_: *mut c_void) -> c_int { std::thread::sleep(std::time::Duration::from_millis(1)); 0 } #[test] fn test_setns() { use libc::{clone, CLONE_NEWNS, SIGCHLD}; use memmap::MmapMut; use std::os::unix::io::AsRawFd; let mut arg = Arg { arg: 0x1337beef }; let mut stack = MmapMut::map_anon(1024 * 1024).unwrap(); unsafe { let ret = clone( clone_child, &mut stack as *mut _ as *mut _, CLONE_NEWNS, &mut arg as *mut _ as *mut _, ); if ret < 0 { let errno = errno(); let errmsg = Errno::from_i32(errno); panic!("could not create new mount namespace: {:?}", errmsg); } let file = std::fs::OpenOptions::new() .read(true) .write(false) .open(format!("/proc/{}/ns/mnt", ret)) .expect("Could not open mount ns file"); let fd = file.as_raw_fd(); crate::perf::syscall::setns(fd, CLONE_NEWNS).unwrap(); } } #[test] fn test_bpf_prog_load() { let program = PathBuf::from(env!("CARGO_MANIFEST_DIR")) .join("test") .join(format!("test_program_{}", std::env::consts::ARCH)); let data = fs::read(program).unwrap(); let 
blueprint = ProgramBlueprint::new(&data, None).unwrap(); let program_object = blueprint.programs.get("test_program").unwrap(); match bpf_prog_load( BPF_PROG_TYPE_KPROBE, &program_object.code, program_object.license.clone(), program_object.kernel_version, ) { Ok(_fd) => {} Err(e) => panic!("{:?}", e), }; } }
let map_elem = MapElem { map_fd: map_fd as u32, key: &key as *const K as u64, keyval: KeyVal { value: &val as *const V as u64, }, flags: 0, };
assignment_statement
[]
Rust
src/main.rs
Xe/gamebridge
b2e7ba21aa14b556e34d7a99dd02e22f9a1365aa
#[macro_use] extern crate bitflags; pub(crate) mod au; pub(crate) mod controller; pub(crate) mod twitch; use crate::au::Lerper; use anyhow::{anyhow, Result}; use log::{debug, error, info, warn}; use std::{ fs::{File, OpenOptions}, io::{Read, Write}, str::from_utf8, sync::{Arc, RwLock}, thread::spawn, }; pub(crate) struct State { frame: u64, stickx: Lerper, sticky: Lerper, a_button: Lerper, b_button: Lerper, z_button: Lerper, r_button: Lerper, start: Lerper, c_left: Lerper, c_right: Lerper, c_up: Lerper, c_down: Lerper, } pub(crate) type MTState = Arc<RwLock<State>>; fn main() -> Result<()> { pretty_env_logger::try_init()?; kankyo::init()?; let mut vblank = File::open("vblank")?; let mut input = OpenOptions::new().write(true).open("input")?; const STICK_LERP_TIME: f64 = 270.0; const BUTTON_LERP_TIME: f64 = 20.0; let st = { let st = State { frame: 0, stickx: Lerper::init(STICK_LERP_TIME, 127, -128, 0), sticky: Lerper::init(STICK_LERP_TIME, 127, -128, 0), a_button: Lerper::init(BUTTON_LERP_TIME, 64, -1, 0), b_button: Lerper::init(BUTTON_LERP_TIME, 64, -1, 0), z_button: Lerper::init(BUTTON_LERP_TIME, 64, -1, 0), r_button: Lerper::init(BUTTON_LERP_TIME, 64, -1, 0), start: Lerper::init(BUTTON_LERP_TIME / 4.0, 64, -1, 0), c_left: Lerper::init(BUTTON_LERP_TIME, 64, -1, 0), c_right: Lerper::init(BUTTON_LERP_TIME, 64, -1, 0), c_up: Lerper::init(BUTTON_LERP_TIME, 64, -1, 0), c_down: Lerper::init(BUTTON_LERP_TIME, 64, -1, 0), }; Arc::new(RwLock::new(st)) }; info!("ready"); { let st = st.clone(); spawn(move || twitch::run(st)); } loop { let mut data = [0; 3]; debug!("waiting for vblank"); vblank.read(&mut data)?; let str = from_utf8(&data)?; debug!("got data: {}", str); let mut controller = [0; 4]; match str { "OK\n" => { { let mut data = st.write().unwrap(); data.frame += 1; } let mut data = st.write().unwrap(); let frame = data.frame + 1; debug!("x before: {}", data.stickx.scalar); let mut stickx_scalar = data.stickx.apply(frame) as i8; debug!("x after: {}", 
data.stickx.scalar); debug!("y before: {}", data.sticky.scalar); let mut sticky_scalar = data.sticky.apply(frame) as i8; debug!("y after: {}", data.sticky.scalar); let dist = stick_distance(stickx_scalar, sticky_scalar); if dist <= 10 { stickx_scalar = 0; sticky_scalar = 0; } use controller::{HiButtons, LoButtons}; let mut hi = HiButtons::NONE; let mut lo = LoButtons::NONE; const BUTTON_PUSH_THRESHOLD: i64 = 2; data.a_button.apply(frame); if data.a_button.pressed(BUTTON_PUSH_THRESHOLD) { hi = hi | HiButtons::A_BUTTON; } data.b_button.apply(frame); if data.b_button.pressed(BUTTON_PUSH_THRESHOLD) { hi = hi | HiButtons::B_BUTTON; } data.z_button.apply(frame); if data.z_button.pressed(BUTTON_PUSH_THRESHOLD) { hi = hi | HiButtons::Z_BUTTON; } data.start.apply(frame); if data.start.pressed(BUTTON_PUSH_THRESHOLD) { hi = hi | HiButtons::START; } data.r_button.apply(frame); if data.r_button.pressed(BUTTON_PUSH_THRESHOLD) { lo = lo | LoButtons::R_BUTTON; } data.c_up.apply(frame); if data.c_up.pressed(BUTTON_PUSH_THRESHOLD) { lo = lo | LoButtons::C_UP; } data.c_down.apply(frame); if data.c_down.pressed(BUTTON_PUSH_THRESHOLD) { lo = lo | LoButtons::C_DOWN; } data.c_left.apply(frame); if data.c_left.pressed(BUTTON_PUSH_THRESHOLD) { lo = lo | LoButtons::C_LEFT; } data.c_right.apply(frame); if data.c_right.pressed(BUTTON_PUSH_THRESHOLD) { lo = lo | LoButtons::C_RIGHT; } debug!( "[ rust] {:02x}{:02x} {:02x}{:02x}", hi.bits(), lo.bits(), stickx_scalar as u8, sticky_scalar as u8 ); controller[0] = hi.bits() as u8; controller[1] = lo.bits() as u8; controller[2] = stickx_scalar as u8; controller[3] = sticky_scalar as u8; input.write(&controller)?; } "BYE" => { warn!("asked to exit by the game"); return Ok(()); } _ => { error!("got unknown FIFO data {}", str); return Err(anyhow!("unknown FIFO data received")); } }; } } fn stick_distance(x: i8, y: i8) -> i8 { let x = (x as f64).powi(2); let y = (y as f64).powi(2); (x + y).sqrt() as i8 } #[cfg(test)] mod test { #[test] fn 
stick_distance() { for case in [ (0, 0, 0), (127, 0, 127), (64, 64, 90), (-64, 64, 90), (-64, -64, 90), ] .iter() { let x = case.0; let y = case.1; assert_eq!(crate::stick_distance(x, y), case.2); } } }
#[macro_use] extern crate bitflags; pub(crate) mod au; pub(crate) mod controller; pub(crate) mod twitch; use crate::au::Lerper; use anyhow::{anyhow, Result}; use log::{debug, error, info, warn}; use std::{ fs::{File, OpenOptions}, io::{Read, Write}, str::from_utf8, sync::{Arc, RwLock}, thread::spawn, }; pub(crate) struct State { frame: u64, stickx: Lerper, sticky: Lerper, a_button: Lerper, b_button: Lerper, z_button: Lerper, r_button: Lerper, start: Lerper, c_left: Lerper, c_right: Lerper, c_up: Lerper, c_down: Lerper, } pub(crate) type MTState = Arc<RwLock<State>>; fn main() -> Result<()> { pretty_env_logger::try_init()?; kankyo::init()?; let mut vblank = File::open("vblank")?; let mut input = OpenOptions::new().write(true).open("input")?; const STICK_LERP_TIME: f64 = 270.0; const BUTTON_LERP_TIME: f64 = 20.0; let st = { let st = State { frame: 0, stickx: Lerper::init(STICK_LERP_TIME, 127, -128, 0), sticky: Lerper::init(STICK_LERP_TIME, 127, -128, 0), a_button: Lerper::init(BUTTON_LERP_TIME, 64, -1, 0), b_button: Lerper::init(BUTTON_LERP_TIME, 64, -1, 0), z_button: Lerper::init(BUTTON_LERP_TIME, 64, -1, 0), r_button: Lerper::init(BUTTON_LERP_TIME, 64, -1, 0), start: Lerper::init(BUTTON_LERP_TIME / 4.0, 64, -1, 0), c_left: Lerper::init(BUTTON_LERP_TIME, 64, -1, 0), c_right: Lerper::init(BUTTON_LERP_TIME, 64, -1, 0), c_up: Lerper::init(BUTTON_LERP_TIME, 64, -1, 0), c_down: Lerper::init(BUTTON_LERP_TIME, 64, -1, 0), }; Arc::new(RwLock::new(st)) }; info!("ready"); { let st = st.clone(); spawn(move || twitch::run(st)); } loop { let mut data = [0; 3]; debug!("waiting for vblank"); vblank.read(&mut data)?; let str = from_utf8(&data)?; debug!("got data: {}", str); let mut controller = [0; 4]; match str { "OK\n" => { { let mut data = st.write().unwrap(); data.frame += 1; } let mut data = st.write().unwrap(); let frame = data.frame + 1; debug!("x before: {}", data.stickx.scalar); let mut stickx_scalar = data.stickx.apply(frame) as i8; debug!("x after: {}", 
data.stickx.scalar); debug!("y before: {}", data.sticky.scalar); let mut sticky_scalar = data.sticky.apply(frame) as i8; debug!("y after: {}", data.sticky.scalar); let dist = stick_distance(stickx_scalar, sticky_scalar); if dist <= 10 { stickx_scalar = 0; sticky_scalar = 0; } use controller::{HiButtons, LoButtons}; let mut hi = HiButtons::NONE; let mut lo = LoButtons::NONE; const BUTTON_PUSH_THRESHOLD: i64 = 2; data.a_button.apply(frame); if data.a_button.pressed(BUTTON_PUSH_THRESHOLD) { hi = hi | HiButtons::A_BUTTON; } data.b_button.apply(frame); if data.b_button.pressed(BUTTON_PUSH_THRESHOLD) { hi = hi | HiButtons::B_BUTTON; } data.z_button.apply(frame); if data.z_button.pressed(BUTTON_PUSH_THRESHOLD) { hi = hi | HiButtons::Z_BUTTON; } data.start.apply(frame); if data.start.pressed(BUTTON_PUSH_THRESHOLD) { hi = hi | HiButtons::START; } data.r_button.apply(frame); if data.r_button.pressed(BUTTON_PUSH_THRESHOLD) { lo = lo | LoButtons::R_BUTTON; } data.c_up.apply(frame); if data.c_up.pressed(BUTTON_PUSH_THRESHOLD) { lo = lo | LoButtons::C_UP; } data.c_down.apply(frame); if data.c_down.pressed(BUTTON_PUSH_THRESHOLD) { lo = lo | LoButtons::C_DOWN; } data.c_left.apply(frame); if data.c_left.pressed(BUTTON_PUSH_THRESHOLD) { lo = lo | LoButtons::C_LEFT;
:02x}", hi.bits(), lo.bits(), stickx_scalar as u8, sticky_scalar as u8 ); controller[0] = hi.bits() as u8; controller[1] = lo.bits() as u8; controller[2] = stickx_scalar as u8; controller[3] = sticky_scalar as u8; input.write(&controller)?; } "BYE" => { warn!("asked to exit by the game"); return Ok(()); } _ => { error!("got unknown FIFO data {}", str); return Err(anyhow!("unknown FIFO data received")); } }; } } fn stick_distance(x: i8, y: i8) -> i8 { let x = (x as f64).powi(2); let y = (y as f64).powi(2); (x + y).sqrt() as i8 } #[cfg(test)] mod test { #[test] fn stick_distance() { for case in [ (0, 0, 0), (127, 0, 127), (64, 64, 90), (-64, 64, 90), (-64, -64, 90), ] .iter() { let x = case.0; let y = case.1; assert_eq!(crate::stick_distance(x, y), case.2); } } }
} data.c_right.apply(frame); if data.c_right.pressed(BUTTON_PUSH_THRESHOLD) { lo = lo | LoButtons::C_RIGHT; } debug!( "[ rust] {:02x}{:02x} {:02x}{
random
[ { "content": "fn lerp(start: i64, end: i64, t: f64) -> i64 {\n\n (start as f64 * (1.0 - t) + (end as f64) * t) as i64\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n #[test]\n\n fn lerp_scale() {\n\n for case in [(0.1, 10), (0.5, 31)].iter() {\n\n let t = case.0;\n\n let start = 127.0 * t;\n\n assert_eq!(super::lerp(start as i64, 0, t), case.1);\n\n }\n\n }\n\n\n\n #[test]\n\n fn lerper() {\n\n use super::Lerper;\n\n let mut lerper = Lerper::init(15.0, 127, -128, 0);\n\n\n", "file_path": "src/au.rs", "rank": 0, "score": 100966.6217408972 }, { "content": "static FILE *input;\n", "file_path": "contrib/sm64pc/controller_gamebridge.c", "rank": 3, "score": 43022.00019815491 }, { "content": "static FILE *vblank;\n", "file_path": "contrib/sm64pc/controller_gamebridge.c", "rank": 4, "score": 43022.00019815491 }, { "content": "#define input_fname \"input\"\n", "file_path": "contrib/sm64pc/controller_gamebridge.c", "rank": 5, "score": 26356.100508269104 }, { "content": "#define vblank_fname \"vblank\"\n", "file_path": "contrib/sm64pc/controller_gamebridge.c", "rank": 6, "score": 26356.100508269104 }, { "content": "#[derive(Copy, Clone)]\n\npub(crate) struct Lerper {\n\n extended_tick: u64,\n\n lerp_time: f64,\n\n goal: i64,\n\n pub(crate) scalar: i64,\n\n max: i64,\n\n min: i64,\n\n}\n\n\n\nimpl Lerper {\n\n pub(crate) fn init(lerp_time: f64, max: i64, min: i64, goal: i64) -> Lerper {\n\n Lerper {\n\n extended_tick: 0,\n\n lerp_time: lerp_time,\n\n goal: goal,\n\n scalar: 0, // I hope to GOD that 0 is the resting point\n\n max: max,\n\n min: min,\n\n }\n", "file_path": "src/au.rs", "rank": 7, "score": 19185.35354780441 }, { "content": " }\n\n\n\n pub(crate) fn add(&mut self, new_scalar: i64) {\n\n self.scalar += new_scalar;\n\n }\n\n\n\n pub(crate) fn update(&mut self, new_scalar: i64) {\n\n self.scalar = new_scalar;\n\n }\n\n\n\n pub(crate) fn apply(&mut self, now: u64) -> i64 {\n\n let scalar = self.scalar;\n\n self.scalar = match scalar {\n\n _ if scalar == self.goal => 
self.goal,\n\n _ if scalar >= self.max => {\n\n self.extended_tick = now;\n\n scalar - 1\n\n }\n\n _ if scalar <= self.min => {\n\n self.extended_tick = now;\n", "file_path": "src/au.rs", "rank": 8, "score": 19184.49795893879 }, { "content": " scalar + 1\n\n }\n\n _ => {\n\n let t = (now - self.extended_tick) as f64 / self.lerp_time;\n\n lerp(self.scalar, 0, t)\n\n }\n\n };\n\n\n\n if self.scalar >= self.max {\n\n return self.max;\n\n }\n\n\n\n if self.scalar <= self.min {\n\n return self.min;\n\n }\n\n\n\n self.scalar\n\n }\n\n\n\n pub(crate) fn pressed(&mut self, threshold: i64) -> bool {\n\n if self.scalar <= threshold {\n\n self.scalar = 0;\n\n }\n\n\n\n self.scalar >= threshold\n\n }\n\n}\n\n\n", "file_path": "src/au.rs", "rank": 9, "score": 19182.321686917356 }, { "content": " for case in [(127, 3, 126), (100, 8, 66), (-124, 8, -82)].iter() {\n\n let scalar = case.0;\n\n let now = case.1;\n\n let want = case.2;\n\n\n\n lerper.update(scalar);\n\n let result = lerper.apply(now);\n\n assert_eq!(result, want);\n\n }\n\n }\n\n}\n", "file_path": "src/au.rs", "rank": 10, "score": 19181.28447722191 }, { "content": "use crate::MTState;\n\nuse tokio::stream::StreamExt as _;\n\nuse twitchchat::{events, Control, Dispatcher, Runner, Status};\n\n\n\npub(crate) fn run(st: MTState) {\n\n use tokio::runtime::Runtime;\n\n Runtime::new()\n\n .expect(\"Failed to create Tokio runtime\")\n\n .block_on(handle(st));\n\n}\n\n\n\nasync fn handle(st: MTState) {\n\n let (nick, pass) = (\n\n // twitch name\n\n std::env::var(\"TWITCH_NICK\").unwrap(),\n\n // oauth token for twitch name\n\n std::env::var(\"TWITCH_PASS\").unwrap(),\n\n );\n\n\n\n // putting this in the env so people don't join my channel when running this\n", "file_path": "src/twitch.rs", "rank": 11, "score": 19002.490346868537 }, { "content": " let chatline = chatline.to_ascii_lowercase();\n\n let mut data = st.write().unwrap();\n\n const BUTTON_ADD_AMT: i64 = 64;\n\n\n\n for cmd in chatline.to_string().split(\" 
\").collect::<Vec<&str>>().iter() {\n\n match *cmd {\n\n \"a\" => data.a_button.add(BUTTON_ADD_AMT),\n\n \"b\" => data.b_button.add(BUTTON_ADD_AMT),\n\n \"z\" => data.z_button.add(BUTTON_ADD_AMT),\n\n \"r\" => data.r_button.add(BUTTON_ADD_AMT),\n\n \"cup\" => data.c_up.add(BUTTON_ADD_AMT),\n\n \"cdown\" => data.c_down.add(BUTTON_ADD_AMT),\n\n \"cleft\" => data.c_left.add(BUTTON_ADD_AMT),\n\n \"cright\" => data.c_right.add(BUTTON_ADD_AMT),\n\n \"start\" => data.start.add(BUTTON_ADD_AMT),\n\n \"up\" => data.sticky.add(127),\n\n \"down\" => data.sticky.add(-128),\n\n \"left\" => data.stickx.add(-128),\n\n \"right\" => data.stickx.add(127),\n\n \"stop\" => {data.stickx.update(0); data.sticky.update(0);},\n", "file_path": "src/twitch.rs", "rank": 12, "score": 19001.703408531954 }, { "content": "\n\nasync fn run_loop(\n\n mut control: Control,\n\n mut dispatcher: Dispatcher,\n\n channels: &[String],\n\n st: MTState,\n\n) {\n\n let mut join = dispatcher.subscribe::<events::Join>();\n\n let mut part = dispatcher.subscribe::<events::Part>();\n\n let mut pmsg = dispatcher.subscribe::<events::Privmsg>();\n\n\n\n async fn wait_and_join(\n\n control: &mut Control,\n\n dispatcher: &mut Dispatcher,\n\n channels: &[String],\n\n ) {\n\n let ready = dispatcher.wait_for::<events::IrcReady>().await.unwrap();\n\n eprintln!(\"our name: {}\", ready.nickname);\n\n\n\n let w = control.writer();\n", "file_path": "src/twitch.rs", "rank": 13, "score": 18998.931723169564 }, { "content": " let channels = &[std::env::var(\"TWITCH_CHANNEL\").unwrap()];\n\n\n\n let dispatcher = Dispatcher::new();\n\n let (runner, control) = Runner::new(dispatcher.clone(), twitchchat::RateLimit::default());\n\n let fut = run_loop(control.clone(), dispatcher, channels, st);\n\n\n\n let conn = twitchchat::connect_easy_tls(&nick, &pass).await.unwrap();\n\n\n\n tokio::select! 
{\n\n _ = fut => { control.stop() }\n\n status = runner.run(conn) => {\n\n match status {\n\n Ok(Status::Eof) => {}\n\n Ok(Status::Canceled) => {}\n\n Ok(Status::Timeout) => {}\n\n Err(err) => panic!(err),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/twitch.rs", "rank": 14, "score": 18998.25727437533 }, { "content": " for channel in channels {\n\n eprintln!(\"joining: {}\", channel);\n\n let _ = w.join(channel).await;\n\n eprintln!(\"joined\");\n\n }\n\n eprintln!(\"joined all channels\")\n\n }\n\n\n\n wait_and_join(&mut control, &mut dispatcher, channels).await;\n\n\n\n loop {\n\n tokio::select! {\n\n Some(msg) = join.next() => {\n\n eprintln!(\"{} joined {}\", msg.name, msg.channel);\n\n }\n\n Some(msg) = part.next() => {\n\n eprintln!(\"{} left {}\", msg.name, msg.channel);\n\n }\n\n Some(msg) = pmsg.next() => {\n\n let chatline = msg.data.to_string();\n", "file_path": "src/twitch.rs", "rank": 15, "score": 18996.455986519333 }, { "content": " _ => {},\n\n }\n\n }\n\n\n\n eprintln!(\"[{}] {}: {}\", msg.channel, msg.name, msg.data);\n\n }\n\n\n\n else => { break }\n\n }\n\n }\n\n}\n", "file_path": "src/twitch.rs", "rank": 16, "score": 18993.892283731715 }, { "content": "struct ControllerAPI controller_gamebridge = {\n\n gamebridge_init,\n\n gamebridge_read\n", "file_path": "contrib/sm64pc/controller_gamebridge.c", "rank": 25, "score": 17771.596062051816 }, { "content": "extern struct ControllerAPI controller_gamebridge;\n", "file_path": "contrib/sm64pc/controller_gamebridge.h", "rank": 26, "score": 17771.596062051816 }, { "content": "bitflags! 
{\n\n // 0x0100 Digital Pad Right\n\n // 0x0200 Digital Pad Left\n\n // 0x0400 Digital Pad Down\n\n // 0x0800 Digital Pad Up\n\n // 0x1000 Start\n\n // 0x2000 Z\n\n // 0x4000 B\n\n // 0x8000 A\n\n pub(crate) struct HiButtons: u8 {\n\n const NONE = 0x00;\n\n const DPAD_RIGHT = 0x01;\n\n const DPAD_LEFT = 0x02;\n\n const DPAD_DOWN = 0x04;\n\n const DPAD_UP = 0x08;\n\n const START = 0x10;\n\n const Z_BUTTON = 0x20;\n\n const B_BUTTON = 0x40;\n\n const A_BUTTON = 0x80;\n\n }\n", "file_path": "src/controller.rs", "rank": 27, "score": 17508.78107505796 }, { "content": "}\n\n\n\nbitflags! {\n\n // 0x0001 C-Right\n\n // 0x0002 C-Left\n\n // 0x0004 C-Down\n\n // 0x0008 C-Up\n\n // 0x0010 R\n\n // 0x0020 L\n\n // 0x0040 (reserved)\n\n // 0x0080 (reserved)\n\n pub(crate) struct LoButtons: u8 {\n\n const NONE = 0x00;\n\n const C_RIGHT = 0x01;\n\n const C_LEFT = 0x02;\n\n const C_DOWN = 0x04;\n\n const C_UP = 0x08;\n\n const R_BUTTON = 0x10;\n\n const L_BUTTON = 0x20;\n\n }\n\n}\n", "file_path": "src/controller.rs", "rank": 28, "score": 17507.5496748533 }, { "content": "#define ok \"OK\\n\"\n", "file_path": "contrib/sm64pc/controller_gamebridge.c", "rank": 29, "score": 13030.965861497994 }, { "content": "#define bye \"BYE\"\n\n\n", "file_path": "contrib/sm64pc/controller_gamebridge.c", "rank": 30, "score": 13030.965861497994 }, { "content": "static void gamebridge_init(void) {\n\n if (!configGameBridge) {\n\n return;\n\n }\n\n\n\n printf(\"[gamebridge] starting...\\n\");\n\n fflush(stdout);\n\n\n\n unlink(vblank_fname);\n\n unlink(input_fname);\n\n\n\n int result;\n\n\n\n result = mkfifo(vblank_fname, S_IRUSR|S_IWUSR);\n\n if (result < 0) {\n\n perror(\"mkfifo \"vblank_fname);\n\n assert(result < 0);\n\n }\n\n\n\n result = mkfifo(input_fname, S_IRUSR| S_IWUSR);\n\n if (result < 0) {\n\n perror(\"mkfifo \"input_fname);\n\n assert(result < 0);\n\n }\n\n\n\n vblank = fopen(vblank_fname, \"w+\");\n\n input = fopen(input_fname, \"rb+\");\n\n assert(vblank);\n\n 
assert(input);\n\n\n\n setvbuf(vblank, NULL, _IONBF, 0);\n\n setvbuf(input, NULL, _IONBF, 0);\n\n\n\n printf(\"[gamebridge] starting rust daemon\\n\");\n\n fflush(stdout);\n\n system(\"gamebridge &\");\n\n atexit(gamebridge_close);\n", "file_path": "contrib/sm64pc/controller_gamebridge.c", "rank": 31, "score": 12397.32056258292 }, { "content": "static void gamebridge_close(void) {\n\n if (!configGameBridge) {\n\n return;\n\n }\n\n\n\n printf(\"\\n[gamebridge] exiting\\n\");\n\n fwrite(bye, 1, strlen(bye), vblank);\n\n fclose(vblank);\n\n fclose(input);\n\n\n\n unlink(vblank_fname);\n\n unlink(input_fname);\n", "file_path": "contrib/sm64pc/controller_gamebridge.c", "rank": 32, "score": 12397.32056258292 }, { "content": "static void gamebridge_read(OSContPad *pad) {\n\n if (!configGameBridge) {\n\n return;\n\n }\n\n\n\n //printf(\"[gamebridge] waiting for input\\n\");\n\n fwrite(ok, 1, strlen(ok), vblank);\n\n uint8_t bytes[4] = {0};\n\n fread(bytes, 1, 4, input);\n\n pad->button = (bytes[0] << 8) | bytes[1];\n\n pad->stick_x = bytes[2];\n\n pad->stick_y = bytes[3];\n\n //printf(\"[gamebridge] %02x%02x %02x%02x\\n\", bytes[0], bytes[1], bytes[2], bytes[3]);\n\n fflush(stdout);\n", "file_path": "contrib/sm64pc/controller_gamebridge.c", "rank": 33, "score": 12397.32056258292 } ]
Rust
streamer/src/parquet.rs
schradert/podra
e13081e7571ce4427bd70bfaabf47cea6bc51a3e
use std::{ env, fs::File, iter::once, sync::Arc, thread::spawn, time::SystemTime, }; use arrow2::{ array::{Array, Int32Array, Utf8Array}, datatypes::{Field, PhysicalType, Schema}, error::Result, io::parquet::{read, write}, record_batch::RecordBatch, }; use crossbeam_channel::unbounded; use rayon::prelude::*; fn read_column_chunk( path: &str, row_group: usize, column: usize, ) -> Result<Box<dyn Array>> { let mut file = File::open(path)?; let file_metadata = read::read_metadata(&mut file)?; let metadata = file_metadata.row_groups[row_group].column(column); let arrow_schema = read::get_schema(&file_metadata)?; let data_type = arrow_schema.fields()[column].data_type().clone(); let pages = read::get_page_iterator(metadata, &mut file, None, vec![])?; let mut pages = read::Decompressor::new(pages, vec![]); read::page_iter_to_array(&mut pages, metadata, data_type) } fn concurrent_read(path: &str) -> Result<Vec<Box<dyn Array>>> { let (sender, receiver) = unbounded(); let mut file = File::open(path)?; let file_metadata = read::read_metadata(&mut file)?; let schema = Arc::new(read::get_schema(&file_metadata)?); let file_metadata = Arc::new(file_metadata); let start = SystemTime::now(); let producer_metadata = file_metadata.clone(); let child = spawn(move || { for col_num in 0..producer_metadata.schema().num_columns() { for row_group_num in 0..producer_metadata.row_groups.len() { let start = SystemTime::now(); let column_metadata = producer_metadata.row_groups[row_group_num].column(col_num); println!("produce start: {} {}", col_num, row_group_num); let pages = read::get_page_iterator( column_metadata, &mut file, None, vec![], ) .unwrap() .collect::<Vec<_>>(); println!( "produce end - {:?}: {} {}", start.elapsed().unwrap(), col_num, row_group_num ); sender.send((col_num, row_group_num, pages)).unwrap(); } } }); let mut children = Vec::new(); for _ in 0..3 { let receiver_consumer = receiver.clone(); let metadata_consumer = file_metadata.clone(); let schema_consumer = 
schema.clone(); let child = spawn(move || { let (col_num, row_group_num, iter) = receiver_consumer.recv().unwrap(); let start = SystemTime::now(); println!("consumer start: {} {}", col_num, row_group_num); let metadata = metadata_consumer.row_groups[row_group_num].column(col_num); let data_type = schema_consumer.fields()[col_num].data_type().clone(); let pages = iter .into_iter() .map(|x| x.and_then(|x| read::decompress(x, &mut vec![]))); let mut pages = read::streaming_iterator::convert(pages); let array = read::page_iter_to_array(&mut pages, metadata, data_type); println!( "Finished - {:?}: {} {}", start.elapsed().unwrap(), col_num, row_group_num ); array }); children.push(child); } child.join().expect("child thread panicked"); let arrays = children .into_iter() .map(|x| x.join().unwrap()) .collect::<Result<Vec<_>>>()?; println!("Finished - {:?}", start.elapsed().unwrap()); Ok(arrays) } fn write_array_single_thread( path: &str, array: &dyn Array, field: Field, ) -> Result<()> { let schema = Schema::new(vec![field]); let schema_parquet = write::to_parquet_schema(&schema)?; let options = write::WriteOptions { write_statistics: true, compression: write::Compression::Uncompressed, version: write::Version::V2, }; let encoding = write::Encoding::Plain; #[rustfmt::skip] let row_groups = once(Result::Ok(write::DynIter::new( once(Result::Ok(write::DynIter::new( once(array) .zip(schema_parquet.columns().to_vec().into_iter()) .map(|(array, descriptor)| { write::array_to_page(array, descriptor, options, encoding) }), ), ))))); let mut file = File::create(path)?; let _ = write::write_file( &mut file, row_groups, &schema, schema_parquet, options, None, ); Ok(()) } fn write_batch_single_thread_single_page( path: &str, batch: RecordBatch, ) -> Result<()> { let schema = batch.schema().clone(); let options = write::WriteOptions { write_statistics: true, compression: write::Compression::Uncompressed, version: write::Version::V2, }; let iter = vec![Ok(batch)]; let row_groups = 
write::RowGroupIterator::try_new( iter.into_iter(), &schema, options, vec![write::Encoding::Plain], )?; let mut file = File::create(path)?; let schema_parquet = row_groups.parquet_schema().clone(); let _ = write::write_file( &mut file, row_groups, &schema, schema_parquet, options, None, )?; Ok(()) } fn parallel_write_rayon( path: &str, batch: &RecordBatch, ) -> Result<()> { let options = write::WriteOptions { write_statistics: true, compression: write::Compression::Snappy, version: write::Version::V2, }; let schema_parquet = write::to_parquet_schema(batch.schema())?; let encodings = batch.schema().fields().par_iter().map(|field| match field .data_type() .to_physical_type() { PhysicalType::Binary | PhysicalType::LargeBinary | PhysicalType::Utf8 | PhysicalType::LargeUtf8 => write::Encoding::DeltaLengthByteArray, _ => write::Encoding::Plain, }); let columns = batch .columns() .par_iter() .zip(schema_parquet.columns().to_vec().into_par_iter()) .zip(encodings) .map(|((array, descriptor), encoding)| { let array = array.clone(); Ok(write::array_to_pages(array, descriptor, options, encoding)? 
.collect::<Vec<_>>()) }) .collect::<Result<Vec<_>>>()?; let row_groups = once(Result::Ok(write::DynIter::new( columns .into_iter() .map(|column| Ok(write::DynIter::new(column.into_iter()))), ))); let mut file = File::create(path)?; let _ = write::write_file( &mut file, row_groups, batch.schema(), schema_parquet, options, None, )?; Ok(()) } fn create_batch(size: usize) -> Result<RecordBatch> { let field1 = (0..size) .map(|x| if x % 9 == 0 { None } else { Some(x as i32) }) .collect::<Int32Array>(); let field2 = (0..size) .map(|x| { if x % 8 == 0 { None } else { Some(x.to_string()) } }) .collect::<Utf8Array<i32>>(); RecordBatch::try_from_iter([ ("field1", Arc::new(field1) as Arc<dyn Array>), ("field2", Arc::new(field2) as Arc<dyn Array>), ]) } pub fn main() -> Result<()> { let args: Vec<String> = env::args().collect(); let path = &args[1]; let column = args[2].parse::<usize>().unwrap(); let row_group = args[3].parse::<usize>().unwrap(); let array = read_column_chunk(path, row_group, column)?; println!("{:?}", array); for array in concurrent_read(path)? { println!("{}", array); } let array = Int32Array::from(&[Some(0), None, Some(2)]); let field = Field::new("field", array.data_type().clone(), true); write_array_single_thread("test.parquet", &array, field.clone())?; let schema = Schema::new(vec![field]); let batch = RecordBatch::try_new(Arc::new(schema), vec![Arc::new(array)])?; write_batch_single_thread_single_page("test2.parquet", batch)?; let batch = create_batch(10_000_000)?; parallel_write_rayon("parallel.parquet", &batch)?; Ok(()) }
use std::{ env, fs::File, iter::once, sync::Arc, thread::spawn, time::SystemTime, }; use arrow2::{ array::{Array, Int32Array, Utf8Array}, datatypes::{Field, PhysicalType, Schema}, error::Result, io::parquet::{read, write}, record_batch::RecordBatch, }; use crossbeam_channel::unbounded; use rayon::prelude::*; fn read_column_chunk( path: &str, row_group: usize, column: usize, ) -> Result<Box<dyn Array>> { let mut file = File::open(path)?; let file_metadata = read::read_metadata(&mut file)?; let metadata = file_metadata.row_groups[row_group].column(column); let arrow_schema = read::get_schema(&file_metadata)?; let data_type = arrow_schema.fields()[column].data_type().clone(); let pages = read::get_page_iterator(metadata, &mut file, None, vec![])?; let mut pages = read::Decompressor::new(pages, vec![]); read::page_iter_to_array(&mut pages, metadata, data_type) } fn concurrent_read(path: &str) -> Result<Vec<Box<dyn Array>>> { let (sender, receiver) = unbounded(); let mut file = File::open(path)?; let file_metadata = read::read_metadata(&mut file)?; let schema = Arc::new(read::get_schema(&file_metadata)?); let file_metadata = Arc::new(file_metadata); let start = SystemTime::now(); let producer_metadata = file_metadata.clone(); let child = spawn(move || { for col_num in 0..producer_metadata.schema().num_columns() { for row_group_num in 0..producer_metadata.row_groups.len() { let start = SystemTime::now(); let column_metadata = producer_metadata.row_groups[row_group_num].colum
schema_parquet, options, None, )?; Ok(()) } fn parallel_write_rayon( path: &str, batch: &RecordBatch, ) -> Result<()> { let options = write::WriteOptions { write_statistics: true, compression: write::Compression::Snappy, version: write::Version::V2, }; let schema_parquet = write::to_parquet_schema(batch.schema())?; let encodings = batch.schema().fields().par_iter().map(|field| match field .data_type() .to_physical_type() { PhysicalType::Binary | PhysicalType::LargeBinary | PhysicalType::Utf8 | PhysicalType::LargeUtf8 => write::Encoding::DeltaLengthByteArray, _ => write::Encoding::Plain, }); let columns = batch .columns() .par_iter() .zip(schema_parquet.columns().to_vec().into_par_iter()) .zip(encodings) .map(|((array, descriptor), encoding)| { let array = array.clone(); Ok(write::array_to_pages(array, descriptor, options, encoding)? .collect::<Vec<_>>()) }) .collect::<Result<Vec<_>>>()?; let row_groups = once(Result::Ok(write::DynIter::new( columns .into_iter() .map(|column| Ok(write::DynIter::new(column.into_iter()))), ))); let mut file = File::create(path)?; let _ = write::write_file( &mut file, row_groups, batch.schema(), schema_parquet, options, None, )?; Ok(()) } fn create_batch(size: usize) -> Result<RecordBatch> { let field1 = (0..size) .map(|x| if x % 9 == 0 { None } else { Some(x as i32) }) .collect::<Int32Array>(); let field2 = (0..size) .map(|x| { if x % 8 == 0 { None } else { Some(x.to_string()) } }) .collect::<Utf8Array<i32>>(); RecordBatch::try_from_iter([ ("field1", Arc::new(field1) as Arc<dyn Array>), ("field2", Arc::new(field2) as Arc<dyn Array>), ]) } pub fn main() -> Result<()> { let args: Vec<String> = env::args().collect(); let path = &args[1]; let column = args[2].parse::<usize>().unwrap(); let row_group = args[3].parse::<usize>().unwrap(); let array = read_column_chunk(path, row_group, column)?; println!("{:?}", array); for array in concurrent_read(path)? 
{ println!("{}", array); } let array = Int32Array::from(&[Some(0), None, Some(2)]); let field = Field::new("field", array.data_type().clone(), true); write_array_single_thread("test.parquet", &array, field.clone())?; let schema = Schema::new(vec![field]); let batch = RecordBatch::try_new(Arc::new(schema), vec![Arc::new(array)])?; write_batch_single_thread_single_page("test2.parquet", batch)?; let batch = create_batch(10_000_000)?; parallel_write_rayon("parallel.parquet", &batch)?; Ok(()) }
n(col_num); println!("produce start: {} {}", col_num, row_group_num); let pages = read::get_page_iterator( column_metadata, &mut file, None, vec![], ) .unwrap() .collect::<Vec<_>>(); println!( "produce end - {:?}: {} {}", start.elapsed().unwrap(), col_num, row_group_num ); sender.send((col_num, row_group_num, pages)).unwrap(); } } }); let mut children = Vec::new(); for _ in 0..3 { let receiver_consumer = receiver.clone(); let metadata_consumer = file_metadata.clone(); let schema_consumer = schema.clone(); let child = spawn(move || { let (col_num, row_group_num, iter) = receiver_consumer.recv().unwrap(); let start = SystemTime::now(); println!("consumer start: {} {}", col_num, row_group_num); let metadata = metadata_consumer.row_groups[row_group_num].column(col_num); let data_type = schema_consumer.fields()[col_num].data_type().clone(); let pages = iter .into_iter() .map(|x| x.and_then(|x| read::decompress(x, &mut vec![]))); let mut pages = read::streaming_iterator::convert(pages); let array = read::page_iter_to_array(&mut pages, metadata, data_type); println!( "Finished - {:?}: {} {}", start.elapsed().unwrap(), col_num, row_group_num ); array }); children.push(child); } child.join().expect("child thread panicked"); let arrays = children .into_iter() .map(|x| x.join().unwrap()) .collect::<Result<Vec<_>>>()?; println!("Finished - {:?}", start.elapsed().unwrap()); Ok(arrays) } fn write_array_single_thread( path: &str, array: &dyn Array, field: Field, ) -> Result<()> { let schema = Schema::new(vec![field]); let schema_parquet = write::to_parquet_schema(&schema)?; let options = write::WriteOptions { write_statistics: true, compression: write::Compression::Uncompressed, version: write::Version::V2, }; let encoding = write::Encoding::Plain; #[rustfmt::skip] let row_groups = once(Result::Ok(write::DynIter::new( once(Result::Ok(write::DynIter::new( once(array) .zip(schema_parquet.columns().to_vec().into_iter()) .map(|(array, descriptor)| { write::array_to_page(array, 
descriptor, options, encoding) }), ), ))))); let mut file = File::create(path)?; let _ = write::write_file( &mut file, row_groups, &schema, schema_parquet, options, None, ); Ok(()) } fn write_batch_single_thread_single_page( path: &str, batch: RecordBatch, ) -> Result<()> { let schema = batch.schema().clone(); let options = write::WriteOptions { write_statistics: true, compression: write::Compression::Uncompressed, version: write::Version::V2, }; let iter = vec![Ok(batch)]; let row_groups = write::RowGroupIterator::try_new( iter.into_iter(), &schema, options, vec![write::Encoding::Plain], )?; let mut file = File::create(path)?; let schema_parquet = row_groups.parquet_schema().clone(); let _ = write::write_file( &mut file, row_groups, &schema,
random
[ { "content": "fn read_batches(path: &str) -> Result<Vec<RecordBatch>> {\n\n let mut file = File::open(path)?;\n\n let metadata = read::read_file_metadata(&mut file)?;\n\n let reader = read::FileReader::new(&mut file, metadata, None);\n\n\n\n reader.collect()\n\n}\n\n\n", "file_path": "streamer/src/arrow.rs", "rank": 1, "score": 181177.6212877933 }, { "content": "fn concurrent_read(path: &str) -> Result<Vec<RecordBatch>> {\n\n let batch_size = 100;\n\n let has_header = true;\n\n let projection: Option<&[usize]> = None;\n\n\n\n // 1. prep channel to funnel threads to serialized records\n\n let (sender, receiver) = unbounded();\n\n\n\n // 2. define reader and schema\n\n let mut reader = read::ReaderBuilder::new().from_path(path)?;\n\n let schema = read::infer_schema(\n\n &mut reader,\n\n Some(batch_size * 100),\n\n has_header,\n\n &read::infer,\n\n )?;\n\n let schema = Arc::new(schema);\n\n\n\n // 3. spawn IO-bounded thread to produce `Vec<ByteRecords>`\n\n let start = SystemTime::now();\n", "file_path": "streamer/src/csv.rs", "rank": 2, "score": 181177.6212877933 }, { "content": "fn write_ipc<W: Write + Seek>(\n\n writer: W,\n\n array: impl Array + 'static,\n\n) -> Result<W> {\n\n // schema with a field of array data type\n\n let schema = Schema::new(vec![Field::new(\n\n \"field\",\n\n array.data_type().clone(),\n\n false,\n\n )]);\n\n let mut writer = write::FileWriter::try_new(writer, &schema)?;\n\n let batch = RecordBatch::try_new(Arc::new(schema), vec![Arc::new(array)])?;\n\n\n\n writer.write(&batch)?;\n\n\n\n Ok(writer.into_inner())\n\n}\n\n\n", "file_path": "streamer/src/extension.rs", "rank": 6, "score": 110310.87033239685 }, { "content": "#[warn(dead_code)]\n\npub fn float_operator(array: &dyn Array) -> Result<Box<dyn Array>, String> {\n\n match array.data_type().to_physical_type() {\n\n PhysicalType::Primitive(PrimitiveType::Float32) => {\n\n let array = array\n\n .as_any()\n\n .downcast_ref::<PrimitiveArray<f32>>()\n\n .unwrap();\n\n let array = 
array.clone();\n\n Ok(Box::new(array))\n\n },\n\n PhysicalType::Primitive(PrimitiveType::Float64) => {\n\n let array = array\n\n .as_any()\n\n .downcast_ref::<PrimitiveArray<f32>>()\n\n .unwrap();\n\n let array = array.clone();\n\n Ok(Box::new(array))\n\n },\n\n _ => Err(\"Only for float point arrays\".to_string()),\n\n }\n\n}\n\n\n", "file_path": "streamer/src/main.rs", "rank": 7, "score": 105054.65480127727 }, { "content": "fn arrays() {\n\n let array1 = PrimitiveArray::<i32>::from([Some(1), None, Some(123)]);\n\n let array2 = PrimitiveArray::<f32>::from_slice([1.0, 0.0, 123.0]);\n\n // let array3: PrimitiveArray<u64> = [Some(1), None,\n\n // Some(123)].iter().collect();\n\n assert_eq!(array1.len(), array2.len());\n\n}\n\n\n\n// Array as a trait obect\n", "file_path": "streamer/src/main.rs", "rank": 8, "score": 101519.10345322962 }, { "content": "fn write_batches(\n\n path: &str,\n\n schema: &Schema,\n\n batches: &[RecordBatch],\n\n) -> Result<()> {\n\n let file = File::create(path)?;\n\n let mut writer = write::FileWriter::try_new(file, schema)?;\n\n\n\n for batch in batches {\n\n writer.write(batch)?;\n\n }\n\n\n\n writer.finish()\n\n}\n\n\n", "file_path": "streamer/src/arrow.rs", "rank": 9, "score": 99559.56443912102 }, { "content": "fn concurrent_write_batch(\n\n path: &str,\n\n batches: [RecordBatch; 2],\n\n) -> Result<()> {\n\n let mut writer = write::WriterBuilder::new().from_path(path)?;\n\n write::write_header(&mut writer, batches[0].schema())?;\n\n\n\n // prepare message channel & CSV serializer options + initialize children\n\n let (sender, receiver): (Sender<_>, Receiver<_>) = channel();\n\n let mut children = Vec::new();\n\n let options = write::SerializeOptions::default();\n\n\n\n (0..batches.len()).for_each(|idx| {\n\n let sender_thread = sender.clone();\n\n let options = options.clone();\n\n let batch = batches[idx].clone();\n\n let child = spawn(move || {\n\n let records = write::serialize(&batch, &options).unwrap();\n\n 
sender_thread.send(records).unwrap();\n\n });\n", "file_path": "streamer/src/csv.rs", "rank": 10, "score": 97715.97731356327 }, { "content": "fn sync_write_batch(\n\n path: &str,\n\n batches: &[RecordBatch],\n\n) -> Result<()> {\n\n let mut writer = write::WriterBuilder::new().from_path(path)?;\n\n\n\n write::write_header(&mut writer, batches[0].schema())?;\n\n\n\n let options = write::SerializeOptions::default();\n\n batches\n\n .iter()\n\n .try_for_each(|batch| write::write_batch(&mut writer, batch, &options))\n\n}\n\n\n", "file_path": "streamer/src/csv.rs", "rank": 11, "score": 97715.97731356327 }, { "content": "pub fn main() {\n\n // logical types\n\n let type1 = DataType::Date32;\n\n let type2 = DataType::Int32;\n\n\n\n // fields / columns\n\n let field1 = Field::new(\"c1\", type1, true);\n\n let field2 = Field::new(\"c2\", type2, true);\n\n\n\n // metadata on columns\n\n let mut metadata = BTreeMap::new();\n\n metadata.insert(\"key\".to_string(), \"value\".to_string());\n\n let field1 = field1.with_metadata(metadata);\n\n\n\n // create schema from fields with new metadata\n\n let schema = Schema::new(vec![field1, field2]);\n\n assert_eq!(schema.fields().len(), 2);\n\n\n\n // add metadata to schema as well with HashMap\n\n let mut metadata = HashMap::new();\n\n metadata.insert(\"key\".to_string(), \"value\".to_string());\n\n let schema = schema.with_metadata(metadata);\n\n assert_eq!(schema.fields().len(), 2);\n\n}\n", "file_path": "streamer/src/metadata.rs", "rank": 13, "score": 95377.38199591204 }, { "content": " def split(str: String): Array[String] = str.split(' ')\n\n\n", "file_path": "spark/src/main/scala/com/podra/transformer/batch/NLPLib.scala", "rank": 14, "score": 93189.59161977771 }, { "content": "fn buffers_and_bitmaps() {\n\n let x = Buffer::from(&[1u32, 2, 3]);\n\n assert_eq!(x.as_slice(), &[1u32, 2, 3]);\n\n\n\n let x = x.slice(1, 2);\n\n assert_eq!(x.as_slice(), &[2, 3]);\n\n\n\n // MutableBuffer<i64>\n\n let mut x: MutableBuffer<i64> = 
(0..3).collect();\n\n x[1] = 5;\n\n x.push(10);\n\n assert_eq!(x.as_slice(), &[0, 5, 2, 10]);\n\n\n\n // from another iterator\n\n let x = (0..1000).collect::<Vec<_>>();\n\n let y = MutableBuffer::from_trusted_len_iter(x.iter().map(|x| x * 2));\n\n assert_eq!(y[50], 100);\n\n\n\n // bitmaps for booleans\n\n use arrow2::bitmap::Bitmap;\n", "file_path": "streamer/src/main.rs", "rank": 16, "score": 62579.587560226486 }, { "content": "fn sync_read(\n\n path: &str,\n\n projection: Option<&[usize]>,\n\n) -> Result<RecordBatch> {\n\n // 1. CSV reader on file-reading thread\n\n let mut reader = read::ReaderBuilder::new().from_path(path)?;\n\n\n\n // 2. infer schema (string -> DataType)\n\n let schema = read::infer_schema(&mut reader, None, true, &read::infer)?;\n\n\n\n // 3. allocate space for reading from CSV (length is # rows)\n\n let mut rows = vec![read::ByteRecord::default(); 100];\n\n\n\n // 4. read rows (IO-intensive, NO CPU, No SerDe)\n\n let rows_read = read::read_rows(&mut reader, 0, &mut rows)?;\n\n let rows = &rows[..rows_read];\n\n\n\n // 5. 
parse into RecordBatch (NO IO, ALL CPU)\n\n // can be on different thread if rows passed through channel\n\n read::deserialize_batch(\n\n rows,\n\n schema.fields(),\n\n projection,\n\n 0,\n\n read::deserialize_column,\n\n )\n\n}\n\n\n", "file_path": "streamer/src/csv.rs", "rank": 17, "score": 62579.587560226486 }, { "content": "fn read_stream() -> Result<()> {\n\n const ADDRESS: &str = \"127.0.0.1:12989\";\n\n\n\n let mut reader = TcpStream::connect(ADDRESS)?;\n\n let metadata = read::read_stream_metadata(&mut reader)?;\n\n let stream = read::StreamReader::new(&mut reader, metadata);\n\n\n\n let mut idx = 0;\n\n for x in stream {\n\n match x {\n\n Ok(read::StreamState::Some(batch)) => {\n\n idx += 1;\n\n println!(\"batch: {:?}\", batch);\n\n },\n\n Ok(read::StreamState::Waiting) => {\n\n sleep(Duration::from_millis(2000))\n\n },\n\n Err(e) => println!(\"{:?} ({})\", e, idx),\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "streamer/src/arrow.rs", "rank": 18, "score": 57390.40604412124 }, { "content": "pub fn main() -> Result<()> {\n\n // declare custom type\n\n let ext_type = DataType::Extension(\n\n \"date16\".to_string(),\n\n Box::new(DataType::UInt16),\n\n None,\n\n );\n\n\n\n // init array with custom type\n\n let array = UInt16Array::from_slice([1, 2]).to(ext_type.clone());\n\n\n\n // same standard workflow\n\n let buffer = Cursor::new(vec![]);\n\n let res_buffer = write_ipc(buffer, array)?;\n\n\n\n // verify datatype is preserved\n\n let batch = read_ipc(&res_buffer.into_inner())?;\n\n let new_array = &batch.columns()[0];\n\n assert_eq!(new_array.data_type(), &ext_type);\n\n\n\n Ok(())\n\n}\n", "file_path": "streamer/src/extension.rs", "rank": 19, "score": 54320.0851325621 }, { "content": "pub fn main() -> Result<()> {\n\n use std::env;\n\n let args: Vec<String> = env::args().collect();\n\n let file_path = &args[1];\n\n let array = args[2..]\n\n .iter()\n\n .map(|x| x.parse::<i32>().ok())\n\n .collect::<PrimitiveArray<_>>();\n\n\n\n // WRITE\n\n let 
field = Field::new(\"field\", array.data_type().clone(), true);\n\n let schema = Schema::new(vec![field]);\n\n let batch = RecordBatch::try_new(Arc::new(schema), vec![Arc::new(array)])?;\n\n sync_write_batch(file_path, &[batch.clone()])?;\n\n concurrent_write_batch(file_path, [batch.clone(), batch])?;\n\n\n\n // synchronous\n\n let batch = sync_read(file_path, None)?;\n\n println!(\"{:?}\", batch);\n\n\n\n // multithreading / concurrency\n\n let batches = concurrent_read(file_path)?;\n\n for batch in batches {\n\n println!(\"{}\", batch.num_rows());\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "streamer/src/csv.rs", "rank": 20, "score": 54320.0851325621 }, { "content": "pub fn main() -> Result<()> {\n\n let args = env::args().collect::<Vec<_>>();\n\n let path = &args[1];\n\n\n\n // write\n\n let schema = Schema::new(vec![\n\n Field::new(\"field1\", DataType::Int32, false),\n\n Field::new(\"field2\", DataType::Utf8, false),\n\n ]);\n\n let arr1 = Int32Array::from_slice(&[1, 2, 3, 4, 5]);\n\n let arr2 = Utf8Array::<i32>::from_slice(&[\"a\", \"b\", \"c\", \"d\", \"e\"]);\n\n let batch = RecordBatch::try_new(Arc::new(schema.clone()), vec![\n\n Arc::new(arr1),\n\n Arc::new(arr2),\n\n ])?;\n\n write_batches(path, &schema, &[batch])?;\n\n\n\n // read\n\n let batches = read_batches(path)?;\n\n print::print(&batches);\n\n read_stream()?;\n\n Ok(())\n\n}\n", "file_path": "streamer/src/arrow.rs", "rank": 22, "score": 54320.0851325621 }, { "content": "pub fn main() -> Result<()> {\n\n let array =\n\n Arc::new(PrimitiveArray::<i32>::from([Some(1), None, Some(123)]))\n\n as Arc<dyn Array>;\n\n\n\n // initialize structs to receive data on import\n\n let array_ptr = Box::new(Ffi_ArrowArray::empty());\n\n let schema_ptr = Box::new(Ffi_ArrowSchema::empty());\n\n\n\n // reqlinquish ownership to allow thread-safe write\n\n let array_ptr = Box::into_raw(array_ptr);\n\n let schema_ptr = Box::into_raw(schema_ptr);\n\n\n\n // PRODUCER\n\n unsafe {\n\n export(array.clone(), array_ptr, 
schema_ptr);\n\n };\n\n\n\n // take ownership back in order to deallocate\n\n let array_ptr = unsafe { Box::from_raw(array_ptr) };\n\n let schema_ptr = unsafe { Box::from_raw(schema_ptr) };\n\n\n\n // interpret memory into new array\n\n let new_array = unsafe { import(array_ptr, schema_ptr.as_ref())? };\n\n\n\n assert_eq!(array.as_ref(), new_array.as_ref());\n\n\n\n Ok(())\n\n}\n", "file_path": "streamer/src/cdata.rs", "rank": 23, "score": 54320.0851325621 }, { "content": "fn main() -> Result<(), ArrowError> {\n\n buffers_and_bitmaps();\n\n arrays();\n\n\n\n // 1. two arrays\n\n let array1 = PrimitiveArray::<i64>::from(&[Some(1), Some(2), Some(3)]);\n\n let array2 = PrimitiveArray::<i64>::from(&[Some(4), None, Some(6)]);\n\n\n\n // 2. add them!\n\n assert_eq!(\n\n arithmetic_primitive(&array1, Operator::Add, &array2)?,\n\n PrimitiveArray::<i64>::from(&[Some(5), None, Some(9)])\n\n );\n\n\n\n // 3. array trait object\n\n let array1_trait = &array1 as &dyn Array;\n\n let array2_trait = &array2 as &dyn Array;\n\n assert!(can_arithmetic(\n\n array1_trait.data_type(),\n\n Operator::Add,\n", "file_path": "streamer/src/main.rs", "rank": 24, "score": 53449.201998079196 }, { "content": "// vectorized SIMD instructions\n\npub fn funary<I, F, O>(\n\n array: &PrimitiveArray<I>,\n\n op: F,\n\n data_type: &DataType,\n\n) -> PrimitiveArray<O>\n\nwhere\n\n I: NativeType,\n\n O: NativeType,\n\n F: Fn(I) -> O,\n\n{\n\n // create iterator over values\n\n let values = array.values().iter().map(|v| op(*v));\n\n let values = Buffer::from_trusted_len_iter(values);\n\n\n\n // create and clone validity\n\n // from_trusted_len_iter could be faster if op is expensive\n\n PrimitiveArray::<O>::from_data(\n\n data_type.clone(),\n\n values,\n\n array.validity().cloned(),\n\n )\n\n}\n\n\n", "file_path": "streamer/src/main.rs", "rank": 25, "score": 49332.101209032946 }, { "content": "fn read_ipc(reader: &[u8]) -> Result<RecordBatch> {\n\n let mut reader = Cursor::new(reader);\n\n let metadata = 
read::read_file_metadata(&mut reader)?;\n\n let mut reader = read::FileReader::new(&mut reader, metadata, None);\n\n reader.next().unwrap()\n\n}\n\n\n", "file_path": "streamer/src/extension.rs", "rank": 26, "score": 46957.753743401525 }, { "content": " def main(args: Array[String]): Unit = {\n\n if (args.length === 2) {\n\n System.err.println(\"USAGE:\\nSocketWordCount <hostname> <port>\")\n\n return\n\n }\n\n\n\n // extract arguments\n\n val hostname = args(0)\n\n val port = args(1).toInt\n\n\n\n // create environment\n\n val env = StreamExecutionEnvironment.getExecutionEnvironment\n\n\n\n // Create names & ages streams by mapping inputs to objects\n\n val text = env.socketTextStream(hostname, port)\n\n \n\n @annotation.nowarn(\"msg=unused\")\n\n val counts = text\n\n .flatMap {\n\n _ .toLowerCase(Locale.US).split(\"\\\\W+\")\n", "file_path": "spark/src/main/scala/com/podra/transformer/stream/SocketWordCount.scala", "rank": 27, "score": 43669.00409155725 }, { "content": "use std::collections::{BTreeMap, HashMap};\n\n\n\nuse arrow2::datatypes::{DataType, Field, Schema};\n\n\n", "file_path": "streamer/src/metadata.rs", "rank": 28, "score": 39093.09628181299 }, { "content": " def run(conf: SparkConf, inputFile: String, outputFile: String): Unit = {\n\n implicit val session = createSession(conf, \"WARN\")\n\n val spCtx = session.sparkContext\n\n // import session.implicits._\n\n\n\n val csvData = session\n\n .read\n\n .options(\n\n Map(\n\n \"inferSchema\" -> \"true\",\n\n \"delimiter\" -> \",\",\n\n \"header\" -> \"true\",\n\n )\n\n )\n\n .csv(inputFile)\n\n print(csvData)\n\n csvData.printSchema()\n\n csvData.write.csv(outputFile)\n\n\n\n val apartments = Seq(\n", "file_path": "spark/src/main/scala/com/podra/transformer/batch/Job.scala", "rank": 29, "score": 36784.045195014354 }, { "content": "type LogFn struct{}\n", "file_path": "beam/kafka.go", "rank": 30, "score": 34708.55809499844 }, { "content": "func KVFn(elm []byte) ([]byte, []byte) {\n\n\treturn 
[]byte(\"\"), elm\n", "file_path": "beam/kafka.go", "rank": 31, "score": 34708.55809499844 }, { "content": "func FormatFn(word string, count int) string {\n\n\treturn fmt.Sprintf(\"%s: %v\", word, count)\n", "file_path": "beam/wordcount.go", "rank": 32, "score": 34708.55809499844 }, { "content": "type ExtractFn struct {\n\n\tSmallWordLength int `json:\"smallWordLength\"`\n", "file_path": "beam/wordcount.go", "rank": 33, "score": 34708.55809499844 }, { "content": "func (fn *ExtractFn) ProcessElement(\n\n\tctx context.Context,\n\n\tline string,\n\n\temit func(string),\n\n) {\n\n\tlineLen.Update(ctx, int64(len(line)))\n\n\tif len(strings.TrimSpace(line)) == 0 {\n\n\t\tempty.Inc(ctx, 1)\n\n\t}\n\n\tfor _, word := range wordRE.FindAllString(line, -1) {\n\n\t\tif len(word) < fn.SmallWordLength {\n\n\t\t\tsmallWords.Inc(ctx, 1)\n\n\t\t}\n\n\t\temit(word)\n\n\t}\n", "file_path": "beam/wordcount.go", "rank": 34, "score": 33088.72315967851 }, { "content": "func (fn *LogFn) FinishBundle() {\n\n\ttime.Sleep(2 * time.Second)\n", "file_path": "beam/kafka.go", "rank": 35, "score": 33088.72315967851 }, { "content": "func (fn *LogFn) ProcessElement(ctx context.Context, elm []byte) {\n\n\tlog.Infof(ctx, \"Ride info: %v\", string(elm))\n", "file_path": "beam/kafka.go", "rank": 36, "score": 33088.72315967851 }, { "content": "use std::{\n\n env,\n\n fs::File,\n\n net::TcpStream,\n\n sync::Arc,\n\n thread::sleep,\n\n time::Duration,\n\n};\n\n\n\nuse arrow2::{\n\n array::{Int32Array, Utf8Array},\n\n datatypes::{DataType, Field, Schema},\n\n error::Result,\n\n io::{\n\n ipc::{read, write},\n\n print,\n\n },\n\n record_batch::RecordBatch,\n\n};\n\n\n", "file_path": "streamer/src/arrow.rs", "rank": 42, "score": 15.048356457320251 }, { "content": "use std::sync::Arc;\n\n\n\nuse arrow2::{\n\n array::{Array, PrimitiveArray},\n\n datatypes::Field,\n\n error::Result,\n\n ffi::{\n\n export_array_to_c,\n\n export_field_to_c,\n\n import_array_from_c,\n\n import_field_from_c,\n\n Ffi_ArrowArray,\n\n 
Ffi_ArrowSchema,\n\n },\n\n};\n\n\n\nunsafe fn export(\n\n array: Arc<dyn Array>,\n\n array_ptr: *mut Ffi_ArrowArray,\n\n schema_ptr: *mut Ffi_ArrowSchema,\n", "file_path": "streamer/src/cdata.rs", "rank": 44, "score": 13.659555295904108 }, { "content": "use std::{\n\n io::{Cursor, Seek, Write},\n\n sync::Arc,\n\n};\n\n\n\nuse arrow2::{\n\n array::{Array, UInt16Array},\n\n datatypes::{DataType, Field, Schema},\n\n error::Result,\n\n io::ipc::{read, write},\n\n record_batch::RecordBatch,\n\n};\n\n\n", "file_path": "streamer/src/extension.rs", "rank": 45, "score": 13.45262486676804 }, { "content": "// INTENSIVE TASKS\n\n// 1. split into rows (seeking, parsing)\n\n// 2. parse set of rows into RecordBatch\n\n// * use multiple readers that scan different file parts\n\n// * because bytes -> values is harder than line interpretation\n\n// parsing can be run on separate thread!\n\n\n\nuse std::{\n\n sync::{\n\n mpsc::{channel, Receiver, Sender},\n\n Arc,\n\n },\n\n thread::{spawn, JoinHandle},\n\n time::SystemTime,\n\n};\n\n\n\nuse arrow2::{\n\n array::{Array, PrimitiveArray},\n\n datatypes::{Field, Schema},\n\n error::Result,\n\n io::csv::{read, write},\n\n record_batch::RecordBatch,\n\n};\n\nuse crossbeam_channel::unbounded;\n\n\n", "file_path": "streamer/src/csv.rs", "rank": 46, "score": 13.173185491866272 }, { "content": " children.push(child);\n\n });\n\n\n\n for _ in 0..batches.len() {\n\n let records = receiver.recv().unwrap();\n\n records\n\n .iter()\n\n .try_for_each(|record| writer.write_byte_record(record))?;\n\n }\n\n\n\n // rejoin\n\n for child in children {\n\n child.join().expect(\"child thread panicked!\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n// fn write(\n\n// path: &str,\n", "file_path": "streamer/src/csv.rs", "rank": 48, "score": 11.38770321922269 }, { "content": " let thread = spawn(move || {\n\n let mut line_num = 0;\n\n let mut size = 1;\n\n while size > 0 {\n\n let mut rows = vec![read::ByteRecord::default(); batch_size];\n\n let rows_read = 
read::read_rows(&mut reader, 0, &mut rows).unwrap();\n\n rows.truncate(rows_read);\n\n\n\n line_num += rows.len();\n\n size = rows.len();\n\n\n\n sender.send((rows, line_num)).unwrap();\n\n }\n\n });\n\n\n\n // 4. decompress, decode, deserialize (we use 3 consumers)\n\n let mut children = Vec::<JoinHandle<RecordBatch>>::new();\n\n for _ in 0..3 {\n\n let consumer = receiver.clone();\n\n let consumer_schema = schema.clone();\n", "file_path": "streamer/src/csv.rs", "rank": 49, "score": 10.378022192567826 }, { "content": "// array: &dyn Array,\n\n// is_sync: bool\n\n// ) -> Result<()> {\n\n\n\n// if is_sync {\n\n// sync_write_batch(path, &[batch])\n\n// } else {\n\n// concurrent_write_batch(path, [batch.clone(), batch])\n\n// }\n\n// }\n\n\n", "file_path": "streamer/src/csv.rs", "rank": 51, "score": 8.65177989628228 }, { "content": " let child = spawn(move || {\n\n let (rows, line_num) = consumer.recv().unwrap();\n\n let start = SystemTime::now();\n\n println!(\"Consumer start - {}\", line_num);\n\n let batch = read::deserialize_batch(\n\n &rows,\n\n consumer_schema.fields(),\n\n projection,\n\n 0,\n\n read::deserialize_column,\n\n )\n\n .unwrap();\n\n println!(\n\n \"Consumer end - {:?}: {}\",\n\n start.elapsed().unwrap(),\n\n line_num\n\n );\n\n batch\n\n });\n\n children.push(child);\n", "file_path": "streamer/src/csv.rs", "rank": 52, "score": 8.068901018766677 }, { "content": "pub mod arrow;\n\npub mod cdata;\n\npub mod csv;\n\n// pub mod example;\n\npub mod extension;\n\npub mod metadata;\n\npub mod parquet;\n\n\n\nuse arrow2::{\n\n array::{Array, PrimitiveArray},\n\n buffer::{Buffer, MutableBuffer},\n\n compute::{\n\n arithmetics::*,\n\n arity::{binary, unary},\n\n },\n\n datatypes::{DataType, PhysicalType, PrimitiveType},\n\n error::ArrowError,\n\n types::NativeType,\n\n};\n\n\n", "file_path": "streamer/src/main.rs", "rank": 53, "score": 6.856198989045395 }, { "content": ") {\n\n let field = Field::new(\"field\", array.data_type().clone(), true);\n\n 
export_array_to_c(array, array_ptr);\n\n export_field_to_c(&field, schema_ptr);\n\n}\n\n\n\nunsafe fn import(\n\n array: Box<Ffi_ArrowArray>,\n\n schema: &Ffi_ArrowSchema,\n\n) -> Result<Box<dyn Array>> {\n\n let field = import_field_from_c(schema)?;\n\n import_array_from_c(array, &field)\n\n}\n\n\n", "file_path": "streamer/src/cdata.rs", "rank": 54, "score": 6.497224151759712 }, { "content": " }\n\n\n\n // 5. collect threads\n\n thread.join().expect(\"Child thread panicked!\");\n\n let batches = children\n\n .into_iter()\n\n .map(|x| x.join().unwrap())\n\n .collect::<Vec<_>>();\n\n println!(\"Finished - {:?}\", start.elapsed().unwrap());\n\n\n\n Ok(batches)\n\n}\n\n\n", "file_path": "streamer/src/csv.rs", "rank": 55, "score": 6.149637173859145 }, { "content": " array2_trait.data_type()\n\n ));\n\n assert_eq!(\n\n PrimitiveArray::<i64>::from(&[Some(5), None, Some(9)]),\n\n arithmetic(array1_trait, Operator::Add, array2_trait)\n\n .unwrap()\n\n .as_ref()\n\n );\n\n\n\n // 4. arbitrary binary operation\n\n let op = |x: i64, y: i64| x.pow(2) + y.pow(2);\n\n assert_eq!(\n\n binary(&array1, &array2, DataType::Int64, op)?,\n\n PrimitiveArray::<i64>::from(&[Some(1 + 16), None, Some(9 + 36)])\n\n );\n\n\n\n // 5. arbitrary unary operations\n\n let array_unary =\n\n PrimitiveArray::<f64>::from(&[Some(4.0), None, Some(6.0)]);\n\n let result = unary(\n", "file_path": "streamer/src/main.rs", "rank": 56, "score": 5.303375433228348 }, { "content": " &array_unary,\n\n |x| x.cos().powi(2) + x.sin().powi(2),\n\n DataType::Float64,\n\n );\n\n assert!((result.values()[0] - 1.0).abs() < 0.0001);\n\n assert!(result.is_null(1));\n\n assert!((result.values()[2] - 1.0).abs() < 0.0001);\n\n\n\n // 6. 
type transformation\n\n let arr = PrimitiveArray::<f64>::from(&[Some(4.4), None, Some(4.6)]);\n\n assert_eq!(\n\n unary(&arr, |x| x.round() as i64, DataType::Int64),\n\n PrimitiveArray::<i64>::from(&[Some(4), None, Some(5)])\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": "streamer/src/main.rs", "rank": 57, "score": 5.0093529400070205 }, { "content": " let x = Bitmap::from(&[true, false]);\n\n let iter = x.iter().map(|x| !x);\n\n let y = Bitmap::from_trusted_len_iter(iter);\n\n assert!(!y.get_bit(0));\n\n assert!(y.get_bit(1));\n\n // and the mutable version\n\n use arrow2::bitmap::MutableBitmap;\n\n let mut x = MutableBitmap::new();\n\n x.push(true);\n\n x.push(false);\n\n assert!(!x.get(1));\n\n x.set(1, x.get(0));\n\n assert!(x.get(1));\n\n}\n\n\n", "file_path": "streamer/src/main.rs", "rank": 58, "score": 4.885797201862831 }, { "content": "export const deepElement = (\n\n fields: string,\n\n /* eslint-disable @typescript-eslint/no-explicit-any */\n\n /* eslint-disable @typescript-eslint/explicit-module-boundary-types */\n\n obj: any\n\n): any =>\n\n /* eslint-enable @typescript-eslint/no-explicit-any */\n\n /* eslint-enable @typescript-eslint/explicit-module-boundary-types */\n", "file_path": "pwa/src/scripts/functions.ts", "rank": 59, "score": 3.592986889451977 }, { "content": " print(vectorDS)\n\n\n\n val matrix = Matrices.dense(2, 1, Array(1, 2))\n\n val matrixDS = TypedDataset.create(Seq(\"label\" -> matrix))\n\n print(matrixDS)\n\n \n\n // Closing\n\n session.stop()\n\n }\n\n}\n\n\n\n// object WordCount {\n\n// val input = Context.ctx.textFile(\"data/input.txt\")\n\n\n\n// val count = input\n\n// .flatMap(_.split(\" \"))\n\n// .map((_, 1))\n\n// .reduceByKey(_ + _)\n\n\n\n// count.saveAsTextFile(\"data/output.txt\")\n", "file_path": "spark/src/main/scala/com/podra/transformer/batch/Job.scala", "rank": 60, "score": 3.5142400285610864 }, { "content": " // implicit val nbhToStr: Injection[Neighborhood, String] = Injection(\n\n // (_: Neighborhood).toString(),\n\n 
// (_: String).asInstanceOf[Neighborhood]\n\n // )\n\n\n\n final case class PhonebookEntry(\n\n address: Address,\n\n residents: String,\n\n phoneNumber: String\n\n )\n\n\n\n final case class CityMapEntry(\n\n address: Address,\n\n neighborhood: Neighborhood\n\n )\n\n\n\n final case class Family(residents: String, neighborhood: Neighborhood)\n\n @SuppressWarnings(Array(\"org.wartremover.warts.ArrayEquals\"))\n\n final case class FamilyMembers(\n\n residents: Array[String],\n\n neighborhood: Neighborhood\n\n )\n\n final case class Person(name: String, neighborhood: Neighborhood)\n\n final case class NeighborhoodCount(neighborhood: Neighborhood, count: Long)\n\n\n", "file_path": "spark/src/main/scala/com/podra/transformer/batch/NLPLib.scala", "rank": 61, "score": 3.305586718311366 }, { "content": " rideState.clear()\n\n }\n\n case None => {\n\n // remember the first event\n\n rideState.update(ride)\n\n\n\n if (ride.isStart) {\n\n\n\n // timer for rides that've gone too long (ie no END)\n\n ctx.timerService.registerEventTimeTimer(\n\n getTimerTime(ride)\n\n )\n\n }\n\n }\n\n }\n\n }\n\n\n\n override def onTimer(\n\n timestamp: Long, \n\n ctx: KeyedProcessFunction[Long, TaxiRide, Long]#OnTimerContext, \n", "file_path": "spark/src/main/scala/com/podra/transformer/stream/SocketWordCount.scala", "rank": 62, "score": 2.942702956684474 }, { "content": "# Transformer\n\n\n\n## SBT\n\n\n\n### Flags\n\n\n\nJVM runtime flags are provided in `.sbtopts`:\n\n\n\n```bash\n\n# Thread Stack Size of 8MB\n\n-J-Xss8M\n\n# Initial Memory Allocation Pool of 1GB\n\n-J-Xms1G\n\n# Maximum Memory Allocation Pool of 8GB\n\n-J-Xmx8G\n\n\n\n-J-XX:ReservedCodeCacheSize=1G\n\n-J-XX:MaxMetaspaceSize=2G\n\n```\n\n\n\n## Flink\n\n\n\n### How To Run\n\n\n\n```bash\n\nflink run -c org.example.WordCount /path/to/snapshot.jar\n\n```\n\n\n\n## Frameless\n\n\n\n- typed abstraction of spark RDDs, Datasets, and more\n\n- *Benefits*:\n\n 1. typesafe column referencing ==> NO runtime errors!\n\n 2. 
custom typesafe encoders ==> encoder-less types don't compile!\n\n 3. typesafe casting/projection\n\n 4. builtin function signature ==> NO arithmetic on non-numerics!\n\n\n\n## Cats\n\n\n", "file_path": "spark/README.md", "rank": 63, "score": 2.7633746406260267 }, { "content": " isStart: Boolean,\n\n eventTime: Instant,\n\n startLon: Float,\n\n startLat: Float,\n\n endLon: Float,\n\n endLat: Float,\n\n passengerCnt: Short,\n\n)\n\nfinal case class TaxiFare(\n\n rideId: Long, \n\n taxiId: Long, \n\n driverId: Long, \n\n startTime: Instant, \n\n paymentType: String, \n\n tip: Float, \n\n tolls: Float, \n\n totalFare: Float,\n\n)\n\nfinal case class RideAndFare(\n\n ride: TaxiRide,\n\n fare: TaxiFare\n\n)\n\n\n", "file_path": "spark/src/main/scala/com/podra/transformer/stream/SocketWordCount.scala", "rank": 64, "score": 2.7558812233863437 }, { "content": " case None => rideState.update(ride)\n\n }\n\n }\n\n\n\n override def flatMap2(fare: TaxiFare, out: Collector[RideAndFare]): Unit = {\n\n Option(rideState.value) match {\n\n case Some(ride) => {\n\n rideState.clear()\n\n out.collect(new RideAndFare(ride, fare))\n\n }\n\n case None => fareState.update(fare)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "spark/src/main/scala/com/podra/transformer/stream/SocketWordCount.scala", "rank": 65, "score": 2.3781440555238067 }, { "content": "export const GoogleAuth: React.FC = () => {\n\n const auth = useAuth();\n\n const styles = useStyles();\n\n\n\n const login = () => {\n\n chrome.identity.getAuthToken({ interactive: true }, (token) => {\n\n // const credential = firebase.auth.GoogleAuthProvider.credential(null, token);\n\n // firebase.auth().signInWithCredential(credential);\n\n auth.token.set(token);\n\n });\n\n };\n\n\n\n return (\n\n <div className={styles.container} data-test-id=\"GoogleAuth\">\n\n <Button\n\n startIcon={<Icon>google</Icon>}\n\n variant=\"contained\"\n\n onClick={login}\n\n >\n\n Signin\n\n </Button>\n\n </div>\n\n );\n", "file_path": 
"pwa/src/components/GoogleAuth/index.tsx", "rank": 66, "score": 2.3554693733952115 }, { "content": "const App: FC = () => {\n\n return (\n\n <BrowserRouter>\n\n <Switch>\n\n <PrivateRoute exact path=\"/\" component={Home} />\n\n <Route path=\"/login\" component={GoogleAuth} />\n\n </Switch>\n\n </BrowserRouter>\n\n );\n", "file_path": "pwa/src/App.tsx", "rank": 67, "score": 2.293075636186529 }, { "content": "// println(\"OK\")\n\n// }\n\n\n\n// object IntegerCount {\n\n// val data: Array[Int] = Array(1, 3, 5, 2, 3, 1, 4, 7, 8, 5, 2, 3);\n\n// val dataRdd = Context.ctx.parallelize(data)\n\n\n\n// val count = dataRdd\n\n// .map((_, 1))\n\n// .reduceByKey(_ + _)\n\n\n\n// println(count)\n\n// println(\"OK\")\n\n// }\n\n\n\n// object StreamCount {\n\n// val streamCtx = new StreamingContext(Context.ctx, Seconds(20))\n\n// val lines = streamCtx.socketTextStream(\"localhost\", 9999)\n\n// }\n\n\n", "file_path": "spark/src/main/scala/com/podra/transformer/batch/Job.scala", "rank": 68, "score": 2.142490479554746 }, { "content": "@SpringBootApplication\n\npublic class LoaderApplication {\n\n\n\n\tpublic static void main(String[] args) {\n\n\t\tSpringApplication.run(LoaderApplication.class, args);\n\n\t}\n\n\n", "file_path": "loader/src/main/java/com/podra/loader/LoaderApplication.java", "rank": 69, "score": 2.0589412273273053 }, { "content": " out: Collector[Long]\n\n ): Unit = {\n\n // timer only fires if ride was too long\n\n out.collect(rideState.value.rideId)\n\n // prevents duplicate alerts, but will leak state if END comes\n\n rideState.clear()\n\n }\n\n\n\n private def rideTooLong(\n\n startEvent: TaxiRide,\n\n endEvent: TaxiRide,\n\n ): Boolean = {\n\n Duration\n\n .between(startEvent.eventTime, endEvent.eventTime)\n\n .compareTo(Duration.ofHours(2)) > 0\n\n }\n\n\n\n private def getTimerTime(ride: TaxiRide): Long = {\n\n ride.eventTime.toEpochMilli + 2.hours.toMillis\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"spark/src/main/scala/com/podra/transformer/stream/SocketWordCount.scala", "rank": 70, "score": 1.9509242772383826 }, { "content": "package com.podra.transformer.stream\n\n\n\nimport org.apache.flink.connector.kafka.source.KafkaSource\n\nimport org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer\n\nimport org.apache.flink.api.common.serialization.SimpleStringSchema\n\nimport org.apache.flink.connector.kafka.sink.KafkaSink\n\nimport org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema\n\nimport org.apache.flink.connector.base.DeliveryGuarantee\n\n\n", "file_path": "spark/src/main/scala/com/podra/transformer/stream/Sources.scala", "rank": 71, "score": 1.8858787461161217 }, { "content": "# Getting Started\n\n\n\n### Reference Documentation\n\nFor further reference, please consider the following sections:\n\n\n\n* [Official Apache Maven documentation](https://maven.apache.org/guides/index.html)\n\n* [Spring Boot Maven Plugin Reference Guide](https://docs.spring.io/spring-boot/docs/2.5.5/maven-plugin/reference/html/)\n\n* [Create an OCI image](https://docs.spring.io/spring-boot/docs/2.5.5/maven-plugin/reference/html/#build-image)\n\n* [Spring for Apache Kafka](https://docs.spring.io/spring-boot/docs/2.5.5/reference/htmlsingle/#boot-features-kafka)\n\n* [Apache Kafka Streams Support](https://docs.spring.io/spring-kafka/docs/current/reference/html/_reference.html#kafka-streams)\n\n* [Apache Kafka Streams Binding Capabilities of Spring Cloud Stream](https://docs.spring.io/spring-cloud-stream/docs/current/reference/htmlsingle/#_kafka_streams_binding_capabilities_of_spring_cloud_stream)\n\n\n\n### Guides\n\nThe following guides illustrate how to use some features concretely:\n\n\n\n* [Samples for using Apache Kafka Streams with Spring Cloud stream](https://github.com/spring-cloud/spring-cloud-stream-samples/tree/master/kafka-streams-samples)\n\n\n", "file_path": "loader/README.md", "rank": 72, "score": 1.8477575572061542 }, { 
"content": " .keyBy(_._1)\n\n .window(TumblingEventTimeWindows.of(Time.hours(1)))\n\n .reduce (\n\n (fare1: (Long, Float), fare2: (Long, Float)) => {\n\n (fare1._1, fare1._2 + fare2._2)\n\n },\n\n new WrapWithWindowInfo\n\n )\n\n .windowAll(TumblingEventTimeWindows.of(Time.hours(1)))\n\n .maxBy(2)\n\n .addSink(sink)\n\n }\n\n\n\n env.execute()\n\n }\n\n }\n\n\n", "file_path": "spark/src/main/scala/com/podra/transformer/stream/SocketWordCount.scala", "rank": 73, "score": 1.727961358966866 }, { "content": " .filter { _.nonEmpty }\n\n }\n\n .map((_, 1))\n\n .keyBy(_._1)\n\n .sum(1)\n\n print(counts)\n\n\n\n val l = List(1, 2, 3).map((_, 1))\n\n println(l)\n\n\n\n val result = env.execute(\"SocketTextStreamWordCount\")\n\n println(result)\n\n\n\n }\n\n}\n\n\n\nfinal case class TaxiRide(\n\n rideId: Long, \n\n taxiId: Long, \n\n driverId: Long, \n", "file_path": "spark/src/main/scala/com/podra/transformer/stream/SocketWordCount.scala", "rank": 74, "score": 1.7042223084068882 }, { "content": "export default class RedditProducer {\n\n client: Pool;\n\n requester: snoowrap;\n\n producer: Producer;\n\n posts: Reddit.Post[];\n\n debug: boolean;\n\n\n\n static env2ConfigNameMapping: SnoowrapOptions = {\n\n userAgent: 'REDDIT_PRODUCER_AGENT_STRING',\n\n clientId: 'REDDIT_PRODUCER_API_KEY',\n\n clientSecret: 'REDDIT_PRODUCER_API_SECRET',\n\n username: 'REDDIT_PRODUCER_USERNAME',\n\n password: 'REDDIT_PRODUCER_PASSWORD',\n\n };\n\n static defaultConfigOptions: Reddit.ConfigOptions = {\n\n redditConfig: {},\n\n debug: false,\n\n };\n\n\n\n constructor({ redditConfig, debug } = RedditProducer.defaultConfigOptions) {\n\n this.debug = !!debug;\n\n this.client = new Pool({\n\n user: 'admin',\n\n host: 'pod/podra-postgresql-0',\n\n database: 'podra-ingestion',\n\n password: process.env.POSTGRES_PASSWORD,\n\n port: 5432,\n\n });\n\n this.requester = new snoowrap({\n\n ...this.getRedditConfig(),\n\n ...redditConfig,\n\n });\n\n this.producer = new Kafka({\n\n logLevel: this.debug ? 
logLevel.DEBUG : logLevel.INFO,\n\n clientId: 'podra-reddit-ingestor',\n\n brokers: ['service/podra-kafka:9092'],\n\n }).producer();\n\n this.posts = [];\n\n }\n\n\n\n private getRedditConfig(): SnoowrapOptions {\n\n const config = { ...RedditProducer.env2ConfigNameMapping };\n\n\n\n Object.entries(config).forEach(\n\n ([confKey, envKey]) => {\n\n const envVar = process.env[envKey];\n\n if (envVar === undefined) {\n\n throw new Error(\n\n `${envKey} is not defined in running process.`\n\n );\n\n }\n\n \n\n // the value is generated from config above, so will NOT fail!\n\n config[confKey as keyof typeof config] = envVar;\n\n }\n\n );\n\n\n\n return config;\n\n }\n\n\n\n private getOld<T>(queryString: string): T[] {\n\n let oldOnes: T[] = [];\n\n this.client.query(queryString, (err, res) => {\n\n console.log(`Response: ${res}`);\n\n console.log(`Error: ${err}`);\n\n\n\n if (!err) {\n\n oldOnes = res as unknown as T[];\n\n }\n\n });\n\n return oldOnes;\n\n }\n\n\n\n private generatePostMessages(): Message[] {\n\n return this.posts.map(post => ({\n\n value: JSON.stringify(post),\n\n }));\n\n }\n\n\n\n async sendData(): Promise<void> {\n\n try {\n\n const metadata = await this.producer.send({\n\n topic: 'ingestion-reddit-posts',\n\n compression: CompressionTypes.GZIP,\n\n messages: this.generatePostMessages(),\n\n });\n\n metadata.forEach(console.log);\n\n } catch (err) {\n\n if (isErr(err)) {\n\n console.error(`[ingestor/reddit/sendData] ${err.message}`, err);\n\n return Promise.reject('Send failed!');\n\n }\n\n }\n\n }\n\n\n\n async run(): Promise<void> {\n\n this.updateSubreddits();\n\n const subreddits = this.getOldSubreddits();\n\n subreddits.forEach(subreddit => {\n\n this.updateSubredditPosts(subreddit);\n\n });\n\n\n\n try {\n\n await this.producer.connect();\n\n await this.sendData();\n\n } catch (err) {\n\n if (isErr(err)) {\n\n console.error(`[ingestion/reddit/run] ${err.message}`, err);\n\n return Promise.reject('Run failed!');\n\n }\n\n } finally {\n\n 
await this.producer.disconnect();\n\n }\n\n }\n\n\n\n getOldSubreddits(): Reddit.Subreddit[] {\n\n return this.getOld(`\n\n SELECT id, name\n\n FROM subreddit\n\n ORDER BY name\n\n `);\n\n }\n\n\n\n getOldSubredditPosts(subreddit: Reddit.Subreddit): Reddit.Post[] {\n\n return this.getOld(`\n\n SELECT id\n\n FROM posts\n\n WHERE subreddit = ${subreddit.id}\n\n `);\n\n }\n\n\n\n async updateSubreddits(): Promise<void> {\n\n const olds = new Set(this.getOldSubreddits().map(old => JSON.stringify(old)));\n\n const news = new Set(\n\n await this.requester\n\n .getSubscriptions()\n\n .fetchAll()\n\n .map(\n\n sub =>\n\n JSON.stringify(({\n\n id: sub.id,\n\n name: sub.display_name_prefixed,\n\n } as Reddit.Subreddit))\n\n )\n\n );\n\n\n\n const toDel = new Set([...olds].filter(old => !news.has(old)));\n\n const toAdd = new Set([...news].filter(new_ => !olds.has(new_)));\n\n\n\n const delIds = Array.from(toDel).map(sub => {\n\n const subObj = JSON.parse(sub);\n\n if (!(\"id\" in subObj)) throw new Error(`SerDe failed with ${sub}`);\n\n return subObj.id as string;\n\n });\n\n const delString = format(\n\n 'DELETE FROM subreddits WHERE id IN %L',\n\n delIds\n\n );\n\n this.client.query(delString, (err, res) => {\n\n console.log(`Response: ${res}`);\n\n console.log(`Error: ${err}`);\n\n });\n\n\n\n const addString = format(\n\n 'INSERT INTO subreddits (id, name) VALUES %L',\n\n Array.from(toAdd).map(sub => JSON.parse(sub))\n\n );\n\n this.client.query(addString, (err, res) => {\n\n console.log(`Response: ${res}`);\n\n console.log(`Error: ${err}`);\n\n });\n\n }\n\n\n\n async updateSubredditPosts(subreddit: Reddit.Subreddit): Promise<void> {\n\n const oldPosts = new Set(this.getOldSubredditPosts(subreddit));\n\n const newPosts = new Set(\n\n (await this.requester.getSubreddit(subreddit.id).getNew()).map(\n\n post =>\n\n ({\n\n id: post.id,\n\n title: post.title,\n\n date: new Date(post.created),\n\n subreddit,\n\n } as Reddit.Post)\n\n )\n\n );\n\n\n\n const toAdd = new 
Set(\n\n [...newPosts].filter(post => !oldPosts.has(post))\n\n );\n\n\n\n const addString = format(\n\n 'INSERT INTO posts (post_id, title, date, subreddit) VALUES %L',\n\n Array.from(toAdd).map(post => [\n\n post.id,\n\n post.title,\n\n post.date,\n\n post.subreddit,\n\n ])\n\n );\n\n this.client.query(addString, (err, res) => {\n\n console.log(`Response: ${res}`);\n\n console.log(`Error: ${err}`);\n\n });\n\n }\n", "file_path": "ingestion/src/reddit.ts", "rank": 75, "score": 1.7004034219137951 }, { "content": " ride: TaxiRide,\n\n ctx: KeyedProcessFunction[Long, TaxiRide, Long]#Context,\n\n out: Collector[Long]\n\n ): Unit = {\n\n Option(rideState.value) match {\n\n case Some(firstRideEvent) => {\n\n if (ride.isStart && rideTooLong(ride, firstRideEvent)) {\n\n out.collect(ride.rideId)\n\n } else {\n\n // first ride was START, so there's a timer unless fired\n\n ctx.timerService.deleteEventTimeTimer(\n\n getTimerTime(firstRideEvent)\n\n )\n\n // if ride has gone too long but timer didn't fire...\n\n if (rideTooLong(firstRideEvent, ride)) {\n\n out.collect(ride.rideId)\n\n }\n\n }\n\n\n\n // both events seen so clear state (can leak if missing)\n", "file_path": "spark/src/main/scala/com/podra/transformer/stream/SocketWordCount.scala", "rank": 76, "score": 1.691873043546627 }, { "content": "export const FilterSorterBox: FC = () => {\n\n const state = useState(featureState);\n\n const styles = useStyles();\n\n\n\n const onSortClick = (idx: number) => (): void => {\n\n const direction: string =\n\n {\n\n [\"normal\" as string]: \"asc\",\n\n [\"asc\" as string]: \"desc\",\n\n }[state.features[idx].direction.get() as string] || \"normal\";\n\n state.features[idx].set((p) =>\n\n direction === \"normal\"\n\n ? 
{ filter: p.filter }\n\n : { ...p, direction: direction as SortOrder }\n\n );\n\n };\n\n\n\n return (\n\n <div className={styles.container} data-test-id=\"FilterSorterBox\">\n\n {FILTERS.map((filter, i) => (\n\n <FormControl\n\n className={styles.formControl}\n\n key={\"control\" + filter}\n\n >\n\n <p\n\n id={`${filter}-filter-label`}\n\n className={styles.filterLabel}\n\n >\n\n {capitalize(filter)}\n\n </p>\n\n <IconButton\n\n onClick={onSortClick(i)}\n\n className={styles.sortButton}\n\n style={{\n\n borderColor: getSortButtonColor(\n\n state.features[i].direction.get()\n\n ),\n\n }}\n\n >\n\n <SvgIcon\n\n htmlColor={getSortButtonColor(\n\n state.features[i].direction.get()\n\n )}\n\n >\n\n <path\n\n d=\"M3 18h6v-2H3v2zM3 6v2h18V6H3zm0 7h12v-2H3v2z\"\n\n transform=\"scale(0.7, 1)\"\n\n ></path>\n\n <ButtonArrowIcon\n\n {...(state.features[\n\n i\n\n ].direction.get() as string)}\n\n />\n\n </SvgIcon>\n\n </IconButton>\n\n <FilterArea\n\n filter={state.features[i].filter.get()}\n\n filterIdx={i}\n\n />\n\n </FormControl>\n\n ))}\n\n </div>\n\n );\n", "file_path": "pwa/src/components/FilterSorterBox/index.tsx", "rank": 77, "score": 1.673581652591198 }, { "content": "const PrivateRoute: FC<RouteProps> = (props: RouteProps) => {\n\n const auth = useAuth();\n\n if (auth.token.get()) return <Route {...props} />;\n\n return <Redirect to=\"/login\" />;\n", "file_path": "pwa/src/App.tsx", "rank": 78, "score": 1.619836083863099 }, { "content": "export const Home: React.FC = () => {\n\n const state = useState<FeatureState>(featureState);\n\n const appState = useState<AppState>(metaState);\n\n const styles = useStyles();\n\n\n\n return (\n\n <div className={styles.background}>\n\n <Header\n\n filtering={appState.filtering.get()}\n\n integrating={appState.integrating.get()}\n\n configuring={appState.configuring.get()}\n\n />\n\n {appState.filtering.get() && <FilterSorterBox />}\n\n <IntegrationModal />\n\n <ItemList\n\n list={state.results.get()}\n\n 
filters={state.features.map((feat) => feat.filter.get())}\n\n sort={state.features.map((feat) => ({\n\n name: feat.filter.name.get(),\n\n direction: feat.direction.get() as SortOrder,\n\n }))}\n\n />\n\n </div>\n\n );\n", "file_path": "pwa/src/components/Home/index.tsx", "rank": 79, "score": 1.3965078977807521 }, { "content": "export const RangeFilter: React.FC<RangeFilterProps> = ({\n\n filterName,\n\n binCount = 20,\n\n}: RangeFilterProps) => {\n\n const [toNum, toString] =\n\n filterName === \"words.length\"\n\n ? [lengthStringToNumber, lengthNumberToString]\n\n : [dateStringToNumber, dateNumberToString];\n\n const filterIdx = FILTERS.indexOf(filterName);\n\n const state = useState(featureState);\n\n const [min, max, bins, minString, maxString] = useMemo(() => {\n\n const vals = state.results.map((el) =>\n\n deepElement(filterName, el.get())\n\n );\n\n const minAsString = vals\n\n ? vals.reduce((min_, val) =>\n\n val.localeCompare(min_) < 0 ? val : min_\n\n )\n\n : \"\";\n\n const maxAsString = vals\n\n ? vals.reduce((max_, val) =>\n\n val.localeCompare(max_) > 0 ? 
val : max_\n\n )\n\n : \"\";\n\n const minNum = toNum(minAsString);\n\n const maxNum = toNum(maxAsString);\n\n const interval = (maxNum - minNum) / binCount;\n\n const countBins = Array(binCount).fill({ count: 0 });\n\n vals.forEach(\n\n (val) => countBins[((toNum(val) - minNum) / interval) >> 0].count++\n\n );\n\n return [minNum, maxNum, countBins, minAsString, maxAsString];\n\n }, [state.results]);\n\n\n\n const handleRangeChange = (\n\n _: React.ChangeEvent<unknown>,\n\n vals: number | number[]\n\n ): void => {\n\n const valsAsStr = (vals as number[]).map((val) => toString(val));\n\n state.features[filterIdx].filter.range.set(\n\n valsAsStr as [string, string]\n\n );\n\n };\n\n\n\n const getRange = (): number[] => {\n\n const valsAsString = state.features[filterIdx].filter.range.get();\n\n return (valsAsString as string[]).map((val) => toNum(val));\n\n };\n\n const marks = [\n\n { value: min, label: minString },\n\n { value: max, label: maxString },\n\n ];\n\n\n\n return (\n\n <>\n\n <AreaChart width={200} height={60} data={bins}>\n\n <Area\n\n type=\"monotone\"\n\n dataKey=\"count\"\n\n stroke=\"#8884d8\"\n\n fill=\"#8884d8\"\n\n />\n\n </AreaChart>\n\n <RangeSlider\n\n data-test-id=\"RangeFilter\"\n\n getAriaLabel={(idx: number) =>\n\n `${filterName} Slider - Step ${idx}`\n\n }\n\n getAriaValueText={(val: number, idx: number) =>\n\n `value at ${filterName} slider step ${idx} is ${toString(\n\n val\n\n )}`\n\n }\n\n defaultValue={[min, max]}\n\n min={min}\n\n max={max}\n\n onChange={handleRangeChange}\n\n value={getRange()}\n\n valueLabelDisplay=\"auto\"\n\n valueLabelFormat={toString}\n\n marks={marks}\n\n />\n\n </>\n\n );\n", "file_path": "pwa/src/components/RangeFilter/index.tsx", "rank": 80, "score": 1.34575797265021 }, { "content": "# Existing Errors\n\n\n\n## `yarn dev/build/storybook`\n\n\n\n```\n\nnode_modules/firebase-admin/node_modules/@firebase/database/dist/index.esm.js:1:7: error: No matching export in 
\"node_modules/@firebase/app/dist/index.esm2017.js\" for import \"default\"\n\n```\n\n\n\n```js\n\nimport firebase from \"@firebase/app\";\n\n```\n\n\n\n## `yarn test`\n\n\n\nImporting the service worker module during tests does not work because it imports them conditionally through vite and npm command. Probably best to just use workbox directly.\n", "file_path": "pwa/README.md", "rank": 81, "score": 1.3100121143683592 }, { "content": "\n\nExample of running beam pipeline from command line\n\n```bash\n\n# 1. build and push container for cross-language SDK\n\nexport DOCKER_ROOT=\"Your Docker Repository Root\"\n\n./gradlew :sdks:java:container:java8:docker \\\n\n -Pdocker-repository-root=$DOCKER_ROOT \\\n\n -Pdocker-tag=latest\n\ndocker push $DOCKER_ROOT/beam_java8_sdk:latest\n\n\n\n# 2. example of running beam pipeline with kafka sink\n\nexport PROJECT=\"$(gcloud config get-value project)\"\n\nexport TEMP_LOCATION=\"gs://MY-BUCKET/temp\"\n\nexport REGION=\"us-central1\"\n\nexport JOB_NAME=\"kafka-taxi-`date +%Y%m%d-%H%M%S`\"\n\nexport BOOTSTRAP_SERVERS=\"123.45.67.89:1234\"\n\nexport EXPANSION_ADDR=\"localhost:1234\"\n\ngo run ./sdks/go/examples/kafka/types/types.go \\\n\n --runner=DataflowRunner \\\n\n --temp_location=$TEMP_LOCATION \\\n\n --staging_location=$STAGING_LOCATION \\\n\n --project=$PROJECT \\\n\n --region=$REGION \\\n\n --job_name=\"${JOB_NAME}\" \\\n\n --bootstrap_servers=$BOOTSTRAP_SERVER \\\n\n --experiments=use_portable_job_submission,use_runner_v2 \\\n\n --expansion_addr=$EXPANSION_ADDR \\\n\n --sdk_harness_container_image_override=\".*java.*,${DOCKER_ROOT}/beam_java8_sdk:latest\"\n\n```\n", "file_path": "beam/README.md", "rank": 82, "score": 1.1900239220182067 }, { "content": "export const ItemLogo: React.FC<ItemLogoProps> = ({\n\n src = \"/favicon.ico\",\n\n width = 32,\n\n height = 32,\n\n alt = \"Logo\",\n\n}: ItemLogoProps) => {\n\n const styles = useStyles();\n\n return (\n\n <Image\n\n data-test-id=\"ItemLogo\"\n\n 
className={styles.image}\n\n src={src as string}\n\n alt={alt}\n\n imageStyle={{ width, height }}\n\n />\n\n );\n", "file_path": "pwa/src/components/ItemLogo/index.tsx", "rank": 83, "score": 1.176763272688781 }, { "content": "export const Item: React.FC<ItemProps> = ({\n\n url = \"https://www.google.com\",\n\n logo,\n\n words,\n\n}: ItemProps) => {\n\n const styles = useStyles();\n\n return (\n\n <a\n\n className={styles.container}\n\n data-test-id=\"Item\"\n\n title={words?.title}\n\n href={url}\n\n target=\"_blank\"\n\n rel=\"noopener noreferrer\"\n\n >\n\n <ItemLogo {...logo} />\n\n <ItemWords {...words} />\n\n </a>\n\n );\n", "file_path": "pwa/src/components/Item/index.tsx", "rank": 84, "score": 1.1093332145494976 }, { "content": "export const Header: React.FC<HeaderProps> = ({\n\n filtering = false,\n\n integrating = false,\n\n configuring = false,\n\n}: HeaderProps) => {\n\n const state = useState({ filtering, integrating, configuring });\n\n const styles = useStyles();\n\n return (\n\n <div className={styles.container} data-test-id=\"Header\">\n\n <IconButton\n\n color={state.filtering.get() ? \"secondary\" : \"primary\"}\n\n onClick={() => state.filtering.set((val) => !val)}\n\n >\n\n <FilterList />\n\n </IconButton>\n\n <IconButton\n\n color={state.integrating.get() ? \"secondary\" : \"primary\"}\n\n onClick={() => state.integrating.set((val) => !val)}\n\n >\n\n <AddCircle />\n\n </IconButton>\n\n <IconButton\n\n color={state.configuring.get() ? 
\"secondary\" : \"primary\"}\n\n onClick={() => state.configuring.set((val) => !val)}\n\n >\n\n <Settings />\n\n </IconButton>\n\n </div>\n\n );\n", "file_path": "pwa/src/components/Header/index.tsx", "rank": 85, "score": 1.101696684848606 }, { "content": "export const ValueFilter: React.FC<ValueFilterProps> = ({\n\n filterName,\n\n filterIdx,\n\n}: ValueFilterProps) => {\n\n const state = useState(featureState);\n\n const styles = useStyles();\n\n\n\n const filterChoices = useMemo(() => {\n\n const parser: (str: string) => string =\n\n filterName === \"url\" ? domainParser : (x: string) => x;\n\n const data: MenuItemData[] = state.results.get().map((item) =>\n\n filterName === \"url\"\n\n ? {\n\n name: parser(deepElement(filterName, item)),\n\n icon: item?.logo?.src,\n\n }\n\n : { name: parser(deepElement(filterName, item)) }\n\n );\n\n const uniques = [...new Set(data)];\n\n const sorted = uniques.sort((a, b) => a.name.localeCompare(b.name));\n\n return sorted;\n\n }, [state.results]);\n\n\n\n const onOptionChange = (e: React.ChangeEvent<{ value: unknown }>): void =>\n\n state.features[filterIdx].filter.values.set((p) => [\n\n ...(p as string[]),\n\n e.target.value as string,\n\n ]);\n\n const onOptionDelete = (val: string) => (): void =>\n\n state.features[filterIdx].filter.values.set((p) =>\n\n p?.filter((value) => value !== val)\n\n );\n\n\n\n return (\n\n <Select\n\n multiple\n\n labelId={`${filterName}-filter-label`}\n\n id={`${filterName}-filter`}\n\n value={state.features[filterIdx].filter.values.get()}\n\n onChange={onOptionChange}\n\n input={<Input id={`select-${filterName}-filter`} />}\n\n className={styles.container}\n\n data-test-id=\"ValueFilter\"\n\n MenuProps={MenuProps}\n\n renderValue={(sel) => (\n\n <div className={styles.chips}>\n\n {(sel as string[]).map((val: unknown, i: number) => (\n\n <Chip\n\n key={i}\n\n label={capitalize(val as string)}\n\n component=\"div\"\n\n icon={\n\n <img\n\n src={`https://${\n\n val as string\n\n 
}.com/favicon.ico`}\n\n alt={val as string}\n\n width={24}\n\n height={24}\n\n />\n\n }\n\n onDelete={onOptionDelete(val as string)}\n\n onMouseDown={(\n\n e: React.MouseEvent<HTMLDivElement>\n\n ) => e.stopPropagation()}\n\n className={styles.chip}\n\n />\n\n ))}\n\n </div>\n\n )}\n\n >\n\n {filterChoices.map((choice, i) => (\n\n <StyledFilterMenuItem key={i} value={choice.name}>\n\n {choice.icon && (\n\n <img\n\n src={choice.icon}\n\n alt={choice.name}\n\n width={16}\n\n height={16}\n\n />\n\n )}\n\n <ListItemText primary={capitalize(choice.name)} />\n\n </StyledFilterMenuItem>\n\n ))}\n\n </Select>\n\n );\n", "file_path": "pwa/src/components/ValueFilter/index.tsx", "rank": 86, "score": 1.0463081757095423 }, { "content": "export const ItemList: React.FC<ItemListProps> = ({\n\n list = [],\n\n filters,\n\n sort,\n\n}: ItemListProps) => {\n\n const styles = useStyles();\n\n const filteredList: ItemProps[] = filters\n\n ? filterList(list, filters)\n\n : list;\n\n const sortedList: ItemProps[] = sort\n\n ? sortList(filteredList, sort)\n\n : filteredList;\n\n return (\n\n <div className={styles.container} data-test-id=\"ItemList\">\n\n {sortedList.map((item) => (\n\n <Item key={item.url} {...item} />\n\n ))}\n\n </div>\n\n );\n", "file_path": "pwa/src/components/ItemList/index.tsx", "rank": 87, "score": 0.9131217483016787 }, { "content": "export const ItemWords: React.FC<ItemWordsProps> = ({\n\n title = \"... 
[no title extracted] ...\",\n\n author = \"unknown\",\n\n date = new Date().toLocaleDateString(),\n\n length = \"00:05:00\",\n\n}: ItemWordsProps) => {\n\n const styles = useStyles();\n\n return (\n\n <div className={styles.container} data-test-id=\"ItemWords\">\n\n <h4 className={[styles.words, styles.title].join(\" \")}>{title}</h4>\n\n <p className={[styles.words, styles.subtitles].join(\" \")}>\n\n {author}\n\n </p>\n\n <p className={[styles.words, styles.subtitles].join(\" \")}>\n\n {date} &#8226; {length}\n\n </p>\n\n </div>\n\n );\n", "file_path": "pwa/src/components/ItemWords/index.tsx", "rank": 88, "score": 0.8920836403883734 }, { "content": "export const KeywordFilter: FC<KeywordFilterProps> = ({\n\n filterName,\n\n filterIdx,\n\n}: KeywordFilterProps) => {\n\n const state = useState(featureState);\n\n const styles = useStyles();\n\n\n\n const options: Option[] = useMemo(() => {\n\n const text: string = state.results.reduce(\n\n (all, one) =>\n\n all.concat(\n\n \" \",\n\n deepElement(filterName, one.get()).toLowerCase()\n\n ),\n\n \"\"\n\n );\n\n const allWords: string[] = text.replace(/[^\\w\\d ]/g, \"\").split(\" \");\n\n const words = allWords.filter((word) => !commonWords.includes(word));\n\n return _.chain(words).countBy().sortBy().toPairs().value();\n\n }, [state.results]);\n\n\n\n const handleKeywordChange = (\n\n _: ChangeEvent<unknown>,\n\n value: Option[]\n\n ): void => {\n\n const vals = value.map((val) =>\n\n val[1] === -1 ? 
val[0].split(\":\")[1] : val[0]\n\n );\n\n state.features[filterIdx].filter.keywords.set(vals);\n\n };\n\n\n\n const value = state.features[filterIdx].filter.values\n\n .get()\n\n ?.map((word) => [word, 0] as Option);\n\n\n\n return (\n\n <Autocomplete\n\n multiple\n\n filterSelectedOptions\n\n disableListWrap\n\n limitTags={25}\n\n id={`${filterName}-filter`}\n\n data-test-id=\"KeywordFilter\"\n\n className={styles.container}\n\n value={value}\n\n onChange={handleKeywordChange}\n\n options={options}\n\n renderOption={Option}\n\n filterOptions={(options, params) => {\n\n const filtered = filter(options, params);\n\n if (params.inputValue !== \"\") {\n\n filtered.push([`Add: \"${params.inputValue}\"`, -1]);\n\n }\n\n return filtered;\n\n }}\n\n ListboxComponent={\n\n Listbox as ComponentType<HTMLAttributes<HTMLElement>>\n\n }\n\n renderInput={(params) => (\n\n <TextField\n\n {...params}\n\n variant=\"outlined\"\n\n placeholder=\"Keywords...\"\n\n label={filterName}\n\n margin=\"normal\"\n\n />\n\n )}\n\n renderGroup={renderGroup}\n\n groupBy={(option) => {\n\n const rounded = Math.round(option[1] / 10) * 10;\n\n return `${rounded}-${rounded + 10}`;\n\n }}\n\n selectOnFocus\n\n clearOnBlur\n\n handleHomeEndKeys\n\n />\n\n );\n", "file_path": "pwa/src/components/KeywordFilter/index.tsx", "rank": 89, "score": 0.8436962545519373 }, { "content": "export const SWReloadPrompt: React.FC<SWReloadPromptProps> = ({\n\n intervalMS = 60 * 60 * 1000, // default every hour\n\n}: SWReloadPromptProps) => {\n\n // replaced dynamically\n\n const reloadSW = \"__RELOAD_SW__\";\n\n\n\n // Hooks\n\n const styles = useStyles();\n\n const {\n\n offlineReady: [offlineReady, setOfflineReady],\n\n needRefresh: [needRefresh, setNeedRefresh],\n\n updateServiceWorker,\n\n } = useRegisterSW({\n\n onRegistered: (swReg) => {\n\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n\n // @ts-ignore\n\n reloadSW === \"true\"\n\n ? 
swReg &&\n\n setInterval(() => {\n\n console.log(\"Checking for SW updates ...\");\n\n swReg.update();\n\n }, intervalMS)\n\n : console.log(\"SW Registered:\", swReg);\n\n },\n\n onRegisterError: (error) =>\n\n console.error(\"SW Registration Error:\", error),\n\n });\n\n\n\n // logic\n\n function reload(): void {\n\n updateServiceWorker(true);\n\n }\n\n function close(): void {\n\n setOfflineReady(false);\n\n setNeedRefresh(false);\n\n }\n\n\n\n return (\n\n <div className={styles.container} data-test-id=\"SWReloadPrompt\">\n\n {(offlineReady || needRefresh) && (\n\n <div className=\"toast\">\n\n <div className=\"message\">\n\n {offlineReady ? (\n\n <span>Ready to work offline!</span>\n\n ) : (\n\n <span>\n\n New content available! Click &quot;Reload&quot;\n\n to update.\n\n </span>\n\n )}\n\n </div>\n\n {needRefresh && (\n\n <button className=\"toast-button\" onClick={reload}>\n\n Reload\n\n </button>\n\n )}\n\n <button className=\"toast-button\" onClick={close}>\n\n Close\n\n </button>\n\n </div>\n\n )}\n\n </div>\n\n );\n", "file_path": "pwa/src/components/SWReloadPrompt/index.tsx", "rank": 90, "score": 0.6927130504357564 }, { "content": "export const IntegrationModal: React.FC = () => {\n\n const appState = useState<AppState>(metaState);\n\n const styles = useStyles();\n\n\n\n const triggerIntegration = (service: string) => (): void => {\n\n switch (service) {\n\n case \"reddit\":\n\n redditIntegration();\n\n break;\n\n case \"twitter\":\n\n twitterIntegration();\n\n break;\n\n }\n\n };\n\n\n\n return (\n\n <Modal\n\n data-test-id=\"IntegrationModel\"\n\n className={styles.modal}\n\n open={appState.integrating.get()}\n\n onClose={() => appState.integrating.set(false)}\n\n closeAfterTransition\n\n BackdropComponent={Backdrop}\n\n BackdropProps={{ timeout: 500 }}\n\n >\n\n <Fade in={appState.integrating.get()}>\n\n <div className={styles.paper}>\n\n <TableContainer component={Paper}>\n\n <Table className={styles.table}>\n\n <TableHead>\n\n <TableRow>\n\n 
<TableCell>Service</TableCell>\n\n <TableCell>Status</TableCell>\n\n </TableRow>\n\n </TableHead>\n\n <TableBody>\n\n {appState.services.map((row) => (\n\n <TableRow key={row.name.get()}>\n\n <TableCell component=\"th\" scope=\"row\">\n\n <Icon>\n\n <img\n\n src={`/svg/${row.name.get()}.svg`}\n\n />\n\n </Icon>\n\n </TableCell>\n\n <TableCell align=\"center\">\n\n {\n\n {\n\n [\"Enabled\" as string]: (\n\n <Icon color=\"action\">\n\n <CheckCircleOutline />\n\n </Icon>\n\n ),\n\n [\"Error\" as string]: (\n\n <Icon color=\"error\">\n\n <ErrorOutline />\n\n </Icon>\n\n ),\n\n [\"Disabled\" as string]: (\n\n <IconButton\n\n disableRipple\n\n color=\"primary\"\n\n onClick={triggerIntegration(\n\n row.name.get()\n\n )}\n\n >\n\n <AddBox />\n\n </IconButton>\n\n ),\n\n }[row.status.get()]\n\n }\n\n </TableCell>\n\n </TableRow>\n\n ))}\n\n </TableBody>\n\n </Table>\n\n </TableContainer>\n\n </div>\n\n </Fade>\n\n </Modal>\n\n );\n", "file_path": "pwa/src/components/IntegrationModal/index.tsx", "rank": 91, "score": 0.6546487800068483 } ]
Rust
server/tests/write_buffer_delete.rs
re-gmbh/influxdb_iox
8717bfa74727bcac0adf762c81878bbdd63435b1
use std::collections::BTreeMap; use std::num::NonZeroU32; use std::sync::Arc; use arrow_util::assert_batches_eq; use data_types::delete_predicate::{DeleteExpr, DeletePredicate, Op, Scalar}; use data_types::router::{ Matcher, MatcherToShard, QuerySinks, Router as RouterConfig, ShardConfig, ShardId, WriteSink, WriteSinkSet, WriteSinkVariant, }; use data_types::server_id::ServerId; use data_types::timestamp::TimestampRange; use data_types::DatabaseName; use dml::{DmlDelete, DmlOperation, DmlWrite}; use generated_types::influxdata::iox::{ management::v1::DatabaseRules, write_buffer::v1::WriteBufferConnection, }; use mutable_batch_lp::lines_to_batches; use query::exec::ExecutionContextProvider; use query::frontend::sql::SqlQueryPlanner; use regex::Regex; use router::router::Router; use router::server::RouterServer; use server::db::test_helpers::wait_for_tables; use server::rules::ProvidedDatabaseRules; use server::test_utils::{make_application, make_initialized_server}; use server::{Db, Server}; use write_buffer::mock::MockBufferSharedState; struct DistributedTest { router: Arc<Router>, consumer: Arc<Server>, consumer_db: Arc<Db>, } impl DistributedTest { pub async fn new(db_name: &DatabaseName<'static>) -> Self { let write_buffer_state = MockBufferSharedState::empty_with_n_sequencers(NonZeroU32::new(1).unwrap()); let application = make_application(); application .write_buffer_factory() .register_mock("my_mock".to_string(), write_buffer_state); let write_buffer_connection = WriteBufferConnection { r#type: "mock".to_string(), connection: "my_mock".to_string(), connection_config: Default::default(), creation_config: None, }; let router_server = RouterServer::for_testing( None, None, Arc::clone(application.time_provider()), Some(Arc::clone(application.write_buffer_factory())), ) .await; let router_id = ServerId::new(NonZeroU32::new(1).unwrap()); router_server.set_server_id(router_id).unwrap(); router_server.update_router(RouterConfig { name: db_name.to_string(), 
write_sharder: ShardConfig { specific_targets: vec![MatcherToShard { matcher: Matcher { table_name_regex: Some(Regex::new(".*").unwrap()), }, shard: ShardId::new(1), }], hash_ring: None, }, write_sinks: BTreeMap::from([( ShardId::new(1), WriteSinkSet { sinks: vec![WriteSink { sink: WriteSinkVariant::WriteBuffer( write_buffer_connection.clone().try_into().unwrap(), ), ignore_errors: false, }], }, )]), query_sinks: QuerySinks::default(), }); let router = router_server.router(db_name).unwrap(); let consumer_id = ServerId::new(NonZeroU32::new(2).unwrap()); let consumer = make_initialized_server(consumer_id, Arc::clone(&application)).await; let consumer_db = consumer .create_database( ProvidedDatabaseRules::new_rules(DatabaseRules { name: db_name.to_string(), write_buffer_connection: Some(write_buffer_connection.clone()), ..Default::default() }) .unwrap(), ) .await .unwrap() .initialized_db() .unwrap(); Self { router, consumer, consumer_db, } } pub async fn wait_for_tables(&self, expected_tables: &[&str]) { wait_for_tables(&self.consumer_db, expected_tables).await } pub async fn write(&self, lp: &str) { self.router .write(DmlOperation::Write(DmlWrite::new( lines_to_batches(lp, 0).unwrap(), Default::default(), ))) .await .unwrap(); } pub async fn delete(&self, delete: DmlDelete) { self.router .write(DmlOperation::Delete(delete)) .await .unwrap(); } pub async fn query(&self, query: &str, expected: &[&'static str]) { let ctx = self.consumer_db.new_query_context(None); let physical_plan = SqlQueryPlanner::new().query(query, &ctx).await.unwrap(); let batches = ctx.collect(physical_plan).await.unwrap(); assert_batches_eq!(expected, &batches); } pub async fn drain(&self) { self.consumer.shutdown(); self.consumer.join().await.unwrap(); } } #[tokio::test] async fn write_buffer_deletes() { let db_name = DatabaseName::new("distributed").unwrap(); let fixture = DistributedTest::new(&db_name).await; fixture.write("foo x=1 1").await; fixture.write("foo x=3 2").await; fixture 
.delete(DmlDelete::new( DeletePredicate { range: TimestampRange { start: 0, end: 20 }, exprs: vec![DeleteExpr { column: "x".to_string(), op: Op::Eq, scalar: Scalar::I64(1), }], }, None, Default::default(), )) .await; fixture.write("bar x=2 1").await; fixture.wait_for_tables(&["bar", "foo"]).await; fixture .query( "select * from foo;", &[ "+--------------------------------+---+", "| time | x |", "+--------------------------------+---+", "| 1970-01-01T00:00:00.000000002Z | 3 |", "+--------------------------------+---+", ], ) .await; fixture .query( "select * from bar;", &[ "+--------------------------------+---+", "| time | x |", "+--------------------------------+---+", "| 1970-01-01T00:00:00.000000001Z | 2 |", "+--------------------------------+---+", ], ) .await; fixture.drain().await; }
use std::collections::BTreeMap; use std::num::NonZeroU32; use std::sync::Arc; use arrow_util::assert_batches_eq; use data_types::delete_predicate::{DeleteExpr, DeletePredicate, Op, Scalar}; use data_types::router::{ Matcher, MatcherToShard, QuerySinks, Router as RouterConfig, ShardConfig, ShardId, WriteSink, WriteSinkSet, WriteSinkVariant, }; use data_types::server_id::ServerId; use data_types::timestamp::TimestampRange; use data_types::DatabaseName; use dml::{DmlDelete, DmlOperation, DmlWrite}; use generated_types::influxdata::iox::{ management::v1::DatabaseRules, write_buffer::v1::WriteBufferConnection, }; use mutable_batch_lp::lines_to_batches; use query::exec::ExecutionContextProvider; use query::frontend::sql::SqlQueryPlanner; use regex::Regex; use router::router::Router; use router::server::RouterServer; use server::db::test_helpers::wait_for_tables; use server::rules::ProvidedDatabaseRules; use server::test_utils::{make_application, make_initialized_server}; use server::{Db, Server}; use write_buffer::mock::MockBufferSharedState; struct DistributedTest { router: Arc<Router>, consumer: Arc<Server>, consumer_db: Arc<Db>, } impl DistributedTest { pub async fn new(db_name: &DatabaseName<'static>) -> Self { let write_buffer_state = MockBufferSharedState::empty_with_n_sequencers(NonZeroU32::new(1).unwrap()); let application = make_application(); application .write_buffer_factory() .register_mock("my_mock".to_string(), write_buffer_state); let write_buffer_connection = WriteBufferConnection { r#type: "mock".to_string(), connection: "my_mock".to_string(), connection_config: Default::default(), creation_config: None, }; let router_server = RouterServer::for_testing( None, None, Arc::clone(application.time_provider()), Some(Arc::clone(application.write_buffer_factory())), ) .await; let router_id = ServerId::new(NonZeroU32::new(1).unwrap()); router_server.set_server_id(router_id).unwrap(); router_server.update_router(RouterConfig { name: db_name.to_string(), 
write_sharder: ShardConfig { specific_targets: vec![MatcherToShard { matcher: Matcher { table_name_regex: Some(Regex::new(".*").unwrap()), }, shard: ShardId::new(1), }], hash_ring: None, }, write_sinks: BTreeMap::from([( ShardId::new(1), WriteSinkSet { sinks: vec![WriteSink { sink: WriteSinkVariant::WriteBuffer( write_buffer_connection.clone().try_into().unwrap(), ), ignore_errors: false, }], }, )]), query_sinks: QuerySinks::default(), }); let router = router_server.router(db_name).unwrap(); let consumer_id = ServerId::new(NonZeroU32::new(2).unwrap()); let consumer = make_initialized_server(consumer_id, Arc::clone(&application)).await; let consumer_db = consumer .create_database( ProvidedDatabaseRules::new_rules(DatabaseRules { name: db_name.to_string(), write_buffer_connection: Some(write_buffer_connection.clone()), ..Default::default() }) .unwrap(), ) .await .unwrap() .initialized_db() .unwrap
: 20 }, exprs: vec![DeleteExpr { column: "x".to_string(), op: Op::Eq, scalar: Scalar::I64(1), }], }, None, Default::default(), )) .await; fixture.write("bar x=2 1").await; fixture.wait_for_tables(&["bar", "foo"]).await; fixture .query( "select * from foo;", &[ "+--------------------------------+---+", "| time | x |", "+--------------------------------+---+", "| 1970-01-01T00:00:00.000000002Z | 3 |", "+--------------------------------+---+", ], ) .await; fixture .query( "select * from bar;", &[ "+--------------------------------+---+", "| time | x |", "+--------------------------------+---+", "| 1970-01-01T00:00:00.000000001Z | 2 |", "+--------------------------------+---+", ], ) .await; fixture.drain().await; }
(); Self { router, consumer, consumer_db, } } pub async fn wait_for_tables(&self, expected_tables: &[&str]) { wait_for_tables(&self.consumer_db, expected_tables).await } pub async fn write(&self, lp: &str) { self.router .write(DmlOperation::Write(DmlWrite::new( lines_to_batches(lp, 0).unwrap(), Default::default(), ))) .await .unwrap(); } pub async fn delete(&self, delete: DmlDelete) { self.router .write(DmlOperation::Delete(delete)) .await .unwrap(); } pub async fn query(&self, query: &str, expected: &[&'static str]) { let ctx = self.consumer_db.new_query_context(None); let physical_plan = SqlQueryPlanner::new().query(query, &ctx).await.unwrap(); let batches = ctx.collect(physical_plan).await.unwrap(); assert_batches_eq!(expected, &batches); } pub async fn drain(&self) { self.consumer.shutdown(); self.consumer.join().await.unwrap(); } } #[tokio::test] async fn write_buffer_deletes() { let db_name = DatabaseName::new("distributed").unwrap(); let fixture = DistributedTest::new(&db_name).await; fixture.write("foo x=1 1").await; fixture.write("foo x=3 2").await; fixture .delete(DmlDelete::new( DeletePredicate { range: TimestampRange { start: 0, end
random
[ { "content": "pub fn make_server(\n\n server: Arc<RouterServer>,\n\n) -> router_service_server::RouterServiceServer<impl router_service_server::RouterService> {\n\n router_service_server::RouterServiceServer::new(RouterService { server })\n\n}\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/router/rpc/router.rs", "rank": 0, "score": 260332.83514555765 }, { "content": "pub fn make_server(server: Arc<Server>) -> FlightServer<impl Flight> {\n\n FlightServer::new(FlightService { server })\n\n}\n\n\n\n#[tonic::async_trait]\n\nimpl Flight for FlightService {\n\n type HandshakeStream = TonicStream<HandshakeResponse>;\n\n type ListFlightsStream = TonicStream<FlightInfo>;\n\n type DoGetStream = TonicStream<FlightData>;\n\n type DoPutStream = TonicStream<PutResult>;\n\n type DoActionStream = TonicStream<arrow_flight::Result>;\n\n type ListActionsStream = TonicStream<ActionType>;\n\n type DoExchangeStream = TonicStream<FlightData>;\n\n\n\n async fn get_schema(\n\n &self,\n\n _request: Request<FlightDescriptor>,\n\n ) -> Result<Response<SchemaResult>, tonic::Status> {\n\n Err(tonic::Status::unimplemented(\"Not yet implemented\"))\n\n }\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/flight.rs", "rank": 1, "score": 248171.63096646057 }, { "content": "pub fn make_server(\n\n server: Arc<RouterServer>,\n\n) -> remote_service_server::RemoteServiceServer<impl remote_service_server::RemoteService> {\n\n remote_service_server::RemoteServiceServer::new(RemoteService { server })\n\n}\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/router/rpc/remote.rs", "rank": 2, "score": 244341.0316385631 }, { "content": "pub fn make_server(\n\n server: Arc<RouterServer>,\n\n serving_readiness: ServingReadiness,\n\n) -> deployment_service_server::DeploymentServiceServer<\n\n impl deployment_service_server::DeploymentService,\n\n> {\n\n deployment_service_server::DeploymentServiceServer::new(DeploymentService {\n\n server,\n\n 
serving_readiness,\n\n })\n\n}\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/router/rpc/deployment.rs", "rank": 3, "score": 244341.0316385631 }, { "content": "pub fn make_server(\n\n server: Arc<RouterServer>,\n\n) -> delete_service_server::DeleteServiceServer<impl delete_service_server::DeleteService> {\n\n delete_service_server::DeleteServiceServer::new(DeleteService { server })\n\n}\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/router/rpc/delete.rs", "rank": 4, "score": 244341.0316385631 }, { "content": "/// Used to induce panics in tests\n\n///\n\n/// This needs to be public and not test-only to allow usage by other crates, unfortunately\n\n/// this means it currently isn't stripped out of production builds\n\n///\n\n/// The runtime cost if no keys are registered is that of a single atomic load, and so\n\n/// it is deemed not worth the feature flag plumbing necessary to strip it out\n\npub fn panic_test(key: impl FnOnce() -> Option<String>) {\n\n if let Some(panic_database) = PANIC_DATABASE.get() {\n\n if let Some(key) = key() {\n\n if panic_database.lock().contains(&key) {\n\n panic!(\"key {} registered in panic database\", key)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "server/src/utils.rs", "rank": 5, "score": 244236.56374334075 }, { "content": "pub fn make_server(\n\n server: Arc<RouterServer>,\n\n) -> write_service_server::WriteServiceServer<impl write_service_server::WriteService> {\n\n write_service_server::WriteServiceServer::new(PBWriteService { server })\n\n}\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/router/rpc/write_pb.rs", "rank": 6, "score": 239886.8865149959 }, { "content": "/// Register a key to trigger a panic when provided in a call to panic_test\n\npub fn register_panic_key(key: impl Into<String>) {\n\n let mut panic_database = PANIC_DATABASE.get_or_init(Default::default).lock();\n\n panic_database.insert(key.into());\n\n}\n\n\n\n#[inline]\n", "file_path": "server/src/utils.rs", "rank": 7, 
"score": 238978.75484773357 }, { "content": "pub fn make_server() -> IOxTestingServer<impl IOxTesting> {\n\n IOxTestingServer::new(IOxTestingService {})\n\n}\n", "file_path": "influxdb_iox/src/influxdb_ioxd/rpc/testing.rs", "rank": 8, "score": 232749.50188769156 }, { "content": "pub fn make_server(application: Arc<ApplicationState>, config: &Config) -> Arc<Server> {\n\n let server_config = ServerConfig {\n\n wipe_catalog_on_error: config.wipe_catalog_on_error.into(),\n\n skip_replay_and_seek_instead: config.skip_replay_and_seek_instead.into(),\n\n };\n\n\n\n let app_server = Arc::new(Server::new(application, server_config));\n\n\n\n // if this ID isn't set the server won't be usable until this is set via an API\n\n // call\n\n if let Some(id) = config.run_config.server_id_config.server_id {\n\n app_server.set_id(id).expect(\"server id already set\");\n\n } else {\n\n warn!(\"server ID not set. ID must be set via the INFLUXDB_IOX_ID config or API before writing or querying data.\");\n\n }\n\n\n\n app_server\n\n}\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/setup.rs", "rank": 9, "score": 223582.30431543302 }, { "content": "/// Decode server config that was encoded using `encode_persisted_server_config`\n\npub fn decode_persisted_server_config(\n\n bytes: prost::bytes::Bytes,\n\n) -> Result<management::ServerConfig, DecodeError> {\n\n prost::Message::decode(bytes)\n\n}\n\n\n", "file_path": "generated_types/src/server_config.rs", "rank": 10, "score": 213523.07265922544 }, { "content": "/// Encode server config into a serialized format suitable for storage in object store\n\npub fn encode_persisted_server_config(\n\n server_config: &management::ServerConfig,\n\n bytes: &mut prost::bytes::BytesMut,\n\n) -> Result<(), EncodeError> {\n\n prost::Message::encode(server_config, bytes)\n\n}\n\n\n", "file_path": "generated_types/src/server_config.rs", "rank": 11, "score": 213515.51841035456 }, { "content": "pub fn drop_chunk(\n\n partition: 
LifecycleWriteGuard<'_, Partition, LockableCatalogPartition>,\n\n mut guard: LifecycleWriteGuard<'_, CatalogChunk, LockableCatalogChunk>,\n\n) -> Result<(\n\n TaskTracker<Job>,\n\n TrackedFuture<impl Future<Output = Result<()>> + Send>,\n\n)> {\n\n let db = Arc::clone(&guard.data().db);\n\n let preserved_catalog = Arc::clone(&db.preserved_catalog);\n\n let table_name = partition.table_name().to_string();\n\n let partition_key = partition.key().to_string();\n\n let chunk_id = guard.id();\n\n let lifecycle_persist = db.rules().lifecycle_rules.persist;\n\n\n\n let (tracker, registration) = db.jobs.register(Job::DropChunk {\n\n chunk: guard.addr().clone(),\n\n });\n\n\n\n // check if we're dropping an unpersisted chunk in a persisted DB\n\n // See https://github.com/influxdata/influxdb_iox/issues/2291\n", "file_path": "server/src/db/lifecycle/drop.rs", "rank": 12, "score": 207214.01491646597 }, { "content": "/// Split and then persist the provided chunks\n\n///\n\n/// `flush_handle` describes both what to persist and also acts as a transaction\n\n/// on the persistence windows\n\n///\n\n/// TODO: Replace low-level locks with transaction object\n\npub fn persist_chunks(\n\n partition: LifecycleWriteGuard<'_, Partition, LockableCatalogPartition>,\n\n chunks: Vec<LifecycleWriteGuard<'_, CatalogChunk, LockableCatalogChunk>>,\n\n flush_handle: FlushHandle,\n\n) -> Result<(\n\n TaskTracker<Job>,\n\n TrackedFuture<impl Future<Output = Result<Option<Arc<DbChunk>>>> + Send>,\n\n)> {\n\n let now = std::time::Instant::now(); // time persist duration.\n\n let db = Arc::clone(&partition.data().db);\n\n let addr = partition.addr().clone();\n\n let chunk_ids: Vec<_> = chunks.iter().map(|x| x.id()).collect();\n\n\n\n info!(%addr, ?chunk_ids, \"splitting and persisting chunks\");\n\n\n\n let max_persistable_timestamp = flush_handle.timestamp();\n\n let flush_timestamp = max_persistable_timestamp.timestamp_nanos();\n\n\n\n let (tracker, registration) = 
db.jobs.register(Job::PersistChunks {\n\n partition: partition.addr().clone(),\n", "file_path": "server/src/db/lifecycle/persist.rs", "rank": 13, "score": 207214.01491646597 }, { "content": "/// Loads a chunk in object storage back into the read buffer\n\npub fn load_chunk(\n\n mut chunk: LifecycleWriteGuard<'_, CatalogChunk, LockableCatalogChunk>,\n\n) -> Result<(\n\n TaskTracker<Job>,\n\n TrackedFuture<impl Future<Output = Result<()>> + Send>,\n\n)> {\n\n let db = Arc::clone(&chunk.data().db);\n\n let addr = chunk.addr().clone();\n\n\n\n info!(%addr, \"loading chunk to read buffer\");\n\n\n\n let (tracker, registration) = db.jobs.register(Job::LoadReadBufferChunk {\n\n chunk: addr.clone(),\n\n });\n\n chunk.set_loading_to_read_buffer(&registration)?;\n\n\n\n // Get queryable chunk\n\n let db_chunk = DbChunk::snapshot(&*chunk);\n\n\n\n // Drop locks\n", "file_path": "server/src/db/lifecycle/load.rs", "rank": 14, "score": 207214.01491646597 }, { "content": "pub fn drop_partition(\n\n partition: LifecycleWriteGuard<'_, Partition, LockableCatalogPartition>,\n\n) -> Result<(\n\n TaskTracker<Job>,\n\n TrackedFuture<impl Future<Output = Result<()>> + Send>,\n\n)> {\n\n let db = Arc::clone(&partition.data().db);\n\n let preserved_catalog = Arc::clone(&db.preserved_catalog);\n\n let table_name = partition.table_name().to_string();\n\n let partition_key = partition.key().to_string();\n\n let lifecycle_persist = db.rules().lifecycle_rules.persist;\n\n\n\n let (tracker, registration) = db.jobs.register(Job::DropPartition {\n\n partition: partition.addr().clone(),\n\n });\n\n\n\n // Get locks for all chunks.\n\n //\n\n // Note that deadlocks cannot occur here for the following reasons:\n\n //\n", "file_path": "server/src/db/lifecycle/drop.rs", "rank": 15, "score": 207214.01491646597 }, { "content": "pub fn make_server(\n\n server: Arc<Server>,\n\n) -> delete_service_server::DeleteServiceServer<impl delete_service_server::DeleteService> {\n\n 
delete_service_server::DeleteServiceServer::new(DeleteService { server })\n\n}\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/delete.rs", "rank": 16, "score": 202469.95080457255 }, { "content": "pub fn make_server(\n\n application: Arc<ApplicationState>,\n\n server: Arc<Server>,\n\n) -> management_service_server::ManagementServiceServer<\n\n impl management_service_server::ManagementService,\n\n> {\n\n management_service_server::ManagementServiceServer::new(ManagementService {\n\n application,\n\n server,\n\n })\n\n}\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/management.rs", "rank": 17, "score": 202469.95080457255 }, { "content": "pub fn make_server(\n\n server: Arc<Server>,\n\n serving_readiness: ServingReadiness,\n\n) -> deployment_service_server::DeploymentServiceServer<\n\n impl deployment_service_server::DeploymentService,\n\n> {\n\n deployment_service_server::DeploymentServiceServer::new(DeploymentService {\n\n server,\n\n serving_readiness,\n\n })\n\n}\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/deployment.rs", "rank": 18, "score": 202469.95080457255 }, { "content": "/// Instantiate the write service\n\npub fn make_server(\n\n jobs: Arc<JobRegistry>,\n\n) -> operations_server::OperationsServer<impl operations_server::Operations> {\n\n operations_server::OperationsServer::new(OperationsService { jobs })\n\n}\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/operations.rs", "rank": 19, "score": 202469.95080457255 }, { "content": "/// Return the path that the database stores data for all databases:\n\n/// `<server_path>/dbs`\n\npub fn data_dir(server_path: impl AsRef<Path>) -> PathBuf {\n\n // Assume data layout is <dir>/dbs/<uuid>\n\n let mut data_dir: PathBuf = server_path.as_ref().into();\n\n data_dir.push(\"dbs\");\n\n data_dir\n\n}\n\n\n", "file_path": "influxdb_iox/tests/end_to_end_cases/scenario.rs", "rank": 20, "score": 202029.4563347137 }, { 
"content": "/// Creates new test server ID\n\npub fn make_server_id() -> ServerId {\n\n ServerId::new(NonZeroU32::new(1).unwrap())\n\n}\n\n\n\n/// Creates new in-memory database iox_object_store for testing.\n\npub async fn make_iox_object_store() -> Arc<IoxObjectStore> {\n\n Arc::new(\n\n IoxObjectStore::create(Arc::new(ObjectStore::new_in_memory()), Uuid::new_v4())\n\n .await\n\n .unwrap(),\n\n )\n\n}\n\n\n", "file_path": "parquet_file/src/test_utils.rs", "rank": 21, "score": 200840.31587765517 }, { "content": "pub fn make_server(\n\n server: Arc<Server>,\n\n) -> write_service_server::WriteServiceServer<impl write_service_server::WriteService> {\n\n write_service_server::WriteServiceServer::new(PBWriteService { server })\n\n}\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/write_pb.rs", "rank": 22, "score": 199152.49269861734 }, { "content": "/// Encode server information that was encoded using `encode_database_owner_info` to compare\n\n/// with the currently-running server\n\npub fn decode_database_owner_info(\n\n bytes: prost::bytes::Bytes,\n\n) -> Result<management::OwnerInfo, DecodeError> {\n\n prost::Message::decode(bytes)\n\n}\n", "file_path": "generated_types/src/server_config.rs", "rank": 23, "score": 198472.36391168245 }, { "content": "/// Encode server information to be serialized into a database's object store directory and used to\n\n/// identify that database's owning server\n\npub fn encode_database_owner_info(\n\n owner_info: &management::OwnerInfo,\n\n bytes: &mut prost::bytes::BytesMut,\n\n) -> Result<(), EncodeError> {\n\n prost::Message::encode(owner_info, bytes)\n\n}\n\n\n", "file_path": "generated_types/src/server_config.rs", "rank": 24, "score": 198472.36391168245 }, { "content": "pub fn unload_read_buffer_chunk(\n\n mut chunk: LifecycleWriteGuard<'_, CatalogChunk, LockableCatalogChunk>,\n\n) -> Result<Arc<DbChunk>> {\n\n debug!(chunk=%chunk.addr(), \"unloading chunk from read buffer\");\n\n\n\n 
chunk.set_unloaded_from_read_buffer()?;\n\n\n\n debug!(chunk=%chunk.addr(), \"chunk marked UNLOADED from read buffer\");\n\n\n\n Ok(DbChunk::snapshot(&chunk))\n\n}\n", "file_path": "server/src/db/lifecycle/unload.rs", "rank": 25, "score": 198458.80012803257 }, { "content": "pub fn wildcard_router_config(\n\n db_name: &str,\n\n write_buffer_path: &Path,\n\n) -> influxdb_iox_client::router::generated_types::Router {\n\n use influxdb_iox_client::router::generated_types::{\n\n write_sink::Sink, Matcher, MatcherToShard, Router, ShardConfig, WriteSink, WriteSinkSet,\n\n };\n\n\n\n let write_buffer_connection = WriteBufferConnection {\n\n r#type: \"file\".to_string(),\n\n connection: write_buffer_path.display().to_string(),\n\n creation_config: Some(WriteBufferCreationConfig {\n\n n_sequencers: 1,\n\n ..Default::default()\n\n }),\n\n ..Default::default()\n\n };\n\n Router {\n\n name: db_name.to_string(),\n\n write_sharder: Some(ShardConfig {\n", "file_path": "influxdb_iox/tests/end_to_end_cases/scenario.rs", "rank": 26, "score": 196355.48786560656 }, { "content": "struct RouterService {\n\n server: Arc<RouterServer>,\n\n}\n\n\n\n#[tonic::async_trait]\n\nimpl router_service_server::RouterService for RouterService {\n\n async fn get_router(\n\n &self,\n\n request: Request<GetRouterRequest>,\n\n ) -> Result<Response<GetRouterResponse>, Status> {\n\n let GetRouterRequest { router_name } = request.into_inner();\n\n let router = self\n\n .server\n\n .router(&router_name)\n\n .ok_or_else(|| NotFound::new(ResourceType::Router, router_name))?;\n\n Ok(Response::new(GetRouterResponse {\n\n router: Some(router.config().clone().into()),\n\n }))\n\n }\n\n\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/router/rpc/router.rs", "rank": 27, "score": 189851.3290429731 }, { "content": "#[derive(Debug)]\n\nstruct VariantGrpcRemote {\n\n db_name: String,\n\n server_id: ServerId,\n\n resolver: Arc<Resolver>,\n\n connection_pool: Arc<ConnectionPool>,\n\n}\n\n\n\nimpl 
VariantGrpcRemote {\n\n fn new(\n\n db_name: String,\n\n server_id: ServerId,\n\n resolver: Arc<Resolver>,\n\n connection_pool: Arc<ConnectionPool>,\n\n ) -> Self {\n\n Self {\n\n db_name,\n\n server_id,\n\n resolver,\n\n connection_pool,\n\n }\n", "file_path": "router/src/write_sink.rs", "rank": 28, "score": 189215.24736044416 }, { "content": "#[derive(Debug)]\n\nstruct VariantWriteBuffer {\n\n db_name: String,\n\n write_buffer_cfg: WriteBufferConnection,\n\n connection_pool: Arc<ConnectionPool>,\n\n}\n\n\n\nimpl VariantWriteBuffer {\n\n fn new(\n\n db_name: String,\n\n write_buffer_cfg: WriteBufferConnection,\n\n connection_pool: Arc<ConnectionPool>,\n\n ) -> Self {\n\n Self {\n\n db_name,\n\n write_buffer_cfg,\n\n connection_pool,\n\n }\n\n }\n\n\n\n async fn write(&self, operation: &DmlOperation) -> Result<(), Error> {\n", "file_path": "router/src/write_sink.rs", "rank": 29, "score": 189215.24736044416 }, { "content": "/// Return a reference to the specified scenario\n\npub fn get_db_setup(setup_name: impl AsRef<str>) -> Option<Arc<dyn DbSetup>> {\n\n get_all_setups().get(setup_name.as_ref()).map(Arc::clone)\n\n}\n\n\n\n/// No data\n\n#[derive(Debug)]\n\npub struct NoData {}\n\n#[async_trait]\n\nimpl DbSetup for NoData {\n\n async fn make(&self) -> Vec<DbScenario> {\n\n let partition_key = \"1970-01-01T00\";\n\n let table_name = \"cpu\";\n\n\n\n // Scenario 1: No data in the DB yet\n\n //\n\n let db = make_db().await.db;\n\n let scenario1 = DbScenario {\n\n scenario_name: \"New, Empty Database\".into(),\n\n db,\n\n };\n", "file_path": "query_tests/src/scenarios.rs", "rank": 30, "score": 188801.08549938927 }, { "content": "/// Return a random string suitable for use as a database name\n\npub fn rand_name() -> String {\n\n thread_rng()\n\n .sample_iter(&Alphanumeric)\n\n .take(10)\n\n .map(char::from)\n\n .collect()\n\n}\n\n\n", "file_path": "influxdb_iox/tests/end_to_end_cases/scenario.rs", "rank": 31, "score": 187636.15502771136 }, { "content": "pub fn 
make_server<T: DatabaseStore + 'static>(db_store: Arc<T>) -> StorageServer<impl Storage> {\n\n StorageServer::new(StorageService { db_store })\n\n}\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/storage.rs", "rank": 32, "score": 187077.97669518754 }, { "content": "fn fmt_write_errors(errors: &BTreeMap<ShardId, WriteErrorShard>) -> String {\n\n const MAX_ERRORS: usize = 2;\n\n\n\n let mut out = String::new();\n\n\n\n for (shard_id, error) in errors.iter().take(MAX_ERRORS) {\n\n if !out.is_empty() {\n\n write!(&mut out, \", \").expect(\"write to string failed?!\");\n\n }\n\n write!(&mut out, \"{} => \\\"{}\\\"\", shard_id, error).expect(\"write to string failed?!\");\n\n }\n\n\n\n if errors.len() > MAX_ERRORS {\n\n write!(&mut out, \"...\").expect(\"write to string failed?!\");\n\n }\n\n\n\n out\n\n}\n\n\n\n/// Router for a single database.\n", "file_path": "router/src/router.rs", "rank": 33, "score": 186823.08748444915 }, { "content": "pub fn snapshot_mb(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"snapshot_mb\");\n\n for count in &[1, 2, 3, 4, 5] {\n\n let chunk = chunk(*count as _);\n\n group.bench_function(BenchmarkId::from_parameter(count), |b| {\n\n b.iter(|| snapshot_chunk(&chunk));\n\n });\n\n }\n\n group.finish();\n\n}\n\n\n\ncriterion_group!(benches, snapshot_mb);\n\ncriterion_main!(benches);\n", "file_path": "server_benchmarks/benches/snapshot.rs", "rank": 34, "score": 182030.54122672422 }, { "content": "/// map common [`server::Error`] errors to the appropriate tonic Status\n\npub fn default_server_error_handler(error: server::Error) -> tonic::Status {\n\n use server::{DatabaseNameFromRulesError, Error};\n\n\n\n match error {\n\n Error::IdNotSet => PreconditionViolation::ServerIdNotSet.into(),\n\n Error::DatabaseNotInitialized { db_name } => PreconditionViolation::DatabaseInvalidState(\n\n format!(\"Database ({}) is not yet initialized\", db_name),\n\n )\n\n .into(),\n\n Error::DatabaseAlreadyExists { db_name } 
=> {\n\n AlreadyExists::new(ResourceType::Database, db_name).into()\n\n }\n\n Error::ServerNotInitialized { server_id } => {\n\n PreconditionViolation::ServerInvalidState(format!(\n\n \"Server ID is set ({}) but server is not yet initialized (e.g. DBs and remotes \\\n\n are not loaded). Server is not yet ready to read/write data.\",\n\n server_id\n\n ))\n\n .into()\n\n }\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/error.rs", "rank": 35, "score": 180534.6384007769 }, { "content": "/// Returns a struct that can format gRPC predicate (aka `RPCPredicates`) for\n\n/// Display\n\n///\n\n/// For example:\n\n/// let pred = RPCPredicate (...);\n\n/// println!(\"The predicate is {:?}\", loggable_predicate(pred));\n\npub fn displayable_predicate(pred: Option<&RPCPredicate>) -> impl fmt::Display + '_ {\n\n struct Wrapper<'a>(Option<&'a RPCPredicate>);\n\n\n\n impl<'a> fmt::Display for Wrapper<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self.0 {\n\n None => write!(f, \"<NONE>\"),\n\n Some(pred) => format_predicate(pred, f),\n\n }\n\n }\n\n }\n\n Wrapper(pred)\n\n}\n\n\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/storage/expr.rs", "rank": 36, "score": 180412.53137947392 }, { "content": "/// Return the path that the database with <uuid> stores its data:\n\n/// `<server_path>/dbs/<uuid>`\n\npub fn db_data_dir(server_path: impl AsRef<Path>, db_uuid: Uuid) -> PathBuf {\n\n // Assume data layout is <dir>/dbs/<uuid>\n\n let mut data_dir = data_dir(server_path);\n\n data_dir.push(db_uuid.to_string());\n\n data_dir\n\n}\n\n\n\npub struct DatabaseBuilder {\n\n name: String,\n\n partition_template: PartitionTemplate,\n\n lifecycle_rules: LifecycleRules,\n\n write_buffer: Option<WriteBufferConnection>,\n\n}\n\n\n\nimpl DatabaseBuilder {\n\n pub fn new(name: impl Into<String>) -> Self {\n\n Self {\n\n name: name.into(),\n\n partition_template: PartitionTemplate {\n\n parts: 
vec![partition_template::Part {\n", "file_path": "influxdb_iox/tests/end_to_end_cases/scenario.rs", "rank": 37, "score": 180407.24257596204 }, { "content": "/// return number of OS chunks of a given table of a partition\n\npub fn count_os_table_chunks(db: &Db, table_name: &str, partition_key: &str) -> usize {\n\n let mut table_names = BTreeSet::new();\n\n table_names.insert(table_name.to_string());\n\n count_os_tables_chunks(\n\n db,\n\n TableNameFilter::NamedTables(&table_names),\n\n partition_key,\n\n )\n\n}\n", "file_path": "server/src/utils.rs", "rank": 38, "score": 178836.47953614267 }, { "content": "/// return number of MUB chunks of a given table of a partition\n\npub fn count_mub_table_chunks(db: &Db, table_name: &str, partition_key: &str) -> usize {\n\n let mut table_names = BTreeSet::new();\n\n table_names.insert(table_name.to_string());\n\n count_mub_tables_chunks(\n\n db,\n\n TableNameFilter::NamedTables(&table_names),\n\n partition_key,\n\n )\n\n}\n", "file_path": "server/src/utils.rs", "rank": 39, "score": 178836.47953614267 }, { "content": "/// return number of RUB chunks of a given table of a partition\n\npub fn count_rub_table_chunks(db: &Db, table_name: &str, partition_key: &str) -> usize {\n\n let mut table_names = BTreeSet::new();\n\n table_names.insert(table_name.to_string());\n\n count_rub_tables_chunks(\n\n db,\n\n TableNameFilter::NamedTables(&table_names),\n\n partition_key,\n\n )\n\n}\n", "file_path": "server/src/utils.rs", "rank": 40, "score": 178836.47953614267 }, { "content": "fn get_database_name(input: &impl GrpcInputs) -> Result<DatabaseName<'static>, Status> {\n\n org_and_bucket_to_database(input.org_id()?.to_string(), &input.bucket_name()?)\n\n .map_err(|e| Status::internal(e.to_string()))\n\n}\n\n\n\n// The following code implements the business logic of the requests as\n\n// methods that return Results with module specific Errors (and thus\n\n// can use ?, etc). 
The trait implemententations then handle mapping\n\n// to the appropriate tonic Status\n\n\n\n/// Gathers all measurement names that have data in the specified\n\n/// (optional) range\n\nasync fn measurement_name_impl<D>(\n\n db: Arc<D>,\n\n db_name: DatabaseName<'static>,\n\n range: Option<TimestampRange>,\n\n rpc_predicate: Option<Predicate>,\n\n span_ctx: Option<SpanContext>,\n\n) -> Result<StringValuesResponse>\n\nwhere\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/storage/service.rs", "rank": 41, "score": 177777.5874500099 }, { "content": "/// Convert Series and Groups ` into a form suitable for gRPC transport:\n\n///\n\n/// ```\n\n/// (GroupFrame) potentially\n\n///\n\n/// (SeriesFrame for field1)\n\n/// (*Points for field1)\n\n/// (SeriesFrame for field12)\n\n/// (*Points for field1)\n\n/// (....)\n\n/// (SeriesFrame for field1)\n\n/// (*Points for field1)\n\n/// (SeriesFrame for field12)\n\n/// (*Points for field1)\n\n/// (....)\n\n/// ```\n\n///\n\n/// The specific type of (*Points) depends on the type of field column.\n\n///\n\n/// If `tag_key_binary_format` is `true` then tag keys for measurements and\n\n/// fields are emitted in the canonical TSM format represented by `\\x00` and\n\n/// `\\xff` respectively.\n\npub fn series_or_groups_to_read_response(\n\n series_or_groups: Vec<Either>,\n\n tag_key_binary_format: bool,\n\n) -> ReadResponse {\n\n let mut frames = vec![];\n\n\n\n for series_or_group in series_or_groups {\n\n match series_or_group {\n\n Either::Series(series) => {\n\n series_to_frames(&mut frames, series, tag_key_binary_format);\n\n }\n\n Either::Group(group) => {\n\n frames.push(group_to_frame(group));\n\n }\n\n }\n\n }\n\n\n\n trace!(frames=%DisplayableFrames::new(&frames), \"Response gRPC frames\");\n\n ReadResponse { frames }\n\n}\n\n\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/storage/data.rs", "rank": 42, "score": 177603.7228743126 }, { "content": "/// Builds 
GroupByAndAggregate::Windows\n\npub fn make_read_window_aggregate(\n\n aggregates: Vec<RPCAggregate>,\n\n window_every: i64,\n\n offset: i64,\n\n window: Option<RPCWindow>,\n\n) -> Result<GroupByAndAggregate> {\n\n // only support single aggregate for now\n\n if aggregates.len() != 1 {\n\n return AggregateNotSingleton { aggregates }.fail();\n\n }\n\n let agg = convert_aggregate(aggregates.into_iter().next())?;\n\n\n\n // Translation from these parameters to window bound\n\n // is defined in the Go code:\n\n // https://github.com/influxdata/idpe/pull/8636/files#diff-94c0a8d7e427e2d7abe49f01dced50ad776b65ec8f2c8fb2a2c8b90e2e377ed5R82\n\n //\n\n // Quoting:\n\n //\n\n // Window and the WindowEvery/Offset should be mutually\n\n // exclusive. If you set either the WindowEvery or Offset with\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/storage/expr.rs", "rank": 43, "score": 177603.7228743126 }, { "content": "pub fn make_read_group_aggregate(\n\n aggregate: Option<RPCAggregate>,\n\n group: RPCGroup,\n\n group_keys: Vec<String>,\n\n) -> Result<GroupByAndAggregate> {\n\n // validate Group setting\n\n match group {\n\n // Group:None is invalid if grouping keys are specified\n\n RPCGroup::None if !group_keys.is_empty() => InvalidGroupNone {\n\n num_group_keys: group_keys.len(),\n\n }\n\n .fail(),\n\n _ => Ok(()),\n\n }?;\n\n\n\n let gby_agg = GroupByAndAggregate::Columns {\n\n agg: convert_aggregate(aggregate)?,\n\n group_columns: group_keys,\n\n };\n\n Ok(gby_agg)\n\n}\n\n\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/storage/expr.rs", "rank": 44, "score": 177603.7228743126 }, { "content": "/// Translates FieldList into the gRPC format\n\npub fn fieldlist_to_measurement_fields_response(\n\n fieldlist: FieldList,\n\n) -> Result<MeasurementFieldsResponse> {\n\n let fields = fieldlist\n\n .fields\n\n .into_iter()\n\n .map(|f| {\n\n Ok(MessageField {\n\n key: f.name,\n\n r#type: 
datatype_to_measurement_field_enum(&f.data_type)? as i32,\n\n timestamp: f.last_timestamp,\n\n })\n\n })\n\n .collect::<Result<Vec<_>>>()?;\n\n\n\n Ok(MeasurementFieldsResponse { fields })\n\n}\n\n\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/storage/data.rs", "rank": 45, "score": 177603.7228743126 }, { "content": "// Run all benchmarks for `read_group`.\n\npub fn benchmark_read_group(c: &mut Criterion) {\n\n let scenarios = Runtime::new().unwrap().block_on(setup_scenarios());\n\n execute_benchmark_group(c, scenarios.as_slice());\n\n}\n\n\n", "file_path": "server_benchmarks/benches/read_group.rs", "rank": 47, "score": 173800.9619410554 }, { "content": "/// Returns the number of mutable buffer chunks in the specified database\n\npub fn count_mutable_buffer_chunks(db: &Db) -> usize {\n\n chunk_summary_iter(db)\n\n .filter(|s| {\n\n s.storage == ChunkStorage::OpenMutableBuffer\n\n || s.storage == ChunkStorage::ClosedMutableBuffer\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "server/src/utils.rs", "rank": 48, "score": 173800.9619410554 }, { "content": "/// Returns the number of object store chunks in the specified database\n\npub fn count_object_store_chunks(db: &Db) -> usize {\n\n chunk_summary_iter(db)\n\n .filter(|s| {\n\n s.storage == ChunkStorage::ReadBufferAndObjectStore\n\n || s.storage == ChunkStorage::ObjectStoreOnly\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "server/src/utils.rs", "rank": 49, "score": 173800.9619410554 }, { "content": "// Run all benchmarks for `read_filter`.\n\npub fn benchmark_read_filter(c: &mut Criterion) {\n\n let scenarios = Runtime::new().unwrap().block_on(setup_scenarios());\n\n execute_benchmark_group(c, scenarios.as_slice());\n\n}\n\n\n", "file_path": "server_benchmarks/benches/read_filter.rs", "rank": 50, "score": 173800.9619410554 }, { "content": "// Run all benchmarks for `tag_values`.\n\npub fn benchmark_tag_values(c: &mut Criterion) {\n\n let scenarios = 
Runtime::new().unwrap().block_on(setup_scenarios());\n\n\n\n execute_benchmark_group(c, scenarios.as_slice());\n\n}\n\n\n", "file_path": "server_benchmarks/benches/tag_values.rs", "rank": 51, "score": 173800.9619410554 }, { "content": "/// Returns the number of read buffer chunks in the specified database\n\npub fn count_read_buffer_chunks(db: &Db) -> usize {\n\n chunk_summary_iter(db)\n\n .filter(|s| {\n\n s.storage == ChunkStorage::ReadBuffer\n\n || s.storage == ChunkStorage::ReadBufferAndObjectStore\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "server/src/utils.rs", "rank": 52, "score": 173800.9619410554 }, { "content": "/// Stupid hack to fit the `Box<dyn ...>` in `WriteBufferError` into an `Arc`\n\nstruct EWrapper(WriteBufferError);\n\n\n\nimpl std::fmt::Debug for EWrapper {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n self.0.fmt(f)\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for EWrapper {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n self.0.fmt(f)\n\n }\n\n}\n\n\n\nimpl std::error::Error for EWrapper {}\n\n\n\n/// Connection pool for the entire routing server.\n\n///\n\n/// This avoids:\n\n/// 1. 
That every [`Router`](crate::router::Router) uses their own connections\n", "file_path": "router/src/connection_pool.rs", "rank": 53, "score": 173573.10872067098 }, { "content": "pub fn count_rub_tables_chunks(db: &Db, tables: TableNameFilter<'_>, partition_key: &str) -> usize {\n\n db.partition_tables_chunk_summaries(tables, partition_key)\n\n .into_iter()\n\n .filter_map(|chunk| match chunk.storage {\n\n ChunkStorage::ReadBuffer | ChunkStorage::ReadBufferAndObjectStore => Some(1),\n\n _ => None,\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "server/src/utils.rs", "rank": 54, "score": 173088.1031608637 }, { "content": "pub fn count_mub_tables_chunks(db: &Db, tables: TableNameFilter<'_>, partition_key: &str) -> usize {\n\n db.partition_tables_chunk_summaries(tables, partition_key)\n\n .into_iter()\n\n .filter_map(|chunk| match chunk.storage {\n\n ChunkStorage::OpenMutableBuffer | ChunkStorage::ClosedMutableBuffer => Some(1),\n\n _ => None,\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "server/src/utils.rs", "rank": 55, "score": 173088.1031608637 }, { "content": "pub fn count_os_tables_chunks(db: &Db, tables: TableNameFilter<'_>, partition_key: &str) -> usize {\n\n db.partition_tables_chunk_summaries(tables, partition_key)\n\n .into_iter()\n\n .filter_map(|chunk| match chunk.storage {\n\n ChunkStorage::ObjectStoreOnly | ChunkStorage::ReadBufferAndObjectStore => Some(1),\n\n _ => None,\n\n })\n\n .count()\n\n}\n\n\n\nstatic PANIC_DATABASE: once_cell::race::OnceBox<parking_lot::Mutex<hashbrown::HashSet<String>>> =\n\n once_cell::race::OnceBox::new();\n\n\n", "file_path": "server/src/utils.rs", "rank": 56, "score": 173088.1031608637 }, { "content": "pub trait Name {\n\n /// Returns a user understandable identifier of this thing\n\n fn name(&self) -> Cow<'_, str>;\n\n}\n", "file_path": "packers/src/lib.rs", "rank": 57, "score": 172094.70660341578 }, { "content": "pub fn tmp_file() -> Result<tempfile::NamedTempFile> {\n\n let _ = dotenv::dotenv();\n\n\n\n let root = 
env::var_os(\"TEST_INFLUXDB_IOX_DB_DIR\").unwrap_or_else(|| env::temp_dir().into());\n\n\n\n Ok(tempfile::Builder::new()\n\n .prefix(\"influxdb_iox\")\n\n .tempfile_in(root)?)\n\n}\n\n\n", "file_path": "test_helpers/src/lib.rs", "rank": 58, "score": 171938.1137754261 }, { "content": "#[tonic::async_trait]\n\npub trait ConnectionManager<T, E = Error> {\n\n async fn remote_server(&self, connect: String) -> Result<T, E>;\n\n}\n\n\n\n/// A Caching ConnectionManager implementation.\n\n///\n\n/// It caches connected gRPC clients of type T (not operations performed over the connection).\n\n/// Each cache access returns a clone of the tonic gRPC client. Cloning clients is cheap\n\n/// and allows them to communicate through the same channel, see\n\n/// <https://docs.rs/tonic/0.4.2/tonic/client/index.html#concurrent-usage>\n\n///\n\n/// The `CachingConnectionManager` implements a blocking cache-loading mechanism, that is, it guarantees that once a\n\n/// connection request for a given connection string is in flight, subsequent cache access requests\n\n/// get enqueued and wait for the first connection to complete instead of spawning each an\n\n/// outstanding connection request and thus suffer from the thundering herd problem.\n\n///\n\n/// It also supports an optional expiry mechanism based on TTL, see [`CachingConnectionManager::builder()`].\n\n///\n\n/// # Examples\n\n///\n", "file_path": "grpc-router/src/connection_manager.rs", "rank": 59, "score": 170527.70180779323 }, { "content": "/// Convert a [`DmlWrite`] to a [`DatabaseBatch`]\n\npub fn encode_write(db_name: &str, write: &DmlWrite) -> DatabaseBatch {\n\n DatabaseBatch {\n\n database_name: db_name.to_string(),\n\n table_batches: write\n\n .tables()\n\n .map(|(table_name, batch)| encode_batch(table_name, batch))\n\n .collect(),\n\n }\n\n}\n\n\n", "file_path": "mutable_batch_pb/src/encode.rs", "rank": 60, "score": 168880.78158180654 }, { "content": "/// Decode a message payload\n\npub fn decode(\n\n data: &[u8],\n\n 
headers: IoxHeaders,\n\n sequence: Sequence,\n\n producer_ts: Time,\n\n bytes_read: usize,\n\n) -> Result<DmlOperation, WriteBufferError> {\n\n match headers.content_type {\n\n ContentType::Protobuf => {\n\n let meta = DmlMeta::sequenced(sequence, producer_ts, headers.span_context, bytes_read);\n\n\n\n let payload: WriteBufferPayload = prost::Message::decode(data)\n\n .map_err(|e| format!(\"failed to decode WriteBufferPayload: {}\", e))?;\n\n\n\n let payload = payload.payload.ok_or_else(|| \"no payload\".to_string())?;\n\n\n\n match payload {\n\n Payload::Write(write) => {\n\n let tables = decode_database_batch(&write)\n\n .map_err(|e| format!(\"failed to decode database batch: {}\", e))?;\n", "file_path": "write_buffer/src/codec.rs", "rank": 61, "score": 167361.5743938344 }, { "content": "/// Returns the name of the gRPC service S.\n\npub fn service_name<S: NamedService>(_: &S) -> &'static str {\n\n S::NAME\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct RpcBuilderInput {\n\n pub socket: TcpListener,\n\n pub trace_header_parser: TraceHeaderParser,\n\n pub shutdown: CancellationToken,\n\n pub serving_readiness: ServingReadiness,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct RpcBuilder<T> {\n\n pub inner: T,\n\n pub health_reporter: HealthReporter,\n\n pub shutdown: CancellationToken,\n\n pub socket: TcpListener,\n\n pub serving_readiness: ServingReadiness,\n\n}\n", "file_path": "influxdb_iox/src/influxdb_ioxd/rpc.rs", "rank": 62, "score": 167272.45290213832 }, { "content": "/// Returns an iterator over the [`NotFound`] in the provided [`tonic::Status`]\n\npub fn decode_not_found(status: &tonic::Status) -> impl Iterator<Item = NotFound> {\n\n decode_resource_info(status).map(Into::into)\n\n}\n\n\n\n/// A [`PreconditionViolation`] is returned by IOx when the system is in a state that\n\n/// prevents performing the requested operation\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum PreconditionViolation {\n\n /// Server ID not set\n\n ServerIdNotSet,\n\n /// Database is not 
mutable\n\n DatabaseImmutable,\n\n /// Server not in required state for operation\n\n ServerInvalidState(String),\n\n /// Database not in required state for operation\n\n DatabaseInvalidState(String),\n\n /// Partition not in required state for operation\n\n PartitionInvalidState(String),\n\n /// Chunk not in required state for operation\n\n ChunkInvalidState(String),\n", "file_path": "generated_types/src/google.rs", "rank": 63, "score": 167130.02650383263 }, { "content": "/// Returns an iterator over set bit positions in increasing order\n\npub fn iter_set_positions(bytes: &[u8]) -> impl Iterator<Item = usize> + '_ {\n\n iter_set_positions_with_offset(bytes, 0)\n\n}\n\n\n", "file_path": "arrow_util/src/bitset.rs", "rank": 64, "score": 167130.02650383263 }, { "content": "struct DeleteService {\n\n server: Arc<RouterServer>,\n\n}\n\n\n\n#[tonic::async_trait]\n\nimpl delete_service_server::DeleteService for DeleteService {\n\n async fn delete(\n\n &self,\n\n request: tonic::Request<DeleteRequest>,\n\n ) -> Result<tonic::Response<DeleteResponse>, tonic::Status> {\n\n let span_ctx = request.extensions().get().cloned();\n\n let DeleteRequest { payload } = request.into_inner();\n\n let DeletePayload {\n\n db_name,\n\n table_name,\n\n predicate,\n\n } = payload.unwrap_field(\"payload\")?;\n\n let predicate = predicate.required(\"predicate\")?;\n\n\n\n let table_name = NonEmptyString::new(table_name);\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/router/rpc/delete.rs", "rank": 65, "score": 165629.09696175202 }, { "content": "struct RemoteService {\n\n server: Arc<RouterServer>,\n\n}\n\n\n\n#[tonic::async_trait]\n\nimpl remote_service_server::RemoteService for RemoteService {\n\n async fn list_remotes(\n\n &self,\n\n _: Request<ListRemotesRequest>,\n\n ) -> Result<Response<ListRemotesResponse>, Status> {\n\n let remotes = self\n\n .server\n\n .resolver()\n\n .remotes()\n\n .into_iter()\n\n .map(|(id, connection_string)| Remote {\n\n id: id.get_u32(),\n\n 
connection_string,\n\n })\n\n .collect();\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/router/rpc/remote.rs", "rank": 66, "score": 165629.09696175202 }, { "content": "struct DeploymentService {\n\n server: Arc<RouterServer>,\n\n serving_readiness: ServingReadiness,\n\n}\n\n\n\nuse crate::influxdb_ioxd::serving_readiness::ServingReadiness;\n\n\n\n#[tonic::async_trait]\n\nimpl deployment_service_server::DeploymentService for DeploymentService {\n\n async fn get_server_id(\n\n &self,\n\n _: Request<GetServerIdRequest>,\n\n ) -> Result<Response<GetServerIdResponse>, Status> {\n\n match self.server.server_id() {\n\n Some(id) => Ok(Response::new(GetServerIdResponse { id: id.get_u32() })),\n\n None => return Err(NotFound::new(ResourceType::ServerId, Default::default()).into()),\n\n }\n\n }\n\n\n\n async fn update_server_id(\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/router/rpc/deployment.rs", "rank": 67, "score": 165629.09696175202 }, { "content": "/// map common [`db::Error`](server::db::Error) errors to the appropriate tonic Status\n\npub fn default_db_error_handler(error: server::db::Error) -> tonic::Status {\n\n use server::db::Error;\n\n match error {\n\n Error::LifecycleError { source } => PreconditionViolation::ChunkInvalidState(format!(\n\n \"Cannot perform operation due to wrong chunk lifecycle: {}\",\n\n source\n\n ))\n\n .into(),\n\n Error::CannotFlushPartition {\n\n table_name,\n\n partition_key,\n\n } => PreconditionViolation::PartitionInvalidState(format!(\n\n \"Cannot persist partition because it cannot be flushed at the moment: {}:{}\",\n\n table_name, partition_key\n\n ))\n\n .into(),\n\n Error::CatalogError { source } => default_catalog_error_handler(source),\n\n error => {\n\n error!(?error, \"Unexpected error\");\n\n InternalError {}.into()\n\n }\n\n }\n\n}\n\n\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/error.rs", "rank": 68, "score": 165043.8052720595 }, { "content": "/// map common 
[`database::Error`](server::database::Error) errors to the appropriate tonic Status\n\npub fn default_database_error_handler(error: server::database::Error) -> tonic::Status {\n\n use server::database::Error;\n\n match error {\n\n Error::InvalidState { .. } => {\n\n PreconditionViolation::DatabaseInvalidState(error.to_string()).into()\n\n }\n\n Error::RulesNotUpdateable { .. } => {\n\n PreconditionViolation::DatabaseInvalidState(error.to_string()).into()\n\n }\n\n Error::WipePreservedCatalog { source, .. } => {\n\n error!(%source, \"Unexpected error while wiping catalog\");\n\n InternalError {}.into()\n\n }\n\n Error::InvalidStateForRebuild { .. } => {\n\n PreconditionViolation::DatabaseInvalidState(error.to_string()).into()\n\n }\n\n Error::UnexpectedTransitionForRebuild { .. } => {\n\n error!(%error, \"Unexpected error during rebuild catalog\");\n\n InternalError {}.into()\n\n }\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/error.rs", "rank": 69, "score": 165043.8052720595 }, { "content": "/// Return a `Display`able structure that produces a single line, for\n\n/// this node only (does not recurse to children)\n\npub fn one_line(plan: &dyn ExecutionPlan) -> impl fmt::Display + '_ {\n\n struct Wrapper<'a> {\n\n plan: &'a dyn ExecutionPlan,\n\n }\n\n impl<'a> fmt::Display for Wrapper<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let t = DisplayFormatType::Default;\n\n self.plan.fmt_as(t, f)\n\n }\n\n }\n\n\n\n Wrapper { plan }\n\n}\n\n\n\n// TODO maybe also contribute these back upstream to datafusion (make\n\n// as a method on MetricsSet)\n\n\n", "file_path": "query/src/exec/query_tracing.rs", "rank": 70, "score": 163902.06599158843 }, { "content": "/// Configure a connection to container with given name on Microsoft Azure\n\n/// Blob store.\n\n///\n\n/// The credentials `account` and `access_key` must provide access to the\n\n/// store.\n\npub fn new_azure(\n\n account: impl Into<String>,\n\n access_key: impl 
Into<String>,\n\n container_name: impl Into<String>,\n\n) -> Result<MicrosoftAzure> {\n\n let account = account.into();\n\n let access_key = access_key.into();\n\n let http_client: Arc<dyn HttpClient> = Arc::new(reqwest::Client::new());\n\n\n\n let storage_account_client =\n\n StorageAccountClient::new_access_key(Arc::clone(&http_client), &account, &access_key);\n\n\n\n let storage_client = storage_account_client.as_storage_client();\n\n\n\n let container_name = container_name.into();\n\n\n\n let container_client = storage_client.as_container_client(&container_name);\n\n\n\n Ok(MicrosoftAzure {\n\n container_client,\n", "file_path": "object_store/src/azure.rs", "rank": 71, "score": 163891.3268209702 }, { "content": "/// Configure a connection to Google Cloud Storage.\n\npub fn new_gcs(\n\n service_account_path: impl AsRef<std::ffi::OsStr>,\n\n bucket_name: impl Into<String>,\n\n) -> Result<GoogleCloudStorage> {\n\n // The cloud storage crate currently only supports authentication via\n\n // environment variables. 
Set the environment variable explicitly so\n\n // that we can optionally accept command line arguments instead.\n\n env::set_var(\"SERVICE_ACCOUNT\", service_account_path);\n\n Ok(GoogleCloudStorage {\n\n client: Default::default(),\n\n bucket_name: bucket_name.into(),\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::{\n\n tests::{get_nonexistent_object, list_with_delimiter, put_get_delete_list},\n\n Error as ObjectStoreError, ObjectStore, ObjectStoreApi, ObjectStorePath,\n", "file_path": "object_store/src/gcp.rs", "rank": 72, "score": 163886.28886391682 }, { "content": "/// Encodes a [`DmlOperation`] as a protobuf [`WriteBufferPayload`]\n\npub fn encode_operation(\n\n db_name: &str,\n\n operation: &DmlOperation,\n\n buf: &mut Vec<u8>,\n\n) -> Result<(), WriteBufferError> {\n\n let payload = match operation {\n\n DmlOperation::Write(write) => {\n\n let batch = mutable_batch_pb::encode::encode_write(db_name, write);\n\n Payload::Write(batch)\n\n }\n\n DmlOperation::Delete(delete) => Payload::Delete(DeletePayload {\n\n db_name: db_name.to_string(),\n\n table_name: delete\n\n .table_name()\n\n .map(ToString::to_string)\n\n .unwrap_or_default(),\n\n predicate: Some(delete.predicate().clone().into()),\n\n }),\n\n };\n\n\n", "file_path": "write_buffer/src/codec.rs", "rank": 73, "score": 163879.51455970336 }, { "content": "// Build a datafusion physical expression from its logical one\n\npub fn df_physical_expr(\n\n input: &dyn ExecutionPlan,\n\n expr: Expr,\n\n) -> std::result::Result<Arc<dyn PhysicalExpr>, DataFusionError> {\n\n // To create a physical expression for a logical expression we need appropriate\n\n // PhysicalPlanner and ExecutionContextState, however, our given logical expression is very basic\n\n // and any planner or context will work\n\n let physical_planner = DefaultPhysicalPlanner::default();\n\n let ctx_state = datafusion::execution::context::ExecutionContextState::new();\n\n\n\n let input_physical_schema = 
input.schema();\n\n let input_logical_schema: DFSchema = input_physical_schema.as_ref().clone().try_into()?;\n\n\n\n trace!(%expr, \"logical expression\");\n\n trace!(%input_logical_schema, \"input logical schema\");\n\n trace!(%input_physical_schema, \"input physical schema\");\n\n\n\n physical_planner.create_physical_expr(\n\n &expr,\n\n &input_logical_schema,\n\n &input_physical_schema,\n\n &ctx_state,\n\n )\n\n}\n", "file_path": "query/src/util.rs", "rank": 74, "score": 163879.51455970336 }, { "content": "/// gRPC indicates failure via a [special][1] header allowing it to signal an error\n\n/// at the end of an HTTP chunked stream as part of the [response trailer][2]\n\n///\n\n/// [1]: https://grpc.github.io/grpc/core/md_doc_statuscodes.html\n\n/// [2]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Trailer\n\npub fn classify_headers(\n\n headers: Option<&http::header::HeaderMap>,\n\n) -> (Cow<'static, str>, Classification) {\n\n match headers.and_then(|headers| headers.get(\"grpc-status\")) {\n\n Some(header) => {\n\n let value = match header.to_str() {\n\n Ok(value) => value,\n\n Err(_) => return (\"grpc status not string\".into(), Classification::ServerErr),\n\n };\n\n let value: i32 = match value.parse() {\n\n Ok(value) => value,\n\n Err(_) => return (\"grpc status not integer\".into(), Classification::ServerErr),\n\n };\n\n\n\n match value {\n\n 0 => (\"ok\".into(), Classification::Ok),\n\n 1 => (\"cancelled\".into(), Classification::ClientErr),\n\n 2 => (\"unknown\".into(), Classification::ServerErr),\n\n 3 => (\"invalid argument\".into(), Classification::ClientErr),\n\n 4 => (\"deadline exceeded\".into(), Classification::ServerErr),\n", "file_path": "trace_http/src/classify.rs", "rank": 75, "score": 163879.51455970336 }, { "content": "/// Enables debug logging regardless of the value of RUST_LOG\n\n/// environment variable. 
If RUST_LOG isn't specifies, defaults to\n\n/// \"debug\"\n\npub fn start_logging() {\n\n // ensure the global has been initialized\n\n LOG_SETUP.call_once(|| {\n\n // honor any existing RUST_LOG level\n\n if std::env::var(\"RUST_LOG\").is_err() {\n\n std::env::set_var(\"RUST_LOG\", \"debug\");\n\n }\n\n // Configure the logger to write to stderr and install it\n\n let output_stream = std::io::stderr;\n\n\n\n use tracing_subscriber::{prelude::*, EnvFilter};\n\n\n\n tracing_subscriber::registry()\n\n .with(EnvFilter::from_default_env())\n\n .with(tracing_subscriber::fmt::layer().with_writer(output_stream))\n\n .init();\n\n })\n\n}\n\n\n", "file_path": "test_helpers/src/lib.rs", "rank": 76, "score": 163879.51455970336 }, { "content": "/// map common [`server::db::DmlError`](server::db::DmlError) errors to the appropriate tonic Status\n\npub fn default_dml_error_handler(error: server::db::DmlError) -> tonic::Status {\n\n use server::db::DmlError;\n\n\n\n match error {\n\n DmlError::HardLimitReached {} => QuotaFailure {\n\n subject: \"influxdata.com/iox/buffer\".to_string(),\n\n description: \"hard buffer limit reached\".to_string(),\n\n }\n\n .into(),\n\n e => tonic::Status::invalid_argument(e.to_string()),\n\n }\n\n}\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/error.rs", "rank": 77, "score": 162732.00231585515 }, { "content": "/// Shard only based on table name\n\nfn shard_table(table: &str, config: &ShardConfig) -> Option<ShardId> {\n\n for matcher2shard in &config.specific_targets {\n\n if let Some(regex) = &matcher2shard.matcher.table_name_regex {\n\n if regex.is_match(table) {\n\n return Some(matcher2shard.shard);\n\n }\n\n }\n\n }\n\n\n\n if let Some(hash_ring) = &config.hash_ring {\n\n if let Some(id) = hash_ring.shards.find(table) {\n\n return Some(id);\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n\n/// Test utilities\n\npub mod test_util {\n", "file_path": "dml/src/lib.rs", "rank": 78, "score": 161408.10626510036 }, { "content": 
"#[tonic::async_trait]\n\npub trait Router<R, S, C> {\n\n /// For a given request return the routing decision for the call.\n\n async fn route_for(&self, request: &R) -> Result<RoutingDestination<'_, S, C>>;\n\n}\n\n\n\n/// A [`RoutingDestination`] is either a local in-process grpc Service or a remote grpc Client for that service.\n\n///\n\n/// Unfortunately tonic clients and servers don't share any traits, so it's up to\n\n/// you to ensure that C is a client for service S.\n\n#[derive(Debug)]\n\npub enum RoutingDestination<'a, S, C> {\n\n /// Reference to an implementation of a gRPC service trait. This causes the router to\n\n /// transfer control to an in-process implementation of a service, effectively zero cost routing for\n\n /// a local service.\n\n Local(&'a S),\n\n /// Routing to a remote service via a gRPC client instance connected to the remote endpoint.\n\n Remote(C),\n\n}\n\n\n\n/// Needs to be public because it's used by the [`grpc_router`] macro.\n\npub type PinnedStream<T> = Pin<Box<dyn Stream<Item = Result<T, tonic::Status>> + Send + Sync>>;\n\n\n", "file_path": "grpc-router/src/router.rs", "rank": 79, "score": 160889.3335528034 }, { "content": "/// Returns an iterator over the [`FieldViolation`] in the provided [`tonic::Status`]\n\npub fn decode_field_violation(status: &tonic::Status) -> impl Iterator<Item = FieldViolation> {\n\n get_details(status)\n\n .filter(|details| details.type_url == \"type.googleapis.com/google.rpc.BadRequest\")\n\n .flat_map(|details| rpc::BadRequest::decode(details.value).ok())\n\n .flat_map(|bad_request| bad_request.field_violations)\n\n .map(Into::into)\n\n}\n\n\n\n/// An internal error occurred, no context is provided to the client\n\n///\n\n/// Should be reserved for when a fundamental invariant of the system has been broken\n\n#[derive(Debug, Default, Clone)]\n\npub struct InternalError {}\n\n\n\nimpl From<InternalError> for tonic::Status {\n\n fn from(_: InternalError) -> Self {\n\n 
tonic::Status::new(tonic::Code::Internal, \"Internal Error\")\n\n }\n\n}\n\n\n", "file_path": "generated_types/src/google.rs", "rank": 80, "score": 160840.34066419376 }, { "content": "/// Returns an iterator over the [`AlreadyExists`] in the provided [`tonic::Status`]\n\npub fn decode_already_exists(status: &tonic::Status) -> impl Iterator<Item = AlreadyExists> {\n\n decode_resource_info(status).map(Into::into)\n\n}\n\n\n\n/// IOx returns [`NotFound`] when it is unable to perform an operation on a resource\n\n/// because it doesn't exist on the server\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct NotFound {\n\n pub resource_type: ResourceType,\n\n pub resource_name: String,\n\n pub owner: String,\n\n pub description: String,\n\n}\n\n\n\nimpl NotFound {\n\n pub fn new(resource_type: ResourceType, resource_name: String) -> Self {\n\n let description = format!(\"Resource {}/{} not found\", resource_type, resource_name);\n\n\n\n Self {\n\n resource_type,\n", "file_path": "generated_types/src/google.rs", "rank": 81, "score": 160840.34066419376 }, { "content": "/// Returns the pk in arrow's expression used for data sorting\n\npub fn arrow_pk_sort_exprs(\n\n key_columns: Vec<&str>,\n\n input_schema: &ArrowSchema,\n\n) -> Vec<PhysicalSortExpr> {\n\n let mut sort_exprs = vec![];\n\n for key in key_columns {\n\n let expr = physical_col(key, input_schema).expect(\"pk in schema\");\n\n sort_exprs.push(PhysicalSortExpr {\n\n expr,\n\n options: SortOptions {\n\n descending: false,\n\n nulls_first: false,\n\n },\n\n });\n\n }\n\n\n\n sort_exprs\n\n}\n\n\n", "file_path": "query/src/util.rs", "rank": 82, "score": 160629.8372719104 }, { "content": "/// Parse and convert the delete grpc API into ParseDeletePredicate to send to server\n\npub fn parse_delete_predicate(\n\n start_time: &str,\n\n stop_time: &str,\n\n predicate: &str,\n\n) -> Result<DeletePredicate> {\n\n // parse and check time range\n\n let (start_time, stop_time) = parse_time_range(start_time, 
stop_time)?;\n\n\n\n // Parse the predicate\n\n let delete_exprs = parse_predicate(predicate)?;\n\n\n\n Ok(DeletePredicate {\n\n range: TimestampRange {\n\n start: start_time,\n\n end: stop_time,\n\n },\n\n exprs: delete_exprs,\n\n })\n\n}\n\n\n", "file_path": "predicate/src/delete_predicate.rs", "rank": 83, "score": 160629.68002333844 }, { "content": "pub fn arrow_sort_key_exprs(\n\n sort_key: &SortKey<'_>,\n\n input_schema: &ArrowSchema,\n\n) -> Vec<PhysicalSortExpr> {\n\n let mut sort_exprs = vec![];\n\n for (key, options) in sort_key.iter() {\n\n let expr = physical_col(key, input_schema).expect(\"sort key column in schema\");\n\n sort_exprs.push(PhysicalSortExpr {\n\n expr,\n\n options: SortOptions {\n\n descending: options.descending,\n\n nulls_first: options.nulls_first,\n\n },\n\n });\n\n }\n\n\n\n sort_exprs\n\n}\n\n\n", "file_path": "query/src/util.rs", "rank": 84, "score": 160623.29803209548 }, { "content": "/// Returns an iterator over the [`PreconditionViolation`] in the provided [`tonic::Status`]\n\npub fn decode_precondition_violation(\n\n status: &tonic::Status,\n\n) -> impl Iterator<Item = PreconditionViolation> {\n\n get_details(status)\n\n .filter(|details| details.type_url == \"type.googleapis.com/google.rpc.PreconditionFailure\")\n\n .flat_map(|details| rpc::PreconditionFailure::decode(details.value).ok())\n\n .flat_map(|failure| failure.violations)\n\n .map(Into::into)\n\n}\n\n\n", "file_path": "generated_types/src/google.rs", "rank": 85, "score": 160623.29803209548 }, { "content": "/// Writes the [`ParsedLine`] to the [`MutableBatch`]\n\npub fn write_line(\n\n writer: &mut Writer<'_>,\n\n line: &ParsedLine<'_>,\n\n default_time: i64,\n\n) -> mutable_batch::writer::Result<()> {\n\n for (tag_key, tag_value) in line.series.tag_set.iter().flatten() {\n\n writer.write_tag(tag_key.as_str(), None, std::iter::once(tag_value.as_str()))?\n\n }\n\n\n\n for (field_key, field_value) in &line.field_set {\n\n match field_value {\n\n FieldValue::I64(value) 
=> {\n\n writer.write_i64(field_key.as_str(), None, std::iter::once(*value))?;\n\n }\n\n FieldValue::U64(value) => {\n\n writer.write_u64(field_key.as_str(), None, std::iter::once(*value))?;\n\n }\n\n FieldValue::F64(value) => {\n\n writer.write_f64(field_key.as_str(), None, std::iter::once(*value))?;\n\n }\n", "file_path": "mutable_batch_lp/src/lib.rs", "rank": 86, "score": 160623.29803209548 }, { "content": "/// This is a copied version of nom's `recognize` that runs the parser\n\n/// **and** returns the entire matched input.\n\npub fn parse_and_recognize<\n\n I: Clone + nom::Offset + nom::Slice<std::ops::RangeTo<usize>>,\n\n O,\n\n E: nom::error::ParseError<I>,\n\n F,\n\n>(\n\n mut parser: F,\n\n) -> impl FnMut(I) -> IResult<I, (I, O), E>\n\nwhere\n\n F: FnMut(I) -> IResult<I, O, E>,\n\n{\n\n move |input: I| {\n\n let i = input.clone();\n\n match parser(i) {\n\n Ok((i, o)) => {\n\n let index = input.offset(&i);\n\n Ok((i, (input.slice(..index), o)))\n\n }\n\n Err(e) => Err(e),\n\n }\n\n }\n\n}\n\n\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 87, "score": 160623.29803209548 }, { "content": "/// Enables debug logging if the RUST_LOG environment variable is\n\n/// set. Does nothing if RUST_LOG is not set. If enable_logging has\n\n/// been set previously, does nothing\n\npub fn maybe_start_logging() {\n\n if std::env::var(\"RUST_LOG\").is_ok() {\n\n start_logging()\n\n }\n\n}\n\n\n\n#[macro_export]\n\n/// A macro to assert that one string is contained within another with\n\n/// a nice error message if they are not.\n\n///\n\n/// Usage: `assert_contains!(actual, expected)`\n\n///\n\n/// Is a macro so test error\n\n/// messages are on the same line as the failure;\n\n///\n\n/// Both arguments must be convertable into Strings (Into<String>)\n\nmacro_rules! 
assert_contains {\n\n ($ACTUAL: expr, $EXPECTED: expr) => {\n\n let actual_value: String = $ACTUAL.into();\n\n let expected_value: String = $EXPECTED.into();\n", "file_path": "test_helpers/src/lib.rs", "rank": 88, "score": 160623.29803209548 }, { "content": "/// map common [`catalog::Error`](server::db::catalog::Error) errors to the appropriate tonic Status\n\npub fn default_catalog_error_handler(error: server::db::catalog::Error) -> tonic::Status {\n\n use server::db::catalog::Error;\n\n match error {\n\n Error::TableNotFound { table } => NotFound::new(ResourceType::Table, table).into(),\n\n Error::PartitionNotFound { partition, table } => {\n\n NotFound::new(ResourceType::Partition, format!(\"{}:{}\", table, partition)).into()\n\n }\n\n Error::ChunkNotFound {\n\n chunk_id,\n\n partition,\n\n table,\n\n } => NotFound::new(\n\n ResourceType::Chunk,\n\n format!(\"{}:{}:{}\", table, partition, chunk_id),\n\n )\n\n .into(),\n\n }\n\n}\n\n\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/database/rpc/error.rs", "rank": 89, "score": 159045.17215436703 }, { "content": "/// Create a predicate representing tag_name=tag_value in the horrible gRPC\n\n/// structs\n\nfn make_tag_predicate(tag_name: impl Into<String>, tag_value: impl Into<String>) -> Predicate {\n\n Predicate {\n\n root: Some(Node {\n\n node_type: NodeType::ComparisonExpression as i32,\n\n children: vec![\n\n Node {\n\n node_type: NodeType::TagRef as i32,\n\n children: vec![],\n\n value: Some(Value::TagRefValue(tag_name.into().into())),\n\n },\n\n Node {\n\n node_type: NodeType::Literal as i32,\n\n children: vec![],\n\n value: Some(Value::StringValue(tag_value.into())),\n\n },\n\n ],\n\n value: Some(Value::Comparison(Comparison::Equal as _)),\n\n }),\n\n }\n\n}\n\n\n", "file_path": "influxdb_iox/tests/end_to_end_cases/storage_api.rs", "rank": 90, "score": 158858.12772739195 }, { "content": "struct PBWriteService {\n\n server: Arc<RouterServer>,\n\n}\n\n\n\n#[tonic::async_trait]\n\nimpl 
write_service_server::WriteService for PBWriteService {\n\n async fn write(\n\n &self,\n\n request: tonic::Request<WriteRequest>,\n\n ) -> Result<tonic::Response<WriteResponse>, tonic::Status> {\n\n let span_ctx = request.extensions().get().cloned();\n\n let database_batch = request\n\n .into_inner()\n\n .database_batch\n\n .ok_or_else(|| FieldViolation::required(\"database_batch\"))?;\n\n\n\n let tables =\n\n mutable_batch_pb::decode::decode_database_batch(&database_batch).map_err(|e| {\n\n FieldViolation {\n\n field: \"database_batch\".into(),\n", "file_path": "influxdb_iox/src/influxdb_ioxd/server_type/router/rpc/write_pb.rs", "rank": 91, "score": 158711.33856294397 }, { "content": "// Translate the field name from tracing into the logfmt style\n\nfn translate_field_name(name: &str) -> &str {\n\n if name == \"message\" {\n\n \"msg\"\n\n } else {\n\n name\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn quote_and_escape_len0() {\n\n assert_eq!(quote_and_escape(\"\"), \"\");\n\n }\n\n\n\n #[test]\n\n fn quote_and_escape_len1() {\n\n assert_eq!(quote_and_escape(\"f\"), \"f\");\n", "file_path": "logfmt/src/lib.rs", "rank": 92, "score": 158029.75775952206 }, { "content": "/// Compute a sort key that orders lower cardinality columns first\n\n///\n\n/// In the absence of more precise information, this should yield a\n\n/// good ordering for RLE compression\n\npub fn compute_sort_key<'a>(summaries: impl Iterator<Item = &'a TableSummary>) -> SortKey<'a> {\n\n let mut cardinalities: HashMap<&str, u64> = Default::default();\n\n for summary in summaries {\n\n for column in &summary.columns {\n\n if column.influxdb_type != Some(InfluxDbType::Tag) {\n\n continue;\n\n }\n\n\n\n let mut cnt = 0;\n\n if let Some(count) = column.stats.distinct_count() {\n\n cnt = count.get();\n\n }\n\n *cardinalities.entry(column.name.as_str()).or_default() += cnt;\n\n }\n\n }\n\n\n\n trace!(cardinalities=?cardinalities, \"cardinalities of of columns to 
compute sort key\");\n\n\n\n let mut cardinalities: Vec<_> = cardinalities.into_iter().collect();\n\n // Sort by (cardinality, column_name) to have deterministic order if same cardinality\n", "file_path": "query/src/lib.rs", "rank": 93, "score": 157986.39855716826 }, { "content": "pub fn parse_lines(input: &str) -> impl Iterator<Item = Result<ParsedLine<'_>>> {\n\n split_lines(input).filter_map(|line| {\n\n let i = trim_leading(line);\n\n\n\n if i.is_empty() {\n\n return None;\n\n }\n\n\n\n let res = match parse_line(i) {\n\n Ok((remaining, line)) => {\n\n // should have parsed the whole input line, if any\n\n // data remains it is a parse error for this line\n\n // corresponding Go logic:\n\n // https://github.com/influxdata/influxdb/blob/217eddc87e14a79b01d0c22994fc139f530094a2/models/points_parser.go#L259-L266\n\n if !remaining.is_empty() {\n\n Some(Err(Error::CannotParseEntireLine {\n\n trailing_content: String::from(remaining),\n\n }))\n\n } else {\n\n Some(Ok(line))\n", "file_path": "influxdb_line_protocol/src/lib.rs", "rank": 94, "score": 157674.39519596903 }, { "content": "#[derive(Debug)]\n\nstruct ServerShared {\n\n /// A token that is used to trigger shutdown of the background worker\n\n shutdown: CancellationToken,\n\n\n\n /// Application-global state\n\n application: Arc<ApplicationState>,\n\n\n\n /// The state of the `Server`\n\n state: RwLock<Freezable<ServerState>>,\n\n\n\n /// Notify that the database state has changed\n\n state_notify: Notify,\n\n}\n\n\n\n#[derive(Debug, Snafu)]\n\npub enum InitError {\n\n #[snafu(display(\"error listing databases in object storage: {}\", source))]\n\n ListDatabases { source: object_store::Error },\n\n\n\n #[snafu(display(\"error getting server config from object storage: {}\", source))]\n", "file_path": "server/src/lib.rs", "rank": 95, "score": 157659.02384035473 }, { "content": "/// Creates an Arrow RecordBatches with schema and IOx statistics.\n\n///\n\n/// Generated columns are prefixes using 
`column_prefix`.\n\n///\n\n/// RecordBatches, schema and IOx statistics will be generated in separate ways to emulate what the normal data\n\n/// ingestion would do. This also ensures that the Parquet data that will later be created out of the RecordBatch is\n\n/// indeed self-contained and can act as a source to recorder schema and statistics.\n\npub fn make_record_batch(\n\n column_prefix: &str,\n\n test_size: TestSize,\n\n) -> (Vec<RecordBatch>, Schema, Vec<ColumnSummary>, usize) {\n\n // (name, array, nullable)\n\n let mut arrow_cols: Vec<Vec<(String, ArrayRef, bool)>> = vec![vec![], vec![], vec![]];\n\n let mut summaries = vec![];\n\n let mut schema_builder = SchemaBuilder::new();\n\n\n\n // tag\n\n create_columns_tag(\n\n column_prefix,\n\n test_size,\n\n &mut arrow_cols,\n\n &mut summaries,\n\n &mut schema_builder,\n\n );\n\n\n\n // field: string\n\n create_columns_field_string(\n", "file_path": "parquet_file/src/test_utils.rs", "rank": 96, "score": 157581.03184646158 }, { "content": "/// Decodes a [`DatabaseBatch`] to a map of [`MutableBatch`] keyed by table name\n\npub fn decode_database_batch(\n\n database_batch: &DatabaseBatch,\n\n) -> Result<HashMap<String, MutableBatch>> {\n\n let mut ret = HashMap::with_capacity(database_batch.table_batches.len());\n\n for table_batch in &database_batch.table_batches {\n\n let (_, batch) = ret\n\n .raw_entry_mut()\n\n .from_key(table_batch.table_name.as_str())\n\n .or_insert_with(|| (table_batch.table_name.clone(), MutableBatch::new()));\n\n write_table_batch(batch, table_batch)?;\n\n }\n\n Ok(ret)\n\n}\n\n\n", "file_path": "mutable_batch_pb/src/decode.rs", "rank": 97, "score": 157577.9499954073 }, { "content": "/// Converts the provided lines of line protocol to a set of [`MutableBatch`]\n\n/// keyed by measurement name, and a set of statistics about the converted line protocol\n\npub fn lines_to_batches_stats(\n\n lines: &str,\n\n default_time: i64,\n\n) -> Result<(HashMap<String, MutableBatch>, PayloadStatistics)> 
{\n\n let mut stats = PayloadStatistics::default();\n\n let mut batches = HashMap::new();\n\n for (line_idx, maybe_line) in parse_lines(lines).enumerate() {\n\n let line = maybe_line.context(LineProtocol { line: line_idx + 1 })?;\n\n\n\n stats.num_lines += 1;\n\n stats.num_fields += line.field_set.len();\n\n\n\n let measurement = line.series.measurement.as_str();\n\n\n\n let (_, batch) = batches\n\n .raw_entry_mut()\n\n .from_key(measurement)\n\n .or_insert_with(|| (measurement.to_string(), MutableBatch::new()));\n\n\n\n // TODO: Reuse writer\n\n let mut writer = Writer::new(batch, 1);\n\n write_line(&mut writer, &line, default_time).context(Write { line: line_idx + 1 })?;\n\n writer.commit();\n\n }\n\n ensure!(!batches.is_empty(), EmptyPayload);\n\n\n\n Ok((batches, stats))\n\n}\n\n\n", "file_path": "mutable_batch_lp/src/lib.rs", "rank": 98, "score": 157577.52059253355 }, { "content": "/// Returns an iterator over set bit positions in increasing order starting\n\n/// at the provided bit offset\n\npub fn iter_set_positions_with_offset(\n\n bytes: &[u8],\n\n offset: usize,\n\n) -> impl Iterator<Item = usize> + '_ {\n\n let mut byte_idx = offset >> 3;\n\n let mut in_progress = bytes.get(byte_idx).cloned().unwrap_or(0);\n\n\n\n let skew = offset & 7;\n\n in_progress &= 0xFF << skew;\n\n\n\n std::iter::from_fn(move || loop {\n\n if in_progress != 0 {\n\n let bit_pos = in_progress.trailing_zeros();\n\n in_progress ^= 1 << bit_pos;\n\n return Some((byte_idx << 3) + (bit_pos as usize));\n\n }\n\n byte_idx += 1;\n\n in_progress = *bytes.get(byte_idx)?;\n\n })\n\n}\n", "file_path": "arrow_util/src/bitset.rs", "rank": 99, "score": 157571.64297717653 } ]
Rust
src/database_models/match_models.rs
othello-storm-system/othello_storm_system_backend
5900a404dbbc66b8b5430aed6baba24d81fd42af
use diesel::prelude::*; use diesel::result::Error; use serde_json::{Map, Value}; use crate::errors::ErrorType; use crate::game_match::{GameMatchTransformer, IGameMatch}; use crate::properties::SpecialConditionScore; use super::{RoundDAO, RoundRowModel}; use crate::schema::matches; #[derive(AsChangeset, PartialEq, Debug, Queryable, Associations, Identifiable)] #[belongs_to(RoundRowModel, foreign_key = "round_id")] #[table_name = "matches"] pub struct MatchRowModel { pub id: i32, pub round_id: i32, pub black_player_id: i32, pub white_player_id: i32, pub black_score: i32, pub white_score: i32, pub meta_data: Value, } #[derive(Insertable)] #[table_name = "matches"] struct NewMatchRowModel<'a> { pub round_id: &'a i32, pub black_player_id: &'a i32, pub white_player_id: &'a i32, pub black_score: &'a i32, pub white_score: &'a i32, pub meta_data: &'a Value, } pub trait MatchDAO where Self: Sized, { fn create( round_id: &i32, black_player_id: &i32, white_player_id: &i32, black_score: &i32, white_score: &i32, meta_data: Map<String, Value>, connection: &PgConnection, ) -> Result<Self, ErrorType>; fn create_from( game_match: &Box<dyn IGameMatch>, connection: &PgConnection, ) -> Result<Self, ErrorType>; fn bulk_create_from( game_matches: &Vec<Box<dyn IGameMatch>>, connection: &PgConnection, ) -> Result<Vec<Self>, ErrorType>; fn get(id: &i32, connection: &PgConnection) -> Result<Self, ErrorType>; fn get_all_from_round( round_id: &i32, connection: &PgConnection, ) -> Result<Vec<Self>, ErrorType>; fn get_all_from_tournament( tournament_id: &i32, connection: &PgConnection, ) -> Result<Vec<Self>, ErrorType>; fn delete(&self, connection: &PgConnection) -> Result<(), ErrorType>; fn update(&self, connection: &PgConnection) -> Result<Self, ErrorType>; fn is_finished(&self) -> bool; } impl MatchRowModel { fn insert_to_database( new_match: NewMatchRowModel, connection: &PgConnection, ) -> Result<MatchRowModel, ErrorType> { let result: Result<MatchRowModel, Error> = 
diesel::insert_into(matches::table) .values(new_match) .get_result(connection); match result { Ok(game_match) => { let match_id = game_match.id.clone(); let round_id = game_match.round_id.clone(); let black_player_id = game_match.black_player_id.clone(); let white_player_id = game_match.white_player_id.clone(); info!( "Match id {} ({} vs {}) is added in round id {}", match_id, black_player_id, white_player_id, round_id, ); Ok(game_match) } Err(e) => { error!("{}", e); Err(ErrorType::DatabaseError) } } } fn bulk_insert_to_database( new_matches: Vec<NewMatchRowModel>, connection: &PgConnection, ) -> Result<Vec<MatchRowModel>, ErrorType> { let result: Result<Vec<MatchRowModel>, Error> = diesel::insert_into(matches::table) .values(new_matches) .get_results(connection); match result { Ok(matches) => { matches[..].into_iter().for_each(|game_match| { info!( "Match id {} ({} vs {}) is added in round id {}", game_match.id.clone(), game_match.black_player_id.clone(), game_match.white_player_id.clone(), game_match.round_id.clone(), ); }); Ok(matches) } Err(e) => { error!("{}", e); Err(ErrorType::DatabaseError) } } } } impl MatchDAO for MatchRowModel { fn create( round_id: &i32, black_player_id: &i32, white_player_id: &i32, black_score: &i32, white_score: &i32, meta_data: Map<String, Value>, connection: &PgConnection, ) -> Result<Self, ErrorType> { let meta_data_json = Value::from(meta_data); let new_match = NewMatchRowModel { round_id, black_player_id, white_player_id, black_score, white_score, meta_data: &meta_data_json, }; MatchRowModel::insert_to_database(new_match, connection) } fn create_from( game_match: &Box<dyn IGameMatch>, connection: &PgConnection, ) -> Result<Self, ErrorType> { let match_data = GameMatchTransformer::transform_to_match_model_data(game_match); let new_match = NewMatchRowModel { round_id: &match_data.round_id, black_player_id: &match_data.black_player_id, white_player_id: &match_data.white_player_id, black_score: &match_data.black_score, white_score: 
&match_data.white_score, meta_data: &match_data.meta_data, }; MatchRowModel::insert_to_database(new_match, connection) } fn bulk_create_from( game_matches: &Vec<Box<dyn IGameMatch>>, connection: &PgConnection, ) -> Result<Vec<Self>, ErrorType> { let new_matches_data: Vec<MatchRowModel> = game_matches .into_iter() .map(|game_match| GameMatchTransformer::transform_to_match_model_data(game_match)) .collect(); let new_matches = new_matches_data .iter() .map(|match_datum| NewMatchRowModel { round_id: &match_datum.round_id, black_player_id: &match_datum.black_player_id, white_player_id: &match_datum.white_player_id, black_score: &match_datum.black_score, white_score: &match_datum.white_score, meta_data: &match_datum.meta_data, }) .collect(); MatchRowModel::bulk_insert_to_database(new_matches, connection) } fn get(id: &i32, connection: &PgConnection) -> Result<Self, ErrorType> { let result = matches::table.find(id).first(connection); match result { Ok(game_match) => Ok(game_match), Err(e) => { error!("{}", e); Err(ErrorType::DatabaseError) } } } fn get_all_from_round( round_id: &i32, connection: &PgConnection, ) -> Result<Vec<Self>, ErrorType> { let result = matches::table .filter(matches::round_id.eq(round_id)) .load::<MatchRowModel>(connection); match result { Ok(matches) => Ok(matches), Err(e) => { error!("{}", e); Err(ErrorType::DatabaseError) } } } fn get_all_from_tournament( tournament_id: &i32, connection: &PgConnection, ) -> Result<Vec<Self>, ErrorType> { let rounds = RoundRowModel::get_all_from_tournament(tournament_id, connection)?; let round_ids: Vec<i32> = rounds.iter().map(|round| round.id.clone()).collect(); let result = matches::table .filter(matches::round_id.eq_any(round_ids)) .load::<MatchRowModel>(connection); match result { Ok(matches) => Ok(matches), Err(e) => { error!("{}", e); Err(ErrorType::DatabaseError) } } } fn delete(&self, connection: &PgConnection) -> Result<(), ErrorType> { let result = diesel::delete(self).execute(connection); match result 
{ Ok(_) => { info!( "Match id {} ({} vs {}) is deleted from round id {}", &self.id, &self.black_player_id, &self.white_player_id, &self.round_id, ); Ok(()) } Err(e) => { error!("{}", e); Err(ErrorType::DatabaseError) } } } fn update(&self, connection: &PgConnection) -> Result<Self, ErrorType> { let result = diesel::update(self) .set(self) .get_result::<MatchRowModel>(connection); match result { Ok(game_match) => { info!("Match {} is updated.", &self.id); Ok(game_match) } Err(e) => { error!("{}", e); Err(ErrorType::DatabaseError) } } } fn is_finished(&self) -> bool { !(self.black_score == SpecialConditionScore::NotFinished.to_i32() || self.white_score == SpecialConditionScore::NotFinished.to_i32()) } } #[cfg(test)] mod tests { mod crud { use serde_json::{Map, Value}; use crate::database_models::{MatchDAO, MatchRowModel}; use crate::game_match::GameMatchTransformer; use crate::utils; use crate::utils::{ create_mock_match_from_round, create_mock_player_from_tournament, create_mock_round_from_tournament, create_mock_tournament_with_creator, create_mock_user, }; #[test] fn test_create_match() { let test_connection = utils::get_test_connection(); let user = create_mock_user(&test_connection); let tournament = create_mock_tournament_with_creator(&user.username, &test_connection); let round = create_mock_round_from_tournament(&tournament.id, &test_connection); let black_player = create_mock_player_from_tournament(&tournament.id, &test_connection); let black_score = 20; let white_player = create_mock_player_from_tournament(&tournament.id, &test_connection); let white_score = 44; let result = MatchRowModel::create( &round.id, &black_player.id, &white_player.id, &black_score, &white_score, Map::new(), &test_connection, ); assert_eq!(result.is_ok(), true); } #[test] fn test_create_match_from_game_match() { let test_connection = utils::get_test_connection(); let user = create_mock_user(&test_connection); let tournament = create_mock_tournament_with_creator(&user.username, 
&test_connection); let round = create_mock_round_from_tournament(&tournament.id, &test_connection); let black_player = create_mock_player_from_tournament(&tournament.id, &test_connection); let black_score = 20; let white_player = create_mock_player_from_tournament(&tournament.id, &test_connection); let white_score = 44; let game_match = GameMatchTransformer::transform_to_game_match(&MatchRowModel { id: -1, round_id: round.id.clone(), black_player_id: black_player.id.clone(), white_player_id: white_player.id.clone(), black_score: black_score.clone(), white_score: white_score.clone(), meta_data: Value::from(Map::new()), }); let result = MatchRowModel::create_from(&game_match, &test_connection); assert_eq!(result.is_ok(), true); } #[test] fn test_create_bulk_match() { let test_connection = utils::get_test_connection(); let user = create_mock_user(&test_connection); let tournament = create_mock_tournament_with_creator(&user.username, &test_connection); let round = create_mock_round_from_tournament(&tournament.id, &test_connection); let black_player_1 = create_mock_player_from_tournament(&tournament.id, &test_connection); let black_score_1 = 20; let white_player_1 = create_mock_player_from_tournament(&tournament.id, &test_connection); let white_score_1 = 44; let black_player_2 = create_mock_player_from_tournament(&tournament.id, &test_connection); let black_score_2 = 20; let white_player_2 = create_mock_player_from_tournament(&tournament.id, &test_connection); let white_score_2 = 44; let game_match_1 = GameMatchTransformer::transform_to_game_match(&MatchRowModel { id: -1, round_id: round.id.clone(), black_player_id: black_player_1.id.clone(), white_player_id: white_player_1.id.clone(), black_score: black_score_1.clone(), white_score: white_score_1.clone(), meta_data: Value::from(Map::new()), }); let game_match_2 = GameMatchTransformer::transform_to_game_match(&MatchRowModel { id: -1, round_id: round.id.clone(), black_player_id: black_player_2.id.clone(), 
white_player_id: white_player_2.id.clone(), black_score: black_score_2.clone(), white_score: white_score_2.clone(), meta_data: Value::from(Map::new()), }); let matches = vec![game_match_1, game_match_2]; let result = MatchRowModel::bulk_create_from(&matches, &test_connection); assert_eq!(result.is_ok(), true); } #[test] fn test_get_all_matches() { let test_connection = utils::get_test_connection(); let user = create_mock_user(&test_connection); let tournament = create_mock_tournament_with_creator(&user.username, &test_connection); let round_1 = create_mock_round_from_tournament(&tournament.id, &test_connection); let round_2 = create_mock_round_from_tournament(&tournament.id, &test_connection); let match_1 = create_mock_match_from_round(&tournament.id, &round_1.id, &test_connection); let match_2 = create_mock_match_from_round(&tournament.id, &round_1.id, &test_connection); let _match_3 = create_mock_match_from_round(&tournament.id, &round_2.id, &test_connection); let round_1_matches = MatchRowModel::get_all_from_round(&round_1.id, &test_connection).unwrap(); assert_eq!(round_1_matches, vec![match_1, match_2]); } #[test] fn test_get_all_tournament_matches() { let test_connection = utils::get_test_connection(); let user = create_mock_user(&test_connection); let tournament = create_mock_tournament_with_creator(&user.username, &test_connection); let round_1 = create_mock_round_from_tournament(&tournament.id, &test_connection); let round_2 = create_mock_round_from_tournament(&tournament.id, &test_connection); let match_1 = create_mock_match_from_round(&tournament.id, &round_1.id, &test_connection); let match_2 = create_mock_match_from_round(&tournament.id, &round_1.id, &test_connection); let match_3 = create_mock_match_from_round(&tournament.id, &round_2.id, &test_connection); let tournament_2 = create_mock_tournament_with_creator(&user.username, &test_connection); let tournament_2_round = create_mock_round_from_tournament(&tournament_2.id, &test_connection); let 
_tournament_2_match = create_mock_match_from_round( &tournament_2.id, &tournament_2_round.id, &test_connection, ); let tournament_1_matches = MatchRowModel::get_all_from_tournament(&tournament.id, &test_connection).unwrap(); assert_eq!(tournament_1_matches, vec![match_1, match_2, match_3]); } #[test] fn test_get_match() { let test_connection = utils::get_test_connection(); let user = create_mock_user(&test_connection); let tournament = create_mock_tournament_with_creator(&user.username, &test_connection); let round = create_mock_round_from_tournament(&tournament.id, &test_connection); let game_match = create_mock_match_from_round(&tournament.id, &round.id, &test_connection); let match_obtained = MatchRowModel::get(&game_match.id, &test_connection).unwrap(); assert_eq!(match_obtained, game_match); } #[test] fn test_update_match() { let test_connection = utils::get_test_connection(); let user = create_mock_user(&test_connection); let tournament = create_mock_tournament_with_creator(&user.username, &test_connection); let round = create_mock_round_from_tournament(&tournament.id, &test_connection); let mut game_match = create_mock_match_from_round(&tournament.id, &round.id, &test_connection); let new_white_score = utils::generate_random_number(); let new_black_score = utils::generate_random_number(); game_match.white_score = new_white_score.clone(); game_match.black_score = new_black_score.clone(); game_match.update(&test_connection).unwrap(); let match_obtained = MatchRowModel::get(&game_match.id, &test_connection).unwrap(); assert_eq!(match_obtained.black_score, new_black_score); assert_eq!(match_obtained.white_score, new_white_score); } #[test] fn test_delete_match() { let test_connection = utils::get_test_connection(); let user = create_mock_user(&test_connection); let tournament = create_mock_tournament_with_creator(&user.username, &test_connection); let round = create_mock_round_from_tournament(&tournament.id, &test_connection); let game_match = 
create_mock_match_from_round(&tournament.id, &round.id, &test_connection); game_match.delete(&test_connection).unwrap(); let matches = MatchRowModel::get_all_from_round(&round.id, &test_connection).unwrap(); assert_eq!(matches, vec![]); } } }
use diesel::prelude::*; use diesel::result::Error; use serde_json::{Map, Value}; use crate::errors::ErrorType; use crate::game_match::{GameMatchTransformer, IGameMatch}; use crate::properties::SpecialConditionScore; use super::{RoundDAO, RoundRowModel}; use crate::schema::matches; #[derive(AsChangeset, PartialEq, Debug, Queryable, Associations, Identifiable)] #[belongs_to(RoundRowModel, foreign_key = "round_id")] #[table_name = "matches"] pub struct MatchRowModel { pub id: i32, pub round_id: i32, pub black_player_id: i32, pub white_player_id: i32, pub black_score: i32, pub white_score: i32, pub meta_data: Value, } #[derive(Insertable)] #[table_name = "matches"] struct NewMatchRowModel<'a> { pub round_id: &'a i32, pub black_player_id: &'a i32, pub white_player_id: &'a i32, pub black_score: &'a i32, pub white_score: &'a i32, pub meta_data: &'a Value, } pub trait MatchDAO where Self: Sized, { fn create( round_id: &i32, black_player_id: &i32, white_player_id: &i32, black_score: &i32, white_score: &i32, meta_data: Map<String, Value>, connection: &PgConnection, ) -> Result<Self, ErrorType>; fn create_from( game_match: &Box<dyn IGameMatch>, connection: &PgConnection, ) -> Result<Self, ErrorType>; fn bulk_create_from( game_matches: &Vec<Box<dyn IGameMatch>>, connection: &PgConnection, ) -> Result<Vec<Self>, ErrorType>; fn get(id: &i32, connection: &PgConnection) -> Result<Self, ErrorType>; fn get_all_from_round( round_id: &i32, connection: &PgConnection, ) -> Result<Vec<Self>, ErrorType>; fn get_all_from_tournament( tournament_id: &i32, connection: &PgConnection, ) -> Result<Vec<Self>, ErrorType>; fn delete(&self, connection: &PgConnection) -> Result<(), ErrorType>; fn update(&self, connection: &PgConnection) -> Result<Self, ErrorType>; fn is_finished(&self) -> bool; } impl MatchRowModel { fn insert_to_database( new_match: NewMatchRowModel, connection: &PgConnection, ) -> Result<MatchRowModel, ErrorType> { let result: Result<MatchRowModel, Error> = 
diesel::insert_into(matches::table) .values(new_match) .get_result(connection); match result { Ok(game_match) => { let match_id = game_match.id.clone(); let round_id = game_match.round_id.clone(); let black_player_id = game_match.black_player_id.clone(); let white_player_id = game_match.white_player_id.clone(); info!( "Match id {} ({} vs {}) is added in round id {}", match_id, black_player_id, white_player_id, round_id, ); Ok(game_match) } Err(e) => { error!("{}", e); Err(ErrorType::DatabaseError) } } } fn bulk_insert_to_database( new_matches: Vec<NewMatchRowModel>, connection: &PgConnection, ) -> Result<Vec<MatchRowModel>, ErrorType> { let result: Result<Vec<MatchRowModel>, Error> = diesel::insert_into(matches::table) .values(new_matches) .get_results(connection); match result { Ok(matches) => { matches[..].into_iter().for_each(|game_match| { info!( "Match id {} ({} vs {}) is added in round id {}", game_match.id.clone(), game_match.black_player_id.clone(), game_match.white_player_id.clone(), game_match.round_id.clone(), ); }); Ok(matches) } Err(e) => { error!("{}", e); Err(ErrorType::DatabaseError) } } } } impl MatchDAO for MatchRowModel { fn create( round_id: &i32, black_player_id: &i32, white_player_id: &i32, black_score: &i32, white_score: &i32, meta_data: Map<String, Value>, connection: &PgConnection, ) -> Result<Self, ErrorType> { let meta_data_json = Value::from(meta_data);
MatchRowModel::insert_to_database(new_match, connection) } fn create_from( game_match: &Box<dyn IGameMatch>, connection: &PgConnection, ) -> Result<Self, ErrorType> { let match_data = GameMatchTransformer::transform_to_match_model_data(game_match); let new_match = NewMatchRowModel { round_id: &match_data.round_id, black_player_id: &match_data.black_player_id, white_player_id: &match_data.white_player_id, black_score: &match_data.black_score, white_score: &match_data.white_score, meta_data: &match_data.meta_data, }; MatchRowModel::insert_to_database(new_match, connection) } fn bulk_create_from( game_matches: &Vec<Box<dyn IGameMatch>>, connection: &PgConnection, ) -> Result<Vec<Self>, ErrorType> { let new_matches_data: Vec<MatchRowModel> = game_matches .into_iter() .map(|game_match| GameMatchTransformer::transform_to_match_model_data(game_match)) .collect(); let new_matches = new_matches_data .iter() .map(|match_datum| NewMatchRowModel { round_id: &match_datum.round_id, black_player_id: &match_datum.black_player_id, white_player_id: &match_datum.white_player_id, black_score: &match_datum.black_score, white_score: &match_datum.white_score, meta_data: &match_datum.meta_data, }) .collect(); MatchRowModel::bulk_insert_to_database(new_matches, connection) } fn get(id: &i32, connection: &PgConnection) -> Result<Self, ErrorType> { let result = matches::table.find(id).first(connection); match result { Ok(game_match) => Ok(game_match), Err(e) => { error!("{}", e); Err(ErrorType::DatabaseError) } } } fn get_all_from_round( round_id: &i32, connection: &PgConnection, ) -> Result<Vec<Self>, ErrorType> { let result = matches::table .filter(matches::round_id.eq(round_id)) .load::<MatchRowModel>(connection); match result { Ok(matches) => Ok(matches), Err(e) => { error!("{}", e); Err(ErrorType::DatabaseError) } } } fn get_all_from_tournament( tournament_id: &i32, connection: &PgConnection, ) -> Result<Vec<Self>, ErrorType> { let rounds = 
RoundRowModel::get_all_from_tournament(tournament_id, connection)?; let round_ids: Vec<i32> = rounds.iter().map(|round| round.id.clone()).collect(); let result = matches::table .filter(matches::round_id.eq_any(round_ids)) .load::<MatchRowModel>(connection); match result { Ok(matches) => Ok(matches), Err(e) => { error!("{}", e); Err(ErrorType::DatabaseError) } } } fn delete(&self, connection: &PgConnection) -> Result<(), ErrorType> { let result = diesel::delete(self).execute(connection); match result { Ok(_) => { info!( "Match id {} ({} vs {}) is deleted from round id {}", &self.id, &self.black_player_id, &self.white_player_id, &self.round_id, ); Ok(()) } Err(e) => { error!("{}", e); Err(ErrorType::DatabaseError) } } } fn update(&self, connection: &PgConnection) -> Result<Self, ErrorType> { let result = diesel::update(self) .set(self) .get_result::<MatchRowModel>(connection); match result { Ok(game_match) => { info!("Match {} is updated.", &self.id); Ok(game_match) } Err(e) => { error!("{}", e); Err(ErrorType::DatabaseError) } } } fn is_finished(&self) -> bool { !(self.black_score == SpecialConditionScore::NotFinished.to_i32() || self.white_score == SpecialConditionScore::NotFinished.to_i32()) } } #[cfg(test)] mod tests { mod crud { use serde_json::{Map, Value}; use crate::database_models::{MatchDAO, MatchRowModel}; use crate::game_match::GameMatchTransformer; use crate::utils; use crate::utils::{ create_mock_match_from_round, create_mock_player_from_tournament, create_mock_round_from_tournament, create_mock_tournament_with_creator, create_mock_user, }; #[test] fn test_create_match() { let test_connection = utils::get_test_connection(); let user = create_mock_user(&test_connection); let tournament = create_mock_tournament_with_creator(&user.username, &test_connection); let round = create_mock_round_from_tournament(&tournament.id, &test_connection); let black_player = create_mock_player_from_tournament(&tournament.id, &test_connection); let black_score = 20; let 
white_player = create_mock_player_from_tournament(&tournament.id, &test_connection); let white_score = 44; let result = MatchRowModel::create( &round.id, &black_player.id, &white_player.id, &black_score, &white_score, Map::new(), &test_connection, ); assert_eq!(result.is_ok(), true); } #[test] fn test_create_match_from_game_match() { let test_connection = utils::get_test_connection(); let user = create_mock_user(&test_connection); let tournament = create_mock_tournament_with_creator(&user.username, &test_connection); let round = create_mock_round_from_tournament(&tournament.id, &test_connection); let black_player = create_mock_player_from_tournament(&tournament.id, &test_connection); let black_score = 20; let white_player = create_mock_player_from_tournament(&tournament.id, &test_connection); let white_score = 44; let game_match = GameMatchTransformer::transform_to_game_match(&MatchRowModel { id: -1, round_id: round.id.clone(), black_player_id: black_player.id.clone(), white_player_id: white_player.id.clone(), black_score: black_score.clone(), white_score: white_score.clone(), meta_data: Value::from(Map::new()), }); let result = MatchRowModel::create_from(&game_match, &test_connection); assert_eq!(result.is_ok(), true); } #[test] fn test_create_bulk_match() { let test_connection = utils::get_test_connection(); let user = create_mock_user(&test_connection); let tournament = create_mock_tournament_with_creator(&user.username, &test_connection); let round = create_mock_round_from_tournament(&tournament.id, &test_connection); let black_player_1 = create_mock_player_from_tournament(&tournament.id, &test_connection); let black_score_1 = 20; let white_player_1 = create_mock_player_from_tournament(&tournament.id, &test_connection); let white_score_1 = 44; let black_player_2 = create_mock_player_from_tournament(&tournament.id, &test_connection); let black_score_2 = 20; let white_player_2 = create_mock_player_from_tournament(&tournament.id, &test_connection); let 
white_score_2 = 44; let game_match_1 = GameMatchTransformer::transform_to_game_match(&MatchRowModel { id: -1, round_id: round.id.clone(), black_player_id: black_player_1.id.clone(), white_player_id: white_player_1.id.clone(), black_score: black_score_1.clone(), white_score: white_score_1.clone(), meta_data: Value::from(Map::new()), }); let game_match_2 = GameMatchTransformer::transform_to_game_match(&MatchRowModel { id: -1, round_id: round.id.clone(), black_player_id: black_player_2.id.clone(), white_player_id: white_player_2.id.clone(), black_score: black_score_2.clone(), white_score: white_score_2.clone(), meta_data: Value::from(Map::new()), }); let matches = vec![game_match_1, game_match_2]; let result = MatchRowModel::bulk_create_from(&matches, &test_connection); assert_eq!(result.is_ok(), true); } #[test] fn test_get_all_matches() { let test_connection = utils::get_test_connection(); let user = create_mock_user(&test_connection); let tournament = create_mock_tournament_with_creator(&user.username, &test_connection); let round_1 = create_mock_round_from_tournament(&tournament.id, &test_connection); let round_2 = create_mock_round_from_tournament(&tournament.id, &test_connection); let match_1 = create_mock_match_from_round(&tournament.id, &round_1.id, &test_connection); let match_2 = create_mock_match_from_round(&tournament.id, &round_1.id, &test_connection); let _match_3 = create_mock_match_from_round(&tournament.id, &round_2.id, &test_connection); let round_1_matches = MatchRowModel::get_all_from_round(&round_1.id, &test_connection).unwrap(); assert_eq!(round_1_matches, vec![match_1, match_2]); } #[test] fn test_get_all_tournament_matches() { let test_connection = utils::get_test_connection(); let user = create_mock_user(&test_connection); let tournament = create_mock_tournament_with_creator(&user.username, &test_connection); let round_1 = create_mock_round_from_tournament(&tournament.id, &test_connection); let round_2 = 
create_mock_round_from_tournament(&tournament.id, &test_connection); let match_1 = create_mock_match_from_round(&tournament.id, &round_1.id, &test_connection); let match_2 = create_mock_match_from_round(&tournament.id, &round_1.id, &test_connection); let match_3 = create_mock_match_from_round(&tournament.id, &round_2.id, &test_connection); let tournament_2 = create_mock_tournament_with_creator(&user.username, &test_connection); let tournament_2_round = create_mock_round_from_tournament(&tournament_2.id, &test_connection); let _tournament_2_match = create_mock_match_from_round( &tournament_2.id, &tournament_2_round.id, &test_connection, ); let tournament_1_matches = MatchRowModel::get_all_from_tournament(&tournament.id, &test_connection).unwrap(); assert_eq!(tournament_1_matches, vec![match_1, match_2, match_3]); } #[test] fn test_get_match() { let test_connection = utils::get_test_connection(); let user = create_mock_user(&test_connection); let tournament = create_mock_tournament_with_creator(&user.username, &test_connection); let round = create_mock_round_from_tournament(&tournament.id, &test_connection); let game_match = create_mock_match_from_round(&tournament.id, &round.id, &test_connection); let match_obtained = MatchRowModel::get(&game_match.id, &test_connection).unwrap(); assert_eq!(match_obtained, game_match); } #[test] fn test_update_match() { let test_connection = utils::get_test_connection(); let user = create_mock_user(&test_connection); let tournament = create_mock_tournament_with_creator(&user.username, &test_connection); let round = create_mock_round_from_tournament(&tournament.id, &test_connection); let mut game_match = create_mock_match_from_round(&tournament.id, &round.id, &test_connection); let new_white_score = utils::generate_random_number(); let new_black_score = utils::generate_random_number(); game_match.white_score = new_white_score.clone(); game_match.black_score = new_black_score.clone(); game_match.update(&test_connection).unwrap(); let 
match_obtained = MatchRowModel::get(&game_match.id, &test_connection).unwrap(); assert_eq!(match_obtained.black_score, new_black_score); assert_eq!(match_obtained.white_score, new_white_score); } #[test] fn test_delete_match() { let test_connection = utils::get_test_connection(); let user = create_mock_user(&test_connection); let tournament = create_mock_tournament_with_creator(&user.username, &test_connection); let round = create_mock_round_from_tournament(&tournament.id, &test_connection); let game_match = create_mock_match_from_round(&tournament.id, &round.id, &test_connection); game_match.delete(&test_connection).unwrap(); let matches = MatchRowModel::get_all_from_round(&round.id, &test_connection).unwrap(); assert_eq!(matches, vec![]); } } }
let new_match = NewMatchRowModel { round_id, black_player_id, white_player_id, black_score, white_score, meta_data: &meta_data_json, };
assignment_statement
[ { "content": "#[get(\"/<tournament_id>/rounds/<round_id>/standings\")]\n\npub fn get_standings(tournament_id: i32, round_id: i32) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::GetStandingsCommand {\n\n round_id_limit: round_id,\n\n tournament_id,\n\n }\n\n .execute(&connection)\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct UpdateRoundRequest {\n\n updated_name: String,\n\n}\n\n\n", "file_path": "src/routes/round_match_routes.rs", "rank": 0, "score": 305556.6893047898 }, { "content": "#[get(\"/<_tournament_id>/rounds/<round_id>/matches\")]\n\npub fn get_round_matches(_tournament_id: i32, round_id: i32) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::GetRoundMatchesCommand { round_id }.execute(&connection)\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct UpdateMatchRequest {\n\n black_score: i32,\n\n white_score: i32,\n\n}\n\n\n\n#[patch(\n\n \"/<tournament_id>/rounds/<_round_id>/matches/<match_id>\",\n\n data = \"<request>\"\n\n)]\n", "file_path": "src/routes/round_match_routes.rs", "rank": 1, "score": 297942.77158030245 }, { "content": "#[delete(\"/<tournament_id>/rounds/<round_id>\")]\n\npub fn delete_round(token: Token, tournament_id: i32, round_id: i32) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::DeleteRoundCommand {\n\n jwt: token.jwt,\n\n tournament_id,\n\n round_id,\n\n }\n\n .execute(&connection)\n\n}\n\n\n", "file_path": "src/routes/round_match_routes.rs", "rank": 2, "score": 294213.6108607057 }, { "content": "#[get(\"/<_tournament_id>/rounds/<round_id>\")]\n\npub fn get_round(_tournament_id: i32, round_id: i32) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::GetRoundCommand { round_id }.execute(&connection)\n\n}\n\n\n", "file_path": "src/routes/round_match_routes.rs", "rank": 3, "score": 291798.0808480164 }, { "content": "#[get(\"/<tournament_id>/rounds\")]\n\npub fn 
get_tournament_rounds(tournament_id: i32) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::GetTournamentRoundsCommand { tournament_id }.execute(&connection)\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct CreateManualNormalRoundRequest {\n\n name: String,\n\n match_data: Vec<(i32, i32)>,\n\n bye_match_data: Vec<i32>,\n\n}\n\n\n", "file_path": "src/routes/round_match_routes.rs", "rank": 4, "score": 274157.13666647515 }, { "content": "pub trait IGameMatch: ClonableIGameMatch + Debug {\n\n fn is_player_playing(&self, player_id: &i32) -> bool;\n\n fn get_player_color(&self, player_id: &i32) -> Option<PlayerColor>;\n\n fn get_players_id(&self) -> (Option<i32>, Option<i32>);\n\n fn get_opponent_id(&self, player_id: &i32) -> Option<i32>;\n\n fn calculate_major_score(&self, player_id: &i32) -> f64;\n\n fn calculate_minor_score(\n\n &self,\n\n player_id: &i32,\n\n major_scores_by_player_ids: &HashMap<i32, f64>,\n\n brightwell_constant: &f64,\n\n ) -> f64;\n\n fn extract_data(&self) -> MatchRowModel;\n\n}\n\n\n\nimpl Clone for Box<dyn IGameMatch> {\n\n fn clone(&self) -> Box<dyn IGameMatch> {\n\n self.clone_box()\n\n }\n\n}\n", "file_path": "src/game_match/abstract_game_match.rs", "rank": 5, "score": 225536.6030306604 }, { "content": "#[get(\"/<id>\")]\n\npub fn get_tournament(id: i32) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::GetTournamentCommand { id }.execute(&connection)\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct TournamentCreationRequest {\n\n name: String,\n\n country: String,\n\n tournament_type: String,\n\n start_date: String,\n\n end_date: String,\n\n}\n\n\n", "file_path": "src/routes/tournament_routes.rs", "rank": 6, "score": 222896.30878441903 }, { "content": "#[get(\"/<id>/players\")]\n\npub fn get_players(id: i32) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::GetTournamentPlayersCommand { tournament_id: id 
}.execute(&connection)\n\n}\n\n\n", "file_path": "src/routes/player_routes.rs", "rank": 7, "score": 222896.250958848 }, { "content": "#[get(\"/<id>/summary\")]\n\npub fn get_tournament_summary(id: i32) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::GetTournamentSummaryCommand { id }.execute(&connection)\n\n}\n", "file_path": "src/routes/tournament_routes.rs", "rank": 8, "score": 218920.25323484876 }, { "content": "#[get(\"/<id>/joueurs_players\")]\n\npub fn get_joueurs_players(id: i32) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::GetTournamentJoueursPlayersCommand { tournament_id: id }.execute(&connection)\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct AddPlayerRequest {\n\n pub joueurs_id: String,\n\n}\n\n\n", "file_path": "src/routes/player_routes.rs", "rank": 9, "score": 218920.19703775793 }, { "content": "#[delete(\"/<tournament_id>/players/<player_id>\")]\n\npub fn delete_player(token: Token, tournament_id: i32, player_id: i32) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n let command = response_commands::DeleteTournamentPlayerCommand {\n\n jwt: token.jwt,\n\n tournament_id,\n\n player_id,\n\n };\n\n command.execute(&connection)\n\n}\n", "file_path": "src/routes/player_routes.rs", "rank": 10, "score": 216168.02140549087 }, { "content": "#[get(\"/<id>/admins\")]\n\npub fn get_tournament_admins(id: i32) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::GetAllAdminsCommand { tournament_id: id }.execute(&connection)\n\n}\n\n\n", "file_path": "src/routes/tournament_admin_routes.rs", "rank": 11, "score": 215160.0525704183 }, { "content": "#[post(\"/<tournament_id>/rounds/create_automatic\", data = \"<request>\")]\n\npub fn create_automatic_round(\n\n token: Token,\n\n tournament_id: i32,\n\n request: Json<CreateAutomaticRoundRequest>,\n\n) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n 
response_commands::CreateAutomaticRoundCommand {\n\n jwt: token.jwt,\n\n tournament_id,\n\n name: request.name.clone(),\n\n }\n\n .execute(&connection)\n\n}\n\n\n", "file_path": "src/routes/round_match_routes.rs", "rank": 12, "score": 213866.48169474487 }, { "content": "#[get(\"/<id>/potential_admins\")]\n\npub fn get_tournament_potential_admins(id: i32) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::GetPotentialAdminsCommand { tournament_id: id }.execute(&connection)\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct AddAdminRequest {\n\n username: String,\n\n}\n\n\n", "file_path": "src/routes/tournament_admin_routes.rs", "rank": 13, "score": 211598.4883779434 }, { "content": "#[post(\"/<tournament_id>/rounds/create_manual_special\", data = \"<request>\")]\n\npub fn create_manual_special_round(\n\n token: Token,\n\n tournament_id: i32,\n\n request: Json<CreateManualSpecialRoundRequest>,\n\n) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::CreateManualSpecialRoundCommand {\n\n jwt: token.jwt,\n\n tournament_id,\n\n name: request.name.clone(),\n\n match_data: request.match_data.clone(),\n\n bye_match_data: request.bye_match_data.clone(),\n\n }\n\n .execute(&connection)\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct CreateAutomaticRoundRequest {\n\n name: String,\n\n}\n\n\n", "file_path": "src/routes/round_match_routes.rs", "rank": 14, "score": 209257.93681096047 }, { "content": "#[post(\"/<tournament_id>/rounds/create_manual_normal\", data = \"<request>\")]\n\npub fn create_manual_normal_round(\n\n token: Token,\n\n tournament_id: i32,\n\n request: Json<CreateManualNormalRoundRequest>,\n\n) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::CreateManualNormalRoundCommand {\n\n jwt: token.jwt,\n\n tournament_id,\n\n name: request.name.clone(),\n\n match_data: request.match_data.clone(),\n\n bye_match_data: request.bye_match_data.clone(),\n\n 
}\n\n .execute(&connection)\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct CreateManualSpecialRoundRequest {\n\n name: String,\n\n match_data: Vec<(i32, i32)>,\n\n bye_match_data: Vec<i32>,\n\n}\n\n\n", "file_path": "src/routes/round_match_routes.rs", "rank": 15, "score": 209257.93681096047 }, { "content": "#[delete(\"/<id>\")]\n\npub fn delete_tournament(token: Token, id: i32) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::DeleteTournamentCommand { jwt: token.jwt, id }.execute(&connection)\n\n}\n\n\n", "file_path": "src/routes/tournament_routes.rs", "rank": 16, "score": 206635.46134221408 }, { "content": "pub fn create_mock_match_from_round(\n\n tournament_id: &i32,\n\n round_id: &i32,\n\n connection: &PgConnection,\n\n) -> MatchRowModel {\n\n let black_player = create_mock_player_from_tournament(tournament_id, connection);\n\n let black_score = utils::generate_random_number();\n\n\n\n let white_player = create_mock_player_from_tournament(tournament_id, connection);\n\n let white_score = utils::generate_random_number();\n\n MatchRowModel::create(\n\n &round_id,\n\n &black_player.id,\n\n &white_player.id,\n\n &black_score,\n\n &white_score,\n\n Map::new(),\n\n connection,\n\n )\n\n .unwrap()\n\n}\n", "file_path": "src/utils/test_helpers.rs", "rank": 17, "score": 203628.61463132844 }, { "content": "pub fn create_result_keeper(matches: &Vec<Box<dyn IGameMatch>>) -> Box<dyn IResultKeeper> {\n\n let mut player_ids: HashSet<i32> = HashSet::new();\n\n\n\n matches[..].iter().for_each(|game_match| {\n\n let players_id = game_match.get_players_id();\n\n if let Some(id) = players_id.0 {\n\n player_ids.insert(id);\n\n }\n\n if let Some(id) = players_id.1 {\n\n player_ids.insert(id);\n\n }\n\n });\n\n\n\n let sorted_player_standings = get_sorted_player_standings(&player_ids, &matches);\n\n let opponents_ids_by_player_id = get_opponent_ids_by_player_id(&player_ids, &matches);\n\n\n\n let black_color_count_by_player_id =\n\n 
get_color_count_by_player_id(&player_ids, &matches, PlayerColor::Black);\n\n let white_color_count_by_player_id =\n\n get_color_count_by_player_id(&player_ids, &matches, PlayerColor::White);\n\n\n\n Box::from(ResultKeeper {\n\n sorted_player_standings,\n\n opponents_ids_by_player_id,\n\n black_color_count_by_player_id,\n\n white_color_count_by_player_id,\n\n })\n\n}\n\n\n", "file_path": "src/tournament_manager/result_keeper.rs", "rank": 18, "score": 192979.41424570442 }, { "content": "#[delete(\"/<id>/admins/<username>\")]\n\npub fn remove_admin(token: Token, id: i32, username: String) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n let command = response_commands::RemoveAdminCommand {\n\n jwt: token.jwt,\n\n tournament_id: id,\n\n admin_username: username,\n\n };\n\n command.execute(&connection)\n\n}\n", "file_path": "src/routes/tournament_admin_routes.rs", "rank": 19, "score": 189597.65492351673 }, { "content": "#[patch(\"/<tournament_id>/rounds/<round_id>\", data = \"<request>\")]\n\npub fn update_round(\n\n token: Token,\n\n tournament_id: i32,\n\n round_id: i32,\n\n request: Json<UpdateRoundRequest>,\n\n) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::UpdateRoundCommand {\n\n jwt: token.jwt,\n\n tournament_id,\n\n round_id,\n\n updated_name: request.updated_name.clone(),\n\n }\n\n .execute(&connection)\n\n}\n\n\n", "file_path": "src/routes/round_match_routes.rs", "rank": 21, "score": 186350.94481924325 }, { "content": "pub fn update_match(\n\n token: Token,\n\n tournament_id: i32,\n\n _round_id: i32,\n\n match_id: i32,\n\n request: Json<UpdateMatchRequest>,\n\n) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::UpdateMatchCommand {\n\n jwt: token.jwt,\n\n tournament_id,\n\n match_id,\n\n black_score: request.black_score.clone(),\n\n white_score: request.white_score.clone(),\n\n }\n\n .execute(&connection)\n\n}\n", "file_path": 
"src/routes/round_match_routes.rs", "rank": 22, "score": 186199.26475874148 }, { "content": "fn calculate_major_score(player_id: &i32, matches: &Vec<Box<dyn IGameMatch>>) -> f64 {\n\n matches\n\n .iter()\n\n .map(|game_match| game_match.calculate_major_score(player_id))\n\n .sum()\n\n}\n\n\n", "file_path": "src/tournament_manager/result_keeper.rs", "rank": 23, "score": 184302.61193383002 }, { "content": "#[post(\"/<id>/players\", data = \"<request>\")]\n\npub fn add_player(token: Token, id: i32, request: Json<AddPlayerRequest>) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n let command = response_commands::AddTournamentPlayerCommand {\n\n jwt: token.jwt,\n\n tournament_id: id,\n\n joueurs_id: request.joueurs_id.clone(),\n\n };\n\n command.execute(&connection)\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct AddPlayerNewRequest {\n\n pub first_name: String,\n\n pub last_name: String,\n\n pub country: String,\n\n}\n\n\n", "file_path": "src/routes/player_routes.rs", "rank": 24, "score": 180668.59671802365 }, { "content": "#[post(\"/<id>/admins\", data = \"<request>\")]\n\npub fn add_admin(token: Token, id: i32, request: Json<AddAdminRequest>) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n let command = response_commands::AddAdminCommand {\n\n jwt: token.jwt,\n\n tournament_id: id,\n\n admin_username: request.username.clone(),\n\n };\n\n command.execute(&connection)\n\n}\n\n\n", "file_path": "src/routes/tournament_admin_routes.rs", "rank": 25, "score": 177898.5352598598 }, { "content": "pub fn create_mock_round_from_tournament(\n\n tournament_id: &i32,\n\n connection: &PgConnection,\n\n) -> RoundRowModel {\n\n let name = utils::generate_random_string(10);\n\n RoundRowModel::create(\n\n tournament_id,\n\n &name,\n\n RoundType::ManualNormal,\n\n Map::new(),\n\n connection,\n\n )\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/utils/test_helpers.rs", "rank": 26, "score": 173093.91323899885 }, { "content": "pub fn 
generate_random_number() -> i32 {\n\n let mut rng = thread_rng();\n\n rng.gen()\n\n}\n\n\n", "file_path": "src/utils/random.rs", "rank": 27, "score": 167609.71166481348 }, { "content": "pub fn string_to_date(date: String) -> Result<NaiveDate, ErrorType> {\n\n match NaiveDate::parse_from_str(&date[..], \"%Y-%m-%d\") {\n\n Ok(date) => Ok(date),\n\n Err(_e) => Err(ErrorType::BadRequestError(String::from(\"Bad date format.\"))),\n\n }\n\n}\n", "file_path": "src/utils/datetime.rs", "rank": 28, "score": 166769.5570034236 }, { "content": "#[mockable]\n\npub fn generate_random_number_ranged(low_ex: i32, hi_in: i32) -> i32 {\n\n let mut rng = thread_rng();\n\n rng.gen_range(low_ex, hi_in)\n\n}\n", "file_path": "src/utils/random.rs", "rank": 29, "score": 164887.81283302367 }, { "content": "#[mockable]\n\npub fn http_get_text(url: &String) -> Result<String, ErrorType> {\n\n let response = http_get(url)?;\n\n match response.text() {\n\n Ok(text) => Ok(text),\n\n Err(e) => Err(ErrorType::ExternalConnectionError(\n\n get_reqwest_error_message(e),\n\n )),\n\n }\n\n}\n", "file_path": "src/utils/http_request.rs", "rank": 30, "score": 163762.96049871604 }, { "content": "pub fn generate_rounds_meta(round_models: Vec<RoundRowModel>) -> Vec<Map<String, Value>> {\n\n let meta_generator = RoundPreviewMetaGenerator {};\n\n round_models\n\n .into_iter()\n\n .map(|round| meta_generator.generate_meta_for(&round))\n\n .collect()\n\n}\n\n\n", "file_path": "src/meta_generator/helpers.rs", "rank": 31, "score": 163589.7373618194 }, { "content": "pub fn generate_matches_meta(match_models: Vec<MatchRowModel>) -> Vec<Map<String, Value>> {\n\n let meta_generator = DefaultMatchMetaGenerator {};\n\n match_models\n\n .into_iter()\n\n .map(|game_match| meta_generator.generate_meta_for(&game_match))\n\n .collect()\n\n}\n\n\n", "file_path": "src/meta_generator/helpers.rs", "rank": 32, "score": 163016.90600048885 }, { "content": "pub trait IResultKeeper {\n\n fn has_player_bye(&self, player_id: &i32) -> 
bool;\n\n fn has_players_met(&self, player_1_id: &i32, player_2_id: &i32) -> bool;\n\n fn get_standings(&self) -> Vec<i32>;\n\n fn get_detailed_standings(&self) -> Vec<PlayerStanding>;\n\n fn is_empty(&self) -> bool;\n\n fn get_color_count(&self, player_id: &i32, color: PlayerColor) -> i32;\n\n}\n\n\n\npub struct ResultKeeper {\n\n sorted_player_standings: Vec<PlayerStanding>,\n\n opponents_ids_by_player_id: HashMap<i32, HashSet<i32>>,\n\n black_color_count_by_player_id: HashMap<i32, i32>,\n\n white_color_count_by_player_id: HashMap<i32, i32>,\n\n}\n\n\n\nimpl ResultKeeper {}\n\n\n\nimpl IResultKeeper for ResultKeeper {\n\n fn has_player_bye(&self, player_id: &i32) -> bool {\n", "file_path": "src/tournament_manager/result_keeper.rs", "rank": 33, "score": 159926.8208040099 }, { "content": "pub trait RoundDAO\n\nwhere\n\n Self: Sized,\n\n{\n\n fn create(\n\n tournament_id: &i32,\n\n name: &String,\n\n round_type: RoundType,\n\n meta_data: Map<String, Value>,\n\n connection: &PgConnection,\n\n ) -> Result<Self, ErrorType>;\n\n fn get(id: &i32, connection: &PgConnection) -> Result<Self, ErrorType>;\n\n fn get_all_from_tournament(\n\n tournament_id: &i32,\n\n connection: &PgConnection,\n\n ) -> Result<Vec<Self>, ErrorType>;\n\n fn update(&self, connection: &PgConnection) -> Result<Self, ErrorType>;\n\n fn delete(&self, connection: &PgConnection) -> Result<(), ErrorType>;\n\n}\n\n\n", "file_path": "src/database_models/round_models.rs", "rank": 34, "score": 159380.21408427728 }, { "content": "pub trait ClonableIGameMatch {\n\n fn clone_box(&self) -> Box<dyn IGameMatch>;\n\n}\n\n\n\nimpl<T> ClonableIGameMatch for T\n\nwhere\n\n T: 'static + IGameMatch + Clone,\n\n{\n\n fn clone_box(&self) -> Box<dyn IGameMatch> {\n\n Box::new(self.clone())\n\n }\n\n}\n\n\n", "file_path": "src/game_match/abstract_game_match.rs", "rank": 35, "score": 158319.1715977225 }, { "content": "pub fn create_mock_user(connection: &PgConnection) -> UserRowModel {\n\n let username = 
utils::generate_random_string(10);\n\n let display_name = utils::generate_random_string(20);\n\n let password = utils::generate_random_string(30);\n\n let hashed_password = utils::hash(&password);\n\n UserRowModel::create(\n\n &username,\n\n &display_name,\n\n &hashed_password,\n\n UserRole::Superuser,\n\n connection,\n\n )\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/utils/test_helpers.rs", "rank": 36, "score": 158227.04057343537 }, { "content": "pub fn create_date_format(year: i32, month: u32, date: u32) -> NaiveDate {\n\n NaiveDate::from_ymd(year, month, date)\n\n}\n\n\n", "file_path": "src/utils/datetime.rs", "rank": 37, "score": 154648.1117315229 }, { "content": "pub trait RoundMetaGenerator {\n\n fn generate_meta_for(&self, round: &RoundRowModel) -> Map<String, Value>;\n\n}\n\n\n\npub struct RoundPreviewMetaGenerator {}\n\n\n\nimpl RoundMetaGenerator for RoundPreviewMetaGenerator {\n\n fn generate_meta_for(&self, round: &RoundRowModel) -> Map<String, Value> {\n\n let mut meta = Map::new();\n\n meta.insert(String::from(\"id\"), Value::from(round.id.clone()));\n\n meta.insert(String::from(\"name\"), Value::from(round.name.clone()));\n\n meta\n\n }\n\n}\n\n\n\npub struct RoundDetailsMetaGenerator {}\n\n\n\nimpl RoundMetaGenerator for RoundDetailsMetaGenerator {\n\n fn generate_meta_for(&self, round: &RoundRowModel) -> Map<String, Value> {\n\n let mut meta = Map::new();\n\n meta.insert(String::from(\"id\"), Value::from(round.id.clone()));\n\n meta.insert(String::from(\"name\"), Value::from(round.name.clone()));\n\n meta.insert(String::from(\"type\"), Value::from(round.round_type.clone()));\n\n meta\n\n }\n\n}\n", "file_path": "src/meta_generator/round_meta_generators.rs", "rank": 38, "score": 153737.88727678303 }, { "content": "pub trait MatchMetaGenerator {\n\n fn generate_meta_for(&self, game_match: &MatchRowModel) -> Map<String, Value>;\n\n}\n\n\n\npub struct DefaultMatchMetaGenerator {}\n\n\n\nimpl MatchMetaGenerator for DefaultMatchMetaGenerator {\n\n fn 
generate_meta_for(&self, game_match: &MatchRowModel) -> Map<String, Value> {\n\n let mut meta = Map::new();\n\n meta.insert(String::from(\"id\"), Value::from(game_match.id.clone()));\n\n meta.insert(\n\n String::from(\"black_player_id\"),\n\n Value::from(game_match.black_player_id.clone()),\n\n );\n\n meta.insert(\n\n String::from(\"white_player_id\"),\n\n Value::from(game_match.white_player_id.clone()),\n\n );\n\n meta.insert(\n\n String::from(\"black_score\"),\n\n Value::from(game_match.black_score.clone()),\n\n );\n\n meta.insert(\n\n String::from(\"white_score\"),\n\n Value::from(game_match.white_score.clone()),\n\n );\n\n meta\n\n }\n\n}\n", "file_path": "src/meta_generator/match_meta_generator.rs", "rank": 39, "score": 153168.22508324598 }, { "content": "pub fn get_test_connection() -> PgConnection {\n\n dotenv().ok();\n\n\n\n let database_url = env::var(\"DATABASE_URL\").expect(\"DATABASE_URL must be set\");\n\n let connection = PgConnection::establish(&database_url)\n\n .expect(&format!(\"Error connecting to {}\", database_url));\n\n connection.begin_test_transaction().unwrap();\n\n connection\n\n}\n", "file_path": "src/utils/database_connection.rs", "rank": 40, "score": 151768.1295926535 }, { "content": "pub fn get_pooled_connection() -> PostgresPooledConnection {\n\n let pool = POOL.clone();\n\n let connection = pool.get().expect(\"Failed to get pooled connection\");\n\n connection\n\n}\n\n\n", "file_path": "src/utils/database_connection.rs", "rank": 41, "score": 149293.9964298727 }, { "content": "#[post(\"/\", data = \"<request>\")]\n\npub fn create_tournament(\n\n token: Token,\n\n request: Json<TournamentCreationRequest>,\n\n) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::CreateTournamentCommand {\n\n jwt: token.jwt,\n\n name: request.name.clone(),\n\n country: request.country.clone(),\n\n tournament_type: request.tournament_type.clone(),\n\n start_date: request.start_date.clone(),\n\n end_date: 
request.end_date.clone(),\n\n }\n\n .execute(&connection)\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct TournamentUpdateRequest {\n\n name: String,\n\n country: String,\n\n start_date: String,\n\n end_date: String,\n\n}\n\n\n", "file_path": "src/routes/tournament_routes.rs", "rank": 42, "score": 148364.38205903425 }, { "content": "#[get(\"/created_by_me\")]\n\npub fn get_all_created_tournaments(token: Token) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::GetAllCreatedTournamentsCommand { jwt: token.jwt }.execute(&connection)\n\n}\n\n\n", "file_path": "src/routes/tournament_routes.rs", "rank": 43, "score": 147958.0936545783 }, { "content": "pub fn verify(string: &String, hashed_string: &String) -> bool {\n\n bcrypt::verify(string, hashed_string)\n\n}\n", "file_path": "src/utils/hash.rs", "rank": 44, "score": 143875.02630924617 }, { "content": "fn get_reqwest_error_message(e: Error) -> String {\n\n match e.status() {\n\n Some(status_code) => String::from(status_code.as_str()),\n\n None => String::from(\"Unknown error\"),\n\n }\n\n}\n\n\n", "file_path": "src/utils/http_request.rs", "rank": 45, "score": 142205.62766023225 }, { "content": "pub fn create_mock_tournament_with_creator(\n\n username: &String,\n\n connection: &PgConnection,\n\n) -> TournamentRowModel {\n\n let name = utils::generate_random_string(20);\n\n let country = utils::generate_random_string(10);\n\n let joueurs: Vec<Player> = Vec::new();\n\n let tournament_type = TournamentType::RoundRobin;\n\n let date = utils::create_date_format(2020, 1, 1);\n\n\n\n TournamentRowModel::create(\n\n &name,\n\n &country,\n\n &date,\n\n &date,\n\n &username,\n\n joueurs,\n\n tournament_type,\n\n Map::new(),\n\n connection,\n\n )\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/utils/test_helpers.rs", "rank": 46, "score": 142122.00886090647 }, { "content": "pub fn create_mock_player_from_tournament(\n\n tournament_id: &i32,\n\n connection: &PgConnection,\n\n) -> PlayerRowModel 
{\n\n let first_name = utils::generate_random_string(5);\n\n let last_name = utils::generate_random_string(5);\n\n let country = utils::generate_random_string(3);\n\n let joueurs_id = utils::generate_random_string(10);\n\n let rating = utils::generate_random_number();\n\n\n\n let player = Player {\n\n joueurs_id,\n\n first_name,\n\n last_name,\n\n country,\n\n rating,\n\n };\n\n PlayerRowModel::create(&tournament_id, &player, Map::new(), connection).unwrap()\n\n}\n\n\n", "file_path": "src/utils/test_helpers.rs", "rank": 47, "score": 142122.00886090647 }, { "content": "pub fn create_mock_tournament_with_creator_and_joueurs(\n\n creator_username: &String,\n\n joueurs: Vec<Player>,\n\n connection: &PgConnection,\n\n) -> TournamentRowModel {\n\n let name = utils::generate_random_string(20);\n\n let country = utils::generate_random_string(10);\n\n let tournament_type = TournamentType::RoundRobin;\n\n let date = utils::create_date_format(2020, 1, 1);\n\n\n\n TournamentRowModel::create(\n\n &name,\n\n &country,\n\n &date,\n\n &date,\n\n &creator_username,\n\n joueurs,\n\n tournament_type,\n\n Map::new(),\n\n connection,\n\n )\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/utils/test_helpers.rs", "rank": 48, "score": 139285.8672082701 }, { "content": "#[get(\"/\")]\n\npub fn get_tournaments() -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::GetAllTournamentsCommand {}.execute(&connection)\n\n}\n\n\n", "file_path": "src/routes/tournament_routes.rs", "rank": 49, "score": 132654.93292497908 }, { "content": "#[post(\"/\", data = \"<request>\")]\n\npub fn create_user(token: Token, request: Json<UserCreationRequest>) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::CreateUserCommand {\n\n jwt: token.jwt,\n\n username: request.username.clone(),\n\n display_name: request.display_name.clone(),\n\n password: request.password.clone(),\n\n }\n\n 
.execute(&connection)\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct UserUpdateRequest {\n\n display_name: Option<String>,\n\n password: Option<String>,\n\n}\n\n\n", "file_path": "src/routes/user_routes.rs", "rank": 50, "score": 131208.17788144937 }, { "content": "fn get_opponent_ids_by_player_id(\n\n player_ids: &HashSet<i32>,\n\n matches: &Vec<Box<dyn IGameMatch>>,\n\n) -> HashMap<i32, HashSet<i32>> {\n\n HashMap::from_iter(player_ids.iter().map(|id| {\n\n let opponent_ids = HashSet::from_iter(\n\n matches[..]\n\n .iter()\n\n .map(|game_match| game_match.get_opponent_id(id))\n\n .filter(|result| result.is_some())\n\n .map(|result| result.unwrap()),\n\n );\n\n (id.clone(), opponent_ids)\n\n }))\n\n}\n\n\n", "file_path": "src/tournament_manager/result_keeper.rs", "rank": 52, "score": 127788.52098491232 }, { "content": "pub trait ResponseCommand {\n\n fn execute(&self, connection: &PgConnection) -> Json<JsonValue> {\n\n match self.do_execute(connection) {\n\n Ok(result) => {\n\n info!(\"Successful request for {}\", self.get_request_summary());\n\n Json(json!({\n\n \"success\": result,\n\n \"error\": {\n\n \"code\": 0,\n\n \"message\": \"\",\n\n }\n\n }))\n\n }\n\n Err(error) => {\n\n error!(\n\n \"Failed request for {}, {}\",\n\n self.get_request_summary(),\n\n &error.to_error_message()\n\n );\n\n Json(json!({\n", "file_path": "src/response_commands/command_trait.rs", "rank": 53, "score": 127570.78445972534 }, { "content": "fn http_get(url: &String) -> Result<Response, ErrorType> {\n\n match get(url) {\n\n Ok(response) => Ok(response),\n\n Err(e) => Err(ErrorType::ExternalConnectionError(\n\n get_reqwest_error_message(e),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/utils/http_request.rs", "rank": 54, "score": 126330.2944993616 }, { "content": "#[get(\"/<username>\")]\n\npub fn get_user(username: String) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::GetUserCommand { username 
}.execute(&connection)\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct UserCreationRequest {\n\n username: String,\n\n display_name: String,\n\n password: String,\n\n}\n\n\n", "file_path": "src/routes/user_routes.rs", "rank": 55, "score": 122445.50484650885 }, { "content": "#[get(\"/managed_by_me\")]\n\npub fn get_all_managed_tournaments(token: Token) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::GetAllManagedTournamentsCommand { jwt: token.jwt }.execute(&connection)\n\n}\n\n\n", "file_path": "src/routes/tournament_routes.rs", "rank": 56, "score": 120071.43172345616 }, { "content": "#[get(\"/profile\")]\n\npub fn get_current_user_profile(token: Token) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::CurrentUserCommand { jwt: token.jwt }.execute(&connection)\n\n}\n", "file_path": "src/routes/general_routes.rs", "rank": 57, "score": 117826.21128964511 }, { "content": "fn get_color_count_by_player_id(\n\n player_ids: &HashSet<i32>,\n\n matches: &Vec<Box<dyn IGameMatch>>,\n\n color: PlayerColor,\n\n) -> HashMap<i32, i32> {\n\n HashMap::from_iter(player_ids.iter().map(|id| {\n\n (\n\n id.clone(),\n\n matches\n\n .iter()\n\n .map(|game_match| game_match.get_player_color(id))\n\n .filter(|result| result.is_some() && result.as_ref().unwrap() == &color)\n\n .count() as i32,\n\n )\n\n }))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n mod test_get_standings {\n", "file_path": "src/tournament_manager/result_keeper.rs", "rank": 58, "score": 117314.80462527488 }, { "content": "fn get_major_scores_by_player_id(\n\n player_ids: &HashSet<i32>,\n\n matches: &Vec<Box<dyn IGameMatch>>,\n\n) -> HashMap<i32, f64> {\n\n HashMap::from_iter(\n\n player_ids\n\n .iter()\n\n .map(|id| (id.clone(), calculate_major_score(id, matches))),\n\n )\n\n}\n\n\n", "file_path": "src/tournament_manager/result_keeper.rs", "rank": 59, "score": 117314.80462527488 }, { "content": "pub trait MetaGenerator {\n\n fn 
generate_meta(&self) -> Map<String, Value>;\n\n}\n", "file_path": "src/meta_generator/meta_generator.rs", "rank": 60, "score": 117303.38848627641 }, { "content": "#[post(\"/login\", data = \"<request>\")]\n\npub fn login(request: Json<UserLoginRequest>) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n\n\n response_commands::LoginCommand {\n\n username: request.username.clone(),\n\n password: request.password.clone(),\n\n }\n\n .execute(&connection)\n\n}\n\n\n", "file_path": "src/routes/general_routes.rs", "rank": 61, "score": 115734.81954284057 }, { "content": "pub trait PairingGenerator {\n\n fn generate_pairings(&self, round_id: &i32) -> Result<Pairings, ErrorType>;\n\n}\n", "file_path": "src/pairings_generator/abstract_pairings_generator.rs", "rank": 62, "score": 115281.35864702973 }, { "content": "#[patch(\"/<id>\", data = \"<request>\")]\n\npub fn update_tournament(\n\n token: Token,\n\n id: i32,\n\n request: Json<TournamentUpdateRequest>,\n\n) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::UpdateTournamentCommand {\n\n jwt: token.jwt,\n\n id,\n\n updated_name: request.name.clone(),\n\n updated_country: request.country.clone(),\n\n updated_start_date: request.start_date.clone(),\n\n updated_end_date: request.end_date.clone(),\n\n }\n\n .execute(&connection)\n\n}\n\n\n", "file_path": "src/routes/tournament_routes.rs", "rank": 63, "score": 114881.32338620299 }, { "content": "#[patch(\"/<username>\", data = \"<request>\")]\n\npub fn update_user(\n\n token: Token,\n\n username: String,\n\n request: Json<UserUpdateRequest>,\n\n) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n response_commands::UpdateUserCommand {\n\n jwt: token.jwt,\n\n username,\n\n display_name: request.display_name.clone(),\n\n password: request.password.clone(),\n\n }\n\n .execute(&connection)\n\n}\n", "file_path": "src/routes/user_routes.rs", "rank": 64, "score": 114877.38858753518 }, { "content": "pub 
trait StandingMetaGenerator {\n\n fn generate_meta_for(&self, standing: &PlayerStanding) -> Map<String, Value>;\n\n}\n\n\n\npub struct DefaultStandingMetaGenerator {}\n\n\n\n\n\nimpl StandingMetaGenerator for DefaultStandingMetaGenerator {\n\n fn generate_meta_for(&self, standing: &PlayerStanding) -> Map<String, Value> {\n\n let mut meta = Map::new();\n\n meta.insert(\n\n String::from(\"player_id\"),\n\n Value::from(standing.player_id),\n\n );\n\n meta.insert(\n\n String::from(\"major_score\"),\n\n Value::from(standing.major_score),\n\n );\n\n meta.insert(\n\n String::from(\"minor_score\"),\n", "file_path": "src/meta_generator/standing_meta_generators.rs", "rank": 65, "score": 113382.43783889659 }, { "content": "pub trait TournamentMetaGenerator {\n\n fn generate_meta_for(&self, tournament: &TournamentRowModel) -> Map<String, Value>;\n\n}\n\n\n\npub struct TournamentPreviewMetaGenerator<'a> {\n\n pub users_by_username: HashMap<&'a str, &'a UserRowModel>,\n\n}\n\n\n\npub struct TournamentDetailsMetaGenerator {\n\n tournament: TournamentRowModel,\n\n creator: UserRowModel,\n\n}\n\n\n\nimpl TournamentMetaGenerator for TournamentPreviewMetaGenerator<'_> {\n\n fn generate_meta_for(&self, tournament: &TournamentRowModel) -> Map<String, Value> {\n\n let mut meta = Map::new();\n\n meta.insert(String::from(\"id\"), Value::from(tournament.id.clone()));\n\n meta.insert(String::from(\"name\"), Value::from(tournament.name.clone()));\n\n meta.insert(\n\n String::from(\"tournament_type\"),\n", "file_path": "src/meta_generator/tournament_meta_generators.rs", "rank": 66, "score": 113382.43783889659 }, { "content": "#[post(\"/<id>/players/new\", data = \"<request>\")]\n\npub fn add_player_new(\n\n token: Token,\n\n id: i32,\n\n request: Json<AddPlayerNewRequest>,\n\n) -> Json<JsonValue> {\n\n let connection = get_pooled_connection();\n\n let command = response_commands::AddTournamentPlayerNewCommand {\n\n jwt: token.jwt,\n\n tournament_id: id,\n\n first_name: 
request.first_name.clone(),\n\n last_name: request.last_name.clone().to_uppercase(),\n\n country: request.country.clone(),\n\n };\n\n command.execute(&connection)\n\n}\n\n\n", "file_path": "src/routes/player_routes.rs", "rank": 67, "score": 112744.35259262429 }, { "content": "pub fn is_allowed_to_manage_tournament(\n\n account: &Account,\n\n tournament: &TournamentRowModel,\n\n connection: &PgConnection,\n\n) -> Result<bool, ErrorType> {\n\n if account.has_superuser_access() {\n\n return Ok(true);\n\n }\n\n\n\n let username = account.get_username();\n\n let is_created_by_account = tournament.is_created_by(&username);\n\n let is_managed_by_account = tournament.is_managed_by(&username, connection)?;\n\n return Ok(is_created_by_account || is_managed_by_account);\n\n}\n", "file_path": "src/response_commands/helpers.rs", "rank": 68, "score": 112740.52557032742 }, { "content": "pub fn get_player_1_color(\n\n player_1_id: &i32,\n\n player_2_id: &i32,\n\n past_results: &Box<dyn IResultKeeper>,\n\n) -> PlayerColor {\n\n let player_1_black_count = past_results.get_color_count(player_1_id, PlayerColor::Black);\n\n let player_1_white_count = past_results.get_color_count(player_1_id, PlayerColor::White);\n\n let player_2_black_count = past_results.get_color_count(player_2_id, PlayerColor::Black);\n\n let player_2_white_count = past_results.get_color_count(player_2_id, PlayerColor::White);\n\n\n\n if player_1_black_count + player_2_white_count > player_2_black_count + player_1_white_count {\n\n return PlayerColor::White;\n\n }\n\n PlayerColor::Black\n\n}\n", "file_path": "src/pairings_generator/helpers.rs", "rank": 69, "score": 112740.52557032742 }, { "content": "pub fn generate_tournaments_meta(\n\n tournament_models: Vec<TournamentRowModel>,\n\n user_models: Vec<UserRowModel>,\n\n) -> Vec<Map<String, Value>> {\n\n let mut users_by_username: HashMap<&str, &UserRowModel> = HashMap::new();\n\n user_models.iter().for_each(|user| {\n\n 
users_by_username.insert(user.username.as_str(), user.clone());\n\n });\n\n let meta_generator = TournamentPreviewMetaGenerator { users_by_username };\n\n tournament_models\n\n .into_iter()\n\n .map(|tournament| meta_generator.generate_meta_for(&tournament))\n\n .collect()\n\n}\n\n\n", "file_path": "src/meta_generator/helpers.rs", "rank": 70, "score": 112740.52557032742 }, { "content": "CREATE TABLE rounds\n\n(\n\n id SERIAL PRIMARY KEY,\n\n tournament_id INT NOT NULL REFERENCES tournaments (id) ON DELETE NO ACTION,\n\n name VARCHAR NOT NULL,\n\n round_type INT NOT NULL,\n\n meta_data json NOT NULL\n\n);\n\n\n", "file_path": "migrations/2021-01-20-021849_add_rounds_matches/up.sql", "rank": 71, "score": 112309.29272773032 }, { "content": "CREATE TABLE matches\n\n(\n\n id SERIAL PRIMARY KEY,\n\n round_id INT NOT NULL REFERENCES rounds (id) ON DELETE NO ACTION,\n\n black_player_id INT NOT NULL REFERENCES players (id) ON DELETE NO ACTION,\n\n white_player_id INT NOT NULL REFERENCES players (id) ON DELETE NO ACTION,\n\n black_score INT NOT NULL,\n\n white_score INT NOT NULL,\n\n meta_data json NOT NULL\n\n);\n", "file_path": "migrations/2021-01-20-021849_add_rounds_matches/up.sql", "rank": 72, "score": 112171.53385933448 }, { "content": "#[mockable]\n\npub fn get_current_timestamp() -> u64 {\n\n let start = SystemTime::now();\n\n start\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"Time went backwards\")\n\n .as_secs()\n\n}\n\n\n", "file_path": "src/utils/datetime.rs", "rank": 73, "score": 109206.3800270609 }, { "content": "pub fn generate_standings_meta(standings: Vec<PlayerStanding>) -> Vec<Map<String, Value>> {\n\n let meta_generator = DefaultStandingMetaGenerator {};\n\n standings\n\n .into_iter()\n\n .map(|player_standing| meta_generator.generate_meta_for(&player_standing))\n\n .collect()\n\n}\n", "file_path": "src/meta_generator/helpers.rs", "rank": 74, "score": 108040.43473528454 }, { "content": "pub fn generate_users_meta(user_models: Vec<UserRowModel>) -> 
Vec<Map<String, Value>> {\n\n user_models\n\n .into_iter()\n\n .map(|user| {\n\n let meta_generator = UserMetaGenerator::from_user(user);\n\n meta_generator.generate_meta()\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/meta_generator/helpers.rs", "rank": 75, "score": 104302.50332462773 }, { "content": "pub fn generate_players_meta(player_models: Vec<PlayerRowModel>) -> Vec<Map<String, Value>> {\n\n player_models\n\n .into_iter()\n\n .map(|player| {\n\n let meta_generator = PlayerMetaGenerator::from_player_model(player);\n\n meta_generator.generate_meta()\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/meta_generator/helpers.rs", "rank": 76, "score": 104302.50332462773 }, { "content": "pub fn hash(string: &String) -> String {\n\n bcrypt::hash(string).unwrap()\n\n}\n\n\n", "file_path": "src/utils/hash.rs", "rank": 77, "score": 103475.73289654196 }, { "content": "#[derive(Insertable)]\n\n#[table_name = \"rounds\"]\n\nstruct NewRoundRowModel<'a> {\n\n pub tournament_id: &'a i32,\n\n pub name: &'a String,\n\n pub round_type: &'a i32,\n\n pub meta_data: &'a Value,\n\n}\n\n\n", "file_path": "src/database_models/round_models.rs", "rank": 78, "score": 101422.69991171664 }, { "content": "pub fn date_to_string(date: NaiveDate) -> String {\n\n date.format(\"%Y-%m-%d\").to_string()\n\n}\n\n\n", "file_path": "src/utils/datetime.rs", "rank": 79, "score": 99592.30842528545 }, { "content": "pub fn generate_random_string(length: usize) -> String {\n\n thread_rng()\n\n .sample_iter(&Alphanumeric)\n\n .take(length)\n\n .collect()\n\n}\n\n\n", "file_path": "src/utils/random.rs", "rank": 80, "score": 99592.30842528545 }, { "content": "fn create_default_superuser() {\n\n let connection = utils::get_pooled_connection();\n\n let username = env::var(\"SUPERUSER_ID\").unwrap();\n\n let display_name = env::var(\"SUPERUSER_DISPLAY_NAME\").unwrap();\n\n let password = env::var(\"SUPERUSER_PASS\").unwrap();\n\n let hashed_password = utils::hash(&password);\n\n let _ = 
database_models::UserRowModel::create(\n\n &username,\n\n &display_name,\n\n &hashed_password,\n\n properties::UserRole::Superuser,\n\n &connection,\n\n );\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 81, "score": 97706.2633530912 }, { "content": "fn calculate_minor_score(\n\n player_id: &i32,\n\n matches: &Vec<Box<dyn IGameMatch>>,\n\n major_scores_by_player_ids: &HashMap<i32, f64>,\n\n) -> f64 {\n\n // Following https://www.worldothello.org/about/world-othello-championship/woc-rules\n\n let brightwell_constant = f64::from_str(&env::var(\"BRIGHTWELL_CONSTANT\").unwrap()[..]).unwrap();\n\n matches\n\n .iter()\n\n .map(|game_match| {\n\n game_match.calculate_minor_score(\n\n player_id,\n\n major_scores_by_player_ids,\n\n &brightwell_constant,\n\n )\n\n })\n\n .sum()\n\n}\n\n\n", "file_path": "src/tournament_manager/result_keeper.rs", "rank": 82, "score": 90770.79646526996 }, { "content": "fn is_allowed_to_manage_admin(account: &Account, tournament_model: &TournamentRowModel) -> bool {\n\n let username = account.get_username();\n\n account.has_superuser_access() || tournament_model.is_created_by(&username)\n\n}\n", "file_path": "src/response_commands/tournament_admin_commands.rs", "rank": 83, "score": 90572.27721913886 }, { "content": "fn init_pool() -> PostgresPool {\n\n let database_url = env::var(\"DATABASE_URL\").expect(\"DATABASE_URL must be set\");\n\n let manager = ConnectionManager::<PgConnection>::new(database_url);\n\n Pool::builder()\n\n .build(manager)\n\n .expect(\"Failed to create pool.\")\n\n}\n\n\n", "file_path": "src/utils/database_connection.rs", "rank": 84, "score": 88900.9151554044 }, { "content": "fn get_sorted_player_standings<'a>(\n\n player_ids: &HashSet<i32>,\n\n matches: &Vec<Box<dyn IGameMatch>>,\n\n) -> Vec<PlayerStanding> {\n\n let major_scores_by_id = get_major_scores_by_player_id(player_ids, matches);\n\n let mut standings: Vec<PlayerStanding> = player_ids\n\n .iter()\n\n .map(|id| {\n\n let filtered_matches: Vec<Box<dyn 
IGameMatch>> = matches\n\n .into_iter()\n\n .filter(|game_match| game_match.is_player_playing(id))\n\n .map(|game_match| game_match.clone())\n\n .collect();\n\n let major_score = major_scores_by_id.get(id).unwrap_or(&0.0);\n\n let minor_score = calculate_minor_score(id, &filtered_matches, &major_scores_by_id);\n\n PlayerStanding {\n\n player_id: id.clone(),\n\n major_score: major_score.clone(),\n\n minor_score,\n\n match_history: filtered_matches,\n", "file_path": "src/tournament_manager/result_keeper.rs", "rank": 85, "score": 86691.28679095468 }, { "content": "DROP TABLE rounds;\n", "file_path": "migrations/2021-01-20-021849_add_rounds_matches/down.sql", "rank": 86, "score": 77661.54805091504 }, { "content": "DROP TABLE matches;\n", "file_path": "migrations/2021-01-20-021849_add_rounds_matches/down.sql", "rank": 87, "score": 77523.78918251918 }, { "content": "-- This file should undo anything in `up.sql`\n\n\n", "file_path": "migrations/2021-01-20-021849_add_rounds_matches/down.sql", "rank": 88, "score": 72960.59933071837 }, { "content": "-- Your SQL goes here\n\n\n", "file_path": "migrations/2021-01-20-021849_add_rounds_matches/up.sql", "rank": 89, "score": 72960.59933071837 }, { "content": "use rocket_contrib::json::{Json, JsonValue};\n\nuse serde::Deserialize;\n\n\n\nuse crate::response_commands;\n\nuse crate::response_commands::ResponseCommand;\n\nuse crate::utils::get_pooled_connection;\n\n\n\nuse super::Token;\n\n\n\n#[get(\"/<tournament_id>/rounds\")]\n", "file_path": "src/routes/round_match_routes.rs", "rank": 90, "score": 70357.52031700812 }, { "content": "-- Sets up a trigger for the given table to automatically set a column called\n", "file_path": "migrations/00000000000000_diesel_initial_setup/up.sql", "rank": 91, "score": 68830.17474119115 }, { "content": " }\n\n\n\n fn get_request_summary(&self) -> String {\n\n String::from(format!(\"GetRoundMatches for {}\", &self.round_id))\n\n }\n\n}\n\n\n\npub struct UpdateMatchCommand {\n\n pub jwt: String,\n\n 
pub tournament_id: i32,\n\n pub match_id: i32,\n\n pub black_score: i32,\n\n pub white_score: i32,\n\n}\n\n\n\nimpl ResponseCommand for UpdateMatchCommand {\n\n fn do_execute(&self, connection: &PgConnection) -> Result<JsonValue, ErrorType> {\n\n let account = Account::login_from_jwt(&self.jwt, connection)?;\n\n let tournament_model = TournamentRowModel::get(&self.tournament_id, connection)?;\n\n\n", "file_path": "src/response_commands/round_match_commands.rs", "rank": 92, "score": 67935.8689808047 }, { "content": " }\n\n}\n\n\n\npub struct GetRoundMatchesCommand {\n\n pub round_id: i32,\n\n}\n\n\n\nimpl ResponseCommand for GetRoundMatchesCommand {\n\n fn do_execute(&self, connection: &PgConnection) -> Result<JsonValue, ErrorType> {\n\n let matches = MatchRowModel::get_all_from_round(&self.round_id, connection)?;\n\n let matches_meta = generate_matches_meta(\n\n matches\n\n .into_iter()\n\n .sorted_by_key(|game_match| game_match.id)\n\n .collect(),\n\n );\n\n Ok(json!({\n\n \"round_id\": &self.round_id,\n\n \"matches\": matches_meta,\n\n }))\n", "file_path": "src/response_commands/round_match_commands.rs", "rank": 93, "score": 67935.76159285051 }, { "content": "use crate::properties::{RoundType, TournamentType};\n\nuse crate::tournament_manager::create_result_keeper;\n\n\n\nuse super::{is_allowed_to_manage_tournament, ResponseCommand};\n\n\n\npub struct GetTournamentRoundsCommand {\n\n pub tournament_id: i32,\n\n}\n\n\n\nimpl ResponseCommand for GetTournamentRoundsCommand {\n\n fn do_execute(&self, connection: &PgConnection) -> Result<JsonValue, ErrorType> {\n\n let rounds = RoundRowModel::get_all_from_tournament(&self.tournament_id, connection)?;\n\n let rounds_meta = generate_rounds_meta(rounds);\n\n Ok(json!({\n\n \"tournament_id\": &self.tournament_id,\n\n \"rounds\": rounds_meta,\n\n }))\n\n }\n\n\n\n fn get_request_summary(&self) -> String {\n", "file_path": "src/response_commands/round_match_commands.rs", "rank": 94, "score": 67935.35165956191 }, { 
"content": " )?;\n\n let matches = pairing_generator.generate_pairings(&round.id)?;\n\n MatchRowModel::bulk_create_from(&matches, connection)?;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl ResponseCommand for CreateAutomaticRoundCommand {\n\n fn do_execute(&self, connection: &PgConnection) -> Result<JsonValue, ErrorType> {\n\n let account = Account::login_from_jwt(&self.jwt, connection)?;\n\n let tournament_model = TournamentRowModel::get(&self.tournament_id, connection)?;\n\n\n\n let is_allowed_to_manage =\n\n is_allowed_to_manage_tournament(&account, &tournament_model, connection)?;\n\n if !is_allowed_to_manage {\n\n return Err(ErrorType::PermissionDenied);\n\n }\n\n\n\n if let Err(_) = connection.transaction::<(), Error, _>(|| {\n\n match self.create_new_automatic_pairings_round(&tournament_model, connection) {\n", "file_path": "src/response_commands/round_match_commands.rs", "rank": 95, "score": 67934.34402742164 }, { "content": " pub tournament_id: i32,\n\n pub round_id: i32,\n\n pub updated_name: String,\n\n}\n\n\n\nimpl ResponseCommand for UpdateRoundCommand {\n\n fn do_execute(&self, connection: &PgConnection) -> Result<JsonValue, ErrorType> {\n\n let account = Account::login_from_jwt(&self.jwt, connection)?;\n\n let tournament_model = TournamentRowModel::get(&self.tournament_id, connection)?;\n\n\n\n let is_allowed_to_manage =\n\n is_allowed_to_manage_tournament(&account, &tournament_model, connection)?;\n\n if !is_allowed_to_manage {\n\n return Err(ErrorType::PermissionDenied);\n\n }\n\n\n\n let mut round = RoundRowModel::get(&self.round_id, connection)?;\n\n round.name = self.updated_name.clone();\n\n round.update(connection)?;\n\n\n", "file_path": "src/response_commands/round_match_commands.rs", "rank": 96, "score": 67933.7923351385 }, { "content": " }\n\n}\n\n\n\npub struct CreateAutomaticRoundCommand {\n\n pub jwt: String,\n\n pub tournament_id: i32,\n\n pub name: String,\n\n}\n\n\n\nimpl CreateAutomaticRoundCommand {\n\n fn 
create_new_automatic_pairings_round(\n\n &self,\n\n tournament_model: &TournamentRowModel,\n\n connection: &PgConnection,\n\n ) -> Result<(), ErrorType> {\n\n let automatic_round_ids: HashSet<i32> = HashSet::from_iter(\n\n RoundRowModel::get_all_from_tournament(&tournament_model.id, connection)?\n\n .into_iter()\n\n .filter(|round| round.round_type == RoundType::Automatic.to_i32())\n\n .map(|round| round.id),\n", "file_path": "src/response_commands/round_match_commands.rs", "rank": 97, "score": 67933.42303458042 }, { "content": " String::from(format!(\"GetTournamentRounds for {}\", &self.tournament_id))\n\n }\n\n}\n\n\n\npub struct GetRoundCommand {\n\n pub round_id: i32,\n\n}\n\n\n\nimpl ResponseCommand for GetRoundCommand {\n\n fn do_execute(&self, connection: &PgConnection) -> Result<JsonValue, ErrorType> {\n\n let round = RoundRowModel::get(&self.round_id, connection)?;\n\n let tournament_id = round.tournament_id.clone();\n\n let matches = MatchRowModel::get_all_from_round(&self.round_id, connection)?;\n\n\n\n let round_meta_generator = RoundDetailsMetaGenerator {};\n\n let mut round_meta = round_meta_generator.generate_meta_for(&round);\n\n let matches_meta = generate_matches_meta(\n\n matches\n\n .into_iter()\n\n .sorted_by_key(|game_match| game_match.id)\n", "file_path": "src/response_commands/round_match_commands.rs", "rank": 98, "score": 67933.1362841874 }, { "content": " Ok(json!({\"message\": \"Round has been updated.\"}))\n\n }\n\n\n\n fn get_request_summary(&self) -> String {\n\n String::from(format!(\n\n \"UpdateRound for {} in tournament {}\",\n\n &self.round_id, &self.tournament_id\n\n ))\n\n }\n\n}\n\n\n\npub struct DeleteRoundCommand {\n\n pub jwt: String,\n\n pub tournament_id: i32,\n\n pub round_id: i32,\n\n}\n\n\n\nimpl ResponseCommand for DeleteRoundCommand {\n\n fn do_execute(&self, connection: &PgConnection) -> Result<JsonValue, ErrorType> {\n\n let account = Account::login_from_jwt(&self.jwt, connection)?;\n", "file_path": 
"src/response_commands/round_match_commands.rs", "rank": 99, "score": 67932.96028474707 } ]
Rust
src/lib.rs
vinaychandra/embedded-text
71c5e8abbb940deff1fcbab0c06c2c2fced5de10
#![cfg_attr(not(test), no_std)] #![deny(clippy::missing_inline_in_public_items)] #![deny(clippy::cargo)] #![deny(missing_docs)] #![warn(clippy::all)] pub mod alignment; pub mod parser; pub mod rendering; pub mod style; pub mod utils; use alignment::{HorizontalTextAlignment, VerticalTextAlignment}; use embedded_graphics::{prelude::*, primitives::Rectangle}; use rendering::RendererFactory; use style::{height_mode::HeightMode, TextBoxStyle}; use utils::rect_ext::RectExt; pub mod prelude { #[doc(no_inline)] pub use crate::{ alignment::*, style::{ height_mode::{Exact, FitToText, HeightMode, ShrinkToText}, TextBoxStyle, TextBoxStyleBuilder, }, StyledTextBox, TextBox, }; #[doc(no_inline)] pub use embedded_graphics::{ primitives::Rectangle, style::{TextStyle, TextStyleBuilder}, }; } pub struct TextBox<'a> { pub text: &'a str, pub bounds: Rectangle, } impl<'a> TextBox<'a> { #[inline] #[must_use] pub fn new(text: &'a str, bounds: Rectangle) -> Self { Self { text, bounds: bounds.into_well_formed(), } } #[inline] #[must_use] pub fn into_styled<C, F, A, V, H>( self, style: TextBoxStyle<C, F, A, V, H>, ) -> StyledTextBox<'a, C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, H: HeightMode, { let mut styled = StyledTextBox { text_box: self, style, }; H::apply(&mut styled); styled } } impl Transform for TextBox<'_> { #[inline] #[must_use] fn translate(&self, by: Point) -> Self { Self { bounds: self.bounds.translate(by), ..*self } } #[inline] fn translate_mut(&mut self, by: Point) -> &mut Self { self.bounds.translate_mut(by); self } } impl Dimensions for TextBox<'_> { #[inline] #[must_use] fn top_left(&self) -> Point { self.bounds.top_left } #[inline] #[must_use] fn bottom_right(&self) -> Point { self.bounds.bottom_right } #[inline] #[must_use] fn size(&self) -> Size { RectExt::size(self.bounds) } } pub struct StyledTextBox<'a, C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: 
VerticalTextAlignment, H: HeightMode, { pub text_box: TextBox<'a>, pub style: TextBoxStyle<C, F, A, V, H>, } impl<C, F, A, V, H> StyledTextBox<'_, C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, H: HeightMode, { #[inline] pub fn fit_height(&mut self) -> &mut Self { self.fit_height_limited(u32::max_value()) } #[inline] pub fn fit_height_limited(&mut self, max_height: u32) -> &mut Self { let text_height = self .style .measure_text_height(self.text_box.text, self.text_box.size().width) .min(max_height) .min(i32::max_value() as u32) as i32; let y = self.text_box.bounds.top_left.y; let new_y = y.saturating_add(text_height - 1); self.text_box.bounds.bottom_right.y = new_y; self } } impl<'a, C, F, A, V, H> Drawable<C> for &'a StyledTextBox<'a, C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, StyledTextBox<'a, C, F, A, V, H>: RendererFactory<'a, C>, H: HeightMode, { #[inline] fn draw<D: DrawTarget<C>>(self, display: &mut D) -> Result<(), D::Error> { display.draw_iter(StyledTextBox::create_renderer(self)) } } impl<C, F, A, V, H> Transform for StyledTextBox<'_, C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, H: HeightMode, { #[inline] #[must_use] fn translate(&self, by: Point) -> Self { Self { text_box: self.text_box.translate(by), style: self.style, } } #[inline] fn translate_mut(&mut self, by: Point) -> &mut Self { self.text_box.bounds.translate_mut(by); self } } impl<C, F, A, V, H> Dimensions for StyledTextBox<'_, C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, H: HeightMode, { #[inline] #[must_use] fn top_left(&self) -> Point { self.text_box.bounds.top_left } #[inline] #[must_use] fn bottom_right(&self) -> Point { self.text_box.bounds.bottom_right } #[inline] #[must_use] fn size(&self) -> Size { self.text_box.size() } }
#![cfg_attr(not(test), no_std)] #![deny(clippy::missing_inline_in_public_items)] #![deny(clippy::cargo)] #![deny(missing_docs)] #![warn(clippy::all)] pub mod alignment; pub mod parser; pub mod rendering; pub mod style; pub mod utils; use alignment::{HorizontalTextAlignment, VerticalTextAlignment}; use embedded_graphics::{prelude::*, primitives::Rectangle}; use rendering::RendererFactory; use style::{height_mode::HeightMode, TextBoxStyle}; use utils::rect_ext::RectExt; pub mod prelude { #[doc(no_inline)] pub use crate::{ alignment::*, style::{ height_mode::{Exact, FitToText, HeightMode, ShrinkToText}, TextBoxStyle, TextBoxStyleBuilder, }, StyledTextBox, TextBox, }; #[doc(no_inline)] pub use embedded_graphics::{ primitives::Rectangle, style::{TextStyle, TextStyleBuilder}, }; } pub struct TextBox<'a> { pub text: &'a str, pub bounds: Rectangle, } impl<'a> TextBox<'a> { #[inline] #[must_use] pub fn new(text: &'a str, bounds: Rectangle) -> Self { Self { text, bounds: bounds.into_well_formed(), } } #[inline] #[must_use] pub fn into_styled<C, F, A, V, H>( self, style: TextBoxStyle<C, F, A, V, H>, ) -> StyledTextBox<'a, C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, H: HeightMode, { let mut styled = StyledTextBox { text_box: self, style, }; H::apply(&mut styled); styled } } impl Transform for TextBox<'_> { #[inline] #[must_use] fn translate(&self, by: Point) -> Self { Self { bounds: self.bounds.translate(by), ..*self } } #[inline] fn translate_mut(&mut self, by: Point) -> &mut Self { self.bounds.translate_mut(by); self } } impl Dimensions for TextBox<'_> { #[inline] #[must_use] fn top_left(&self) -> Point { self.bounds.top_left } #[inline] #[must_use] fn bottom_right(&self) -> Point { self.bounds.bottom_right } #[inline] #[must_use] fn size(&self) -> Size { RectExt::size(self.bounds) } } pub struct StyledTextBox<'a, C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: 
VerticalTextAlignment, H: HeightMode, { pub text_box: TextBox<'a>, pub style: TextBoxStyle<C, F, A, V, H>, } impl<C, F, A, V, H> StyledTextBox<'_, C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, H: HeightMode, { #[inline] pub fn fit_height(&mut self) -> &mut Self { self.fit_height_limited(u32::max_value()) } #[inline] pub fn fit_height_limited(&mut self, max_height: u32) -> &mut Self {
let y = self.text_box.bounds.top_left.y; let new_y = y.saturating_add(text_height - 1); self.text_box.bounds.bottom_right.y = new_y; self } } impl<'a, C, F, A, V, H> Drawable<C> for &'a StyledTextBox<'a, C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, StyledTextBox<'a, C, F, A, V, H>: RendererFactory<'a, C>, H: HeightMode, { #[inline] fn draw<D: DrawTarget<C>>(self, display: &mut D) -> Result<(), D::Error> { display.draw_iter(StyledTextBox::create_renderer(self)) } } impl<C, F, A, V, H> Transform for StyledTextBox<'_, C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, H: HeightMode, { #[inline] #[must_use] fn translate(&self, by: Point) -> Self { Self { text_box: self.text_box.translate(by), style: self.style, } } #[inline] fn translate_mut(&mut self, by: Point) -> &mut Self { self.text_box.bounds.translate_mut(by); self } } impl<C, F, A, V, H> Dimensions for StyledTextBox<'_, C, F, A, V, H> where C: PixelColor, F: Font + Copy, A: HorizontalTextAlignment, V: VerticalTextAlignment, H: HeightMode, { #[inline] #[must_use] fn top_left(&self) -> Point { self.text_box.bounds.top_left } #[inline] #[must_use] fn bottom_right(&self) -> Point { self.text_box.bounds.bottom_right } #[inline] #[must_use] fn size(&self) -> Size { self.text_box.size() } }
let text_height = self .style .measure_text_height(self.text_box.text, self.text_box.size().width) .min(max_height) .min(i32::max_value() as u32) as i32;
assignment_statement
[ { "content": "fn str_width<F: Font>(s: &str, ignore_cr: bool) -> u32 {\n\n let mut width = 0;\n\n let mut current_width = 0;\n\n for c in s.chars() {\n\n if !ignore_cr && c == '\\r' {\n\n width = current_width.max(width);\n\n current_width = 0;\n\n } else {\n\n current_width += F::total_char_width(c);\n\n }\n\n }\n\n\n\n current_width.max(width)\n\n}\n\n\n", "file_path": "src/utils/font_ext.rs", "rank": 0, "score": 238486.9442689357 }, { "content": "fn max_str_width<F: Font>(s: &str, max_width: u32, ignore_cr: bool) -> (u32, &str) {\n\n let mut width = 0;\n\n let mut current_width = 0;\n\n for (idx, c) in s.char_indices() {\n\n if !ignore_cr && c == '\\r' {\n\n width = current_width.max(width);\n\n current_width = 0;\n\n } else {\n\n let new_width = current_width + F::total_char_width(c);\n\n if new_width > max_width {\n\n width = current_width.max(width);\n\n return (width, unsafe { s.get_unchecked(0..idx) });\n\n } else {\n\n current_width = new_width;\n\n }\n\n }\n\n }\n\n width = current_width.max(width);\n\n (width, s)\n\n}\n", "file_path": "src/utils/font_ext.rs", "rank": 1, "score": 237413.82826058404 }, { "content": "type LineIteratorSource<'a, C, F, A, V, H, SP> =\n\n fn(\n\n TextBoxStyle<C, F, A, V, H>,\n\n Option<Token<'a>>,\n\n Cursor<F>,\n\n Parser<'a>,\n\n ) -> StyledLinePixelIterator<'a, C, F, SP, A, V, H>;\n\n\n\n/// Pixel iterator for styled text.\n\npub struct StyledTextBoxIterator<'a, C, F, A, V, H, SP>\n\nwhere\n\n C: PixelColor,\n\n F: Font + Copy,\n\n A: HorizontalTextAlignment,\n\n V: VerticalTextAlignment,\n\n H: HeightMode,\n\n SP: SpaceConfig<Font = F>,\n\n{\n\n style: TextBoxStyle<C, F, A, V, H>,\n\n state: State<'a, C, F, SP, A, V, H>,\n", "file_path": "src/rendering/mod.rs", "rank": 2, "score": 213444.36978313042 }, { "content": "fn demo_loop<V>(window: &mut Window, bounds: &mut Rectangle, alignment: V) -> bool\n\nwhere\n\n V: VerticalTextAlignment + std::fmt::Debug,\n\n for<'a> &'a StyledTextBox<'a, BinaryColor, Font6x8, LeftAligned, 
TopAligned, Exact<FullRowsOnly>>:\n\n Drawable<BinaryColor>,\n\n{\n\n let text = \"Hello, World!\\nLorem Ipsum is simply dummy text of the printing and typesetting \\\n\n industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when \\\n\n an unknown printer took a galley of type and scrambled it to make a type specimen book.\";\n\n loop {\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(Size::new(255, 255));\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .vertical_alignment(alignment)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n let tb = TextBox::new(text, *bounds).into_styled(textbox_style);\n\n tb.draw(&mut display).unwrap();\n\n\n", "file_path": "examples/interactive_vertical.rs", "rank": 3, "score": 203543.55440232833 }, { "content": "fn benchmark_render_textbox_aligned(c: &mut Criterion) {\n\n let style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(RightAligned)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n c.bench_function(\"TextBox, RightAligned\", |b| {\n\n b.iter(|| {\n\n let obj = TextBox::new(\n\n black_box(TEXT),\n\n Rectangle::new(Point::zero(), Point::new(6 * 15 - 1, 7)),\n\n )\n\n .into_styled(style);\n\n let object = obj.create_renderer();\n\n object.collect::<Vec<Pixel<BinaryColor>>>()\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/render.rs", "rank": 4, "score": 200885.23188906352 }, { "content": "fn benchmark_render_textbox_both_aligned(c: &mut Criterion) {\n\n let style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(CenterAligned)\n\n .vertical_alignment(CenterAligned)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n c.bench_function(\"TextBox, H/V CenterAligned\", |b| {\n\n b.iter(|| {\n\n let obj = TextBox::new(\n\n black_box(TEXT),\n\n Rectangle::new(Point::zero(), Point::new(6 * 15 - 1, 7)),\n\n )\n\n .into_styled(style);\n\n let object = obj.create_renderer();\n\n object.collect::<Vec<Pixel<BinaryColor>>>()\n\n })\n\n 
});\n\n}\n\n\n\ncriterion_group!(\n\n render,\n\n benchmark_render_text,\n\n benchmark_render_textbox,\n\n benchmark_render_textbox_aligned,\n\n benchmark_render_textbox_vertical_aligned,\n\n benchmark_render_textbox_both_aligned,\n\n);\n\ncriterion_main!(render);\n", "file_path": "benches/render.rs", "rank": 5, "score": 200885.23188906352 }, { "content": "fn benchmark_render_textbox_vertical_aligned(c: &mut Criterion) {\n\n let style = TextBoxStyleBuilder::new(Font6x8)\n\n .vertical_alignment(BottomAligned)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n c.bench_function(\"TextBox, BottomAligned\", |b| {\n\n b.iter(|| {\n\n let obj = TextBox::new(\n\n black_box(TEXT),\n\n Rectangle::new(Point::zero(), Point::new(6 * 15 - 1, 7)),\n\n )\n\n .into_styled(style);\n\n let object = obj.create_renderer();\n\n object.collect::<Vec<Pixel<BinaryColor>>>()\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/render.rs", "rank": 6, "score": 195911.7029223816 }, { "content": "fn demo_loop<H>(window: &mut Window, bounds: &mut Rectangle, height_mode: H) -> bool\n\nwhere\n\n H: HeightMode + std::fmt::Debug,\n\n for<'a> &'a StyledTextBox<'a, BinaryColor, Font6x8, LeftAligned, TopAligned, Exact<FullRowsOnly>>:\n\n Drawable<BinaryColor>,\n\n{\n\n let text = \"Hello, World!\\nLorem Ipsum is simply dummy text of the printing and typesetting \\\n\n industry. 
Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when \\\n\n an unknown printer took a galley of type and scrambled it to make a type specimen book.\";\n\n loop {\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(Size::new(255, 255));\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .height_mode(height_mode)\n\n .build();\n\n\n\n let tb = TextBox::new(text, *bounds).into_styled(textbox_style);\n\n tb.draw(&mut display).unwrap();\n\n\n", "file_path": "examples/interactive_fit.rs", "rank": 7, "score": 178956.31270491713 }, { "content": "fn benchmark_render_textbox(c: &mut Criterion) {\n\n let style = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n c.bench_function(\"TextBox\", |b| {\n\n b.iter(|| {\n\n let obj = TextBox::new(\n\n black_box(TEXT),\n\n Rectangle::new(Point::zero(), Point::new(6 * 15 - 1, 7)),\n\n )\n\n .into_styled(style);\n\n let object = obj.create_renderer();\n\n object.collect::<Vec<Pixel<BinaryColor>>>()\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/render.rs", "rank": 8, "score": 177719.0968159906 }, { "content": "fn benchmark_render_text(c: &mut Criterion) {\n\n let style = TextStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n c.bench_function(\"Text\", |b| {\n\n b.iter(|| {\n\n let object = Text::new(black_box(TEXT), Point::zero()).into_styled(style);\n\n object.into_iter().collect::<Vec<Pixel<BinaryColor>>>()\n\n })\n\n });\n\n}\n\n\n", "file_path": "benches/render.rs", "rank": 9, "score": 177626.34289394628 }, { "content": "/// This trait is used to associate a renderer type to a horizontal alignment option.\n\n///\n\n/// Implementing this trait is only necessary when creating new alignment algorithms.\n\npub trait RendererFactory<'a, C: PixelColor> {\n\n /// The type of the pixel iterator.\n\n type Renderer: Iterator<Item = Pixel<C>>;\n\n\n\n /// Creates a new 
renderer object.\n\n fn create_renderer(&self) -> Self::Renderer;\n\n}\n\n\n", "file_path": "src/rendering/mod.rs", "rank": 10, "score": 171400.17038059086 }, { "content": "/// Vertical text alignment base trait.\n\n///\n\n/// Use implementors to parametrize [`TextBoxStyle`] and [`TextBoxStyleBuilder`].\n\n///\n\n/// [`TextBoxStyle`]: ../style/struct.TextBoxStyle.html\n\n/// [`TextBoxStyleBuilder`]: ../style/builder/struct.TextBoxStyleBuilder.html\n\npub trait VerticalTextAlignment: Copy {\n\n /// Set the cursor's initial vertical position\n\n fn apply_vertical_alignment<'a, C, F, A, H>(\n\n cursor: &mut Cursor<F>,\n\n styled_text_box: &'a StyledTextBox<'a, C, F, A, Self, H>,\n\n ) where\n\n C: PixelColor,\n\n F: Font + Copy,\n\n A: HorizontalTextAlignment,\n\n H: HeightMode;\n\n}\n\n\n\npub use bottom::BottomAligned;\n\npub use center::CenterAligned;\n\npub use justified::Justified;\n\npub use left::LeftAligned;\n\npub use right::RightAligned;\n\npub use top::TopAligned;\n", "file_path": "src/alignment/mod.rs", "rank": 11, "score": 170951.4849318395 }, { "content": "/// Horizontal text alignment base trait.\n\n///\n\n/// Use implementors to parametrize [`TextBoxStyle`] and [`TextBoxStyleBuilder`].\n\n///\n\n/// [`TextBoxStyle`]: ../style/struct.TextBoxStyle.html\n\n/// [`TextBoxStyleBuilder`]: ../style/builder/struct.TextBoxStyleBuilder.html\n\npub trait HorizontalTextAlignment: Copy {\n\n /// Whether or not render spaces in the start of the line.\n\n const STARTING_SPACES: bool;\n\n\n\n /// Whether or not render spaces in the end of the line.\n\n const ENDING_SPACES: bool;\n\n}\n\n\n", "file_path": "src/alignment/mod.rs", "rank": 12, "score": 170951.4849318395 }, { "content": "fn demo_loop<A>(window: &mut Window, bounds: &mut Rectangle, alignment: A) -> bool\n\nwhere\n\n A: HorizontalTextAlignment + core::fmt::Debug,\n\n for<'a> &'a StyledTextBox<'a, BinaryColor, Font6x8, A, TopAligned, Exact<FullRowsOnly>>:\n\n Drawable<BinaryColor>,\n\n{\n\n let text = \"Hello, 
World!\\nLorem Ipsum is simply dummy text of the printing and typesetting \\\n\n industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when \\\n\n an unknown printer took a galley of type and scrambled it to make a type specimen book. \\n\\\n\n super\\u{AD}­cali\\u{AD}­fragi\\u{AD}­listic\\u{AD}­espeali\\u{AD}­docious\";\n\n loop {\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(Size::new(255, 255));\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(alignment)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n TextBox::new(text, *bounds)\n\n .into_styled(textbox_style)\n", "file_path": "examples/interactive.rs", "rank": 13, "score": 162070.77944250306 }, { "content": "#[derive(Debug)]\n\nenum State<C, F>\n\nwhere\n\n C: PixelColor,\n\n F: Font + Copy,\n\n{\n\n /// Fetch next render element.\n\n FetchNext,\n\n\n\n /// Render a character.\n\n Char(CharacterIterator<C, F>),\n\n\n\n /// Render a block of whitespace.\n\n Space(EmptySpaceIterator<C, F>),\n\n\n\n /// Render a block of whitespace with underlined or strikethrough effect.\n\n ModifiedSpace(ModifiedEmptySpaceIterator<C, F>),\n\n}\n\n\n\n/// Pixel iterator to render a single line of styled text.\n\n#[derive(Debug)]\n", "file_path": "src/rendering/line.rs", "rank": 14, "score": 131857.70182048227 }, { "content": "#[inline]\n\npub fn try_parse_sgr(v: &[u8]) -> Option<Sgr> {\n\n let code = *v.get(0)?;\n\n match code {\n\n 0 => Some(Sgr::Reset),\n\n 4 => Some(Sgr::Underline),\n\n 9 => Some(Sgr::CrossedOut),\n\n 24 => Some(Sgr::UnderlineOff),\n\n 29 => Some(Sgr::NotCrossedOut),\n\n 39 => Some(Sgr::DefaultTextColor),\n\n 49 => Some(Sgr::DefaultBackgroundColor),\n\n 30..=37 => Some(Sgr::ChangeTextColor(standard_to_rgb(code - 30))),\n\n 38 => {\n\n let color = try_parse_color(&v[1..])?;\n\n Some(Sgr::ChangeTextColor(color))\n\n }\n\n 90..=97 => Some(Sgr::ChangeTextColor(standard_to_rgb(code - 82))),\n\n 40..=47 => 
Some(Sgr::ChangeBackgroundColor(standard_to_rgb(code - 40))),\n\n 48 => {\n\n let color = try_parse_color(&v[1..])?;\n\n Some(Sgr::ChangeBackgroundColor(color))\n\n }\n\n 100..=107 => Some(Sgr::ChangeBackgroundColor(standard_to_rgb(code - 92))),\n\n _ => None,\n\n }\n\n}\n", "file_path": "src/rendering/ansi.rs", "rank": 15, "score": 124379.727006841 }, { "content": "fn is_space_char(c: char) -> bool {\n\n // zero-width space breaks whitespace sequences - this works as long as\n\n // space handling is symmetrical (i.e. starting == ending behaviour)\n\n c.is_whitespace() && !['\\n', '\\r', '\\t', SPEC_CHAR_NBSP].contains(&c) || c == SPEC_CHAR_ZWSP\n\n}\n\n\n\nimpl<'a> Parser<'a> {\n\n /// Create a new parser object to process the given piece of text.\n\n #[inline]\n\n #[must_use]\n\n pub fn parse(text: &'a str) -> Self {\n\n Self {\n\n inner: text.chars(),\n\n }\n\n }\n\n\n\n /// Returns true if there are no tokens to process.\n\n #[inline]\n\n #[must_use]\n\n pub fn is_empty(&self) -> bool {\n", "file_path": "src/parser/mod.rs", "rank": 16, "score": 120741.13851983545 }, { "content": "fn is_word_char(c: char) -> bool {\n\n // Word tokens are terminated when a whitespace, zwsp or shy character is found. 
An exception\n\n // to this rule is the nbsp, which is whitespace but is included in the word.\n\n (!c.is_whitespace() || c == SPEC_CHAR_NBSP)\n\n && ![SPEC_CHAR_ZWSP, SPEC_CHAR_SHY, SPEC_CHAR_ESCAPE].contains(&c)\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 17, "score": 120741.13851983545 }, { "content": "/// `Font` extensions\n\npub trait FontExt {\n\n /// Returns the total width of the character plus the character spacing.\n\n fn total_char_width(c: char) -> u32;\n\n\n\n /// Measure text width\n\n fn str_width(s: &str) -> u32;\n\n\n\n /// This function is identical to [`str_width`] except it does **not** handle carriage\n\n /// return characters.\n\n ///\n\n /// [`str_width`]: #method.str_width\n\n fn str_width_nocr(s: &str) -> u32;\n\n\n\n /// Measures a sequence of characters in a line with a determinate maximum width.\n\n ///\n\n /// Returns the width of the characters that fit into the given space and the processed string.\n\n fn max_str_width(s: &str, max_width: u32) -> (u32, &str);\n\n\n\n /// This function is identical to [`max_str_width`] except it does **not** handle carriage\n\n /// return characters.\n", "file_path": "src/utils/font_ext.rs", "rank": 18, "score": 120644.44358164127 }, { "content": "fn benchmark_current(c: &mut Criterion) {\n\n c.bench_function(\"Current parser\", |b| {\n\n b.iter(|| Parser::parse(black_box(TEXT)).collect::<Vec<Token<'_>>>())\n\n });\n\n}\n\n\n\ncriterion_group!(parse, benchmark_original, benchmark_current);\n\ncriterion_main!(parse);\n", "file_path": "benches/parse.rs", "rank": 19, "score": 113192.6783006243 }, { "content": "fn benchmark_original(c: &mut Criterion) {\n\n c.bench_function(\"Original parser\", |b| {\n\n b.iter(|| OriginalParser::parse(black_box(TEXT)).collect::<Vec<Token<'_>>>())\n\n });\n\n}\n\n\n", "file_path": "benches/parse.rs", "rank": 20, "score": 113192.6783006243 }, { "content": "/// Specifies how the [`TextBox`]'s height is adjusted when it is turned into a 
[`StyledTextBox`].\n\n///\n\n/// [`TextBox`]: ../../struct.TextBox.html\n\npub trait HeightMode: Copy {\n\n /// Apply the height mode to the textbox\n\n ///\n\n /// *Note:* This function is used by [`TextBox::into_styled`] and normally does not need to be\n\n /// called manually.\n\n ///\n\n /// [`TextBox::into_styled`]: ../../struct.TextBox.html#method.into_styled\n\n fn apply<C, F, A, V, H>(text_box: &mut StyledTextBox<'_, C, F, A, V, H>)\n\n where\n\n C: PixelColor,\n\n F: Font + Copy,\n\n A: HorizontalTextAlignment,\n\n V: VerticalTextAlignment,\n\n H: HeightMode;\n\n\n\n /// Calculate the range of rows of the current line that can be drawn.\n\n ///\n\n /// If a line does not fully fit in the bounding box, some `HeightMode` options allow drawing\n\n /// partial lines. For a partial line, this function calculates, which rows of each character\n\n /// should be displayed.\n", "file_path": "src/style/height_mode.rs", "rank": 21, "score": 105129.78581545362 }, { "content": "/// Implementors of this trait specify how drawing vertically outside the bounding box is handled.\n\npub trait VerticalOverdraw: Copy {\n\n /// Calculate the range of rows of the current line that can be drawn.\n\n fn calculate_displayed_row_range<F: Font>(cursor: &Cursor<F>) -> Range<i32>;\n\n}\n\n\n\n/// Only render full rows of text.\n\n#[derive(Copy, Clone, Debug)]\n\npub struct FullRowsOnly;\n\nimpl VerticalOverdraw for FullRowsOnly {\n\n #[inline]\n\n fn calculate_displayed_row_range<F: Font>(cursor: &Cursor<F>) -> Range<i32> {\n\n if cursor.in_display_area() {\n\n 0..F::CHARACTER_SIZE.height as i32\n\n } else {\n\n 0..0\n\n }\n\n }\n\n}\n\n\n\n/// Render partially visible rows, but only inside the bounding box.\n", "file_path": "src/style/vertical_overdraw.rs", "rank": 22, "score": 105122.60815888512 }, { "content": "/// Retrieves size of space characters.\n\npub trait SpaceConfig: Copy + Default {\n\n /// The font for which this space config belongs.\n\n type Font: Font;\n\n\n\n /// 
Look at the size of next n spaces, without advancing.\n\n fn peek_next_width(&self, n: u32) -> u32;\n\n\n\n /// Advance the internal state\n\n fn consume(&mut self, n: u32) -> u32;\n\n}\n\n\n\n/// Contains the fixed width of a space character.\n\n#[derive(Copy, Clone, Debug)]\n\npub struct UniformSpaceConfig<F: Font + Copy> {\n\n _font: PhantomData<F>,\n\n\n\n /// Space width.\n\n pub space_width: u32,\n\n}\n\n\n", "file_path": "src/rendering/space_config.rs", "rank": 23, "score": 99977.10523304397 }, { "content": "fn try_parse_color(v: &[u8]) -> Option<Rgb> {\n\n let color_type = *v.get(0)?;\n\n\n\n match color_type {\n\n 2 => try_parse_rgb(&v[1..]),\n\n 5 => try_parse_8b_color(&v[1..]),\n\n\n\n _ => None,\n\n }\n\n}\n\n\n\n/// Parse a set of SGR parameter numbers into a more convenient type\n", "file_path": "src/rendering/ansi.rs", "rank": 24, "score": 84703.37182388695 }, { "content": "fn try_parse_8b_color(v: &[u8]) -> Option<Rgb> {\n\n let color = *v.get(0)?;\n\n match color {\n\n // 0- 7: standard colors (as in ESC [ 30–37 m)\n\n // 8- 15: high intensity colors (as in ESC [ 90–97 m)\n\n 0..=15 => Some(standard_to_rgb(color)),\n\n\n\n // 16-231: 6 × 6 × 6 cube (216 colors): 16 + 36 × r + 6 × g + b (0 ≤ r, g, b ≤ 5)\n\n 16..=231 => {\n\n let color = color - 16;\n\n let extend_6 = |c| c * 51;\n\n\n\n let b = extend_6(color % 6);\n\n let color = color / 6;\n\n\n\n let g = extend_6(color % 6);\n\n let color = color / 6;\n\n\n\n let r = extend_6(color % 6);\n\n\n", "file_path": "src/rendering/ansi.rs", "rank": 25, "score": 84703.37182388695 }, { "content": "fn try_parse_rgb(v: &[u8]) -> Option<Rgb> {\n\n let r = *v.get(0)?;\n\n let g = *v.get(1)?;\n\n let b = *v.get(2)?;\n\n\n\n Some(Rgb::new(r, g, b))\n\n}\n\n\n", "file_path": "src/rendering/ansi.rs", "rank": 26, "score": 84703.37182388695 }, { "content": "/// [`Rectangle`] extensions\n\npub trait RectExt {\n\n /// Returns the (correct) size of a [`Rectangle`].\n\n fn size(self) -> Size;\n\n\n\n /// Sorts the 
coordinates of a [`Rectangle`] so that `top` < `bottom` and `left` < `right`.\n\n fn into_well_formed(self) -> Rectangle;\n\n}\n\n\n\nimpl RectExt for Rectangle {\n\n #[inline]\n\n #[must_use]\n\n fn size(self) -> Size {\n\n // TODO: remove if fixed in embedded-graphics\n\n let width = (self.bottom_right.x - self.top_left.x) as u32 + 1;\n\n let height = (self.bottom_right.y - self.top_left.y) as u32 + 1;\n\n\n\n Size::new(width, height)\n\n }\n\n\n\n #[inline]\n", "file_path": "src/utils/rect_ext.rs", "rank": 27, "score": 81142.42712717911 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let text = format!(\n\n \"{comment}/// Comment\\n\\\n\n {base_text}#[{attribute}derive{base_text}(Debug)]\\n\\\n\n {keyword}enum {type_name}{underlined}Foo{underlined_off}{base_text}<{lifetime}'a{base_text}> {{\\n\\\n\n {comment}\\t/// Decide what {strikethrough}not{strikethrough_off} to do next.\\n\\\n\n {highlighted_background}\\t{enum_variant}Bar{base_text}({type_name}{underlined}Token{underlined_off}{base_text}<{lifetime}'a{base_text}>),{end_of_line}\\n\\\n\n {line_background}{base_text}}}\",\n\n // colors\n\n line_background = \"\\x1b[48;5;16m\",\n\n highlighted_background = \"\\x1b[48;5;235m\",\n\n enum_variant = \"\\x1b[38;2;36;144;241m\",\n\n keyword = \"\\x1b[38;2;84;128;166m\",\n\n comment = \"\\x1b[38;2;94;153;73m\",\n\n base_text = \"\\x1b[97m\",\n\n attribute =\"\\x1b[38;2;220;220;157m\",\n\n type_name = \"\\x1b[38;2;78;201;176m\",\n\n lifetime = \"\\x1b[38;2;84;128;166m\",\n\n end_of_line = \"\\x1b[40C\",\n\n underlined = \"\\x1b[4m\",\n", "file_path": "examples/colored_text.rs", "rank": 28, "score": 70164.88404352074 }, { "content": "fn standard_to_rgb(idx: u8) -> Rgb {\n\n // These colors are used in PowerShell 6 in Windows 10\n\n match idx {\n\n 0 => Rgb::new(12, 12, 12),\n\n 1 => Rgb::new(197, 15, 31),\n\n 2 => Rgb::new(19, 161, 14),\n\n 3 => Rgb::new(193, 156, 0),\n\n 4 => Rgb::new(0, 55, 218),\n\n 5 => Rgb::new(136, 23, 152),\n\n 6 => 
Rgb::new(58, 150, 221),\n\n 7 => Rgb::new(204, 204, 204),\n\n\n\n 8 => Rgb::new(118, 118, 118),\n\n 9 => Rgb::new(231, 72, 86),\n\n 10 => Rgb::new(22, 198, 12),\n\n 11 => Rgb::new(249, 241, 165),\n\n 12 => Rgb::new(59, 120, 255),\n\n 13 => Rgb::new(180, 0, 158),\n\n 14 => Rgb::new(97, 214, 214),\n\n _ => Rgb::new(242, 242, 242),\n\n }\n\n}\n\n\n", "file_path": "src/rendering/ansi.rs", "rank": 29, "score": 69372.41924943258 }, { "content": "//! Module of small helpers\n\n\n\npub mod font_ext;\n\npub mod rect_ext;\n", "file_path": "src/utils/mod.rs", "rank": 30, "score": 69309.7744879684 }, { "content": " self.inner.as_str().is_empty()\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for Parser<'a> {\n\n type Item = Token<'a>;\n\n\n\n #[inline]\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let string = self.inner.as_str();\n\n\n\n if let Some(c) = self.inner.next() {\n\n if is_word_char(c) {\n\n // find the longest consecutive slice of text for a Word token\n\n while let Some(c) = self.inner.next() {\n\n if !is_word_char(c) {\n\n // pointer arithmetic to get the offset of `c` relative to `string`\n\n let offset = {\n\n let ptr_start = string.as_ptr() as usize;\n\n let ptr_cur = self.inner.as_str().as_ptr() as usize;\n", "file_path": "src/parser/mod.rs", "rank": 31, "score": 69300.70840140125 }, { "content": " }\n\n }\n\n }\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::{Parser, Token};\n\n use ansi_parser::AnsiSequence;\n\n use heapless::Vec;\n\n\n\n fn assert_tokens(text: &str, tokens: std::vec::Vec<Token>) {\n\n assert_eq!(\n\n Parser::parse(text).collect::<std::vec::Vec<Token>>(),\n\n tokens\n\n )\n", "file_path": "src/parser/mod.rs", "rank": 32, "score": 69298.76177421946 }, { "content": " Break(Option<char>),\n\n\n\n /// An extra character - used to carry soft breaking chars.\n\n ExtraCharacter(char),\n\n\n\n /// An ANSI escape sequence\n\n EscapeSequence(AnsiSequence),\n\n}\n\n\n\n/// Text parser. 
Turns a string into a stream of [`Token`] objects.\n\n///\n\n/// [`Token`]: enum.Token.html\n\n#[derive(Clone, Debug)]\n\npub struct Parser<'a> {\n\n inner: Chars<'a>,\n\n}\n\n\n\npub(crate) const SPEC_CHAR_NBSP: char = '\\u{a0}';\n\npub(crate) const SPEC_CHAR_ZWSP: char = '\\u{200b}';\n\npub(crate) const SPEC_CHAR_SHY: char = '\\u{ad}';\n\npub(crate) const SPEC_CHAR_ESCAPE: char = '\\x1b';\n\n\n", "file_path": "src/parser/mod.rs", "rank": 33, "score": 69297.47214852691 }, { "content": "\n\n/// A text token\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum Token<'a> {\n\n /// A newline character.\n\n NewLine,\n\n\n\n /// A \\r character.\n\n CarriageReturn,\n\n\n\n /// A \\t character.\n\n Tab,\n\n\n\n /// A number of whitespace characters.\n\n Whitespace(u32),\n\n\n\n /// A word (a sequence of non-whitespace characters).\n\n Word(&'a str),\n\n\n\n /// A possible wrapping point\n", "file_path": "src/parser/mod.rs", "rank": 34, "score": 69295.88491135031 }, { "content": "//! Parse text into words, newlines and whitespace sequences.\n\n//!\n\n//! ```rust\n\n//! use embedded_text::parser::{Parser, Token};\n\n//!\n\n//! let parser = Parser::parse(\"Hello, world!\\n\");\n\n//! let tokens = parser.collect::<Vec<Token<'_>>>();\n\n//!\n\n//! assert_eq!(\n\n//! vec![\n\n//! Token::Word(\"Hello,\"),\n\n//! Token::Whitespace(1),\n\n//! Token::Word(\"world!\"),\n\n//! Token::NewLine\n\n//! ],\n\n//! tokens\n\n//! );\n\n//! 
```\n\nuse ansi_parser::AnsiSequence;\n\nuse core::str::Chars;\n", "file_path": "src/parser/mod.rs", "rank": 35, "score": 69295.60086290972 }, { "content": " }\n\n } else {\n\n // pointer arithmetic to get the offset of `c` relative to `string`\n\n let offset = {\n\n let ptr_start = string.as_ptr() as usize;\n\n let ptr_cur = self.inner.as_str().as_ptr() as usize;\n\n ptr_cur - ptr_start - c.len_utf8()\n\n };\n\n // consume the whitespaces\n\n self.inner = unsafe {\n\n // SAFETY: we only work with character boundaries and\n\n // offset is <= length\n\n string.get_unchecked(offset..).chars()\n\n };\n\n return Some(Token::Whitespace(len));\n\n }\n\n }\n\n\n\n // consumed all the text\n\n Some(Token::Whitespace(len))\n", "file_path": "src/parser/mod.rs", "rank": 36, "score": 69286.05034876811 }, { "content": " '\\n' => Some(Token::NewLine),\n\n '\\r' => Some(Token::CarriageReturn),\n\n '\\t' => Some(Token::Tab),\n\n SPEC_CHAR_ZWSP => Some(Token::Break(None)),\n\n SPEC_CHAR_SHY => Some(Token::Break(Some('-'))),\n\n SPEC_CHAR_ESCAPE => ansi_parser::parse_escape(string).map_or(\n\n Some(Token::EscapeSequence(AnsiSequence::Escape)),\n\n |(string, output)| {\n\n self.inner = string.chars();\n\n Some(Token::EscapeSequence(output))\n\n },\n\n ),\n\n\n\n // count consecutive whitespace\n\n _ => {\n\n let mut len = 1;\n\n while let Some(c) = self.inner.next() {\n\n if is_space_char(c) {\n\n if c != SPEC_CHAR_ZWSP {\n\n len += 1;\n", "file_path": "src/parser/mod.rs", "rank": 37, "score": 69285.9494385761 }, { "content": " ptr_cur - ptr_start - c.len_utf8()\n\n };\n\n self.inner = unsafe {\n\n // SAFETY: we only work with character boundaries and\n\n // offset is <= length\n\n string.get_unchecked(offset..).chars()\n\n };\n\n return Some(Token::Word(unsafe {\n\n // SAFETY: we only work with character boundaries and\n\n // offset is <= length\n\n string.get_unchecked(0..offset)\n\n }));\n\n }\n\n }\n\n\n\n // consumed all the text\n\n Some(Token::Word(string))\n\n } else {\n\n 
match c {\n\n // special characters\n", "file_path": "src/parser/mod.rs", "rank": 38, "score": 69283.37125935836 }, { "content": " }\n\n\n\n #[test]\n\n fn test_parse() {\n\n assert_tokens(\n\n \"Lorem ipsum \\r dolor sit am\\u{00AD}et,\\tconse😅ctetur adipiscing\\nelit\",\n\n vec![\n\n Token::Word(\"Lorem\"),\n\n Token::Whitespace(1),\n\n Token::Word(\"ipsum\"),\n\n Token::Whitespace(1),\n\n Token::CarriageReturn,\n\n Token::Whitespace(1),\n\n Token::Word(\"dolor\"),\n\n Token::Whitespace(1),\n\n Token::Word(\"sit\"),\n\n Token::Whitespace(1),\n\n Token::Word(\"am\"),\n\n Token::Break(Some('-')),\n\n Token::Word(\"et,\"),\n", "file_path": "src/parser/mod.rs", "rank": 39, "score": 69280.94556345737 }, { "content": " }\n\n\n\n #[test]\n\n fn parse_multibyte_last() {\n\n assert_tokens(\"test😅\", vec![Token::Word(\"test😅\")]);\n\n }\n\n\n\n #[test]\n\n fn parse_nbsp_as_word_char() {\n\n assert_eq!(9, \"test\\u{A0}word\".chars().count());\n\n assert_tokens(\"test\\u{A0}word\", vec![Token::Word(\"test\\u{A0}word\")]);\n\n assert_tokens(\n\n \" \\u{A0}word\",\n\n vec![Token::Whitespace(1), Token::Word(\"\\u{A0}word\")],\n\n );\n\n }\n\n\n\n #[test]\n\n fn parse_shy_issue_42() {\n\n assert_tokens(\n", "file_path": "src/parser/mod.rs", "rank": 40, "score": 69280.94556345737 }, { "content": " Token::Tab,\n\n Token::Word(\"conse😅ctetur\"),\n\n Token::Whitespace(1),\n\n Token::Word(\"adipiscing\"),\n\n Token::NewLine,\n\n Token::Word(\"elit\"),\n\n ],\n\n );\n\n }\n\n\n\n #[test]\n\n fn parse_zwsp() {\n\n assert_eq!(9, \"two\\u{200B}words\".chars().count());\n\n\n\n assert_tokens(\n\n \"two\\u{200B}words\",\n\n vec![Token::Word(\"two\"), Token::Break(None), Token::Word(\"words\")],\n\n );\n\n\n\n assert_tokens(\" \\u{200B} \", vec![Token::Whitespace(3)]);\n", "file_path": "src/parser/mod.rs", "rank": 41, "score": 69280.94556345737 }, { "content": " \"foo\\u{AD}bar\",\n\n vec![\n\n Token::Word(\"foo\"),\n\n Token::Break(Some('-')),\n\n Token::Word(\"bar\"),\n\n ],\n\n );\n\n 
}\n\n\n\n #[test]\n\n fn escape_char_ignored_if_not_ansi_sequence() {\n\n assert_tokens(\n\n \"foo\\x1bbar\",\n\n vec![\n\n Token::Word(\"foo\"),\n\n Token::EscapeSequence(AnsiSequence::Escape),\n\n Token::Word(\"bar\"),\n\n ],\n\n );\n\n\n", "file_path": "src/parser/mod.rs", "rank": 42, "score": 69280.94556345737 }, { "content": " assert_tokens(\n\n \"foo\\x1b[bar\",\n\n vec![\n\n Token::Word(\"foo\"),\n\n Token::EscapeSequence(AnsiSequence::Escape),\n\n Token::Word(\"[bar\"),\n\n ],\n\n );\n\n\n\n // can escape the escape char\n\n assert_tokens(\n\n \"foo\\x1b\\x1bbar\",\n\n vec![\n\n Token::Word(\"foo\"),\n\n Token::EscapeSequence(AnsiSequence::Escape),\n\n Token::Word(\"bar\"),\n\n ],\n\n );\n\n }\n\n\n", "file_path": "src/parser/mod.rs", "rank": 43, "score": 69280.94556345737 }, { "content": " #[test]\n\n fn escape_char_colors() {\n\n assert_tokens(\n\n \"foo\\x1b[34mbar\",\n\n vec![\n\n Token::Word(\"foo\"),\n\n Token::EscapeSequence(AnsiSequence::SetGraphicsMode(\n\n Vec::from_slice(&[34]).unwrap(),\n\n )),\n\n Token::Word(\"bar\"),\n\n ],\n\n );\n\n assert_tokens(\n\n \"foo\\x1b[95mbar\",\n\n vec![\n\n Token::Word(\"foo\"),\n\n Token::EscapeSequence(AnsiSequence::SetGraphicsMode(\n\n Vec::from_slice(&[95]).unwrap(),\n\n )),\n\n Token::Word(\"bar\"),\n", "file_path": "src/parser/mod.rs", "rank": 44, "score": 69280.94556345737 }, { "content": " ],\n\n );\n\n assert_tokens(\n\n \"foo\\x1b[48;5;16mbar\",\n\n vec![\n\n Token::Word(\"foo\"),\n\n Token::EscapeSequence(AnsiSequence::SetGraphicsMode(\n\n Vec::from_slice(&[48, 5, 16]).unwrap(),\n\n )),\n\n Token::Word(\"bar\"),\n\n ],\n\n );\n\n }\n\n}\n", "file_path": "src/parser/mod.rs", "rank": 45, "score": 69280.94556345737 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let text = \"Lorem Ipsum is simply dummy text of the printing and typesetting industry.\";\n\n\n\n let underlined_style = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .height_mode(FitToText)\n\n 
.underlined(true)\n\n .line_spacing(2)\n\n .build();\n\n let strikethrough_style = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .height_mode(FitToText)\n\n .strikethrough(true)\n\n .line_spacing(2)\n\n .build();\n\n\n\n let text_box = TextBox::new(text, Rectangle::new(Point::zero(), Point::new(96, 0)))\n\n .into_styled(underlined_style);\n\n\n\n let text_box2 = TextBox::new(text, Rectangle::new(Point::new(96, 0), Point::new(192, 0)))\n", "file_path": "examples/extra_styles.rs", "rank": 46, "score": 69254.71889214947 }, { "content": "pub mod vertical_overdraw;\n\n\n\nuse crate::{\n\n alignment::{HorizontalTextAlignment, VerticalTextAlignment},\n\n parser::{Parser, Token},\n\n rendering::{\n\n ansi::Sgr,\n\n cursor::Cursor,\n\n line_iter::{LineElementIterator, RenderElement},\n\n space_config::UniformSpaceConfig,\n\n },\n\n style::height_mode::HeightMode,\n\n utils::font_ext::FontExt,\n\n};\n\nuse core::marker::PhantomData;\n\nuse embedded_graphics::{prelude::*, primitives::Rectangle, style::TextStyle};\n\n\n\npub use builder::TextBoxStyleBuilder;\n\n\n\n/// Tab size helper\n", "file_path": "src/style/mod.rs", "rank": 47, "score": 68298.2607317176 }, { "content": " /// Desired space between lines, in pixels\n\n pub line_spacing: i32,\n\n\n\n /// Desired column width for tabs\n\n pub tab_size: TabSize<F>,\n\n\n\n /// If true, the text will be underlined\n\n pub underlined: bool,\n\n\n\n /// If true, the text will be crossed out\n\n pub strikethrough: bool,\n\n}\n\n\n\nimpl<C, F, A, V, H> TextBoxStyle<C, F, A, V, H>\n\nwhere\n\n C: PixelColor,\n\n F: Font + Copy,\n\n A: HorizontalTextAlignment,\n\n V: VerticalTextAlignment,\n\n H: HeightMode,\n", "file_path": "src/style/mod.rs", "rank": 48, "score": 68296.4794134283 }, { "content": "{\n\n /// Creates a `TextBoxStyle` object with transparent background.\n\n #[inline]\n\n pub fn new(\n\n font: F,\n\n text_color: C,\n\n alignment: A,\n\n vertical_alignment: V,\n\n height_mode: H,\n\n ) -> Self 
{\n\n Self {\n\n text_style: TextStyle::new(font, text_color),\n\n alignment,\n\n vertical_alignment,\n\n height_mode,\n\n line_spacing: 0,\n\n tab_size: TabSize::default(),\n\n underlined: false,\n\n strikethrough: false,\n\n }\n", "file_path": "src/style/mod.rs", "rank": 49, "score": 68295.10278721878 }, { "content": "pub struct TextBoxStyle<C, F, A, V, H>\n\nwhere\n\n C: PixelColor,\n\n F: Font + Copy,\n\n A: HorizontalTextAlignment,\n\n V: VerticalTextAlignment,\n\n H: HeightMode,\n\n{\n\n /// Style properties for text.\n\n pub text_style: TextStyle<C, F>,\n\n\n\n /// Horizontal text alignment.\n\n pub alignment: A,\n\n\n\n /// Vertical text alignment.\n\n pub vertical_alignment: V,\n\n\n\n /// The height behaviour\n\n pub height_mode: H,\n\n\n", "file_path": "src/style/mod.rs", "rank": 50, "score": 68294.58507080036 }, { "content": "///\n\n/// This type makes it more obvious what unit is used to define the width of tabs.\n\n/// The default tab size is 4 spaces.\n\n#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct TabSize<F: Font> {\n\n pub(crate) width: i32,\n\n _font: PhantomData<F>,\n\n}\n\n\n\nimpl<F: Font> Default for TabSize<F> {\n\n #[inline]\n\n fn default() -> Self {\n\n Self::spaces(4)\n\n }\n\n}\n\n\n\nimpl<F: Font> TabSize<F> {\n\n /// Calculate tab size from a number of spaces in the current font.\n\n #[inline]\n\n pub fn spaces(n: u32) -> Self {\n", "file_path": "src/style/mod.rs", "rank": 51, "score": 68293.37249700022 }, { "content": " }\n\n\n\n /// Creates a `TextBoxStyle` object from the given text style and alignment.\n\n #[inline]\n\n pub fn from_text_style(\n\n text_style: TextStyle<C, F>,\n\n alignment: A,\n\n vertical_alignment: V,\n\n height_mode: H,\n\n ) -> Self {\n\n Self {\n\n text_style,\n\n alignment,\n\n vertical_alignment,\n\n height_mode,\n\n line_spacing: 0,\n\n tab_size: TabSize::default(),\n\n underlined: false,\n\n strikethrough: false,\n\n }\n", "file_path": "src/style/mod.rs", "rank": 52, 
"score": 68292.78284173513 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::{alignment::*, parser::Parser, style::builder::TextBoxStyleBuilder};\n\n use embedded_graphics::{\n\n fonts::{Font, Font6x8},\n\n pixelcolor::BinaryColor,\n\n };\n\n\n\n #[test]\n\n fn no_infinite_loop() {\n\n let _ = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .build()\n\n .measure_text_height(\"a\", 5);\n\n }\n\n\n\n #[test]\n", "file_path": "src/style/mod.rs", "rank": 53, "score": 68285.86296572135 }, { "content": " }\n\n\n\n /// Measure the width and count spaces in a single line of text.\n\n ///\n\n /// Returns (width, rendered space count, carried token)\n\n ///\n\n /// Instead of peeking ahead when processing tokens, this function advances the parser before\n\n /// processing a token. If a token opens a new line, it will be returned as the carried token.\n\n /// If the carried token is `None`, the parser has finished processing the text.\n\n #[inline]\n\n #[must_use]\n\n pub fn measure_line<'a>(\n\n &self,\n\n parser: &mut Parser<'a>,\n\n carried_token: Option<Token<'a>>,\n\n max_line_width: u32,\n\n ) -> (u32, u32, Option<Token<'a>>, bool) {\n\n let cursor: Cursor<F> = Cursor::new(\n\n Rectangle::new(\n\n Point::zero(),\n", "file_path": "src/style/mod.rs", "rank": 54, "score": 68284.83070488168 }, { "content": " /// // |Lorem Ipsum |\n\n /// // |is simply |\n\n /// // |dummy text |\n\n /// // |of the |\n\n /// // |printing and|\n\n /// // |typesetting |\n\n /// // |industry. 
|\n\n ///\n\n /// assert_eq!(7 * 8, height);\n\n /// ```\n\n #[inline]\n\n #[must_use]\n\n pub fn measure_text_height(&self, text: &str, max_width: u32) -> u32 {\n\n let mut n_lines = 0_i32;\n\n let mut parser = Parser::parse(text);\n\n let mut carry = None;\n\n\n\n loop {\n\n let (w, _, t, underlined) = self.measure_line(&mut parser, carry.clone(), max_width);\n\n\n", "file_path": "src/style/mod.rs", "rank": 55, "score": 68282.89695887495 }, { "content": " let space = F::total_char_width(' ') as i32;\n\n // make sure n is at least 1, and the multiplication doesn't overflow\n\n let size = (n.max(1) as i32).checked_mul(space).unwrap_or(4 * space);\n\n\n\n Self::pixels(size)\n\n }\n\n\n\n /// Define the tab size in pixels.\n\n #[inline]\n\n pub fn pixels(px: i32) -> Self {\n\n Self {\n\n width: px,\n\n _font: PhantomData,\n\n }\n\n }\n\n\n\n /// Calculate the rendered with of the next tab\n\n #[inline]\n\n pub fn next_width(self, pos: i32) -> u32 {\n\n let next_tab_pos = (pos / self.width + 1) * self.width;\n", "file_path": "src/style/mod.rs", "rank": 56, "score": 68281.18179079577 }, { "content": " .build();\n\n\n\n let mut text = Parser::parse(\"123\\u{A0}45\");\n\n\n\n let (w, s, _, _) =\n\n textbox_style.measure_line(&mut text, None, 5 * Font6x8::CHARACTER_SIZE.width);\n\n assert_eq!(w, 5 * Font6x8::CHARACTER_SIZE.width);\n\n assert_eq!(s, 1);\n\n }\n\n\n\n #[test]\n\n fn test_measure_height_nbsp() {\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(CenterAligned)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n let text = \"123\\u{A0}45 123\";\n\n\n\n let height = textbox_style.measure_text_height(text, 5 * Font6x8::CHARACTER_SIZE.width);\n", "file_path": "src/style/mod.rs", "rank": 57, "score": 68280.77471890369 }, { "content": " #[test]\n\n fn test_measure_line() {\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(CenterAligned)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n let mut text = 
Parser::parse(\"123 45 67\");\n\n\n\n let (w, s, _, _) =\n\n textbox_style.measure_line(&mut text, None, 6 * Font6x8::CHARACTER_SIZE.width);\n\n assert_eq!(w, 6 * Font6x8::CHARACTER_SIZE.width);\n\n assert_eq!(s, 1);\n\n }\n\n\n\n #[test]\n\n fn test_measure_line_counts_nbsp() {\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(CenterAligned)\n\n .text_color(BinaryColor::On)\n", "file_path": "src/style/mod.rs", "rank": 58, "score": 68280.66762280992 }, { "content": " /// Measures text height when rendered using a given width.\n\n ///\n\n /// # Example: measure height of text when rendered using a 6x8 font and 72px width.\n\n ///\n\n /// ```rust\n\n /// # use embedded_text::style::builder::TextBoxStyleBuilder;\n\n /// # use embedded_graphics::fonts::Font6x8;\n\n /// # use embedded_graphics::pixelcolor::BinaryColor;\n\n /// #\n\n /// let style = TextBoxStyleBuilder::new(Font6x8)\n\n /// .text_color(BinaryColor::On)\n\n /// .build();\n\n ///\n\n /// let height = style.measure_text_height(\n\n /// \"Lorem Ipsum is simply dummy text of the printing and typesetting industry.\",\n\n /// 72,\n\n /// );\n\n ///\n\n /// // Expect 7 lines of text, wrapped in something like the following:\n\n ///\n", "file_path": "src/style/mod.rs", "rank": 59, "score": 68276.68355736339 }, { "content": " (next_tab_pos - pos) as u32\n\n }\n\n}\n\n\n\n/// Styling options of a [`TextBox`].\n\n///\n\n/// `TextBoxStyle` contains the `Font`, foreground and background `PixelColor`, line spacing,\n\n/// [`HeightMode`], [`HorizontalTextAlignment`] and [`VerticalTextAlignment`] information necessary\n\n/// to draw a [`TextBox`].\n\n///\n\n/// To construct a new `TextBoxStyle` object, use the [`new`] or [`from_text_style`] methods or\n\n/// the [`TextBoxStyleBuilder`] object.\n\n///\n\n/// [`TextBox`]: ../struct.TextBox.html\n\n/// [`HorizontalTextAlignment`]: ../alignment/trait.HorizontalTextAlignment.html\n\n/// [`VerticalTextAlignment`]: 
../alignment/trait.VerticalTextAlignment.html\n\n/// [`TextBoxStyleBuilder`]: builder/struct.TextBoxStyleBuilder.html\n\n/// [`new`]: #method.new\n\n/// [`from_text_style`]: #method.from_text_style\n\n#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Default)]\n", "file_path": "src/style/mod.rs", "rank": 60, "score": 68276.1719204936 }, { "content": "//! You have the following options:\n\n//!\n\n//! - Move the cursor forward `<n>` characters: `\\x1b[<n>C`. This command will stop at the end of\n\n//! line, so you can use it to simulate a highlighted line, for example.\n\n//! *Note:* Moving the cursor *forward* fills the line with the background color. If you want to\n\n//! avoid this, make sure to reset the background color before moving the cursor!\n\n//! - Move the cursor backward `<n>` characters: `\\x1b[<n>D`. This command will stop at the start\n\n//! of line.\n\n//!\n\n//! [`Sgr`]: ../rendering/ansi/enum.Sgr.html\n\n//! [`Rgb`]: ./color/struct.Rgb.html\n\n//! [`TextBox`]: ../struct.TextBox.html\n\n//! [`TextBoxStyle`]: struct.TextBoxStyle.html\n\n//! [`TextBoxStyleBuilder`]: builder/struct.TextBoxStyleBuilder.html\n\n//! [`TextBoxStyleBuilder::new`]: builder/struct.TextBoxStyleBuilder.html#method.new\n\n//! 
[`TextBox::into_styled`]: ../struct.TextBox.html#method.into_styled\n\n\n\npub mod builder;\n\npub mod color;\n\npub mod height_mode;\n", "file_path": "src/style/mod.rs", "rank": 61, "score": 68276.00053862047 }, { "content": " assert_eq!(height, 16);\n\n\n\n // bug discovered while using the interactive example\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(LeftAligned)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n let text = \"embedded-text also\\u{A0}supports non-breaking spaces.\";\n\n\n\n let height = textbox_style.measure_text_height(text, 79);\n\n assert_eq!(height, 4 * Font6x8::CHARACTER_SIZE.height);\n\n }\n\n\n\n #[test]\n\n fn height_with_line_spacing() {\n\n let style = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .line_spacing(2)\n\n .build();\n", "file_path": "src/style/mod.rs", "rank": 62, "score": 68275.11780620222 }, { "content": "//! `TextBox` styling.\n\n//!\n\n//! Style objects and why you need them\n\n//! ===================================\n\n//!\n\n//! By itself, a [`TextBox`] does not contain the information necessary to draw it on a display.\n\n//! This information is called \"style\" and it is contained in [`TextBoxStyle`] objects.\n\n//!\n\n//! The recommended (and most flexible) way of constructing a style object is using the\n\n//! [`TextBoxStyleBuilder`] builder object. The least amount of information necessary to create a\n\n//! text style is the `Font` used to render the text, so you'll need to specify this when you call\n\n//! [`TextBoxStyleBuilder::new`].\n\n//! You can then chain together various builder methods to customize font rendering.\n\n//!\n\n//! See the [`TextBoxStyleBuilder`] for more information on what styling options you have.\n\n//!\n\n//! To apply a style, call [`TextBox::into_styled`].\n\n//!\n\n//! In-band text styling using ANSI escape codes\n\n//! 
============================================\n", "file_path": "src/style/mod.rs", "rank": 63, "score": 68273.89160917654 }, { "content": " Point::new(\n\n max_line_width.saturating_sub(1) as i32,\n\n F::CHARACTER_SIZE.height.saturating_sub(1) as i32,\n\n ),\n\n ),\n\n self.line_spacing,\n\n );\n\n let mut iter: LineElementIterator<'_, F, _, A> = LineElementIterator::new(\n\n parser.clone(),\n\n cursor,\n\n UniformSpaceConfig::default(),\n\n carried_token.clone(),\n\n self.tab_size,\n\n );\n\n\n\n let mut current_width = 0;\n\n let mut last_spaces = 0;\n\n let mut total_spaces = 0;\n\n let mut underlined = self.underlined;\n\n while let Some(token) = iter.next() {\n", "file_path": "src/style/mod.rs", "rank": 64, "score": 68272.35146778566 }, { "content": " (\" \", 6, 0),\n\n (\"\\n \", 6, 8),\n\n (\"word\\n\", 2 * 6, 16),\n\n (\"word\\n \\nnext\", 50, 24),\n\n (\" Word \", 36, 8),\n\n ];\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(CenterAligned)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n for (i, (text, width, expected_height)) in data.iter().enumerate() {\n\n let height = textbox_style.measure_text_height(text, *width);\n\n assert_eq!(\n\n height, *expected_height,\n\n r#\"#{}: Height of \"{}\" is {} but is expected to be {}\"#,\n\n i, text, height, expected_height\n\n );\n\n }\n\n }\n\n\n", "file_path": "src/style/mod.rs", "rank": 65, "score": 68270.88685982884 }, { "content": " if (w != 0 || t.is_some()) && carry != Some(Token::CarriageReturn) {\n\n // something was in this line, increment height\n\n // if last carried token was a carriage return, we already counted the height\n\n n_lines += 1;\n\n }\n\n\n\n if t.is_none() {\n\n let mut height = (n_lines * F::CHARACTER_SIZE.height as i32\n\n + n_lines.saturating_sub(1) * self.line_spacing)\n\n as u32;\n\n\n\n if underlined {\n\n height += 1;\n\n }\n\n\n\n return height;\n\n }\n\n\n\n carry = t;\n\n }\n", "file_path": "src/style/mod.rs", "rank": 66, "score": 68267.78070781988 }, 
{ "content": "//!\n\n//! Sometimes you need more flexibility than what a single style object can provide, like changing\n\n//! font color for a specific word in the text. `embedded-text` supports this use case by using a\n\n//! subset of the standard [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code).\n\n//! These are special character sequences you can use *in the text* to change the font stlye of the\n\n//! text itself. This documentation does not aim to provide a full specification of all the ANSI\n\n//! escape codes, only describes the supported subset.\n\n//!\n\n//! > *Note:* if `embedded-text` fails to parse an escape sequence, it will ignore the `\\x1b` character\n\n//! and display the rest as normal text.\n\n//!\n\n//! All escape sequences start with the `\\x1b[` sequence, where `\\x1b` is the ASCII `escape`\n\n//! character. `embedded-text` supports a subset of the `SGR` parameters, which are numeric codes\n\n//! with specific functions, followed by a number of parameters and end with the `m` character.\n\n//!\n\n//! Currently, `embedded-text` supports changing the text and background colors. To do this, you\n\n//! have the following options:\n\n//!\n\n//! Standard color codes\n\n//! 
--------------------\n", "file_path": "src/style/mod.rs", "rank": 67, "score": 68267.26391938374 }, { "content": " (\"Longer\\rnowrap\", 36, 8),\n\n ];\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n for (i, (text, width, expected_height)) in data.iter().enumerate() {\n\n let height = textbox_style.measure_text_height(text, *width);\n\n assert_eq!(\n\n height, *expected_height,\n\n r#\"#{}: Height of \"{}\" is {} but is expected to be {}\"#,\n\n i, text, height, expected_height\n\n );\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_measure_height_ignored_spaces() {\n\n let data = [\n\n (\"\", 0, 0),\n\n (\" \", 0, 0),\n", "file_path": "src/style/mod.rs", "rank": 68, "score": 68267.25481229665 }, { "content": "//! * `\\x1b[4m`: Underlined text\n\n//! * `\\x1b[24m`: Turn off text underline\n\n//! * `\\x1b[9m`: Crossed out/strikethrough text\n\n//! * `\\x1b[29m`: Turn off strikethrough\n\n//! * `\\x1b[39m`: Reset text color\n\n//! * `\\x1b[49m`: Reset background color\n\n//!\n\n//! Reset style options to default\n\n//! ------------------------------\n\n//!\n\n//! `embedded-text` supports the `Reset all` (`\\x1b[0m`), `Default text color` (`\\x1b[39m`) and\n\n//! `Default background color` (`\\x1b[49m`) codes. These codes can be used to reset colors to\n\n//! *transparent* (i.e. no pixels drawn for text or background).\n\n//!\n\n//! In addition, `Reset all` turns off the underlined and crossed out styles.\n\n//!\n\n//! Other supported ANSI escape codes\n\n//! ---------------------------------\n\n//!\n\n//! 
Besides changing text style, you can also move the cursor using ANSI escape codes!\n", "file_path": "src/style/mod.rs", "rank": 69, "score": 68266.24100044213 }, { "content": " total_spaces += 1;\n\n } else if !A::ENDING_SPACES {\n\n // if ENDING_SPACES is true, spaces have already been counted and\n\n // last_spaces is 0\n\n total_spaces = last_spaces;\n\n }\n\n }\n\n\n\n RenderElement::Sgr(Sgr::Underline) => underlined = true,\n\n\n\n // Ignore color changes\n\n _ => {}\n\n }\n\n }\n\n\n\n let carried = iter.remaining_token();\n\n *parser = iter.parser;\n\n (current_width as u32, total_spaces, carried, underlined)\n\n }\n\n\n", "file_path": "src/style/mod.rs", "rank": 70, "score": 68265.511948093 }, { "content": "//!\n\n//! <style>\n\n//! .ansi_color {\n\n//! display: block;\n\n//! text-align: center;\n\n//! color: white;\n\n//! }\n\n//! </style>\n\n//!\n\n//! The standard color codes option is the simplest, and least flexible way to set color.\n\n//!\n\n//! | Color name | Text color | Background color | RGB888 |\n\n//! |---------------------|------------|------------------|-------------------------------------------------------------------------------------------------|\n\n//! | Black | `\\x1b[30m` | `\\x1b[40m` | <span class=\"ansi_color\" style=\"background: rgb(12,12,12);\"> 12,12,12 </span> |\n\n//! | Red | `\\x1b[31m` | `\\x1b[41m` | <span class=\"ansi_color\" style=\"background: rgb(197,15,31);\"> 197,15,31 </span> |\n\n//! | Green | `\\x1b[32m` | `\\x1b[42m` | <span class=\"ansi_color\" style=\"background: rgb(19,161,14);\"> 19,161,14 </span> |\n\n//! | Yellow | `\\x1b[33m` | `\\x1b[43m` | <span class=\"ansi_color\" style=\"background: rgb(193,156,0);\"> 193,156,0 </span> |\n\n//! | Blue | `\\x1b[34m` | `\\x1b[44m` | <span class=\"ansi_color\" style=\"background: rgb(0,55,218);\"> 0,55,218 </span> |\n\n//! | Magenta | `\\x1b[35m` | `\\x1b[45m` | <span class=\"ansi_color\" style=\"background: rgb(136,23,152);\"> 136,23,152 </span> |\n\n//! 
| Cyan | `\\x1b[36m` | `\\x1b[46m` | <span class=\"ansi_color\" style=\"background: rgb(58,150,221);\"> 58,150,221 </span> |\n", "file_path": "src/style/mod.rs", "rank": 71, "score": 68264.24636195666 }, { "content": "//! to the above types. The resulting color will be the closest match to what you specify.\n\n//!\n\n//! If you wish to use a different color type, you'll need to implement `From<Rgb>` for your color\n\n//! type and write the conversion yourself.\n\n//!\n\n//! Color values on monochrome displays\n\n//! -----------------------------------\n\n//!\n\n//! Monochrome displays use the `BinaryColor` color which can have two values: `On` or `Off`.\n\n//! You can still use the ANSI colors with the following considerations:\n\n//!\n\n//! * If the value of all three color channels are greater than `127`, the resulting color in `On`\n\n//! * Otherwise, the color is converted to `Off`.\n\n//!\n\n//! Other text styling options\n\n//! --------------------------\n\n//!\n\n//! The following [`Sgr`] sequences are supported:\n\n//!\n\n//! * `\\x1b[0m`: Reset everything\n", "file_path": "src/style/mod.rs", "rank": 72, "score": 68263.99322749053 }, { "content": "\n\n let height = style.measure_text_height(\n\n \"Lorem Ipsum is simply dummy text of the printing and typesetting industry.\",\n\n 72,\n\n );\n\n\n\n assert_eq!(height, 7 * 8 + 6 * 2);\n\n }\n\n}\n", "file_path": "src/style/mod.rs", "rank": 73, "score": 68262.70522194813 }, { "content": "//! * 232-255: grayscale from black to white\n\n//!\n\n//! 24 bit colors\n\n//! -------------\n\n//!\n\n//! 8 bit colors are in the form of either `\\x1b[38;2;<r>;<g>;<b>m` (text color) or\n\n//! `\\x1b[48;2;<r>;<g>;<b>m` (background color) sequece. Here, `<r>`, `<g>` and `<b>` can take any\n\n//! value between `0` and `255`.\n\n//!\n\n//! Color values on color spaces other than `Rgb888`\n\n//! ------------------------------------------------\n\n//!\n\n//! 
By default, `embedded-text` uses the following color types provided by `embedded-graphics`:\n\n//!\n\n//! * `Rgb888`\n\n//! * `Rgb565`\n\n//! * `Rgb555`\n\n//! * `BinaryColor`\n\n//!\n\n//! Internally, all ANSI color sequences are turned into the [`Rgb`] type, which can be converted\n", "file_path": "src/style/mod.rs", "rank": 74, "score": 68261.59186787148 }, { "content": "//! | White | `\\x1b[37m` | `\\x1b[47m` | <span class=\"ansi_color\" style=\"background: rgb(204,204,204); color: black;\"> 204,204,204 </span> |\n\n//! | Gray (Bright Black) | `\\x1b[90m` | `\\x1b[100m` | <span class=\"ansi_color\" style=\"background: rgb(118,118,118); color: black;\"> 118,118,118 </span> |\n\n//! | Bright Red | `\\x1b[91m` | `\\x1b[101m` | <span class=\"ansi_color\" style=\"background: rgb(231,72,86);\"> 231,72,86 </span> |\n\n//! | Bright Green | `\\x1b[92m` | `\\x1b[102m` | <span class=\"ansi_color\" style=\"background: rgb(22,198,12); color: black;\"> 22,198,12 </span> |\n\n//! | Bright Yellow | `\\x1b[93m` | `\\x1b[103m` | <span class=\"ansi_color\" style=\"background: rgb(249,241,165); color: black;\"> 249,241,165 </span> |\n\n//! | Bright Blue | `\\x1b[94m` | `\\x1b[104m` | <span class=\"ansi_color\" style=\"background: rgb(59,120,255);\"> 59,120,255 </span> |\n\n//! | Bright Magenta | `\\x1b[95m` | `\\x1b[105m` | <span class=\"ansi_color\" style=\"background: rgb(180,0,158);\"> 180,0,158 </span> |\n\n//! | Bright Cyan | `\\x1b[96m` | `\\x1b[106m` | <span class=\"ansi_color\" style=\"background: rgb(97,214,214); color: black;\"> 97,214,214 </span> |\n\n//! | Bright White | `\\x1b[97m` | `\\x1b[107m` | <span class=\"ansi_color\" style=\"background: rgb(242,242,242); color: black;\"> 242,242,242 </span> |\n\n//!\n\n//! 8 bit colors\n\n//! ------------\n\n//!\n\n//! 8 bit colors are in the form of either `\\x1b[38;5;<n>m` (text color) or `\\x1b[48;5;<n>m`\n\n//! (background color) sequece. Here, `<n>` marks a parameter that determines the color. `<n>` can\n\n//! 
have the following values:\n\n//!\n\n//! * 0-15: standard colors in the order of the above table.\n\n//! For example, `\\x1b[38;5;12m` is the `Bright Blue` color.\n\n//! * 16-231: 6 × 6 × 6 cube (216 colors): `16 + 36 × r + 6 × g + b (0 ≤ r, g, b ≤ 5)`\n", "file_path": "src/style/mod.rs", "rank": 75, "score": 68261.01659790418 }, { "content": " match token {\n\n RenderElement::Space(_, count) => {\n\n if A::ENDING_SPACES {\n\n // only track width if spaces are rendered at the end of a line\n\n current_width = iter.cursor.position.x;\n\n\n\n // in this case, count all spaces\n\n total_spaces += count;\n\n } else {\n\n // ... otherwise save the number of spaces and it will be tracked with\n\n // the next printed character, or it will be discarded\n\n last_spaces = total_spaces + count;\n\n }\n\n }\n\n\n\n RenderElement::PrintedCharacter(c) => {\n\n // the current width is always the position where the cursor is (left is 0)\n\n current_width = iter.cursor.position.x;\n\n\n\n if c == '\\u{A0}' {\n", "file_path": "src/style/mod.rs", "rank": 76, "score": 68259.80442223421 }, { "content": " fn test_measure_height() {\n\n let data = [\n\n (\"\", 0, 0),\n\n (\" \", 0, 8),\n\n (\" \", 5, 8),\n\n (\" \", 6, 8),\n\n (\"\\n\", 6, 8),\n\n (\"\\n \", 6, 16),\n\n (\"word\", 4 * 6, 8), // exact fit into 1 line\n\n (\"word\", 4 * 6 - 1, 16),\n\n (\"word\", 2 * 6, 16), // exact fit into 2 lines\n\n (\"word word\", 4 * 6, 16), // exact fit into 2 lines\n\n (\"word\\n\", 2 * 6, 16),\n\n (\"word\\nnext\", 50, 16),\n\n (\"word\\n\\nnext\", 50, 24),\n\n (\"word\\n \\nnext\", 50, 24),\n\n (\"verylongword\", 50, 16),\n\n (\"some verylongword\", 50, 24),\n\n (\"1 23456 12345 61234 561\", 36, 40),\n\n (\" Word \", 36, 24),\n", "file_path": "src/style/mod.rs", "rank": 77, "score": 68255.84180612057 }, { "content": "//! 
Text alignment options.\n\nuse crate::{rendering::cursor::Cursor, style::height_mode::HeightMode, StyledTextBox};\n\nuse embedded_graphics::prelude::*;\n\n\n\npub mod bottom;\n\npub mod center;\n\npub mod justified;\n\npub mod left;\n\npub mod right;\n\npub mod top;\n\n\n\n/// Horizontal text alignment base trait.\n\n///\n\n/// Use implementors to parametrize [`TextBoxStyle`] and [`TextBoxStyleBuilder`].\n\n///\n\n/// [`TextBoxStyle`]: ../style/struct.TextBoxStyle.html\n\n/// [`TextBoxStyleBuilder`]: ../style/builder/struct.TextBoxStyleBuilder.html\n", "file_path": "src/alignment/mod.rs", "rank": 78, "score": 68074.59181021551 }, { "content": " ) -> Self {\n\n let mut cursor = Cursor::new(styled.text_box.bounds, styled.style.line_spacing);\n\n\n\n V::apply_vertical_alignment(&mut cursor, &styled);\n\n\n\n Self {\n\n style: styled.style,\n\n state: State::NextLine(None, cursor, Parser::parse(styled.text_box.text)),\n\n next_line_fn: f,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, C, F, A, V, H, SP> Iterator for StyledTextBoxIterator<'a, C, F, A, V, H, SP>\n\nwhere\n\n C: PixelColor + From<Rgb>,\n\n F: Font + Copy,\n\n A: HorizontalTextAlignment,\n\n V: VerticalTextAlignment,\n\n H: HeightMode,\n", "file_path": "src/rendering/mod.rs", "rank": 79, "score": 67359.58019493302 }, { "content": " next_line_fn: LineIteratorSource<'a, C, F, A, V, H, SP>,\n\n}\n\n\n\nimpl<'a, C, F, A, V, H, SP> StyledTextBoxIterator<'a, C, F, A, V, H, SP>\n\nwhere\n\n C: PixelColor,\n\n F: Font + Copy,\n\n A: HorizontalTextAlignment,\n\n V: VerticalTextAlignment,\n\n H: HeightMode,\n\n SP: SpaceConfig<Font = F>,\n\n{\n\n /// Creates a new pixel iterator to render the styled [`TextBox`].\n\n ///\n\n /// [`TextBox`]: ../struct.TextBox.html\n\n #[inline]\n\n #[must_use]\n\n pub fn new(\n\n styled: &StyledTextBox<'a, C, F, A, V, H>,\n\n f: LineIteratorSource<'a, C, F, A, V, H, SP>,\n", "file_path": "src/rendering/mod.rs", "rank": 80, "score": 67358.57529529238 }, { "content": "/// State variable used by the 
right aligned text renderer.\n\n#[derive(Debug)]\n\npub enum State<'a, C, F, SP, A, V, H>\n\nwhere\n\n C: PixelColor,\n\n F: Font + Copy,\n\n SP: SpaceConfig<Font = F>,\n\n A: HorizontalTextAlignment,\n\n V: VerticalTextAlignment,\n\n H: HeightMode,\n\n{\n\n /// Starts processing a line.\n\n NextLine(Option<Token<'a>>, Cursor<F>, Parser<'a>),\n\n\n\n /// Renders the processed line.\n\n DrawLine(StyledLinePixelIterator<'a, C, F, SP, A, V, H>),\n\n}\n\n\n\n/// This trait is used to associate a renderer type to a horizontal alignment option.\n\n///\n\n/// Implementing this trait is only necessary when creating new alignment algorithms.\n", "file_path": "src/rendering/mod.rs", "rank": 81, "score": 67353.59300234946 }, { "content": "//! Pixel iterators used for text rendering.\n\n\n\npub mod ansi;\n\npub mod character;\n\npub mod cursor;\n\npub mod line;\n\npub mod line_iter;\n\npub mod modified_whitespace;\n\npub mod space_config;\n\npub mod whitespace;\n\n\n\nuse crate::{\n\n alignment::{HorizontalTextAlignment, VerticalTextAlignment},\n\n parser::{Parser, Token},\n\n rendering::{cursor::Cursor, line::StyledLinePixelIterator, space_config::SpaceConfig},\n\n style::{color::Rgb, height_mode::HeightMode, TextBoxStyle},\n\n StyledTextBox,\n\n};\n\nuse embedded_graphics::prelude::*;\n\n\n", "file_path": "src/rendering/mod.rs", "rank": 82, "score": 67347.1697121679 }, { "content": " SP: SpaceConfig<Font = F>,\n\n{\n\n type Item = Pixel<C>;\n\n\n\n #[inline]\n\n fn next(&mut self) -> Option<Self::Item> {\n\n loop {\n\n match self.state {\n\n State::NextLine(ref carried_token, ref cursor, ref parser) => {\n\n if carried_token.is_none() && parser.is_empty() {\n\n break None;\n\n }\n\n\n\n let f = self.next_line_fn;\n\n self.state = State::DrawLine(f(\n\n self.style,\n\n carried_token.clone(),\n\n *cursor,\n\n parser.clone(),\n\n ));\n", "file_path": "src/rendering/mod.rs", "rank": 83, "score": 67337.51096713312 }, { "content": " }\n\n\n\n State::DrawLine(ref mut line_iterator) 
=> {\n\n if let pixel @ Some(_) = line_iterator.next() {\n\n break pixel;\n\n }\n\n\n\n self.style = line_iterator.style;\n\n self.state = State::NextLine(\n\n line_iterator.remaining_token(),\n\n line_iterator.cursor(),\n\n line_iterator.parser(),\n\n );\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/rendering/mod.rs", "rank": 84, "score": 67321.70716237162 }, { "content": "\n\nimpl<F> FontExt for F\n\nwhere\n\n F: Font,\n\n{\n\n #[inline]\n\n fn total_char_width(c: char) -> u32 {\n\n if c == '\\u{A0}' {\n\n // A non-breaking space is as wide as a regular one\n\n return F::char_width(' ') + F::CHARACTER_SPACING;\n\n }\n\n F::char_width(c) + F::CHARACTER_SPACING\n\n }\n\n\n\n #[inline]\n\n fn str_width(s: &str) -> u32 {\n\n str_width::<F>(s, false)\n\n }\n\n\n\n #[inline]\n", "file_path": "src/utils/font_ext.rs", "rank": 85, "score": 67035.80118419521 }, { "content": " fn str_width_nocr(s: &str) -> u32 {\n\n str_width::<F>(s, true)\n\n }\n\n\n\n #[inline]\n\n #[must_use]\n\n fn max_str_width(s: &str, max_width: u32) -> (u32, &str) {\n\n max_str_width::<F>(s, max_width, false)\n\n }\n\n\n\n #[inline]\n\n #[must_use]\n\n fn max_str_width_nocr(s: &str, max_width: u32) -> (u32, &str) {\n\n max_str_width::<F>(s, max_width, true)\n\n }\n\n\n\n #[inline]\n\n #[must_use]\n\n fn max_space_width(n: u32, max_width: u32) -> (u32, u32) {\n\n let space_width = F::total_char_width(' ');\n", "file_path": "src/utils/font_ext.rs", "rank": 86, "score": 67033.6489368311 }, { "content": " let num_spaces = (max_width / space_width).min(n);\n\n\n\n (num_spaces * space_width, num_spaces)\n\n }\n\n\n\n #[inline]\n\n fn strikethrough_pos() -> u32 {\n\n F::CHARACTER_SIZE.height / 2\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use embedded_graphics::fonts::{Font6x6, Font6x8};\n\n\n\n #[test]\n\n fn nbsp_width_equal_to_space() {\n\n assert_eq!(\n\n Font6x8::total_char_width('\\u{A0}'),\n", "file_path": "src/utils/font_ext.rs", "rank": 87, "score": 67032.05048250561 }, 
{ "content": " ///\n\n /// [`max_str_width`]: #method.max_str_width\n\n fn max_str_width_nocr(s: &str, max_width: u32) -> (u32, &str);\n\n\n\n /// Measures a sequence of spaces in a line with a determinate maximum width.\n\n ///\n\n /// Returns the width of the spaces that fit into the given space and the number of spaces that\n\n /// fit.\n\n fn max_space_width(n: u32, max_width: u32) -> (u32, u32);\n\n\n\n /// Returns the y offset for the strikethrough line.\n\n fn strikethrough_pos() -> u32;\n\n}\n\n\n", "file_path": "src/utils/font_ext.rs", "rank": 88, "score": 67021.633544063 }, { "content": "//! Font helper extensions.\n\n//!\n\n//! Extends font types with some helper methods.\n\nuse embedded_graphics::fonts::Font;\n\n\n\n/// `Font` extensions\n", "file_path": "src/utils/font_ext.rs", "rank": 89, "score": 67020.2920371651 }, { "content": " Font6x8::total_char_width(' ')\n\n );\n\n assert_eq!(\n\n Font6x6::total_char_width('\\u{A0}'),\n\n Font6x6::total_char_width(' ')\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_str_width() {\n\n let data: [(&str, u32); 4] = [(\"\", 0), (\"foo\", 3), (\"foo\\rbar\", 3), (\"foo\\rfoobar\", 6)];\n\n for (word, chars) in data.iter() {\n\n assert_eq!(chars * 6, Font6x8::str_width(word));\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_max_fitting_empty() {\n\n assert_eq!((0, \"\"), Font6x8::max_str_width(\"\", 54));\n\n assert_eq!((0, \"\"), Font6x8::max_str_width_nocr(\"\", 54));\n", "file_path": "src/utils/font_ext.rs", "rank": 90, "score": 67019.14091143866 }, { "content": " }\n\n\n\n #[test]\n\n fn test_max_fitting_exact() {\n\n assert_eq!((54, \"somereall\"), Font6x8::max_str_width(\"somereall\", 54));\n\n assert_eq!(\n\n (54, \"somereall\"),\n\n Font6x8::max_str_width_nocr(\"somereall\", 54)\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_max_fitting_long_exact() {\n\n assert_eq!(\n\n (54, \"somereall\"),\n\n Font6x8::max_str_width(\"somereallylongword\", 54)\n\n );\n\n assert_eq!(\n\n (54, \"somereall\"),\n\n 
Font6x8::max_str_width_nocr(\"somereallylongword\", 54)\n", "file_path": "src/utils/font_ext.rs", "rank": 91, "score": 67016.46667681444 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n fn test_max_fitting_long() {\n\n assert_eq!(\n\n (54, \"somereall\"),\n\n Font6x8::max_str_width(\"somereallylongword\", 55)\n\n );\n\n assert_eq!(\n\n (54, \"somereall\"),\n\n Font6x8::max_str_width_nocr(\"somereallylongword\", 55)\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_cr() {\n\n assert_eq!(\n\n (48, \"somereal\\rlylong\"),\n\n Font6x8::max_str_width(\"somereal\\rlylong\", 55)\n", "file_path": "src/utils/font_ext.rs", "rank": 92, "score": 67016.35345369797 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n fn test_max_space_width() {\n\n assert_eq!((0, 0), Font6x8::max_space_width(0, 36));\n\n assert_eq!((36, 6), Font6x8::max_space_width(6, 36));\n\n assert_eq!((36, 6), Font6x8::max_space_width(6, 36));\n\n assert_eq!((36, 6), Font6x8::max_space_width(6, 38));\n\n assert_eq!((36, 6), Font6x8::max_space_width(7, 36));\n\n }\n\n}\n", "file_path": "src/utils/font_ext.rs", "rank": 93, "score": 67011.86695993906 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let text = \"Hello, World!\\nLorem Ipsum is simply dummy text of the printing and typesetting \\\n\n industry. 
Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when \\\n\n an unknown printer took a galley of type and scrambled it to make a type specimen book.\";\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(Justified)\n\n .text_color(Rgb565::RED)\n\n .background_color(Rgb565::GREEN)\n\n .build();\n\n\n\n let mut display: SimulatorDisplay<Rgb565> = SimulatorDisplay::new(Size::new(129, 129));\n\n\n\n TextBox::new(text, Rectangle::new(Point::zero(), Point::new(128, 128)))\n\n .into_styled(textbox_style)\n\n .draw(&mut display)\n\n .unwrap();\n\n\n\n let output_settings = OutputSettingsBuilder::new().build();\n\n Window::new(\"Hello TextBox with text background color\", &output_settings).show_static(&display);\n\n Ok(())\n\n}\n", "file_path": "examples/color.rs", "rank": 94, "score": 42747.260701452535 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let text = \"Hello, World!\\nLorem Ipsum is simply dummy text of the printing and typesetting \\\n\n industry. 
Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when \\\n\n an unknown printer took a galley of type and scrambled it to make a type specimen book.\";\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .text_color(BinaryColor::On)\n\n .height_mode(FitToText)\n\n .build();\n\n\n\n let text_box = TextBox::new(text, Rectangle::new(Point::zero(), Point::new(128, 0)))\n\n .into_styled(textbox_style);\n\n\n\n // Create a window just tall enough to fit the text.\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(text_box.size());\n\n text_box.draw(&mut display).unwrap();\n\n\n\n let output_settings = OutputSettingsBuilder::new()\n\n .theme(BinaryColorTheme::OledBlue)\n\n .build();\n\n Window::new(\"Hello TextBox\", &output_settings).show_static(&display);\n\n Ok(())\n\n}\n", "file_path": "examples/left.rs", "rank": 95, "score": 42747.260701452535 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let text = \"Hello, World!\\nLorem Ipsum is simply dummy text of the printing and typesetting \\\n\n industry. 
Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when \\\n\n an unknown printer took a galley of type and scrambled it to make a type specimen book.\";\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(RightAligned)\n\n .text_color(BinaryColor::On)\n\n .height_mode(FitToText)\n\n .build();\n\n\n\n let text_box = TextBox::new(text, Rectangle::new(Point::zero(), Point::new(128, 0)))\n\n .into_styled(textbox_style);\n\n\n\n // Create a window just tall enough to fit the text.\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(text_box.size());\n\n text_box.draw(&mut display).unwrap();\n\n\n\n let output_settings = OutputSettingsBuilder::new()\n\n .theme(BinaryColorTheme::OledBlue)\n\n .build();\n\n Window::new(\"Hello right aligned TextBox\", &output_settings).show_static(&display);\n\n Ok(())\n\n}\n", "file_path": "examples/right.rs", "rank": 96, "score": 42747.260701452535 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let output_settings = OutputSettingsBuilder::new()\n\n .theme(BinaryColorTheme::OledBlue)\n\n .build();\n\n let mut window = Window::new(\"TextBox demonstration\", &output_settings);\n\n\n\n let mut bounds = Rectangle::new(Point::new(0, 8), Point::new(128, 200));\n\n\n\n 'running: loop {\n\n if !demo_loop(&mut window, &mut bounds, Justified) {\n\n break 'running;\n\n }\n\n if !demo_loop(&mut window, &mut bounds, LeftAligned) {\n\n break 'running;\n\n }\n\n if !demo_loop(&mut window, &mut bounds, CenterAligned) {\n\n break 'running;\n\n }\n\n if !demo_loop(&mut window, &mut bounds, RightAligned) {\n\n break 'running;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/interactive.rs", "rank": 97, "score": 42747.260701452535 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let text = \"Hello, World!\\nLorem Ipsum is simply dummy text of the printing and typesetting \\\n\n industry. 
Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when \\\n\n an unknown printer took a galley of type and scrambled it to make a type specimen book.\";\n\n\n\n let textbox_style = TextBoxStyleBuilder::new(Font6x8)\n\n .alignment(CenterAligned)\n\n .height_mode(FitToText)\n\n .text_color(BinaryColor::On)\n\n .build();\n\n\n\n let text_box = TextBox::new(text, Rectangle::new(Point::zero(), Point::new(128, 0)))\n\n .into_styled(textbox_style);\n\n\n\n // Create a window just tall enough to fit the text.\n\n let mut display: SimulatorDisplay<BinaryColor> = SimulatorDisplay::new(text_box.size());\n\n text_box.draw(&mut display).unwrap();\n\n\n\n let output_settings = OutputSettingsBuilder::new()\n\n .theme(BinaryColorTheme::OledBlue)\n\n .build();\n\n Window::new(\"Hello center aligned TextBox\", &output_settings).show_static(&display);\n\n Ok(())\n\n}\n", "file_path": "examples/center.rs", "rank": 98, "score": 42747.260701452535 }, { "content": "fn main() -> Result<(), core::convert::Infallible> {\n\n let output_settings = OutputSettingsBuilder::new()\n\n .theme(BinaryColorTheme::OledBlue)\n\n .build();\n\n let mut window = Window::new(\"TextBox input demonstration\", &output_settings);\n\n let bounds = Rectangle::new(Point::new(0, 0), Point::new(128, 640));\n\n\n\n let inputs: HashMap<Keycode, (&str, &str, &str, &str)> = [\n\n // (Keycode, (NO, SHIFT, CAPS, ALT_GR))\n\n (Keycode::A, (\"a\", \"A\", \"A\", \"ä\")),\n\n (Keycode::B, (\"b\", \"B\", \"B\", \"{\")),\n\n (Keycode::C, (\"c\", \"C\", \"C\", \"&\")),\n\n (Keycode::D, (\"d\", \"D\", \"D\", \"Đ\")),\n\n (Keycode::E, (\"e\", \"E\", \"E\", \"Ä\")),\n\n (Keycode::F, (\"f\", \"F\", \"F\", \"[\")),\n\n (Keycode::G, (\"g\", \"G\", \"G\", \"]\")),\n\n (Keycode::H, (\"h\", \"H\", \"H\", \"\")),\n\n (Keycode::I, (\"i\", \"I\", \"I\", \"Í\")),\n\n (Keycode::J, (\"j\", \"J\", \"J\", \"í\")),\n\n (Keycode::K, (\"k\", \"K\", \"K\", \"ł\")),\n", "file_path": "examples/editor.rs", "rank": 99, 
"score": 42747.260701452535 } ]
Rust
src/pool.rs
ilammy/dynamic-pool
6237462e096814d2f9aa7977097433d3199018e3
use crossbeam_queue::ArrayQueue; use std::fmt::{Debug, Formatter}; use std::ops::{Deref, DerefMut}; use std::sync::{Arc, Weak}; use crate::DynamicReset; #[derive(Debug)] pub struct DynamicPool<T: DynamicReset> { data: Arc<PoolData<T>>, } impl<T: DynamicReset> DynamicPool<T> { pub fn new<F: Fn() -> T + Sync + Send + 'static>( initial_capacity: usize, maximum_capacity: usize, create: F, ) -> DynamicPool<T> { assert![initial_capacity <= maximum_capacity]; let items = ArrayQueue::new(maximum_capacity); for x in (0..initial_capacity).map(|_| create()) { items .push(x) .map_err(drop) .expect("invariant: items.len() always less than initial_capacity."); } let data = PoolData { items, create: Box::new(create), }; let data = Arc::new(data); DynamicPool { data } } pub fn take(&self) -> DynamicPoolItem<T> { let object = self .data .items .pop() .unwrap_or_else(|| (self.data.create)()); DynamicPoolItem { data: Arc::downgrade(&self.data), object: Some(object), } } pub fn try_take(&self) -> Option<DynamicPoolItem<T>> { let object = self.data.items.pop()?; let data = Arc::downgrade(&self.data); Some(DynamicPoolItem { data, object: Some(object), }) } #[inline] pub fn available(&self) -> usize { self.data.items.len() } #[inline] pub fn used(&self) -> usize { Arc::weak_count(&self.data) } #[inline] pub fn capacity(&self) -> usize { self.data.items.capacity() } } impl<T: DynamicReset> Clone for DynamicPool<T> { fn clone(&self) -> Self { Self { data: self.data.clone(), } } } struct PoolData<T> { items: ArrayQueue<T>, create: Box<dyn Fn() -> T + Sync + Send + 'static>, } impl<T: DynamicReset + Debug> Debug for PoolData<T> { fn fmt(&self, formatter: &mut Formatter) -> Result<(), std::fmt::Error> { formatter .debug_struct("PoolData") .field("items", &self.items) .field("create", &"Box<dyn Fn() -> T>") .finish() } } #[derive(Debug)] pub struct DynamicPoolItem<T: DynamicReset> { data: Weak<PoolData<T>>, object: Option<T>, } impl<T: DynamicReset> DynamicPoolItem<T> { pub fn detach(mut self) 
-> T { self.object .take() .expect("invariant: object is always `some`.") } } impl<T: DynamicReset> AsRef<T> for DynamicPoolItem<T> { fn as_ref(&self) -> &T { self.object .as_ref() .expect("invariant: object is always `some`.") } } impl<T: DynamicReset> Deref for DynamicPoolItem<T> { type Target = T; fn deref(&self) -> &T { self.object .as_ref() .expect("invariant: object is always `some`.") } } impl<T: DynamicReset> DerefMut for DynamicPoolItem<T> { fn deref_mut(&mut self) -> &mut T { self.object .as_mut() .expect("invariant: object is always `some`.") } } impl<T: DynamicReset> Drop for DynamicPoolItem<T> { fn drop(&mut self) { if let Some(mut object) = self.object.take() { object.reset(); if let Some(pool) = self.data.upgrade() { pool.items.push(object).ok(); } } } }
use crossbeam_queue::ArrayQueue; use std::fmt::{Debug, Formatter}; use std::ops::{Deref, DerefMut}; use std::sync::{Arc, Weak}; use crate::DynamicReset; #[derive(Debug)] pub struct DynamicPool<T: DynamicReset> { data: Arc<PoolData<T>>, } impl<T: DynamicReset> DynamicPool<T> { pub fn new<F: Fn() -> T + Sync + Send + 'static>( initial_capacity: usize, maximum_capacity: usize, create: F, ) -> DynamicPool<T> { assert![initial_capacity <= maximum_capacity]; let items = ArrayQueue::new(maximum_capacity); for x in (0..initial_capacity).map(|_| create()) { items .push(x) .map_err(drop) .expect("invariant: items.len() always less than initial_capacity."); } let data = PoolData { items, create: Box::new(create), }; let data = Arc::new(data); DynamicPool { data } } pub fn take(&self) -> DynamicPoolItem<T> { let object = self .data .items .pop() .unwrap_or_else(|| (self.data.create)()); DynamicPoolItem { data: Arc::downgrade(&self.data), object: Some(object), } } pub fn try_take(&self) -> Option<DynamicPoolItem<T>> { let object = self.data.items.pop()?; let data = Arc::downgrade(&self.data); Some(DynamicPoolItem { data, object: Some(object), }) } #[inline] pub fn available(&self) -> usize { self.data.items.len() } #[inline] pub fn used(&self) -> usize { Arc::weak_count(&self.data) } #[inline] pub fn capacity(&self) -> usize { self.data.items.capacity() } } impl<T: DynamicReset> Clone for DynamicPool<T> { fn clone(&self) -> Self { Self { data: self.data.clone(), } } } struct PoolData<T> { items: ArrayQueue<T>, create: Box<dyn Fn() -> T + Sync + Send + 'static>, } impl<T: DynamicReset + Debug> Debug for PoolData<T> { fn fmt(&self, formatter: &mut Formatter) -> Result<(), std::fmt::Error> { formatter .debug_struct("PoolData") .field("items", &self.items) .field("create", &"Box<dyn Fn() -> T>") .finish() } } #[derive(Debug)] pub struct DynamicPoolItem<T: DynamicReset> { data: Weak<PoolData<T>>, object: Option<T>, } impl<T: DynamicReset> DynamicPoolItem<T> { pub fn detach(mut self) 
-> T { self.object .take() .expect("invariant: object is always `some`.") } } impl<T: DynamicReset> AsRef<T> for DynamicPoolItem<T> { fn as_ref(&self) -> &T { self.object .as_ref() .expect("invariant: object is always `some`.") } } impl<T: DynamicReset> Deref for DynamicPoolItem<T> { type Target = T; fn deref(&self) -> &T { self.object .as_ref() .expect("invariant: object is always `some`.") } } impl<T: DynamicReset> DerefMut for DynamicPoolItem<T> { fn deref_mut(&mut self) -> &mut T { self.object .as_mut() .expect("invariant: object is always `some`.") } } impl<T: DynamicReset> Drop for DynamicPoolItem<T> {
}
fn drop(&mut self) { if let Some(mut object) = self.object.take() { object.reset(); if let Some(pool) = self.data.upgrade() { pool.items.push(object).ok(); } } }
function_block-full_function
[ { "content": "pub trait DynamicReset {\n\n fn reset(&mut self);\n\n}\n\n\n\nimpl<T> DynamicReset for Option<T>\n\nwhere\n\n T: DynamicReset,\n\n{\n\n fn reset(&mut self) {\n\n if let Some(x) = self {\n\n x.reset();\n\n }\n\n }\n\n}\n\n\n\nimpl<T1, T2> DynamicReset for (T1, T2)\n\nwhere\n\n T1: DynamicReset,\n\n T2: DynamicReset,\n\n{\n", "file_path": "src/reset.rs", "rank": 1, "score": 27795.80915816769 }, { "content": "fn main() {\n\n // Creates a new pool that will hold at most 10 items, starting with 1 item by default.\n\n let pool = DynamicPool::new(1, 10, Person::default);\n\n // Assert we have one item in the pool.\n\n assert_eq!(pool.available(), 1);\n\n\n\n // Take an item from the pool.\n\n let mut person = pool.take();\n\n person.name = \"jake\".into();\n\n person.age = 99;\n\n\n\n // Assert the pool is empty since we took the person above.\n\n assert_eq!(pool.available(), 0);\n\n // Dropping returns the item to the pool.\n\n drop(person);\n\n // We now have stuff available in the pool to take.\n\n assert_eq!(pool.available(), 1);\n\n\n\n // Take person from the pool again, it should be reset.\n\n let person = pool.take();\n", "file_path": "examples/simple.rs", "rank": 2, "score": 24730.391523530616 }, { "content": "#[derive(Default)]\n\nstruct Person {\n\n name: String,\n\n age: u16,\n\n}\n\n\n\nimpl DynamicReset for Person {\n\n fn reset(&mut self) {\n\n self.name.clear();\n\n self.age = 0;\n\n }\n\n}\n\n\n", "file_path": "examples/simple.rs", "rank": 3, "score": 23820.85563392989 }, { "content": "# `dynamic-pool`\n\n\n\n[![Build Status](https://travis-ci.org/discordapp/dynamic-pool.svg?branch=master)](https://travis-ci.org/discordapp/dynamic-pool)\n\n[![License](https://img.shields.io/github/license/discordapp/dynamic-pool.svg)](LICENSE)\n\n[![Documentation](https://docs.rs/dynamic-pool/badge.svg)](https://docs.rs/dynamic-pool)\n\n[![Cargo](https://img.shields.io/crates/v/dynamic-pool.svg)](https://crates.io/crates/dynamic-pool)\n\n\n\nA lock-free, 
thread-safe, dynamically-sized object pool.\n\n\n\nThis pool begins with an initial capacity and will continue creating new objects on request when none are available.\n\npooled objects are returned to the pool on destruction (with an extra provision to optionally \"reset\" the state of\n\nan object for re-use).\n\n\n\nIf, during an attempted return, a pool already has `maximum_capacity` objects in the pool, the pool will throw away\n\nthat object.\n\n\n\n## Basic Usage\n\n\n\nAdd this to your `Cargo.toml`:\n\n\n\n```toml\n\n[dependencies]\n\ndynamic-pool = \"0.1\"\n\n```\n\n\n\nNext, do some pooling:\n\n\n\n```rust\n\nuse dynamic_pool::{DynamicPool, DynamicReset};\n\n\n\n#[derive(Default)]\n\nstruct Person {\n\n name: String,\n\n age: u16,\n\n}\n\n\n\nimpl DynamicReset for Person {\n\n fn reset(&mut self) {\n\n self.name.clear();\n\n self.age = 0;\n\n }\n\n}\n\n\n\nfn main() {\n\n // Creates a new pool that will hold at most 10 items, starting with 1 item by default.\n\n let pool = DynamicPool::new(1, 10, Person::default);\n\n // Assert we have one item in the pool.\n\n assert_eq!(pool.available(), 1);\n\n\n\n // Take an item from the pool.\n\n let mut person = pool.take();\n\n person.name = \"jake\".into();\n\n person.age = 99;\n\n\n\n // Assert the pool is empty since we took the person above.\n\n assert_eq!(pool.available(), 0);\n\n // Dropping returns the item to the pool.\n\n drop(person);\n\n // We now have stuff available in the pool to take.\n\n assert_eq!(pool.available(), 1);\n\n\n\n // Take person from the pool again, it should be reset.\n\n let person = pool.take();\n\n assert_eq!(person.name, \"\");\n", "file_path": "README.md", "rank": 10, "score": 7.989387100346655 }, { "content": "mod pool;\n\nmod reset;\n\n\n\npub use self::pool::{DynamicPool, DynamicPoolItem};\n\npub use self::reset::DynamicReset;\n", "file_path": "src/lib.rs", "rank": 12, "score": 5.541316288747318 }, { "content": " assert_eq!(person.age, 0);\n\n\n\n // Nothing is in the 
queue.\n\n assert_eq!(pool.available(), 0);\n\n // try_take returns an Option. Since the pool is empty, nothing will be created.\n\n assert!(pool.try_take().is_none());\n\n // Dropping again returns the person to the pool.\n\n drop(person);\n\n // We have stuff in the pool now!\n\n assert_eq!(pool.available(), 1);\n\n\n\n // try_take would succeed here!\n\n let person = pool.try_take().unwrap();\n\n\n\n // We can also then detach the `person` from the pool, meaning it won't get\n\n // recycled.\n\n let person = person.detach();\n\n // We can then drop that person, and see that it's not returned to the pool.\n\n drop(person);\n\n assert_eq!(pool.available(), 0);\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 13, "score": 5.531242640341275 }, { "content": " assert_eq!(person.name, \"\");\n\n assert_eq!(person.age, 0);\n\n\n\n // Nothing is in the queue.\n\n assert_eq!(pool.available(), 0);\n\n // try_take returns an Option. Since the pool is empty, nothing will be created.\n\n assert!(pool.try_take().is_none());\n\n // Dropping again returns the person to the pool.\n\n drop(person);\n\n // We have stuff in the pool now!\n\n assert_eq!(pool.available(), 1);\n\n\n\n // try_take would succeed here!\n\n let person = pool.try_take().unwrap();\n\n\n\n // We can also then detach the `person` from the pool, meaning it won't get\n\n // recycled.\n\n let person = person.detach();\n\n // We can then drop that person, and see that it's not returned to the pool.\n\n drop(person);\n\n assert_eq!(pool.available(), 0);\n\n}\n", "file_path": "examples/simple.rs", "rank": 14, "score": 4.234761130760232 }, { "content": "use dynamic_pool::{DynamicPool, DynamicReset};\n\n\n\n#[derive(Default)]\n", "file_path": "examples/simple.rs", "rank": 15, "score": 1.8521003404337948 }, { "content": " fn reset(&mut self) {\n\n self.0.reset();\n\n self.1.reset();\n\n }\n\n}\n\n\n\nimpl<T1, T2, T3> DynamicReset for (T1, T2, T3)\n\nwhere\n\n T1: DynamicReset,\n\n T2: DynamicReset,\n\n T3: 
DynamicReset,\n\n{\n\n fn reset(&mut self) {\n\n self.0.reset();\n\n self.1.reset();\n\n self.2.reset();\n\n }\n\n}\n\n\n\nimpl<T1, T2, T3, T4> DynamicReset for (T1, T2, T3, T4)\n", "file_path": "src/reset.rs", "rank": 16, "score": 1.4046967354507012 }, { "content": " T5: DynamicReset,\n\n T6: DynamicReset,\n\n T7: DynamicReset,\n\n T8: DynamicReset,\n\n T9: DynamicReset,\n\n{\n\n fn reset(&mut self) {\n\n self.0.reset();\n\n self.1.reset();\n\n self.2.reset();\n\n self.3.reset();\n\n self.4.reset();\n\n self.5.reset();\n\n self.6.reset();\n\n self.7.reset();\n\n self.8.reset();\n\n }\n\n}\n\n\n\nimpl<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10> DynamicReset\n", "file_path": "src/reset.rs", "rank": 17, "score": 0.9435394280705947 }, { "content": " T5: DynamicReset,\n\n{\n\n fn reset(&mut self) {\n\n self.0.reset();\n\n self.1.reset();\n\n self.2.reset();\n\n self.3.reset();\n\n self.4.reset();\n\n }\n\n}\n\n\n\nimpl<T1, T2, T3, T4, T5, T6> DynamicReset for (T1, T2, T3, T4, T5, T6)\n\nwhere\n\n T1: DynamicReset,\n\n T2: DynamicReset,\n\n T3: DynamicReset,\n\n T4: DynamicReset,\n\n T5: DynamicReset,\n\n T6: DynamicReset,\n\n{\n", "file_path": "src/reset.rs", "rank": 18, "score": 0.8943967495252512 }, { "content": "where\n\n T1: DynamicReset,\n\n T2: DynamicReset,\n\n T3: DynamicReset,\n\n T4: DynamicReset,\n\n{\n\n fn reset(&mut self) {\n\n self.0.reset();\n\n self.1.reset();\n\n self.2.reset();\n\n self.3.reset();\n\n }\n\n}\n\n\n\nimpl<T1, T2, T3, T4, T5> DynamicReset for (T1, T2, T3, T4, T5)\n\nwhere\n\n T1: DynamicReset,\n\n T2: DynamicReset,\n\n T3: DynamicReset,\n\n T4: DynamicReset,\n", "file_path": "src/reset.rs", "rank": 19, "score": 0.8943967495252512 }, { "content": " fn reset(&mut self) {\n\n self.0.reset();\n\n self.1.reset();\n\n self.2.reset();\n\n self.3.reset();\n\n self.4.reset();\n\n self.5.reset();\n\n }\n\n}\n\n\n\nimpl<T1, T2, T3, T4, T5, T6, T7> DynamicReset for (T1, T2, T3, T4, T5, T6, T7)\n\nwhere\n\n T1: DynamicReset,\n\n T2: DynamicReset,\n\n 
T3: DynamicReset,\n\n T4: DynamicReset,\n\n T5: DynamicReset,\n\n T6: DynamicReset,\n\n T7: DynamicReset,\n\n{\n", "file_path": "src/reset.rs", "rank": 20, "score": 0.8607728115731741 }, { "content": " T8: DynamicReset,\n\n{\n\n fn reset(&mut self) {\n\n self.0.reset();\n\n self.1.reset();\n\n self.2.reset();\n\n self.3.reset();\n\n self.4.reset();\n\n self.5.reset();\n\n self.6.reset();\n\n self.7.reset();\n\n }\n\n}\n\n\n\nimpl<T1, T2, T3, T4, T5, T6, T7, T8, T9> DynamicReset for (T1, T2, T3, T4, T5, T6, T7, T8, T9)\n\nwhere\n\n T1: DynamicReset,\n\n T2: DynamicReset,\n\n T3: DynamicReset,\n\n T4: DynamicReset,\n", "file_path": "src/reset.rs", "rank": 21, "score": 0.8607728115731741 }, { "content": " fn reset(&mut self) {\n\n self.0.reset();\n\n self.1.reset();\n\n self.2.reset();\n\n self.3.reset();\n\n self.4.reset();\n\n self.5.reset();\n\n self.6.reset();\n\n }\n\n}\n\n\n\nimpl<T1, T2, T3, T4, T5, T6, T7, T8> DynamicReset for (T1, T2, T3, T4, T5, T6, T7, T8)\n\nwhere\n\n T1: DynamicReset,\n\n T2: DynamicReset,\n\n T3: DynamicReset,\n\n T4: DynamicReset,\n\n T5: DynamicReset,\n\n T6: DynamicReset,\n\n T7: DynamicReset,\n", "file_path": "src/reset.rs", "rank": 22, "score": 0.8295853908640012 }, { "content": " for (T1, T2, T3, T4, T5, T6, T7, T8, T9, T10)\n\nwhere\n\n T1: DynamicReset,\n\n T2: DynamicReset,\n\n T3: DynamicReset,\n\n T4: DynamicReset,\n\n T5: DynamicReset,\n\n T6: DynamicReset,\n\n T7: DynamicReset,\n\n T8: DynamicReset,\n\n T9: DynamicReset,\n\n T10: DynamicReset,\n\n{\n\n fn reset(&mut self) {\n\n self.0.reset();\n\n self.1.reset();\n\n self.2.reset();\n\n self.3.reset();\n\n self.4.reset();\n\n self.5.reset();\n\n self.6.reset();\n\n self.7.reset();\n\n self.8.reset();\n\n self.9.reset();\n\n }\n\n}\n", "file_path": "src/reset.rs", "rank": 23, "score": 0.8005789086659592 } ]
Rust
minidump-processor/src/processor.rs
sigiesec/rust-minidump
aaa0e8cff6d4ec63cbdccd2a1b4df796dec35b24
use breakpad_symbols::{FrameSymbolizer, Symbolizer}; use chrono::{TimeZone, Utc}; use minidump::{self, *}; use process_state::{CallStack, CallStackInfo, ProcessState}; use stackwalker; use std::boxed::Box; use std::ops::Deref; use system_info::SystemInfo; pub trait SymbolProvider { fn fill_symbol(&self, module: &Module, frame: &mut FrameSymbolizer); } impl SymbolProvider for Symbolizer { fn fill_symbol(&self, module: &Module, frame: &mut FrameSymbolizer) { self.fill_symbol(module, frame); } } #[derive(Default)] pub struct MultiSymbolProvider { providers: Vec<Box<SymbolProvider>>, } impl MultiSymbolProvider { pub fn new() -> MultiSymbolProvider { Default::default() } pub fn add(&mut self, provider: Box<SymbolProvider>) { self.providers.push(provider); } } impl SymbolProvider for MultiSymbolProvider { fn fill_symbol(&self, module: &Module, frame: &mut FrameSymbolizer) { for p in self.providers.iter() { p.fill_symbol(module, frame); } } } #[derive(Debug, Fail)] pub enum ProcessError { #[fail(display = "Failed to read minidump")] MinidumpReadError(minidump::Error), #[fail(display = "An unknown error occurred")] UnknownError, #[fail(display = "The system information stream was not found")] MissingSystemInfo, #[fail(display = "The thread list stream was not found")] MissingThreadList, } impl From<minidump::Error> for ProcessError { fn from(err: minidump::Error) -> ProcessError { ProcessError::MinidumpReadError(err) } } pub fn process_minidump<'a, T, P>( dump: &Minidump<'a, T>, symbol_provider: &P, ) -> Result<ProcessState, ProcessError> where T: Deref<Target=[u8]> + 'a, P: SymbolProvider, { let thread_list = dump.get_stream::<MinidumpThreadList>() .or(Err(ProcessError::MissingThreadList))?; let dump_system_info = dump.get_stream::<MinidumpSystemInfo>() .or(Err(ProcessError::MissingSystemInfo))?; let system_info = SystemInfo { os: dump_system_info.os, os_version: None, cpu: dump_system_info.cpu, cpu_info: None, cpu_count: dump_system_info.raw.number_of_processors as 
usize, }; let process_create_time = if let Ok(misc_info) = dump.get_stream::<MinidumpMiscInfo>() { misc_info.process_create_time() } else { None }; let breakpad_info = dump.get_stream::<MinidumpBreakpadInfo>(); let (dump_thread_id, requesting_thread_id) = if let Ok(info) = breakpad_info { (info.dump_thread_id, info.requesting_thread_id) } else { (None, None) }; let exception_stream = dump.get_stream::<MinidumpException>().ok(); let exception_ref = exception_stream.as_ref(); let (crash_reason, crash_address) = if let Some(exception) = exception_ref { ( Some(exception.get_crash_reason(system_info.os)), Some(exception.get_crash_address(system_info.os)), ) } else { (None, None) }; let exception_context = exception_ref.and_then(|e| e.context.as_ref()); let assertion = None; let modules = if let Ok(module_list) = dump.get_stream::<MinidumpModuleList>() { module_list.clone() } else { MinidumpModuleList::new() }; let mut threads = vec![]; let mut requesting_thread = None; for (i, thread) in thread_list.threads.iter().enumerate() { if dump_thread_id.is_some() && dump_thread_id.unwrap() == thread.raw.thread_id { threads.push(CallStack::with_info(CallStackInfo::DumpThreadSkipped)); continue; } let context = if requesting_thread_id.is_some() && requesting_thread_id.unwrap() == thread.raw.thread_id { requesting_thread = Some(i); exception_context.or(thread.context.as_ref()) } else { thread.context.as_ref() }; let stack = stackwalker::walk_stack(&context, &thread.stack, &modules, symbol_provider); threads.push(stack); } Ok(ProcessState { time: Utc.timestamp(dump.header.time_date_stamp as i64, 0), process_create_time: process_create_time, crash_reason: crash_reason, crash_address: crash_address, assertion: assertion, requesting_thread: requesting_thread, system_info: system_info, threads: threads, modules: modules, }) }
use breakpad_symbols::{FrameSymbolizer, Symbolizer}; use chrono::{TimeZone, Utc}; use minidump::{self, *}; use process_state::{CallStack, CallStackInfo, ProcessState}; use stackwalker; use std::boxed::Box; use std::ops::Deref; use system_info::SystemInfo; pub trait SymbolProvider { fn fill_symbol(&self, module: &Module, frame: &mut FrameSymbolizer); } impl SymbolProvider for Symbolizer { fn fill_symbol(&self, module: &Module, frame: &mut FrameSymbolizer) { self.fill_symbol(module, frame); } } #[derive(Default)] pub struct MultiSymbolProvider { providers: Vec<Box<SymbolProvider>>, } impl MultiSymbolProvider { pub fn new() -> MultiSymbolProvider { Default::default() } pub fn add(&mut self, provider: Box<SymbolProvider>) { self.providers.push(provider); } } impl SymbolProvider for MultiSymbolProvider { fn fill_symbol(&self, module: &Module, frame: &mut FrameSymbolizer) { for p in self.providers.iter() { p.fill_symbol(module, frame); } } } #[derive(Debug, Fail)] pub enum ProcessError { #[fail(display = "Failed to read minidump")] MinidumpReadError(minidump::Error), #[fail(display = "An unknown error occurred")] UnknownError, #[fail(display = "The system information stream was not found")] MissingSystemInfo, #[fail(display = "The thread list stream was not found")] MissingThreadList, } impl From<minidump::Error> for ProcessError { fn from(err: minidump::Error) -> ProcessError { ProcessError::MinidumpReadError(err) } } pub fn process_minidump<'a, T, P>( dump: &Minidump<'a, T>, symbol_provider: &P, ) -> Result<ProcessState, ProcessError> where T: Deref<Target=[u8]> + 'a, P: SymbolProvider, { let thread_list = dump.get_stream::<MinidumpThreadList>() .or(Err(ProcessError::MissingThreadList))?; let dump_system_info = dump.get_stream::<MinidumpSystemInfo>() .or(Err(ProcessError::MissingSystemInfo))?; let system_info = SystemInfo { os: dump_system_info.os, os_version: None, cpu: dump_system_info.cpu, cpu_info: None, cpu_count: dump_system_info.raw.number_of_processors as 
usize, }; let process_create_time = if let Ok(misc_info) = dump.get_stream::<MinidumpMiscInfo>() { misc_info.process_create_time() } else { None }; let breakpad_info = dump.get_stream::<MinidumpBreakpadInfo>(); let (dump_thread_id, requesting_thread_id) = if let Ok(info) = breakpad_info { (info.dump_thread_id, info.requesting_thread_id) } else { (None, None) }; let exception_stream = dump.get_stream::<MinidumpException>().ok(); let exception_ref = exception_stream.as_ref(); let (crash_reason, crash_address) = if let Some(exception) = exception_ref { ( Some(exception.get_crash_reason(system_info.os)), Some(exception.get_crash_address(system_info.os)), ) } else { (None, None) }; let exception_context = exception_ref.and_then(|e| e.context.as_ref()); let assertion = None; let modules = if let Ok(module_list) = dump.get_stream::<MinidumpModuleList>() { module_list.clone() } else { MinidumpModuleList::new() }; let mut threads = vec![]; let mut requesting_thread = None; for (i, thread) in thread_list.threads.iter().enumerate() { if dump_thread_id.is_some() && dump_thread_id.unwrap() == thread.raw.thread_id { threads.push(CallStack::with_info(CallStackInfo::DumpThreadSkipped)); continue; } let context = if requesting_thread_id.is_some() && requesting_thread_id.unwrap() == thread.raw.thread_id { requesting_thread = Some(i); exception_context.or(thread.context.as_ref()) } else { thread.context.as_ref() }; let stack = stackwalker::walk_stack(&context, &thread.stack, &modules, symbol_provider); threads.push(stack); } Ok(ProcessState { time: Utc.timestamp(dump.header.time_date_stamp as i64, 0), process_create_time: process_create_time, crash_reason: crash_reason, crash_address: crash_address,
assertion: assertion, requesting_thread: requesting_thread, system_info: system_info, threads: threads, modules: modules, }) }
function_block-function_prefix_line
[ { "content": "pub fn walk_stack<P>(\n\n maybe_context: &Option<&MinidumpContext>,\n\n stack_memory: &Option<MinidumpMemory>,\n\n modules: &MinidumpModuleList,\n\n symbol_provider: &P,\n\n) -> CallStack\n\nwhere\n\n P: SymbolProvider,\n\n{\n\n // Begin with the context frame, and keep getting callers until there are\n\n // no more.\n\n let mut frames = vec![];\n\n let mut info = CallStackInfo::Ok;\n\n if let &Some(context) = maybe_context {\n\n let ctx = context.clone();\n\n let mut maybe_frame = Some(StackFrame::from_context(ctx, FrameTrust::Context));\n\n while let Some(mut frame) = maybe_frame {\n\n fill_source_line_info(&mut frame, modules, symbol_provider);\n\n frames.push(frame);\n\n let last_frame = &frames.last().unwrap();\n", "file_path": "minidump-processor/src/stackwalker/mod.rs", "rank": 0, "score": 220169.28535936424 }, { "content": "fn read_stream_list<'a, T>(offset: &mut usize, bytes: &'a [u8], endian: scroll::Endian) -> Result<Vec<T>, Error>\n\n where T: TryFromCtx<'a, scroll::Endian, [u8], Error=scroll::Error, Size=usize>,\n\n T: SizeWith<scroll::Endian, Units=usize>,\n\n{\n\n let u: u32 = bytes.gread_with(offset, endian).or(Err(Error::StreamReadFailure))?;\n\n let count = u as usize;\n\n let counted_size = match count.checked_mul(<T>::size_with(&endian)).and_then(|v| v.checked_add(mem::size_of::<u32>())) {\n\n Some(s) => s,\n\n None => return Err(Error::StreamReadFailure),\n\n };\n\n if bytes.len() < counted_size {\n\n return Err(Error::StreamSizeMismatch {\n\n expected: counted_size,\n\n actual: bytes.len(),\n\n });\n\n }\n\n match bytes.len() - counted_size {\n\n 0 => {}\n\n 4 => {\n\n // 4 bytes of padding.\n", "file_path": "src/minidump.rs", "rank": 1, "score": 217839.82893685027 }, { "content": "pub fn dump_minidump_stack() -> Result<(), Error> {\n\n env_logger::init();\n\n let DumpStack { all, minidump, symbol_paths } =\n\n DumpStack::from_args();\n\n info!(\"Reading minidump {:?}\", minidump);\n\n let dump = 
Minidump::read_path(&minidump)?;\n\n let symbolizer = handle_symbol_paths(symbol_paths)?;\n\n let modules = dump.get_stream::<MinidumpModuleList>()?;\n\n let memory_list = dump.get_stream::<MinidumpMemoryList>()?;\n\n let sys_info = dump.get_stream::<MinidumpSystemInfo>()?;\n\n let wordsize = match sys_info.cpu {\n\n Cpu::X86 | Cpu::Ppc | Cpu::Sparc | Cpu::Arm => 4,\n\n Cpu::X86_64 | Cpu::Ppc64 | Cpu::Arm64 => 8,\n\n Cpu::Unknown(u) => bail!(\"Unknown cpu: {:#x}\", u),\n\n };\n\n // TODO: provide a commandline option for the address.\n\n // Default to the top of the crashing stack.\n\n let exception = dump.get_stream::<MinidumpException>()?;\n\n let context = exception.context.as_ref().ok_or(format_err!(\"Missing exception context\"))?;\n\n let sp = context.get_stack_pointer();\n", "file_path": "minidump-tools/src/lib.rs", "rank": 2, "score": 217644.1754355278 }, { "content": "/// A minidump stream.\n\npub trait Stream: DumpSection {\n\n /// The stream type, used in the stream directory.\n\n fn stream_type(&self) -> u32;\n\n /// Append an `MDRawDirectory` referring to this stream to `section`.\n\n fn cite_stream_in(&self, section: Section) -> Section {\n\n section.D32(self.stream_type())\n\n .cite_location(self)\n\n }\n\n}\n\n\n\nimpl SynthMinidump {\n\n /// Create a `SynthMinidump` with default endianness.\n\n pub fn new() -> SynthMinidump {\n\n SynthMinidump::with_endian(DEFAULT_ENDIAN)\n\n }\n\n\n\n /// Create a `SynthMinidump` with `endian` endianness.\n\n pub fn with_endian(endian: Endian) -> SynthMinidump {\n\n let flags = Label::new();\n\n let stream_count_label = Label::new();\n", "file_path": "src/synth_minidump.rs", "rank": 3, "score": 212401.40960841283 }, { "content": "/// Generic over the specifics of a CPU context.\n\npub trait CPUContext {\n\n /// The word size of general-purpose registers in the context.\n\n type Register: fmt::LowerHex;\n\n\n\n /// Get a register value if it is valid.\n\n ///\n\n /// Get the value of the register named `reg` from 
this CPU context\n\n /// if `valid` indicates that it has a valid value, otherwise return\n\n /// `None`.\n\n fn get_register(&self, reg: &str, valid: &MinidumpContextValidity) -> Option<Self::Register> {\n\n if let &MinidumpContextValidity::Some(ref which) = valid {\n\n if !which.contains(reg) {\n\n return None;\n\n }\n\n }\n\n Some(self.get_register_always(reg))\n\n }\n\n\n\n /// Return a String containing the value of `reg` formatted to its natural width.\n\n fn format_register(&self, reg: &str) -> String {\n", "file_path": "src/context.rs", "rank": 5, "score": 188963.85972744436 }, { "content": "/// An executable or shared library loaded in a process.\n\npub trait Module {\n\n /// The base address of this code module as it was loaded by the process.\n\n fn base_address(&self) -> u64;\n\n /// The size of the code module.\n\n fn size(&self) -> u64;\n\n /// The path or file name that the code module was loaded from.\n\n fn code_file(&self) -> Cow<str>;\n\n /// An identifying string used to discriminate between multiple versions and\n\n /// builds of the same code module. This may contain a uuid, timestamp,\n\n /// version number, or any combination of this or other information, in an\n\n /// implementation-defined format.\n\n fn code_identifier(&self) -> Cow<str>;\n\n /// The filename containing debugging information associated with the code\n\n /// module. If debugging information is stored in a file separate from the\n\n /// code module itself (as is the case when .pdb or .dSYM files are used),\n\n /// this will be different from code_file. 
If debugging information is\n\n /// stored in the code module itself (possibly prior to stripping), this\n\n /// will be the same as code_file.\n\n fn debug_file(&self) -> Option<Cow<str>>;\n\n /// An identifying string similar to code_identifier, but identifies a\n", "file_path": "minidump-common/src/traits.rs", "rank": 6, "score": 178295.5438566208 }, { "content": "/// A trait for setting symbol information on something like a stack frame.\n\npub trait FrameSymbolizer {\n\n /// Get the program counter value for this frame.\n\n fn get_instruction(&self) -> u64;\n\n /// Set the name and base address of the function in which this frame is executing.\n\n fn set_function(&mut self, name: &str, base: u64);\n\n /// Set the source file and (1-based) line number this frame represents.\n\n fn set_source_file(&mut self, file: &str, line: u32, base: u64);\n\n}\n\n\n\n/// A simple implementation of `FrameSymbolizer` that just holds data.\n\n#[derive(Debug, Default)]\n\npub struct SimpleFrame {\n\n /// The program counter value for this frame.\n\n pub instruction: u64,\n\n /// The name of the function in which the current instruction is executing.\n\n pub function: Option<String>,\n\n /// The offset of the start of `function` from the module base.\n\n pub function_base: Option<u64>,\n\n /// The name of the source file in which the current instruction is executing.\n\n pub source_file: Option<String>,\n", "file_path": "breakpad-symbols/src/lib.rs", "rank": 7, "score": 177107.08894130148 }, { "content": "fn locate_symbols(path: &str) -> Result<Context<EndianRcSlice<RunTimeEndian>>, Error> {\n\n let f = File::open(path)?;\n\n let buf = unsafe { memmap::Mmap::map(&f)? 
};\n\n let obj = object::File::parse(&*buf).map_err(|_| format_err!(\"Failed to parse {}\", path))?;\n\n if obj.has_debug_symbols() {\n\n let context = Context::new(&obj)\n\n .map_err(|_| format_err!(\"Failed to load debug symbols for {}\", path))?;\n\n Ok(context)\n\n } else {\n\n //TODO: use moria\n\n bail!(\"No debug symbols in {}\", path)\n\n }\n\n}\n\n\n\nimpl DwarfSymbolizer {\n\n pub fn new() -> DwarfSymbolizer {\n\n Default::default()\n\n }\n\n}\n\n\n", "file_path": "minidump-processor/src/dwarf_symbolizer.rs", "rank": 8, "score": 169785.56230171898 }, { "content": "/// The fundamental unit of data in a `Minidump`.\n\npub trait MinidumpStream<'a>: Sized {\n\n /// The stream type constant used in the `md::MDRawDirectory` entry.\n\n const STREAM_TYPE: MINIDUMP_STREAM_TYPE;\n\n /// Read this `MinidumpStream` type from `bytes`.\n\n ///\n\n /// `bytes` is the contents of this specific stream.\n\n /// `all` refers to the full contents of the minidump, for reading auxilliary data\n\n /// referred to with `MINIDUMP_LOCATION_DESCRIPTOR`s.\n\n fn read(bytes: &'a [u8], all: &'a [u8], endian: scroll::Endian) -> Result<Self, Error>;\n\n}\n\n\n\n/// CodeView data describes how to locate debug symbols\n\n#[derive(Clone)]\n\npub enum CodeView {\n\n /// PDB 2.0 format data in a separate file\n\n Pdb20(md::CV_INFO_PDB20),\n\n /// PDB 7.0 format data in a separate file (most common)\n\n Pdb70(md::CV_INFO_PDB70),\n\n /// Indicates data is in an ELF binary with build ID `build_id`\n\n Elf(md::CV_INFO_ELF),\n", "file_path": "src/minidump.rs", "rank": 9, "score": 168682.2648515129 }, { "content": "fn handle_symbol_paths(symbol_paths: Vec<PathBuf>) -> Result<Symbolizer, Error> {\n\n let tmp_path = env::temp_dir().join(\"symbols\");\n\n fs::create_dir_all(&tmp_path)?;\n\n let (symbol_paths, symbol_urls) = if symbol_paths.is_empty() {\n\n // Use the Mozilla symbol server if no symbol paths are supplied.\n\n let symbol_urls = vec![\"https://symbols.mozilla.org/\".to_owned()];\n\n 
(symbol_paths, symbol_urls)\n\n } else {\n\n let urls = symbol_paths.iter()\n\n .filter(|p| p.starts_with(\"http\"))\n\n .filter_map(|p| p.to_str().map(str::to_owned)).collect();\n\n let paths = symbol_paths.into_iter().filter(|p| !p.starts_with(\"http\")).collect();\n\n (paths, urls)\n\n };\n\n let supplier = HttpSymbolSupplier::new(symbol_urls, tmp_path, symbol_paths);\n\n Ok(Symbolizer::new(supplier))\n\n}\n\n\n", "file_path": "minidump-tools/src/lib.rs", "rank": 10, "score": 168060.9247195191 }, { "content": "pub fn get_minidump_instructions() -> Result<(), Error> {\n\n env_logger::init();\n\n let GetMinidumpInstructions { color, minidump, symbol_paths } =\n\n GetMinidumpInstructions::from_args();\n\n let dump = Minidump::read_path(&minidump)?;\n\n let modules = dump.get_stream::<MinidumpModuleList>()?;\n\n let exception = dump.get_stream::<MinidumpException>()?;\n\n let context = exception.context.as_ref().ok_or(format_err!(\"Missing exception context\"))?;\n\n let ip = context.get_instruction_pointer();\n\n let memory_list = dump.get_stream::<MinidumpMemoryList>()?;\n\n let memory = memory_list.memory_at_address(ip)\n\n .ok_or(format_err!(\"Minidump doesn't contain a memory region that contains the instruction pointer from the exception record\"))?;\n\n let sys_info = dump.get_stream::<MinidumpSystemInfo>()?;\n\n let arch = match sys_info.cpu {\n\n Cpu::X86 => CpuArch::X86,\n\n Cpu::X86_64 => CpuArch::X86_64,\n\n Cpu::Arm64 => CpuArch::ARM64,\n\n _ => return Err(format_err!(\"Unsupported CPU architecture: {}\", sys_info.cpu)),\n\n };\n\n let symbolizer = handle_symbol_paths(symbol_paths)?;\n", "file_path": "minidump-tools/src/lib.rs", "rank": 12, "score": 165710.38435986804 }, { "content": "/// A trait for things that can unwind to a caller.\n\npub trait Unwind {\n\n /// Get the caller frame of this frame.\n\n fn get_caller_frame(\n\n &self,\n\n valid: &MinidumpContextValidity,\n\n stack_memory: &Option<MinidumpMemory>,\n\n ) -> Option<StackFrame>;\n\n}\n", 
"file_path": "minidump-processor/src/stackwalker/unwind.rs", "rank": 13, "score": 162848.5140280423 }, { "content": "fn print_frame(module: &MinidumpModule, frame: SimpleFrame) {\n\n print!(\"{}\", basename(&module.code_file()));\n\n if let Some(ref func) = frame.function {\n\n print!(\"!{}\", func);\n\n if let (Some(ref file), Some(line)) = (frame.source_file, frame.source_line) {\n\n print!(\" [\");\n\n match parse_vcs_info(file) {\n\n Ok(info) => print!(\"{}\", info.annotate_url(line as u64)),\n\n _ => print!(\"{} : {}\", basename(file), line),\n\n }\n\n if let Some(line_base) = frame.source_line_base {\n\n print!(\" + {}\", frame.instruction - line_base);\n\n }\n\n print!(\"]\");\n\n } else if let Some(func_base) = frame.function_base {\n\n print!(\" + {:#x}\", frame.instruction - func_base);\n\n }\n\n } else {\n\n print!(\" + {:#x}\", frame.instruction - module.base_address());\n\n }\n\n println!(\"\");\n\n}\n\n\n", "file_path": "minidump-tools/src/lib.rs", "rank": 14, "score": 162531.62132931306 }, { "content": "fn read_test_minidump<'a>() -> Result<Minidump<'a, Mmap>, Error> {\n\n let path = get_test_minidump_path(\"test.dmp\");\n\n Minidump::read_path(&path)\n\n}\n\n\n", "file_path": "tests/test_minidump.rs", "rank": 15, "score": 160433.18610517812 }, { "content": "/// Shorthand for Read + Seek\n\npub trait Readable: Read + Seek {}\n\nimpl<T: Read + Seek> Readable for T {}\n\n\n", "file_path": "src/iostuff.rs", "rank": 16, "score": 157440.50067134976 }, { "content": "/// Read a u32 length-prefixed UTF-16 string from `bytes` at `offset`.\n\nfn read_string_utf16(offset: &mut usize, bytes: &[u8],\n\n endian: scroll::Endian) -> Result<String, ()> {\n\n let u: u32 = bytes.gread_with(offset, endian).or(Err(()))?;\n\n let size = u as usize;\n\n if size % 2 != 0 || (*offset + size) > bytes.len() {\n\n return Err(());\n\n }\n\n match UTF_16LE.decode(&bytes[*offset..*offset+size], DecoderTrap::Strict) {\n\n Ok(s) => {\n\n *offset += size;\n\n Ok(s)\n\n }\n\n Err(_) 
=> Err(()),\n\n }\n\n}\n\n\n", "file_path": "src/minidump.rs", "rank": 17, "score": 157135.08706154034 }, { "content": "/// A block of data contained in a minidump.\n\npub trait DumpSection: Into<Section> {\n\n /// A label representing this `DumpSection`'s offset in bytes from the start of the minidump.\n\n fn file_offset(&self) -> Label;\n\n /// A label representing this `DumpSection`'s size in bytes within the minidump.\n\n fn file_size(&self) -> Label;\n\n}\n\n\n", "file_path": "src/synth_minidump.rs", "rank": 18, "score": 155115.0273412709 }, { "content": "/// A trait for things that can locate symbols for a given module.\n\npub trait SymbolSupplier {\n\n /// Locate and load a symbol file for `module`.\n\n ///\n\n /// Implementations may use any strategy for locating and loading\n\n /// symbols.\n\n fn locate_symbols(&self, module: &Module) -> SymbolResult;\n\n}\n\n\n\n/// An implementation of `SymbolSupplier` that loads Breakpad text-format symbols from local disk\n\n/// paths.\n\n///\n\n/// See [`relative_symbol_path`] for details on how paths are searched.\n\n///\n\n/// [`relative_symbol_path`]: fn.relative_symbol_path.html\n\npub struct SimpleSymbolSupplier {\n\n /// Local disk paths in which to search for symbols.\n\n paths: Vec<PathBuf>,\n\n}\n\n\n\nimpl SimpleSymbolSupplier {\n", "file_path": "breakpad-symbols/src/lib.rs", "rank": 19, "score": 147765.45236349315 }, { "content": "fn read_test_minidump() -> Result<Minidump<'static, memmap::Mmap>, Error> {\n\n let path = locate_testdata().join(\"test.dmp\");\n\n println!(\"minidump: {:?}\", path);\n\n Minidump::read_path(&path)\n\n}\n\n\n", "file_path": "minidump-processor/tests/test_processor.rs", "rank": 20, "score": 147593.055994275 }, { "content": "/// Parse a `SymbolFile` from `path`.\n\npub fn parse_symbol_file(path: &Path) -> Result<SymbolFile, Error> {\n\n let mut f = File::open(path)?;\n\n let mut bytes = vec![];\n\n f.read_to_end(&mut bytes)?;\n\n parse_symbol_bytes(&bytes)\n\n}\n\n\n", 
"file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 21, "score": 146552.3807724056 }, { "content": "/// Parse a `SymbolFile` from `bytes`.\n\npub fn parse_symbol_bytes(bytes: &[u8]) -> Result<SymbolFile, Error> {\n\n match symbol_file(&bytes) {\n\n Done(rest, symfile) => {\n\n if rest == b\"\" {\n\n Ok(symfile)\n\n } else {\n\n // Junk left over, or maybe didn't parse anything.\n\n let next_line = rest.split(|b| *b == b'\\r').next()\n\n .map(|bytes| String::from_utf8_lossy(bytes))\n\n .unwrap_or(Cow::Borrowed(\"\"));\n\n Err(format_err!(\"Failed to parse file, next line was: `{}`\", next_line))\n\n }\n\n }\n\n Error(e) => Err(format_err!(\"Failed to parse file: {}\", e)),\n\n Incomplete(_) => Err(format_err!(\"Failed to parse file: incomplete data\")),\n\n }\n\n}\n\n\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 22, "score": 146552.3807724056 }, { "content": "#[derive(StructOpt)]\n\n#[structopt(name = \"dumpstack\", about = \"Display possible stack frames from a minidump\")]\n\nstruct DumpStack {\n\n #[structopt(help = \"Input minidump\", parse(from_os_str))]\n\n minidump: PathBuf,\n\n #[structopt(help = \"Symbol paths\", parse(from_os_str))]\n\n symbol_paths: Vec<PathBuf>,\n\n #[structopt(short = \"a\", help = \"Show stack addresses that don't resolve to functions\")]\n\n all: bool,\n\n}\n\n\n", "file_path": "minidump-tools/src/lib.rs", "rank": 23, "score": 145967.63879766015 }, { "content": "fn fill_source_line_info<P>(\n\n frame: &mut StackFrame,\n\n modules: &MinidumpModuleList,\n\n symbol_provider: &P,\n\n) where\n\n P: SymbolProvider,\n\n{\n\n // Find the module whose address range covers this frame's instruction.\n\n if let &Some(module) = &modules.module_at_address(frame.instruction) {\n\n // FIXME: this shouldn't need to clone, we should be able to use\n\n // the same lifetime as the module list that's passed in.\n\n frame.module = Some(module.clone());\n\n symbol_provider.fill_symbol(module, frame);\n\n }\n\n}\n\n\n", 
"file_path": "minidump-processor/src/stackwalker/mod.rs", "rank": 24, "score": 142895.31532858493 }, { "content": "pub trait CiteLocation {\n\n /// Append an `MINIDUMP_LOCATION_DESCRIPTOR` to `section` referring to this section.\n\n fn cite_location_in(&self, section: Section) -> Section;\n\n}\n\n\n\nimpl<T: DumpSection> CiteLocation for T {\n\n fn cite_location_in(&self, section: Section) -> Section {\n\n // An MINIDUMP_LOCATION_DESCRIPTOR is just a 32-bit size + 32-bit offset.\n\n section.D32(&self.file_size()).D32(&self.file_offset())\n\n }\n\n}\n\n\n\nimpl CiteLocation for (Label, Label) {\n\n fn cite_location_in(&self, section: Section) -> Section {\n\n section.D32(&self.0).D32(&self.1)\n\n }\n\n}\n\n\n\nimpl<T: CiteLocation> CiteLocation for Option<T> {\n\n fn cite_location_in(&self, section: Section) -> Section {\n\n match self {\n\n &Some(ref inner) => inner.cite_location_in(section),\n\n &None => section.D32(0).D32(0),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/synth_minidump.rs", "rank": 25, "score": 137342.02198371242 }, { "content": "/// Additional methods to make working with `Section`s simpler\n\npub trait SectionExtra {\n\n /// A chainable version of `CiteLocation::cite_location_in`\n\n fn cite_location<T: CiteLocation>(self, thing: &T) -> Self;\n\n /// A chainable version of `Memory::cite_memory_in`\n\n fn cite_memory(self, memory: &Memory) -> Self;\n\n}\n\n\n\nimpl SectionExtra for Section {\n\n fn cite_location<T: CiteLocation>(self, thing: &T) -> Self {\n\n thing.cite_location_in(self)\n\n }\n\n fn cite_memory(self, memory: &Memory) -> Self {\n\n memory.cite_memory_in(self)\n\n }\n\n}\n\n\n", "file_path": "src/synth_minidump.rs", "rank": 26, "score": 137342.02198371242 }, { "content": "/// Get a relative symbol path at which to locate symbols for `module`.\n\n///\n\n/// Symbols are generally stored in the layout used by Microsoft's symbol\n\n/// server and associated tools:\n\n/// `<debug filename>/<debug identifier>/<debug filename>.sym`. 
If\n\n/// `debug filename` ends with *.pdb* the leaf filename will have that\n\n/// removed.\n\n/// `extension` is the expected extension for the symbol filename, generally\n\n/// *sym* if Breakpad text format symbols are expected.\n\n///\n\n/// The debug filename and debug identifier can be found in the\n\n/// [first line][module_line] of the symbol file output by the dump_syms tool.\n\n/// You can use [this script][packagesymbols] to run dump_syms and put the\n\n/// resulting symbol files in the proper directory structure.\n\n///\n\n/// [module_line]: https://chromium.googlesource.com/breakpad/breakpad/+/master/docs/symbol_files.md#MODULE-records\n\n/// [packagesymbols]: https://gist.github.com/luser/2ad32d290f224782fcfc#file-packagesymbols-py\n\npub fn relative_symbol_path(module: &Module, extension: &str) -> Option<String> {\n\n module.debug_file().and_then(|debug_file| {\n\n module.debug_identifier().map(|debug_id| {\n\n // Can't use PathBuf::file_name here, it doesn't handle\n\n // Windows file paths on non-Windows.\n\n let leaf = leafname(&debug_file);\n\n let filename = replace_or_add_extension(leaf, \"pdb\", extension);\n\n [leaf, &debug_id[..], &filename[..]].join(\"/\")\n\n })\n\n })\n\n}\n\n\n\n/// Possible results of locating symbols.\n\n#[derive(Debug)]\n\npub enum SymbolResult {\n\n /// Symbols loaded successfully.\n\n Ok(SymbolFile),\n\n /// Symbol file could not be found.\n\n NotFound,\n\n /// Error loading symbol file.\n", "file_path": "breakpad-symbols/src/lib.rs", "rank": 27, "score": 137184.45700340017 }, { "content": "#[test]\n\nfn test_thread_list() {\n\n let dump = read_test_minidump().unwrap();\n\n let thread_list = dump.get_stream::<MinidumpThreadList>().unwrap();\n\n let ref threads = thread_list.threads;\n\n assert_eq!(threads.len(), 2);\n\n assert_eq!(threads[0].raw.thread_id, 0xbf4);\n\n assert_eq!(threads[1].raw.thread_id, 0x11c0);\n\n let id = threads[1].raw.thread_id;\n\n assert_eq!(thread_list.get_thread(id).unwrap().raw.thread_id, 
id);\n\n if let Some(ref ctx) = threads[0].context {\n\n assert_eq!(ctx.get_instruction_pointer(), 0x7c90eb94);\n\n assert_eq!(ctx.get_stack_pointer(), 0x12f320);\n\n if let &MinidumpContext {\n\n raw: MinidumpRawContext::X86(ref raw),\n\n ref valid,\n\n } = ctx\n\n {\n\n assert_eq!(raw.eip, 0x7c90eb94);\n\n assert_eq!(*valid, MinidumpContextValidity::All);\n\n } else {\n", "file_path": "tests/test_minidump.rs", "rank": 28, "score": 136067.60391123715 }, { "content": "#[test]\n\nfn test_module_list() {\n\n let dump = read_test_minidump().unwrap();\n\n let module_list = dump.get_stream::<MinidumpModuleList>().unwrap();\n\n assert_eq!(\n\n module_list.module_at_address(0x400000).unwrap().code_file(),\n\n \"c:\\\\test_app.exe\"\n\n );\n\n let modules = module_list.iter().collect::<Vec<_>>();\n\n let module_files = modules.iter().map(|m| m.code_file()).collect::<Vec<_>>();\n\n assert_eq!(modules.len(), 13);\n\n assert_eq!(modules[0].base_address(), 0x400000);\n\n assert_eq!(modules[0].size(), 0x2d000);\n\n assert_eq!(modules[0].code_file(), \"c:\\\\test_app.exe\");\n\n assert_eq!(modules[0].code_identifier(), \"45D35F6C2d000\");\n\n assert_eq!(modules[0].debug_file().unwrap(), \"c:\\\\test_app.pdb\");\n\n assert_eq!(\n\n modules[0].debug_identifier().unwrap(),\n\n \"5A9832E5287241C1838ED98914E9B7FF1\"\n\n );\n\n assert!(modules[0].version().is_none());\n", "file_path": "tests/test_minidump.rs", "rank": 29, "score": 135975.74415736677 }, { "content": "fn get_caller_frame(\n\n frame: &StackFrame,\n\n stack_memory: &Option<MinidumpMemory>,\n\n) -> Option<StackFrame> {\n\n match frame.context.raw {\n\n /*\n\n MinidumpRawContext::AMD64(ctx) => ctx.get_caller_frame(stack_memory),\n\n MinidumpRawContext::ARM(ctx) => ctx.get_caller_frame(stack_memory),\n\n MinidumpRawContext::ARM64(ctx) => ctx.get_caller_frame(stack_memory),\n\n MinidumpRawContext::PPC(ctx) => ctx.get_caller_frame(stack_memory),\n\n MinidumpRawContext::PPC64(ctx) => ctx.get_caller_frame(stack_memory),\n\n 
MinidumpRawContext::SPARC(ctx) => ctx.get_caller_frame(stack_memory),\n\n MinidumpRawContext::MIPS(ctx) => ctx.get_caller_frame(stack_memory),\n\n */\n\n MinidumpRawContext::X86(ref ctx) => ctx.get_caller_frame(&frame.context.valid, stack_memory),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "minidump-processor/src/stackwalker/mod.rs", "rank": 30, "score": 126325.40790687944 }, { "content": "fn get_caller_by_frame_pointer(\n\n ctx: &CONTEXT_X86,\n\n valid: &MinidumpContextValidity,\n\n stack_memory: &MinidumpMemory,\n\n) -> Option<StackFrame> {\n\n match valid {\n\n &MinidumpContextValidity::All => {}\n\n &MinidumpContextValidity::Some(ref which) => {\n\n if !which.contains(\"ebp\") {\n\n return None;\n\n }\n\n }\n\n }\n\n\n\n let last_ebp = ctx.ebp;\n\n // Assume that the standard %ebp-using x86 calling convention is in\n\n // use.\n\n //\n\n // The typical x86 calling convention, when frame pointers are present,\n\n // is for the calling procedure to use CALL, which pushes the return\n", "file_path": "minidump-processor/src/stackwalker/x86.rs", "rank": 31, "score": 122116.81949121549 }, { "content": "#[test]\n\nfn test_stack_win_line_frame_data() {\n\n let line = b\"STACK WIN 0 1000 30 a1 b2 c3 d4 e5 f6 0 1\\n\";\n\n match stack_win_line(line) {\n\n Done(rest, WinFrameType::FPO(stack)) => {\n\n assert_eq!(rest, &b\"\"[..]);\n\n assert_eq!(stack.address, 0x1000);\n\n assert_eq!(stack.size, 0x30);\n\n assert_eq!(stack.prologue_size, 0xa1);\n\n assert_eq!(stack.epilogue_size, 0xb2);\n\n assert_eq!(stack.parameter_size, 0xc3);\n\n assert_eq!(stack.saved_register_size, 0xd4);\n\n assert_eq!(stack.local_size, 0xe5);\n\n assert_eq!(stack.max_stack_size, 0xf6);\n\n assert_eq!(\n\n stack.program_string_or_base_pointer,\n\n WinStackThing::AllocatesBasePointer(true)\n\n );\n\n }\n\n Error(e) => {\n\n assert!(false, format!(\"Parse error: {:?}\", e));\n\n }\n\n Incomplete(_) => {\n\n assert!(false, \"Incomplete parse!\");\n\n }\n\n _ => assert!(false, \"Something bad 
happened\"),\n\n }\n\n}\n\n\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 32, "score": 114680.2667707619 }, { "content": "/// Populate a `CONTEXT_AMD64` struct with the given `endian`, `rip`, and `rsp`.\n\npub fn amd64_context(endian: Endian, rip: u64, rsp: u64) -> Section {\n\n let section = Section::with_endian(endian)\n\n .append_repeated(0, mem::size_of::<u64>() * 6) // p[1-6]_home\n\n .D32(0x10001f) // context_flags: CONTEXT_ALL\n\n .D32(0) // mx_csr\n\n .append_repeated(0, mem::size_of::<u16>() * 6) // cs,ds,es,fs,gs,ss\n\n .D32(0) // eflags\n\n .append_repeated(0, mem::size_of::<u64>() * 6) // dr0,1,2,3,6,7\n\n .append_repeated(0, mem::size_of::<u64>() * 4) // rax,rcx,rdx,rbx\n\n .D64(rsp)\n\n .append_repeated(0, mem::size_of::<u64>() * 11) // rbp-r15\n\n .D64(rip)\n\n .append_repeated(0, 512) // float_save\n\n .append_repeated(0, mem::size_of::<u128>() * 26) // vector_register\n\n .append_repeated(0, mem::size_of::<u64>() * 6); // trailing stuff\n\n assert_eq!(section.size(), md::CONTEXT_AMD64::size_with(&LE) as u64);\n\n section\n\n}\n\n\n", "file_path": "src/synth_minidump.rs", "rank": 33, "score": 114550.77899170449 }, { "content": "/// Populate a `CONTEXT_X86` struct with the given `endian`, `eip`, and `esp`.\n\npub fn x86_context(endian: Endian, eip: u32, esp: u32) -> Section {\n\n let section = Section::with_endian(endian)\n\n .D32(0x1007f) // context_flags: CONTEXT_ALL\n\n .append_repeated(0, 4 * 6) // dr0,1,2,3,6,7, 4 bytes each\n\n .append_repeated(0, md::FLOATING_SAVE_AREA_X86::size_with(&LE)) // float_save\n\n .append_repeated(0, 4 * 11) // gs-ebp, 4 bytes each\n\n .D32(eip)\n\n .D32(0) // cs\n\n .D32(0) // eflags\n\n .D32(esp)\n\n .D32(0) // ss\n\n .append_repeated(0, 512); // extended_registers\n\n assert_eq!(section.size(), md::CONTEXT_X86::size_with(&LE) as u64);\n\n section\n\n}\n\n\n", "file_path": "src/synth_minidump.rs", "rank": 34, "score": 114550.77899170449 }, { "content": "fn print_registers<T: Write>(f: &mut T, 
ctx: &MinidumpContext) -> io::Result<()> {\n\n let registers: Cow<HashSet<&str>> = match ctx.valid {\n\n MinidumpContextValidity::All => {\n\n let gpr = ctx.general_purpose_registers();\n\n let set: HashSet<&str> = gpr.iter().cloned().collect();\n\n Cow::Owned(set)\n\n }\n\n MinidumpContextValidity::Some(ref which) => Cow::Borrowed(which),\n\n };\n\n\n\n // Iterate over registers in a known order.\n\n let mut output = String::new();\n\n for reg in ctx.general_purpose_registers() {\n\n if registers.contains(reg) {\n\n let reg_val = ctx.format_register(reg);\n\n let next = format!(\" {: >5} = {}\", reg, reg_val);\n\n if output.chars().count() + next.chars().count() > 80 {\n\n // Flush the buffer.\n\n writeln!(f, \" {}\", output)?;\n\n output.truncate(0);\n", "file_path": "minidump-processor/src/process_state.rs", "rank": 35, "score": 114265.90795368365 }, { "content": "#[test]\n\nfn test_minidump_read() {\n\n let path = get_test_minidump_path(\"test.dmp\");\n\n let mut f = File::open(path).unwrap();\n\n let mut buf = vec![];\n\n f.read_to_end(&mut buf).unwrap();\n\n let _dump = Minidump::read(buf).unwrap();\n\n}\n\n\n", "file_path": "tests/test_minidump.rs", "rank": 36, "score": 113873.09319146263 }, { "content": "/// Return a `SymbolFile` given a vec of `Line` data.\n\nfn symbol_file_from_lines<'a>(lines: Vec<Line<'a>>) -> SymbolFile {\n\n let mut files = HashMap::new();\n\n let mut publics = vec![];\n\n let mut funcs = vec![];\n\n let mut stack_cfi = vec![];\n\n let mut stack_win_framedata: Vec<StackInfoWin> = vec![];\n\n let mut stack_win_fpo: Vec<StackInfoWin> = vec![];\n\n for line in lines {\n\n match line {\n\n Line::Info => {}\n\n Line::File(id, filename) => {\n\n files.insert(id, filename.to_string());\n\n }\n\n Line::Public(p) => {\n\n publics.push(p);\n\n }\n\n Line::Function(f) => {\n\n funcs.push(f);\n\n }\n\n Line::StackWin(frame_type) => {\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 37, "score": 112647.87340475377 }, { 
"content": "/// Helper for deriving a hash key from a `Module` for `Symbolizer`.\n\nfn key(module: &Module) -> ModuleKey {\n\n (\n\n module.code_file().to_string(),\n\n module.code_identifier().to_string(),\n\n module.debug_file().map(|s| s.to_string()),\n\n module.debug_identifier().map(|s| s.to_string()),\n\n )\n\n}\n\n\n\n/// Symbolicate stack frames.\n\n///\n\n/// A `Symbolizer` manages loading symbols and looking up symbols in them\n\n/// including caching so that symbols for a given module are only loaded once.\n\n///\n\n/// Call [`Symbolizer::new`][new] to instantiate a `Symbolizer`. A Symbolizer\n\n/// requires a [`SymbolSupplier`][supplier] to locate symbols. If you have\n\n/// symbols on disk in the [customary directory layout][dirlayout], a\n\n/// [`SimpleSymbolSupplier`][simple] will work.\n\n///\n\n/// Use [`get_symbol_at_address`][get_symbol] or [`fill_symbol`][fill_symbol] to\n", "file_path": "breakpad-symbols/src/lib.rs", "rank": 38, "score": 110645.96705710131 }, { "content": "#[test]\n\nfn test_minidump_read_path() {\n\n read_test_minidump().unwrap();\n\n}\n\n\n", "file_path": "tests/test_minidump.rs", "rank": 39, "score": 110301.39196895596 }, { "content": "#[test]\n\nfn test_assertion() {\n\n let path = get_test_minidump_path(\"invalid-parameter.dmp\");\n\n let dump = Minidump::read_path(&path).unwrap();\n\n let assertion = dump.get_stream::<MinidumpAssertion>().unwrap();\n\n assert_eq!(assertion.expression().unwrap(), \"format != nullptr\");\n\n assert_eq!(assertion.function().unwrap(), \"common_vfprintf\");\n\n assert_eq!(assertion.file().unwrap(), r\"minkernel\\crts\\ucrt\\src\\appcrt\\stdio\\output.cpp\");\n\n assert_eq!(assertion.raw.line, 32);\n\n assert_eq!(md::AssertionType::from_u32(assertion.raw._type),\n\n Some(md::AssertionType::InvalidParameter));\n\n}\n\n\n", "file_path": "tests/test_minidump.rs", "rank": 40, "score": 109425.57817462855 }, { "content": "#[test]\n\nfn test_list() {\n\n // Empty list\n\n let list = 
List::<DumpString>::new(0x11223344u32, Endian::Little);\n\n assert_eq!(\n\n Into::<Section>::into(list).get_contents().unwrap(),\n\n vec![0, 0, 0, 0]\n\n );\n\n let list = List::new(0x11223344u32, Endian::Little)\n\n .add(DumpString::new(\"a\", Endian::Little))\n\n .add(DumpString::new(\"b\", Endian::Little));\n\n assert_eq!(\n\n Into::<Section>::into(list).get_contents().unwrap(),\n\n vec![2, 0, 0, 0, // entry count\n\n // first entry\n\n 0x2, 0x0, 0x0, 0x0, // length\n\n b'a', 0x0,\n\n // second entry\n\n 0x2, 0x0, 0x0, 0x0, // length\n\n b'b', 0x0]\n\n );\n\n}\n\n\n", "file_path": "src/synth_minidump.rs", "rank": 41, "score": 109392.40828872591 }, { "content": "struct TestFixture {\n\n pub raw: CONTEXT_X86,\n\n pub modules: MinidumpModuleList,\n\n pub symbolizer: Symbolizer,\n\n}\n\n\n\nimpl TestFixture {\n\n pub fn new() -> TestFixture {\n\n TestFixture {\n\n raw: CONTEXT_X86::default(),\n\n // Give the two modules reasonable standard locations and names\n\n // for tests to play with.\n\n modules: MinidumpModuleList::from_modules(vec![\n\n MinidumpModule::new(0x40000000, 0x10000, \"module1\"),\n\n MinidumpModule::new(0x50000000, 0x10000, \"module2\"),\n\n ]),\n\n symbolizer: Symbolizer::new(SimpleSymbolSupplier::new(vec![])),\n\n }\n\n }\n\n\n", "file_path": "minidump-processor/src/stackwalker/x86_unittest.rs", "rank": 42, "score": 108110.30907413445 }, { "content": "/// Format `bytes` to `f` as a hex string.\n\npub fn write_bytes<T: Write>(f: &mut T, bytes: &[u8]) -> io::Result<()> {\n\n for b in bytes {\n\n write!(f, \"{:02x}\", b)?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/iostuff.rs", "rank": 43, "score": 107594.22805311566 }, { "content": "#[test]\n\nfn test_simple_stream() {\n\n let section = Section::with_endian(Endian::Little).D32(0x55667788);\n\n let stream_rva = mem::size_of::<md::MINIDUMP_HEADER>() as u8;\n\n let directory_rva = stream_rva + section.size() as u8;\n\n let dump = SynthMinidump::with_endian(Endian::Little)\n\n 
.flags(0x9f738b33685cc84c)\n\n .add_stream(SimpleStream {\n\n stream_type: 0x11223344,\n\n section: section,\n\n });\n\n assert_eq!(\n\n dump.finish().unwrap(),\n\n vec![\n\n 0x4d,\n\n 0x44,\n\n 0x4d,\n\n 0x50, // signature\n\n 0x93,\n\n 0xa7,\n\n 0x00,\n", "file_path": "src/synth_minidump.rs", "rank": 44, "score": 105366.86330953822 }, { "content": "#[test]\n\nfn test_dump_string() {\n\n let dump = SynthMinidump::with_endian(Endian::Little);\n\n let s = DumpString::new(\"hello\", Endian::Little);\n\n let contents = dump.add(s).finish().unwrap();\n\n // Skip over the header\n\n assert_eq!(\n\n &contents[mem::size_of::<md::MINIDUMP_HEADER>()..],\n\n &[0xa, 0x0, 0x0, 0x0, // length\n\n b'h', 0x0, b'e', 0x0, b'l', 0x0, b'l', 0x0, b'o', 0x0]\n\n );\n\n}\n\n\n", "file_path": "src/synth_minidump.rs", "rank": 45, "score": 105346.81427053857 }, { "content": "#[test]\n\nfn test_dump_header() {\n\n let dump = SynthMinidump::with_endian(Endian::Little).flags(0x9f738b33685cc84c);\n\n assert_eq!(\n\n dump.finish().unwrap(),\n\n vec![0x4d, 0x44, 0x4d, 0x50, // signature\n\n 0x93, 0xa7, 0x00, 0x00, // version\n\n 0, 0, 0, 0, // stream count\n\n 0x20, 0, 0, 0, // directory RVA\n\n 0, 0, 0, 0, // checksum\n\n 0x3d, 0xe1, 0x44, 0x4b, // time_date_stamp\n\n 0x4c, 0xc8, 0x5c, 0x68, // flags\n\n 0x33, 0x8b, 0x73, 0x9f,\n\n ]\n\n );\n\n}\n\n\n", "file_path": "src/synth_minidump.rs", "rank": 46, "score": 105346.81427053857 }, { "content": "#[test]\n\nfn test_system_info() {\n\n let dump = read_test_minidump().unwrap();\n\n let system_info = dump.get_stream::<MinidumpSystemInfo>().unwrap();\n\n assert_eq!(system_info.os, Os::Windows);\n\n assert_eq!(system_info.cpu, Cpu::X86);\n\n}\n\n\n", "file_path": "tests/test_minidump.rs", "rank": 47, "score": 105326.78513460897 }, { "content": "/// This trait exists to allow creating `RangeMap`s from possibly-overlapping input data.\n\n///\n\n/// The `RangeMap` struct will panic if you attempt to initialize it with overlapping data,\n\n/// and we 
deal with many sources of untrusted input data that could run afoul of this.\n\n/// [Upstream issue](https://github.com/jneem/range-map/issues/1)\n\npub trait IntoRangeMapSafe<V>: IntoIterator<Item=(Range<u64>, V)> + Sized\n\n where V: Clone + Debug + Eq,\n\n{\n\n fn into_rangemap_safe(self) -> RangeMap<u64, V> {\n\n let mut input: Vec<_> = self.into_iter().collect();\n\n input.sort_by_key(|x| x.0);\n\n let mut vec: Vec<(Range<u64>, V)> = Vec::with_capacity(input.len());\n\n for (range, val) in input.into_iter() {\n\n if let Some(&mut (ref mut last_range, ref last_val)) = vec.last_mut() {\n\n if range.start <= last_range.end && &val != last_val {\n\n //TODO: add a way for callers to do custom logging here? Perhaps\n\n // a callback function?\n\n warn!(\"overlapping ranges {:?} and {:?} map to values {:?} and {:?}\",\n\n last_range, range, last_val, val);\n\n continue;\n\n }\n\n\n\n if range.start <= last_range.end.saturating_add(1) && &val == last_val {\n\n last_range.end = cmp::max(range.end, last_range.end);\n\n continue;\n", "file_path": "minidump-common/src/traits.rs", "rank": 48, "score": 103154.8772860458 }, { "content": "#[test]\n\nfn test_simple_stream_bigendian() {\n\n let section = Section::with_endian(Endian::Big).D32(0x55667788);\n\n let stream_rva = mem::size_of::<md::MINIDUMP_HEADER>() as u8;\n\n let directory_rva = stream_rva + section.size() as u8;\n\n let dump = SynthMinidump::with_endian(Endian::Big)\n\n .flags(0x9f738b33685cc84c)\n\n .add_stream(SimpleStream {\n\n stream_type: 0x11223344,\n\n section: section,\n\n });\n\n assert_eq!(\n\n dump.finish().unwrap(),\n\n vec![\n\n 0x50,\n\n 0x4d,\n\n 0x44,\n\n 0x4d, // signature\n\n 0x00,\n\n 0x00,\n\n 0xa7,\n", "file_path": "src/synth_minidump.rs", "rank": 49, "score": 101696.44479488229 }, { "content": "#[test]\n\nfn test_dump_header_bigendian() {\n\n let dump = SynthMinidump::with_endian(Endian::Big).flags(0x9f738b33685cc84c);\n\n assert_eq!(\n\n dump.finish().unwrap(),\n\n vec![0x50, 0x4d, 0x44, 
0x4d, // signature\n\n 0x00, 0x00, 0xa7, 0x93, // version\n\n 0, 0, 0, 0, // stream count\n\n 0, 0, 0, 0x20, // directory RVA\n\n 0, 0, 0, 0, // checksum\n\n 0x4b, 0x44, 0xe1, 0x3d, // time_date_stamp\n\n 0x9f, 0x73, 0x8b, 0x33, // flags\n\n 0x68, 0x5c, 0xc8, 0x4c,\n\n ]\n\n );\n\n}\n\n\n", "file_path": "src/synth_minidump.rs", "rank": 50, "score": 101677.2560918153 }, { "content": "fn format_time_t(t: u32) -> String {\n\n if let Some(datetime) = NaiveDateTime::from_timestamp_opt(t as i64, 0) {\n\n datetime.format(\"%Y-%m-%d %H:%M:%S\").to_string()\n\n } else {\n\n String::new()\n\n }\n\n}\n\n\n", "file_path": "src/minidump.rs", "rank": 51, "score": 100836.8600823578 }, { "content": "#[test]\n\nfn test_stack_cfi() {\n\n let line = b\"STACK CFI deadf00d some rules\\n\";\n\n let rest = &b\"\"[..];\n\n assert_eq!(\n\n stack_cfi(line),\n\n Done(\n\n rest,\n\n CFIRules {\n\n address: 0xdeadf00d,\n\n rules: \"some rules\".to_string(),\n\n }\n\n )\n\n );\n\n}\n\n\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 52, "score": 98910.20054743535 }, { "content": "#[test]\n\nfn test_module_line() {\n\n let line = b\"MODULE Linux x86 D3096ED481217FD4C16B29CD9BC208BA0 firefox-bin\\n\";\n\n let rest = &b\"\"[..];\n\n assert_eq!(module_line(line), Done(rest, ()));\n\n}\n\n\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 53, "score": 98892.53502918125 }, { "content": "#[test]\n\nfn test_simple() {\n\n let mut f = TestFixture::new();\n\n let mut stack = Section::new();\n\n stack.start().set_const(0x80000000);\n\n stack = stack.D32(0).D32(0); // end-of-stack marker\n\n f.raw.eip = 0x40000200;\n\n f.raw.ebp = 0x80000000;\n\n let s = f.walk_stack(stack);\n\n assert_eq!(s.frames.len(), 1);\n\n let f = &s.frames[0];\n\n let m = f.module.as_ref().unwrap();\n\n assert_eq!(m.code_file(), \"module1\");\n\n}\n\n\n\n// Walk a traditional frame. 
A traditional frame saves the caller's\n\n// %ebp just below the return address, and has its own %ebp pointing\n\n// at the saved %ebp.\n", "file_path": "minidump-processor/src/stackwalker/x86_unittest.rs", "rank": 54, "score": 98198.65410853396 }, { "content": "#[test]\n\nfn test_traditional() {\n\n let mut f = TestFixture::new();\n\n let frame0_ebp = Label::new();\n\n let frame1_ebp = Label::new();\n\n let mut stack = Section::new();\n\n stack.start().set_const(0x80000000);\n\n stack = stack\n\n .append_repeated(12, 0) // frame 0: space\n\n .mark(&frame0_ebp) // frame 0 %ebp points here\n\n .D32(&frame1_ebp) // frame 0: saved %ebp\n\n .D32(0x40008679) // frame 0: return address\n\n .append_repeated(8, 0) // frame 1: space\n\n .mark(&frame1_ebp) // frame 1 %ebp points here\n\n .D32(0) // frame 1: saved %ebp (stack end)\n\n .D32(0); // frame 1: return address (stack end)\n\n f.raw.eip = 0x4000c7a5;\n\n f.raw.esp = stack.start().value().unwrap() as u32;\n\n f.raw.ebp = frame0_ebp.value().unwrap() as u32;\n\n let s = f.walk_stack(stack);\n\n assert_eq!(s.frames.len(), 2);\n", "file_path": "minidump-processor/src/stackwalker/x86_unittest.rs", "rank": 55, "score": 98198.65410853396 }, { "content": "#[test]\n\nfn test_processor_symbols() {\n\n let dump = read_test_minidump().unwrap();\n\n let path = testdata_symbol_path();\n\n println!(\"symbol path: {:?}\", path);\n\n let state = minidump_processor::process_minidump(\n\n &dump,\n\n &Symbolizer::new(SimpleSymbolSupplier::new(vec![path])),\n\n ).unwrap();\n\n let f0 = &state.threads[0].frames[0];\n\n assert_eq!(\n\n f0.function_name.as_ref().map(|s| s.as_str()),\n\n Some(\"`anonymous namespace'::CrashFunction\")\n\n );\n\n}\n", "file_path": "minidump-processor/tests/test_processor.rs", "rank": 56, "score": 97153.10020207454 }, { "content": "#[test]\n\nfn test_stack_cfi_lines() {\n\n let data = b\"STACK CFI INIT badf00d abc init rules\n\nSTACK CFI deadf00d some rules\n\nSTACK CFI deadbeef more rules\n\n\";\n\n let rest = 
&b\"\"[..];\n\n assert_eq!(\n\n stack_cfi_lines(data),\n\n Done(\n\n rest,\n\n StackInfoCFI {\n\n init: CFIRules {\n\n address: 0xbadf00d,\n\n rules: \"init rules\".to_string(),\n\n },\n\n size: 0xabc,\n\n add_rules: vec![\n\n CFIRules {\n\n address: 0xdeadbeef,\n\n rules: \"more rules\".to_string(),\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 57, "score": 95903.86717240824 }, { "content": "#[test]\n\nfn test_stack_cfi_init() {\n\n let line = b\"STACK CFI INIT badf00d abc init rules\\n\";\n\n let rest = &b\"\"[..];\n\n assert_eq!(\n\n stack_cfi_init(line),\n\n Done(\n\n rest,\n\n (\n\n CFIRules {\n\n address: 0xbadf00d,\n\n rules: \"init rules\".to_string(),\n\n },\n\n 0xabc\n\n )\n\n )\n\n );\n\n}\n\n\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 58, "score": 95903.86717240824 }, { "content": "#[test]\n\nfn test_module_line_crcrlf() {\n\n let line = b\"MODULE Windows x86_64 D3096ED481217FD4C16B29CD9BC208BA0 firefox\\r\\r\\n\";\n\n let rest = &b\"\"[..];\n\n assert_eq!(module_line(line), Done(rest, ()));\n\n}\n\n\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 59, "score": 95886.87325153349 }, { "content": "/// Produce a slice of `bytes` corresponding to the offset and size in `loc`, or an\n\n/// `Error` if the data is not fully contained within `bytes`.\n\nfn location_slice<'a>(bytes: &'a [u8], loc: &md::MINIDUMP_LOCATION_DESCRIPTOR) -> Result<&'a [u8], Error> {\n\n let start = loc.rva as usize;\n\n let end = (loc.rva + loc.data_size) as usize;\n\n if start < bytes.len() && end <= bytes.len() {\n\n Ok(&bytes[start..end])\n\n } else {\n\n Err(Error::StreamReadFailure)\n\n }\n\n}\n\n\n", "file_path": "src/minidump.rs", "rank": 60, "score": 94619.41946252406 }, { "content": "fn parse_vcs_info(filename: &str) -> Result<Box<VCSFile>, Error> {\n\n let mut bits = filename.split(':');\n\n Ok(match (bits.next(), bits.next(), bits.next(), bits.next()) {\n\n (Some(\"hg\"), Some(repo), Some(path), Some(rev)) if 
repo.starts_with(\"hg.mozilla.org/\") => {\n\n let mut s = repo.splitn(2, '/');\n\n let host = s.next().unwrap().to_owned();\n\n let repo = s.next().unwrap().to_owned();\n\n let path = path.to_owned();\n\n let rev = rev.to_owned();\n\n Box::new(HgWebFile {\n\n host,\n\n repo,\n\n rev,\n\n path,\n\n })\n\n }\n\n (Some(\"git\"), Some(repo), Some(path), Some(rev)) if repo.starts_with(\"github.com/\") => {\n\n let repo = repo.splitn(2, '/').nth(1).unwrap().to_owned();\n\n let path = path.to_owned();\n\n let rev = rev.to_owned();\n\n Box::new(GitHubFile {\n\n repo,\n\n rev,\n\n path,\n\n })\n\n }\n\n _ => return Err(format_err!(\"No VCS info in filename\")),\n\n })\n\n}\n\n\n", "file_path": "minidump-tools/src/lib.rs", "rank": 61, "score": 93151.84146633054 }, { "content": "#[test]\n\nfn test_module_line_filename_spaces() {\n\n let line = b\"MODULE Windows x86_64 D3096ED481217FD4C16B29CD9BC208BA0 firefox x y z\\n\";\n\n let rest = &b\"\"[..];\n\n assert_eq!(module_line(line), Done(rest, ()));\n\n}\n\n\n\n/// Sometimes dump_syms on Windows does weird things and produces multiple carriage returns\n\n/// before the line feed.\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 62, "score": 93101.37636931645 }, { "content": "/// Attempt to read a CodeView record from `data` at `location`\n\nfn read_codeview(location: &md::MINIDUMP_LOCATION_DESCRIPTOR, data: &[u8],\n\n endian: scroll::Endian) -> Result<CodeView, failure::Error> {\n\n let bytes = location_slice(data, location)?;\n\n // The CodeView data can be one of a few different formats. 
Try to read the\n\n // signature first to figure out what format the data is.\n\n let signature: u32 = bytes.pread_with(0, endian)?;\n\n Ok(match CvSignature::from_u32(signature) {\n\n // PDB data has two known versions: the current 7.0 and the older 2.0 version.\n\n Some(CvSignature::Pdb70) => CodeView::Pdb70(bytes.pread_with(0, endian)?),\n\n Some(CvSignature::Pdb20) => CodeView::Pdb20(bytes.pread_with(0, endian)?),\n\n // Breakpad's ELF build ID format.\n\n Some(CvSignature::Elf) => CodeView::Elf(bytes.pread_with(0, endian)?),\n\n // Other formats aren't handled, but save the raw bytes.\n\n _ => CodeView::Unknown(bytes.to_owned())\n\n })\n\n}\n\n\n\nimpl MinidumpModule {\n\n /// Create a `MinidumpModule` with some basic info.\n\n ///\n", "file_path": "src/minidump.rs", "rank": 63, "score": 91837.650261762 }, { "content": "#[derive(StructOpt)]\n\n#[structopt(name = \"get-minidump-instructions\", about = \"Display instructions from a minidump\")]\n\nstruct GetMinidumpInstructions {\n\n #[structopt(long = \"color\", help = \"Enable colored output\")]\n\n color: Option<Color>,\n\n #[structopt(help = \"Input minidump\", parse(from_os_str))]\n\n minidump: PathBuf,\n\n #[structopt(help = \"Symbol paths\", parse(from_os_str))]\n\n symbol_paths: Vec<PathBuf>,\n\n}\n\n\n", "file_path": "minidump-tools/src/lib.rs", "rank": 64, "score": 90952.35926651885 }, { "content": "#[test]\n\nfn test_stack_win_line_program_string() {\n\n let line =\n\n b\"STACK WIN 4 2170 14 a1 b2 c3 d4 e5 f6 1 $eip 4 + ^ = $esp $ebp 8 + = $ebp $ebp ^ =\\n\";\n\n match stack_win_line(line) {\n\n Done(rest, WinFrameType::FrameData(stack)) => {\n\n assert_eq!(rest, &b\"\"[..]);\n\n assert_eq!(stack.address, 0x2170);\n\n assert_eq!(stack.size, 0x14);\n\n assert_eq!(stack.prologue_size, 0xa1);\n\n assert_eq!(stack.epilogue_size, 0xb2);\n\n assert_eq!(stack.parameter_size, 0xc3);\n\n assert_eq!(stack.saved_register_size, 0xd4);\n\n assert_eq!(stack.local_size, 0xe5);\n\n assert_eq!(stack.max_stack_size, 
0xf6);\n\n assert_eq!(\n\n stack.program_string_or_base_pointer,\n\n WinStackThing::ProgramString(\n\n \"$eip 4 + ^ = $esp $ebp 8 + = $ebp $ebp ^ =\".to_string()\n\n )\n\n );\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 65, "score": 90528.50152148292 }, { "content": "fn testdata_symbol_path() -> PathBuf {\n\n let path = locate_testdata().join(\"symbols\");\n\n println!(\"symbol path: {:?}\", path);\n\n path\n\n}\n\n\n", "file_path": "minidump-processor/tests/test_processor.rs", "rank": 66, "score": 89478.56509686037 }, { "content": "fn fetch_url_to_path(client: &Client, url: &str, path: &Path) -> Result<(), Error> {\n\n debug!(\"fetch_url_to_path({}, {:?})\", url, path);\n\n let mut res = client.get(url).send()?.error_for_status()?;\n\n debug!(\"fetch_url_to_path: HTTP success\");\n\n let mut tmp_path = path.to_owned().into_os_string();\n\n tmp_path.push(\".tmp\");\n\n {\n\n let mut f = File::create(&tmp_path)?;\n\n res.copy_to(&mut f)?;\n\n }\n\n fs::rename(&tmp_path, &path)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "minidump-tools/src/lib.rs", "rank": 67, "score": 87789.30414203028 }, { "content": "fn maybe_fetch_source_file(client: &Client, url: &str, path: &Path) -> Result<(), Error> {\n\n if file_exists(path) {\n\n Ok(())\n\n } else {\n\n fetch_url_to_path(client, url, path)\n\n }\n\n}\n\n\n\nimpl SourceLookup for SymLookup {\n\n fn lookup(&mut self, address: u64) -> Option<SourceLocation> {\n\n self.modules.module_at_address(address).and_then(|module| {\n\n let mut frame = SimpleFrame::with_instruction(address);\n\n self.symbolizer.fill_symbol(module, &mut frame);\n\n let SimpleFrame { source_file, source_line, .. 
} = frame;\n\n if let (Some(file), Some(line)) = (source_file, source_line) {\n\n let line = line as u64;\n\n match parse_vcs_info(&file) {\n\n Ok(info) => {\n\n let url = info.raw_url();\n\n let local = env::temp_dir().join(info.as_local_filename());\n", "file_path": "minidump-tools/src/lib.rs", "rank": 68, "score": 85904.5178669981 }, { "content": "struct SymLookup {\n\n modules: MinidumpModuleList,\n\n symbolizer: Symbolizer,\n\n client: Client,\n\n}\n\n\n", "file_path": "minidump-tools/src/lib.rs", "rank": 69, "score": 84704.66021452467 }, { "content": "trait VCSFile {\n\n fn raw_url(&self) -> String;\n\n fn annotate_url(&self, line: u64) -> String;\n\n fn as_local_filename(&self) -> String;\n\n}\n\n\n\nimpl VCSFile for HgWebFile {\n\n fn raw_url(&self) -> String {\n\n format!(\"https://{}/{}/raw-file/{}/{}\",\n\n self.host, self.repo, self.rev, self.path)\n\n }\n\n\n\n fn annotate_url(&self, line: u64) -> String {\n\n format!(\"https://{}/{}/annotate/{}/{}#l{}\",\n\n self.host, self.repo, self.rev, self.path, line)\n\n }\n\n fn as_local_filename(&self) -> String {\n\n format!(\"{}_{}_{}\", self.host, self.rev, self.path).replace('/', \"_\")\n\n }\n\n}\n", "file_path": "minidump-tools/src/lib.rs", "rank": 70, "score": 84111.97526455617 }, { "content": "enum Line<'a> {\n\n Info,\n\n File(u32, &'a str),\n\n Public(PublicSymbol),\n\n Function(Function),\n\n StackWin(WinFrameType),\n\n StackCFI(StackInfoCFI),\n\n}\n\n\n\n// Nom's `eol` doesn't use complete! 
so it will return Incomplete.\n\nnamed!(my_eol<char>, complete!(preceded!(many0!(char!('\\r')), char!('\\n'))));\n\n\n\n/// Match a hex string, parse it to a u64.\n\nnamed!(hex_str_u64<&[u8], u64>,\n\n map_res!(map_res!(hex_digit, str::from_utf8), |s| u64::from_str_radix(s, 16)));\n\n\n\n/// Match a decimal string, parse it to a u32.\n\nnamed!(decimal_u32<&[u8], u32>, map_res!(map_res!(digit, str::from_utf8), FromStr::from_str));\n\n\n\n/// Matches a MODULE record.\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 71, "score": 83969.32742648559 }, { "content": "#[test]\n\nfn test_empty_minidump() {\n\n match Minidump::read(&b\"\"[..]) {\n\n Ok(_) => panic!(\"Should have failed to read minidump\"),\n\n Err(e) => assert_eq!(e, Error::MissingHeader),\n\n }\n\n}\n", "file_path": "tests/test_minidump.rs", "rank": 72, "score": 83254.24445393203 }, { "content": "struct GitHubFile {\n\n repo: String,\n\n rev: String,\n\n path: String,\n\n}\n\n\n", "file_path": "minidump-tools/src/lib.rs", "rank": 73, "score": 82310.29838565577 }, { "content": "struct HgWebFile {\n\n host: String,\n\n repo: String,\n\n rev: String,\n\n path: String,\n\n}\n\n\n", "file_path": "minidump-tools/src/lib.rs", "rank": 74, "score": 82310.29838565577 }, { "content": "#[test]\n\nfn test_parse_symbol_bytes() {\n\n let bytes = &b\"MODULE Linux x86 D3096ED481217FD4C16B29CD9BC208BA0 firefox-bin\n\nINFO blah blah blah\n\nFILE 0 foo.c\n\nFILE 100 bar.c\n\nPUBLIC abcd 10 func 1\n\nPUBLIC ff00 3 func 2\n\nFUNC 900 30 10 some other func\n\nFUNC 1000 30 10 some func\n\n1000 10 42 7\n\n1010 10 52 8\n\n1020 10 62 15\n\nFUNC 1100 30 10 a third func\n\nSTACK WIN 4 900 30 a1 b2 c3 d4 e5 f6 1 prog string\n\nSTACK WIN 0 1000 30 a1 b2 c3 d4 e5 f6 0 1\n\nSTACK CFI INIT badf00d abc init rules\n\nSTACK CFI deadf00d some rules\n\nSTACK CFI deadbeef more rules\n\nSTACK CFI INIT f00f f0 more init rules\n\n\"[..];\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 75, "score": 
80154.68484560515 }, { "content": "fn print_minidump_modules<T: AsRef<Path>>(path: T, verbose: Verbose) {\n\n match Minidump::read_path(path.as_ref()) {\n\n Ok(dump) => {\n\n if let Ok(module_list) = dump.get_stream::<MinidumpModuleList>() {\n\n for module in module_list.iter() {\n\n print!(\"{}\", module.code_file());\n\n if verbose == Verbose::Yes {\n\n print!(\"\\t\");\n\n if let Some(debug_id) = module.debug_identifier() {\n\n print!(\"{}\", debug_id);\n\n }\n\n }\n\n println!(\"\");\n\n }\n\n }\n\n }\n\n Err(err) => {\n\n let mut stderr = std::io::stderr();\n\n writeln!(&mut stderr, \"Error reading dump: {:?}\", err).unwrap();\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/dumpmodules.rs", "rank": 76, "score": 79050.91972333862 }, { "content": "#[test]\n\nfn test_leafname() {\n\n assert_eq!(leafname(\"c:\\\\foo\\\\bar\\\\test.pdb\"), \"test.pdb\");\n\n assert_eq!(leafname(\"c:/foo/bar/test.pdb\"), \"test.pdb\");\n\n assert_eq!(leafname(\"test.pdb\"), \"test.pdb\");\n\n assert_eq!(leafname(\"test\"), \"test\");\n\n assert_eq!(leafname(\"/path/to/test\"), \"test\");\n\n}\n\n\n", "file_path": "breakpad-symbols/src/lib.rs", "rank": 77, "score": 78927.83902770726 }, { "content": "#[test]\n\nfn test_parse_symbol_bytes_malformed() {\n\n assert!(\n\n parse_symbol_bytes(&b\"this is not a symbol file\\n\"[..]).is_err(),\n\n \"Should fail to parse junk\"\n\n );\n\n\n\n assert!(\n\n parse_symbol_bytes(\n\n &b\"MODULE Linux x86 xxxxxx\n\nFILE 0 foo.c\n\n\"[..]\n\n ).is_err(),\n\n \"Should fail to parse malformed MODULE line\"\n\n );\n\n\n\n assert!(\n\n parse_symbol_bytes(\n\n &b\"MODULE Linux x86 abcd1234 foo\n\nFILE x foo.c\n\n\"[..]\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 78, "score": 78314.79529617028 }, { "content": "#[test]\n\nfn test_exception() {\n\n let dump = read_test_minidump().unwrap();\n\n let exception = dump.get_stream::<MinidumpException>().unwrap();\n\n assert_eq!(exception.thread_id, 0xbf4);\n\n 
assert_eq!(exception.raw.exception_record.exception_code, 0xc0000005);\n\n if let Some(ref ctx) = exception.context {\n\n assert_eq!(ctx.get_instruction_pointer(), 0x40429e);\n\n assert_eq!(ctx.get_stack_pointer(), 0x12fe84);\n\n if let &MinidumpContext {\n\n raw: MinidumpRawContext::X86(ref raw),\n\n ref valid,\n\n } = ctx\n\n {\n\n assert_eq!(raw.eip, 0x40429e);\n\n assert_eq!(*valid, MinidumpContextValidity::All);\n\n } else {\n\n assert!(false, \"Wrong context type\");\n\n }\n\n } else {\n\n assert!(false, \"Missing context\");\n\n }\n\n}\n\n\n", "file_path": "tests/test_minidump.rs", "rank": 79, "score": 77294.28744494374 }, { "content": "#[test]\n\nfn test_breakpad_info() {\n\n let dump = read_test_minidump().unwrap();\n\n let breakpad_info = dump.get_stream::<MinidumpBreakpadInfo>().unwrap();\n\n assert_eq!(breakpad_info.dump_thread_id.unwrap(), 0x11c0);\n\n assert_eq!(breakpad_info.requesting_thread_id.unwrap(), 0xbf4);\n\n}\n\n\n", "file_path": "tests/test_minidump.rs", "rank": 80, "score": 74717.9529196625 }, { "content": "#[test]\n\nfn test_misc_info() {\n\n let dump = read_test_minidump().unwrap();\n\n let misc_info = dump.get_stream::<MinidumpMiscInfo>().unwrap();\n\n assert_eq!(misc_info.raw.process_id(), Some(3932));\n\n assert_eq!(misc_info.raw.process_create_time(), Some(0x45d35f73));\n\n assert_eq!(\n\n misc_info.process_create_time().unwrap(),\n\n Utc.ymd(2007, 02, 14).and_hms(19, 13, 55)\n\n );\n\n}\n\n\n", "file_path": "tests/test_minidump.rs", "rank": 81, "score": 74717.9529196625 }, { "content": "#[test]\n\nfn test_section_cite() {\n\n let s1 = Section::with_endian(Endian::Little).append_repeated(0, 0x0a);\n\n s1.start().set_const(0xff00ee11);\n\n let s2 = Section::with_endian(Endian::Little);\n\n let s2 = s1.cite_location_in(s2);\n\n s1.get_contents().unwrap();\n\n assert_eq!(\n\n s2.get_contents().unwrap(),\n\n vec![0x0a, 0, 0, 0, 0x11, 0xee, 0x00, 0xff]\n\n );\n\n}\n\n\n", "file_path": "src/synth_minidump.rs", "rank": 82, "score": 
74717.9529196625 }, { "content": "#[test]\n\nfn test_func_with_m() {\n\n let data = b\"FUNC m 1000 30 10 some func\n\n1000 10 42 7\n\n1010 10 52 8\n\n1020 10 62 15\n\n\";\n\n if let Done(rest, _) = func_lines(data) {\n\n assert_eq!(rest, &b\"\"[..]);\n\n } else {\n\n assert!(false, \"Failed to parse!\");\n\n }\n\n}\n\n\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 83, "score": 74064.76478507994 }, { "content": "#[test]\n\nfn test_public_with_m() {\n\n let line = b\"PUBLIC m f00d d00d some func\\n\";\n\n let rest = &b\"\"[..];\n\n assert_eq!(\n\n public_line(line),\n\n Done(\n\n rest,\n\n PublicSymbol {\n\n address: 0xf00d,\n\n parameter_size: 0xd00d,\n\n name: \"some func\".to_string(),\n\n }\n\n )\n\n );\n\n}\n\n\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 84, "score": 74064.76478507994 }, { "content": "#[test]\n\nfn test_replace_or_add_extension() {\n\n assert_eq!(\n\n replace_or_add_extension(\"test.pdb\", \"pdb\", \"sym\"),\n\n \"test.sym\"\n\n );\n\n assert_eq!(\n\n replace_or_add_extension(\"TEST.PDB\", \"pdb\", \"sym\"),\n\n \"TEST.sym\"\n\n );\n\n assert_eq!(replace_or_add_extension(\"test\", \"pdb\", \"sym\"), \"test.sym\");\n\n assert_eq!(\n\n replace_or_add_extension(\"test.x\", \"pdb\", \"sym\"),\n\n \"test.x.sym\"\n\n );\n\n assert_eq!(replace_or_add_extension(\"\", \"pdb\", \"sym\"), \".sym\");\n\n assert_eq!(replace_or_add_extension(\"test.x\", \"x\", \"y\"), \"test.y\");\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n", "file_path": "breakpad-symbols/src/lib.rs", "rank": 85, "score": 74064.76478507994 }, { "content": "/// If `filename` ends with `match_extension`, remove it. 
Append `new_extension` to the result.\n\nfn replace_or_add_extension(filename: &str, match_extension: &str, new_extension: &str) -> String {\n\n let mut bits = filename.split('.').collect::<Vec<_>>();\n\n if bits.len() > 1\n\n && bits.last()\n\n .map_or(false, |e| e.to_lowercase() == match_extension)\n\n {\n\n bits.pop();\n\n }\n\n bits.push(new_extension);\n\n bits.join(\".\")\n\n}\n\n\n", "file_path": "breakpad-symbols/src/lib.rs", "rank": 86, "score": 73683.80671421839 }, { "content": "#[test]\n\nfn test_processor() {\n\n let dump = read_test_minidump().unwrap();\n\n let state = minidump_processor::process_minidump(\n\n &dump,\n\n &Symbolizer::new(SimpleSymbolSupplier::new(vec![])),\n\n ).unwrap();\n\n assert_eq!(state.system_info.os, Os::Windows);\n\n // TODO\n\n // assert_eq!(state.system_info.os_version.unwrap(),\n\n // \"5.1.2600 Service Pack 2\");\n\n assert_eq!(state.system_info.cpu, Cpu::X86);\n\n // TODO:\n\n // assert_eq!(state.system_info.cpu_info.unwrap(),\n\n // \"GenuineIntel family 6 model 13 stepping 8\");\n\n assert_eq!(state.crash_address.unwrap(), 0x45);\n\n assert_eq!(state.threads.len(), 2);\n\n assert_eq!(state.requesting_thread.unwrap(), 0);\n\n\n\n // Check thread 0.\n\n assert_eq!(state.threads[0].info, CallStackInfo::Ok);\n", "file_path": "minidump-processor/tests/test_processor.rs", "rank": 87, "score": 72362.72756242455 }, { "content": "#[test]\n\nfn test_parse_with_overlap() {\n\n //TODO: deal with duplicate PUBLIC records? 
Not as important since they don't go\n\n // into a RangeMap.\n\n let bytes = b\"MODULE Linux x86 D3096ED481217FD4C16B29CD9BC208BA0 firefox-bin\n\nFILE 0 foo.c\n\nPUBLIC abcd 10 func 1\n\nPUBLIC ff00 3 func 2\n\nFUNC 1000 30 10 some func\n\n1000 10 42 0\n\n1000 10 43 0\n\n1001 10 44 0\n\n1001 5 45 0\n\n1010 10 52 0\n\nFUNC 1000 30 10 some func overlap exact\n\nFUNC 1001 30 10 some func overlap end\n\nFUNC 1001 10 10 some func overlap contained\n\n\";\n\n let sym = parse_symbol_bytes(&bytes[..]).unwrap();\n\n assert_eq!(sym.publics.len(), 2);\n\n {\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 88, "score": 71921.50968597984 }, { "content": "#[test]\n\nfn test_public_line() {\n\n let line = b\"PUBLIC f00d d00d some func\\n\";\n\n let rest = &b\"\"[..];\n\n assert_eq!(\n\n public_line(line),\n\n Done(\n\n rest,\n\n PublicSymbol {\n\n address: 0xf00d,\n\n parameter_size: 0xd00d,\n\n name: \"some func\".to_string(),\n\n }\n\n )\n\n );\n\n}\n\n\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 89, "score": 71921.50968597984 }, { "content": "#[test]\n\nfn test_file_line() {\n\n let line = b\"FILE 1 foo.c\\n\";\n\n let rest = &b\"\"[..];\n\n assert_eq!(file_line(line), Done(rest, (1, \"foo.c\")));\n\n}\n\n\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 90, "score": 71921.50968597984 }, { "content": "#[test]\n\nfn test_info_line() {\n\n let line = b\"INFO blah blah blah\\n\";\n\n let bits = &b\"blah blah blah\"[..];\n\n let rest = &b\"\"[..];\n\n assert_eq!(info_line(line), Done(rest, bits));\n\n}\n\n\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 91, "score": 71921.50968597984 }, { "content": "#[test]\n\nfn test_info_line2() {\n\n let line = b\"INFO CODE_ID abc xyz\\n\";\n\n let bits = &b\"CODE_ID abc xyz\"[..];\n\n let rest = &b\"\"[..];\n\n assert_eq!(info_line(line), Done(rest, bits));\n\n}\n\n\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 92, "score": 
71921.50968597984 }, { "content": "#[test]\n\nfn test_file_line_spaces() {\n\n let line = b\"FILE 1234 foo bar.xyz\\n\";\n\n let rest = &b\"\"[..];\n\n assert_eq!(file_line(line), Done(rest, (1234, \"foo bar.xyz\")));\n\n}\n\n\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 93, "score": 69941.21663385144 }, { "content": "#[test]\n\nfn test_func_lines_no_lines() {\n\n use range_map::RangeMap;\n\n let line = b\"FUNC c184 30 0 nsQueryInterfaceWithError::operator()(nsID const&, void**) const\\n\";\n\n let rest = &b\"\"[..];\n\n assert_eq!(\n\n func_lines(line),\n\n Done(\n\n rest,\n\n Function {\n\n address: 0xc184,\n\n size: 0x30,\n\n parameter_size: 0,\n\n name: \"nsQueryInterfaceWithError::operator()(nsID const&, void**) const\"\n\n .to_string(),\n\n lines: RangeMap::new(),\n\n }\n\n )\n\n );\n\n}\n\n\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 94, "score": 69941.21663385144 }, { "content": "#[test]\n\nfn test_func_lines_and_lines() {\n\n let data = b\"FUNC 1000 30 10 some func\n\n1000 10 42 7\n\n1010 10 52 8\n\n1020 10 62 15\n\n\";\n\n if let Done(rest, f) = func_lines(data) {\n\n assert_eq!(rest, &b\"\"[..]);\n\n assert_eq!(f.address, 0x1000);\n\n assert_eq!(f.size, 0x30);\n\n assert_eq!(f.parameter_size, 0x10);\n\n assert_eq!(f.name, \"some func\".to_string());\n\n assert_eq!(\n\n f.lines.get(0x1000).unwrap(),\n\n &SourceLine {\n\n address: 0x1000,\n\n size: 0x10,\n\n file: 7,\n\n line: 42,\n\n }\n", "file_path": "breakpad-symbols/src/sym_file/parser.rs", "rank": 95, "score": 69941.21663385144 }, { "content": "/// Like `PathBuf::file_name`, but try to work on Windows or POSIX-style paths.\n\nfn leafname(path: &str) -> &str {\n\n path.rsplit(|c| c == '/' || c == '\\\\')\n\n .next()\n\n .unwrap_or(path)\n\n}\n\n\n", "file_path": "breakpad-symbols/src/lib.rs", "rank": 96, "score": 69504.12548982314 }, { "content": "fn get_test_minidump_path(filename: &str) -> PathBuf {\n\n let mut path = PathBuf::from(file!());\n\n 
path.pop();\n\n path.pop();\n\n path.push(\"testdata\");\n\n path.push(filename);\n\n path\n\n}\n\n\n", "file_path": "tests/test_minidump.rs", "rank": 97, "score": 68851.8068198419 }, { "content": "/// Format `bytes` as a String of hex digits\n\nfn bytes_to_hex(bytes: &[u8]) -> String {\n\n let hex_bytes: Vec<String> = bytes.iter().map(|b| format!(\"{:02x}\", b)).collect();\n\n hex_bytes.join(\"\")\n\n}\n\n\n", "file_path": "src/minidump.rs", "rank": 98, "score": 67802.08826716774 }, { "content": "fn locate_testdata() -> PathBuf {\n\n // This is a little weird because while cargo will always build this code by running rustc\n\n // from the crate root, if you run `cargo test --all` from the workspace root, then the test\n\n // binary will be run from the crate root, so relative paths from `file!` won't work.\n\n let paths = &[\n\n // First, try relative to the current directory for if we're being run from the workspace.\n\n Path::new(\"testdata\"),\n\n // If that doesn't work, try looking in the parent directory.\n\n Path::new(\"../testdata\"),\n\n ];\n\n for path in paths {\n\n if path.is_dir() {\n\n return path.to_path_buf();\n\n }\n\n }\n\n\n\n panic!(\"Couldn't find testdata directory! Tried: {:?}\", paths);\n\n}\n\n\n", "file_path": "minidump-processor/tests/test_processor.rs", "rank": 99, "score": 66415.27672153205 } ]
Rust
tools/lib/cml/macro/src/common.rs
wwjiang007/fuchsia-1
0db66b52b5bcd3e27c8b8c2163925309e8522f94
extern crate proc_macro; use { proc_macro2::TokenStream as TokenStream2, quote::{quote, TokenStreamExt}, syn, }; pub fn gen_visit_str(ty: Option<TokenStream2>, expected: &syn::LitStr) -> TokenStream2 { let ret = match ty { Some(ty) => quote!(#ty(value)), None => quote!(value), }; quote! { fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: serde::de::Error, { let value = value .parse() .map_err(|e| match e { ParseError::InvalidValue => { E::invalid_value(serde::de::Unexpected::Str(value), &#expected) } ParseError::InvalidLength => { E::invalid_length(value.len(), &#expected) } e => { panic!("unexpected parse error: {:?}", e); } })?; Ok(#ret) } } } pub fn gen_visit_seq( ty: TokenStream2, inner_type: &syn::Path, expected: &syn::LitStr, min_length: Option<usize>, unique_items: bool, ) -> TokenStream2 { let inner = { let mut tokens = quote!(); tokens.append_all(quote! { let mut elements = vec![]; while let Some(e) = seq.next_element::<#inner_type>()? { elements.push(e); } }); if let Some(min_length) = min_length { tokens.append_all(quote! { if elements.len() < #min_length { return Err(serde::de::Error::invalid_length(elements.len(), &#expected)); } }); } if unique_items { tokens.append_all(quote! { let mut items = std::collections::HashSet::new(); for e in &elements { if !items.insert(e) { return Err(serde::de::Error::invalid_value( serde::de::Unexpected::Other( "array with duplicate element"), &#expected) ); } } Ok(#ty(elements)) }); } else { tokens.append_all(quote! { Ok(#ty(elements)) }); } tokens }; let mut tokens = quote!(); tokens.append_all(quote! 
{ fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error> where A: serde::de::SeqAccess<'de> { #inner } }); tokens } pub fn extract_inner_type( ast: &syn::DeriveInput, attr: syn::MetaNameValue, inner_type: &mut Option<syn::LitStr>, ) -> Result<(), syn::Error> { match attr.lit { syn::Lit::Str(l) => { if inner_type.is_some() { return Err(syn::Error::new_spanned(ast, "duplicate `inner_type` attribute")); } *inner_type = Some(l); } _ => { return Err(syn::Error::new_spanned( ast, "`inner_type` attribute value must be string", )); } } Ok(()) } pub fn extract_min_length( ast: &syn::DeriveInput, attr: syn::MetaNameValue, min_length: &mut Option<usize>, ) -> Result<(), syn::Error> { match attr.lit { syn::Lit::Int(l) => { if min_length.is_some() { return Err(syn::Error::new_spanned(ast, "duplicate `min_length` attribute")); } let l: usize = l.base10_parse().map_err(|_| { syn::Error::new_spanned(ast, "`min_length` attribute is not base 10") })?; *min_length = Some(l); } _ => { return Err(syn::Error::new_spanned(ast, "`min_length` attribute value must be int")); } } Ok(()) } pub fn extract_unique_items( ast: &syn::DeriveInput, attr: syn::MetaNameValue, unique_items: &mut Option<bool>, ) -> Result<(), syn::Error> { match attr.lit { syn::Lit::Bool(b) => { if unique_items.is_some() { return Err(syn::Error::new_spanned(ast, "duplicate `unique_items` attribute")); } *unique_items = Some(b.value); } _ => { return Err(syn::Error::new_spanned( ast, "`unique_items` attribute value must be bool", )); } } Ok(()) } pub fn ident_from_path(path: &syn::Path) -> String { path.get_ident().map(|i| i.to_string()).unwrap_or_else(|| String::new()) } pub fn extract_expected( ast: &syn::DeriveInput, attr: syn::MetaNameValue, expected: &mut Option<syn::LitStr>, ) -> Result<(), syn::Error> { match attr.lit { syn::Lit::Str(l) => { if expected.is_some() { return Err(syn::Error::new_spanned(ast, "duplicate `expected` attribute")); } *expected = Some(l); } _ => { return 
Err(syn::Error::new_spanned(ast, "`expected` attribute value must be string")); } } Ok(()) }
extern crate proc_macro; use { proc_macro2::TokenStream as TokenStream2, quote::{quote, TokenStreamExt}, syn, }; pub fn gen_visit_str(ty: Option<TokenStream2>, expected: &syn::LitStr) -> TokenStream2 { let ret = match ty { Some(ty) => quote!(#ty(value)), None => quote!(value), }; quote! { fn visit_str<E>(self, value: &str) -> Result<Self::Value, E> where E: serde::de::Error, { let value = value .parse() .map_err(|e| match e { ParseError::InvalidValue => { E::invalid_value(serde::de::Unexpected::Str(value), &#expected) } ParseError::InvalidLength => { E::invalid_length(value.len(), &#expected) } e => { panic!("unexpected parse error: {:?}", e); } })?; Ok(#ret) } } } pub fn gen_visit_seq( ty: TokenStream2, inner_type: &syn::Path, expected: &syn::LitStr, min_length: Option<usize>, unique_items: bool, ) -> TokenStream2 { let inner = { let mut tokens = quote!(); tokens.append_all(quote! { let mut elements = vec![]; while let Some(e) = seq.next_element::<#inner_type>()? { elements.push(e); } }); if let Some(min_length) = min_length { tokens.append_all(quote! { if elements.len() < #min_length { return Err(serde::de::Error::invalid_length(elements.len(), &#expected)); } }); } if unique_items { tokens.append_all(quote! { let mut items = std::collections::HashSet::new(); for e in &elements { if !items.insert(e) { return Err(serde::de::Error::invalid_value( serde::de::Unexpected::Other( "array with duplicate element"), &#expected) ); } } Ok(#ty(elements)) }); } else { tokens.append_all(quote! { Ok(#ty(elements)) }); } tokens }; let mut tokens = quote!(); tokens.append_all(quote! 
{ fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error> where A: serde::de::SeqAccess<'de> { #inner } }); tokens } pub fn extract_inner_type( ast: &syn::DeriveInput, attr: syn::MetaNameValue, inner_type: &mut Option<syn::LitStr>, ) -> Result<(), syn::Error> { match attr.lit { syn::Lit::Str(l) => { if inner_type.is_some() { return Err(syn::Error::new_spanned(ast, "duplicate `inner_type` attribute")); } *inner_type = Some(l); } _ => { return Err(syn::Error::new_spanned( ast, "`inner_type` attribute value must be string", )); } } Ok(()) } pub fn extract_min_length( ast: &syn::DeriveInput, attr: syn::MetaNameValue, min_length: &mut Option<usize>, ) -> Result<(), syn::Error> { match attr.lit { syn::Lit::Int(l) => { if min_length.is_some() { return Err(syn::Error::new_spanned(ast, "duplicate `min_length` attribute")); } let l: usize = l.base10_parse().map_err(|_| { syn::Error::ne
pub fn extract_unique_items( ast: &syn::DeriveInput, attr: syn::MetaNameValue, unique_items: &mut Option<bool>, ) -> Result<(), syn::Error> { match attr.lit { syn::Lit::Bool(b) => { if unique_items.is_some() { return Err(syn::Error::new_spanned(ast, "duplicate `unique_items` attribute")); } *unique_items = Some(b.value); } _ => { return Err(syn::Error::new_spanned( ast, "`unique_items` attribute value must be bool", )); } } Ok(()) } pub fn ident_from_path(path: &syn::Path) -> String { path.get_ident().map(|i| i.to_string()).unwrap_or_else(|| String::new()) } pub fn extract_expected( ast: &syn::DeriveInput, attr: syn::MetaNameValue, expected: &mut Option<syn::LitStr>, ) -> Result<(), syn::Error> { match attr.lit { syn::Lit::Str(l) => { if expected.is_some() { return Err(syn::Error::new_spanned(ast, "duplicate `expected` attribute")); } *expected = Some(l); } _ => { return Err(syn::Error::new_spanned(ast, "`expected` attribute value must be string")); } } Ok(()) }
w_spanned(ast, "`min_length` attribute is not base 10") })?; *min_length = Some(l); } _ => { return Err(syn::Error::new_spanned(ast, "`min_length` attribute value must be int")); } } Ok(()) }
function_block-function_prefixed
[]
Rust
07-rust/stm32l0x1/stm32l0x1_pac/src/lpuart1/icr.rs
aaronhktan/stm32-exploration
dcd7674424cd17b02b85c6b3ce533456d5037d65
#[doc = "Writer for register ICR"] pub type W = crate::W<u32, super::ICR>; #[doc = "Register ICR `reset()`'s with value 0"] impl crate::ResetValue for super::ICR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Write proxy for field `WUCF`"] pub struct WUCF_W<'a> { w: &'a mut W, } impl<'a> WUCF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20); self.w } } #[doc = "Write proxy for field `CMCF`"] pub struct CMCF_W<'a> { w: &'a mut W, } impl<'a> CMCF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17); self.w } } #[doc = "Write proxy for field `CTSCF`"] pub struct CTSCF_W<'a> { w: &'a mut W, } impl<'a> CTSCF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9); self.w } } #[doc = "Write proxy for field `TCCF`"] pub struct TCCF_W<'a> { w: &'a mut W, } impl<'a> TCCF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] 
#[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6); self.w } } #[doc = "Write proxy for field `IDLECF`"] pub struct IDLECF_W<'a> { w: &'a mut W, } impl<'a> IDLECF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Write proxy for field `ORECF`"] pub struct ORECF_W<'a> { w: &'a mut W, } impl<'a> ORECF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Write proxy for field `NCF`"] pub struct NCF_W<'a> { w: &'a mut W, } impl<'a> NCF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Write proxy for field `FECF`"] pub struct FECF_W<'a> { w: &'a mut W, } impl<'a> FECF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = 
r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Write proxy for field `PECF`"] pub struct PECF_W<'a> { w: &'a mut W, } impl<'a> PECF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } impl W { #[doc = "Bit 20 - Wakeup from Stop mode clear flag"] #[inline(always)] pub fn wucf(&mut self) -> WUCF_W { WUCF_W { w: self } } #[doc = "Bit 17 - Character match clear flag"] #[inline(always)] pub fn cmcf(&mut self) -> CMCF_W { CMCF_W { w: self } } #[doc = "Bit 9 - CTS clear flag"] #[inline(always)] pub fn ctscf(&mut self) -> CTSCF_W { CTSCF_W { w: self } } #[doc = "Bit 6 - Transmission complete clear flag"] #[inline(always)] pub fn tccf(&mut self) -> TCCF_W { TCCF_W { w: self } } #[doc = "Bit 4 - Idle line detected clear flag"] #[inline(always)] pub fn idlecf(&mut self) -> IDLECF_W { IDLECF_W { w: self } } #[doc = "Bit 3 - Overrun error clear flag"] #[inline(always)] pub fn orecf(&mut self) -> ORECF_W { ORECF_W { w: self } } #[doc = "Bit 2 - Noise detected clear flag"] #[inline(always)] pub fn ncf(&mut self) -> NCF_W { NCF_W { w: self } } #[doc = "Bit 1 - Framing error clear flag"] #[inline(always)] pub fn fecf(&mut self) -> FECF_W { FECF_W { w: self } } #[doc = "Bit 0 - Parity error clear flag"] #[inline(always)] pub fn pecf(&mut self) -> PECF_W { PECF_W { w: self } } }
#[doc = "Writer for register ICR"] pub type W = crate::W<u32, super::ICR>; #[doc = "Register ICR `reset()`'s with value 0"] impl crate::ResetValue for super::ICR { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Write proxy for field `WUCF`"] pub struct WUCF_W<'a> { w: &'a mut W, } impl<'a> WUCF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20); self.w } } #[doc = "Write proxy for field `CMCF`"] pub struct CMCF_W<'a> { w: &'a mut W, } impl<'a> CMCF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17); self.w } } #[doc = "Write proxy for field `CTSCF`"] pub struct CTSCF_W<'a> { w: &'a mut W, } impl<'a> CTSCF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.b
self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Write proxy for field `PECF`"] pub struct PECF_W<'a> { w: &'a mut W, } impl<'a> PECF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } impl W { #[doc = "Bit 20 - Wakeup from Stop mode clear flag"] #[inline(always)] pub fn wucf(&mut self) -> WUCF_W { WUCF_W { w: self } } #[doc = "Bit 17 - Character match clear flag"] #[inline(always)] pub fn cmcf(&mut self) -> CMCF_W { CMCF_W { w: self } } #[doc = "Bit 9 - CTS clear flag"] #[inline(always)] pub fn ctscf(&mut self) -> CTSCF_W { CTSCF_W { w: self } } #[doc = "Bit 6 - Transmission complete clear flag"] #[inline(always)] pub fn tccf(&mut self) -> TCCF_W { TCCF_W { w: self } } #[doc = "Bit 4 - Idle line detected clear flag"] #[inline(always)] pub fn idlecf(&mut self) -> IDLECF_W { IDLECF_W { w: self } } #[doc = "Bit 3 - Overrun error clear flag"] #[inline(always)] pub fn orecf(&mut self) -> ORECF_W { ORECF_W { w: self } } #[doc = "Bit 2 - Noise detected clear flag"] #[inline(always)] pub fn ncf(&mut self) -> NCF_W { NCF_W { w: self } } #[doc = "Bit 1 - Framing error clear flag"] #[inline(always)] pub fn fecf(&mut self) -> FECF_W { FECF_W { w: self } } #[doc = "Bit 0 - Parity error clear flag"] #[inline(always)] pub fn pecf(&mut self) -> PECF_W { PECF_W { w: self } } }
its = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9); self.w } } #[doc = "Write proxy for field `TCCF`"] pub struct TCCF_W<'a> { w: &'a mut W, } impl<'a> TCCF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6); self.w } } #[doc = "Write proxy for field `IDLECF`"] pub struct IDLECF_W<'a> { w: &'a mut W, } impl<'a> IDLECF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Write proxy for field `ORECF`"] pub struct ORECF_W<'a> { w: &'a mut W, } impl<'a> ORECF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Write proxy for field `NCF`"] pub struct NCF_W<'a> { w: &'a mut W, } impl<'a> NCF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> 
&'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Write proxy for field `FECF`"] pub struct FECF_W<'a> { w: &'a mut W, } impl<'a> FECF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W {
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/generic.rs", "rank": 0, "score": 192988.70578231275 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/generic.rs", "rank": 1, "score": 192988.70578231278 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` 
method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/generic.rs", "rank": 2, "score": 192988.70578231275 }, { "content": "#[entry]\n\nfn main() -> ! { // ! means no return type\n\n // Check out the 'Cortex-M Peripherals' singleton\n\n let cm_p = cortex_m::Peripherals::take().unwrap();\n\n // Set up the SysTick peripheral\n\n // Rust variables are immutable by default; use mut to make mutable\n\n let mut syst = cm_p.SYST;\n\n syst.set_clock_source(SystClkSource::Core);\n\n // ~1s period; STM32F4 by default uses the 16MHz HSI on boot\n\n // (See section 6.2.2 in the reference manual)\n\n syst.set_reload(16_000_000);\n\n syst.enable_counter();\n\n\n\n // Set up GPIO pin A5 as push-pull output\n\n let p = stm32f446::Peripherals::take().unwrap();\n\n let rcc = p.RCC;\n\n // rcc.iopenr is the GPIO clock enable register\n\n // |x| is closure notation in Rust\n\n rcc.ahb1enr.write(|w| w.gpioaen().set_bit());\n\n\n\n // Set moder on fifth pin of GPIOB to 0b01, output\n", "file_path": "07-rust/stm32f446/rust-blink-f446re/src/main.rs", "rank": 3, "score": 134178.0685062561 }, { "content": "#[entry]\n\nfn main() -> ! { // ! 
means no return type\n\n // Check out the 'Cortex-M Peripherals' singleton\n\n let cm_p = cortex_m::Peripherals::take().unwrap();\n\n // Set up the SysTick peripheral\n\n // Rust variables are immutable by default; use mut to make mutable\n\n let mut syst = cm_p.SYST;\n\n syst.set_clock_source(SystClkSource::Core);\n\n // ~2s period; STM32F0 by default uses the 8MHz HSI on boot\n\n // (See section 6.2 of the reference manual)\n\n syst.set_reload(16_000_000);\n\n syst.enable_counter();\n\n\n\n // Set up GPIO pin B3 as push-pull output\n\n let p = stm32f0x1::Peripherals::take().unwrap();\n\n let rcc = p.RCC;\n\n // rcc.iopenr is the GPIO clock enable register\n\n // |x| is closure notation in Rust\n\n rcc.ahbenr.write(|w| w.iopben().set_bit());\n\n\n\n // Set moder on third pin of GPIOB to 0b01, output\n", "file_path": "07-rust/stm32f0x1/rust-blink-f031k6/src/main.rs", "rank": 4, "score": 134178.0685062561 }, { "content": "#[entry]\n\nfn main() -> ! { // ! means no return type\n\n // Check out the 'Cortex-M Peripherals' singleton\n\n let cm_p = cortex_m::Peripherals::take().unwrap();\n\n // Set up the SysTick peripheral\n\n // Rust variables are immutable by default; use mut to make mutable\n\n let mut syst = cm_p.SYST;\n\n syst.set_clock_source(SystClkSource::Core);\n\n // ~2s period; STM32L0 boots to a ~2.1MHz internal oscillator\n\n // (See Section 7.2 of the STM32L0x1 reference manual)\n\n syst.set_reload(4_200_000);\n\n syst.enable_counter();\n\n\n\n // Set up GPIO pin B3 as push-pull output\n\n let p = stm32l0x1::Peripherals::take().unwrap();\n\n let rcc = p.RCC;\n\n // rcc.iopenr is the GPIO clock enable register\n\n // |x| is closure notation in Rust\n\n rcc.iopenr.write(|w| w.iopben().set_bit());\n\n\n\n // Set moder on third pin of GPIOB to 0b01, output\n", "file_path": "07-rust/stm32l0x1/rust-blink-l031k6/src/main.rs", "rank": 5, "score": 134178.0685062561 }, { "content": "TickType_t uxTaskResetEventItemValue( void )\n\n{\n\nTickType_t 
uxReturn;\n\n\n\n\tuxReturn = listGET_LIST_ITEM_VALUE( &( pxCurrentTCB->xEventListItem ) );\n\n\n\n\t/* Reset the event list item to its normal value - so it can be used with\n\n\tqueues and semaphores. */\n\n\tlistSET_LIST_ITEM_VALUE( &( pxCurrentTCB->xEventListItem ), ( ( TickType_t ) configMAX_PRIORITIES - ( TickType_t ) pxCurrentTCB->uxPriority ) ); /*lint !e961 MISRA exception as the casts are only redundant for some ports. */\n\n\n\n\treturn uxReturn;\n", "file_path": "06-freertos/freertos/Source/tasks.c", "rank": 6, "score": 104903.10307163426 }, { "content": "#define portMAX_8_BIT_VALUE\t\t\t\t\t( ( uint8_t ) 0xff )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM4F/port.c", "rank": 7, "score": 100509.13876308527 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/build.rs", "rank": 8, "score": 88441.66588380146 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/build.rs", "rank": 9, "score": 88441.66588380146 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n 
File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/build.rs", "rank": 10, "score": 88441.66588380146 }, { "content": "fn main() {\n\n // Put `memory.x` in our output directory and ensure it's\n\n // on the linker search path.\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n\n\n // By default, Cargo will re-run a build script whenever\n\n // any file in the project changes. By specifying `memory.x`\n\n // here, we ensure the build script is only re-run when\n\n // `memory.x` is changed.\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n}\n", "file_path": "07-rust/stm32f446/rust-blink-f446re/build.rs", "rank": 11, "score": 86885.72247686045 }, { "content": "fn main() {\n\n // Put `memory.x` in our output directory and ensure it's\n\n // on the linker search path.\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n\n\n // By default, Cargo will re-run a build script whenever\n\n // any file in the project changes. 
By specifying `memory.x`\n\n // here, we ensure the build script is only re-run when\n\n // `memory.x` is changed.\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n}\n", "file_path": "07-rust/stm32l0x1/rust-blink-l031k6/build.rs", "rank": 12, "score": 86885.72247686045 }, { "content": "fn main() {\n\n // Put `memory.x` in our output directory and ensure it's\n\n // on the linker search path.\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n\n\n // By default, Cargo will re-run a build script whenever\n\n // any file in the project changes. By specifying `memory.x`\n\n // here, we ensure the build script is only re-run when\n\n // `memory.x` is changed.\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n}\n", "file_path": "07-rust/stm32f0x1/rust-blink-f031k6/build.rs", "rank": 13, "score": 86885.72247686045 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/generic.rs", "rank": 14, "score": 79445.02919668888 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/generic.rs", "rank": 15, "score": 79445.02919668888 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/generic.rs", "rank": 16, "score": 
79445.02919668888 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/generic.rs", "rank": 17, "score": 79431.945204443 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/generic.rs", "rank": 18, "score": 79431.945204443 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/generic.rs", "rank": 19, "score": 79431.945204443 }, { "content": "#[doc = \"Reader of register ICR\"]\n\npub type R = crate::R<u32, super::ICR>;\n\n#[doc = \"Writer for register ICR\"]\n\npub type W = crate::W<u32, super::ICR>;\n\n#[doc = \"Register ICR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ICR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `WUCF`\"]\n\npub type WUCF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `WUCF`\"]\n\npub struct WUCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> WUCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usart1/icr.rs", "rank": 20, "score": 60421.94477550142 }, { "content": "#[doc = \"Writer for register ICR\"]\n\npub type W = crate::W<u32, super::ICR>;\n\n#[doc = \"Register ICR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ICR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `WUCF`\"]\n\npub struct WUCF_W<'a> {\n\n 
w: &'a mut W,\n\n}\n\nimpl<'a> WUCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/usart1/icr.rs", "rank": 22, "score": 60420.80601800152 }, { "content": "#[doc = \"Reader of register ICR\"]\n\npub type R = crate::R<u32, super::ICR>;\n\n#[doc = \"Writer for register ICR\"]\n\npub type W = crate::W<u32, super::ICR>;\n\n#[doc = \"Register ICR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ICR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MCEIC`\"]\n\npub type MCEIC_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `MCEIC`\"]\n\npub struct MCEIC_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MCEIC_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/tsc/icr.rs", "rank": 23, "score": 60413.96434165216 }, { "content": "#[doc = \"Reader of register ICR\"]\n\npub type R = crate::R<u32, super::ICR>;\n\n#[doc = \"Writer for register ICR\"]\n\npub type W = crate::W<u32, super::ICR>;\n\n#[doc = \"Register ICR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ICR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ESYNCC`\"]\n\npub type ESYNCC_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ESYNCC`\"]\n\npub struct ESYNCC_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ESYNCC_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/crs/icr.rs", "rank": 24, "score": 60413.96434165216 }, { "content": "#[doc = \"Writer for register ICR\"]\n\npub type W = crate::W<u32, super::ICR>;\n\n#[doc = \"Register ICR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ICR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type 
{\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `DOWNCF`\"]\n\npub struct DOWNCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DOWNCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/lptim/icr.rs", "rank": 25, "score": 60413.503326640945 }, { "content": "#[doc = \"Writer for register ICR\"]\n\npub type W = crate::W<u32, super::ICR>;\n\n#[doc = \"Register ICR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ICR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ALERTCF`\"]\n\npub struct ALERTCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ALERTCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/i2c1/icr.rs", "rank": 26, "score": 60413.503326640945 }, { "content": "#[doc = \"Writer for register ICR\"]\n\npub type W = crate::W<u32, super::ICR>;\n\n#[doc = \"Register ICR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ICR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ALERTCF`\"]\n\npub struct ALERTCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ALERTCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/i2c1/icr.rs", "rank": 27, "score": 60413.503326640945 }, { "content": "#[doc = \"Writer for register ICR\"]\n\npub type W = crate::W<u32, super::ICR>;\n\n#[doc = \"Register ICR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ICR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy 
for field `LINE_ISC`\"]\n\npub struct LINE_ISC_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LINE_ISC_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/dcmi/icr.rs", "rank": 28, "score": 60412.32105841868 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CTSCF`\"]\n\npub type CTSCF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CTSCF`\"]\n\npub struct CTSCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CTSCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usart1/icr.rs", "rank": 29, "score": 60400.301644176274 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CMCF`\"]\n\npub struct CMCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CMCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/usart1/icr.rs", "rank": 32, "score": 60397.89296444453 }, { "content": "impl<'a> RTOCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n 
self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CTSCF`\"]\n\npub struct CTSCF_W<'a> {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/usart1/icr.rs", "rank": 34, "score": 60396.38404071972 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CMCF`\"]\n\npub type CMCF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CMCF`\"]\n\npub struct CMCF_W<'a> {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usart1/icr.rs", "rank": 35, "score": 60394.743575649256 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `NCF`\"]\n\npub type NCF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `NCF`\"]\n\npub struct NCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> NCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usart1/icr.rs", "rank": 36, "score": 60392.087510481775 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = 
r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 20 - Wakeup from Stop mode clear flag\"]\n\n #[inline(always)]\n\n pub fn wucf(&mut self) -> WUCF_W {\n\n WUCF_W { w: self }\n\n }\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/usart1/icr.rs", "rank": 37, "score": 60391.71570270865 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `FECF`\"]\n\npub struct FECF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> FECF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/usart1/icr.rs", "rank": 38, "score": 60391.49680419799 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `FECF`\"]\n\npub type FECF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `FECF`\"]\n\npub struct FECF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> FECF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usart1/icr.rs", "rank": 39, "score": 60391.339878112034 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes 
raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `LBDCF`\"]\n\npub type LBDCF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `LBDCF`\"]\n\npub struct LBDCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LBDCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usart1/icr.rs", "rank": 40, "score": 60391.339878112034 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `PECCF`\"]\n\npub struct PECCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PECCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/i2c1/icr.rs", "rank": 41, "score": 60391.02759114531 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ARROKCF`\"]\n\npub struct ARROKCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ARROKCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/lptim/icr.rs", "rank": 42, "score": 60391.02759114531 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn 
clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `PECCF`\"]\n\npub struct PECCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PECCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/i2c1/icr.rs", "rank": 43, "score": 60391.02759114531 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `EOBCF`\"]\n\npub struct EOBCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> EOBCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/usart1/icr.rs", "rank": 44, "score": 60391.02759114531 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `NCF`\"]\n\npub struct NCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> NCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/usart1/icr.rs", "rank": 45, "score": 60390.930251127334 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n 
self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `TIMOUTCF`\"]\n\npub struct TIMOUTCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TIMOUTCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/i2c1/icr.rs", "rank": 46, "score": 60390.59030281414 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `UPCF`\"]\n\npub struct UPCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> UPCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/lptim/icr.rs", "rank": 47, "score": 60390.59030281414 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `TIMOUTCF`\"]\n\npub struct TIMOUTCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TIMOUTCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/i2c1/icr.rs", "rank": 48, "score": 60390.59030281414 }, { "content": " 
#[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `PECF`\"]\n\npub struct PECF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PECF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/usart1/icr.rs", "rank": 49, "score": 60390.59030281414 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> CMCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);\n\n self.w\n\n }\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usart1/icr.rs", "rank": 50, "score": 60390.32316343432 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> CTSCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/usart1/icr.rs", "rank": 51, "score": 60390.32316343432 }, { "content": "#[doc = \"Write 
proxy for field `BERRCF`\"]\n\npub struct BERRCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BERRCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/i2c1/icr.rs", "rank": 52, "score": 60390.09017970864 }, { "content": "#[doc = \"Write proxy for field `ARRMCF`\"]\n\npub struct ARRMCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ARRMCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/lptim/icr.rs", "rank": 53, "score": 60390.09017970864 }, { "content": "#[doc = \"Write proxy for field `LBDCF`\"]\n\npub struct LBDCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LBDCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n", 
"file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/usart1/icr.rs", "rank": 54, "score": 60390.09017970864 }, { "content": "#[doc = \"Write proxy for field `BERRCF`\"]\n\npub struct BERRCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BERRCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/i2c1/icr.rs", "rank": 56, "score": 60390.09017970864 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ERR_ISC`\"]\n\npub struct ERR_ISC_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ERR_ISC_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/dcmi/icr.rs", "rank": 57, "score": 60390.07159260968 }, { "content": "impl<'a> CMPOKCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n 
}\n\n}\n\n#[doc = \"Write proxy for field `EXTTRIGCF`\"]\n\npub struct EXTTRIGCF_W<'a> {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/lptim/icr.rs", "rank": 58, "score": 60389.97091118058 }, { "content": "impl<'a> OVRCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ARLOCF`\"]\n\npub struct ARLOCF_W<'a> {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/i2c1/icr.rs", "rank": 60, "score": 60389.97091118058 }, { "content": "impl<'a> OVRCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ARLOCF`\"]\n\npub struct ARLOCF_W<'a> {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/i2c1/icr.rs", "rank": 61, "score": 60389.97091118058 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field 
`VSYNC_ISC`\"]\n\npub struct VSYNC_ISC_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VSYNC_ISC_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/dcmi/icr.rs", "rank": 62, "score": 60389.71563109678 }, { "content": "impl<'a> OVR_ISC_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `FRAME_ISC`\"]\n\npub struct FRAME_ISC_W<'a> {\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/dcmi/icr.rs", "rank": 63, "score": 60389.08294288497 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CMPOKCF`\"]\n\npub struct CMPOKCF_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/lptim/icr.rs", "rank": 65, "score": 60387.185015510724 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: 
bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `OVRCF`\"]\n\npub struct OVRCF_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/i2c1/icr.rs", "rank": 66, "score": 60387.185015510724 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `RTOCF`\"]\n\npub struct RTOCF_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/usart1/icr.rs", "rank": 67, "score": 60387.185015510724 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `OVRCF`\"]\n\npub struct OVRCF_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/i2c1/icr.rs", "rank": 68, "score": 60387.185015510724 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W 
{\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `ERRC`\"]\n\npub type ERRC_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ERRC`\"]\n\npub struct ERRC_W<'a> {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/crs/icr.rs", "rank": 69, "score": 60386.99525539531 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TCCF`\"]\n\npub type TCCF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TCCF`\"]\n\npub struct TCCF_W<'a> {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usart1/icr.rs", "rank": 70, "score": 60386.99525539531 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `EOAIC`\"]\n\npub type EOAIC_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `EOAIC`\"]\n\npub struct EOAIC_W<'a> {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/tsc/icr.rs", "rank": 71, "score": 60386.99525539531 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W 
{\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `PECF`\"]\n\npub type PECF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `PECF`\"]\n\npub struct PECF_W<'a> {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usart1/icr.rs", "rank": 72, "score": 60386.99525539531 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `OVR_ISC`\"]\n\npub struct OVR_ISC_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/dcmi/icr.rs", "rank": 73, "score": 60386.633333828046 }, { "content": "#[doc = \"Reader of field `IDLECF`\"]\n\npub type IDLECF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `IDLECF`\"]\n\npub struct IDLECF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> IDLECF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usart1/icr.rs", "rank": 74, "score": 60385.826660889914 }, { "content": "#[doc = \"Reader of field `SYNCWARNC`\"]\n\npub type SYNCWARNC_R = crate::R<bool, bool>;\n\n#[doc = 
\"Write proxy for field `SYNCWARNC`\"]\n\npub struct SYNCWARNC_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SYNCWARNC_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/crs/icr.rs", "rank": 75, "score": 60385.826660889914 }, { "content": "#[doc = \"Reader of field `EOBCF`\"]\n\npub type EOBCF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `EOBCF`\"]\n\npub struct EOBCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> EOBCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usart1/icr.rs", "rank": 76, "score": 60385.826660889914 }, { "content": " }\n\n}\n\n#[doc = \"Write proxy for field `CMPMCF`\"]\n\npub struct CMPMCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CMPMCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/lptim/icr.rs", "rank": 77, "score": 60385.69682528463 }, { "content": " }\n\n}\n\n#[doc = \"Write proxy for field 
`STOPCF`\"]\n\npub struct STOPCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> STOPCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/i2c1/icr.rs", "rank": 78, "score": 60385.69682528463 }, { "content": " }\n\n}\n\n#[doc = \"Write proxy for field `STOPCF`\"]\n\npub struct STOPCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> STOPCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/i2c1/icr.rs", "rank": 80, "score": 60385.69682528463 }, { "content": " }\n\n}\n\n#[doc = \"Write proxy for field `TCCF`\"]\n\npub struct TCCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TCCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/usart1/icr.rs", "rank": 81, "score": 60385.69682528463 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> TCCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n 
self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);\n\n self.w\n\n }\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usart1/icr.rs", "rank": 82, "score": 60385.440534339934 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> ARLOCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/i2c1/icr.rs", "rank": 83, "score": 60385.440534339934 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> EXTTRIGCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/lptim/icr.rs", "rank": 84, "score": 60385.440534339934 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> ARLOCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field 
bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9);\n\n self.w\n\n }\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/i2c1/icr.rs", "rank": 85, "score": 60385.440534339934 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> ERRC_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/crs/icr.rs", "rank": 86, "score": 60385.440534339934 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> PECF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usart1/icr.rs", "rank": 87, "score": 60385.440534339934 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> EOAIC_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n 
self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/tsc/icr.rs", "rank": 89, "score": 60385.440534339934 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `NACKCF`\"]\n\npub struct NACKCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> NACKCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/i2c1/icr.rs", "rank": 91, "score": 60385.21152503835 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `IDLECF`\"]\n\npub struct IDLECF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> IDLECF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/usart1/icr.rs", "rank": 92, "score": 60385.21152503835 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `NACKCF`\"]\n\npub struct NACKCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> NACKCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n 
self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/i2c1/icr.rs", "rank": 93, "score": 60385.21152503835 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ORECF`\"]\n\npub struct ORECF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ORECF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/usart1/icr.rs", "rank": 95, "score": 60385.16399779319 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ADDRCF`\"]\n\npub struct ADDRCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADDRCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/i2c1/icr.rs", "rank": 96, "score": 60385.16399779319 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ADDRCF`\"]\n\npub struct ADDRCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADDRCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n 
#[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/i2c1/icr.rs", "rank": 97, "score": 60385.16399779319 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> FRAME_ISC_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/dcmi/icr.rs", "rank": 98, "score": 60385.15542720932 }, { "content": " self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RTOCF`\"]\n\npub type RTOCF_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `RTOCF`\"]\n\npub struct RTOCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RTOCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usart1/icr.rs", "rank": 99, "score": 60383.8563202847 } ]
Rust
src/mp/integer/integer_ops.rs
selpoG/qboot-rs
ff7bb5bf6486689f4610435121224d6b21bc171d
use super::super::mp; use std::cmp::Ordering; use std::ops::{ Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Rem, RemAssign, Sub, SubAssign, }; use super::super::{Long, ULong}; use super::integer::Integer; fn _add(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: mp::mpz_srcptr) { unsafe { mp::__gmpz_add(target, op1, op2); } } fn _add_ui(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: ULong) { unsafe { mp::__gmpz_add_ui(target, op1, op2); } } fn _add_si(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: Long) { unsafe { if op2 >= 0 { mp::__gmpz_add_ui(target, op1, op2 as ULong); } else { mp::__gmpz_sub_ui(target, op1, -op2 as ULong); } } } fn _mul(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: mp::mpz_srcptr) { unsafe { mp::__gmpz_mul(target, op1, op2); } } fn _mul_ui(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: ULong) { unsafe { mp::__gmpz_mul_ui(target, op1, op2); } } fn _mul_si(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: Long) { unsafe { mp::__gmpz_mul_si(target, op1, op2); } } fn _sub(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: mp::mpz_srcptr) { unsafe { mp::__gmpz_sub(target, op1, op2); } } fn _sub_ui(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: ULong) { unsafe { mp::__gmpz_sub_ui(target, op1, op2); } } fn _sub_si(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: Long) { unsafe { if op2 >= 0 { mp::__gmpz_sub_ui(target, op1, op2 as ULong); } else { mp::__gmpz_add_ui(target, op1, -op2 as ULong); } } } fn _ui_sub(target: mp::mpz_ptr, op1: ULong, op2: mp::mpz_srcptr) { unsafe { mp::__gmpz_ui_sub(target, op1, op2); } } fn _si_sub(target: mp::mpz_ptr, op1: Long, op2: mp::mpz_srcptr) { unsafe { _sub_si(target, op2, op1); mp::__gmpz_neg(target, target); } } fn _div(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: mp::mpz_srcptr) { unsafe { mp::__gmpz_fdiv_q(target, op1, op2); } } fn _div_ui(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: ULong) { unsafe { mp::__gmpz_fdiv_q_ui(target, op1, op2); } } fn _ui_div(target: mp::mpz_ptr, op1: ULong, op2: mp::mpz_srcptr) { 
unsafe { if mp::__gmpz_cmp_ui(op2, 0) < 0 { let mut x: mp::__mpz_struct = std::mem::MaybeUninit::uninit().assume_init(); mp::__gmpz_init_set(&mut x, op2); mp::__gmpz_neg(&mut x, &x); _ui_div(target, op1, &x); mp::__gmpz_clear(&mut x); } else if mp::__gmpz_cmp_ui(op2, op1) > 0 { mp::__gmpz_set_ui(target, 0); } else { let op2 = mp::__gmpz_get_ui(op2); mp::__gmpz_set_ui(target, op1 / op2); } } } fn _rem(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: mp::mpz_srcptr) { unsafe { mp::__gmpz_fdiv_r(target, op1, op2); } } fn _rem_ui(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: ULong) { unsafe { mp::__gmpz_fdiv_r_ui(target, op1, op2); } } fn _ui_rem(target: mp::mpz_ptr, op1: ULong, op2: mp::mpz_srcptr) { unsafe { if mp::__gmpz_cmp_ui(op2, op1) > 0 { mp::__gmpz_set_ui(target, op1); } else { let op2 = if mp::__gmpz_cmp_ui(op2, 0) >= 0 { mp::__gmpz_get_ui(op2) } else { let mut x: mp::__mpz_struct = std::mem::MaybeUninit::uninit().assume_init(); mp::__gmpz_init_set(&mut x, op2); mp::__gmpz_neg(&mut x, &x); let val = mp::__gmpz_get_ui(&x); mp::__gmpz_clear(&mut x); val }; mp::__gmpz_set_ui(target, op1 % op2); } } } macro_rules! define_flipped { ($f: ident, $f2: ident, $T: ty) => { fn $f2(target: mp::mpz_ptr, op1: $T, op2: mp::mpz_srcptr) { $f(target, op2, op1); } }; } macro_rules! define_binops { ($Op: ident, $op_name: ident, $f: ident, $f2:ident, $T: ty) => { impl $Op<$T> for &Integer { type Output = Integer; fn $op_name(self, rhs: $T) -> Integer { let mut x = Integer::new(); $f(&mut x.data, &self.data, rhs); x } } impl $Op<$T> for Integer { type Output = Integer; fn $op_name(mut self, rhs: $T) -> Integer { $f(&mut self.data, &self.data, rhs); self } } impl $Op<&Integer> for $T { type Output = Integer; fn $op_name(self, rhs: &Integer) -> Integer { let mut x = Integer::new(); $f2(&mut x.data, self, &rhs.data); x } } impl $Op<Integer> for $T { type Output = Integer; fn $op_name(self, mut rhs: Integer) -> Integer { $f2(&mut rhs.data, self, &rhs.data); rhs } } }; } macro_rules! 
define_assign { ($Op: ident, $op_name: ident, $f: ident, $T: ty) => { impl $Op<$T> for Integer { fn $op_name(&mut self, rhs: $T) { $f(&mut self.data, &self.data, rhs); } } }; } macro_rules! define_add { ($f: ident, $f2: ident, $T: ty) => { define_flipped!($f, $f2, $T); define_binops!(Add, add, $f, $f2, $T); }; } macro_rules! define_add_assign { ($f: ident, $T: ty) => { define_assign!(AddAssign, add_assign, $f, $T); }; } macro_rules! define_mul { ($f: ident, $f2: ident, $T: ty) => { define_flipped!($f, $f2, $T); define_binops!(Mul, mul, $f, $f2, $T); }; } macro_rules! define_mul_assign { ($f: ident, $T: ty) => { define_assign!(MulAssign, mul_assign, $f, $T); }; } macro_rules! define_sub { ($f: ident, $f2: ident, $T: ty) => { define_binops!(Sub, sub, $f, $f2, $T); }; } macro_rules! define_sub_assign { ($f: ident, $T: ty) => { define_assign!(SubAssign, sub_assign, $f, $T); }; } macro_rules! define_div { ($f: ident, $f2: ident, $T: ty) => { define_binops!(Div, div, $f, $f2, $T); }; } macro_rules! define_div_assign { ($f: ident, $T: ty) => { define_assign!(DivAssign, div_assign, $f, $T); }; } macro_rules! define_addmul { ($f_add: ident, $f_add2: ident, $f_mul: ident, $f_mul2: ident, $T: ty) => { define_add!($f_add, $f_add2, $T); define_add_assign!($f_add, $T); define_mul!($f_mul, $f_mul2, $T); define_mul_assign!($f_mul, $T); }; } macro_rules! define_subdiv { ($f_sub: ident, $f_sub2: ident, $f_div: ident, $f_div2: ident, $T: ty) => { define_sub!($f_sub, $f_sub2, $T); define_sub_assign!($f_sub, $T); define_div!($f_div, $f_div2, $T); define_div_assign!($f_div, $T); }; } define_addmul!(_add_ui, _ui_add, _mul_ui, _ui_mul, ULong); define_addmul!(_add_si, _si_add, _mul_si, _si_mul, Long); define_subdiv!(_sub_ui, _ui_sub, _div_ui, _ui_div, ULong); define_binops!(Rem, rem, _rem_ui, _ui_rem, ULong); define_assign!(RemAssign, rem_assign, _rem_ui, ULong); define_sub!(_sub_si, _si_sub, Long); define_sub_assign!(_sub_si, Long); macro_rules! 
define_realops { ($Op: ident, $op_name: ident, $f: ident) => { impl $Op for &Integer { type Output = Integer; fn $op_name(self, rhs: &Integer) -> Integer { let mut x = Integer::new(); $f(&mut x.data, &self.data, &rhs.data); x } } impl $Op<Integer> for &Integer { type Output = Integer; fn $op_name(self, mut rhs: Integer) -> Integer { $f(&mut rhs.data, &self.data, &rhs.data); rhs } } impl $Op<&Integer> for Integer { type Output = Integer; fn $op_name(mut self, rhs: &Integer) -> Integer { $f(&mut self.data, &self.data, &rhs.data); self } } impl $Op for Integer { type Output = Integer; fn $op_name(mut self, rhs: Integer) -> Integer { $f(&mut self.data, &self.data, &rhs.data); self } } }; } macro_rules! define_realassign { ($Op: ident, $op_name: ident, $f: ident) => { impl $Op<&Integer> for Integer { fn $op_name(&mut self, rhs: &Integer) { $f(&mut self.data, &self.data, &rhs.data); } } impl $Op for Integer { fn $op_name(&mut self, rhs: Integer) { $f(&mut self.data, &self.data, &rhs.data); } } }; } define_realops!(Add, add, _add); define_realops!(Mul, mul, _mul); define_realops!(Sub, sub, _sub); define_realops!(Div, div, _div); define_realops!(Rem, rem, _rem); define_realassign!(AddAssign, add_assign, _add); define_realassign!(MulAssign, mul_assign, _mul); define_realassign!(SubAssign, sub_assign, _sub); define_realassign!(DivAssign, div_assign, _div); define_realassign!(RemAssign, rem_assign, _rem); impl Neg for &Integer { type Output = Integer; fn neg(self) -> Integer { let mut x = Integer::new(); unsafe { mp::__gmpz_neg(&mut x.data, &self.data) } x } } impl Neg for Integer { type Output = Integer; fn neg(mut self) -> Integer { unsafe { mp::__gmpz_neg(&mut self.data, &self.data) } self } } impl PartialEq for Integer { fn eq(&self, other: &Self) -> bool { self.cmp(other) == Ordering::Equal } } impl Eq for Integer {} impl PartialOrd for Integer { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl Ord for Integer { fn cmp(&self, 
other: &Self) -> Ordering { unsafe { mp::__gmpz_cmp(&self.data, &other.data).cmp(&0) } } }
use super::super::mp; use std::cmp::Ordering; use std::ops::{ Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Rem, RemAssign, Sub, SubAssign, }; use super::super::{Long, ULong}; use super::integer::Integer; fn _add(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: mp::mpz_srcptr) { unsafe { mp::__gmpz_add(target, op1, op2); } } fn _add_ui(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: ULong) { unsafe { mp::__gmpz_add_ui(target, op1, op2); } } fn _add_si(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: Long) { unsafe { if op2 >= 0 { mp::__gmpz_add_ui(target, op1, op2 as ULong); } else { mp::__gmpz_sub_ui(target, op1, -op2 as ULong); } } } fn _mul(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: mp::mpz_srcptr) { unsafe { mp::__gmpz_mul(target, op1, op2); } } fn _mul_ui(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: ULong) { unsafe { mp::__gmpz_mul_ui(target, op1, op2); } } fn _mul_si(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: Long) { unsafe { mp::__gmpz_mul_si(target, op1, op2); } } fn _sub(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: mp::mpz_srcptr) { unsafe { mp::__gmpz_sub(target, op1, op2); } } fn _sub_ui(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: ULong) { unsafe { mp::__gmpz_sub_ui(target, op1, op2); } }
fn _ui_sub(target: mp::mpz_ptr, op1: ULong, op2: mp::mpz_srcptr) { unsafe { mp::__gmpz_ui_sub(target, op1, op2); } } fn _si_sub(target: mp::mpz_ptr, op1: Long, op2: mp::mpz_srcptr) { unsafe { _sub_si(target, op2, op1); mp::__gmpz_neg(target, target); } } fn _div(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: mp::mpz_srcptr) { unsafe { mp::__gmpz_fdiv_q(target, op1, op2); } } fn _div_ui(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: ULong) { unsafe { mp::__gmpz_fdiv_q_ui(target, op1, op2); } } fn _ui_div(target: mp::mpz_ptr, op1: ULong, op2: mp::mpz_srcptr) { unsafe { if mp::__gmpz_cmp_ui(op2, 0) < 0 { let mut x: mp::__mpz_struct = std::mem::MaybeUninit::uninit().assume_init(); mp::__gmpz_init_set(&mut x, op2); mp::__gmpz_neg(&mut x, &x); _ui_div(target, op1, &x); mp::__gmpz_clear(&mut x); } else if mp::__gmpz_cmp_ui(op2, op1) > 0 { mp::__gmpz_set_ui(target, 0); } else { let op2 = mp::__gmpz_get_ui(op2); mp::__gmpz_set_ui(target, op1 / op2); } } } fn _rem(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: mp::mpz_srcptr) { unsafe { mp::__gmpz_fdiv_r(target, op1, op2); } } fn _rem_ui(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: ULong) { unsafe { mp::__gmpz_fdiv_r_ui(target, op1, op2); } } fn _ui_rem(target: mp::mpz_ptr, op1: ULong, op2: mp::mpz_srcptr) { unsafe { if mp::__gmpz_cmp_ui(op2, op1) > 0 { mp::__gmpz_set_ui(target, op1); } else { let op2 = if mp::__gmpz_cmp_ui(op2, 0) >= 0 { mp::__gmpz_get_ui(op2) } else { let mut x: mp::__mpz_struct = std::mem::MaybeUninit::uninit().assume_init(); mp::__gmpz_init_set(&mut x, op2); mp::__gmpz_neg(&mut x, &x); let val = mp::__gmpz_get_ui(&x); mp::__gmpz_clear(&mut x); val }; mp::__gmpz_set_ui(target, op1 % op2); } } } macro_rules! define_flipped { ($f: ident, $f2: ident, $T: ty) => { fn $f2(target: mp::mpz_ptr, op1: $T, op2: mp::mpz_srcptr) { $f(target, op2, op1); } }; } macro_rules! 
define_binops { ($Op: ident, $op_name: ident, $f: ident, $f2:ident, $T: ty) => { impl $Op<$T> for &Integer { type Output = Integer; fn $op_name(self, rhs: $T) -> Integer { let mut x = Integer::new(); $f(&mut x.data, &self.data, rhs); x } } impl $Op<$T> for Integer { type Output = Integer; fn $op_name(mut self, rhs: $T) -> Integer { $f(&mut self.data, &self.data, rhs); self } } impl $Op<&Integer> for $T { type Output = Integer; fn $op_name(self, rhs: &Integer) -> Integer { let mut x = Integer::new(); $f2(&mut x.data, self, &rhs.data); x } } impl $Op<Integer> for $T { type Output = Integer; fn $op_name(self, mut rhs: Integer) -> Integer { $f2(&mut rhs.data, self, &rhs.data); rhs } } }; } macro_rules! define_assign { ($Op: ident, $op_name: ident, $f: ident, $T: ty) => { impl $Op<$T> for Integer { fn $op_name(&mut self, rhs: $T) { $f(&mut self.data, &self.data, rhs); } } }; } macro_rules! define_add { ($f: ident, $f2: ident, $T: ty) => { define_flipped!($f, $f2, $T); define_binops!(Add, add, $f, $f2, $T); }; } macro_rules! define_add_assign { ($f: ident, $T: ty) => { define_assign!(AddAssign, add_assign, $f, $T); }; } macro_rules! define_mul { ($f: ident, $f2: ident, $T: ty) => { define_flipped!($f, $f2, $T); define_binops!(Mul, mul, $f, $f2, $T); }; } macro_rules! define_mul_assign { ($f: ident, $T: ty) => { define_assign!(MulAssign, mul_assign, $f, $T); }; } macro_rules! define_sub { ($f: ident, $f2: ident, $T: ty) => { define_binops!(Sub, sub, $f, $f2, $T); }; } macro_rules! define_sub_assign { ($f: ident, $T: ty) => { define_assign!(SubAssign, sub_assign, $f, $T); }; } macro_rules! define_div { ($f: ident, $f2: ident, $T: ty) => { define_binops!(Div, div, $f, $f2, $T); }; } macro_rules! define_div_assign { ($f: ident, $T: ty) => { define_assign!(DivAssign, div_assign, $f, $T); }; } macro_rules! 
define_addmul { ($f_add: ident, $f_add2: ident, $f_mul: ident, $f_mul2: ident, $T: ty) => { define_add!($f_add, $f_add2, $T); define_add_assign!($f_add, $T); define_mul!($f_mul, $f_mul2, $T); define_mul_assign!($f_mul, $T); }; } macro_rules! define_subdiv { ($f_sub: ident, $f_sub2: ident, $f_div: ident, $f_div2: ident, $T: ty) => { define_sub!($f_sub, $f_sub2, $T); define_sub_assign!($f_sub, $T); define_div!($f_div, $f_div2, $T); define_div_assign!($f_div, $T); }; } define_addmul!(_add_ui, _ui_add, _mul_ui, _ui_mul, ULong); define_addmul!(_add_si, _si_add, _mul_si, _si_mul, Long); define_subdiv!(_sub_ui, _ui_sub, _div_ui, _ui_div, ULong); define_binops!(Rem, rem, _rem_ui, _ui_rem, ULong); define_assign!(RemAssign, rem_assign, _rem_ui, ULong); define_sub!(_sub_si, _si_sub, Long); define_sub_assign!(_sub_si, Long); macro_rules! define_realops { ($Op: ident, $op_name: ident, $f: ident) => { impl $Op for &Integer { type Output = Integer; fn $op_name(self, rhs: &Integer) -> Integer { let mut x = Integer::new(); $f(&mut x.data, &self.data, &rhs.data); x } } impl $Op<Integer> for &Integer { type Output = Integer; fn $op_name(self, mut rhs: Integer) -> Integer { $f(&mut rhs.data, &self.data, &rhs.data); rhs } } impl $Op<&Integer> for Integer { type Output = Integer; fn $op_name(mut self, rhs: &Integer) -> Integer { $f(&mut self.data, &self.data, &rhs.data); self } } impl $Op for Integer { type Output = Integer; fn $op_name(mut self, rhs: Integer) -> Integer { $f(&mut self.data, &self.data, &rhs.data); self } } }; } macro_rules! 
define_realassign { ($Op: ident, $op_name: ident, $f: ident) => { impl $Op<&Integer> for Integer { fn $op_name(&mut self, rhs: &Integer) { $f(&mut self.data, &self.data, &rhs.data); } } impl $Op for Integer { fn $op_name(&mut self, rhs: Integer) { $f(&mut self.data, &self.data, &rhs.data); } } }; } define_realops!(Add, add, _add); define_realops!(Mul, mul, _mul); define_realops!(Sub, sub, _sub); define_realops!(Div, div, _div); define_realops!(Rem, rem, _rem); define_realassign!(AddAssign, add_assign, _add); define_realassign!(MulAssign, mul_assign, _mul); define_realassign!(SubAssign, sub_assign, _sub); define_realassign!(DivAssign, div_assign, _div); define_realassign!(RemAssign, rem_assign, _rem); impl Neg for &Integer { type Output = Integer; fn neg(self) -> Integer { let mut x = Integer::new(); unsafe { mp::__gmpz_neg(&mut x.data, &self.data) } x } } impl Neg for Integer { type Output = Integer; fn neg(mut self) -> Integer { unsafe { mp::__gmpz_neg(&mut self.data, &self.data) } self } } impl PartialEq for Integer { fn eq(&self, other: &Self) -> bool { self.cmp(other) == Ordering::Equal } } impl Eq for Integer {} impl PartialOrd for Integer { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl Ord for Integer { fn cmp(&self, other: &Self) -> Ordering { unsafe { mp::__gmpz_cmp(&self.data, &other.data).cmp(&0) } } }
fn _sub_si(target: mp::mpz_ptr, op1: mp::mpz_srcptr, op2: Long) { unsafe { if op2 >= 0 { mp::__gmpz_sub_ui(target, op1, op2 as ULong); } else { mp::__gmpz_add_ui(target, op1, -op2 as ULong); } } }
function_block-full_function
[ { "content": "fn _mul_si(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: Long) {\n\n unsafe {\n\n mp::mpfr_mul_si(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 0, "score": 149717.46631052246 }, { "content": "fn _add_si(target: mp::mpq_ptr, op1: mp::mpq_srcptr, op2: Long) {\n\n unsafe {\n\n if target as mp::mpq_srcptr != op1 {\n\n mp::__gmpq_set(target, op1);\n\n }\n\n if op2 >= 0 {\n\n mp::__gmpz_addmul_ui(nummut(target), denref(op1), op2 as ULong);\n\n } else {\n\n mp::__gmpz_submul_ui(nummut(target), denref(op1), -op2 as ULong);\n\n }\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 1, "score": 149717.46631052246 }, { "content": "fn _mul_si(target: mp::mpq_ptr, op1: mp::mpq_srcptr, op2: Long) {\n\n unsafe {\n\n if target as mp::mpq_srcptr != op1 {\n\n mp::__gmpz_set(denmut(target), denref(op1));\n\n }\n\n mp::__gmpz_mul_si(nummut(target), numref(op1), op2);\n\n mp::__gmpq_canonicalize(target);\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 2, "score": 149717.46631052246 }, { "content": "fn _add_si(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: Long) {\n\n unsafe {\n\n mp::mpfr_add_si(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 4, "score": 149717.46631052246 }, { "content": "fn _div_si(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: Long) {\n\n unsafe {\n\n mp::mpfr_div_si(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 6, "score": 149505.51749220956 }, { "content": "fn _div_si(target: mp::mpq_ptr, op1: mp::mpq_srcptr, op2: Long) {\n\n unsafe {\n\n if target as mp::mpq_srcptr != op1 {\n\n mp::__gmpz_set(nummut(target), numref(op1));\n\n }\n\n mp::__gmpz_mul_si(denmut(target), denref(op1), op2);\n\n mp::__gmpq_canonicalize(target);\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 7, "score": 149505.51749220956 }, { "content": "fn _si_div(target: 
mp::mpfr_ptr, op1: Long, op2: mp::mpfr_srcptr) {\n\n unsafe {\n\n mp::mpfr_si_div(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 8, "score": 149505.51749220956 }, { "content": "fn _si_div(target: mp::mpq_ptr, op1: Long, op2: mp::mpq_srcptr) {\n\n _div_si(target, op2, op1);\n\n unsafe {\n\n mp::__gmpq_inv(target, target);\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 9, "score": 149505.51749220956 }, { "content": "fn _sub_si(target: mp::mpq_ptr, op1: mp::mpq_srcptr, op2: Long) {\n\n unsafe {\n\n if target as mp::mpq_srcptr != op1 {\n\n mp::__gmpq_set(target, op1);\n\n }\n\n if op2 >= 0 {\n\n mp::__gmpz_submul_ui(nummut(target), denref(op1), op2 as ULong);\n\n } else {\n\n mp::__gmpz_addmul_ui(nummut(target), denref(op1), -op2 as ULong);\n\n }\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 11, "score": 149450.84718720143 }, { "content": "fn _sub_si(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: Long) {\n\n unsafe {\n\n mp::mpfr_sub_si(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 13, "score": 149450.84718720143 }, { "content": "fn _si_sub(target: mp::mpfr_ptr, op1: Long, op2: mp::mpfr_srcptr) {\n\n unsafe {\n\n mp::mpfr_si_sub(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 14, "score": 149450.84718720143 }, { "content": "fn _si_sub(target: mp::mpq_ptr, op1: Long, op2: mp::mpq_srcptr) {\n\n _sub_si(target, op2, op1);\n\n unsafe {\n\n mp::__gmpq_neg(target, target);\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 15, "score": 149450.84718720143 }, { "content": "fn _add_ui(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: ULong) {\n\n unsafe {\n\n mp::mpfr_add_ui(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 18, "score": 147097.84873704344 }, { "content": "fn _mul_ui(target: mp::mpfr_ptr, op1: 
mp::mpfr_srcptr, op2: ULong) {\n\n unsafe {\n\n mp::mpfr_mul_ui(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 19, "score": 147097.84873704344 }, { "content": "fn _add_ui(target: mp::mpq_ptr, op1: mp::mpq_srcptr, op2: ULong) {\n\n unsafe {\n\n if target as mp::mpq_srcptr != op1 {\n\n mp::__gmpq_set(target, op1);\n\n }\n\n mp::__gmpz_addmul_ui(nummut(target), denref(op1), op2);\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 20, "score": 147097.84873704344 }, { "content": "fn _mul_ui(target: mp::mpq_ptr, op1: mp::mpq_srcptr, op2: ULong) {\n\n unsafe {\n\n if target as mp::mpq_srcptr != op1 {\n\n mp::__gmpz_set(denmut(target), denref(op1));\n\n }\n\n mp::__gmpz_mul_ui(nummut(target), numref(op1), op2);\n\n mp::__gmpq_canonicalize(target);\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 21, "score": 147097.84873704344 }, { "content": "fn _div_ui(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: ULong) {\n\n unsafe {\n\n mp::mpfr_div_ui(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 25, "score": 146890.8756607849 }, { "content": "fn _div_ui(target: mp::mpq_ptr, op1: mp::mpq_srcptr, op2: ULong) {\n\n unsafe {\n\n if target as mp::mpq_srcptr != op1 {\n\n mp::__gmpz_set(nummut(target), numref(op1));\n\n }\n\n mp::__gmpz_mul_ui(denmut(target), denref(op1), op2);\n\n mp::__gmpq_canonicalize(target);\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 26, "score": 146890.8756607849 }, { "content": "fn _ui_div(target: mp::mpq_ptr, op1: ULong, op2: mp::mpq_srcptr) {\n\n _div_ui(target, op2, op1);\n\n unsafe {\n\n mp::__gmpq_inv(target, target);\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 27, "score": 146890.8756607849 }, { "content": "fn _ui_div(target: mp::mpfr_ptr, op1: ULong, op2: mp::mpfr_srcptr) {\n\n unsafe {\n\n mp::mpfr_ui_div(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": 
"src/mp/real/real_ops.rs", "rank": 28, "score": 146890.8756607849 }, { "content": "fn _ui_sub(target: mp::mpfr_ptr, op1: ULong, op2: mp::mpfr_srcptr) {\n\n unsafe {\n\n mp::mpfr_ui_sub(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 31, "score": 146837.48880400695 }, { "content": "fn _sub_ui(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: ULong) {\n\n unsafe {\n\n mp::mpfr_sub_ui(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 33, "score": 146837.48880400695 }, { "content": "fn _sub_ui(target: mp::mpq_ptr, op1: mp::mpq_srcptr, op2: ULong) {\n\n unsafe {\n\n if target as mp::mpq_srcptr != op1 {\n\n mp::__gmpq_set(target, op1);\n\n }\n\n mp::__gmpz_submul_ui(nummut(target), denref(op1), op2);\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 34, "score": 146837.48880400698 }, { "content": "fn _ui_sub(target: mp::mpq_ptr, op1: ULong, op2: mp::mpq_srcptr) {\n\n _sub_ui(target, op2, op1);\n\n unsafe {\n\n mp::__gmpq_neg(target, target);\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 35, "score": 146837.48880400698 }, { "content": "fn _add_q(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: &Rational) {\n\n unsafe {\n\n mp::mpfr_add_q(target, op1, &op2.data, _GLOBAL_RND);\n\n }\n\n}\n\n\n", "file_path": "src/mp/real/real_ops.rs", "rank": 36, "score": 110974.71922182167 }, { "content": "fn _mul_z(target: mp::mpq_ptr, op1: mp::mpq_srcptr, op2: &Integer) {\n\n unsafe {\n\n if target as mp::mpq_srcptr != op1 {\n\n mp::__gmpz_set(denmut(target), denref(op1));\n\n }\n\n mp::__gmpz_mul(nummut(target), numref(op1), &op2.data);\n\n mp::__gmpq_canonicalize(target);\n\n }\n\n}\n\n\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 37, "score": 110974.71922182167 }, { "content": "fn _add_d(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: f64) {\n\n unsafe {\n\n mp::mpfr_add_d(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": 
"src/mp/real/real_ops.rs", "rank": 38, "score": 110974.71922182167 }, { "content": "fn _add_z(target: mp::mpq_ptr, op1: mp::mpq_srcptr, op2: &Integer) {\n\n unsafe {\n\n if target as mp::mpq_srcptr != op1 {\n\n mp::__gmpq_set(target, op1);\n\n }\n\n mp::__gmpz_addmul(nummut(target), denref(op1), &op2.data);\n\n }\n\n}\n\n\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 39, "score": 110974.71922182167 }, { "content": "fn _mul_d(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: f64) {\n\n unsafe {\n\n mp::mpfr_mul_d(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 40, "score": 110974.71922182167 }, { "content": "fn _add_z(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: &Integer) {\n\n unsafe {\n\n mp::mpfr_add_z(target, op1, &op2.data, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 41, "score": 110974.71922182167 }, { "content": "fn _mul_q(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: &Rational) {\n\n unsafe {\n\n mp::mpfr_mul_q(target, op1, &op2.data, _GLOBAL_RND);\n\n }\n\n}\n\n\n", "file_path": "src/mp/real/real_ops.rs", "rank": 42, "score": 110974.71922182167 }, { "content": "fn _mul_z(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: &Integer) {\n\n unsafe {\n\n mp::mpfr_mul_z(target, op1, &op2.data, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 43, "score": 110974.71922182167 }, { "content": "fn _div_z(target: mp::mpq_ptr, op1: mp::mpq_srcptr, op2: &Integer) {\n\n unsafe {\n\n if target as mp::mpq_srcptr != op1 {\n\n mp::__gmpz_set(nummut(target), numref(op1));\n\n }\n\n mp::__gmpz_mul(denmut(target), denref(op1), &op2.data);\n\n mp::__gmpq_canonicalize(target);\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 44, "score": 110757.54952941749 }, { "content": "fn _z_div(target: mp::mpfr_ptr, op1: &Integer, op2: mp::mpfr_srcptr) {\n\n unsafe {\n\n mp::mpfr_ui_div(target, 1 as ULong, op2, _GLOBAL_RND);\n\n }\n\n 
_mul_z(target, target, op1)\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 45, "score": 110757.54952941749 }, { "content": "fn _div_d(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: f64) {\n\n unsafe {\n\n mp::mpfr_div_d(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 46, "score": 110757.54952941749 }, { "content": "fn _q_div(target: mp::mpfr_ptr, op1: &Rational, op2: mp::mpfr_srcptr) {\n\n unsafe {\n\n mp::mpfr_ui_div(target, 1 as ULong, op2, _GLOBAL_RND);\n\n }\n\n _mul_q(target, target, op1)\n\n}\n\n\n\nmacro_rules! define_flipped {\n\n ($f: ident, $f2: ident, $T: ty) => {\n\n fn $f2(target: mp::mpfr_ptr, op1: $T, op2: mp::mpfr_srcptr) {\n\n $f(target, op2, op1);\n\n }\n\n };\n\n}\n\n// Op: Add, Sub, Mul, Div\n\n// op_name: add, sub, mul, div\n\n// f: _add_ui\n\n// f2: _ui_add (if Op is Add or Mul, f2 must be defined by define_flipped! macro)\n\n// T: ULong\n\nmacro_rules! define_binops {\n", "file_path": "src/mp/real/real_ops.rs", "rank": 47, "score": 110757.54952941749 }, { "content": "fn _div_z(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: &Integer) {\n\n unsafe {\n\n mp::mpfr_div_z(target, op1, &op2.data, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 48, "score": 110757.54952941749 }, { "content": "fn _d_div(target: mp::mpfr_ptr, op1: f64, op2: mp::mpfr_srcptr) {\n\n unsafe {\n\n mp::mpfr_d_div(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 49, "score": 110757.54952941749 }, { "content": "fn _z_div(target: mp::mpq_ptr, op1: &Integer, op2: mp::mpq_srcptr) {\n\n _div_z(target, op2, op1);\n\n unsafe {\n\n mp::__gmpq_inv(target, target);\n\n }\n\n}\n\n\n\nmacro_rules! 
define_flipped {\n\n ($f: ident, $f2: ident, $T: ty) => {\n\n fn $f2(target: mp::mpq_ptr, op1: $T, op2: mp::mpq_srcptr) {\n\n $f(target, op2, op1);\n\n }\n\n };\n\n}\n\n// Op: Add, Sub, Mul, Div\n\n// op_name: add, sub, mul, div\n\n// f: _add_ui\n\n// f2: _ui_add (if Op is Add or Mul, f2 must be defined by define_flipped! macro)\n\n// T: ULong\n\nmacro_rules! define_binops {\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 50, "score": 110757.54952941749 }, { "content": "fn _div_q(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: &Rational) {\n\n unsafe {\n\n mp::mpfr_div_q(target, op1, &op2.data, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 51, "score": 110757.54952941749 }, { "content": "fn _sub_z(target: mp::mpq_ptr, op1: mp::mpq_srcptr, op2: &Integer) {\n\n unsafe {\n\n if target as mp::mpq_srcptr != op1 {\n\n mp::__gmpq_set(target, op1);\n\n }\n\n mp::__gmpz_submul(nummut(target), denref(op1), &op2.data);\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 52, "score": 110701.53254655925 }, { "content": "fn _sub_d(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: f64) {\n\n unsafe {\n\n mp::mpfr_sub_d(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 53, "score": 110701.53254655925 }, { "content": "fn _z_sub(target: mp::mpq_ptr, op1: &Integer, op2: mp::mpq_srcptr) {\n\n _sub_z(target, op2, op1);\n\n unsafe {\n\n mp::__gmpq_neg(target, target);\n\n }\n\n}\n\n\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 54, "score": 110701.53254655925 }, { "content": "fn _q_sub(target: mp::mpfr_ptr, op1: &Rational, op2: mp::mpfr_srcptr) {\n\n unsafe {\n\n mp::mpfr_sub_q(target, op2, &op1.data, _GLOBAL_RND);\n\n mp::mpfr_neg(target, target, _GLOBAL_RND);\n\n }\n\n}\n\n\n", "file_path": "src/mp/real/real_ops.rs", "rank": 55, "score": 110701.53254655925 }, { "content": "fn _sub_z(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: &Integer) {\n\n unsafe {\n\n 
mp::mpfr_sub_z(target, op1, &op2.data, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 56, "score": 110701.53254655925 }, { "content": "fn _d_sub(target: mp::mpfr_ptr, op1: f64, op2: mp::mpfr_srcptr) {\n\n unsafe {\n\n mp::mpfr_d_sub(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 57, "score": 110701.53254655925 }, { "content": "fn _z_sub(target: mp::mpfr_ptr, op1: &Integer, op2: mp::mpfr_srcptr) {\n\n unsafe {\n\n mp::mpfr_z_sub(target, &op1.data, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 58, "score": 110701.53254655925 }, { "content": "fn _sub_q(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: &Rational) {\n\n unsafe {\n\n mp::mpfr_sub_q(target, op1, &op2.data, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 59, "score": 110701.53254655925 }, { "content": "fn _mul(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: mp::mpfr_srcptr) {\n\n unsafe {\n\n mp::mpfr_mul(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 62, "score": 106064.77143631261 }, { "content": "fn _mul(target: mp::mpq_ptr, op1: mp::mpq_srcptr, op2: mp::mpq_srcptr) {\n\n unsafe {\n\n mp::__gmpq_mul(target, op1, op2);\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 64, "score": 106064.77143631261 }, { "content": "fn _add(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: mp::mpfr_srcptr) {\n\n unsafe {\n\n mp::mpfr_add(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 65, "score": 106064.77143631261 }, { "content": "fn _add(target: mp::mpq_ptr, op1: mp::mpq_srcptr, op2: mp::mpq_srcptr) {\n\n unsafe {\n\n mp::__gmpq_add(target, op1, op2);\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 66, "score": 106064.77143631261 }, { "content": "fn _div(target: mp::mpq_ptr, op1: mp::mpq_srcptr, op2: mp::mpq_srcptr) {\n\n unsafe {\n\n 
mp::__gmpq_div(target, op1, op2);\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 68, "score": 105857.79836005403 }, { "content": "fn _div(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: mp::mpfr_srcptr) {\n\n unsafe {\n\n mp::mpfr_div(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 69, "score": 105857.79836005403 }, { "content": "fn _sub(target: mp::mpfr_ptr, op1: mp::mpfr_srcptr, op2: mp::mpfr_srcptr) {\n\n unsafe {\n\n mp::mpfr_sub(target, op1, op2, _GLOBAL_RND);\n\n }\n\n}\n", "file_path": "src/mp/real/real_ops.rs", "rank": 71, "score": 105804.41150327609 }, { "content": "fn _sub(target: mp::mpq_ptr, op1: mp::mpq_srcptr, op2: mp::mpq_srcptr) {\n\n unsafe {\n\n mp::__gmpq_sub(target, op1, op2);\n\n }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 72, "score": 105804.41150327612 }, { "content": "fn main() {\n\n let mut n = Integer::from(1 as ULong);\n\n for i in 1..=100 {\n\n n *= i as ULong;\n\n }\n\n println!(\"100! 
= {}\", n);\n\n\n\n let mut t = Rational::from(1.0);\n\n let mut s_d = Rational::from(1.0);\n\n for i in 1..=10 {\n\n t *= i as ULong;\n\n s_d += 1 as ULong / &t;\n\n }\n\n println!(\"Sum is {}\", s_d);\n\n\n\n let mut t = Real::from(1.0);\n\n let mut s_d = Real::from(1.0);\n\n for i in 1..=100 {\n\n t *= i as ULong;\n\n s_d += t.recip();\n", "file_path": "src/main.rs", "rank": 73, "score": 38109.060765051516 }, { "content": "fn cin<T: std::str::FromStr>() -> T {\n\n let mut s = String::new();\n\n stdin().read_line(&mut s).ok();\n\n s.trim().parse::<T>().ok().unwrap()\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 74, "score": 31659.87193357665 }, { "content": "// 1 回だけ安全に初期化できることにする (その前に _GLOBAL_PREC, _GLOBAL_RND が参照されていないことは良識に任せる…)\n\npub fn set_prec(prec: Precision) -> Result<(), &'static str> {\n\n match _PREC_SET.set(true) {\n\n Ok(()) => {\n\n unsafe { _GLOBAL_PREC = prec }\n\n Ok(())\n\n }\n\n Err(_) => Err(\"prec is already set\"),\n\n }\n\n}\n", "file_path": "src/mp/real/real.rs", "rank": 75, "score": 28702.698103478746 }, { "content": "pub fn set_rnd(rnd: RoundMode) -> Result<(), &'static str> {\n\n match _RND_SET.set(true) {\n\n Ok(()) => {\n\n unsafe { _GLOBAL_RND = rnd }\n\n Ok(())\n\n }\n\n Err(_) => Err(\"rnd is already set\"),\n\n }\n\n}\n\n\n\npub struct Real {\n\n pub data: mp::__mpfr_struct,\n\n}\n\n\n\nimpl Real {\n\n pub fn new() -> Real {\n\n let mut x = Real::_nan();\n\n unsafe {\n\n mp::mpfr_set_zero(&mut x.data, 1);\n\n }\n", "file_path": "src/mp/real/real.rs", "rank": 76, "score": 28128.729247266674 }, { "content": "fn numref(p: mp::mpq_srcptr) -> *const mp::__mpz_struct {\n\n unsafe { &(*p)._mp_num }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 77, "score": 28128.729247266674 }, { "content": "fn denref(p: mp::mpq_srcptr) -> *const mp::__mpz_struct {\n\n unsafe { &(*p)._mp_den }\n\n}\n\n\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 78, "score": 28128.729247266674 }, { "content": "fn denmut(p: 
mp::mpq_ptr) -> *mut mp::__mpz_struct {\n\n unsafe { &mut (*p)._mp_den }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 79, "score": 28128.729247266674 }, { "content": "fn nummut(p: mp::mpq_ptr) -> *mut mp::__mpz_struct {\n\n unsafe { &mut (*p)._mp_num }\n\n}\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 80, "score": 28128.729247266674 }, { "content": "#define MPFR_USE_INTMAX_T\n", "file_path": "src/mp/c/mp_in.h", "rank": 81, "score": 25201.202094270455 }, { "content": "use super::super::mp;\n\n\n\nuse std::cmp::Ordering;\n\nuse std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign};\n\n\n\nuse super::super::{Integer, Long, ULong};\n\nuse super::rational::Rational;\n\n\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 83, "score": 17.864898483085057 }, { "content": "use super::super::mp;\n\n\n\nuse std::cmp::Ordering;\n\nuse std::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign};\n\n\n\nuse super::super::{Integer, Long, Rational, ULong};\n\nuse super::real::{Real, _GLOBAL_RND};\n\n\n", "file_path": "src/mp/real/real_ops.rs", "rank": 84, "score": 17.42623692142292 }, { "content": "define_realops!(Sub, sub, _sub);\n\ndefine_realops!(Div, div, _div);\n\n\n\ndefine_realassign!(AddAssign, add_assign, _add);\n\ndefine_realassign!(MulAssign, mul_assign, _mul);\n\ndefine_realassign!(SubAssign, sub_assign, _sub);\n\ndefine_realassign!(DivAssign, div_assign, _div);\n\n\n\nimpl Neg for &Real {\n\n type Output = Real;\n\n fn neg(self) -> Real {\n\n let mut x = Real::new();\n\n unsafe {\n\n mp::mpfr_neg(&mut x.data, &self.data, _GLOBAL_RND);\n\n }\n\n x\n\n }\n\n}\n\nimpl Neg for Real {\n\n type Output = Real;\n", "file_path": "src/mp/real/real_ops.rs", "rank": 86, "score": 15.295723361011525 }, { "content": "}\n\n\n\nmacro_rules! 
define_subdiv {\n\n ($f_sub: ident, $f_sub2: ident, $f_div: ident, $f_div2: ident, $T: ty) => {\n\n define_sub!($f_sub, $f_sub2, $T);\n\n define_sub_assign!($f_sub, $T);\n\n define_div!($f_div, $f_div2, $T);\n\n define_div_assign!($f_div, $T);\n\n };\n\n}\n\n\n\ndefine_addmul!(_add_ui, _ui_add, _mul_ui, _ui_mul, ULong);\n\ndefine_addmul!(_add_si, _si_add, _mul_si, _si_mul, Long);\n\ndefine_addmul!(_add_d, _d_add, _mul_d, _d_mul, f64);\n\ndefine_addmul!(_add_z, _z_add, _mul_z, _z_mul, &Integer);\n\ndefine_addmul!(_add_q, _q_add, _mul_q, _q_mul, &Rational);\n\n\n\ndefine_subdiv!(_sub_ui, _ui_sub, _div_ui, _ui_div, ULong);\n\ndefine_subdiv!(_sub_si, _si_sub, _div_si, _si_div, Long);\n\ndefine_subdiv!(_sub_d, _d_sub, _div_d, _d_div, f64);\n", "file_path": "src/mp/real/real_ops.rs", "rank": 88, "score": 14.868075688810432 }, { "content": "}\n\n\n\nmacro_rules! define_subdiv {\n\n ($f_sub: ident, $f_sub2: ident, $f_div: ident, $f_div2: ident, $T: ty) => {\n\n define_sub!($f_sub, $f_sub2, $T);\n\n define_sub_assign!($f_sub, $T);\n\n define_div!($f_div, $f_div2, $T);\n\n define_div_assign!($f_div, $T);\n\n };\n\n}\n\n\n\ndefine_addmul!(_add_ui, _ui_add, _mul_ui, _ui_mul, ULong);\n\ndefine_addmul!(_add_si, _si_add, _mul_si, _si_mul, Long);\n\ndefine_addmul!(_add_z, _z_add, _mul_z, _z_mul, &Integer);\n\n\n\ndefine_subdiv!(_sub_ui, _ui_sub, _div_ui, _ui_div, ULong);\n\ndefine_subdiv!(_sub_si, _si_sub, _div_si, _si_div, Long);\n\ndefine_subdiv!(_sub_z, _z_sub, _div_z, _z_div, &Integer);\n\n\n\n// Op: Add, Sub, Mul, Div\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 89, "score": 14.728324788914453 }, { "content": "define_realassign!(MulAssign, mul_assign, _mul);\n\ndefine_realassign!(SubAssign, sub_assign, _sub);\n\ndefine_realassign!(DivAssign, div_assign, _div);\n\n\n\nimpl Neg for &Rational {\n\n type Output = Rational;\n\n fn neg(self) -> Rational {\n\n let mut x = Rational::new();\n\n unsafe { mp::__gmpq_neg(&mut x.data, &self.data) }\n\n x\n\n }\n\n}\n\nimpl 
Neg for Rational {\n\n type Output = Rational;\n\n fn neg(mut self) -> Rational {\n\n unsafe { mp::__gmpq_neg(&mut self.data, &self.data) }\n\n self\n\n }\n\n}\n\n\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 91, "score": 13.279585890777108 }, { "content": " ($Op: ident, $op_name: ident, $f: ident) => {\n\n impl $Op<&Rational> for Rational {\n\n fn $op_name(&mut self, rhs: &Rational) {\n\n $f(&mut self.data, &self.data, &rhs.data);\n\n }\n\n }\n\n impl $Op for Rational {\n\n fn $op_name(&mut self, rhs: Rational) {\n\n $f(&mut self.data, &self.data, &rhs.data);\n\n }\n\n }\n\n };\n\n}\n\n\n\ndefine_realops!(Add, add, _add);\n\ndefine_realops!(Mul, mul, _mul);\n\ndefine_realops!(Sub, sub, _sub);\n\ndefine_realops!(Div, div, _div);\n\n\n\ndefine_realassign!(AddAssign, add_assign, _add);\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 93, "score": 10.576953146158605 }, { "content": "define_subdiv!(_sub_z, _z_sub, _div_z, _z_div, &Integer);\n\ndefine_subdiv!(_sub_q, _q_sub, _div_q, _q_div, &Rational);\n\n\n\n// Op: Add, Sub, Mul, Div\n\n// op_name: add, sub, mul, div\n\n// f: _add\n\nmacro_rules! define_realops {\n\n ($Op: ident, $op_name: ident, $f: ident) => {\n\n impl $Op for &Real {\n\n type Output = Real;\n\n fn $op_name(self, rhs: &Real) -> Real {\n\n let mut x = Real::_nan();\n\n $f(&mut x.data, &self.data, &rhs.data);\n\n x\n\n }\n\n }\n\n impl $Op<Real> for &Real {\n\n type Output = Real;\n\n fn $op_name(self, mut rhs: Real) -> Real {\n\n $f(&mut rhs.data, &self.data, &rhs.data);\n", "file_path": "src/mp/real/real_ops.rs", "rank": 95, "score": 10.234124147238818 }, { "content": "// Op: AddAssign, SubAssign, MulAssign, DivAssign\n\n// op_name: add_assign, sub_assign, mul_assign, div_assign\n\n// f: _add\n\nmacro_rules! 
define_realassign {\n\n ($Op: ident, $op_name: ident, $f: ident) => {\n\n impl $Op<&Real> for Real {\n\n fn $op_name(&mut self, rhs: &Real) {\n\n $f(&mut self.data, &self.data, &rhs.data);\n\n }\n\n }\n\n impl $Op for Real {\n\n fn $op_name(&mut self, rhs: Real) {\n\n $f(&mut self.data, &self.data, &rhs.data);\n\n }\n\n }\n\n };\n\n}\n\n\n\ndefine_realops!(Add, add, _add);\n\ndefine_realops!(Mul, mul, _mul);\n", "file_path": "src/mp/real/real_ops.rs", "rank": 96, "score": 9.847157850970824 }, { "content": " $f2(&mut x.data, self, &rhs.data);\n\n x\n\n }\n\n }\n\n impl $Op<Rational> for $T {\n\n type Output = Rational;\n\n fn $op_name(self, mut rhs: Rational) -> Rational {\n\n $f2(&mut rhs.data, self, &rhs.data);\n\n rhs\n\n }\n\n }\n\n };\n\n}\n\n// Op: AddAssign, SubAssign, MulAssign, DivAssign\n\n// op_name: add_assign, sub_assign, mul_assign, div_assign\n\n// f: _add_ui\n\n// T: ULong\n\nmacro_rules! define_assign {\n\n ($Op: ident, $op_name: ident, $f: ident, $T: ty) => {\n\n impl $Op<$T> for Rational {\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 97, "score": 9.781743697628908 }, { "content": " $f2(&mut x.data, self, &rhs.data);\n\n x\n\n }\n\n }\n\n impl $Op<Real> for $T {\n\n type Output = Real;\n\n fn $op_name(self, mut rhs: Real) -> Real {\n\n $f2(&mut rhs.data, self, &rhs.data);\n\n rhs\n\n }\n\n }\n\n };\n\n}\n\n// Op: AddAssign, SubAssign, MulAssign, DivAssign\n\n// op_name: add_assign, sub_assign, mul_assign, div_assign\n\n// f: _add_ui\n\n// T: ULong\n\nmacro_rules! 
define_assign {\n\n ($Op: ident, $op_name: ident, $f: ident, $T: ty) => {\n\n impl $Op<$T> for Real {\n", "file_path": "src/mp/real/real_ops.rs", "rank": 98, "score": 9.781743697628908 }, { "content": " type Output = Rational;\n\n fn $op_name(mut self, rhs: &Rational) -> Rational {\n\n $f(&mut self.data, &self.data, &rhs.data);\n\n self\n\n }\n\n }\n\n impl $Op for Rational {\n\n type Output = Rational;\n\n fn $op_name(mut self, rhs: Rational) -> Rational {\n\n $f(&mut self.data, &self.data, &rhs.data);\n\n self\n\n }\n\n }\n\n };\n\n}\n\n\n\n// Op: AddAssign, SubAssign, MulAssign, DivAssign\n\n// op_name: add_assign, sub_assign, mul_assign, div_assign\n\n// f: _add\n\nmacro_rules! define_realassign {\n", "file_path": "src/mp/rational/rational_ops.rs", "rank": 99, "score": 9.120642846633807 } ]
Rust
src/main.rs
JingYenLoh/fluminurs
bc8d6cf0b74f9e0d952caade114cc413d1199349
use std::collections::HashSet; use std::fs; use std::io; use std::io::{Read, Write}; use std::path::{Path, PathBuf}; use clap::{App, Arg}; use futures_util::future; use serde::{Deserialize, Serialize}; use tokio; use crate::api::module::{File, Module, OverwriteMode, OverwriteResult}; use crate::api::Api; #[macro_use] extern crate bitflags; type Error = &'static str; type Result<T> = std::result::Result<T, Error>; const PKG_NAME: &str = env!("CARGO_PKG_NAME"); const VERSION: &str = env!("CARGO_PKG_VERSION"); const DESCRIPTION: &str = env!("CARGO_PKG_DESCRIPTION"); mod api; #[derive(Serialize, Deserialize)] struct Login { username: String, password: String, } bitflags! { struct ModuleTypeFlags: u8 { const TAKING = 0x01; const TEACHING = 0x02; } } fn flush_stdout() { io::stdout().flush().expect("Unable to flush stdout"); } fn get_input(prompt: &str) -> String { let mut input = String::new(); print!("{}", prompt); flush_stdout(); io::stdin() .read_line(&mut input) .expect("Unable to get input"); input.trim().to_string() } fn get_password(prompt: &str) -> String { print!("{}", prompt); flush_stdout(); rpassword::read_password().expect("Unable to get non-echo input mode for password") } fn print_files(file: &File, prefix: &str) { if file.is_directory() { for child in file.children().expect("Children must have been loaded") { print_files(&child, &format!("{}/{}", prefix, file.name())); } } else { println!("{}/{}", prefix, file.name()); } } async fn print_announcements(api: &Api, modules: &[Module]) -> Result<()> { let apic = api.clone(); let module_announcements = future::join_all( modules .iter() .map(|module| module.get_announcements(&apic, false)), ) .await; for (module, announcements) in modules.iter().zip(module_announcements) { let announcements = announcements?; println!("# {} {}", module.code, module.name); println!(); for ann in announcements { println!("=== {} ===", ann.title); let stripped = ammonia::Builder::new() .tags(HashSet::new()) .clean(&ann.description) 
.to_string(); let decoded = htmlescape::decode_html(&stripped) .unwrap_or_else(|_| "Unable to decode HTML Entities".to_owned()); println!("{}", decoded); } println!(); println!(); } Ok(()) } async fn load_modules_files( api: &Api, modules: &[Module], include_uploadable_folders: ModuleTypeFlags, ) -> Result<Vec<File>> { let apic = api.clone(); let files = modules .iter() .filter(|module| module.has_access()) .map(|module| (module.as_file(), module.is_teaching())) .collect::<Vec<_>>(); let errors = future::join_all(files.iter().map(|(file, is_teaching)| { file.load_all_children( &apic, include_uploadable_folders.contains(if is_teaching.to_owned() { ModuleTypeFlags::TEACHING } else { ModuleTypeFlags::TAKING }), ) })) .await .into_iter() .filter(Result::is_err); for e in errors { println!("Failed loading module files: {}", e.unwrap_err()); } Ok(files.into_iter().map(|(file, _)| file).collect::<Vec<_>>()) } async fn list_files( api: &Api, modules: &[Module], include_uploadable_folders: ModuleTypeFlags, ) -> Result<()> { let files = load_modules_files(api, modules, include_uploadable_folders).await?; for file in files { print_files(&file, ""); } Ok(()) } async fn download_file(api: &Api, file: File, path: PathBuf, overwrite_mode: &OverwriteMode) { match file.download(api.clone(), &path, overwrite_mode).await { Ok(OverwriteResult::NewFile) => println!("Downloaded to {}", path.to_string_lossy()), Ok(OverwriteResult::AlreadyHave) => {} Ok(OverwriteResult::Skipped) => println!("Skipped {}", path.to_string_lossy()), Ok(OverwriteResult::Overwritten) => println!("Updated {}", path.to_string_lossy()), Ok(OverwriteResult::Renamed { renamed_path }) => println!( "Renamed {} to {}", path.to_string_lossy(), renamed_path.to_string_lossy() ), Err(e) => println!("Failed to download file: {}", e), } } async fn download_files( api: &Api, modules: &[Module], destination: &str, include_uploadable_folders: ModuleTypeFlags, overwrite_mode: &OverwriteMode, ) -> Result<()> { println!("Download 
to {}", destination); let path = Path::new(destination).to_owned(); if !path.is_dir() { return Err("Download destination does not exist or is not a directory"); } let files = load_modules_files(api, modules, include_uploadable_folders).await?; let mut directories = files .into_iter() .zip(std::iter::repeat(path)) .collect::<Vec<_>>(); let mut files: Vec<(File, PathBuf)> = vec![]; while let Some((file, path)) = directories.pop() { let path = path.join(file.name()); if file.is_directory() { directories.append( &mut file .children() .expect("Children should have been loaded") .into_iter() .map(|child| (child, path.clone())) .collect(), ); } else { files.push((file, path)); } } future::join_all( files .into_iter() .map(|(file, path)| download_file(api, file, path, overwrite_mode)), ) .await; Ok(()) } fn get_credentials(credential_file: &str) -> Result<(String, String)> { if let Ok(mut file) = fs::File::open(credential_file) { let mut content = String::new(); file.read_to_string(&mut content) .map_err(|_| "Unable to read credentials")?; if let Ok(login) = serde_json::from_str::<Login>(&content) { Ok((login.username, login.password)) } else { println!("Corrupt credentials.json, deleting file..."); fs::remove_file(Path::new(credential_file)) .map_err(|_| "Unable to delete credential file")?; get_credentials(credential_file) } } else { let username = get_input("Username (include the nusstu\\ prefix): "); let password = get_password("Password: "); Ok((username, password)) } } fn store_credentials(credential_file: &str, username: &str, password: &str) -> Result<()> { if confirm("Store credentials (WARNING: they are stored in plain text)? 
[y/n]") { let login = Login { username: username.to_owned(), password: password.to_owned(), }; let serialised = serde_json::to_string(&login).map_err(|_| "Unable to serialise credentials")?; fs::write(credential_file, serialised) .map_err(|_| "Unable to write to credentials file")?; } Ok(()) } fn confirm(prompt: &str) -> bool { print!("{} ", prompt); flush_stdout(); let mut answer = String::new(); while answer != "y" && answer != "n" { answer = get_input(""); answer.make_ascii_lowercase(); } answer == "y" } #[tokio::main] async fn main() -> Result<()> { let matches = App::new(PKG_NAME) .version(VERSION) .author(&*format!("{} and contributors", clap::crate_authors!(", "))) .about(DESCRIPTION) .arg(Arg::with_name("announcements").long("announcements")) .arg(Arg::with_name("files").long("files")) .arg( Arg::with_name("download") .long("download-to") .takes_value(true), ) .arg( Arg::with_name("credential-file") .long("credential-file") .takes_value(true), ) .arg( Arg::with_name("include-uploadable") .long("include-uploadable-folders") .takes_value(true) .min_values(0) .max_values(u64::max_value()) .possible_values(&["taking", "teaching", "all"]), ) .arg( Arg::with_name("updated") .long("updated") .takes_value(true) .value_name("action-on-updated-files") .possible_values(&["skip", "overwrite", "rename"]) .number_of_values(1) .default_value("skip"), ) .get_matches(); let credential_file = matches .value_of("credential-file") .unwrap_or("login.json") .to_owned(); let do_announcements = matches.is_present("announcements"); let do_files = matches.is_present("files"); let download_destination = matches.value_of("download").map(|s| s.to_owned()); let include_uploadable_folders = matches .values_of("include-uploadable") .map(|values| { let include_flags = values .fold(Ok(ModuleTypeFlags::empty()), |acc, s| { acc.and_then(|flag| match s.to_lowercase().as_str() { "taking" => Ok(flag | ModuleTypeFlags::TAKING), "teaching" => Ok(flag | ModuleTypeFlags::TEACHING), "all" => Ok(flag 
| ModuleTypeFlags::all()), _ => Err("Invalid module type"), }) }) .expect("Unable to parse parameters of include-uploadable"); if include_flags.is_empty() { ModuleTypeFlags::all() } else { include_flags } }) .unwrap_or(ModuleTypeFlags::empty()); let overwrite_mode = matches .value_of("updated") .map(|s| match s.to_lowercase().as_str() { "skip" => OverwriteMode::Skip, "overwrite" => OverwriteMode::Overwrite, "rename" => OverwriteMode::Rename, _ => panic!("Unable to parse parameter of overwrite_mode"), }) .unwrap_or(OverwriteMode::Skip); let (username, password) = get_credentials(&credential_file).expect("Unable to get credentials"); let api = Api::with_login(&username, &password).await?; if !Path::new(&credential_file).exists() { match store_credentials(&credential_file, &username, &password) { Ok(_) => (), Err(e) => println!("Failed to store credentials: {}", e), } } let name = api.name().await?; println!("Hi {}!", name); let modules = api.modules(true).await?; println!("You are taking:"); for module in modules.iter().filter(|m| m.is_taking()) { println!("- {} {}", module.code, module.name); } println!("You are teaching:"); for module in modules.iter().filter(|m| m.is_teaching()) { println!("- {} {}", module.code, module.name); } if do_announcements { print_announcements(&api, &modules).await?; } if do_files { list_files(&api, &modules, include_uploadable_folders).await?; } if let Some(destination) = download_destination { download_files( &api, &modules, &destination, include_uploadable_folders, &overwrite_mode, ) .await?; } Ok(()) }
use std::collections::HashSet; use std::fs; use std::io; use std::io::{Read, Write}; use std::path::{Path, PathBuf}; use clap::{App, Arg}; use futures_util::future; use serde::{Deserialize, Serialize}; use tokio; use crate::api::module::{File, Module, OverwriteMode, OverwriteResult}; use crate::api::Api; #[macro_use] extern crate bitflags; type Error = &'static str; type Result<T> = std::result::Result<T, Error>; const PKG_NAME: &str = env!("CARGO_PKG_NAME"); const VERSION: &str = env!("CARGO_PKG_VERSION"); const DESCRIPTION: &str = env!("CARGO_PKG_DESCRIPTION"); mod api; #[derive(Serialize, Deserialize)] struct Login { username: String, password: String, } bitflags! { struct ModuleTypeFlags: u8 { const TAKING = 0x01; const TEACHING = 0x02; } } fn flush_stdout() { io::stdout().flush().expect("Unable to flush stdout"); } fn get_input(prompt: &str) -> String { let mut input = String::new(); print!("{}", prompt); flush_stdout(); io::stdin() .read_line(&mut input) .expect("Unable to get input"); input.trim().to_string() } fn get_password(prompt: &str) -> String { print!("{}", prompt); flush_stdout(); rpassword::read_password().expect("Unable to get non-echo input mode for password") } fn print_files(file: &File, prefix: &str) {
} async fn print_announcements(api: &Api, modules: &[Module]) -> Result<()> { let apic = api.clone(); let module_announcements = future::join_all( modules .iter() .map(|module| module.get_announcements(&apic, false)), ) .await; for (module, announcements) in modules.iter().zip(module_announcements) { let announcements = announcements?; println!("# {} {}", module.code, module.name); println!(); for ann in announcements { println!("=== {} ===", ann.title); let stripped = ammonia::Builder::new() .tags(HashSet::new()) .clean(&ann.description) .to_string(); let decoded = htmlescape::decode_html(&stripped) .unwrap_or_else(|_| "Unable to decode HTML Entities".to_owned()); println!("{}", decoded); } println!(); println!(); } Ok(()) } async fn load_modules_files( api: &Api, modules: &[Module], include_uploadable_folders: ModuleTypeFlags, ) -> Result<Vec<File>> { let apic = api.clone(); let files = modules .iter() .filter(|module| module.has_access()) .map(|module| (module.as_file(), module.is_teaching())) .collect::<Vec<_>>(); let errors = future::join_all(files.iter().map(|(file, is_teaching)| { file.load_all_children( &apic, include_uploadable_folders.contains(if is_teaching.to_owned() { ModuleTypeFlags::TEACHING } else { ModuleTypeFlags::TAKING }), ) })) .await .into_iter() .filter(Result::is_err); for e in errors { println!("Failed loading module files: {}", e.unwrap_err()); } Ok(files.into_iter().map(|(file, _)| file).collect::<Vec<_>>()) } async fn list_files( api: &Api, modules: &[Module], include_uploadable_folders: ModuleTypeFlags, ) -> Result<()> { let files = load_modules_files(api, modules, include_uploadable_folders).await?; for file in files { print_files(&file, ""); } Ok(()) } async fn download_file(api: &Api, file: File, path: PathBuf, overwrite_mode: &OverwriteMode) { match file.download(api.clone(), &path, overwrite_mode).await { Ok(OverwriteResult::NewFile) => println!("Downloaded to {}", path.to_string_lossy()), Ok(OverwriteResult::AlreadyHave) => {} 
Ok(OverwriteResult::Skipped) => println!("Skipped {}", path.to_string_lossy()), Ok(OverwriteResult::Overwritten) => println!("Updated {}", path.to_string_lossy()), Ok(OverwriteResult::Renamed { renamed_path }) => println!( "Renamed {} to {}", path.to_string_lossy(), renamed_path.to_string_lossy() ), Err(e) => println!("Failed to download file: {}", e), } } async fn download_files( api: &Api, modules: &[Module], destination: &str, include_uploadable_folders: ModuleTypeFlags, overwrite_mode: &OverwriteMode, ) -> Result<()> { println!("Download to {}", destination); let path = Path::new(destination).to_owned(); if !path.is_dir() { return Err("Download destination does not exist or is not a directory"); } let files = load_modules_files(api, modules, include_uploadable_folders).await?; let mut directories = files .into_iter() .zip(std::iter::repeat(path)) .collect::<Vec<_>>(); let mut files: Vec<(File, PathBuf)> = vec![]; while let Some((file, path)) = directories.pop() { let path = path.join(file.name()); if file.is_directory() { directories.append( &mut file .children() .expect("Children should have been loaded") .into_iter() .map(|child| (child, path.clone())) .collect(), ); } else { files.push((file, path)); } } future::join_all( files .into_iter() .map(|(file, path)| download_file(api, file, path, overwrite_mode)), ) .await; Ok(()) } fn get_credentials(credential_file: &str) -> Result<(String, String)> { if let Ok(mut file) = fs::File::open(credential_file) { let mut content = String::new(); file.read_to_string(&mut content) .map_err(|_| "Unable to read credentials")?; if let Ok(login) = serde_json::from_str::<Login>(&content) { Ok((login.username, login.password)) } else { println!("Corrupt credentials.json, deleting file..."); fs::remove_file(Path::new(credential_file)) .map_err(|_| "Unable to delete credential file")?; get_credentials(credential_file) } } else { let username = get_input("Username (include the nusstu\\ prefix): "); let password = 
get_password("Password: "); Ok((username, password)) } } fn store_credentials(credential_file: &str, username: &str, password: &str) -> Result<()> { if confirm("Store credentials (WARNING: they are stored in plain text)? [y/n]") { let login = Login { username: username.to_owned(), password: password.to_owned(), }; let serialised = serde_json::to_string(&login).map_err(|_| "Unable to serialise credentials")?; fs::write(credential_file, serialised) .map_err(|_| "Unable to write to credentials file")?; } Ok(()) } fn confirm(prompt: &str) -> bool { print!("{} ", prompt); flush_stdout(); let mut answer = String::new(); while answer != "y" && answer != "n" { answer = get_input(""); answer.make_ascii_lowercase(); } answer == "y" } #[tokio::main] async fn main() -> Result<()> { let matches = App::new(PKG_NAME) .version(VERSION) .author(&*format!("{} and contributors", clap::crate_authors!(", "))) .about(DESCRIPTION) .arg(Arg::with_name("announcements").long("announcements")) .arg(Arg::with_name("files").long("files")) .arg( Arg::with_name("download") .long("download-to") .takes_value(true), ) .arg( Arg::with_name("credential-file") .long("credential-file") .takes_value(true), ) .arg( Arg::with_name("include-uploadable") .long("include-uploadable-folders") .takes_value(true) .min_values(0) .max_values(u64::max_value()) .possible_values(&["taking", "teaching", "all"]), ) .arg( Arg::with_name("updated") .long("updated") .takes_value(true) .value_name("action-on-updated-files") .possible_values(&["skip", "overwrite", "rename"]) .number_of_values(1) .default_value("skip"), ) .get_matches(); let credential_file = matches .value_of("credential-file") .unwrap_or("login.json") .to_owned(); let do_announcements = matches.is_present("announcements"); let do_files = matches.is_present("files"); let download_destination = matches.value_of("download").map(|s| s.to_owned()); let include_uploadable_folders = matches .values_of("include-uploadable") .map(|values| { let include_flags = 
values .fold(Ok(ModuleTypeFlags::empty()), |acc, s| { acc.and_then(|flag| match s.to_lowercase().as_str() { "taking" => Ok(flag | ModuleTypeFlags::TAKING), "teaching" => Ok(flag | ModuleTypeFlags::TEACHING), "all" => Ok(flag | ModuleTypeFlags::all()), _ => Err("Invalid module type"), }) }) .expect("Unable to parse parameters of include-uploadable"); if include_flags.is_empty() { ModuleTypeFlags::all() } else { include_flags } }) .unwrap_or(ModuleTypeFlags::empty()); let overwrite_mode = matches .value_of("updated") .map(|s| match s.to_lowercase().as_str() { "skip" => OverwriteMode::Skip, "overwrite" => OverwriteMode::Overwrite, "rename" => OverwriteMode::Rename, _ => panic!("Unable to parse parameter of overwrite_mode"), }) .unwrap_or(OverwriteMode::Skip); let (username, password) = get_credentials(&credential_file).expect("Unable to get credentials"); let api = Api::with_login(&username, &password).await?; if !Path::new(&credential_file).exists() { match store_credentials(&credential_file, &username, &password) { Ok(_) => (), Err(e) => println!("Failed to store credentials: {}", e), } } let name = api.name().await?; println!("Hi {}!", name); let modules = api.modules(true).await?; println!("You are taking:"); for module in modules.iter().filter(|m| m.is_taking()) { println!("- {} {}", module.code, module.name); } println!("You are teaching:"); for module in modules.iter().filter(|m| m.is_teaching()) { println!("- {} {}", module.code, module.name); } if do_announcements { print_announcements(&api, &modules).await?; } if do_files { list_files(&api, &modules, include_uploadable_folders).await?; } if let Some(destination) = download_destination { download_files( &api, &modules, &destination, include_uploadable_folders, &overwrite_mode, ) .await?; } Ok(()) }
if file.is_directory() { for child in file.children().expect("Children must have been loaded") { print_files(&child, &format!("{}/{}", prefix, file.name())); } } else { println!("{}/{}", prefix, file.name()); }
if_condition
[ { "content": "fn build_auth_form<'a>(username: &'a str, password: &'a str) -> HashMap<&'static str, &'a str> {\n\n let mut map = HashMap::new();\n\n map.insert(\"UserName\", username);\n\n map.insert(\"Password\", password);\n\n map.insert(\"AuthMethod\", \"FormsAuthentication\");\n\n map\n\n}\n\n\n", "file_path": "src/api/mod.rs", "rank": 3, "score": 151587.75280329667 }, { "content": "fn build_token_form<'a>(code: &'a str) -> HashMap<&'static str, &'a str> {\n\n let mut map = HashMap::new();\n\n map.insert(\"grant_type\", \"authorization_code\");\n\n map.insert(\"client_id\", ADFS_CLIENT_ID);\n\n map.insert(\"resource\", ADFS_RESOURCE_TYPE);\n\n map.insert(\"code\", code);\n\n map.insert(\"redirect_uri\", ADFS_REDIRECT_URI);\n\n map\n\n}\n\n\n", "file_path": "src/api/mod.rs", "rank": 7, "score": 117768.30741416624 }, { "content": "fn sanitise_filename(name: String) -> String {\n\n if cfg!(windows) {\n\n sanitize_filename::sanitize_with_options(\n\n name.trim(),\n\n sanitize_filename::Options {\n\n windows: true,\n\n truncate: true,\n\n replacement: \"-\",\n\n },\n\n )\n\n } else {\n\n [\"\\0\", \"/\"].iter().fold(name, |acc, x| acc.replace(x, \"-\"))\n\n }\n\n}\n\n\n", "file_path": "src/api/module.rs", "rank": 8, "score": 106932.63350837797 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ApiFileDirectory {\n\n id: String,\n\n name: String,\n\n allow_upload: Option<bool>,\n\n creator_name: Option<String>,\n\n last_updated_date: String,\n\n}\n\n\n", "file_path": "src/api/mod.rs", "rank": 11, "score": 97797.88232629193 }, { "content": "struct FileInner {\n\n id: String,\n\n name: String,\n\n is_directory: bool,\n\n children: RwLock<Option<Vec<File>>>,\n\n allow_upload: bool,\n\n last_updated: SystemTime,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct File {\n\n inner: Arc<FileInner>,\n\n}\n\n\n", "file_path": "src/api/module.rs", "rank": 12, "score": 96078.9755753438 }, { "content": "fn full_api_url(path: &str) -> Url 
{\n\n Url::parse(API_BASE_URL)\n\n .and_then(|u| u.join(path))\n\n .expect(\"Unable to join URL's\")\n\n}\n\n\n", "file_path": "src/api/mod.rs", "rank": 13, "score": 96064.25855064011 }, { "content": "fn parse_time(time: &String) -> SystemTime {\n\n SystemTime::from(\n\n chrono::DateTime::<chrono::FixedOffset>::parse_from_rfc3339(&time)\n\n .expect(\"Failed to parse last updated time\"),\n\n )\n\n}\n\n\n\npub enum OverwriteMode {\n\n Skip,\n\n Overwrite,\n\n Rename,\n\n}\n\n\n\npub enum OverwriteResult {\n\n NewFile,\n\n AlreadyHave,\n\n Skipped,\n\n Overwritten,\n\n Renamed { renamed_path: PathBuf },\n\n}\n", "file_path": "src/api/module.rs", "rank": 14, "score": 91828.25911956668 }, { "content": "fn generate_random_bytes(size: usize) -> String {\n\n (0..size)\n\n .map(|_| format!(\"{:02x}\", rand::random::<u8>()))\n\n .collect()\n\n}\n\n\n\nasync fn infinite_retry_http<F>(\n\n client: Client,\n\n url: Url,\n\n method: Method,\n\n form: Option<&HashMap<&str, &str>>,\n\n edit_request: F,\n\n) -> Result<Response>\n\nwhere\n\n F: (Fn(RequestBuilder) -> RequestBuilder),\n\n{\n\n let form = if let Some(form) = form {\n\n Some(serde_urlencoded::to_string(form).map_err(|_| \"Failed to serialise HTTP form\")?)\n\n } else {\n\n None\n", "file_path": "src/api/mod.rs", "rank": 15, "score": 91736.80472622659 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct ApiData {\n\n data: Data,\n\n}\n\n\n", "file_path": "src/api/mod.rs", "rank": 17, "score": 80271.10576742461 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Access {\n\n #[serde(rename = \"access_Full\")]\n\n full: bool,\n\n #[serde(rename = \"access_Read\")]\n\n read: bool,\n\n #[serde(rename = \"access_Create\")]\n\n create: bool,\n\n #[serde(rename = \"access_Update\")]\n\n update: bool,\n\n #[serde(rename = \"access_Delete\")]\n\n delete: bool,\n\n #[serde(rename = \"access_Settings_Read\")]\n\n settings_read: bool,\n\n #[serde(rename = \"access_Settings_Update\")]\n\n settings_update: 
bool,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Announcement {\n\n pub title: String,\n", "file_path": "src/api/module.rs", "rank": 18, "score": 77933.04292090864 }, { "content": "#[derive(Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct Name {\n\n user_name_original: String,\n\n}\n\n\n", "file_path": "src/api/mod.rs", "rank": 19, "score": 77818.11108471488 }, { "content": "#[derive(Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct Term {\n\n term_detail: TermDetail,\n\n}\n\n\n", "file_path": "src/api/mod.rs", "rank": 20, "score": 77818.11108471488 }, { "content": "#[derive(Deserialize)]\n\nstruct TokenResponse {\n\n access_token: String,\n\n}\n\n\n", "file_path": "src/api/mod.rs", "rank": 21, "score": 74933.92828613483 }, { "content": "#[derive(Deserialize)]\n\nstruct TermDetail {\n\n term: String,\n\n}\n\n\n", "file_path": "src/api/mod.rs", "rank": 22, "score": 74933.92828613483 }, { "content": "fn build_auth_url() -> Url {\n\n let nonce = generate_random_bytes(16);\n\n let mut url = Url::parse(ADFS_OAUTH2_URL).expect(\"Unable to parse ADFS URL\");\n\n url.query_pairs_mut()\n\n .append_pair(\"response_type\", \"code\")\n\n .append_pair(\"client_id\", ADFS_CLIENT_ID)\n\n .append_pair(\"state\", &nonce)\n\n .append_pair(\"redirect_uri\", ADFS_REDIRECT_URI)\n\n .append_pair(\"scope\", \"\")\n\n .append_pair(\"resource\", ADFS_RESOURCE_TYPE)\n\n .append_pair(\"nonce\", &nonce);\n\n url\n\n}\n\n\n", "file_path": "src/api/mod.rs", "rank": 23, "score": 65756.6713274322 }, { "content": "fn build_client() -> Result<Client> {\n\n Client::builder()\n\n .http1_title_case_headers()\n\n .cookie_store(true)\n\n .redirect(Policy::custom(|attempt| {\n\n if attempt.previous().len() > 5 {\n\n attempt.error(\"too many redirects\")\n\n } else {\n\n attempt.follow()\n\n }\n\n }))\n\n .build()\n\n .map_err(|_| \"Unable to create HTTP client\")\n\n}\n\n\n", "file_path": "src/api/mod.rs", "rank": 24, "score": 64525.027967578964 }, { "content": 
"use std::path::{Path, PathBuf};\n\nuse std::sync::{Arc, RwLock};\n\nuse std::time::SystemTime;\n\n\n\nuse futures_util::future;\n\nuse reqwest::{Method, Url};\n\nuse serde::Deserialize;\n\nuse tokio::io::AsyncWriteExt;\n\n\n\nuse crate::api::{Api, ApiData, Data};\n\nuse crate::Result;\n\n\n\n#[derive(Debug, Deserialize)]\n", "file_path": "src/api/module.rs", "rank": 26, "score": 39763.804952746854 }, { "content": " pub description: String,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Module {\n\n pub id: String,\n\n #[serde(rename = \"name\")]\n\n pub code: String,\n\n #[serde(rename = \"courseName\")]\n\n pub name: String,\n\n access: Option<Access>,\n\n pub term: String,\n\n}\n\n\n\nimpl Module {\n\n pub fn is_teaching(&self) -> bool {\n\n self.access\n\n .as_ref()\n\n .map(|access| {\n\n access.full\n", "file_path": "src/api/module.rs", "rank": 27, "score": 39761.09705674481 }, { "content": " if let Some(parent) = destination.parent() {\n\n tokio::fs::create_dir_all(parent)\n\n .await\n\n .map_err(|_| \"Unable to create directory\")?;\n\n };\n\n let mut file = tokio::fs::File::create(destination)\n\n .await\n\n .map_err(|_| \"Unable to open file\")?;\n\n let mut res = api\n\n .get_client()\n\n .get(download_url)\n\n .send()\n\n .await\n\n .map_err(|_| \"Failed during download\")?;\n\n while let Some(ref chunk) = res.chunk().await.map_err(|_| \"Failed during streaming\")? 
{\n\n file.write_all(chunk)\n\n .await\n\n .map_err(|_| \"Failed writing to disk\")?;\n\n }\n\n // Note: We should actually manually set the last updated time on the disk to the time fetched from server, otherwise there might be situations where we will miss an updated file.\n\n }\n\n Ok(result)\n\n }\n\n}\n", "file_path": "src/api/module.rs", "rank": 28, "score": 39760.12034292464 }, { "content": " )\n\n .await?;\n\n let mut files = match files.data {\n\n Data::ApiFileDirectory(files) => files\n\n .into_iter()\n\n .map(|s| File {\n\n inner: Arc::new(FileInner {\n\n id: s.id,\n\n name: sanitise_filename(format!(\n\n \"{}{}\",\n\n if allow_upload {\n\n format!(\n\n \"{} - \",\n\n s.creator_name.unwrap_or_else(|| \"Unknown\".to_string())\n\n )\n\n } else {\n\n \"\".to_string()\n\n },\n\n s.name\n\n )),\n", "file_path": "src/api/module.rs", "rank": 29, "score": 39758.778022560815 }, { "content": "\n\n let apic = api.clone();\n\n if !self.inner.is_directory {\n\n return self\n\n .inner\n\n .children\n\n .write()\n\n .map(|mut ptr| {\n\n *ptr = Some(Vec::new());\n\n })\n\n .map_err(|_| \"Failed to acquire write lock on File\");\n\n }\n\n if self\n\n .inner\n\n .children\n\n .read()\n\n .map(|children| children.is_some())\n\n .unwrap_or(false)\n\n {\n\n return Ok(());\n", "file_path": "src/api/module.rs", "rank": 30, "score": 39758.46343980066 }, { "content": " || access.create\n\n || access.update\n\n || access.delete\n\n || access.settings_read\n\n || access.settings_update\n\n })\n\n .unwrap_or(false)\n\n }\n\n\n\n pub fn is_taking(&self) -> bool {\n\n !self.is_teaching()\n\n }\n\n\n\n pub fn has_access(&self) -> bool {\n\n self.access.is_some()\n\n }\n\n\n\n pub async fn get_announcements(&self, api: &Api, archived: bool) -> Result<Vec<Announcement>> {\n\n let path = format!(\n\n \"announcement/{}/{}?sortby=displayFrom%20ASC\",\n", "file_path": "src/api/module.rs", "rank": 31, "score": 39757.843910811935 }, { "content": " };\n\n }\n\n let old_time = metadata\n\n 
.unwrap()\n\n .modified()\n\n .map_err(|_| \"File system does not support last modified time\")?;\n\n if self.inner.last_updated <= old_time {\n\n Ok((false, OverwriteResult::AlreadyHave)) // don't download, because we already have updated file\n\n } else {\n\n match overwrite {\n\n OverwriteMode::Skip => Ok((false, OverwriteResult::Skipped)), // don't download, because user wants to skip updated files\n\n OverwriteMode::Overwrite => Ok((true, OverwriteResult::Overwritten)), // do download, because user wants to overwrite updated files\n\n OverwriteMode::Rename => {\n\n let mut new_stem = path\n\n .file_stem()\n\n .expect(\"File does not have name\")\n\n .to_os_string();\n\n let date = chrono::DateTime::<chrono::Local>::from(old_time).date();\n\n use chrono::Datelike;\n\n new_stem.push(format!(\n", "file_path": "src/api/module.rs", "rank": 32, "score": 39757.807083753556 }, { "content": " }\n\n let subdirs = apic\n\n .api_as_json::<ApiData>(\n\n &format!(\"files/?ParentID={}\", self.inner.id),\n\n Method::GET,\n\n None,\n\n )\n\n .await?;\n\n let mut subdirs = match subdirs.data {\n\n Data::ApiFileDirectory(subdirs) => subdirs\n\n .into_iter()\n\n .filter(|s| include_uploadable || !s.allow_upload.unwrap_or(false))\n\n .map(|s| File {\n\n inner: Arc::new(FileInner {\n\n id: s.id,\n\n name: sanitise_filename(s.name),\n\n is_directory: true,\n\n children: RwLock::new(None),\n\n allow_upload: s.allow_upload.unwrap_or(false),\n\n last_updated: parse_time(&s.last_updated_date),\n", "file_path": "src/api/module.rs", "rank": 33, "score": 39757.34414489532 }, { "content": " if let Data::Text(url) = data.data {\n\n Ok(Url::parse(&url).map_err(|_| \"Unable to parse URL\")?)\n\n } else {\n\n Err(\"Invalid API response from server: type mismatch\")\n\n }\n\n }\n\n\n\n async fn prepare_path(\n\n &self,\n\n path: &Path,\n\n overwrite: &OverwriteMode,\n\n ) -> Result<(bool, OverwriteResult)> {\n\n let metadata = tokio::fs::metadata(path).await;\n\n if let Err(e) = metadata {\n\n 
return match e.kind() {\n\n std::io::ErrorKind::NotFound => Ok((true, OverwriteResult::NewFile)), // do download, because file does not already exist\n\n std::io::ErrorKind::PermissionDenied => {\n\n Err(\"Permission denied when retrieving file metadata\")\n\n }\n\n _ => Err(\"Unable to retrieve file metadata\"),\n", "file_path": "src/api/module.rs", "rank": 34, "score": 39757.34136934535 }, { "content": " suffixed_stem.push(format!(\"_{}\", i));\n\n };\n\n tokio::fs::rename(path, renamed_path.clone())\n\n .await\n\n .map_err(|_| \"Failed renaming existing file\")?;\n\n Ok((true, OverwriteResult::Renamed { renamed_path })) // do download, because we renamed the old file\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub async fn download(\n\n &self,\n\n api: Api,\n\n destination: &Path,\n\n overwrite: &OverwriteMode,\n\n ) -> Result<OverwriteResult> {\n\n let (should_download, result) = self.prepare_path(destination, overwrite).await?;\n\n if should_download {\n\n let download_url = self.get_download_url(api.clone()).await?;\n", "file_path": "src/api/module.rs", "rank": 35, "score": 39757.03695577339 }, { "content": " is_directory: false,\n\n children: RwLock::new(Some(Vec::new())),\n\n allow_upload: false,\n\n last_updated: parse_time(&s.last_updated_date),\n\n }),\n\n })\n\n .collect::<Vec<_>>(),\n\n _ => vec![],\n\n };\n\n\n\n let self_clone = self.clone();\n\n subdirs.append(&mut files);\n\n self_clone\n\n .inner\n\n .children\n\n .write()\n\n .map(|mut ptr| {\n\n *ptr = Some(subdirs);\n\n })\n\n .map_err(|_| \"Failed to acquire write lock on File\")\n", "file_path": "src/api/module.rs", "rank": 36, "score": 39756.87136528886 }, { "content": " if archived { \"Archived\" } else { \"NonArchived\" },\n\n self.id\n\n );\n\n let api_data = api.api_as_json::<ApiData>(&path, Method::GET, None).await?;\n\n if let Data::Announcements(announcements) = api_data.data {\n\n Ok(announcements)\n\n } else if let Data::Empty(_) = api_data.data {\n\n Ok(vec![])\n\n } else {\n\n Err(\"Invalid 
API response from server: type mismatch\")\n\n }\n\n }\n\n\n\n pub fn as_file(&self) -> File {\n\n File {\n\n inner: Arc::new(FileInner {\n\n id: self.id.to_owned(),\n\n name: sanitise_filename(self.code.to_owned()),\n\n is_directory: true,\n\n children: RwLock::new(None),\n\n allow_upload: false,\n\n last_updated: std::time::UNIX_EPOCH,\n\n }),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/api/module.rs", "rank": 37, "score": 39756.747078231965 }, { "content": "\n\nimpl File {\n\n pub fn name(&self) -> &str {\n\n &self.inner.name\n\n }\n\n\n\n pub fn is_directory(&self) -> bool {\n\n self.inner.is_directory\n\n }\n\n\n\n pub fn children(&self) -> Option<Vec<File>> {\n\n self.inner\n\n .children\n\n .read()\n\n .expect(\"Failed to lock mutex\")\n\n .clone()\n\n }\n\n\n\n pub async fn load_children(&self, api: &Api, include_uploadable: bool) -> Result<()> {\n\n debug_assert!(include_uploadable || !self.inner.allow_upload);\n", "file_path": "src/api/module.rs", "rank": 38, "score": 39756.42392974381 }, { "content": " file.children()\n\n .expect(\"Children must have been loaded\")\n\n .into_iter()\n\n })\n\n .collect();\n\n if files.is_empty() {\n\n break;\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n pub async fn get_download_url(&self, api: Api) -> Result<Url> {\n\n let data = api\n\n .api_as_json::<ApiData>(\n\n &format!(\"files/file/{}/downloadurl\", self.inner.id),\n\n Method::GET,\n\n None,\n\n )\n\n .await?;\n", "file_path": "src/api/module.rs", "rank": 39, "score": 39755.63574189866 }, { "content": " }),\n\n })\n\n .collect::<Vec<_>>(),\n\n _ => vec![],\n\n };\n\n\n\n let allow_upload = self.inner.allow_upload;\n\n let files = apic\n\n .api_as_json::<ApiData>(\n\n &format!(\n\n \"files/{}/file{}\",\n\n self.inner.id,\n\n if self.inner.allow_upload {\n\n \"?populate=Creator\"\n\n } else {\n\n \"\"\n\n }\n\n ),\n\n Method::GET,\n\n None,\n", "file_path": "src/api/module.rs", "rank": 40, "score": 39754.83202270645 }, { "content": " }\n\n\n\n pub async fn 
load_all_children(&self, api: &Api, include_uploadable: bool) -> Result<()> {\n\n let apic = api.clone();\n\n self.load_children(api, include_uploadable).await?;\n\n\n\n let mut files = vec![self.clone()];\n\n loop {\n\n for res in future::join_all(\n\n files\n\n .iter()\n\n .map(|file| file.load_children(&apic, include_uploadable)),\n\n )\n\n .await\n\n {\n\n res?;\n\n }\n\n files = files\n\n .into_iter()\n\n .flat_map(|file| {\n", "file_path": "src/api/module.rs", "rank": 41, "score": 39754.71838462389 }, { "content": " \"_autorename_{:04}-{:02}-{:02}\",\n\n date.year(),\n\n date.month(),\n\n date.day()\n\n ));\n\n let path_extension = path.extension();\n\n let mut i = 0;\n\n let mut suffixed_stem = new_stem.clone();\n\n let renamed_path = loop {\n\n let renamed_path_without_ext = path.with_file_name(suffixed_stem);\n\n let renamed_path = if let Some(ext) = path_extension {\n\n renamed_path_without_ext.with_extension(ext)\n\n } else {\n\n renamed_path_without_ext\n\n };\n\n if !renamed_path.exists() {\n\n break renamed_path;\n\n }\n\n i += 1;\n\n suffixed_stem = new_stem.clone();\n", "file_path": "src/api/module.rs", "rank": 42, "score": 39754.20697858729 }, { "content": "use std::collections::HashMap;\n\nuse std::sync::Arc;\n\n\n\nuse reqwest::header::CONTENT_TYPE;\n\nuse reqwest::redirect::Policy;\n\nuse reqwest::Method;\n\nuse reqwest::{Client, RequestBuilder, Response, Url};\n\nuse serde::de::DeserializeOwned;\n\nuse serde::Deserialize;\n\n\n\nuse crate::Result;\n\n\n\nuse self::module::{Announcement, Module};\n\n\n\npub mod module;\n\n\n\nconst ADFS_OAUTH2_URL: &str = \"https://vafs.nus.edu.sg/adfs/oauth2/authorize\";\n\nconst ADFS_CLIENT_ID: &str = \"E10493A3B1024F14BDC7D0D8B9F649E9-234390\";\n\nconst ADFS_RESOURCE_TYPE: &str = \"sg_edu_nus_oauth\";\n\nconst ADFS_REDIRECT_URI: &str = \"https://luminus.nus.edu.sg/auth/callback\";\n\nconst API_BASE_URL: &str = \"https://luminus.nus.edu.sg/v2/api/\";\n\nconst OCP_APIM_SUBSCRIPTION_KEY: &str = 
\"6963c200ca9440de8fa1eede730d8f7e\";\n\nconst OCP_APIM_SUBSCRIPTION_KEY_HEADER: &str = \"Ocp-Apim-Subscription-Key\";\n\n\n\n#[derive(Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n", "file_path": "src/api/mod.rs", "rank": 43, "score": 39642.73412644389 }, { "content": "\n\n#[derive(Debug, Clone)]\n\npub struct Api {\n\n pub jwt: Arc<String>,\n\n pub client: Client,\n\n}\n\n\n\nimpl Api {\n\n pub fn get_client(&self) -> &Client {\n\n &self.client\n\n }\n\n\n\n async fn api_as_json<T: DeserializeOwned + 'static>(\n\n &self,\n\n path: &str,\n\n method: Method,\n\n form: Option<&HashMap<&str, &str>>,\n\n ) -> Result<T> {\n\n let res = self.api(path, method, form).await?;\n\n res.json::<T>()\n", "file_path": "src/api/mod.rs", "rank": 44, "score": 39641.04426446661 }, { "content": " Ok(self\n\n .api_as_json::<Name>(\"user/Profile\", Method::GET, None)\n\n .await?\n\n .user_name_original)\n\n }\n\n\n\n pub async fn with_login<'a>(username: &str, password: &str) -> Result<Api> {\n\n let params = build_auth_form(username, password);\n\n let client = build_client()?;\n\n\n\n let auth_resp =\n\n auth_http_post(client.clone(), build_auth_url(), Some(&params), false).await?;\n\n if !auth_resp.url().as_str().starts_with(ADFS_REDIRECT_URI) {\n\n return Err(\"Invalid credentials\");\n\n }\n\n let code = auth_resp\n\n .url()\n\n .query_pairs()\n\n .find(|(key, _)| key == \"code\")\n\n .map(|(_key, code)| code.into_owned())\n", "file_path": "src/api/mod.rs", "rank": 45, "score": 39640.19245611187 }, { "content": " .api_as_json::<ApiData>(\"module\", Method::GET, None)\n\n .await?;\n\n\n\n if let Data::Modules(modules) = modules.data {\n\n if let Some(current_term) = current_term {\n\n Ok(modules\n\n .into_iter()\n\n .filter(|m| m.term == current_term)\n\n .collect())\n\n } else {\n\n Ok(modules)\n\n }\n\n } else if let Data::Empty(_) = modules.data {\n\n Ok(vec![])\n\n } else {\n\n Err(\"Invalid API response from server: type mismatch\")\n\n }\n\n }\n\n\n\n pub async fn 
name(&self) -> Result<String> {\n", "file_path": "src/api/mod.rs", "rank": 46, "score": 39637.05599153816 }, { "content": " async fn current_term(&self) -> Result<String> {\n\n Ok(self\n\n .api_as_json::<Term>(\n\n \"setting/AcademicWeek/current?populate=termDetail\",\n\n Method::GET,\n\n None,\n\n )\n\n .await?\n\n .term_detail\n\n .term)\n\n }\n\n\n\n pub async fn modules(&self, current_term_only: bool) -> Result<Vec<Module>> {\n\n let current_term = if current_term_only {\n\n Some(self.current_term().await?)\n\n } else {\n\n None\n\n };\n\n\n\n let modules = self\n", "file_path": "src/api/mod.rs", "rank": 47, "score": 39636.264160622515 }, { "content": " .await\n\n .map_err(|_| \"Unable to deserialize JSON\")\n\n }\n\n\n\n pub async fn api(\n\n &self,\n\n path: &str,\n\n method: Method,\n\n form: Option<&HashMap<&str, &str>>,\n\n ) -> Result<Response> {\n\n let url = full_api_url(path);\n\n let jwt = Arc::clone(&self.jwt);\n\n\n\n infinite_retry_http(self.client.clone(), url, method, form, move |req| {\n\n req.header(OCP_APIM_SUBSCRIPTION_KEY_HEADER, OCP_APIM_SUBSCRIPTION_KEY)\n\n .bearer_auth(jwt.clone())\n\n })\n\n .await\n\n }\n\n\n", "file_path": "src/api/mod.rs", "rank": 48, "score": 39635.44399952305 }, { "content": " };\n\n\n\n // LumiNUS randomly returns 400 to a perfectly good request for no apparent reason\n\n // We'll just ignore it and repeat the request\n\n let res = loop {\n\n let mut request_builder = client.request(method.clone(), url.clone());\n\n if let Some(ref form) = form {\n\n request_builder = request_builder\n\n .body(form.clone())\n\n .header(CONTENT_TYPE, \"application/x-www-form-urlencoded\");\n\n } else {\n\n request_builder = request_builder.header(CONTENT_TYPE, \"application/json\");\n\n }\n\n let request = edit_request(request_builder)\n\n .build()\n\n .map_err(|_| \"Failed to build request\")?;\n\n\n\n let res = client.execute(request).await.map_err(|_| \"HTTP error\");\n\n if let Ok(res) = res {\n\n break res;\n", "file_path": 
"src/api/mod.rs", "rank": 49, "score": 39634.33783317322 }, { "content": " .ok_or(\"Unknown authentication failure (no code returned)\")?;\n\n let client2 = client.clone();\n\n let token_resp = auth_http_post(\n\n client2,\n\n full_api_url(\"login/adfstoken\"),\n\n Some(&build_token_form(&code)),\n\n true,\n\n )\n\n .await?;\n\n if !token_resp.status().is_success() {\n\n return Err(\"Unknown authentication failure (no token returned)\");\n\n }\n\n let token = token_resp\n\n .json::<TokenResponse>()\n\n .await\n\n .map_err(|_| \"Failed to deserialise token exchange response\")?;\n\n Ok(Api {\n\n jwt: Arc::new(token.access_token),\n\n client,\n\n })\n\n }\n\n}\n", "file_path": "src/api/mod.rs", "rank": 50, "score": 39632.98623236274 }, { "content": " }\n\n };\n\n Ok(res)\n\n}\n\n\n\nasync fn auth_http_post(\n\n client: Client,\n\n url: Url,\n\n form: Option<&HashMap<&str, &str>>,\n\n with_apim: bool,\n\n) -> Result<Response> {\n\n infinite_retry_http(client, url, Method::POST, form, move |req| {\n\n if with_apim {\n\n req.header(OCP_APIM_SUBSCRIPTION_KEY_HEADER, OCP_APIM_SUBSCRIPTION_KEY)\n\n } else {\n\n req\n\n }\n\n })\n\n .await\n\n}\n", "file_path": "src/api/mod.rs", "rank": 51, "score": 39632.16174307331 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(untagged)]\n\nenum Data {\n\n Empty(Vec<[(); 0]>),\n\n Modules(Vec<Module>),\n\n Announcements(Vec<Announcement>),\n\n ApiFileDirectory(Vec<ApiFileDirectory>),\n\n Text(String),\n\n}\n\n\n", "file_path": "src/api/mod.rs", "rank": 52, "score": 37608.62291629871 }, { "content": "# fluminurs\n\n\n\n[![Build Status](https://travis-ci.com/indocomsoft/fluminurs.svg?branch=master)](https://travis-ci.com/indocomsoft/fluminurs)\n\n\n\n<sup><sub>F LumiNUS! IVLE ftw! 
Why fix what ain't broken?!</sub></sup>\n\n\n\nSince IVLE will be deprecated next academic year (AY2019/2020), while LumiNUS has consistently pushed back its schedule to release an API, I have decided to reverse-engineer the API used by the Angular front-end of LumiNUS. Currently, my goal is to be able to automatically download files a la [ivle-sync](https://github.com/goweiwen/ivle-sync)\n\n\n\nI try to keep to best coding practices and use as little dependencies as possible. Do let me know if you have any suggestions!\n\n\n\nPR's are welcome.\n\n\n\n## Installation\n\nSimply click Releases (https://github.com/indocomsoft/fluminurs/releases) and download the executable for your platform\n", "file_path": "README.md", "rank": 65, "score": 5.022048414422471 } ]
Rust
macroquad_macro/src/lib.rs
The-Jon/macroquad
8e80940d611e09cbb360a52c2550a8fed866e9e5
extern crate proc_macro; use proc_macro::{Ident, TokenStream, TokenTree}; use std::iter::Peekable; fn next_group(source: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Option<proc_macro::Group> { if let Some(TokenTree::Group(_)) = source.peek() { let group = match source.next().unwrap() { TokenTree::Group(group) => group, _ => unreachable!("just checked with peek()!"), }; Some(group) } else { None } } fn next_literal(source: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Option<String> { if let Some(TokenTree::Literal(lit)) = source.peek() { let mut literal = lit.to_string(); if literal.starts_with("\"") { literal.remove(0); literal.remove(literal.len() - 1); } source.next(); return Some(literal); } return None; } #[proc_macro_attribute] pub fn main(attr: TokenStream, item: TokenStream) -> TokenStream { let mut modified = TokenStream::new(); let mut source = item.into_iter().peekable(); let mut crate_rename = None; while let Some(TokenTree::Punct(punct)) = source.peek() { assert_eq!(format!("{}", punct), "#"); let _ = source.next().unwrap(); let group = next_group(&mut source); let mut group = group.unwrap().stream().into_iter().peekable(); let attribute_name = format!("{}", group.next().unwrap()); if attribute_name == "macroquad" { let group = next_group(&mut group); let mut group = group.unwrap().stream().into_iter().peekable(); let config_name = format!("{}", group.next().unwrap()); if group.peek().is_some() { let _ = group.next(); let config_value = Some(next_literal(&mut group).unwrap()); if config_name == "crate_rename" { crate_rename = config_value; } } } } if let TokenTree::Ident(ident) = source.next().unwrap() { assert_eq!(format!("{}", ident), "async"); modified.extend(std::iter::once(TokenTree::Ident(ident))); } else { panic!("[macroquad::main] is allowed only for async functions"); } if let TokenTree::Ident(ident) = source.next().unwrap() { assert_eq!(format!("{}", ident), "fn"); modified.extend(std::iter::once(TokenTree::Ident(ident))); } else 
{ panic!("[macroquad::main] is allowed only for functions"); } if let TokenTree::Ident(ident) = source.next().unwrap() { assert_eq!(format!("{}", ident), "main"); modified.extend(std::iter::once(TokenTree::Ident(Ident::new( "amain", ident.span(), )))); } else { panic!("[macroquad::main] expecting main function"); } modified.extend(std::iter::once(source.next().unwrap())); let next = source.next().unwrap(); let use_result = if let TokenTree::Punct(punct) = &next { format!("{}", punct) == "-" } else { false }; modified.extend(std::iter::once(next)); modified.extend(source); let (method, ident) = match attr.into_iter().next() { Some(TokenTree::Ident(ident)) => ("from_config", format!("{}()", ident)), Some(TokenTree::Literal(literal)) => ("new", literal.to_string()), Some(wrong_ident) => panic!( "Wrong argument: {:?}. Place function returned `Conf`", wrong_ident ), None => panic!("No argument! Place function returned `Conf`"), }; let crate_name = crate_rename.unwrap_or_else(|| "macroquad".to_string()); let mut prelude: TokenStream = format!( " fn main() {{ {crate_name}::Window::{method}({ident}, {main}); }} ", crate_name = crate_name, method = method, ident = ident, main = if use_result { format!( "async {{ if let Err(err) = amain().await {{ {}::logging::error!(\"Error: {{:?}}\", err); }} }}", crate_name ) } else { "amain()".to_string() } ) .parse() .unwrap(); prelude.extend(modified); prelude } #[proc_macro_attribute] pub fn test(_attr: TokenStream, item: TokenStream) -> TokenStream { let mut modified = TokenStream::new(); let mut source = item.into_iter().peekable(); while let Some(TokenTree::Punct(punct)) = source.peek() { assert_eq!(format!("{}", punct), "#"); let _ = source.next().unwrap(); let _group = next_group(&mut source); } if let TokenTree::Ident(ident) = source.next().unwrap() { assert_eq!(format!("{}", ident), "async"); modified.extend(std::iter::once(TokenTree::Ident(ident))); } else { panic!("[macroquad::test] is allowed only for async functions"); } if 
let TokenTree::Ident(ident) = source.next().unwrap() { assert_eq!(format!("{}", ident), "fn"); modified.extend(std::iter::once(TokenTree::Ident(ident))); } else { panic!("[macroquad::test] is allowed only for functions"); } let test_name = if let TokenTree::Ident(ident) = source.next().unwrap() { let test_name = format!("{}", ident); modified.extend(std::iter::once(TokenTree::Ident(Ident::new( &format!("{}_async", test_name), ident.span(), )))); test_name } else { panic!("[macroquad::test] expecting main function"); }; modified.extend(std::iter::once(source.next().unwrap())); modified.extend(source); let mut prelude: TokenStream = format!( " #[test] fn {test_name}() {{ let _lock = unsafe {{ let mutex = macroquad::test::ONCE.call_once(|| {{ macroquad::test::MUTEX = Some(std::sync::Mutex::new(())); }}); macroquad::test::MUTEX.as_mut().unwrap().lock() }}; macroquad::Window::new(\"test\", {test_name}_async()); }} ", test_name = test_name, ) .parse() .unwrap(); prelude.extend(modified); prelude } #[doc(hidden)] #[proc_macro_derive(CapabilityTrait)] pub fn capability_trait_macro(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let mut source = input.into_iter().peekable(); while let Some(TokenTree::Punct(_)) = source.peek() { let _ = source.next(); let _ = next_group(&mut source); } assert_eq!("pub", &format!("{}", source.next().unwrap())); assert_eq!("struct", &format!("{}", source.next().unwrap())); let struct_name = format!("{}", source.next().unwrap()); let mut group = next_group(&mut source) .unwrap() .stream() .into_iter() .peekable(); let mut trait_decl = format!("pub trait {}Trait {{", struct_name); let mut trait_impl = format!("impl {}Trait for NodeWith<{}> {{", struct_name, struct_name); fn next_str(group: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Option<String> { group.next().map(|tok| format!("{}", tok)) } loop { while let Some(TokenTree::Punct(_)) = group.peek() { let _ = group.next(); let _ = next_group(&mut group); } let _pub = 
next_str(&mut group); if _pub.is_none() { break; } assert_eq!("pub", _pub.unwrap()); let fn_name = next_str(&mut group).unwrap(); let mut fn_res = "()".to_string(); assert_eq!(":", &next_str(&mut group).unwrap()); assert_eq!("fn", &next_str(&mut group).unwrap()); let fn_args_decl = next_str(&mut group).unwrap(); let mut fn_args_impl = String::new(); let args = fn_args_decl.split(":").collect::<Vec<&str>>(); for arg in &args[1..args.len() - 1] { fn_args_impl.push_str(&format!(", {}", arg.split(", ").last().unwrap())); } let p = next_str(&mut group); match p.as_deref() { Some("-") => { assert_eq!(">", next_str(&mut group).unwrap()); fn_res = next_str(&mut group).unwrap(); let _ = next_str(&mut group); } Some(",") => {} None => break, _ => panic!(), }; trait_decl.push_str(&format!( "fn {} {} -> {};", fn_name, fn_args_decl.replace("node : HandleUntyped", "&self"), fn_res )); let args = fn_args_impl .replace("node : HandleUntyped", "") .replace("(", "") .replace(")", ""); trait_impl.push_str(&format!( "fn {} {} -> {} {{", fn_name, fn_args_decl.replace("node : HandleUntyped", "&self"), fn_res )); trait_impl.push_str(&format!( "(self.capability.{})(self.node {})", fn_name, args )); trait_impl.push_str("}"); } trait_decl.push_str("}"); trait_impl.push_str("}"); let res = format!( "{} {}", trait_decl, trait_impl ); res.parse().unwrap() }
extern crate proc_macro; use proc_macro::{Ident, TokenStream, TokenTree}; use std::iter::Peekable; fn next_group(source: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Option<proc_macro::Group> { if let Some(TokenTree::Group(_)) = source.peek() { let group = match source.next().unwrap() { TokenTree::Group(group) => group, _ => unreachable!("just checked with peek()!"), }; Some(group) } else { None } } fn next_literal(source: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Option<String> { if let Some(TokenTree::Literal(lit)) = source.peek() { let mut literal = lit.to_string(); if literal.starts_with("\"") { literal.remove(0); literal.remove(literal.len() - 1); } source.next(); return Some(literal); } return None; } #[proc_macro_attribute] pub fn main(attr: TokenStream, item: TokenStream) -> TokenStream { let mut modified = TokenStream::new(); let mut source = item.into_iter().peekable(); let mut crate_rename = None; while let Some(TokenTree::Punct(punct)) = source.peek() { assert_eq!(format!("{}", punct), "#"); let _ = source.next().unwrap(); let group = next_group(&mut source); let mut group = group.unwrap().stream().into_iter().peekable(); let attribute_name = format!("{}", group.next().unwrap()); if attribute_name == "macroquad" { let group = next_group(&mut group); let mut group = group.unwrap().stream().into_iter().peekable(); let config_name = format!("{}", group.next().unwrap()); if group.peek().is_some() { let _ = group.next(); let config_value = Some(next_literal(&mut group).unwrap()); if config_name == "crate_rename" { crate_rename = config_value; } } } } if let TokenTree::Ident(ident) = source.next().unwrap() { assert_eq!(format!("{}", ident), "async"); modified.extend(std::iter::once(TokenTree::Ident(ident))); } else { panic!("[macroquad::main] is allowed only for async functions"); } if let TokenTree::Ident(ident) = source.next().unwrap() { assert_eq!(format!("{}", ident), "fn"); modified.extend(std::iter::once(TokenTree::Ident(ident))); } else 
{ panic!("[macroquad::main] is allowed only for functions"); } if let TokenTree::Ident(ident) = source.next().unwrap() { assert_eq!(format!("{}", ident), "main"); modified.extend(std::iter::once(TokenTree::Ident(Ident::new( "amain", ident.span(), )))); } else { panic!("[macroquad::main] expecting main function"); } modified.extend(std::iter::once(source.next().unwrap())); let next = source.next().unwrap(); let use_result = if let TokenTree::Punct(punct) = &next { format!("{}", punct) == "-" } else { false }; modified.extend(std::iter::once(next)); modified.extend(source); let (method, ident) = match attr.into_iter().next() { Some(TokenTree::Ident(ident)) => ("from_config", format!("{}()", ident)), Some(TokenTree::Literal(literal)) => ("new", literal.to_string()), Some(wrong_ident) => panic!( "Wrong argument: {:?}. Place function returned `Conf`", wrong_ident ), None => panic!("No argument! Place function returned `Conf`"), }; let crate_name = crate_rename.unwrap_or_else(|| "macroquad".to_string()); let mut prelude: TokenStream = format!( " fn main() {{ {crate_name}::Window::{method}({ident}, {main}); }} ", crate_name = crate_name, method = method, ident = ident, main = if use_result { format!( "async {{ if let Err(err) = amain().await {{ {}::logging::error!(\"Error: {{:?}}\", err); }} }}", crate_name ) } else { "amain()".to_string() } ) .parse() .unwrap(); prelude.extend(modified); prelude } #[proc_macro_attribute] pub fn test(_attr: TokenStream, item: TokenStream) -> TokenStream { let mut modified = TokenStream::new(); let mut source = item.into_iter().peekable(); while let Some(TokenTree::Punct(punct)) = source.peek() { assert_eq!(format!("{}", punct), "#");
#[doc(hidden)] #[proc_macro_derive(CapabilityTrait)] pub fn capability_trait_macro(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let mut source = input.into_iter().peekable(); while let Some(TokenTree::Punct(_)) = source.peek() { let _ = source.next(); let _ = next_group(&mut source); } assert_eq!("pub", &format!("{}", source.next().unwrap())); assert_eq!("struct", &format!("{}", source.next().unwrap())); let struct_name = format!("{}", source.next().unwrap()); let mut group = next_group(&mut source) .unwrap() .stream() .into_iter() .peekable(); let mut trait_decl = format!("pub trait {}Trait {{", struct_name); let mut trait_impl = format!("impl {}Trait for NodeWith<{}> {{", struct_name, struct_name); fn next_str(group: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Option<String> { group.next().map(|tok| format!("{}", tok)) } loop { while let Some(TokenTree::Punct(_)) = group.peek() { let _ = group.next(); let _ = next_group(&mut group); } let _pub = next_str(&mut group); if _pub.is_none() { break; } assert_eq!("pub", _pub.unwrap()); let fn_name = next_str(&mut group).unwrap(); let mut fn_res = "()".to_string(); assert_eq!(":", &next_str(&mut group).unwrap()); assert_eq!("fn", &next_str(&mut group).unwrap()); let fn_args_decl = next_str(&mut group).unwrap(); let mut fn_args_impl = String::new(); let args = fn_args_decl.split(":").collect::<Vec<&str>>(); for arg in &args[1..args.len() - 1] { fn_args_impl.push_str(&format!(", {}", arg.split(", ").last().unwrap())); } let p = next_str(&mut group); match p.as_deref() { Some("-") => { assert_eq!(">", next_str(&mut group).unwrap()); fn_res = next_str(&mut group).unwrap(); let _ = next_str(&mut group); } Some(",") => {} None => break, _ => panic!(), }; trait_decl.push_str(&format!( "fn {} {} -> {};", fn_name, fn_args_decl.replace("node : HandleUntyped", "&self"), fn_res )); let args = fn_args_impl .replace("node : HandleUntyped", "") .replace("(", "") .replace(")", ""); trait_impl.push_str(&format!( 
"fn {} {} -> {} {{", fn_name, fn_args_decl.replace("node : HandleUntyped", "&self"), fn_res )); trait_impl.push_str(&format!( "(self.capability.{})(self.node {})", fn_name, args )); trait_impl.push_str("}"); } trait_decl.push_str("}"); trait_impl.push_str("}"); let res = format!( "{} {}", trait_decl, trait_impl ); res.parse().unwrap() }
let _ = source.next().unwrap(); let _group = next_group(&mut source); } if let TokenTree::Ident(ident) = source.next().unwrap() { assert_eq!(format!("{}", ident), "async"); modified.extend(std::iter::once(TokenTree::Ident(ident))); } else { panic!("[macroquad::test] is allowed only for async functions"); } if let TokenTree::Ident(ident) = source.next().unwrap() { assert_eq!(format!("{}", ident), "fn"); modified.extend(std::iter::once(TokenTree::Ident(ident))); } else { panic!("[macroquad::test] is allowed only for functions"); } let test_name = if let TokenTree::Ident(ident) = source.next().unwrap() { let test_name = format!("{}", ident); modified.extend(std::iter::once(TokenTree::Ident(Ident::new( &format!("{}_async", test_name), ident.span(), )))); test_name } else { panic!("[macroquad::test] expecting main function"); }; modified.extend(std::iter::once(source.next().unwrap())); modified.extend(source); let mut prelude: TokenStream = format!( " #[test] fn {test_name}() {{ let _lock = unsafe {{ let mutex = macroquad::test::ONCE.call_once(|| {{ macroquad::test::MUTEX = Some(std::sync::Mutex::new(())); }}); macroquad::test::MUTEX.as_mut().unwrap().lock() }}; macroquad::Window::new(\"test\", {test_name}_async()); }} ", test_name = test_name, ) .parse() .unwrap(); prelude.extend(modified); prelude }
function_block-function_prefix_line
[]
Rust
src/lib/storage/ramdevice_client/rust/src/lib.rs
gnoliyil/fuchsia
a98c2d6ae44b7c485c2ee55855d0441da422f4cf
#![deny(missing_docs)] #[allow(bad_style)] mod ramdevice_sys; use { anyhow::Error, fdio, fuchsia_zircon as zx, std::{ ffi, fs, os::unix::io::{AsRawFd, RawFd}, ptr, }, zx::HandleBased, }; enum DevRoot { Provided(fs::File), Isolated, } pub struct VmoRamdiskClientBuilder { vmo: zx::Vmo, block_size: Option<u64>, dev_root: Option<DevRoot>, } impl VmoRamdiskClientBuilder { pub fn new(vmo: zx::Vmo) -> Self { Self { vmo, block_size: None, dev_root: None } } pub fn block_size(mut self, block_size: u64) -> Self { self.block_size = Some(block_size); self } pub fn dev_root(mut self, dev_root: fs::File) -> Self { self.dev_root = Some(DevRoot::Provided(dev_root)); self } pub fn isolated_dev_root(mut self) -> Self { self.dev_root = Some(DevRoot::Isolated); self } pub fn build(self) -> Result<RamdiskClient, zx::Status> { let vmo_handle = self.vmo.into_raw(); let mut ramdisk: *mut ramdevice_sys::ramdisk_client_t = ptr::null_mut(); let status = match (&self.dev_root, &self.block_size) { (Some(dev_root), Some(block_size)) => { let (dev_root_fd, _dev_root) = match &dev_root { DevRoot::Provided(f) => (f.as_raw_fd(), None), DevRoot::Isolated => { let devmgr = open_isolated_devmgr()?; (devmgr.as_raw_fd(), Some(devmgr)) } }; unsafe { ramdevice_sys::ramdisk_create_at_from_vmo_with_block_size( dev_root_fd, vmo_handle, *block_size, &mut ramdisk, ) } } (Some(dev_root), None) => { let (dev_root_fd, _dev_root) = match &dev_root { DevRoot::Provided(f) => (f.as_raw_fd(), None), DevRoot::Isolated => { let devmgr = open_isolated_devmgr()?; (devmgr.as_raw_fd(), Some(devmgr)) } }; unsafe { ramdevice_sys::ramdisk_create_at_from_vmo(dev_root_fd, vmo_handle, &mut ramdisk) } } (None, Some(block_size)) => { unsafe { ramdevice_sys::ramdisk_create_from_vmo_with_block_size( vmo_handle, *block_size, &mut ramdisk, ) } } (None, None) => { unsafe { ramdevice_sys::ramdisk_create_from_vmo(vmo_handle, &mut ramdisk) } } }; zx::Status::ok(status)?; Ok(RamdiskClient { ramdisk }) } } pub struct RamdiskClientBuilder { 
block_size: u64, block_count: u64, dev_root: Option<DevRoot>, guid: Option<[u8; 16]>, } impl RamdiskClientBuilder { pub fn new(block_size: u64, block_count: u64) -> Self { Self { block_size, block_count, dev_root: None, guid: None } } pub fn dev_root(&mut self, dev_root: fs::File) -> &mut Self { self.dev_root = Some(DevRoot::Provided(dev_root)); self } pub fn isolated_dev_root(&mut self) -> &mut Self { self.dev_root = Some(DevRoot::Isolated); self } pub fn guid(&mut self, guid: [u8; 16]) -> &mut Self { self.guid = Some(guid); self } pub fn build(&mut self) -> Result<RamdiskClient, zx::Status> { let block_size = self.block_size; let block_count = self.block_count; let mut ramdisk: *mut ramdevice_sys::ramdisk_client_t = ptr::null_mut(); let status = match (&self.dev_root, &self.guid) { (Some(dev_root), Some(guid)) => { let (dev_root_fd, _dev_root) = match &dev_root { DevRoot::Provided(f) => (f.as_raw_fd(), None), DevRoot::Isolated => { let devmgr = open_isolated_devmgr()?; (devmgr.as_raw_fd(), Some(devmgr)) } }; unsafe { ramdevice_sys::ramdisk_create_at_with_guid( dev_root_fd, block_size, block_count, guid.as_ptr(), 16, &mut ramdisk, ) } } (Some(dev_root), None) => { let (dev_root_fd, _dev_root) = match &dev_root { DevRoot::Provided(f) => (f.as_raw_fd(), None), DevRoot::Isolated => { let devmgr = open_isolated_devmgr()?; (devmgr.as_raw_fd(), Some(devmgr)) } }; unsafe { ramdevice_sys::ramdisk_create_at( dev_root_fd, block_size, block_count, &mut ramdisk, ) } } (None, Some(guid)) => { unsafe { ramdevice_sys::ramdisk_create_with_guid( block_size, block_count, guid.as_ptr(), 16, &mut ramdisk, ) } } (None, None) => { unsafe { ramdevice_sys::ramdisk_create(block_size, block_count, &mut ramdisk) } } }; zx::Status::ok(status)?; Ok(RamdiskClient { ramdisk }) } } pub struct RamdiskClient { ramdisk: *mut ramdevice_sys::ramdisk_client_t, } impl RamdiskClient { pub fn builder(block_size: u64, block_count: u64) -> RamdiskClientBuilder { RamdiskClientBuilder::new(block_size, 
block_count) } pub fn create(block_size: u64, block_count: u64) -> Result<Self, zx::Status> { Self::builder(block_size, block_count).build() } pub fn get_path(&self) -> &str { unsafe { let raw_path = ramdevice_sys::ramdisk_get_path(self.ramdisk); ffi::CStr::from_ptr(raw_path).to_str().expect("ramdisk path was not utf8?") } } pub fn open(&self) -> Result<zx::Channel, zx::Status> { struct UnownedFd(RawFd); impl AsRawFd for UnownedFd { fn as_raw_fd(&self) -> RawFd { self.0 } } let fd = unsafe { ramdevice_sys::ramdisk_get_block_fd(self.ramdisk) }; let client_chan = fdio::clone_channel(&UnownedFd(fd))?; Ok(client_chan) } pub fn destroy(self) -> Result<(), zx::Status> { let status = unsafe { ramdevice_sys::ramdisk_destroy(self.ramdisk) }; std::mem::forget(self); zx::Status::ok(status) } } unsafe impl Send for RamdiskClient {} unsafe impl Sync for RamdiskClient {} impl Drop for RamdiskClient { fn drop(&mut self) { let _ = unsafe { ramdevice_sys::ramdisk_destroy(self.ramdisk) }; } } fn open_isolated_devmgr() -> Result<fs::File, zx::Status> { let (client_chan, server_chan) = zx::Channel::create()?; fdio::service_connect("/svc/fuchsia.test.IsolatedDevmgr", server_chan)?; Ok(fdio::create_fd(client_chan.into())?) } pub fn wait_for_device(path: &str, duration: std::time::Duration) -> Result<(), Error> { let c_path = ffi::CString::new(path)?; Ok(zx::Status::ok(unsafe { ramdevice_sys::wait_for_device(c_path.as_ptr(), duration.as_nanos() as u64) })?) } pub fn wait_for_device_at( dirfd: &fs::File, path: &str, duration: std::time::Duration, ) -> Result<(), Error> { let c_path = ffi::CString::new(path)?; Ok(zx::Status::ok(unsafe { ramdevice_sys::wait_for_device_at( dirfd.as_raw_fd(), c_path.as_ptr(), duration.as_nanos() as u64, ) })?) 
} #[cfg(test)] mod tests { use { super::*, fidl_fuchsia_io::{NodeInfo, NodeProxy}, fuchsia_async as fasync, matches::assert_matches, }; const TEST_GUID: [u8; 16] = [ 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x10, ]; #[test] fn create_get_path_destroy() { let devmgr = open_isolated_devmgr().expect("failed to open isolated devmgr"); let ramdisk = RamdiskClient::builder(512, 2048) .dev_root(devmgr) .build() .expect("failed to create ramdisk"); let _path = ramdisk.get_path(); assert_eq!(ramdisk.destroy(), Ok(())); } #[test] fn create_with_dev_root_and_guid_get_path_destroy() { let devmgr = open_isolated_devmgr().expect("failed to open isolated devmgr"); let ramdisk = RamdiskClient::builder(512, 2048) .dev_root(devmgr) .guid(TEST_GUID) .build() .expect("failed to create ramdisk"); let _path = ramdisk.get_path(); assert_eq!(ramdisk.destroy(), Ok(())); } #[test] fn create_with_guid_get_path_destroy() { let devmgr = open_isolated_devmgr().expect("failed to open isolated devmgr"); let ramdisk = RamdiskClient::builder(512, 2048) .dev_root(devmgr) .guid(TEST_GUID) .build() .expect("failed to create ramdisk"); let _path = ramdisk.get_path(); assert_eq!(ramdisk.destroy(), Ok(())); } #[test] fn create_open_destroy() { let ramdisk = RamdiskClient::builder(512, 2048).isolated_dev_root().build().unwrap(); assert_matches!(ramdisk.open(), Ok(_)); assert_eq!(ramdisk.destroy(), Ok(())); } #[fasync::run_singlethreaded(test)] async fn create_describe_destroy() { let ramdisk = RamdiskClient::builder(512, 2048).isolated_dev_root().build().unwrap(); let device = ramdisk.open().unwrap(); let fasync_channel = fasync::Channel::from_channel(device).expect("failed to convert to fasync channel"); let proxy = NodeProxy::new(fasync_channel); let info = proxy.describe().await.expect("failed to get node info"); assert_matches!(info, NodeInfo::Device(_)); assert_eq!(ramdisk.destroy(), Ok(())); } }
#![deny(missing_docs)] #[allow(bad_style)] mod ramdevice_sys; use { anyhow::Error, fdio, fuchsia_zircon as zx, std::{ ffi, fs, os::unix::io::{AsRawFd, RawFd}, ptr, }, zx::HandleBased, }; enum DevRoot { Provided(fs::File), Isolated, } pub struct VmoRamdiskClientBuilder { vmo: zx::Vmo, block_size: Option<u64>, dev_root: Option<DevRoot>, } impl VmoRamdiskClientBuilder { pub fn new(vmo: zx::Vmo) -> Self { Self { vmo, block_size: None, dev_root: None } } pub fn block_size(mut self, block_size: u64) -> Self { self.block_size = Some(block_size); self } pub fn dev_root(mut self, dev_root: fs::File) -> Self { self.dev_root = Some(DevRoot::Provided(dev_root)); self } pub fn isolated_dev_root(mut self) -> Self { self.dev_root = Some(DevRoot::Isolated); self } pub fn build(self) -> Result<RamdiskClient, zx::Status> { let vmo_handle = self.vmo.into_raw(); let mut ramdisk: *mut ramdevice_sys::ramdisk_client_t = ptr::null_mut(); let status = match (&self.dev_root, &self.block_size) { (Some(dev_root), Some(block_size)) => { let (dev_root_fd, _dev_root) = match &dev_root { DevRoot::Provided(f) => (f.as_raw_fd(), None), DevRoot::Isolated => { let devmgr = open_isolated_devmgr()?; (devmgr.as_raw_fd(), Some(devmgr)) } }; unsafe { ramdevice_sys::ramdisk_create_at_from_vmo_with_block_size( dev_root_fd, vmo_handle, *block_size, &mut ramdisk, ) } } (Some(dev_root), None) => { let (dev_root_fd, _dev_root) = match &dev_root { DevRoot::Provided(f) => (f.as_raw_fd(), None), DevRoot::Isolated => { let devmgr = open_isolated_devmgr()?; (devmgr.as_raw_fd(), Some(devmgr)) } }; unsafe { ramdevice_sys::ramdisk_create_at_from_vmo(dev_root_fd, vmo_handle, &mut ramdisk) } } (None, Some(block_size)) => { unsafe { ramdevice_sys::ramdisk_create_from_vmo_with_block_size( vmo_handle, *block_size, &mut ramdisk, ) } } (None, None) => { unsafe { ramdevice_sys::ramdisk_create_from_vmo(vmo_handle, &mut ramdisk) } } }; zx::Status::ok(status)?; Ok(RamdiskClient { ramdisk }) } } pub struct RamdiskClientBuilder { 
block_size: u64, block_count: u64, dev_root: Option<DevRoot>, guid: Option<[u8; 16]>, } impl RamdiskClientBuilder { pub fn new(block_size: u64, block_count: u64) -> Self { Self { block_size, block_count, dev_root: None, guid: None } } pub fn dev_root(&mut self, dev_root: fs::File) -> &mut Self { self.dev_root = Some(DevRoot::Provided(dev_root)); self } pub fn isolated_dev_root(&mut self) -> &mut Self { self.dev_root = Some(DevRoot::Isolated); self } pub fn guid(&mut self, guid: [u8; 16]) -> &mut Self { self.guid = Some(guid); self } pub fn build(&mut self) -> Result<RamdiskClient, zx::Status> { let block_size = self.block_size; let block_count = self.block_count; let mut ramdisk: *mut ramdevice_sys::ramdisk_client_t = ptr::null_mut(); let status = match (&self.dev_root, &self.guid) { (Some(dev_root), Some(guid)) => { let (dev_root_fd, _dev_root) = match &dev_root { DevRoot::Provided(f) => (f.as_raw_fd(), None), DevRoot::Isolated => { let devmgr = open_isolated_devmgr()?; (devmgr.as_raw_fd(), Some(devmgr)) } }; unsafe { ramdevice_sys::ramdisk_create_at_with_guid( dev_root_fd, block_size, block_count, guid.as_ptr(), 16, &mut ramdisk, ) } } (Some(dev_root), None) => { let (dev_root_fd, _dev_root) =
; unsafe { ramdevice_sys::ramdisk_create_at( dev_root_fd, block_size, block_count, &mut ramdisk, ) } } (None, Some(guid)) => { unsafe { ramdevice_sys::ramdisk_create_with_guid( block_size, block_count, guid.as_ptr(), 16, &mut ramdisk, ) } } (None, None) => { unsafe { ramdevice_sys::ramdisk_create(block_size, block_count, &mut ramdisk) } } }; zx::Status::ok(status)?; Ok(RamdiskClient { ramdisk }) } } pub struct RamdiskClient { ramdisk: *mut ramdevice_sys::ramdisk_client_t, } impl RamdiskClient { pub fn builder(block_size: u64, block_count: u64) -> RamdiskClientBuilder { RamdiskClientBuilder::new(block_size, block_count) } pub fn create(block_size: u64, block_count: u64) -> Result<Self, zx::Status> { Self::builder(block_size, block_count).build() } pub fn get_path(&self) -> &str { unsafe { let raw_path = ramdevice_sys::ramdisk_get_path(self.ramdisk); ffi::CStr::from_ptr(raw_path).to_str().expect("ramdisk path was not utf8?") } } pub fn open(&self) -> Result<zx::Channel, zx::Status> { struct UnownedFd(RawFd); impl AsRawFd for UnownedFd { fn as_raw_fd(&self) -> RawFd { self.0 } } let fd = unsafe { ramdevice_sys::ramdisk_get_block_fd(self.ramdisk) }; let client_chan = fdio::clone_channel(&UnownedFd(fd))?; Ok(client_chan) } pub fn destroy(self) -> Result<(), zx::Status> { let status = unsafe { ramdevice_sys::ramdisk_destroy(self.ramdisk) }; std::mem::forget(self); zx::Status::ok(status) } } unsafe impl Send for RamdiskClient {} unsafe impl Sync for RamdiskClient {} impl Drop for RamdiskClient { fn drop(&mut self) { let _ = unsafe { ramdevice_sys::ramdisk_destroy(self.ramdisk) }; } } fn open_isolated_devmgr() -> Result<fs::File, zx::Status> { let (client_chan, server_chan) = zx::Channel::create()?; fdio::service_connect("/svc/fuchsia.test.IsolatedDevmgr", server_chan)?; Ok(fdio::create_fd(client_chan.into())?) 
} pub fn wait_for_device(path: &str, duration: std::time::Duration) -> Result<(), Error> { let c_path = ffi::CString::new(path)?; Ok(zx::Status::ok(unsafe { ramdevice_sys::wait_for_device(c_path.as_ptr(), duration.as_nanos() as u64) })?) } pub fn wait_for_device_at( dirfd: &fs::File, path: &str, duration: std::time::Duration, ) -> Result<(), Error> { let c_path = ffi::CString::new(path)?; Ok(zx::Status::ok(unsafe { ramdevice_sys::wait_for_device_at( dirfd.as_raw_fd(), c_path.as_ptr(), duration.as_nanos() as u64, ) })?) } #[cfg(test)] mod tests { use { super::*, fidl_fuchsia_io::{NodeInfo, NodeProxy}, fuchsia_async as fasync, matches::assert_matches, }; const TEST_GUID: [u8; 16] = [ 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x10, ]; #[test] fn create_get_path_destroy() { let devmgr = open_isolated_devmgr().expect("failed to open isolated devmgr"); let ramdisk = RamdiskClient::builder(512, 2048) .dev_root(devmgr) .build() .expect("failed to create ramdisk"); let _path = ramdisk.get_path(); assert_eq!(ramdisk.destroy(), Ok(())); } #[test] fn create_with_dev_root_and_guid_get_path_destroy() { let devmgr = open_isolated_devmgr().expect("failed to open isolated devmgr"); let ramdisk = RamdiskClient::builder(512, 2048) .dev_root(devmgr) .guid(TEST_GUID) .build() .expect("failed to create ramdisk"); let _path = ramdisk.get_path(); assert_eq!(ramdisk.destroy(), Ok(())); } #[test] fn create_with_guid_get_path_destroy() { let devmgr = open_isolated_devmgr().expect("failed to open isolated devmgr"); let ramdisk = RamdiskClient::builder(512, 2048) .dev_root(devmgr) .guid(TEST_GUID) .build() .expect("failed to create ramdisk"); let _path = ramdisk.get_path(); assert_eq!(ramdisk.destroy(), Ok(())); } #[test] fn create_open_destroy() { let ramdisk = RamdiskClient::builder(512, 2048).isolated_dev_root().build().unwrap(); assert_matches!(ramdisk.open(), Ok(_)); assert_eq!(ramdisk.destroy(), Ok(())); } 
#[fasync::run_singlethreaded(test)] async fn create_describe_destroy() { let ramdisk = RamdiskClient::builder(512, 2048).isolated_dev_root().build().unwrap(); let device = ramdisk.open().unwrap(); let fasync_channel = fasync::Channel::from_channel(device).expect("failed to convert to fasync channel"); let proxy = NodeProxy::new(fasync_channel); let info = proxy.describe().await.expect("failed to get node info"); assert_matches!(info, NodeInfo::Device(_)); assert_eq!(ramdisk.destroy(), Ok(())); } }
match &dev_root { DevRoot::Provided(f) => (f.as_raw_fd(), None), DevRoot::Isolated => { let devmgr = open_isolated_devmgr()?; (devmgr.as_raw_fd(), Some(devmgr)) } }
if_condition
[]
Rust
libtransact/src/families/xo.rs
peterschwarz/transact
a356d456aee65b1aa73a5cf397a72b504b27093e
use crate::error::InvalidStateError; use crate::protocol::batch::BatchBuilder; use crate::protocol::batch::BatchPair; use crate::protocol::transaction::HashMethod; use crate::protocol::transaction::TransactionBuilder; use crate::protocol::transaction::TransactionPair; use crate::workload::BatchWorkload; use crate::workload::ExpectedBatchResult; use crate::workload::TransactionWorkload; use cylinder::{secp256k1::Secp256k1Context, Context, Signer}; use rand::{distributions::Alphanumeric, prelude::*}; use sha2::{Digest, Sha512}; static FAMILY_NAME: &str = "xo"; static FAMILY_VERSION: &str = "0.1"; static NONCE_SIZE: usize = 32; pub struct XoTransactionWorkload { rng: StdRng, signer: Box<dyn Signer>, } impl Default for XoTransactionWorkload { fn default() -> Self { Self::new(None, None) } } impl XoTransactionWorkload { pub fn new(seed: Option<u64>, signer: Option<Box<dyn Signer>>) -> Self { let rng = match seed { Some(seed) => StdRng::seed_from_u64(seed), None => StdRng::from_entropy(), }; let signer = signer.unwrap_or_else(new_signer); XoTransactionWorkload { rng, signer } } } impl TransactionWorkload for XoTransactionWorkload { fn next_transaction( &mut self, ) -> Result<(TransactionPair, Option<ExpectedBatchResult>), InvalidStateError> { let nonce = std::iter::repeat(()) .map(|()| self.rng.sample(Alphanumeric)) .map(char::from) .take(NONCE_SIZE) .collect::<String>() .into_bytes(); let payload = Payload::new_as_create_with_random_name(&mut self.rng); Ok(( TransactionBuilder::new() .with_family_name(FAMILY_NAME.to_string()) .with_family_version(FAMILY_VERSION.to_string()) .with_inputs(payload.inputs()) .with_outputs(payload.outputs()) .with_nonce(nonce) .with_payload(payload.bytes()) .with_payload_hash_method(HashMethod::Sha512) .build_pair(&*self.signer) .map_err(|err| { InvalidStateError::with_message(format!( "Failed to build transaction pair: {}", err )) })?, None, )) } } pub struct XoBatchWorkload { transaction_workload: XoTransactionWorkload, signer: Box<dyn 
Signer>, } impl Default for XoBatchWorkload { fn default() -> Self { Self::new(None, None) } } impl XoBatchWorkload { pub fn new(seed: Option<u64>, signer: Option<Box<dyn Signer>>) -> Self { let signer = signer.unwrap_or_else(new_signer); XoBatchWorkload { transaction_workload: XoTransactionWorkload::new(seed, Some(signer.clone())), signer, } } } impl BatchWorkload for XoBatchWorkload { fn next_batch( &mut self, ) -> Result<(BatchPair, Option<ExpectedBatchResult>), InvalidStateError> { let (txn, result) = self.transaction_workload.next_transaction()?; Ok(( BatchBuilder::new() .with_transactions(vec![txn.take().0]) .build_pair(&*self.signer) .map_err(|err| { InvalidStateError::with_message(format!("Failed to build batch pair: {}", err)) })?, result, )) } } enum Action { Create, } struct Payload { name: String, action: Action, } impl Payload { pub fn new_as_create_with_random_name(rnd: &mut StdRng) -> Self { let length = rnd.gen_range(5..20); Payload::new_as_create( rnd.sample_iter(&Alphanumeric) .map(char::from) .take(length) .collect::<String>() .as_str(), ) } pub fn new_as_create(name: &str) -> Self { Payload { name: String::from(name), action: Action::Create, } } pub fn bytes(&self) -> Vec<u8> { match self.action { Action::Create => format!("create,{},", self.name), } .into_bytes() } fn address(&self) -> Vec<u8> { let mut address = Vec::new(); let mut prefix_sha = Sha512::new(); prefix_sha.update(FAMILY_NAME); address.append(&mut prefix_sha.finalize()[..6].to_vec()); let mut name_sha = Sha512::new(); name_sha.update(&self.name); address.append(&mut name_sha.finalize()[..64].to_vec()); address } pub fn inputs(&self) -> Vec<Vec<u8>> { vec![self.address()] } pub fn outputs(&self) -> Vec<Vec<u8>> { vec![self.address()] } } fn new_signer() -> Box<dyn Signer> { let context = Secp256k1Context::new(); let key = context.new_random_private_key(); context.new_signer(key) } #[cfg(test)] mod tests { use super::*; use crate::workload::tests::test_batch_workload; use 
crate::workload::tests::test_transaction_workload; #[test] fn test_xo_transaction_workload() { let mut workload = XoTransactionWorkload::default(); test_transaction_workload(&mut workload) } #[test] fn test_xo_batch_workload() { let mut workload = XoBatchWorkload::default(); test_batch_workload(&mut workload) } }
use crate::error::InvalidStateError; use crate::protocol::batch::BatchBuilder; use crate::protocol::batch::BatchPair; use crate::protocol::transaction::HashMethod; use crate::protocol::transaction::TransactionBuilder; use crate::protocol::transaction::TransactionPair; use crate::workload::BatchWorkload; use crate::workload::ExpectedBatchResult; use crate::workload::TransactionWorkload; use cylinder::{secp256k1::Secp256k1Context, Context, Signer}; use rand::{distributions::Alphanumeric, prelude::*}; use sha2::{Digest, Sha512}; static FAMILY_NAME: &str = "xo"; static FAMILY_VERSION: &str = "0.1"; static NONCE_SIZ
p(|()| self.rng.sample(Alphanumeric)) .map(char::from) .take(NONCE_SIZE) .collect::<String>() .into_bytes(); let payload = Payload::new_as_create_with_random_name(&mut self.rng); Ok(( TransactionBuilder::new() .with_family_name(FAMILY_NAME.to_string()) .with_family_version(FAMILY_VERSION.to_string()) .with_inputs(payload.inputs()) .with_outputs(payload.outputs()) .with_nonce(nonce) .with_payload(payload.bytes()) .with_payload_hash_method(HashMethod::Sha512) .build_pair(&*self.signer) .map_err(|err| { InvalidStateError::with_message(format!( "Failed to build transaction pair: {}", err )) })?, None, )) } } pub struct XoBatchWorkload { transaction_workload: XoTransactionWorkload, signer: Box<dyn Signer>, } impl Default for XoBatchWorkload { fn default() -> Self { Self::new(None, None) } } impl XoBatchWorkload { pub fn new(seed: Option<u64>, signer: Option<Box<dyn Signer>>) -> Self { let signer = signer.unwrap_or_else(new_signer); XoBatchWorkload { transaction_workload: XoTransactionWorkload::new(seed, Some(signer.clone())), signer, } } } impl BatchWorkload for XoBatchWorkload { fn next_batch( &mut self, ) -> Result<(BatchPair, Option<ExpectedBatchResult>), InvalidStateError> { let (txn, result) = self.transaction_workload.next_transaction()?; Ok(( BatchBuilder::new() .with_transactions(vec![txn.take().0]) .build_pair(&*self.signer) .map_err(|err| { InvalidStateError::with_message(format!("Failed to build batch pair: {}", err)) })?, result, )) } } enum Action { Create, } struct Payload { name: String, action: Action, } impl Payload { pub fn new_as_create_with_random_name(rnd: &mut StdRng) -> Self { let length = rnd.gen_range(5..20); Payload::new_as_create( rnd.sample_iter(&Alphanumeric) .map(char::from) .take(length) .collect::<String>() .as_str(), ) } pub fn new_as_create(name: &str) -> Self { Payload { name: String::from(name), action: Action::Create, } } pub fn bytes(&self) -> Vec<u8> { match self.action { Action::Create => format!("create,{},", self.name), } 
.into_bytes() } fn address(&self) -> Vec<u8> { let mut address = Vec::new(); let mut prefix_sha = Sha512::new(); prefix_sha.update(FAMILY_NAME); address.append(&mut prefix_sha.finalize()[..6].to_vec()); let mut name_sha = Sha512::new(); name_sha.update(&self.name); address.append(&mut name_sha.finalize()[..64].to_vec()); address } pub fn inputs(&self) -> Vec<Vec<u8>> { vec![self.address()] } pub fn outputs(&self) -> Vec<Vec<u8>> { vec![self.address()] } } fn new_signer() -> Box<dyn Signer> { let context = Secp256k1Context::new(); let key = context.new_random_private_key(); context.new_signer(key) } #[cfg(test)] mod tests { use super::*; use crate::workload::tests::test_batch_workload; use crate::workload::tests::test_transaction_workload; #[test] fn test_xo_transaction_workload() { let mut workload = XoTransactionWorkload::default(); test_transaction_workload(&mut workload) } #[test] fn test_xo_batch_workload() { let mut workload = XoBatchWorkload::default(); test_batch_workload(&mut workload) } }
E: usize = 32; pub struct XoTransactionWorkload { rng: StdRng, signer: Box<dyn Signer>, } impl Default for XoTransactionWorkload { fn default() -> Self { Self::new(None, None) } } impl XoTransactionWorkload { pub fn new(seed: Option<u64>, signer: Option<Box<dyn Signer>>) -> Self { let rng = match seed { Some(seed) => StdRng::seed_from_u64(seed), None => StdRng::from_entropy(), }; let signer = signer.unwrap_or_else(new_signer); XoTransactionWorkload { rng, signer } } } impl TransactionWorkload for XoTransactionWorkload { fn next_transaction( &mut self, ) -> Result<(TransactionPair, Option<ExpectedBatchResult>), InvalidStateError> { let nonce = std::iter::repeat(()) .ma
random
[ { "content": "fn create_batch(signer: &dyn Signer, game_name: &str, payload: &str) -> BatchPair {\n\n let mut sha = Sha512::default();\n\n sha.update(game_name);\n\n let game_address = \"5b7349\".to_owned() + &hex::encode(&sha.finalize())[..64];\n\n let txn_pair = TransactionBuilder::new()\n\n .with_family_name(\"xo\".to_string())\n\n .with_family_version(\"1.0\".to_string())\n\n .with_inputs(vec![hex::decode(&game_address).unwrap()])\n\n .with_nonce(b\"test_nonce\".to_vec())\n\n .with_outputs(vec![hex::decode(&game_address).unwrap()])\n\n .with_payload_hash_method(HashMethod::Sha512)\n\n .with_payload(payload.as_bytes().to_vec())\n\n .build_pair(signer)\n\n .expect(\"The TransactionBuilder was not given the correct items\");\n\n\n\n BatchBuilder::new()\n\n .with_transactions(vec![txn_pair.take().0])\n\n .build_pair(signer)\n\n .expect(\"Unable to build batch a pair\")\n\n}\n\n\n", "file_path": "examples/simple_xo/src/main.rs", "rank": 0, "score": 184088.60790447064 }, { "content": "// load signing key from key file\n\nfn load_cylinder_signer_key(key_name: &str) -> Result<Box<dyn Signer>, CliError> {\n\n let private_key = if key_name.contains('/') {\n\n load_key_from_path(Path::new(key_name))\n\n .map_err(|err| CliError::ActionError(err.to_string()))?\n\n } else {\n\n let path = &current_user_search_path();\n\n load_key(key_name, path)\n\n .map_err(|err| CliError::ActionError(err.to_string()))?\n\n .ok_or_else(|| {\n\n CliError::ActionError({\n\n format!(\n\n \"No signing key found in {}. 
Specify the --key argument\",\n\n path.iter()\n\n .map(|path| path.as_path().display().to_string())\n\n .collect::<Vec<String>>()\n\n .join(\":\")\n\n )\n\n })\n\n })?\n\n };\n\n\n\n let context = Secp256k1Context::new();\n\n Ok(context.new_signer(private_key))\n\n}\n", "file_path": "cli/src/action/mod.rs", "rank": 1, "score": 120977.86239351961 }, { "content": "fn new_signer() -> Box<dyn Signer> {\n\n let context = Secp256k1Context::new();\n\n let key = context.new_random_private_key();\n\n context.new_signer(key)\n\n}\n\n\n", "file_path": "examples/simple_xo/src/main.rs", "rank": 3, "score": 118592.66168889157 }, { "content": "fn print_current_state(value: &[u8]) -> &str {\n\n let val = match str::from_utf8(value) {\n\n Ok(v) => v,\n\n Err(e) => panic!(\"Invalid UTF-8 sequence: {}\", e),\n\n };\n\n\n\n let split: Vec<&str> = val.split(',').collect();\n\n println!(\"Board:\");\n\n println!(\n\n \"\\t {} | {} | {} \",\n\n &split[1][0..1],\n\n &split[1][1..2],\n\n &split[1][2..3]\n\n );\n\n println!(\"\\t ---|---|---\");\n\n println!(\n\n \"\\t {} | {} | {} \",\n\n &split[1][3..4],\n\n &split[1][4..5],\n\n &split[1][5..6]\n", "file_path": "examples/simple_xo/src/main.rs", "rank": 4, "score": 103635.84447372612 }, { "content": "/// Given a path, split it into its parent's path and the specific branch for\n\n/// this path, such that the following assertion is true:\n\nfn parent_and_branch(path: &str) -> (&str, &str) {\n\n let parent_address = if !path.is_empty() {\n\n &path[..path.len() - TOKEN_SIZE]\n\n } else {\n\n \"\"\n\n };\n\n\n\n let path_branch = if !path.is_empty() {\n\n &path[(path.len() - TOKEN_SIZE)..]\n\n } else {\n\n \"\"\n\n };\n\n\n\n (parent_address, path_branch)\n\n}\n\n\n", "file_path": "libtransact/src/state/merkle/kv/mod.rs", "rank": 5, "score": 97471.38097437627 }, { "content": "/// Given a path, split it into its parent's path and the specific branch for\n\n/// this path, such that the following assertion is true:\n\nfn parent_and_branch(path: 
&str) -> (&str, &str) {\n\n let parent_address = if !path.is_empty() {\n\n &path[..path.len() - TOKEN_SIZE]\n\n } else {\n\n \"\"\n\n };\n\n\n\n let path_branch = if !path.is_empty() {\n\n &path[(path.len() - TOKEN_SIZE)..]\n\n } else {\n\n \"\"\n\n };\n\n\n\n (parent_address, path_branch)\n\n}\n", "file_path": "libtransact/src/state/merkle/sql/mod.rs", "rank": 6, "score": 97471.38097437627 }, { "content": "struct StaticContext<'a, 'b> {\n\n context_manager: &'a ContextManager,\n\n context_id: &'b ContextId,\n\n}\n\n\n\nimpl<'a, 'b> StaticContext<'a, 'b> {\n\n fn new(context_manager: &'a ContextManager, context_id: &'b ContextId) -> Self {\n\n StaticContext {\n\n context_manager,\n\n context_id,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, 'b> TransactionContext for StaticContext<'a, 'b> {\n\n fn get_state_entries(\n\n &self,\n\n addresses: &[String],\n\n ) -> Result<Vec<(String, Vec<u8>)>, ContextError> {\n\n self.context_manager\n", "file_path": "libtransact/src/execution/adapter/static_adapter.rs", "rank": 7, "score": 94274.13799386307 }, { "content": "fn create_executor(context_manager: &ContextManager) -> Executor {\n\n Executor::new(vec![Box::new(\n\n StaticExecutionAdapter::new_adapter(\n\n vec![Box::new(SawtoothToTransactHandlerAdapter::new(\n\n XoTransactionHandler::new(),\n\n ))],\n\n context_manager.clone(),\n\n )\n\n .expect(\"Unable to create static execution adapter\"),\n\n )])\n\n}\n\n\n", "file_path": "examples/simple_xo/src/main.rs", "rank": 8, "score": 86136.0292072775 }, { "content": "/// Splits an address into tokens\n\nfn tokenize_address(address: &str) -> Box<[&str]> {\n\n let mut tokens: Vec<&str> = Vec::with_capacity(address.len() / TOKEN_SIZE);\n\n let mut i = 0;\n\n while i < address.len() {\n\n tokens.push(&address[i..i + TOKEN_SIZE]);\n\n i += TOKEN_SIZE;\n\n }\n\n tokens.into_boxed_slice()\n\n}\n\n\n", "file_path": "libtransact/src/state/merkle/kv/mod.rs", "rank": 9, "score": 85604.6182897982 }, { "content": "// Validate that the metadata collected 
from the manifest contains a contract name which matches\n\n// the name of the scar file. This includes swapping any underscores which appear in the contract\n\n// name with dashes, as underscores are not allowed in scar file names.\n\nfn validate_metadata(file_name: &str, contract_name: &str) -> Result<(), Error> {\n\n if file_name != contract_name.replace(\"_\", \"-\") {\n\n return Err(Error::new(&format!(\n\n \"scar file name `{}` does not match contract name in manifest `{}`\",\n\n file_name, contract_name,\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "libtransact/src/contract/archive/mod.rs", "rank": 10, "score": 79831.78161675614 }, { "content": "fn batch_transactions(txns: Vec<Transaction>, signer: &dyn Signer) -> BatchResult {\n\n BatchBuilder::new()\n\n .with_transactions(txns)\n\n .build(signer)\n\n .map_err(|_| {\n\n BatchingError::InvalidStateError(InvalidStateError::with_message(\n\n \"Failed to build batch\".into(),\n\n ))\n\n })\n\n}\n\n\n", "file_path": "libtransact/src/workload/batch_gen.rs", "rank": 11, "score": 78742.20072273709 }, { "content": "fn create_signer() -> Box<dyn Signer> {\n\n let context = Secp256k1Context::new();\n\n let key = context.new_random_private_key();\n\n context.new_signer(key)\n\n}\n\n\n", "file_path": "examples/sabre_command_executor/src/main.rs", "rank": 12, "score": 78700.70046450477 }, { "content": "fn assert_value_at_address(merkle_db: &MerkleRadixTree, address: &str, expected_value: &str) {\n\n let value = merkle_db.get_value(address);\n\n match value {\n\n Ok(Some(value)) => assert_eq!(\n\n expected_value,\n\n from_utf8(&value).expect(\"could not convert bytes to string\")\n\n ),\n\n Ok(None) => panic!(\"value at address {} was not found\", address),\n\n Err(err) => panic!(\"value at address {} produced an error: {}\", address, err),\n\n }\n\n}\n\n\n", "file_path": "libtransact/tests/state/merkle/mod.rs", "rank": 13, "score": 77566.6542487164 }, { "content": "/// Compute a state address for a given 
contract.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `name` - the name of the contract\n\n/// * `version` - the version of the contract\n\npub fn compute_contract_address(name: &str, version: &str) -> Result<Vec<u8>, AddressingError> {\n\n let s = String::from(name) + \",\" + version;\n\n let hash = sha512_hash(s.as_bytes());\n\n Ok([CONTRACT_ADDRESS_PREFIX_BYTES, &hash[..32]].concat())\n\n}\n\n\n", "file_path": "libtransact/src/protocol/sabre.rs", "rank": 14, "score": 74377.0169022069 }, { "content": "fn find_scar<P: AsRef<Path>>(name: &str, version: &str, paths: &[P]) -> Result<PathBuf, Error> {\n\n let file_name_pattern = format!(\"{}_*.scar\", name);\n\n\n\n validate_scar_file_name(name)?;\n\n\n\n let version_req = VersionReq::parse(version)?;\n\n\n\n // Start with all scar files that match the name, from all paths\n\n paths\n\n .iter()\n\n .map(|path| {\n\n let file_path_pattern = path.as_ref().join(&file_name_pattern);\n\n let pattern_string = file_path_pattern\n\n .to_str()\n\n .ok_or_else(|| Error::new(\"name is not valid UTF-8\"))?;\n\n Ok(glob(pattern_string)?)\n\n })\n\n .collect::<Result<Vec<_>, Error>>()?\n\n .into_iter()\n\n .flatten()\n", "file_path": "libtransact/src/contract/archive/mod.rs", "rank": 15, "score": 69173.74872833531 }, { "content": "fn load_yaml_array(yaml_str: &str) -> Result<Cow<Vec<Yaml>>, PlaylistError> {\n\n let mut yaml = YamlLoader::load_from_str(yaml_str).map_err(PlaylistError::YamlInputError)?;\n\n let element = yaml.remove(0);\n\n let yaml_array = element.as_vec().cloned().unwrap();\n\n\n\n Ok(Cow::Owned(yaml_array))\n\n}\n\n\n\npub struct SmallbankGeneratingIter {\n\n num_accounts: usize,\n\n current_account: usize,\n\n rng: StdRng,\n\n accounts: Vec<u32>,\n\n}\n\n\n\nimpl SmallbankGeneratingIter {\n\n pub fn new(num_accounts: usize, seed: u64) -> Self {\n\n SmallbankGeneratingIter {\n\n num_accounts,\n\n current_account: 0,\n", "file_path": "libtransact/src/families/smallbank/workload/playlist.rs", "rank": 16, "score": 
66147.57722001249 }, { "content": "fn create_smallbank_address(payload: &str) -> String {\n\n let mut sha = Sha512::new();\n\n sha.update(payload.as_bytes());\n\n get_smallbank_prefix() + &hex::encode(&sha.finalize())[..64]\n\n}\n", "file_path": "libtransact/src/families/smallbank/handler.rs", "rank": 17, "score": 65726.14047197305 }, { "content": "fn save_account(account: &Account, context: &mut dyn TransactionContext) -> Result<(), ApplyError> {\n\n let address = create_smallbank_address(&format!(\"{}\", account.get_customer_id()));\n\n let data = protobuf::Message::write_to_bytes(account).map_err(|err| {\n\n warn!(\n\n \"Invalid transaction: Failed to serialize Account: {:?}\",\n\n err\n\n );\n\n ApplyError::InvalidTransaction(format!(\"Failed to serialize Account: {:?}\", err))\n\n })?;\n\n\n\n context.set_state_entry(address, data).map_err(|err| {\n\n warn!(\"Invalid transaction: Failed to save Account: {:?}\", err);\n\n ApplyError::InvalidTransaction(format!(\"Failed to save Account: {:?}\", err))\n\n })\n\n}\n\n\n", "file_path": "libtransact/src/families/smallbank/handler.rs", "rank": 18, "score": 65488.74442278409 }, { "content": "// Validate that the scar file name does not contain underscores, otherwise return an error.\n\nfn validate_scar_file_name(name: &str) -> Result<(), Error> {\n\n if name.contains('_') {\n\n return Err(Error::new(&format!(\n\n \"invalid scar file name, must not include '_': {}\",\n\n name\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "libtransact/src/contract/archive/mod.rs", "rank": 19, "score": 61782.53493035418 }, { "content": "fn make_lmdb(merkle_path: &str) -> Box<dyn Database> {\n\n let ctx = LmdbContext::new(\n\n Path::new(merkle_path),\n\n INDEXES.len(),\n\n Some(120 * 1024 * 1024),\n\n )\n\n .map_err(|err| DatabaseError::InitError(format!(\"{}\", err)))\n\n .unwrap();\n\n Box::new(\n\n LmdbDatabase::new(ctx, &INDEXES)\n\n .map_err(|err| DatabaseError::InitError(format!(\"{}\", err)))\n\n .unwrap(),\n\n )\n\n}\n\n\n", 
"file_path": "libtransact/tests/state/merkle/lmdb.rs", "rank": 20, "score": 60050.75695173166 }, { "content": "fn new_lmdb_state_and_root(lmdb_path: &str) -> (MerkleState, String) {\n\n let lmdb_db = make_lmdb(lmdb_path);\n\n let merkle_state = MerkleState::new(lmdb_db.clone());\n\n\n\n let merkle_db = MerkleRadixTree::new(lmdb_db, None)\n\n .expect(\"Could not overlay the merkle tree on the database\");\n\n\n\n let orig_root = merkle_db.get_merkle_root();\n\n\n\n (merkle_state, orig_root)\n\n}\n\n\n", "file_path": "libtransact/tests/state/merkle/lmdb.rs", "rank": 21, "score": 60028.59914858086 }, { "content": "fn new_sqlite_state_and_root(db_path: &str) -> (MerkleState, String) {\n\n let db = Box::new(\n\n SqliteDatabase::new(&db_path, &INDEXES).expect(\"Unable to create Sqlite database\"),\n\n );\n\n let merkle_state = MerkleState::new(db.clone());\n\n\n\n let merkle_db =\n\n MerkleRadixTree::new(db, None).expect(\"Could not overlay the merkle tree on the database\");\n\n\n\n let orig_root = merkle_db.get_merkle_root();\n\n\n\n (merkle_state, orig_root)\n\n}\n\n\n", "file_path": "libtransact/tests/state/merkle/sqlitedb.rs", "rank": 22, "score": 60028.59914858086 }, { "content": "fn parse_hex(hex: &str) -> Result<Vec<u8>, AddressingError> {\n\n if hex.len() % 2 != 0 {\n\n return Err(AddressingError::InvalidInput(format!(\n\n \"hex string has odd number of digits: {}\",\n\n hex\n\n )));\n\n }\n\n\n\n let mut res = vec![];\n\n for i in (0..hex.len()).step_by(2) {\n\n res.push(u8::from_str_radix(&hex[i..i + 2], 16).map_err(|_| {\n\n AddressingError::InvalidInput(format!(\"string contains invalid hex: {}\", hex))\n\n })?);\n\n }\n\n\n\n Ok(res)\n\n}\n\n\n", "file_path": "libtransact/src/protocol/sabre.rs", "rank": 23, "score": 58536.71124515946 }, { "content": "pub fn hash(hash_length: usize, key: &str) -> String {\n\n let mut sha = Sha512::new();\n\n sha.update(key.as_bytes());\n\n hex::encode(sha.finalize().to_vec())[..hash_length].to_string()\n\n}\n", "file_path": 
"libtransact/src/contract/address/mod.rs", "rank": 24, "score": 58536.71124515946 }, { "content": "/// Takes the original target URL and a batch status link and removes the overlapping portion of the\n\n/// target URL that is also given in the status link so that the two can be combined to create the\n\n/// full URL that can be used to check the status of the associated batch\n\n///\n\n/// For example, if the target is 'http://127.0.0.1:8080/service/12345-ABCDE/a000' and the\n\n/// status_link is '/service/12345-ABCDE/a000/batch_statuses?ids=6ff35474a572087e08fd6a54d56' the\n\n/// then the duplicate '/service/12345-ABCDE/a000' will be removed from the target making it\n\n/// 'http://127.0.0.1:8080' so that it can be combined with the status link to create the full URL\n\n/// that can be queried to get the batch status,\n\n/// 'http://127.0.0.1:8080/service/12345-ABCDE/a000/batch_statuses?ids=6ff35474a572087e08fd6a54d56'\n\nfn get_batch_status_url(mut target: String, status_link: &str) -> String {\n\n let status_link_parts = status_link.splitn(5, '/');\n\n for p in status_link_parts {\n\n if !p.is_empty() && target.contains(format!(\"/{}\", p).as_str()) {\n\n target = target.replacen(format!(\"/{}\", p).as_str(), \"\", 1);\n\n }\n\n }\n\n format!(\"{}{}\", target, status_link)\n\n}\n\n\n", "file_path": "libtransact/src/workload/runner.rs", "rank": 25, "score": 56785.828018246844 }, { "content": "fn to_context_error(err: ContextError) -> SawtoothContextError {\n\n SawtoothContextError::ReceiveError(Box::new(err))\n\n}\n\n\n\n#[cfg(test)]\n\nmod xo_compat_test {\n\n use std::panic;\n\n\n\n use cylinder::{secp256k1::Secp256k1Context, Context, Signer};\n\n use sawtooth_xo::handler::XoTransactionHandler;\n\n use sha2::{Digest, Sha512};\n\n\n\n use crate::context::manager::sync::ContextManager;\n\n use crate::database::{btree::BTreeDatabase, Database};\n\n use crate::execution::{\n\n adapter::static_adapter::StaticExecutionAdapter,\n\n executor::{ExecutionTaskSubmitter, 
Executor},\n\n };\n\n use crate::protocol::{\n\n batch::{BatchBuilder, BatchPair},\n", "file_path": "libtransact/src/sawtooth.rs", "rank": 26, "score": 56392.57699032617 }, { "content": "/// Compute a state address for a given namespace registry.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `namespace` - the address prefix for this namespace\n\npub fn compute_namespace_registry_address(namespace: &str) -> Result<Vec<u8>, AddressingError> {\n\n let prefix = match namespace.get(..6) {\n\n Some(x) => x,\n\n None => {\n\n return Err(AddressingError::InvalidInput(format!(\n\n \"namespace '{}' is less than 6 characters long\",\n\n namespace,\n\n )));\n\n }\n\n };\n\n let hash = sha512_hash(prefix.as_bytes());\n\n Ok([NAMESPACE_REGISTRY_ADDRESS_PREFIX_BYTES, &hash[..32]].concat())\n\n}\n\n\n", "file_path": "libtransact/src/protocol/sabre.rs", "rank": 27, "score": 54633.483030267154 }, { "content": "/// Compute a state address for a given contract registry.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `name` - the name of the contract registry\n\npub fn compute_contract_registry_address(name: &str) -> Result<Vec<u8>, AddressingError> {\n\n let hash = sha512_hash(name.as_bytes());\n\n Ok([CONTRACT_REGISTRY_ADDRESS_PREFIX_BYTES, &hash[..32]].concat())\n\n}\n\n\n", "file_path": "libtransact/src/protocol/sabre.rs", "rank": 28, "score": 54633.483030267154 }, { "content": "fn parse_bytes_entry(bytes_entry: &str) -> Result<(String, Vec<u8>), CliError> {\n\n let mut parts = bytes_entry.splitn(2, ':');\n\n match (parts.next(), parts.next()) {\n\n (Some(key), Some(value)) => match key {\n\n \"\" => Err(CliError::ActionError(\n\n \"Empty '--bytes-entry' argument detected\".into(),\n\n )),\n\n _ => match value {\n\n \"\" => Err(CliError::ActionError(format!(\n\n \"Empty value detected for address: {}\",\n\n key\n\n ))),\n\n _ => Ok((key.to_string(), value.as_bytes().to_vec())),\n\n },\n\n },\n\n (Some(key), None) => Err(CliError::ActionError(format!(\n\n \"Missing value for address 
'{}'\",\n\n key\n\n ))),\n\n _ => unreachable!(), // splitn always returns at least one item\n\n }\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct ServerError {\n\n pub message: String,\n\n}\n", "file_path": "cli/src/action/command.rs", "rank": 29, "score": 54633.483030267154 }, { "content": "/// ContextManager functionality used by the Scheduler.\n\npub trait ContextLifecycle: Send {\n\n /// Create a new Context, returning a unique ContextId.\n\n fn create_context(&mut self, dependent_contexts: &[ContextId], state_id: &str) -> ContextId;\n\n\n\n fn drop_context(&mut self, context_id: ContextId) -> Result<(), InternalError>;\n\n\n\n fn get_transaction_receipt(\n\n &self,\n\n context_id: &ContextId,\n\n transaction_id: &str,\n\n ) -> Result<TransactionReceipt, ContextManagerError>;\n\n\n\n /// Clone implementation for `ContextLifecycle`. The implementation of the `Clone` trait for\n\n /// `Box<dyn ContextLifecycle>` calls this method.\n\n fn clone_box(&self) -> Box<dyn ContextLifecycle>;\n\n}\n\n\n\nimpl Clone for Box<dyn ContextLifecycle> {\n\n fn clone(&self) -> Box<dyn ContextLifecycle> {\n\n self.clone_box()\n", "file_path": "libtransact/src/context/mod.rs", "rank": 30, "score": 52985.8502495589 }, { "content": "fn read_yaml(input: &mut dyn Read) -> Result<Cow<str>, PlaylistError> {\n\n let mut buf: String = String::new();\n\n input\n\n .read_to_string(&mut buf)\n\n .map_err(PlaylistError::IoError)?;\n\n Ok(buf.into())\n\n}\n\n\n", "file_path": "libtransact/src/families/smallbank/workload/playlist.rs", "rank": 31, "score": 52671.15095767249 }, { "content": "enum StaticAdapterCommand {\n\n Start(Box<dyn ExecutionRegistry>),\n\n Stop,\n\n Execute(Box<(TransactionPair, ContextId, OnDoneCallback)>),\n\n}\n\n\n", "file_path": "libtransact/src/execution/adapter/static_adapter.rs", "rank": 32, "score": 52403.49775138827 }, { "content": "/// Fetch a node by its hash\n\nfn get_node_by_hash(db: &dyn Database, hash: &str) -> Result<Node, StateDatabaseError> {\n\n match 
db.get_reader()?.get(hash.as_bytes())? {\n\n Some(bytes) => Node::from_bytes(&bytes).map_err(StateDatabaseError::from),\n\n None => Err(StateDatabaseError::NotFound(hash.to_string())),\n\n }\n\n}\n\n\n", "file_path": "libtransact/src/state/merkle/kv/mod.rs", "rank": 33, "score": 51221.69533623378 }, { "content": "fn write_vec_as_hex(f: &mut fmt::Formatter, field_name: &str, data: &[Vec<u8>]) -> fmt::Result {\n\n write!(f, \"{}: [\", field_name)?;\n\n f.write_str(\n\n &data\n\n .iter()\n\n .map(|datum| format!(\"{:?}\", hex::encode(datum)))\n\n .collect::<Vec<_>>()\n\n .join(\", \"),\n\n )?;\n\n f.write_str(\"]\")\n\n}\n\n\n\nimpl From<hex::FromHexError> for ProtoConversionError {\n\n fn from(e: hex::FromHexError) -> Self {\n\n ProtoConversionError::SerializationError(format!(\"{}\", e))\n\n }\n\n}\n\n\n\nimpl From<std::string::FromUtf8Error> for ProtoConversionError {\n\n fn from(e: std::string::FromUtf8Error) -> Self {\n", "file_path": "libtransact/src/protocol/transaction.rs", "rank": 34, "score": 49053.029377067134 }, { "content": " }\n\n false\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::protocol::receipt::StateChange;\n\n\n\n static KEY1: &str = \"111111111111111111111111111111111111111111111111111111111111111111\";\n\n static KEY2: &str = \"222222222222222222222222222222222222222222222222222222222222222222\";\n\n static KEY3: &str = \"333333333333333333333333333333333333333333333333333333333333333333\";\n\n static BYTES1: [u8; 4] = [0x01, 0x02, 0x03, 0x04];\n\n static BYTES2: [u8; 4] = [0x05, 0x06, 0x07, 0x08];\n\n static BYTES3: [u8; 4] = [0x09, 0x0a, 0x0b, 0x0c];\n\n\n\n #[test]\n\n fn get_state() {\n\n let first_key = &KEY1.to_string();\n", "file_path": "libtransact/src/context/mod.rs", "rank": 43, "score": 46336.20514437885 }, { "content": " }\n\n}\n\n\n\nimpl std::fmt::Display for ContextId {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n 
f.write_str(&Uuid::from_bytes(self.0).to_simple_ref().to_string())\n\n }\n\n}\n\n\n\n/// ContextManager functionality used by the Scheduler.\n", "file_path": "libtransact/src/context/mod.rs", "rank": 44, "score": 46333.73674898804 }, { "content": "//! previous contexts. The context manager implements the context lifecycle and services the calls\n\n//! that read, write, and delete data from state.\n\n\n\nmod error;\n\npub mod manager;\n\n\n\nuse crate::context::manager::ContextManagerError;\n\nuse crate::error::InternalError;\n\nuse crate::protocol::receipt::{Event, StateChange, TransactionReceipt};\n\nuse std::mem;\n\nuse uuid::Uuid;\n\n\n\n/// Unique id that references a \"Context\" from which a `Transaction` can query state and\n\n/// modify events, data, and state.\n\n#[derive(Clone, Copy, Hash, PartialEq, Eq)]\n\npub struct ContextId([u8; 16]);\n\n\n\nimpl ContextId {\n\n fn new_random() -> ContextId {\n\n ContextId(*Uuid::new_v4().as_bytes())\n", "file_path": "libtransact/src/context/mod.rs", "rank": 45, "score": 46331.52031552085 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, Clone, Default)]\n\npub struct Context {\n\n base_contexts: Vec<ContextId>,\n\n state_changes: Vec<StateChange>,\n\n id: ContextId,\n\n data: Vec<Vec<u8>>,\n\n events: Vec<Event>,\n\n state_id: String,\n\n}\n\n\n\nimpl Context {\n\n pub fn new(state_id: &str, base_contexts: Vec<ContextId>) -> Self {\n\n Context {\n\n base_contexts,\n\n state_changes: Vec::new(),\n\n id: ContextId::new_random(),\n\n data: Vec::new(),\n", "file_path": "libtransact/src/context/mod.rs", "rank": 46, "score": 46329.62479984266 }, { "content": "\n\n#[derive(Debug)]\n\npub enum ContextManagerError {\n\n MissingContextError(String),\n\n TransactionReceiptBuilderError(TransactionReceiptBuilderError),\n\n StateReadError(StateReadError),\n\n}\n\n\n\nimpl Error for ContextManagerError {\n\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n\n match *self {\n\n 
ContextManagerError::MissingContextError(_) => Some(self),\n\n ContextManagerError::TransactionReceiptBuilderError(ref err) => Some(err),\n\n ContextManagerError::StateReadError(ref err) => Some(err),\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for ContextManagerError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n", "file_path": "libtransact/src/context/error.rs", "rank": 47, "score": 46328.87030204608 }, { "content": " });\n\n }\n\n None\n\n }\n\n\n\n /// Checks to see if the Key is referenced by any StateChanges within the Context\n\n pub fn contains(&self, key: &str) -> bool {\n\n for state_change in self.state_changes().iter().rev() {\n\n match state_change {\n\n StateChange::Set { key: k, .. } => {\n\n if k == key {\n\n return true;\n\n }\n\n }\n\n StateChange::Delete { key: k } => {\n\n if k == key {\n\n return false;\n\n }\n\n }\n\n }\n", "file_path": "libtransact/src/context/mod.rs", "rank": 48, "score": 46327.68815255817 }, { "content": "/*\n\n * Copyright 2019 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n//! Transaction context management.\n\n//!\n\n//! In Transact, state reads and writes are scoped (sandboxed) to a specific \"context\" that\n\n//! 
contains a reference to a state ID (such as a Merkle-Radix state root hash) and one or more\n", "file_path": "libtransact/src/context/mod.rs", "rank": 49, "score": 46327.0012596425 }, { "content": " assert!(context.contains(&KEY1));\n\n\n\n context.set_state(KEY2.to_string(), BYTES3.to_vec());\n\n let deleted_value = context.delete_state(&KEY1);\n\n assert_eq!(deleted_value, Some(BYTES2.to_vec()));\n\n\n\n assert!(context.contains(&KEY2));\n\n assert!(!context.contains(&KEY1));\n\n }\n\n\n\n #[test]\n\n fn verify_state_changes() {\n\n let mut context = Context::new(&KEY3, Vec::new());\n\n context.set_state(KEY1.to_string(), BYTES1.to_vec());\n\n context.set_state(KEY1.to_string(), BYTES2.to_vec());\n\n context.set_state(KEY2.to_string(), BYTES3.to_vec());\n\n assert_eq!(context.state_changes().len(), 3);\n\n\n\n let deleted_value = context.delete_state(&KEY1);\n\n assert_ne!(deleted_value, Some(BYTES3.to_vec()));\n", "file_path": "libtransact/src/context/mod.rs", "rank": 50, "score": 46326.11909571574 }, { "content": " }\n\n}\n\n\n\nimpl From<[u8; 16]> for ContextId {\n\n fn from(bytes: [u8; 16]) -> Self {\n\n ContextId(bytes)\n\n }\n\n}\n\n\n\nimpl Default for ContextId {\n\n fn default() -> Self {\n\n ContextId(*Uuid::nil().as_bytes())\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for ContextId {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n f.debug_tuple(\"ContextId\")\n\n .field(&Uuid::from_bytes(self.0).to_simple_ref())\n\n .finish()\n", "file_path": "libtransact/src/context/mod.rs", "rank": 51, "score": 46325.91583792958 }, { "content": " match *self {\n\n ContextManagerError::MissingContextError(ref s) => {\n\n write!(f, \"Unable to find specified Context: {:?}\", s)\n\n }\n\n ContextManagerError::TransactionReceiptBuilderError(ref err) => {\n\n write!(f, \"A TransactionReceiptBuilder error occured: {}\", err)\n\n }\n\n ContextManagerError::StateReadError(ref err) => {\n\n write!(f, \"A State Read error occured: {}\", err)\n\n }\n\n }\n\n 
}\n\n}\n\n\n\nimpl From<TransactionReceiptBuilderError> for ContextManagerError {\n\n fn from(err: TransactionReceiptBuilderError) -> Self {\n\n ContextManagerError::TransactionReceiptBuilderError(err)\n\n }\n\n}\n\n\n\nimpl From<StateReadError> for ContextManagerError {\n\n fn from(err: StateReadError) -> Self {\n\n ContextManagerError::StateReadError(err)\n\n }\n\n}\n", "file_path": "libtransact/src/context/error.rs", "rank": 52, "score": 46325.85848414329 }, { "content": " events: Vec::new(),\n\n state_id: state_id.to_string(),\n\n }\n\n }\n\n\n\n pub fn base_contexts(&self) -> &[ContextId] {\n\n &self.base_contexts\n\n }\n\n\n\n pub fn events(&self) -> &Vec<Event> {\n\n &self.events\n\n }\n\n\n\n pub fn state_changes(&self) -> &Vec<StateChange> {\n\n &self.state_changes\n\n }\n\n\n\n pub fn id(&self) -> &ContextId {\n\n &self.id\n\n }\n", "file_path": "libtransact/src/context/mod.rs", "rank": 53, "score": 46325.72485422517 }, { "content": " let first_value = &BYTES1.to_vec();\n\n let base_contexts = Vec::new();\n\n let mut context = Context::new(&KEY3, base_contexts);\n\n context.set_state(first_key.to_string(), first_value.to_vec());\n\n assert!(context.contains(&first_key));\n\n let state_value = context.get_state(&first_key);\n\n assert_eq!(state_value, Some(first_value.as_slice()));\n\n }\n\n\n\n #[test]\n\n fn test_compare_state_change() {\n\n let first_set: StateChange = StateChange::Set {\n\n key: KEY1.to_string(),\n\n value: BYTES1.to_vec(),\n\n };\n\n let second_set: StateChange = StateChange::Set {\n\n key: KEY2.to_string(),\n\n value: BYTES2.to_vec(),\n\n };\n\n let delete_first: StateChange = StateChange::Delete {\n", "file_path": "libtransact/src/context/mod.rs", "rank": 54, "score": 46325.637471831426 }, { "content": " key: KEY1.to_string(),\n\n };\n\n let delete_second: StateChange = StateChange::Delete {\n\n key: KEY2.to_string(),\n\n };\n\n let first_set_key = KEY1.to_string();\n\n assert_eq!(first_set.has_key(&first_set_key), true);\n\n 
assert_eq!(second_set.has_key(&first_set_key), false);\n\n assert_eq!(delete_first.has_key(&first_set_key), true);\n\n assert_eq!(delete_second.has_key(&first_set_key), false);\n\n }\n\n\n\n #[test]\n\n fn test_contains() {\n\n let base_contexts = Vec::new();\n\n let mut context = Context::new(&KEY3, base_contexts);\n\n context.set_state(KEY1.to_string(), BYTES1.to_vec());\n\n assert!(context.contains(&KEY1));\n\n\n\n context.set_state(KEY1.to_string(), BYTES2.to_vec());\n", "file_path": "libtransact/src/context/mod.rs", "rank": 55, "score": 46325.58500798156 }, { "content": "\n\n assert_eq!(context.state_changes().len(), 3);\n\n let first_key_set = context\n\n .state_changes()\n\n .iter()\n\n .cloned()\n\n .find(|change| change.has_key(&KEY1));\n\n if let Some(StateChange::Set { key: k, value: v }) = first_key_set {\n\n assert_eq!(k, KEY1.to_string());\n\n assert_ne!(Some(v), deleted_value);\n\n }\n\n if let StateChange::Set { key: k, value: v } = &context.state_changes()[1] {\n\n assert_eq!(k, KEY1);\n\n assert_eq!(Some(v.clone()), deleted_value);\n\n }\n\n }\n\n}\n", "file_path": "libtransact/src/context/mod.rs", "rank": 56, "score": 46325.02089964019 }, { "content": "/*\n\n * Copyright 2019 Bitwise IO, Inc.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\nuse std::error::Error;\n\n\n\nuse 
crate::protocol::receipt::TransactionReceiptBuilderError;\n\nuse crate::state::error::StateReadError;\n", "file_path": "libtransact/src/context/error.rs", "rank": 57, "score": 46325.01194423128 }, { "content": "\n\n pub fn get_state(&self, key: &str) -> Option<&[u8]> {\n\n if let Some(StateChange::Set { value: v, .. }) = self\n\n .state_changes\n\n .iter()\n\n .rev()\n\n .find(|state_change| state_change.has_key(key))\n\n {\n\n return Some(v);\n\n }\n\n None\n\n }\n\n\n\n /// Adds StateChange::Set without deleting previous StateChanges associated with the Key\n\n pub fn set_state(&mut self, key: String, value: Vec<u8>) {\n\n let new_state_change = StateChange::Set { key, value };\n\n self.state_changes.push(new_state_change);\n\n }\n\n\n\n /// Adds StateChange::Delete and returns the value associated to the key being deleted\n", "file_path": "libtransact/src/context/mod.rs", "rank": 58, "score": 46322.8530648572 }, { "content": " pub fn delete_state(&mut self, key: &str) -> Option<Vec<u8>> {\n\n let found_state_change = self\n\n .state_changes\n\n .iter_mut()\n\n .rev()\n\n .find(|state_change| state_change.has_key(key));\n\n if let Some(StateChange::Set { .. }) = found_state_change {\n\n // If a StateChange::Set is found associated with the key, the value set is returned.\n\n let mut new_state_change: StateChange = StateChange::Delete {\n\n key: key.to_string(),\n\n };\n\n mem::swap(found_state_change.unwrap(), &mut new_state_change);\n\n if let StateChange::Set { value: v, .. 
} = new_state_change {\n\n return Some(v);\n\n }\n\n } else if found_state_change.is_none() {\n\n // If no StateChange, Set or Delete, is found associated with the key, a new Delete\n\n // is added to the list of StateChanges with the value returned as None.\n\n self.state_changes.push(StateChange::Delete {\n\n key: key.to_string(),\n", "file_path": "libtransact/src/context/mod.rs", "rank": 59, "score": 46322.286626046545 }, { "content": "\n\n pub fn data(&self) -> &Vec<Vec<u8>> {\n\n &self.data\n\n }\n\n\n\n pub fn state_id(&self) -> &String {\n\n &self.state_id\n\n }\n\n\n\n pub fn add_event(&mut self, event: Event) {\n\n if !self.events().contains(&event) {\n\n self.events.push(event);\n\n }\n\n }\n\n\n\n pub fn add_data(&mut self, data: Vec<u8>) {\n\n if !self.data().contains(&data) {\n\n self.data.push(data);\n\n }\n\n }\n", "file_path": "libtransact/src/context/mod.rs", "rank": 60, "score": 46319.590489001675 }, { "content": "// Copyright 2019 IBM Corp.\n\n// Copyright 2019 Cargill Incorporated\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse cylinder::{secp256k1::Secp256k1Context, Context, Signer};\n\nuse sawtooth_xo::handler::XoTransactionHandler;\n\nuse transact::context::{manager::sync::ContextManager, ContextLifecycle};\n\nuse transact::database::btree::BTreeDatabase;\n\nuse transact::execution::{\n", "file_path": "examples/simple_xo/src/main.rs", "rank": 61, "score": 45262.61540126812 }, { "content": " 
adapter::static_adapter::StaticExecutionAdapter,\n\n executor::{ExecutionTaskSubmitter, Executor},\n\n};\n\nuse transact::protocol::receipt::{Event, TransactionResult};\n\nuse transact::protocol::{\n\n batch::{BatchBuilder, BatchPair},\n\n receipt::StateChange,\n\n transaction::{HashMethod, TransactionBuilder},\n\n};\n\nuse transact::sawtooth::SawtoothToTransactHandlerAdapter;\n\nuse transact::scheduler::{serial::SerialScheduler, BatchExecutionResult, Scheduler};\n\nuse transact::state::merkle::{self, MerkleRadixTree, MerkleState};\n\nuse transact::state::StateChange as ChangeSet;\n\nuse transact::state::Write;\n\n\n\nuse sha2::{Digest, Sha512};\n\nuse std::io;\n\nuse std::str;\n\n\n", "file_path": "examples/simple_xo/src/main.rs", "rank": 62, "score": 45260.72831633415 }, { "content": " &orig_root,\n\n &*signer,\n\n \"my_game,create,\",\n\n );\n\n let (game_address, value) = get_state_change(current_result);\n\n\n\n let state_change = ChangeSet::Set {\n\n key: game_address,\n\n value: value.clone(),\n\n };\n\n let mut state_root = merkle_state.commit(&orig_root, &[state_change]).unwrap();\n\n assert_ne!(orig_root, state_root);\n\n print_current_state(&value);\n\n\n\n loop {\n\n let next_tx = get_next_tx();\n\n let current_result = play_game(\n\n &task_executor,\n\n Box::new(context_manager.clone()),\n\n &state_root,\n", "file_path": "examples/simple_xo/src/main.rs", "rank": 63, "score": 45250.786736009584 }, { "content": " &*signer,\n\n &format!(\"my_game,take,{}\", next_tx),\n\n );\n\n let (key, value) = get_state_change(current_result);\n\n let state_change = ChangeSet::Set {\n\n key,\n\n value: value.clone(),\n\n };\n\n state_root = merkle_state.commit(&state_root, &[state_change]).unwrap();\n\n\n\n let value = print_current_state(&value);\n\n if value.contains(\"WIN\") || value.contains(\"TIE\") {\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/simple_xo/src/main.rs", "rank": 64, "score": 45248.02784150181 }, { "content": " .send(batch_result)\n\n 
.expect(\"Unable to send batch result\")\n\n }))\n\n .expect(\"Failed to set result callback\");\n\n\n\n let batch_pair = create_batch(signer, \"my_game\", tx);\n\n\n\n scheduler\n\n .add_batch(batch_pair)\n\n .expect(\"Failed to add batch\");\n\n scheduler.finalize().expect(\"Failed to finalize scheduler\");\n\n\n\n run_schedule(task_executor, &mut scheduler);\n\n\n\n result_receiver\n\n .recv()\n\n .expect(\"Unable to receive result from executor\")\n\n .expect(\"Should not have received None from the executor\")\n\n}\n\n\n", "file_path": "examples/simple_xo/src/main.rs", "rank": 65, "score": 45247.783904253556 }, { "content": " );\n\n println!(\"\\t ---|---|---\");\n\n println!(\n\n \"\\t {} | {} | {} \",\n\n &split[1][6..7],\n\n &split[1][7..8],\n\n &split[1][8..9]\n\n );\n\n println!();\n\n println!(\"Status: {}\", split[2]);\n\n val\n\n}\n\n\n", "file_path": "examples/simple_xo/src/main.rs", "rank": 67, "score": 45244.22475978594 }, { "content": " context_id: &ContextId,\n\n data: Vec<u8>,\n\n ) -> Result<(), ContextManagerError> {\n\n let context = self.get_context_mut(context_id)?;\n\n context.add_data(data);\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::collections::HashMap;\n\n\n\n use crate::protocol::receipt::{EventBuilder, TransactionResult};\n\n use crate::state;\n\n use crate::state::hashmap::HashMapState;\n\n use crate::state::Write;\n\n\n\n static KEY1: &str = \"111111111111111111111111111111111111111111111111111111111111111111\";\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 69, "score": 44845.2304023547 }, { "content": "use std::collections::VecDeque;\n\nuse std::str;\n\n\n\nuse crate::collections::RefMap;\n\npub use crate::context::error::ContextManagerError;\n\nuse crate::context::{Context, ContextId, ContextLifecycle};\n\nuse crate::error::InternalError;\n\nuse crate::protocol::receipt::{Event, StateChange, TransactionReceipt, TransactionReceiptBuilder};\n\nuse 
crate::state::Read;\n\n\n\n#[derive(Clone)]\n\npub struct ContextManager {\n\n contexts: HashMap<ContextId, Context>,\n\n context_refs: RefMap<ContextId>,\n\n database: Box<dyn Read<StateId = String, Key = String, Value = Vec<u8>>>,\n\n}\n\n\n\nimpl ContextLifecycle for ContextManager {\n\n /// Creates a Context, and returns the resulting ContextId.\n\n fn create_context(&mut self, dependent_contexts: &[ContextId], state_id: &str) -> ContextId {\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 70, "score": 44842.52751755838 }, { "content": " static KEY2: &str = \"222222222222222222222222222222222222222222222222222222222222222222\";\n\n static KEY3: &str = \"333333333333333333333333333333333333333333333333333333333333333333\";\n\n static KEY4: &str = \"444444444444444444444444444444444444444444444444444444444444444444\";\n\n static KEY5: &str = \"555555555555555555555555555555555555555555555555555555555555555555\";\n\n\n\n static BYTES1: [u8; 4] = [0x01, 0x02, 0x03, 0x04];\n\n static BYTES2: [u8; 4] = [0x05, 0x06, 0x07, 0x08];\n\n static BYTES3: [u8; 4] = [0x09, 0x10, 0x11, 0x12];\n\n static BYTES4: [u8; 4] = [0x13, 0x14, 0x15, 0x16];\n\n\n\n static EVENT_TYPE1: &str = \"sawtooth/block-commit\";\n\n static ATTR1: (&str, &str) = (\n\n \"block_id\",\n\n \"f40b90d06b4a9074af2ab09e0187223da7466be75ec0f472 \\\n\n f2edd5f22960d76e402e6c07c90b7816374891d698310dd25d9b88dce7dbcba8219d9f7c9cae1861\",\n\n );\n\n static ATTR2: (&str, &str) = (\"block_num\", \"3\");\n\n\n\n fn make_manager(state_changes: Option<Vec<state::StateChange>>) -> (ContextManager, String) {\n\n let state = HashMapState::new();\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 71, "score": 44841.97622132889 }, { "content": "//! For many uses of the context manager, it will need to be shared between multiple threads,\n\n//! 
with some threads reading and writing to a context while others create contexts.\n\nuse std::sync::{Arc, Mutex};\n\n\n\nuse crate::context::error::ContextManagerError;\n\nuse crate::context::{manager, ContextId, ContextLifecycle};\n\nuse crate::error::InternalError;\n\nuse crate::protocol::receipt::{Event, TransactionReceipt};\n\nuse crate::state::Read;\n\n\n\n/// A thread-safe ContextManager.\n\n#[derive(Clone)]\n\npub struct ContextManager {\n\n internal_manager: Arc<Mutex<manager::ContextManager>>,\n\n}\n\n\n\nimpl ContextManager {\n\n /// Constructs a new Context Manager around a given state Read.\n\n ///\n\n /// The Read defines the state on which the context built.\n", "file_path": "libtransact/src/context/manager/sync.rs", "rank": 72, "score": 44838.53154372226 }, { "content": " \"Failed to remove a context whose ref count was 0 \\\n\n but was not in the context collection\"\n\n .into(),\n\n ));\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n /// Generates a valid `TransactionReceipt` based on the information available within the\n\n /// specified `Context`.\n\n fn get_transaction_receipt(\n\n &self,\n\n context_id: &ContextId,\n\n transaction_id: &str,\n\n ) -> Result<TransactionReceipt, ContextManagerError> {\n\n let context = self.get_context(context_id)?;\n\n let new_transaction_receipt = TransactionReceiptBuilder::new()\n\n .valid()\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 73, "score": 44836.26468738447 }, { "content": " }\n\n\n\n pub fn delete_state(\n\n &self,\n\n context_id: &ContextId,\n\n key: &str,\n\n ) -> Result<Option<Vec<u8>>, ContextManagerError> {\n\n self.internal_manager\n\n .lock()\n\n .expect(\"Lock in delete_state was poisoned\")\n\n .delete_state(context_id, key)\n\n }\n\n\n\n pub fn add_event(\n\n &self,\n\n context_id: &ContextId,\n\n event: Event,\n\n ) -> Result<(), ContextManagerError> {\n\n self.internal_manager\n\n .lock()\n", "file_path": "libtransact/src/context/manager/sync.rs", "rank": 74, "score": 
44836.19814251639 }, { "content": " .expect(\"Lock in add_event was poisoned\")\n\n .add_event(context_id, event)\n\n }\n\n\n\n pub fn add_data(\n\n &self,\n\n context_id: &ContextId,\n\n data: Vec<u8>,\n\n ) -> Result<(), ContextManagerError> {\n\n self.internal_manager\n\n .lock()\n\n .expect(\"Lock in add_data was poisoned\")\n\n .add_data(context_id, data)\n\n }\n\n}\n\n\n\nimpl ContextLifecycle for ContextManager {\n\n /// Creates a Context, and returns the resulting ContextId.\n\n fn create_context(&mut self, dependent_contexts: &[ContextId], state_id: &str) -> ContextId {\n\n self.internal_manager\n", "file_path": "libtransact/src/context/manager/sync.rs", "rank": 75, "score": 44836.06242164506 }, { "content": " pub fn delete_state(\n\n &mut self,\n\n context_id: &ContextId,\n\n key: &str,\n\n ) -> Result<Option<Vec<u8>>, ContextManagerError> {\n\n // Adding a StateChange::Delete to the specified Context, which will occur no matter which\n\n // Context or State the key and associated value is found in.\n\n let context_value = self.get_context_mut(context_id)?.delete_state(key);\n\n if let Some(value) = context_value {\n\n return Ok(Some(value));\n\n }\n\n\n\n let current_context = self.get_context(context_id)?;\n\n let mut containing_context = self.get_context(context_id)?;\n\n\n\n let mut contexts = VecDeque::new();\n\n contexts.push_front(containing_context);\n\n // Adding dependent Contexts to search for the Key\n\n for context_id in containing_context.base_contexts().iter() {\n\n contexts.push_back(self.get_context(context_id)?);\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 76, "score": 44835.706676050446 }, { "content": " .lock()\n\n .expect(\"Lock in create_context was poisoned\")\n\n .create_context(dependent_contexts, state_id)\n\n }\n\n\n\n fn drop_context(&mut self, context_id: ContextId) -> Result<(), InternalError> {\n\n self.internal_manager\n\n .lock()\n\n .map_err(|_| InternalError::with_message(\"ContextManager lock was 
poisoned\".into()))?\n\n .drop_context(context_id)\n\n }\n\n\n\n fn get_transaction_receipt(\n\n &self,\n\n context_id: &ContextId,\n\n transaction_id: &str,\n\n ) -> Result<TransactionReceipt, ContextManagerError> {\n\n self.internal_manager\n\n .lock()\n\n .expect(\"Lock in get_transaction_receipt was poisoned\")\n\n .get_transaction_receipt(context_id, transaction_id)\n\n }\n\n\n\n fn clone_box(&self) -> Box<dyn ContextLifecycle> {\n\n Box::new(self.clone())\n\n }\n\n}\n", "file_path": "libtransact/src/context/manager/sync.rs", "rank": 77, "score": 44835.62374405555 }, { "content": " /// 2) Add some state to it\n\n /// 3) Drop the context\n\n /// 4) Validate that the context can no longer be used.\n\n #[test]\n\n fn drop_context_with_no_dependencies() -> Result<(), Box<dyn std::error::Error>> {\n\n let state_changes = vec![state::StateChange::Set {\n\n key: KEY1.to_string(),\n\n value: BYTES1.to_vec(),\n\n }];\n\n let (mut manager, state_id) = make_manager(Some(state_changes));\n\n let context_id = manager.create_context(&[], &state_id);\n\n manager.set_state(&context_id, KEY2.to_string(), BYTES2.to_vec())?;\n\n\n\n manager.drop_context(context_id.clone())?;\n\n\n\n assert!(matches!(\n\n manager.set_state(&context_id, KEY3.to_string(), BYTES3.to_vec()),\n\n Err(ContextManagerError::MissingContextError { .. 
})\n\n ));\n\n\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 78, "score": 44835.39134670286 }, { "content": "#[derive(Debug)]\n\npub enum ContractContextError {\n\n AddresserError(AddresserError),\n\n ProtoConversionError(ProtoConversionError),\n\n ProtocolBuildError(Box<dyn StdError>),\n\n TransactionContextError(ContextError),\n\n}\n\n\n\nimpl StdError for ContractContextError {\n\n fn source(&self) -> Option<&(dyn StdError + 'static)> {\n\n match *self {\n\n ContractContextError::AddresserError(_) => None,\n\n ContractContextError::ProtoConversionError(ref err) => Some(err),\n\n ContractContextError::ProtocolBuildError(ref err) => Some(&**err),\n\n ContractContextError::TransactionContextError(ref err) => Some(err),\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for ContractContextError {\n", "file_path": "libtransact/src/contract/context/error.rs", "rank": 79, "score": 44835.35927897249 }, { "content": " .with_data(BYTES1.to_vec())\n\n .build()\n\n .unwrap();\n\n let event_add_result = manager.add_event(&context_id, event.clone());\n\n assert!(event_add_result.is_ok());\n\n context = manager.get_context(&context_id).unwrap();\n\n assert_eq!(context.events()[0], event.clone());\n\n\n\n // Adding Data to the Context, to be used to build the TransactionReceipt\n\n let data_add_result = manager.add_data(&context_id, BYTES2.to_vec());\n\n context = manager.get_context(&context_id).unwrap();\n\n assert!(data_add_result.is_ok());\n\n assert_eq!(context.data()[0], BYTES2);\n\n\n\n // Building the TransactionReceipt from the objects within the specified Context\n\n let transaction_receipt = manager.get_transaction_receipt(&context_id, KEY2).unwrap();\n\n check_transaction_receipt(transaction_receipt, event)\n\n }\n\n\n\n #[test]\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 80, "score": 44835.256245321914 }, { "content": " #[test]\n\n fn create_transaction_receipt() {\n\n let (mut manager, state_id) = make_manager(None);\n\n\n\n 
let context_id = manager.create_context(&[], &state_id);\n\n let mut context = manager.get_context(&context_id).unwrap();\n\n assert_eq!(&context_id, context.id());\n\n\n\n let set_result = manager.set_state(&context_id, KEY1.to_string(), BYTES3.to_vec());\n\n assert!(set_result.is_ok());\n\n let delete_result = manager.delete_state(&context_id, KEY1).unwrap();\n\n assert!(delete_result.is_some());\n\n\n\n // Adding an Event to the Context, to be used to build the TransactionReceipt\n\n let event = EventBuilder::new()\n\n .with_event_type(EVENT_TYPE1.to_string())\n\n .with_attributes(vec![\n\n (ATTR1.0.to_string(), ATTR1.1.to_string()),\n\n (ATTR2.0.to_string(), ATTR2.1.to_string()),\n\n ])\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 81, "score": 44835.18897898987 }, { "content": "// Copyright 2019 Cargill Incorporated\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse std::error::Error as StdError;\n\n\n\nuse crate::contract::address::AddresserError;\n\nuse crate::handler::ContextError;\n\nuse crate::protos::ProtoConversionError;\n\n\n", "file_path": "libtransact/src/contract/context/error.rs", "rank": 82, "score": 44834.73559303103 }, { "content": " }\n\n\n\n /// Returns a mutable Context within the ContextManager's Context list specified by the ContextId\n\n fn get_context_mut(\n\n &mut self,\n\n context_id: &ContextId,\n\n ) -> Result<&mut Context, ContextManagerError> {\n\n self.contexts\n\n 
.get_mut(context_id)\n\n .ok_or_else(|| ContextManagerError::MissingContextError(context_id.to_string()))\n\n }\n\n\n\n /// Returns a Context within the ContextManager's Context list specified by the ContextId\n\n fn get_context(&self, context_id: &ContextId) -> Result<&Context, ContextManagerError> {\n\n self.contexts\n\n .get(context_id)\n\n .ok_or_else(|| ContextManagerError::MissingContextError(context_id.to_string()))\n\n }\n\n\n\n /// Get the values associated with list of keys, from a specific Context.\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 83, "score": 44832.963738906314 }, { "content": " for ctx_id in dependent_contexts {\n\n self.context_refs.add_ref(*ctx_id);\n\n }\n\n\n\n let new_context = Context::new(state_id, dependent_contexts.to_vec());\n\n self.contexts.insert(*new_context.id(), new_context.clone());\n\n self.context_refs.add_ref(*new_context.id());\n\n *new_context.id()\n\n }\n\n\n\n fn drop_context(&mut self, context_id: ContextId) -> Result<(), InternalError> {\n\n let mut contexts = VecDeque::new();\n\n contexts.push_back(context_id);\n\n\n\n while let Some(context_id) = contexts.pop_front() {\n\n if let Ok(Some(_)) = self.context_refs.remove_ref(&context_id) {\n\n if let Some(context) = self.contexts.remove(&context_id) {\n\n contexts.extend(context.base_contexts.into_iter());\n\n } else {\n\n return Err(InternalError::with_message(\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 84, "score": 44832.93761269286 }, { "content": " /// If a key is not found in the context, State is then checked for these keys.\n\n /// Keys are returned with the associated value, if found in Context or State.\n\n pub fn get(\n\n &self,\n\n context_id: &ContextId,\n\n keys: &[String],\n\n ) -> Result<Vec<(String, Vec<u8>)>, ContextManagerError> {\n\n let mut key_values = Vec::new();\n\n for key in keys.iter().rev() {\n\n let mut context = self.get_context(context_id)?;\n\n let mut contexts = VecDeque::new();\n\n for 
context_id in context.base_contexts().iter() {\n\n contexts.push_back(self.get_context(context_id)?);\n\n }\n\n if !context.contains(key) && !contexts.is_empty() {\n\n while let Some(current_context) = contexts.pop_front() {\n\n context = current_context;\n\n if current_context.contains(key) {\n\n break;\n\n } else {\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 85, "score": 44832.8702854735 }, { "content": " }\n\n\n\n while let Some(context) = contexts.pop_front() {\n\n if context.contains(key) {\n\n containing_context = context;\n\n break;\n\n } else {\n\n for context_id in context.base_contexts().iter() {\n\n contexts.push_back(self.get_context(context_id)?);\n\n }\n\n }\n\n }\n\n if containing_context.contains(key) {\n\n if let Some(v) = containing_context.get_state(key) {\n\n return Ok(Some(v.to_vec()));\n\n }\n\n } else if let Some(value) = self\n\n .database\n\n .get(current_context.state_id(), &[key.to_string()])?\n\n .get(&key.to_string())\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 86, "score": 44832.84693881421 }, { "content": "\n\n let context_id = manager.create_context(&[ancestor_context], &state_id);\n\n manager.set_state(&context_id, KEY3.to_string(), BYTES3.to_vec())?;\n\n\n\n manager.drop_context(ancestor_context.clone())?;\n\n\n\n assert!(manager.get_context(&ancestor_context).is_ok());\n\n\n\n manager.drop_context(context_id.clone())?;\n\n\n\n assert!(\n\n matches!(\n\n manager.get_context(&context_id),\n\n Err(ContextManagerError::MissingContextError { .. 
})\n\n ),\n\n \"Did not drop context\"\n\n );\n\n\n\n assert!(\n\n matches!(\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 87, "score": 44832.82664441501 }, { "content": "/*\n\n * Copyright 2019 Bitwise IO, Inc.\n\n * Copyright 2019 Cargill Incorporated\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\n//! This module provides a thread-safe ContextManager.\n\n//!\n", "file_path": "libtransact/src/context/manager/sync.rs", "rank": 88, "score": 44832.68555708866 }, { "content": " let first_context_id = manager.create_context(&[], &state_id);\n\n assert!(!manager.contexts.is_empty());\n\n assert!(manager.contexts.get(&first_context_id).is_some());\n\n\n\n let second_context_id = manager.create_context(&[], &state_id);\n\n let second_context = manager.get_context(&second_context_id).unwrap();\n\n assert_eq!(&second_context_id, second_context.id());\n\n assert_eq!(manager.contexts.len(), 2);\n\n }\n\n\n\n #[test]\n\n fn add_context_event() {\n\n let (mut manager, state_id) = make_manager(None);\n\n let context_id = manager.create_context(&[], &state_id);\n\n let event = EventBuilder::new()\n\n .with_event_type(EVENT_TYPE1.to_string())\n\n .with_attributes(vec![\n\n (ATTR1.0.to_string(), ATTR1.1.to_string()),\n\n (ATTR2.0.to_string(), ATTR2.1.to_string()),\n\n ])\n", "file_path": "libtransact/src/context/manager/mod.rs", 
"rank": 89, "score": 44832.681804460895 }, { "content": " {\n\n return Ok(Some(value.to_vec()));\n\n }\n\n Ok(None)\n\n }\n\n\n\n /// Adds an Event to the specified Context.\n\n pub fn add_event(\n\n &mut self,\n\n context_id: &ContextId,\n\n event: Event,\n\n ) -> Result<(), ContextManagerError> {\n\n let context = self.get_context_mut(context_id)?;\n\n context.add_event(event);\n\n Ok(())\n\n }\n\n\n\n /// Adds Data to the specified Context.\n\n pub fn add_data(\n\n &mut self,\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 90, "score": 44832.6516556621 }, { "content": " manager.get_context(&ancestor_context),\n\n Err(ContextManagerError::MissingContextError { .. })\n\n ),\n\n \"Did not drop ancestor context\"\n\n );\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 91, "score": 44832.62260321798 }, { "content": "impl From<ProtoConversionError> for ContractContextError {\n\n fn from(e: ProtoConversionError) -> Self {\n\n ContractContextError::ProtoConversionError(e)\n\n }\n\n}\n\n\n\nimpl From<AddresserError> for ContractContextError {\n\n fn from(e: AddresserError) -> Self {\n\n ContractContextError::AddresserError(e)\n\n }\n\n}\n\n\n\nimpl From<ContextError> for ContractContextError {\n\n fn from(e: ContextError) -> Self {\n\n ContractContextError::TransactionContextError(e)\n\n }\n\n}\n", "file_path": "libtransact/src/contract/context/error.rs", "rank": 92, "score": 44832.61191443859 }, { "content": " .with_data(BYTES1.to_vec())\n\n .build()\n\n .unwrap();\n\n let event_add_result = manager.add_event(&context_id, event.clone());\n\n assert!(event_add_result.is_ok());\n\n let context = manager.get_context(&context_id).unwrap();\n\n assert_eq!(context.events()[0], event.clone());\n\n }\n\n\n\n #[test]\n\n fn add_context_data() {\n\n let (mut manager, state_id) = make_manager(None);\n\n let context_id = manager.create_context(&[], &state_id);\n\n\n\n let data_add_result = manager.add_data(&context_id, 
BYTES2.to_vec());\n\n let context = manager.get_context(&context_id).unwrap();\n\n assert!(data_add_result.is_ok());\n\n assert_eq!(context.data()[0], BYTES2);\n\n }\n\n\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 93, "score": 44832.60099077707 }, { "content": " for context_id in context.base_contexts().iter() {\n\n contexts.push_back(self.get_context(context_id)?);\n\n }\n\n }\n\n }\n\n }\n\n if context.contains(key) {\n\n if let Some(StateChange::Set { key: k, value: v }) = context\n\n .state_changes()\n\n .iter()\n\n .rev()\n\n .find(|state_change| state_change.has_key(key))\n\n {\n\n key_values.push((k.clone(), v.clone()));\n\n }\n\n } else if let Some(v) = self\n\n .database\n\n .get(context.state_id(), &[key.to_string()])?\n\n .get(&key.to_string())\n\n {\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 94, "score": 44832.592221077175 }, { "content": " Ok(())\n\n }\n\n\n\n /// Test that a context with base contexts may be dropped\n\n /// 1) Create a parent context\n\n /// 2) Create a context with the parent as its base context\n\n /// 3) Add some state to it\n\n /// 4) Drop the ancestor context\n\n /// 5) Validate that the ancestor is still available\n\n /// 6) Drop the context\n\n /// 7) Validate that both the ancestor context and the context are no longer available\n\n #[test]\n\n fn drop_context_with_dependencies() -> Result<(), Box<dyn std::error::Error>> {\n\n let state_changes = vec![state::StateChange::Set {\n\n key: KEY1.to_string(),\n\n value: BYTES1.to_vec(),\n\n }];\n\n let (mut manager, state_id) = make_manager(Some(state_changes));\n\n let ancestor_context = manager.create_context(&[], &state_id);\n\n manager.set_state(&ancestor_context, KEY2.to_string(), BYTES2.to_vec())?;\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 95, "score": 44832.594183065456 }, { "content": " key_values.push((key.to_string(), v.clone()));\n\n }\n\n }\n\n Ok(key_values)\n\n }\n\n\n\n /// Adds a StateChange::Set 
to the specified Context\n\n pub fn set_state(\n\n &mut self,\n\n context_id: &ContextId,\n\n key: String,\n\n value: Vec<u8>,\n\n ) -> Result<(), ContextManagerError> {\n\n let context = self.get_context_mut(context_id)?;\n\n context.set_state(key, value);\n\n Ok(())\n\n }\n\n\n\n /// Adds a StateChange::Delete to the specified Context, returning the value, if found, that is\n\n /// associated with the specified key.\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 96, "score": 44832.514604994496 }, { "content": "/*\n\n * Copyright 2019 Cargill Incorporated\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n * -----------------------------------------------------------------------------\n\n */\n\n\n\npub mod error;\n\n#[cfg(feature = \"contract-context-key-value\")]\n\npub mod key_value;\n", "file_path": "libtransact/src/contract/context/mod.rs", "rank": 97, "score": 44832.50734836405 }, { "content": " value: BYTES1.to_vec(),\n\n }];\n\n let (mut manager, state_id) = make_manager(Some(state_changes));\n\n let ancestor_context = manager.create_context(&[], &state_id);\n\n let add_result = manager.set_state(&ancestor_context, KEY2.to_string(), BYTES2.to_vec());\n\n assert!(add_result.is_ok());\n\n\n\n let context_id = manager.create_context(&[ancestor_context], &state_id);\n\n\n\n // Validates the result from adding the state change to the Context within the ContextManager.\n\n assert!(manager\n\n 
.set_state(&context_id, KEY3.to_string(), BYTES3.to_vec())\n\n .is_ok());\n\n assert!(manager\n\n .set_state(&context_id, KEY4.to_string(), BYTES4.to_vec())\n\n .is_ok());\n\n assert!(manager.delete_state(&context_id, KEY4).unwrap().is_some());\n\n\n\n // Creating a collection of keys to retrieve the values saved in Context or State.\n\n let keys = [\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 98, "score": 44832.46028793922 }, { "content": " .with_state_changes(context.state_changes().to_vec())\n\n .with_events(context.events().to_vec())\n\n .with_data(context.data().to_vec())\n\n .with_transaction_id(transaction_id.to_string())\n\n .build()?;\n\n Ok(new_transaction_receipt)\n\n }\n\n\n\n fn clone_box(&self) -> Box<dyn ContextLifecycle> {\n\n Box::new(self.clone())\n\n }\n\n}\n\n\n\nimpl ContextManager {\n\n pub fn new(database: Box<dyn Read<StateId = String, Key = String, Value = Vec<u8>>>) -> Self {\n\n ContextManager {\n\n contexts: HashMap::new(),\n\n context_refs: RefMap::new(),\n\n database,\n\n }\n", "file_path": "libtransact/src/context/manager/mod.rs", "rank": 99, "score": 44832.35199064374 } ]
Rust
tests/trk_write.rs
czotti/trk-io
b90205ccd7a568e4957ea53b7f5b669a775bfac5
mod test; use std::iter::FromIterator; use test::{get_random_trk_path, load_trk}; use trk_io::{Affine4, Point, Reader, Writer}; #[test] fn test_write_dynamic() { let write_to = get_random_trk_path(); let (original_header, original_tractogram) = load_trk("data/simple.trk"); { let mut writer = Writer::new(&write_to, Some(original_header.clone())).unwrap(); writer.write_from_iter([Point::new(0.0, 1.0, 2.0)].iter().cloned(), 1); let v = vec![Point::new(0.0, 1.0, 2.0), Point::new(3.0, 4.0, 5.0)]; writer.write_from_iter(v, 2); let v = Vec::from_iter(0..15); let iter = v.chunks(3).map(|ints| Point::new(ints[0] as f32, ints[1] as f32, ints[2] as f32)); writer.write_from_iter(iter, 5); } assert!((original_header, original_tractogram) == load_trk(&write_to)); } #[test] fn test_write_empty() { let write_to = get_random_trk_path(); let (original_header, original_tractogram) = load_trk("data/empty.trk"); { let mut writer = Writer::new(&write_to, Some(original_header.clone())).unwrap(); writer.write(original_tractogram.clone()); } assert!((original_header, original_tractogram) == load_trk(&write_to)); } #[test] fn test_write_simple() { let write_to = get_random_trk_path(); let (original_header, original_tractogram) = load_trk("data/simple.trk"); { let mut writer = Writer::new(&write_to, Some(original_header.clone())).unwrap(); writer.write(original_tractogram.clone()); } assert!((original_header, original_tractogram) == load_trk(&write_to)); } #[test] fn test_write_points_simple() { let write_to = get_random_trk_path(); let (original_header, original_tractogram) = load_trk("data/simple.trk"); { let mut writer = Writer::new(&write_to, Some(original_header.clone())).unwrap(); for streamline in original_tractogram.streamlines.into_iter() { writer.write(streamline); } } assert!((original_header, original_tractogram) == load_trk(&write_to)); } #[test] fn test_write_tractogram_item_simple() { let write_to = get_random_trk_path(); let reader = Reader::new("data/simple.trk").unwrap(); { 
let mut writer = Writer::new(&write_to, Some(reader.header.clone())).unwrap(); for item in reader.into_iter() { writer.write(item); } } let (original_header, original_tractogram) = load_trk("data/simple.trk"); assert!((original_header, original_tractogram) == load_trk(&write_to)); } #[test] fn test_write_ref_tractogram_item_simple() { let write_to = get_random_trk_path(); let (original_header, original_tractogram) = load_trk("data/simple.trk"); { let mut writer = Writer::new(&write_to, Some(original_header.clone())).unwrap(); for ref_item in original_tractogram.into_iter() { writer.write(ref_item); } } assert!((original_header, original_tractogram) == load_trk(&write_to)); } #[test] fn test_write_standard() { let write_to = get_random_trk_path(); let (original_header, original_tractogram) = load_trk("data/standard.trk"); { let mut writer = Writer::new(&write_to, Some(original_header)).unwrap(); writer.write(&original_tractogram.streamlines[0]); writer.write(&original_tractogram.streamlines[1]); writer.write(&original_tractogram.streamlines[2]); } let (header, tractogram) = load_trk(&write_to); assert_eq!(header.nb_streamlines, 3); assert_eq!( tractogram.streamlines[0], [Point::new(-0.5, -1.5, 1.0), Point::new(0.0, 0.0, 2.0), Point::new(0.5, 1.5, 3.0)] ); } #[test] fn test_write_standard_lps() { let write_to = get_random_trk_path(); let (original_header, original_tractogram) = load_trk("data/standard.LPS.trk"); { let mut writer = Writer::new(&write_to, Some(original_header.clone())).unwrap(); #[rustfmt::skip] assert_eq!( writer.affine4, Affine4::new( -1.0, 0.0, 0.0, 3.5, 0.0, -1.0, 0.0, 13.5, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, ) ); for i in 0..10 { writer.write(&original_tractogram.streamlines[i]); } } let (header, tractogram) = load_trk(&write_to); assert_eq!(header.nb_streamlines, 10); #[rustfmt::skip] assert_eq!( header.affine4_to_rasmm, Affine4::new( -1.0, 0.0, 0.0, 3.5, 0.0, -1.0, 0.0, 13.5, 0.0, 0.0, 1.0, -1.0, 0.0, 0.0, 0.0, 1.0, ) ); assert_eq!( 
tractogram.streamlines[0], [Point::new(-0.5, -1.5, 1.0), Point::new(0.0, 0.0, 2.0), Point::new(0.5, 1.5, 3.0)] ); } #[test] fn test_write_complex() { let write_to = get_random_trk_path(); let reader = Reader::new("data/complex.trk").unwrap(); { let mut writer = Writer::new(&write_to, Some(reader.header.clone())).unwrap(); for item in reader.into_iter() { writer.write(item); } } let (original_header, original_tractogram) = load_trk("data/complex.trk"); assert!((original_header, original_tractogram) == load_trk(&write_to)); }
mod test; use std::iter::FromIterator; use test::{get_random_trk_path, load_trk}; use trk_io::{Affine4, Point, Reader, Writer}; #[test] fn test_write_dynamic() { let write_to = get_random_trk_path(); let (original_header, original_tractogram) = load_trk("data/simple.trk"); { let mut writer = Writer::new(&write_to, Some(original_header.clone())).unwrap(); writer.write_from_iter([Point::new(0.0, 1.0, 2.0)].iter().cloned(), 1); let v = vec![Point::new(0.0, 1.0, 2.0), Point::new(3.0, 4.0, 5.0)]; writer.write_from_iter(v, 2); let v = Vec::from_iter(0..15); let iter = v.chunks(3).map(|ints| Point::new(ints[0] as f32, ints[1] as f32, ints[2] as f32)); writer.write_from_iter(iter, 5); } assert!((original_header, original_tractogram) == load_trk(&write_to)); } #[test] fn test_write_empty() { let write_to = get_random_trk_path(); let (original_header, original_tractogram) = load_trk("data/empty.trk"); { let mut writer = Writer::new(&write_to, Some(original_header.clone())).unwrap(); writer.write(original_tractogram.clone()); } assert!((original_header, original_tractogram) == load_trk(&write_to)); } #[test] fn test_write_simple() { let write_to = get_random_trk_path(); let (original_header, original_tractogram) = load_trk("data/simple.trk"); { let mut writer = Writer::new(&write_to, Some(original_header.clone())).unwrap(); writer.write(original_tractogram.clone()); } assert!((original_header, original_tractogram) == load_trk(&write_to)); } #[test] fn test_write_points_simple() { let write_to = get_random_trk_path(); let (original_header, original_tractogram) = load_trk("data/simple.trk"); { let mut writer = Writer::new(&write_to, Some(original_header.clone())).unwrap(); for streamline in original_tractogram.streamlines.into_iter() { writer.write(streamline); } } assert!((original_header, original_tractogram) == load_trk(&write_to)); } #[test] fn test_write_tractogram_item_simple() { let write_to = get_random_trk_path(); let reader = Reader::new("data/simple.trk").unwrap(); { 
let mut writer = Writer::new(&write_to, Some(reader.header.clone())).unwrap(); for item in reader.into_iter() { writer.write(item); } } let (original_header, original_tractogram) = load_trk("data/simple.trk"); assert!((original_header, original_tractogram) == load_trk(&write_to)); } #[test] fn test_write_ref_tractogram_item_simple() { let write_to = get_random_trk_path(); let (original_header, original_tractogram) = load_trk("data/simple.trk"); { let mut writer = Writer::new(&write_to, Some(original_header.clone())).unwrap(); for ref_item in original_tractogram.into_iter() { writer.write(ref_item); } } assert!((original_header, original_tractogram) == load_trk(&write_to)); } #[test] fn test_write_standard() { let write_to = get_random_trk_path(); let (original_header, original_tractogram) = load_trk("data/standard.trk"); { let mut writer = Writer::new(&write_to, Some(original_header)).unwrap(); writer.write(&original_tractogram.streamlines[0]); writer.write(&original_tractogram.streamlines[1]); writer.write(&original_tractogram.streamlines[2]); } let (header, tractogram) = load_trk(&write_to); assert_eq!(header.nb_streamlines, 3); assert_eq!( tractogram.streamlines[0], [Point::new(-0.5, -1.5, 1.0), Point::new(0.0, 0.0, 2.0), Point::new(0.5, 1.5, 3.0)] ); } #[test] fn test_write_standard_lps() { let write_to = get_random_trk_p
writer.write(&original_tractogram.streamlines[i]); } } let (header, tractogram) = load_trk(&write_to); assert_eq!(header.nb_streamlines, 10); #[rustfmt::skip] assert_eq!( header.affine4_to_rasmm, Affine4::new( -1.0, 0.0, 0.0, 3.5, 0.0, -1.0, 0.0, 13.5, 0.0, 0.0, 1.0, -1.0, 0.0, 0.0, 0.0, 1.0, ) ); assert_eq!( tractogram.streamlines[0], [Point::new(-0.5, -1.5, 1.0), Point::new(0.0, 0.0, 2.0), Point::new(0.5, 1.5, 3.0)] ); } #[test] fn test_write_complex() { let write_to = get_random_trk_path(); let reader = Reader::new("data/complex.trk").unwrap(); { let mut writer = Writer::new(&write_to, Some(reader.header.clone())).unwrap(); for item in reader.into_iter() { writer.write(item); } } let (original_header, original_tractogram) = load_trk("data/complex.trk"); assert!((original_header, original_tractogram) == load_trk(&write_to)); }
ath(); let (original_header, original_tractogram) = load_trk("data/standard.LPS.trk"); { let mut writer = Writer::new(&write_to, Some(original_header.clone())).unwrap(); #[rustfmt::skip] assert_eq!( writer.affine4, Affine4::new( -1.0, 0.0, 0.0, 3.5, 0.0, -1.0, 0.0, 13.5, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, ) ); for i in 0..10 {
random
[ { "content": "fn uniform(reader: Reader, header: Header, write_to: &str, r: f32, g: f32, b: f32) {\n\n let mut writer = Writer::new(write_to, Some(header)).unwrap();\n\n for (streamline, mut scalars, properties) in reader.into_iter() {\n\n for _ in 0..streamline.len() {\n\n scalars.push(r);\n\n scalars.push(g);\n\n scalars.push(b);\n\n }\n\n\n\n writer.write((streamline, scalars, properties));\n\n }\n\n}\n\n\n", "file_path": "examples/trk_color.rs", "rank": 0, "score": 151835.17763860745 }, { "content": "pub fn load_trk(path: &str) -> (Header, Tractogram) {\n\n let mut reader = Reader::new(path).unwrap();\n\n (reader.header.clone(), reader.read_all())\n\n}\n", "file_path": "tests/test/mod.rs", "rank": 1, "score": 143968.94410247265 }, { "content": "fn sampling_write(writer: &mut Writer, reader: Reader, number: usize, rng: &mut SmallRng) {\n\n let mut sampled_indices =\n\n rand::seq::index::sample(rng, reader.header.nb_streamlines, number).into_vec();\n\n sampled_indices.sort();\n\n\n\n let mut reader_iter = reader.into_iter();\n\n let mut last = 0;\n\n for idx in sampled_indices {\n\n writer.write(reader_iter.nth(idx - last).unwrap());\n\n last = idx + 1;\n\n }\n\n}\n\n\n", "file_path": "examples/trk_subsampler.rs", "rank": 2, "score": 132333.12596430874 }, { "content": "fn local(reader: Reader, header: Header, write_to: &str) {\n\n let mut writer = Writer::new(write_to, Some(header)).unwrap();\n\n for (streamline, mut scalars, properties) in reader.into_iter() {\n\n // Scope to avoid scalars mutable sharing\n\n {\n\n let mut add = |p1: &Point, p2: &Point| {\n\n let x = p2.x - p1.x;\n\n let y = p2.y - p1.y;\n\n let z = p2.z - p1.z;\n\n let norm = (x.powi(2) + y.powi(2) + z.powi(2)).sqrt();\n\n scalars.push((x / norm).abs() * 255.0);\n\n scalars.push((y / norm).abs() * 255.0);\n\n scalars.push((z / norm).abs() * 255.0);\n\n };\n\n\n\n // Manage first point\n\n add(&streamline[0], &streamline[1]);\n\n\n\n for p in streamline.windows(3) {\n\n add(&p[0], &p[2]);\n\n 
}\n\n\n\n // Manage last point\n\n add(&streamline[streamline.len() - 2], &streamline[streamline.len() - 1]);\n\n }\n\n\n\n writer.write((streamline, scalars, properties));\n\n }\n\n}\n", "file_path": "examples/trk_color.rs", "rank": 3, "score": 127180.77857012193 }, { "content": "/// Returns the endianness used when saving the trk file read by `reader`\n\n///\n\n/// We use `version` to discover the endianness because it's the biggest\n\n/// integer field with the most constrained possible values {1, 2}.\n\n/// Read in LittleEndian, version == 1 or 2.\n\n/// Read in BigEndian, version == 511 or 767\n\n/// Even with hundreds major updates, `version` should be safe.\n\nfn test_endianness(reader: &mut BufReader<File>) -> Result<Endianness> {\n\n let version_offset = (HEADER_SIZE - 8) as u64;\n\n reader.seek(SeekFrom::Start(version_offset))?;\n\n let version = reader.read_i32::<LittleEndian>()?;\n\n let endianness = if version <= 255 { Endianness::Little } else { Endianness::Big };\n\n reader.seek(SeekFrom::Start(0))?;\n\n\n\n Ok(endianness)\n\n}\n\n\n", "file_path": "src/cheader.rs", "rank": 4, "score": 107645.81666193261 }, { "content": "#[test]\n\nfn test_iterator_mut() {\n\n let p0 = Point::origin();\n\n\n\n let mut streamlines = get_toy_streamlines();\n\n for (i, streamline) in streamlines.iter_mut().enumerate() {\n\n for p in streamline {\n\n if i % 2 == 0 {\n\n *p = p0;\n\n }\n\n }\n\n }\n\n\n\n let mut iter = streamlines.into_iter();\n\n assert_eq!(iter.next().unwrap(), [p0, p0]);\n\n assert_eq!(\n\n iter.next().unwrap(),\n\n [Point::new(0.0, 1.0, 0.0), Point::new(0.0, 2.0, 0.0), Point::new(0.0, 3.0, 0.0)]\n\n );\n\n assert_eq!(iter.next().unwrap(), [p0, p0, p0]);\n\n assert_eq!(iter.next(), None);\n\n assert_eq!(iter.next(), None);\n\n}\n\n\n", "file_path": "tests/array_sequence.rs", "rank": 5, "score": 107510.82875986795 }, { "content": "fn get_toy_streamlines() -> ArraySequence<Point> {\n\n ArraySequence::new(\n\n vec![2, 3, 3],\n\n vec![\n\n Point::new(1.0, 
0.0, 0.0), // 1\n\n Point::new(2.0, 0.0, 0.0), // 1\n\n Point::new(0.0, 1.0, 0.0), // 2\n\n Point::new(0.0, 2.0, 0.0), // 2\n\n Point::new(0.0, 3.0, 0.0), // 2\n\n Point::new(0.0, 0.0, 1.0), // 3\n\n Point::new(0.0, 0.0, 2.0), // 3\n\n Point::new(0.0, 0.0, 3.0), // 3\n\n ],\n\n )\n\n}\n\n\n", "file_path": "tests/array_sequence.rs", "rank": 6, "score": 102712.70839560244 }, { "content": "pub fn get_random_trk_path() -> String {\n\n let dir = TempDir::new().unwrap();\n\n let path = dir.into_path().join(\"out.trk\");\n\n path.to_str().unwrap().to_string()\n\n}\n\n\n", "file_path": "tests/test/mod.rs", "rank": 9, "score": 91234.00197450051 }, { "content": "#[test]\n\nfn test_iterator() {\n\n let streamlines = get_toy_streamlines();\n\n let mut iter = streamlines.into_iter();\n\n assert_eq!(iter.next().unwrap(), [Point::new(1.0, 0.0, 0.0), Point::new(2.0, 0.0, 0.0)]);\n\n assert_eq!(\n\n iter.next().unwrap(),\n\n [Point::new(0.0, 1.0, 0.0), Point::new(0.0, 2.0, 0.0), Point::new(0.0, 3.0, 0.0)]\n\n );\n\n assert_eq!(\n\n iter.next().unwrap(),\n\n [Point::new(0.0, 0.0, 1.0), Point::new(0.0, 0.0, 2.0), Point::new(0.0, 0.0, 3.0)]\n\n );\n\n assert_eq!(iter.next(), None);\n\n assert_eq!(iter.next(), None);\n\n}\n\n\n", "file_path": "tests/array_sequence.rs", "rank": 10, "score": 88709.63787246801 }, { "content": "#[test]\n\nfn test_add_scalar() {\n\n let torsion = \"torsion\".to_string();\n\n\n\n let mut header = Header::default();\n\n header.add_scalar(&torsion).unwrap();\n\n assert_eq!(header.scalars_name, vec![torsion])\n\n}\n\n\n", "file_path": "tests/header.rs", "rank": 11, "score": 87888.88335468729 }, { "content": "#[test]\n\nfn test_add_property() {\n\n let torsion = \"torsion\".to_string();\n\n\n\n let mut header = Header::default();\n\n header.add_property(&torsion).unwrap();\n\n assert_eq!(header.properties_name, vec![torsion])\n\n}\n\n\n", "file_path": "tests/header.rs", "rank": 12, "score": 87888.88335468729 }, { "content": "#[test]\n\n#[should_panic]\n\nfn 
test_unicode_scalar() {\n\n let mut header = Header::default();\n\n header.add_scalar(\"平仮名, ひらがな\").unwrap();\n\n}\n\n\n", "file_path": "tests/header.rs", "rank": 13, "score": 87888.79162830848 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_too_much_scalars() {\n\n let mut header = Header::default();\n\n for _ in 0..11 {\n\n header.add_scalar(\"test\").unwrap();\n\n }\n\n}\n\n\n", "file_path": "tests/header.rs", "rank": 14, "score": 87888.79162830848 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_unicode_property() {\n\n let mut header = Header::default();\n\n header.add_property(\"平仮名, ひらがな\").unwrap();\n\n}\n", "file_path": "tests/header.rs", "rank": 15, "score": 87888.79162830848 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_too_much_properties() {\n\n let mut header = Header::default();\n\n for _ in 0..11 {\n\n header.add_property(\"test\").unwrap();\n\n }\n\n}\n\n\n", "file_path": "tests/header.rs", "rank": 16, "score": 87888.79162830848 }, { "content": "#[test]\n\nfn test_reverse_iterator() {\n\n let streamlines = get_toy_streamlines();\n\n let lengths = streamlines.iter().rev().map(|streamline| streamline.len()).collect::<Vec<_>>();\n\n assert_eq!(lengths, vec![3, 3, 2]);\n\n}\n\n\n", "file_path": "tests/array_sequence.rs", "rank": 18, "score": 85269.82779090536 }, { "content": "#[test]\n\nfn test_copy_scalars_and_properties() {\n\n let s1 = \"s1\".to_string();\n\n let s2 = \"s2\".to_string();\n\n let p1 = \"p1\".to_string();\n\n let p2 = \"p2\".to_string();\n\n\n\n let mut header1 = Header::default();\n\n header1.add_scalar(&s2).unwrap();\n\n header1.add_property(&p2).unwrap();\n\n\n\n let mut header2 = Header::default();\n\n header2.add_scalar(&s1).unwrap();\n\n header2.add_scalar(&s2).unwrap();\n\n header2.add_property(&p1).unwrap();\n\n header2.add_property(&p2).unwrap();\n\n\n\n header2.copy_scalars_and_properties(&header1);\n\n assert_eq!(header2.scalars_name, vec![s2]);\n\n assert_eq!(header2.properties_name, 
vec![p2]);\n\n}\n\n\n", "file_path": "tests/header.rs", "rank": 19, "score": 84492.47794995796 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_property_name_too_long() {\n\n let mut header = Header::default();\n\n header.add_property(\"01234567890123456789a\").unwrap();\n\n}\n\n\n", "file_path": "tests/header.rs", "rank": 20, "score": 84492.38622357915 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_scalar_name_too_long() {\n\n let mut header = Header::default();\n\n header.add_scalar(\"01234567890123456789a\").unwrap();\n\n}\n\n\n", "file_path": "tests/header.rs", "rank": 21, "score": 84492.38622357915 }, { "content": "#[test]\n\nfn test_dynamic() {\n\n let mut arr = ArraySequence::empty();\n\n for i in 0..10 {\n\n assert_eq!(arr.nb_push_done(), i);\n\n arr.push(i);\n\n assert_eq!(arr.nb_push_done(), i + 1);\n\n }\n\n arr.end_push();\n\n assert_eq!(arr.nb_push_done(), 0);\n\n\n\n assert_eq!(arr.len(), 1);\n\n assert_eq!(arr.length_of_array(0), 10);\n\n assert_eq!(arr[0].len(), 10);\n\n assert_eq!(arr.offsets, vec![0, 10]);\n\n\n\n arr.extend(vec![11, 12, 13, 14, 15]);\n\n assert_eq!(arr.len(), 2);\n\n assert_eq!(arr.length_of_array(0), 10);\n\n assert_eq!(arr[0].len(), 10);\n\n assert_eq!(arr.length_of_array(1), 5);\n\n assert_eq!(arr[1].len(), 5);\n\n assert_eq!(arr.offsets, vec![0, 10, 15]);\n\n\n\n arr.extend_from_slice(&[20, 21, 22, 23]);\n\n assert_eq!(arr.len(), 3);\n\n assert_eq!(arr[2].len(), 4);\n\n assert_eq!(arr.offsets, vec![0, 10, 15, 19]);\n\n}\n\n\n", "file_path": "tests/array_sequence.rs", "rank": 23, "score": 65226.77203307516 }, { "content": "#[test]\n\nfn test_integers() {\n\n let arr = ArraySequence::new(vec![2, 3, 2, 1], vec![4, 5, 6, 7, 8, 9, 10, 11]);\n\n assert_eq!(arr.len(), 4);\n\n assert_eq!(arr.offsets, vec![0, 2, 5, 7, 8]);\n\n}\n\n\n", "file_path": "tests/array_sequence.rs", "rank": 24, "score": 65226.77203307516 }, { "content": "#[test]\n\nfn test_empty() {\n\n let mut arr = ArraySequence::empty();\n\n 
assert_eq!(arr.is_empty(), true);\n\n assert_eq!(arr.len(), 0);\n\n\n\n for _ in 0..2 {\n\n arr.push(1);\n\n assert_eq!(arr.is_empty(), false);\n\n assert_eq!(arr.len(), 0);\n\n }\n\n\n\n arr.end_push();\n\n assert_eq!(arr.is_empty(), false);\n\n assert_eq!(arr.len(), 1);\n\n}\n\n\n", "file_path": "tests/array_sequence.rs", "rank": 25, "score": 65226.77203307516 }, { "content": "#[test]\n\nfn test_filter() {\n\n let p = Point::new(1.0, 1.0, 1.0);\n\n let arr = ArraySequence::new(\n\n vec![2, 3, 2, 3],\n\n vec![\n\n p * 1.0,\n\n p * 2.0,\n\n p * 2.0,\n\n p * 3.0,\n\n p * 4.0,\n\n p * 3.0,\n\n p * 4.0,\n\n p * 4.0,\n\n p * 5.0,\n\n p * 6.0,\n\n ],\n\n );\n\n let filtered = arr.filter(&mut |arr: &[Point]| arr.len() == 3);\n\n assert_eq!(filtered.len(), 2);\n\n assert_eq!(filtered[0], [p * 2.0, p * 3.0, p * 4.0]);\n\n assert_eq!(filtered[1], [p * 4.0, p * 5.0, p * 6.0]);\n\n\n\n // Ensure that arr is still usable\n\n assert_eq!(arr.len(), 4);\n\n}\n", "file_path": "tests/array_sequence.rs", "rank": 26, "score": 65226.77203307516 }, { "content": "#[test]\n\nfn test_construction() {\n\n let streamlines = get_toy_streamlines();\n\n assert_eq!(streamlines.len(), 3);\n\n assert_eq!(streamlines.offsets, vec![0, 2, 5, 8]);\n\n}\n\n\n", "file_path": "tests/array_sequence.rs", "rank": 27, "score": 65226.77203307516 }, { "content": "#[test]\n\nfn test_load_standard() {\n\n let mut reader = Reader::new(\"data/standard.trk\").unwrap();\n\n let Tractogram { streamlines, scalars, properties } = reader.read_all();\n\n\n\n assert_eq!(reader.affine_to_rasmm, Affine::identity());\n\n assert_eq!(reader.translation, Translation::new(-0.5, -1.5, -1.0));\n\n\n\n assert_eq!(streamlines.len(), 120);\n\n assert_eq!(\n\n streamlines[0],\n\n [Point::new(-0.5, -1.5, 1.0), Point::new(0.0, 0.0, 2.0), Point::new(0.5, 1.5, 3.0)]\n\n );\n\n assert_eq!(\n\n streamlines[1],\n\n [Point::new(-0.5, 1.5, 1.0), Point::new(0.0, 0.0, 2.0), Point::new(0.5, -1.5, 3.0)]\n\n );\n\n assert!(scalars.is_empty());\n\n 
assert!(properties.is_empty());\n\n\n\n // Test generator\n\n let reader = Reader::new(\"data/standard.trk\").unwrap();\n\n for (streamline, _, _) in reader.into_iter() {\n\n assert_eq!(streamline.len(), 3);\n\n }\n\n}\n\n\n", "file_path": "tests/trk_read.rs", "rank": 28, "score": 63028.82682194277 }, { "content": "#[test]\n\nfn test_load_complex() {\n\n let mut reader = Reader::new(\"data/complex.trk\").unwrap();\n\n let Tractogram { streamlines, scalars, properties } = reader.read_all();\n\n assert_eq!(reader.affine_to_rasmm, Affine::identity());\n\n assert_eq!(reader.translation, Translation::new(-0.5, -0.5, -0.5));\n\n\n\n assert_eq!(streamlines.len(), 3);\n\n assert_eq!(streamlines[0], [Point::new(0.0, 1.0, 2.0)]);\n\n assert_eq!(streamlines[1], [Point::new(0.0, 1.0, 2.0), Point::new(3.0, 4.0, 5.0)]);\n\n assert_eq!(\n\n streamlines[2],\n\n [\n\n Point::new(0.0, 1.0, 2.0),\n\n Point::new(3.0, 4.0, 5.0),\n\n Point::new(6.0, 7.0, 8.0),\n\n Point::new(9.0, 10.0, 11.0),\n\n Point::new(12.0, 13.0, 14.0)\n\n ]\n\n );\n\n\n\n check_complex_scalars_and_properties(reader.header, scalars, properties);\n\n}\n\n\n", "file_path": "tests/trk_read.rs", "rank": 29, "score": 63028.82682194277 }, { "content": "#[test]\n\nfn test_load_simple() {\n\n let first = [Point::new(0.0, 1.0, 2.0)];\n\n let second = [Point::new(0.0, 1.0, 2.0), Point::new(3.0, 4.0, 5.0)];\n\n let third = [\n\n Point::new(0.0, 1.0, 2.0),\n\n Point::new(3.0, 4.0, 5.0),\n\n Point::new(6.0, 7.0, 8.0),\n\n Point::new(9.0, 10.0, 11.0),\n\n Point::new(12.0, 13.0, 14.0),\n\n ];\n\n\n\n let Tractogram { streamlines, scalars, properties } =\n\n Reader::new(\"data/simple.trk\").unwrap().read_all();\n\n\n\n assert_eq!(streamlines.len(), 3);\n\n assert_eq!(streamlines[0], first);\n\n assert_eq!(streamlines[1], second);\n\n assert_eq!(streamlines[2], third);\n\n assert!(scalars.is_empty());\n\n assert!(properties.is_empty());\n", "file_path": "tests/trk_read.rs", "rank": 32, "score": 63028.82682194277 }, { "content": 
"#[test]\n\nfn test_empty_push() {\n\n let mut arr = ArraySequence::<f64>::empty();\n\n assert_eq!(arr.len(), 0);\n\n assert_eq!(arr.offsets, vec![0]);\n\n\n\n // An `end_push` without any `push` should do nothing\n\n arr.end_push();\n\n arr.end_push();\n\n\n\n assert_eq!(arr.len(), 0);\n\n assert_eq!(arr.offsets, vec![0]);\n\n}\n\n\n", "file_path": "tests/array_sequence.rs", "rank": 33, "score": 63028.82682194277 }, { "content": "#[test]\n\nfn test_load_empty() {\n\n let Tractogram { streamlines, scalars, properties } =\n\n Reader::new(\"data/empty.trk\").unwrap().read_all();\n\n\n\n assert_eq!(streamlines.len(), 0);\n\n assert!(scalars.is_empty());\n\n assert!(properties.is_empty());\n\n\n\n // Test generator\n\n let reader = Reader::new(\"data/empty.trk\").unwrap();\n\n assert_eq!(reader.into_iter().count(), 0);\n\n}\n\n\n", "file_path": "tests/trk_read.rs", "rank": 34, "score": 63028.82682194277 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_new_too_much() {\n\n ArraySequence::new(\n\n vec![2],\n\n vec![Point::new(1.0, 0.0, 0.0), Point::new(1.0, 0.0, 0.0), Point::new(1.0, 0.0, 0.0)],\n\n );\n\n}\n\n\n", "file_path": "tests/array_sequence.rs", "rank": 37, "score": 63028.73509556397 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_new_not_enough() {\n\n ArraySequence::new(vec![2], vec![Point::new(1.0, 0.0, 0.0)]);\n\n}\n\n\n", "file_path": "tests/array_sequence.rs", "rank": 38, "score": 63028.73509556397 }, { "content": "fn main() {\n\n let version = String::from(env!(\"CARGO_PKG_VERSION\"));\n\n let args = Docopt::new(USAGE)\n\n .and_then(|dopt| dopt.version(Some(version)).parse())\n\n .unwrap_or_else(|e| e.exit());\n\n let print_all = args.get_bool(\"--all\");\n\n let input = Path::new(args.get_str(\"<input>\"));\n\n if !input.exists() {\n\n panic!(\"Input trk '{:?}' doesn't exist.\", input);\n\n }\n\n\n\n let f = File::open(args.get_str(\"<input>\")).expect(\"Can't read trk file.\");\n\n let mut reader = BufReader::new(f);\n\n let (header, 
endianness) = CHeader::read(&mut reader).expect(\"Read header\");\n\n\n\n if print_all {\n\n println!(\"---------- Actual fields ----------\");\n\n }\n\n println!(\"id_string: {:?} ({})\", header.id_string, str::from_utf8(&header.id_string).unwrap());\n\n println!(\"dim: {:?}\", header.dim);\n", "file_path": "examples/trk_header.rs", "rank": 39, "score": 61363.43993602859 }, { "content": "#[test]\n\nfn test_load_standard_lps() {\n\n let mut reader = Reader::new(\"data/standard.LPS.trk\").unwrap();\n\n let Tractogram { streamlines, scalars, properties } = reader.read_all();\n\n #[rustfmt::skip]\n\n assert_eq!(reader.affine_to_rasmm, Affine::from_diagonal(&Vector3::new(-1.0, -1.0, 1.0)));\n\n assert_eq!(reader.translation, Translation::new(3.5, 13.5, -1.0));\n\n\n\n assert_eq!(streamlines.len(), 120);\n\n assert_eq!(\n\n streamlines[0],\n\n [Point::new(-0.5, -1.5, 1.0), Point::new(0.0, 0.0, 2.0), Point::new(0.5, 1.5, 3.0)]\n\n );\n\n assert_eq!(\n\n streamlines[1],\n\n [Point::new(-0.5, 1.5, 1.0), Point::new(0.0, 0.0, 2.0), Point::new(0.5, -1.5, 3.0)]\n\n );\n\n assert!(scalars.is_empty());\n\n assert!(properties.is_empty());\n\n}\n\n\n", "file_path": "tests/trk_read.rs", "rank": 40, "score": 61019.8977530471 }, { "content": "#[test]\n\nfn test_load_complex_big_endian() {\n\n let first = [Point::new(0.0, 1.0, 2.0)];\n\n let second = [Point::new(0.0, 1.0, 2.0), Point::new(3.0, 4.0, 5.0)];\n\n let third = [\n\n Point::new(0.0, 1.0, 2.0),\n\n Point::new(3.0, 4.0, 5.0),\n\n Point::new(6.0, 7.0, 8.0),\n\n Point::new(9.0, 10.0, 11.0),\n\n Point::new(12.0, 13.0, 14.0),\n\n ];\n\n\n\n let mut reader = Reader::new(\"data/complex_big_endian.trk\").unwrap();\n\n let Tractogram { streamlines, scalars, properties } = reader.read_all();\n\n assert_eq!(streamlines.len(), 3);\n\n assert_eq!(streamlines[0], first);\n\n assert_eq!(streamlines[1], second);\n\n assert_eq!(streamlines[2], third);\n\n check_complex_scalars_and_properties(reader.header, scalars, properties);\n\n\n\n // 
Test generator\n", "file_path": "tests/trk_read.rs", "rank": 42, "score": 59175.96355276719 }, { "content": "// This helper module is automatically imported for all integration tests, even for those who don't\n\n// import it. Because of this, I think it's acceptable to use a `allow(unused)` directive.\n\n#![allow(unused)]\n\n\n\nuse tempfile::TempDir;\n\nuse trk_io::{Header, Reader, Streamlines, Tractogram};\n\n\n", "file_path": "tests/test/mod.rs", "rank": 43, "score": 57972.712530482 }, { "content": "fn check_complex_scalars_and_properties(\n\n header: Header,\n\n scalars: ArraySequence<f32>,\n\n properties: ArraySequence<f32>,\n\n) {\n\n // Scalars\n\n assert_eq!(\n\n header.scalars_name,\n\n vec![\n\n String::from(\"colors\"),\n\n String::from(\"colors\"),\n\n String::from(\"colors\"),\n\n String::from(\"fa\")\n\n ]\n\n );\n\n assert_eq!(&scalars[0], &[1.0, 0.0, 0.0, 0.200000003]);\n\n assert_eq!(&scalars[1], &[0.0, 1.0, 0.0, 0.300000012, 0.0, 1.0, 0.0, 0.400000006]);\n\n assert_eq!(\n\n &scalars[2],\n\n &[\n", "file_path": "tests/trk_read.rs", "rank": 44, "score": 53186.92581930742 }, { "content": "use trk_io::Header;\n\n\n\n#[test]\n", "file_path": "tests/header.rs", "rank": 45, "score": 52922.456553516604 }, { "content": "#[cfg(feature = \"nifti_images\")]\n\npub fn trackvis_to_rasmm(h: &NiftiHeader) -> Affine4 {\n\n let c_header = CHeader::from_nifti(h.dim, h.pixdim, h.srow_x, h.srow_y, h.srow_z);\n\n c_header.get_affine_to_rasmm()\n\n}\n", "file_path": "src/affine.rs", "rank": 46, "score": 46991.277914083 }, { "content": "#[cfg(feature = \"nifti_images\")]\n\npub fn rasmm_to_trackvis(h: &NiftiHeader) -> Affine4 {\n\n trackvis_to_rasmm(h).try_inverse().unwrap()\n\n}\n\n\n", "file_path": "src/affine.rs", "rank": 47, "score": 46991.277914083 }, { "content": "fn main() {\n\n let version = String::from(env!(\"CARGO_PKG_VERSION\"));\n\n let args = Docopt::new(USAGE)\n\n .and_then(|dopt| dopt.version(Some(version)).parse())\n\n .unwrap_or_else(|e| 
e.exit());\n\n\n\n let input = std::path::Path::new(args.get_str(\"<input>\"));\n\n if !input.exists() {\n\n panic!(\"Input trk '{:?}' doesn't exist.\", input);\n\n }\n\n\n\n let precision = args.get_str(\"--precision\").parse::<usize>().unwrap();\n\n let print = |p: &Point| {\n\n println!(\"({:.*} {:.*} {:.*})\", precision, p[0], precision, p[1], precision, p[2]);\n\n };\n\n\n\n // nb - 1 because we don't want to print the last \\n\n\n let nb = args.get_str(\"<nb>\").parse::<usize>().unwrap() - 1;\n\n let upto = args.get_str(\"--upto\").parse::<usize>().unwrap_or(std::usize::MAX);\n\n let first_part = upto / 2;\n", "file_path": "examples/trk_n_first.rs", "rank": 48, "score": 37361.117292958515 }, { "content": "fn main() {\n\n let version = String::from(env!(\"CARGO_PKG_VERSION\"));\n\n let args = Docopt::new(USAGE)\n\n .and_then(|dopt| dopt.version(Some(version)).parse())\n\n .unwrap_or_else(|e| e.exit());\n\n\n\n let input = std::path::Path::new(args.get_str(\"<input>\"));\n\n if !input.exists() {\n\n panic!(\"Input trk '{:?}' doesn't exist.\", input);\n\n }\n\n\n\n let reader = Reader::new(args.get_str(\"<input>\")).expect(\"Read header\");\n\n let mut header = reader.header.clone();\n\n header.add_scalar(\"color_x\").unwrap();\n\n header.add_scalar(\"color_y\").unwrap();\n\n header.add_scalar(\"color_z\").unwrap();\n\n\n\n if args.get_bool(\"uniform\") {\n\n let r = args.get_str(\"<r>\").parse::<f32>().unwrap();\n\n let g = args.get_str(\"<g>\").parse::<f32>().unwrap();\n\n let b = args.get_str(\"<b>\").parse::<f32>().unwrap();\n\n uniform(reader, header, args.get_str(\"<output>\"), r, g, b);\n\n } else if args.get_bool(\"local\") {\n\n local(reader, header, args.get_str(\"<output>\"));\n\n }\n\n}\n\n\n", "file_path": "examples/trk_color.rs", "rank": 49, "score": 37361.117292958515 }, { "content": "fn main() {\n\n let version = String::from(env!(\"CARGO_PKG_VERSION\"));\n\n let args = Docopt::new(USAGE)\n\n .and_then(|dopt| 
dopt.version(Some(version)).parse())\n\n .unwrap_or_else(|e| e.exit());\n\n let input = std::path::Path::new(args.get_str(\"<input>\"));\n\n if !input.exists() {\n\n panic!(\"Input trk '{:?}' doesn't exist.\", input);\n\n }\n\n\n\n let reader = Reader::new(args.get_str(\"<input>\")).expect(\"Read header\");\n\n let mut writer = Writer::new(args.get_str(\"<output>\"), Some(reader.header.clone())).unwrap();\n\n\n\n let mut rng = match args.get_str(\"--seed\").parse::<u8>() {\n\n Ok(seed) => SmallRng::from_seed([seed; 32]),\n\n Err(_) => SmallRng::from_entropy(),\n\n };\n\n\n\n if let Ok(percent) = args.get_str(\"--percent\").parse::<f32>() {\n\n let percent = percent / 100.0;\n", "file_path": "examples/trk_subsampler.rs", "rank": 50, "score": 37361.117292958515 }, { "content": "/// Affine transform reversing transforms implied in `orientations`\n\n///\n\n/// Imagine you have an array `arr` of shape `shape`, and you apply the\n\n/// transforms implied by `orientations`, to get `tarr`. `tarr` may have a\n\n/// different shape `shape_prime`. 
This routine returns the affine that will\n\n/// take an array coordinate for `tarr` and give you the corresponding array\n\n/// coordinate in `arr`.\n\npub fn inverse_orientations_affine(orientations: &Orientations, dim: [i16; 3]) -> Affine4 {\n\n let mut undo_reorder = Affine4::zeros();\n\n for (i, &(j, _)) in orientations.iter().enumerate() {\n\n undo_reorder[(i, j)] = 1.0;\n\n }\n\n undo_reorder[(3, 3)] = 1.0;\n\n\n\n let center = Translation::new(\n\n -(dim[0] - 1) as f32 / 2.0,\n\n -(dim[1] - 1) as f32 / 2.0,\n\n -(dim[2] - 1) as f32 / 2.0,\n\n );\n\n let mut undo_flip = Affine4::from_diagonal(&Vector4::new(\n\n orientations[0].1.to_f32(),\n\n orientations[1].1.to_f32(),\n\n orientations[2].1.to_f32(),\n\n 1.0,\n\n ));\n\n undo_flip[(0, 3)] = undo_flip[(0, 0)] * center[0] - center[0];\n\n undo_flip[(1, 3)] = undo_flip[(1, 1)] * center[1] - center[1];\n", "file_path": "src/orientation.rs", "rank": 51, "score": 34688.51128001217 }, { "content": "/// Return the orientation that transforms from `start_orientations` to\n\n/// `end_orientations`\n\npub fn orientations_transform(\n\n start_orientations: &Orientations,\n\n end_orientations: &Orientations,\n\n) -> Orientations {\n\n let mut result = [(0, Direction::Normal), (0, Direction::Normal), (0, Direction::Normal)];\n\n for (end_in_idx, &(ref end_out_idx, ref end_flip)) in end_orientations.iter().enumerate() {\n\n for (start_in_idx, &(ref start_out_idx, ref start_flip)) in\n\n start_orientations.iter().enumerate()\n\n {\n\n if end_out_idx == start_out_idx {\n\n if start_flip == end_flip {\n\n result[start_in_idx] = (end_in_idx, Direction::Normal)\n\n } else {\n\n result[start_in_idx] = (end_in_idx, Direction::Reversed)\n\n }\n\n break;\n\n }\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/orientation.rs", "rank": 52, "score": 34210.857931420556 }, { "content": "/// Orientation of input axes in terms of output axes for `affine`\n\n///\n\n/// Valid for an affine transformation from `p` dimensions to `q` 
dimensions\n\n/// (`affine.shape == (q + 1, p + 1)`).\n\n///\n\n/// The calculated orientations can be used to transform associated arrays to\n\n/// best match the output orientations. If `p` > `q`, then some of the output\n\n/// axes should be considered dropped in this orientation.\n\nfn io_orientations(affine: &Affine) -> Orientations {\n\n // Extract the underlying rotation, zoom, shear matrix\n\n let rzs2 = affine.component_mul(affine);\n\n let mut zooms = RowVector3::new(\n\n (rzs2[0] + rzs2[1] + rzs2[2]).sqrt(),\n\n (rzs2[3] + rzs2[4] + rzs2[5]).sqrt(),\n\n (rzs2[6] + rzs2[7] + rzs2[8]).sqrt(),\n\n );\n\n\n\n // Zooms can be zero, in which case all elements in the column are zero,\n\n // and we can leave them as they are\n\n zooms.apply(|z| if z == 0.0 { 1.0 } else { z });\n\n\n\n #[rustfmt::skip]\n\n let rs = Affine::new(\n\n affine[0] / zooms[0], affine[3] / zooms[1], affine[6] / zooms[2],\n\n affine[1] / zooms[0], affine[4] / zooms[1], affine[7] / zooms[2],\n\n affine[2] / zooms[0], affine[5] / zooms[1], affine[8] / zooms[2],\n\n );\n\n\n", "file_path": "src/orientation.rs", "rank": 53, "score": 29447.629201280517 }, { "content": "/// Convert orientation `orientations` to labels for axis directions\n\nfn orientations_to_axcodes(orientations: Orientations) -> String {\n\n let labels = [\n\n (\"L\".to_string(), \"R\".to_string()),\n\n (\"P\".to_string(), \"A\".to_string()),\n\n (\"I\".to_string(), \"S\".to_string()),\n\n ];\n\n\n\n orientations\n\n .iter()\n\n .map(|&(ref axis, ref direction)| {\n\n if *direction == Direction::Normal {\n\n labels[*axis].1.clone()\n\n } else {\n\n labels[*axis].0.clone()\n\n }\n\n })\n\n .collect::<Vec<_>>()\n\n .join(\"\")\n\n}\n\n\n", "file_path": "src/orientation.rs", "rank": 54, "score": 29445.709249796117 }, { "content": "use nalgebra::Point3;\n\n\n\nuse crate::ArraySequence;\n\n\n\npub type Point = Point3<f32>;\n\npub type Points = Vec<Point>;\n\npub type Streamlines = ArraySequence<Point>;\n\n\n\npub type 
TractogramItem = (Points, ArraySequence<f32>, Vec<f32>);\n\npub type RefTractogramItem<'data> = (&'data [Point], &'data [f32], &'data [f32]);\n\n\n\n#[derive(Clone, PartialEq)]\n\npub struct Tractogram {\n\n pub streamlines: Streamlines,\n\n pub scalars: ArraySequence<f32>,\n\n pub properties: ArraySequence<f32>,\n\n}\n\n\n\nimpl Tractogram {\n\n pub fn new(\n", "file_path": "src/tractogram.rs", "rank": 55, "score": 28069.234837959142 }, { "content": " streamlines: Streamlines,\n\n scalars: ArraySequence<f32>,\n\n properties: ArraySequence<f32>,\n\n ) -> Tractogram {\n\n Tractogram { streamlines, scalars, properties }\n\n }\n\n\n\n pub fn item(&self, idx: usize) -> RefTractogramItem {\n\n // Do not use .get(idx).unwrap_or(). The empty slice is valid only if the ArraySequence are\n\n // empty. It should crash if the index is invalid.\n\n let scalars = if self.scalars.is_empty() { &[] } else { &self.scalars[idx] };\n\n let properties = if self.properties.is_empty() { &[] } else { &self.properties[idx] };\n\n (&self.streamlines[idx], scalars, properties)\n\n }\n\n}\n\n\n\nimpl<'data> IntoIterator for &'data Tractogram {\n\n type Item = RefTractogramItem<'data>;\n\n type IntoIter = TractogramIterator<'data>;\n\n\n", "file_path": "src/tractogram.rs", "rank": 56, "score": 28067.91168934026 }, { "content": " fn into_iter(self) -> Self::IntoIter {\n\n TractogramIterator { tractogram: self, index: 0..self.streamlines.len() }\n\n }\n\n}\n\n\n\npub struct TractogramIterator<'data> {\n\n tractogram: &'data Tractogram,\n\n index: std::ops::Range<usize>,\n\n}\n\n\n\nimpl<'data> Iterator for TractogramIterator<'data> {\n\n type Item = RefTractogramItem<'data>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let idx = self.index.next()?;\n\n Some(self.tractogram.item(idx))\n\n }\n\n\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n (0, Some(self.tractogram.streamlines.len()))\n", "file_path": "src/tractogram.rs", "rank": 57, "score": 28067.120579177503 }, { "content": 
" }\n\n}\n\n\n\nimpl<'data> ExactSizeIterator for TractogramIterator<'data> {}\n\n\n\nimpl<'data> DoubleEndedIterator for TractogramIterator<'data> {\n\n fn next_back(&mut self) -> Option<Self::Item> {\n\n let idx = self.index.next_back()?;\n\n Some(self.tractogram.item(idx))\n\n }\n\n}\n", "file_path": "src/tractogram.rs", "rank": 58, "score": 28064.62174463952 }, { "content": " data.write(self);\n\n }\n\n\n\n pub fn write_from_iter<I>(&mut self, streamline: I, len: usize)\n\n where\n\n I: IntoIterator<Item = Point>,\n\n {\n\n write_streamline!(self, streamline, len);\n\n }\n\n\n\n fn write_point(&mut self, p: &Point) {\n\n let p = self.affine * p + self.translation;\n\n self.writer.write_f32::<TrkEndianness>(p.x).unwrap();\n\n self.writer.write_f32::<TrkEndianness>(p.y).unwrap();\n\n self.writer.write_f32::<TrkEndianness>(p.z).unwrap();\n\n }\n\n\n\n fn write_f32s(&mut self, data: &[f32]) {\n\n for &d in data {\n\n self.writer.write_f32::<TrkEndianness>(d).unwrap();\n", "file_path": "src/writer.rs", "rank": 59, "score": 27992.954985201264 }, { "content": "use std::{\n\n fs::File,\n\n io::{BufWriter, Result},\n\n path::Path,\n\n};\n\n\n\nuse byteorder::WriteBytesExt;\n\n\n\nuse crate::{\n\n affine::get_affine_and_translation,\n\n tractogram::{Point, RefTractogramItem, Tractogram, TractogramItem},\n\n Affine, Affine4, CHeader, Header, Translation, TrkEndianness,\n\n};\n\n\n\nmacro_rules! 
write_streamline {\n\n ($writer:ident, $streamline:expr, $scalars:expr, $properties:expr) => {\n\n if $writer.nb_scalars == 0 {\n\n $streamline.write($writer);\n\n } else {\n\n $writer.writer.write_i32::<TrkEndianness>($streamline.len() as i32).unwrap();\n", "file_path": "src/writer.rs", "rank": 60, "score": 27991.43722584321 }, { "content": " fn write(self, writer: &mut Writer) {\n\n let (streamline, scalars, properties) = self;\n\n write_streamline!(writer, streamline, scalars, properties);\n\n }\n\n}\n\n\n\nimpl<'data> Writable for &'data [Point] {\n\n fn write(self, writer: &mut Writer) {\n\n write_streamline!(writer, self, self.len());\n\n }\n\n}\n\n\n\nimpl Writer {\n\n pub fn new<P: AsRef<Path>>(path: P, reference: Option<Header>) -> Result<Writer> {\n\n let f = File::create(path).expect(\"Can't create new trk file.\");\n\n let mut writer = BufWriter::new(f);\n\n\n\n let header = match reference {\n\n Some(r) => r,\n\n None => Header::default(),\n", "file_path": "src/writer.rs", "rank": 61, "score": 27987.81443093001 }, { "content": " $writer.real_n_count += 1;\n\n\n\n let scalars = $scalars.chunks($writer.nb_scalars);\n\n for (p, scalars) in $streamline.into_iter().zip(scalars) {\n\n $writer.write_point(&p);\n\n $writer.write_f32s(scalars);\n\n }\n\n }\n\n\n\n $writer.write_f32s($properties);\n\n };\n\n // Fast method, without scalars and properties\n\n ($writer:ident, $streamline:expr, $nb_points:expr) => {\n\n $writer.writer.write_i32::<TrkEndianness>($nb_points as i32).unwrap();\n\n for p in $streamline {\n\n $writer.write_point(&p);\n\n }\n\n $writer.real_n_count += 1;\n\n };\n\n}\n", "file_path": "src/writer.rs", "rank": 62, "score": 27985.819350718783 }, { "content": " };\n\n header.write(&mut writer)?;\n\n let nb_scalars = header.scalars_name.len();\n\n\n\n // We are only interested in the inversed affine\n\n let affine4 =\n\n header.affine4_to_rasmm.try_inverse().expect(\"Unable to inverse 4x4 affine matrix\");\n\n let (affine, translation) = 
get_affine_and_translation(&affine4);\n\n\n\n Ok(Writer { writer, affine4, affine, translation, real_n_count: 0, nb_scalars })\n\n }\n\n\n\n pub fn apply_affine(&mut self, affine: &Affine4) {\n\n self.affine4 = self.affine4 * affine;\n\n let (affine, translation) = get_affine_and_translation(&self.affine4);\n\n self.affine = affine;\n\n self.translation = translation;\n\n }\n\n\n\n pub fn write<T: Writable>(&mut self, data: T) {\n", "file_path": "src/writer.rs", "rank": 63, "score": 27982.113418121044 }, { "content": " }\n\n }\n\n}\n\n\n\n// Finally write `n_count`\n\nimpl Drop for Writer {\n\n fn drop(&mut self) {\n\n CHeader::seek_n_count_field(&mut self.writer)\n\n .expect(\"Unable to seek to 'n_count' field before closing trk file.\");\n\n self.writer\n\n .write_i32::<TrkEndianness>(self.real_n_count)\n\n .expect(\"Unable to write 'n_count' field before closing trk file.\");\n\n }\n\n}\n", "file_path": "src/writer.rs", "rank": 64, "score": 27982.056726830855 }, { "content": "\n\npub struct Writer {\n\n writer: BufWriter<File>,\n\n pub affine4: Affine4,\n\n affine: Affine,\n\n translation: Translation,\n\n real_n_count: i32,\n\n nb_scalars: usize,\n\n}\n\n\n", "file_path": "src/writer.rs", "rank": 65, "score": 27978.47809525488 }, { "content": " properties.end_push();\n\n }\n\n\n\n fn read_properties_to_vec<E: ByteOrder>(&mut self, properties: &mut Vec<f32>) {\n\n for _ in 0..self.nb_properties {\n\n properties.push(self.reader.read_f32::<E>().unwrap());\n\n }\n\n }\n\n}\n\n\n\nimpl Iterator for Reader {\n\n type Item = TractogramItem;\n\n\n\n fn next(&mut self) -> Option<TractogramItem> {\n\n if let Ok(nb_points) = match self.endianness {\n\n Endianness::Little => self.reader.read_i32::<LittleEndian>(),\n\n Endianness::Big => self.reader.read_i32::<BigEndian>(),\n\n } {\n\n let nb_points = nb_points as usize;\n\n let mut streamline = Vec::with_capacity(nb_points);\n", "file_path": "src/reader.rs", "rank": 66, "score": 27913.468941897267 }, { "content": "use 
std::{\n\n fs::File,\n\n io::{BufReader, Result},\n\n path::Path,\n\n};\n\n\n\nuse byteorder::{BigEndian, ByteOrder, LittleEndian, ReadBytesExt};\n\n\n\nuse crate::{\n\n cheader::Endianness,\n\n tractogram::{Point, Points, Streamlines, Tractogram, TractogramItem},\n\n Affine, ArraySequence, Header, Translation,\n\n};\n\n\n\npub struct Reader {\n\n reader: BufReader<File>,\n\n endianness: Endianness,\n\n pub header: Header,\n\n pub affine_to_rasmm: Affine,\n\n pub translation: Translation,\n", "file_path": "src/reader.rs", "rank": 67, "score": 27911.589803771465 }, { "content": " let mut v = Vec::with_capacity(300);\n\n let mut scalars = ArraySequence::with_capacity(300);\n\n let mut properties = ArraySequence::with_capacity(300);\n\n while let Ok(nb_points) = self.reader.read_i32::<E>() {\n\n lengths.push(nb_points as usize);\n\n self.read_streamline::<E>(&mut v, &mut scalars, nb_points as usize);\n\n self.read_properties_to_arr::<E>(&mut properties);\n\n }\n\n\n\n self.float_buffer = vec![];\n\n Tractogram::new(Streamlines::new(lengths, v), scalars, properties)\n\n }\n\n\n\n fn read_streamline<E: ByteOrder>(\n\n &mut self,\n\n points: &mut Points,\n\n scalars: &mut ArraySequence<f32>,\n\n nb_points: usize,\n\n ) {\n\n // Vec::resize never decreases capacity, it can only increase it\n", "file_path": "src/reader.rs", "rank": 68, "score": 27908.21490110743 }, { "content": "\n\n nb_scalars: usize,\n\n nb_properties: usize,\n\n nb_floats_per_point: usize,\n\n float_buffer: Vec<f32>,\n\n}\n\n\n\nimpl Reader {\n\n pub fn new<P: AsRef<Path>>(path: P) -> Result<Reader> {\n\n let mut reader = BufReader::new(File::open(&path)?);\n\n let (header, endianness) = Header::read(&mut reader)?;\n\n let affine_to_rasmm = header.affine_to_rasmm;\n\n let translation = header.translation;\n\n let nb_scalars = header.scalars_name.len();\n\n let nb_properties = header.properties_name.len();\n\n let nb_floats_per_point = 3 + nb_scalars;\n\n\n\n Ok(Reader {\n\n reader,\n\n endianness,\n", 
"file_path": "src/reader.rs", "rank": 69, "score": 27907.183392328105 }, { "content": " // so there won't be any useless allocation.\n\n let nb_floats = nb_points * self.nb_floats_per_point;\n\n self.float_buffer.resize(nb_floats as usize, 0.0);\n\n self.reader.read_f32_into::<E>(self.float_buffer.as_mut_slice()).unwrap();\n\n\n\n for floats in self.float_buffer.chunks(self.nb_floats_per_point) {\n\n let p = Point::new(floats[0], floats[1], floats[2]);\n\n points.push((self.affine_to_rasmm * p) + self.translation);\n\n\n\n for f in &floats[3..] {\n\n scalars.push(*f);\n\n }\n\n }\n\n scalars.end_push();\n\n }\n\n\n\n fn read_properties_to_arr<E: ByteOrder>(&mut self, properties: &mut ArraySequence<f32>) {\n\n for _ in 0..self.nb_properties {\n\n properties.push(self.reader.read_f32::<E>().unwrap());\n\n }\n", "file_path": "src/reader.rs", "rank": 70, "score": 27905.484195635298 }, { "content": " let mut scalars = ArraySequence::with_capacity(nb_points * self.nb_scalars);\n\n let mut properties = Vec::with_capacity(self.nb_properties);\n\n match self.endianness {\n\n Endianness::Little => {\n\n self.read_streamline::<LittleEndian>(&mut streamline, &mut scalars, nb_points);\n\n self.read_properties_to_vec::<LittleEndian>(&mut properties);\n\n }\n\n Endianness::Big => {\n\n self.read_streamline::<BigEndian>(&mut streamline, &mut scalars, nb_points);\n\n self.read_properties_to_vec::<BigEndian>(&mut properties);\n\n }\n\n };\n\n\n\n Some((streamline, scalars, properties))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n", "file_path": "src/reader.rs", "rank": 71, "score": 27905.049846492 }, { "content": " header,\n\n affine_to_rasmm,\n\n translation,\n\n nb_scalars,\n\n nb_properties,\n\n nb_floats_per_point,\n\n float_buffer: Vec::with_capacity(300),\n\n })\n\n }\n\n\n\n pub fn read_all(&mut self) -> Tractogram {\n\n match self.endianness {\n\n Endianness::Little => self.read_all_::<LittleEndian>(),\n\n Endianness::Big => self.read_all_::<BigEndian>(),\n\n }\n\n }\n\n\n\n fn 
read_all_<E: ByteOrder>(&mut self) -> Tractogram {\n\n // TODO Anything we can do to reerve?\n\n let mut lengths = Vec::new();\n", "file_path": "src/reader.rs", "rank": 72, "score": 27904.042415919317 }, { "content": "/// Axis direction codes for affine `affine`\n\npub fn affine_to_axcodes(affine: &Affine) -> String {\n\n let orientations = io_orientations(affine);\n\n orientations_to_axcodes(orientations)\n\n}\n\n\n", "file_path": "src/orientation.rs", "rank": 73, "score": 27580.64557666527 }, { "content": "/// Convert axis codes `axcodes` to an orientation\n\npub fn axcodes_to_orientations(axcodes: &str) -> Orientations {\n\n let labels = [('L', 'R'), ('P', 'A'), ('I', 'S')];\n\n let mut orientations = [(0, Direction::Normal), (0, Direction::Normal), (0, Direction::Normal)];\n\n for (code_idx, code) in axcodes.chars().enumerate() {\n\n for (label_idx, codes) in labels.iter().enumerate() {\n\n if code == codes.0 {\n\n orientations[code_idx] = (label_idx, Direction::Reversed);\n\n } else if code == codes.1 {\n\n orientations[code_idx] = (label_idx, Direction::Normal);\n\n }\n\n }\n\n }\n\n orientations\n\n}\n\n\n", "file_path": "src/orientation.rs", "rank": 74, "score": 27580.64557666527 }, { "content": " nb_streamlines: 0,\n\n scalars_name: vec![],\n\n properties_name: vec![],\n\n }\n\n }\n\n\n\n /// Retrieve a trk header, along with its byte order, from a file in the file system.\n\n pub fn from_trk<P: AsRef<Path>>(path: P) -> Result<Header> {\n\n let mut reader = BufReader::new(File::open(&path)?);\n\n let (header, _) = Self::read(&mut reader)?;\n\n Ok(header)\n\n }\n\n\n\n /// Retrieve a copy of the original trk header, as read on disk.\n\n pub fn raw_header(&self) -> CHeader {\n\n self.c_header.clone()\n\n }\n\n\n\n /// Retrieve a trk header, along with its byte order, from the given `BufReader`. 
It is assumed\n\n /// that the `reader` is currently at the start of the trk header.\n", "file_path": "src/header.rs", "rank": 75, "score": 27231.57381748117 }, { "content": " pub fn read(reader: &mut BufReader<File>) -> Result<(Header, Endianness)> {\n\n let (c_header, endianness) = CHeader::read(reader)?;\n\n let affine4 = c_header.get_affine_to_rasmm();\n\n let (affine, translation) = get_affine_and_translation(&affine4);\n\n let nb_streamlines = c_header.n_count as usize;\n\n let scalars_name = c_header.get_scalars_name();\n\n let properties_name = c_header.get_properties_name();\n\n\n\n let header = Header {\n\n c_header,\n\n affine4_to_rasmm: affine4,\n\n affine_to_rasmm: affine,\n\n translation,\n\n nb_streamlines,\n\n scalars_name,\n\n properties_name,\n\n };\n\n Ok((header, endianness))\n\n }\n\n\n", "file_path": "src/header.rs", "rank": 76, "score": 27231.447760433963 }, { "content": " pub fn add_scalar(&mut self, name: &str) -> Result<()> {\n\n self.c_header.add_scalar(name)?;\n\n self.scalars_name.push(name.to_string());\n\n Ok(())\n\n }\n\n\n\n pub fn add_property(&mut self, name: &str) -> Result<()> {\n\n self.c_header.add_property(name)?;\n\n self.properties_name.push(name.to_string());\n\n Ok(())\n\n }\n\n\n\n pub fn write<W: WriteBytesExt>(&self, writer: &mut W) -> Result<()> {\n\n Ok(self.c_header.write(writer)?)\n\n }\n\n}\n\n\n\nimpl Default for Header {\n\n fn default() -> Header {\n\n Header {\n", "file_path": "src/header.rs", "rank": 77, "score": 27230.122646284617 }, { "content": "use std::{\n\n fs::File,\n\n io::{BufReader, Result},\n\n path::Path,\n\n};\n\n\n\nuse byteorder::WriteBytesExt;\n\n#[cfg(feature = \"nifti_images\")]\n\nuse nifti::NiftiHeader;\n\n\n\nuse crate::{\n\n affine::get_affine_and_translation,\n\n cheader::{CHeader, Endianness},\n\n Affine, Affine4, Translation,\n\n};\n\n\n\n#[derive(Clone)]\n\npub struct Header {\n\n c_header: CHeader,\n\n pub affine4_to_rasmm: Affine4,\n", "file_path": "src/header.rs", "rank": 78, 
"score": 27229.87911928813 }, { "content": " pub affine_to_rasmm: Affine,\n\n pub translation: Translation,\n\n pub nb_streamlines: usize,\n\n\n\n pub scalars_name: Vec<String>,\n\n pub properties_name: Vec<String>,\n\n}\n\n\n\nimpl Header {\n\n #[cfg(feature = \"nifti_images\")]\n\n /// Build a trk header using the affine from a Nifti header.\n\n pub fn from_nifti(h: &NiftiHeader) -> Header {\n\n let c_header = CHeader::from_nifti(h.dim, h.pixdim, h.srow_x, h.srow_y, h.srow_z);\n\n let affine4 = c_header.get_affine_to_rasmm();\n\n let (affine, translation) = get_affine_and_translation(&affine4);\n\n Header {\n\n c_header,\n\n affine4_to_rasmm: affine4,\n\n affine_to_rasmm: affine,\n\n translation,\n", "file_path": "src/header.rs", "rank": 79, "score": 27228.022130150475 }, { "content": " c_header: CHeader::default(),\n\n affine4_to_rasmm: Affine4::identity(),\n\n affine_to_rasmm: Affine::identity(),\n\n translation: Translation::zeros(),\n\n nb_streamlines: 0,\n\n scalars_name: vec![],\n\n properties_name: vec![],\n\n }\n\n }\n\n}\n\n\n\nimpl PartialEq for Header {\n\n fn eq(&self, other: &Header) -> bool {\n\n self.affine_to_rasmm == other.affine_to_rasmm\n\n && self.translation == other.translation\n\n && self.nb_streamlines == other.nb_streamlines\n\n && self.scalars_name == other.scalars_name\n\n && self.properties_name == other.properties_name\n\n }\n\n}\n", "file_path": "src/header.rs", "rank": 80, "score": 27227.799301553514 }, { "content": " /// Clear all scalars and properties from `self`.\n\n pub fn clear_scalars_and_properties(&mut self) {\n\n self.clear_scalars();\n\n self.clear_properties();\n\n }\n\n\n\n /// Clear all scalars from `self`.\n\n pub fn clear_scalars(&mut self) {\n\n self.scalars_name.clear();\n\n self.c_header.clear_scalars();\n\n }\n\n\n\n /// Clear all properties from `self`.\n\n pub fn clear_properties(&mut self) {\n\n self.properties_name.clear();\n\n self.c_header.clear_properties();\n\n }\n\n\n\n /// Clear all scalars and 
properties from `self` and copy scalars and properties from `rhs`.\n\n pub fn copy_scalars_and_properties(&mut self, rhs: &Self) {\n", "file_path": "src/header.rs", "rank": 81, "score": 27227.22355569056 }, { "content": " self.copy_scalars(rhs);\n\n self.copy_properties(rhs);\n\n }\n\n\n\n /// Clear all scalars from `self` and copy scalars from `rhs`.\n\n pub fn copy_scalars(&mut self, rhs: &Self) {\n\n self.clear_scalars();\n\n for scalar in &rhs.scalars_name {\n\n self.add_scalar(scalar).unwrap(); // Can't fail\n\n }\n\n }\n\n\n\n /// Clear all properties from `self` and copy properties from `rhs`.\n\n pub fn copy_properties(&mut self, rhs: &Self) {\n\n self.clear_properties();\n\n for property in &rhs.properties_name {\n\n self.add_property(property).unwrap(); // Can't fail\n\n }\n\n }\n\n\n", "file_path": "src/header.rs", "rank": 82, "score": 27224.24836583541 }, { "content": "mod test;\n\n\n\n#[cfg(feature = \"nifti_images\")]\n\nmod nifti_tests {\n\n use crate::test::{get_random_trk_path, load_trk};\n\n use nifti::{NiftiObject, ReaderOptions};\n\n use trk_io::{\n\n affine::{rasmm_to_trackvis, trackvis_to_rasmm},\n\n Affine, Affine4, CHeader, Header, Point, Translation, Writer,\n\n };\n\n\n\n #[test]\n\n fn test_complex_affine() {\n\n let header =\n\n ReaderOptions::new().read_file(\"data/complex_affine.nii.gz\").unwrap().header().clone();\n\n let write_to = get_random_trk_path();\n\n\n\n {\n\n let mut writer = Writer::new(&write_to, Some(Header::from_nifti(&header))).unwrap();\n\n writer.apply_affine(&header.affine());\n", "file_path": "tests/affine.rs", "rank": 83, "score": 25707.804857166102 }, { "content": " writer.write(\n\n &[\n\n Point::new(13.75, 27.90, 51.55),\n\n Point::new(14.00, 27.95, 51.98),\n\n Point::new(14.35, 28.05, 52.33),\n\n ][..],\n\n );\n\n }\n\n\n\n // Loading them back without the right transformation is not supposed to give back the same\n\n // points. 
Results are exactly the same as with DiPy.\n\n let streamlines = load_trk(&write_to).1.streamlines;\n\n let streamline = &streamlines[0];\n\n assert_eq!(streamline[0], Point::new(-82.54104, -25.178139, 37.788338));\n\n assert_eq!(streamline[1], Point::new(-81.933876, -25.032265, 38.850258));\n\n assert_eq!(streamline[2], Point::new(-81.07349, -24.765305, 39.70571));\n\n }\n\n\n\n #[test]\n\n fn test_qform_affine() {\n", "file_path": "tests/affine.rs", "rank": 84, "score": 25702.203072288838 }, { "content": " let header = ReaderOptions::new().read_file(\"data/qform.nii.gz\").unwrap().header().clone();\n\n #[rustfmt::skip]\n\n assert_eq!(\n\n header.affine(),\n\n Affine4::new(\n\n -0.9375, 0.0, 0.0, 59.557503,\n\n 0.0, 0.9375, 0.0, 73.172,\n\n 0.0, 0.0, 3.0, 43.4291,\n\n 0.0, 0.0, 0.0, 1.0,\n\n )\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_simple_header_from_nifti() {\n\n let c_header = CHeader::from_nifti(\n\n [3, 100, 100, 100, 0, 0, 0, 0],\n\n [3.0, 1.1, 1.2, 1.3, 0.0, 0.0, 0.0, 0.0],\n\n [1.1, 0.0, 0.0, 10.0],\n\n [0.0, 1.2, 0.0, 11.0],\n", "file_path": "tests/affine.rs", "rank": 85, "score": 25699.36367589815 }, { "content": " assert_eq!(c_header.voxel_order, *b\"LAS\\0\");\n\n\n\n let header = Header::from_nifti(&nifti_header);\n\n #[rustfmt::skip]\n\n assert_eq!(\n\n header.affine_to_rasmm,\n\n Affine::new(\n\n -1.0, 0.0, 0.0,\n\n 0.0, 1.0, 0.0,\n\n 0.0, 0.0, 1.0,\n\n )\n\n );\n\n assert_eq!(header.translation, Translation::new(91.0, -127.0, -73.0));\n\n assert_eq!(header.nb_streamlines, 0);\n\n assert_eq!(header.scalars_name.len(), 0);\n\n assert_eq!(header.properties_name.len(), 0);\n\n }\n\n\n\n #[test]\n\n fn test_complex_affine_from_nifti() {\n", "file_path": "tests/affine.rs", "rank": 86, "score": 25698.532082207217 }, { "content": " [0.0, 0.0, 1.3, 12.0],\n\n );\n\n assert_eq!(c_header.dim, [100, 100, 100]);\n\n assert_eq!(c_header.voxel_size, [1.1, 1.2, 1.3]);\n\n #[rustfmt::skip]\n\n assert_eq!(\n\n c_header.vox_to_ras,\n\n [\n\n 1.1, 0.0, 0.0, 10.0,\n\n 
0.0, 1.2, 0.0, 11.0,\n\n 0.0, 0.0, 1.3, 12.0,\n\n 0.0, 0.0, 0.0, 1.0,\n\n ]\n\n );\n\n assert_eq!(c_header.voxel_order, *b\"RAS\\0\");\n\n }\n\n\n\n #[test]\n\n fn test_complex_header_from_nifti() {\n\n let nifti_header =\n", "file_path": "tests/affine.rs", "rank": 87, "score": 25697.49132474319 }, { "content": " ReaderOptions::new().read_file(\"data/3x3.nii.gz\").unwrap().header().clone();\n\n let c_header = CHeader::from_nifti(\n\n nifti_header.dim,\n\n nifti_header.pixdim,\n\n nifti_header.srow_x,\n\n nifti_header.srow_y,\n\n nifti_header.srow_z,\n\n );\n\n assert_eq!(c_header.dim, [3, 3, 3]);\n\n assert_eq!(c_header.voxel_size, [2.0, 2.0, 2.0]);\n\n #[rustfmt::skip]\n\n assert_eq!(\n\n c_header.vox_to_ras,\n\n [\n\n -2.0, 0.0, 0.0, 90.0,\n\n 0.0, 2.0, 0.0, -126.0,\n\n 0.0, 0.0, 2.0, -72.0,\n\n 0.0, 0.0, 0.0, 1.0,\n\n ]\n\n );\n", "file_path": "tests/affine.rs", "rank": 88, "score": 25696.51571267617 }, { "content": " let nifti_header =\n\n ReaderOptions::new().read_file(\"data/3x3.nii.gz\").unwrap().header().clone();\n\n #[rustfmt::skip]\n\n assert_eq!(\n\n trackvis_to_rasmm(&nifti_header),\n\n Affine4::new(\n\n -1.0, 0.0, 0.0, 91.0,\n\n 0.0, 1.0, 0.0, -127.0,\n\n 0.0, 0.0, 1.0, -73.0,\n\n 0.0, 0.0, 0.0, 1.0,\n\n )\n\n );\n\n #[rustfmt::skip]\n\n assert_eq!(\n\n rasmm_to_trackvis(&nifti_header),\n\n Affine4::new(\n\n -1.0, 0.0, 0.0, 91.0,\n\n 0.0, 1.0, 0.0, 127.0,\n\n 0.0, 0.0, 1.0, 73.0,\n\n 0.0, 0.0, 0.0, 1.0,\n\n )\n\n );\n\n }\n\n}\n", "file_path": "tests/affine.rs", "rank": 89, "score": 25696.23912252384 }, { "content": "use std::{fs::File, io::BufReader, path::Path, str};\n\n\n\nuse docopt::Docopt;\n\n\n\nuse trk_io::CHeader;\n\n\n\nstatic USAGE: &'static str = \"\n\nPrint a TrackVis (.trk) header in an readable form\n\n\n\nUsage:\n\n trk_header <input> [options]\n\n trk_header (-h | --help)\n\n trk_header (-v | --version)\n\n\n\nOptions:\n\n -a --all Also print computed fields (endianness, affine, etc.)\n\n -h --help Show this screen.\n\n -v --version 
Show version.\n\n\";\n\n\n", "file_path": "examples/trk_header.rs", "rank": 90, "score": 25518.168823612763 }, { "content": " println!(\"voxel_size: {:?}\", header.voxel_size);\n\n println!(\"origin: {:?}\", header.origin);\n\n println!(\"n_scalars: {:?}\", header.n_scalars);\n\n for (i, scalar_name) in header.get_scalars_name().iter().enumerate() {\n\n println!(\" {}: {}\", i, scalar_name);\n\n }\n\n println!(\"n_properties: {:?}\", header.n_properties);\n\n for (i, property_name) in header.get_properties_name().iter().enumerate() {\n\n println!(\" {}: {}\", i, property_name);\n\n }\n\n println!(\"vox_to_ras: {:?}\", &header.vox_to_ras[0..4]);\n\n println!(\" {:?}\", &header.vox_to_ras[4..8]);\n\n println!(\" {:?}\", &header.vox_to_ras[8..12]);\n\n println!(\" {:?}\", &header.vox_to_ras[12..16]);\n\n println!(\n\n \"voxel_order: {:?} ({})\",\n\n header.voxel_order,\n\n str::from_utf8(&header.voxel_order).unwrap()\n\n );\n\n println!(\"image_orientation_patient: {:?}\", header.image_orientation_patient);\n", "file_path": "examples/trk_header.rs", "rank": 91, "score": 25516.434404436874 }, { "content": " println!(\"invert: {:?} {:?} {:?}\", header.invert_x, header.invert_y, header.invert_z);\n\n println!(\"swap: {:?} {:?} {:?}\", header.swap_x, header.swap_y, header.swap_z);\n\n println!(\"n_count: {:?}\", header.n_count);\n\n println!(\"version: {:?}\", header.version);\n\n println!(\"hdr_size: {:?}\", header.hdr_size);\n\n\n\n if print_all {\n\n let to_rasmm = header.get_affine_to_rasmm();\n\n let to_trackvis = to_rasmm.try_inverse().unwrap();\n\n println!(\"\\n---------- Computed fields ----------\");\n\n println!(\"Endianness {}\", endianness);\n\n print!(\"to rasmm {}\", to_rasmm);\n\n print!(\"to to_trackvis {}\", to_trackvis);\n\n }\n\n}\n", "file_path": "examples/trk_header.rs", "rank": 92, "score": 25514.701564364175 }, { "content": "pub trait Writable {\n\n fn write(self, w: &mut Writer);\n\n}\n\n\n\nimpl Writable for Tractogram {\n\n fn write(self, w: 
&mut Writer) {\n\n for item in &self {\n\n item.write(w);\n\n }\n\n }\n\n}\n\n\n\nimpl Writable for TractogramItem {\n\n fn write(self, writer: &mut Writer) {\n\n let (streamline, scalars, properties) = self;\n\n write_streamline!(writer, streamline, scalars.data.as_slice(), &properties);\n\n }\n\n}\n\n\n\nimpl<'data> Writable for RefTractogramItem<'data> {\n", "file_path": "src/writer.rs", "rank": 93, "score": 24666.1126659849 }, { "content": "use nalgebra::Vector3;\n\nuse trk_io::{Affine, ArraySequence, Header, Point, Reader, Tractogram, Translation};\n\n\n\n#[test]\n", "file_path": "tests/trk_read.rs", "rank": 95, "score": 24092.041010727295 }, { "content": "\n\n // Test generator\n\n let reader = Reader::new(\"data/simple.trk\").unwrap();\n\n for (i, (streamline, _, _)) in reader.into_iter().enumerate() {\n\n if i == 0 {\n\n assert_eq!(streamline, first);\n\n } else if i == 1 {\n\n assert_eq!(streamline, second);\n\n } else if i == 2 {\n\n assert_eq!(streamline, third);\n\n } else {\n\n panic!(\"Failed test.\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/trk_read.rs", "rank": 97, "score": 24087.728044289663 }, { "content": " let reader = Reader::new(\"data/complex_big_endian.trk\").unwrap();\n\n for (i, (streamline, _, _)) in reader.into_iter().enumerate() {\n\n if i == 0 {\n\n assert_eq!(streamline, first);\n\n } else if i == 1 {\n\n assert_eq!(streamline, second);\n\n } else if i == 2 {\n\n assert_eq!(streamline, third);\n\n } else {\n\n panic!(\"Failed test.\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/trk_read.rs", "rank": 98, "score": 24086.962099196 }, { "content": "use trk_io::{ArraySequence, Point};\n\n\n", "file_path": "tests/array_sequence.rs", "rank": 99, "score": 24082.507769104115 } ]
Rust
src/hypergraph/remove.rs
saona-raimundo/ferret_hypergraph
d9c677bdf9e72d491989674ae00cbfccd4ee61c2
use crate::{ elements::{ElementType, ElementValue}, errors, Direction, Hypergraph, }; impl<N, E, H, L, Ty> Hypergraph<N, E, H, L, Ty> { pub fn remove( &mut self, id: impl AsRef<[usize]>, ) -> Result<ElementValue<N, E, H, L>, errors::RemoveError> { let id = id.as_ref(); if !self.contains(&id) { Err(errors::NoElement(id.to_vec()))? } let element = match self.element_type(&id).unwrap() { ElementType::Edge => { let value = self.remove_edge(id)?; ElementValue::Edge { value } } ElementType::Hypergraph => { let value = self.remove_subhypergraph(id)?; ElementValue::Hypergraph { value } } ElementType::Link => { let value = self.remove_link(id)?; ElementValue::Link { value } } ElementType::Node => { let value = self.remove_node(id)?; ElementValue::Node { value } } }; Ok(element) } pub fn remove_edge(&mut self, id: impl AsRef<[usize]>) -> Result<E, errors::RemoveError> { let id = id.as_ref(); if !self.contains_edge(&id) { Err(errors::NoEdge(id.to_vec()))? } let edge_links = self.links_of(id).unwrap().clone(); for (link_id, _) in edge_links.into_iter().skip(2) { self.remove_link(link_id)?; } let local_id = id.last().unwrap(); let hypergraph = self.hypergraph_of_mut(id).unwrap(); let raw_edges = hypergraph.raw_edges_mut(); let (edge_value, edge_links) = raw_edges.remove(local_id).unwrap(); for (link_id, direction) in edge_links { let local_id = link_id.last().unwrap(); match direction { Direction::Incoming => { let (_, source_id, _) = self .hypergraph_of_mut(id) .unwrap() .raw_links_mut() .remove(local_id) .unwrap(); self.remove_link_from_unchecked(link_id, source_id); } Direction::Outgoing => { let (_, _, target_id) = self .hypergraph_of_mut(id) .unwrap() .raw_links_mut() .remove(local_id) .unwrap(); self.remove_link_from_unchecked(link_id, target_id); } } } Ok(edge_value) } pub fn remove_subhypergraph( &mut self, id: impl AsRef<[usize]>, ) -> Result<Option<H>, errors::RemoveError> { let id = id.as_ref(); if !self.contains_subhypergraph(&id) { 
Err(errors::NoHypergraph(id.to_vec()))? } let subhypergraph_links = self.links_of(id).unwrap().clone(); for (link_id, _) in subhypergraph_links { self.remove_link(link_id)?; } let id = id.to_vec(); for local_id in self .subhypergraph(&id) .unwrap() .ids() .skip(1) .collect::<Vec<_>>() { let mut gloabl_id = id.clone(); gloabl_id.extend(local_id); println!("Removing id {:?}", gloabl_id); self.remove(gloabl_id)?; } let local_id = id.last().unwrap(); let (subhypergraph, _) = self .hypergraph_of_mut(&id) .unwrap() .raw_hypergraphs_mut() .remove(local_id) .unwrap(); Ok(subhypergraph.value) } pub fn remove_link( &mut self, id: impl AsRef<[usize]>, ) -> Result<Option<L>, errors::RemoveError> { let id = id.as_ref(); { if !self.contains_link(&id) { Err(errors::NoLink(id.to_vec()))? } let (source_id, target_id) = self.link_endpoints(id).unwrap(); if !self.contains_linkable(source_id) { Err(errors::NoElement(source_id.clone()))? } if !self.contains_linkable(target_id) { Err(errors::NoElement(target_id.clone()))? 
} } let local_id = id.last().unwrap(); let (link_value, source_id, target_id) = self .hypergraph_of_mut(id) .unwrap() .raw_links_mut() .remove(local_id) .unwrap(); self.remove_link_from_unchecked(&id, source_id); self.remove_link_from_unchecked(&id, target_id); Ok(link_value) } fn remove_link_from_unchecked( &mut self, link_id: impl AsRef<[usize]>, id: impl AsRef<[usize]>, ) { let id = id.as_ref(); let link_id = link_id.as_ref(); let local_id = id.last().expect("empty id"); let element_type = self.element_type(id).expect("id is not a valid element"); let hypergraph = self.hypergraph_of_mut(&id).unwrap(); match element_type { ElementType::Edge => { let raw_edges = hypergraph.raw_edges_mut(); let (_, edge_links) = raw_edges.get_mut(local_id).unwrap(); let link_index = edge_links .iter() .position(|(l_id, _)| link_id == l_id) .expect("link_id is not among the links of id"); edge_links.remove(link_index); if edge_links.len() < 2 { self.remove_edge(id).unwrap(); } } ElementType::Hypergraph => { let raw_hypergraphs = hypergraph.raw_hypergraphs_mut(); let (_, hyperraph_links) = raw_hypergraphs.get_mut(local_id).unwrap(); let link_index = hyperraph_links .iter() .position(|(link_id, _)| link_id == id) .expect("link_id is not among the links of id"); hyperraph_links.remove(link_index); } ElementType::Link => { panic!("id refers to a link! (link_id {:?}, id {:?})", link_id, id); } ElementType::Node => { let raw_nodes = hypergraph.raw_nodes_mut(); let (_, node_links) = raw_nodes.get_mut(local_id).unwrap(); let link_index = node_links .iter() .position(|(l_id, _)| link_id == l_id) .expect("link_id is not among the links of id"); node_links.remove(link_index); } } } pub fn remove_node(&mut self, id: impl AsRef<[usize]>) -> Result<N, errors::RemoveError> { let id = id.as_ref(); if !self.contains_node(&id) { Err(errors::NoNode(id.to_vec()))? 
} let local_id = id.last().unwrap(); for (link_id, _) in self.links_of(id).unwrap().clone() { self.remove_link(link_id)?; } let hypergraph = self.hypergraph_of_mut(id).unwrap(); let raw_nodes = hypergraph.raw_nodes_mut(); let (node_value, _) = raw_nodes.remove(local_id).unwrap(); Ok(node_value) } pub fn remove_element_by_value( &mut self, value: ElementValue<&N, &E, &H, &L>, ) -> Result<(), errors::FindError> where N: PartialEq, E: PartialEq, H: PartialEq, L: PartialEq, { let id = self.find_element_by_value(value)?; self.remove(id).unwrap(); Ok(()) } } #[cfg(test)] mod tests { use super::*; #[test] fn remove() { let mut h = Hypergraph::new(); h.add_node("zero"); h.add_node("one"); h.add_edge([0], [1], "two").unwrap(); h.add_link([0], [2], "five").unwrap(); h.add_hypergraph("six"); println!("{:#?}", h); assert_eq!( h.remove([5]), Ok(ElementValue::Link { value: Some("five") }) ); assert_eq!(h.remove([2]), Ok(ElementValue::Edge { value: "two" })); assert_eq!(h.remove([0]), Ok(ElementValue::Node { value: "zero" })); println!("{:#?}", h); assert_eq!( h.remove([6]), Ok(ElementValue::Hypergraph { value: Some("six") }) ); assert_eq!(h.ids().collect::<Vec<_>>(), vec![vec![], vec![1]]); } #[test] fn remove_element_by_value() { let mut h = Hypergraph::<_, ()>::new(); h.add_node("zero"); h.add_node("zero"); let result = h.remove_element_by_value(ElementValue::Node { value: &"zero" }); assert_eq!(result, Ok(())); println!("{:#?}", h); assert_eq!(h.ids().collect::<Vec<_>>(), vec![vec![], vec![1]]); } }
use crate::{ elements::{ElementType, ElementValue}, errors, Direction, Hypergraph, }; impl<N, E, H, L, Ty> Hypergraph<N, E, H, L, Ty> { pub fn remove( &mut self, id: impl AsRef<[usize]>, ) -> Result<ElementValue<N, E, H, L>, errors::RemoveError> { let id = id.as_ref(); if !self.contains(&id) { Err(errors::NoElement(id.to_vec()))? } let element = match self.element_type(&id).unwrap() { ElementType::Edge => { let value = self.remove_edge(id)?; ElementValue::Edge { value } } ElementType::Hypergraph => { let value = self.remove_subhypergraph(id)?; ElementValue::Hypergraph { value } } ElementType::Link => { let value = self.remove_link(id)?; ElementValue::Link { value } } ElementType::Node => { let value = self.remove_node(id)?; ElementValue::Node { value } } }; Ok(element) } pub fn remove_edge(&mut self, id: impl AsRef<[usize]>) -> Result<E, errors::RemoveError> { let id = id.as_ref(); if !self.contains_edge(&id) { Err(errors::NoEdge(id.to_vec()))? } let edge_links = self.links_of(id).unwrap().clone(); for (link_id, _) in edge_links.into_iter().skip(2) { self.remove_link(link_id)?; } let local_id = id.last().unwrap(); let hypergraph = self.hypergraph_of_mut(id).unwrap(); let raw_edges = hypergraph.raw_edges_mut(); let (edge_value, edge_links) = raw_edges.remove(local_id).unwrap(); for (link_id, direction) in edge_links { let local_id = link_id.last().unwrap(); match direction { Direction::Incoming => { let (_, source_id, _) = self .hypergraph_of_mut(id) .unwrap() .raw_links_mut() .remove(local_id) .unwrap(); self.remove_link_from_unchecked(link_id, source_id); } Direction::Outgoing => { let (_, _, target_id) = self .hypergraph_of_mut(id) .unwrap() .raw_links_mut() .remove(local_id) .unwrap(); self.remove_link_from_unchecked(link_id, target_id); } } } Ok(edge_value) } pub fn remove_subhypergraph( &mut self, id: impl AsRef<[usize]>, ) -> Result<Option<H>, errors::RemoveError> { let id = id.as_ref(); if !self.contains_subhypergraph(&id) { 
Err(errors::NoHypergraph(id.to_vec()))? } let subhypergraph_links = self.links_of(id).unwrap().clone(); for (link_id, _) in subhypergraph_links { self.remove_link(link_id)?; } let id = id.to_vec(); for local_id in self .subhypergraph(&id) .unwrap() .ids() .skip(1) .collect::<Vec<_>>() { let mut gloabl_id = id.clone(); gloabl_id.extend(local_id); println!("Removing id {:?}", gloabl_id); self.remove(gloabl_id)?; } let local_id = id.last().unwrap(); let (subhypergraph, _) = self .hypergraph_of_mut(&id) .unwrap() .raw_hypergraphs_mut() .remove(local_id) .unwrap(); Ok(subhypergraph.value) } pub fn remove_link( &mut self, id: impl AsRef<[usize]>, ) -> Result<Option<L>, errors::RemoveError> { let id = id.as_ref(); { if !self.contains_link(&id) { Err(errors::NoLink(id.to_vec()))? } let (source_id, target_id) = self.link_endpoints(id).unwrap(); if !self.contains_linkable(source_id) { Err(errors::NoElement(source_id.clone()))? } if !self.contains_linkable(target_id) { Err(errors::NoElement(target_id.clone()))? } } let local_id = id.last().unwrap(); let (link_value, source_id, target_id) = self .hypergraph_of_mut(id) .unwrap() .raw_links_mut() .remove(local_id) .unwrap(); self.remove_link_from_unchecked(&id, source_id); self.remove_link_from_unchecked(&id, target_id); Ok(link_value) } fn remove_link_from_unchecked( &mut self, link_id: impl AsRef<[usize]>, id: impl AsRef<[usize]>, ) { let id = id.as_ref(); let link_id = link_id.as_ref(); let local_id = id.last().expect("empty id"); let element_type = self.element_type(id).expect("id is not a valid element"); let hypergraph = self.hypergraph_of_mut(&id).unwrap(); match element_type { ElementType::Edge => { let raw_edges = hypergraph.raw_edges_mut(); let (_, edge_links) = raw_edges.get_mut(local_id).unwrap(); let link_index = edge_links .iter() .position(|(l_id, _)| link_id == l_id) .expect("link_id is not among the links of id"); edge_links.remove(link_index); if edge_links.len() < 2 { self.remove_edge(i
pub fn remove_node(&mut self, id: impl AsRef<[usize]>) -> Result<N, errors::RemoveError> { let id = id.as_ref(); if !self.contains_node(&id) { Err(errors::NoNode(id.to_vec()))? } let local_id = id.last().unwrap(); for (link_id, _) in self.links_of(id).unwrap().clone() { self.remove_link(link_id)?; } let hypergraph = self.hypergraph_of_mut(id).unwrap(); let raw_nodes = hypergraph.raw_nodes_mut(); let (node_value, _) = raw_nodes.remove(local_id).unwrap(); Ok(node_value) } pub fn remove_element_by_value( &mut self, value: ElementValue<&N, &E, &H, &L>, ) -> Result<(), errors::FindError> where N: PartialEq, E: PartialEq, H: PartialEq, L: PartialEq, { let id = self.find_element_by_value(value)?; self.remove(id).unwrap(); Ok(()) } } #[cfg(test)] mod tests { use super::*; #[test] fn remove() { let mut h = Hypergraph::new(); h.add_node("zero"); h.add_node("one"); h.add_edge([0], [1], "two").unwrap(); h.add_link([0], [2], "five").unwrap(); h.add_hypergraph("six"); println!("{:#?}", h); assert_eq!( h.remove([5]), Ok(ElementValue::Link { value: Some("five") }) ); assert_eq!(h.remove([2]), Ok(ElementValue::Edge { value: "two" })); assert_eq!(h.remove([0]), Ok(ElementValue::Node { value: "zero" })); println!("{:#?}", h); assert_eq!( h.remove([6]), Ok(ElementValue::Hypergraph { value: Some("six") }) ); assert_eq!(h.ids().collect::<Vec<_>>(), vec![vec![], vec![1]]); } #[test] fn remove_element_by_value() { let mut h = Hypergraph::<_, ()>::new(); h.add_node("zero"); h.add_node("zero"); let result = h.remove_element_by_value(ElementValue::Node { value: &"zero" }); assert_eq!(result, Ok(())); println!("{:#?}", h); assert_eq!(h.ids().collect::<Vec<_>>(), vec![vec![], vec![1]]); } }
d).unwrap(); } } ElementType::Hypergraph => { let raw_hypergraphs = hypergraph.raw_hypergraphs_mut(); let (_, hyperraph_links) = raw_hypergraphs.get_mut(local_id).unwrap(); let link_index = hyperraph_links .iter() .position(|(link_id, _)| link_id == id) .expect("link_id is not among the links of id"); hyperraph_links.remove(link_index); } ElementType::Link => { panic!("id refers to a link! (link_id {:?}, id {:?})", link_id, id); } ElementType::Node => { let raw_nodes = hypergraph.raw_nodes_mut(); let (_, node_links) = raw_nodes.get_mut(local_id).unwrap(); let link_index = node_links .iter() .position(|(l_id, _)| link_id == l_id) .expect("link_id is not among the links of id"); node_links.remove(link_index); } } }
function_block-function_prefixed
[ { "content": "/// A walker is like an iterator, where part of the\n\n/// information is supplied manually at each \"next\" call (named `walk_next`).\n\n///\n\n/// # Remarks\n\n///\n\n/// This allows to visit a Hypergraph without a fixed reference to it.\n\npub trait Walker<'a, N, E, H, L, Ty>: Sized {\n\n type Item;\n\n /// Advance to the next item.\n\n fn walk_next(&mut self, hypergraph: &'a Hypergraph<N, E, H, L, Ty>) -> Option<Self::Item>;\n\n\n\n /// Create an iterator fixing a reference to a hypergraph.\n\n fn build_iter(\n\n self,\n\n hypergraph: &'a Hypergraph<N, E, H, L, Ty>,\n\n ) -> WalkIter<'a, N, E, H, L, Ty, Self> {\n\n WalkIter::new(self, hypergraph)\n\n }\n\n}\n", "file_path": "src/traits.rs", "rank": 0, "score": 120343.88850744693 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let mut h = Hypergraph::<&str, &str, &str, &str>::new();\n\n h.set_value(\"(main)hypergraph\");\n\n h.add_node(\"node\");\n\n h.add_node(\"node\");\n\n h.add_edge([0], [1], \"edge\")?;\n\n h.set_link_value([3], \"link\")?;\n\n h.set_link_value([4], \"link\")?;\n\n h.add_node(\"node\");\n\n h.add_link([2], [5], \"link\")?;\n\n\n\n h.add_hypergraph(\"(sub)hypergraph\");\n\n h.add_node_in(\"node\", [7])?;\n\n h.add_node_in(\"node\", [7])?;\n\n h.add_edge_in([7, 0], [7, 1], \"edge\", [7])?;\n\n h.set_link_value([7, 3], \"link\")?;\n\n h.set_link_value([7, 4], \"link\")?;\n\n\n\n h.add_link([2], [7, 0], \"link\")?;\n\n h.add_link([2], [7], \"link\")?;\n", "file_path": "examples/documentation_image.rs", "rank": 1, "score": 61859.72335271624 }, { "content": "/// Marker trait for classes of Hypergraphs.\n\npub trait HypergraphClass: Debug + Eq {\n\n fn new() -> Self;\n\n fn is_main(&self) -> bool {\n\n false\n\n }\n\n fn is_sub(&self) -> bool {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/traits.rs", "rank": 17, "score": 43500.62630647915 }, { "content": "use crate::{traits, Hypergraph};\n\n\n\n#[derive(Debug)]\n\npub struct WalkIter<'a, N, E, H, L, Ty, Walker> {\n\n 
walker: Walker,\n\n hypergraph: &'a Hypergraph<N, E, H, L, Ty>,\n\n}\n\n\n\nimpl<'a, N, E, H, L, Ty, Walker> WalkIter<'a, N, E, H, L, Ty, Walker> {\n\n pub fn new(walker: Walker, hypergraph: &'a Hypergraph<N, E, H, L, Ty>) -> Self {\n\n WalkIter { walker, hypergraph }\n\n }\n\n\n\n pub fn detach(self) -> Walker {\n\n self.walker\n\n }\n\n}\n\n\n\nimpl<'a, N, E, H, L, Ty, Walker> Iterator for WalkIter<'a, N, E, H, L, Ty, Walker>\n\nwhere\n\n Walker: traits::Walker<'a, N, E, H, L, Ty>,\n\n{\n\n type Item = Walker::Item;\n\n fn next(&mut self) -> std::option::Option<Walker::Item> {\n\n self.walker.walk_next(self.hypergraph)\n\n }\n\n}\n", "file_path": "src/iterators.rs", "rank": 18, "score": 30053.38902800992 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n/// Edge direction.\n\n#[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Ord, Eq, Hash, Serialize, Deserialize)]\n\npub enum Direction {\n\n /// An `Outgoing` edge is an outward link *from* the current element.\n\n Outgoing,\n\n /// An `Incoming` edge is an inbound link *to* the current element.\n\n Incoming,\n\n}\n\n\n\nimpl Direction {\n\n /// Return the opposite `Direction`.\n\n pub fn opposite(self) -> Direction {\n\n match self {\n\n Direction::Outgoing => Direction::Incoming,\n\n Direction::Incoming => Direction::Outgoing,\n\n }\n\n }\n\n}\n", "file_path": "src/direction.rs", "rank": 19, "score": 30040.593115988813 }, { "content": " NoLink,\n\n NoNode,\n\n NoLocation(#[from] NoHypergraph),\n\n}\n\n\n\n#[derive(Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"Failed to get an element.\")]\n\npub enum GetError {\n\n NoEdge(#[from] NoEdge),\n\n NoElement(#[from] NoElement),\n\n NoElementLinkable(#[from] NoElementLinkable),\n\n NoHypergraph(#[from] NoHypergraph),\n\n NoLink(#[from] NoLink),\n\n NoNode(#[from] NoNode),\n\n RootHypergraph(#[from] RootHypergraph),\n\n}\n\n\n\n#[derive(Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"Failed to remove element.\")]\n\npub enum RemoveError {\n", 
"file_path": "src/errors.rs", "rank": 20, "score": 29896.574878947715 }, { "content": "#[error(\"There is no hypergraph with id {0:?}.\")]\n\npub struct NoHypergraph(pub Vec<usize>);\n\n\n\n#[derive(Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"There is no link with id {0:?}.\")]\n\npub struct NoLink(pub Vec<usize>);\n\n\n\n#[derive(Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"There is no node with id {0:?}.\")]\n\npub struct NoNode(pub Vec<usize>);\n\n\n\n#[derive(Copy, Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"The method does not apply to the root hypergraph.\")]\n\npub struct RootHypergraph;\n\n\n\n#[derive(Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"These elements can not be linked (source {0:?}, target {0:?}).\")]\n\npub struct Unlinkable(pub Vec<usize>, pub Vec<usize>);\n\n\n\n/// # Compound\n", "file_path": "src/errors.rs", "rank": 21, "score": 29894.342998894343 }, { "content": "#[error(\"Source can not be a link.\")]\n\npub struct LinkSource(pub Vec<usize>);\n\n\n\n#[derive(Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"Target can not be a link.\")]\n\npub struct LinkTarget(pub Vec<usize>);\n\n\n\n#[derive(Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"There is no edge with id {0:?}.\")]\n\npub struct NoEdge(pub Vec<usize>);\n\n\n\n#[derive(Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"There is no element with id {0:?}.\")]\n\npub struct NoElement(pub Vec<usize>);\n\n\n\n#[derive(Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"There is no linkable element with id {0:?}.\")]\n\npub struct NoElementLinkable(pub Vec<usize>);\n\n\n\n#[derive(Debug, Error, Clone, PartialEq, Eq)]\n", "file_path": "src/errors.rs", "rank": 22, "score": 29894.072865207156 }, { "content": " NoEdge(#[from] NoEdge),\n\n NoElement(#[from] NoElement),\n\n NoHypergraph(#[from] NoHypergraph),\n\n NoLink(#[from] NoLink),\n\n NoNode(#[from] NoNode),\n\n}\n\n\n\n#[derive(Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"Failed to set an element.\")]\n\npub 
enum SetError {\n\n NoEdge(#[from] NoEdge),\n\n NoElement(#[from] NoElement),\n\n NoElementLinkable(#[from] NoElementLinkable),\n\n NoHypergraph(#[from] NoHypergraph),\n\n NoLink(#[from] NoLink),\n\n NoNode(#[from] NoNode),\n\n}\n", "file_path": "src/errors.rs", "rank": 23, "score": 29893.25935158608 }, { "content": "#[derive(Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"Failed to add element.\")]\n\npub enum AddError {\n\n EmptySource(#[from] EmptySource),\n\n EmptyTarget(#[from] EmptyTarget),\n\n IncoherentLink(#[from] IncoherentLink),\n\n LinkSource(#[from] LinkSource),\n\n LinkTarget(#[from] LinkTarget),\n\n NoLocation(#[from] NoHypergraph),\n\n NoSource(#[source] NoElementLinkable),\n\n NoTarget(#[source] NoElementLinkable),\n\n Unlinkable(#[from] Unlinkable),\n\n}\n\n\n\n#[derive(Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"Failed to find the element.\")]\n\npub enum FindError {\n\n NoEdge,\n\n NoElement,\n\n NoHypergraph,\n", "file_path": "src/errors.rs", "rank": 24, "score": 29892.5926260655 }, { "content": "use thiserror::Error;\n\n\n\n/// # Basic\n\n#[derive(Copy, Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"Source can not be empty.\")]\n\npub struct EmptySource;\n\n\n\n#[derive(Copy, Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"Target can not be empty.\")]\n\npub struct EmptyTarget;\n\n\n\n#[derive(Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"The link is incoherent (location {0:?}, source {1:?}, target {2:?}).\")]\n\npub struct IncoherentLink(pub Vec<usize>, pub Vec<usize>, pub Vec<usize>);\n\n\n\n#[derive(Copy, Debug, Error, Clone, PartialEq, Eq)]\n\n#[error(\"Failed to convert to ElementLinkable because element is a link.\")]\n\npub struct LinkPresent;\n\n\n\n#[derive(Debug, Error, Clone, PartialEq, Eq)]\n", "file_path": "src/errors.rs", "rank": 25, "score": 29892.01513530929 }, { "content": "}\n\n\n\nimpl<N, E, H, L, Id> TryFrom<Element<N, E, H, L, Id>> for ElementLinkable<N, E, H> {\n\n type Error = LinkPresent;\n\n fn 
try_from(element: Element<N, E, H, L, Id>) -> Result<ElementLinkable<N, E, H>, Self::Error> {\n\n match element {\n\n Element::Edge { value } => Ok(ElementLinkable::Edge { value }),\n\n Element::Hypergraph { value } => Ok(ElementLinkable::Hypergraph { value }),\n\n Element::Node { value } => Ok(ElementLinkable::Node { value }),\n\n Element::Link { .. } => Err(LinkPresent),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]\n\npub enum Element<N, E, H, L, Id> {\n\n /// A graph edge.\n\n Edge { value: E },\n\n /// A hypergraph.\n\n Hypergraph { value: Option<H> },\n", "file_path": "src/elements.rs", "rank": 26, "score": 29683.615428963 }, { "content": " /// A graph link.\n\n Link {\n\n source: Id,\n\n target: Id,\n\n value: Option<L>,\n\n },\n\n /// A graph node.\n\n Node { value: N },\n\n}\n\n\n\nimpl<N, E, H, L, Id> Element<N, E, H, L, Id> {\n\n pub fn is_edge(&self) -> bool {\n\n matches!(self, Element::Edge { .. })\n\n }\n\n\n\n pub fn is_hypergraph(&self) -> bool {\n\n matches!(self, Element::Hypergraph { .. })\n\n }\n\n\n\n pub fn is_link(&self) -> bool {\n", "file_path": "src/elements.rs", "rank": 27, "score": 29681.510859810533 }, { "content": " /// A hypergraph.\n\n Hypergraph { value: Option<H> },\n\n /// A graph link.\n\n Link {\n\n source: Id,\n\n target: Id,\n\n value: Option<L>,\n\n },\n\n /// A graph node.\n\n Node { value: N },\n\n}\n\n\n\nimpl<N, E, H, L, Id> ElementExt<N, E, H, L, Id> {\n\n pub fn is_edge(&self) -> bool {\n\n matches!(self, ElementExt::Edge { .. })\n\n }\n\n\n\n pub fn is_hypergraph(&self) -> bool {\n\n matches!(self, ElementExt::Hypergraph { .. 
})\n\n }\n", "file_path": "src/elements.rs", "rank": 28, "score": 29681.057081269264 }, { "content": "use core::{convert::TryFrom, fmt};\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::errors::LinkPresent;\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]\n\npub enum ElementValue<N, E, H, L> {\n\n /// A graph edge.\n\n Edge { value: E },\n\n /// A hypergraph.\n\n Hypergraph { value: Option<H> },\n\n /// A graph link.\n\n Link { value: Option<L> },\n\n /// A graph node.\n\n Node { value: N },\n\n}\n\n\n\nimpl<N, E, H, L> fmt::Display for ElementValue<N, E, H, L>\n\nwhere\n\n N: fmt::Display,\n", "file_path": "src/elements.rs", "rank": 29, "score": 29679.814206538376 }, { "content": "\n\nimpl<N, E, H, L, Id> From<ElementExt<N, E, H, L, Id>> for Element<N, E, H, L, Id> {\n\n fn from(element_ext: ElementExt<N, E, H, L, Id>) -> Self {\n\n match element_ext {\n\n ElementExt::Edge { value, .. } => Element::Edge { value },\n\n ElementExt::Link {\n\n source,\n\n target,\n\n value,\n\n } => Element::Link {\n\n source,\n\n target,\n\n value,\n\n },\n\n ElementExt::Hypergraph { value } => Element::Hypergraph { value },\n\n ElementExt::Node { value } => Element::Node { value },\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/elements.rs", "rank": 30, "score": 29679.789957137153 }, { "content": "impl<N, E, H, L, Id> From<ElementLinkable<N, E, H>> for Element<N, E, H, L, Id> {\n\n fn from(element_linkable: ElementLinkable<N, E, H>) -> Self {\n\n match element_linkable {\n\n ElementLinkable::Edge { value } => Element::Edge { value },\n\n ElementLinkable::Hypergraph { value } => Element::Hypergraph { value },\n\n ElementLinkable::Node { value } => Element::Node { value },\n\n }\n\n }\n\n}\n\n\n\n// impl TryInto<ElementExt> for Element ...\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]\n\npub enum ElementType {\n\n Edge,\n\n Hypergraph,\n\n Link,\n\n Node,\n\n}\n\n\n", "file_path": "src/elements.rs", "rank": 31, "score": 
29679.344813302643 }, { "content": "impl ElementType {\n\n pub fn wrapping_next(self) -> Self {\n\n match self {\n\n ElementType::Edge => ElementType::Hypergraph,\n\n ElementType::Hypergraph => ElementType::Link,\n\n ElementType::Link => ElementType::Node,\n\n ElementType::Node => ElementType::Edge,\n\n }\n\n }\n\n}\n\n\n\nimpl<N, E, H, L> From<ElementValue<N, E, H, L>> for ElementType {\n\n fn from(element_value: ElementValue<N, E, H, L>) -> Self {\n\n match element_value {\n\n ElementValue::Edge { .. } => ElementType::Edge,\n\n ElementValue::Link { .. } => ElementType::Link,\n\n ElementValue::Hypergraph { .. } => ElementType::Hypergraph,\n\n ElementValue::Node { .. } => ElementType::Node,\n\n }\n\n }\n", "file_path": "src/elements.rs", "rank": 32, "score": 29679.208194220824 }, { "content": "}\n\n\n\nimpl<N, E, H, L, Id> From<Element<N, E, H, L, Id>> for ElementType {\n\n fn from(element: Element<N, E, H, L, Id>) -> Self {\n\n match element {\n\n Element::Edge { .. } => ElementType::Edge,\n\n Element::Link { .. } => ElementType::Link,\n\n Element::Hypergraph { .. } => ElementType::Hypergraph,\n\n Element::Node { .. } => ElementType::Node,\n\n }\n\n }\n\n}\n\n\n\n/// Element extended with information to be added to a hypergraph.\n\n///\n\n/// `Edge` variant now has `source` and `target`.\n\n#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]\n\npub enum ElementExt<N, E, H, L, Id> {\n\n /// A graph edge.\n\n Edge { source: Id, target: Id, value: E },\n", "file_path": "src/elements.rs", "rank": 33, "score": 29678.102902133174 }, { "content": " }\n\n ElementValue::Link {\n\n value: value_option,\n\n } => {\n\n if let Some(value) = value_option {\n\n write!(f, \"{}\", value)\n\n } else {\n\n write!(f, \"\")\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<N, E, H, L> ElementValue<N, E, H, L> {\n\n pub fn is_edge(&self) -> bool {\n\n matches!(self, ElementValue::Edge { .. 
})\n\n }\n\n\n\n pub fn is_hypergraph(&self) -> bool {\n", "file_path": "src/elements.rs", "rank": 34, "score": 29677.84402890827 }, { "content": " matches!(self, ElementValue::Hypergraph { .. })\n\n }\n\n\n\n pub fn is_link(&self) -> bool {\n\n matches!(self, ElementValue::Link { .. })\n\n }\n\n\n\n pub fn is_node(&self) -> bool {\n\n matches!(self, ElementValue::Node { .. })\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]\n\npub enum ElementLinkable<N, E, H> {\n\n /// A graph edge.\n\n Edge { value: E },\n\n /// A hypergraph.\n\n Hypergraph { value: Option<H> },\n\n /// A graph node.\n\n Node { value: N },\n", "file_path": "src/elements.rs", "rank": 35, "score": 29673.61588083306 }, { "content": " E: fmt::Display,\n\n H: fmt::Display,\n\n L: fmt::Display,\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {\n\n match self {\n\n ElementValue::Edge { value } => {\n\n write!(f, \"{}\", value)\n\n }\n\n ElementValue::Node { value } => {\n\n write!(f, \"{}\", value)\n\n }\n\n ElementValue::Hypergraph {\n\n value: value_option,\n\n } => {\n\n if let Some(value) = value_option {\n\n write!(f, \"{}\", value)\n\n } else {\n\n write!(f, \"\")\n\n }\n", "file_path": "src/elements.rs", "rank": 36, "score": 29673.45387996438 }, { "content": " matches!(self, Element::Link { .. })\n\n }\n\n\n\n pub fn is_node(&self) -> bool {\n\n matches!(self, Element::Node { .. })\n\n }\n\n pub fn source(&self) -> Option<&Id> {\n\n match self {\n\n Element::Link { source, .. } => Some(&source),\n\n Element::Edge { .. } | Element::Hypergraph { .. } | Element::Node { .. } => None,\n\n }\n\n }\n\n\n\n pub fn target(&self) -> Option<&Id> {\n\n match self {\n\n Element::Link { target, .. } => Some(&target),\n\n Element::Edge { .. } | Element::Hypergraph { .. } | Element::Node { .. 
} => None,\n\n }\n\n }\n\n}\n", "file_path": "src/elements.rs", "rank": 37, "score": 29668.190143274725 }, { "content": "\n\n pub fn is_link(&self) -> bool {\n\n matches!(self, ElementExt::Link { .. })\n\n }\n\n\n\n pub fn is_node(&self) -> bool {\n\n matches!(self, ElementExt::Node { .. })\n\n }\n\n\n\n pub fn into_source(self) -> Option<Id> {\n\n match self {\n\n ElementExt::Edge { source, .. } => Some(source),\n\n ElementExt::Link { source, .. } => Some(source),\n\n ElementExt::Hypergraph { .. } | ElementExt::Node { .. } => None,\n\n }\n\n }\n\n pub fn into_target(self) -> Option<Id> {\n\n match self {\n\n ElementExt::Edge { target, .. } => Some(target),\n\n ElementExt::Link { target, .. } => Some(target),\n", "file_path": "src/elements.rs", "rank": 38, "score": 29667.202504663783 }, { "content": " ElementExt::Hypergraph { .. } | ElementExt::Node { .. } => None,\n\n }\n\n }\n\n\n\n pub fn source(&self) -> Option<&Id> {\n\n match self {\n\n ElementExt::Edge { source, .. } => Some(&source),\n\n ElementExt::Link { source, .. } => Some(&source),\n\n ElementExt::Hypergraph { .. } | ElementExt::Node { .. } => None,\n\n }\n\n }\n\n pub fn target(&self) -> Option<&Id> {\n\n match self {\n\n ElementExt::Edge { target, .. } => Some(&target),\n\n ElementExt::Link { target, .. } => Some(&target),\n\n ElementExt::Hypergraph { .. } | ElementExt::Node { .. 
} => None,\n\n }\n\n }\n\n}\n", "file_path": "src/elements.rs", "rank": 39, "score": 29665.988738288954 }, { "content": "\n\nimpl<'a, N, E, H, L, Ty> Walker<'a, N, E, H, L, Ty> for WalkIds {\n\n type Item = Vec<usize>;\n\n\n\n fn walk_next(&mut self, hypergraph: &'a Hypergraph<N, E, H, L, Ty>) -> Option<Self::Item> {\n\n match &self.next_id {\n\n None => None,\n\n Some(id) => {\n\n if hypergraph.contains(id) {\n\n let mut next = hypergraph.next_id(id);\n\n core::mem::swap(&mut next, &mut self.next_id);\n\n next\n\n } else {\n\n // Update to the next valid id in hypergraph\n\n self.next_id = hypergraph.next_id(id);\n\n self.walk_next(hypergraph)\n\n }\n\n }\n\n }\n\n }\n", "file_path": "src/walkers/walk_ids.rs", "rank": 40, "score": 26446.6002596624 }, { "content": "use crate::{traits::Walker, Hypergraph};\n\n\n\n/// A “walker” object that can be used to step through a hypergraph without borrowing it.\n\n///\n\n/// Created with [`.detach()`](struct.IdIter.html#method.detach).\n\n#[derive(Debug, Clone)]\n\npub struct WalkIds {\n\n next_id: Option<Vec<usize>>,\n\n}\n\nimpl WalkIds {\n\n pub fn new() -> Self {\n\n Self::new_from(vec![])\n\n }\n\n\n\n pub fn new_from(next_id: impl Into<Option<Vec<usize>>>) -> Self {\n\n WalkIds {\n\n next_id: next_id.into(),\n\n }\n\n }\n\n}\n", "file_path": "src/walkers/walk_ids.rs", "rank": 41, "score": 26439.653697685648 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn walk_next() {\n\n let mut h = Hypergraph::new();\n\n h.add_node(\"zero\");\n\n h.add_node(\"one\");\n\n h.add_edge([0], [1], \"two\").unwrap();\n\n h.add_link([0], [2], \"three\").unwrap();\n\n h.add_hypergraph(\"six\").unwrap();\n\n let mut id_walk = WalkIds::new();\n\n\n\n assert_eq!(id_walk.walk_next(&h).unwrap(), vec![]);\n\n\n\n for i in 0..7 {\n\n assert_eq!(id_walk.walk_next(&h).unwrap(), vec![i]);\n\n }\n\n assert_eq!(id_walk.walk_next(&h), None);\n\n }\n\n}\n", "file_path": "src/walkers/walk_ids.rs", "rank": 42, 
"score": 26432.758145405143 }, { "content": " match self {\n\n HypergraphEnum::Original(h) => h.raw_nodes(),\n\n HypergraphEnum::Sub(h) => h.raw_nodes(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, N, E, H, L, Ty>\n\n HypergraphEnum<&'a mut Hypergraph<N, E, H, L, Ty>, &'a mut Hypergraph<N, E, H, L, Sub>>\n\n{\n\n pub fn element_type(\n\n &mut self,\n\n id: impl AsRef<[usize]>,\n\n ) -> Result<ElementType, errors::GetError> {\n\n match self {\n\n HypergraphEnum::Original(h) => h.element_type(id),\n\n HypergraphEnum::Sub(h) => h.element_type(id),\n\n }\n\n }\n\n fn add_local_element(&mut self, element: Element<N, E, H, L, Vec<usize>>) -> usize {\n", "file_path": "src/hypergraph.rs", "rank": 43, "score": 21071.15905639211 }, { "content": " HypergraphEnum::Original(h) => h.raw_hypergraphs_mut(),\n\n HypergraphEnum::Sub(h) => h.raw_hypergraphs_mut(),\n\n }\n\n }\n\n\n\n pub fn raw_nodes_mut(self) -> &'a mut IndexMap<usize, (N, Vec<(Vec<usize>, Direction)>)> {\n\n match self {\n\n HypergraphEnum::Original(h) => h.raw_nodes_mut(),\n\n HypergraphEnum::Sub(h) => h.raw_nodes_mut(),\n\n }\n\n }\n\n}\n\n\n\n// # Note\n\n//\n\n// This should not be public.\n\nimpl<N, E, H, L, Ty> Hypergraph<N, E, H, L, Ty> {\n\n fn add_local_element(&mut self, element: Element<N, E, H, L, Vec<usize>>) -> usize {\n\n match element {\n\n Element::Edge { value } => {\n", "file_path": "src/hypergraph.rs", "rank": 44, "score": 21066.737819960294 }, { "content": " &self,\n\n id: impl AsRef<[usize]>,\n\n ) -> Result<&'a Vec<(Vec<usize>, Direction)>, errors::GetError> {\n\n match self {\n\n HypergraphEnum::Original(h) => h.links_of(id),\n\n HypergraphEnum::Sub(h) => h.links_of(id),\n\n }\n\n }\n\n\n\n pub fn element_type(&self, id: impl AsRef<[usize]>) -> Result<ElementType, errors::GetError> {\n\n match self {\n\n HypergraphEnum::Original(h) => h.element_type(id),\n\n HypergraphEnum::Sub(h) => h.element_type(id),\n\n }\n\n }\n\n\n\n pub fn raw_edges(&self) -> &'a IndexMap<usize, (E, Vec<(Vec<usize>, 
Direction)>)> {\n\n match self {\n\n HypergraphEnum::Original(h) => h.raw_edges(),\n\n HypergraphEnum::Sub(h) => h.raw_edges(),\n", "file_path": "src/hypergraph.rs", "rank": 45, "score": 21065.433452844016 }, { "content": "impl<O, S> HypergraphEnum<O, S> {\n\n pub fn is_original(&self) -> bool {\n\n matches!(self, HypergraphEnum::Original(_))\n\n }\n\n\n\n pub fn is_sub(&self) -> bool {\n\n matches!(self, HypergraphEnum::Sub(_))\n\n }\n\n}\n\n\n\nimpl<'a, N, E, H, L, Ty>\n\n HypergraphEnum<&'a Hypergraph<N, E, H, L, Ty>, &'a Hypergraph<N, E, H, L, Sub>>\n\n{\n\n pub fn contains(&self, id: impl AsRef<[usize]>) -> bool {\n\n match self {\n\n HypergraphEnum::Original(h) => h.contains(id),\n\n HypergraphEnum::Sub(h) => h.contains(id),\n\n }\n\n }\n\n pub fn links_of(\n", "file_path": "src/hypergraph.rs", "rank": 46, "score": 21065.023437609547 }, { "content": " } else {\n\n panic!(\"The local id {} is not a valid.\", local_id);\n\n }\n\n }\n\n}\n\n\n\n/// # Create\n\n///\n\n/// A graph that can be created.\n\nimpl<N, E, H, L, Ty: HypergraphClass> Hypergraph<N, E, H, L, Ty> {\n\n pub fn new() -> Self {\n\n let nodes = IndexMap::new();\n\n let edges = IndexMap::new();\n\n let links = IndexMap::new();\n\n let hypergraphs = IndexMap::new();\n\n let next_id = 0;\n\n Hypergraph {\n\n value: None,\n\n nodes,\n\n edges,\n", "file_path": "src/hypergraph.rs", "rank": 47, "score": 21064.949701482066 }, { "content": " }\n\n\n\n pub fn reserve_links(&mut self, additional: usize) -> &mut Self {\n\n self.links.reserve(additional);\n\n self\n\n }\n\n pub fn reserve_nodes(&mut self, additional: usize) -> &mut Self {\n\n self.nodes.reserve(additional);\n\n self\n\n }\n\n}\n\n\n\n/// # Inform\n\n///\n\n/// Various information about the hypergraph.\n\nimpl<N, E, H, L, Ty> Hypergraph<N, E, H, L, Ty> {\n\n /// Returns the current capacity of the underlying `Map`s.\n\n ///\n\n /// The output is ordered allhabetically: edges, hypergraphs, links, nodes.\n\n pub fn capacities(&self) -> (usize, 
usize, usize, usize) {\n", "file_path": "src/hypergraph.rs", "rank": 48, "score": 21063.319874176024 }, { "content": "/// - [`Remove`](#remove)\n\n/// - [`Set`](#set)\n\n/// - [`Transform`](#transform)\n\n/// - [`Visualize`](#visualize)\n\n//\n\n// # Note\n\n//\n\n// You might want to change Vec by SmallVec in the future\n\n// and let the user decide the storage capacity (ie. how many nested structures are there).\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct Hypergraph<N, E, H = (), L = (), Ty = Main> {\n\n /// Value of the hypergraph as a whole.\n\n value: Option<H>,\n\n /// nodes: their weight and links (in absolute format)\n\n nodes: IndexMap<usize, (N, Vec<(Vec<usize>, Direction)>)>,\n\n /// edges: weight and links (in absolute format)\n\n edges: IndexMap<usize, (E, Vec<(Vec<usize>, Direction)>)>, // This vector always has at least two elements\n\n /// links: weight, source and target ids (in absolute format)\n\n links: IndexMap<usize, (Option<L>, Vec<usize>, Vec<usize>)>, // Links have no neighbors\n\n /// subhypergraps: subhypergraph and links\n", "file_path": "src/hypergraph.rs", "rank": 49, "score": 21060.71024071046 }, { "content": " }\n\n\n\n pub fn raw_edges_mut(self) -> &'a mut IndexMap<usize, (E, Vec<(Vec<usize>, Direction)>)> {\n\n match self {\n\n HypergraphEnum::Original(h) => h.raw_edges_mut(),\n\n HypergraphEnum::Sub(h) => h.raw_edges_mut(),\n\n }\n\n }\n\n\n\n pub fn raw_links_mut(self) -> &'a mut IndexMap<usize, (Option<L>, Vec<usize>, Vec<usize>)> {\n\n match self {\n\n HypergraphEnum::Original(h) => h.raw_links_mut(),\n\n HypergraphEnum::Sub(h) => h.raw_links_mut(),\n\n }\n\n }\n\n\n\n pub fn raw_hypergraphs_mut(\n\n self,\n\n ) -> &'a mut IndexMap<usize, (Hypergraph<N, E, H, L, Sub>, Vec<(Vec<usize>, Direction)>)> {\n\n match self {\n", "file_path": "src/hypergraph.rs", "rank": 50, "score": 21060.186088977985 }, { "content": " self.edges.insert(self.next_id, (value, Vec::new()));\n\n }\n\n Element::Hypergraph { value } => 
{\n\n let hypergraph = {\n\n let mut h = Hypergraph::<N, E, H, L, Sub>::new();\n\n h.set_value(value);\n\n h\n\n };\n\n self.hypergraphs\n\n .insert(self.next_id, (hypergraph, Vec::new()));\n\n }\n\n Element::Link {\n\n source,\n\n target,\n\n value,\n\n } => {\n\n self.links.insert(self.next_id, (value, source, target));\n\n }\n\n Element::Node { value } => {\n\n self.nodes.insert(self.next_id, (value, Vec::new()));\n", "file_path": "src/hypergraph.rs", "rank": 51, "score": 21059.48313156546 }, { "content": "\n\n /// Return the number of nodes in the graph.\n\n pub fn node_count(&self) -> usize {\n\n self.nodes.len()\n\n }\n\n}\n\n\n\nimpl<N, E, H, L, Ty: HypergraphClass> Hypergraph<N, E, H, L, Ty> {\n\n pub fn is_main(&self) -> bool {\n\n self.class().is_main()\n\n }\n\n pub fn is_sub(&self) -> bool {\n\n self.class().is_main()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use test_case::test_case;\n", "file_path": "src/hypergraph.rs", "rank": 52, "score": 21058.85384754349 }, { "content": " }\n\n\n\n pub fn contains_hypergraph(&self, id: impl AsRef<[usize]>) -> bool {\n\n let id = id.as_ref();\n\n id.is_empty() || self.contains_subhypergraph(id)\n\n }\n\n\n\n /// Returns true if `id` refers to a subhypergraph (possibly nested).\n\n pub fn contains_subhypergraph(&self, id: impl AsRef<[usize]>) -> bool {\n\n let id = id.as_ref();\n\n match id.len() {\n\n 0 => false,\n\n 1 => self.raw_hypergraphs().contains_key(&id[0]),\n\n _ => {\n\n let mut hypergraph = match self.raw_hypergraphs().get(&id[0]) {\n\n Some(h_full) => &h_full.0,\n\n None => return false,\n\n };\n\n for local_id in id.iter().skip(1) {\n\n hypergraph = match hypergraph.raw_hypergraphs().get(local_id) {\n", "file_path": "src/hypergraph.rs", "rank": 53, "score": 21057.511442141287 }, { "content": " match self {\n\n HypergraphEnum::Original(h) => h.add_local_element(element),\n\n HypergraphEnum::Sub(h) => h.add_local_element(element),\n\n }\n\n }\n\n /// Add a neighbor to the local 
element with id `local_id`.\n\n /// the neighbor corresponds to `(link_id, Direction)`.\n\n ///\n\n /// # Contract\n\n ///\n\n /// `local_id` exists and refers to a linkable element and neighbor's id is a valid link.\n\n fn add_local_neighbor_unchecked(\n\n &mut self,\n\n local_id: usize,\n\n link_info: (Vec<usize>, Direction),\n\n ) {\n\n match self {\n\n HypergraphEnum::Original(h) => h.add_local_neighbor_unchecked(local_id, link_info),\n\n HypergraphEnum::Sub(h) => h.add_local_neighbor_unchecked(local_id, link_info),\n\n }\n", "file_path": "src/hypergraph.rs", "rank": 54, "score": 21057.011660132728 }, { "content": " hypergraphs: IndexMap<usize, (Hypergraph<N, E, H, L, Sub>, Vec<(Vec<usize>, Direction)>)>,\n\n /// Counter for the next id when adding elements. It also serves as an upper bound on the number of elements.\n\n next_id: usize,\n\n /// Type (either Main or Sub)\n\n class: Ty,\n\n}\n\n\n\n/// Wrapper for ease of implementation.\n\n///\n\n/// It is not meant to be part of the public API.\n\n//\n\n// # Alternative\n\n//\n\n// Implement thorugh [enum_dispatch](https://crates.io/crates/enum_dispatch)\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub enum HypergraphEnum<O, S> {\n\n Original(O),\n\n Sub(S),\n\n}\n\n\n", "file_path": "src/hypergraph.rs", "rank": 55, "score": 21056.950723063106 }, { "content": " }\n\n }\n\n\n\n pub fn raw_links(&self) -> &'a IndexMap<usize, (Option<L>, Vec<usize>, Vec<usize>)> {\n\n match self {\n\n HypergraphEnum::Original(h) => h.raw_links(),\n\n HypergraphEnum::Sub(h) => h.raw_links(),\n\n }\n\n }\n\n\n\n pub fn raw_hypergraphs(\n\n &self,\n\n ) -> &'a IndexMap<usize, (Hypergraph<N, E, H, L, Sub>, Vec<(Vec<usize>, Direction)>)> {\n\n match self {\n\n HypergraphEnum::Original(h) => h.raw_hypergraphs(),\n\n HypergraphEnum::Sub(h) => h.raw_hypergraphs(),\n\n }\n\n }\n\n\n\n pub fn raw_nodes(&self) -> &'a IndexMap<usize, (N, Vec<(Vec<usize>, Direction)>)> {\n", "file_path": "src/hypergraph.rs", "rank": 56, "score": 
21056.46279008902 }, { "content": "use core::fmt::Debug;\n\nuse indexmap::IndexMap;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::{direction::Direction, elements::*, errors, traits::HypergraphClass};\n\n\n\nmod add;\n\nmod classes;\n\nmod clear;\n\nmod extend;\n\nmod find;\n\nmod get;\n\nmod remove;\n\nmod set;\n\nmod transform;\n\npub mod visualize;\n\n\n\npub use classes::{Main, Sub};\n\n\n\n/// Directed-hyper-multi-graphs.\n", "file_path": "src/hypergraph.rs", "rank": 57, "score": 21055.901604861312 }, { "content": " Err(_) => return false,\n\n };\n\n hypergraph.raw_edges().contains_key(&local_id)\n\n }\n\n }\n\n }\n\n\n\n pub fn contains_link(&self, id: impl AsRef<[usize]>) -> bool {\n\n let mut id = id.as_ref().to_vec();\n\n match id.len() {\n\n 0 => false,\n\n _ => {\n\n let local_id = id.pop().unwrap(); // Never fails since id is non empty.\n\n let hypergraph = match self.hypergraph(id) {\n\n Ok(h) => h,\n\n Err(_) => return false,\n\n };\n\n hypergraph.raw_links().contains_key(&local_id)\n\n }\n\n }\n", "file_path": "src/hypergraph.rs", "rank": 58, "score": 21055.531391711826 }, { "content": " self.edges.len()\n\n }\n\n\n\n /// Returns a bound on valid ids.\n\n ///\n\n /// All valid ids are strictly smaller than the output (in lexicographic order).\n\n pub fn id_bound(&self) -> Vec<usize> {\n\n let mut result = vec![self.next_local_id()];\n\n match self.hypergraphs.last() {\n\n None => result,\n\n Some((_, (h, _))) => {\n\n result.extend(h.id_bound());\n\n result\n\n }\n\n }\n\n }\n\n\n\n /// Returns `true` if there are no nodes or hypergraphs.\n\n ///\n\n /// # Esamples\n", "file_path": "src/hypergraph.rs", "rank": 59, "score": 21055.531852018066 }, { "content": " 1 => {\n\n self.contains_edge(&id)\n\n | self.contains_link(&id)\n\n | self.contains_hypergraph(&id)\n\n | self.contains_node(&id)\n\n }\n\n _ => {\n\n let local_id = id.pop().unwrap(); // Never fails since id is non empty.\n\n let hypergraph = match self.hypergraph(id) {\n\n Ok(h) => 
h,\n\n Err(_) => return false,\n\n };\n\n hypergraph.contains([local_id])\n\n }\n\n }\n\n }\n\n\n\n pub fn contains_node(&self, id: impl AsRef<[usize]>) -> bool {\n\n let mut id = id.as_ref().to_vec();\n\n match id.len() {\n", "file_path": "src/hypergraph.rs", "rank": 60, "score": 21055.027815574962 }, { "content": " (\n\n self.raw_edges().capacity(),\n\n self.raw_hypergraphs().capacity(),\n\n self.raw_links().capacity(),\n\n self.raw_nodes().capacity(),\n\n )\n\n }\n\n\n\n /// Returns `true` if `id` corresponds to an existing element of `self`\n\n /// and it can be linked (node, edge or hypergraph).\n\n pub fn contains_linkable(&self, id: impl AsRef<[usize]>) -> bool {\n\n let id = id.as_ref();\n\n !id.is_empty()\n\n & (self.contains_edge(id) | self.contains_hypergraph(id) | self.contains_node(id))\n\n }\n\n\n\n pub fn contains(&self, id: impl AsRef<[usize]>) -> bool {\n\n let mut id = id.as_ref().to_vec();\n\n match id.len() {\n\n 0 => true,\n", "file_path": "src/hypergraph.rs", "rank": 61, "score": 21054.97685578732 }, { "content": " 0 => false,\n\n _ => {\n\n let local_id = id.pop().unwrap(); // Never fails since id is non empty.\n\n let hypergraph = match self.hypergraph(id) {\n\n Ok(h) => h,\n\n Err(_) => return false,\n\n };\n\n hypergraph.raw_nodes().contains_key(&local_id)\n\n }\n\n }\n\n }\n\n\n\n pub fn contains_edge(&self, id: impl AsRef<[usize]>) -> bool {\n\n let mut id = id.as_ref().to_vec();\n\n match id.len() {\n\n 0 => false,\n\n _ => {\n\n let local_id = id.pop().unwrap(); // Never fails since id is non empty.\n\n let hypergraph = match self.hypergraph(id) {\n\n Ok(h) => h,\n", "file_path": "src/hypergraph.rs", "rank": 62, "score": 21053.813889043886 }, { "content": " links,\n\n hypergraphs,\n\n next_id,\n\n class: Ty::new(),\n\n }\n\n }\n\n\n\n pub fn with_capacity(nodes: usize, edges: usize, links: usize, hypergraphs: usize) -> Self {\n\n let nodes = IndexMap::with_capacity(nodes);\n\n let edges = IndexMap::with_capacity(edges);\n\n let links = 
IndexMap::with_capacity(links);\n\n let hypergraphs = IndexMap::with_capacity(hypergraphs);\n\n let next_id = 0;\n\n Hypergraph {\n\n value: None,\n\n nodes,\n\n edges,\n\n links,\n\n hypergraphs,\n\n next_id,\n", "file_path": "src/hypergraph.rs", "rank": 63, "score": 21053.580615688323 }, { "content": " class: Ty::new(),\n\n }\n\n }\n\n\n\n /// Reserve `additional` in all underlying maps of `self`.\n\n pub fn reserve(&mut self, additional: usize) -> &mut Self {\n\n self.reserve_edges(additional)\n\n .reserve_hypergraphs(additional)\n\n .reserve_links(additional)\n\n .reserve_nodes(additional)\n\n }\n\n\n\n pub fn reserve_edges(&mut self, additional: usize) -> &mut Self {\n\n self.edges.reserve(additional);\n\n self\n\n }\n\n\n\n pub fn reserve_hypergraphs(&mut self, additional: usize) -> &mut Self {\n\n self.hypergraphs.reserve(additional);\n\n self\n", "file_path": "src/hypergraph.rs", "rank": 64, "score": 21052.95889704213 }, { "content": " 4; //\n\n \"tree\"\n\n )]\n\n fn depth<N, E, H, L, Ty: HypergraphClass>(h: Hypergraph<N, E, H, L, Ty>, expected: usize) {\n\n assert_eq!(h.depth(), expected)\n\n }\n\n\n\n #[test]\n\n fn new() {\n\n Hypergraph::<(), ()>::new();\n\n }\n\n}\n", "file_path": "src/hypergraph.rs", "rank": 65, "score": 21052.751929609807 }, { "content": " }\n\n }\n\n self.next_id += 1;\n\n self.next_id - 1\n\n }\n\n\n\n fn add_local_neighbor_unchecked(\n\n &mut self,\n\n local_id: usize,\n\n link_info: (Vec<usize>, Direction),\n\n ) {\n\n if let Some(edge_full) = self.raw_edges_mut().get_mut(&local_id) {\n\n let (_, ref mut links_info) = edge_full;\n\n links_info.push(link_info);\n\n } else if let Some(hypergraph_full) = self.raw_hypergraphs_mut().get_mut(&local_id) {\n\n let (_, ref mut links_info) = hypergraph_full;\n\n links_info.push(link_info);\n\n } else if let Some(node_full) = self.raw_nodes_mut().get_mut(&local_id) {\n\n let (_, ref mut links_info) = node_full;\n\n links_info.push(link_info);\n", "file_path": "src/hypergraph.rs", "rank": 
66, "score": 21050.644692463513 }, { "content": "///\n\n/// Directed graphs allow connections to have a direction.\n\n/// Hyper-graphs allow edges to connect more than two elements.\n\n/// Multi-graphs allow more than one connection between two elements.\n\n/// `Hypergraph` is a directed-hyper-multi-graph that is also recursive:\n\n/// it can contain another `Hypergraph` inside it\n\n/// (with a marker `Sub` which restricts its methods).\n\n///\n\n/// # Data structure\n\n///\n\n/// In a nutshell, (hyper)edges are treated the same as nodes, while links take the role\n\n/// of simple edges. Nodes and hypergraphs can be connected through (hyper)edges,\n\n/// for which links are used in the middle.\n\n/// `Hypergraph` is a multi-graph in two sense:\n\n/// - There can be more than one (hyper)edge connecting two elements.\n\n/// - There can be more than one link between a node or hypergraph and an edge.\n\n///\n\n/// # Type paramenters\n\n///\n\n/// - `E`: hyperedge\n", "file_path": "src/hypergraph.rs", "rank": 67, "score": 21050.314947572693 }, { "content": " ///\n\n /// New hypergraphs are always empty.\n\n /// ```\n\n /// # use ferret_hypergraph::Hypergraph;\n\n /// let h = Hypergraph::<(), ()>::new();\n\n /// assert!(h.is_empty());\n\n /// ```\n\n pub fn is_empty(&self) -> bool {\n\n self.raw_nodes().is_empty() && self.raw_hypergraphs().is_empty()\n\n }\n\n\n\n /// Return the number of hypergraphs in the graph (including itself).\n\n pub fn hypergraph_count(&self) -> usize {\n\n 1 + self.hypergraphs.len()\n\n }\n\n\n\n /// Return the number of links in the graph.\n\n pub fn link_count(&self) -> usize {\n\n self.links.len()\n\n }\n", "file_path": "src/hypergraph.rs", "rank": 68, "score": 21047.743343487055 }, { "content": "/// - `H`: hypergraph\n\n/// - `L`: link (simple edge)\n\n/// - `N`: node\n\n/// - `Ty`: Main or sub hypergraph marker\n\n///\n\n/// # Indices\n\n///\n\n/// Indices are represented by `Vec<usize>` by default. 
They are stable except upon usage\n\n/// of any method under [`Optimization`](#optimization) (like [`shrink_to_fit`]).\n\n///\n\n/// # Contents\n\n///\n\n/// - [`Add`](#add)\n\n/// - [`Create`](#create)\n\n/// - [`Clear`](#clear)\n\n/// - [`Extend`](#extend)\n\n/// - [`Find`](#find)\n\n/// - [`Get`](#get)\n\n/// - [`Inform`](#inform)\n\n/// - [`Optimization`](#optimization)\n", "file_path": "src/hypergraph.rs", "rank": 69, "score": 21046.589019478415 }, { "content": "\n\n #[test_case(Hypergraph::<(), ()>::new(), 1; \"no recursion\")]\n\n #[test_case(\n\n {\n\n let mut h = Hypergraph::<(), ()>::new();\n\n h.add_hypergraph(());\n\n h\n\n }, //\n\n 2; //\n\n \"one recursion\"\n\n )]\n\n #[test_case(\n\n {\n\n let mut h = Hypergraph::<(), ()>::new();\n\n h.add_hypergraph(());\n\n h.add_hypergraph_in((), [0]).unwrap();\n\n h.add_hypergraph_in((), [0, 0]).unwrap();\n\n h.add_hypergraph(());\n\n h\n\n }, //\n", "file_path": "src/hypergraph.rs", "rank": 70, "score": 21045.612779950723 }, { "content": " Some(hypergraph_full) => &hypergraph_full.0,\n\n None => return false,\n\n };\n\n }\n\n true\n\n }\n\n }\n\n }\n\n\n\n /// Returns the number of levels of nested hypergraphs.\n\n pub fn depth(&self) -> usize {\n\n let mut recursive = 0;\n\n for (_, (h, _)) in &self.hypergraphs {\n\n recursive = h.depth().max(recursive);\n\n }\n\n recursive + 1\n\n }\n\n\n\n /// Return the number of edges in the graph.\n\n pub fn edge_count(&self) -> usize {\n", "file_path": "src/hypergraph.rs", "rank": 71, "score": 21044.293065531536 }, { "content": "use indexmap::IndexMap;\n\n\n\nuse crate::{\n\n direction::Direction,\n\n elements::{ElementType, ElementValue},\n\n errors, iterators,\n\n traits::Walker,\n\n walkers, Hypergraph, HypergraphEnum, Sub,\n\n};\n\n\n\n/// # Get\n\n///\n\n/// Access node and edge weights (associated data).\n\nimpl<N, E, H, L, Ty> Hypergraph<N, E, H, L, Ty> {\n\n /// Returns the class marker.\n\n pub fn class(&self) -> &Ty {\n\n &self.class\n\n }\n\n\n\n // /// 
Returns an iterator over all valid edge ids.\n", "file_path": "src/hypergraph/get.rs", "rank": 72, "score": 19753.916953641226 }, { "content": "use core::mem;\n\n\n\nuse crate::{elements::ElementValue, errors, Hypergraph};\n\n\n\n/// # Set\n\n///\n\n/// Set the value of elements\n\nimpl<N, E, H, L, Ty> Hypergraph<N, E, H, L, Ty> {\n\n pub fn set_edge_value(\n\n &mut self,\n\n id: impl AsRef<[usize]>,\n\n mut new_value: E,\n\n ) -> Result<E, errors::SetError> {\n\n let id = id.as_ref();\n\n if !self.contains_edge(id) {\n\n Err(errors::NoEdge(id.to_vec()))?\n\n }\n\n let old_value = self.edge_value_mut(id).unwrap(); // Never fails since id refers to an edge\n\n mem::swap(old_value, &mut new_value);\n\n Ok(new_value)\n", "file_path": "src/hypergraph/set.rs", "rank": 73, "score": 19753.249555859373 }, { "content": "use crate::{elements::ElementValue, errors, Hypergraph};\n\n\n\n/// # Find\n\n///\n\n/// Find elements.\n\nimpl<N, E, H, L, Ty> Hypergraph<N, E, H, L, Ty> {\n\n /// Returns the id of the link that belongs to hypergraph `location` linking `source` and `target`.\n\n ///\n\n /// An empty `location` means the main hypergraph.\n\n ///\n\n /// Returns `None` if it does not exists.\n\n pub fn find_link_id<'a>(\n\n &self,\n\n source: impl AsRef<[usize]>,\n\n target: impl AsRef<[usize]>,\n\n value: impl Into<Option<&'a L>>,\n\n location: impl AsRef<[usize]>,\n\n ) -> Result<Vec<usize>, errors::FindError>\n\n where\n\n L: 'a + PartialEq,\n", "file_path": "src/hypergraph/find.rs", "rank": 74, "score": 19750.64773182132 }, { "content": " // pub fn edge_ids<'a>(&'a self) -> EdgeIterIds<'a, N, E, H, L, Ty> {\n\n // EdgeIterIds::new(&self)\n\n // }\n\n\n\n pub fn edge_value(&self, id: impl AsRef<[usize]>) -> Result<&E, errors::GetError> {\n\n let id = id.as_ref();\n\n if !self.contains_edge(&id) {\n\n Err(errors::NoEdge(id.to_vec()))?\n\n }\n\n let hypergraph = self.hypergraph_of(&id).unwrap(); // Never fails since id refers to a valid edge\n\n let local_id = 
id.last().unwrap(); // Never fails since id refers to a valid edge\n\n let edge_value = hypergraph\n\n .raw_edges()\n\n .get(local_id)\n\n .map(|edge_full| &edge_full.0)\n\n .unwrap(); // Never fails since id refers to a valid edge\n\n Ok(edge_value)\n\n }\n\n\n\n pub fn edge_value_mut(&mut self, id: impl AsRef<[usize]>) -> Result<&mut E, errors::GetError> {\n", "file_path": "src/hypergraph/get.rs", "rank": 75, "score": 19748.971671510455 }, { "content": " pub fn hypergraph_mut(\n\n &mut self,\n\n id: impl AsRef<[usize]>,\n\n ) -> Result<HypergraphEnum<&mut Self, &mut Hypergraph<N, E, H, L, Sub>>, errors::GetError> {\n\n let id = id.as_ref();\n\n if id.is_empty() {\n\n return Ok(HypergraphEnum::Original(self));\n\n }\n\n let subhypergraph = self.subhypergraph_mut(&id)?;\n\n Ok(HypergraphEnum::Sub(subhypergraph))\n\n }\n\n\n\n /// Returns the hypergraph in which `id` lives, if it exists.\n\n ///\n\n /// `None` is returned when: there is no element with id `id`; or `id` is empty.\n\n pub fn hypergraph_of(\n\n &self,\n\n id: impl AsRef<[usize]>,\n\n ) -> Result<HypergraphEnum<&Self, &Hypergraph<N, E, H, L, Sub>>, errors::GetError> {\n\n let id = id.as_ref();\n", "file_path": "src/hypergraph/get.rs", "rank": 76, "score": 19748.304594606205 }, { "content": " match id.len() {\n\n 0 => Err(errors::RootHypergraph)?,\n\n 1 => Ok(HypergraphEnum::Original(&self)),\n\n _ => {\n\n let id = &id[0..id.len() - 1];\n\n let subhypergraph = self.subhypergraph(&id)?;\n\n Ok(HypergraphEnum::Sub(subhypergraph))\n\n }\n\n }\n\n }\n\n\n\n /// Returns the hypergraph in which `id` lives, if it exists.\n\n ///\n\n /// `None` is returned when there is no element with id `id`.\n\n pub fn hypergraph_of_mut(\n\n &mut self,\n\n id: impl AsRef<[usize]>,\n\n ) -> Result<HypergraphEnum<&mut Self, &mut Hypergraph<N, E, H, L, Sub>>, errors::GetError> {\n\n let id = id.as_ref();\n\n match id.len() {\n", "file_path": "src/hypergraph/get.rs", "rank": 77, "score": 19746.75724701389 }, { "content": "use 
core::mem;\n\n\n\nuse crate::{errors, Hypergraph, Main, Sub};\n\n\n\n/// # Add\n\n///\n\n/// A graph that can be extended with further nodes and edges\n\nimpl<N, E, H, L> Hypergraph<N, E, H, L, Main> {\n\n /// Clones and adds all elements in `other` into a new hypergraph inside `location`.\n\n pub fn extend_from_hypegraph<Ty>(\n\n &mut self,\n\n other: &Hypergraph<N, E, H, L, Ty>,\n\n location: impl AsRef<[usize]>,\n\n ) -> Result<Vec<usize>, errors::AddError>\n\n where\n\n N: Clone,\n\n E: Clone,\n\n H: Clone,\n\n L: Clone,\n\n Ty: Clone,\n", "file_path": "src/hypergraph/extend.rs", "rank": 78, "score": 19746.47615267512 }, { "content": " Err(errors::NoHypergraph(id.to_vec()))?\n\n }\n\n\n\n match id.len() {\n\n 0 => Ok(self.value_mut()),\n\n _ => {\n\n let hypergraph = self.hypergraph_of_mut(&id)?;\n\n let local_id = id.last().unwrap(); // Never fails since id is non empty.\n\n let hypergraph_value = hypergraph\n\n .raw_hypergraphs_mut()\n\n .get_mut(local_id)\n\n .map(|hypergraph_full| &mut hypergraph_full.0.value)\n\n .unwrap(); // Never fails since id refers to a hypergraph\n\n Ok(hypergraph_value)\n\n }\n\n }\n\n }\n\n\n\n /// Returns an iterator over all valid ids of `self`.\n\n pub fn ids<'a>(&'a self) -> iterators::WalkIter<'a, N, E, H, L, Ty, walkers::WalkIds> {\n", "file_path": "src/hypergraph/get.rs", "rank": 79, "score": 19745.9530270875 }, { "content": " }\n\n\n\n pub fn set_element_value(\n\n &mut self,\n\n id: impl AsRef<[usize]>,\n\n new_value: ElementValue<N, E, H, L>,\n\n ) -> Result<ElementValue<N, E, H, L>, errors::SetError> {\n\n match new_value {\n\n ElementValue::Edge { value } => {\n\n let old_value = self.set_edge_value(id, value)?;\n\n Ok(ElementValue::Edge { value: old_value })\n\n }\n\n ElementValue::Hypergraph { value } => {\n\n let old_value = self.set_hypergraph_value(id, value)?;\n\n Ok(ElementValue::Hypergraph { value: old_value })\n\n }\n\n ElementValue::Link { value } => {\n\n let old_value = self.set_link_value(id, value)?;\n\n 
Ok(ElementValue::Link { value: old_value })\n\n }\n", "file_path": "src/hypergraph/set.rs", "rank": 80, "score": 19745.794466547897 }, { "content": "\n\n pub fn link_value_mut(\n\n &mut self,\n\n id: impl AsRef<[usize]>,\n\n ) -> Result<&mut Option<L>, errors::GetError> {\n\n let id = id.as_ref();\n\n if !self.contains_link(&id) {\n\n Err(errors::NoLink(id.to_vec()))?\n\n }\n\n let hypergraph = self.hypergraph_of_mut(&id).unwrap(); // Never fails since id refers to a valid link\n\n let local_id = id.last().unwrap(); // Never fails since id refers to a valid link\n\n let link_value = hypergraph\n\n .raw_links_mut()\n\n .get_mut(local_id)\n\n .map(|link_full| &mut link_full.0)\n\n .unwrap(); // Never fails since id refers to a valid link\n\n Ok(link_value)\n\n }\n\n\n\n /// Returns an iterator over outgoing neighbors.\n", "file_path": "src/hypergraph/get.rs", "rank": 81, "score": 19745.55538188872 }, { "content": " &mut self,\n\n id: impl AsRef<[usize]>,\n\n ) -> Result<ElementValue<&mut N, &mut E, &mut H, &mut L>, errors::GetError> {\n\n let id = id.as_ref();\n\n if id.is_empty() {\n\n return Ok(ElementValue::Hypergraph {\n\n value: self.value_mut().as_mut(),\n\n });\n\n }\n\n\n\n let local_id = id.last().unwrap(); // Never fails by previous check\n\n let mut hypergraph = self.hypergraph_of_mut(&id)?;\n\n\n\n let element = match hypergraph.element_type([*local_id])? 
{\n\n ElementType::Edge => {\n\n let edge_full = hypergraph.raw_edges_mut().get_mut(local_id).unwrap();\n\n ElementValue::Edge {\n\n value: &mut edge_full.0,\n\n }\n\n }\n", "file_path": "src/hypergraph/get.rs", "rank": 82, "score": 19745.334021326365 }, { "content": " ///\n\n /// If `id` is not a valid element, the iterator returns always `None`.\n\n pub fn neighbors<'a>(\n\n &'a self,\n\n id: impl AsRef<[usize]>,\n\n ) -> iterators::WalkIter<'a, N, E, H, L, Ty, walkers::WalkNeighbors> {\n\n let direction = Direction::Outgoing;\n\n walkers::WalkNeighbors::new(direction, id).build_iter(self)\n\n }\n\n\n\n pub fn neighbors_directed<'a>(\n\n &'a self,\n\n id: impl AsRef<[usize]>,\n\n direction: Direction,\n\n ) -> iterators::WalkIter<'a, N, E, H, L, Ty, walkers::WalkNeighbors> {\n\n walkers::WalkNeighbors::new(direction, id).build_iter(self)\n\n }\n\n\n\n /// Returns the next valid id.\n\n ///\n", "file_path": "src/hypergraph/get.rs", "rank": 83, "score": 19745.27556901239 }, { "content": " }\n\n\n\n /// Returns the links of an element of the current hypergraph, `None` if the element does not exists or is a link.\n\n ///\n\n /// # Notes\n\n ///\n\n /// Not meant to be public.\n\n /// Be very careful when using this method! 
All invariants of the datastructure must hold!\n\n pub(crate) fn links_of_mut(\n\n &mut self,\n\n id: impl AsRef<[usize]>,\n\n ) -> Result<&mut Vec<(Vec<usize>, Direction)>, errors::GetError> {\n\n let id = id.as_ref();\n\n if !self.contains_linkable(&id) {\n\n Err(errors::NoElementLinkable(id.to_vec()))?;\n\n }\n\n let mut hypergraph = self.hypergraph_of_mut(&id)?;\n\n let local_id = id.last().unwrap(); // Never fails since id refers to a linkable element\n\n let links = match hypergraph.element_type([*local_id]).unwrap() // Never fails since id refers to a linkable element\n\n {\n", "file_path": "src/hypergraph/get.rs", "rank": 84, "score": 19745.179745370515 }, { "content": "\n\n pub fn raw_nodes(&self) -> &IndexMap<usize, (N, Vec<(Vec<usize>, Direction)>)> {\n\n &self.nodes\n\n }\n\n\n\n pub(crate) fn raw_nodes_mut(\n\n &mut self,\n\n ) -> &mut IndexMap<usize, (N, Vec<(Vec<usize>, Direction)>)> {\n\n &mut self.nodes\n\n }\n\n\n\n /// Returns the subgraph with id `id`, if it exists.\n\n ///\n\n /// `None` is returned when `id` is empty, or there is no (sub-)hypergraph with such `id`.\n\n pub fn subhypergraph(\n\n &self,\n\n id: impl AsRef<[usize]>,\n\n ) -> Result<&Hypergraph<N, E, H, L, Sub>, errors::GetError> {\n\n let id = id.as_ref().to_vec();\n\n match id.len() {\n", "file_path": "src/hypergraph/get.rs", "rank": 85, "score": 19744.86157341853 }, { "content": "use crate::{Hypergraph, Main, Sub};\n\n\n\n/// # Add\n\n///\n\n/// A graph that can be extended with further nodes and edges\n\nimpl<N, E, H, L, Ty> Hypergraph<N, E, H, L, Ty> {\n\n /// Utility method to transform into a Hypergraph of class `Sub`.\n\n pub fn into_sub(self) -> Hypergraph<N, E, H, L, Sub> {\n\n Hypergraph {\n\n value: self.value,\n\n edges: self.edges,\n\n nodes: self.nodes,\n\n links: self.links,\n\n hypergraphs: self.hypergraphs,\n\n next_id: self.next_id,\n\n class: Sub,\n\n }\n\n }\n\n\n\n /// Pre-appends `location` to all absolute ids.\n", "file_path": "src/hypergraph/transform.rs", 
"rank": 86, "score": 19744.284434735793 }, { "content": " let mut subhypergraph = match self.raw_hypergraphs_mut().get_mut(&local_id) {\n\n None => Err(errors::NoHypergraph(vec![local_id]))?,\n\n Some(hypergraph_full) => &mut hypergraph_full.0,\n\n };\n\n for (counter, local_id) in id.iter().enumerate().skip(1) {\n\n subhypergraph = match subhypergraph.raw_hypergraphs_mut().get_mut(local_id) {\n\n None => Err(errors::NoHypergraph(id[0..=counter].to_vec()))?,\n\n Some(hypergraph_full) => &mut hypergraph_full.0,\n\n };\n\n }\n\n Ok(subhypergraph)\n\n }\n\n }\n\n }\n\n\n\n pub fn value(&self) -> &Option<H> {\n\n &self.value\n\n }\n\n pub fn value_mut(self: &mut Self) -> &mut Option<H> {\n\n &mut self.value\n", "file_path": "src/hypergraph/get.rs", "rank": 87, "score": 19744.082973461136 }, { "content": "use crate::{\n\n direction::Direction,\n\n elements::{Element, ElementExt, ElementValue},\n\n errors, Hypergraph, Main,\n\n};\n\n\n\n/// # Add\n\n///\n\n/// A graph that can be extended with further nodes and edges\n\nimpl<N, E, H, L> Hypergraph<N, E, H, L, Main> {\n\n /// Adds an element to the top level.\n\n ///\n\n /// If you want to specify a location, see method [`add_element_in`].\n\n ///\n\n /// # Errors\n\n ///\n\n /// If `element` is a connection (edge or link) and `source` or `target` can not be connected through `elmenet`.\n\n ///\n\n // # Note\n\n //\n", "file_path": "src/hypergraph/add.rs", "rank": 88, "score": 19743.420764263014 }, { "content": " for (_, source, target) in self.raw_links_mut().values_mut() {\n\n let mut new_source = location.clone();\n\n new_source.extend_from_slice(source);\n\n *source = new_source;\n\n let mut new_target = location.clone();\n\n new_target.extend_from_slice(target);\n\n *target = new_target;\n\n }\n\n\n\n // Recursive call\n\n for local_id in self.raw_hypergraphs().keys().cloned().collect::<Vec<_>>() {\n\n let subhypergraph = self.subhypergraph_mut([local_id]).unwrap(); // Never fails since local_id is valid\n\n 
subhypergraph.preappend_id(&location);\n\n }\n\n self\n\n }\n\n}\n\n\n\nimpl<N, E, H, L> From<Hypergraph<N, E, H, L, Main>> for Hypergraph<N, E, H, L, Sub> {\n\n fn from(source: Hypergraph<N, E, H, L, Main>) -> Self {\n", "file_path": "src/hypergraph/transform.rs", "rank": 89, "score": 19743.25353228709 }, { "content": " pub fn find_element_by_value(\n\n &self,\n\n value: ElementValue<&N, &E, &H, &L>,\n\n ) -> Result<Vec<usize>, errors::FindError>\n\n where\n\n N: PartialEq,\n\n E: PartialEq,\n\n H: PartialEq,\n\n L: PartialEq,\n\n {\n\n match value {\n\n ElementValue::Edge { value } => self.find_edge_by_value(value),\n\n ElementValue::Hypergraph { value } => self.find_hypergraph_by_value(value),\n\n ElementValue::Link { value } => self.find_link_by_value(value),\n\n ElementValue::Node { value } => self.find_node_by_value(value),\n\n }\n\n }\n\n\n\n pub fn find_edge_by_value(&self, value: &E) -> Result<Vec<usize>, errors::FindError>\n\n where\n", "file_path": "src/hypergraph/find.rs", "rank": 90, "score": 19742.93651705114 }, { "content": " }\n\n\n\n pub fn set_hypergraph_value(\n\n &mut self,\n\n id: impl AsRef<[usize]>,\n\n new_value: impl Into<Option<H>>,\n\n ) -> Result<Option<H>, errors::SetError> {\n\n let id = id.as_ref();\n\n let mut new_value = new_value.into();\n\n if !self.contains_hypergraph(id) {\n\n Err(errors::NoHypergraph(id.to_vec()))?\n\n }\n\n let old_value = self.hypergraph_value_mut(id).unwrap(); // Never fails since id refers to a link\n\n mem::swap(old_value, &mut new_value);\n\n Ok(new_value)\n\n }\n\n\n\n pub fn set_node_value(\n\n &mut self,\n\n id: impl AsRef<[usize]>,\n", "file_path": "src/hypergraph/set.rs", "rank": 91, "score": 19742.70447211789 }, { "content": " .unwrap() // Never fails since id refers to a linkable element\n\n }\n\n };\n\n Ok(links)\n\n }\n\n\n\n pub fn link_value(&self, id: impl AsRef<[usize]>) -> Result<&Option<L>, errors::GetError> {\n\n let id = id.as_ref();\n\n if !self.contains_link(&id) {\n\n 
Err(errors::NoLink(id.to_vec()))?\n\n }\n\n let hypergraph = self.hypergraph_of(&id).unwrap(); // Never fails since id refers to a valid link\n\n let local_id = id.last().unwrap(); // Never fails since id refers to a valid link\n\n let link_value = hypergraph\n\n .raw_links()\n\n .get(local_id)\n\n .map(|link_full| &link_full.0)\n\n .unwrap(); // Never fails since id refers to a valid link\n\n Ok(link_value)\n\n }\n", "file_path": "src/hypergraph/get.rs", "rank": 92, "score": 19742.52456263681 }, { "content": " let id = id.as_ref();\n\n if !self.contains_edge(&id) {\n\n Err(errors::NoEdge(id.to_vec()))?\n\n }\n\n let hypergraph = self.hypergraph_of_mut(&id).unwrap(); // Never fails since id refers to a valid edge\n\n let local_id = id.last().unwrap(); // Never fails since id refers to a valid edge\n\n let edge_value = hypergraph\n\n .raw_edges_mut()\n\n .get_mut(local_id)\n\n .map(|edge_full| &mut edge_full.0)\n\n .unwrap(); // Never fails since id refers to a valid edge\n\n Ok(edge_value)\n\n }\n\n\n\n pub fn element_type(&self, id: impl AsRef<[usize]>) -> Result<ElementType, errors::GetError> {\n\n self.element_value(id)\n\n .map(|element| -> ElementType { element.into() })\n\n }\n\n\n\n pub fn element_value(\n", "file_path": "src/hypergraph/get.rs", "rank": 93, "score": 19742.477522439054 }, { "content": " ElementValue::Node { value } => {\n\n let old_value = self.set_node_value(id, value)?;\n\n Ok(ElementValue::Node { value: old_value })\n\n }\n\n }\n\n }\n\n\n\n pub fn set_link_value(\n\n &mut self,\n\n id: impl AsRef<[usize]>,\n\n new_value: impl Into<Option<L>>,\n\n ) -> Result<Option<L>, errors::SetError> {\n\n let id = id.as_ref();\n\n let mut new_value = new_value.into();\n\n if !self.contains_link(id) {\n\n Err(errors::NoLink(id.to_vec()))?\n\n }\n\n let old_value = self.link_value_mut(id).unwrap(); // Never fails since id refers to a link\n\n mem::swap(old_value, &mut new_value);\n\n Ok(new_value)\n", "file_path": "src/hypergraph/set.rs", "rank": 94, 
"score": 19742.373756103716 }, { "content": " ///\n\n /// If `location` does not correspond to a hypergraph.\n\n pub fn add_hypergraph_in(\n\n &mut self,\n\n value: impl Into<Option<H>>,\n\n location: impl AsRef<[usize]>,\n\n ) -> Result<Vec<usize>, errors::AddError> {\n\n let element = ElementExt::Hypergraph {\n\n value: value.into(),\n\n };\n\n self.add_element_in(element, location)\n\n }\n\n\n\n /// Adds a link in the top level.\n\n ///\n\n /// # Errors\n\n ///\n\n /// If `source` or `target` do not correspond to linkable elements.\n\n pub fn add_link(\n\n &mut self,\n", "file_path": "src/hypergraph/add.rs", "rank": 95, "score": 19741.44932787315 }, { "content": " 0 => Err(errors::RootHypergraph)?,\n\n 1 => Ok(HypergraphEnum::Original(self)),\n\n _ => {\n\n let id = &id[0..id.len() - 1];\n\n let subhypergraph = self.subhypergraph_mut(&id)?;\n\n Ok(HypergraphEnum::Sub(subhypergraph))\n\n }\n\n }\n\n }\n\n\n\n pub fn hypergraph_value(\n\n &self,\n\n id: impl AsRef<[usize]>,\n\n ) -> Result<&Option<H>, errors::GetError> {\n\n let id = id.as_ref();\n\n if !self.contains_hypergraph(id) {\n\n Err(errors::NoHypergraph(id.to_vec()))?\n\n }\n\n\n\n match id.len() {\n", "file_path": "src/hypergraph/get.rs", "rank": 96, "score": 19741.439711490006 }, { "content": " &self,\n\n id: impl AsRef<[usize]>,\n\n ) -> Result<ElementValue<&N, &E, &H, &L>, errors::GetError> {\n\n let id = id.as_ref();\n\n if id.is_empty() {\n\n return Ok(ElementValue::Hypergraph {\n\n value: self.value().as_ref(),\n\n });\n\n }\n\n\n\n let local_id = id.last().unwrap(); // Never fails by previous check\n\n\n\n let hypergraph = self.hypergraph_of(&id)?;\n\n\n\n let element;\n\n if let Some(edge_full) = hypergraph.raw_edges().get(local_id) {\n\n element = ElementValue::Edge {\n\n value: &edge_full.0,\n\n };\n\n return Ok(element);\n", "file_path": "src/hypergraph/get.rs", "rank": 97, "score": 19741.346904977265 }, { "content": "use core::fmt::{Debug, Display};\n\nuse std::{fs, io, io::Write, process, 
rc::Rc};\n\n\n\nuse crate::{traits::HypergraphClass, Hypergraph};\n\n\n\npub struct DotFormatter<N, E, H, L> {\n\n pub edge: Rc<dyn Fn(&Vec<usize>, &E) -> String>,\n\n pub node: Rc<dyn Fn(&Vec<usize>, &N) -> String>,\n\n pub hypergraph: Rc<dyn Fn(&Vec<usize>, &Option<H>) -> String>,\n\n pub link: Rc<dyn Fn(&Vec<usize>, &Option<L>) -> String>,\n\n}\n\n\n\nimpl<N, E, H, L> DotFormatter<N, E, H, L> {\n\n /// Creates a new `DotFormatter` that forwards the `Debug` implementation in all fields\n\n ///\n\n /// Values `None` are left blank.\n\n pub fn debug() -> Self\n\n where\n\n N: Debug,\n\n E: Debug,\n", "file_path": "src/hypergraph/visualize.rs", "rank": 98, "score": 19741.18769598555 }, { "content": " }\n\n\n\n /// Returns the hypergraph with id `id`, if it exists.\n\n ///\n\n /// `None` is returned when the element does not exists.\n\n pub fn hypergraph(\n\n &self,\n\n id: impl AsRef<[usize]>,\n\n ) -> Result<HypergraphEnum<&Self, &Hypergraph<N, E, H, L, Sub>>, errors::GetError> {\n\n let id = id.as_ref();\n\n if id.is_empty() {\n\n return Ok(HypergraphEnum::Original(&self));\n\n }\n\n let h = self.subhypergraph(id)?;\n\n Ok(HypergraphEnum::Sub(h))\n\n }\n\n\n\n /// Returns the hypergraph with id `id`, if it exists.\n\n ///\n\n /// `None` is returned when the element does not exists.\n", "file_path": "src/hypergraph/get.rs", "rank": 99, "score": 19741.146874669852 } ]
Rust
src/main.rs
danielphan2003/rnix-lsp
c89c8c20700317716cbfbd1bc6731e5056d8e66b
#![warn( missing_copy_implementations, missing_debug_implementations, clippy::cargo_common_metadata, clippy::clone_on_ref_ptr, clippy::dbg_macro, clippy::decimal_literal_representation, clippy::float_cmp_const, clippy::get_unwrap, clippy::integer_arithmetic, clippy::integer_division, clippy::pedantic, )] #![allow( clippy::filter_map, clippy::integer_arithmetic, )] mod lookup; mod utils; use dirs::home_dir; use log::{error, trace, warn}; use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response}; use lsp_types::{ notification::{Notification as _, *}, request::{Request as RequestTrait, *}, *, }; use rnix::{ parser::*, types::*, value::{Anchor as RAnchor, Value as RValue}, SyntaxNode, TextRange, TextSize, }; use std::{ collections::HashMap, panic, path::{Path, PathBuf}, process, rc::Rc, }; type Error = Box<dyn std::error::Error>; fn main() { if let Err(err) = real_main() { error!("Error: {} ({:?})", err, err); error!("A fatal error has occured and rnix-lsp will shut down."); drop(err); process::exit(libc::EXIT_FAILURE); } } fn real_main() -> Result<(), Error> { env_logger::init(); panic::set_hook(Box::new(move |panic| { error!("----- Panic -----"); error!("{}", panic); })); let (connection, io_threads) = Connection::stdio(); let capabilities = serde_json::to_value(&ServerCapabilities { text_document_sync: Some(TextDocumentSyncCapability::Options( TextDocumentSyncOptions { open_close: Some(true), change: Some(TextDocumentSyncKind::Full), ..TextDocumentSyncOptions::default() }, )), completion_provider: Some(CompletionOptions { ..CompletionOptions::default() }), definition_provider: Some(true), document_formatting_provider: Some(true), document_link_provider: Some(DocumentLinkOptions { resolve_provider: Some(false), work_done_progress_options: WorkDoneProgressOptions::default(), }), rename_provider: Some(RenameProviderCapability::Simple(true)), selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)), 
..ServerCapabilities::default() }) .unwrap(); connection.initialize(capabilities)?; App { files: HashMap::new(), conn: connection, } .main(); io_threads.join()?; Ok(()) } struct App { files: HashMap<Url, (AST, String)>, conn: Connection, } impl App { fn reply(&mut self, response: Response) { trace!("Sending response: {:#?}", response); self.conn.sender.send(Message::Response(response)).unwrap(); } fn notify(&mut self, notification: Notification) { trace!("Sending notification: {:#?}", notification); self.conn .sender .send(Message::Notification(notification)) .unwrap(); } fn err<E>(&mut self, id: RequestId, err: E) where E: std::fmt::Display, { warn!("{}", err); self.reply(Response::new_err( id, ErrorCode::UnknownErrorCode as i32, err.to_string(), )); } fn main(&mut self) { while let Ok(msg) = self.conn.receiver.recv() { trace!("Message: {:#?}", msg); match msg { Message::Request(req) => { let id = req.id.clone(); match self.conn.handle_shutdown(&req) { Ok(true) => break, Ok(false) => self.handle_request(req), Err(err) => { self.err(id, err); break; } } } Message::Notification(notification) => { let _ = self.handle_notification(notification); } Message::Response(_) => (), } } } fn handle_request(&mut self, req: Request) { fn cast<Kind>(req: &mut Option<Request>) -> Option<(RequestId, Kind::Params)> where Kind: RequestTrait, Kind::Params: serde::de::DeserializeOwned, { match req.take().unwrap().extract::<Kind::Params>(Kind::METHOD) { Ok(value) => Some(value), Err(owned) => { *req = Some(owned); None } } } let mut req = Some(req); if let Some((id, params)) = cast::<GotoDefinition>(&mut req) { if let Some(pos) = self.lookup_definition(params) { self.reply(Response::new_ok(id, pos)); } else { self.reply(Response::new_ok(id, ())); } } else if let Some((id, params)) = cast::<Completion>(&mut req) { let completions = self .completions(&params.text_document_position) .unwrap_or_default(); self.reply(Response::new_ok(id, completions)); } else if let Some((id, params)) = 
cast::<Rename>(&mut req) { let changes = self.rename(params); self.reply(Response::new_ok( id, WorkspaceEdit { changes, ..WorkspaceEdit::default() }, )); } else if let Some((id, params)) = cast::<DocumentLinkRequest>(&mut req) { let document_links = self.document_links(&params).unwrap_or_default(); self.reply(Response::new_ok(id, document_links)); } else if let Some((id, params)) = cast::<Formatting>(&mut req) { let changes = if let Some((ast, code)) = self.files.get(&params.text_document.uri) { let fmt = nixpkgs_fmt::reformat_node(&ast.node()); vec![TextEdit { range: utils::range(&code, TextRange::up_to(ast.node().text().len())), new_text: fmt.text().to_string(), }] } else { Vec::new() }; self.reply(Response::new_ok(id, changes)); } else if let Some((id, params)) = cast::<SelectionRangeRequest>(&mut req) { let mut selections = Vec::new(); if let Some((ast, code)) = self.files.get(&params.text_document.uri) { for pos in params.positions { selections.push(utils::selection_ranges(&ast.node(), code, pos)); } } self.reply(Response::new_ok(id, selections)); } else { let req = req.expect("internal error: req should have been wrapped in Some"); self.reply(Response::new_err( req.id, ErrorCode::MethodNotFound as i32, format!("Unhandled method {}", req.method), )) } } fn handle_notification(&mut self, req: Notification) -> Result<(), Error> { match &*req.method { DidOpenTextDocument::METHOD => { let params: DidOpenTextDocumentParams = serde_json::from_value(req.params)?; let text = params.text_document.text; let parsed = rnix::parse(&text); self.send_diagnostics(params.text_document.uri.clone(), &text, &parsed)?; self.files.insert(params.text_document.uri, (parsed, text)); } DidChangeTextDocument::METHOD => { let params: DidChangeTextDocumentParams = serde_json::from_value(req.params)?; if let Some(change) = params.content_changes.into_iter().last() { let parsed = rnix::parse(&change.text); self.send_diagnostics(params.text_document.uri.clone(), &change.text, &parsed)?; 
self.files .insert(params.text_document.uri, (parsed, change.text)); } } _ => (), } Ok(()) } fn lookup_definition(&mut self, params: TextDocumentPositionParams) -> Option<Location> { let (current_ast, current_content) = self.files.get(&params.text_document.uri)?; let offset = utils::lookup_pos(current_content, params.position)?; let node = current_ast.node(); let (name, scope, _) = self.scope_for_ident(params.text_document.uri, &node, offset)?; let var_e = scope.get(name.as_str())?; if let (_, Some(var)) = var_e { let (_definition_ast, definition_content) = self.files.get(&var.file)?; Some(Location { uri: (*var.file).clone(), range: utils::range(definition_content, var.key.text_range()), }) } else { None } } #[allow(clippy::shadow_unrelated)] fn completions(&mut self, params: &TextDocumentPositionParams) -> Option<Vec<CompletionItem>> { let (ast, content) = self.files.get(&params.text_document.uri)?; let offset = utils::lookup_pos(content, params.position)?; let node = ast.node(); let (node, scope, name) = self.scope_for_ident(params.text_document.uri.clone(), &node, offset)?; let (_, content) = self.files.get(&params.text_document.uri)?; let mut completions = Vec::new(); for (var, (datatype, _)) in scope { if var.starts_with(&name.as_str()) { completions.push(CompletionItem { label: var.clone(), text_edit: Some(TextEdit { range: utils::range(content, node.node().text_range()), new_text: var.clone(), }), detail: Some(datatype.to_string()), ..CompletionItem::default() }); } } Some(completions) } fn rename(&mut self, params: RenameParams) -> Option<HashMap<Url, Vec<TextEdit>>> { struct Rename<'a> { edits: Vec<TextEdit>, code: &'a str, old: &'a str, new_name: String, } fn rename_in_node(rename: &mut Rename, node: &SyntaxNode) -> Option<()> { if let Some(ident) = Ident::cast(node.clone()) { if ident.as_str() == rename.old { rename.edits.push(TextEdit { range: utils::range(rename.code, node.text_range()), new_text: rename.new_name.clone(), }); } } else if let 
Some(index) = Select::cast(node.clone()) { rename_in_node(rename, &index.set()?); } else if let Some(attr) = Key::cast(node.clone()) { let mut path = attr.path(); if let Some(ident) = path.next() { rename_in_node(rename, &ident); } } else { for child in node.children() { rename_in_node(rename, &child); } } Some(()) } let uri = params.text_document_position.text_document.uri; let (ast, code) = self.files.get(&uri)?; let offset = utils::lookup_pos(code, params.text_document_position.position)?; let info = utils::ident_at(&ast.node(), offset)?; if !info.path.is_empty() { return None; } let old = info.ident; let scope = utils::scope_for(&Rc::new(uri.clone()), old.node().clone())?; let mut rename = Rename { edits: Vec::new(), code, old: old.as_str(), new_name: params.new_name, }; let definition = scope.get(old.as_str())?; rename_in_node(&mut rename, &definition.set); let mut changes = HashMap::new(); changes.insert(uri, rename.edits); Some(changes) } fn document_links(&mut self, params: &DocumentLinkParams) -> Option<Vec<DocumentLink>> { let (current_ast, current_content) = self.files.get(&params.text_document.uri)?; let parent_dir = Path::new(params.text_document.uri.path()).parent(); let home_dir = home_dir(); let home_dir = home_dir.as_ref(); let mut document_links = vec![]; for node in current_ast.node().descendants() { let value = Value::cast(node.clone()).and_then(|v| v.to_value().ok()); if let Some(RValue::Path(anchor, path)) = value { let file_url = match anchor { RAnchor::Absolute => Some(PathBuf::from(&path)), RAnchor::Relative => parent_dir.map(|p| p.join(path)), RAnchor::Home => home_dir.map(|home| home.join(path)), RAnchor::Store => None, } .and_then(|path| std::fs::canonicalize(&path).ok()) .filter(|path| path.is_file()) .and_then(|s| Url::parse(&format!("file://{}", s.to_string_lossy())).ok()); if let Some(file_url) = file_url { document_links.push(DocumentLink { target: file_url, range: utils::range(current_content, node.text_range()), tooltip: None, }) 
} } } Some(document_links) } fn send_diagnostics(&mut self, uri: Url, code: &str, ast: &AST) -> Result<(), Error> { let errors = ast.errors(); let mut diagnostics = Vec::with_capacity(errors.len()); for err in errors { let node_range = match err { ParseError::Unexpected(range) | ParseError::UnexpectedDoubleBind(range) | ParseError::UnexpectedExtra(range) | ParseError::UnexpectedWanted(_, range, _) => Some(range), ParseError::UnexpectedEOF | ParseError::UnexpectedEOFWanted(_) => { Some(TextRange::at(TextSize::of(code), TextSize::from(0))) } _ => None, }; if let Some(node_range) = node_range { diagnostics.push(Diagnostic { range: utils::range(code, node_range), severity: Some(DiagnosticSeverity::Error), message: err.to_string(), ..Diagnostic::default() }); } } self.notify(Notification::new( "textDocument/publishDiagnostics".into(), PublishDiagnosticsParams { uri, diagnostics, version: None, }, )); Ok(()) } }
#![warn( missing_copy_implementations, missing_debug_implementations, clippy::cargo_common_metadata, clippy::clone_on_ref_ptr, clippy::dbg_macro, clippy::decimal_literal_representation, clippy::float_cmp_const, clippy::get_unwrap, clippy::integer_arithmetic, clippy::integer_division, clippy::pedantic, )] #![allow( clippy::filter_map, clippy::integer_arithmetic, )] mod lookup; mod utils; use dirs::home_dir; use log::{error, trace, warn}; use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response}; use lsp_types::{ notification::{Notification as _, *}, request::{Request as RequestTrait, *}, *, }; use rnix::{ parser::*, types::*, value::{Anchor as RAnchor, Value as RValue}, SyntaxNode, TextRange, TextSize, }; use std::{ collections::HashMap, panic, path::{Path, PathBuf}, process, rc::Rc, }; type Error = Box<dyn std::error::Error>; fn main() { if let Err(err) = real_main() { error!("Error: {} ({:?})", err, err); error!("A fatal error has occured and rnix-lsp will shut down."); drop(err); process::exit(libc::EXIT_FAILURE); } } fn real_main() -> Result<(), Error> { env_logger::init(); panic::set_hook(Box::new(move |panic| { error!("----- Panic -----"); error!("{}", panic); })); let (connection, io_threads) = Connection::stdio(); let capabilities = serde_json::to_value(&ServerCapabilities { text_document_sync: Some(TextDocumentSyncCapability::Options( TextDocumentSyncOptions { open_close: Some(true), change: Some(TextDocumentSyncKind::Full), ..TextDocumentSyncOptions::default() }, )), completion_provider: Some(CompletionOptions { ..CompletionOptions::default() }), definition_provider: Some(true), document_formatting_provider: Some(true), document_link_provider: Some(DocumentLinkOptions { resolve_provider: Some(false), work_done_progress_options: WorkDoneProgressOptions::default(), }), rename_provider: Some(RenameProviderCapability::Simple(true)), selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)), 
..ServerCapabilities::default() }) .unwrap(); connection.initialize(capabilities)?; App { files: HashMap::new(), conn: connection, } .main(); io_threads.join()?; Ok(()) } struct App { files: HashMap<Url, (AST, String)>, conn: Connection, } impl App { fn reply(&mut self, response: Response) { trace!("Sending response: {:#?}", response); self.conn.sender.send(Message::Response(response)).unwrap(); } fn notify(&mut self, notification: Notification) { trace!("Sending notification: {:#?}", notification); self.conn .sender .send(Message::Notification(notification)) .unwrap(); } fn err<E>(&mut self, id: RequestId, err: E) where E: std::fmt::Display, { warn!("{}", err); self.reply(Response::new_err( id, ErrorCode::UnknownErrorCode as i32, err.to_string(), )); } fn main(&mut self) { while let Ok(msg) = self.conn.receiver.recv() { trace!("Message: {:#?}", msg); match msg { Message::Request(req) => { let id = req.id.clone(); match self.conn.handle_shutdown(&req) { Ok(true) => break, Ok(false) => self.handle_request(req), Err(err) => { self.err(id, err); break; } } } Message::Notification(notification) => { let _ = self.handle_notification(notification); } Message::Response(_) => (), } } } fn handle_request(&mut self, req: Request) { fn cast<Kind>(req: &mut Option<Request>) -> Option<(RequestId, Kind::Params)> where Kind: RequestTrait, Kind::Params: serde::de::DeserializeOwned, { match req.take().unwrap().extract::<Kind::Params>(Kind::METHOD) { Ok(value) => Some(value), Err(owned) => { *req = Some(owned); None } } } let mut req = Some(req); if let Some((id, params)) = cast::<GotoDefinition>(&mut req) { if let Some(pos) = self.lookup_definition(params) { self.reply(Response::new_ok(id, pos)); } else { self.reply(Response::new_ok(id, ())); } } else if let Some((id, params)) = cast::<Completion>(&mut req) { let completions = self .completions(&params.text_document_position) .unwrap_or_default(); self.reply(Response::new_ok(id, completions)); } else if let Some((id, params)) = 
cast::<Rename>(&mut req) { let changes = self.rename(params); self.reply(Response::new_ok( id, WorkspaceEdit { changes, ..WorkspaceEdit::default() }, )); } else if let Some((id, params)) = cast::<DocumentLinkRequest>(&mut req) { let document_links = self.document_links(&params).unwrap_or_default(); self.reply(Response::new_ok(id, document_links)); } else if let Some((id, params)) = cast::<Formatting>(&mut req) { let changes = if let Some((ast, code)) = self.files.get(&params.text_document.uri) { let fmt = nixpkgs_fmt::reformat_node(&ast.node()); vec![TextEdit { range: utils::range(&code, TextRange::up_to(ast.node().text().len())), new_text: fmt.text().to_string(), }] } else { Vec::new() }; self.reply(Response::new_ok(id, changes)); } else if let Some((id, params)) = cast::<SelectionRangeRequest>(&mut req) { let mut selections = Vec::new(); if let Some((ast, code)) = self.files.get(&params.text_document.uri) { for pos in params.positions { selections.push(utils::selection_ranges(&ast.node(), code, pos)); } } self.reply(Response::new_ok(id, selections)); } else { let req = req.expect("internal error: req should have been wrapped in Some"); self.reply(Response::new_err( req.id, ErrorCode::MethodNotFound as i32, format!("Unhandled method {}", req.method), )) } } fn handle_notification(&mut self, req: Notification) -> Result<(), Error> { match &*req.method { DidOpenTextDocument::METHOD => { let params: DidOpenTextDocumentParams = serde_json::from_value(req.params)?; let text = params.text_document.text; let parsed = rnix::parse(&text); self.send_diagnostics(params.text_document.uri.clone(), &text, &parsed)?; self.files.insert(params.text_document.uri, (parsed, text)); } DidChangeTextDocument::METHOD => { let params: DidChangeTextDocumentParams = serde_json::from_value(req.params)?; if let Some(change) = params.content_changes.into_iter().last() { let parsed = rnix::parse(&change.text); self.send_diagnostics(params.text_document.uri.clone(), &change.text, &parsed)?; 
self.files .insert(params.text_document.uri, (parsed, change.text)); } } _ => (), } Ok(()) } fn lookup_definition(&mut self, params: TextDocumentPositionParams) -> Option<Location> { let (current_ast, current_content) = self.files.get(&params.text_document.uri)?; let offset = utils::lookup_pos(current_content, params.position)?; let node = current_ast.node(); let (name, scope, _) = self.scope_for_ident(params.text_document.uri, &node, offset)?; let var_e = scope.get(name.as_str())?; if let (_, Some(var)) = var_e { let (_definition_ast, definition_content) = self.files.get(&var.file)?; Some(Location { uri: (*var.file).clone(), range: utils::range(definition_content, var.key.text_range()), }) } else { None } } #[allow(clippy::shadow_unrelated)] fn completions(&mut self, params: &TextDocumentPositionParams) -> Option<Vec<CompletionItem>> { let (ast, content) = sel
( "textDocument/publishDiagnostics".into(), PublishDiagnosticsParams { uri, diagnostics, version: None, }, )); Ok(()) } }
f.files.get(&params.text_document.uri)?; let offset = utils::lookup_pos(content, params.position)?; let node = ast.node(); let (node, scope, name) = self.scope_for_ident(params.text_document.uri.clone(), &node, offset)?; let (_, content) = self.files.get(&params.text_document.uri)?; let mut completions = Vec::new(); for (var, (datatype, _)) in scope { if var.starts_with(&name.as_str()) { completions.push(CompletionItem { label: var.clone(), text_edit: Some(TextEdit { range: utils::range(content, node.node().text_range()), new_text: var.clone(), }), detail: Some(datatype.to_string()), ..CompletionItem::default() }); } } Some(completions) } fn rename(&mut self, params: RenameParams) -> Option<HashMap<Url, Vec<TextEdit>>> { struct Rename<'a> { edits: Vec<TextEdit>, code: &'a str, old: &'a str, new_name: String, } fn rename_in_node(rename: &mut Rename, node: &SyntaxNode) -> Option<()> { if let Some(ident) = Ident::cast(node.clone()) { if ident.as_str() == rename.old { rename.edits.push(TextEdit { range: utils::range(rename.code, node.text_range()), new_text: rename.new_name.clone(), }); } } else if let Some(index) = Select::cast(node.clone()) { rename_in_node(rename, &index.set()?); } else if let Some(attr) = Key::cast(node.clone()) { let mut path = attr.path(); if let Some(ident) = path.next() { rename_in_node(rename, &ident); } } else { for child in node.children() { rename_in_node(rename, &child); } } Some(()) } let uri = params.text_document_position.text_document.uri; let (ast, code) = self.files.get(&uri)?; let offset = utils::lookup_pos(code, params.text_document_position.position)?; let info = utils::ident_at(&ast.node(), offset)?; if !info.path.is_empty() { return None; } let old = info.ident; let scope = utils::scope_for(&Rc::new(uri.clone()), old.node().clone())?; let mut rename = Rename { edits: Vec::new(), code, old: old.as_str(), new_name: params.new_name, }; let definition = scope.get(old.as_str())?; rename_in_node(&mut rename, &definition.set); let mut 
changes = HashMap::new(); changes.insert(uri, rename.edits); Some(changes) } fn document_links(&mut self, params: &DocumentLinkParams) -> Option<Vec<DocumentLink>> { let (current_ast, current_content) = self.files.get(&params.text_document.uri)?; let parent_dir = Path::new(params.text_document.uri.path()).parent(); let home_dir = home_dir(); let home_dir = home_dir.as_ref(); let mut document_links = vec![]; for node in current_ast.node().descendants() { let value = Value::cast(node.clone()).and_then(|v| v.to_value().ok()); if let Some(RValue::Path(anchor, path)) = value { let file_url = match anchor { RAnchor::Absolute => Some(PathBuf::from(&path)), RAnchor::Relative => parent_dir.map(|p| p.join(path)), RAnchor::Home => home_dir.map(|home| home.join(path)), RAnchor::Store => None, } .and_then(|path| std::fs::canonicalize(&path).ok()) .filter(|path| path.is_file()) .and_then(|s| Url::parse(&format!("file://{}", s.to_string_lossy())).ok()); if let Some(file_url) = file_url { document_links.push(DocumentLink { target: file_url, range: utils::range(current_content, node.text_range()), tooltip: None, }) } } } Some(document_links) } fn send_diagnostics(&mut self, uri: Url, code: &str, ast: &AST) -> Result<(), Error> { let errors = ast.errors(); let mut diagnostics = Vec::with_capacity(errors.len()); for err in errors { let node_range = match err { ParseError::Unexpected(range) | ParseError::UnexpectedDoubleBind(range) | ParseError::UnexpectedExtra(range) | ParseError::UnexpectedWanted(_, range, _) => Some(range), ParseError::UnexpectedEOF | ParseError::UnexpectedEOFWanted(_) => { Some(TextRange::at(TextSize::of(code), TextSize::from(0))) } _ => None, }; if let Some(node_range) = node_range { diagnostics.push(Diagnostic { range: utils::range(code, node_range), severity: Some(DiagnosticSeverity::Error), message: err.to_string(), ..Diagnostic::default() }); } } self.notify(Notification::new
random
[ { "content": "pub fn scope_for(file: &Rc<Url>, node: SyntaxNode) -> Option<HashMap<String, Var>> {\n\n let mut scope = HashMap::new();\n\n\n\n let mut current = Some(node);\n\n while let Some(node) = current {\n\n match ParsedType::try_from(node.clone()) {\n\n Ok(ParsedType::LetIn(let_in)) => {\n\n populate(&file, &mut scope, &let_in, Datatype::Variable);\n\n }\n\n Ok(ParsedType::LegacyLet(let_)) => {\n\n populate(&file, &mut scope, &let_, Datatype::Variable);\n\n }\n\n Ok(ParsedType::AttrSet(set)) => {\n\n if set.recursive() {\n\n populate(&file, &mut scope, &set, Datatype::Attribute);\n\n }\n\n }\n\n Ok(ParsedType::Lambda(lambda)) => match ParsedType::try_from(lambda.arg()?) {\n\n Ok(ParsedType::Ident(ident)) => {\n\n if !scope.contains_key(ident.as_str()) {\n", "file_path": "src/utils.rs", "rank": 2, "score": 106053.9805711738 }, { "content": "pub fn range(code: &str, range: TextRange) -> Range {\n\n Range {\n\n start: offset_to_pos(code, usize::from(range.start())),\n\n end: offset_to_pos(code, usize::from(range.end())),\n\n }\n\n}\n\npub struct CursorInfo {\n\n pub path: Vec<String>,\n\n pub ident: Ident,\n\n pub name: String,\n\n}\n\n\n\nimpl CursorInfo {\n\n pub fn new(path: Vec<String>, ident: Ident, name: Option<String>) -> CursorInfo {\n\n let myname = match name {\n\n Some(n) => n,\n\n None => String::from((Ident::cast(ident.node().clone()).unwrap()).as_str()),\n\n };\n\n\n\n CursorInfo {\n\n path,\n\n ident,\n\n name: myname,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 3, "score": 98458.71635098064 }, { "content": "pub fn selection_ranges(root: &SyntaxNode, content: &str, pos: Position) -> Option<SelectionRange> {\n\n let pos = lookup_pos(content, pos)?;\n\n let node = root\n\n .token_at_offset(TextSize::try_from(pos).expect(\"big number goes brrr\"))\n\n .left_biased()?;\n\n\n\n let mut root = None;\n\n let mut cursor = &mut root;\n\n\n\n let mut last = None;\n\n for parent in node.ancestors() {\n\n // De-duplicate\n\n if 
last.as_ref() == Some(&parent) {\n\n continue;\n\n }\n\n\n\n let text_range = parent.text_range();\n\n *cursor = Some(Box::new(SelectionRange {\n\n range: range(content, text_range),\n\n parent: None,\n\n }));\n\n cursor = &mut cursor.as_mut().unwrap().parent;\n\n\n\n last = Some(parent);\n\n }\n\n\n\n root.map(|b| *b)\n\n}\n", "file_path": "src/utils.rs", "rank": 4, "score": 95507.62788616116 }, { "content": "pub fn offset_to_pos(code: &str, offset: usize) -> Position {\n\n let start_of_line = code[..offset].rfind('\\n').map_or(0, |n| n + 1);\n\n Position {\n\n line: code[..start_of_line].chars().filter(|&c| c == '\\n').count() as u64,\n\n character: code[start_of_line..offset]\n\n .chars()\n\n .map(|c| c.len_utf16() as u64)\n\n .sum(),\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 6, "score": 88627.91842823662 }, { "content": "pub fn uri_path(uri: &Url) -> Option<PathBuf> {\n\n if uri.scheme() != \"file\" || uri.has_host() {\n\n return None;\n\n }\n\n Some(PathBuf::from(uri.path()))\n\n}\n", "file_path": "src/utils.rs", "rank": 7, "score": 77514.41347033405 }, { "content": "pub fn lookup_pos(code: &str, pos: Position) -> Option<usize> {\n\n let mut lines = code.split('\\n');\n\n\n\n let mut offset = 0;\n\n for _ in 0..pos.line {\n\n let line = lines.next()?;\n\n\n\n offset += line.len() + 1;\n\n }\n\n\n\n lines.next().and_then(|line| {\n\n Some(\n\n offset\n\n + line\n\n .chars()\n\n .take(usize::try_from(pos.character).ok()?)\n\n .map(char::len_utf8)\n\n .sum::<usize>(),\n\n )\n\n })\n\n}\n", "file_path": "src/utils.rs", "rank": 9, "score": 67893.54380937692 }, { "content": "pub fn ident_at(root: &SyntaxNode, offset: usize) -> Option<CursorInfo> {\n\n let mut add = false;\n\n let ident =\n\n match root.token_at_offset(TextSize::try_from(offset).expect(\"aaah big number scary\")) {\n\n TokenAtOffset::None => None,\n\n TokenAtOffset::Single(node) => Ident::cast(node.parent()),\n\n TokenAtOffset::Between(left, right) => {\n\n let result = 
Ident::cast(left.parent()).or_else(|| Ident::cast(right.parent()));\n\n match result {\n\n Some(_) => result,\n\n None => {\n\n if let Some(sel) = Select::cast(left.parent()) {\n\n add = true;\n\n if let Some(s) = sel.set().and_then(Select::cast) {\n\n Ident::cast(s.index()?)\n\n } else {\n\n Ident::cast(sel.set()?)\n\n }\n\n } else {\n\n None\n", "file_path": "src/utils.rs", "rank": 10, "score": 66523.67172873896 }, { "content": "pub fn populate<T: EntryHolder>(\n\n file: &Rc<Url>,\n\n scope: &mut HashMap<String, Var>,\n\n set: &T,\n\n datatype: Datatype,\n\n) -> Option<()> {\n\n for entry in set.entries() {\n\n let attr = entry.key()?;\n\n let mut path = attr.path();\n\n if let Some(ident) = path.next().and_then(Ident::cast) {\n\n if !scope.contains_key(ident.as_str()) {\n\n scope.insert(\n\n ident.as_str().into(),\n\n Var {\n\n file: Rc::clone(file),\n\n set: set.node().to_owned(),\n\n key: ident.node().to_owned(),\n\n value: Some(entry.value()?.to_owned()),\n\n datatype: datatype,\n\n },\n\n );\n\n }\n\n }\n\n }\n\n Some(())\n\n}\n", "file_path": "src/utils.rs", "rank": 11, "score": 38673.19724145993 }, { "content": " node = match self.files.entry((**file).clone()) {\n\n Entry::Occupied(entry) => {\n\n let (ast, _code) = entry.get();\n\n ast.root().inner()?.clone()\n\n }\n\n Entry::Vacant(placeholder) => {\n\n let content = fs::read_to_string(&path).ok()?;\n\n let ast = rnix::parse(&content);\n\n let node = ast.root().inner()?.clone();\n\n placeholder.insert((ast, content));\n\n node\n\n }\n\n };\n\n }\n\n\n\n if let Some(set) = AttrSet::cast(node) {\n\n utils::populate(&file, &mut scope, &set, Datatype::Attribute);\n\n }\n\n Some(scope)\n\n }\n\n}\n", "file_path": "src/lookup.rs", "rank": 12, "score": 20976.98409707395 }, { "content": " // Resolve simple imports\n\n loop {\n\n let apply = match Apply::cast(node.clone()) {\n\n None => break,\n\n Some(apply) => apply,\n\n };\n\n if Ident::cast(apply.lambda()?).map_or(true, |ident| ident.as_str() != \"import\") 
{\n\n break;\n\n }\n\n let (_anchor, path) = match Value::cast(apply.value()?) {\n\n None => break,\n\n Some(value) => match value.to_value() {\n\n Ok(ParsedValue::Path(anchor, path)) => (anchor, path),\n\n _ => break,\n\n },\n\n };\n\n\n\n // TODO use anchor\n\n *file = Rc::new(file.join(&path).ok()?);\n\n let path = utils::uri_path(&file)?;\n", "file_path": "src/lookup.rs", "rank": 13, "score": 20974.67105376663 }, { "content": "use crate::{\n\n utils::{self, Datatype, Var},\n\n App,\n\n};\n\nuse lsp_types::Url;\n\nuse rnix::{types::*, value::Value as ParsedValue, SyntaxNode};\n\nuse std::{\n\n collections::{hash_map::Entry, HashMap},\n\n fs,\n\n rc::Rc,\n\n};\n\n\n\nuse lazy_static::lazy_static;\n\n\n\n// FIXME use Nix bindings to dynamically extract existing builtins.\n\n// e.g. use API behind `nix __dump-builtins`.\n\nlazy_static! {\n\n static ref BUILTINS: Vec<String> = vec![\n\n // `nix __dump-builtins | jq 'keys'\n\n \"abort\", \"add\", \"all\", \"any\", \"attrNames\", \"attrValues\", \"baseNameOf\", \"bitAnd\", \"bitOr\",\n", "file_path": "src/lookup.rs", "rank": 14, "score": 20974.15976704211 }, { "content": " \"bitXor\", \"catAttrs\", \"compareVersions\", \"concatLists\", \"concatMap\", \"concatStringsSep\", \"deepSeq\",\n\n \"dirOf\", \"div\", \"elem\", \"elemAt\", \"fetchGit\", \"fetchTarball\", \"fetchurl\", \"filter\", \"filterSource\", \"foldl'\",\n\n \"fromJSON\", \"functionArgs\", \"genList\", \"getAttr\", \"getEnv\", \"hasAttr\", \"hashFile\", \"hashString\", \"head\",\n\n \"import\", \"intersectAttrs\", \"isAttrs\", \"isBool\", \"isFloat\", \"isFunction\", \"isInt\", \"isList\", \"isNull\",\n\n \"isPath\", \"isString\", \"length\", \"lessThan\", \"listToAttrs\", \"map\", \"mapAttrs\", \"match\", \"mul\", \"parseDrvName\",\n\n \"partition\", \"path\", \"pathExists\", \"placeholder\", \"readDir\", \"readFile\", \"removeAttrs\", \"replaceStrings\",\n\n \"seq\", \"sort\", \"split\", \"splitVersion\", \"storePath\", \"stringLength\", \"sub\", 
\"substring\", \"tail\", \"throw\",\n\n \"toFile\", \"toJSON\", \"toPath\", \"toString\", \"toXML\", \"trace\", \"tryEval\", \"typeOf\"\n\n ].into_iter().map(String::from).collect::<Vec<_>>();\n\n}\n\n\n\nimpl App {\n\n pub fn scope_for_ident(\n\n &mut self,\n\n file: Url,\n\n root: &SyntaxNode,\n\n offset: usize,\n\n ) -> Option<(Ident, HashMap<String, (Datatype, Option<Var>)>, String)> {\n\n let mut file = Rc::new(file);\n\n let info = utils::ident_at(&root, offset)?;\n", "file_path": "src/lookup.rs", "rank": 15, "score": 20972.13322819767 }, { "content": " }\n\n }\n\n }\n\n Some((\n\n Ident::cast(ident.node().clone()).unwrap(),\n\n entries,\n\n info.name,\n\n ))\n\n }\n\n pub fn scope_from_node(\n\n &mut self,\n\n file: &mut Rc<Url>,\n\n mut node: SyntaxNode,\n\n ) -> Option<HashMap<String, Var>> {\n\n let mut scope = HashMap::new();\n\n\n\n if let Some(entry) = KeyValue::cast(node.clone()) {\n\n node = entry.value()?;\n\n }\n\n\n", "file_path": "src/lookup.rs", "rank": 16, "score": 20971.936522738546 }, { "content": " let ident = info.ident;\n\n let mut entries = utils::scope_for(&file, ident.node().clone())?\n\n .into_iter()\n\n .map(|(x, var)| (x.to_owned(), (var.datatype, Some(var))))\n\n .collect::<HashMap<_, _>>();\n\n for var in info.path {\n\n if !entries.contains_key(&var) && var == \"builtins\" {\n\n entries = BUILTINS\n\n .iter()\n\n .map(|x| (x.to_owned(), (Datatype::Lambda, None)))\n\n .collect::<HashMap<_, _>>();\n\n } else {\n\n let node_entry = entries.get(&var)?;\n\n if let (_, Some(var)) = node_entry {\n\n let node = var.value.clone()?;\n\n entries = self\n\n .scope_from_node(&mut file, node)?\n\n .into_iter()\n\n .map(|(x, var)| (x.to_owned(), (var.datatype, Some(var))))\n\n .collect::<HashMap<_, _>>();\n", "file_path": "src/lookup.rs", "rank": 17, "score": 20968.15943929549 }, { "content": "use lsp_types::*;\n\nuse rnix::{types::*, SyntaxNode, TextRange, TextSize, TokenAtOffset};\n\nuse std::{\n\n collections::HashMap,\n\n 
convert::TryFrom,\n\n fmt::{Debug, Display, Formatter, Result},\n\n path::PathBuf,\n\n rc::Rc,\n\n};\n\n\n\n#[derive(Copy, Clone)]\n\npub enum Datatype {\n\n Lambda,\n\n Variable,\n\n Attribute,\n\n}\n\n\n\nimpl Display for Datatype {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result {\n\n write!(\n", "file_path": "src/utils.rs", "rank": 18, "score": 19398.11883098824 }, { "content": " f,\n\n \"{}\",\n\n match self {\n\n Self::Lambda => \"Lambda\",\n\n Self::Variable => \"Variable\",\n\n Self::Attribute => \"Attribute\",\n\n }\n\n )\n\n }\n\n}\n\n\n\nimpl Debug for Datatype {\n\n fn fmt(&self, f: &mut Formatter) -> Result {\n\n Display::fmt(self, f)\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 19, "score": 19390.79356952926 }, { "content": " }\n\n }\n\n }\n\n }\n\n }?;\n\n let parent = ident.node().parent();\n\n if let Some(node) = parent.clone().and_then(Inherit::cast) {\n\n if let Some(node) = node.from() {\n\n if let Some(tok) = node.inner() {\n\n if let Some(_) = Ident::cast(tok.clone()) {\n\n return Some(CursorInfo::new(\n\n vec![tok.text().to_string()],\n\n ident.clone(),\n\n None,\n\n ));\n\n } else if let Some(mut attr) = Select::cast(tok.clone()) {\n\n let mut result = Vec::new();\n\n result.push(attr.index()?.to_string().into());\n\n while let Some(new) = Select::cast(attr.set()?) 
{\n\n result.push(Ident::cast(new.index()?)?.as_str().into());\n", "file_path": "src/utils.rs", "rank": 20, "score": 19389.972645869 }, { "content": " scope.insert(\n\n ident.as_str().into(),\n\n Var {\n\n file: Rc::clone(&file),\n\n set: lambda.node().clone(),\n\n key: ident.node().clone(),\n\n value: None,\n\n datatype: Datatype::Lambda,\n\n },\n\n );\n\n }\n\n }\n\n Ok(ParsedType::Pattern(pattern)) => {\n\n for entry in pattern.entries() {\n\n let ident = entry.name()?;\n\n if !scope.contains_key(ident.as_str()) {\n\n scope.insert(\n\n ident.as_str().into(),\n\n Var {\n\n file: Rc::clone(&file),\n", "file_path": "src/utils.rs", "rank": 21, "score": 19388.872065940686 }, { "content": " attr = new;\n\n }\n\n result.push(Ident::cast(attr.set()?)?.as_str().into());\n\n result.reverse();\n\n return Some(CursorInfo::new(result, ident.clone(), None));\n\n }\n\n }\n\n }\n\n Some(CursorInfo::new(Vec::new(), ident, None))\n\n } else if let Some(attr) = parent.clone().and_then(Key::cast) {\n\n let mut path = Vec::new();\n\n for item in attr.path() {\n\n if item == *ident.node() {\n\n return Some(CursorInfo::new(path, ident, None));\n\n }\n\n\n\n path.push(Ident::cast(item)?.as_str().into());\n\n }\n\n panic!(\"identifier at cursor is somehow not a child of its parent\");\n\n } else if let Some(mut index) = parent.and_then(Select::cast) {\n", "file_path": "src/utils.rs", "rank": 22, "score": 19387.173785322353 }, { "content": " set: lambda.node().to_owned(),\n\n key: ident.node().to_owned(),\n\n value: None,\n\n datatype: Datatype::Lambda,\n\n },\n\n );\n\n }\n\n }\n\n if let Some(ident) = pattern.at() {\n\n if !scope.contains_key(ident.as_str()) {\n\n scope.insert(\n\n ident.as_str().into(),\n\n Var {\n\n file: Rc::clone(&file),\n\n set: lambda.node().to_owned(),\n\n key: ident.node().to_owned(),\n\n value: None,\n\n datatype: Datatype::Lambda,\n\n },\n\n );\n", "file_path": "src/utils.rs", "rank": 23, "score": 19385.640354815598 }, { "content": " true => 
Some(String::from(\"\")),\n\n false => None,\n\n },\n\n ))\n\n } else {\n\n Some(CursorInfo::new(Vec::new(), ident, None))\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Var {\n\n pub file: Rc<Url>,\n\n pub set: SyntaxNode,\n\n pub key: SyntaxNode,\n\n pub value: Option<SyntaxNode>,\n\n pub datatype: Datatype,\n\n}\n", "file_path": "src/utils.rs", "rank": 24, "score": 19384.737290956702 }, { "content": " let mut path = Vec::new();\n\n while let Some(new) = Select::cast(index.set()?) {\n\n path.push(Ident::cast(new.index()?)?.as_str().into());\n\n index = new;\n\n }\n\n if index.set()? != *ident.node() {\n\n // Only push if not the cursor ident, so that\n\n // a . b\n\n // ^\n\n // is not [a] and a, but rather [] and a\n\n path.push(Ident::cast(index.set()?)?.as_str().into());\n\n }\n\n path.reverse();\n\n if add {\n\n path.push(String::from(ident.as_str()));\n\n }\n\n Some(CursorInfo::new(\n\n path,\n\n ident,\n\n match add {\n", "file_path": "src/utils.rs", "rank": 25, "score": 19384.276518986175 }, { "content": " }\n\n }\n\n }\n\n _ => (),\n\n },\n\n _ => (),\n\n }\n\n current = node.parent();\n\n }\n\n\n\n Some(scope)\n\n}\n", "file_path": "src/utils.rs", "rank": 26, "score": 19381.633753090096 }, { "content": "# rnix-lsp ![Crates.io](https://img.shields.io/crates/v/rnix-lsp)\n\n\n\nA syntax-checking language server using\n\n[rnix](https://github.com/nix-community/rnix-parser).\n\n\n\n- [x] Syntax-checking diagnostics\n\n- [x] Basic completion\n\n- [x] Basic renaming\n\n- [x] Basic goto definition\n\n- [x] Expand selection proposal\n\n- [x] Formatting using [nixpkgs-fmt](https://github.com/nix-community/nixpkgs-fmt)\n\n\n\nThis is beta-level quality *at best* - I didn't expect maintaining a\n\nlanguage server when writing rnix, the goal was that others would\n\nflock around the parser and write a bunch of editor tooling :)\n\n\n\nBreakages are expected. 
No semver compatibility before 1.x.y.\n\n\n\nTurn on logging with `RUST_LOG=trace`, and redirect stderr to a file.\n\n\n\n```sh\n\nbash -c \"env RUST_LOG=trace rnix-lsp 2> /tmp/rnix-lsp.log\"\n\n```\n\n\n\n## Install\n\n\n\n```\n\n$ nix-env -i -f https://github.com/nix-community/rnix-lsp/archive/master.tar.gz\n\n```\n\n\n\n## Integrate with your editor\n\n\n\nThese instructions are not fully tested - see issue #3. Please raise\n\nan issue and/or send a PR if a config below didn't work out of the box.\n\n\n\n### Vim/Neovim\n\n\n\n#### [coc.nvim](https://github.com/neoclide/coc.nvim)\n\n\n\n```vim\n\n{\n\n \"languageserver\": {\n\n \"nix\": {\n\n \"command\": \"rnix-lsp\",\n\n \"filetypes\": [\n\n \"nix\"\n\n ]\n\n }\n\n }\n\n}\n\n\n\n```\n\n\n\n#### [LanguageClient-neovim](https://github.com/autozimu/LanguageClient-neovim)\n\n\n\n```vim\n\nlet g:LanguageClient_serverCommands = {\n\n \\ 'nix': ['rnix-lsp']\n\n\\ }\n\n```\n\n\n\n#### [vim-lsp](https://github.com/prabirshrestha/vim-lsp)\n\n\n\n```vim\n\nif executable('rnix-lsp')\n\n au User lsp_setup call lsp#register_server({\n\n \\ 'name': 'rnix-lsp',\n\n \\ 'cmd': {server_info->[&shell, &shellcmdflag, 'rnix-lsp']},\n\n \\ 'whitelist': ['nix'],\n\n \\ })\n\nendif\n\n```\n\n\n\n### Emacs\n\n\n\n#### [lsp-mode](https://github.com/emacs-lsp/lsp-mode)\n\n\n\n```elisp\n\n(add-to-list 'lsp-language-id-configuration '(nix-mode . \"nix\"))\n\n(lsp-register-client\n\n (make-lsp-client :new-connection (lsp-stdio-connection '(\"rnix-lsp\"))\n\n :major-modes '(nix-mode)\n\n :server-id 'nix))\n\n```\n", "file_path": "README.md", "rank": 44, "score": 12487.58929197368 }, { "content": "#### [eglot](https://github.com/joaotavora/eglot)\n\n```elisp\n\n(add-to-list 'eglot-server-programs '(nix-mode . 
(\"rnix-lsp\")))\n\n```\n\n\n\n### Kakoune\n\n\n\n#### [kak-lsp](https://github.com/kak-lsp/kak-lsp)\n\n\n\n```toml\n\n[language.nix]\n\nfiletypes = [\"nix\"]\n\ncommand = \"rnix-lsp\"\n\n```\n\n\n\n\n\n### VSCode\n\n\n\n#### [vscode-nix-ide](https://github.com/nix-community/vscode-nix-ide/)\n\n\n\n```json\n\n{\n\n \"nix.enableLanguageServer\": true\n\n}\n\n```\n", "file_path": "README.md", "rank": 45, "score": 12479.195639099438 } ]
Rust
src/svi_data_acquirer.rs
Lifars/gargamel
2f220e1fb1e25061f3f29db4860a4ab1df1058a3
use crate::remote::{Connector, Computer, PsExec, PsRemote, Rdp, Wmi, SevenZipCompressCopier, RemoteFileCopier, Compression, Local, RevShareConnector}; use std::path::{Path, PathBuf}; use std::{io, thread}; use std::time::Duration; pub struct SystemVolumeInformationAcquirer<'a> { pub local_store_directory: &'a Path, pub connector: Box<dyn Connector>, pub image_timeout: Option<Duration>, pub compress_timeout: Option<Duration>, pub compression: Compression, } impl<'a> SystemVolumeInformationAcquirer<'a> { pub fn psexec32( remote_computer: Computer, local_store_directory: &'a Path, no_7zip: bool, remote_temp_storage: PathBuf, custom_share_folder: Option<String>, reverse: bool, ) -> SystemVolumeInformationAcquirer<'a> { let connector = Box::new(PsExec::psexec32(remote_computer, remote_temp_storage, custom_share_folder)); SystemVolumeInformationAcquirer { local_store_directory, connector: if reverse { Box::new(RevShareConnector::new(connector)) } else { connector }, image_timeout: Some(Duration::from_secs(20)), compress_timeout: None, compression: if no_7zip { Compression::No } else { Compression::Yes }, } } pub fn local( username: String, local_store_directory: &'a Path, temp_storage: PathBuf, ) -> SystemVolumeInformationAcquirer<'a> { SystemVolumeInformationAcquirer { local_store_directory, connector: Box::new(Local::new(username, temp_storage)), image_timeout: Some(Duration::from_secs(20)), compress_timeout: None, compression: Compression::No, } } pub fn psexec64( remote_computer: Computer, local_store_directory: &'a Path, no_7zip: bool, remote_temp_storage: PathBuf, custom_share_folder: Option<String>, reverse: bool, ) -> SystemVolumeInformationAcquirer<'a> { let connector = Box::new(PsExec::psexec64(remote_computer, remote_temp_storage, custom_share_folder)); SystemVolumeInformationAcquirer { local_store_directory, connector: if reverse { Box::new(RevShareConnector::new(connector)) } else { connector }, image_timeout: Some(Duration::from_secs(20)), compress_timeout: 
None, compression: if no_7zip { Compression::No } else { Compression::Yes }, } } pub fn psremote( remote_computer: Computer, local_store_directory: &'a Path, _no_7zip: bool, remote_temp_storage: PathBuf, custom_share_folder: Option<String>, reverse: bool, ) -> SystemVolumeInformationAcquirer<'a> { let connector = Box::new(PsRemote::new(remote_computer, remote_temp_storage, custom_share_folder)); SystemVolumeInformationAcquirer { local_store_directory, connector: if reverse { Box::new(RevShareConnector::new(connector)) } else { connector }, image_timeout: Some(Duration::from_secs(20)), compress_timeout: None, compression: Compression::No, } } pub fn wmi( remote_computer: Computer, local_store_directory: &'a Path, timeout: Duration, compress_timeout: Duration, no_7zip: bool, remote_temp_storage: PathBuf, ) -> SystemVolumeInformationAcquirer<'a> { SystemVolumeInformationAcquirer { local_store_directory, connector: Box::new(Wmi { computer: remote_computer.clone(), remote_temp_storage }), image_timeout: Some(timeout), compress_timeout: Some(compress_timeout), compression: if no_7zip { Compression::No } else { Compression::YesSplit }, } } pub fn rdp( remote_computer: Computer, local_store_directory: &'a Path, nla: bool, image_timeout: Duration, compress_timeout: Duration, no_7zip: bool, remote_temp_storage: PathBuf, ) -> SystemVolumeInformationAcquirer<'a> { SystemVolumeInformationAcquirer { local_store_directory, connector: Box::new(Rdp { nla, computer: remote_computer.clone(), remote_temp_storage, }), image_timeout: Some(image_timeout), compress_timeout: Some(compress_timeout), compression: if no_7zip { Compression::No } else { Compression::YesSplit }, } } pub fn download_data( &self ) -> io::Result<()> { let local_store_directory = self.local_store_directory; self.connector.acquire_perms(Path::new("C:\\System Volume Information")); let _copier = self.connector.copier(); let _compression_split_copier = SevenZipCompressCopier::new(self.connector.as_ref(), true, 
self.compress_timeout, false); let _compression_copier = SevenZipCompressCopier::new(self.connector.as_ref(), false, self.compress_timeout, false); let copier = match self.compression { Compression::No => _copier, Compression::Yes => &_compression_copier as &dyn RemoteFileCopier, Compression::YesSplit => &_compression_split_copier as &dyn RemoteFileCopier, }; let svi_path = Path::new("C:\\System Volume Information\\*.lnk"); if let Err(err) = copier.copy_from_remote( &svi_path, &local_store_directory, ) { error!("Cannot download {} from {} using method {} due to {}", &svi_path.display(), self.connector.computer().address, self.connector.connect_method_name(), err ); } thread::sleep(Duration::from_millis(20000)); self.connector.release_perms(Path::new("C:\\System Volume Information")); Ok(()) } }
use crate::remote::{Connector, Computer, PsExec, PsRemote, Rdp, Wmi, SevenZipCompressCopier, RemoteFileCopier, Compression, Local, RevShareConnector}; use std::path::{Path, PathBuf}; use std::{io, thread}; use std::time::Duration; pub struct SystemVolumeInformationAcquirer<'a> { pub local_store_directory: &'a Path, pub connector: Box<dyn Connector>, pub image_timeout: Option<Duration>, pub compress_timeout: Option<Duration>, pub compression: Compression, } impl<'a> SystemVolumeInformationAcquirer<'a> { pub f
pub fn local( username: String, local_store_directory: &'a Path, temp_storage: PathBuf, ) -> SystemVolumeInformationAcquirer<'a> { SystemVolumeInformationAcquirer { local_store_directory, connector: Box::new(Local::new(username, temp_storage)), image_timeout: Some(Duration::from_secs(20)), compress_timeout: None, compression: Compression::No, } } pub fn psexec64( remote_computer: Computer, local_store_directory: &'a Path, no_7zip: bool, remote_temp_storage: PathBuf, custom_share_folder: Option<String>, reverse: bool, ) -> SystemVolumeInformationAcquirer<'a> { let connector = Box::new(PsExec::psexec64(remote_computer, remote_temp_storage, custom_share_folder)); SystemVolumeInformationAcquirer { local_store_directory, connector: if reverse { Box::new(RevShareConnector::new(connector)) } else { connector }, image_timeout: Some(Duration::from_secs(20)), compress_timeout: None, compression: if no_7zip { Compression::No } else { Compression::Yes }, } } pub fn psremote( remote_computer: Computer, local_store_directory: &'a Path, _no_7zip: bool, remote_temp_storage: PathBuf, custom_share_folder: Option<String>, reverse: bool, ) -> SystemVolumeInformationAcquirer<'a> { let connector = Box::new(PsRemote::new(remote_computer, remote_temp_storage, custom_share_folder)); SystemVolumeInformationAcquirer { local_store_directory, connector: if reverse { Box::new(RevShareConnector::new(connector)) } else { connector }, image_timeout: Some(Duration::from_secs(20)), compress_timeout: None, compression: Compression::No, } } pub fn wmi( remote_computer: Computer, local_store_directory: &'a Path, timeout: Duration, compress_timeout: Duration, no_7zip: bool, remote_temp_storage: PathBuf, ) -> SystemVolumeInformationAcquirer<'a> { SystemVolumeInformationAcquirer { local_store_directory, connector: Box::new(Wmi { computer: remote_computer.clone(), remote_temp_storage }), image_timeout: Some(timeout), compress_timeout: Some(compress_timeout), compression: if no_7zip { Compression::No } else 
{ Compression::YesSplit }, } } pub fn rdp( remote_computer: Computer, local_store_directory: &'a Path, nla: bool, image_timeout: Duration, compress_timeout: Duration, no_7zip: bool, remote_temp_storage: PathBuf, ) -> SystemVolumeInformationAcquirer<'a> { SystemVolumeInformationAcquirer { local_store_directory, connector: Box::new(Rdp { nla, computer: remote_computer.clone(), remote_temp_storage, }), image_timeout: Some(image_timeout), compress_timeout: Some(compress_timeout), compression: if no_7zip { Compression::No } else { Compression::YesSplit }, } } pub fn download_data( &self ) -> io::Result<()> { let local_store_directory = self.local_store_directory; self.connector.acquire_perms(Path::new("C:\\System Volume Information")); let _copier = self.connector.copier(); let _compression_split_copier = SevenZipCompressCopier::new(self.connector.as_ref(), true, self.compress_timeout, false); let _compression_copier = SevenZipCompressCopier::new(self.connector.as_ref(), false, self.compress_timeout, false); let copier = match self.compression { Compression::No => _copier, Compression::Yes => &_compression_copier as &dyn RemoteFileCopier, Compression::YesSplit => &_compression_split_copier as &dyn RemoteFileCopier, }; let svi_path = Path::new("C:\\System Volume Information\\*.lnk"); if let Err(err) = copier.copy_from_remote( &svi_path, &local_store_directory, ) { error!("Cannot download {} from {} using method {} due to {}", &svi_path.display(), self.connector.computer().address, self.connector.connect_method_name(), err ); } thread::sleep(Duration::from_millis(20000)); self.connector.release_perms(Path::new("C:\\System Volume Information")); Ok(()) } }
n psexec32( remote_computer: Computer, local_store_directory: &'a Path, no_7zip: bool, remote_temp_storage: PathBuf, custom_share_folder: Option<String>, reverse: bool, ) -> SystemVolumeInformationAcquirer<'a> { let connector = Box::new(PsExec::psexec32(remote_computer, remote_temp_storage, custom_share_folder)); SystemVolumeInformationAcquirer { local_store_directory, connector: if reverse { Box::new(RevShareConnector::new(connector)) } else { connector }, image_timeout: Some(Duration::from_secs(20)), compress_timeout: None, compression: if no_7zip { Compression::No } else { Compression::Yes }, } }
function_block-function_prefixed
[ { "content": "pub fn copy_from_local_wildcards<F>(\n\n source: &Path,\n\n target: &Path,\n\n connector: &dyn Connector,\n\n copy_fn: F,\n\n) -> io::Result<()>\n\n where F: Fn(&Path, &Path) -> io::Result<()> {\n\n trace!(\"Copier supports wildcards\");\n\n let dir = source\n\n .components()\n\n .take_while(|item| !item.as_os_str().to_str().unwrap_or_default().contains(\"*\"))\n\n .map(|item| item.as_os_str())\n\n .collect::<PathBuf>();\n\n\n\n let wildcarded = source\n\n .components()\n\n .skip_while(|item| !item.as_os_str().to_str().unwrap_or_default().contains(\"*\"))\n\n .take(1)\n\n .collect::<Vec<Component>>()\n\n .get(0)\n", "file_path": "src/remote/copier.rs", "rank": 0, "score": 107478.4198028771 }, { "content": "pub fn path_to_part(path: &Path, part: usize) -> PathBuf {\n\n let joined = match part {\n\n part if part < 10 => format!(\"{}.00{}\", path.display(), part),\n\n part if part < 100 => format!(\"{}.0{}\", path.display(), part),\n\n part => format!(\"{}.{}\", path.display(), part)\n\n };\n\n PathBuf::from(joined)\n\n}\n\n\n", "file_path": "src/remote/utils.rs", "rank": 1, "score": 101410.16291904985 }, { "content": "pub trait Connector {\n\n fn connect_method_name(&self) -> &'static str;\n\n\n\n fn computer(&self) -> &Computer;\n\n\n\n fn copier(&self) -> &dyn RemoteFileCopier;\n\n\n\n fn remote_temp_storage(&self) -> &Path;\n\n\n\n fn mkdir(&self, path: &Path) {\n\n let command = Command::new(\n\n vec![\n\n \"cmd.exe\".to_string(),\n\n \"/c\".to_string(),\n\n \"md\".to_string(),\n\n path.to_str().unwrap_or_default().to_string(),\n\n ],\n\n None,\n\n \"\",\n\n true,\n", "file_path": "src/remote/connector.rs", "rank": 2, "score": 99659.90815562702 }, { "content": "pub fn download_files_from_embedded(local_store_directory: &Path,\n\n downloader: &dyn RemoteFileCopier,\n\n separate_stores: bool\n\n) -> io::Result<()> {\n\n let local_store_directory = dunce::canonicalize(local_store_directory)\n\n .expect(&format!(\"Cannot canonicalize {}\", 
local_store_directory.display()));\n\n for path_to_find in embedded_search_list() {\n\n if path_to_find.starts_with(\"#\") {\n\n continue;\n\n }\n\n let _ = download_file(&path_to_find, &local_store_directory, downloader, separate_stores);\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/file_acquirer.rs", "rank": 3, "score": 96195.55834447837 }, { "content": "pub fn path_join_to_string_ntfs(path: &Path) -> String {\n\n path\n\n .to_str()\n\n .unwrap_or(\"y\")\n\n .replace(\"*\", \"--S--\")\n\n .replace(\"?\", \"--Q--\")\n\n .replace(\":\", \"\")\n\n .replace(\"\\\\\", \"-\")\n\n .replace(\"/\", \"-\")\n\n}\n", "file_path": "src/utils.rs", "rank": 4, "score": 92732.02845849191 }, { "content": "pub fn download_files_from_path(file_list: &Path,\n\n local_store_directory: &Path,\n\n downloader: &dyn RemoteFileCopier,\n\n separate_stores: bool\n\n) -> io::Result<()> {\n\n let input_file = File::open(file_list)?;\n\n let local_store_directory = dunce::canonicalize(local_store_directory)\n\n .expect(&format!(\"Cannot canonicalize {}\", local_store_directory.display()));\n\n for path_to_find in BufReader::new(input_file).lines() {\n\n if path_to_find.is_err() {\n\n warn!(\"Cannot read line in {}\", file_list.display());\n\n }\n\n let path_to_find = path_to_find.unwrap();\n\n let _ = download_file(&path_to_find, &local_store_directory, downloader, separate_stores);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/file_acquirer.rs", "rank": 5, "score": 83845.05585367276 }, { "content": "pub fn copy_from_remote_wildcards<F>(\n\n source: &Path,\n\n target: &Path,\n\n connector: &dyn Connector,\n\n copy_fn: F,\n\n) -> io::Result<()>\n\n where F: Fn(&Path, &Path) -> io::Result<()> {\n\n trace!(\"Copier supports wildcards\");\n\n let dir = source\n\n .components()\n\n .take_while(|item| !item.as_os_str().to_str().unwrap_or_default().contains(\"*\"))\n\n .map(|item| item.as_os_str())\n\n .collect::<PathBuf>();\n\n\n\n let wildcarded = source\n\n .components()\n\n .skip_while(|item| 
!item.as_os_str().to_str().unwrap_or_default().contains(\"*\"))\n\n .take(1)\n\n .collect::<Vec<Component>>()\n\n .get(0)\n", "file_path": "src/remote/copier.rs", "rank": 6, "score": 81028.89035075565 }, { "content": "pub fn file_is_empty(target_downloaded: &Path) -> bool {\n\n let mut file = match File::open(target_downloaded){\n\n Ok(file) => file,\n\n Err(_) => return true,\n\n };\n\n let mut buf: [u8; 100] = [0; 100];\n\n if file.read_exact(&mut buf).is_err() {\n\n return true\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/remote/utils.rs", "rank": 7, "score": 72123.46191666277 }, { "content": "pub fn create_report_path(\n\n remote_computer: &Computer,\n\n store_directory: &Path,\n\n filename_prefix: &str,\n\n method_name: &str,\n\n file_extension: &str,\n\n) -> PathBuf {\n\n let address_formatted = remote_computer.address.replace(\".\", \"-\");\n\n let filename = format!(\"{}-{}-{}-{}.{}\",\n\n method_name,\n\n filename_prefix,\n\n address_formatted,\n\n remote_computer.username.replace(\" \", \"\"),\n\n file_extension\n\n );\n\n let file_path = store_directory.join(filename);\n\n {\n\n File::create(&file_path).expect(&format!(\"Cannot create file {}\", file_path.display()));\n\n }\n\n let result = dunce::canonicalize(file_path).expect(\"Cannot canonicalize\");\n\n trace!(\"Report will be saved at {}\", result.display());\n\n result\n\n}", "file_path": "src/process_runner.rs", "rank": 8, "score": 68060.1006432826 }, { "content": "pub trait Quoted{\n\n fn quoted(&self) -> String;\n\n}\n\n\n\nimpl Quoted for str {\n\n fn quoted(&self) -> String {\n\n format!(\"\\\"{}\\\"\", self)\n\n }\n\n}\n\n\n\n\n", "file_path": "src/utils.rs", "rank": 9, "score": 44284.653995009045 }, { "content": "pub trait Archiver {\n\n fn compress(&self, path: &Path, split: bool) -> PathBuf;\n\n fn uncompress(&self, path: &Path) -> io::Result<()>;\n\n}\n\n\n\npub struct SevenZipArchiver<'a> {\n\n connector: &'a dyn Connector,\n\n timeout: Option<Duration>,\n\n}\n\n\n\nimpl<'a> 
SevenZipArchiver<'a> {\n\n pub fn remote(\n\n connector: &'a dyn Connector,\n\n timeout: Option<Duration>,\n\n ) -> SevenZipArchiver<'a> {\n\n SevenZipArchiver {\n\n connector,\n\n timeout,\n\n }\n\n }\n", "file_path": "src/remote/archiver.rs", "rank": 10, "score": 42920.538003508496 }, { "content": "pub fn print_logo(){\n\n info!(\"\\n\\n{}\\n{}\\n\", LIFARS, APP)\n\n}", "file_path": "src/logo.rs", "rank": 11, "score": 42920.538003508496 }, { "content": "pub trait FileCopier {\n\n fn copy_file(\n\n &self,\n\n source: &Path,\n\n target: &Path,\n\n ) -> io::Result<()>;\n\n\n\n fn delete_file(&self,\n\n target: &Path,\n\n ) -> io::Result<()>;\n\n\n\n fn method_name(&self) -> &'static str;\n\n}\n\n\n\npub struct Cmd {}\n\n\n\nimpl FileCopier for Cmd {\n\n fn copy_file(\n\n &self,\n\n source: &Path,\n", "file_path": "src/remote/copier.rs", "rank": 12, "score": 41671.38853959976 }, { "content": "pub fn download_file(\n\n path: &str,\n\n local_store_directory: &Path,\n\n downloader: &dyn RemoteFileCopier,\n\n separate_stores: bool,\n\n) -> io::Result<()> {\n\n if path.starts_with(\"#\") {\n\n return Ok(());\n\n }\n\n let path_to_download = Path::new(path);\n\n trace!(\"Establishing download of {} using {}\", path_to_download.display(), downloader.method_name());\n\n\n\n let local_store_directory = if separate_stores {\n\n let dir_name = path\n\n .replace(\"\\\\\", \"-\")\n\n .replace(\"/\", \"-\")\n\n .replace(\" \", \"\")\n\n .replace(\"*\", \"--S--\")\n\n .replace(\"?\", \"--Q--\")\n\n .replace(\":\", \"\");\n", "file_path": "src/file_acquirer.rs", "rank": 13, "score": 41671.38853959976 }, { "content": "pub fn run_process_blocking(\n\n command_name: &str,\n\n command_args: &[String],\n\n) -> Result<()> {\n\n trace!(\"Starting process {}, with args: {:?}\", command_name, command_args);\n\n let mut command = Command::new(command_name);\n\n // command.stdout(Stdio::null());\n\n if command_args.is_empty().not() {\n\n command.args(command_args);\n\n }\n\n let output = 
command.output()?;\n\n trace!(\"Command {} output: {}\", command_name, String::from_utf8_lossy(&output.stdout));\n\n trace!(\"Command {} error: {}\", command_name, String::from_utf8_lossy(&output.stderr));\n\n Ok(())\n\n}\n\n\n", "file_path": "src/process_runner.rs", "rank": 14, "score": 40523.25937080843 }, { "content": "pub trait RemoteFileCopier {\n\n fn remote_computer(&self) -> &Computer;\n\n fn copier_impl(&self) -> &dyn FileCopier;\n\n\n\n fn path_to_remote_form(\n\n &self,\n\n path: &Path,\n\n ) -> PathBuf;\n\n\n\n fn copy_to_remote(\n\n &self,\n\n source: &Path,\n\n target: &Path,\n\n ) -> io::Result<()> {\n\n self.copier_impl().copy_file(source, &self.path_to_remote_form(target))\n\n }\n\n\n\n fn delete_remote_file(&self, target: &Path) -> io::Result<()> {\n\n self.copier_impl().delete_file(&self.path_to_remote_form(target))\n\n }\n", "file_path": "src/remote/copier.rs", "rank": 15, "score": 40523.25937080843 }, { "content": "pub fn run_process_blocking_timed(\n\n command_name: &str,\n\n command_args: &[String],\n\n wait_for: Duration,\n\n) -> Result<()> {\n\n trace!(\"Starting process {}, with args: {:?} and timeout of {} seconds\", command_name, command_args, wait_for.as_secs());\n\n let mut command = Command::new(command_name);\n\n if command_args.is_empty().not() {\n\n command.args(command_args);\n\n }\n\n let mut child = command.spawn()?;\n\n match child.wait_timeout(wait_for)? 
{\n\n Some(_) => {}\n\n None => {\n\n // child hasn't exited yet\n\n match child.kill() {\n\n Ok(_) => {}\n\n Err(_) => {}\n\n }\n\n\n\n trace!(\"Process \\\"{} {}\\\" reached time out\", command_name, command_args.join(\" \"));\n\n }\n\n };\n\n Ok(())\n\n}\n\n\n\n\n", "file_path": "src/process_runner.rs", "rank": 16, "score": 39464.37229774804 }, { "content": "pub fn run_piped_processes_blocking(\n\n command_name_first: &str,\n\n command_args_first: &[String],\n\n command_name_second: &str,\n\n command_args_second: &[String],\n\n) -> Result<()> {\n\n let mut first = Command::new(command_name_first);\n\n if !command_args_first.is_empty() {\n\n first.args(command_args_first);\n\n }\n\n trace!(\"Starting process \\\"{}\\\" with params {:?}\", command_name_first, command_args_first);\n\n let mut first = first.stdout(Stdio::piped())\n\n .spawn()?;\n\n\n\n if let Some(first_output) = first.stdout.take() {\n\n let mut second = Command::new(command_name_second);\n\n second.stdin(first_output);\n\n if !command_args_second.is_empty() {\n\n second.args(command_args_second);\n\n }\n\n trace!(\"Starting process {} with params {:?}\", command_name_second, command_args_second);\n\n let output = second.output()?;\n\n trace!(\"Command {} output: {}\", command_name_second, String::from_utf8_lossy(&output.stdout));\n\n trace!(\"Command {} error: {}\", command_name_second, String::from_utf8_lossy(&output.stderr));\n\n } else {\n\n trace!(\"Child not invoked\")\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/process_runner.rs", "rank": 17, "score": 39464.37229774804 }, { "content": "pub fn run_process_blocking_maybe_timed(\n\n command_name: &str,\n\n command_args: &[String],\n\n wait_for: Option<Duration>,\n\n) -> Result<()> {\n\n match wait_for {\n\n None => run_process_blocking(command_name, command_args),\n\n Some(wait) => run_process_blocking_timed(command_name, command_args, wait),\n\n }\n\n}\n\n\n", "file_path": "src/process_runner.rs", "rank": 18, "score": 38484.71162114742 }, 
{ "content": "fn handle_remote_computer(opts: &Opts, remote_computer: &Computer) -> Result<(), io::Error> {\n\n info!(\"Connecting to {} with user {}\", remote_computer.address, remote_computer.domain_username());\n\n let local_store_directory_owned = dunce::canonicalize(Path::new(&opts.local_store_directory)).unwrap();\n\n let local_store_directory = local_store_directory_owned.as_path();\n\n let remote_temp_storage = Path::new(&opts.remote_store_directory);\n\n let key_file = opts.ssh_key.clone().map(|it| PathBuf::from(it));\n\n let local = opts.computer == \"127.0.0.1\" || opts.computer == \"localhost\";\n\n\n\n if let Some(remote_file) = &opts.re_download {\n\n let connectors = create_connectors(\n\n &opts,\n\n &remote_computer,\n\n remote_temp_storage,\n\n true,\n\n local,\n\n opts.reverse_share,\n\n );\n\n let remote_file = Path::new(&remote_file);\n\n for connector in connectors {\n\n let _compress_copier = SevenZipCompressCopier::new(connector.as_ref(), false, None, false);\n", "file_path": "src/main.rs", "rank": 19, "score": 37315.640704667305 }, { "content": "pub fn download_files(file_list: &str,\n\n local_store_directory: &Path,\n\n downloader: &dyn RemoteFileCopier,\n\n separate_stores: bool\n\n) -> io::Result<()> {\n\n if file_list == \"EMBEDDED\" {\n\n download_files_from_embedded(local_store_directory, downloader, separate_stores)\n\n } else {\n\n download_files_from_path(Path::new(file_list), local_store_directory, downloader, separate_stores)\n\n }\n\n}\n\n\n", "file_path": "src/file_acquirer.rs", "rank": 20, "score": 36105.27849146002 }, { "content": "pub fn embedded_search_list() -> Vec<String> {\n\n vec![\n\n \"C:\\\\Windows\\\\System32\\\\winevt\\\\Logs\\\\*.evtx\".to_string(),\n\n \"C:\\\\Windows.old\\\\Windows\\\\System32\\\\winevt\\\\Logs\\\\*.evtx\".to_string(),\n\n \"C:\\\\Users\\\\*\\\\AppData\\\\Roaming\\\\Microsoft\\\\Windows\\\\Recent\\\\*.lnk\".to_string(),\n\n 
\"C:\\\\Users\\\\*\\\\AppData\\\\Roaming\\\\Microsoft\\\\Office\\\\Recent\\\\*.lnk\".to_string(),\n\n \"C:\\\\Users\\\\*\\\\Desktop\\\\*.lnk\".to_string(),\n\n \"C:\\\\ProgramData\\\\Microsoft\\\\Windows\\\\Start Menu\\\\Programs\\\\*.lnk\".to_string(),\n\n \"C:\\\\Users\\\\*\\\\AppData\\\\Roaming\\\\Microsoft\\\\Word\\\\\".to_string(),\n\n \"C:\\\\Users\\\\*\\\\AppData\\\\Roaming\\\\Microsoft\\\\Excel\\\\\".to_string(),\n\n \"C:\\\\Users\\\\*\\\\AppData\\\\Roaming\\\\Microsoft\\\\Publisher\\\\\".to_string(),\n\n \"C:\\\\Users\\\\*\\\\NTUSER.DAT\".to_string(),\n\n \"C:\\\\Users\\\\*\\\\NTUSER.DAT.LOG*\".to_string(),\n\n \"C:\\\\Windows\\\\System32\\\\config\\\\DEFAULT\".to_string(),\n\n \"C:\\\\Windows.old\\\\Windows\\\\System32\\\\config\\\\DEFAULT\".to_string(),\n\n \"C:\\\\Windows\\\\System32\\\\config\\\\DEFAULT.LOG*\".to_string(),\n\n \"C:\\\\Windows.old\\\\Windows\\\\System32\\\\config\\\\DEFAULT.LOG*\".to_string(),\n\n \"C:\\\\Users\\\\*\\\\AppData\\\\Local\\\\Microsoft\\\\Windows\\\\UsrClass.dat\".to_string(),\n\n \"C:\\\\Users\\\\*\\\\AppData\\\\Local\\\\Microsoft\\\\Windows\\\\UsrClass.dat.LOG*\".to_string(),\n\n \"C:\\\\ProgramData\\\\Microsoft\\\\Search\\\\Data\\\\Applications\\\\Windows\\\\Windows.edb\".to_string(),\n\n ]\n\n}", "file_path": "src/embedded_search_list.rs", "rank": 21, "score": 35196.27322313638 }, { "content": "pub fn parse_command(command: &str) -> Vec<String> {\n\n if command.is_empty() {\n\n return vec![]\n\n }\n\n\n\n let mut result = Vec::<String>::new();\n\n // let mut last_quote_index: Option<usize> = None;\n\n let mut quote_opened = false;\n\n let mut take_from: usize = 0;\n\n for (i, character) in command.char_indices() {\n\n let is_quote = character == '\"';\n\n let is_space = character == ' ';\n\n\n\n // if last_space_index.unwrap_or(i)\n\n\n\n if is_quote {\n\n quote_opened = !quote_opened;\n\n }\n\n\n\n if is_space && !quote_opened {\n", "file_path": "src/command_utils.rs", "rank": 22, "score": 33540.06486393256 }, { 
"content": "use crate::remote::{Connector, Computer, FileCopier, RemoteFileCopier, Command, DEFAULT_REMOTE_PATH_STORAGE, Cmd, copy_from_local_wildcards};\n\nuse std::path::{Path, PathBuf};\n\nuse std::io;\n\nuse std::time::Duration;\n\n\n\npub struct Local {\n\n localhost: Computer,\n\n temp_storage: PathBuf,\n\n}\n\n\n\nimpl Local {\n\n pub fn new(username: String, temp_storage: PathBuf) -> Local {\n\n Local {\n\n localhost: Computer {\n\n address: String::from(\"127.0.0.1\"),\n\n username,\n\n password: None,\n\n domain: None,\n\n },\n\n temp_storage\n", "file_path": "src/remote/local.rs", "rank": 23, "score": 32848.12692966338 }, { "content": " }\n\n }\n\n\n\n pub fn new_default(username: String) -> Local {\n\n Local::new(username, PathBuf::from(DEFAULT_REMOTE_PATH_STORAGE))\n\n }\n\n}\n\n\n\nimpl Connector for Local {\n\n fn connect_method_name(&self) -> &'static str {\n\n return \"LOCAL\";\n\n }\n\n\n\n fn computer(&self) -> &Computer {\n\n &self.localhost\n\n }\n\n\n\n fn copier(&self) -> &dyn RemoteFileCopier {\n\n self as &dyn RemoteFileCopier\n\n }\n", "file_path": "src/remote/local.rs", "rank": 24, "score": 32844.08179283876 }, { "content": "use crate::remote::{Connector, Computer, FileCopier, RemoteFileCopier, WindowsRemoteFileHandler, copy_from_remote_wildcards};\n\nuse std::path::{Path, PathBuf};\n\nuse std::io;\n\nuse crate::process_runner::run_process_blocking;\n\n\n\npub struct PsRemote {\n\n computer: Computer,\n\n copier_impl: WindowsRemoteFileHandler,\n\n remote_temp_storage: PathBuf,\n\n}\n\n\n\nimpl PsRemote {\n\n pub fn new(computer: Computer, remote_temp_storage: PathBuf, custom_share_folder: Option<String>) -> PsRemote {\n\n PsRemote {\n\n computer: computer.clone(),\n\n copier_impl: WindowsRemoteFileHandler::new(computer, Box::new(Powershell {}), custom_share_folder),\n\n remote_temp_storage,\n\n }\n\n }\n\n}\n", "file_path": "src/remote/psremote.rs", "rank": 25, "score": 32843.446663462026 }, { "content": " \"XCopy (local)\"\n\n 
}\n\n}\n\n\n\nimpl RemoteFileCopier for Local {\n\n fn remote_computer(&self) -> &Computer {\n\n &self.localhost\n\n }\n\n\n\n fn copier_impl(&self) -> &dyn FileCopier {\n\n self\n\n }\n\n\n\n fn path_to_remote_form(&self, path: &Path) -> PathBuf {\n\n path.to_path_buf()\n\n }\n\n\n\n fn copy_from_remote(&self, source: &Path, target: &Path) -> io::Result<()> {\n\n copy_from_local_wildcards(\n\n source,\n\n target,\n\n self,\n\n |s, t| self.copier_impl().copy_file(s, t),\n\n )\n\n }\n\n}", "file_path": "src/remote/local.rs", "rank": 26, "score": 32839.20534001979 }, { "content": "\n\nimpl Connector for PsRemote {\n\n fn connect_method_name(&self) -> &'static str {\n\n return \"PSREM\";\n\n }\n\n\n\n fn computer(&self) -> &Computer {\n\n &self.computer\n\n }\n\n\n\n fn copier(&self) -> &dyn RemoteFileCopier {\n\n &self.copier_impl\n\n }\n\n\n\n fn remote_temp_storage(&self) -> &Path {\n\n self.remote_temp_storage.as_path()\n\n }\n\n\n\n fn prepare_command(&self,\n\n command: Vec<String>,\n", "file_path": "src/remote/psremote.rs", "rank": 27, "score": 32837.96665447131 }, { "content": " fn remote_computer(&self) -> &Computer {\n\n self.computer()\n\n }\n\n\n\n fn copier_impl(&self) -> &dyn FileCopier {\n\n self.copier_impl.copier_impl()\n\n }\n\n\n\n fn path_to_remote_form(&self, path: &Path) -> PathBuf {\n\n self.copier_impl.path_to_remote_form(path)\n\n }\n\n\n\n fn copy_to_remote(&self, source: &Path, target: &Path) -> io::Result<()> {\n\n self.copier_impl.copy_from_remote(source, target)\n\n }\n\n\n\n fn delete_remote_file(&self, target: &Path) -> io::Result<()> {\n\n self.copier_impl.delete_remote_file(target)\n\n }\n\n\n", "file_path": "src/remote/psremote.rs", "rank": 28, "score": 32835.791927544815 }, { "content": " fn copy_from_remote(&self, source: &Path, target: &Path) -> io::Result<()> {\n\n copy_from_remote_wildcards(\n\n source,\n\n target,\n\n self,\n\n |s, t| self.copier_impl.copy_from_remote(s, t),\n\n )\n\n }\n\n}\n\n\n\npub struct Powershell 
{}\n\n\n\nimpl FileCopier for Powershell {\n\n fn copy_file(\n\n &self,\n\n source: &Path,\n\n target: &Path,\n\n ) -> io::Result<()> {\n\n let args = vec![\n\n \"Copy-Item\".to_string(),\n", "file_path": "src/remote/psremote.rs", "rank": 29, "score": 32834.824054710465 }, { "content": "\n\n fn remote_temp_storage(&self) -> &Path {\n\n self.temp_storage.as_path()\n\n }\n\n\n\n fn connect_and_run_local_program(\n\n &self,\n\n command_to_run: Command<'_>,\n\n timeout: Option<Duration>\n\n ) -> io::Result<Option<PathBuf>> {\n\n self.connect_and_run_command(command_to_run, timeout)\n\n }\n\n\n\n fn prepare_command(&self,\n\n command: Vec<String>,\n\n output_file_path: Option<&str>,\n\n _elevated: bool,\n\n ) -> Vec<String> {\n\n match output_file_path {\n\n None => command,\n", "file_path": "src/remote/local.rs", "rank": 30, "score": 32834.52801095411 }, { "content": " Some(output_file_path) => {\n\n let mut result : Vec<String> = command.into();\n\n result.push(\">\".to_string());\n\n result.push(output_file_path.to_string());\n\n result\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl FileCopier for Local {\n\n fn copy_file(&self, source: &Path, target: &Path) -> io::Result<()> {\n\n Cmd{}.copy_file(source, target)\n\n }\n\n\n\n fn delete_file(&self, target: &Path) -> io::Result<()> {\n\n Cmd{}.delete_file(target)\n\n }\n\n\n\n fn method_name(&self) -> &'static str {\n", "file_path": "src/remote/local.rs", "rank": 31, "score": 32834.49877304656 }, { "content": " output_file_path: Option<&str>,\n\n _elevated: bool,\n\n ) -> Vec<String> {\n\n let remote_computer = self.computer();\n\n let program_name = \"powershell.exe\".to_string();\n\n let mut prepared_command = vec![\n\n program_name,\n\n \"-command\".to_string(),\n\n \"Invoke-Command\".to_string(),\n\n \"-ComputerName\".to_string(),\n\n remote_computer.address.clone(),\n\n \"-ScriptBlock\".to_string(),\n\n \"{\".to_string(),\n\n ];\n\n prepared_command.extend(command);\n\n let username = remote_computer.domain_username();\n\n 
let credential = match &remote_computer.password {\n\n None => username,\n\n Some(password) =>\n\n format!(\n", "file_path": "src/remote/psremote.rs", "rank": 32, "score": 32834.4585913275 }, { "content": " \"(New-Object Management.Automation.PSCredential ('{}', (ConvertTo-SecureString '{}' -AsPlainText -Force)))\",\n\n username,\n\n password\n\n ),\n\n };\n\n prepared_command.push(\"}\".to_string());\n\n prepared_command.push(\"-credential\".to_string());\n\n prepared_command.push(credential);\n\n match output_file_path {\n\n None => prepared_command,\n\n Some(output_file_path) => {\n\n prepared_command.push(\">\".to_string());\n\n prepared_command.push(output_file_path.to_string());\n\n prepared_command\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl RemoteFileCopier for PsRemote {\n", "file_path": "src/remote/psremote.rs", "rank": 33, "score": 32831.73744440569 }, { "content": " format!(\"'{}'\", source.to_string_lossy()),\n\n format!(\"'{}'\", target.to_string_lossy()),\n\n ];\n\n run_process_blocking(\n\n \"powershell.exe\",\n\n &args,\n\n )\n\n }\n\n\n\n fn delete_file(&self, target: &Path) -> io::Result<()> {\n\n let args = vec![\n\n \"Remove-Item\".to_string(),\n\n \"-Force\".to_string(),\n\n format!(\"'{}'\", target.to_string_lossy()),\n\n ];\n\n run_process_blocking(\n\n \"powershell.exe\",\n\n &args,\n\n )\n\n }\n\n\n\n fn method_name(&self) -> &'static str {\n\n \"PSCOPY\"\n\n }\n\n}\n", "file_path": "src/remote/psremote.rs", "rank": 34, "score": 32830.86279859424 }, { "content": "use crate::remote::{Connector, Computer, Command, RemoteFileCopier, Cmd, WindowsRemoteFileHandler, FileCopier, copy_from_remote_wildcards};\n\nuse std::time::Duration;\n\nuse std::io::Error;\n\nuse std::path::{PathBuf, Path};\n\nuse std::io;\n\n\n\npub struct PsExec {\n\n computer: Computer,\n\n copier_impl: WindowsRemoteFileHandler,\n\n psexec_name: String,\n\n remote_temp_storage: PathBuf,\n\n ms_psexec: bool,\n\n}\n\n\n\npub const PSEXEC64_NAME: &str = \"PsExec64.exe\";\n\npub const 
PSEXEC32_NAME: &str = \"PsExec.exe\";\n\n\n\nimpl PsExec {\n\n pub fn paexec(computer: Computer, remote_temp_storage: PathBuf, custom_share_folder: Option<String>) -> PsExec {\n\n PsExec {\n", "file_path": "src/remote/psexec.rs", "rank": 35, "score": 32825.459510762914 }, { "content": " computer: computer.clone(),\n\n copier_impl: WindowsRemoteFileHandler::new(computer, Box::new(Cmd {}), custom_share_folder),\n\n psexec_name: \"paexec.exe\".to_string(),\n\n remote_temp_storage,\n\n ms_psexec: false,\n\n }\n\n }\n\n\n\n pub fn psexec32(computer: Computer, remote_temp_storage: PathBuf, custom_share_folder: Option<String>) -> PsExec {\n\n PsExec {\n\n computer: computer.clone(),\n\n copier_impl: WindowsRemoteFileHandler::new(computer, Box::new(Cmd {}), custom_share_folder),\n\n psexec_name: PSEXEC32_NAME.to_string(),\n\n remote_temp_storage,\n\n ms_psexec: true,\n\n }\n\n }\n\n\n\n pub fn psexec64(computer: Computer, remote_temp_storage: PathBuf, custom_share_folder: Option<String>) -> PsExec {\n\n PsExec {\n", "file_path": "src/remote/psexec.rs", "rank": 36, "score": 32819.799972400564 }, { "content": " computer: computer.clone(),\n\n copier_impl: WindowsRemoteFileHandler::new(computer, Box::new(Cmd {}), custom_share_folder),\n\n psexec_name: PSEXEC64_NAME.to_string(),\n\n remote_temp_storage,\n\n ms_psexec: true,\n\n }\n\n }\n\n}\n\n\n\nimpl Connector for PsExec {\n\n fn connect_method_name(&self) -> &'static str {\n\n return \"PSEXEC\";\n\n }\n\n\n\n fn computer(&self) -> &Computer {\n\n &self.computer\n\n }\n\n\n\n fn copier(&self) -> &dyn RemoteFileCopier {\n\n self as &dyn RemoteFileCopier\n", "file_path": "src/remote/psexec.rs", "rank": 37, "score": 32817.64028056548 }, { "content": " };\n\n self.connect_and_run_command(command_to_run, timeout)\n\n }\n\n\n\n fn prepare_command(&self,\n\n command: Vec<String>,\n\n output_file_path: Option<&str>,\n\n elevated: bool,\n\n ) -> Vec<String> {\n\n let remote_computer = self.computer();\n\n let address = 
format!(\"\\\\\\\\{}\", remote_computer.address);\n\n let program_name = self.psexec_name.clone();\n\n let mut prepared_command = vec![\n\n program_name,\n\n address,\n\n \"-u\".to_string(),\n\n remote_computer.domain_username(),\n\n ];\n\n if let Some(password) = &remote_computer.password {\n\n prepared_command.push(\"-p\".to_string());\n", "file_path": "src/remote/psexec.rs", "rank": 38, "score": 32815.55434790363 }, { "content": " }\n\n\n\n fn remote_temp_storage(&self) -> &Path {\n\n self.remote_temp_storage.as_path()\n\n }\n\n\n\n fn connect_and_run_local_program(&self,\n\n command_to_run: Command<'_>,\n\n timeout: Option<Duration>,\n\n ) -> Result<Option<PathBuf>, Error> {\n\n let mut command = command_to_run.command;\n\n if self.ms_psexec {\n\n command.insert(0, \"-accepteula\".to_string());\n\n }\n\n command.insert(0, \"-c\".to_string());\n\n command.insert(0, \"-f\".to_string());\n\n\n\n let command_to_run = Command {\n\n command,\n\n ..command_to_run\n", "file_path": "src/remote/psexec.rs", "rank": 39, "score": 32815.48846519994 }, { "content": " prepared_command.push(password.clone());\n\n }\n\n if elevated {\n\n prepared_command.push(\"-h\".to_string());\n\n }\n\n prepared_command.extend(command.into_iter());\n\n match output_file_path {\n\n None => prepared_command,\n\n Some(output_file_path) => {\n\n prepared_command.push(\">\".to_string());\n\n prepared_command.push(output_file_path.to_string());\n\n prepared_command\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl RemoteFileCopier for PsExec {\n\n fn remote_computer(&self) -> &Computer {\n\n self.computer()\n", "file_path": "src/remote/psexec.rs", "rank": 40, "score": 32813.93353424741 }, { "content": " \"del\".to_string(),\n\n \"/F\".to_string(),\n\n \"/Q\".to_string(),\n\n target.to_string_lossy().to_string(),\n\n ],\n\n report_store_directory: None,\n\n report_filename_prefix: \"\",\n\n elevated: true,\n\n },\n\n None,\n\n ).map(|_| ())\n\n }\n\n\n\n fn copy_from_remote(&self, source: &Path, target: &Path) -> 
io::Result<()> {\n\n copy_from_remote_wildcards(\n\n source,\n\n target,\n\n self,\n\n |s, t| self.copier_impl.copy_from_remote(s, t),\n\n )\n\n }\n\n}\n", "file_path": "src/remote/psexec.rs", "rank": 41, "score": 32813.114375286 }, { "content": " }\n\n\n\n fn copier_impl(&self) -> &dyn FileCopier {\n\n self.copier_impl.copier_impl()\n\n }\n\n\n\n fn path_to_remote_form(&self, path: &Path) -> PathBuf {\n\n self.copier_impl.path_to_remote_form(path)\n\n }\n\n\n\n fn copy_to_remote(&self, source: &Path, target: &Path) -> io::Result<()> {\n\n self.copier_impl.copy_from_remote(source, target)\n\n }\n\n\n\n fn delete_remote_file(&self, target: &Path) -> io::Result<()> {\n\n self.connect_and_run_command(\n\n Command {\n\n command: vec![\n\n \"cmd\".to_string(),\n\n \"/c\".to_string(),\n", "file_path": "src/remote/psexec.rs", "rank": 42, "score": 32811.23371681067 }, { "content": "use crate::remote::{Connector, Computer, FileCopier, RemoteFileCopier, copy_from_remote_wildcards};\n\nuse std::path::{Path, PathBuf};\n\nuse std::io;\n\nuse crate::process_runner::{run_process_blocking_maybe_timed, run_process_blocking_timed};\n\nuse std::time::Duration;\n\n\n\npub struct Wmi {\n\n pub computer: Computer,\n\n pub remote_temp_storage: PathBuf\n\n}\n\n\n\nimpl Connector for Wmi {\n\n fn connect_method_name(&self) -> &'static str {\n\n \"WMI\"\n\n }\n\n\n\n fn computer(&self) -> &Computer {\n\n &self.computer\n\n }\n\n\n", "file_path": "src/remote/wmi.rs", "rank": 43, "score": 32807.22518562844 }, { "content": " &prepared_command,\n\n Duration::from_secs(10)\n\n )\n\n }\n\n\n\n fn method_name(&self) -> &'static str {\n\n self.connect_method_name()\n\n }\n\n}\n\n\n\nimpl RemoteFileCopier for Wmi {\n\n fn remote_computer(&self) -> &Computer {\n\n self.computer()\n\n }\n\n\n\n fn copier_impl(&self) -> &dyn FileCopier {\n\n self as &dyn FileCopier\n\n }\n\n\n\n fn path_to_remote_form(&self, path: &Path) -> PathBuf {\n", "file_path": "src/remote/wmi.rs", "rank": 44, "score": 
32795.97467457271 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Wmi {\n\n fn copy_impl(&self,\n\n source: &Path,\n\n target: &Path,\n\n method_name: &str,\n\n target_is_remote: bool,\n\n ) -> io::Result<()>{\n\n let remote_computer = self.remote_computer();\n\n\n\n let target = match source.file_name() {\n\n None => target.to_path_buf(),\n\n Some(file_name) => target.join(file_name),\n\n };\n\n\n\n let mut prepared_command = vec![\n\n \"-File\".to_string(),\n", "file_path": "src/remote/wmi.rs", "rank": 45, "score": 32795.042607327116 }, { "content": " if let Some(password) = &remote_computer.password {\n\n prepared_command.push(\"-RemotePass\".to_string());\n\n prepared_command.push(password.clone());\n\n }\n\n\n\n run_process_blocking_maybe_timed(\n\n \"powershell.exe\",\n\n &prepared_command,\n\n None\n\n )\n\n }\n\n}\n\n\n\nimpl FileCopier for Wmi {\n\n fn copy_file(&self, source: &Path, target: &Path) -> io::Result<()> {\n\n self.copy_impl(source, target, \"-Copy\", true)\n\n }\n\n\n\n fn delete_file(&self, target: &Path) -> io::Result<()> {\n\n let remote_computer = self.remote_computer();\n", "file_path": "src/remote/wmi.rs", "rank": 46, "score": 32794.8499802655 }, { "content": " \"-ComputerName\".to_string(),\n\n remote_computer.address.clone(),\n\n \"-RemoteUser\".to_string(),\n\n remote_computer.domain_username(),\n\n \"-CommandExec\".to_string(),\n\n \"-RemoteCommand\".to_string(),\n\n format!(\"{}\", command_joined),\n\n ];\n\n\n\n if let Some(password) = &remote_computer.password {\n\n prepared_command.push(\"-RemotePass\".to_string());\n\n prepared_command.push(password.clone());\n\n }\n\n match output_file_path {\n\n None => prepared_command,\n\n Some(output_file_path) => {\n\n prepared_command.push(\">\".to_string());\n\n prepared_command.push(output_file_path.to_string());\n\n prepared_command\n\n }\n", "file_path": "src/remote/wmi.rs", "rank": 47, "score": 32792.554829745124 }, { "content": " \"WMImplant.ps1\".to_string(),\n\n 
method_name.to_string(),\n\n \"-ComputerName\".to_string(),\n\n remote_computer.address.clone(),\n\n \"-RemoteUser\".to_string(),\n\n remote_computer.domain_username()\n\n ];\n\n\n\n if target_is_remote {\n\n prepared_command.push(\"-RemoteFile\".to_string());\n\n prepared_command.push(target.to_string_lossy().to_string());\n\n prepared_command.push(\"-LocalFile\".to_string());\n\n prepared_command.push(source.to_string_lossy().to_string());\n\n } else {\n\n prepared_command.push(\"-RemoteFile\".to_string());\n\n prepared_command.push(source.to_string_lossy().to_string());\n\n prepared_command.push(\"-LocalFile\".to_string());\n\n prepared_command.push(target.to_string_lossy().to_string());\n\n }\n\n\n", "file_path": "src/remote/wmi.rs", "rank": 48, "score": 32792.018620223076 }, { "content": "\n\n let mut prepared_command = vec![\n\n \"-File\".to_string(),\n\n \"WMImplant.ps1\".to_string(),\n\n \"-Delete\".to_string(),\n\n \"-LocalFile\".to_string(),\n\n target.to_string_lossy().to_string(),\n\n \"-ComputerName\".to_string(),\n\n remote_computer.address.clone(),\n\n \"-RemoteUser\".to_string(),\n\n remote_computer.domain_username()\n\n ];\n\n\n\n if let Some(password) = &remote_computer.password {\n\n prepared_command.push(\"-RemotePass\".to_string());\n\n prepared_command.push(password.clone());\n\n }\n\n\n\n run_process_blocking_timed(\n\n \"powershell.exe\",\n", "file_path": "src/remote/wmi.rs", "rank": 49, "score": 32792.01425918907 }, { "content": " fn copier(&self) -> &dyn RemoteFileCopier {\n\n self as &dyn RemoteFileCopier\n\n }\n\n\n\n fn remote_temp_storage(&self) -> &Path {\n\n self.remote_temp_storage.as_path()\n\n }\n\n\n\n fn prepare_command(&self,\n\n command: Vec<String>,\n\n output_file_path: Option<&str>,\n\n _elevated: bool,\n\n ) -> Vec<String> {\n\n let remote_computer = self.remote_computer();\n\n let program_name = \"powershell.exe\".to_string();\n\n let command_joined: String = command.join(\" \");\n\n let mut prepared_command = vec![\n\n 
program_name,\n\n \"-File\".to_string(),\n\n \"WMImplant.ps1\".to_string(),\n", "file_path": "src/remote/wmi.rs", "rank": 50, "score": 32791.94395267132 }, { "content": " path.to_path_buf()\n\n }\n\n\n\n fn copy_to_remote(&self, source: &Path, target: &Path) -> io::Result<()> {\n\n self.copy_impl(source, target, \"-Upload\", true)\n\n }\n\n\n\n fn delete_remote_file(&self, target: &Path) -> io::Result<()> {\n\n self.delete_file(target)\n\n }\n\n\n\n fn copy_from_remote(&self, source: &Path, target: &Path) -> io::Result<()> {\n\n copy_from_remote_wildcards(\n\n source,\n\n target,\n\n self,\n\n |s, t| self.copy_impl(s, t, \"-Download\", false),\n\n )\n\n }\n\n}", "file_path": "src/remote/wmi.rs", "rank": 51, "score": 32789.860441998135 }, { "content": "use crate::remote::{Connector, Computer, FileCopier, Command, RemoteFileCopier, copy_from_remote_wildcards};\n\nuse std::path::{Path, PathBuf};\n\nuse std::io;\n\nuse crate::process_runner::{run_process_blocking, create_report_path};\n\nuse std::time::Duration;\n\n\n\n#[derive(Clone)]\n\npub struct Rdp {\n\n pub computer: Computer,\n\n pub nla: bool,\n\n pub remote_temp_storage: PathBuf,\n\n}\n\n\n\nimpl Connector for Rdp {\n\n fn connect_method_name(&self) -> &'static str {\n\n return \"RDP\";\n\n }\n\n\n\n fn computer(&self) -> &Computer {\n\n &self.computer\n", "file_path": "src/remote/rdp.rs", "rank": 52, "score": 32764.680390894166 }, { "content": " let result = run_process_blocking(\n\n \"cmd.exe\",\n\n &prepared_command,\n\n );\n\n if let Some(timeout) = timeout {\n\n std::thread::sleep(timeout);\n\n }\n\n result.map(|_| output_file_path.map(|it| PathBuf::from(it)))\n\n }\n\n\n\n fn prepare_command(&self,\n\n command: Vec<String>,\n\n output_file_path: Option<&str>,\n\n elevated: bool,\n\n ) -> Vec<String> {\n\n let remote_computer = self.remote_computer();\n\n let program_name = \"SharpRDP.exe\".to_string();\n\n\n\n let mut prepared_command = vec![\n\n program_name,\n", "file_path": "src/remote/rdp.rs", 
"rank": 53, "score": 32754.07917196121 }, { "content": " fn delete_file(&self, target: &Path) -> io::Result<()> {\n\n self.run_command(format!(\n\n \"command=del /f {}\",\n\n target.to_string_lossy()\n\n ))\n\n }\n\n\n\n fn method_name(&self) -> &'static str {\n\n \"RDP\"\n\n }\n\n}\n\n\n\nimpl RemoteFileCopier for Rdp {\n\n fn remote_computer(&self) -> &Computer {\n\n &self.computer\n\n }\n\n\n\n fn copier_impl(&self) -> &dyn FileCopier {\n\n self as &dyn FileCopier\n\n }\n", "file_path": "src/remote/rdp.rs", "rank": 54, "score": 32752.774661040494 }, { "content": " if self.nla {\n\n args.push(\"nla=true\".to_string());\n\n }\n\n args.push(command);\n\n run_process_blocking(\n\n \"SharpRDP.exe\",\n\n &args,\n\n )\n\n }\n\n}\n\n\n\nimpl FileCopier for Rdp {\n\n fn copy_file(&self, source: &Path, target: &Path) -> io::Result<()> {\n\n self.run_command(format!(\n\n \"command=xcopy {} {} /y\",\n\n source.to_string_lossy(),\n\n target.to_string_lossy()\n\n ))\n\n }\n\n\n", "file_path": "src/remote/rdp.rs", "rank": 55, "score": 32749.792309150816 }, { "content": " }\n\n };\n\n prepared_command.push(command_as_arg);\n\n prepared_command\n\n }\n\n}\n\n\n\nimpl Rdp {\n\n fn run_command(&self, command: String) -> io::Result<()> {\n\n let mut args = vec![\n\n format!(\"computername={}\", &self.computer.address),\n\n \"exec=cmd\".to_string(),\n\n \"takeover=true\".to_string(),\n\n \"connectdrive=true\".to_string(),\n\n ];\n\n let username = self.computer.domain_username();\n\n args.push(format!(\"username={}\", username));\n\n if let Some(password) = &self.computer.password {\n\n args.push(format!(\"password={}\", password));\n\n }\n", "file_path": "src/remote/rdp.rs", "rank": 56, "score": 32749.302118909778 }, { "content": " None => None,\n\n Some(store_directory) => {\n\n let file_path = create_report_path(\n\n self.computer(),\n\n store_directory,\n\n &remote_connection.report_filename_prefix,\n\n self.connect_method_name(),\n\n \"txt\",\n\n );\n\n 
Some(file_path.to_str().unwrap().to_string())\n\n }\n\n };\n\n\n\n let processed_command = self.prepare_command(\n\n remote_connection.command,\n\n output_file_path.as_deref(),\n\n remote_connection.elevated,\n\n );\n\n\n\n let prepared_command = self.prepare_remote_process(processed_command);\n", "file_path": "src/remote/rdp.rs", "rank": 57, "score": 32748.86726378894 }, { "content": " }\n\n\n\n fn copier(&self) -> &dyn RemoteFileCopier {\n\n self as &dyn RemoteFileCopier\n\n }\n\n\n\n fn remote_temp_storage(&self) -> &Path {\n\n self.remote_temp_storage.as_path()\n\n }\n\n\n\n fn connect_and_run_command(\n\n &self,\n\n remote_connection: Command<'_>,\n\n timeout: Option<Duration>,\n\n ) -> io::Result<Option<PathBuf>> {\n\n debug!(\"Trying to run command {:?} on {}\",\n\n remote_connection.command,\n\n &self.computer().address\n\n );\n\n let output_file_path = match remote_connection.report_store_directory {\n", "file_path": "src/remote/rdp.rs", "rank": 58, "score": 32748.80829870786 }, { "content": "\n\n fn path_to_remote_form(&self, path: &Path) -> PathBuf {\n\n trace!(\"Converting path {}\", path.display());\n\n // let canon_path = dunce::canonicalize(path).unwrap();\n\n let as_remote_path = path\n\n .to_string_lossy()\n\n .replacen(\":\", \"\", 1);\n\n let tsclient_path = format!(\"\\\\\\\\tsclient\\\\{}\", as_remote_path);\n\n PathBuf::from(tsclient_path)\n\n }\n\n\n\n fn copy_to_remote(\n\n &self,\n\n source: &Path,\n\n target: &Path,\n\n ) -> io::Result<()> {\n\n self.copier_impl().copy_file(&self.path_to_remote_form(source), target)\n\n }\n\n\n\n fn delete_remote_file(&self, target: &Path) -> io::Result<()> {\n", "file_path": "src/remote/rdp.rs", "rank": 59, "score": 32747.341993581205 }, { "content": " format!(\"computername={}\", &remote_computer.address),\n\n ];\n\n\n\n let username = match &remote_computer.domain {\n\n None =>\n\n remote_computer.username.clone(),\n\n Some(domain) =>\n\n format!(\"{}\\\\{}\", domain, remote_computer.username),\n\n 
};\n\n prepared_command.push(format!(\"username={}\", username));\n\n if let Some(password) = &remote_computer.password {\n\n prepared_command.push(format!(\"password={}\", password));\n\n }\n\n\n\n if self.nla {\n\n prepared_command.push(\"nla=true\".to_string());\n\n }\n\n\n\n if elevated {\n\n prepared_command.push(\"elevated=taskmgr\".to_string());\n", "file_path": "src/remote/rdp.rs", "rank": 60, "score": 32747.237485964753 }, { "content": " }\n\n\n\n prepared_command.push(\"exec=ps\".to_string());\n\n prepared_command.push(\"takeover=true\".to_string());\n\n prepared_command.push(\"connectdrive=true\".to_string());\n\n\n\n let command_joined: String = command.join(\" \");\n\n let command_as_arg = match output_file_path {\n\n None => format!(\"command={}\", command_joined),\n\n Some(output_file_path) => {\n\n let path = Path::new(&output_file_path);\n\n let canon_path = dunce::canonicalize(path).unwrap();\n\n let as_remote_path = canon_path\n\n .to_string_lossy()\n\n .replacen(\":\", \"\", 1);\n\n format!(\n\n\"command=cmd.exe /c {} -p.i.p.e- Out-File -FilePath \\\\\\\\tsclient\\\\{}\",\n\ncommand_joined,\n\nas_remote_path\n\n )\n", "file_path": "src/remote/rdp.rs", "rank": 61, "score": 32747.12190232879 }, { "content": " self.copier_impl().delete_file(target)\n\n }\n\n\n\n fn copy_from_remote(\n\n &self,\n\n source: &Path,\n\n target: &Path,\n\n ) -> io::Result<()> {\n\n copy_from_remote_wildcards(\n\n source,\n\n target,\n\n self,\n\n |source, target| self.copier_impl().copy_file(source, &self.path_to_remote_form(target)),\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/remote/rdp.rs", "rank": 62, "score": 32746.731750857103 }, { "content": "use std::io::{Result, BufReader, BufRead};\n\nuse crate::process_runner::{run_process_blocking, create_report_path, run_process_blocking_timed};\n\nuse std::{iter, thread, io};\n\nuse std::path::{Path, PathBuf};\n\nuse crate::arg_parser::Opts;\n\nuse std::time::Duration;\n\nuse crate::remote::{RemoteFileCopier, Local};\n\nuse 
std::fs::File;\n\nuse uuid::Uuid;\n\nuse rpassword::read_password;\n\nuse username::get_user_name;\n\n\n\n#[derive(Clone)]\n\npub struct Computer {\n\n pub address: String,\n\n pub username: String,\n\n pub domain: Option<String>,\n\n pub password: Option<String>,\n\n}\n\n\n", "file_path": "src/remote/connector.rs", "rank": 63, "score": 32315.303072836265 }, { "content": "impl Computer {\n\n pub fn domain_username(&self) -> String {\n\n match &self.domain {\n\n None =>\n\n self.username.clone(),\n\n Some(domain) =>\n\n format!(\"{}\\\\{}\", domain, self.username),\n\n }\n\n }\n\n}\n\n\n\npub struct Command<'a> {\n\n pub command: Vec<String>,\n\n pub report_store_directory: Option<&'a Path>,\n\n pub report_filename_prefix: &'a str,\n\n pub elevated: bool,\n\n}\n\n\n\nimpl From<Opts> for Computer {\n\n fn from(opts: Opts) -> Self {\n", "file_path": "src/remote/connector.rs", "rank": 64, "score": 32308.28324235396 }, { "content": " command_to_run,\n\n timeout,\n\n )\n\n }\n\n\n\n fn connect_and_run_local_program(\n\n &self,\n\n command_to_run: Command<'_>,\n\n timeout: Option<Duration>,\n\n ) -> Result<Option<PathBuf>> {\n\n let local_program_path = Path::new(command_to_run.command.first().unwrap());\n\n let remote_storage = self.remote_temp_storage();\n\n let copier = self.copier();\n\n copier.copy_to_remote(&local_program_path, &remote_storage)?;\n\n thread::sleep(Duration::from_millis(20_000));\n\n let remote_program_path = remote_storage.join(local_program_path\n\n .file_name()\n\n .expect(&format!(\"Must specify file instead of {}\", local_program_path.display())\n\n )\n\n );\n", "file_path": "src/remote/connector.rs", "rank": 65, "score": 32308.23503548279 }, { "content": " }\n\n };\n\n if local_mode {\n\n return Local::new(username, PathBuf::from(opts.remote_store_directory)).computer().clone();\n\n }\n\n let password = match &opts.password {\n\n Some(password) => if password.is_empty() {\n\n None\n\n } else {\n\n Some(password.clone())\n\n },\n\n None => {\n\n 
println!(\"Password: \");\n\n let password = read_password().ok();\n\n password\n\n }\n\n };\n\n Computer {\n\n address: opts.computer,\n\n username,\n", "file_path": "src/remote/connector.rs", "rank": 66, "score": 32307.895833575945 }, { "content": " .filter(|line| !line.is_empty())\n\n .collect::<Vec<String>>();\n\n debug!(\"Remote path {} has dirs {:?}\", path.display(), result);\n\n if let Err(err) = std::fs::remove_file(&result_file_path) {\n\n error!(\"{}\", err);\n\n }\n\n result\n\n }\n\n\n\n fn acquire_perms(&self, path: &Path) {\n\n debug!(\"Acquiring ownership\");\n\n let grant_svi = Command {\n\n command: vec![\n\n \"cmd.exe\".to_string(),\n\n \"/c\".to_string(),\n\n \"icacls.exe\".to_string(),\n\n path.to_string_lossy().to_string(),\n\n \"/grant\".to_string(),\n\n format!(\"{}:F\", self.computer().domain_username())\n\n ],\n", "file_path": "src/remote/connector.rs", "rank": 67, "score": 32306.604514013434 }, { "content": " address: address.clone(),\n\n username,\n\n domain,\n\n password,\n\n }\n\n })\n\n })\n\n .collect()\n\n }\n\n Err(_) => vec![Computer::from(self)]\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Command<'a> {\n\n pub fn new(\n\n command: Vec<String>,\n\n store_directory: Option<&'a Path>,\n\n report_filename_prefix: &'a str,\n\n elevated: bool,\n", "file_path": "src/remote/connector.rs", "rank": 68, "score": 32306.604797515825 }, { "content": " \"/c\".to_string(),\n\n \"icacls.exe\".to_string(),\n\n path.to_string_lossy().to_string(),\n\n \"/deny\".to_string(),\n\n format!(\"{}:F\", self.computer().username)\n\n ],\n\n report_store_directory: None,\n\n report_filename_prefix: \"DENY_VSI\",\n\n elevated: true,\n\n };\n\n\n\n if let Err(err) = self.connect_and_run_command(\n\n grant_svi,\n\n None,\n\n ) {\n\n warn!(\"Cannot release ownership: {}\", err)\n\n }\n\n }\n\n}", "file_path": "src/remote/connector.rs", "rank": 69, "score": 32306.072460005886 }, { "content": " let mut command = command_to_run.command;\n\n command[0] = 
remote_program_path.to_string_lossy().to_string();\n\n let command_to_run = Command {\n\n command,\n\n ..command_to_run\n\n };\n\n let result = self.connect_and_run_command(command_to_run, timeout)?;\n\n thread::sleep(Duration::from_millis(10_000));\n\n copier.delete_remote_file(&remote_program_path)?;\n\n Ok(result)\n\n }\n\n\n\n fn connect_and_run_command(\n\n &self,\n\n command_to_run: Command<'_>,\n\n timeout: Option<Duration>,\n\n ) -> Result<Option<PathBuf>> {\n\n debug!(\"Trying to run command {:?} on {}\",\n\n command_to_run.command,\n\n &self.computer().address\n", "file_path": "src/remote/connector.rs", "rank": 70, "score": 32305.86824801035 }, { "content": " report_store_directory: None,\n\n report_filename_prefix: \"GRANT_VSI\",\n\n elevated: true,\n\n };\n\n\n\n if let Err(err) = self.connect_and_run_command(\n\n grant_svi,\n\n None,\n\n ) {\n\n warn!(\"Cannot acquire ownership: {}\", err)\n\n }\n\n thread::sleep(Duration::from_secs(5));\n\n }\n\n\n\n fn release_perms(&self, path: &Path) {\n\n thread::sleep(Duration::from_secs(5));\n\n debug!(\"Releasing ownership\");\n\n let grant_svi = Command {\n\n command: vec![\n\n \"cmd.exe\".to_string(),\n", "file_path": "src/remote/connector.rs", "rank": 71, "score": 32304.917905515125 }, { "content": " let local_mode = opts.local || opts.computer == \"127.0.0.1\" || opts.computer == \"localhost\";\n\n let (domain, username) = match &opts.user {\n\n Some(user) => if user.is_empty() {\n\n (None, \"\".to_string())\n\n } else {\n\n (opts.domain, user.clone())\n\n },\n\n None => {\n\n if local_mode {\n\n (None, get_user_name().expect(\"Non unicode character in this username\"))\n\n }else {\n\n println!(\"Domain (optional): \");\n\n let mut domain = String::new();\n\n let _ = io::stdin().read_line(&mut domain);\n\n\n\n println!(\"Username: \");\n\n let mut user = String::new();\n\n io::stdin().read_line(&mut user).ok();\n\n (if domain.trim().is_empty() { None } else { Some(domain) }, user)\n\n }\n", "file_path": 
"src/remote/connector.rs", "rank": 72, "score": 32304.813088702944 }, { "content": " );\n\n if let Err(err) = self.connect_and_run_command(command, Some(Duration::from_secs(10))) {\n\n error!(\"{}\", err);\n\n }\n\n }\n\n\n\n fn connect_and_run_local_program_in_current_directory(\n\n &self,\n\n command_to_run: Command<'_>,\n\n timeout: Option<Duration>,\n\n ) -> Result<Option<PathBuf>> {\n\n let mut command = command_to_run.command;\n\n command[0] = std::env::current_dir().unwrap()\n\n .join(Path::new(&command[0]).file_name().unwrap())\n\n .to_string_lossy().to_string();\n\n let command_to_run = Command {\n\n command,\n\n ..command_to_run\n\n };\n\n self.connect_and_run_local_program(\n", "file_path": "src/remote/connector.rs", "rank": 73, "score": 32304.315322786315 }, { "content": " );\n\n let output_file_path = match command_to_run.report_store_directory {\n\n None => None,\n\n Some(store_directory) => {\n\n let file_path = create_report_path(\n\n self.computer(),\n\n store_directory,\n\n &command_to_run.report_filename_prefix,\n\n self.connect_method_name(),\n\n \"txt\",\n\n );\n\n Some(file_path.to_str().unwrap().to_string())\n\n }\n\n };\n\n\n\n let processed_command = self.prepare_command(\n\n command_to_run.command,\n\n output_file_path.as_deref(),\n\n command_to_run.elevated,\n\n );\n", "file_path": "src/remote/connector.rs", "rank": 74, "score": 32304.306505089146 }, { "content": " domain,\n\n password,\n\n }\n\n }\n\n}\n\n\n\nimpl Into<Vec<Computer>> for Opts {\n\n fn into(self) -> Vec<Computer> {\n\n let file = File::open(&self.computer);\n\n match file {\n\n Ok(file) => {\n\n BufReader::new(file)\n\n .lines()\n\n .filter_map(|line| line.ok())\n\n .filter(|line| !line.trim().is_empty())\n\n .filter_map(|line| {\n\n let splitted = line\n\n .split(\" \")\n\n .map(|item| item.to_string())\n\n .collect::<Vec<String>>();\n", "file_path": "src/remote/connector.rs", "rank": 75, "score": 32302.24576033414 }, { "content": " processed_command: Vec<String>,\n\n // 
post_command: Vec<String>,\n\n ) -> Vec<String> {\n\n let all_args = iter::once(\"/c\".to_string())\n\n // .chain(pre_command.into_iter())\n\n .chain(processed_command.into_iter())\n\n // .chain(post_command.into_iter())\n\n .collect();\n\n all_args\n\n }\n\n\n\n fn prepare_command(&self,\n\n command: Vec<String>,\n\n output_file_path: Option<&str>,\n\n elevated: bool,\n\n ) -> Vec<String>;\n\n\n\n fn list_dirs(&self, path: &Path, store_directory: &Path) -> Vec<String> {\n\n debug!(\"Listing dirs in remote path {}\", path.display());\n\n let prefix = format!(\"--TEMP_LIST_DIR_{}\", Uuid::new_v4());\n", "file_path": "src/remote/connector.rs", "rank": 76, "score": 32302.149675523826 }, { "content": " .map(|item| item.file_name())\n\n .find(|item| item.to_string_lossy().contains(&prefix))\n\n }\n\n _ => None,\n\n };\n\n if result_file_name.is_none() {\n\n error!(\"Cannot find dir result file\");\n\n debug!(\"Remote path {} has dirs []\", path.display());\n\n return vec![];\n\n }\n\n let result_file_path = store_directory.join(&result_file_name.unwrap());\n\n let result_file = File::open(&result_file_path);\n\n if result_file.is_err() {\n\n error!(\"Cannot open file {} due to {}\", result_file_path.display(), result_file.err().unwrap());\n\n debug!(\"Remote path {} has dirs []\", path.display());\n\n return vec![];\n\n }\n\n let result = BufReader::new(result_file.unwrap()).lines()\n\n .filter_map(|line| line.ok())\n\n .map(|line| line.trim().to_string())\n", "file_path": "src/remote/connector.rs", "rank": 77, "score": 32302.098417058656 }, { "content": "\n\n let prepared_command = self.prepare_remote_process(processed_command);\n\n match timeout {\n\n None =>\n\n run_process_blocking(\n\n \"cmd.exe\",\n\n &prepared_command,\n\n ),\n\n Some(timeout) =>\n\n run_process_blocking_timed(\n\n \"cmd.exe\",\n\n &prepared_command,\n\n timeout.clone(),\n\n ),\n\n }?;\n\n Ok(output_file_path.map(|it| PathBuf::from(it)))\n\n }\n\n\n\n fn prepare_remote_process(&self,\n\n // 
pre_command: Vec<String>,\n", "file_path": "src/remote/connector.rs", "rank": 78, "score": 32301.528003390984 }, { "content": " let password = match password {\n\n None => match self.password.clone() {\n\n Some(password) => if password.trim().is_empty() {\n\n None\n\n } else {\n\n Some(password)\n\n },\n\n None => {\n\n println!(\"Password for {}: \", address);\n\n let password_user = read_password().ok();\n\n if password_user.is_none() || password_user.as_ref().unwrap().trim().is_empty() {\n\n None\n\n } else {\n\n Some(password_user.unwrap())\n\n }\n\n }\n\n },\n\n Some(password) => Some(password)\n\n };\n\n Computer {\n", "file_path": "src/remote/connector.rs", "rank": 79, "score": 32301.1679981277 }, { "content": " let command = Command::new(\n\n vec![\n\n \"cmd.exe\".to_string(),\n\n \"/c\".to_string(),\n\n \"dir\".to_string(),\n\n path.to_str().unwrap_or_default().to_string(),\n\n \"/Ad\".to_string(),\n\n \"/B\".to_string()\n\n ],\n\n Some(store_directory),\n\n &prefix,\n\n true,\n\n );\n\n if let Err(err) = self.connect_and_run_command(command, Some(Duration::from_secs(10))) {\n\n error!(\"{}\", err);\n\n }\n\n let result_file_name = match store_directory.read_dir() {\n\n Ok(dir_entry_iter) => {\n\n dir_entry_iter\n\n .filter_map(|item| item.ok())\n", "file_path": "src/remote/connector.rs", "rank": 80, "score": 32300.64922957811 }, { "content": " ) -> Command<'a> {\n\n Command {\n\n command,\n\n report_store_directory: store_directory,\n\n report_filename_prefix,\n\n elevated,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/remote/connector.rs", "rank": 81, "score": 32298.81964429866 }, { "content": " let address = splitted.get(0).cloned();\n\n let domain_username = splitted.get(1).cloned();\n\n let password = splitted.get(2).cloned();\n\n address.map(|address| {\n\n let (domain_option, username_option) = match domain_username {\n\n None => (self.domain.clone(), self.user.clone()),\n\n Some(domain_username_unwrapped) => {\n\n let splitted_du = 
domain_username_unwrapped\n\n .split(\"\\\\\")\n\n .map(|item| item.to_string())\n\n .collect::<Vec<String>>();\n\n match splitted_du.len() {\n\n 0 => (self.domain.clone(), self.user.clone()),\n\n 1 => (self.domain.clone(), Some(splitted_du[0].clone())),\n\n _ => (Some(splitted_du[0].clone()), Some(splitted_du[1].clone()))\n\n }\n\n }\n\n };\n\n let (domain, username) = match username_option {\n\n Some(user) => if user.is_empty() {\n", "file_path": "src/remote/connector.rs", "rank": 82, "score": 32298.81964429866 }, { "content": " (domain_option, \"\".to_string())\n\n } else {\n\n (domain_option, user.clone())\n\n },\n\n None => {\n\n let domain = match domain_option {\n\n None => {\n\n println!(\"Domain (optional): \");\n\n let mut domain = String::new();\n\n let _ = io::stdin().read_line(&mut domain);\n\n if domain.trim().is_empty() { None } else { Some(domain) }\n\n }\n\n Some(domain) => Some(domain)\n\n };\n\n println!(\"Username: \");\n\n let mut user = String::new();\n\n io::stdin().read_line(&mut user).ok();\n\n (domain, user)\n\n }\n\n };\n", "file_path": "src/remote/connector.rs", "rank": 83, "score": 32298.81964429866 }, { "content": "fn create_connectors(\n\n opts: &Opts,\n\n computer: &Computer,\n\n remote_temp_storage: &Path,\n\n allowed_ssh: bool,\n\n local: bool,\n\n reverse_share: bool,\n\n) -> Vec<Box<dyn Connector>> {\n\n if local {\n\n return vec![Box::new(Local::new(computer.username.clone(), remote_temp_storage.to_path_buf()))];\n\n }\n\n\n\n let mut copiers = Vec::<Box<dyn Connector>>::new();\n\n if opts.psexec32 {\n\n trace!(\"Creating psexec32 copier\");\n\n let _copier = Box::new(PsExec::psexec32(computer.clone(), remote_temp_storage.to_path_buf(), opts.share.clone()));\n\n let copier: Box<dyn Connector> = if reverse_share { Box::new(RevShareConnector::new(_copier)) } else { _copier };\n\n copiers.push(copier);\n\n }\n\n if opts.psexec64 || opts.all {\n", "file_path": "src/main.rs", "rank": 84, "score": 30812.495050202608 }, { "content": 
"use crate::remote::{Connector, Computer, Command, RemoteFileCopier, FileCopier, GARGAMEL_SHARED_FOLDER_NAME, RemoteCmd};\n\nuse std::time::Duration;\n\nuse std::io::Error;\n\nuse std::path::{PathBuf, Path};\n\nuse std::io;\n\nuse crate::process_runner::run_process_blocking;\n\n\n\n\n\npub struct RevShareConnector {\n\n connector_impl: Box<dyn Connector>\n\n}\n\n\n\nimpl RevShareConnector {\n\n pub fn new(connector_impl: Box<dyn Connector>) -> RevShareConnector {\n\n let result = RevShareConnector { connector_impl };\n\n result.open_connection();\n\n result\n\n }\n\n\n\n fn open_connection(\n", "file_path": "src/remote/reverse_share_connector.rs", "rank": 85, "score": 29472.52487721069 }, { "content": " let command_to_run = Command {\n\n command,\n\n ..command_to_run\n\n };\n\n let result = self.connect_and_run_command(command_to_run, timeout);\n\n let _ = self.delete_remote_file(&local_program_on_target_path);\n\n result\n\n }\n\n\n\n fn prepare_command(&self,\n\n command: Vec<String>,\n\n output_file_path: Option<&str>,\n\n elevated: bool,\n\n ) -> Vec<String> {\n\n self.connector_impl.prepare_command(command, output_file_path, elevated)\n\n }\n\n}\n\n\n\nimpl RemoteFileCopier for RevShareConnector {\n\n fn remote_computer(&self) -> &Computer {\n", "file_path": "src/remote/reverse_share_connector.rs", "rank": 86, "score": 29467.968042473243 }, { "content": " self as &dyn RemoteFileCopier\n\n }\n\n\n\n fn remote_temp_storage(&self) -> &Path {\n\n self.connector_impl.remote_temp_storage()\n\n }\n\n\n\n fn connect_and_run_local_program(&self,\n\n command_to_run: Command<'_>,\n\n timeout: Option<Duration>,\n\n ) -> Result<Option<PathBuf>, Error> {\n\n let local_program = &command_to_run.command[0];\n\n let local_program_path = Path::new(local_program);\n\n if let Err(err) = self.copy_to_remote(local_program_path, self.remote_temp_storage()){\n\n error!(\"{}\", err);\n\n }\n\n let local_program_on_target_path = 
self.remote_temp_storage().join(local_program_path.file_name().unwrap());\n\n let mut command = command_to_run.command;\n\n command[0] = local_program_on_target_path.to_string_lossy().to_string();\n\n\n", "file_path": "src/remote/reverse_share_connector.rs", "rank": 87, "score": 29466.32606892928 }, { "content": "\n\n fn delete_file(&self, target: &Path) -> Result<(), Error> {\n\n RemoteCmd::new(self).delete_file(target)\n\n }\n\n\n\n fn method_name(&self) -> &'static str {\n\n RemoteCmd::new(self).method_name()\n\n }\n\n}\n\n\n\nimpl Connector for RevShareConnector {\n\n fn connect_method_name(&self) -> &'static str {\n\n self.connector_impl.connect_method_name()\n\n }\n\n\n\n fn computer(&self) -> &Computer {\n\n self.connector_impl.computer()\n\n }\n\n\n\n fn copier(&self) -> &dyn RemoteFileCopier {\n", "file_path": "src/remote/reverse_share_connector.rs", "rank": 88, "score": 29466.20384704534 }, { "content": " &self\n\n ) {\n\n let args = vec![\n\n \"share\".to_string(),\n\n format!(\"{}=C:\", GARGAMEL_SHARED_FOLDER_NAME),\n\n \"/GRANT:Everyone,FULL\".to_string()\n\n ];\n\n run_process_blocking(\n\n \"NET\",\n\n &args,\n\n ).expect(&format!(\n\n \"Cannot establish share using \\\"net share\\\" for {}=C:\", GARGAMEL_SHARED_FOLDER_NAME\n\n ));\n\n }\n\n}\n\n\n\nimpl FileCopier for RevShareConnector {\n\n fn copy_file(&self, source: &Path, target: &Path) -> Result<(), Error> {\n\n RemoteCmd::new(self).copy_file(source, target)\n\n }\n", "file_path": "src/remote/reverse_share_connector.rs", "rank": 89, "score": 29464.06693102461 }, { "content": " self.computer()\n\n }\n\n\n\n fn copier_impl(&self) -> &dyn FileCopier {\n\n self as &dyn FileCopier\n\n }\n\n\n\n fn path_to_remote_form(\n\n &self,\n\n path: &Path,\n\n ) -> PathBuf {\n\n PathBuf::from(format!(\n\n \"\\\\\\\\{}\\\\{}\",\n\n gethostname::gethostname().to_string_lossy(),\n\n path.to_str().unwrap().replace(\"C:\", GARGAMEL_SHARED_FOLDER_NAME)\n\n ))\n\n }\n\n\n\n fn copy_to_remote(\n\n &self,\n", 
"file_path": "src/remote/reverse_share_connector.rs", "rank": 90, "score": 29462.989266079574 }, { "content": " source: &Path,\n\n target: &Path,\n\n ) -> io::Result<()> {\n\n self.copier_impl().copy_file(&self.path_to_remote_form(source), target)\n\n }\n\n\n\n fn delete_remote_file(&self, target: &Path) -> io::Result<()> {\n\n self.copier_impl().delete_file(target)\n\n }\n\n\n\n fn copy_from_remote(\n\n &self,\n\n source: &Path,\n\n target: &Path,\n\n ) -> io::Result<()> {\n\n self.copier_impl().copy_file(source, &self.path_to_remote_form(target))\n\n }\n\n}\n\n\n\nimpl Drop for RevShareConnector {\n", "file_path": "src/remote/reverse_share_connector.rs", "rank": 91, "score": 29462.790725232542 }, { "content": " fn drop(&mut self) {\n\n run_process_blocking(\n\n \"NET\",\n\n &[\n\n \"share\".to_string(),\n\n \"/Y\".to_string(),\n\n \"/D\".to_string(),\n\n GARGAMEL_SHARED_FOLDER_NAME.to_string()\n\n ],\n\n ).expect(&format!(\n\n \"Cannot drop connection using \\\"net share\\\" to {}\", GARGAMEL_SHARED_FOLDER_NAME\n\n ));\n\n }\n\n}\n", "file_path": "src/remote/reverse_share_connector.rs", "rank": 92, "score": 29459.839860812346 }, { "content": "use crate::remote::{Connector, Computer, Command, PsExec, PsRemote, Rdp, Wmi, SevenZipCompressCopier, RemoteFileCopier, Compression, Local, RevShareConnector};\n\nuse std::path::{Path, PathBuf};\n\nuse std::{io, thread};\n\nuse std::time::Duration;\n\nuse crate::process_runner::create_report_path;\n\n\n\npub struct MemoryAcquirer<'a> {\n\n pub local_store_directory: &'a Path,\n\n pub connector: Box<dyn Connector>,\n\n pub image_timeout: Option<Duration>,\n\n pub compress_timeout: Option<Duration>,\n\n pub compression: Compression,\n\n}\n\n\n\nimpl<'a> MemoryAcquirer<'a> {\n\n pub fn psexec32(\n\n remote_computer: Computer,\n\n local_store_directory: &'a Path,\n\n no_7zip: bool,\n\n remote_temp_storage: PathBuf,\n", "file_path": "src/memory_acquirer.rs", "rank": 94, "score": 29.570600513861788 }, { "content": "pub mod 
connector;\n\n\n\npub use self::connector::*;\n\n\n\npub mod local;\n\n\n\npub use self::local::*;\n\n\n\npub mod psexec;\n\n\n\npub use self::psexec::*;\n\n\n\npub mod wmi;\n\n\n\npub use self::wmi::*;\n\n\n\npub mod psremote;\n\n\n\npub use self::psremote::*;\n\n\n", "file_path": "src/remote/mod.rs", "rank": 95, "score": 27.727338788678264 }, { "content": "use crate::remote::{Computer, Connector, Command, PsExec, PsRemote, Ssh, Rdp, Wmi, Local};\n\nuse std::path::{Path, PathBuf};\n\nuse std::fs::File;\n\nuse crate::command_utils::parse_command;\n\nuse std::time::Duration;\n\n\n\npub struct CommandRunner<'a> {\n\n local_store_directory: &'a Path,\n\n pub(crate) connector: Box<dyn Connector>,\n\n run_implicit: bool,\n\n}\n\n\n\nimpl<'a> CommandRunner<'a> {\n\n pub fn psexec(\n\n remote_computer: Computer,\n\n local_store_directory: &'a Path,\n\n remote_temp_storage: PathBuf,\n\n custom_share_folder: Option<String>\n\n ) -> CommandRunner<'a> {\n\n CommandRunner {\n", "file_path": "src/command_runner.rs", "rank": 96, "score": 25.888753035086665 }, { "content": "use std::path::{Path, PathBuf};\n\nuse crate::remote::{Computer, Connector, PsExec, PsRemote, Rdp, Wmi, Compression, Local, RevShareConnector};\n\nuse std::time::Duration;\n\nuse crate::large_evidence_acquirer::LargeEvidenceAcquirer;\n\n\n\npub struct EventsAcquirer<'a> {\n\n store_directory: &'a Path,\n\n connector: Box<dyn Connector>,\n\n\n\n application_event_logs_command: Vec<String>,\n\n system_event_logs_command: Vec<String>,\n\n\n\n compress_timeout: Option<Duration>,\n\n compression: Compression,\n\n}\n\n\n\nimpl<'a> EventsAcquirer<'a> {\n\n pub fn new(\n\n store_directory: &'a Path,\n\n connector: Box<dyn Connector>,\n", "file_path": "src/events_acquirer.rs", "rank": 97, "score": 24.99703365072525 }, { "content": "use std::path::{Path, PathBuf};\n\nuse crate::remote::{Computer, Connector, PsExec, PsRemote, Rdp, Wmi, Compression, Local, RevShareConnector};\n\nuse std::time::Duration;\n\nuse 
crate::large_evidence_acquirer::LargeEvidenceAcquirer;\n\n\n\npub struct RegistryAcquirer<'a> {\n\n store_directory: &'a Path,\n\n connector: Box<dyn Connector>,\n\n\n\n registry_hklm_command: Vec<String>,\n\n registry_hkcu_command: Vec<String>,\n\n registry_hkcr_command: Vec<String>,\n\n registry_hku_command: Vec<String>,\n\n registry_hkcc_command: Vec<String>,\n\n\n\n compress_timeout: Option<Duration>,\n\n compression: Compression,\n\n}\n\n\n\nimpl<'a> RegistryAcquirer<'a> {\n", "file_path": "src/registry_acquirer.rs", "rank": 98, "score": 23.11587161761459 }, { "content": " }\n\n }\n\n\n\n pub fn wmi(\n\n remote_computer: Computer,\n\n local_store_directory: &'a Path,\n\n timeout: Duration,\n\n compress_timeout: Duration,\n\n no_7zip: bool,\n\n remote_temp_storage: PathBuf,\n\n ) -> MemoryAcquirer<'a> {\n\n MemoryAcquirer {\n\n local_store_directory,\n\n connector: Box::new(Wmi { computer: remote_computer.clone(), remote_temp_storage }),\n\n image_timeout: Some(timeout),\n\n compress_timeout: Some(compress_timeout),\n\n compression: if no_7zip { Compression::No } else { Compression::YesSplit },\n\n }\n\n }\n\n\n", "file_path": "src/memory_acquirer.rs", "rank": 99, "score": 21.37912923264569 } ]
Rust
rulex-lib/src/reference.rs
Aloso/rulex
f56a448003e95283391d1813abe0b9f495cd5dea
use crate::{ compile::{CompileResult, CompileState}, error::{CompileErrorKind, Feature, ParseError}, features::RulexFeatures, options::{CompileOptions, ParseOptions, RegexFlavor}, regex::Regex, span::Span, }; #[derive(Clone, Copy, PartialEq, Eq)] pub(crate) struct Reference<'i> { pub(crate) target: ReferenceTarget<'i>, pub(crate) span: Span, } #[derive(Clone, Copy, PartialEq, Eq)] #[non_exhaustive] pub(crate) enum ReferenceTarget<'i> { Named(&'i str), Number(u32), Relative(i32), } #[derive(Clone, Copy, PartialEq, Eq)] enum ReferenceDirection { Backwards, Forwards, } impl<'i> Reference<'i> { pub(crate) fn new(target: ReferenceTarget<'i>, span: Span) -> Self { Reference { target, span } } pub(crate) fn compile( &self, options: CompileOptions, state: &mut CompileState, ) -> CompileResult<'i> { let (direction, number) = match self.target { ReferenceTarget::Named(name) => match state.used_names.get(name) { Some(&n) => { let direction = if n >= state.next_idx { ReferenceDirection::Forwards } else { ReferenceDirection::Backwards }; (direction, n) } None => { return Err( CompileErrorKind::UnknownReferenceName(name.to_string()).at(self.span) ); } }, ReferenceTarget::Number(idx) => { let direction = if idx > 99 { return Err(CompileErrorKind::HugeReference.at(self.span)); } else if idx > state.groups_count { return Err(CompileErrorKind::UnknownReferenceNumber(idx as i32).at(self.span)); } else if idx >= state.next_idx { ReferenceDirection::Forwards } else { ReferenceDirection::Backwards }; (direction, idx) } ReferenceTarget::Relative(offset) => { let direction = if offset >= 0 { ReferenceDirection::Forwards } else { ReferenceDirection::Backwards }; let num = match offset { 0 => { return Err( CompileErrorKind::Other("Relative references can't be 0").at(self.span) ) } i32::MIN..=-1 => offset + (state.next_idx as i32), 1..=i32::MAX => offset + (state.next_idx as i32) - 1, }; if num <= 0 || (num as u32) > state.groups_count { return 
Err(CompileErrorKind::UnknownReferenceNumber(num).at(self.span)); } (direction, num as u32) } }; match options.flavor { RegexFlavor::Rust => Err(CompileErrorKind::Unsupported( if direction == ReferenceDirection::Backwards { Feature::Backreference } else { Feature::ForwardReference }, options.flavor, ) .at(self.span)), RegexFlavor::JavaScript if direction == ReferenceDirection::Forwards => { Err(CompileErrorKind::Unsupported(Feature::ForwardReference, options.flavor) .at(self.span)) } _ => Ok(Regex::Reference(RegexReference { number })), } } pub(crate) fn validate(&self, options: &ParseOptions) -> Result<(), ParseError> { options.allowed_features.require(RulexFeatures::REFERENCES, self.span) } } #[cfg(feature = "dbg")] impl std::fmt::Debug for Reference<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self.target { ReferenceTarget::Named(n) => write!(f, "::{}", n), ReferenceTarget::Number(i) => write!(f, "::{}", i), ReferenceTarget::Relative(o) => write!(f, "::{}{}", if o < 0 { '-' } else { '+' }, o), } } } #[cfg_attr(feature = "dbg", derive(Debug))] pub(crate) struct RegexReference { number: u32, } impl RegexReference { pub(crate) fn codegen(&self, buf: &mut String, _: RegexFlavor) { use std::fmt::Write; debug_assert!(self.number <= 99); write!(buf, "\\{}", self.number).unwrap(); } }
use crate::{ compile::{CompileResult, CompileState}, error::{CompileErrorKind, Feature, ParseError}, features::RulexFeatures, options::{CompileOptions, ParseOptions, RegexFlavor}, regex::Regex, span::Span, }; #[derive(Clone, Copy, PartialEq, Eq)] pub(crate) struct Reference<'i> { pub(crate) target: ReferenceTarget<'i>, pub(crate) span: Span, } #[derive(Clone, Copy, PartialEq, Eq)] #[non_exhaustive] pub(crate) enum ReferenceTarget<'i> { Named(&'i str), Number(u32), Relative(i32), } #[derive(Clone, Copy, PartialEq, Eq)] enum ReferenceDirection { Backwards, Forwards, } impl<'i> Reference<'i> { pub(crate) fn new(target: ReferenceTarget<'i>, span: Span) -> Self { Reference { target, span } } pub(crate) fn compile( &self, options: CompileOptions, state: &mut CompileState, ) -> CompileResult<'i> { let (direction, number) = match self.target { ReferenceTarget::Named(name) => match state.used_names.get(name) { Some(&n) => { let direction = if n >= state.next_idx { ReferenceDirection::Forwards } else { ReferenceDirection::Backwards }; (direction, n) } None => { return Err( CompileErrorKind::UnknownReferenceName(name.to_string()).at(self.span) ); } }, ReferenceTarget::Number(idx) => { let direction = if idx > 99 { return Err(CompileErrorKind::HugeReference.at(self.span)); } else if idx > state.groups_count { return Err(CompileErrorKind::UnknownReferenceNumber(idx as i32).at(self.span)); } else if idx >= state.next_idx { ReferenceDirection::Forwards } else { ReferenceDirection::Backwards }; (direction, idx) } ReferenceTarget::Relative(offset) => { let direction = if offset >= 0 { ReferenceDirection::Forwards } else { ReferenceDirection::Backwards }; let num = match offset { 0 => { return
} i32::MIN..=-1 => offset + (state.next_idx as i32), 1..=i32::MAX => offset + (state.next_idx as i32) - 1, }; if num <= 0 || (num as u32) > state.groups_count { return Err(CompileErrorKind::UnknownReferenceNumber(num).at(self.span)); } (direction, num as u32) } }; match options.flavor { RegexFlavor::Rust => Err(CompileErrorKind::Unsupported( if direction == ReferenceDirection::Backwards { Feature::Backreference } else { Feature::ForwardReference }, options.flavor, ) .at(self.span)), RegexFlavor::JavaScript if direction == ReferenceDirection::Forwards => { Err(CompileErrorKind::Unsupported(Feature::ForwardReference, options.flavor) .at(self.span)) } _ => Ok(Regex::Reference(RegexReference { number })), } } pub(crate) fn validate(&self, options: &ParseOptions) -> Result<(), ParseError> { options.allowed_features.require(RulexFeatures::REFERENCES, self.span) } } #[cfg(feature = "dbg")] impl std::fmt::Debug for Reference<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self.target { ReferenceTarget::Named(n) => write!(f, "::{}", n), ReferenceTarget::Number(i) => write!(f, "::{}", i), ReferenceTarget::Relative(o) => write!(f, "::{}{}", if o < 0 { '-' } else { '+' }, o), } } } #[cfg_attr(feature = "dbg", derive(Debug))] pub(crate) struct RegexReference { number: u32, } impl RegexReference { pub(crate) fn codegen(&self, buf: &mut String, _: RegexFlavor) { use std::fmt::Write; debug_assert!(self.number <= 99); write!(buf, "\\{}", self.number).unwrap(); } }
Err( CompileErrorKind::Other("Relative references can't be 0").at(self.span) )
call_expression
[ { "content": "fn get_backslash_help(str: &str) -> Option<String> {\n\n assert!(str.starts_with('\\\\'));\n\n let str = &str[1..];\n\n let mut iter = str.chars();\n\n\n\n Some(match iter.next() {\n\n Some('b') => \"Replace `\\\\b` with `%` to match a word boundary\".into(),\n\n Some('B') => \"Replace `\\\\B` with `!%` to match a place without a word boundary\".into(),\n\n Some('A') => \"Replace `\\\\A` with `<%` to match the start of the string\".into(),\n\n Some('z') => \"Replace `\\\\z` with `%>` to match the end of the string\".into(),\n\n Some('Z') => \"\\\\Z is not supported. Use `%>` to match the end of the string. \\\n\n Note, however, that `%>` doesn't match the position before the final newline.\"\n\n .into(),\n\n Some('N') => \"Replace `\\\\N` with `[.]`\".into(),\n\n Some('X') => \"Replace `\\\\X` with `Grapheme`\".into(),\n\n Some('R') => \"Replace `\\\\R` with `(r n | v)`\".into(),\n\n Some('D') => \"Replace `\\\\D` with `[!d]`\".into(),\n\n Some('W') => \"Replace `\\\\W` with `[!w]`\".into(),\n\n Some('S') => \"Replace `\\\\S` with `[!s]`\".into(),\n\n Some(c @ ('a' | 'e' | 'f' | 'n' | 'r' | 't' | 'h' | 'v' | 'd' | 'w' | 's')) => {\n\n format!(\"Replace `\\\\{c}` with `[{c}]`\")\n\n }\n\n _ => return None,\n\n })\n\n}\n\n\n", "file_path": "rulex-lib/src/error/diagnostics.rs", "rank": 0, "score": 139110.39428537886 }, { "content": "fn get_backslash_help_k(str: &str) -> Option<String> {\n\n assert!(str.starts_with(\"\\\\k<\"));\n\n let name_len = str[3..].chars().take_while(|&c| c != '>').count();\n\n let name = &str[3..3 + name_len];\n\n Some(format!(\"Replace `\\\\k<{name}>` with `::{name}`\"))\n\n}\n", "file_path": "rulex-lib/src/error/diagnostics.rs", "rank": 1, "score": 139110.39428537886 }, { "content": "fn get_backslash_help_unicode(str: &str) -> Option<String> {\n\n let hex_len = str[3..].chars().take_while(|c| c.is_ascii_hexdigit()).count();\n\n let hex = &str[3..3 + hex_len];\n\n Some(format!(\"Try `U+{hex}` instead\"))\n\n}\n\n\n", 
"file_path": "rulex-lib/src/error/diagnostics.rs", "rank": 2, "score": 136806.82084680093 }, { "content": "fn get_backslash_help_x2(str: &str) -> Option<String> {\n\n assert!(str.starts_with(\"\\\\x\"));\n\n let hex = &str[2..4];\n\n Some(format!(\"Try `U+{hex}` instead\"))\n\n}\n\n\n", "file_path": "rulex-lib/src/error/diagnostics.rs", "rank": 3, "score": 136806.82084680093 }, { "content": "fn get_backslash_help_u4(str: &str) -> Option<String> {\n\n assert!(str.starts_with(\"\\\\u\"));\n\n let hex = &str[2..6];\n\n Some(format!(\"Try `U+{hex}` instead\"))\n\n}\n\n\n", "file_path": "rulex-lib/src/error/diagnostics.rs", "rank": 4, "score": 136806.82084680093 }, { "content": "fn get_special_group_help(str: &str) -> Option<String> {\n\n assert!(str.starts_with(\"(?\"));\n\n let str = &str[2..];\n\n let mut iter = str.chars();\n\n\n\n Some(match (iter.next(), iter.next()) {\n\n (Some(':'), _) => \"Non-capturing groups are just parentheses: `(...)`. \\\n\n Capturing groups use the `:(...)` syntax.\"\n\n .into(),\n\n (Some('P'), Some('<')) => {\n\n let str = &str[2..];\n\n let rest = str.trim_start_matches(char::is_alphanumeric);\n\n let name = &str[..str.len() - rest.len()];\n\n format!(\n\n \"Named capturing groups use the `:name(...)` syntax. 
Try `:{name}(...)` instead\"\n\n )\n\n }\n\n (Some('>'), _) => \"Atomic capturing groups are not supported\".into(),\n\n (Some('|'), _) => \"Branch reset groups are not supported\".into(),\n\n (Some('('), _) => \"Branch reset groups are not supported\".into(),\n", "file_path": "rulex-lib/src/error/diagnostics.rs", "rank": 5, "score": 136806.82084680093 }, { "content": "fn find_unescaped_quote(input: &str) -> Option<usize> {\n\n let mut s = input;\n\n\n\n loop {\n\n match s.find(|c| c == '\\\\' || c == '\"') {\n\n Some(n) => {\n\n if s.as_bytes()[n] == b'\"' {\n\n return Some(n + (input.len() - s.len()));\n\n } else if n + 2 <= s.len() {\n\n s = &s[n + 2..];\n\n } else {\n\n return None;\n\n }\n\n }\n\n None => return None,\n\n }\n\n }\n\n}\n", "file_path": "rulex-lib/src/parse/tokenize.rs", "rank": 7, "score": 121776.58791804941 }, { "content": "pub fn compile(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"compile\");\n\n\n\n for &(sample_name, sample) in SAMPLES {\n\n group.throughput(Throughput::Bytes(sample.len() as u64));\n\n group.bench_function(sample_name, |b| {\n\n let rulex = Rulex::parse(black_box(sample), Default::default()).unwrap();\n\n b.iter(|| black_box(&rulex).compile(ruby()).unwrap())\n\n });\n\n }\n\n}\n\n\n", "file_path": "benchmark/benches/main.rs", "rank": 8, "score": 121395.27816755036 }, { "content": "fn process_content<'a>(content: &'a str, path: &Path) -> (&'a str, &'a str, Options) {\n\n let (mut input, expected) = content.split_once(\"\\n-----\").unwrap();\n\n let expected = expected.trim_start_matches('-').trim_start_matches('\\n');\n\n\n\n let options = if input.starts_with(\"#!\") {\n\n let (first_line, new_input) = input.split_once('\\n').unwrap_or_default();\n\n input = new_input;\n\n Options::parse(first_line, path)\n\n } else {\n\n Options::default()\n\n };\n\n (input, expected, options)\n\n}\n\n\n", "file_path": "rulex-lib/tests/it/files.rs", "rank": 9, "score": 119210.85751807797 }, { "content": "fn ruby() -> 
CompileOptions {\n\n CompileOptions { flavor: RegexFlavor::Ruby }\n\n}\n\n\n", "file_path": "benchmark/benches/main.rs", "rank": 10, "score": 114211.49175174313 }, { "content": "fn filter_matches(filter: &str, path: &Path) -> bool {\n\n if filter.is_empty() {\n\n return true;\n\n }\n\n let path = path.to_string_lossy();\n\n path.contains(filter)\n\n}\n\n\n", "file_path": "rulex-lib/tests/it/main.rs", "rank": 11, "score": 106949.76475315526 }, { "content": "fn strip_first_last(s: &str) -> &str {\n\n &s[1..s.len() - 1]\n\n}\n\n\n", "file_path": "rulex-lib/src/parse/parsers.rs", "rank": 12, "score": 103860.09961252211 }, { "content": "fn err<'i, 'b, T>(\n\n mut error_fn: impl FnMut() -> ParseErrorKind,\n\n) -> impl FnMut(Input<'i, 'b>) -> IResult<Input<'i, 'b>, T, ParseError> {\n\n move |input| Err(nom::Err::Error(error_fn().at(input.span())))\n\n}\n", "file_path": "rulex-lib/src/parse/parsers.rs", "rank": 13, "score": 93344.44732305064 }, { "content": "struct Options {\n\n flavor: RegexFlavor,\n\n ignore: bool,\n\n expected_outcome: Outcome,\n\n}\n\n\n\nimpl Default for Options {\n\n fn default() -> Self {\n\n Self { flavor: RegexFlavor::Pcre, ignore: false, expected_outcome: Outcome::Success }\n\n }\n\n}\n\n\n\nimpl Options {\n\n fn parse(line: &str, path: &Path) -> Self {\n\n let mut result = Options::default();\n\n\n\n for part in line.trim_start_matches(\"#!\").split(',') {\n\n let part = part.trim();\n\n let (key, value) = part.split_once('=').unwrap_or((part, \"\"));\n\n match key {\n", "file_path": "rulex-lib/tests/it/files.rs", "rank": 14, "score": 90475.2189593889 }, { "content": "pub fn range(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"range\");\n\n group.plot_config(PlotConfiguration::default().summary_scale(AxisScale::Logarithmic));\n\n\n\n for size in 1..=15 {\n\n group.throughput(Throughput::Elements(size));\n\n group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, &size| {\n\n let max = \"3458709621\".repeat(((size + 9) 
/ 10) as usize);\n\n let max = &max[..size as usize];\n\n let input = format!(\"range '0'-'{max}'\");\n\n let rulex = Rulex::parse(black_box(&input), Default::default()).unwrap();\n\n\n\n b.iter(|| black_box(&rulex).compile(Default::default()).unwrap())\n\n });\n\n }\n\n}\n\n\n", "file_path": "benchmark/benches/main.rs", "rank": 15, "score": 90323.13843821529 }, { "content": "pub fn parse(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"parse\");\n\n\n\n for &(sample_name, sample) in SAMPLES {\n\n group.throughput(Throughput::Bytes(sample.len() as u64));\n\n group.bench_function(sample_name, |b| {\n\n b.iter(|| Rulex::parse(black_box(sample), Default::default()).unwrap())\n\n });\n\n }\n\n}\n\n\n", "file_path": "benchmark/benches/main.rs", "rank": 16, "score": 90323.13843821529 }, { "content": "pub fn benches(c: &mut Criterion) {\n\n parse(c);\n\n compile(c);\n\n range(c);\n\n}\n\n\n", "file_path": "benchmark/benches/main.rs", "rank": 17, "score": 90323.13843821529 }, { "content": "fn respan<T: Into<TokenTree>>(t: T, span: Span) -> TokenTree {\n\n let mut t = t.into();\n\n t.set_span(span);\n\n t\n\n}\n", "file_path": "rulex-macro/src/diagnostic.rs", "rank": 18, "score": 88873.99761950897 }, { "content": "fn strip_input(input: &str) -> String {\n\n input\n\n .lines()\n\n .filter(|l| {\n\n let l = l.trim_start();\n\n !l.is_empty() && !l.starts_with('#')\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "rulex-lib/tests/it/files.rs", "rank": 19, "score": 88395.6835277991 }, { "content": "fn from_str<T: FromStr>(s: &str) -> Result<T, ParseErrorKind> {\n\n str::parse(s).map_err(|_| ParseErrorKind::Number(NumberError::TooLarge))\n\n}\n\n\n", "file_path": "rulex-lib/src/parse/parsers.rs", "rank": 20, "score": 87207.63166316545 }, { "content": "fn compile(\n\n input: &str,\n\n debug: bool,\n\n flavor: Option<Flavor>,\n\n no_new_line: bool,\n\n) -> miette::Result<()> {\n\n let parse_options = ParseOptions { max_range_size: 12, ..ParseOptions::default() };\n\n let 
parsed = Rulex::parse(input, parse_options)\n\n .map_err(|err| Diagnostic::from_parse_error(err, input))?;\n\n\n\n if debug {\n\n eprintln!(\"======================== debug ========================\");\n\n eprintln!(\"{parsed:#?}\\n\");\n\n }\n\n\n\n let compile_options = CompileOptions { flavor: flavor.unwrap_or(Flavor::Pcre).into() };\n\n let compiled = parsed\n\n .compile(compile_options)\n\n .map_err(|err| Diagnostic::from_compile_error(err, input))?;\n\n\n\n if no_new_line {\n\n print!(\"{compiled}\");\n\n io::stdout().flush().unwrap();\n\n } else {\n\n println!(\"{compiled}\");\n\n }\n\n Ok(())\n\n}\n", "file_path": "rulex-bin/src/main.rs", "rank": 21, "score": 87049.46254088837 }, { "content": "fn get_flavor(item: Option<TokenTree>) -> Result<RegexFlavor, Error> {\n\n Ok(match item {\n\n Some(TokenTree::Ident(id)) => match id.to_string().as_str() {\n\n \"DotNet\" => RegexFlavor::DotNet,\n\n \"Java\" => RegexFlavor::Java,\n\n \"JavaScript\" => RegexFlavor::JavaScript,\n\n \"Pcre\" => RegexFlavor::Pcre,\n\n \"Python\" => RegexFlavor::Python,\n\n \"Ruby\" => RegexFlavor::Ruby,\n\n \"Rust\" => RegexFlavor::Rust,\n\n s => bail!(\n\n \"Expected one of: DotNet, Java, JavaScript, Pcre, Python, Ruby, Rust\\nGot: {s}\",\n\n id.span()\n\n ),\n\n },\n\n Some(tt) => bail!(\"Unexpected token `{tt}`\", tt.span()),\n\n None => bail!(\"Expected identifier\"),\n\n })\n\n}\n", "file_path": "rulex-macro/src/lib.rs", "rank": 22, "score": 86378.32202020763 }, { "content": "fn parse_quoted_text(input: &str) -> Result<Cow<'_, str>, ParseErrorKind> {\n\n Ok(match input.as_bytes()[0] {\n\n b'\"' => {\n\n let mut s = strip_first_last(input);\n\n let mut buf = String::new();\n\n\n\n loop {\n\n let mut chars = s.chars();\n\n match chars.next() {\n\n Some('\\\\') => match chars.next() {\n\n Some('\\\\') => {\n\n buf.push('\\\\');\n\n s = &s[1..];\n\n }\n\n Some('\"') => {\n\n buf.push('\"');\n\n s = &s[1..];\n\n }\n\n _ => {\n\n return Err(ParseErrorKind::InvalidEscapeInStringAt(\n", 
"file_path": "rulex-lib/src/parse/parsers.rs", "rank": 23, "score": 85551.93815322642 }, { "content": "fn catch_panics<R>(f: impl Fn() -> R + UnwindSafe) -> Result<R, Option<String>> {\n\n catch_unwind(f).map_err(|err| {\n\n err.downcast_ref::<String>()\n\n .map(ToOwned::to_owned)\n\n .or_else(|| err.downcast_ref::<&str>().map(|s| s.to_string()))\n\n })\n\n}\n", "file_path": "rulex-lib/tests/it/files.rs", "rank": 24, "score": 80386.2406413287 }, { "content": "#[derive(PartialEq, Eq)]\n\nenum Rule {\n\n Empty,\n\n Class(Class),\n\n Repeat(Box<Repeat>),\n\n Alt(Alt),\n\n}\n\n\n\n#[cfg(FALSE)]\n\nimpl std::fmt::Debug for Rule {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n Self::Empty => write!(f, \"Empty\"),\n\n Self::Class(Class { start, end }) => write!(f, \"[{start}-{end}]\"),\n\n Self::Repeat(r) => {\n\n if f.alternate() {\n\n write!(f, \"{:#?}{{{}, {}}}\", r.rule, r.min, r.max)\n\n } else {\n\n write!(f, \"{:?}{{{}, {}}}\", r.rule, r.min, r.max)\n\n }\n\n }\n", "file_path": "rulex-lib/src/range.rs", "rank": 25, "score": 57506.616403200875 }, { "content": "#[derive(Clone, Copy)]\n\nenum Outcome {\n\n Success,\n\n Error,\n\n}\n\n\n\nimpl Outcome {\n\n fn of(self, inner: String) -> Result<String, String> {\n\n match self {\n\n Outcome::Success => Ok(inner),\n\n Outcome::Error => Err(inner),\n\n }\n\n }\n\n}\n\n\n\npub(crate) fn test_file(content: &str, path: &Path, args: &Args) -> TestResult {\n\n let (input, expected, options) = process_content(content, path);\n\n\n\n if options.ignore && !args.include_ignored {\n\n return TestResult::Ignored;\n\n }\n", "file_path": "rulex-lib/tests/it/files.rs", "rank": 26, "score": 57506.024757728446 }, { "content": "#[derive(Clone, Debug, ArgEnum)]\n\n#[clap(rename_all = \"lower\")]\n\nenum Flavor {\n\n Pcre,\n\n Python,\n\n Java,\n\n #[clap(alias = \"js\")]\n\n JavaScript,\n\n #[clap(alias = \".net\")]\n\n DotNet,\n\n Ruby,\n\n Rust,\n\n}\n\n\n\nimpl From<Flavor> for RegexFlavor 
{\n\n fn from(f: Flavor) -> Self {\n\n match f {\n\n Flavor::Pcre => RegexFlavor::Pcre,\n\n Flavor::Python => RegexFlavor::Python,\n\n Flavor::Java => RegexFlavor::Java,\n\n Flavor::JavaScript => RegexFlavor::JavaScript,\n\n Flavor::DotNet => RegexFlavor::DotNet,\n\n Flavor::Ruby => RegexFlavor::Ruby,\n\n Flavor::Rust => RegexFlavor::Rust,\n\n }\n\n }\n\n}\n\n\n", "file_path": "rulex-bin/src/main.rs", "rank": 27, "score": 57505.794650960765 }, { "content": "#[derive(Debug, miette::Diagnostic, thiserror::Error)]\n\nenum MyError {\n\n #[diagnostic(code(error::io))]\n\n #[error(\"{}\\nFile: {}\", .error, .path.display())]\n\n Io { error: io::Error, path: PathBuf },\n\n\n\n #[error(\"{}\", .0)]\n\n #[diagnostic(code(error::other))]\n\n Other(String),\n\n}\n\n\n", "file_path": "rulex-bin/src/main.rs", "rank": 28, "score": 57501.75953649679 }, { "content": "#[derive(PartialEq, Eq, Clone, Copy)]\n\nstruct Class {\n\n start: u8,\n\n end: u8,\n\n}\n\n\n", "file_path": "rulex-lib/src/range.rs", "rank": 29, "score": 57060.03232135027 }, { "content": "#[derive(PartialEq, Eq)]\n\nstruct Repeat {\n\n rule: Rule,\n\n min: usize,\n\n max: usize,\n\n}\n\n\n\nimpl Repeat {\n\n fn to_regex(&self) -> Regex<'static> {\n\n Regex::Repetition(Box::new(RegexRepetition::new(\n\n self.rule.to_regex(),\n\n RepetitionKind::try_from((self.min as u32, Some(self.max as u32))).unwrap(),\n\n RegexQuantifier::Greedy,\n\n )))\n\n }\n\n}\n\n\n", "file_path": "rulex-lib/src/range.rs", "rank": 30, "score": 57056.028976721675 }, { "content": "#[derive(Parser, Debug)]\n\n#[clap(name = \"rulex\")]\n\n#[clap(author, version, about, long_about = None)]\n\nstruct Args {\n\n /// Rulex expression to compile\n\n input: Option<String>,\n\n /// File containing the rulex expression to compile\n\n #[clap(short, long, parse(from_os_str), value_name = \"FILE\")]\n\n path: Option<PathBuf>,\n\n\n\n /// Show debug information\n\n #[clap(short, long)]\n\n debug: bool,\n\n\n\n /// Regex flavor\n\n #[clap(long, short, 
arg_enum, ignore_case(true))]\n\n flavor: Option<Flavor>,\n\n\n\n /// Does not print a new-line at the end of the compiled regular expression\n\n #[clap(long, short)]\n\n no_new_line: bool,\n\n}\n\n\n\n/// Regex flavor\n", "file_path": "rulex-bin/src/main.rs", "rank": 31, "score": 57054.98218443094 }, { "content": "struct Error {\n\n msg: String,\n\n span: Option<Span>,\n\n}\n\n\n\nimpl Error {\n\n fn new(msg: String, span: Span) -> Self {\n\n Error { msg, span: Some(span) }\n\n }\n\n\n\n fn from_msg(msg: String) -> Self {\n\n Error { msg, span: None }\n\n }\n\n}\n\n\n\nmacro_rules! bail {\n\n ($l:literal) => {\n\n return Err(Error::from_msg(format!($l)))\n\n };\n\n ($l:literal, $e:expr) => {\n\n return Err(Error::new(format!($l), $e))\n\n };\n\n ($e1:expr) => {\n\n return Err(Error::from_msg($e1))\n\n };\n\n ($e1:expr, $e2:expr) => {\n\n return Err(Error::new($e1, $e2))\n\n };\n\n}\n\n\n", "file_path": "rulex-macro/src/lib.rs", "rank": 32, "score": 57051.17211001759 }, { "content": "struct Error;\n\n\n", "file_path": "rulex-lib/src/range.rs", "rank": 33, "score": 57051.17211001759 }, { "content": "struct Args {\n\n include_ignored: bool,\n\n filter: String,\n\n\n\n fuzz_ranges: bool,\n\n fuzz_start: usize,\n\n fuzz_step: usize,\n\n thoroughness: usize,\n\n}\n\n\n\nimpl Args {\n\n fn parse() -> Self {\n\n let mut include_ignored = false;\n\n let mut filter = String::new();\n\n let mut fuzz_ranges = false;\n\n let mut fuzz_start = 0;\n\n let mut fuzz_step = 1;\n\n let mut thoroughness = 40;\n\n\n\n for arg in std::env::args().skip(1) {\n", "file_path": "rulex-lib/tests/it/main.rs", "rank": 34, "score": 57051.17211001759 }, { "content": "fn main() {\n\n let mut c = Criterion::default()\n\n .measurement_time(Duration::from_secs(3))\n\n .warm_up_time(Duration::from_secs(1))\n\n .configure_from_args();\n\n\n\n benches(&mut c);\n\n c.final_summary();\n\n}\n", "file_path": "benchmark/benches/main.rs", "rank": 35, "score": 54900.416619327916 }, { "content": "fn expect(\n\n 
iter: &mut Peekable<impl Iterator<Item = TokenTree>>,\n\n pred: fn(&TokenTree) -> bool,\n\n error_msg: impl Into<String>,\n\n) -> Result<(), Error> {\n\n match iter.peek() {\n\n Some(tt) if pred(tt) => {\n\n iter.next();\n\n Ok(())\n\n }\n\n Some(tt) => bail!(error_msg.into(), tt.span()),\n\n None => bail!(error_msg.into()),\n\n }\n\n}\n\n\n", "file_path": "rulex-macro/src/lib.rs", "rank": 36, "score": 53567.89674226755 }, { "content": "#[test]\n\nfn pcre() {\n\n const REGEX: &str = rulex!(\n\n #flavor = Pcre\n\n \"foo\" (!>> \"bar\")\n\n );\n\n\n\n assert_eq!(REGEX, \"foo(?!bar)\");\n\n}\n\n\n", "file_path": "rulex-macro/tests/test_macro.rs", "rank": 37, "score": 52334.847781814446 }, { "content": "#[test]\n\nfn rust() {\n\n const REGEX: &str = rulex! {\n\n // variables\n\n let number = '-'? [d]+;\n\n let op = [\"+-*/\"];\n\n number (op number)*\n\n };\n\n\n\n assert_eq!(REGEX, \"-?\\\\d+(?:[+\\\\-*/]-?\\\\d+)*\");\n\n}\n\n\n", "file_path": "rulex-macro/tests/test_macro.rs", "rank": 38, "score": 52334.847781814446 }, { "content": "fn test_dir_recursive(\n\n path: PathBuf,\n\n results: &mut Vec<(PathBuf, TestResult)>,\n\n tx: Sender<PathBuf>,\n\n args: &Args,\n\n) -> Result<(), io::Error> {\n\n let path = &path;\n\n if !path.exists() {\n\n return Err(io::Error::new(\n\n io::ErrorKind::NotFound,\n\n format!(\"file {:?} not found\", Blue(path)),\n\n ));\n\n }\n\n if path.is_dir() {\n\n for test in fs::read_dir(path)? 
{\n\n test_dir_recursive(test?.path(), results, tx.clone(), args)?;\n\n }\n\n Ok(())\n\n } else if path.is_file() {\n\n let mut content = std::fs::read_to_string(path)?;\n", "file_path": "rulex-lib/tests/it/main.rs", "rank": 39, "score": 51190.53243459467 }, { "content": "#[test]\n\nfn composite_tokens() {\n\n const REGEX: &str = rulex!(\n\n Start \"Test\" End\n\n );\n\n\n\n assert_eq!(REGEX, \"^Test$\");\n\n}\n", "file_path": "rulex-macro/tests/test_macro.rs", "rank": 40, "score": 51190.53243459467 }, { "content": "pub fn main() {\n\n match defer_main() {\n\n Ok(_) => {}\n\n Err(e) => {\n\n eprintln!(\"error: {e}\");\n\n process::exit(1);\n\n }\n\n }\n\n}\n\n\n", "file_path": "rulex-lib/tests/it/main.rs", "rank": 41, "score": 49803.08861290604 }, { "content": "/// Compiles a shorthand character class or Unicode category/script/block.\n\n///\n\n/// Refer to the [module-level documentation](self) for details about named\n\n/// character classes.\n\nfn named_class_to_regex(\n\n group: GroupName,\n\n negative: bool,\n\n flavor: RegexFlavor,\n\n span: Span,\n\n) -> CompileResult<'static> {\n\n Ok(match group {\n\n GroupName::Word => {\n\n if flavor == RegexFlavor::JavaScript {\n\n Regex::CharClass(RegexCharClass {\n\n negative,\n\n items: vec![\n\n RegexProperty::Other(OtherProperties::Alphabetic).negative_item(false),\n\n RegexProperty::Category(Category::Mark).negative_item(false),\n\n RegexProperty::Category(Category::Decimal_Number).negative_item(false),\n\n RegexProperty::Category(Category::Connector_Punctuation)\n\n .negative_item(false),\n\n ],\n\n })\n\n } else {\n", "file_path": "rulex-lib/src/char_class/mod.rs", "rank": 42, "score": 49142.81990245114 }, { "content": "fn named_class_to_regex_class_items(\n\n group: GroupName,\n\n negative: bool,\n\n flavor: RegexFlavor,\n\n span: Span,\n\n buf: &mut Vec<RegexClassItem>,\n\n) -> Result<(), CompileError> {\n\n match group {\n\n GroupName::Word => {\n\n if let RegexFlavor::JavaScript = flavor {\n\n if negative 
{\n\n return Err(\n\n CompileErrorKind::Unsupported(Feature::NegativeShorthandW, flavor).at(span)\n\n );\n\n }\n\n buf.push(RegexProperty::Other(OtherProperties::Alphabetic).negative_item(false));\n\n buf.push(RegexProperty::Category(Category::Mark).negative_item(false));\n\n buf.push(RegexProperty::Category(Category::Decimal_Number).negative_item(false));\n\n buf.push(\n\n RegexProperty::Category(Category::Connector_Punctuation).negative_item(false),\n", "file_path": "rulex-lib/src/char_class/mod.rs", "rank": 43, "score": 47333.14392898188 }, { "content": "#[derive(PartialEq, Eq)]\n\nstruct Alt(Vec<Vec<Rule>>);\n\n\n\nimpl Alt {\n\n fn to_regex(&self) -> Regex<'static> {\n\n Regex::Alternation(RegexAlternation::new(\n\n self.0\n\n .iter()\n\n .map(|v| {\n\n Regex::Group(RegexGroup::new(\n\n v.iter().map(|r| r.to_regex()).collect(),\n\n RegexCapture::None,\n\n ))\n\n })\n\n .collect(),\n\n ))\n\n }\n\n}\n\n\n\nimpl Class {\n\n fn to_regex(self) -> Regex<'static> {\n", "file_path": "rulex-lib/src/range.rs", "rank": 44, "score": 46573.27439846403 }, { "content": "struct Print(Result<String, String>);\n\n\n\nimpl fmt::Display for Print {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match &self.0 {\n\n Ok(s) => write!(f, \"{} /{s}/\", Green(\"OK\")),\n\n Err(s) => write!(f, \"{}: {s}\", Red(\"ERR\")),\n\n }\n\n }\n\n}\n", "file_path": "rulex-lib/tests/it/main.rs", "rank": 45, "score": 46568.41753175996 }, { "content": "pub fn main() -> miette::Result<()> {\n\n let args = Args::parse();\n\n\n\n match (args.input, args.path) {\n\n (Some(input), None) => compile(&input, args.debug, args.flavor, args.no_new_line)?,\n\n (None, Some(path)) => match std::fs::read_to_string(&path) {\n\n Ok(input) => compile(&input, args.debug, args.flavor, args.no_new_line)?,\n\n Err(error) => return Err(MyError::Io { error, path }.into()),\n\n },\n\n (None, None) if atty::isnt(Stream::Stdin) => {\n\n let mut buf = Vec::new();\n\n std::io::stdin().read_to_end(&mut 
buf).unwrap();\n\n\n\n match String::from_utf8(buf) {\n\n Ok(input) => compile(&input, args.debug, args.flavor, args.no_new_line)?,\n\n Err(e) => return Err(MyError::Other(format!(\"error parsing stdin: {e}\")).into()),\n\n }\n\n }\n\n (Some(_), Some(_)) => {\n\n return Err(MyError::Other(\"error: Can't provide an input and a path\".into()).into())\n\n }\n\n (None, None) => return Err(MyError::Other(\"error: No input provided\".into()).into()),\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "rulex-bin/src/main.rs", "rank": 46, "score": 44032.99656980309 }, { "content": "fn defer_main() -> Result<(), io::Error> {\n\n println!(\"\\nrunning integration tests\");\n\n\n\n let mut results = Vec::new();\n\n\n\n let args = Args::parse();\n\n if args.include_ignored {\n\n println!(\"{}\", Yellow(\"including ignored cases!\"));\n\n }\n\n\n\n let (tx, child) = timeout::timeout_thread();\n\n\n\n println!();\n\n let start = Instant::now();\n\n test_dir_recursive(\"./tests/testcases\".into(), &mut results, tx, &args)?;\n\n let elapsed = start.elapsed();\n\n println!();\n\n\n\n child.join().unwrap();\n\n\n", "file_path": "rulex-lib/tests/it/main.rs", "rank": 47, "score": 43039.6529827975 }, { "content": "struct ListWithoutBrackets<'a, T>(&'a [T]);\n\n\n\nimpl<T: core::fmt::Display> core::fmt::Display for ListWithoutBrackets<'_, T> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n for (i, item) in self.0.iter().enumerate() {\n\n if i > 0 {\n\n f.write_str(\", \")?;\n\n }\n\n write!(f, \"{}\", item)?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "rulex-lib/src/error/parse_error.rs", "rank": 48, "score": 42246.21674012669 }, { "content": "#[proc_macro]\n\npub fn rulex(items: TokenStream) -> TokenStream {\n\n let group = Group::new(Delimiter::None, items);\n\n let global_span = group.span();\n\n\n\n match rulex_impl(group.stream().into_iter()) {\n\n Ok(lit) => TokenTree::Literal(lit).into(),\n\n Err(Error { msg, span }) => {\n\n let span = 
span.unwrap_or(global_span);\n\n let msg = format!(\"error: {msg}\");\n\n diagnostic::error(&msg, span, span)\n\n }\n\n }\n\n}\n\n\n", "file_path": "rulex-macro/src/lib.rs", "rank": 49, "score": 39940.74898598592 }, { "content": "fn merge_and_optimize_alternatives(alternatives: Vec<Vec<Rule>>) -> Rule {\n\n let capacity = alternatives.len();\n\n let mut alternatives = alternatives.into_iter().fold(\n\n Vec::with_capacity(capacity),\n\n |mut acc: Vec<Vec<Rule>>, mut rules| {\n\n if let [this1, this2] = rules.as_slice() {\n\n if this1 == this2 {\n\n let rule = rules.pop().unwrap();\n\n rules[0] = rule.repeat(2, 2);\n\n } else if *this2 == Rule::Empty {\n\n rules.pop();\n\n }\n\n }\n\n\n\n match acc.last_mut() {\n\n Some(last) => {\n\n if let [Rule::Class(prev_class), prev] = last.as_mut_slice() {\n\n if let [Rule::Class(this_class), ref mut this2] = rules.as_mut_slice() {\n\n if prev == this2 {\n\n debug_assert!(prev_class.end + 1 == this_class.start);\n", "file_path": "rulex-lib/src/range.rs", "rank": 50, "score": 38038.40649778614 }, { "content": "fn try_map<'i, 'b, O1, O2, P, M, EM>(\n\n mut parser: P,\n\n mut map: M,\n\n err_kind: EM,\n\n) -> impl FnMut(Input<'i, 'b>) -> IResult<Input<'i, 'b>, O2, ParseError>\n\nwhere\n\n P: Parser<Input<'i, 'b>, O1, ParseError>,\n\n M: FnMut(O1) -> Result<O2, ParseErrorKind>,\n\n EM: Copy + FnOnce(ParseError) -> nom::Err<ParseError>,\n\n{\n\n move |input| {\n\n let span = input.span();\n\n let (rest, o1) = parser.parse(input)?;\n\n let o2 = map(o1).map_err(|e| err_kind(e.at(span)))?;\n\n Ok((rest, o2))\n\n }\n\n}\n\n\n", "file_path": "rulex-lib/src/parse/parsers.rs", "rank": 51, "score": 36329.73586991113 }, { "content": "fn try_map2<'i, 'b, O1, O2, P, M, EM>(\n\n mut parser: P,\n\n mut map: M,\n\n err_kind: EM,\n\n) -> impl FnMut(Input<'i, 'b>) -> IResult<Input<'i, 'b>, O2, ParseError>\n\nwhere\n\n P: Parser<Input<'i, 'b>, O1, ParseError>,\n\n M: FnMut(O1) -> Result<O2, ParseError>,\n\n EM: Copy + FnOnce(ParseError) -> 
nom::Err<ParseError>,\n\n{\n\n move |input| {\n\n let (rest, o1) = parser.parse(input)?;\n\n let o2 = map(o1).map_err(err_kind)?;\n\n Ok((rest, o2))\n\n }\n\n}\n\n\n", "file_path": "rulex-lib/src/parse/parsers.rs", "rank": 52, "score": 36329.73586991113 }, { "content": "use std::{\n\n fmt::{Debug, Display},\n\n ops::Range,\n\n};\n\n\n\n#[derive(Copy, Clone, PartialEq, Eq)]\n\npub struct Span {\n\n start: usize,\n\n end: usize,\n\n}\n\n\n\nimpl Span {\n\n pub(crate) fn new(start: usize, end: usize) -> Self {\n\n Span { start, end }\n\n }\n\n\n\n pub fn range(self) -> Range<usize> {\n\n self.start..self.end\n\n }\n\n\n", "file_path": "rulex-lib/src/span.rs", "rank": 53, "score": 34940.79689149321 }, { "content": " pub(crate) fn start(&self) -> Span {\n\n Span { start: self.start, end: self.start }\n\n }\n\n\n\n pub(crate) fn join(self, other: Span) -> Span {\n\n Span { start: usize::min(self.start, other.start), end: usize::max(self.end, other.end) }\n\n }\n\n}\n\n\n\nimpl From<Range<usize>> for Span {\n\n fn from(Range { start, end }: Range<usize>) -> Self {\n\n Span { start, end }\n\n }\n\n}\n\n\n\nimpl Default for Span {\n\n fn default() -> Self {\n\n Span { start: usize::MAX, end: 0 }\n\n }\n\n}\n", "file_path": "rulex-lib/src/span.rs", "rank": 54, "score": 34933.43214016711 }, { "content": "\n\nimpl Display for Span {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}..{}\", self.start, self.end)\n\n }\n\n}\n\n\n\nimpl Debug for Span {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"Span({}..{})\", self.start, self.end)\n\n }\n\n}\n", "file_path": "rulex-lib/src/span.rs", "rank": 55, "score": 34929.605684827875 }, { "content": "use std::fmt;\n\n\n\nuse crate::{\n\n error::{ParseError, ParseErrorKind, UnsupportedError},\n\n span::Span,\n\n};\n\n\n\n#[derive(Copy, Clone)]\n\npub struct RulexFeatures {\n\n bits: u16,\n\n}\n\n\n\nimpl fmt::Debug for RulexFeatures {\n\n fn fmt(&self, f: 
&mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"RulexFeatures\")\n\n .field(\"grapheme\", &self.supports(Self::GRAPHEME))\n\n .field(\"numbered_groups\", &self.supports(Self::NUMBERED_GROUPS))\n\n .field(\"named_groups\", &self.supports(Self::NAMED_GROUPS))\n\n .field(\"references\", &self.supports(Self::REFERENCES))\n\n .field(\"lazy_mode\", &self.supports(Self::LAZY_MODE))\n", "file_path": "rulex-lib/src/features.rs", "rank": 62, "score": 34890.560328671374 }, { "content": " self.bits |= bit;\n\n } else {\n\n self.bits &= bit ^ 0xFF_FF_u16;\n\n }\n\n }\n\n\n\n fn supports(&self, bit: u16) -> bool {\n\n (self.bits & bit) != 0\n\n }\n\n\n\n pub(super) fn require(&self, feature: u16, span: Span) -> Result<(), ParseError> {\n\n if self.supports(feature) {\n\n Ok(())\n\n } else {\n\n Err(ParseErrorKind::Unsupported(match feature {\n\n Self::GRAPHEME => UnsupportedError::Grapheme,\n\n Self::NUMBERED_GROUPS => UnsupportedError::NumberedGroups,\n\n Self::NAMED_GROUPS => UnsupportedError::NamedGroups,\n\n Self::REFERENCES => UnsupportedError::References,\n\n Self::LAZY_MODE => UnsupportedError::LazyMode,\n", "file_path": "rulex-lib/src/features.rs", "rank": 63, "score": 34883.7652817428 }, { "content": " | Self::LOOKBEHIND\n\n | Self::BOUNDARIES,\n\n }\n\n }\n\n}\n\n\n\nimpl RulexFeatures {\n\n pub(crate) const GRAPHEME: u16 = 1 << 0;\n\n pub(crate) const NUMBERED_GROUPS: u16 = 1 << 1;\n\n pub(crate) const NAMED_GROUPS: u16 = 1 << 2;\n\n pub(crate) const REFERENCES: u16 = 1 << 3;\n\n pub(crate) const LAZY_MODE: u16 = 1 << 4;\n\n pub(crate) const RANGES: u16 = 1 << 5;\n\n pub(crate) const VARIABLES: u16 = 1 << 6;\n\n pub(crate) const LOOKAHEAD: u16 = 1 << 7;\n\n pub(crate) const LOOKBEHIND: u16 = 1 << 8;\n\n pub(crate) const BOUNDARIES: u16 = 1 << 9;\n\n\n\n fn set_bit(&mut self, bit: u16, support: bool) {\n\n if support {\n", "file_path": "rulex-lib/src/features.rs", "rank": 64, "score": 34879.18268791211 }, { "content": " Self::RANGES => 
UnsupportedError::Ranges,\n\n Self::VARIABLES => UnsupportedError::Variables,\n\n Self::LOOKAHEAD => UnsupportedError::Lookahead,\n\n Self::LOOKBEHIND => UnsupportedError::Lookbehind,\n\n Self::BOUNDARIES => UnsupportedError::Boundaries,\n\n _ => panic!(\"Unknown feature `0x{feature:0x}`\"),\n\n })\n\n .at(span))\n\n }\n\n }\n\n\n\n /// Set support for `Grapheme`\n\n pub fn grapheme(&mut self, support: bool) -> Self {\n\n self.set_bit(Self::GRAPHEME, support);\n\n *self\n\n }\n\n\n\n /// Set support for numbered groups, e.g. `:('test')`\n\n pub fn numbered_groups(&mut self, support: bool) -> Self {\n\n self.set_bit(Self::NUMBERED_GROUPS, support);\n", "file_path": "rulex-lib/src/features.rs", "rank": 65, "score": 34879.060618304815 }, { "content": " .field(\"ranges\", &self.supports(Self::RANGES))\n\n .field(\"variables\", &self.supports(Self::VARIABLES))\n\n .field(\"lookahead\", &self.supports(Self::LOOKAHEAD))\n\n .field(\"lookbehind\", &self.supports(Self::LOOKBEHIND))\n\n .field(\"boundaries\", &self.supports(Self::BOUNDARIES))\n\n .finish()\n\n }\n\n}\n\n\n\nimpl Default for RulexFeatures {\n\n fn default() -> Self {\n\n Self {\n\n bits: Self::GRAPHEME\n\n | Self::NUMBERED_GROUPS\n\n | Self::NAMED_GROUPS\n\n | Self::REFERENCES\n\n | Self::LAZY_MODE\n\n | Self::RANGES\n\n | Self::VARIABLES\n\n | Self::LOOKAHEAD\n", "file_path": "rulex-lib/src/features.rs", "rank": 66, "score": 34874.35424034263 }, { "content": " *self\n\n }\n\n\n\n /// Set support for named groups, e.g. `:test('!')`\n\n pub fn named_groups(&mut self, support: bool) -> Self {\n\n self.set_bit(Self::NAMED_GROUPS, support);\n\n *self\n\n }\n\n\n\n /// Set support for references, e.g. `::-1` or `:foo() ::foo`\n\n pub fn references(&mut self, support: bool) -> Self {\n\n self.set_bit(Self::REFERENCES, support);\n\n *self\n\n }\n\n\n\n /// Set support for lazy mode, i.e. 
`enable lazy;`\n\n pub fn lazy_mode(&mut self, support: bool) -> Self {\n\n self.set_bit(Self::LAZY_MODE, support);\n\n *self\n\n }\n", "file_path": "rulex-lib/src/features.rs", "rank": 67, "score": 34874.33607370405 }, { "content": " pub fn lookbehind(&mut self, support: bool) -> Self {\n\n self.set_bit(Self::LOOKBEHIND, support);\n\n *self\n\n }\n\n\n\n /// Set support for boundaries, i.e. `%` and `!%`\n\n pub fn boundaries(&mut self, support: bool) -> Self {\n\n self.set_bit(Self::BOUNDARIES, support);\n\n *self\n\n }\n\n}\n", "file_path": "rulex-lib/src/features.rs", "rank": 68, "score": 34870.611458708 }, { "content": "\n\n /// Set support for ranges, e.g. `range '1'-'255'`\n\n pub fn ranges(&mut self, support: bool) -> Self {\n\n self.set_bit(Self::RANGES, support);\n\n *self\n\n }\n\n\n\n /// Set support for variables, e.g. `let x = 'hello' 'world'?;`\n\n pub fn variables(&mut self, support: bool) -> Self {\n\n self.set_bit(Self::VARIABLES, support);\n\n *self\n\n }\n\n\n\n /// Set support for lookahead, e.g. `>> 'test'`\n\n pub fn lookahead(&mut self, support: bool) -> Self {\n\n self.set_bit(Self::LOOKAHEAD, support);\n\n *self\n\n }\n\n\n\n /// Set support for lookbehind, e.g. 
`<< 'test'`\n", "file_path": "rulex-lib/src/features.rs", "rank": 69, "score": 34870.450473868455 }, { "content": "use std::collections::{HashMap, HashSet};\n\n\n\nuse crate::{error::CompileError, regex::Regex, repetition::RegexQuantifier, rule::Rule};\n\n\n\npub(crate) type CompileResult<'i> = Result<Regex<'i>, CompileError>;\n\n\n\n#[derive(Clone)]\n\npub(crate) struct CompileState<'c, 'i> {\n\n pub(crate) next_idx: u32,\n\n pub(crate) used_names: HashMap<String, u32>,\n\n pub(crate) groups_count: u32,\n\n\n\n pub(crate) default_quantifier: RegexQuantifier,\n\n pub(crate) variables: Vec<(&'i str, &'c Rule<'i>)>,\n\n pub(crate) current_vars: HashSet<usize>,\n\n}\n", "file_path": "rulex-lib/src/compile.rs", "rank": 70, "score": 34850.64771245734 }, { "content": "use crate::features::RulexFeatures;\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct ParseOptions {\n\n pub max_range_size: u8,\n\n pub allowed_features: RulexFeatures,\n\n}\n\n\n\nimpl Default for ParseOptions {\n\n fn default() -> Self {\n\n Self { max_range_size: 6, allowed_features: Default::default() }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Default)]\n\npub struct CompileOptions {\n\n pub flavor: RegexFlavor,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n", "file_path": "rulex-lib/src/options.rs", "rank": 71, "score": 34798.21914716696 }, { "content": "#[non_exhaustive]\n\npub enum RegexFlavor {\n\n Pcre,\n\n Python,\n\n Java,\n\n JavaScript,\n\n DotNet,\n\n Ruby,\n\n Rust,\n\n}\n\n\n\nimpl Default for RegexFlavor {\n\n fn default() -> Self {\n\n RegexFlavor::Pcre\n\n }\n\n}\n", "file_path": "rulex-lib/src/options.rs", "rank": 72, "score": 34775.47196931822 }, { "content": "fn rulex_impl(items: impl Iterator<Item = TokenTree>) -> Result<Literal, Error> {\n\n let mut iter = items.peekable();\n\n\n\n let found_hashtag =\n\n expect(&mut iter, |t| matches!(t, TokenTree::Punct(p) if p.as_char() == '#'), \"\");\n\n\n\n let flavor = if found_hashtag.is_ok() {\n\n expect(\n\n &mut 
iter,\n\n |t| matches!(t, TokenTree::Ident(id) if &id.to_string() == \"flavor\"),\n\n \"expected `flavor`\",\n\n )?;\n\n expect(\n\n &mut iter,\n\n |t| matches!(t, TokenTree::Punct(p) if p.as_char() == '='),\n\n \"expected `=`\",\n\n )?;\n\n\n\n get_flavor(iter.next())?\n\n } else {\n", "file_path": "rulex-macro/src/lib.rs", "rank": 73, "score": 33467.12143240453 }, { "content": "/// This generates a set of rules that exactly match a string containing a\n\n/// number in a certain range.\n\n///\n\n/// For example, `range(&[1,2,0], &[2,0,0], 0, 10)` matches \"120\", \"125\", \"150\",\n\n/// \"199\", \"200\", but not \"119\" or \"201\".\n\n///\n\n/// The generated regex is always optimal in terms of search performance.\n\n/// However, it might be somewhat bigger than a regex optimized for size instead\n\n/// of performance.\n\n///\n\n/// This algorithm has been extensively fuzzed, so you can trust its correctness\n\n/// even in rare edge cases. The fuzzer generates all possible ranges and\n\n/// validates them by matching a large number of test strings against them using\n\n/// the `regex` crate. It starts with smaller ranges and tries larger and larger\n\n/// ranges with all permutations (0-0, 0-1, 1-1, 0-2, 1-2, 2-2, 0-3, 1-3, 2-3,\n\n/// 3-3, ...). Run the fuzzer with `cargo test --test it -- --fuzz-ranges`.\n\n///\n\n/// ## How it works\n\n///\n\n/// Lower and upper bound of the range are passed to this function as slices\n\n/// containing individual digits.\n\n///\n\n/// We always look only at the first digit of each bound; these digits are\n\n/// called `ax` (from lower bound) and `bx` (from upper bound). For simplicity,\n\n/// we assume that the radix is 10 (decimal). For example:\n\n///\n\n/// ```no_test\n\n/// a = [4]\n\n/// b = [7, 0, 5]\n\n/// ```\n\n///\n\n/// This means we need a regex that matches a number between 10 and 799. 
By\n\n/// looking at the first digit, we can deduce:\n\n///\n\n/// - The number can't start with 0 (leading zeros aren't allowed)\n\n/// - The number can start with 1, 2 or 3, but must be followed 1 or 2 more\n\n/// digit in that case\n\n/// - The number can be 4, 5 or 6, and can be followed by 0, 1 or 2 more digits\n\n/// - If the number starts with 7, it can be followed by\n\n/// - nothing\n\n/// - a zero, and possibly a third digit that is at most 5\n\n/// - a digit greater than zero, if there is no third digit.\n\n/// - If the number starts with 8 or 9, it can be followed by at most 1 more\n\n/// digit.\n\n///\n\n/// This is implemented recursively. We always remove the first digit from the\n\n/// slices. We then create a number of alternatives, each starting with a\n\n/// different digit or range of digits:\n\n///\n\n/// 1. `0 ..= ax-1`\n\n/// 2. `ax`\n\n/// 3. `ax+1 ..= bx-1`\n\n/// 4. `bx`\n\n/// 5. `bx+1 ..= 9`\n\n///\n\n/// If `ax` and `bx` are identical, 3. and 4. are omitted; if they're\n\n/// consecutive numbers, 3. is omitted. If `ax` is 0 or `bx` is 9, 1. or 5. is\n\n/// omitted, respectively. If `ax` is bigger than `bx`, the alternatives are a\n\n/// bit different, and this is important later:\n\n///\n\n/// 1. `0 ..= bx-1`\n\n/// 2. `bx`\n\n/// 3. `bx+1 ..= ax-1`\n\n/// 4. `ax`\n\n/// 5. `ax+1 ..= 9`\n\n///\n\n/// There is one more special case: The first digit in a number can't be 0,\n\n/// unless the range's lower bound is 0. So we check if we are currently looking\n\n/// at the first digit, and if that is the case, the first character class omits\n\n/// 0. If the lower bound is 0, then an alternative containing _only_ 0 is added\n\n/// _once_.\n\n///\n\n/// Now, for each of the above alternatives, we add two things: A character\n\n/// class matching the first digit, and _something_ matching the remaining\n\n/// digits. That _something_ is calculated by recursively calling the `range`\n\n/// function on the remaining digits. 
To make sure that this doesn't recurse for\n\n/// infinity, we must detect terminal calls (calls that stop recursing):\n\n///\n\n/// - If both slices are empty, we are done.\n\n///\n\n/// - If both slices contain exactly 1 digit, we simply add a character class\n\n/// matching a digit in that range.\n\n///\n\n/// - If the first slice is empty but not the second one, we apply a trick: We\n\n/// add a 0 to the lower bound and try again. Also, the returned\n\n/// sub-expression is made optional.\n\n///\n\n/// - For example, `range([4], [400])` at some point adds an alternative\n\n/// starting with `4` and calls `range([], [0, 0])` recursively. We want\n\n/// this to match the empty string, any single digit, or two zeros,\n\n/// because a \"4\" matching the range 4-400 can be followed by nothing, any\n\n/// single digit or two zeros.\n\n///\n\n/// If we just added a 0 to the lower bound, that would mean that the 4 MUST\n\n/// be followed by at least one more digit. We don't want that, so we make the\n\n/// expression following the 4 optional.\n\n///\n\n/// - If the second slice is empty but not the first, this is an error that\n\n/// should NEVER happen. The parser validates the input so that the upper\n\n/// bound can't be smaller/shorter than the lower bound.\n\n///\n\n/// Now, about the alternatives: This part is quite interesting. To recap, the\n\n/// alternatives are either this:\n\n///\n\n/// 1. `0 ..= ax-1`\n\n/// 2. `ax`\n\n/// 3. `ax+1 ..= bx-1`\n\n/// 4. `bx`\n\n/// 5. `bx+1 ..= 9`\n\n///\n\n/// or this, if `bx > ax`:\n\n///\n\n/// 1. `0 ..= bx-1`\n\n/// 2. `bx`\n\n/// 3. `bx+1 ..= ax-1`\n\n/// 4. `ax`\n\n/// 5. `ax+1 ..= 9`\n\n///\n\n/// Alternative 1 and 5 are the same, if we substitute `ax` and `bx` with\n\n/// `min(ax, bx)` in 1. and with `max(ax, bx)` in step 5:\n\n///\n\n/// ```no_test\n\n/// 1. [1-(min - 1)] [0-9]{la + 1, lb} (first digit)\n\n/// 1. [0-(min - 1)] [0-9]{la + 1, lb} (not first digit)\n\n/// 5. 
[(max + 1)-9] [0-9]{al, bl - 1}\n\n/// ```\n\n///\n\n/// (`la` and `lb` are the lengths of the remaining digits in the lower and\n\n/// upper bound, respectively).\n\n///\n\n/// What is the deal with the added or subtracted 1's? If we have a lower bound\n\n/// such as 533, the number must be at least 3 digits long, because the lower\n\n/// bound is three digits long. However, if the first digit is less than 5, it\n\n/// must be at least 4 digits long to be greater than 533. With the upper bound,\n\n/// it's the exact opposite: For example, with an upper bound of 6111, the\n\n/// number can be at most 3 digits if it starts with 7, 8 or 9.\n\n///\n\n/// I'm not going to explain the remaining alternatives (2 through 4), since you\n\n/// can understand them by reading the code.\n\n///\n\n/// The last step is to optimize the alternatives to be as compact as possible.\n\n/// This is achieved by simplifying and merging alternatives if possible. For\n\n/// example,\n\n///\n\n/// ```no_test\n\n/// [0-4] [5-9] | 5 [5-9]\n\n/// ```\n\n///\n\n/// This can be merged into `[0-5] [5-9]`. The rules are like addition and\n\n/// multiplication, where alternation (with `|`) is equivalent to `+` and\n\n/// concatenation is equivalent to `*`. This means we can use the distributive\n\n/// law: `a * x + b * x = (a + b) * x`. Note that we only do this if the first\n\n/// character class of each alternation are consecutive; for example,\n\n/// we merge `[0-4]` and `5`, but not `[0-4]` and `[6-9]`. This would be\n\n/// possible in theory, but would be computationally more expensive, since the\n\n/// second part of each alternation must be checked for equality.\n\n///\n\n/// The next optimization is to replace concatenation of equal elements with\n\n/// repetition. In other words, we replace `a + a` with `a * 2`, and `a + (a *\n\n/// 2)` with `a * 3`. 
This is important, because when we check whether two\n\n/// expressions are equal, it only works if they have the exact same structure:\n\n/// `[0-9][0-9]` is not considered equal to `[0-9]{2}`. So this optimization\n\n/// also serves as a _normalization_, to ensure that equal alternatives can be\n\n/// merged.\n\nfn range(a: &[u8], b: &[u8], is_first: bool, radix: u8) -> Result<Rule, Error> {\n\n let hi_digit = radix - 1;\n\n let lo_digit = if is_first { 1 } else { 0 };\n\n\n\n debug_assert!(a.len() <= b.len() && (a.len() < b.len() || a <= b));\n\n\n\n Ok(match (a.split_first(), b.split_first()) {\n\n (None, None) => Rule::Empty,\n\n (Some(_), None) => return Err(Error),\n\n (None, Some(_)) => range(&[0], b, false, radix)?.optional(),\n\n (Some((&ax, [])), Some((&bx, []))) => Rule::class(ax, bx),\n\n (Some((&ax, a_rest)), Some((&bx, b_rest))) => {\n\n let (min, max) = (u8::min(ax, bx), u8::max(ax, bx));\n\n let mut alternatives = vec![];\n\n\n\n if min > lo_digit && a_rest.len() < b_rest.len() {\n\n // 1.\n\n alternatives.push(vec![\n\n Rule::class(lo_digit, min - 1),\n\n Rule::class(0, hi_digit).repeat(a_rest.len() + 1, b_rest.len()),\n", "file_path": "rulex-lib/src/range.rs", "rank": 74, "score": 32559.362098643334 }, { "content": " CompileError { kind: self, span: Some(span) }\n\n }\n\n}\n\n\n\n/// Regex feature, possibly unsupported\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\n#[non_exhaustive]\n\npub enum Feature {\n\n NamedCaptureGroups,\n\n Lookaround,\n\n Grapheme,\n\n UnicodeBlock,\n\n UnicodeProp,\n\n Backreference,\n\n ForwardReference,\n\n RelativeReference,\n\n NonNegativeRelativeReference,\n\n NegativeShorthandW,\n\n}\n\n\n", "file_path": "rulex-lib/src/error/compile_error.rs", "rank": 75, "score": 32255.150621538774 }, { "content": " EmptyClassNegated,\n\n\n\n #[error(\"Capturing groups within `let` statements are currently not supported\")]\n\n CaptureInLet,\n\n\n\n #[error(\"References within `let` statements are currently not 
supported\")]\n\n ReferenceInLet,\n\n\n\n #[error(\"Variable doesn't exist\")]\n\n UnknownVariable,\n\n\n\n #[error(\"Variables can't be used recursively\")]\n\n RecursiveVariable,\n\n\n\n #[error(\"Compile error: {}\", .0)]\n\n Other(&'static str),\n\n}\n\n\n\nimpl CompileErrorKind {\n\n pub(crate) fn at(self, span: Span) -> CompileError {\n", "file_path": "rulex-lib/src/error/compile_error.rs", "rank": 76, "score": 32253.80391855259 }, { "content": "use crate::{options::RegexFlavor, span::Span};\n\n\n\nuse super::{Diagnostic, ParseError, ParseErrorKind};\n\n\n\n/// An error that can occur during parsing or compiling\n\n#[derive(Debug, Clone, thiserror::Error)]\n\npub struct CompileError {\n\n pub(super) kind: CompileErrorKind,\n\n pub(super) span: Option<Span>,\n\n}\n\n\n\nimpl CompileError {\n\n /// Create a [Diagnostic] from this error.\n\n pub fn diagnostic(self, source_code: &str) -> Diagnostic {\n\n Diagnostic::from_compile_error(self, source_code)\n\n }\n\n}\n\n\n\nimpl core::fmt::Display for CompileError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n", "file_path": "rulex-lib/src/error/compile_error.rs", "rank": 77, "score": 32252.474515390528 }, { "content": "\n\n #[error(\"Compile error: Unsupported feature `{}` in the `{:?}` regex flavor\", .0.name(), .1)]\n\n Unsupported(Feature, RegexFlavor),\n\n\n\n #[error(\"Group references this large aren't supported\")]\n\n HugeReference,\n\n\n\n #[error(\"Reference to unknown group. There is no group number {}\", .0)]\n\n UnknownReferenceNumber(i32),\n\n\n\n #[error(\"Reference to unknown group. 
There is no group named `{}`\", .0)]\n\n UnknownReferenceName(String),\n\n\n\n #[error(\"Compile error: Group name `{}` used multiple times\", .0)]\n\n NameUsedMultipleTimes(String),\n\n\n\n #[error(\"Compile error: This character class is empty\")]\n\n EmptyClass,\n\n\n\n #[error(\"Compile error: This negated character class matches nothing\")]\n", "file_path": "rulex-lib/src/error/compile_error.rs", "rank": 78, "score": 32251.884820476123 }, { "content": " if let Some(span) = self.span {\n\n write!(f, \"{}\\n at {}\", self.kind, span)\n\n } else {\n\n self.kind.fmt(f)\n\n }\n\n }\n\n}\n\n\n\nimpl From<ParseError> for CompileError {\n\n fn from(e: ParseError) -> Self {\n\n CompileError { kind: CompileErrorKind::ParseError(e.kind), span: e.span }\n\n }\n\n}\n\n\n\n/// An error kind (without span) that can occur during parsing or compiling\n\n#[derive(Debug, Clone, PartialEq, Eq, thiserror::Error)]\n\n#[non_exhaustive]\n\npub(crate) enum CompileErrorKind {\n\n #[error(\"Parse error: {}\", .0)]\n\n ParseError(ParseErrorKind),\n", "file_path": "rulex-lib/src/error/compile_error.rs", "rank": 79, "score": 32248.67583868512 }, { "content": "impl Feature {\n\n fn name(self) -> &'static str {\n\n match self {\n\n Feature::NamedCaptureGroups => \"named capture groups\",\n\n Feature::Lookaround => \"lookahead/behind\",\n\n Feature::Grapheme => \"grapheme cluster matcher (\\\\X)\",\n\n Feature::UnicodeBlock => \"Unicode blocks (\\\\p{InBlock})\",\n\n Feature::UnicodeProp => \"Unicode properties (\\\\p{Property})\",\n\n Feature::Backreference => \"Backreference\",\n\n Feature::ForwardReference => \"Forward reference\",\n\n Feature::RelativeReference => \"Relative backreference\",\n\n Feature::NonNegativeRelativeReference => \"Non-negative relative backreference\",\n\n Feature::NegativeShorthandW => \"Negative `\\\\w` shorthand in character class\",\n\n }\n\n }\n\n}\n", "file_path": "rulex-lib/src/error/compile_error.rs", "rank": 80, "score": 32248.621946391697 }, { "content": 
"use crate::{\n\n compile::{CompileResult, CompileState},\n\n error::CompileErrorKind,\n\n options::CompileOptions,\n\n span::Span,\n\n};\n\n\n\n#[derive(Clone, PartialEq, Eq)]\n\npub(crate) struct Variable<'i> {\n\n name: &'i str,\n\n pub(crate) span: Span,\n\n}\n\n\n\nimpl<'i> Variable<'i> {\n\n pub(crate) fn new(name: &'i str, span: Span) -> Self {\n\n Variable { name, span }\n\n }\n\n\n\n pub(crate) fn compile<'c>(\n\n &'c self,\n", "file_path": "rulex-lib/src/var.rs", "rank": 81, "score": 32.17378081719365 }, { "content": "//! Contains the [`Grapheme`] type, which matches a\n\n//! [Unicode grapheme](https://www.regular-expressions.info/unicode.html#grapheme).\n\n\n\nuse crate::{\n\n compile::CompileResult,\n\n error::{CompileErrorKind, Feature, ParseError},\n\n features::RulexFeatures,\n\n options::{CompileOptions, ParseOptions, RegexFlavor},\n\n regex::Regex,\n\n span::Span,\n\n};\n\n\n\n/// The `Grapheme` expression, matching a\n\n/// [Unicode grapheme](https://www.regular-expressions.info/unicode.html#grapheme).\n\n#[derive(Clone, Copy, PartialEq, Eq)]\n\n#[cfg_attr(feature = \"dbg\", derive(Debug))]\n\npub(crate) struct Grapheme {\n\n pub(crate) span: Span,\n\n}\n\n\n", "file_path": "rulex-lib/src/grapheme.rs", "rank": 82, "score": 29.145512111182384 }, { "content": "use std::borrow::Cow;\n\n\n\nuse crate::{compile::CompileResult, options::RegexFlavor, regex::Regex, span::Span};\n\n\n\n#[derive(Clone, PartialEq, Eq)]\n\npub(crate) struct Literal<'i> {\n\n content: Cow<'i, str>,\n\n pub(crate) span: Span,\n\n}\n\n\n\nimpl<'i> Literal<'i> {\n\n pub(crate) fn new(content: Cow<'i, str>, span: Span) -> Self {\n\n Literal { content, span }\n\n }\n\n\n\n pub(crate) fn compile(&self) -> CompileResult<'i> {\n\n Ok(Regex::Literal(self.content.clone()))\n\n }\n\n}\n\n\n", "file_path": "rulex-lib/src/literal.rs", "rank": 83, "score": 27.859083737872083 }, { "content": " let target = ReferenceTarget::Number(from_str(s)?);\n\n Ok(Rule::Reference(Reference::new(target, 
span)))\n\n },\n\n nom::Err::Failure,\n\n ),\n\n map(Token::Identifier, |(s, span)| {\n\n let target = ReferenceTarget::Named(s);\n\n Rule::Reference(Reference::new(target, span))\n\n }),\n\n try_map(\n\n pair(alt((Token::Plus, Token::Dash)), Token::Number),\n\n |((sign, span1), (s, span2))| {\n\n let num = if sign == \"-\" { from_str(&format!(\"-{s}\")) } else { from_str(s) }?;\n\n let target = ReferenceTarget::Relative(num);\n\n Ok(Rule::Reference(Reference::new(target, span1.join(span2))))\n\n },\n\n nom::Err::Failure,\n\n ),\n\n err(|| ParseErrorKind::Expected(\"number or group name\")),\n\n )),\n\n )(input)\n\n}\n\n\n", "file_path": "rulex-lib/src/parse/parsers.rs", "rank": 84, "score": 27.238531439547263 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::{\n\n compile::{CompileResult, CompileState},\n\n error::{CompileError, CompileErrorKind, Feature, ParseError},\n\n features::RulexFeatures,\n\n options::{CompileOptions, ParseOptions, RegexFlavor},\n\n regex::Regex,\n\n rule::Rule,\n\n span::Span,\n\n};\n\n\n\n#[derive(Clone)]\n\npub(crate) struct Lookaround<'i> {\n\n kind: LookaroundKind,\n\n rule: Rule<'i>,\n\n pub(crate) span: Span,\n\n}\n\n\n\n#[cfg(feature = \"dbg\")]\n", "file_path": "rulex-lib/src/lookaround.rs", "rank": 85, "score": 26.353989179637146 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::{\n\n compile::{CompileResult, CompileState},\n\n error::{CompileError, ParseError},\n\n features::RulexFeatures,\n\n options::{CompileOptions, ParseOptions},\n\n repetition::RegexQuantifier,\n\n rule::Rule,\n\n span::Span,\n\n};\n\n\n\n#[derive(Clone)]\n\npub(crate) struct StmtExpr<'i> {\n\n stmt: Stmt<'i>,\n\n rule: Rule<'i>,\n\n pub(crate) span: Span,\n\n}\n\n\n\n#[derive(Clone)]\n", "file_path": "rulex-lib/src/stmt.rs", "rank": 86, "score": 26.095637195153344 }, { "content": "\n\n *count += 1;\n\n }\n\n None => {}\n\n };\n\n for rulex in &self.parts {\n\n rulex.get_capturing_groups(count, map, within_variable)?;\n\n 
}\n\n Ok(())\n\n }\n\n\n\n pub(crate) fn compile<'c>(\n\n &'c self,\n\n options: CompileOptions,\n\n state: &mut CompileState<'c, 'i>,\n\n ) -> CompileResult<'i> {\n\n if self.capture.is_some() {\n\n state.next_idx += 1;\n\n }\n\n\n", "file_path": "rulex-lib/src/group.rs", "rank": 87, "score": 25.181922444758126 }, { "content": "}\n\n\n\nimpl<'i> Lookaround<'i> {\n\n pub(crate) fn get_capturing_groups(\n\n &self,\n\n count: &mut u32,\n\n map: &'i mut HashMap<String, u32>,\n\n within_variable: bool,\n\n ) -> Result<(), CompileError> {\n\n self.rule.get_capturing_groups(count, map, within_variable)\n\n }\n\n\n\n pub(crate) fn new(rule: Rule<'i>, kind: LookaroundKind, span: Span) -> Self {\n\n Lookaround { rule, kind, span }\n\n }\n\n\n\n pub(crate) fn compile<'c>(\n\n &'c self,\n\n options: CompileOptions,\n\n state: &mut CompileState<'c, 'i>,\n", "file_path": "rulex-lib/src/lookaround.rs", "rank": 88, "score": 24.668767016823676 }, { "content": " /// The parsed `Rulex` can be displayed with `Debug` if the `dbg` feature is\n\n /// enabled.\n\n pub fn parse(input: &'i str, options: ParseOptions) -> Result<Self, ParseError> {\n\n let rule = parse::parse(input)?;\n\n rule.validate(&options)?;\n\n Ok(Rulex(rule))\n\n }\n\n\n\n /// Compile a `Rulex` that has been parsed, to a regex\n\n pub fn compile(&self, options: CompileOptions) -> Result<String, CompileError> {\n\n let mut used_names = HashMap::new();\n\n let mut groups_count = 0;\n\n self.0.get_capturing_groups(&mut groups_count, &mut used_names, false)?;\n\n\n\n let empty_span = Span::new(0, 0);\n\n let start = Rule::Boundary(Boundary::new(BoundaryKind::Start, empty_span));\n\n let end = Rule::Boundary(Boundary::new(BoundaryKind::End, empty_span));\n\n let grapheme = Rule::Grapheme(Grapheme { span: empty_span });\n\n\n\n let builtins =\n", "file_path": "rulex-lib/src/lib.rs", "rank": 89, "score": 23.533064459885225 }, { "content": "//! Implements _boundaries_. The analogues in the regex world are\n\n//! 
[word boundaries](https://www.regular-expressions.info/wordboundaries.html) and\n\n//! [anchors](https://www.regular-expressions.info/anchors.html).\n\n\n\nuse crate::{\n\n compile::CompileResult, error::ParseError, features::RulexFeatures, options::ParseOptions,\n\n regex::Regex, span::Span,\n\n};\n\n\n\n/// A [word boundary](https://www.regular-expressions.info/wordboundaries.html) or\n\n/// [anchor](https://www.regular-expressions.info/anchors.html), which we combine under the term\n\n/// _boundary_.\n\n///\n\n/// All boundaries use a variation of the `%` sigil, so they are easy to\n\n/// remember.\n\n#[derive(Clone, Copy, PartialEq, Eq)]\n\npub(crate) struct Boundary {\n\n kind: BoundaryKind,\n\n pub(crate) span: Span,\n\n}\n", "file_path": "rulex-lib/src/boundary.rs", "rank": 90, "score": 23.429344061355323 }, { "content": " }\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq)]\n\n#[cfg_attr(feature = \"dbg\", derive(Debug))]\n\npub(crate) struct Capture<'i> {\n\n pub(crate) name: Option<&'i str>,\n\n}\n\n\n\nimpl<'i> Capture<'i> {\n\n pub(crate) fn new(name: Option<&'i str>) -> Self {\n\n Capture { name }\n\n }\n\n}\n\n\n\n#[cfg_attr(feature = \"dbg\", derive(Debug))]\n\npub(crate) struct RegexGroup<'i> {\n\n parts: Vec<Regex<'i>>,\n\n capture: RegexCapture<'i>,\n\n}\n", "file_path": "rulex-lib/src/group.rs", "rank": 91, "score": 23.39374157930255 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::{\n\n alternation::Alternation,\n\n boundary::Boundary,\n\n char_class::CharClass,\n\n compile::{CompileResult, CompileState},\n\n error::{CompileError, CompileErrorKind, ParseError},\n\n grapheme::Grapheme,\n\n group::Group,\n\n literal::Literal,\n\n lookaround::Lookaround,\n\n options::{CompileOptions, ParseOptions},\n\n range::Range,\n\n reference::Reference,\n\n repetition::Repetition,\n\n span::Span,\n\n stmt::StmtExpr,\n\n var::Variable,\n\n};\n", "file_path": "rulex-lib/src/rule.rs", "rank": 92, "score": 23.376213516365354 }, { "content": 
"\n\nimpl Boundary {\n\n pub(crate) fn new(kind: BoundaryKind, span: Span) -> Self {\n\n Boundary { kind, span }\n\n }\n\n}\n\n\n\nimpl Boundary {\n\n pub(crate) fn compile(&self) -> CompileResult<'static> {\n\n Ok(Regex::Boundary(self.kind))\n\n }\n\n\n\n pub(crate) fn validate(&self, options: &ParseOptions) -> Result<(), ParseError> {\n\n options.allowed_features.require(RulexFeatures::BOUNDARIES, self.span)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"dbg\")]\n\nimpl core::fmt::Debug for Boundary {\n\n fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {\n", "file_path": "rulex-lib/src/boundary.rs", "rank": 93, "score": 23.12957271645434 }, { "content": " pub(crate) span: Span,\n\n}\n\n\n\nimpl Range {\n\n pub(crate) fn new(start: Vec<u8>, end: Vec<u8>, radix: u8, span: Span) -> Self {\n\n Range { start, end, radix, span }\n\n }\n\n\n\n pub(crate) fn compile(&self) -> CompileResult<'static> {\n\n match range(&self.start, &self.end, true, self.radix) {\n\n Ok(rule) => Ok(rule.to_regex()),\n\n Err(Error) => {\n\n Err(CompileErrorKind::Other(\"Expanding the range yielded an unexpected error\")\n\n .at(self.span))\n\n }\n\n }\n\n }\n\n\n\n pub(crate) fn validate(&self, options: &ParseOptions) -> Result<(), ParseError> {\n\n if self.end.len() > options.max_range_size as usize {\n", "file_path": "rulex-lib/src/range.rs", "rank": 94, "score": 23.05231480075 }, { "content": " ) -> Result<(), CompileError> {\n\n match self.capture {\n\n Some(Capture { name: Some(name) }) => {\n\n if within_variable {\n\n return Err(CompileErrorKind::CaptureInLet.at(self.span));\n\n }\n\n\n\n if map.contains_key(name) {\n\n return Err(\n\n CompileErrorKind::NameUsedMultipleTimes(name.to_string()).at(self.span)\n\n );\n\n }\n\n\n\n *count += 1;\n\n map.insert(name.to_string(), *count);\n\n }\n\n Some(Capture { name: None }) => {\n\n if within_variable {\n\n return Err(CompileErrorKind::CaptureInLet.at(self.span));\n\n }\n", "file_path": "rulex-lib/src/group.rs", "rank": 95, 
"score": 23.01355651839935 }, { "content": " vec![(\"Start\", &start), (\"End\", &end), (\"Grapheme\", &grapheme), (\"X\", &grapheme)];\n\n\n\n let mut state = CompileState {\n\n next_idx: 1,\n\n used_names,\n\n groups_count,\n\n default_quantifier: RegexQuantifier::Greedy,\n\n variables: builtins,\n\n current_vars: Default::default(),\n\n };\n\n let compiled = self.0.comp(options, &mut state)?;\n\n\n\n let mut buf = String::new();\n\n compiled.codegen(&mut buf, options.flavor);\n\n Ok(buf)\n\n }\n\n\n\n /// Parse a string to a `Rulex` and compile it to a regex.\n\n pub fn parse_and_compile(\n\n input: &'i str,\n", "file_path": "rulex-lib/src/lib.rs", "rank": 96, "score": 22.90154471683175 }, { "content": " Self { name, rule, name_span }\n\n }\n\n\n\n pub(crate) fn name(&self) -> &'i str {\n\n self.name\n\n }\n\n}\n\n\n\nimpl<'i> StmtExpr<'i> {\n\n pub(crate) fn new(stmt: Stmt<'i>, rule: Rule<'i>, span: Span) -> Self {\n\n Self { stmt, rule, span }\n\n }\n\n\n\n pub(crate) fn get_capturing_groups(\n\n &self,\n\n count: &mut u32,\n\n map: &'i mut HashMap<String, u32>,\n\n within_variable: bool,\n\n ) -> Result<(), CompileError> {\n\n if let Stmt::Let(l) = &self.stmt {\n", "file_path": "rulex-lib/src/stmt.rs", "rank": 97, "score": 22.855546488754115 }, { "content": "pub(crate) enum Stmt<'i> {\n\n Enable(BooleanSetting),\n\n Disable(BooleanSetting),\n\n Let(Let<'i>),\n\n}\n\n\n\n#[derive(Clone, PartialEq, Eq)]\n\npub(crate) enum BooleanSetting {\n\n Lazy,\n\n}\n\n\n\n#[derive(Clone)]\n\npub(crate) struct Let<'i> {\n\n name: &'i str,\n\n rule: Rule<'i>,\n\n pub(crate) name_span: Span,\n\n}\n\n\n\nimpl<'i> Let<'i> {\n\n pub(crate) fn new(name: &'i str, rule: Rule<'i>, name_span: Span) -> Self {\n", "file_path": "rulex-lib/src/stmt.rs", "rank": 98, "score": 22.586482529021854 }, { "content": " fn next(&mut self) -> Option<Self::Item> {\n\n match self.tokens.split_first() {\n\n Some((&(token, span), rest)) => {\n\n self.tokens = rest;\n\n Some((token, 
&self.source[span.range()]))\n\n }\n\n None => None,\n\n }\n\n }\n\n}\n\n\n\nimpl<'i, 'b> InputIter for Input<'i, 'b> {\n\n type Item = (Token, &'i str);\n\n\n\n type Iter = Enumerate<Self>;\n\n\n\n type IterElem = Self;\n\n\n\n fn iter_indices(&self) -> Self::Iter {\n\n self.iter_elements().enumerate()\n", "file_path": "rulex-lib/src/parse/input.rs", "rank": 99, "score": 22.495337481377284 } ]
Rust
src/bin/nydusd/fusedev.rs
changweige/image-service
ba35a388fd12a9c833d20b48a591d029b6af10b5
use std::any::Any; use std::ffi::{CStr, CString}; use std::fs::metadata; use std::io::Result; use std::ops::Deref; use std::os::linux::fs::MetadataExt; use std::os::unix::ffi::OsStrExt; use std::os::unix::net::UnixStream; use std::path::Path; use std::sync::{ atomic::{AtomicI32, AtomicU64, Ordering}, mpsc::{channel, Receiver, Sender}, Arc, Mutex, MutexGuard, }; use std::thread::{self, JoinHandle}; use std::time::{SystemTime, UNIX_EPOCH}; use nix::sys::stat::{major, minor}; use serde::Serialize; use fuse_rs::api::{ server::{MetricsHook, Server}, Vfs, }; use fuse_rs::abi::linux_abi::{InHeader, OutHeader}; use vmm_sys_util::eventfd::EventFd; use crate::upgrade::{self, FailoverPolicy, UpgradeManager}; use crate::{daemon, exit_event_manager}; use daemon::{ DaemonError, DaemonResult, DaemonState, DaemonStateMachineContext, DaemonStateMachineInput, DaemonStateMachineSubscriber, FsBackendCollection, FsBackendMountCmd, NydusDaemon, Trigger, }; use nydus_app::BuildTimeInfo; use nydus_utils::{FuseChannel, FuseSession}; #[derive(Serialize)] struct FuseOp { inode: u64, opcode: u32, unique: u64, timestamp_secs: u64, } #[derive(Default, Clone, Serialize)] struct FuseOpWrapper { op: Arc<Mutex<Option<FuseOp>>>, } impl Default for FuseOp { fn default() -> Self { Self { inode: u64::default(), opcode: u32::default(), unique: u64::default(), timestamp_secs: SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap() .as_secs(), } } } pub(crate) struct FuseServer { server: Arc<Server<Arc<Vfs>>>, ch: FuseChannel, buf: Vec<u8>, } impl FuseServer { fn new(server: Arc<Server<Arc<Vfs>>>, se: &FuseSession, evtfd: EventFd) -> Result<FuseServer> { Ok(FuseServer { server, ch: se.new_channel(evtfd)?, buf: Vec::with_capacity(se.bufsize()), }) } fn svc_loop(&mut self, metrics_hook: &dyn MetricsHook) -> Result<()> { unsafe { self.buf.set_len(self.buf.capacity()); } let _ebadf = std::io::Error::from_raw_os_error(libc::EBADF); loop { if let Some(reader) = self.ch.get_reader(&mut self.buf)? 
{ let writer = self.ch.get_writer()?; if let Err(e) = self .server .handle_message(reader, writer, None, Some(metrics_hook)) { match e { fuse_rs::Error::EncodeMessage(_ebadf) => { return Err(eio!("fuse session has been shut down")); } _ => { error!("Handling fuse message, {}", DaemonError::ProcessQueue(e)); continue; } } } } else { info!("fuse server exits"); break; } } Ok(()) } } pub struct FusedevDaemon { server: Arc<Server<Arc<Vfs>>>, vfs: Arc<Vfs>, pub session: Mutex<FuseSession>, thread_tx: Mutex<Option<Sender<JoinHandle<Result<()>>>>>, thread_rx: Mutex<Receiver<JoinHandle<Result<()>>>>, running_threads: AtomicI32, event_fd: EventFd, state: AtomicI32, pub threads_cnt: u32, trigger: Arc<Mutex<Trigger>>, result_receiver: Mutex<Receiver<DaemonResult<()>>>, pub supervisor: Option<String>, pub id: Option<String>, pub(crate) conn: AtomicU64, #[allow(dead_code)] pub(crate) failover_policy: FailoverPolicy, upgrade_mgr: Option<Mutex<UpgradeManager>>, backend_collection: Mutex<FsBackendCollection>, bti: BuildTimeInfo, inflight_ops: Mutex<Vec<FuseOpWrapper>>, } impl MetricsHook for FuseOpWrapper { fn collect(&self, ih: &InHeader) { let (n, u, o) = (ih.nodeid, ih.unique, ih.opcode); *self.op.lock().expect("Not expect poisoned lock") = Some(FuseOp { inode: n, unique: u, opcode: o, timestamp_secs: SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap() .as_secs(), }) } fn release(&self, _oh: Option<&OutHeader>) { *self.op.lock().expect("Not expect poisoned lock") = None } } impl FusedevDaemon { fn kick_one_server(&self) -> Result<()> { let mut s = FuseServer::new( self.server.clone(), self.session.lock().unwrap().deref(), self.event_fd.try_clone().unwrap(), )?; let inflight_op = FuseOpWrapper::default(); self.inflight_ops.lock().unwrap().push(inflight_op.clone()); let thread = thread::Builder::new() .name("fuse_server".to_string()) .spawn(move || { let _ = s.svc_loop(&inflight_op); exit_event_manager(); Ok(()) }) .map_err(DaemonError::ThreadSpawn)?; self.thread_tx .lock() 
.expect("Not expect poisoned lock.") .as_ref() .unwrap() .send(thread) .map_err(|e| eother!(e))?; self.running_threads.fetch_add(1, Ordering::AcqRel); Ok(()) } } impl DaemonStateMachineSubscriber for FusedevDaemon { fn on_event(&self, event: DaemonStateMachineInput) -> DaemonResult<()> { self.trigger .lock() .unwrap() .send(event) .map_err(|e| DaemonError::Channel(format!("send {:?}", e)))?; self.result_receiver .lock() .expect("Not expect poisoned lock!") .recv() .map_err(|e| DaemonError::Channel(format!("recv {:?}", e)))? } } impl NydusDaemon for FusedevDaemon { #[inline] fn as_any(&self) -> &dyn Any { self } fn start(&self) -> DaemonResult<()> { for _ in 0..self.threads_cnt { self.kick_one_server() .map_err(|e| DaemonError::StartService(format!("{:?}", e)))?; } drop( self.thread_tx .lock() .expect("Not expect poisoned lock") .take() .unwrap(), ); Ok(()) } fn wait(&self) -> DaemonResult<()> { while let Ok(handle) = self.thread_rx.lock().unwrap().recv() { self.running_threads.fetch_sub(1, Ordering::AcqRel); handle .join() .map_err(|e| { DaemonError::WaitDaemon( *e.downcast::<std::io::Error>() .unwrap_or_else(|e| Box::new(eother!(e))), ) })? .map_err(DaemonError::WaitDaemon)? 
} if self.running_threads.load(Ordering::Acquire) != 0 { warn!("Not all threads are joined."); } Ok(()) } fn disconnect(&self) -> DaemonResult<()> { self.session .lock() .expect("Not expect poisoned lock.") .umount() .map_err(DaemonError::SessionShutdown) } #[inline] fn id(&self) -> Option<String> { self.id.clone() } #[inline] fn supervisor(&self) -> Option<String> { self.supervisor.clone() } #[inline] fn interrupt(&self) { self.event_fd.write(1).expect("Stop fuse service loop"); } #[inline] fn set_state(&self, state: DaemonState) { self.state.store(state as i32, Ordering::Relaxed); } #[inline] fn get_state(&self) -> DaemonState { self.state.load(Ordering::Relaxed).into() } fn save(&self) -> DaemonResult<()> { upgrade::fusedev_upgrade::save(self) } fn restore(&self) -> DaemonResult<()> { upgrade::fusedev_upgrade::restore(self) } #[inline] fn get_vfs(&self) -> &Vfs { &self.vfs } #[inline] fn upgrade_mgr(&self) -> Option<MutexGuard<UpgradeManager>> { self.upgrade_mgr.as_ref().map(|mgr| mgr.lock().unwrap()) } fn backend_collection(&self) -> MutexGuard<FsBackendCollection> { self.backend_collection.lock().unwrap() } fn version(&self) -> BuildTimeInfo { self.bti.clone() } fn export_inflight_ops(&self) -> DaemonResult<Option<String>> { let ops = self.inflight_ops.lock().unwrap(); let r = ops .iter() .filter(|w| w.op.lock().unwrap().is_some()) .map(|w| &w.op) .collect::<Vec<&Arc<Mutex<Option<FuseOp>>>>>(); if r.is_empty() { Ok(None) } else { let resp = serde_json::to_string(&r).map_err(DaemonError::Serde)?; Ok(Some(resp)) } } } fn is_mounted(mp: impl AsRef<Path>) -> Result<bool> { let mounts = CString::new("/proc/self/mounts").unwrap(); let ty = CString::new("r").unwrap(); let mounts_stream = unsafe { libc::setmntent( mounts.as_ptr() as *const libc::c_char, ty.as_ptr() as *const libc::c_char, ) }; loop { let mnt = unsafe { libc::getmntent(mounts_stream) }; if mnt as u32 == libc::PT_NULL { break; } if unsafe { CStr::from_ptr((*mnt).mnt_dir) } == 
CString::new(mp.as_ref().as_os_str().as_bytes())?.as_c_str() { unsafe { libc::endmntent(mounts_stream) }; return Ok(true); } } unsafe { libc::endmntent(mounts_stream) }; Ok(false) } fn is_sock_residual(sock: impl AsRef<Path>) -> bool { if metadata(&sock).is_ok() { return UnixStream::connect(&sock).is_err(); } false } fn is_crashed(path: impl AsRef<Path>, sock: &impl AsRef<Path>) -> Result<bool> { if is_mounted(path)? && is_sock_residual(sock) { warn!("A previous daemon crashed! Try to failover later."); return Ok(true); } Ok(false) } fn calc_fuse_conn(mp: impl AsRef<Path>) -> Result<u64> { let st = metadata(mp)?; let dev = st.st_dev(); let (major, minor) = (major(dev), minor(dev)); Ok(major << 20 | minor) } #[allow(clippy::too_many_arguments)] pub fn create_nydus_daemon( mountpoint: &str, vfs: Arc<Vfs>, supervisor: Option<String>, id: Option<String>, threads_cnt: u32, api_sock: Option<impl AsRef<Path>>, upgrade: bool, fp: FailoverPolicy, mount_cmd: Option<FsBackendMountCmd>, bti: BuildTimeInfo, ) -> Result<Arc<dyn NydusDaemon + Send>> { let (trigger, events_rx) = channel::<DaemonStateMachineInput>(); let session = FuseSession::new(Path::new(mountpoint), "rafs", "")?; let upgrade_mgr = supervisor .as_ref() .map(|s| Mutex::new(UpgradeManager::new(s.to_string().into()))); let (tx, rx) = channel::<JoinHandle<Result<()>>>(); let (result_sender, result_receiver) = channel::<DaemonResult<()>>(); let daemon = Arc::new(FusedevDaemon { session: Mutex::new(session), server: Arc::new(Server::new(vfs.clone())), vfs, thread_tx: Mutex::new(Some(tx)), thread_rx: Mutex::new(rx), running_threads: AtomicI32::new(0), event_fd: EventFd::new(0).unwrap(), state: AtomicI32::new(DaemonState::INIT as i32), threads_cnt, trigger: Arc::new(Mutex::new(trigger)), result_receiver: Mutex::new(result_receiver), supervisor, id, conn: AtomicU64::new(0), failover_policy: fp, upgrade_mgr, backend_collection: Default::default(), bti, inflight_ops: Mutex::new(Vec::new()), }); let machine = 
DaemonStateMachineContext::new(daemon.clone(), events_rx, result_sender); machine.kick_state_machine()?; if (api_sock.as_ref().is_some() && !upgrade && !is_crashed(mountpoint, api_sock.as_ref().unwrap())?) || api_sock.is_none() { if let Some(cmd) = mount_cmd { daemon.mount(cmd)?; } daemon.session.lock().unwrap().mount()?; daemon .on_event(DaemonStateMachineInput::Mount) .map_err(|e| eother!(e))?; daemon .conn .store(calc_fuse_conn(mountpoint)?, Ordering::Relaxed); } Ok(daemon) }
use std::any::Any; use std::ffi::{CStr, CString}; use std::fs::metadata; use std::io::Result; use std::ops::Deref; use std::os::linux::fs::MetadataExt; use std::os::unix::ffi::OsStrExt; use std::os::unix::net::UnixStream; use std::path::Path; use std::sync::{ atomic::{AtomicI32, AtomicU64, Ordering}, mpsc::{channel, Receiver, Sender}, Arc, Mutex, MutexGuard, }; use std::thread::{self, JoinHandle}; use std::time::{SystemTime, UNIX_EPOCH}; use nix::sys::stat::{major, minor}; use serde::Serialize; use fuse_rs::api::{ server::{MetricsHook, Server}, Vfs, }; use fuse_rs::abi::linux_abi::{InHeader, OutHeader}; use vmm_sys_util::eventfd::EventFd; use crate::upgrade::{self, FailoverPolicy, UpgradeManager}; use crate::{daemon, exit_event_manager}; use daemon::{ DaemonError, DaemonResult, DaemonState, DaemonStateMachineContext, DaemonStateMachineInput, DaemonStateMachineSubscriber, FsBackendCollection, FsBackendMountCmd, NydusDaemon, Trigger, }; use nydus_app::BuildTimeInfo; use nydus_utils::{FuseChannel, FuseSession}; #[derive(Serialize)] struct FuseOp { inode: u64, opcode: u32, unique: u64, timestamp_secs: u64, } #[derive(Default, Clone, Serialize)] struct FuseOpWrapper { op: Arc<Mutex<Option<FuseOp>>>, } impl Default for FuseOp { fn default() -> Self { Self { inode: u64::default(), opcode: u32::default(), unique: u64::default(), timestamp_secs: SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap() .as_secs(), } } } pub(crate) struct FuseServer { server: Arc<Server<Arc<Vfs>>>, ch: FuseChannel, buf: Vec<u8>, } impl FuseServer { fn new(server: Arc<Server<Arc<Vfs>>>, se: &FuseSession, evtfd: EventFd) -> Result<FuseServer> { Ok(FuseServer { server, ch: se.new_channel(evtfd)?, buf: Vec::with_capacity(se.bufsize()), }) } fn svc_loop(&mut self, metrics_hook: &dyn MetricsHook) -> Result<()> { unsafe { self.buf.set_len(self.buf.capacity()); } let _ebadf = std::io::Error::from_raw_os_error(libc::EBADF); loop { if let Some(reader) = self.ch.get_reader(&mut self.buf)? 
{ let writer = self.ch.get_writer()?; if let Err(e) = self .server .handle_message(reader, writer, None, Some(metrics_hook)) { match e { fuse_rs::Error::EncodeMessage(_ebadf) => { return Err(eio!("fuse session has been shut down")); } _ => { error!("Handling fuse message, {}", DaemonError::ProcessQueue(e)); continue; } } } } else { info!("fuse server exits"); break; } } Ok(()) } } pub struct FusedevDaemon { server: Arc<Server<Arc<Vfs>>>, vfs: Arc<Vfs>, pub session: Mutex<FuseSession>, thread_tx: Mutex<Option<Sender<JoinHandle<Result<()>>>>>, thread_rx: Mutex<Receiver<JoinHandle<Result<()>>>>, running_threads: AtomicI32, event_fd: EventFd, state: AtomicI32, pub threads_cnt: u32, trigger: Arc<Mutex<Trigger>>, result_receiver: Mutex<Receiver<DaemonResult<()>>>, pub supervisor: Option<String>, pub id: Option<String>, pub(crate) conn: AtomicU64, #[allow(dead_code)] pub(crate) failover_policy: FailoverPolicy, upgrade_mgr: Option<Mutex<UpgradeManager>>, backend_collection: Mutex<FsBackendCollection>, bti: BuildTimeInfo, inflight_ops: Mutex<Vec<FuseOpWrapper>>, } impl MetricsHook for FuseOpWrapper { fn collect(&self, ih: &InHeader) { let (n, u, o) = (ih.nodeid, ih.unique, ih.opcode); *self.op.lock().expect("Not expect poisoned lock") = Some(FuseOp { inode: n, unique: u, opcode: o, timestamp_secs: SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap() .as_secs(), }) } fn release(&self, _oh: Option<&OutHeader>) { *self.op.lock().expect("Not expect poisoned lock") = None } } impl FusedevDaemon { fn kick_one_server(&self) -> Result<()> { let mut s = FuseServer::new( self.server.clone(), self.session.lock().unwrap().deref(), self.event_fd.try_clone().unwrap(), )?; let inflight_op = FuseOpWrapper::default(); self.inflight_ops.lock().unwrap().push(inflight_op.clone()); let thread = thread::Builder::new() .name("fuse_server".to_string()) .spawn(move || { let _ = s.svc_loop(&inflight_op); exit_event_manager(); Ok(()) }) .map_err(DaemonError::ThreadSpawn)?; self.thread_tx .lock() 
.expect("Not expect poisoned lock.") .as_ref() .unwrap() .send(thread) .map_err(|e| eother!(e))?; self.running_threads.fetch_add(1, Ordering::AcqRel); Ok(()) } } impl DaemonStateMachineSubscriber for FusedevDaemon { fn on_event(&self, event: DaemonStateMachineInput) -> DaemonResult<()> { self.trigger .lock() .unwrap() .send(event) .map_err(|e| DaemonError::Channel(format!("send {:?}", e)))?; self.result_receiver .lock() .expect("Not expect poisoned lock!") .recv() .map_err(|e| DaemonError::Channel(format!("recv {:?}", e)))? } } impl NydusDaemon for FusedevDaemon { #[inline] fn as_any(&self) -> &dyn Any { self } fn start(&self) -> DaemonResult<()> { for _ in 0..self.threads_cnt { self.kick_one_server() .map_err(|e| DaemonError::StartService(format!("{:?}", e)))?; }
; Ok(()) } fn wait(&self) -> DaemonResult<()> { while let Ok(handle) = self.thread_rx.lock().unwrap().recv() { self.running_threads.fetch_sub(1, Ordering::AcqRel); handle .join() .map_err(|e| { DaemonError::WaitDaemon( *e.downcast::<std::io::Error>() .unwrap_or_else(|e| Box::new(eother!(e))), ) })? .map_err(DaemonError::WaitDaemon)? } if self.running_threads.load(Ordering::Acquire) != 0 { warn!("Not all threads are joined."); } Ok(()) } fn disconnect(&self) -> DaemonResult<()> { self.session .lock() .expect("Not expect poisoned lock.") .umount() .map_err(DaemonError::SessionShutdown) } #[inline] fn id(&self) -> Option<String> { self.id.clone() } #[inline] fn supervisor(&self) -> Option<String> { self.supervisor.clone() } #[inline] fn interrupt(&self) { self.event_fd.write(1).expect("Stop fuse service loop"); } #[inline] fn set_state(&self, state: DaemonState) { self.state.store(state as i32, Ordering::Relaxed); } #[inline] fn get_state(&self) -> DaemonState { self.state.load(Ordering::Relaxed).into() } fn save(&self) -> DaemonResult<()> { upgrade::fusedev_upgrade::save(self) } fn restore(&self) -> DaemonResult<()> { upgrade::fusedev_upgrade::restore(self) } #[inline] fn get_vfs(&self) -> &Vfs { &self.vfs } #[inline] fn upgrade_mgr(&self) -> Option<MutexGuard<UpgradeManager>> { self.upgrade_mgr.as_ref().map(|mgr| mgr.lock().unwrap()) } fn backend_collection(&self) -> MutexGuard<FsBackendCollection> { self.backend_collection.lock().unwrap() } fn version(&self) -> BuildTimeInfo { self.bti.clone() } fn export_inflight_ops(&self) -> DaemonResult<Option<String>> { let ops = self.inflight_ops.lock().unwrap(); let r = ops .iter() .filter(|w| w.op.lock().unwrap().is_some()) .map(|w| &w.op) .collect::<Vec<&Arc<Mutex<Option<FuseOp>>>>>(); if r.is_empty() { Ok(None) } else { let resp = serde_json::to_string(&r).map_err(DaemonError::Serde)?; Ok(Some(resp)) } } } fn is_mounted(mp: impl AsRef<Path>) -> Result<bool> { let mounts = CString::new("/proc/self/mounts").unwrap(); let ty 
= CString::new("r").unwrap(); let mounts_stream = unsafe { libc::setmntent( mounts.as_ptr() as *const libc::c_char, ty.as_ptr() as *const libc::c_char, ) }; loop { let mnt = unsafe { libc::getmntent(mounts_stream) }; if mnt as u32 == libc::PT_NULL { break; } if unsafe { CStr::from_ptr((*mnt).mnt_dir) } == CString::new(mp.as_ref().as_os_str().as_bytes())?.as_c_str() { unsafe { libc::endmntent(mounts_stream) }; return Ok(true); } } unsafe { libc::endmntent(mounts_stream) }; Ok(false) } fn is_sock_residual(sock: impl AsRef<Path>) -> bool { if metadata(&sock).is_ok() { return UnixStream::connect(&sock).is_err(); } false } fn is_crashed(path: impl AsRef<Path>, sock: &impl AsRef<Path>) -> Result<bool> { if is_mounted(path)? && is_sock_residual(sock) { warn!("A previous daemon crashed! Try to failover later."); return Ok(true); } Ok(false) } fn calc_fuse_conn(mp: impl AsRef<Path>) -> Result<u64> { let st = metadata(mp)?; let dev = st.st_dev(); let (major, minor) = (major(dev), minor(dev)); Ok(major << 20 | minor) } #[allow(clippy::too_many_arguments)] pub fn create_nydus_daemon( mountpoint: &str, vfs: Arc<Vfs>, supervisor: Option<String>, id: Option<String>, threads_cnt: u32, api_sock: Option<impl AsRef<Path>>, upgrade: bool, fp: FailoverPolicy, mount_cmd: Option<FsBackendMountCmd>, bti: BuildTimeInfo, ) -> Result<Arc<dyn NydusDaemon + Send>> { let (trigger, events_rx) = channel::<DaemonStateMachineInput>(); let session = FuseSession::new(Path::new(mountpoint), "rafs", "")?; let upgrade_mgr = supervisor .as_ref() .map(|s| Mutex::new(UpgradeManager::new(s.to_string().into()))); let (tx, rx) = channel::<JoinHandle<Result<()>>>(); let (result_sender, result_receiver) = channel::<DaemonResult<()>>(); let daemon = Arc::new(FusedevDaemon { session: Mutex::new(session), server: Arc::new(Server::new(vfs.clone())), vfs, thread_tx: Mutex::new(Some(tx)), thread_rx: Mutex::new(rx), running_threads: AtomicI32::new(0), event_fd: EventFd::new(0).unwrap(), state: 
AtomicI32::new(DaemonState::INIT as i32), threads_cnt, trigger: Arc::new(Mutex::new(trigger)), result_receiver: Mutex::new(result_receiver), supervisor, id, conn: AtomicU64::new(0), failover_policy: fp, upgrade_mgr, backend_collection: Default::default(), bti, inflight_ops: Mutex::new(Vec::new()), }); let machine = DaemonStateMachineContext::new(daemon.clone(), events_rx, result_sender); machine.kick_state_machine()?; if (api_sock.as_ref().is_some() && !upgrade && !is_crashed(mountpoint, api_sock.as_ref().unwrap())?) || api_sock.is_none() { if let Some(cmd) = mount_cmd { daemon.mount(cmd)?; } daemon.session.lock().unwrap().mount()?; daemon .on_event(DaemonStateMachineInput::Mount) .map_err(|e| eother!(e))?; daemon .conn .store(calc_fuse_conn(mountpoint)?, Ordering::Relaxed); } Ok(daemon) }
drop( self.thread_tx .lock() .expect("Not expect poisoned lock") .take() .unwrap(), )
call_expression
[ { "content": "fn register_event(epoll_fd: c_int, fd: RawFd, evt: Events, data: u64) -> io::Result<()> {\n\n let event = Event::new(evt, data);\n\n epoll::ctl(epoll_fd, ControlOptions::EPOLL_CTL_ADD, fd, event)\n\n}\n\n\n\nimpl FuseChannel {\n\n fn new(fd: c_int, evtfd: EventFd, bufsize: usize) -> io::Result<Self> {\n\n const EPOLL_EVENTS_LEN: usize = 100;\n\n let epoll_fd = epoll::create(true)?;\n\n\n\n register_event(epoll_fd, fd, Events::EPOLLIN, 0)?;\n\n\n\n let exit_evtfd = evtfd.try_clone().unwrap();\n\n register_event(\n\n epoll_fd,\n\n exit_evtfd.as_raw_fd(),\n\n Events::EPOLLIN,\n\n EXIT_FUSE_SERVICE,\n\n )?;\n\n\n", "file_path": "utils/src/fuse.rs", "rank": 0, "score": 268758.52761526516 }, { "content": "pub fn copyv(src: &[u8], dst: &[VolatileSlice], offset: u64, mut max_size: usize) -> Result<usize> {\n\n let mut offset = offset as usize;\n\n let mut size: usize = 0;\n\n if max_size > src.len() {\n\n max_size = src.len()\n\n }\n\n\n\n for s in dst.iter() {\n\n if offset >= src.len() || size >= src.len() {\n\n break;\n\n }\n\n let mut len = max_size - size;\n\n if offset + len > src.len() {\n\n len = src.len() - offset;\n\n }\n\n if len > s.len() {\n\n len = s.len();\n\n }\n\n\n\n s.write_slice(&src[offset..offset + len], 0)\n\n .map_err(|e| einval!(e))?;\n\n offset += len;\n\n size += len;\n\n }\n\n\n\n Ok(size)\n\n}\n\n\n", "file_path": "storage/src/utils.rs", "rank": 1, "score": 266650.54762437934 }, { "content": "pub fn readv(fd: RawFd, bufs: &[VolatileSlice], offset: u64, max_size: usize) -> Result<usize> {\n\n if bufs.is_empty() {\n\n return Ok(0);\n\n }\n\n\n\n let mut size: usize = 0;\n\n let mut iovecs: Vec<IoVec<&mut [u8]>> = Vec::new();\n\n\n\n for buf in bufs {\n\n let mut exceed = false;\n\n let len = if size + buf.len() > max_size {\n\n exceed = true;\n\n max_size - size\n\n } else {\n\n buf.len()\n\n };\n\n size += len;\n\n let iov = IoVec::from_mut_slice(unsafe { from_raw_parts_mut(buf.as_ptr(), len) });\n\n iovecs.push(iov);\n\n if 
exceed {\n", "file_path": "storage/src/utils.rs", "rank": 2, "score": 261118.30825563762 }, { "content": "/// Overflow can fail this rounder if the base value is large enough with 4095 added.\n\npub fn try_round_up_4k<U: TryFrom<u64>, T: Into<u64>>(x: T) -> Option<U> {\n\n let t = 4095u64;\n\n if let Some(v) = x.into().checked_add(t) {\n\n let z = v & (!t);\n\n z.try_into().ok()\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "utils/src/lib.rs", "rank": 3, "score": 258110.60651515814 }, { "content": "/// Display error messages with line number, file path and optional backtrace.\n\npub fn make_error(err: std::io::Error, raw: impl Debug, file: &str, line: u32) -> std::io::Error {\n\n if cfg!(debug_assertions) {\n\n if let Ok(val) = env::var(\"RUST_BACKTRACE\") {\n\n if val.trim() != \"0\" {\n\n error!(\"Stack:\\n{:?}\", Backtrace::new());\n\n error!(\"Error:\\n\\t{:?}\\n\\tat {}:{}\", raw, file, line);\n\n return err;\n\n }\n\n }\n\n }\n\n error!(\n\n \"Error:\\n\\t{:?}\\n\\tat {}:{}\\n\\tnote: enable `RUST_BACKTRACE=1` env to display a backtrace\",\n\n raw, file, line\n\n );\n\n err\n\n}\n\n\n\n/// Define error macro like `x!()` or `x!(err)`.\n\n/// Note: The `x!()` macro will convert any origin error (Os, Simple, Custom) to Custom error.\n\nmacro_rules! 
define_error_macro {\n", "file_path": "error/src/error.rs", "rank": 4, "score": 256025.44229534664 }, { "content": "pub fn new(id: &str) -> Arc<GlobalIoStats> {\n\n let c = Arc::new(GlobalIoStats {\n\n id: id.to_string(),\n\n ..Default::default()\n\n });\n\n IOS_SET.write().unwrap().insert(id.to_string(), c.clone());\n\n c.init();\n\n c\n\n}\n\n\n", "file_path": "utils/src/metrics.rs", "rank": 5, "score": 250989.3065704358 }, { "content": "/// A customized readahead function to ask kernel to fault in all pages from offset to end.\n\n///\n\n/// Call libc::readahead on every 128KB range because otherwise readahead stops at kernel bdi\n\n/// readahead size which is 128KB by default.\n\npub fn readahead(fd: libc::c_int, mut offset: u64, end: u64) {\n\n let mut count;\n\n offset = round_down_4k(offset);\n\n loop {\n\n if offset >= end {\n\n break;\n\n }\n\n // Kernel default 128KB readahead size\n\n count = std::cmp::min(128 << 10, end - offset);\n\n unsafe { libc::readahead(fd, offset as i64, count as usize) };\n\n offset += count;\n\n }\n\n}\n\n\n", "file_path": "storage/src/utils.rs", "rank": 6, "score": 248423.5616969523 }, { "content": "/// Parse a utf8 byte slice into two strings.\n\npub fn parse_string(buf: &[u8]) -> Result<(&str, &str)> {\n\n std::str::from_utf8(buf)\n\n .map(|origin| {\n\n if let Some(pos) = origin.find('\\0') {\n\n let (a, b) = origin.split_at(pos);\n\n (a, &b[1..])\n\n } else {\n\n (origin, \"\")\n\n }\n\n })\n\n .map_err(|e| einval!(format!(\"failed in parsing string, {:?}\", e)))\n\n}\n\n\n", "file_path": "rafs/src/metadata/layout/mod.rs", "rank": 7, "score": 241302.33329639415 }, { "content": "pub fn export_events() -> IoStatsResult<String> {\n\n serde_json::to_string(ERROR_HOLDER.lock().unwrap().deref()).map_err(IoStatsError::Serialize)\n\n}\n\n\n", "file_path": "utils/src/metrics.rs", "rank": 8, "score": 234268.18179867475 }, { "content": "pub fn div_round_up(n: u64, d: u64) -> u64 {\n\n (n + d - 1) / d\n\n}\n\n\n", "file_path": 
"utils/src/lib.rs", "rank": 9, "score": 229212.06212540326 }, { "content": "pub fn export_blobcache_metrics(id: &Option<String>) -> IoStatsResult<String> {\n\n let metrics = BLOBCACHE_METRICS.read().unwrap();\n\n\n\n match id {\n\n Some(k) => metrics\n\n .get(k)\n\n .ok_or(IoStatsError::NoCounter)\n\n .map(|v| v.export_metrics())?,\n\n None => {\n\n if metrics.len() == 1 {\n\n if let Some(m) = metrics.values().next() {\n\n return m.export_metrics();\n\n }\n\n }\n\n Err(IoStatsError::NoCounter)\n\n }\n\n }\n\n}\n\n\n", "file_path": "utils/src/metrics.rs", "rank": 10, "score": 227108.5234824437 }, { "content": "#[allow(clippy::useless_let_if_seq)]\n\npub fn new(config: serde_json::value::Value, id: Option<&str>) -> Result<Registry> {\n\n let common_config: CommonConfig =\n\n serde_json::from_value(config.clone()).map_err(|e| einval!(e))?;\n\n let retry_limit = common_config.retry_limit;\n\n let request = Request::new(common_config)?;\n\n\n\n let config: RegistryConfig = serde_json::from_value(config).map_err(|e| einval!(e))?;\n\n\n\n let auth = trim(config.auth);\n\n let registry_token = trim(config.registry_token);\n\n\n\n let (username, password) = if let Some(auth) = &auth {\n\n let auth = base64::decode(auth.as_bytes()).map_err(|e| {\n\n einval!(format!(\n\n \"Invalid base64 encoded registry auth config: {:?}\",\n\n e\n\n ))\n\n })?;\n\n let auth = std::str::from_utf8(&auth).map_err(|e| {\n\n einval!(format!(\n", "file_path": "storage/src/backend/registry.rs", "rank": 11, "score": 221376.41122157034 }, { "content": "pub fn new(config: serde_json::value::Value, id: Option<&str>) -> Result<Oss> {\n\n let common_config: CommonConfig =\n\n serde_json::from_value(config.clone()).map_err(|e| einval!(e))?;\n\n let retry_limit = common_config.retry_limit;\n\n let request = Request::new(common_config)?;\n\n\n\n let config: OssConfig = serde_json::from_value(config).map_err(|e| einval!(e))?;\n\n\n\n Ok(Oss {\n\n scheme: config.scheme,\n\n object_prefix: 
config.object_prefix,\n\n endpoint: config.endpoint,\n\n access_key_id: config.access_key_id,\n\n access_key_secret: config.access_key_secret,\n\n bucket_name: config.bucket_name,\n\n request,\n\n retry_limit,\n\n metrics: id.map(|i| BackendMetrics::new(i, \"oss\")),\n\n id: id.map(|i| i.to_string()),\n\n })\n", "file_path": "storage/src/backend/oss.rs", "rank": 12, "score": 221371.57267434784 }, { "content": "pub fn round_down_4k(x: u64) -> u64 {\n\n x & (!4095u64)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_rounders() {\n\n assert_eq!(round_down_4k(0), 0);\n\n assert_eq!(round_down_4k(100), 0);\n\n assert_eq!(round_down_4k(4300), 4096);\n\n assert_eq!(round_down_4k(4096), 4096);\n\n assert_eq!(round_down_4k(4095), 0);\n\n assert_eq!(round_down_4k(4097), 4096);\n\n assert_eq!(round_down_4k(u64::MAX - 1), u64::MAX - 4095);\n\n assert_eq!(round_down_4k(u64::MAX - 4095), u64::MAX - 4095);\n\n // zero is rounded up to zero\n\n assert_eq!(try_round_up_4k::<i32, _>(0u32), Some(0i32));\n", "file_path": "utils/src/lib.rs", "rank": 13, "score": 219842.49679942598 }, { "content": "/// Setup logging infrastructure for application.\n\n///\n\n/// `log_file_path` is an absolute path to logging files or relative path from current working\n\n/// directory to logging file.\n\n/// Flexi logger always appends a suffix to file name whose default value is \".log\"\n\n/// unless we set it intentionally. I don't like this passion. When the basename of `log_file_path`\n\n/// is \"bar\", the newly created log file will be \"bar.log\"\n\npub fn setup_logging(log_file_path: Option<PathBuf>, level: LevelFilter) -> Result<()> {\n\n if let Some(ref path) = log_file_path {\n\n // Do not try to canonicalize the path since the file may not exist yet.\n\n\n\n // We rely on rust `log` macro to limit current log level rather than `flexi_logger`\n\n // So we set `flexi_logger` log level to \"trace\" which is High enough. 
Otherwise, we\n\n // can't change log level to a higher level than what is passed to `flexi_logger`.\n\n let mut logger = Logger::with_env_or_str(\"trace\")\n\n .log_to_file()\n\n .suppress_timestamp()\n\n .append()\n\n .format(opt_format);\n\n\n\n // Parse log file to get the `basename` and `suffix`(extension) because `flexi_logger`\n\n // will automatically add `.log` suffix if we don't set explicitly, see:\n\n // https://github.com/emabee/flexi_logger/issues/74\n\n let basename = path\n\n .file_stem()\n\n .ok_or_else(|| {\n\n eprintln!(\"invalid file name input {:?}\", path);\n", "file_path": "app/src/lib.rs", "rank": 14, "score": 219784.00419788534 }, { "content": "/// Parse a byte slice into xattr pairs and invoke the callback for each xattr pair.\n\n///\n\n/// The iteration breaks if the callback returns false.\n\npub fn parse_xattr<F>(data: &[u8], size: usize, mut cb: F) -> Result<()>\n\nwhere\n\n F: FnMut(&OsStr, XattrValue) -> bool,\n\n{\n\n if data.len() < size {\n\n return Err(einval!(\"invalid xattr content size\"));\n\n }\n\n\n\n let mut rest_data = &data[0..size];\n\n let mut i: usize = 0;\n\n\n\n while i < size {\n\n if rest_data.len() < size_of::<u32>() {\n\n return Err(einval!(\n\n \"invalid xattr content, no enough data for xattr pair size\"\n\n ));\n\n }\n\n\n\n let (pair_size, rest) = rest_data.split_at(size_of::<u32>());\n\n let pair_size = u32::from_le_bytes(\n", "file_path": "rafs/src/metadata/layout/mod.rs", "rank": 15, "score": 219443.6828590122 }, { "content": "pub fn new(config: serde_json::value::Value, id: Option<&str>) -> Result<LocalFs> {\n\n let config: LocalFsConfig = serde_json::from_value(config).map_err(|e| einval!(e))?;\n\n\n\n if config.blob_file.is_empty() && config.dir.is_empty() {\n\n return Err(einval!(\"blob file or dir is required\"));\n\n }\n\n\n\n let metrics = id.map(|i| BackendMetrics::new(i, \"localfs\"));\n\n if !config.blob_file.is_empty() {\n\n return Ok(LocalFs {\n\n blob_file: config.blob_file,\n\n readahead: 
config.readahead,\n\n readahead_sec: config.readahead_sec,\n\n file_table: RwLock::new(HashMap::new()),\n\n metrics,\n\n ..Default::default()\n\n });\n\n }\n\n\n\n Ok(LocalFs {\n\n dir: config.dir,\n\n readahead: config.readahead,\n\n readahead_sec: config.readahead_sec,\n\n file_table: RwLock::new(HashMap::new()),\n\n metrics,\n\n ..Default::default()\n\n })\n\n}\n\n\n", "file_path": "storage/src/backend/localfs.rs", "rank": 16, "score": 219329.20877006894 }, { "content": "/// Start a HTTP server parsing http requests and send to nydus API server a concrete\n\n/// request to operate nydus or fetch working status.\n\n/// The HTTP server sends request by `to_api` channel and wait for response from `from_api` channel\n\n/// `api_notifier` is used to notify an execution context to fetch above request and handle it.\n\n/// We can't forward signal to native rust thread, so we rely on `exit_evtfd` to notify\n\n/// the server to exit. Therefore, it adds the unix domain socket fd receiving http request\n\n/// to a global epoll_fd associated with a event_fd which will be used later to notify\n\n/// the server thread to exit.\n\npub fn start_http_thread(\n\n path: &str,\n\n api_notifier: EventFd,\n\n to_api: Sender<ApiRequest>,\n\n from_api: Receiver<ApiResponse>,\n\n exit_evtfd: EventFd,\n\n) -> Result<thread::JoinHandle<Result<()>>> {\n\n // Try to remove existed unix domain socket\n\n std::fs::remove_file(path).unwrap_or_default();\n\n let socket_path = PathBuf::from(path);\n\n\n\n let thread = thread::Builder::new()\n\n .name(\"http-server\".to_string())\n\n .spawn(move || {\n\n let epoll_fd = epoll::create(true).unwrap();\n\n\n\n let mut server = HttpServer::new(socket_path).unwrap();\n\n // Must start the server successfully or just die by panic\n\n server.start_server().unwrap();\n\n epoll::ctl(\n", "file_path": "api/src/http.rs", "rank": 17, "score": 202612.69843185515 }, { "content": "fn default_readahead_sec() -> u32 {\n\n BLOB_ACCESS_RECORD_SECOND\n\n}\n\n\n", 
"file_path": "storage/src/backend/localfs.rs", "rank": 18, "score": 200852.8318104209 }, { "content": "/// Convert a byte slice into OsStr.\n\npub fn bytes_to_os_str(buf: &[u8]) -> &OsStr {\n\n OsStr::from_bytes(buf)\n\n}\n\n\n", "file_path": "rafs/src/metadata/layout/mod.rs", "rank": 19, "score": 189615.43714293605 }, { "content": "/// Umount a fuse file system\n\nfn fuse_kern_umount(mountpoint: &str, file: File) -> io::Result<()> {\n\n let mut fds = [PollFd::new(file.as_raw_fd(), PollFlags::empty())];\n\n let res = poll(&mut fds, 0);\n\n\n\n // Drop to close fuse session fd, otherwise synchronous umount\n\n // can recurse into filesystem and deadlock.\n\n drop(file);\n\n\n\n if res.is_ok() {\n\n // POLLERR means the file system is already umounted,\n\n // or the connection was severed via /sys/fs/fuse/connections/NNN/abort\n\n if let Some(event) = fds[0].revents() {\n\n if event == PollFlags::POLLERR {\n\n return Ok(());\n\n }\n\n }\n\n }\n\n\n\n umount2(mountpoint, MntFlags::MNT_DETACH).map_err(|e| eother!(e))\n\n}\n", "file_path": "utils/src/fuse.rs", "rank": 20, "score": 181368.95617300447 }, { "content": "pub fn exec(cmd: &str, output: bool) -> Result<String> {\n\n debug!(\"exec `{}`\", cmd);\n\n\n\n if output {\n\n let output = Command::new(\"sh\")\n\n .arg(\"-c\")\n\n .arg(cmd)\n\n .env(\"RUST_BACKTRACE\", \"1\")\n\n .output()?;\n\n\n\n if !output.status.success() {\n\n return Err(eother!(\"exit with non-zero status\"));\n\n }\n\n let stdout = std::str::from_utf8(&output.stdout).map_err(|e| einval!(e))?;\n\n\n\n return Ok(stdout.to_string());\n\n }\n\n\n\n let mut child = Command::new(\"sh\")\n\n .arg(\"-c\")\n", "file_path": "utils/src/exec.rs", "rank": 21, "score": 181010.1018948201 }, { "content": "fn default_threads_count() -> usize {\n\n 8\n\n}\n\n\n", "file_path": "rafs/src/fs.rs", "rank": 22, "score": 179278.83737415585 }, { "content": "/// A customized buf allocator that avoids zeroing\n\npub fn alloc_buf(size: usize) -> Vec<u8> {\n\n let mut buf = 
Vec::with_capacity(size);\n\n unsafe { buf.set_len(size) };\n\n buf\n\n}\n\n\n", "file_path": "storage/src/utils.rs", "rank": 23, "score": 176702.44714586606 }, { "content": "pub fn respond(resp: Response) -> RequestResult<Response> {\n\n if is_success_status(resp.status()) {\n\n return Ok(resp);\n\n }\n\n let msg = resp.text().map_err(RequestError::Format)?;\n\n Err(RequestError::ErrorWithMsg(msg))\n\n}\n\n\n\nimpl Request {\n\n fn build_client(proxy: &str, config: &CommonConfig) -> Result<Client> {\n\n let connect_timeout = if config.connect_timeout != 0 {\n\n Some(Duration::from_secs(config.connect_timeout))\n\n } else {\n\n None\n\n };\n\n let timeout = if config.timeout != 0 {\n\n Some(Duration::from_secs(config.timeout))\n\n } else {\n\n None\n\n };\n", "file_path": "storage/src/backend/request.rs", "rank": 24, "score": 174880.91543932873 }, { "content": "pub fn export_global_stats(name: &Option<String>) -> Result<String, IoStatsError> {\n\n // With only one rafs instance, we allow caller to ask for an unknown ios name.\n\n let ios_set = IOS_SET.read().unwrap();\n\n\n\n match name {\n\n Some(k) => ios_set\n\n .get(k)\n\n .ok_or(IoStatsError::NoCounter)\n\n .map(|v| v.export_global_stats())?,\n\n None => {\n\n if ios_set.len() == 1 {\n\n if let Some(ios) = ios_set.values().next() {\n\n return ios.export_global_stats();\n\n }\n\n }\n\n Err(IoStatsError::NoCounter)\n\n }\n\n }\n\n}\n\n\n", "file_path": "utils/src/metrics.rs", "rank": 25, "score": 174383.16524621376 }, { "content": "pub fn export_files_access_pattern(name: &Option<String>) -> Result<String, IoStatsError> {\n\n let ios_set = IOS_SET.read().unwrap();\n\n match name {\n\n Some(k) => ios_set\n\n .get(k)\n\n .ok_or(IoStatsError::NoCounter)\n\n .map(|v| v.export_files_access_patterns())?,\n\n None => {\n\n if ios_set.len() == 1 {\n\n if let Some(ios) = ios_set.values().next() {\n\n return ios.export_files_access_patterns();\n\n }\n\n }\n\n Err(IoStatsError::NoCounter)\n\n }\n\n }\n\n}\n\n\n", 
"file_path": "utils/src/metrics.rs", "rank": 26, "score": 172817.7611273333 }, { "content": "// For compatibility reason, we use liblz4 version to compress/decompress directly\n\n// with data blocks so that we don't really care about lz4 header magic numbers like\n\n// as being done with all these rust lz4 implementations\n\npub fn compress(src: &[u8], algorithm: Algorithm) -> Result<(Cow<[u8]>, bool)> {\n\n let src_size = src.len();\n\n if src_size == 0 {\n\n return Ok((Cow::Borrowed(src), false));\n\n }\n\n\n\n let compressed = match algorithm {\n\n Algorithm::None => return Ok((Cow::Borrowed(src), false)),\n\n Algorithm::Lz4Block => lz4_compress(src)?,\n\n Algorithm::GZip => {\n\n let dst: Vec<u8> = Vec::new();\n\n let mut gz = GzEncoder::new(dst, Compression::default());\n\n gz.write_all(src)?;\n\n gz.finish()?\n\n }\n\n };\n\n\n\n // Abandon compressed data when compression ratio greater than COMPRESSION_MINIMUM_RATIO\n\n if (COMPRESSION_MINIMUM_RATIO == 100 && compressed.len() >= src_size)\n\n || ((100 * compressed.len() / src_size) >= COMPRESSION_MINIMUM_RATIO)\n\n {\n\n return Ok((Cow::Borrowed(src), false));\n\n }\n\n Ok((Cow::Owned(compressed), true))\n\n}\n\n\n", "file_path": "storage/src/compress/mod.rs", "rank": 27, "score": 171064.44280666354 }, { "content": "/// Parse a byte slice into xattr name list.\n\npub fn parse_xattr_names(data: &[u8], size: usize) -> Result<Vec<XattrName>> {\n\n let mut result = Vec::new();\n\n\n\n parse_xattr(data, size, |name, _| {\n\n result.push(name.as_bytes().to_vec());\n\n true\n\n })?;\n\n\n\n Ok(result)\n\n}\n\n\n", "file_path": "rafs/src/metadata/layout/mod.rs", "rank": 28, "score": 168622.47693173419 }, { "content": "pub fn export_backend_metrics(name: &Option<String>) -> IoStatsResult<String> {\n\n let metrics = BACKEND_METRICS.read().unwrap();\n\n\n\n match name {\n\n Some(k) => metrics\n\n .get(k)\n\n .ok_or(IoStatsError::NoCounter)\n\n .map(|v| v.export_metrics())?,\n\n None => {\n\n if metrics.len() == 1 {\n\n 
if let Some(m) = metrics.values().next() {\n\n return m.export_metrics();\n\n }\n\n }\n\n Err(IoStatsError::NoCounter)\n\n }\n\n }\n\n}\n\n\n", "file_path": "utils/src/metrics.rs", "rank": 29, "score": 167269.49304976244 }, { "content": "// A helper to extract blob table entries from disk.\n\nstruct BlobEntryFrontPart(u32, u32);\n\n\n\nimpl RafsV5BlobTable {\n\n pub fn new() -> Self {\n\n RafsV5BlobTable {\n\n entries: Vec::new(),\n\n extended: RafsV5ExtBlobTable::new(),\n\n }\n\n }\n\n\n\n /// Get blob table size, aligned with RAFS_ALIGNMENT bytes\n\n pub fn size(&self) -> usize {\n\n if self.entries.is_empty() {\n\n return 0;\n\n }\n\n // Blob entry split with '\\0'\n\n rafsv5_align(\n\n self.entries.iter().fold(0usize, |size, entry| {\n\n let entry_size = size_of::<u32>() * 2 + entry.blob_id.len();\n\n size + entry_size + 1\n", "file_path": "rafs/src/metadata/layout/v5.rs", "rank": 30, "score": 160397.0789438493 }, { "content": "/// Parse a 'buf' to xattr value by xattr name.\n\npub fn parse_xattr_value(data: &[u8], size: usize, name: &OsStr) -> Result<Option<XattrValue>> {\n\n let mut value = None;\n\n\n\n parse_xattr(data, size, |_name, _value| {\n\n if _name == name {\n\n value = Some(_value);\n\n // stop the iteration if we found the xattr name.\n\n return false;\n\n }\n\n true\n\n })?;\n\n\n\n Ok(value)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::convert::TryFrom;\n\n use std::ffi::OsString;\n", "file_path": "rafs/src/metadata/layout/mod.rs", "rank": 31, "score": 160028.00591933227 }, { "content": "type AccessLogEntry = (u64, u32, u32);\n\n\n", "file_path": "storage/src/backend/localfs.rs", "rank": 32, "score": 155226.03563054762 }, { "content": "pub fn new(\n\n work_dir: &Path,\n\n enable_cache: bool,\n\n cache_compressed: bool,\n\n rafs_mode: RafsMode,\n\n api_sock: PathBuf,\n\n digest_validate: bool,\n\n) -> Nydusd {\n\n let cache_path = work_dir.join(\"cache\");\n\n fs::create_dir_all(cache_path).unwrap();\n\n\n\n let cache = 
format!(\n\n r###\"\n\n ,\"cache\": {{\n\n \"type\": \"blobcache\",\n\n \"config\": {{\n\n \"compressed\": {},\n\n \"work_dir\": {:?}\n\n }}\n\n }}\n", "file_path": "tests/nydusd.rs", "rank": 33, "score": 154398.50611320155 }, { "content": "/// Decompress a source slice or file stream into destination slice, with provided compression algorithm.\n\n/// Use the file as decompress source if provided.\n\npub fn decompress(\n\n src: &[u8],\n\n src_file: Option<File>,\n\n dst: &mut [u8],\n\n algorithm: Algorithm,\n\n) -> Result<usize> {\n\n match algorithm {\n\n Algorithm::None => Ok(dst.len()),\n\n Algorithm::Lz4Block => lz4_decompress(src, dst),\n\n Algorithm::GZip => {\n\n if let Some(f) = src_file {\n\n let mut gz = GzDecoder::new(BufReader::new(f));\n\n gz.read_exact(dst)?;\n\n } else {\n\n let mut gz = GzDecoder::new(src);\n\n gz.read_exact(dst)?;\n\n };\n\n Ok(dst.len())\n\n }\n\n }\n", "file_path": "storage/src/compress/mod.rs", "rank": 34, "score": 151731.76653132995 }, { "content": "pub fn new_backend(\n\n config: BackendConfig,\n\n id: &str,\n\n) -> IOResult<Arc<dyn BlobBackend + Send + Sync>> {\n\n match config.backend_type.as_str() {\n\n #[cfg(feature = \"backend-oss\")]\n\n \"oss\" => Ok(Arc::new(oss::new(config.backend_config, Some(id))?)),\n\n #[cfg(feature = \"backend-registry\")]\n\n \"registry\" => Ok(Arc::new(registry::new(config.backend_config, Some(id))?)),\n\n #[cfg(feature = \"backend-localfs\")]\n\n \"localfs\" => Ok(Arc::new(localfs::new(config.backend_config, Some(id))?)),\n\n _ => Err(einval!(format!(\n\n \"unsupported backend type '{}'\",\n\n config.backend_type\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "storage/src/factory.rs", "rank": 35, "score": 151727.6209370065 }, { "content": "pub fn new(\n\n config: CacheConfig,\n\n backend: Arc<dyn BlobBackend + Sync + Send>,\n\n compressor: compress::Algorithm,\n\n digester: digest::Algorithm,\n\n id: &str,\n\n) -> Result<Arc<BlobCache>> {\n\n let blob_config: BlobCacheConfig =\n\n 
serde_json::from_value(config.cache_config).map_err(|e| einval!(e))?;\n\n let work_dir = {\n\n let path = fs::metadata(&blob_config.work_dir)\n\n .or_else(|_| {\n\n fs::create_dir_all(&blob_config.work_dir)?;\n\n fs::metadata(&blob_config.work_dir)\n\n })\n\n .map_err(|e| {\n\n last_error!(format!(\n\n \"fail to stat blobcache work_dir {}: {}\",\n\n blob_config.work_dir, e\n\n ))\n", "file_path": "storage/src/cache/blobcache.rs", "rank": 36, "score": 151727.6209370065 }, { "content": "pub fn new(\n\n config: CacheConfig,\n\n backend: Arc<dyn BlobBackend + Sync + Send>,\n\n compressor: compress::Algorithm,\n\n digester: digest::Algorithm,\n\n) -> Result<DummyCache> {\n\n Ok(DummyCache {\n\n backend,\n\n validate: config.cache_validate,\n\n compressor,\n\n digester,\n\n })\n\n}\n", "file_path": "storage/src/cache/dummycache.rs", "rank": 37, "score": 151727.6209370065 }, { "content": "pub fn export_files_stats(\n\n name: &Option<String>,\n\n latest_read_files: bool,\n\n) -> Result<String, IoStatsError> {\n\n let ios_set = IOS_SET.read().unwrap();\n\n\n\n match name {\n\n Some(k) => ios_set.get(k).ok_or(IoStatsError::NoCounter).map(|v| {\n\n if !latest_read_files {\n\n v.export_files_stats()\n\n } else {\n\n Ok(v.export_latest_read_files())\n\n }\n\n })?,\n\n None => {\n\n if ios_set.len() == 1 {\n\n if let Some(ios) = ios_set.values().next() {\n\n return if !latest_read_files {\n\n ios.export_files_stats()\n\n } else {\n\n Ok(ios.export_latest_read_files())\n\n };\n\n }\n\n }\n\n Err(IoStatsError::NoCounter)\n\n }\n\n }\n\n}\n\n\n", "file_path": "utils/src/metrics.rs", "rank": 38, "score": 150441.21517261013 }, { "content": "pub fn new_rw_layer(\n\n config: Config,\n\n compressor: compress::Algorithm,\n\n digester: digest::Algorithm,\n\n id: &str,\n\n) -> IOResult<Arc<dyn RafsCache + Send + Sync>> {\n\n let backend = new_backend(config.backend, id)?;\n\n match config.cache.cache_type.as_str() {\n\n \"blobcache\" => Ok(blobcache::new(\n\n config.cache,\n\n backend,\n\n 
compressor,\n\n digester,\n\n id,\n\n )?),\n\n _ => Ok(Arc::new(dummycache::new(\n\n config.cache,\n\n backend,\n\n compressor,\n\n digester,\n\n )?)),\n\n }\n\n}\n", "file_path": "storage/src/factory.rs", "rank": 39, "score": 150441.21517261013 }, { "content": "\tID string\n", "file_path": "contrib/nydus-snapshotter/pkg/daemon/daemon.go", "rank": 40, "score": 147595.51921469855 }, { "content": "/// Mount a fuse file system\n\nfn fuse_kern_mount(\n\n mountpoint: &Path,\n\n fsname: &str,\n\n subtype: &str,\n\n flags: MsFlags,\n\n) -> io::Result<File> {\n\n let file = OpenOptions::new()\n\n .create(false)\n\n .read(true)\n\n .write(true)\n\n .open(FUSE_DEVICE)\n\n .map_err(|e| {\n\n error!(\"FUSE failed to open. {}\", e);\n\n e\n\n })?;\n\n\n\n let meta = mountpoint.metadata().map_err(|e| {\n\n error!(\"Can not get metadata from mount point. {}\", e);\n\n e\n\n })?;\n", "file_path": "utils/src/fuse.rs", "rank": 41, "score": 143704.8486797322 }, { "content": "#[derive(Serialize, Debug)]\n\nstruct ErrorMessage {\n\n code: String,\n\n message: String,\n\n}\n\n\n", "file_path": "api/src/http_endpoint.rs", "rank": 42, "score": 134573.4641628511 }, { "content": "fn kick_api_server(\n\n api_evt: &EventFd,\n\n to_api: &Sender<ApiRequest>,\n\n from_api: &Receiver<ApiResponse>,\n\n request: ApiRequest,\n\n) -> ApiResponse {\n\n to_api.send(request).map_err(ApiError::RequestSend)?;\n\n api_evt.write(1).map_err(ApiError::EventFdWrite)?;\n\n from_api.recv().map_err(ApiError::ResponseRecv)?\n\n}\n\n\n\n// Example:\n\n// <-- GET /\n\n// --> GET / 200 835ms 746b\n\n//\n\n\n", "file_path": "api/src/http.rs", "rank": 43, "score": 134165.486034195 }, { "content": "/// Dump program build and version information.\n\npub fn dump_program_info(prog_version: &str) {\n\n info!(\n\n \"Program Version: {}, Git Commit: {:?}, Build Time: {:?}, Profile: {:?}, Rustc Version: {:?}\",\n\n prog_version,\n\n built_info::GIT_COMMIT_HASH.unwrap_or_default(),\n\n built_info::BUILT_TIME_UTC,\n\n 
built_info::PROFILE,\n\n built_info::RUSTC_VERSION,\n\n );\n\n}\n\n\n\n/// Application build and version information.\n\n#[derive(Serialize, Clone)]\n\npub struct BuildTimeInfo {\n\n pub package_ver: String,\n\n pub git_commit: String,\n\n build_time: String,\n\n profile: String,\n\n rustc: String,\n\n}\n", "file_path": "app/src/lib.rs", "rank": 44, "score": 133743.7570104713 }, { "content": "struct BlobCacheState {\n\n /// Index blob info by blob index, HashMap<blob_index, (blob_file, blob_size, Arc<ChunkMap>)>.\n\n blob_map: HashMap<u32, (File, u64, Arc<dyn ChunkMap + Sync + Send>)>,\n\n work_dir: String,\n\n backend_size_valid: bool,\n\n metrics: Arc<BlobcacheMetrics>,\n\n backend: Arc<dyn BlobBackend + Sync + Send>,\n\n}\n\n\n\nimpl BlobCacheState {\n\n fn get(&self, blob: &RafsBlobEntry) -> Option<(RawFd, u64, Arc<dyn ChunkMap + Sync + Send>)> {\n\n self.blob_map\n\n .get(&blob.blob_index)\n\n .map(|(file, size, chunk_map)| (file.as_raw_fd(), *size, chunk_map.clone()))\n\n }\n\n\n\n fn set(\n\n &mut self,\n\n blob: &RafsBlobEntry,\n\n ) -> Result<(RawFd, u64, Arc<dyn ChunkMap + Sync + Send>)> {\n", "file_path": "storage/src/cache/blobcache.rs", "rank": 45, "score": 133277.0697665256 }, { "content": "// TODO: This function is too long... :-(\n\nfn kick_prefetch_workers(cache: Arc<BlobCache>) {\n\n for num in 0..cache.prefetch_ctx.threads_count {\n\n let blobcache = cache.clone();\n\n let rx = blobcache.mr_receiver.clone();\n\n // TODO: We now don't define prefetch policy. Prefetch works according to hints coming\n\n // from on-disk prefetch table or input arguments while nydusd starts. So better\n\n // we can have method to kill prefetch threads. But hopefully, we can add\n\n // another new prefetch policy triggering prefetch files belonging to the same\n\n // directory while one of them is read. 
We can easily get a continuous region on blob\n\n // that way.\n\n thread::Builder::new()\n\n .name(format!(\"prefetch_thread_{}\", num))\n\n .spawn(move || {\n\n blobcache.prefetch_ctx.grow_n(1);\n\n blobcache\n\n .metrics\n\n .prefetch_workers\n\n .fetch_add(1, Ordering::Relaxed);\n\n // Safe because channel must be established before prefetch workers\n\n 'wait_mr: while let Ok(mr) = rx.as_ref().unwrap().recv() {\n", "file_path": "storage/src/cache/blobcache.rs", "rank": 46, "score": 132854.10588110192 }, { "content": "#[derive(Clone)]\n\nstruct DirectMappingState {\n\n meta: RafsSuperMeta,\n\n inode_table: ManuallyDrop<RafsV5InodeTable>,\n\n blob_table: Arc<RafsV5BlobTable>,\n\n base: *const u8,\n\n end: *const u8,\n\n size: usize,\n\n fd: RawFd,\n\n mmapped_inode_table: bool,\n\n validate_digest: bool,\n\n}\n\n\n\nimpl DirectMappingState {\n\n fn new(meta: &RafsSuperMeta, validate_digest: bool) -> Self {\n\n DirectMappingState {\n\n meta: *meta,\n\n inode_table: ManuallyDrop::new(RafsV5InodeTable::default()),\n\n blob_table: Arc::new(RafsV5BlobTable::default()),\n\n fd: -1,\n\n base: std::ptr::null(),\n", "file_path": "rafs/src/metadata/direct_v5.rs", "rank": 47, "score": 132021.8192771863 }, { "content": "#[inline(always)]\n\nfn pagesize() -> usize {\n\n // Trivially safe\n\n unsafe { sysconf(_SC_PAGESIZE) as usize }\n\n}\n\n\n", "file_path": "utils/src/fuse.rs", "rank": 48, "score": 129865.28675320427 }, { "content": "pub fn is_success_status(status: StatusCode) -> bool {\n\n status >= StatusCode::OK && status < StatusCode::BAD_REQUEST\n\n}\n\n\n", "file_path": "storage/src/backend/request.rs", "rank": 49, "score": 128457.8864289623 }, { "content": "fn default_prefetch_all() -> bool {\n\n true\n\n}\n\n\n\n#[derive(Clone, Default, Deserialize)]\n\npub struct FsPrefetchControl {\n\n #[serde(default)]\n\n enable: bool,\n\n #[serde(default = \"default_threads_count\")]\n\n threads_count: usize,\n\n #[serde(default = \"default_merging_size\")]\n\n // In unit of 
Bytes\n\n merging_size: usize,\n\n #[serde(default)]\n\n // In unit of Bytes. It sets a limit to prefetch bandwidth usage in order to\n\n // reduce congestion with normal user IO.\n\n // bandwidth_rate == 0 -- prefetch bandwidth ratelimit disabled\n\n // bandwidth_rate > 0 -- prefetch bandwidth ratelimit enabled.\n\n // Please note that if the value is less than Rafs chunk size,\n\n // it will be raised to the chunk size.\n", "file_path": "rafs/src/fs.rs", "rank": 50, "score": 128418.53032402521 }, { "content": "/// Register signal handler for a signal.\n\npub fn register_signal_handler(sig: signal::Signal, handler: extern \"C\" fn(libc::c_int)) {\n\n let sa = signal::SigAction::new(\n\n signal::SigHandler::Handler(handler),\n\n signal::SaFlags::empty(),\n\n signal::SigSet::empty(),\n\n );\n\n\n\n unsafe {\n\n // Signal registration fails, just panic since nydusd won't work properly.\n\n signal::sigaction(sig, &sa).unwrap();\n\n }\n\n}\n", "file_path": "app/src/signal.rs", "rank": 51, "score": 128293.4055220256 }, { "content": "/// Readonly accessors for RAFS filesystem inodes.\n\n///\n\n/// The RAFS filesystem is a readonly filesystem, so does its inodes. 
The `RafsInode` trait acts\n\n/// as field accessors for those readonly inodes, to hide implementation details.\n\npub trait RafsInode {\n\n /// Validate the node for data integrity.\n\n ///\n\n /// The inode object may be transmuted from a raw buffer, read from an external file, so the\n\n /// caller must validate it before accessing any fields.\n\n fn validate(&self) -> Result<()>;\n\n\n\n fn get_entry(&self) -> Entry;\n\n fn get_attr(&self) -> Attr;\n\n fn get_name_size(&self) -> u16;\n\n fn get_symlink(&self) -> Result<OsString>;\n\n fn get_symlink_size(&self) -> u16;\n\n fn get_child_by_name(&self, name: &OsStr) -> Result<Arc<dyn RafsInode>>;\n\n fn get_child_by_index(&self, idx: Inode) -> Result<Arc<dyn RafsInode>>;\n\n fn get_child_index(&self) -> Result<u32>;\n\n fn get_child_count(&self) -> u32;\n\n fn get_chunk_info(&self, idx: u32) -> Result<Arc<dyn RafsChunkInfo>>;\n\n fn has_xattr(&self) -> bool;\n\n fn get_xattr(&self, name: &OsStr) -> Result<Option<XattrValue>>;\n\n fn get_xattrs(&self) -> Result<Vec<XattrName>>;\n", "file_path": "rafs/src/metadata/mod.rs", "rank": 52, "score": 127400.72631735858 }, { "content": "pub trait InodeStatsCounter {\n\n fn stats_fop_inc(&self, fop: StatsFop);\n\n fn stats_fop_err_inc(&self, fop: StatsFop);\n\n fn stats_cumulative(&self, fop: StatsFop, value: usize);\n\n}\n\n\n\nimpl InodeStatsCounter for InodeIoStats {\n\n fn stats_fop_inc(&self, fop: StatsFop) {\n\n self.fop_hits[fop as usize].fetch_add(1, Ordering::Relaxed);\n\n self.total_fops.fetch_add(1, Ordering::Relaxed);\n\n if fop == StatsFop::Open {\n\n self.nr_open.fetch_add(1, Ordering::Relaxed);\n\n // Below can't guarantee that load and store are atomic but it should be OK\n\n // for debug tracing info.\n\n if self.nr_open.load(Ordering::Relaxed) > self.nr_max_open.load(Ordering::Relaxed) {\n\n self.nr_max_open\n\n .store(self.nr_open.load(Ordering::Relaxed), Ordering::Relaxed)\n\n }\n\n }\n\n }\n", "file_path": "utils/src/metrics.rs", "rank": 53, "score": 
127395.16090039584 }, { "content": "fn default_merging_size() -> usize {\n\n 128 * 1024\n\n}\n\n\n", "file_path": "rafs/src/fs.rs", "rank": 54, "score": 127131.739249426 }, { "content": "pub trait RafsSuperInodes {\n\n fn get_max_ino(&self) -> Inode;\n\n\n\n fn get_inode(&self, ino: Inode, digest_validate: bool) -> Result<Arc<dyn RafsInode>>;\n\n\n\n fn validate_digest(\n\n &self,\n\n inode: Arc<dyn RafsInode>,\n\n recursive: bool,\n\n digester: digest::Algorithm,\n\n ) -> Result<bool>;\n\n}\n\n\n", "file_path": "rafs/src/metadata/mod.rs", "rank": 55, "score": 126136.94934166838 }, { "content": "#[cfg(any(feature = \"backend-oss\", feature = \"backend-registry\"))]\n\nfn default_http_scheme() -> String {\n\n \"https\".to_string()\n\n}\n", "file_path": "storage/src/backend/mod.rs", "rank": 56, "score": 125876.06752091573 }, { "content": "fn default_work_dir() -> String {\n\n \".\".to_string()\n\n}\n\n\n", "file_path": "storage/src/cache/blobcache.rs", "rank": 57, "score": 125876.06752091573 }, { "content": "pub fn log_level_to_verbosity(level: log::LevelFilter) -> usize {\n\n if level == log::LevelFilter::Off {\n\n 0\n\n } else {\n\n level as usize - 1\n\n }\n\n}\n\n\n\npub mod built_info {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/built.rs\"));\n\n}\n\n\n", "file_path": "app/src/lib.rs", "rank": 58, "score": 124097.7530315588 }, { "content": "#[derive(Default)]\n\nstruct Cache(RwLock<String>);\n", "file_path": "storage/src/backend/registry.rs", "rank": 59, "score": 122601.09201797131 }, { "content": "// API server has successfully processed the request, but can't fulfill that. Therefore,\n\n// a `error_response` is generated whose status code is 4XX or 5XX. 
With error response,\n\n// it still returns Ok(error_response) to http request handling framework, which means\n\n// nydusd api server receives the request and try handle it, even the request can't be fulfilled.\n\nfn convert_to_response<O: FnOnce(ApiError) -> HttpError>(api_resp: ApiResponse, op: O) -> Response {\n\n match api_resp {\n\n Ok(r) => {\n\n use ApiResponsePayload::*;\n\n match r {\n\n Empty => success_response(None),\n\n DaemonInfo(d) => success_response(Some(d)),\n\n Events(d) => success_response(Some(d)),\n\n FsFilesMetrics(d) => success_response(Some(d)),\n\n FsGlobalMetrics(d) => success_response(Some(d)),\n\n FsFilesPatterns(d) => success_response(Some(d)),\n\n BackendMetrics(d) => success_response(Some(d)),\n\n BlobcacheMetrics(d) => success_response(Some(d)),\n\n FsBackendInfo(d) => success_response(Some(d)),\n\n InflightMetrics(d) => success_response(Some(d)),\n\n }\n\n }\n\n Err(e) => {\n\n let sc = translate_status_code(&e);\n\n error_response(op(e), sc)\n", "file_path": "api/src/http_endpoint.rs", "rank": 60, "score": 120466.58068844749 }, { "content": "pub fn error_response(error: HttpError, status: StatusCode) -> Response {\n\n let mut response = Response::new(Version::Http11, status);\n\n\n\n let err_msg = ErrorMessage {\n\n code: \"UNDEFINED\".to_string(),\n\n message: format!(\"{:?}\", error),\n\n };\n\n response.set_body(Body::new(serde_json::to_string(&err_msg).unwrap()));\n\n\n\n response\n\n}\n\n\n", "file_path": "api/src/http_endpoint.rs", "rank": 61, "score": 119306.79199374387 }, { "content": "pub fn extract_query_part(req: &Request, key: &str) -> Option<String> {\n\n // Splicing req.uri with \"http:\" prefix might look weird, but since it depends on\n\n // crate `Url` to generate query_pairs HashMap, which is working on top of Url not Uri.\n\n // Better that we can add query part support to Micro-http in the future. 
But\n\n // right now, below way makes it easy to obtain query parts from uri.\n\n let http_prefix: String = String::from(\"http:\");\n\n let url = Url::parse(&(http_prefix + req.uri().get_abs_path()))\n\n .map_err(|e| {\n\n error!(\"Can't parse request {:?}\", e);\n\n e\n\n })\n\n .ok()?;\n\n let v: Option<String> = None;\n\n for (k, v) in url.query_pairs() {\n\n if k != key {\n\n continue;\n\n } else {\n\n trace!(\"Got query part {:?}\", (k, &v));\n\n return Some(v.into_owned());\n\n }\n\n }\n\n v\n\n}\n\n\n\nconst EVENT_UNIX_SOCKET: u64 = 1;\n\nconst EVENT_HTTP_DIE: u64 = 2;\n\n\n", "file_path": "api/src/http.rs", "rank": 62, "score": 118275.12067801232 }, { "content": "pub fn new<'a>(work_dir: &'a Path, whiteout_spec: &'a str) -> Builder<'a> {\n\n let builder = std::env::var(\"NYDUS_IMAGE\")\n\n .unwrap_or_else(|_| String::from(\"./target-fusedev/release/nydus-image\"));\n\n Builder {\n\n builder,\n\n work_dir,\n\n whiteout_spec,\n\n }\n\n}\n\n\n\nimpl<'a> Builder<'a> {\n\n fn create_dir(&mut self, path: &Path) {\n\n fs::create_dir_all(path).unwrap();\n\n }\n\n\n\n fn create_file(&mut self, path: &Path, data: &[u8]) {\n\n File::create(path).unwrap().write_all(data).unwrap();\n\n }\n\n\n\n fn copy_file(&mut self, src: &Path, dst: &Path) -> u64 {\n", "file_path": "tests/builder.rs", "rank": 63, "score": 115964.13909035313 }, { "content": "fn parse_body<'a, F: Deserialize<'a>>(b: &'a Body) -> Result<F, HttpError> {\n\n serde_json::from_slice::<F>(b.raw()).map_err(HttpError::ParseBody)\n\n}\n\n\n\n#[derive(Clone, Deserialize, Debug)]\n\npub struct DaemonConf {\n\n pub log_level: String,\n\n}\n\n\n\n/// Errors associated with Nydus management\n\n#[derive(Debug)]\n\npub enum HttpError {\n\n NoRoute,\n\n BadRequest,\n\n QueryString(String),\n\n /// API request receive error\n\n SerdeJsonDeserialize(SerdeError),\n\n SerdeJsonSerialize(SerdeError),\n\n ParseBody(SerdeError),\n\n /// Could not query daemon info\n", "file_path": "api/src/http_endpoint.rs", "rank": 64, 
"score": 113855.38712951173 }, { "content": "#[derive(Default)]\n\nstruct HashCache(RwLock<HashMap<String, String>>);\n\n\n\n#[derive(Debug)]\n\npub enum RegistryError {\n\n Common(String),\n\n Url(ParseError),\n\n Request(RequestError),\n\n Scheme(String),\n\n Auth(String),\n\n ResponseHead(String),\n\n Response(Error),\n\n Transport(reqwest::Error),\n\n}\n\n\n\nimpl From<RegistryError> for BackendError {\n\n fn from(error: RegistryError) -> Self {\n\n BackendError::Registry(error)\n\n }\n\n}\n\n\n", "file_path": "storage/src/backend/registry.rs", "rank": 65, "score": 113041.27034320286 }, { "content": "/// Check hash of data matches provided one\n\npub fn digest_check(data: &[u8], digest: &RafsDigest, digester: digest::Algorithm) -> bool {\n\n digest == &RafsDigest::from_buf(data, digester)\n\n}\n", "file_path": "storage/src/utils.rs", "rank": 66, "score": 112846.86524596192 }, { "content": "pub trait RafsSuperBlock: RafsSuperBlobs + RafsSuperInodes {\n\n fn load(&mut self, r: &mut RafsIoReader) -> Result<()>;\n\n\n\n fn update(&self, r: &mut RafsIoReader) -> RafsResult<()>;\n\n\n\n fn destroy(&mut self);\n\n}\n\n\n", "file_path": "rafs/src/metadata/mod.rs", "rank": 67, "score": 112665.74903305391 }, { "content": "\tSharedNydusDaemonID = \"shared_daemon\"\n", "file_path": "contrib/nydus-snapshotter/pkg/daemon/daemon.go", "rank": 68, "score": 111169.76686313316 }, { "content": "\tImageID string\n", "file_path": "contrib/nydus-snapshotter/pkg/daemon/daemon.go", "rank": 69, "score": 108317.3170640432 }, { "content": "\tSnapshotID string\n", "file_path": "contrib/nydus-snapshotter/pkg/daemon/daemon.go", "rank": 70, "score": 108317.3170640432 }, { "content": "/*\n\n * Copyright (c) 2020. Ant Group. 
All rights reserved.\n\n *\n\n * SPDX-License-Identifier: Apache-2.0\n\n */\n\n\n\npackage daemon\n\n\n\nimport (\n\n\t\"fmt\"\n\n\t\"os\"\n\n\t\"path/filepath\"\n\n\n\n\t\"github.com/pkg/errors\"\n\n\n\n\t\"github.com/dragonflyoss/image-service/contrib/nydus-snapshotter/config\"\n\n\t\"github.com/dragonflyoss/image-service/contrib/nydus-snapshotter/pkg/nydussdk\"\n\n\t\"github.com/dragonflyoss/image-service/contrib/nydus-snapshotter/pkg/nydussdk/model\"\n\n)\n\n\n\nconst (\n\n\tAPISocketFileName = \"api.sock\"\n\n\tSharedNydusDaemonID = \"shared_daemon\"\n\n)\n\n\n\ntype NewDaemonOpt func(d *Daemon) error\n\n\n\ntype Daemon struct {\n\n\tID string\n\n\tSnapshotID string\n\n\tConfigDir string\n\n\tSocketDir string\n\n\tLogDir string\n\n\tLogLevel string\n\n\tSnapshotDir string\n\n\tPid int\n\n\tImageID string\n\n\tDaemonMode string\n\n\tApiSock *string\n\n\tRootMountPoint *string\n\n}\n\n\n\nfunc (d *Daemon) SharedMountPoint() string {\n\n\treturn filepath.Join(*d.RootMountPoint, d.SnapshotID, \"fs\")\n\n}\n\n\n\nfunc (d *Daemon) MountPoint() string {\n\n\tif d.RootMountPoint != nil {\n\n\t\treturn filepath.Join(\"/\", d.SnapshotID, \"fs\")\n\n\t}\n\n\treturn filepath.Join(d.SnapshotDir, d.SnapshotID, \"fs\")\n\n}\n\n\n\nfunc (d *Daemon) BootstrapFile() (string, error) {\n\n\treturn GetBootstrapFile(d.SnapshotDir, d.SnapshotID)\n\n}\n\n\n\nfunc (d *Daemon) ConfigFile() string {\n\n\treturn filepath.Join(d.ConfigDir, \"config.json\")\n\n}\n\n\n\nfunc (d *Daemon) APISock() string {\n\n\tif d.ApiSock != nil {\n\n\t\treturn *d.ApiSock\n\n\t}\n\n\treturn filepath.Join(d.SocketDir, APISocketFileName)\n\n}\n\n\n\nfunc (d *Daemon) LogFile() string {\n\n\treturn filepath.Join(d.LogDir, \"stderr.log\")\n\n}\n\n\n\nfunc (d *Daemon) CheckStatus() (model.DaemonInfo, error) {\n\n\tclient, err := nydussdk.NewNydusClient(d.APISock())\n\n\tif err != nil {\n\n\t\treturn model.DaemonInfo{}, errors.Wrap(err, \"failed to check status, client has not been 
initialized\")\n\n\t}\n\n\treturn client.CheckStatus()\n\n}\n\n\n\nfunc (d *Daemon) SharedMount() error {\n\n\tclient, err := nydussdk.NewNydusClient(d.APISock())\n\n\tif err != nil {\n\n\t\treturn errors.Wrap(err, \"failed to mount\")\n\n\t}\n\n\tbootstrap, err := d.BootstrapFile()\n\n\tif err != nil {\n\n\t\treturn err\n\n\t}\n\n\treturn client.SharedMount(d.MountPoint(), bootstrap, d.ConfigFile())\n\n}\n\n\n\nfunc (d *Daemon) SharedUmount() error {\n\n\tclient, err := nydussdk.NewNydusClient(d.APISock())\n\n\tif err != nil {\n\n\t\treturn errors.Wrap(err, \"failed to mount\")\n\n\t}\n\n\treturn client.Umount(d.MountPoint())\n\n}\n\n\n\nfunc (d *Daemon) IsMultipleDaemon() bool {\n\n\treturn d.DaemonMode == config.DaemonModeMultiple\n\n}\n\n\n\nfunc (d *Daemon) IsSharedDaemon() bool {\n\n\treturn d.DaemonMode == config.DaemonModeShared\n\n}\n\n\n\nfunc (d *Daemon) IsPrefetchDaemon() bool {\n\n\treturn d.DaemonMode == config.DaemonModePrefetch\n\n}\n\n\n\nfunc NewDaemon(opt ...NewDaemonOpt) (*Daemon, error) {\n\n\td := &Daemon{Pid: 0}\n\n\td.ID = newID()\n\n\td.DaemonMode = config.DefaultDaemonMode\n\n\tfor _, o := range opt {\n\n\t\terr := o(d)\n\n\t\tif err != nil {\n\n\t\t\treturn nil, err\n\n\t\t}\n\n\t}\n\n\treturn d, nil\n\n}\n\n\n\nfunc GetBootstrapFile(dir, id string) (string, error) {\n\n\t// the meta file is stored to <snapshotid>/image/image.boot\n\n\tbootstrap := filepath.Join(dir, id, \"fs\", \"image\", \"image.boot\")\n\n\t_, err := os.Stat(bootstrap)\n\n\tif err == nil {\n\n\t\treturn bootstrap, nil\n\n\t}\n\n\tif os.IsNotExist(err) {\n\n\t\t// for backward compatibility check meta file from legacy location\n\n\t\tbootstrap = filepath.Join(dir, id, \"fs\", \"image.boot\")\n\n\t\t_, err = os.Stat(bootstrap)\n\n\t\tif err == nil {\n\n\t\t\treturn bootstrap, nil\n\n\t\t}\n\n\t}\n\n\treturn \"\", errors.Wrap(err, fmt.Sprintf(\"failed to find bootstrap file for ID %s\", id))\n\n}\n", "file_path": "contrib/nydus-snapshotter/pkg/daemon/daemon.go", "rank": 
71, "score": 102514.64726754629 }, { "content": "type Daemon struct {\n\n\tID string\n\n\tSnapshotID string\n\n\tConfigDir string\n\n\tSocketDir string\n\n\tLogDir string\n\n\tLogLevel string\n\n\tSnapshotDir string\n\n\tPid int\n\n\tImageID string\n\n\tDaemonMode string\n\n\tApiSock *string\n\n\tRootMountPoint *string\n", "file_path": "contrib/nydus-snapshotter/pkg/daemon/daemon.go", "rank": 72, "score": 102514.64726754629 }, { "content": "func WithID(id string) NewDaemonOpt {\n\n\treturn func(d *Daemon) error {\n\n\t\td.ID = id\n\n\t\treturn nil\n\n\t}\n", "file_path": "contrib/nydus-snapshotter/pkg/daemon/config.go", "rank": 73, "score": 98935.58585690023 }, { "content": "\teventCh chan struct{}\n", "file_path": "contrib/nydus-snapshotter/pkg/cache/manager.go", "rank": 74, "score": 98279.80357936447 }, { "content": "/*\n\n * Copyright (c) 2020. Ant Group. All rights reserved.\n\n *\n\n * SPDX-License-Identifier: Apache-2.0\n\n */\n\n\n\npackage daemon\n\n\n\nimport (\n\n\t\"os\"\n\n\t\"path/filepath\"\n\n\n\n\t\"github.com/dragonflyoss/image-service/contrib/nydus-snapshotter/config\"\n\n\t\"github.com/pkg/errors\"\n\n)\n\n\n\nfunc WithSnapshotID(id string) NewDaemonOpt {\n\n\treturn func(d *Daemon) error {\n\n\t\td.SnapshotID = id\n\n\t\treturn nil\n\n\t}\n\n}\n\n\n\nfunc WithID(id string) NewDaemonOpt {\n\n\treturn func(d *Daemon) error {\n\n\t\td.ID = id\n\n\t\treturn nil\n\n\t}\n\n}\n\n\n\nfunc WithConfigDir(dir string) NewDaemonOpt {\n\n\treturn func(d *Daemon) error {\n\n\t\ts := filepath.Join(dir, d.ID)\n\n\t\t// this may be failed, should handle that\n\n\t\tif err := os.MkdirAll(s, 0755); err != nil {\n\n\t\t\treturn errors.Wrapf(err, \"failed to create config dir %s\", s)\n\n\t\t}\n\n\t\td.ConfigDir = s\n\n\t\treturn nil\n\n\t}\n\n}\n\n\n\nfunc WithSocketDir(dir string) NewDaemonOpt {\n\n\treturn func(d *Daemon) error {\n\n\t\ts := filepath.Join(dir, d.ID)\n\n\t\t// this may be failed, should handle that\n\n\t\tif err := os.MkdirAll(s, 0755); err != nil 
{\n\n\t\t\treturn errors.Wrapf(err, \"failed to create socket dir %s\", s)\n\n\t\t}\n\n\t\td.SocketDir = s\n\n\t\treturn nil\n\n\t}\n\n}\n\n\n\nfunc WithLogDir(dir string) NewDaemonOpt {\n\n\treturn func(d *Daemon) error {\n\n\t\td.LogDir = filepath.Join(dir, d.ID)\n\n\t\treturn nil\n\n\t}\n\n}\n\n\n\nfunc WithLogLevel(logLevel string) NewDaemonOpt {\n\n\treturn func(d *Daemon) error {\n\n\t\tif logLevel == \"\" {\n\n\t\t\td.LogLevel = config.DefaultLogLevel\n\n\t\t} else {\n\n\t\t\td.LogLevel = logLevel\n\n\t\t}\n\n\t\treturn nil\n\n\t}\n\n}\n\n\n\nfunc WithRootMountPoint(rootMountPoint string) NewDaemonOpt {\n\n\treturn func(d *Daemon) error {\n\n\t\tif err := os.MkdirAll(rootMountPoint, 0755); err != nil {\n\n\t\t\treturn errors.Wrapf(err, \"failed to create rootMountPoint %s\", rootMountPoint)\n\n\t\t}\n\n\t\td.RootMountPoint = &rootMountPoint\n\n\t\treturn nil\n\n\t}\n\n}\n\n\n\nfunc WithSnapshotDir(dir string) NewDaemonOpt {\n\n\treturn func(d *Daemon) error {\n\n\t\td.SnapshotDir = dir\n\n\t\treturn nil\n\n\t}\n\n}\n\n\n\nfunc WithImageID(imageID string) NewDaemonOpt {\n\n\treturn func(d *Daemon) error {\n\n\t\td.ImageID = imageID\n\n\t\treturn nil\n\n\t}\n\n}\n\n\n\nfunc WithSharedDaemon() NewDaemonOpt {\n\n\treturn func(d *Daemon) error {\n\n\t\td.DaemonMode = config.DaemonModeShared\n\n\t\treturn nil\n\n\t}\n\n}\n\n\n\nfunc WithPrefetchDaemon() NewDaemonOpt {\n\n\treturn func(d *Daemon) error {\n\n\t\td.DaemonMode = config.DaemonModePrefetch\n\n\t\treturn nil\n\n\t}\n\n}\n\n\n\nfunc WithAPISock(apiSock string) NewDaemonOpt {\n\n\treturn func(d *Daemon) error {\n\n\t\td.ApiSock = &apiSock\n\n\t\treturn nil\n\n\t}\n\n}\n", "file_path": "contrib/nydus-snapshotter/pkg/daemon/config.go", "rank": 75, "score": 98014.83740018388 }, { "content": "/*\n\n * Copyright (c) 2020. Ant Group. 
All rights reserved.\n\n *\n\n * SPDX-License-Identifier: Apache-2.0\n\n */\n\n\n\npackage daemon\n\n\n\nimport (\n\n\t\"encoding/base64\"\n\n\n\n\t\"github.com/google/uuid\"\n\n)\n\n\n\nfunc newID() string {\n\n\tid := uuid.New()\n\n\tb := [16]byte(id)\n\n\treturn base64.RawURLEncoding.EncodeToString(b[:])\n\n}\n", "file_path": "contrib/nydus-snapshotter/pkg/daemon/idgen.go", "rank": 76, "score": 98014.83740018388 }, { "content": "\tDaemonModeNone string = \"none\"\n", "file_path": "contrib/nydus-snapshotter/config/config.go", "rank": 77, "score": 97900.38160217437 }, { "content": "func newID() string {\n\n\tid := uuid.New()\n\n\tb := [16]byte(id)\n\n\treturn base64.RawURLEncoding.EncodeToString(b[:])\n", "file_path": "contrib/nydus-snapshotter/pkg/daemon/idgen.go", "rank": 78, "score": 97794.46547074638 }, { "content": "func WithImageID(imageID string) NewDaemonOpt {\n\n\treturn func(d *Daemon) error {\n\n\t\td.ImageID = imageID\n\n\t\treturn nil\n\n\t}\n", "file_path": "contrib/nydus-snapshotter/pkg/daemon/config.go", "rank": 79, "score": 97794.46547074638 }, { "content": "func WithSnapshotID(id string) NewDaemonOpt {\n\n\treturn func(d *Daemon) error {\n\n\t\td.SnapshotID = id\n\n\t\treturn nil\n\n\t}\n", "file_path": "contrib/nydus-snapshotter/pkg/daemon/config.go", "rank": 80, "score": 97794.46547074638 }, { "content": "\tDefaultDaemonMode string = \"multiple\"\n", "file_path": "contrib/nydus-snapshotter/config/config.go", "rank": 81, "score": 97745.91847455413 }, { "content": "func (m *Manager) isNoneDaemon() bool {\n\n\treturn m.DaemonMode == config.DaemonModeNone\n", "file_path": "contrib/nydus-snapshotter/pkg/process/manager.go", "rank": 82, "score": 96784.07666516775 }, { "content": "func (s *Server) collectDaemonMetric(ctx context.Context) error {\n\n\t// TODO(renzhen): make collect interval time configurable\n\n\ttimer := time.NewTicker(time.Duration(1) * time.Minute)\n\n\n\nouter:\n\n\tfor {\n\n\t\tselect {\n\n\t\tcase <-timer.C:\n\n\t\t\tdaemons := 
s.pm.ListDaemons()\n\n\t\t\tfor _, d := range daemons {\n\n\t\t\t\tif d.ID == daemon.SharedNydusDaemonID {\n\n\t\t\t\t\tcontinue\n\n\t\t\t\t}\n\n\n\n\t\t\t\tclient, err := nydussdk.NewNydusClient(d.APISock())\n\n\t\t\t\tif err != nil {\n\n\t\t\t\t\tlog.G(ctx).Errorf(\"failed to connect nydusd: %v\", err)\n\n\t\t\t\t\tcontinue\n\n\t\t\t\t}\n\n\n\n\t\t\t\tfsMetrics, err := client.GetFsMetric(s.pm.IsSharedDaemon(), d.SnapshotID)\n\n\t\t\t\tif err != nil {\n\n\t\t\t\t\tlog.G(ctx).Errorf(\"failed to get fs metric: %v\", err)\n\n\t\t\t\t\tcontinue\n\n\t\t\t\t}\n\n\n\n\t\t\t\tif err := s.exp.ExportFsMetrics(fsMetrics, d.ImageID); err != nil {\n\n\t\t\t\t\tlog.G(ctx).Errorf(\"failed to export fs metrics for %s: %v\", d.ImageID, err)\n\n\t\t\t\t\tcontinue\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\tcase <-ctx.Done():\n\n\t\t\tlog.G(ctx).Infof(\"cancel daemon metrics collecting\")\n\n\t\t\tbreak outer\n\n\t\t}\n\n\t}\n\n\n\n\treturn nil\n", "file_path": "contrib/nydus-snapshotter/pkg/metric/serve.go", "rank": 83, "score": 95680.29528897202 }, { "content": "\tdefaultNydusDaemonConfigPath string = \"/etc/nydus/config.json\"\n", "file_path": "contrib/nydus-snapshotter/config/config.go", "rank": 84, "score": 95541.96164305152 }, { "content": "func (sl *defaultSourceLayer) ChainID() digest.Digest {\n\n\treturn sl.chainID\n", "file_path": "contrib/nydusify/pkg/converter/provider/source.go", "rank": 85, "score": 94734.48706600454 }, { "content": "func (sl *defaultSourceLayer) ParentChainID() *digest.Digest {\n\n\treturn sl.parentChainID\n", "file_path": "contrib/nydusify/pkg/converter/provider/source.go", "rank": 86, "score": 93690.00992529436 }, { "content": "\tid string\n", "file_path": "contrib/nydusify/tests/registry.go", "rank": 87, "score": 89075.3373552509 }, { "content": "\tErr() error\n", "file_path": "contrib/nydusify/pkg/utils/worker.go", "rank": 88, "score": 88565.84811853428 }, { "content": "\terr error\n", "file_path": "contrib/nydusify/pkg/converter/converter.go", "rank": 89, 
"score": 88565.84811853428 }, { "content": "\terr chan error\n", "file_path": "contrib/nydusify/pkg/utils/worker.go", "rank": 90, "score": 88565.84811853428 }, { "content": "type Server struct {\n\n\tlistener net.Listener\n\n\trootDir string\n\n\tmetricsFile string\n\n\tpm *process.Manager\n\n\texp *exporter.Exporter\n", "file_path": "contrib/nydus-snapshotter/pkg/metric/serve.go", "rank": 91, "score": 88017.73207289467 }, { "content": "\tState string `json:\"state\"`\n", "file_path": "contrib/nydusify/pkg/checker/tool/nydusd.go", "rank": 92, "score": 88016.33470648967 }, { "content": "\terr error\n", "file_path": "contrib/nydusify/pkg/utils/worker_test.go", "rank": 93, "score": 87979.34219580094 }, { "content": "var BTI = model.BuildTimeInfo{\n\n\tPackageVer: \"1.1.0\",\n\n\tGitCommit: \"67f4ecc7acee6dd37234e6a697e72ac09d6cc8ba\",\n\n\tBuildTime: \"Thu, 28 Jan 2021 14:02:39 +0000\",\n\n\tProfile: \"debug\",\n\n\tRustc: \"rustc 1.46.0 (04488afe3 2020-08-24)\",\n", "file_path": "contrib/nydus-snapshotter/pkg/nydussdk/client_test.go", "rank": 94, "score": 87464.41234288004 }, { "content": "\tState string `json:\"state\"`\n", "file_path": "contrib/docker-nydus-graphdriver/plugin/nydus/glue.go", "rank": 95, "score": 87443.05937959999 }, { "content": "\tState string `json:\"state\"`\n", "file_path": "contrib/nydus-snapshotter/pkg/nydussdk/model/model.go", "rank": 96, "score": 87443.05937959999 }, { "content": "\tID string `json:\"id\"`\n", "file_path": "contrib/nydus-snapshotter/pkg/nydussdk/model/model.go", "rank": 97, "score": 87317.97949151084 }, { "content": "\tID string `json:\"id\"`\n", "file_path": "contrib/docker-nydus-graphdriver/plugin/nydus/glue.go", "rank": 98, "score": 87317.97949151084 }, { "content": " };\n\n\n\n match evset {\n\n Events::EPOLLIN => {\n\n if event.data == EXIT_FUSE_SERVICE {\n\n // Directly return from here is reliable as we handle only one epoll event\n\n // which is `Read` or `Exit` once this function is called.\n\n // One more trick is 
we don't read the event fd so as to make all fuse threads exit.\n\n // That is because we configure this event fd as LEVEL triggered.\n\n info!(\"Will exit from fuse service\");\n\n return Ok(None);\n\n }\n\n\n\n match read(self.fd, buf.as_mut_slice()) {\n\n Ok(len) => {\n\n return Ok(Some(\n\n Reader::new(FuseBuf::new(&mut buf[..len]))\n\n .map_err(|e| eother!(e))?,\n\n ));\n\n }\n", "file_path": "utils/src/fuse.rs", "rank": 99, "score": 45.39495369088982 } ]
Rust
src/learning/lin_reg.rs
alfaevc/rusty-machine
e7cc57fc5e0f384aeb19169336deb5f66655c76a
use linalg::{Matrix, BaseMatrix}; use linalg::Vector; use learning::{LearningResult, SupModel}; use learning::toolkit::cost_fn::CostFunc; use learning::toolkit::cost_fn::MeanSqError; use learning::optim::grad_desc::GradientDesc; use learning::optim::{OptimAlgorithm, Optimizable}; use learning::error::Error; #[derive(Debug)] pub struct LinRegressor { parameters: Option<Vector<f64>>, } impl Default for LinRegressor { fn default() -> LinRegressor { LinRegressor { parameters: None } } } impl LinRegressor { pub fn parameters(&self) -> Option<&Vector<f64>> { self.parameters.as_ref() } } impl SupModel<Matrix<f64>, Vector<f64>> for LinRegressor { fn train(&mut self, inputs: &Matrix<f64>, targets: &Vector<f64>) -> LearningResult<()> { let ones = Matrix::<f64>::ones(inputs.rows(), 1); let full_inputs = ones.hcat(inputs); let xt = full_inputs.transpose(); self.parameters = Some((&xt * full_inputs).solve(&xt * targets)?); Ok(()) } fn predict(&self, inputs: &Matrix<f64>) -> LearningResult<Vector<f64>> { if let Some(ref v) = self.parameters { let ones = Matrix::<f64>::ones(inputs.rows(), 1); let full_inputs = ones.hcat(inputs); Ok(full_inputs * v) } else { Err(Error::new_untrained()) } } } impl Optimizable for LinRegressor { type Inputs = Matrix<f64>; type Targets = Vector<f64>; fn compute_grad(&self, params: &[f64], inputs: &Matrix<f64>, targets: &Vector<f64>) -> (f64, Vec<f64>) { let beta_vec = Vector::new(params.to_vec()); let outputs = inputs * beta_vec; let cost = MeanSqError::cost(&outputs, targets); let grad = (inputs.transpose() * (outputs - targets)) / (inputs.rows() as f64); (cost, grad.into_vec()) } } impl LinRegressor { pub fn train_with_optimization(&mut self, inputs: &Matrix<f64>, targets: &Vector<f64>) { let ones = Matrix::<f64>::ones(inputs.rows(), 1); let full_inputs = ones.hcat(inputs); let initial_params = vec![0.; full_inputs.cols()]; let gd = GradientDesc::default(); let optimal_w = gd.optimize(self, &initial_params[..], &full_inputs, targets); 
self.parameters = Some(Vector::new(optimal_w)); } }
use linalg::{Matrix, BaseMatrix}; use linalg::Vector; use learning::{LearningResult, SupModel}; use learning::toolkit::cost_fn::CostFunc; use learning::toolkit::cost_fn::MeanSqError; use learning::optim::grad_desc::GradientDesc; use learning::optim::{OptimAlgorithm, Optimizable}; use learning::error::Error; #[derive(Debug)] pub struct LinRegressor { parameters: Option<Vector<f64>>, } impl Default for LinRegressor { fn default() -> LinRegressor { LinRegressor { parameters: None } } } impl LinRegressor { pub fn parameters(&self) -> Option<&Vector<f64>> { self.parameters.as_ref() } } impl SupModel<Matrix<f64>, Vector<f64>> for LinRegressor { fn train(&mut self, inputs: &Matrix<f64>, targets: &Vector<f64>) -> LearningResult<()> { let ones = Matrix::<f64>::ones(inputs.rows(), 1); let full_inputs = ones.hcat(inputs); let xt = full_inputs.transpose(); self.parameters = Some((&xt * full_inputs).solve(&xt * targets)?); Ok(()) }
} impl Optimizable for LinRegressor { type Inputs = Matrix<f64>; type Targets = Vector<f64>; fn compute_grad(&self, params: &[f64], inputs: &Matrix<f64>, targets: &Vector<f64>) -> (f64, Vec<f64>) { let beta_vec = Vector::new(params.to_vec()); let outputs = inputs * beta_vec; let cost = MeanSqError::cost(&outputs, targets); let grad = (inputs.transpose() * (outputs - targets)) / (inputs.rows() as f64); (cost, grad.into_vec()) } } impl LinRegressor { pub fn train_with_optimization(&mut self, inputs: &Matrix<f64>, targets: &Vector<f64>) { let ones = Matrix::<f64>::ones(inputs.rows(), 1); let full_inputs = ones.hcat(inputs); let initial_params = vec![0.; full_inputs.cols()]; let gd = GradientDesc::default(); let optimal_w = gd.optimize(self, &initial_params[..], &full_inputs, targets); self.parameters = Some(Vector::new(optimal_w)); } }
fn predict(&self, inputs: &Matrix<f64>) -> LearningResult<Vector<f64>> { if let Some(ref v) = self.parameters { let ones = Matrix::<f64>::ones(inputs.rows(), 1); let full_inputs = ones.hcat(inputs); Ok(full_inputs * v) } else { Err(Error::new_untrained()) } }
function_block-full_function
[ { "content": "/// Returns the f1 score for 2 class classification.\n\n///\n\n/// F1-score is calculated with 2 * precision * recall / (precision + recall),\n\n/// see [F1 score](https://en.wikipedia.org/wiki/F1_score) for details.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `outputs` - Iterator of output (predicted) labels which only contains 0 or 1.\n\n/// * `targets` - Iterator of expected (actual) labels which only contains 0 or 1.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use rusty_machine::analysis::score::f1;\n\n/// let outputs = [1, 1, 1, 0, 0, 0];\n\n/// let targets = [1, 1, 0, 0, 1, 1];\n\n///\n\n/// assert_eq!(f1(outputs.iter(), targets.iter()), 0.5714285714285714);\n\n/// ```\n\n///\n\n/// # Panics\n\n///\n\n/// - outputs and targets have different length\n\n/// - outputs or targets contains a value which is not 0 or 1\n\npub fn f1<'a, I, T>(outputs: I, targets: I) -> f64\n\n where I: ExactSizeIterator<Item=&'a T>,\n\n T: 'a + PartialEq + Zero + One\n\n{\n\n assert!(outputs.len() == targets.len(), \"outputs and targets must have the same length\");\n\n\n\n let mut tpos = 0.0f64;\n\n let mut fpos = 0.0f64;\n\n let mut fneg = 0.0f64;\n\n\n\n for (ref o, ref t) in outputs.zip(targets) {\n\n if (*o == &T::one()) & (*t == &T::one()) {\n\n tpos += 1.0f64;\n\n } else if *t == &T::one() {\n\n fpos += 1.0f64;\n\n } else if *o == &T::one() {\n\n fneg += 1.0f64;\n\n }\n\n if ((*t != &T::zero()) & (*t != &T::one())) |\n\n ((*o != &T::zero()) & (*o != &T::one())) {\n\n panic!(\"f1-score must be used for 2 class classification\")\n\n }\n\n }\n\n 2.0f64 * tpos / (2.0f64 * tpos + fneg + fpos)\n\n}\n\n\n\n// ************************************\n\n// Regression Scores\n\n// ************************************\n\n\n", "file_path": "src/analysis/score.rs", "rank": 0, "score": 123397.80647501291 }, { "content": "/// Returns the recall score for 2 class classification.\n\n///\n\n/// Recall is calculated with true-positive / (true-positive + false-negative),\n\n/// 
see [Precision and Recall](https://en.wikipedia.org/wiki/Precision_and_recall) for details.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `outputs` - Iterator of output (predicted) labels which only contains 0 or 1.\n\n/// * `targets` - Iterator of expected (actual) labels which only contains 0 or 1.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use rusty_machine::analysis::score::recall;\n\n/// let outputs = [1, 1, 1, 0, 0, 0];\n\n/// let targets = [1, 1, 0, 0, 1, 1];\n\n///\n\n/// assert_eq!(recall(outputs.iter(), targets.iter()), 0.5);\n\n/// ```\n\n///\n\n/// # Panics\n\n///\n\n/// - outputs and targets have different length\n\n/// - outputs or targets contains a value which is not 0 or 1\n\npub fn recall<'a, I, T>(outputs: I, targets: I) -> f64\n\n where I: ExactSizeIterator<Item=&'a T>,\n\n T: 'a + PartialEq + Zero + One\n\n{\n\n assert!(outputs.len() == targets.len(), \"outputs and targets must have the same length\");\n\n\n\n let mut tpfn = 0.0f64;\n\n let mut tp = 0.0f64;\n\n\n\n for (ref o, ref t) in outputs.zip(targets) {\n\n if *t == &T::one() {\n\n tpfn += 1.0f64;\n\n if *o == &T::one() {\n\n tp += 1.0f64;\n\n }\n\n }\n\n if ((*t != &T::zero()) & (*t != &T::one())) |\n\n ((*o != &T::zero()) & (*o != &T::one())) {\n\n panic!(\"recall must be used for 2 class classification\")\n\n }\n\n }\n\n tp / tpfn\n\n}\n\n\n", "file_path": "src/analysis/score.rs", "rank": 1, "score": 123397.75755101816 }, { "content": "/// Returns the precision score for 2 class classification.\n\n///\n\n/// Precision is calculated with true-positive / (true-positive + false-positive),\n\n/// see [Precision and Recall](https://en.wikipedia.org/wiki/Precision_and_recall) for details.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `outputs` - Iterator of output (predicted) labels which only contains 0 or 1.\n\n/// * `targets` - Iterator of expected (actual) labels which only contains 0 or 1.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use 
rusty_machine::analysis::score::precision;\n\n/// let outputs = [1, 1, 1, 0, 0, 0];\n\n/// let targets = [1, 1, 0, 0, 1, 1];\n\n///\n\n/// assert_eq!(precision(outputs.iter(), targets.iter()), 2.0f64 / 3.0f64);\n\n/// ```\n\n///\n\n/// # Panics\n\n///\n\n/// - outputs and targets have different length\n\n/// - outputs or targets contains a value which is not 0 or 1\n\npub fn precision<'a, I, T>(outputs: I, targets: I) -> f64\n\n where I: ExactSizeIterator<Item=&'a T>,\n\n T: 'a + PartialEq + Zero + One\n\n{\n\n assert!(outputs.len() == targets.len(), \"outputs and targets must have the same length\");\n\n\n\n let mut tpfp = 0.0f64;\n\n let mut tp = 0.0f64;\n\n\n\n for (ref o, ref t) in outputs.zip(targets) {\n\n if *o == &T::one() {\n\n tpfp += 1.0f64;\n\n if *t == &T::one() {\n\n tp += 1.0f64;\n\n }\n\n }\n\n if ((*t != &T::zero()) & (*t != &T::one())) |\n\n ((*o != &T::zero()) & (*o != &T::one())) {\n\n panic!(\"precision must be used for 2 class classification\")\n\n }\n\n }\n\n tp / tpfp\n\n}\n\n\n", "file_path": "src/analysis/score.rs", "rank": 2, "score": 123397.66120934272 }, { "content": "/// Returns the fraction of outputs rows which match their target.\n\npub fn row_accuracy(outputs: &Matrix<f64>, targets: &Matrix<f64>) -> f64 {\n\n accuracy(outputs.row_iter().map(|r| r.raw_slice()),\n\n targets.row_iter().map(|r| r.raw_slice()))\n\n}\n\n\n", "file_path": "src/analysis/score.rs", "rank": 3, "score": 114118.77369550867 }, { "content": "/// Search K-nearest items\n\npub trait KNearestSearch: Default{\n\n\n\n /// build data structure for search optimization\n\n fn build(&mut self, data: Matrix<f64>);\n\n\n\n /// Serch k-nearest items close to the point\n\n /// Returns a tuple of searched item index and its distances\n\n fn search(&self, point: &[f64], k: usize) -> Result<(Vec<usize>, Vec<f64>), Error>;\n\n}\n\n\n", "file_path": "src/learning/knn/mod.rs", "rank": 4, "score": 112992.57598695795 }, { "content": "/// Returns the fraction of outputs which match 
their target.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `outputs` - Iterator of output (predicted) labels.\n\n/// * `targets` - Iterator of expected (actual) labels.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use rusty_machine::analysis::score::accuracy;\n\n/// let outputs = [1, 1, 1, 0, 0, 0];\n\n/// let targets = [1, 1, 0, 0, 1, 1];\n\n///\n\n/// assert_eq!(accuracy(outputs.iter(), targets.iter()), 0.5);\n\n/// ```\n\n///\n\n/// # Panics\n\n///\n\n/// - outputs and targets have different length\n\npub fn accuracy<I1, I2, T>(outputs: I1, targets: I2) -> f64\n\n where T: PartialEq,\n\n I1: ExactSizeIterator + Iterator<Item=T>,\n\n I2: ExactSizeIterator + Iterator<Item=T>\n\n{\n\n assert!(outputs.len() == targets.len(), \"outputs and targets must have the same length\");\n\n let len = outputs.len() as f64;\n\n let correct = outputs\n\n .zip(targets)\n\n .filter(|&(ref x, ref y)| x == y)\n\n .count();\n\n correct as f64 / len\n\n}\n\n\n", "file_path": "src/analysis/score.rs", "rank": 5, "score": 112719.50561554413 }, { "content": "// TODO: generalise to accept arbitrary iterators of diff-able things\n\n/// Returns the additive inverse of the mean-squared-error of the\n\n/// outputs. 
So higher is better, and the returned value is always\n\n/// negative.\n\npub fn neg_mean_squared_error(outputs: &Matrix<f64>, targets: &Matrix<f64>) -> f64\n\n{\n\n // MeanSqError divides the actual mean squared error by two.\n\n -2f64 * MeanSqError::cost(outputs, targets)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use linalg::Matrix;\n\n use super::{accuracy, precision, recall, f1, neg_mean_squared_error};\n\n\n\n #[test]\n\n fn test_accuracy() {\n\n let outputs = [1, 2, 3, 4, 5, 6];\n\n let targets = [1, 2, 3, 3, 5, 1];\n\n assert_eq!(accuracy(outputs.iter(), targets.iter()), 2f64/3f64);\n\n\n\n let outputs = [1, 1, 1, 0, 0, 0];\n\n let targets = [1, 1, 1, 0, 0, 1];\n\n assert_eq!(accuracy(outputs.iter(), targets.iter()), 5.0f64 / 6.0f64);\n", "file_path": "src/analysis/score.rs", "rank": 6, "score": 109645.25990230888 }, { "content": " /// Trait for models which can be gradient-optimized.\n\n pub trait Optimizable {\n\n /// The input data type to the model.\n\n type Inputs;\n\n /// The target data type to the model.\n\n type Targets;\n\n\n\n /// Compute the gradient for the model.\n\n fn compute_grad(&self,\n\n params: &[f64],\n\n inputs: &Self::Inputs,\n\n targets: &Self::Targets)\n\n -> (f64, Vec<f64>);\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 7, "score": 100359.26267368314 }, { "content": "/// Returns a square matrix C where C_ij is the count of the samples which were\n\n/// predicted to lie in the class with jth label but actually lie in the class with\n\n/// ith label.\n\n///\n\n/// # Arguments\n\n/// * `predictions` - A series of model predictions.\n\n/// * `targets` - A slice of equal length to predictions, containing the\n\n/// target results.\n\n/// * `labels` - If None then the rows and columns of the returned matrix\n\n/// correspond to the distinct labels appearing in either\n\n/// predictions or targets, in increasing order.\n\n/// If Some then the rows and columns correspond to the provided\n\n/// labels, in the provided order. 
Note that in this case the\n\n/// confusion matrix will only contain entries for the elements\n\n/// of `labels`.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use rusty_machine::analysis::confusion_matrix::confusion_matrix;\n\n/// use rusty_machine::linalg::Matrix;\n\n///\n\n/// let truth = vec![2, 0, 2, 2, 0, 1];\n\n/// let predictions = vec![0, 0, 2, 2, 0, 2];\n\n///\n\n/// let confusion = confusion_matrix(&predictions, &truth, None);\n\n///\n\n/// let expected = Matrix::new(3, 3, vec![\n\n/// 2, 0, 0,\n\n/// 0, 0, 1,\n\n/// 1, 0, 2]);\n\n///\n\n/// assert_eq!(confusion, expected);\n\n/// ```\n\n/// # Panics\n\n///\n\n/// - If user-provided labels are not distinct.\n\n/// - If predictions and targets have different lengths.\n\npub fn confusion_matrix<T>(predictions: &[T],\n\n targets: &[T],\n\n labels: Option<Vec<T>>) -> Matrix<usize>\n\n where T: Ord + Eq + Hash + Copy\n\n{\n\n assert!(predictions.len() == targets.len(),\n\n \"predictions and targets have different lengths\");\n\n\n\n let labels = match labels {\n\n Some(ls) => ls,\n\n None => ordered_distinct(predictions, targets)\n\n };\n\n\n\n let mut label_to_index: HashMap<T, usize> = HashMap::new();\n\n for (i, l) in labels.iter().enumerate() {\n\n match label_to_index.insert(*l, i) {\n\n None => {},\n\n Some(_) => { panic!(\"labels must be distinct\"); }\n\n }\n\n }\n", "file_path": "src/analysis/confusion_matrix.rs", "rank": 8, "score": 98808.94228718315 }, { "content": "/// Load trees dataset.\n\n///\n\n/// The data set contains a sample of 31 black cherry trees in the\n\n/// Allegheny National Forest, Pennsylvania.\n\n///\n\n/// ## Attribute Information\n\n///\n\n/// ### Data\n\n///\n\n/// ``Matrix<f64>`` contains following columns.\n\n///\n\n/// - diameter (inches)\n\n/// - height (feet)\n\n///\n\n/// ### Target\n\n///\n\n/// ``Vector<f64>`` contains volume (cubic feet) of trees.\n\n///\n\n/// Thomas A. Ryan, Brian L. Joiner, Barbara F. Ryan. (1976).\n\n/// Minitab student handbook. 
Duxbury Press\n\npub fn load() -> Dataset<Matrix<f64>, Vector<f64>> {\n\n let data = matrix![8.3, 70.;\n\n 8.6, 65.;\n\n 8.8, 63.;\n\n 10.5, 72.;\n\n 10.7, 81.;\n\n 10.8, 83.;\n\n 11.0, 66.;\n\n 11.0, 75.;\n\n 11.1, 80.;\n\n 11.2, 75.;\n\n 11.3, 79.;\n\n 11.4, 76.;\n\n 11.4, 76.;\n\n 11.7, 69.;\n\n 12.0, 75.;\n\n 12.9, 74.;\n\n 12.9, 85.;\n\n 13.3, 86.;\n\n 13.7, 71.;\n", "file_path": "src/datasets/trees.rs", "rank": 9, "score": 94819.21647349847 }, { "content": "/// Load iris dataset.\n\n///\n\n/// The data set contains 3 classes of 50 instances each, where each class refers to a type of iris plant.\n\n///\n\n/// ## Attribute Information\n\n///\n\n/// ### Data\n\n///\n\n/// ``Matrix<f64>`` contains following columns.\n\n///\n\n/// - sepal length in cm\n\n/// - sepal width in cm\n\n/// - petal length in cm\n\n/// - petal width in cm\n\n///\n\n/// ### Target\n\n///\n\n/// ``Vector<usize>`` contains numbers corresponding to iris species:\n\n///\n\n/// - ``0``: Iris Setosa\n\n/// - ``1``: Iris Versicolour\n\n/// - ``2``: Iris Virginica\n\n///\n\n/// Lichman, M. (2013). 
UCI Machine Learning Repository [http://archive.ics.uci.edu/ml].\n\n/// Irvine, CA: University of California, School of Information and Computer Science.\n\npub fn load() -> Dataset<Matrix<f64>, Vector<usize>> {\n\n let data: Matrix<f64> = matrix![5.1, 3.5, 1.4, 0.2;\n\n 4.9, 3.0, 1.4, 0.2;\n\n 4.7, 3.2, 1.3, 0.2;\n\n 4.6, 3.1, 1.5, 0.2;\n\n 5.0, 3.6, 1.4, 0.2;\n\n 5.4, 3.9, 1.7, 0.4;\n\n 4.6, 3.4, 1.4, 0.3;\n\n 5.0, 3.4, 1.5, 0.2;\n\n 4.4, 2.9, 1.4, 0.2;\n\n 4.9, 3.1, 1.5, 0.1;\n\n 5.4, 3.7, 1.5, 0.2;\n\n 4.8, 3.4, 1.6, 0.2;\n\n 4.8, 3.0, 1.4, 0.1;\n\n 4.3, 3.0, 1.1, 0.1;\n\n 5.8, 4.0, 1.2, 0.2;\n\n 5.7, 4.4, 1.5, 0.4;\n\n 5.4, 3.9, 1.3, 0.4;\n\n 5.1, 3.5, 1.4, 0.3;\n\n 5.7, 3.8, 1.7, 0.3;\n", "file_path": "src/datasets/iris.rs", "rank": 10, "score": 94818.8131379885 }, { "content": "#[test]\n\nfn test_default() {\n\n let mut model = PCA::default();\n\n\n\n let inputs = Matrix::new(7, 3, vec![8.3, 50., 23.,\n\n 10.2, 55., 21.,\n\n 11.1, 57., 22.,\n\n 12.5, 60., 15.,\n\n 11.3, 59., 20.,\n\n 12.4, 61., 11.,\n\n 11.2, 58., 23.]);\n\n model.train(&inputs).unwrap();\n\n\n\n let cexp = Matrix::new(3, 3, vec![0.2304196717022202, 0.2504639278931734, -0.9403055863478447,\n\n 0.5897383434061588, 0.7326863014098074, 0.3396755364211204,\n\n -0.7740254913174374, 0.6328021843757651, -0.021117155112842168]);\n\n let cmp = model.components().unwrap();\n\n assert_matrix_eq!(cmp, cexp, comp=abs, tol=1e-8);\n\n\n\n let new_data = Matrix::new(1, 3, vec![9., 45., 22.]);\n\n let outputs = model.predict(&new_data).unwrap();\n\n\n\n let exp = Matrix::new(1, 3, vec![-9.72287413262656, -7.680227015314077, -2.301338333438487]);\n\n assert_matrix_eq!(outputs, exp, comp=abs, tol=1e-8);\n\n}\n\n\n", "file_path": "tests/learning/pca.rs", "rank": 11, "score": 93875.45979201252 }, { "content": "/// The in place Fisher-Yates shuffle.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use rusty_machine::learning::toolkit::rand_utils;\n\n///\n\n/// // Collect the numbers 0..5\n\n/// let mut a = 
(0..5).collect::<Vec<_>>();\n\n///\n\n/// // Permute the values in place with Fisher-Yates\n\n/// rand_utils::in_place_fisher_yates(&mut a);\n\n/// ```\n\npub fn in_place_fisher_yates<T>(arr: &mut [T]) {\n\n let n = arr.len();\n\n let mut rng = thread_rng();\n\n\n\n for i in 0..n {\n\n // Swap i with a random point after it\n\n let j = rng.gen_range(0, n - i);\n\n arr.swap(i, i + j);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_reservoir_sample() {\n\n let a = vec![1, 2, 3, 4, 5, 6, 7];\n\n\n\n let b = reservoir_sample(&a, 3);\n", "file_path": "src/learning/toolkit/rand_utils.rs", "rank": 12, "score": 91560.02697122999 }, { "content": "#[test]\n\nfn test_default_gp() {\n\n\tlet mut gp = GaussianProcess::default();\n\n\tgp.noise = 10f64;\n\n\n\n\tlet inputs = Matrix::new(10,1,vec![0.,1.,2.,3.,4.,5.,6.,7.,8.,9.]);\n\n\tlet targets = Vector::new(vec![0.,1.,2.,3.,4.,4.,3.,2.,1.,0.]);\n\n\n\n\tgp.train(&inputs, &targets).unwrap();\n\n\n\n\tlet test_inputs = Matrix::new(5,1,vec![2.3,4.4,5.1,6.2,7.1]);\n\n\n\n\tlet _outputs = gp.predict(&test_inputs).unwrap();\n\n}\n", "file_path": "tests/learning/gp.rs", "rank": 13, "score": 91317.19964434262 }, { "content": "#[test]\n\nfn test_model_default() {\n\n let mut model = KMeansClassifier::<KPlusPlus>::new(3);\n\n let inputs = Matrix::new(3, 2, vec![1.0, 2.0, 1.0, 3.0, 1.0, 4.0]);\n\n let targets = Matrix::new(3,2, vec![1.0, 2.0, 1.0, 3.0, 1.0, 4.0]);\n\n\n\n model.train(&inputs).unwrap();\n\n\n\n let outputs = model.predict(&targets).unwrap();\n\n\n\n assert_eq!(outputs.size(), 3);\n\n}\n\n\n", "file_path": "tests/learning/k_means.rs", "rank": 14, "score": 91317.19964434262 }, { "content": "/// Randomly splits the inputs into k 'folds'. For each fold a model\n\n/// is trained using all inputs except for that fold, and tested on the\n\n/// data in the fold. 
Returns the scores for each fold.\n\n///\n\n/// # Arguments\n\n/// * `model` - Used to train and predict for each fold.\n\n/// * `inputs` - All input samples.\n\n/// * `targets` - All targets.\n\n/// * `k` - Number of folds to use.\n\n/// * `score` - Used to compare the outputs for each fold to the targets. Higher scores are better. See the `analysis::score` module for examples.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use rusty_machine::analysis::cross_validation::k_fold_validate;\n\n/// use rusty_machine::analysis::score::row_accuracy;\n\n/// use rusty_machine::learning::naive_bayes::{NaiveBayes, Bernoulli};\n\n/// use rusty_machine::linalg::{BaseMatrix, Matrix};\n\n///\n\n/// let inputs = Matrix::new(3, 2, vec![1.0, 1.1,\n\n/// 5.2, 4.3,\n\n/// 6.2, 7.3]);\n\n///\n\n/// let targets = Matrix::new(3, 3, vec![1.0, 0.0, 0.0,\n\n/// 0.0, 0.0, 1.0,\n\n/// 0.0, 0.0, 1.0]);\n\n///\n\n/// let mut model = NaiveBayes::<Bernoulli>::new();\n\n///\n\n/// let accuracy_per_fold: Vec<f64> = k_fold_validate(\n\n/// &mut model,\n\n/// &inputs,\n\n/// &targets,\n\n/// 3,\n\n/// // Score each fold by the fraction of test samples where\n\n/// // the model's prediction equals the target.\n\n/// row_accuracy\n\n/// ).unwrap();\n\n/// ```\n\npub fn k_fold_validate<M, S>(model: &mut M,\n\n inputs: &Matrix<f64>,\n\n targets: &Matrix<f64>,\n\n k: usize,\n\n score: S) -> LearningResult<Vec<f64>>\n\n where S: Fn(&Matrix<f64>, &Matrix<f64>) -> f64,\n\n M: SupModel<Matrix<f64>, Matrix<f64>>,\n\n{\n\n assert_eq!(inputs.rows(), targets.rows());\n\n let num_samples = inputs.rows();\n\n let shuffled_indices = create_shuffled_indices(num_samples);\n\n let folds = Folds::new(&shuffled_indices, k);\n\n\n\n let mut costs: Vec<f64> = Vec::new();\n\n\n\n for p in folds {\n\n // TODO: don't allocate fresh buffers for every fold\n\n let train_inputs = inputs.select_rows(p.train_indices_iter.clone());\n\n let train_targets = targets.select_rows(p.train_indices_iter.clone());\n\n let test_inputs = 
inputs.select_rows(p.test_indices_iter.clone());\n\n let test_targets = targets.select_rows(p.test_indices_iter.clone());\n\n\n\n model.train(&train_inputs, &train_targets)?;\n\n let outputs = model.predict(&test_inputs)?;\n\n costs.push(score(&outputs, &test_targets));\n\n }\n\n\n\n Ok(costs)\n\n}\n\n\n", "file_path": "src/analysis/cross_validation.rs", "rank": 15, "score": 91301.40404995765 }, { "content": " /// Trait for optimization algorithms.\n\n pub trait OptimAlgorithm<M: Optimizable> {\n\n /// Return the optimized parameter using gradient optimization.\n\n ///\n\n /// Takes in a set of starting parameters and related model data.\n\n fn optimize(&self,\n\n model: &M,\n\n start: &[f64],\n\n inputs: &M::Inputs,\n\n targets: &M::Targets)\n\n -> Vec<f64>;\n\n }\n\n\n\n pub mod grad_desc;\n\n pub mod fmincg;\n\n }\n\n\n\n /// Module for learning tools.\n\n pub mod toolkit {\n\n pub mod activ_fn;\n\n pub mod cost_fn;\n", "file_path": "src/lib.rs", "rank": 16, "score": 89493.05151329908 }, { "content": "/// The inside out Fisher-Yates algorithm.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use rusty_machine::learning::toolkit::rand_utils;\n\n///\n\n/// // Collect the numbers 0..5\n\n/// let a = (0..5).collect::<Vec<_>>();\n\n///\n\n/// // Perform a Fisher-Yates shuffle to get a random permutation\n\n/// let permutation = rand_utils::fisher_yates(&a);\n\n/// ```\n\npub fn fisher_yates<T: Copy>(arr: &[T]) -> Vec<T> {\n\n let n = arr.len();\n\n let mut rng = thread_rng();\n\n\n\n let mut shuffled_arr = Vec::with_capacity(n);\n\n\n\n unsafe {\n\n // We set the length here\n\n // We only access data which has been initialized in the algorithm\n\n shuffled_arr.set_len(n);\n\n }\n\n\n\n for i in 0..n {\n\n let j = rng.gen_range(0, i + 1);\n\n\n\n // If j isn't the last point in the active shuffled array\n\n if j != i {\n\n // Copy value at position j to the end of the shuffled array\n\n // This is safe as we only read initialized data (j < i)\n\n let x = 
shuffled_arr[j];\n\n shuffled_arr[i] = x;\n\n }\n\n\n\n // Place value at end of active array into shuffled array\n\n shuffled_arr[j] = arr[i];\n\n }\n\n\n\n shuffled_arr\n\n}\n\n\n", "file_path": "src/learning/toolkit/rand_utils.rs", "rank": 17, "score": 86454.13584540682 }, { "content": "/// Trait for cost functions in models.\n\npub trait CostFunc<T> {\n\n /// The cost function.\n\n fn cost(outputs: &T, targets: &T) -> f64;\n\n\n\n /// The gradient of the cost function.\n\n fn grad_cost(outputs: &T, targets: &T) -> T;\n\n}\n\n\n\n/// The mean squared error cost function.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct MeanSqError;\n\n\n\n// For generics we need a trait for \"Hadamard product\" here\n\n// Which is \"Elementwise multiplication\".\n\nimpl CostFunc<Matrix<f64>> for MeanSqError {\n\n fn cost(outputs: &Matrix<f64>, targets: &Matrix<f64>) -> f64 {\n\n let diff = outputs - targets;\n\n let sq_diff = &diff.elemul(&diff);\n\n\n\n let n = diff.rows();\n", "file_path": "src/learning/toolkit/cost_fn.rs", "rank": 18, "score": 85878.65938533467 }, { "content": "/// Trait for activation functions in models.\n\npub trait ActivationFunc: Clone + Debug {\n\n /// The activation function.\n\n fn func(x: f64) -> f64;\n\n\n\n /// The gradient of the activation function.\n\n fn func_grad(x: f64) -> f64;\n\n\n\n /// The gradient of the activation function calculated using the output of the function.\n\n /// Calculates f'(x) given f(x) as an input\n\n fn func_grad_from_output(y: f64) -> f64;\n\n\n\n /// The inverse of the activation function.\n\n fn func_inv(x: f64) -> f64;\n\n}\n\n\n\n/// Sigmoid activation function.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Sigmoid;\n\n\n\nimpl ActivationFunc for Sigmoid {\n", "file_path": "src/learning/toolkit/activ_fn.rs", "rank": 19, "score": 82484.74871546935 }, { "content": " /// Trait for supervised model.\n\n pub trait SupModel<T, U> {\n\n /// Predict output from inputs.\n\n fn predict(&self, inputs: &T) -> 
LearningResult<U>;\n\n\n\n /// Train the model using inputs and targets.\n\n fn train(&mut self, inputs: &T, targets: &U) -> LearningResult<()>;\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 20, "score": 80806.65918949983 }, { "content": "/// ```\n\n/// use rusty_machine::learning::toolkit::rand_utils;\n\n///\n\n/// let mut pool = &mut [1,2,3,4];\n\n/// let sample = rand_utils::reservoir_sample(pool, 3);\n\n///\n\n/// println!(\"{:?}\", sample);\n\n/// ```\n\npub fn reservoir_sample<T: Copy>(pool: &[T], reservoir_size: usize) -> Vec<T> {\n\n assert!(pool.len() >= reservoir_size,\n\n \"Sample size is greater than total.\");\n\n\n\n let mut pool_mut = &pool[..];\n\n\n\n let mut res = pool_mut[..reservoir_size].to_vec();\n\n pool_mut = &pool_mut[reservoir_size..];\n\n\n\n let mut ele_seen = reservoir_size;\n\n let mut rng = thread_rng();\n\n\n\n while !pool_mut.is_empty() {\n\n ele_seen += 1;\n\n let r = rng.gen_range(0, ele_seen);\n\n\n\n let p_0 = pool_mut[0];\n\n pool_mut = &pool_mut[1..];\n\n\n\n if r < reservoir_size {\n\n res[r] = p_0;\n\n }\n\n }\n\n\n\n res\n\n}\n\n\n", "file_path": "src/learning/toolkit/rand_utils.rs", "rank": 21, "score": 79404.54997308446 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Dog {\n\n color: Color,\n\n friendliness: f64,\n\n furriness: f64,\n\n speed: f64,\n\n}\n\n\n\nimpl Rand for Dog {\n\n /// Generate a random dog.\n\n fn rand<R: rand::Rng>(rng: &mut R) -> Self {\n\n // Friendliness, furriness, and speed are normally distributed and\n\n // (given color:) independent.\n\n let mut red_dog_friendliness = Normal::new(0., 1.);\n\n let mut red_dog_furriness = Normal::new(0., 1.);\n\n let mut red_dog_speed = Normal::new(0., 1.);\n\n\n\n let mut white_dog_friendliness = Normal::new(1., 1.);\n\n let mut white_dog_furriness = Normal::new(1., 1.);\n\n let mut white_dog_speed = Normal::new(-1., 1.);\n\n\n", "file_path": "examples/naive_bayes_dogs.rs", "rank": 22, "score": 65614.89597666058 }, { "content": "/// A very simple model 
that looks at all the data it's\n\n/// given but doesn't do anything useful.\n\n/// Stores the sum of all elements in the inputs and targets\n\n/// matrices when trained. Its prediction for each row is the\n\n/// sum of the row's elements plus the precalculated training sum.\n\nstruct DummyModel {\n\n sum: f64\n\n}\n\n\n\nimpl SupModel<Matrix<f64>, Matrix<f64>> for DummyModel {\n\n fn predict(&self, inputs: &Matrix<f64>) -> LearningResult<Matrix<f64>> {\n\n let predictions: Vec<f64> = inputs\n\n .row_iter()\n\n .map(|row| { self.sum + sum(row.iter()) })\n\n .collect();\n\n Ok(Matrix::new(inputs.rows(), 1, predictions))\n\n }\n\n\n\n fn train(&mut self, inputs: &Matrix<f64>, targets: &Matrix<f64>) -> LearningResult<()> {\n\n self.sum = sum(inputs.iter()) + sum(targets.iter());\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "benches/examples/cross_validation.rs", "rank": 23, "score": 64333.08664049553 }, { "content": "/// Container for k-Nearest search results\n\nstruct KNearest {\n\n // number to search\n\n k: usize,\n\n // tuple of index and its distances, sorted by distances\n\n pairs: Vec<(usize, f64)>,\n\n}\n\n\n\nimpl KNearest {\n\n\n\n fn new(k: usize, index: Vec<usize>, distances: Vec<f64>) -> Self {\n\n debug_assert!(!index.is_empty(), \"index can't be empty\");\n\n debug_assert!(index.len() == distances.len(),\n\n \"index and distance must have the same length\");\n\n\n\n let mut pairs: Vec<(usize, f64)> = index.into_iter()\n\n .zip(distances.into_iter())\n\n .collect();\n\n // sort by distance, take k elements\n\n pairs.sort_by(|x, y| x.1.partial_cmp(&y.1).unwrap());\n\n pairs.truncate(k);\n", "file_path": "src/learning/knn/mod.rs", "rank": 24, "score": 64322.71860875943 }, { "content": "/// An iterator over the sets of indices required for k-fold cross validation.\n\nstruct Folds<'a> {\n\n num_folds: usize,\n\n indices: &'a[usize],\n\n count: usize\n\n}\n\n\n\nimpl<'a> Folds<'a> {\n\n /// Let n = indices.len(), and k = num_folds.\n\n /// The first n % k folds 
have size n / k + 1 and the\n\n /// rest have size n / k. (In particular, if n % k == 0 then all\n\n /// folds are the same size.)\n\n fn new(indices: &'a ShuffledIndices, num_folds: usize) -> Folds<'a> {\n\n let num_samples = indices.0.len();\n\n assert!(num_folds > 1 && num_samples >= num_folds,\n\n \"Require num_folds > 1 && num_samples >= num_folds\");\n\n\n\n Folds {\n\n num_folds: num_folds,\n\n indices: &indices.0,\n\n count: 0\n", "file_path": "src/analysis/cross_validation.rs", "rank": 25, "score": 63589.00400250092 }, { "content": "/// A partition of indices of all available samples into\n\n/// a training set and a test set.\n\nstruct Partition<'a> {\n\n train_indices_iter: TrainingIndices<'a>,\n\n test_indices_iter: TestIndices<'a>\n\n}\n\n\n", "file_path": "src/analysis/cross_validation.rs", "rank": 26, "score": 63589.00400250092 }, { "content": "#[derive(Clone)]\n\nstruct TrainingIndices<'a> {\n\n chain: Chain<Iter<'a, usize>, Iter<'a, usize>>,\n\n size: usize\n\n}\n\n\n\nimpl<'a> TestIndices<'a> {\n\n fn new(indices: &'a [usize]) -> TestIndices<'a> {\n\n TestIndices(indices.iter())\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for TestIndices<'a> {\n\n type Item = &'a usize;\n\n\n\n fn next(&mut self) -> Option<&'a usize> {\n\n self.0.next()\n\n }\n\n}\n\n\n\nimpl <'a> ExactSizeIterator for TestIndices<'a> {\n", "file_path": "src/analysis/cross_validation.rs", "rank": 27, "score": 62296.826634599776 }, { "content": "/// A model which uses the cost function\n\n/// y = (x - c)^2\n\n///\n\n/// The goal is to learn the true value c which minimizes the cost.\n\nstruct XSqModel {\n\n c: f64,\n\n}\n\n\n\nimpl Optimizable for XSqModel {\n\n type Inputs = Matrix<f64>;\n\n\ttype Targets = Matrix<f64>;\n\n\n\n fn compute_grad(&self, params: &[f64], _: &Matrix<f64>, _: &Matrix<f64>) -> (f64, Vec<f64>) {\n\n\n\n ((params[0] - self.c) * (params[0] - self.c),\n\n vec![2f64 * (params[0] - self.c)])\n\n }\n\n}\n\n\n", "file_path": "tests/learning/optim/grad_desc.rs", "rank": 
28, "score": 62002.46141063802 }, { "content": "/// The criterion for the Generalized Linear Model.\n\n///\n\n/// This trait specifies a Link function and requires a model\n\n/// variance to be specified. The model variance must be defined\n\n/// to specify the regression family. The other functions need not\n\n/// be specified but can be used to control optimization.\n\npub trait Criterion {\n\n /// The link function of the GLM Criterion.\n\n type Link: LinkFunc;\n\n\n\n /// The variance of the regression family.\n\n fn model_variance(&self, mu: f64) -> f64;\n\n\n\n /// Initializes the mean value.\n\n ///\n\n /// By default the mean takes the training target values.\n\n fn initialize_mu(&self, y: &[f64]) -> Vec<f64> {\n\n y.to_vec()\n\n }\n\n\n\n /// Computes the working weights that make up the diagonal\n\n /// of the `W` matrix used in the iterative reweighted least squares\n\n /// algorithm.\n\n ///\n\n /// This is equal to:\n\n ///\n", "file_path": "src/learning/glm.rs", "rank": 29, "score": 61589.72433328552 }, { "content": "/// Logarithm for applying within cost function.\n\nfn ln(x: f64) -> f64 {\n\n x.ln()\n\n}\n", "file_path": "src/learning/toolkit/cost_fn.rs", "rank": 30, "score": 60491.31061031228 }, { "content": "/// Link functions.\n\n///\n\n/// Used within Generalized Linear Regression models.\n\npub trait LinkFunc {\n\n /// The link function.\n\n fn func(x: f64) -> f64;\n\n\n\n /// The gradient of the link function.\n\n fn func_grad(x: f64) -> f64;\n\n\n\n /// The inverse of the link function.\n\n /// Often called the 'mean' function.\n\n fn func_inv(x: f64) -> f64;\n\n}\n\n\n\n/// The Logit link function.\n\n///\n\n/// Used primarily as the canonical link in Binomial Regression.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Logit;\n\n\n\n/// The Logit link function.\n\n///\n", "file_path": "src/learning/glm.rs", "rank": 31, "score": 60306.90965852362 }, { "content": "/// Criterion for Neural Networks\n\n///\n\n/// Specifies an activation function 
and a cost function.\n\npub trait Criterion {\n\n /// The cost function for the criterion.\n\n type Cost: CostFunc<Matrix<f64>>;\n\n\n\n /// The cost function.\n\n ///\n\n /// Returns a scalar cost.\n\n fn cost(&self, outputs: &Matrix<f64>, targets: &Matrix<f64>) -> f64 {\n\n Self::Cost::cost(outputs, targets)\n\n }\n\n\n\n /// The gradient of the cost function.\n\n ///\n\n /// Returns a matrix of cost gradients.\n\n fn cost_grad(&self, outputs: &Matrix<f64>, targets: &Matrix<f64>) -> Matrix<f64> {\n\n Self::Cost::grad_cost(outputs, targets)\n\n }\n\n\n\n /// Returns the regularization for this criterion.\n\n ///\n", "file_path": "src/learning/nnet/mod.rs", "rank": 32, "score": 60302.891148466835 }, { "content": "/// Naive Bayes Distribution.\n\npub trait Distribution {\n\n /// Initialize the distribution parameters.\n\n fn from_model_params(class_count: usize, features: usize) -> Self;\n\n\n\n /// Updates the distribution parameters.\n\n fn update_params(&mut self, data: &Matrix<f64>, class: usize) -> LearningResult<()>;\n\n\n\n /// Compute the joint log likelihood of the data.\n\n ///\n\n /// Returns a matrix with rows containing the probability that the input lies in each class.\n\n fn joint_log_lik(&self,\n\n data: &Matrix<f64>,\n\n class_prior: &[f64])\n\n -> LearningResult<Matrix<f64>>;\n\n}\n\n\n\n/// The Gaussian Naive Bayes model distribution.\n\n///\n\n/// Defines:\n\n///\n", "file_path": "src/learning/naive_bayes.rs", "rank": 33, "score": 60302.891148466835 }, { "content": "/// The Kernel trait\n\n///\n\n/// Requires a function mapping two vectors to a scalar.\n\npub trait Kernel {\n\n /// The kernel function.\n\n ///\n\n /// Takes two equal length slices and returns a scalar.\n\n fn kernel(&self, x1: &[f64], x2: &[f64]) -> f64;\n\n}\n\n\n\n/// The sum of two kernels\n\n///\n\n/// This struct should not be directly instantiated but instead\n\n/// is created when we add two kernels together.\n\n///\n\n/// Note that it will be more efficient to implement 
the final kernel\n\n/// manually yourself. However this provides an easy mechanism to test\n\n/// different combinations.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use rusty_machine::learning::toolkit::kernel::{Kernel, Polynomial, HyperTan, KernelArith};\n", "file_path": "src/learning/toolkit/kernel.rs", "rank": 34, "score": 60302.891148466835 }, { "content": "/// Trait for GP mean functions.\n\npub trait MeanFunc {\n\n /// Compute the mean function applied elementwise to a matrix.\n\n fn func(&self, x: Matrix<f64>) -> Vector<f64>;\n\n}\n\n\n\n/// Constant mean function\n\n#[derive(Clone, Copy, Debug)]\n\npub struct ConstMean {\n\n a: f64,\n\n}\n\n\n\n/// Constructs the zero function.\n\nimpl Default for ConstMean {\n\n fn default() -> ConstMean {\n\n ConstMean { a: 0f64 }\n\n }\n\n}\n\n\n\nimpl MeanFunc for ConstMean {\n\n fn func(&self, x: Matrix<f64>) -> Vector<f64> {\n", "file_path": "src/learning/gp.rs", "rank": 35, "score": 60302.891148466835 }, { "content": "// AND gate\n\nfn main() {\n\n println!(\"AND gate learner sample:\");\n\n\n\n const THRESHOLD: f64 = 0.7;\n\n\n\n const SAMPLES: usize = 10000;\n\n println!(\"Generating {} training data and labels...\", SAMPLES as u32);\n\n\n\n let mut input_data = Vec::with_capacity(SAMPLES * 2);\n\n let mut label_data = Vec::with_capacity(SAMPLES);\n\n\n\n for _ in 0..SAMPLES {\n\n // The two inputs are \"signals\" between 0 and 1\n\n let Closed01(left) = random::<Closed01<f64>>();\n\n let Closed01(right) = random::<Closed01<f64>>();\n\n input_data.push(left);\n\n input_data.push(right);\n\n if left > THRESHOLD && right > THRESHOLD {\n\n label_data.push(1.0);\n\n } else {\n", "file_path": "examples/nnet-and_gate.rs", "rank": 36, "score": 59264.15634108513 }, { "content": "/// Trait for algorithms initializing the K-means centroids.\n\npub trait Initializer: Debug {\n\n /// Initialize the centroids for the initial state of the K-Means model.\n\n ///\n\n /// The `Matrix` returned must have `k` rows and the same 
column count as `inputs`.\n\n fn init_centroids(&self, k: usize, inputs: &Matrix<f64>) -> LearningResult<Matrix<f64>>;\n\n}\n\n\n\n/// The Forgy initialization scheme.\n\n#[derive(Debug)]\n\npub struct Forgy;\n\n\n\nimpl Initializer for Forgy {\n\n fn init_centroids(&self, k: usize, inputs: &Matrix<f64>) -> LearningResult<Matrix<f64>> {\n\n let mut random_choices = Vec::with_capacity(k);\n\n let mut rng = thread_rng();\n\n while random_choices.len() < k {\n\n let r = rng.gen_range(0, inputs.rows());\n\n\n\n if !random_choices.contains(&r) {\n\n random_choices.push(r);\n", "file_path": "src/learning/k_means.rs", "rank": 37, "score": 58648.33635748812 }, { "content": "/// A permutation of 0..n.\n\nstruct ShuffledIndices(Vec<usize>);\n\n\n", "file_path": "src/analysis/cross_validation.rs", "rank": 38, "score": 58205.54265270436 }, { "content": "// Sign learner:\n\n// * Model input a float number\n\n// * Model output: A float representing the input sign.\n\n// If the input is positive, the output is close to 1.0.\n\n// If the input is negative, the output is close to -1.0.\n\n// * Model generated with the SVM API.\n\nfn main() {\n\n println!(\"Sign learner sample:\");\n\n\n\n println!(\"Training...\");\n\n // Training data\n\n let inputs = Matrix::new(11, 1, vec![\n\n -0.1, -2., -9., -101., -666.7,\n\n 0., 0.1, 1., 11., 99., 456.7\n\n ]);\n\n let targets = Vector::new(vec![\n\n -1., -1., -1., -1., -1.,\n\n 1., 1., 1., 1., 1., 1.\n\n ]);\n\n\n\n // Trainee\n\n let mut svm_mod = SVM::new(HyperTan::new(100., 0.), 0.3);\n\n // Our train function returns a Result<(), E>\n\n svm_mod.train(&inputs, &targets).unwrap();\n\n\n\n println!(\"Evaluation...\");\n", "file_path": "examples/svm-sign_learner.rs", "rank": 39, "score": 57906.94881283173 }, { "content": "fn main() {\n\n let (training_set_size, test_set_size) = (1000, 1000);\n\n // Generate all of our train and test data\n\n let (training_matrix, target_matrix, test_matrix, test_dogs) = generate_dog_data(training_set_size, 
test_set_size);\n\n\n\n // Train!\n\n let mut model = NaiveBayes::<naive_bayes::Gaussian>::new();\n\n model.train(&training_matrix, &target_matrix)\n\n .expect(\"failed to train model of dogs\");\n\n\n\n // Predict!\n\n let predictions = model.predict(&test_matrix)\n\n .expect(\"failed to predict dogs!?\");\n\n\n\n // Score how well we did.\n\n let mut hits = 0;\n\n let unprinted_total = test_set_size.saturating_sub(10) as usize;\n\n for (dog, prediction) in test_dogs.iter().zip(predictions.row_iter()).take(unprinted_total) {\n\n evaluate_prediction(&mut hits, dog, prediction.raw_slice());\n\n }\n", "file_path": "examples/naive_bayes_dogs.rs", "rank": 40, "score": 57901.921097354185 }, { "content": "fn main() {\n\n println!(\"K-Means clustering example:\");\n\n\n\n const SAMPLES_PER_CENTROID: usize = 2000;\n\n\n\n println!(\"Generating {0} samples from each centroids:\",\n\n SAMPLES_PER_CENTROID);\n\n // Choose two cluster centers, at (-0.5, -0.5) and (0, 0.5).\n\n let centroids = Matrix::new(2, 2, vec![-0.5, -0.5, 0.0, 0.5]);\n\n println!(\"{}\", centroids);\n\n\n\n // Generate some data randomly around the centroids\n\n let samples = generate_data(&centroids, SAMPLES_PER_CENTROID, 0.4);\n\n\n\n // Create a new model with 2 clusters\n\n let mut model = KMeansClassifier::new(2);\n\n\n\n // Train the model\n\n println!(\"Training the model...\");\n\n // Our train function returns a Result<(), E>\n", "file_path": "examples/k-means_generating_cluster.rs", "rank": 41, "score": 57901.921097354185 }, { "content": "#[test]\n\nfn test_knn() {\n\n let data = matrix![1., 1., 1.;\n\n 1., 2., 3.;\n\n 2., 3., 1.;\n\n 2., 2., 0.];\n\n let target = Vector::new(vec![0, 0, 1, 1]);\n\n\n\n let mut knn = KNNClassifier::new(2);\n\n let _ = knn.train(&data, &target).unwrap();\n\n\n\n let res = knn.predict(&matrix![2., 3., 0.; 1., 1., 2.]).unwrap();\n\n let exp = Vector::new(vec![1, 0]);\n\n assert_eq!(res, exp);\n\n}\n\n\n", "file_path": "tests/learning/knn.rs", "rank": 42, "score": 
57901.921097354185 }, { "content": "#[test]\n\nfn test_wide() {\n\n let mut model = PCA::default();\n\n\n\n let inputs = Matrix::new(2, 4, vec![8.3, 50., 23., 2.,\n\n 10.2, 55., 21., 3.]);\n\n model.train(&inputs).unwrap();\n\n\n\n let cexp = Matrix::new(2, 4, vec![0.3277323746171723, 0.8624536174136117, -0.3449814469654447, 0.17249072348272235,\n\n 0.933710591152088, -0.23345540994181946, 0.23959824886246414, -0.1275765757549414]);\n\n let cmp = model.components().unwrap();\n\n assert_matrix_eq!(cmp, cexp, comp=abs, tol=1e-8);\n\n\n\n let new_data = Matrix::new(1, 4, vec![9., 45., 22., 2.5]);\n\n let outputs = model.predict(&new_data).unwrap();\n\n\n\n let exp = Matrix::new(1, 2, vec![-6.550335224256381, 1.517487926775624]);\n\n assert_matrix_eq!(outputs, exp, comp=abs, tol=1e-8);\n\n}", "file_path": "tests/learning/pca.rs", "rank": 43, "score": 57901.921097354185 }, { "content": "#[test]\n\nfn test_not_centering() {\n\n let mut model = PCA::new(3, false);\n\n\n\n let inputs = Matrix::new(7, 3, vec![8.3, 50., 23.,\n\n 10.2, 55., 21.,\n\n 11.1, 57., 22.,\n\n 12.5, 60., 15.,\n\n 11.3, 59., 20.,\n\n 12.4, 61., 11.,\n\n 11.2, 58., 23.]);\n\n model.train(&inputs).unwrap();\n\n\n\n let cexp = Matrix::new(3, 3, vec![0.17994480617740657, -0.16908609066166264, 0.9690354795746806,\n\n 0.9326216647416523, -0.2839205184846983, -0.2227239763426676,\n\n 0.3127885822473139, 0.9438215049087068, 0.10660332868901998]);\n\n let cmp = model.components().unwrap();\n\n assert_matrix_eq!(cmp, cexp, comp=abs, tol=1e-8);\n\n\n\n let new_data = Matrix::new(1, 3, vec![9., 45., 22.]);\n\n let outputs = model.predict(&new_data).unwrap();\n\n\n\n let exp = Matrix::new(1, 3, vec![50.468826978411926, 6.465874960225161, 1.0440136119105228]);\n\n assert_matrix_eq!(outputs, exp, comp=abs, tol=1e-8);\n\n}\n\n\n", "file_path": "tests/learning/pca.rs", "rank": 44, "score": 57901.921097354185 }, { "content": "/// Trait for data transformers\n\npub trait Transformer<T> {\n\n /// Transforms the 
inputs\n\n fn transform(&mut self, inputs: T) -> LearningResult<T>;\n\n}\n\n\n", "file_path": "src/data/transforms/mod.rs", "rank": 45, "score": 57453.943932217626 }, { "content": "#[test]\n\nfn test_basic_clusters() {\n\n let inputs = Matrix::new(6, 2, vec![1.0, 2.0,\n\n 1.1, 2.2,\n\n 0.9, 1.9,\n\n 1.0, 2.1,\n\n -2.0, 3.0,\n\n -2.2, 3.1]);\n\n\n\n let mut model = DBSCAN::new(0.5, 2);\n\n model.train(&inputs).unwrap();\n\n\n\n let clustering = model.clusters().unwrap();\n\n\n\n assert!(clustering.data().iter().take(4).all(|x| *x == Some(0)));\n\n assert!(clustering.data().iter().skip(4).all(|x| *x == Some(1)));\n\n}\n\n\n\n\n", "file_path": "tests/learning/dbscan.rs", "rank": 46, "score": 56637.68963925553 }, { "content": "#[test]\n\nfn test_model_iter() {\n\n let mut model = KMeansClassifier::<KPlusPlus>::new(3);\n\n let inputs = Matrix::new(3, 2, vec![1.0, 2.0, 1.0, 3.0, 1.0, 4.0]);\n\n let targets = Matrix::new(3,2, vec![1.0, 2.0, 1.0, 3.0, 1.0, 4.0]);\n\n\n\n model.set_iters(1000);\n\n model.train(&inputs).unwrap();\n\n\n\n let outputs = model.predict(&targets).unwrap();\n\n\n\n assert_eq!(outputs.size(), 3);\n\n}\n\n\n", "file_path": "tests/learning/k_means.rs", "rank": 47, "score": 56637.68963925553 }, { "content": "#[test]\n\nfn test_basic_prediction() {\n\n let inputs = Matrix::new(6, 2, vec![1.0, 2.0,\n\n 1.1, 2.2,\n\n 0.9, 1.9,\n\n 1.0, 2.1,\n\n -2.0, 3.0,\n\n -2.2, 3.1]);\n\n\n\n let mut model = DBSCAN::new(0.5, 2);\n\n model.set_predictive(true);\n\n model.train(&inputs).unwrap();\n\n\n\n let new_points = Matrix::new(2,2, vec![1.0, 2.0, 4.0, 4.0]);\n\n\n\n let classes = model.predict(&new_points).unwrap();\n\n assert!(classes[0] == Some(0));\n\n assert!(classes[1] == None);\n\n}\n", "file_path": "tests/learning/dbscan.rs", "rank": 48, "score": 56637.68963925553 }, { "content": "#[test]\n\nfn test_two_centroids() {\n\n let mut model = KMeansClassifier::new(2);\n\n let inputs = Matrix::new(6, 2, vec![59.59375, 270.6875,\n\n 51.59375, 307.6875,\n\n 
86.59375, 286.6875,\n\n 319.59375, 145.6875,\n\n 314.59375, 174.6875,\n\n 350.59375, 161.6875]);\n\n\n\n model.train(&inputs).unwrap();\n\n\n\n let classes = model.predict(&inputs).unwrap();\n\n let class_a = classes[0];\n\n\n\n let class_b = if class_a == 0 { 1 } else { 0 };\n\n\n\n assert!(classes.data().iter().take(3).all(|x| *x == class_a));\n\n assert!(classes.data().iter().skip(3).all(|x| *x == class_b));\n\n}\n", "file_path": "tests/learning/k_means.rs", "rank": 49, "score": 56637.68963925553 }, { "content": "#[test]\n\nfn test_regression() {\n\n let mut lin_mod = LinRegressor::default();\n\n let inputs = Matrix::new(3, 1, vec![2.0, 3.0, 4.0]);\n\n let targets = Vector::new(vec![5.0, 6.0, 7.0]);\n\n\n\n lin_mod.train(&inputs, &targets).unwrap();\n\n\n\n let parameters = lin_mod.parameters().unwrap();\n\n\n\n let err_1 = abs(parameters[0] - 3.0);\n\n let err_2 = abs(parameters[1] - 1.0);\n\n\n\n assert!(err_1 < 1e-8);\n\n assert!(err_2 < 1e-8);\n\n}\n\n\n", "file_path": "tests/learning/lin_reg.rs", "rank": 50, "score": 56637.68963925553 }, { "content": "#[test]\n\nfn test_model_kplusplus() {\n\n let mut model = KMeansClassifier::new_specified(3, 100, KPlusPlus);\n\n let inputs = Matrix::new(3, 2, vec![1.0, 2.0, 1.0, 3.0, 1.0, 4.0]);\n\n let targets = Matrix::new(3,2, vec![1.0, 2.0, 1.0, 3.0, 1.0, 4.0]);\n\n\n\n model.train(&inputs).unwrap();\n\n\n\n let outputs = model.predict(&targets).unwrap();\n\n\n\n assert_eq!(outputs.size(), 3);\n\n}\n\n\n", "file_path": "tests/learning/k_means.rs", "rank": 51, "score": 56637.68963925553 }, { "content": "#[test]\n\nfn test_filter_component() {\n\n let mut model = PCA::new(2, false);\n\n\n\n let inputs = Matrix::new(7, 3, vec![8.3, 50., 23.,\n\n 10.2, 55., 21.,\n\n 11.1, 57., 22.,\n\n 12.5, 60., 15.,\n\n 11.3, 59., 20.,\n\n 12.4, 61., 11.,\n\n 11.2, 58., 23.]);\n\n model.train(&inputs).unwrap();\n\n\n\n let cexp = Matrix::new(3, 2, vec![0.17994480617740657, -0.16908609066166264,\n\n 0.9326216647416523, 
-0.2839205184846983,\n\n 0.3127885822473139, 0.9438215049087068]);\n\n let cmp = model.components().unwrap();\n\n assert_matrix_eq!(cmp, cexp, comp=abs, tol=1e-8);\n\n\n\n let new_data = Matrix::new(1, 3, vec![9., 45., 22.]);\n\n let outputs = model.predict(&new_data).unwrap();\n\n\n\n let exp = Matrix::new(1, 2, vec![50.468826978411926, 6.465874960225161]);\n\n assert_matrix_eq!(outputs, exp, comp=abs, tol=1e-8);\n\n}\n\n\n", "file_path": "tests/learning/pca.rs", "rank": 52, "score": 56637.68963925553 }, { "content": "#[test]\n\nfn test_model_forgy() {\n\n let mut model = KMeansClassifier::new_specified(3, 100, Forgy);\n\n let inputs = Matrix::new(3, 2, vec![1.0, 2.0, 1.0, 3.0, 1.0, 4.0]);\n\n let targets = Matrix::new(3,2, vec![1.0, 2.0, 1.0, 3.0, 1.0, 4.0]);\n\n\n\n model.train(&inputs).unwrap();\n\n\n\n let outputs = model.predict(&targets).unwrap();\n\n\n\n assert_eq!(outputs.size(), 3);\n\n}\n\n\n", "file_path": "tests/learning/k_means.rs", "rank": 53, "score": 56637.68963925553 }, { "content": "#[test]\n\nfn test_knn_long() {\n\n let vals = (0..200000).map(|x: usize| x as f64).collect::<Vec<f64>>();\n\n let data = Matrix::new(100000, 2, vals);\n\n\n\n let mut tvals = vec![0; 50000];\n\n tvals.extend(vec![1; 50000]);\n\n let target = Vector::new(tvals);\n\n\n\n // check stack doesn't overflow\n\n let mut knn = KNNClassifier::new(10);\n\n let _ = knn.train(&data, &target).unwrap();\n\n\n\n let res = knn.predict(&matrix![5., 10.; 60000., 550000.]).unwrap();\n\n let exp = Vector::new(vec![0, 1]);\n\n assert_eq!(res, exp);\n\n\n\n // check stack doesn't overflow\n\n let mut knn = KNNClassifier::new(1000);\n\n let _ = knn.train(&data, &target).unwrap();\n\n assert_eq!(res, exp);\n", "file_path": "tests/learning/knn.rs", "rank": 54, "score": 56637.68963925553 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_no_train_predict() {\n\n let model = KMeansClassifier::<KPlusPlus>::new(3);\n\n let inputs = Matrix::new(3, 2, vec![1.0, 2.0, 1.0, 3.0, 1.0, 
4.0]);\n\n\n\n model.predict(&inputs).unwrap();\n\n\n\n}\n\n\n", "file_path": "tests/learning/k_means.rs", "rank": 55, "score": 56637.68963925553 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_no_train_params() {\n\n let lin_mod = LinRegressor::default();\n\n\n\n let _ = lin_mod.parameters().unwrap();\n\n}\n\n\n", "file_path": "tests/learning/lin_reg.rs", "rank": 56, "score": 55461.253113419094 }, { "content": "#[test]\n\nfn test_model_ran_partition() {\n\n let mut model = KMeansClassifier::new_specified(3, 100, RandomPartition);\n\n let inputs = Matrix::new(3, 2, vec![1.0, 2.0, 1.0, 3.0, 1.0, 4.0]);\n\n let targets = Matrix::new(3,2, vec![1.0, 2.0, 1.0, 3.0, 1.0, 4.0]);\n\n\n\n model.train(&inputs).unwrap();\n\n\n\n let outputs = model.predict(&targets).unwrap();\n\n\n\n assert_eq!(outputs.size(), 3);\n\n}\n\n\n", "file_path": "tests/learning/k_means.rs", "rank": 57, "score": 55461.253113419094 }, { "content": "#[test]\n\nfn test_optimized_regression() {\n\n let mut lin_mod = LinRegressor::default();\n\n let inputs = Matrix::new(3, 1, vec![2.0, 3.0, 4.0]);\n\n let targets = Vector::new(vec![5.0, 6.0, 7.0]);\n\n\n\n lin_mod.train_with_optimization(&inputs, &targets);\n\n\n\n let _ = lin_mod.parameters().unwrap();\n\n}\n\n\n", "file_path": "tests/learning/lin_reg.rs", "rank": 58, "score": 55461.253113419094 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_no_train_predict() {\n\n let lin_mod = LinRegressor::default();\n\n let inputs = Matrix::new(3, 2, vec![1.0, 2.0, 1.0, 3.0, 1.0, 4.0]);\n\n\n\n let _ = lin_mod.predict(&inputs).unwrap();\n\n}\n\n\n", "file_path": "tests/learning/lin_reg.rs", "rank": 59, "score": 55461.253113419094 }, { "content": "#[test]\n\nfn test_predict_different_dimension() {\n\n let mut model = PCA::new(2, false);\n\n\n\n let inputs = Matrix::new(7, 3, vec![8.3, 50., 23.,\n\n 10.2, 55., 21.,\n\n 11.1, 57., 22.,\n\n 12.5, 60., 15.,\n\n 11.3, 59., 20.,\n\n 12.4, 61., 11.,\n\n 11.2, 58., 23.]);\n\n 
model.train(&inputs).unwrap();\n\n\n\n let new_data = Matrix::new(1, 2, vec![1., 2.]);\n\n let err = model.predict(&new_data);\n\n assert!(err.is_err());\n\n\n\n let new_data = Matrix::new(1, 4, vec![1., 2., 3., 4.]);\n\n let err = model.predict(&new_data);\n\n assert!(err.is_err());\n\n\n\n let mut model = PCA::new(5, false);\n\n let err = model.train(&inputs);\n\n assert!(err.is_err());\n\n}\n\n\n", "file_path": "tests/learning/pca.rs", "rank": 60, "score": 55461.253113419094 }, { "content": "#[test]\n\n#[ignore = \"FIXME #183 fails nondeterministically\"]\n\nfn test_train_no_data() {\n\n let inputs = Matrix::new(0, 1, vec![]);\n\n let targets = Vector::new(vec![]);\n\n\n\n let mut lin_mod = LinRegressor::default();\n\n let res = lin_mod.train(&inputs, &targets);\n\n\n\n assert!(res.is_err());\n\n}\n", "file_path": "tests/learning/lin_reg.rs", "rank": 61, "score": 55461.253113419094 }, { "content": "/// Trait for neural net layers\n\npub trait NetLayer : Debug {\n\n /// The result of propogating data forward through this layer\n\n fn forward(&self, input: &Matrix<f64>, params: MatrixSlice<f64>) -> LearningResult<Matrix<f64>>;\n\n\n\n /// The gradient of the output of this layer with respect to its input\n\n fn back_input(&self, out_grad: &Matrix<f64>, input: &Matrix<f64>, output: &Matrix<f64>, params: MatrixSlice<f64>) -> Matrix<f64>;\n\n \n\n /// The gradient of the output of this layer with respect to its parameters\n\n fn back_params(&self, out_grad: &Matrix<f64>, input: &Matrix<f64>, output: &Matrix<f64>, params: MatrixSlice<f64>) -> Matrix<f64>;\n\n\n\n /// The default value of the parameters of this layer before training\n\n fn default_params(&self) -> Vec<f64>;\n\n\n\n /// The shape of the parameters used by this layer\n\n fn param_shape(&self) -> (usize, usize);\n\n\n\n /// The number of parameters used by this layer\n\n fn num_params(&self) -> usize {\n\n let shape = self.param_shape();\n\n shape.0 * shape.1\n", "file_path": 
"src/learning/nnet/net_layer.rs", "rank": 62, "score": 55297.835609632166 }, { "content": "/// Binary splittable\n\npub trait BinarySplit: Sized {\n\n\n\n /// Build branch from passed args\n\n fn build(data: &Matrix<f64>, remains: Vec<usize>,\n\n dim: usize, split: f64, min: Vector<f64>, max: Vector<f64>,\n\n left: Node<Self>, right: Node<Self>)\n\n -> Node<Self>;\n\n\n\n /// Return a tuple of left and right node. First node is likely to be\n\n /// closer to the point\n\n unsafe fn maybe_close<'s, 'p>(&'s self, point: &'p [f64])\n\n -> (&'s Node<Self>, &'s Node<Self>);\n\n\n\n /// Return distance between the point and myself\n\n fn dist(&self, point: &[f64]) -> f64;\n\n\n\n /// Return left node\n\n fn left(&self) -> &Node<Self>;\n\n /// Return right node\n\n fn right(&self) -> &Node<Self>;\n", "file_path": "src/learning/knn/binary_tree.rs", "rank": 63, "score": 55297.835609632166 }, { "content": "#[derive(Clone)]\n\nstruct TestIndices<'a>(Iter<'a, usize>);\n\n\n", "file_path": "src/analysis/cross_validation.rs", "rank": 64, "score": 55271.82593246306 }, { "content": " /// Trait for unsupervised model.\n\n pub trait UnSupModel<T, U> {\n\n /// Predict output from inputs.\n\n fn predict(&self, inputs: &T) -> LearningResult<U>;\n\n\n\n /// Train the model using inputs.\n\n fn train(&mut self, inputs: &T) -> LearningResult<()>;\n\n }\n\n\n\n /// Module for optimization in machine learning setting.\n\n pub mod optim {\n\n\n", "file_path": "src/lib.rs", "rank": 65, "score": 54883.82400969989 }, { "content": "#[test]\n\nfn convex_adagrad_training() {\n\n let x_sq = XSqModel { c: 20f64 };\n\n\n\n let gd = AdaGrad::new(5f64, 1f64, 100);\n\n let test_data = vec![100f64];\n\n let params = gd.optimize(&x_sq,\n\n &test_data[..],\n\n &Matrix::zeros(100, 1),\n\n &Matrix::zeros(100, 1));\n\n\n\n assert!(params[0] - 20f64 < 1e-10);\n\n assert!(x_sq.compute_grad(&params, &Matrix::zeros(1, 1), &Matrix::zeros(1, 1)).0 < 1e-10);\n\n}\n\n\n", "file_path": 
"tests/learning/optim/grad_desc.rs", "rank": 66, "score": 54363.77299866991 }, { "content": "#[cfg(feature = \"datasets\")]\n\n#[test]\n\nfn test_regression_datasets_trees() {\n\n use rm::datasets::trees;\n\n let trees = trees::load();\n\n\n\n let mut lin_mod = LinRegressor::default();\n\n lin_mod.train(&trees.data(), &trees.target()).unwrap();\n\n let params = lin_mod.parameters().unwrap();\n\n assert_eq!(params, &Vector::new(vec![-57.98765891838409, 4.708160503017506, 0.3392512342447438]));\n\n\n\n let predicted = lin_mod.predict(&trees.data()).unwrap();\n\n let expected = vec![4.837659653793278, 4.55385163347481, 4.816981265588826, 15.874115228921276,\n\n 19.869008437727473, 21.018326956518717, 16.192688074961563, 19.245949183164257,\n\n 21.413021404689726, 20.187581283767756, 22.015402271048487, 21.468464618616007,\n\n 21.468464618616007, 20.50615412980805, 23.954109686181766, 27.852202904652785,\n\n 31.583966481344966, 33.806481916796706, 30.60097760433255, 28.697035014921106,\n\n 34.388184394951004, 36.008318964043994, 35.38525970948079, 41.76899799551756,\n\n 44.87770231764652, 50.942867757643015, 52.223751092491256, 53.42851282520877,\n\n 53.899328875510534, 53.899328875510534, 68.51530482306926];\n\n assert_eq!(predicted, Vector::new(expected));\n\n}\n\n\n", "file_path": "tests/learning/lin_reg.rs", "rank": 67, "score": 54363.77299866991 }, { "content": "#[test]\n\nfn convex_rmsprop_training() {\n\n let x_sq = XSqModel { c: 20f64 };\n\n\n\n let rms = RMSProp::new(0.05, 0.9, 1e-5, 50);\n\n let test_data = vec![100f64];\n\n let params = rms.optimize(&x_sq,\n\n &test_data[..],\n\n &Matrix::zeros(100, 1),\n\n &Matrix::zeros(100, 1));\n\n\n\n assert!(params[0] - 20f64 < 1e-10);\n\n assert!(x_sq.compute_grad(&params, &Matrix::zeros(1, 1), &Matrix::zeros(1, 1)).0 < 1e-10);\n\n}", "file_path": "tests/learning/optim/grad_desc.rs", "rank": 68, "score": 54363.77299866991 }, { "content": "#[test]\n\nfn convex_gd_training() {\n\n let x_sq = XSqModel { c: 20f64 
};\n\n\n\n let gd = GradientDesc::default();\n\n let test_data = vec![500f64];\n\n let params = gd.optimize(&x_sq,\n\n &test_data[..],\n\n &Matrix::zeros(1, 1),\n\n &Matrix::zeros(1, 1));\n\n\n\n assert!(params[0] - 20f64 < 1e-10);\n\n assert!(x_sq.compute_grad(&params, &Matrix::zeros(1, 1), &Matrix::zeros(1, 1)).0 < 1e-10);\n\n}\n\n\n", "file_path": "tests/learning/optim/grad_desc.rs", "rank": 69, "score": 54363.77299866991 }, { "content": "#[test]\n\nfn convex_fmincg_training() {\n\n let x_sq = XSqModel { c: 20f64 };\n\n\n\n let cgd = ConjugateGD::default();\n\n let test_data = vec![500f64];\n\n let params = cgd.optimize(&x_sq,\n\n &test_data[..],\n\n &Matrix::zeros(1, 1),\n\n &Matrix::zeros(1, 1));\n\n\n\n assert!(params[0] - 20f64 < 1e-10);\n\n assert!(x_sq.compute_grad(&params, &Matrix::zeros(1, 1), &Matrix::zeros(1, 1)).0 < 1e-10);\n\n}\n\n\n", "file_path": "tests/learning/optim/grad_desc.rs", "rank": 70, "score": 54363.77299866991 }, { "content": "#[test]\n\nfn convex_stochastic_gd_training() {\n\n let x_sq = XSqModel { c: 20f64 };\n\n\n\n let gd = StochasticGD::new(0.9f64, 0.1f64, 100);\n\n let test_data = vec![100f64];\n\n let params = gd.optimize(&x_sq,\n\n &test_data[..],\n\n &Matrix::zeros(100, 1),\n\n &Matrix::zeros(100, 1));\n\n\n\n assert!(params[0] - 20f64 < 1e-10);\n\n assert!(x_sq.compute_grad(&params, &Matrix::zeros(1, 1), &Matrix::zeros(1, 1)).0 < 1e-10);\n\n}\n\n\n", "file_path": "tests/learning/optim/grad_desc.rs", "rank": 71, "score": 53337.55864694169 }, { "content": "/// Trait for invertible data transformers\n\npub trait Invertible<T> : Transformer<T> {\n\n /// Maps the inputs using the inverse of the fitted transform.\n\n fn inv_transform(&self, inputs: T) -> LearningResult<T>;\n\n}\n", "file_path": "src/data/transforms/mod.rs", "rank": 72, "score": 52550.968546791555 }, { "content": "#[bench]\n\nfn k_means_predict(b: &mut Bencher) {\n\n\n\n const SAMPLES_PER_CENTROID: usize = 2000;\n\n // Choose two cluster centers, at (-0.5, -0.5) and 
(0, 0.5).\n\n let centroids = Matrix::new(2, 2, vec![-0.5, -0.5, 0.0, 0.5]);\n\n\n\n // Generate some data randomly around the centroids\n\n let samples = generate_data(&centroids, SAMPLES_PER_CENTROID, 0.4);\n\n\n\n let mut model = KMeansClassifier::new(2);\n\n let _ = model.train(&samples).unwrap();\n\n b.iter(|| {\n\n let _ = black_box(model.centroids().as_ref().unwrap());\n\n let _ = black_box(model.predict(&samples).unwrap());\n\n });\n\n}\n", "file_path": "benches/examples/k_means.rs", "rank": 73, "score": 51362.58163370821 }, { "content": "#[bench]\n\nfn k_means_train(b: &mut Bencher) {\n\n\n\n const SAMPLES_PER_CENTROID: usize = 2000;\n\n // Choose two cluster centers, at (-0.5, -0.5) and (0, 0.5).\n\n let centroids = Matrix::new(2, 2, vec![-0.5, -0.5, 0.0, 0.5]);\n\n\n\n // Generate some data randomly around the centroids\n\n let samples = generate_data(&centroids, SAMPLES_PER_CENTROID, 0.4);\n\n\n\n b.iter(|| {\n\n let mut model = black_box(KMeansClassifier::new(2));\n\n let _ = black_box(model.train(&samples).unwrap());\n\n });\n\n}\n\n\n", "file_path": "benches/examples/k_means.rs", "rank": 74, "score": 51362.58163370821 }, { "content": "#[bench]\n\nfn nnet_and_gate_predict(b: &mut Bencher) {\n\n let (inputs, targets, test_inputs) = generate_data();\n\n let layers = &[2, 1];\n\n let criterion = BCECriterion::new(Regularization::L2(0.));\n\n\n\n let mut model = NeuralNet::mlp(layers, criterion, StochasticGD::default(), Sigmoid);\n\n let _ = model.train(&inputs, &targets);\n\n\n\n b.iter(|| {\n\n let _ = black_box(model.predict(&test_inputs));\n\n })\n\n}\n", "file_path": "benches/examples/nnet.rs", "rank": 75, "score": 50265.10151895904 }, { "content": "#[bench]\n\nfn nnet_and_gate_train(b: &mut Bencher) {\n\n let (inputs, targets, _) = generate_data();\n\n let layers = &[2, 1];\n\n let criterion = BCECriterion::new(Regularization::L2(0.));\n\n\n\n b.iter(|| {\n\n let mut model = black_box(NeuralNet::mlp(layers, criterion, StochasticGD::default(), 
Sigmoid));\n\n let _ = black_box(model.train(&inputs, &targets).unwrap());\n\n })\n\n}\n\n\n", "file_path": "benches/examples/nnet.rs", "rank": 76, "score": 50265.10151895904 }, { "content": "/// A trait used to construct Transformers which must first be fitted\n\npub trait TransformFitter<U, T: Transformer<U>> {\n\n /// Fit the inputs to create the `Transformer`\n\n fn fit(self, inputs: &U) -> LearningResult<T>;\n\n}\n\n\n", "file_path": "src/data/transforms/mod.rs", "rank": 77, "score": 49509.28549919953 }, { "content": "#[bench]\n\nfn svm_sign_learner_predict(b: &mut Bencher) {\n\n let (inputs, targets) = generate_data();\n\n\n\n let test_data = (-1000..1000).filter(|&x| x % 100 == 0).map(|x| x as f64).collect::<Vec<_>>();\n\n let test_inputs = Matrix::new(test_data.len(), 1, test_data);\n\n let mut svm_mod = SVM::new(HyperTan::new(100., 0.), 0.3);\n\n let _ = svm_mod.train(&inputs, &targets);\n\n b.iter(|| {\n\n let _ = black_box(svm_mod.predict(&test_inputs).unwrap());\n\n });\n\n}\n", "file_path": "benches/examples/svm.rs", "rank": 78, "score": 49238.887167230816 }, { "content": "#[bench]\n\nfn svm_sign_learner_train(b: &mut Bencher) {\n\n let (inputs, targets) = generate_data();\n\n\n\n // Trainee\n\n b.iter(|| {\n\n let mut svm_mod = black_box(SVM::new(HyperTan::new(100., 0.), 0.3));\n\n let _ = black_box(svm_mod.train(&inputs, &targets).unwrap());\n\n });\n\n}\n\n\n", "file_path": "benches/examples/svm.rs", "rank": 79, "score": 49238.887167230816 }, { "content": "fn generate_data(centroids: &Matrix<f64>,\n\n points_per_centroid: usize,\n\n noise: f64)\n\n -> Matrix<f64> {\n\n assert!(centroids.cols() > 0, \"Centroids cannot be empty.\");\n\n assert!(centroids.rows() > 0, \"Centroids cannot be empty.\");\n\n assert!(noise >= 0f64, \"Noise must be non-negative.\");\n\n let mut raw_cluster_data = Vec::with_capacity(centroids.rows() * points_per_centroid *\n\n centroids.cols());\n\n\n\n let mut rng = thread_rng();\n\n let normal_rv = Normal::new(0f64, 
noise);\n\n\n\n for _ in 0..points_per_centroid {\n\n // Generate points from each centroid\n\n for centroid in centroids.row_iter() {\n\n // Generate a point randomly around the centroid\n\n let mut point = Vec::with_capacity(centroids.cols());\n\n for feature in centroid.iter() {\n\n point.push(feature + normal_rv.ind_sample(&mut rng));\n", "file_path": "examples/k-means_generating_cluster.rs", "rank": 80, "score": 49238.887167230816 }, { "content": "fn generate_data() -> (Matrix<f64>, Vector<f64>) {\n\n // Training data\n\n let inputs = Matrix::new(11, 1, vec![\n\n -0.1, -2., -9., -101., -666.7,\n\n 0., 0.1, 1., 11., 99., 456.7\n\n ]);\n\n let targets = Vector::new(vec![\n\n -1., -1., -1., -1., -1.,\n\n 1., 1., 1., 1., 1., 1.\n\n ]);\n\n\n\n (inputs, targets)\n\n}\n\n\n\n// Sign learner:\n\n// * Model input a float number\n\n// * Model output: A float representing the input sign.\n\n// If the input is positive, the output is close to 1.0.\n\n// If the input is negative, the output is close to -1.0.\n\n// * Model generated with the SVM API.\n", "file_path": "benches/examples/svm.rs", "rank": 81, "score": 48182.17223095396 }, { "content": "/// Sample from an unnormalized distribution.\n\n///\n\n/// The input to this function is assumed to have all positive entries.\n\nfn sample_discretely(unnorm_dist: &Vector<f64>) -> usize {\n\n assert!(unnorm_dist.size() > 0, \"No entries in distribution vector.\");\n\n\n\n let sum = unnorm_dist.sum();\n\n\n\n let rand = thread_rng().gen_range(0.0f64, sum);\n\n\n\n let mut tempsum = 0.0;\n\n for (i, p) in unnorm_dist.data().iter().enumerate() {\n\n tempsum += *p;\n\n\n\n if rand < tempsum {\n\n return i;\n\n }\n\n }\n\n\n\n panic!(\"No random value was sampled! 
There may be more clusters than unique data points.\");\n\n}\n", "file_path": "src/learning/k_means.rs", "rank": 82, "score": 47224.62085918608 }, { "content": "/// Permute the indices of the inputs samples.\n\nfn create_shuffled_indices(num_samples: usize) -> ShuffledIndices {\n\n let mut indices: Vec<usize> = (0..num_samples).collect();\n\n in_place_fisher_yates(&mut indices);\n\n ShuffledIndices(indices)\n\n}\n\n\n", "file_path": "src/analysis/cross_validation.rs", "rank": 83, "score": 46528.91220630832 }, { "content": "fn dist(v1: &[f64], v2: &[f64]) -> f64 {\n\n // ToDo: use metrics\n\n let d: f64 = v1.iter()\n\n .zip(v2.iter())\n\n .map(|(&x, &y)| (x - y) * (x - y))\n\n .fold(0., |s, v| s + v);\n\n d.sqrt()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use std::f64;\n\n use super::KNearest;\n\n\n\n #[test]\n\n fn test_knearest() {\n\n let mut kn = KNearest::new(2, vec![1, 2, 3], vec![3., 2., 1.]);\n\n assert_eq!(kn.k, 2);\n\n assert_eq!(kn.pairs, vec![(3, 1.), (2, 2.)]);\n", "file_path": "src/learning/knn/mod.rs", "rank": 84, "score": 46277.06558905565 }, { "content": "/// max\n\nfn max(data: &Matrix<f64>) -> Vector<f64> {\n\n // ToDo: use rulinalg .max (v0.4.1?)\n\n // https://github.com/AtheMathmo/rulinalg/pull/115\n\n let mut results = Vec::with_capacity(data.cols());\n\n for i in 0..data.cols() {\n\n results.push(data[[0, i]]);\n\n }\n\n for row in data.row_iter() {\n\n for (r, v) in results.iter_mut().zip(row.iter()) {\n\n if *r < *v {\n\n *r = *v;\n\n }\n\n }\n\n }\n\n Vector::new(results)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n", "file_path": "src/learning/knn/binary_tree.rs", "rank": 85, "score": 45374.03290151941 }, { "content": "/// min\n\nfn min(data: &Matrix<f64>) -> Vector<f64> {\n\n // ToDo: use rulinalg .min (v0.4.1?)\n\n // https://github.com/AtheMathmo/rulinalg/pull/115\n\n let mut results = Vec::with_capacity(data.cols());\n\n for i in 0..data.cols() {\n\n results.push(data[[0, i]]);\n\n }\n\n for row in data.row_iter() {\n\n for (r, 
v) in results.iter_mut().zip(row.iter()) {\n\n if *r > *v {\n\n *r = *v;\n\n }\n\n }\n\n }\n\n Vector::new(results)\n\n}\n\n\n", "file_path": "src/learning/knn/binary_tree.rs", "rank": 86, "score": 45374.03290151941 }, { "content": "/// Count target label frequencies\n\n/// TODO: Used in decisition tree, move impl to somewhere\n\nfn freq(labels: &[usize]) -> (Vector<usize>, Vector<usize>) {\n\n let mut map: BTreeMap<usize, usize> = BTreeMap::new();\n\n for l in labels {\n\n let e = map.entry(*l).or_insert(0);\n\n *e += 1;\n\n }\n\n\n\n let mut uniques: Vec<usize> = Vec::with_capacity(map.len());\n\n let mut counts: Vec<usize> = Vec::with_capacity(map.len());\n\n for (&k, &v) in &map {\n\n uniques.push(k);\n\n counts.push(v);\n\n }\n\n (Vector::new(uniques), Vector::new(counts))\n\n}\n\n\n", "file_path": "src/learning/knn/mod.rs", "rank": 87, "score": 44538.59368533192 }, { "content": "fn generate_data() -> (Matrix<f64>, Matrix<f64>, Matrix<f64>) {\n\n const THRESHOLD: f64 = 0.7;\n\n const SAMPLES: usize = 1000;\n\n\n\n let mut input_data = Vec::with_capacity(SAMPLES * 2);\n\n let mut label_data = Vec::with_capacity(SAMPLES);\n\n\n\n for _ in 0..SAMPLES {\n\n // The two inputs are \"signals\" between 0 and 1\n\n let Closed01(left) = random::<Closed01<f64>>();\n\n let Closed01(right) = random::<Closed01<f64>>();\n\n input_data.push(left);\n\n input_data.push(right);\n\n if left > THRESHOLD && right > THRESHOLD {\n\n label_data.push(1.0);\n\n } else {\n\n label_data.push(0.0)\n\n }\n\n }\n\n\n", "file_path": "benches/examples/nnet.rs", "rank": 88, "score": 44526.58943896774 }, { "content": "fn sum<'a, I: Iterator<Item=&'a f64>>(x: I) -> f64 {\n\n x.fold(0f64, |acc, x| acc + x)\n\n}\n\n\n\nmacro_rules! 
bench {\n\n ($name:ident: $params:expr) => {\n\n #[bench]\n\n fn $name(b: &mut Bencher) {\n\n let (rows, cols, k) = $params;\n\n let inputs = generate_data(rows, cols);\n\n let targets = generate_data(rows, 1);\n\n\n\n b.iter(|| {\n\n let mut model = DummyModel { sum: 0f64 };\n\n let _ = black_box(\n\n k_fold_validate(&mut model, &inputs, &targets, k, row_accuracy)\n\n );\n\n });\n\n }\n\n }\n\n}\n\n\n\nbench!(bench_10_10_3: (10, 10, 3));\n\nbench!(bench_1000_10_3: (1000, 10, 3));\n\nbench!(bench_1000_10_10: (1000, 10, 10));\n\nbench!(bench_1000_10_100: (1000, 10, 100));\n", "file_path": "benches/examples/cross_validation.rs", "rank": 89, "score": 44297.808146375886 }, { "content": "fn remove_first_col(mat: Matrix<f64>) -> Matrix<f64>\n\n{\n\n let rows = mat.rows();\n\n let cols = mat.cols();\n\n let mut data = mat.into_vec();\n\n\n\n let len = data.len();\n\n let mut del = 0;\n\n {\n\n let v = &mut *data;\n\n\n\n for i in 0..len {\n\n if i % cols == 0 {\n\n del += 1;\n\n } else if del > 0 {\n\n v[i - del] = v[i];\n\n }\n\n }\n\n }\n\n if del > 0 {\n", "file_path": "src/learning/nnet/net_layer.rs", "rank": 90, "score": 43723.664403943345 }, { "content": "fn generate_data(rows: usize, cols: usize) -> Matrix<f64> {\n\n let mut rng = thread_rng();\n\n let mut data = Vec::with_capacity(rows * cols);\n\n\n\n for _ in 0..data.capacity() {\n\n data.push(rng.gen_range(0f64, 1f64));\n\n }\n\n\n\n Matrix::new(rows, cols, data)\n\n}\n\n\n", "file_path": "benches/examples/cross_validation.rs", "rank": 91, "score": 43676.991198871074 }, { "content": "fn generate_dog_data(training_set_size: u32, test_set_size: u32)\n\n -> (Matrix<f64>, Matrix<f64>, Matrix<f64>, Vec<Dog>) {\n\n let mut randomness = rand::StdRng::new()\n\n .expect(\"we should be able to get an RNG\");\n\n let rng = &mut randomness;\n\n\n\n // We'll train the model on these dogs\n\n let training_dogs = (0..training_set_size)\n\n .map(|_| { Dog::rand(rng) })\n\n .collect::<Vec<_>>();\n\n\n\n // ... 
and then use the model to make predictions about these dogs' color\n\n // given only their trait measurements.\n\n let test_dogs = (0..test_set_size)\n\n .map(|_| { Dog::rand(rng) })\n\n .collect::<Vec<_>>();\n\n\n\n // The model's `.train` method will take two matrices, each with a row for\n\n // each dog in the training set: the rows in the first matrix contain the\n\n // trait measurements; the rows in the second are either [1, 0] or [0, 1]\n", "file_path": "examples/naive_bayes_dogs.rs", "rank": 92, "score": 43196.12030818945 }, { "content": "/// Return distances between given point and data specified with row ids\n\nfn get_distances(data: &Matrix<f64>, point: &[f64], ids: &[usize]) -> Vec<f64> {\n\n assert!(!ids.is_empty(), \"target ids is empty\");\n\n\n\n let mut distances: Vec<f64> = Vec::with_capacity(ids.len());\n\n for id in ids.iter() {\n\n // ToDo: use .row(*id)\n\n let row: Vec<f64> = data.select_rows(&[*id]).into_vec();\n\n // let row: Vec<f64> = self.data.row(*id).into_vec();\n\n let d = dist(point, &row);\n\n distances.push(d);\n\n }\n\n distances\n\n}\n\n\n", "file_path": "src/learning/knn/mod.rs", "rank": 93, "score": 39311.75622361137 }, { "content": "fn evaluate_prediction(hits: &mut u32, dog: &Dog, prediction: &[f64]) -> (Color, bool) {\n\n let predicted_color = dog.color;\n\n let actual_color = if prediction[0] == 1. 
{\n\n Color::Red\n\n } else {\n\n Color::White\n\n };\n\n let accurate = predicted_color == actual_color;\n\n if accurate {\n\n *hits += 1;\n\n }\n\n (actual_color, accurate)\n\n}\n\n\n", "file_path": "examples/naive_bayes_dogs.rs", "rank": 94, "score": 39311.75622361137 }, { "content": "fn generate_data(centroids: &Matrix<f64>, points_per_centroid: usize, noise: f64) -> Matrix<f64> {\n\n assert!(centroids.cols() > 0, \"Centroids cannot be empty.\");\n\n assert!(centroids.rows() > 0, \"Centroids cannot be empty.\");\n\n assert!(noise >= 0f64, \"Noise must be non-negative.\");\n\n let mut raw_cluster_data = Vec::with_capacity(centroids.rows() * points_per_centroid *\n\n centroids.cols());\n\n\n\n let mut rng = thread_rng();\n\n let normal_rv = Normal::new(0f64, noise);\n\n\n\n for _ in 0..points_per_centroid {\n\n // Generate points from each centroid\n\n for centroid in centroids.row_iter() {\n\n // Generate a point randomly around the centroid\n\n let mut point = Vec::with_capacity(centroids.cols());\n\n for feature in centroid.iter() {\n\n point.push(feature + normal_rv.ind_sample(&mut rng));\n\n }\n\n\n\n // Push point to raw_cluster_data\n\n raw_cluster_data.extend(point);\n\n }\n\n }\n\n\n\n Matrix::new(centroids.rows() * points_per_centroid,\n\n centroids.cols(),\n\n raw_cluster_data)\n\n}\n\n\n", "file_path": "benches/examples/k_means.rs", "rank": 95, "score": 38634.639504953986 }, { "content": "fn ordered_distinct<T: Ord + Eq + Copy>(xs: &[T], ys: &[T]) -> Vec<T> {\n\n let mut ds: Vec<T> = xs.iter().chain(ys).cloned().collect();\n\n ds.sort();\n\n ds.dedup();\n\n ds\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::confusion_matrix;\n\n\n\n #[test]\n\n fn confusion_matrix_no_labels() {\n\n let truth = vec![2, 0, 2, 2, 0, 1];\n\n let predictions = vec![0, 0, 2, 2, 0, 2];\n\n\n\n let confusion = confusion_matrix(&predictions, &truth, None);\n\n\n\n let expected = matrix!(2, 0, 0;\n\n 0, 0, 1;\n", "file_path": "src/analysis/confusion_matrix.rs", "rank": 
96, "score": 38054.80361497202 }, { "content": " fn func_inv(x: f64) -> f64 {\n\n x\n\n }\n\n}\n\n\n\n/// Exponential activation function.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Exp;\n\n\n\nimpl ActivationFunc for Exp {\n\n fn func(x: f64) -> f64 {\n\n x.exp()\n\n }\n\n\n\n fn func_grad(x: f64) -> f64 {\n\n Self::func(x)\n\n }\n\n\n\n fn func_grad_from_output(y: f64) -> f64 {\n\n y\n", "file_path": "src/learning/toolkit/activ_fn.rs", "rank": 97, "score": 35155.566964805395 }, { "content": " outputs - targets\n\n }\n\n}\n\n\n\n/// The cross entropy error cost function.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct CrossEntropyError;\n\n\n\nimpl CostFunc<Matrix<f64>> for CrossEntropyError {\n\n fn cost(outputs: &Matrix<f64>, targets: &Matrix<f64>) -> f64 {\n\n // The cost for a single\n\n let log_inv_output = (-outputs + 1f64).apply(&ln);\n\n let log_output = outputs.clone().apply(&ln);\n\n\n\n let mat_cost = targets.elemul(&log_output) + (-targets + 1f64).elemul(&log_inv_output);\n\n\n\n let n = outputs.rows();\n\n\n\n -(mat_cost.sum()) / (n as f64)\n\n }\n", "file_path": "src/learning/toolkit/cost_fn.rs", "rank": 98, "score": 35154.70064694009 }, { "content": "\n\n sq_diff.sum() / (2f64 * (n as f64))\n\n }\n\n\n\n fn grad_cost(outputs: &Matrix<f64>, targets: &Matrix<f64>) -> Matrix<f64> {\n\n outputs - targets\n\n }\n\n}\n\n\n\nimpl CostFunc<Vector<f64>> for MeanSqError {\n\n fn cost(outputs: &Vector<f64>, targets: &Vector<f64>) -> f64 {\n\n let diff = outputs - targets;\n\n let sq_diff = &diff.elemul(&diff);\n\n\n\n let n = diff.size();\n\n\n\n sq_diff.sum() / (2f64 * (n as f64))\n\n }\n\n\n\n fn grad_cost(outputs: &Vector<f64>, targets: &Vector<f64>) -> Vector<f64> {\n", "file_path": "src/learning/toolkit/cost_fn.rs", "rank": 99, "score": 35152.62545812081 } ]
Rust
cmd/replay/src/main.rs
LemonHX/starcoin
d87dbbd8d9b03a93d97075913d35a2c99f216222
use clap::Parser; use sp_utils::stop_watch::start_watch; use starcoin_chain::verifier::Verifier; use starcoin_chain::verifier::{BasicVerifier, ConsensusVerifier, FullVerifier, NoneVerifier}; use starcoin_chain::{BlockChain, ChainReader}; use starcoin_config::RocksdbConfig; use starcoin_config::{BuiltinNetworkID, ChainNetwork}; use starcoin_genesis::Genesis; use starcoin_storage::cache_storage::CacheStorage; use starcoin_storage::db_storage::DBStorage; use starcoin_storage::storage::StorageInstance; use starcoin_storage::{BlockStore, Storage}; use starcoin_types::startup_info::StartupInfo; use std::path::PathBuf; use std::sync::Arc; use std::time::Instant; #[derive(Debug, Parser)] #[clap(name = "replay")] pub struct ReplayOpt { #[clap(long, short = 'n')] pub net: Option<BuiltinNetworkID>, #[clap(short = 'f', long, parse(from_os_str))] pub from: PathBuf, #[clap(short = 't', long, parse(from_os_str))] pub to: PathBuf, #[clap(long)] pub end_block: Option<u64>, #[clap(long, short = 'c', default_value = "20000")] pub block_num: u64, #[clap(possible_values = Verifier::variants(), ignore_case = true)] pub verifier: Verifier, #[clap(long, short = 'w')] pub watch: bool, } fn main() -> anyhow::Result<()> { let _logger = starcoin_logger::init(); let opts: ReplayOpt = ReplayOpt::parse(); let network = match opts.net { Some(network) => network, None => BuiltinNetworkID::Proxima, }; let net = ChainNetwork::new_builtin(network); let from_dir = opts.from; let end_block = opts.end_block; let to_dir = opts.to; if opts.watch { start_watch(); } let db_storage = DBStorage::new( from_dir.join("starcoindb/db"), RocksdbConfig::default(), None, ) .unwrap(); let storage = Arc::new( Storage::new(StorageInstance::new_cache_and_db_instance( CacheStorage::new(None), db_storage, )) .unwrap(), ); let (chain_info, _) = Genesis::init_and_check_storage(&net, storage.clone(), from_dir.as_ref()) .expect("init storage by genesis fail."); let chain = BlockChain::new(net.time_service(), 
chain_info.head().id(), storage, None) .expect("create block chain should success."); let storage2 = Arc::new( Storage::new(StorageInstance::new_cache_and_db_instance( CacheStorage::new(None), DBStorage::new(to_dir.join("starcoindb"), RocksdbConfig::default(), None).unwrap(), )) .unwrap(), ); let (chain_info2, _) = Genesis::init_and_check_storage(&net, storage2.clone(), to_dir.as_ref()) .expect("init storage by genesis fail."); let mut chain2 = BlockChain::new( net.time_service(), chain_info2.status().head().id(), storage2.clone(), None, ) .expect("create block chain should success."); let time_begin = Instant::now(); let end_block = end_block.unwrap_or_else(|| chain.current_header().number()); { let b = chain2.get_block_by_number(end_block)?; if let Some(h) = b { if h.id() == chain.current_header().id() { println!("target chain already synced with source chain"); } else { println!("target chain have different block with source chain at latest block: {}, target: {}, source: {}", end_block, h.id(), chain.current_header().id(), ); } return Ok(()); } } let start_block = chain2.current_header().number() + 1; let mut last_block_hash = None; for i in start_block..=end_block { if let Ok(Some(block)) = chain.get_block_by_number(i) { let start = Instant::now(); let expected_state_root = block.header().state_root(); let block_id = block.id(); let block_height = block.header().number(); match opts.verifier { Verifier::Basic => { chain2.apply_with_verifier::<BasicVerifier>(block).unwrap(); } Verifier::Consensus => { chain2 .apply_with_verifier::<ConsensusVerifier>(block) .unwrap(); } Verifier::None => { chain2.apply_with_verifier::<NoneVerifier>(block).unwrap(); } Verifier::Full => { chain2.apply_with_verifier::<FullVerifier>(block).unwrap(); } }; println!( "apply block {} at height: {}, time_used: {:?}, source state root: {}, target state root: {}", block_id, block_height, start.elapsed(), expected_state_root, chain2.chain_state_reader().state_root() ); last_block_hash = 
Some(block_id); if i % 100 == 0 { if let Some(last_block_hash) = last_block_hash { let startup_info = StartupInfo::new(last_block_hash); storage2.save_startup_info(startup_info)?; } } } else { println!("read source block err, number : {:?}", i); break; } } if let Some(last_block_hash) = last_block_hash { let startup_info = StartupInfo::new(last_block_hash); storage2.save_startup_info(startup_info)?; } println!("apply use time: {:?}", time_begin.elapsed()); Ok(()) }
use clap::Parser; use sp_utils::stop_watch::start_watch; use starcoin_chain::verifier::Verifier; use starcoin_chain::verifier::{BasicVerifier, ConsensusVerifier, FullVerifier, NoneVerifier}; use starcoin_chain::{BlockChain, ChainReader}; use starcoin_config::RocksdbConfig; use starcoin_config::{BuiltinNetworkID, ChainNetwork}; use starcoin_genesis::Genesis; use starcoin_storage::cache_storage::CacheStorage; use starcoin_storage::db_storage::DBStorage; use starcoin_storage::storage::StorageInstance; use starcoin_storage::{BlockStore, Storage}; use starcoin_types::startup_info::StartupInfo; use std::path::PathBuf; use std::sync::Arc; use std::time::Instant; #[derive(Debug, Parser)] #[clap(name = "replay")] pub struct ReplayOpt { #[clap(long, short = 'n')] pub net: Option<BuiltinNetworkID>, #[clap(short = 'f', long, parse(from_os_str))] pub from: PathBuf, #[clap(short = 't', long, parse(from_os_str))] pub to: PathBuf, #[clap(long)] pub end_block: Option<u64>, #[clap(long, short = 'c', default_value = "20000")] pub block_num: u64, #[clap(possible_values = Verifier::variants(), ignore_case = true)] pub verifier: Verifier, #[clap(long, short = 'w')] pub watch: bool, } fn main() -> anyhow::Result<()> { let _logger = starcoin_logger::init(); let opts: ReplayOpt = ReplayOpt::parse(); let network = match opts.net { Some(network) => network, None => BuiltinNetworkID::Proxima, }; let net = ChainNetwork::new_builtin(network); let from_dir = opts.from; let end_block = opts.end_block; let to_dir = opts.to; if opts.watch { start_watch(); } let db_storage = DBStor
)) .unwrap(), ); let (chain_info, _) = Genesis::init_and_check_storage(&net, storage.clone(), from_dir.as_ref()) .expect("init storage by genesis fail."); let chain = BlockChain::new(net.time_service(), chain_info.head().id(), storage, None) .expect("create block chain should success."); let storage2 = Arc::new( Storage::new(StorageInstance::new_cache_and_db_instance( CacheStorage::new(None), DBStorage::new(to_dir.join("starcoindb"), RocksdbConfig::default(), None).unwrap(), )) .unwrap(), ); let (chain_info2, _) = Genesis::init_and_check_storage(&net, storage2.clone(), to_dir.as_ref()) .expect("init storage by genesis fail."); let mut chain2 = BlockChain::new( net.time_service(), chain_info2.status().head().id(), storage2.clone(), None, ) .expect("create block chain should success."); let time_begin = Instant::now(); let end_block = end_block.unwrap_or_else(|| chain.current_header().number()); { let b = chain2.get_block_by_number(end_block)?; if let Some(h) = b { if h.id() == chain.current_header().id() { println!("target chain already synced with source chain"); } else { println!("target chain have different block with source chain at latest block: {}, target: {}, source: {}", end_block, h.id(), chain.current_header().id(), ); } return Ok(()); } } let start_block = chain2.current_header().number() + 1; let mut last_block_hash = None; for i in start_block..=end_block { if let Ok(Some(block)) = chain.get_block_by_number(i) { let start = Instant::now(); let expected_state_root = block.header().state_root(); let block_id = block.id(); let block_height = block.header().number(); match opts.verifier { Verifier::Basic => { chain2.apply_with_verifier::<BasicVerifier>(block).unwrap(); } Verifier::Consensus => { chain2 .apply_with_verifier::<ConsensusVerifier>(block) .unwrap(); } Verifier::None => { chain2.apply_with_verifier::<NoneVerifier>(block).unwrap(); } Verifier::Full => { chain2.apply_with_verifier::<FullVerifier>(block).unwrap(); } }; println!( "apply block {} at 
height: {}, time_used: {:?}, source state root: {}, target state root: {}", block_id, block_height, start.elapsed(), expected_state_root, chain2.chain_state_reader().state_root() ); last_block_hash = Some(block_id); if i % 100 == 0 { if let Some(last_block_hash) = last_block_hash { let startup_info = StartupInfo::new(last_block_hash); storage2.save_startup_info(startup_info)?; } } } else { println!("read source block err, number : {:?}", i); break; } } if let Some(last_block_hash) = last_block_hash { let startup_info = StartupInfo::new(last_block_hash); storage2.save_startup_info(startup_info)?; } println!("apply use time: {:?}", time_begin.elapsed()); Ok(()) }
age::new( from_dir.join("starcoindb/db"), RocksdbConfig::default(), None, ) .unwrap(); let storage = Arc::new( Storage::new(StorageInstance::new_cache_and_db_instance( CacheStorage::new(None), db_storage,
random
[ { "content": "pub fn random_txn(seq_num: u64, net: &ChainNetwork) -> SignedUserTransaction {\n\n let random_public_key = random_public_key();\n\n let addr = account_address::from_public_key(&random_public_key);\n\n peer_to_peer_txn_sent_as_association(\n\n addr,\n\n seq_num,\n\n 1000,\n\n net.time_service().now_secs() + DEFAULT_EXPIRATION_TIME,\n\n net,\n\n )\n\n}\n", "file_path": "benchmarks/src/lib.rs", "rank": 0, "score": 334730.4870107643 }, { "content": "pub fn gen_blockchain_with_blocks_for_test(count: u64, net: &ChainNetwork) -> Result<BlockChain> {\n\n let mut block_chain = gen_blockchain_for_test(net)?;\n\n let miner_account = AccountInfo::random();\n\n for _i in 0..count {\n\n let (block_template, _) = block_chain\n\n .create_block_template(*miner_account.address(), None, Vec::new(), vec![], None)\n\n .unwrap();\n\n let block = block_chain\n\n .consensus()\n\n .create_block(block_template, net.time_service().as_ref())?;\n\n block_chain.apply(block)?;\n\n }\n\n\n\n Ok(block_chain)\n\n}\n", "file_path": "test-helper/src/chain.rs", "rank": 1, "score": 317421.32859906653 }, { "content": "pub fn vote_language_version(_net: &ChainNetwork, lang_version: u64) -> ScriptFunction {\n\n ScriptFunction::new(\n\n ModuleId::new(\n\n core_code_address(),\n\n Identifier::new(\"OnChainConfigScripts\").unwrap(),\n\n ),\n\n Identifier::new(\"propose_update_move_language_version\").unwrap(),\n\n vec![],\n\n vec![\n\n bcs_ext::to_bytes(&lang_version).unwrap(),\n\n bcs_ext::to_bytes(&0u64).unwrap(),\n\n ],\n\n )\n\n}\n\n\n", "file_path": "test-helper/src/dao.rs", "rank": 2, "score": 301666.4099623355 }, { "content": "///reward on chain config script\n\npub fn vote_reward_scripts(_net: &ChainNetwork, reward_delay: u64) -> ScriptFunction {\n\n ScriptFunction::new(\n\n ModuleId::new(\n\n core_code_address(),\n\n Identifier::new(\"OnChainConfigScripts\").unwrap(),\n\n ),\n\n Identifier::new(\"propose_update_reward_config\").unwrap(),\n\n vec![],\n\n vec![\n\n 
bcs_ext::to_bytes(&reward_delay).unwrap(),\n\n bcs_ext::to_bytes(&0u64).unwrap(),\n\n ],\n\n )\n\n}\n\n\n", "file_path": "test-helper/src/dao.rs", "rank": 3, "score": 301666.4099623355 }, { "content": "/// vote txn publish option scripts\n\npub fn vote_txn_timeout_script(_net: &ChainNetwork, duration_seconds: u64) -> ScriptFunction {\n\n ScriptFunction::new(\n\n ModuleId::new(\n\n core_code_address(),\n\n Identifier::new(\"OnChainConfigScripts\").unwrap(),\n\n ),\n\n Identifier::new(\"propose_update_txn_timeout_config\").unwrap(),\n\n vec![],\n\n vec![\n\n bcs_ext::to_bytes(&duration_seconds).unwrap(),\n\n bcs_ext::to_bytes(&0u64).unwrap(),\n\n ],\n\n )\n\n}\n", "file_path": "test-helper/src/dao.rs", "rank": 4, "score": 297162.5649484836 }, { "content": "pub fn full_sync_task<H, A, F, N>(\n\n current_block_id: HashValue,\n\n target: SyncTarget,\n\n skip_pow_verify: bool,\n\n time_service: Arc<dyn TimeService>,\n\n storage: Arc<dyn Store>,\n\n block_event_handle: H,\n\n fetcher: Arc<F>,\n\n ancestor_event_handle: A,\n\n peer_provider: N,\n\n max_retry_times: u64,\n\n sync_metrics: Option<SyncMetrics>,\n\n vm_metrics: Option<VMMetrics>,\n\n) -> Result<(\n\n BoxFuture<'static, Result<BlockChain, TaskError>>,\n\n TaskHandle,\n\n Arc<TaskEventCounterHandle>,\n\n)>\n\nwhere\n\n H: BlockConnectedEventHandle + Sync + 'static,\n", "file_path": "sync/src/tasks/mod.rs", "rank": 5, "score": 291894.6021060148 }, { "content": "pub fn prepare_customized_genesis(net: &ChainNetwork) -> ChainStateDB {\n\n let chain_state = ChainStateDB::mock();\n\n let genesis_txn = Genesis::build_genesis_transaction(net).unwrap();\n\n Genesis::execute_genesis_txn(&chain_state, genesis_txn).unwrap();\n\n chain_state\n\n}\n\n\n", "file_path": "test-helper/src/executor.rs", "rank": 6, "score": 286188.3734784977 }, { "content": "pub fn build_init_script_v2(net: &ChainNetwork) -> ScriptFunction {\n\n let genesis_config = net.genesis_config();\n\n let chain_id = net.chain_id().id();\n\n let 
genesis_timestamp = net.genesis_block_parameter().timestamp;\n\n let genesis_parent_hash = net.genesis_block_parameter().parent_hash;\n\n\n\n let genesis_auth_key = genesis_config\n\n .genesis_key_pair\n\n .as_ref()\n\n .map(|(_, public_key)| AuthenticationKey::ed25519(public_key).to_vec())\n\n .unwrap_or_else(Vec::new);\n\n\n\n let association_auth_key =\n\n AuthenticationKey::multi_ed25519(&genesis_config.association_key_pair.1).to_vec();\n\n\n\n let instruction_schedule =\n\n bcs_ext::to_bytes(&genesis_config.vm_config.gas_schedule.instruction_table)\n\n .expect(\"Cannot serialize gas schedule\");\n\n let native_schedule = bcs_ext::to_bytes(&genesis_config.vm_config.gas_schedule.native_table)\n\n .expect(\"Cannot serialize gas schedule\");\n", "file_path": "vm/transaction-builder/src/lib.rs", "rank": 7, "score": 282226.7318051606 }, { "content": "pub fn build_init_script_v1(net: &ChainNetwork) -> ScriptFunction {\n\n let genesis_config = net.genesis_config();\n\n let chain_id = net.chain_id().id();\n\n let genesis_timestamp = net.genesis_block_parameter().timestamp;\n\n let genesis_parent_hash = net.genesis_block_parameter().parent_hash;\n\n\n\n let genesis_auth_key = genesis_config\n\n .genesis_key_pair\n\n .as_ref()\n\n .map(|(_, public_key)| AuthenticationKey::ed25519(public_key).to_vec())\n\n .unwrap_or_else(Vec::new);\n\n\n\n let association_auth_key =\n\n AuthenticationKey::multi_ed25519(&genesis_config.association_key_pair.1).to_vec();\n\n\n\n let instruction_schedule =\n\n bcs_ext::to_bytes(&genesis_config.vm_config.gas_schedule.instruction_table)\n\n .expect(\"Cannot serialize gas schedule\");\n\n let native_schedule = bcs_ext::to_bytes(&genesis_config.vm_config.gas_schedule.native_table)\n\n .expect(\"Cannot serialize gas schedule\");\n", "file_path": "vm/transaction-builder/src/lib.rs", "rank": 8, "score": 282226.7318051606 }, { "content": "pub fn gen_blockchain_for_test(net: &ChainNetwork) -> Result<BlockChain> {\n\n let (storage, chain_info, _) 
=\n\n Genesis::init_storage_for_test(net).expect(\"init storage by genesis fail.\");\n\n\n\n let block_chain = BlockChain::new(net.time_service(), chain_info.head().id(), storage, None)?;\n\n Ok(block_chain)\n\n}\n\n\n", "file_path": "test-helper/src/chain.rs", "rank": 9, "score": 277874.98749974224 }, { "content": "pub fn is_global(ip: Ipv4Addr) -> bool {\n\n // check if this address is 192.0.0.9 or 192.0.0.10. These addresses are the only two\n\n // globally routable addresses in the 192.0.0.0/24 range.\n\n if u32::from(ip) == 0xc000_0009 || u32::from(ip) == 0xc000_000a {\n\n return true;\n\n }\n\n !ip.is_private()\n\n && !ip.is_loopback()\n\n && !ip.is_link_local()\n\n && !ip.is_broadcast()\n\n && !ip.is_documentation()\n\n && !is_shared(ip)\n\n && !is_ietf_protocol_assignment(ip)\n\n && !is_reserved(ip)\n\n && !is_benchmarking(ip)\n\n // Make sure the address is not in 0.0.0.0/8\n\n && ip.octets()[0] != 0\n\n}\n\n\n", "file_path": "network/src/helper.rs", "rank": 10, "score": 276995.03143090394 }, { "content": "pub fn timeout<F, T>(timeout: u64, f: F, tx: Sender<Result<T>>)\n\nwhere\n\n F: FnOnce() -> T,\n\n F: Send + 'static,\n\n T: Send + 'static,\n\n{\n\n let handle = timeout_join_handler::spawn(f);\n\n let result = handle\n\n .join(Duration::from_secs(timeout))\n\n .map_err(|e| anyhow::anyhow!(\"{}\", e));\n\n let _ = tx.send(result);\n\n}\n\n\n", "file_path": "commons/stest/src/lib.rs", "rank": 11, "score": 275772.64558487764 }, { "content": "/// Start watching.\n\npub fn start_watch() {\n\n let _ = G_WATCH_STATUS\n\n .compare_exchange(false, true, Ordering::SeqCst, Ordering::Relaxed)\n\n .unwrap_or_else(|x| x);\n\n}\n\n\n", "file_path": "commons/utils/src/stop_watch.rs", "rank": 12, "score": 274801.94604499784 }, { "content": "/// Check the address is a memory protocol Multiaddr.\n\npub fn is_memory_addr(addr: &Multiaddr) -> bool {\n\n addr.iter()\n\n .any(|protocol| matches!(protocol, libp2p::core::multiaddr::Protocol::Memory(_)))\n\n}\n\n\n\n/// Address 
of a node, including its identity.\n\n///\n\n/// This struct represents a decoded version of a multiaddress that ends with `/p2p/<peerid>`.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// # use network_p2p_types::{Multiaddr, PeerId, MultiaddrWithPeerId};\n\n/// let addr: MultiaddrWithPeerId =\n\n/// \"/ip4/198.51.100.19/tcp/30333/p2p/QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV\".parse().unwrap();\n\n/// assert_eq!(addr.peer_id.to_base58(), \"QmSk5HQbn6LhUwDiNMseVUjuRYhEtYj4aUZ6WfWoGURpdV\");\n\n/// assert_eq!(addr.multiaddr.to_string(), \"/ip4/198.51.100.19/tcp/30333\");\n\n/// ```\n\n#[derive(\n\n Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord, serde::Serialize, serde::Deserialize,\n", "file_path": "network-p2p/types/src/lib.rs", "rank": 13, "score": 266949.6308890258 }, { "content": "/// Build memory protocol Multiaddr by port\n\npub fn memory_addr(port: u64) -> Multiaddr {\n\n build_multiaddr!(Memory(port))\n\n}\n\n\n", "file_path": "network-p2p/types/src/lib.rs", "rank": 14, "score": 266880.20025361347 }, { "content": "pub fn export<W: std::io::Write>(\n\n db: &str,\n\n mut csv_writer: Writer<W>,\n\n schema: DbSchema,\n\n) -> anyhow::Result<()> {\n\n let db_storage = DBStorage::open_with_cfs(\n\n db,\n\n StorageVersion::current_version()\n\n .get_column_family_names()\n\n .to_vec(),\n\n true,\n\n Default::default(),\n\n None,\n\n )?;\n\n let mut iter = db_storage.iter::<Vec<u8>, Vec<u8>>(schema.to_string().as_str())?;\n\n iter.seek_to_first();\n\n let key_codec = schema.get_key_codec();\n\n let value_codec = schema.get_value_codec();\n\n let fields = schema.get_fields();\n\n // write csv header.\n", "file_path": "cmd/db-exporter/src/main.rs", "rank": 15, "score": 261484.18030638195 }, { "content": "pub fn mock<S, F>(f: F) -> MockFn<S>\n\nwhere\n\n S: ActorService,\n\n F: FnMut(Box<dyn Any>, &mut ServiceContext<S>) -> Box<dyn Any> + Send + 'static,\n\n{\n\n Box::new(f)\n\n}\n\n\n\nimpl<S> MockHandler<S> for MockFn<S>\n\nwhere\n\n S: 
ActorService,\n\n{\n\n fn handle(&mut self, r: Box<dyn Any>, ctx: &mut ServiceContext<S>) -> Box<dyn Any> {\n\n self(r, ctx)\n\n }\n\n}\n", "file_path": "commons/service-registry/src/mocker.rs", "rank": 16, "score": 250173.52880349525 }, { "content": "#[derive(Debug, Parser)]\n\nstruct Opt {\n\n #[clap(long, default_value = \"200\")]\n\n num_accounts: usize,\n\n\n\n #[clap(long, default_value = \"1000000\")]\n\n init_account_balance: u64,\n\n\n\n #[clap(long, default_value = \"20\")]\n\n block_size: usize,\n\n\n\n #[clap(long, default_value = \"10\")]\n\n num_transfer_blocks: usize,\n\n}\n\n\n", "file_path": "executor/benchmark/src/main.rs", "rank": 17, "score": 248331.23740731142 }, { "content": "pub fn with_logger<F, R>(f: F) -> R\n\nwhere\n\n F: FnOnce(&Logger) -> R,\n\n{\n\n f(&(*G_GLOBAL_SLOG_LOGGER.load()))\n\n}\n", "file_path": "commons/logger/src/structured_log.rs", "rank": 18, "score": 246654.87516942137 }, { "content": "pub fn build_stdlib_package(net: &ChainNetwork, stdlib_option: StdLibOptions) -> Result<Package> {\n\n let init_script = match net.genesis_config().stdlib_version {\n\n StdlibVersion::Version(1) => build_init_script_v1(net),\n\n _ => build_init_script_v2(net),\n\n };\n\n stdlib_package(stdlib_option, Some(init_script))\n\n}\n\n\n", "file_path": "vm/transaction-builder/src/lib.rs", "rank": 19, "score": 246146.54167825016 }, { "content": "#[derive(Parser)]\n\nstruct Opt {\n\n #[clap(subcommand)]\n\n cmd: Option<Cmd>,\n\n}\n\n\n", "file_path": "cmd/db-exporter/src/main.rs", "rank": 20, "score": 243365.8290100158 }, { "content": "/// Returns the number of non-leap seconds since January 1, 1970 0:00:00 UTC\n\n/// (aka \"UNIX timestamp\").\n\npub fn get_current_timestamp() -> u64 {\n\n chrono::Utc::now().timestamp() as u64\n\n}\n\n\n", "file_path": "vm/types/src/transaction/helpers.rs", "rank": 21, "score": 242556.46895190288 }, { "content": "pub fn error_split(code: u64) -> (u8, u64) {\n\n let category = code as u8;\n\n let reason = code >> 
8;\n\n (category, reason)\n\n}\n\n\n", "file_path": "vm/vm-runtime/src/errors.rs", "rank": 22, "score": 241449.33306853008 }, { "content": "///vote script consensus\n\npub fn vote_script_consensus(_net: &ChainNetwork, strategy: u8) -> ScriptFunction {\n\n ScriptFunction::new(\n\n ModuleId::new(\n\n core_code_address(),\n\n Identifier::new(\"OnChainConfigScripts\").unwrap(),\n\n ),\n\n Identifier::new(\"propose_update_consensus_config\").unwrap(),\n\n vec![],\n\n vec![\n\n bcs_ext::to_bytes(&80u64).unwrap(),\n\n bcs_ext::to_bytes(&10000u64).unwrap(),\n\n bcs_ext::to_bytes(&64000000000u128).unwrap(),\n\n bcs_ext::to_bytes(&10u64).unwrap(),\n\n bcs_ext::to_bytes(&48u64).unwrap(),\n\n bcs_ext::to_bytes(&24u64).unwrap(),\n\n bcs_ext::to_bytes(&1000u64).unwrap(),\n\n bcs_ext::to_bytes(&60000u64).unwrap(),\n\n bcs_ext::to_bytes(&2u64).unwrap(),\n\n bcs_ext::to_bytes(&1000000u64).unwrap(),\n\n bcs_ext::to_bytes(&strategy).unwrap(),\n\n bcs_ext::to_bytes(&0u64).unwrap(),\n\n ],\n\n )\n\n}\n\n\n", "file_path": "test-helper/src/dao.rs", "rank": 23, "score": 238301.27454269584 }, { "content": "/// Watch some method handle time.\n\npub fn watch(watch_name: &str, label: &str) {\n\n if G_WATCH_STATUS.load(Ordering::SeqCst) {\n\n let stop_watch = match G_WATCH_MAP.get(watch_name) {\n\n Some(stop_watch) => stop_watch,\n\n None => &G_DEFAULT_WATCH,\n\n };\n\n let mut watch = stop_watch.lock();\n\n watch.restart();\n\n println!(\"{:?}: {:?}\", label, watch.ns());\n\n }\n\n}\n\n\n", "file_path": "commons/utils/src/stop_watch.rs", "rank": 24, "score": 236547.03933572682 }, { "content": "pub fn build_network_worker(\n\n network_config: &NetworkConfig,\n\n chain_info: ChainInfo,\n\n protocols: Vec<Cow<'static, str>>,\n\n rpc_service: Option<(RpcInfo, ServiceRef<NetworkRpcService>)>,\n\n metrics_registry: Option<Registry>,\n\n) -> Result<(PeerInfo, NetworkWorker)> {\n\n let node_name = network_config.node_name();\n\n let discover_local = network_config.discover_local();\n\n let 
transport_config = if is_memory_addr(&network_config.listen()) {\n\n TransportConfig::MemoryOnly\n\n } else {\n\n TransportConfig::Normal {\n\n enable_mdns: discover_local,\n\n allow_private_ipv4: true,\n\n wasm_external_transport: None,\n\n }\n\n };\n\n //TODO define RequestResponseConfig by rpc api\n\n let rpc_protocols = match rpc_service {\n", "file_path": "network/src/worker.rs", "rank": 25, "score": 236463.7711939494 }, { "content": "/// Stop watching.\n\npub fn stop_watch() {\n\n let _ = G_WATCH_STATUS\n\n .compare_exchange(true, false, Ordering::SeqCst, Ordering::Relaxed)\n\n .unwrap_or_else(|x| x);\n\n}\n", "file_path": "commons/utils/src/stop_watch.rs", "rank": 26, "score": 233521.50062898122 }, { "content": "pub fn load_config_with_opt(opt: &StarcoinOpt) -> Result<NodeConfig> {\n\n NodeConfig::load_with_opt(opt)\n\n}\n\n\n", "file_path": "config/src/lib.rs", "rank": 27, "score": 233465.66967289808 }, { "content": "/// Load a secret key from a file, if it exists, or generate a\n\n/// new secret key and write it to that file. 
In either case,\n\n/// the secret key is returned.\n\nfn get_secret<P, F, G, E, W, K>(file: P, parse: F, generate: G, serialize: W) -> io::Result<K>\n\nwhere\n\n P: AsRef<Path>,\n\n F: for<'r> FnOnce(&'r mut [u8]) -> Result<K, E>,\n\n G: FnOnce() -> K,\n\n E: Error + Send + Sync + 'static,\n\n W: Fn(&K) -> Vec<u8>,\n\n{\n\n std::fs::read(&file)\n\n .and_then(|mut sk_bytes| {\n\n parse(&mut sk_bytes).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })\n\n .or_else(|e| {\n\n if e.kind() == io::ErrorKind::NotFound {\n\n file.as_ref().parent().map_or(Ok(()), fs::create_dir_all)?;\n\n let sk = generate();\n\n let mut sk_vec = serialize(&sk);\n\n write_secret_file(file, &sk_vec)?;\n\n sk_vec.zeroize();\n\n Ok(sk)\n\n } else {\n\n Err(e)\n\n }\n\n })\n\n}\n\n\n", "file_path": "network-p2p/src/config.rs", "rank": 29, "score": 232085.01702108685 }, { "content": "pub fn get_free_mem_size() -> Result<u64> {\n\n let sys = System::new();\n\n let free = match sys.memory() {\n\n Ok(mem) => mem.free.as_u64(),\n\n Err(_x) => 0u64,\n\n };\n\n Ok(free)\n\n}\n", "file_path": "commons/system/src/lib.rs", "rank": 30, "score": 230608.0488412859 }, { "content": "fn full_update_with_version(version_number: u64) -> PathBuf {\n\n let options = fs_extra::dir::CopyOptions::new();\n\n\n\n let mut stdlib_src = PathBuf::from(LATEST_COMPILED_OUTPUT_PATH);\n\n stdlib_src.push(STDLIB_DIR_NAME);\n\n\n\n // into version dir.\n\n let mut dest = PathBuf::from(COMPILED_OUTPUT_PATH);\n\n dest.push(format!(\"{}\", version_number));\n\n\n\n // create if not exists.\n\n {\n\n if !dest.exists() {\n\n std::fs::create_dir_all(&dest).unwrap();\n\n }\n\n }\n\n\n\n {\n\n dest.push(STDLIB_DIR_NAME);\n\n if dest.exists() {\n\n std::fs::remove_dir_all(&dest).unwrap();\n\n }\n\n dest.pop();\n\n }\n\n fs_extra::dir::copy(stdlib_src, &dest, &options).unwrap();\n\n dest\n\n}\n\n\n", "file_path": "vm/stdlib/src/main.rs", "rank": 31, "score": 228947.70039817598 }, { "content": "/// vote vm config 
scripts\n\npub fn vote_vm_config_script(_net: &ChainNetwork, vm_config: VMConfig) -> ScriptFunction {\n\n let gas_constants = &vm_config.gas_schedule.gas_constants;\n\n ScriptFunction::new(\n\n ModuleId::new(\n\n core_code_address(),\n\n Identifier::new(\"OnChainConfigScripts\").unwrap(),\n\n ),\n\n Identifier::new(\"propose_update_vm_config\").unwrap(),\n\n vec![],\n\n vec![\n\n bcs_ext::to_bytes(\n\n &bcs_ext::to_bytes(&vm_config.gas_schedule.instruction_table).unwrap(),\n\n )\n\n .unwrap(),\n\n bcs_ext::to_bytes(&bcs_ext::to_bytes(&vm_config.gas_schedule.native_table).unwrap())\n\n .unwrap(),\n\n bcs_ext::to_bytes(&gas_constants.global_memory_per_byte_cost.get()).unwrap(),\n\n bcs_ext::to_bytes(&gas_constants.global_memory_per_byte_write_cost.get()).unwrap(),\n\n bcs_ext::to_bytes(&gas_constants.min_transaction_gas_units.get()).unwrap(),\n\n bcs_ext::to_bytes(&gas_constants.large_transaction_cutoff.get()).unwrap(),\n", "file_path": "test-helper/src/dao.rs", "rank": 32, "score": 227501.30167165224 }, { "content": "pub fn prepare_genesis() -> (ChainStateDB, ChainNetwork) {\n\n let net = ChainNetwork::new_test();\n\n let chain_state = ChainStateDB::mock();\n\n let genesis_txn = Genesis::build_genesis_transaction(&net).unwrap();\n\n Genesis::execute_genesis_txn(&chain_state, genesis_txn).unwrap();\n\n (chain_state, net)\n\n}\n\n\n", "file_path": "test-helper/src/executor.rs", "rank": 33, "score": 226441.87682667363 }, { "content": "pub fn init_slog_logger(file: PathBuf, enable_stderr: bool) -> Result<()> {\n\n let logger = create_default_root_logger(file, slog::Level::Info, enable_stderr)?;\n\n G_GLOBAL_SLOG_LOGGER.store(Arc::new(logger));\n\n Ok(())\n\n}\n\n\n", "file_path": "commons/logger/src/structured_log.rs", "rank": 34, "score": 224667.3601331258 }, { "content": "pub fn run_node_by_opt(\n\n opt: &StarcoinOpt,\n\n) -> Result<(Option<NodeHandle>, Arc<NodeConfig>), NodeStartError> {\n\n //check genesis config is ready\n\n let mut base_config =\n\n 
BaseConfig::load_with_opt(opt).map_err(NodeStartError::LoadConfigError)?;\n\n if !base_config.net().is_ready() {\n\n let future_block_resolve =\n\n RpcFutureBlockParameterResolver::new(base_config.net().id().clone());\n\n base_config\n\n .resolve(&future_block_resolve)\n\n .map_err(NodeStartError::LoadConfigError)?;\n\n }\n\n let config = Arc::new(\n\n base_config\n\n .into_node_config(opt)\n\n .map_err(NodeStartError::LoadConfigError)?,\n\n );\n\n let ipc_file = config.rpc.get_ipc_file();\n\n let node_handle = if !ipc_file.exists() {\n\n let node_handle = run_node(config.clone())?;\n\n Some(node_handle)\n\n } else {\n\n //TODO check ipc file is available.\n\n info!(\"Node has started at {:?}\", ipc_file);\n\n None\n\n };\n\n Ok((node_handle, config))\n\n}\n\n\n", "file_path": "node/src/lib.rs", "rank": 35, "score": 223575.67765348405 }, { "content": "pub fn export(\n\n db: &str,\n\n output: &Path,\n\n block_id: HashValue,\n\n resource_struct_tag: StructTag,\n\n fields: &[String],\n\n) -> anyhow::Result<()> {\n\n let db_storage = DBStorage::open_with_cfs(\n\n db,\n\n StorageVersion::current_version()\n\n .get_column_family_names()\n\n .to_vec(),\n\n true,\n\n Default::default(),\n\n None,\n\n )?;\n\n let storage = Storage::new(StorageInstance::new_db_instance(db_storage))?;\n\n let storage = Arc::new(storage);\n\n let block = storage\n\n .get_block(block_id)?\n", "file_path": "cmd/resource-exporter/src/main.rs", "rank": 36, "score": 223373.17207165057 }, { "content": "/// Builds the transport that serves as a common ground for all connections.\n\n///\n\n/// If `memory_only` is true, then only communication within the same process are allowed. 
Only\n\n/// addresses with the format `/memory/...` are allowed.\n\n///\n\n/// Returns a `BandwidthSinks` object that allows querying the average bandwidth produced by all\n\n/// the connections spawned with this transport.\n\npub fn build_transport(\n\n keypair: identity::Keypair,\n\n memory_only: bool,\n\n wasm_external_transport: Option<wasm_ext::ExtTransport>,\n\n) -> (Boxed<(PeerId, StreamMuxerBox)>, Arc<BandwidthSinks>) {\n\n // Build the base layer of the transport.\n\n let transport = if let Some(t) = wasm_external_transport {\n\n OptionalTransport::some(t)\n\n } else {\n\n OptionalTransport::none()\n\n };\n\n #[cfg(not(target_os = \"unknown\"))]\n\n let transport = transport.or_transport(if !memory_only {\n\n let desktop_trans = tcp::TcpConfig::new();\n\n let desktop_trans =\n\n websocket::WsConfig::new(desktop_trans.clone()).or_transport(desktop_trans);\n\n let dns_init = futures::executor::block_on(dns::DnsConfig::system(desktop_trans.clone()));\n\n OptionalTransport::some(OptionalTransport::some(if let Ok(dns) = dns_init {\n\n EitherTransport::Left(dns)\n\n } else {\n", "file_path": "network-p2p/src/transport.rs", "rank": 37, "score": 222677.9971012324 }, { "content": "//TODO implement a generic metrics macros.\n\npub fn record_metrics<'a>(\n\n storage_type: &'a str,\n\n key_type: &'a str,\n\n method: &'a str,\n\n metrics: Option<&'a StorageMetrics>,\n\n) -> MetricsRecord<'a> {\n\n MetricsRecord::new(storage_type, key_type, method, metrics)\n\n}\n", "file_path": "storage/src/metrics.rs", "rank": 38, "score": 221659.5327127632 }, { "content": "#[stest::test]\n\npub fn test_db_upgrade() -> Result<()> {\n\n let tmpdir = starcoin_config::temp_dir();\n\n let txn_info_ids = generate_old_db(tmpdir.path())?;\n\n let mut instance = StorageInstance::new_cache_and_db_instance(\n\n CacheStorage::new(None),\n\n DBStorage::new(tmpdir.path(), RocksdbConfig::default(), None)?,\n\n );\n\n\n\n instance.check_upgrade()?;\n\n let storage = 
Storage::new(instance.clone())?;\n\n let old_transaction_info_storage = OldTransactionInfoStorage::new(instance);\n\n\n\n for txn_info_id in txn_info_ids {\n\n assert!(\n\n old_transaction_info_storage.get(txn_info_id)?.is_none(),\n\n \"expect BlockTransactionInfo is none\"\n\n );\n\n assert!(\n\n storage.get_transaction_info(txn_info_id)?.is_some(),\n\n \"expect RichTransactionInfo is some\"\n\n );\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "storage/src/tests/test_storage.rs", "rank": 39, "score": 219301.3719573935 }, { "content": "#[test]\n\npub fn test_snapshot_range() -> Result<()> {\n\n let tmpdir = starcoin_config::temp_dir();\n\n let instance = StorageInstance::new_cache_and_db_instance(\n\n CacheStorage::new(None),\n\n DBStorage::new(tmpdir.path(), RocksdbConfig::default(), None)?,\n\n );\n\n let storage = Storage::new(instance)?;\n\n let snapshot_range = storage.get_snapshot_range()?;\n\n assert!(snapshot_range.is_none(), \"export snapshot_range is none\");\n\n let snapshot_range = SnapshotRange::new(1, 1000);\n\n storage.save_snapshot_range(snapshot_range)?;\n\n let snapshot_range = storage.get_snapshot_range()?;\n\n assert!(snapshot_range.is_some(), \"expect snapshot_range is some\");\n\n let snapshot_range = snapshot_range.unwrap();\n\n assert_eq!(snapshot_range.get_start(), 1);\n\n assert_eq!(snapshot_range.get_end(), 1000);\n\n Ok(())\n\n}\n\n\n", "file_path": "storage/src/tests/test_storage.rs", "rank": 40, "score": 219301.3719573935 }, { "content": "pub fn apply_snapshot(\n\n to_dir: PathBuf,\n\n input_path: PathBuf,\n\n network: BuiltinNetworkID,\n\n) -> anyhow::Result<()> {\n\n let start_time = SystemTime::now();\n\n let net = ChainNetwork::new_builtin(network);\n\n let db_storage = DBStorage::new(to_dir.join(\"starcoindb/db\"), RocksdbConfig::default(), None)?;\n\n let storage = Arc::new(Storage::new(StorageInstance::new_cache_and_db_instance(\n\n CacheStorage::new(None),\n\n db_storage,\n\n ))?);\n\n let (chain_info, _) = 
Genesis::init_and_check_storage(&net, storage.clone(), to_dir.as_ref())?;\n\n let mut chain = BlockChain::new(\n\n net.time_service(),\n\n chain_info.head().id(),\n\n storage.clone(),\n\n None,\n\n )\n\n .expect(\"create block chain should success.\");\n", "file_path": "cmd/db-exporter/src/main.rs", "rank": 41, "score": 219016.13171998906 }, { "content": "/// manifest.csv layout\n\n/// block_accumulator num accumulator_root_hash\n\n/// block num block.header.hash\n\n/// block_info num block.header.hash\n\n/// txn_accumulator num accumulator_root_hash\n\n/// state num state_root_hash\n\npub fn export_snapshot(\n\n from_dir: PathBuf,\n\n output: PathBuf,\n\n network: BuiltinNetworkID,\n\n increment: Option<bool>,\n\n special_block_num: Option<BlockNumber>,\n\n) -> anyhow::Result<()> {\n\n let start_time = SystemTime::now();\n\n let net = ChainNetwork::new_builtin(network);\n\n let db_storage = DBStorage::open_with_cfs(\n\n from_dir.join(\"starcoindb/db/starcoindb\"),\n\n StorageVersion::current_version()\n\n .get_column_family_names()\n\n .to_vec(),\n\n true,\n\n Default::default(),\n\n None,\n\n )?;\n\n let storage = Arc::new(Storage::new(StorageInstance::new_cache_and_db_instance(\n\n CacheStorage::new(None),\n", "file_path": "cmd/db-exporter/src/main.rs", "rank": 42, "score": 219016.13171998906 }, { "content": "pub fn apply_block(\n\n to_dir: PathBuf,\n\n input_path: PathBuf,\n\n network: BuiltinNetworkID,\n\n verifier: Verifier,\n\n) -> anyhow::Result<()> {\n\n let net = ChainNetwork::new_builtin(network);\n\n let db_storage = DBStorage::new(to_dir.join(\"starcoindb/db\"), RocksdbConfig::default(), None)?;\n\n let storage = Arc::new(Storage::new(StorageInstance::new_cache_and_db_instance(\n\n CacheStorage::new(None),\n\n db_storage,\n\n ))?);\n\n let (chain_info, _) = Genesis::init_and_check_storage(&net, storage.clone(), to_dir.as_ref())?;\n\n let mut chain = BlockChain::new(\n\n net.time_service(),\n\n chain_info.head().id(),\n\n storage.clone(),\n\n None,\n\n 
)\n\n .expect(\"create block chain should success.\");\n", "file_path": "cmd/db-exporter/src/main.rs", "rank": 43, "score": 219016.13171998906 }, { "content": "pub fn convert_changeset_and_events_cached<C: AccessPathCache>(\n\n ap_cache: &mut C,\n\n changeset: MoveChangeSet,\n\n events: Vec<MoveEvent>,\n\n) -> Result<(WriteSet, Vec<ContractEvent>), VMStatus> {\n\n // TODO: Cache access path computations if necessary.\n\n let mut ops = vec![];\n\n\n\n for (addr, account_changeset) in changeset.into_inner() {\n\n let (modules, resources) = account_changeset.into_inner();\n\n for (struct_tag, blob_opt) in resources {\n\n let ap = ap_cache.get_resource_path(addr, struct_tag);\n\n let op = match blob_opt {\n\n None => WriteOp::Deletion,\n\n Some(blob) => WriteOp::Value(blob),\n\n };\n\n ops.push((ap, op))\n\n }\n\n\n\n for (name, blob_opt) in modules {\n", "file_path": "vm/vm-runtime/src/starcoin_vm.rs", "rank": 44, "score": 217831.27421206248 }, { "content": "#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\nstruct DelayId(u64);\n\n\n\n/// State of a peer we're connected to.\n\n///\n\n/// The variants correspond to the state of the peer w.r.t. 
the peerset.\n", "file_path": "network-p2p/src/protocol/generic_proto/behaviour.rs", "rank": 45, "score": 217248.14096563493 }, { "content": "/// Check the abi is supported by generator\n\npub fn is_supported_abi(abi: &ScriptABI) -> bool {\n\n for arg in abi.args() {\n\n if let TypeTag::Vector(type_tag) = arg.type_tag() {\n\n match type_tag.as_ref() {\n\n TypeTag::U8 => continue,\n\n _ => {\n\n eprintln!(\n\n \"{} function's argument {:?}, the generator do not support, skip it.\",\n\n abi.name(),\n\n arg\n\n );\n\n return false;\n\n }\n\n }\n\n }\n\n }\n\n true\n\n}\n", "file_path": "vm/transaction-builder-generator/src/lib.rs", "rank": 46, "score": 216809.0948274023 }, { "content": "pub fn startup_info_back(\n\n to_dir: PathBuf,\n\n back_size: Option<u64>,\n\n network: BuiltinNetworkID,\n\n) -> anyhow::Result<()> {\n\n let net = ChainNetwork::new_builtin(network);\n\n let db_storage = DBStorage::new(to_dir.join(\"starcoindb/db\"), RocksdbConfig::default(), None)?;\n\n let storage = Arc::new(Storage::new(StorageInstance::new_cache_and_db_instance(\n\n CacheStorage::new(None),\n\n db_storage,\n\n ))?);\n\n let (chain_info, _) = Genesis::init_and_check_storage(&net, storage.clone(), to_dir.as_ref())?;\n\n let chain = BlockChain::new(\n\n net.time_service(),\n\n chain_info.head().id(),\n\n storage.clone(),\n\n None,\n\n )\n\n .expect(\"create block chain should success.\");\n\n\n", "file_path": "cmd/db-exporter/src/main.rs", "rank": 47, "score": 214909.37139119738 }, { "content": "pub fn export_block_range(\n\n from_dir: PathBuf,\n\n output: PathBuf,\n\n network: BuiltinNetworkID,\n\n start: BlockNumber,\n\n end: BlockNumber,\n\n) -> anyhow::Result<()> {\n\n let net = ChainNetwork::new_builtin(network);\n\n let db_storage = DBStorage::open_with_cfs(\n\n from_dir.join(\"starcoindb/db/starcoindb\"),\n\n StorageVersion::current_version()\n\n .get_column_family_names()\n\n .to_vec(),\n\n true,\n\n Default::default(),\n\n None,\n\n )?;\n\n let storage = 
Arc::new(Storage::new(StorageInstance::new_cache_and_db_instance(\n\n CacheStorage::new(None),\n\n db_storage,\n", "file_path": "cmd/db-exporter/src/main.rs", "rank": 48, "score": 214909.37139119738 }, { "content": "pub fn gen_block_transactions(\n\n to_dir: PathBuf,\n\n block_num: Option<u64>,\n\n trans_num: Option<u64>,\n\n txn_type: Txntype,\n\n) -> anyhow::Result<()> {\n\n ::logger::init();\n\n let net = ChainNetwork::new_builtin(BuiltinNetworkID::Halley);\n\n let db_storage = DBStorage::new(to_dir.join(\"starcoindb/db\"), RocksdbConfig::default(), None)?;\n\n let storage = Arc::new(Storage::new(StorageInstance::new_cache_and_db_instance(\n\n CacheStorage::new(None),\n\n db_storage,\n\n ))?);\n\n let (chain_info, _) = Genesis::init_and_check_storage(&net, storage.clone(), to_dir.as_ref())?;\n\n let mut chain = BlockChain::new(\n\n net.time_service(),\n\n chain_info.head().id(),\n\n storage.clone(),\n\n None,\n\n )\n", "file_path": "cmd/db-exporter/src/main.rs", "rank": 49, "score": 214909.37139119738 }, { "content": "pub fn compute_arg_identifiers(\n\n args: &Punctuated<syn::FnArg, syn::token::Comma>,\n\n) -> anyhow::Result<Vec<&syn::Ident>> {\n\n let mut arg_names = vec![];\n\n for arg in args {\n\n let pat = match arg {\n\n syn::FnArg::Typed(syn::PatType { pat, .. }) => pat,\n\n _ => continue,\n\n };\n\n let ident = match **pat {\n\n syn::Pat::Ident(syn::PatIdent { ref ident, .. 
}) => ident,\n\n syn::Pat::Wild(ref wild) => {\n\n let span = wild.underscore_token.spans[0];\n\n let msg = \"No wildcard patterns allowed in rpc trait.\";\n\n return Err(Error::from(syn::Error::new(span, msg)));\n\n }\n\n _ => continue,\n\n };\n\n arg_names.push(ident);\n\n }\n\n Ok(arg_names)\n\n}\n", "file_path": "network-rpc/derive/src/helper.rs", "rank": 50, "score": 214249.232228846 }, { "content": "pub fn build_lighting_network(\n\n net: &ChainNetwork,\n\n network_config: &NetworkConfig,\n\n) -> Result<(PeerInfo, NetworkWorker)> {\n\n let genesis = starcoin_genesis::Genesis::load_or_build(net)?;\n\n let storage = Arc::new(Storage::new(StorageInstance::new_cache_instance())?);\n\n let chain_info = genesis.execute_genesis_block(net, storage)?;\n\n build_network_worker(\n\n network_config,\n\n chain_info,\n\n NotificationMessage::protocols(),\n\n None,\n\n None,\n\n )\n\n}\n", "file_path": "cmd/peer-watcher/src/lib.rs", "rank": 51, "score": 214249.232228846 }, { "content": "#[test]\n\npub fn test_cache_evict_multi_get() -> Result<()> {\n\n let tmpdir = starcoin_config::temp_dir();\n\n let instance = StorageInstance::new_cache_and_db_instance(\n\n CacheStorage::new_with_capacity(2, None),\n\n DBStorage::new(tmpdir.path(), RocksdbConfig::default(), None)?,\n\n );\n\n let storage = Storage::new(instance.clone())?;\n\n let transaction_info1 = RichTransactionInfo::new(\n\n HashValue::random(),\n\n rand::random(),\n\n TransactionInfo::new(\n\n HashValue::random(),\n\n HashValue::zero(),\n\n vec![].as_slice(),\n\n 0,\n\n KeptVMStatus::Executed,\n\n ),\n\n rand::random(),\n\n rand::random(),\n\n );\n", "file_path": "storage/src/tests/test_storage.rs", "rank": 52, "score": 212421.2744043263 }, { "content": "pub fn get_unix_ts() -> u128 {\n\n get_unix_duration().as_nanos()\n\n}\n\n\n", "file_path": "network/src/helper.rs", "rank": 53, "score": 211986.91153757795 }, { "content": "// This use in test net create account then transfer faster then transfer non exist 
account\n\npub fn execute_transaction_with_create_account(\n\n storage: Arc<Storage>,\n\n chain: &mut BlockChain,\n\n net: &ChainNetwork,\n\n block_num: u64,\n\n trans_num: u64,\n\n) -> anyhow::Result<()> {\n\n let mut sequence = 0u64;\n\n for _i in 0..block_num {\n\n let mut txns = Vec::with_capacity(20);\n\n let miner_account = Account::new();\n\n let miner_info = AccountInfo::from(&miner_account);\n\n let txn = Transaction::UserTransaction(create_account_txn_sent_as_association(\n\n &miner_account,\n\n sequence,\n\n 50_000_000,\n\n net.time_service().now_secs() + DEFAULT_EXPIRATION_TIME,\n\n net,\n\n ));\n\n txns.push(txn.as_signed_user_txn()?.clone());\n", "file_path": "cmd/db-exporter/src/main.rs", "rank": 54, "score": 211043.82029219455 }, { "content": "pub fn execute_transaction_with_fixed_account(\n\n storage: Arc<Storage>,\n\n chain: &mut BlockChain,\n\n net: &ChainNetwork,\n\n block_num: u64,\n\n trans_num: u64,\n\n) -> anyhow::Result<()> {\n\n let miner_account = Account::new();\n\n let miner_info = AccountInfo::from(&miner_account);\n\n let mut send_sequence = 0u64;\n\n let receiver = Account::new();\n\n let (block_template, _) =\n\n chain.create_block_template(*miner_info.address(), None, vec![], vec![], None)?;\n\n let block =\n\n ConsensusStrategy::Dummy.create_block(block_template, net.time_service().as_ref())?;\n\n let block_hash = block.header.id();\n\n chain.apply_with_verifier::<BasicVerifier>(block)?;\n\n let startup_info = StartupInfo::new(block_hash);\n\n storage.save_startup_info(startup_info)?;\n\n for _i in 0..block_num {\n", "file_path": "cmd/db-exporter/src/main.rs", "rank": 55, "score": 211031.92804337176 }, { "content": "pub fn execute_empty_transaction_with_miner(\n\n storage: Arc<Storage>,\n\n chain: &mut BlockChain,\n\n net: &ChainNetwork,\n\n block_num: u64,\n\n trans_num: u64,\n\n) -> anyhow::Result<()> {\n\n let miner_account = Account::new();\n\n let miner_info = AccountInfo::from(&miner_account);\n\n let mut send_sequence = 
0u64;\n\n let (block_template, _) =\n\n chain.create_block_template(*miner_info.address(), None, vec![], vec![], None)?;\n\n let block =\n\n ConsensusStrategy::Dummy.create_block(block_template, net.time_service().as_ref())?;\n\n let block_hash = block.header.id();\n\n chain.apply_with_verifier::<BasicVerifier>(block)?;\n\n let startup_info = StartupInfo::new(block_hash);\n\n storage.save_startup_info(startup_info)?;\n\n for _i in 0..block_num {\n\n let mut sequence = send_sequence;\n", "file_path": "cmd/db-exporter/src/main.rs", "rank": 56, "score": 211031.92804337176 }, { "content": "struct DelegateAsyncMethod<T, F> {\n\n delegate: Arc<T>,\n\n closure: F,\n\n}\n\n\n\nimpl<T, F, I> RpcMethod for DelegateAsyncMethod<T, F>\n\nwhere\n\n F: Fn(Arc<T>, PeerId, Vec<u8>) -> I,\n\n I: Future<Output = Result<Vec<u8>>> + Send + Unpin + 'static,\n\n T: Send + Sync + 'static,\n\n F: Send + Sync + 'static,\n\n{\n\n fn call(&self, peer_id: PeerId, params: Vec<u8>) -> BoxFuture<Result<Vec<u8>>> {\n\n let closure = &self.closure;\n\n Box::pin(closure(self.delegate.clone(), peer_id, params))\n\n }\n\n}\n\n\n\npub struct IoDelegate<T>\n\nwhere\n", "file_path": "network-rpc/core/src/delegates.rs", "rank": 57, "score": 210521.9361971796 }, { "content": "pub fn account_struct_tag() -> StructTag {\n\n StructTag {\n\n address: CORE_CODE_ADDRESS,\n\n module: G_ACCOUNT_MODULE_IDENTIFIER.clone(),\n\n name: G_ACCOUNT_STRUCT_NAME.to_owned(),\n\n type_params: vec![],\n\n }\n\n}\n", "file_path": "vm/types/src/account_config/constants/account.rs", "rank": 58, "score": 209336.56008358582 }, { "content": "fn is_benchmarking(ip: Ipv4Addr) -> bool {\n\n ip.octets()[0] == 198 && (ip.octets()[1] & 0xfe) == 18\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::helper::*;\n\n use std::net::Ipv4Addr;\n\n\n\n #[test]\n\n fn test_192_0_0() {\n\n let ip_9 = Ipv4Addr::new(192, 0, 0, 9);\n\n assert!(is_global(ip_9));\n\n\n\n let ip_10 = Ipv4Addr::new(192, 0, 0, 10);\n\n assert!(is_global(ip_10));\n\n 
}\n\n\n\n #[test]\n\n fn test_is_shared() {\n", "file_path": "network/src/helper.rs", "rank": 59, "score": 208617.26606017514 }, { "content": "fn is_reserved(ip: Ipv4Addr) -> bool {\n\n ip.octets()[0] & 240 == 240 && !ip.is_broadcast()\n\n}\n\n\n", "file_path": "network/src/helper.rs", "rank": 60, "score": 208617.26606017514 }, { "content": "fn is_shared(ip: Ipv4Addr) -> bool {\n\n ip.octets()[0] == 100 && (ip.octets()[1] & 0b1100_0000 == 0b0100_0000)\n\n}\n\n\n", "file_path": "network/src/helper.rs", "rank": 61, "score": 208617.26606017514 }, { "content": "pub fn spawn<T: Send + 'static, F: FnOnce() -> T + Send + 'static>(f: F) -> TimeoutJoinHandle<T> {\n\n let (send, recv) = channel();\n\n let t = thread::spawn(move || {\n\n let x = f();\n\n //ignore send error.\n\n let _e = send.send(());\n\n x\n\n });\n\n TimeoutJoinHandle {\n\n handle: t,\n\n signal: recv,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "commons/timeout-join-handler/src/lib.rs", "rank": 62, "score": 208446.8472863857 }, { "content": "pub fn get_unix_ts_as_millis() -> u128 {\n\n get_unix_duration().as_millis()\n\n}\n\n\n", "file_path": "network/src/helper.rs", "rank": 63, "score": 207899.67194785946 }, { "content": "#[cfg(not(unix))]\n\npub fn check_open_fds_limit(max_files: u64) -> Result<(), ConfigError> {\n\n Ok(())\n\n}\n", "file_path": "config/src/lib.rs", "rank": 64, "score": 207462.8737297508 }, { "content": "pub fn move_abort_code(status: KeptVMStatus) -> Option<u64> {\n\n match status {\n\n KeptVMStatus::MoveAbort(_, code) => Some(code),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "test-helper/src/executor.rs", "rank": 65, "score": 207462.8737297508 }, { "content": "pub fn execute_transaction_with_miner_create_account(\n\n storage: Arc<Storage>,\n\n chain: &mut BlockChain,\n\n net: &ChainNetwork,\n\n block_num: u64,\n\n trans_num: u64,\n\n) -> anyhow::Result<()> {\n\n let miner_account = Account::new();\n\n let miner_info = AccountInfo::from(&miner_account);\n\n let 
mut send_sequence = 0u64;\n\n let (block_template, _) =\n\n chain.create_block_template(*miner_info.address(), None, vec![], vec![], None)?;\n\n let block =\n\n ConsensusStrategy::Dummy.create_block(block_template, net.time_service().as_ref())?;\n\n let block_hash = block.header.id();\n\n chain.apply_with_verifier::<BasicVerifier>(block)?;\n\n let startup_info = StartupInfo::new(block_hash);\n\n storage.save_startup_info(startup_info)?;\n\n for _i in 0..block_num {\n\n let mut sequence = send_sequence;\n", "file_path": "cmd/db-exporter/src/main.rs", "rank": 66, "score": 207365.1161539388 }, { "content": "fn main() {\n\n let _logger = starcoin_logger::init();\n\n let opts = GenesisGeneratorOpt::parse();\n\n let networks: Vec<BuiltinNetworkID> = match opts.net {\n\n Some(network) => vec![network],\n\n None => BuiltinNetworkID::networks(),\n\n };\n\n for id in networks {\n\n // skip test && dev network generate.\n\n if id.is_test() || id.is_dev() {\n\n continue;\n\n }\n\n if !id.genesis_config().is_ready() {\n\n continue;\n\n }\n\n let net = ChainNetwork::new_builtin(id);\n\n let new_genesis = Genesis::build(&net).expect(\"build genesis fail.\");\n\n let generated_genesis = Genesis::load(&net);\n\n let regenerate = match generated_genesis {\n\n Ok(Some(generated_genesis)) => {\n", "file_path": "genesis/src/main.rs", "rank": 67, "score": 206772.86399388168 }, { "content": "#[proc_macro_attribute]\n\npub fn net_rpc(attr: TokenStream, input: TokenStream) -> TokenStream {\n\n let input_tokens = parse_macro_input!(input as syn::Item);\n\n let args = syn::parse_macro_input!(attr as syn::AttributeArgs);\n\n let derive_options = options::DeriveOptions::new(args);\n\n let token_stream = rpc_trait::rpc_impl(input_tokens, &derive_options).unwrap();\n\n token_stream.into()\n\n}\n", "file_path": "network-rpc/derive/src/lib.rs", "rank": 68, "score": 206201.58918820348 }, { "content": "/// Generate a random memory protocol Multiaddr\n\npub fn random_memory_addr() -> Multiaddr {\n\n 
memory_addr(rand::random::<u64>())\n\n}\n\n\n", "file_path": "network-p2p/types/src/lib.rs", "rank": 69, "score": 204040.65932250212 }, { "content": "// Generates the compiled stdlib and transaction scripts. Until this is run changes to the source\n\n// modules/scripts, and changes in the Move compiler will not be reflected in the stdlib used for\n\n// genesis, and everywhere else across the code-base unless otherwise specified.\n\nfn main() {\n\n SimpleLogger::init(LevelFilter::Info, Config::default()).expect(\"init logger failed.\");\n\n // pass argument 'version' to generate new release\n\n // for example, \"cargo run -- --version 1\"\n\n let cli = Command::new(\"stdlib\")\n\n .name(\"Move standard library\")\n\n .author(\"The Starcoin Core Contributors\")\n\n .after_help(\"this command can be used to generate an incremental package, with init script included.\")\n\n .arg(\n\n Arg::new(\"version\")\n\n .short('v')\n\n .long(\"version\")\n\n .takes_value(true)\n\n .value_name(\"VERSION\")\n\n .help(\"version number for compiled stdlib, for example 1. 
don't forget to record the release note\"),\n\n )\n\n .arg(\n\n Arg::new(\"pre-version\")\n\n .short('p')\n\n .long(\"pre-version\")\n", "file_path": "vm/stdlib/src/main.rs", "rank": 70, "score": 203665.12642697754 }, { "content": "fn main() {\n\n match run() {\n\n Ok(()) => {}\n\n Err(e) => {\n\n match e.downcast::<NodeStartError>() {\n\n Ok(e) => match e {\n\n //TODO not suggest clean data dir in main network.\n\n NodeStartError::LoadConfigError(e) => {\n\n error!(\"{:?}, please fix config.\", e);\n\n std::process::exit(G_EXIT_CODE_NEED_HELP);\n\n }\n\n NodeStartError::StorageInitError(e) => {\n\n error!(\"{:?}, please clean your data dir.\", e);\n\n std::process::exit(G_EXIT_CODE_NEED_HELP);\n\n }\n\n NodeStartError::GenesisError(e) => {\n\n error!(\"{:?}, please clean your data dir.\", e);\n\n std::process::exit(G_EXIT_CODE_NEED_HELP);\n\n }\n\n NodeStartError::Other(e) => {\n", "file_path": "cmd/starcoin/src/main.rs", "rank": 71, "score": 203659.96066502552 }, { "content": "fn main() {\n\n let opt = Opt::parse();\n\n\n\n logger::init();\n\n\n\n rayon::ThreadPoolBuilder::new()\n\n .thread_name(|index| format!(\"rayon-global-{}\", index))\n\n .build_global()\n\n .expect(\"Failed to build rayon global thread pool.\");\n\n\n\n starcoin_executor_benchmark::run_benchmark(\n\n opt.num_accounts,\n\n opt.init_account_balance,\n\n opt.block_size,\n\n opt.num_transfer_blocks,\n\n );\n\n}\n", "file_path": "executor/benchmark/src/main.rs", "rank": 72, "score": 203659.96066502552 }, { "content": "/// A tools for generate starcoin config and data.\n\nfn main() {\n\n let _logger_handle = starcoin_logger::init();\n\n if let Err(e) = run() {\n\n error!(\"error: {:?}\", e);\n\n }\n\n}\n", "file_path": "cmd/generator/src/main.rs", "rank": 73, "score": 203659.96066502552 }, { "content": "pub fn event_handle_generator_struct_tag() -> StructTag {\n\n StructTag {\n\n address: CORE_CODE_ADDRESS,\n\n module: event_module_name().to_owned(),\n\n name: 
event_handle_generator_struct_name().to_owned(),\n\n type_params: vec![],\n\n }\n\n}\n", "file_path": "vm/types/src/account_config/constants/event.rs", "rank": 74, "score": 203355.93192131564 }, { "content": "/// Turn off n right most bits\n\nfn turn_off_right_most_n_bits(v: u64, n: u32) -> u64 {\n\n debug_checked_precondition!(n < 64);\n\n (v >> n as u64) << n as u64\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct AncestorSiblingIterator {\n\n node_index: NodeIndex,\n\n}\n\n\n\nimpl Iterator for AncestorSiblingIterator {\n\n type Item = NodeIndex;\n\n\n\n fn next(&mut self) -> Option<NodeIndex> {\n\n let current_sibling_index = self.node_index.sibling();\n\n self.node_index = self.node_index.parent();\n\n Some(current_sibling_index)\n\n }\n\n}\n\n\n", "file_path": "commons/accumulator/src/node_index.rs", "rank": 75, "score": 202437.46039667295 }, { "content": "/// check if `port` is available.\n\nfn check_port_in_use(port: u16) -> bool {\n\n if G_USED_PORTS.lock().contains(&port) {\n\n return true;\n\n }\n\n use std::net::TcpStream;\n\n let in_use = match TcpStream::connect((\"0.0.0.0\", port)) {\n\n Ok(_) => true,\n\n Err(_e) => false,\n\n };\n\n if !in_use {\n\n G_USED_PORTS.lock().push(port);\n\n };\n\n in_use\n\n}\n\n\n", "file_path": "config/src/available_port.rs", "rank": 76, "score": 201487.288609798 }, { "content": "/// A lighting node, connect to peer to peer network, and monitor peers.\n\nfn main() {\n\n let _logger = starcoin_logger::init();\n\n let opt: StarcoinOpt = StarcoinOpt::parse();\n\n let config = NodeConfig::load_with_opt(&opt).unwrap();\n\n let (peer_info, worker) = build_lighting_network(config.net(), &config.network).unwrap();\n\n println!(\"Self peer_info: {:?}\", peer_info);\n\n let service = worker.service().clone();\n\n async_std::task::spawn(worker);\n\n let stream = service.event_stream(\"peer_watcher\");\n\n futures::executor::block_on(async move {\n\n stream\n\n .filter_map(|event| async move {\n\n match event {\n\n 
Event::NotificationStreamOpened {\n\n remote,\n\n protocol: _,\n\n info,\n\n notif_protocols,\n\n rpc_protocols,\n\n version_string,\n", "file_path": "cmd/peer-watcher/src/main.rs", "rank": 77, "score": 200717.42219317728 }, { "content": "fn main() {\n\n let _logger_handler = starcoin_logger::init();\n\n let opts: TxFactoryOpt = TxFactoryOpt::parse();\n\n\n\n let account_address = opts.account_address;\n\n let interval = Duration::from_millis(opts.interval);\n\n let account_password = opts.account_password.clone();\n\n\n\n let is_stress = opts.stress;\n\n let mut account_num = opts.account_num;\n\n let round_num = opts.round_num;\n\n\n\n if !is_stress {\n\n account_num = 0;\n\n }\n\n let watch_timeout = opts.watch_timeout;\n\n let batch_size = opts.batch_size;\n\n\n\n let mut connected = RpcClient::connect_ipc(opts.ipc_path.clone());\n\n while matches!(connected, Err(_)) {\n", "file_path": "cmd/tx-factory/src/main.rs", "rank": 78, "score": 200711.27802992455 }, { "content": "fn main() {\n\n let _logger_handle = logger::init();\n\n let opts: StarcoinOpt = StarcoinOpt::parse();\n\n let config = {\n\n MinerClientConfig {\n\n server: Some(opts.server.clone()),\n\n plugin_path: opts.plugin_path,\n\n miner_thread: opts.thread_num,\n\n enable_stderr: true,\n\n }\n\n };\n\n let user = opts.user;\n\n let system = System::with_tokio_rt(|| {\n\n tokio::runtime::Builder::new_multi_thread()\n\n .enable_all()\n\n .on_thread_stop(|| info!(\"starcoin-miner thread stopped\"))\n\n .thread_name(\"starcoin-miner\")\n\n .build()\n\n .expect(\"failed to create tokio runtime for starcoin-miner\")\n\n });\n", "file_path": "cmd/miner_client/src/main.rs", "rank": 79, "score": 200711.27802992455 }, { "content": "fn main() {\n\n let args = Args::parse();\n\n\n\n let mut location = args.location.trim().split(\"::\");\n\n let mut address_literal = location.next().expect(\"Could not find address\").to_string();\n\n let module_name = location\n\n .next()\n\n .expect(\"Could not find module 
name\")\n\n .to_string();\n\n if !address_literal.starts_with(\"0x\") {\n\n address_literal = format!(\"0x{}\", address_literal);\n\n }\n\n let module_id = ModuleId::new(\n\n AccountAddress::from_hex_literal(&address_literal).expect(\"Unable to parse module address\"),\n\n Identifier::new(module_name).expect(\"Invalid module name encountered\"),\n\n );\n\n\n\n match starcoin_move_explain::get_explanation(&module_id, args.abort_code) {\n\n None => println!(\n\n \"Unable to find a description for {}::{}\",\n", "file_path": "vm/move-explain/src/main.rs", "rank": 80, "score": 200711.27802992455 }, { "content": "fn main() {\n\n if let Err(e) = run() {\n\n eprintln!(\"{}\", e);\n\n let mut c = e.source();\n\n while let Some(s) = c {\n\n eprintln!(\"caused by: {}\", s);\n\n c = s.source();\n\n }\n\n std::process::exit(1)\n\n }\n\n}\n\n\n", "file_path": "vm/move-prover/src/main.rs", "rank": 81, "score": 200711.27802992455 }, { "content": "fn is_ietf_protocol_assignment(ip: Ipv4Addr) -> bool {\n\n ip.octets()[0] == 192 && ip.octets()[1] == 0 && ip.octets()[2] == 0\n\n}\n\n\n", "file_path": "network/src/helper.rs", "rank": 82, "score": 200650.63641348068 }, { "content": "fn prepare_module(chain_state: &ChainStateDB, net: &ChainNetwork) -> ModuleId {\n\n let account1 = Account::new();\n\n let txn1 = Transaction::UserTransaction(create_account_txn_sent_as_association(\n\n &account1, 0, 50_000_000, 1, net,\n\n ));\n\n let output1 = execute_and_apply(chain_state, txn1);\n\n assert_eq!(KeptVMStatus::Executed, output1.status().status().unwrap());\n\n let module_source = r#\"\n\n module {{sender}}::Test {\n\n struct R has key, store {\n\n i: u64,\n\n }\n\n\n\n fun fn_private() {\n\n }\n\n\n\n public fun fn_public() {\n\n }\n\n\n\n public(script) fun fn_script() {\n", "file_path": "executor/src/script_function_test.rs", "rank": 83, "score": 200070.29868160092 }, { "content": "pub fn current_block_number<S: StateView>(state_view: &S) -> u64 {\n\n let mut ret = 
execute_readonly_function(\n\n state_view,\n\n &ModuleId::new(genesis_address(), Identifier::new(\"Block\").unwrap()),\n\n &Identifier::new(\"get_current_block_number\").unwrap(),\n\n vec![],\n\n vec![],\n\n None,\n\n )\n\n .unwrap();\n\n assert_eq!(ret.len(), 1);\n\n bcs_ext::from_bytes(ret.pop().unwrap().as_slice()).unwrap()\n\n}\n\n\n", "file_path": "test-helper/src/executor.rs", "rank": 84, "score": 199108.2381284633 }, { "content": "fn test_create_block_template_by_net(net: ChainNetworkID) {\n\n debug!(\"test_create_block_template_by_net {:?}\", net);\n\n let mut opt = StarcoinOpt::default();\n\n let temp_path = temp_dir();\n\n opt.net = Some(net);\n\n opt.base_data_dir = Some(temp_path.path().to_path_buf());\n\n\n\n let node_config = Arc::new(NodeConfig::load_with_opt(&opt).unwrap());\n\n let (storage, chain_info, genesis) = StarcoinGenesis::init_storage_for_test(node_config.net())\n\n .expect(\"init storage by genesis fail.\");\n\n let genesis_id = genesis.block().id();\n\n let miner_account = AccountInfo::random();\n\n let inner = Inner::new(\n\n node_config.net(),\n\n storage,\n\n genesis_id,\n\n EmptyProvider,\n\n None,\n\n miner_account,\n\n None,\n\n None,\n\n )\n\n .unwrap();\n\n\n\n let block_template = inner.create_block_template().unwrap().template;\n\n assert_eq!(block_template.parent_hash, genesis_id);\n\n assert_eq!(block_template.parent_hash, chain_info.head().id());\n\n assert_eq!(block_template.number, 1);\n\n}\n\n\n", "file_path": "miner/src/create_block_template/test_create_block_template.rs", "rank": 85, "score": 196829.61052241182 }, { "content": "fn main() -> Result<()> {\n\n env_logger::Builder::from_env(env_logger::Env::default().default_filter_or(\"info\"))\n\n .format(|buf, record| {\n\n let color = match record.level() {\n\n Level::Warn => Color::Yellow,\n\n Level::Error => Color::Red,\n\n _ => Color::Green,\n\n };\n\n\n\n let mut level_style = buf.style();\n\n level_style.set_color(color).set_bold(true);\n\n\n\n writeln!(\n\n 
buf,\n\n \"{:>12} [{}] - {}\",\n\n level_style.value(record.level()),\n\n Local::now().format(\"%T%.3f\"),\n\n record.args()\n\n )\n\n })\n", "file_path": "devtools/x/src/main.rs", "rank": 86, "score": 195845.7098970065 }, { "content": "#[test]\n\npub fn test_native_function_matches() -> Result<()> {\n\n let modules = load_latest_compiled_modules();\n\n let native_functions: Vec<_> = modules\n\n .iter()\n\n .flat_map(|m| {\n\n m.function_defs()\n\n .iter()\n\n .filter_map(|func_def| {\n\n if func_def.is_native() {\n\n Some(Function::new(m, func_def).0)\n\n } else {\n\n None\n\n }\n\n })\n\n .map(|func_name| {\n\n (\n\n *m.self_id().address(),\n\n m.self_id().name().to_string(),\n\n func_name.to_string(),\n\n )\n", "file_path": "vm/vm-runtime/tests/test_native_functions.rs", "rank": 87, "score": 194428.58930565475 }, { "content": "#[test]\n\npub fn test_state_storage_dump() -> Result<()> {\n\n let storage = MockStateNodeStore::new();\n\n let state = StateTree::new(Arc::new(storage), None);\n\n let hash_value1 = HashValueKey(HashValue::random());\n\n let value1 = vec![1u8, 2u8];\n\n state.put(hash_value1, value1.clone());\n\n let hash_value2 = HashValueKey(HashValue::random());\n\n let value2 = vec![3u8, 4u8];\n\n state.put(hash_value2, value2.clone());\n\n state.commit()?;\n\n let state_set = state.dump()?;\n\n assert_eq!(2, state_set.len());\n\n let mut iter = state.dump_iter()?;\n\n let mut kv1 = HashMap::new();\n\n kv1.insert(hash_value1, Blob::from(value1));\n\n kv1.insert(hash_value2, Blob::from(value2));\n\n let mut kv2 = HashMap::new();\n\n let v1 = iter.next().unwrap()?;\n\n let v2 = iter.next().unwrap()?;\n\n assert!(iter.next().is_none(), \"iter next should none\");\n\n kv2.insert(v1.0, v1.1);\n\n kv2.insert(v2.0, v2.1);\n\n assert_eq!(kv1, kv2);\n\n Ok(())\n\n}\n\n\n", "file_path": "state/state-tree/src/state_tree_test.rs", "rank": 88, "score": 194252.8118392863 }, { "content": "fn main() {\n\n generate().unwrap();\n\n}\n\n\n", "file_path": 
"dataformat-generator/build.rs", "rank": 89, "score": 193801.2705814618 }, { "content": "#[derive(Debug, Parser)]\n\n#[clap(name = \"beta\")]\n\nstruct BetaOpts {\n\n #[clap(short = 'n', default_value = \"beta_default\")]\n\n #[allow(unused)]\n\n name: String,\n\n}\n\n\n", "file_path": "commons/scmd/examples/hello_main.rs", "rank": 90, "score": 193291.82699662304 }, { "content": "#[derive(Debug, Parser)]\n\n#[clap(name = \"alpha\")]\n\nstruct AlphaOpts {\n\n #[clap(short = 'n', default_value = \"alpha_default\")]\n\n #[allow(unused)]\n\n name: String,\n\n}\n\n\n", "file_path": "commons/scmd/examples/hello_main.rs", "rank": 91, "score": 193291.82699662304 }, { "content": "#[derive(Debug, Parser)]\n\n#[clap(name = \"test\")]\n\nstruct TestOpts {\n\n #[clap(short = 'd')]\n\n #[allow(unused)]\n\n debug: bool,\n\n}\n\n\n\npub(crate) fn init_context() -> CmdContext<Counter, GlobalOpts> {\n\n let context = CmdContext::<Counter, GlobalOpts>::with_default_action(\n\n \"0.1\",\n\n None,\n\n |global_opt| -> Result<Counter> { Ok(Counter::new(global_opt.counter)) },\n\n |_app, _opt, _state| {\n\n let running = Arc::new(AtomicBool::new(true));\n\n let r = running.clone();\n\n ctrlc::set_handler(move || {\n\n r.store(false, Ordering::SeqCst);\n\n })\n\n .expect(\"Error setting Ctrl-C handler\");\n\n println!(\"Waiting for Ctrl-C...\");\n\n while running.load(Ordering::SeqCst) {}\n", "file_path": "commons/scmd/examples/hello_main.rs", "rank": 92, "score": 193291.82699662304 }, { "content": "#[derive(Debug, Parser)]\n\n#[clap(name = \"show\")]\n\nstruct ShowOpts {\n\n #[clap(long, default_value = \"0\")]\n\n index: usize,\n\n}\n\n\n", "file_path": "commons/scmd/examples/hello_main.rs", "rank": 93, "score": 193291.82699662304 }, { "content": "#[derive(Debug, Parser)]\n\n#[clap(name = \"hello\")]\n\nstruct GlobalOpts {\n\n #[clap(short = 'c', default_value = \"0\")]\n\n counter: usize,\n\n #[clap(short = 'r')]\n\n #[allow(unused)]\n\n required: String,\n\n}\n\n\n", "file_path": 
"commons/scmd/examples/hello_main.rs", "rank": 94, "score": 193291.82699662304 }, { "content": "#[derive(Debug, Parser)]\n\n#[clap(name = \"list\", alias = \"list_alias\")]\n\nstruct ListOpts {\n\n #[clap(long, short = 'm', default_value = \"5\")]\n\n max_size: usize,\n\n}\n\n\n", "file_path": "commons/scmd/examples/hello_main.rs", "rank": 95, "score": 193291.6474696888 }, { "content": "fn main() -> Result<()> {\n\n let _logger_handle = starcoin_logger::init();\n\n let opts: FaucetOpt = FaucetOpt::parse();\n\n let client = RpcClient::connect_ipc(opts.ipc_path.as_path()).expect(\"Failed to connect ipc\");\n\n\n\n let account = match opts.faucet_address.as_ref() {\n\n Some(account_address) => client.account_get(*account_address)?,\n\n None => client.account_default()?,\n\n };\n\n let server = Server::http(&opts.server_addr)\n\n .unwrap_or_else(|_| panic!(\"Failed to serve on {}\", &opts.server_addr));\n\n\n\n let account = account\n\n .ok_or_else(|| format_err!(\"Can not find default account, Please input from account.\"))?;\n\n let faucet_address = account.address;\n\n let faucet = Faucet::new(\n\n client,\n\n account,\n\n opts.faucet_account_password.clone(),\n\n opts.max_amount_pre_request,\n\n );\n\n let fut = web::run(server, faucet);\n\n println!(\n\n \"Faucet serve on: {}, with faucet account: {}\",\n\n opts.server_addr, faucet_address\n\n );\n\n executor::block_on(fut);\n\n Ok(())\n\n}\n", "file_path": "cmd/faucet/src/main.rs", "rank": 96, "score": 192897.02726190552 }, { "content": "pub fn try_infer_help(ty: &syn::Type, match_ident: String) -> Option<syn::Type> {\n\n match ty {\n\n syn::Type::Path(syn::TypePath {\n\n path: syn::Path { segments, .. 
},\n\n ..\n\n }) => {\n\n let syn::PathSegment {\n\n ident, arguments, ..\n\n } = &segments[0];\n\n if ident.to_string().eq(&match_ident) {\n\n get_first_type_argument(arguments)\n\n } else {\n\n None\n\n }\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "network-rpc/derive/src/helper.rs", "rank": 97, "score": 190923.63416564756 }, { "content": "/// Smearing all the bits starting from MSB with ones\n\nfn smear_ones_for_u64(v: u64) -> u64 {\n\n let mut n = v;\n\n n |= n >> 1;\n\n n |= n >> 2;\n\n n |= n >> 4;\n\n n |= n >> 8;\n\n n |= n >> 16;\n\n n |= n >> 32;\n\n n\n\n}\n\n\n", "file_path": "commons/accumulator/src/node_index.rs", "rank": 98, "score": 190376.35247195986 } ]
Rust
src/lib.rs
tailcats/kvproto
4b2a2d52131a69b007765256956c5957d2d9655b
#[allow(dead_code)] #[allow(unknown_lints)] #[allow(clippy::all)] #[allow(renamed_and_removed_lints)] #[allow(bare_trait_objects)] mod protos { include!(concat!(env!("OUT_DIR"), "/protos/mod.rs")); use raft_proto::eraftpb; } pub use protos::*; #[cfg(feature = "prost-codec")] pub mod prost_adapt { use crate::backup::{error, ClusterIdError, Error}; use crate::import_kvpb::{write_engine_request, WriteBatch, WriteEngineRequest, WriteHead}; use crate::import_sstpb::{upload_request, SstMeta, UploadRequest}; use crate::{errorpb, kvrpcpb}; impl UploadRequest { pub fn set_data(&mut self, v: Vec<u8>) { self.chunk = Some(upload_request::Chunk::Data(v)); } pub fn get_data(&self) -> &[u8] { match &self.chunk { Some(upload_request::Chunk::Data(v)) => v, _ => &[], } } pub fn set_meta(&mut self, v: SstMeta) { self.chunk = Some(upload_request::Chunk::Meta(v)); } pub fn get_meta(&self) -> &SstMeta { match &self.chunk { Some(upload_request::Chunk::Meta(v)) => v, _ => SstMeta::default_ref(), } } pub fn has_meta(&self) -> bool { match self.chunk { Some(upload_request::Chunk::Meta(_)) => true, _ => false, } } } impl WriteEngineRequest { pub fn set_head(&mut self, v: WriteHead) { self.chunk = Some(write_engine_request::Chunk::Head(v)); } pub fn get_head(&self) -> &WriteHead { match &self.chunk { Some(write_engine_request::Chunk::Head(v)) => v, _ => WriteHead::default_ref(), } } pub fn has_head(&self) -> bool { match self.chunk { Some(write_engine_request::Chunk::Head(_)) => true, _ => false, } } pub fn set_batch(&mut self, v: WriteBatch) { self.chunk = Some(write_engine_request::Chunk::Batch(v)); } pub fn get_batch(&self) -> &WriteBatch { match &self.chunk { Some(write_engine_request::Chunk::Batch(v)) => v, _ => WriteBatch::default_ref(), } } pub fn has_batch(&self) -> bool { match self.chunk { Some(write_engine_request::Chunk::Batch(_)) => true, _ => false, } } pub fn take_batch(&mut self) -> WriteBatch { if self.has_batch() { match self.chunk.take() { 
Some(write_engine_request::Chunk::Batch(v)) => v, _ => unreachable!(), } } else { WriteBatch::default() } } } impl Error { pub fn set_region_error(&mut self, v: errorpb::Error) { self.detail = Some(error::Detail::RegionError(v)); } pub fn set_kv_error(&mut self, v: kvrpcpb::KeyError) { self.detail = Some(error::Detail::KvError(v)); } pub fn set_cluster_id_error(&mut self, v: ClusterIdError) { self.detail = Some(error::Detail::ClusterIdError(v)); } pub fn get_region_error(&self) -> &errorpb::Error { match &self.detail { Some(error::Detail::RegionError(v)) => v, _ => errorpb::Error::default_ref(), } } pub fn get_kv_error(&self) -> &kvrpcpb::KeyError { match &self.detail { Some(error::Detail::KvError(v)) => v, _ => kvrpcpb::KeyError::default_ref(), } } pub fn get_cluster_id_error(&self) -> &ClusterIdError { match &self.detail { Some(error::Detail::ClusterIdError(v)) => v, _ => ClusterIdError::default_ref(), } } pub fn has_region_error(&self) -> bool { match self.detail { Some(error::Detail::RegionError(_)) => true, _ => false, } } pub fn has_kv_error(&self) -> bool { match self.detail { Some(error::Detail::KvError(_)) => true, _ => false, } } pub fn has_cluster_id_error(&self) -> bool { match self.detail { Some(error::Detail::ClusterIdError(_)) => true, _ => false, } } pub fn mut_region_error(&mut self) -> &mut errorpb::Error { if let Some(error::Detail::RegionError(_)) = self.detail { } else { self.detail = Some(error::Detail::RegionError(errorpb::Error::default())); } match self.detail { Some(error::Detail::RegionError(ref mut v)) => v, _ => unreachable!(), } } pub fn mut_kv_error(&mut self) -> &mut kvrpcpb::KeyError { if let Some(error::Detail::KvError(_)) = self.detail { } else { self.detail = Some(error::Detail::KvError(kvrpcpb::KeyError::default())); } match self.detail { Some(error::Detail::KvError(ref mut v)) => v, _ => unreachable!(), } } pub fn mut_cluster_id_error(&mut self) -> &mut ClusterIdError { if let Some(error::Detail::ClusterIdError(_)) = 
self.detail { } else { self.detail = Some(error::Detail::ClusterIdError(ClusterIdError::default())); } match self.detail { Some(error::Detail::ClusterIdError(ref mut v)) => v, _ => unreachable!(), } } } } pub mod cdc_adapt { #[cfg(not(feature = "prost-codec"))] pub mod pb { impl ::std::fmt::Debug for crate::cdcpb::Event_oneof_event { #[allow(unused_variables)] fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { let mut buf = String::new(); match self { crate::cdcpb::Event_oneof_event::Entries(v) => ::protobuf::PbPrint::fmt(v, "Entries", &mut buf), crate::cdcpb::Event_oneof_event::Admin(v) => ::protobuf::PbPrint::fmt(v, "Admin", &mut buf), crate::cdcpb::Event_oneof_event::Error(v) => ::protobuf::PbPrint::fmt(v, "Error", &mut buf), crate::cdcpb::Event_oneof_event::ResolvedTs(v) => ::protobuf::PbPrint::fmt(v, "ResolvedTs", &mut buf), crate::cdcpb::Event_oneof_event::LongTxn(v) => ::protobuf::PbPrint::fmt(v, "Long", &mut buf), } write!(f, "{}", buf) } } #[allow(dead_code)] fn assert_fmt_debug() { fn require_impl_debug<T: ::std::fmt::Debug>(_: T) {} require_impl_debug(crate::cdcpb::Event_oneof_event::Entries(::std::default::Default::default())); require_impl_debug(crate::cdcpb::ChangeDataEvent::default()); } } #[cfg(feature = "prost-codec")] pub mod prost { #[allow(dead_code)] fn assert_fmt_debug() { fn require_impl_debug<T: ::std::fmt::Debug>(_: T) {} require_impl_debug(crate::cdcpb::event::Event::Entries(::std::default::Default::default())); require_impl_debug(crate::cdcpb::ChangeDataEvent::default()); } } }
#[allow(dead_code)] #[allow(unknown_lints)] #[allow(clippy::all)] #[allow(renamed_and_removed_lints)] #[allow(bare_trait_objects)] mod protos { include!(concat!(env!("OUT_DIR"), "/protos/mod.rs")); use raft_proto::eraftpb; } pub use protos::*; #[cfg(feature = "prost-codec")] pub mod prost_adapt { use crate::backup::{error, ClusterIdError, Error}; use crate::import_kvpb::{write_engine_request, WriteBatch, WriteEngineRequest, WriteHead}; use crate::import_sstpb::{upload_request, SstMeta, UploadRequest}; use crate::{errorpb, kvrpcpb}; impl UploadRequest { pub fn set_data(&mut self, v: Vec<u8>) { self.chunk = Some(upload_request::Chunk::Data(v)); } pub fn get_data(&self) -> &[u8] { match &self.chunk { Some(upload_request::Chunk::Data(v)) => v, _ => &[], } } pub fn set_meta(&mut self, v: SstMeta) { self.chunk = Some(upload_request::Chunk::Meta(v)); } pub fn get_meta(&self) -> &SstMeta { match &self.chunk { Some(upload_request::Chunk::Meta(v)) => v, _ => SstMeta::default_ref(), } } pub fn has_meta(&self) -> bool { match self.chunk { Some(upload_request::Chunk::Meta(_)) => true, _ => false, } } } impl WriteEngineRequest { pub fn set_head(&mut self, v: WriteHead) { self.chunk = Some(write_engine_request::Chunk::Head(v)); } pub fn get_head(&self) -> &WriteHead { match &self.chunk { Some(write_engine_request::Chunk::Head(v)) => v, _ => WriteHead::default_ref(), } } pub fn has_head(&self) -> bool { match self.chunk { Some(write_engine_request::Chunk::Head(_)) => true, _ => false, } } pub fn set_batch(&mut self, v: WriteBatch) { self.chunk = Some(write_engine_request::Chunk::Batch(v)); } pub fn get_batch(&self) -> &WriteBatch { match &self.chunk { Some(write_engine_request::Chunk::Batch(v)) => v, _ => WriteBatch::default_ref(), } } pub fn has_batch(&self) -> bool {
} pub fn take_batch(&mut self) -> WriteBatch { if self.has_batch() { match self.chunk.take() { Some(write_engine_request::Chunk::Batch(v)) => v, _ => unreachable!(), } } else { WriteBatch::default() } } } impl Error { pub fn set_region_error(&mut self, v: errorpb::Error) { self.detail = Some(error::Detail::RegionError(v)); } pub fn set_kv_error(&mut self, v: kvrpcpb::KeyError) { self.detail = Some(error::Detail::KvError(v)); } pub fn set_cluster_id_error(&mut self, v: ClusterIdError) { self.detail = Some(error::Detail::ClusterIdError(v)); } pub fn get_region_error(&self) -> &errorpb::Error { match &self.detail { Some(error::Detail::RegionError(v)) => v, _ => errorpb::Error::default_ref(), } } pub fn get_kv_error(&self) -> &kvrpcpb::KeyError { match &self.detail { Some(error::Detail::KvError(v)) => v, _ => kvrpcpb::KeyError::default_ref(), } } pub fn get_cluster_id_error(&self) -> &ClusterIdError { match &self.detail { Some(error::Detail::ClusterIdError(v)) => v, _ => ClusterIdError::default_ref(), } } pub fn has_region_error(&self) -> bool { match self.detail { Some(error::Detail::RegionError(_)) => true, _ => false, } } pub fn has_kv_error(&self) -> bool { match self.detail { Some(error::Detail::KvError(_)) => true, _ => false, } } pub fn has_cluster_id_error(&self) -> bool { match self.detail { Some(error::Detail::ClusterIdError(_)) => true, _ => false, } } pub fn mut_region_error(&mut self) -> &mut errorpb::Error { if let Some(error::Detail::RegionError(_)) = self.detail { } else { self.detail = Some(error::Detail::RegionError(errorpb::Error::default())); } match self.detail { Some(error::Detail::RegionError(ref mut v)) => v, _ => unreachable!(), } } pub fn mut_kv_error(&mut self) -> &mut kvrpcpb::KeyError { if let Some(error::Detail::KvError(_)) = self.detail { } else { self.detail = Some(error::Detail::KvError(kvrpcpb::KeyError::default())); } match self.detail { Some(error::Detail::KvError(ref mut v)) => v, _ => unreachable!(), } } pub fn 
mut_cluster_id_error(&mut self) -> &mut ClusterIdError { if let Some(error::Detail::ClusterIdError(_)) = self.detail { } else { self.detail = Some(error::Detail::ClusterIdError(ClusterIdError::default())); } match self.detail { Some(error::Detail::ClusterIdError(ref mut v)) => v, _ => unreachable!(), } } } } pub mod cdc_adapt { #[cfg(not(feature = "prost-codec"))] pub mod pb { impl ::std::fmt::Debug for crate::cdcpb::Event_oneof_event { #[allow(unused_variables)] fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { let mut buf = String::new(); match self { crate::cdcpb::Event_oneof_event::Entries(v) => ::protobuf::PbPrint::fmt(v, "Entries", &mut buf), crate::cdcpb::Event_oneof_event::Admin(v) => ::protobuf::PbPrint::fmt(v, "Admin", &mut buf), crate::cdcpb::Event_oneof_event::Error(v) => ::protobuf::PbPrint::fmt(v, "Error", &mut buf), crate::cdcpb::Event_oneof_event::ResolvedTs(v) => ::protobuf::PbPrint::fmt(v, "ResolvedTs", &mut buf), crate::cdcpb::Event_oneof_event::LongTxn(v) => ::protobuf::PbPrint::fmt(v, "Long", &mut buf), } write!(f, "{}", buf) } } #[allow(dead_code)] fn assert_fmt_debug() { fn require_impl_debug<T: ::std::fmt::Debug>(_: T) {} require_impl_debug(crate::cdcpb::Event_oneof_event::Entries(::std::default::Default::default())); require_impl_debug(crate::cdcpb::ChangeDataEvent::default()); } } #[cfg(feature = "prost-codec")] pub mod prost { #[allow(dead_code)] fn assert_fmt_debug() { fn require_impl_debug<T: ::std::fmt::Debug>(_: T) {} require_impl_debug(crate::cdcpb::event::Event::Entries(::std::default::Default::default())); require_impl_debug(crate::cdcpb::ChangeDataEvent::default()); } } }
match self.chunk { Some(write_engine_request::Chunk::Batch(_)) => true, _ => false, }
if_condition
[ { "content": "\tError *KeyError `protobuf:\"bytes,1,opt,name=error\" json:\"error,omitempty\"`\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 0, "score": 165851.10744556796 }, { "content": "func (*KeyError) ProtoMessage() {}\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 1, "score": 163280.62371034533 }, { "content": "func (*VerError) ProtoMessage() {}\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 2, "score": 163280.62371034533 }, { "content": "func (m *VerError) GetError() string {\n\n\tif m != nil {\n\n\t\treturn m.Error\n\n\t}\n\n\treturn \"\"\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 3, "score": 128958.09709327602 }, { "content": "\tErrors []*KeyError `protobuf:\"bytes,2,rep,name=errors\" json:\"errors,omitempty\"`\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 4, "score": 120621.16430483971 }, { "content": "\tRegionError *errorpb.Error `protobuf:\"bytes,1,opt,name=region_error,json=regionError\" json:\"region_error,omitempty\"`\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 5, "score": 118154.81823725559 }, { "content": "type VerError struct {\n\n\tError string `protobuf:\"bytes,1,opt,name=error,proto3\" json:\"error,omitempty\"`\n\n\tXXX_NoUnkeyedLiteral struct{} `json:\"-\"`\n\n\tXXX_unrecognized []byte `json:\"-\"`\n\n\tXXX_sizecache int32 `json:\"-\"`\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 6, "score": 118152.9466692283 }, { "content": "type KeyError struct {\n\n\tLocked *LockInfo `protobuf:\"bytes,1,opt,name=locked\" json:\"locked,omitempty\"`\n\n\tRetryable string `protobuf:\"bytes,2,opt,name=retryable,proto3\" json:\"retryable,omitempty\"`\n\n\tAbort string `protobuf:\"bytes,3,opt,name=abort,proto3\" json:\"abort,omitempty\"`\n\n\tConflict *WriteConflict `protobuf:\"bytes,4,opt,name=conflict\" json:\"conflict,omitempty\"`\n\n\tAlreadyExist *AlreadyExist `protobuf:\"bytes,5,opt,name=already_exist,json=alreadyExist\" json:\"already_exist,omitempty\"`\n\n\tDeadlock *Deadlock 
`protobuf:\"bytes,6,opt,name=deadlock\" json:\"deadlock,omitempty\"`\n\n\tCommitTsExpired *CommitTsExpired `protobuf:\"bytes,7,opt,name=commit_ts_expired,json=commitTsExpired\" json:\"commit_ts_expired,omitempty\"`\n\n\tTxnNotFound *TxnNotFound `protobuf:\"bytes,8,opt,name=txn_not_found,json=txnNotFound\" json:\"txn_not_found,omitempty\"`\n\n\tCommitTsTooLarge *CommitTsTooLarge `protobuf:\"bytes,9,opt,name=commit_ts_too_large,json=commitTsTooLarge\" json:\"commit_ts_too_large,omitempty\"`\n\n\tXXX_NoUnkeyedLiteral struct{} `json:\"-\"`\n\n\tXXX_unrecognized []byte `json:\"-\"`\n\n\tXXX_sizecache int32 `json:\"-\"`\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 7, "score": 118152.9466692283 }, { "content": "\tUseAsyncCommit bool `protobuf:\"varint,8,opt,name=use_async_commit,json=useAsyncCommit,proto3\" json:\"use_async_commit,omitempty\"`\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 8, "score": 115962.96679780752 }, { "content": "func (m *KeyError) String() string { return proto.CompactTextString(m) }\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 9, "score": 115788.34038386935 }, { "content": "func (*KeyError) Descriptor() ([]byte, []int) {\n\n\treturn fileDescriptor_kvrpcpb_c5aaf18514284d8d, []int{88}\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 10, "score": 115788.34038386935 }, { "content": "func (m *VerError) MarshalTo(dAtA []byte) (int, error) {\n\n\tvar i int\n\n\t_ = i\n\n\tvar l int\n\n\t_ = l\n\n\tif len(m.Error) > 0 {\n\n\t\tdAtA[i] = 0xa\n\n\t\ti++\n\n\t\ti = encodeVarintKvrpcpb(dAtA, i, uint64(len(m.Error)))\n\n\t\ti += copy(dAtA[i:], m.Error)\n\n\t}\n\n\tif m.XXX_unrecognized != nil {\n\n\t\ti += copy(dAtA[i:], m.XXX_unrecognized)\n\n\t}\n\n\treturn i, nil\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 11, "score": 115788.34038386935 }, { "content": "func (m *VerError) String() string { return proto.CompactTextString(m) }\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 12, "score": 115788.34038386935 }, { 
"content": "func (*VerError) Descriptor() ([]byte, []int) {\n\n\treturn fileDescriptor_kvrpcpb_c5aaf18514284d8d, []int{68}\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 13, "score": 115788.34038386935 }, { "content": "func (m *KeyError) MarshalTo(dAtA []byte) (int, error) {\n\n\tvar i int\n\n\t_ = i\n\n\tvar l int\n\n\t_ = l\n\n\tif m.Locked != nil {\n\n\t\tdAtA[i] = 0xa\n\n\t\ti++\n\n\t\ti = encodeVarintKvrpcpb(dAtA, i, uint64(m.Locked.Size()))\n\n\t\tn110, err := m.Locked.MarshalTo(dAtA[i:])\n\n\t\tif err != nil {\n\n\t\t\treturn 0, err\n\n\t\t}\n\n\t\ti += n110\n\n\t}\n\n\tif len(m.Retryable) > 0 {\n\n\t\tdAtA[i] = 0x12\n\n\t\ti++\n\n\t\ti = encodeVarintKvrpcpb(dAtA, i, uint64(len(m.Retryable)))\n\n\t\ti += copy(dAtA[i:], m.Retryable)\n\n\t}\n\n\tif len(m.Abort) > 0 {\n\n\t\tdAtA[i] = 0x1a\n\n\t\ti++\n\n\t\ti = encodeVarintKvrpcpb(dAtA, i, uint64(len(m.Abort)))\n\n\t\ti += copy(dAtA[i:], m.Abort)\n\n\t}\n\n\tif m.Conflict != nil {\n\n\t\tdAtA[i] = 0x22\n\n\t\ti++\n\n\t\ti = encodeVarintKvrpcpb(dAtA, i, uint64(m.Conflict.Size()))\n\n\t\tn111, err := m.Conflict.MarshalTo(dAtA[i:])\n\n\t\tif err != nil {\n\n\t\t\treturn 0, err\n\n\t\t}\n\n\t\ti += n111\n\n\t}\n\n\tif m.AlreadyExist != nil {\n\n\t\tdAtA[i] = 0x2a\n\n\t\ti++\n\n\t\ti = encodeVarintKvrpcpb(dAtA, i, uint64(m.AlreadyExist.Size()))\n\n\t\tn112, err := m.AlreadyExist.MarshalTo(dAtA[i:])\n\n\t\tif err != nil {\n\n\t\t\treturn 0, err\n\n\t\t}\n\n\t\ti += n112\n\n\t}\n\n\tif m.Deadlock != nil {\n\n\t\tdAtA[i] = 0x32\n\n\t\ti++\n\n\t\ti = encodeVarintKvrpcpb(dAtA, i, uint64(m.Deadlock.Size()))\n\n\t\tn113, err := m.Deadlock.MarshalTo(dAtA[i:])\n\n\t\tif err != nil {\n\n\t\t\treturn 0, err\n\n\t\t}\n\n\t\ti += n113\n\n\t}\n\n\tif m.CommitTsExpired != nil {\n\n\t\tdAtA[i] = 0x3a\n\n\t\ti++\n\n\t\ti = encodeVarintKvrpcpb(dAtA, i, uint64(m.CommitTsExpired.Size()))\n\n\t\tn114, err := m.CommitTsExpired.MarshalTo(dAtA[i:])\n\n\t\tif err != nil {\n\n\t\t\treturn 0, err\n\n\t\t}\n\n\t\ti += 
n114\n\n\t}\n\n\tif m.TxnNotFound != nil {\n\n\t\tdAtA[i] = 0x42\n\n\t\ti++\n\n\t\ti = encodeVarintKvrpcpb(dAtA, i, uint64(m.TxnNotFound.Size()))\n\n\t\tn115, err := m.TxnNotFound.MarshalTo(dAtA[i:])\n\n\t\tif err != nil {\n\n\t\t\treturn 0, err\n\n\t\t}\n\n\t\ti += n115\n\n\t}\n\n\tif m.CommitTsTooLarge != nil {\n\n\t\tdAtA[i] = 0x4a\n\n\t\ti++\n\n\t\ti = encodeVarintKvrpcpb(dAtA, i, uint64(m.CommitTsTooLarge.Size()))\n\n\t\tn116, err := m.CommitTsTooLarge.MarshalTo(dAtA[i:])\n\n\t\tif err != nil {\n\n\t\t\treturn 0, err\n\n\t\t}\n\n\t\ti += n116\n\n\t}\n\n\tif m.XXX_unrecognized != nil {\n\n\t\ti += copy(dAtA[i:], m.XXX_unrecognized)\n\n\t}\n\n\treturn i, nil\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 14, "score": 115788.34038386935 }, { "content": "func (m *KeyError) Reset() { *m = KeyError{} }\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 15, "score": 115788.34038386935 }, { "content": "func (m *KeyError) Marshal() (dAtA []byte, err error) {\n\n\tsize := m.Size()\n\n\tdAtA = make([]byte, size)\n\n\tn, err := m.MarshalTo(dAtA)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn dAtA[:n], nil\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 16, "score": 115788.34038386935 }, { "content": "func (m *VerError) Size() (n int) {\n\n\tvar l int\n\n\t_ = l\n\n\tl = len(m.Error)\n\n\tif l > 0 {\n\n\t\tn += 1 + l + sovKvrpcpb(uint64(l))\n\n\t}\n\n\tif m.XXX_unrecognized != nil {\n\n\t\tn += len(m.XXX_unrecognized)\n\n\t}\n\n\treturn n\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 17, "score": 115788.34038386935 }, { "content": "func (m *VerError) Reset() { *m = VerError{} }\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 18, "score": 115788.34038386935 }, { "content": "func (m *KeyError) Unmarshal(dAtA []byte) error {\n\n\tl := len(dAtA)\n\n\tiNdEx := 0\n\n\tfor iNdEx < l {\n\n\t\tpreIndex := iNdEx\n\n\t\tvar wire uint64\n\n\t\tfor shift := uint(0); ; shift += 7 {\n\n\t\t\tif shift >= 64 {\n\n\t\t\t\treturn 
ErrIntOverflowKvrpcpb\n\n\t\t\t}\n\n\t\t\tif iNdEx >= l {\n\n\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t}\n\n\t\t\tb := dAtA[iNdEx]\n\n\t\t\tiNdEx++\n\n\t\t\twire |= (uint64(b) & 0x7F) << shift\n\n\t\t\tif b < 0x80 {\n\n\t\t\t\tbreak\n\n\t\t\t}\n\n\t\t}\n\n\t\tfieldNum := int32(wire >> 3)\n\n\t\twireType := int(wire & 0x7)\n\n\t\tif wireType == 4 {\n\n\t\t\treturn fmt.Errorf(\"proto: KeyError: wiretype end group for non-group\")\n\n\t\t}\n\n\t\tif fieldNum <= 0 {\n\n\t\t\treturn fmt.Errorf(\"proto: KeyError: illegal tag %d (wire type %d)\", fieldNum, wire)\n\n\t\t}\n\n\t\tswitch fieldNum {\n\n\t\tcase 1:\n\n\t\t\tif wireType != 2 {\n\n\t\t\t\treturn fmt.Errorf(\"proto: wrong wireType = %d for field Locked\", wireType)\n\n\t\t\t}\n\n\t\t\tvar msglen int\n\n\t\t\tfor shift := uint(0); ; shift += 7 {\n\n\t\t\t\tif shift >= 64 {\n\n\t\t\t\t\treturn ErrIntOverflowKvrpcpb\n\n\t\t\t\t}\n\n\t\t\t\tif iNdEx >= l {\n\n\t\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t\t}\n\n\t\t\t\tb := dAtA[iNdEx]\n\n\t\t\t\tiNdEx++\n\n\t\t\t\tmsglen |= (int(b) & 0x7F) << shift\n\n\t\t\t\tif b < 0x80 {\n\n\t\t\t\t\tbreak\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tif msglen < 0 {\n\n\t\t\t\treturn ErrInvalidLengthKvrpcpb\n\n\t\t\t}\n\n\t\t\tpostIndex := iNdEx + msglen\n\n\t\t\tif postIndex > l {\n\n\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t}\n\n\t\t\tif m.Locked == nil {\n\n\t\t\t\tm.Locked = &LockInfo{}\n\n\t\t\t}\n\n\t\t\tif err := m.Locked.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {\n\n\t\t\t\treturn err\n\n\t\t\t}\n\n\t\t\tiNdEx = postIndex\n\n\t\tcase 2:\n\n\t\t\tif wireType != 2 {\n\n\t\t\t\treturn fmt.Errorf(\"proto: wrong wireType = %d for field Retryable\", wireType)\n\n\t\t\t}\n\n\t\t\tvar stringLen uint64\n\n\t\t\tfor shift := uint(0); ; shift += 7 {\n\n\t\t\t\tif shift >= 64 {\n\n\t\t\t\t\treturn ErrIntOverflowKvrpcpb\n\n\t\t\t\t}\n\n\t\t\t\tif iNdEx >= l {\n\n\t\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t\t}\n\n\t\t\t\tb := dAtA[iNdEx]\n\n\t\t\t\tiNdEx++\n\n\t\t\t\tstringLen |= 
(uint64(b) & 0x7F) << shift\n\n\t\t\t\tif b < 0x80 {\n\n\t\t\t\t\tbreak\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tintStringLen := int(stringLen)\n\n\t\t\tif intStringLen < 0 {\n\n\t\t\t\treturn ErrInvalidLengthKvrpcpb\n\n\t\t\t}\n\n\t\t\tpostIndex := iNdEx + intStringLen\n\n\t\t\tif postIndex > l {\n\n\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t}\n\n\t\t\tm.Retryable = string(dAtA[iNdEx:postIndex])\n\n\t\t\tiNdEx = postIndex\n\n\t\tcase 3:\n\n\t\t\tif wireType != 2 {\n\n\t\t\t\treturn fmt.Errorf(\"proto: wrong wireType = %d for field Abort\", wireType)\n\n\t\t\t}\n\n\t\t\tvar stringLen uint64\n\n\t\t\tfor shift := uint(0); ; shift += 7 {\n\n\t\t\t\tif shift >= 64 {\n\n\t\t\t\t\treturn ErrIntOverflowKvrpcpb\n\n\t\t\t\t}\n\n\t\t\t\tif iNdEx >= l {\n\n\t\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t\t}\n\n\t\t\t\tb := dAtA[iNdEx]\n\n\t\t\t\tiNdEx++\n\n\t\t\t\tstringLen |= (uint64(b) & 0x7F) << shift\n\n\t\t\t\tif b < 0x80 {\n\n\t\t\t\t\tbreak\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tintStringLen := int(stringLen)\n\n\t\t\tif intStringLen < 0 {\n\n\t\t\t\treturn ErrInvalidLengthKvrpcpb\n\n\t\t\t}\n\n\t\t\tpostIndex := iNdEx + intStringLen\n\n\t\t\tif postIndex > l {\n\n\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t}\n\n\t\t\tm.Abort = string(dAtA[iNdEx:postIndex])\n\n\t\t\tiNdEx = postIndex\n\n\t\tcase 4:\n\n\t\t\tif wireType != 2 {\n\n\t\t\t\treturn fmt.Errorf(\"proto: wrong wireType = %d for field Conflict\", wireType)\n\n\t\t\t}\n\n\t\t\tvar msglen int\n\n\t\t\tfor shift := uint(0); ; shift += 7 {\n\n\t\t\t\tif shift >= 64 {\n\n\t\t\t\t\treturn ErrIntOverflowKvrpcpb\n\n\t\t\t\t}\n\n\t\t\t\tif iNdEx >= l {\n\n\t\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t\t}\n\n\t\t\t\tb := dAtA[iNdEx]\n\n\t\t\t\tiNdEx++\n\n\t\t\t\tmsglen |= (int(b) & 0x7F) << shift\n\n\t\t\t\tif b < 0x80 {\n\n\t\t\t\t\tbreak\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tif msglen < 0 {\n\n\t\t\t\treturn ErrInvalidLengthKvrpcpb\n\n\t\t\t}\n\n\t\t\tpostIndex := iNdEx + msglen\n\n\t\t\tif postIndex > l {\n\n\t\t\t\treturn 
io.ErrUnexpectedEOF\n\n\t\t\t}\n\n\t\t\tif m.Conflict == nil {\n\n\t\t\t\tm.Conflict = &WriteConflict{}\n\n\t\t\t}\n\n\t\t\tif err := m.Conflict.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {\n\n\t\t\t\treturn err\n\n\t\t\t}\n\n\t\t\tiNdEx = postIndex\n\n\t\tcase 5:\n\n\t\t\tif wireType != 2 {\n\n\t\t\t\treturn fmt.Errorf(\"proto: wrong wireType = %d for field AlreadyExist\", wireType)\n\n\t\t\t}\n\n\t\t\tvar msglen int\n\n\t\t\tfor shift := uint(0); ; shift += 7 {\n\n\t\t\t\tif shift >= 64 {\n\n\t\t\t\t\treturn ErrIntOverflowKvrpcpb\n\n\t\t\t\t}\n\n\t\t\t\tif iNdEx >= l {\n\n\t\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t\t}\n\n\t\t\t\tb := dAtA[iNdEx]\n\n\t\t\t\tiNdEx++\n\n\t\t\t\tmsglen |= (int(b) & 0x7F) << shift\n\n\t\t\t\tif b < 0x80 {\n\n\t\t\t\t\tbreak\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tif msglen < 0 {\n\n\t\t\t\treturn ErrInvalidLengthKvrpcpb\n\n\t\t\t}\n\n\t\t\tpostIndex := iNdEx + msglen\n\n\t\t\tif postIndex > l {\n\n\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t}\n\n\t\t\tif m.AlreadyExist == nil {\n\n\t\t\t\tm.AlreadyExist = &AlreadyExist{}\n\n\t\t\t}\n\n\t\t\tif err := m.AlreadyExist.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {\n\n\t\t\t\treturn err\n\n\t\t\t}\n\n\t\t\tiNdEx = postIndex\n\n\t\tcase 6:\n\n\t\t\tif wireType != 2 {\n\n\t\t\t\treturn fmt.Errorf(\"proto: wrong wireType = %d for field Deadlock\", wireType)\n\n\t\t\t}\n\n\t\t\tvar msglen int\n\n\t\t\tfor shift := uint(0); ; shift += 7 {\n\n\t\t\t\tif shift >= 64 {\n\n\t\t\t\t\treturn ErrIntOverflowKvrpcpb\n\n\t\t\t\t}\n\n\t\t\t\tif iNdEx >= l {\n\n\t\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t\t}\n\n\t\t\t\tb := dAtA[iNdEx]\n\n\t\t\t\tiNdEx++\n\n\t\t\t\tmsglen |= (int(b) & 0x7F) << shift\n\n\t\t\t\tif b < 0x80 {\n\n\t\t\t\t\tbreak\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tif msglen < 0 {\n\n\t\t\t\treturn ErrInvalidLengthKvrpcpb\n\n\t\t\t}\n\n\t\t\tpostIndex := iNdEx + msglen\n\n\t\t\tif postIndex > l {\n\n\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t}\n\n\t\t\tif m.Deadlock == nil 
{\n\n\t\t\t\tm.Deadlock = &Deadlock{}\n\n\t\t\t}\n\n\t\t\tif err := m.Deadlock.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {\n\n\t\t\t\treturn err\n\n\t\t\t}\n\n\t\t\tiNdEx = postIndex\n\n\t\tcase 7:\n\n\t\t\tif wireType != 2 {\n\n\t\t\t\treturn fmt.Errorf(\"proto: wrong wireType = %d for field CommitTsExpired\", wireType)\n\n\t\t\t}\n\n\t\t\tvar msglen int\n\n\t\t\tfor shift := uint(0); ; shift += 7 {\n\n\t\t\t\tif shift >= 64 {\n\n\t\t\t\t\treturn ErrIntOverflowKvrpcpb\n\n\t\t\t\t}\n\n\t\t\t\tif iNdEx >= l {\n\n\t\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t\t}\n\n\t\t\t\tb := dAtA[iNdEx]\n\n\t\t\t\tiNdEx++\n\n\t\t\t\tmsglen |= (int(b) & 0x7F) << shift\n\n\t\t\t\tif b < 0x80 {\n\n\t\t\t\t\tbreak\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tif msglen < 0 {\n\n\t\t\t\treturn ErrInvalidLengthKvrpcpb\n\n\t\t\t}\n\n\t\t\tpostIndex := iNdEx + msglen\n\n\t\t\tif postIndex > l {\n\n\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t}\n\n\t\t\tif m.CommitTsExpired == nil {\n\n\t\t\t\tm.CommitTsExpired = &CommitTsExpired{}\n\n\t\t\t}\n\n\t\t\tif err := m.CommitTsExpired.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {\n\n\t\t\t\treturn err\n\n\t\t\t}\n\n\t\t\tiNdEx = postIndex\n\n\t\tcase 8:\n\n\t\t\tif wireType != 2 {\n\n\t\t\t\treturn fmt.Errorf(\"proto: wrong wireType = %d for field TxnNotFound\", wireType)\n\n\t\t\t}\n\n\t\t\tvar msglen int\n\n\t\t\tfor shift := uint(0); ; shift += 7 {\n\n\t\t\t\tif shift >= 64 {\n\n\t\t\t\t\treturn ErrIntOverflowKvrpcpb\n\n\t\t\t\t}\n\n\t\t\t\tif iNdEx >= l {\n\n\t\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t\t}\n\n\t\t\t\tb := dAtA[iNdEx]\n\n\t\t\t\tiNdEx++\n\n\t\t\t\tmsglen |= (int(b) & 0x7F) << shift\n\n\t\t\t\tif b < 0x80 {\n\n\t\t\t\t\tbreak\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tif msglen < 0 {\n\n\t\t\t\treturn ErrInvalidLengthKvrpcpb\n\n\t\t\t}\n\n\t\t\tpostIndex := iNdEx + msglen\n\n\t\t\tif postIndex > l {\n\n\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t}\n\n\t\t\tif m.TxnNotFound == nil {\n\n\t\t\t\tm.TxnNotFound = &TxnNotFound{}\n\n\t\t\t}\n\n\t\t\tif 
err := m.TxnNotFound.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {\n\n\t\t\t\treturn err\n\n\t\t\t}\n\n\t\t\tiNdEx = postIndex\n\n\t\tcase 9:\n\n\t\t\tif wireType != 2 {\n\n\t\t\t\treturn fmt.Errorf(\"proto: wrong wireType = %d for field CommitTsTooLarge\", wireType)\n\n\t\t\t}\n\n\t\t\tvar msglen int\n\n\t\t\tfor shift := uint(0); ; shift += 7 {\n\n\t\t\t\tif shift >= 64 {\n\n\t\t\t\t\treturn ErrIntOverflowKvrpcpb\n\n\t\t\t\t}\n\n\t\t\t\tif iNdEx >= l {\n\n\t\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t\t}\n\n\t\t\t\tb := dAtA[iNdEx]\n\n\t\t\t\tiNdEx++\n\n\t\t\t\tmsglen |= (int(b) & 0x7F) << shift\n\n\t\t\t\tif b < 0x80 {\n\n\t\t\t\t\tbreak\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tif msglen < 0 {\n\n\t\t\t\treturn ErrInvalidLengthKvrpcpb\n\n\t\t\t}\n\n\t\t\tpostIndex := iNdEx + msglen\n\n\t\t\tif postIndex > l {\n\n\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t}\n\n\t\t\tif m.CommitTsTooLarge == nil {\n\n\t\t\t\tm.CommitTsTooLarge = &CommitTsTooLarge{}\n\n\t\t\t}\n\n\t\t\tif err := m.CommitTsTooLarge.Unmarshal(dAtA[iNdEx:postIndex]); err != nil {\n\n\t\t\t\treturn err\n\n\t\t\t}\n\n\t\t\tiNdEx = postIndex\n\n\t\tdefault:\n\n\t\t\tiNdEx = preIndex\n\n\t\t\tskippy, err := skipKvrpcpb(dAtA[iNdEx:])\n\n\t\t\tif err != nil {\n\n\t\t\t\treturn err\n\n\t\t\t}\n\n\t\t\tif skippy < 0 {\n\n\t\t\t\treturn ErrInvalidLengthKvrpcpb\n\n\t\t\t}\n\n\t\t\tif (iNdEx + skippy) > l {\n\n\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t}\n\n\t\t\tm.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)\n\n\t\t\tiNdEx += skippy\n\n\t\t}\n\n\t}\n\n\n\n\tif iNdEx > l {\n\n\t\treturn io.ErrUnexpectedEOF\n\n\t}\n\n\treturn nil\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 19, "score": 115788.34038386935 }, { "content": "func (m *VerError) Unmarshal(dAtA []byte) error {\n\n\tl := len(dAtA)\n\n\tiNdEx := 0\n\n\tfor iNdEx < l {\n\n\t\tpreIndex := iNdEx\n\n\t\tvar wire uint64\n\n\t\tfor shift := uint(0); ; shift += 7 {\n\n\t\t\tif shift >= 64 {\n\n\t\t\t\treturn 
ErrIntOverflowKvrpcpb\n\n\t\t\t}\n\n\t\t\tif iNdEx >= l {\n\n\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t}\n\n\t\t\tb := dAtA[iNdEx]\n\n\t\t\tiNdEx++\n\n\t\t\twire |= (uint64(b) & 0x7F) << shift\n\n\t\t\tif b < 0x80 {\n\n\t\t\t\tbreak\n\n\t\t\t}\n\n\t\t}\n\n\t\tfieldNum := int32(wire >> 3)\n\n\t\twireType := int(wire & 0x7)\n\n\t\tif wireType == 4 {\n\n\t\t\treturn fmt.Errorf(\"proto: VerError: wiretype end group for non-group\")\n\n\t\t}\n\n\t\tif fieldNum <= 0 {\n\n\t\t\treturn fmt.Errorf(\"proto: VerError: illegal tag %d (wire type %d)\", fieldNum, wire)\n\n\t\t}\n\n\t\tswitch fieldNum {\n\n\t\tcase 1:\n\n\t\t\tif wireType != 2 {\n\n\t\t\t\treturn fmt.Errorf(\"proto: wrong wireType = %d for field Error\", wireType)\n\n\t\t\t}\n\n\t\t\tvar stringLen uint64\n\n\t\t\tfor shift := uint(0); ; shift += 7 {\n\n\t\t\t\tif shift >= 64 {\n\n\t\t\t\t\treturn ErrIntOverflowKvrpcpb\n\n\t\t\t\t}\n\n\t\t\t\tif iNdEx >= l {\n\n\t\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t\t}\n\n\t\t\t\tb := dAtA[iNdEx]\n\n\t\t\t\tiNdEx++\n\n\t\t\t\tstringLen |= (uint64(b) & 0x7F) << shift\n\n\t\t\t\tif b < 0x80 {\n\n\t\t\t\t\tbreak\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tintStringLen := int(stringLen)\n\n\t\t\tif intStringLen < 0 {\n\n\t\t\t\treturn ErrInvalidLengthKvrpcpb\n\n\t\t\t}\n\n\t\t\tpostIndex := iNdEx + intStringLen\n\n\t\t\tif postIndex > l {\n\n\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t}\n\n\t\t\tm.Error = string(dAtA[iNdEx:postIndex])\n\n\t\t\tiNdEx = postIndex\n\n\t\tdefault:\n\n\t\t\tiNdEx = preIndex\n\n\t\t\tskippy, err := skipKvrpcpb(dAtA[iNdEx:])\n\n\t\t\tif err != nil {\n\n\t\t\t\treturn err\n\n\t\t\t}\n\n\t\t\tif skippy < 0 {\n\n\t\t\t\treturn ErrInvalidLengthKvrpcpb\n\n\t\t\t}\n\n\t\t\tif (iNdEx + skippy) > l {\n\n\t\t\t\treturn io.ErrUnexpectedEOF\n\n\t\t\t}\n\n\t\t\tm.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...)\n\n\t\t\tiNdEx += skippy\n\n\t\t}\n\n\t}\n\n\n\n\tif iNdEx > l {\n\n\t\treturn io.ErrUnexpectedEOF\n\n\t}\n\n\treturn nil\n", 
"file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 20, "score": 115788.34038386935 }, { "content": "func (m *VerError) Marshal() (dAtA []byte, err error) {\n\n\tsize := m.Size()\n\n\tdAtA = make([]byte, size)\n\n\tn, err := m.MarshalTo(dAtA)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn dAtA[:n], nil\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 21, "score": 115788.34038386935 }, { "content": "func (m *KeyError) Size() (n int) {\n\n\tvar l int\n\n\t_ = l\n\n\tif m.Locked != nil {\n\n\t\tl = m.Locked.Size()\n\n\t\tn += 1 + l + sovKvrpcpb(uint64(l))\n\n\t}\n\n\tl = len(m.Retryable)\n\n\tif l > 0 {\n\n\t\tn += 1 + l + sovKvrpcpb(uint64(l))\n\n\t}\n\n\tl = len(m.Abort)\n\n\tif l > 0 {\n\n\t\tn += 1 + l + sovKvrpcpb(uint64(l))\n\n\t}\n\n\tif m.Conflict != nil {\n\n\t\tl = m.Conflict.Size()\n\n\t\tn += 1 + l + sovKvrpcpb(uint64(l))\n\n\t}\n\n\tif m.AlreadyExist != nil {\n\n\t\tl = m.AlreadyExist.Size()\n\n\t\tn += 1 + l + sovKvrpcpb(uint64(l))\n\n\t}\n\n\tif m.Deadlock != nil {\n\n\t\tl = m.Deadlock.Size()\n\n\t\tn += 1 + l + sovKvrpcpb(uint64(l))\n\n\t}\n\n\tif m.CommitTsExpired != nil {\n\n\t\tl = m.CommitTsExpired.Size()\n\n\t\tn += 1 + l + sovKvrpcpb(uint64(l))\n\n\t}\n\n\tif m.TxnNotFound != nil {\n\n\t\tl = m.TxnNotFound.Size()\n\n\t\tn += 1 + l + sovKvrpcpb(uint64(l))\n\n\t}\n\n\tif m.CommitTsTooLarge != nil {\n\n\t\tl = m.CommitTsTooLarge.Size()\n\n\t\tn += 1 + l + sovKvrpcpb(uint64(l))\n\n\t}\n\n\tif m.XXX_unrecognized != nil {\n\n\t\tn += len(m.XXX_unrecognized)\n\n\t}\n\n\treturn n\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 22, "score": 115788.34038386935 }, { "content": "func (*Deadlock) ProtoMessage() {}\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 23, "score": 115771.38065920421 }, { "content": "func (*Mutation) ProtoMessage() {}\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 24, "score": 115771.38065920421 }, { "content": "func (*Context) ProtoMessage() {}\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", 
"rank": 25, "score": 115771.38065920421 }, { "content": "func (m *GetResponse) GetError() *KeyError {\n\n\tif m != nil {\n\n\t\treturn m.Error\n\n\t}\n\n\treturn nil\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 26, "score": 113520.69401717644 }, { "content": "func (m *ScanResponse) GetError() *KeyError {\n\n\tif m != nil {\n\n\t\treturn m.Error\n\n\t}\n\n\treturn nil\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 27, "score": 113520.69401717644 }, { "content": "\tDetail isError_Detail `protobuf_oneof:\"detail\"`\n", "file_path": "pkg/backup/backup.pb.go", "rank": 29, "score": 2.379284109716375 }, { "content": "\tValues [][]byte `protobuf:\"bytes,5,rep,name=values\" json:\"values,omitempty\"`\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 30, "score": 2.1657325823383644 }, { "content": "var _ codes.Code\n", "file_path": "pkg/configpb/configpb.pb.gw.go", "rank": 31, "score": 2.1653232030862917 }, { "content": "\tMessage string `protobuf:\"bytes,1,opt,name=message,proto3\" json:\"message,omitempty\"`\n", "file_path": "pkg/errorpb/errorpb.pb.go", "rank": 33, "score": 1.8715680272885367 }, { "content": "func (m *SplitRequest) GetRightDerive() bool {\n\n\tif m != nil {\n\n\t\treturn m.RightDerive\n\n\t}\n\n\treturn false\n", "file_path": "pkg/raft_cmdpb/raft_cmdpb.pb.go", "rank": 34, "score": 1.8702014496120416 }, { "content": "func (m *SplitRegionResponse) GetLeft() *metapb.Region {\n\n\tif m != nil {\n\n\t\treturn m.Left\n\n\t}\n\n\treturn nil\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 35, "score": 1.8702014496120416 }, { "content": "func (m *AdminRequest) GetSplit() *SplitRequest {\n\n\tif m != nil {\n\n\t\treturn m.Split\n\n\t}\n\n\treturn nil\n", "file_path": "pkg/raft_cmdpb/raft_cmdpb.pb.go", "rank": 36, "score": 1.8702014496120416 }, { "content": "func (m *Event) GetResolvedTs() uint64 {\n\n\tif x, ok := m.GetEvent().(*Event_ResolvedTs); ok {\n\n\t\treturn x.ResolvedTs\n\n\t}\n\n\treturn 0\n", "file_path": "pkg/cdcpb/cdcpb.pb.go", 
"rank": 37, "score": 1.8702014496120416 }, { "content": "func (m *ScatterRegionRequest) GetRegionId() uint64 {\n\n\tif m != nil {\n\n\t\treturn m.RegionId\n\n\t}\n\n\treturn 0\n", "file_path": "pkg/pdpb/pdpb.pb.go", "rank": 38, "score": 1.8702014496120416 }, { "content": "func (c *pDClient) AskSplit(ctx context.Context, in *AskSplitRequest, opts ...grpc.CallOption) (*AskSplitResponse, error) {\n\n\tout := new(AskSplitResponse)\n\n\terr := c.cc.Invoke(ctx, \"/pdpb.PD/AskSplit\", in, out, opts...)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn out, nil\n", "file_path": "pkg/pdpb/pdpb.pb.go", "rank": 39, "score": 1.8702014496120416 }, { "content": "func (c *pDClient) ReportSplit(ctx context.Context, in *ReportSplitRequest, opts ...grpc.CallOption) (*ReportSplitResponse, error) {\n\n\tout := new(ReportSplitResponse)\n\n\terr := c.cc.Invoke(ctx, \"/pdpb.PD/ReportSplit\", in, out, opts...)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn out, nil\n", "file_path": "pkg/pdpb/pdpb.pb.go", "rank": 40, "score": 1.8702014496120416 }, { "content": "func (m *SplitRegionResponse) GetRight() *metapb.Region {\n\n\tif m != nil {\n\n\t\treturn m.Right\n\n\t}\n\n\treturn nil\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 41, "score": 1.8702014496120416 }, { "content": "func (m *AdminResponse) GetSplit() *SplitResponse {\n\n\tif m != nil {\n\n\t\treturn m.Split\n\n\t}\n\n\treturn nil\n", "file_path": "pkg/raft_cmdpb/raft_cmdpb.pb.go", "rank": 42, "score": 1.8702014496120416 }, { "content": "func (m *SplitRegionRequest) GetSplitKey() []byte {\n\n\tif m != nil {\n\n\t\treturn m.SplitKey\n\n\t}\n\n\treturn nil\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 43, "score": 1.8702014496120416 }, { "content": "var _ = proto.Marshal\n", "file_path": "pkg/span/span.pb.go", "rank": 46, "score": 1.5234696463459176 }, { "content": "var _ context.Context\n", "file_path": "pkg/tikvpb/tikvpb.pb.go", "rank": 47, "score": 1.5234696463459176 }, { "content": "var 
_ = proto.Marshal\n", "file_path": "pkg/eraftpb/eraftpb.pb.go", "rank": 48, "score": 1.5234696463459176 }, { "content": "var _ context.Context\n", "file_path": "pkg/import_kvpb/import_kvpb.pb.go", "rank": 49, "score": 1.5234696463459176 }, { "content": "var _ context.Context\n", "file_path": "pkg/debugpb/debugpb.pb.go", "rank": 50, "score": 1.5234696463459176 }, { "content": "var _ context.Context\n", "file_path": "pkg/import_sstpb/import_sstpb.pb.go", "rank": 51, "score": 1.5234696463459176 }, { "content": "var _ = proto.Marshal\n", "file_path": "pkg/mpp/mpp.pb.go", "rank": 52, "score": 1.5234696463459176 }, { "content": "var _ context.Context\n", "file_path": "pkg/cdcpb/cdcpb.pb.go", "rank": 53, "score": 1.5234696463459176 }, { "content": "var _ context.Context\n", "file_path": "pkg/pdpb/pdpb.pb.go", "rank": 54, "score": 1.5234696463459176 }, { "content": "var _ = proto.Marshal\n", "file_path": "pkg/raft_cmdpb/raft_cmdpb.pb.go", "rank": 55, "score": 1.5234696463459176 }, { "content": "var _ context.Context\n", "file_path": "pkg/configpb/configpb.pb.go", "rank": 56, "score": 1.5234696463459176 }, { "content": "var _ = proto.Marshal\n", "file_path": "pkg/encryptionpb/encryptionpb.pb.go", "rank": 57, "score": 1.5234696463459176 }, { "content": "var _ = proto.Marshal\n", "file_path": "pkg/errorpb/errorpb.pb.go", "rank": 58, "score": 1.5234696463459176 }, { "content": "var _ = proto.Marshal\n", "file_path": "pkg/replication_modepb/replication_modepb.pb.go", "rank": 59, "score": 1.5234696463459176 }, { "content": "var _ context.Context\n", "file_path": "pkg/backup/backup.pb.go", "rank": 60, "score": 1.5234696463459176 }, { "content": "var _ context.Context\n", "file_path": "pkg/deadlock/deadlock.pb.go", "rank": 61, "score": 1.5234696463459176 }, { "content": "var _ = proto.Marshal\n", "file_path": "pkg/raft_serverpb/raft_serverpb.pb.go", "rank": 62, "score": 1.5234696463459176 }, { "content": "var _ = proto.Marshal\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 
63, "score": 1.5234696463459176 }, { "content": "var _ context.Context\n", "file_path": "pkg/enginepb/enginepb.pb.go", "rank": 64, "score": 1.5234696463459176 }, { "content": "var _ context.Context\n", "file_path": "pkg/diagnosticspb/diagnosticspb.pb.go", "rank": 65, "score": 1.5234696463459176 }, { "content": "var _ = proto.Marshal\n", "file_path": "pkg/metapb/metapb.pb.go", "rank": 66, "score": 1.5234696463459176 }, { "content": "var _ = proto.Marshal\n", "file_path": "pkg/coprocessor/coprocessor.pb.go", "rank": 67, "score": 1.5234696463459176 }, { "content": "\tIsCacheEnabled bool `protobuf:\"varint,5,opt,name=is_cache_enabled,json=isCacheEnabled,proto3\" json:\"is_cache_enabled,omitempty\"`\n", "file_path": "pkg/coprocessor/coprocessor.pb.go", "rank": 69, "score": 1.40507867246402 }, { "content": "\tReadQuorum bool `protobuf:\"varint,3,opt,name=read_quorum,json=readQuorum,proto3\" json:\"read_quorum,omitempty\"`\n", "file_path": "pkg/raft_cmdpb/raft_cmdpb.pb.go", "rank": 70, "score": 1.3726148202032942 }, { "content": "\tCiphertextKey []byte `protobuf:\"bytes,5,opt,name=ciphertext_key,json=ciphertextKey,proto3\" json:\"ciphertext_key,omitempty\"`\n", "file_path": "pkg/encryptionpb/encryptionpb.pb.go", "rank": 71, "score": 1.3710943141320646 }, { "content": "func (*BatchCommandsResponse_Response) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {\n\n\treturn _BatchCommandsResponse_Response_OneofMarshaler, _BatchCommandsResponse_Response_OneofUnmarshaler, _BatchCommandsResponse_Response_OneofSizer, 
[]interface{}{\n\n\t\t(*BatchCommandsResponse_Response_Get)(nil),\n\n\t\t(*BatchCommandsResponse_Response_Scan)(nil),\n\n\t\t(*BatchCommandsResponse_Response_Prewrite)(nil),\n\n\t\t(*BatchCommandsResponse_Response_Commit)(nil),\n\n\t\t(*BatchCommandsResponse_Response_Import)(nil),\n\n\t\t(*BatchCommandsResponse_Response_Cleanup)(nil),\n\n\t\t(*BatchCommandsResponse_Response_BatchGet)(nil),\n\n\t\t(*BatchCommandsResponse_Response_BatchRollback)(nil),\n\n\t\t(*BatchCommandsResponse_Response_ScanLock)(nil),\n\n\t\t(*BatchCommandsResponse_Response_ResolveLock)(nil),\n\n\t\t(*BatchCommandsResponse_Response_GC)(nil),\n\n\t\t(*BatchCommandsResponse_Response_DeleteRange)(nil),\n\n\t\t(*BatchCommandsResponse_Response_RawGet)(nil),\n\n\t\t(*BatchCommandsResponse_Response_RawBatchGet)(nil),\n\n\t\t(*BatchCommandsResponse_Response_RawPut)(nil),\n\n\t\t(*BatchCommandsResponse_Response_RawBatchPut)(nil),\n\n\t\t(*BatchCommandsResponse_Response_RawDelete)(nil),\n\n\t\t(*BatchCommandsResponse_Response_RawBatchDelete)(nil),\n\n\t\t(*BatchCommandsResponse_Response_RawScan)(nil),\n\n\t\t(*BatchCommandsResponse_Response_RawDeleteRange)(nil),\n\n\t\t(*BatchCommandsResponse_Response_RawBatchScan)(nil),\n\n\t\t(*BatchCommandsResponse_Response_Coprocessor)(nil),\n\n\t\t(*BatchCommandsResponse_Response_PessimisticLock)(nil),\n\n\t\t(*BatchCommandsResponse_Response_PessimisticRollback)(nil),\n\n\t\t(*BatchCommandsResponse_Response_CheckTxnStatus)(nil),\n\n\t\t(*BatchCommandsResponse_Response_TxnHeartBeat)(nil),\n\n\t\t(*BatchCommandsResponse_Response_VerGet)(nil),\n\n\t\t(*BatchCommandsResponse_Response_VerBatchGet)(nil),\n\n\t\t(*BatchCommandsResponse_Response_VerMut)(nil),\n\n\t\t(*BatchCommandsResponse_Response_VerBatchMut)(nil),\n\n\t\t(*BatchCommandsResponse_Response_VerScan)(nil),\n\n\t\t(*BatchCommandsResponse_Response_VerDeleteRange)(nil),\n\n\t\t(*BatchCommandsResponse_Response_CheckSecondaryLocks)(nil),\n\n\t\t(*BatchCommandsResponse_Response_Empty)(nil),\n\n\t}\n", "file_path": 
"pkg/tikvpb/tikvpb.pb.go", "rank": 72, "score": 1.3268964393858678 }, { "content": "func (*ConfigKind) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {\n\n\treturn _ConfigKind_OneofMarshaler, _ConfigKind_OneofUnmarshaler, _ConfigKind_OneofSizer, []interface{}{\n\n\t\t(*ConfigKind_Local)(nil),\n\n\t\t(*ConfigKind_Global)(nil),\n\n\t}\n", "file_path": "pkg/configpb/configpb.pb.go", "rank": 73, "score": 1.3268964393858678 }, { "content": "func (*ChangeDataRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {\n\n\treturn _ChangeDataRequest_OneofMarshaler, _ChangeDataRequest_OneofUnmarshaler, _ChangeDataRequest_OneofSizer, []interface{}{\n\n\t\t(*ChangeDataRequest_Register_)(nil),\n\n\t\t(*ChangeDataRequest_NotifyTxnStatus_)(nil),\n\n\t}\n", "file_path": "pkg/cdcpb/cdcpb.pb.go", "rank": 74, "score": 1.3268964393858678 }, { "content": "func (*WriteRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {\n\n\treturn _WriteRequest_OneofMarshaler, _WriteRequest_OneofUnmarshaler, _WriteRequest_OneofSizer, []interface{}{\n\n\t\t(*WriteRequest_Meta)(nil),\n\n\t\t(*WriteRequest_Batch)(nil),\n\n\t}\n", "file_path": "pkg/import_sstpb/import_sstpb.pb.go", "rank": 75, "score": 1.3268964393858678 }, { "content": "func (*Event) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {\n\n\treturn _Event_OneofMarshaler, _Event_OneofUnmarshaler, _Event_OneofSizer, 
[]interface{}{\n\n\t\t(*Event_Entries_)(nil),\n\n\t\t(*Event_Admin_)(nil),\n\n\t\t(*Event_Error)(nil),\n\n\t\t(*Event_ResolvedTs)(nil),\n\n\t\t(*Event_LongTxn_)(nil),\n\n\t}\n", "file_path": "pkg/cdcpb/cdcpb.pb.go", "rank": 76, "score": 1.3268964393858678 }, { "content": "func (*Link) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {\n\n\treturn _Link_OneofMarshaler, _Link_OneofUnmarshaler, _Link_OneofSizer, []interface{}{\n\n\t\t(*Link_Root)(nil),\n\n\t\t(*Link_Parent)(nil),\n\n\t\t(*Link_Continue)(nil),\n\n\t}\n", "file_path": "pkg/span/span.pb.go", "rank": 77, "score": 1.3268964393858678 }, { "content": "func (*Error) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {\n\n\treturn _Error_OneofMarshaler, _Error_OneofUnmarshaler, _Error_OneofSizer, []interface{}{\n\n\t\t(*Error_ClusterIdError)(nil),\n\n\t\t(*Error_KvError)(nil),\n\n\t\t(*Error_RegionError)(nil),\n\n\t}\n", "file_path": "pkg/backup/backup.pb.go", "rank": 78, "score": 1.3268964393858678 }, { "content": "func (*BatchCommandsRequest_Request) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {\n\n\treturn _BatchCommandsRequest_Request_OneofMarshaler, _BatchCommandsRequest_Request_OneofUnmarshaler, _BatchCommandsRequest_Request_OneofSizer, 
[]interface{}{\n\n\t\t(*BatchCommandsRequest_Request_Get)(nil),\n\n\t\t(*BatchCommandsRequest_Request_Scan)(nil),\n\n\t\t(*BatchCommandsRequest_Request_Prewrite)(nil),\n\n\t\t(*BatchCommandsRequest_Request_Commit)(nil),\n\n\t\t(*BatchCommandsRequest_Request_Import)(nil),\n\n\t\t(*BatchCommandsRequest_Request_Cleanup)(nil),\n\n\t\t(*BatchCommandsRequest_Request_BatchGet)(nil),\n\n\t\t(*BatchCommandsRequest_Request_BatchRollback)(nil),\n\n\t\t(*BatchCommandsRequest_Request_ScanLock)(nil),\n\n\t\t(*BatchCommandsRequest_Request_ResolveLock)(nil),\n\n\t\t(*BatchCommandsRequest_Request_GC)(nil),\n\n\t\t(*BatchCommandsRequest_Request_DeleteRange)(nil),\n\n\t\t(*BatchCommandsRequest_Request_RawGet)(nil),\n\n\t\t(*BatchCommandsRequest_Request_RawBatchGet)(nil),\n\n\t\t(*BatchCommandsRequest_Request_RawPut)(nil),\n\n\t\t(*BatchCommandsRequest_Request_RawBatchPut)(nil),\n\n\t\t(*BatchCommandsRequest_Request_RawDelete)(nil),\n\n\t\t(*BatchCommandsRequest_Request_RawBatchDelete)(nil),\n\n\t\t(*BatchCommandsRequest_Request_RawScan)(nil),\n\n\t\t(*BatchCommandsRequest_Request_RawDeleteRange)(nil),\n\n\t\t(*BatchCommandsRequest_Request_RawBatchScan)(nil),\n\n\t\t(*BatchCommandsRequest_Request_Coprocessor)(nil),\n\n\t\t(*BatchCommandsRequest_Request_PessimisticLock)(nil),\n\n\t\t(*BatchCommandsRequest_Request_PessimisticRollback)(nil),\n\n\t\t(*BatchCommandsRequest_Request_CheckTxnStatus)(nil),\n\n\t\t(*BatchCommandsRequest_Request_TxnHeartBeat)(nil),\n\n\t\t(*BatchCommandsRequest_Request_VerGet)(nil),\n\n\t\t(*BatchCommandsRequest_Request_VerBatchGet)(nil),\n\n\t\t(*BatchCommandsRequest_Request_VerMut)(nil),\n\n\t\t(*BatchCommandsRequest_Request_VerBatchMut)(nil),\n\n\t\t(*BatchCommandsRequest_Request_VerScan)(nil),\n\n\t\t(*BatchCommandsRequest_Request_VerDeleteRange)(nil),\n\n\t\t(*BatchCommandsRequest_Request_CheckSecondaryLocks)(nil),\n\n\t\t(*BatchCommandsRequest_Request_Empty)(nil),\n\n\t}\n", "file_path": "pkg/tikvpb/tikvpb.pb.go", "rank": 79, "score": 1.3268964393858678 }, 
{ "content": "func (*UploadRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {\n\n\treturn _UploadRequest_OneofMarshaler, _UploadRequest_OneofUnmarshaler, _UploadRequest_OneofSizer, []interface{}{\n\n\t\t(*UploadRequest_Meta)(nil),\n\n\t\t(*UploadRequest_Data)(nil),\n\n\t}\n", "file_path": "pkg/import_sstpb/import_sstpb.pb.go", "rank": 80, "score": 1.3268964393858678 }, { "content": "func (*MasterKey) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {\n\n\treturn _MasterKey_OneofMarshaler, _MasterKey_OneofUnmarshaler, _MasterKey_OneofSizer, []interface{}{\n\n\t\t(*MasterKey_Plaintext)(nil),\n\n\t\t(*MasterKey_File)(nil),\n\n\t\t(*MasterKey_Kms)(nil),\n\n\t}\n", "file_path": "pkg/encryptionpb/encryptionpb.pb.go", "rank": 81, "score": 1.3268964393858678 }, { "content": "func (*SnapshotRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {\n\n\treturn _SnapshotRequest_OneofMarshaler, _SnapshotRequest_OneofUnmarshaler, _SnapshotRequest_OneofSizer, []interface{}{\n\n\t\t(*SnapshotRequest_State)(nil),\n\n\t\t(*SnapshotRequest_Data)(nil),\n\n\t}\n", "file_path": "pkg/enginepb/enginepb.pb.go", "rank": 82, "score": 1.3268964393858678 }, { "content": "func (*StorageBackend) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {\n\n\treturn _StorageBackend_OneofMarshaler, _StorageBackend_OneofUnmarshaler, _StorageBackend_OneofSizer, 
[]interface{}{\n\n\t\t(*StorageBackend_Noop)(nil),\n\n\t\t(*StorageBackend_Local)(nil),\n\n\t\t(*StorageBackend_S3)(nil),\n\n\t\t(*StorageBackend_Gcs)(nil),\n\n\t}\n", "file_path": "pkg/backup/backup.pb.go", "rank": 83, "score": 1.3268964393858678 }, { "content": "func (*WriteEngineRequest) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {\n\n\treturn _WriteEngineRequest_OneofMarshaler, _WriteEngineRequest_OneofUnmarshaler, _WriteEngineRequest_OneofSizer, []interface{}{\n\n\t\t(*WriteEngineRequest_Head)(nil),\n\n\t\t(*WriteEngineRequest_Batch)(nil),\n\n\t}\n", "file_path": "pkg/import_kvpb/import_kvpb.pb.go", "rank": 84, "score": 1.3268964393858678 }, { "content": "\tNotFound bool `protobuf:\"varint,4,opt,name=not_found,json=notFound,proto3\" json:\"not_found,omitempty\"`\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 86, "score": 1.0834663929618422 }, { "content": "\tForceSyncCommit bool `protobuf:\"varint,7,opt,name=force_sync_commit,json=forceSyncCommit,proto3\" json:\"force_sync_commit,omitempty\"`\n", "file_path": "pkg/kvrpcpb/kvrpcpb.pb.go", "rank": 87, "score": 1.0834663929618422 }, { "content": "\tRightDerive bool `protobuf:\"varint,2,opt,name=right_derive,json=rightDerive,proto3\" json:\"right_derive,omitempty\"`\n", "file_path": "pkg/raft_cmdpb/raft_cmdpb.pb.go", "rank": 88, "score": 1.0822661893765222 }, { "content": "\tIv []byte `protobuf:\"bytes,4,opt,name=iv,proto3\" json:\"iv,omitempty\"`\n", "file_path": "pkg/encryptionpb/encryptionpb.pb.go", "rank": 89, "score": 1.0822661893765222 }, { "content": "\tAdminCmdType_Split AdminCmdType = 2 // Deprecated: Do not use.\n", "file_path": "pkg/raft_cmdpb/raft_cmdpb.pb.go", "rank": 90, "score": 1.0822661893765222 }, { "content": "\tKvGet(context.Context, *kvrpcpb.GetRequest) (*kvrpcpb.GetResponse, error)\n", "file_path": "pkg/tikvpb/tikvpb.pb.go", "rank": 91, "score": 
1.0822661893765222 }, { "content": "\tCurrentKeyId uint64 `protobuf:\"varint,2,opt,name=current_key_id,json=currentKeyId,proto3\" json:\"current_key_id,omitempty\"`\n", "file_path": "pkg/encryptionpb/encryptionpb.pb.go", "rank": 92, "score": 1.0822661893765222 } ]
Rust
semantics/src/traversal/contracts.rs
vaporydev/fe
246b23bad148e358ea04b80ca9e4e7a5ce4cec8d
use crate::errors::SemanticError; use crate::namespace::events::Event; use crate::namespace::scopes::{ ContractDef, ContractScope, ModuleScope, Scope, Shared, }; use crate::namespace::types::{ AbiDecodeLocation, FixedSize, Type, }; use crate::traversal::{ functions, types, }; use crate::{ Context, ContractAttributes, FunctionAttributes, RuntimeOperations, }; use fe_parser::ast as fe; use fe_parser::span::Spanned; use std::rc::Rc; pub fn contract_def( module_scope: Shared<ModuleScope>, context: Shared<Context>, stmt: &Spanned<fe::ModuleStmt>, ) -> Result<(), SemanticError> { if let fe::ModuleStmt::ContractDef { name: _, body } = &stmt.node { let contract_scope = ContractScope::new(module_scope); for stmt in body.iter() { match &stmt.node { fe::ContractStmt::ContractField { .. } => { contract_field(Rc::clone(&contract_scope), stmt)? } fe::ContractStmt::EventDef { .. } => { event_def(Rc::clone(&contract_scope), stmt)?; } fe::ContractStmt::FuncDef { .. } => { functions::func_def(Rc::clone(&contract_scope), Rc::clone(&context), stmt)?; } }; } let mut runtime_operations = vec![]; let mut public_functions = vec![]; for (name, def) in contract_scope.borrow().defs.iter() { match def { ContractDef::Event(event) => { runtime_operations.push(RuntimeOperations::AbiEncode { params: event.fields.clone(), }) } ContractDef::Function { is_public: true, params, returns, } => { if name != "__init__" { public_functions.push(FunctionAttributes { name: name.clone(), param_types: params.clone(), return_type: returns.clone(), }); for param in params { runtime_operations.push(RuntimeOperations::AbiDecode { param: param.clone(), location: AbiDecodeLocation::Calldata, }) } if !returns.is_empty_tuple() { runtime_operations.push(RuntimeOperations::AbiEncode { params: vec![returns.clone()], }) } } else { for param in params { runtime_operations.push(RuntimeOperations::AbiDecode { param: param.clone(), location: AbiDecodeLocation::Memory, }) } } } _ => {} } } runtime_operations.sort(); 
runtime_operations.dedup(); let attributes = ContractAttributes { runtime_operations, public_functions, }; context.borrow_mut().add_contract(stmt, attributes); return Ok(()); } unreachable!() } fn contract_field( scope: Shared<ContractScope>, stmt: &Spanned<fe::ContractStmt>, ) -> Result<(), SemanticError> { if let fe::ContractStmt::ContractField { qual: _, name, typ } = &stmt.node { match types::type_desc(Scope::Contract(Rc::clone(&scope)), typ)? { Type::Map(map) => scope.borrow_mut().add_map(name.node.to_string(), map), Type::Array { .. } => unimplemented!(), Type::Base(_) => unimplemented!(), Type::Tuple(_) => unimplemented!(), Type::String(_) => unimplemented!(), }; return Ok(()); } unreachable!() } fn event_def( scope: Shared<ContractScope>, stmt: &Spanned<fe::ContractStmt>, ) -> Result<(), SemanticError> { if let fe::ContractStmt::EventDef { name, fields } = &stmt.node { let name = name.node.to_string(); let fields = fields .iter() .map(|field| event_field(Rc::clone(&scope), field)) .collect::<Result<Vec<FixedSize>, SemanticError>>()?; scope .borrow_mut() .add_event(name.clone(), Event::new(name, fields)); return Ok(()); } unreachable!() } fn event_field( scope: Shared<ContractScope>, field: &Spanned<fe::EventField>, ) -> Result<FixedSize, SemanticError> { types::type_desc_fixed_size(Scope::Contract(scope), &field.node.typ) }
use crate::errors::SemanticError; use crate::namespace::events::Event; use crate::namespace::scopes::{ ContractDef, ContractScope, ModuleScope, Scope, Shared, }; use crate::namespace::types::{ AbiDecodeLocation, FixedSize, Type, }; use crate::traversal::{ functions, types, }; use crate::{ Context, ContractAttributes, FunctionAttributes, RuntimeOperations, }; use fe_parser::ast as fe; use fe_parser::span::Spanned; use std::rc::Rc; pub fn contract_def( module_scope: Shared<ModuleScope>, context: Shared<Context>, stmt: &Spanned<fe::ModuleStmt>, ) -> Result<(), SemanticError> { if let fe::ModuleStmt::ContractDef { name: _, body } = &stmt.node { let contract_scope = ContractScope::new(module_scope); for stmt in body.iter() { match &stmt.node { fe::ContractStmt::ContractField { .. } => { contract_field(Rc::clone(&contract_scope), stmt)? } fe::ContractStmt::EventDef { .. } => { event_def(Rc::clone(&contract_scope), stmt)?; } fe::ContractStmt::FuncDef { .. } => { functions::func_def(Rc::clone(&contract_scope), Rc::clone(&context), stmt)?; } }; } let mut runtime_operations = vec![]; let mut public_functions = vec![]; for (name, def) in contract_scope.borrow().defs.iter() { match def { ContractDef::Event(event) => { runtime_operations.push(RuntimeOperations::AbiEncode { params: event.fields.clone(), }) } ContractDef::Function { is_public: true, params, returns, } => { if name != "__init__" { public_functions.push(FunctionAttributes { name: name.clone(), param_types: params.clone(), return_type: returns.clone(), }); for param in params { runtime_operations.push(RuntimeOperations::AbiDecode { param: param.clone(), location: AbiDecodeLocation::Calldata, }) } if !returns.is_empty_tuple() { runtime_operations.push(RuntimeOperations::AbiEncode { params: vec![returns.clone()], }) } } else { for param in params { runtime_operations.push(RuntimeOperations::AbiDecode { param: param.clone(), location: AbiDecodeLocation::Memory, }) } } } _ => {} } } runtime_operations.sort(); 
runtime_operations.dedup(); let attributes = ContractAttributes { runtime_operations, public_functions, }; context.borrow_mut().add_contract(stmt, attributes); return Ok(()); } unreachable!() }
fn event_def( scope: Shared<ContractScope>, stmt: &Spanned<fe::ContractStmt>, ) -> Result<(), SemanticError> { if let fe::ContractStmt::EventDef { name, fields } = &stmt.node { let name = name.node.to_string(); let fields = fields .iter() .map(|field| event_field(Rc::clone(&scope), field)) .collect::<Result<Vec<FixedSize>, SemanticError>>()?; scope .borrow_mut() .add_event(name.clone(), Event::new(name, fields)); return Ok(()); } unreachable!() } fn event_field( scope: Shared<ContractScope>, field: &Spanned<fe::EventField>, ) -> Result<FixedSize, SemanticError> { types::type_desc_fixed_size(Scope::Contract(scope), &field.node.typ) }
fn contract_field( scope: Shared<ContractScope>, stmt: &Spanned<fe::ContractStmt>, ) -> Result<(), SemanticError> { if let fe::ContractStmt::ContractField { qual: _, name, typ } = &stmt.node { match types::type_desc(Scope::Contract(Rc::clone(&scope)), typ)? { Type::Map(map) => scope.borrow_mut().add_map(name.node.to_string(), map), Type::Array { .. } => unimplemented!(), Type::Base(_) => unimplemented!(), Type::Tuple(_) => unimplemented!(), Type::String(_) => unimplemented!(), }; return Ok(()); } unreachable!() }
function_block-full_function
[ { "content": "/// Maps a type description node to an enum type.\n\npub fn type_desc(scope: Scope, typ: &Spanned<fe::TypeDesc>) -> Result<Type, SemanticError> {\n\n types::type_desc(&scope.module_scope().borrow().defs, &typ.node)\n\n}\n\n\n", "file_path": "semantics/src/traversal/types.rs", "rank": 0, "score": 408841.46649907413 }, { "content": "/// Gather context information for a module and check for type errors.\n\npub fn module(context: Shared<Context>, module: &fe::Module) -> Result<(), SemanticError> {\n\n let scope = ModuleScope::new();\n\n\n\n for stmt in module.body.iter() {\n\n match &stmt.node {\n\n fe::ModuleStmt::TypeDef { .. } => type_def(Rc::clone(&scope), stmt)?,\n\n fe::ModuleStmt::ContractDef { .. } => {\n\n contracts::contract_def(Rc::clone(&scope), Rc::clone(&context), stmt)?\n\n }\n\n fe::ModuleStmt::FromImport { .. } => unimplemented!(),\n\n fe::ModuleStmt::SimpleImport { .. } => unimplemented!(),\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "semantics/src/traversal/module.rs", "rank": 1, "score": 376663.7795481476 }, { "content": "fn emit(context: &Context, stmt: &Spanned<fe::FuncStmt>) -> Result<yul::Statement, CompileError> {\n\n if let fe::FuncStmt::Emit { value } = &stmt.node {\n\n if let fe::Expr::Call { func: _, args } = &value.node {\n\n let event_values = args\n\n .node\n\n .iter()\n\n .map(|arg| call_arg(context, arg))\n\n .collect::<Result<_, _>>()?;\n\n\n\n if let Some(event) = context.get_emit(stmt) {\n\n return Ok(operations::emit_event(event.to_owned(), event_values));\n\n }\n\n\n\n return Err(CompileError::static_str(\"missing event definition\"));\n\n }\n\n\n\n return Err(CompileError::static_str(\n\n \"emit statements must contain a call expression\",\n\n ));\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/functions.rs", "rank": 2, "score": 348836.6212018765 }, { "content": "fn assert(context: &Context, stmt: &Spanned<fe::FuncStmt>) -> Result<yul::Statement, CompileError> {\n\n if let 
fe::FuncStmt::Assert { test, msg: _ } = &stmt.node {\n\n let test = expressions::expr(context, test)?;\n\n\n\n return Ok(statement! { if (iszero([test])) { (revert(0, 0)) } });\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/functions.rs", "rank": 3, "score": 348836.62120187655 }, { "content": "fn expr(context: &Context, stmt: &Spanned<fe::FuncStmt>) -> Result<yul::Statement, CompileError> {\n\n if let fe::FuncStmt::Expr { value } = &stmt.node {\n\n let spanned = spanned_expression(&stmt.span, value);\n\n let expr = expressions::expr(context, &spanned)?;\n\n if let Some(attributes) = context.get_expression(stmt.span) {\n\n if attributes.typ.is_empty_tuple() {\n\n return Ok(yul::Statement::Expression(expr));\n\n } else {\n\n return Ok(statement! { pop([expr])});\n\n }\n\n }\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/functions.rs", "rank": 4, "score": 348836.62120187655 }, { "content": "/// Performs semantic analysis of the source program and returns a `Context`\n\n/// instance.\n\npub fn analysis(module: &fe::Module) -> Result<Context, SemanticError> {\n\n let context = Context::new_shared();\n\n traversal::module::module(Rc::clone(&context), module)?;\n\n Ok(Rc::try_unwrap(context)\n\n .map_err(|_| \"more than one strong reference pointing to context\")\n\n // This should never panic.\n\n .expect(\"failed to unwrap reference counter\")\n\n .into_inner())\n\n}\n\n\n\npub mod test_utils {\n\n use crate::namespace::types::FixedSize;\n\n use crate::{\n\n Context,\n\n ExpressionAttributes,\n\n };\n\n use fe_parser::ast as fe;\n\n use fe_parser::span::{\n\n Span,\n\n Spanned,\n", "file_path": "semantics/src/lib.rs", "rank": 5, "score": 343025.9415484702 }, { "content": "pub fn expression_attributes_to_types(attributes: Vec<ExpressionAttributes>) -> Vec<Type> {\n\n attributes\n\n .iter()\n\n .map(|attributes| attributes.typ.clone())\n\n .collect()\n\n}\n\n\n", "file_path": 
"semantics/src/traversal/_utils.rs", "rank": 6, "score": 326242.7927366716 }, { "content": "pub fn fixed_sizes_to_types(sizes: Vec<FixedSize>) -> Vec<Type> {\n\n sizes\n\n .iter()\n\n .map(|param| param.clone().into_type())\n\n .collect()\n\n}\n", "file_path": "semantics/src/traversal/_utils.rs", "rank": 7, "score": 321021.7454006826 }, { "content": "/// Builds a vector of Yul contracts from a Fe module.\n\npub fn module(context: &Context, module: &fe::Module) -> Result<YulContracts, CompileError> {\n\n module\n\n .body\n\n .iter()\n\n .try_fold(YulContracts::new(), |mut contracts, stmt| {\n\n match &stmt.node {\n\n fe::ModuleStmt::TypeDef { .. } => {}\n\n fe::ModuleStmt::ContractDef { name, .. } => {\n\n let contract = contracts::contract_def(context, stmt)?;\n\n\n\n if contracts.insert(name.node.to_string(), contract).is_some() {\n\n return Err(CompileError::static_str(\"duplicate contract def\"));\n\n }\n\n }\n\n fe::ModuleStmt::FromImport { .. } => unimplemented!(),\n\n fe::ModuleStmt::SimpleImport { .. } => unimplemented!(),\n\n }\n\n\n\n Ok(contracts)\n\n })\n\n}\n", "file_path": "compiler/src/yul/mappers/module.rs", "rank": 8, "score": 307539.48689606646 }, { "content": "/// Builds a switch statement that dispatches calls to the contract.\n\npub fn dispatcher(attributes: Vec<FunctionAttributes>) -> Result<yul::Statement, CompileError> {\n\n let arms = attributes\n\n .iter()\n\n .map(|arm| dispatch_arm(arm.to_owned()))\n\n .collect::<Vec<_>>();\n\n\n\n Ok(switch! 
{\n\n switch (cloadn(0, 4))\n\n [arms...]\n\n })\n\n}\n\n\n", "file_path": "compiler/src/yul/runtime/abi_dispatcher.rs", "rank": 9, "score": 304366.6780971498 }, { "content": "/// Builds a Yul expression from a Fe expression.\n\npub fn expr(context: &Context, exp: &Spanned<fe::Expr>) -> Result<yul::Expression, CompileError> {\n\n match &exp.node {\n\n fe::Expr::Name(_) => expr_name(context, exp),\n\n fe::Expr::Num(_) => expr_num(exp),\n\n fe::Expr::Bool(_) => expr_bool(exp),\n\n fe::Expr::Subscript { .. } => expr_subscript(context, exp),\n\n fe::Expr::Attribute { .. } => expr_attribute(context, exp),\n\n fe::Expr::Ternary { .. } => expr_ternary(context, exp),\n\n fe::Expr::BoolOperation { .. } => unimplemented!(),\n\n fe::Expr::BinOperation { .. } => expr_bin_operation(context, exp),\n\n fe::Expr::UnaryOperation { .. } => unimplemented!(),\n\n fe::Expr::CompOperation { .. } => expr_comp_operation(context, exp),\n\n fe::Expr::Call { .. } => expr_call(context, exp),\n\n fe::Expr::List { .. } => unimplemented!(),\n\n fe::Expr::ListComp { .. } => unimplemented!(),\n\n fe::Expr::Tuple { .. } => unimplemented!(),\n\n fe::Expr::Str(_) => unimplemented!(),\n\n fe::Expr::Ellipsis => unimplemented!(),\n\n }\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/expressions.rs", "rank": 10, "score": 296783.22548293654 }, { "content": "/// Finds the type of an indexed expression.\n\n///\n\n/// e.g. 
`foo[42]`\n\npub fn index(value: Type, index: Type) -> Result<Type, SemanticError> {\n\n match value {\n\n Type::Array(array) => index_array(array, index),\n\n Type::Map(map) => index_map(map, index),\n\n Type::Base(_) => Err(SemanticError::NotSubscriptable),\n\n Type::Tuple(_) => Err(SemanticError::NotSubscriptable),\n\n Type::String(_) => Err(SemanticError::NotSubscriptable),\n\n }\n\n}\n\n\n", "file_path": "semantics/src/namespace/operations.rs", "rank": 11, "score": 296722.1557237886 }, { "content": "/// Retrieves the &str value of a name expression and converts it to a String.\n\npub fn expr_name_string(exp: &Spanned<fe::Expr>) -> Result<String, SemanticError> {\n\n expr_name_str(exp).map(|name| name.to_string())\n\n}\n\n\n", "file_path": "semantics/src/traversal/expressions.rs", "rank": 12, "score": 293942.5467179433 }, { "content": "/// Retrieves the &str value of a name expression.\n\npub fn expr_name_str<'a>(exp: &Spanned<fe::Expr<'a>>) -> Result<&'a str, SemanticError> {\n\n if let fe::Expr::Name(name) = exp.node {\n\n return Ok(name);\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "semantics/src/traversal/expressions.rs", "rank": 13, "score": 281435.11118781706 }, { "content": "fn revert(stmt: &Spanned<fe::FuncStmt>) -> Result<yul::Statement, CompileError> {\n\n if let fe::FuncStmt::Revert = &stmt.node {\n\n return Ok(statement! 
{ revert(0, 0) });\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/functions.rs", "rank": 14, "score": 280241.59375373565 }, { "content": "/// Formats the name and params and calculates the 4 byte keccak256 value of the\n\n/// signature.\n\npub fn func_selector(name: String, params: Vec<String>) -> String {\n\n sig_keccak256(name, params, 4)\n\n}\n\n\n", "file_path": "compiler/src/abi/utils.rs", "rank": 15, "score": 278856.40025662613 }, { "content": "fn expr_name(_context: &Context, exp: &Spanned<fe::Expr>) -> Result<yul::Expression, CompileError> {\n\n if let fe::Expr::Name(name) = exp.node {\n\n return Ok(identifier_expression! {(name)});\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/expressions.rs", "rank": 16, "score": 267980.7879053482 }, { "content": "/// Retrieves the &str value of a name expression and converts it to a String.\n\npub fn expr_name_string(exp: &Spanned<fe::Expr>) -> Result<String, CompileError> {\n\n expr_name_str(exp).map(|name| name.to_string())\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/expressions.rs", "rank": 17, "score": 260968.2081132194 }, { "content": "pub fn func_stmt(input: Cursor) -> ParseResult<Vec<Spanned<FuncStmt>>> {\n\n alt((map(compound_stmt, |stmt| vec![stmt]), simple_stmt))(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 18, "score": 253930.5623225484 }, { "content": "pub fn simple_stmt(input: Cursor) -> ParseResult<Vec<Spanned<FuncStmt>>> {\n\n terminated(separated(small_stmt, op(\";\"), true), newline_token)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 19, "score": 253930.56232254836 }, { "content": "/// Parse a type definition (type alias).\n\npub fn type_def(input: Cursor) -> ParseResult<Spanned<ModuleStmt>> {\n\n let (input, type_kw) = name(\"type\")(input)?;\n\n let (input, name) = name_token(input)?;\n\n let (input, _) = op(\"=\")(input)?;\n\n let (input, type_desc) = type_desc(input)?;\n\n let 
(input, _) = newline_token(input)?;\n\n\n\n let span = Span::from_pair(type_kw, &type_desc);\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: ModuleStmt::TypeDef {\n\n name: name.into(),\n\n typ: type_desc,\n\n },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 20, "score": 252896.5516262779 }, { "content": "/// Retrieves the &str value of a name expression.\n\npub fn expr_name_str<'a>(exp: &Spanned<fe::Expr<'a>>) -> Result<&'a str, CompileError> {\n\n if let fe::Expr::Name(name) = exp.node {\n\n return Ok(name);\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/expressions.rs", "rank": 21, "score": 251277.9188122629 }, { "content": "/// Parse a contract definition statement.\n\npub fn contract_def(input: Cursor) -> ParseResult<Spanned<ModuleStmt>> {\n\n // \"contract\" name \":\" NEWLINE\n\n let (input, contract_kw) = name(\"contract\")(input)?;\n\n let (input, name_tok) = name_token(input)?;\n\n let (input, _) = op(\":\")(input)?;\n\n let (input, _) = newline_token(input)?;\n\n\n\n // INDENT contract_stmt+ DEDENT\n\n let (input, _) = indent_token(input)?;\n\n let (input, body) = many1(contract_stmt)(input)?;\n\n let (input, _) = dedent_token(input)?;\n\n\n\n let last_stmt = body.last().unwrap();\n\n let span = Span::from_pair(contract_kw, last_stmt);\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: ContractDef {\n\n name: name_tok.into(),\n\n body,\n\n },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 22, "score": 250006.90223200852 }, { "content": "/// Parse a dotted import name.\n\npub fn dotted_name(input: Cursor) -> ParseResult<Vec<Spanned<&str>>> {\n\n separated(map(name_token, |t| t.into()), op(\".\"), false)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 23, "score": 246671.94193284522 }, { "content": "pub fn block(input: Cursor) -> ParseResult<Vec<Spanned<FuncStmt>>> {\n\n alt((simple_stmt, |input| {\n\n let (input, _) = 
newline_token(input)?;\n\n let (input, _) = indent_token(input)?;\n\n let (input, stmts) = many1(func_stmt)(input)?;\n\n let (input, _) = dedent_token(input)?;\n\n\n\n let result: Vec<_> = stmts.into_iter().flatten().collect();\n\n\n\n Ok((input, result))\n\n }))(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 24, "score": 246563.5899836462 }, { "content": "/// Formats the name and fields and calculates the 32 byte keccak256 value of\n\n/// the signature.\n\npub fn event_topic(name: String, fields: Vec<String>) -> String {\n\n sig_keccak256(name, fields, 32)\n\n}\n\n\n", "file_path": "semantics/src/namespace/events.rs", "rank": 25, "score": 243339.09036408292 }, { "content": "/// Formats the name and fields and calculates the 32 byte keccak256 value of\n\n/// the signature.\n\npub fn event_topic(name: String, fields: Vec<String>) -> String {\n\n sig_keccak256(name, fields, 32)\n\n}\n", "file_path": "compiler/src/abi/utils.rs", "rank": 26, "score": 243339.0903640829 }, { "content": "pub fn else_block(input: Cursor) -> ParseResult<Vec<Spanned<FuncStmt>>> {\n\n let (input, _) = name(\"else\")(input)?;\n\n let (input, _) = op(\":\")(input)?;\n\n let (input, stmts) = block(input)?;\n\n\n\n Ok((input, stmts))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 27, "score": 243188.6099921839 }, { "content": "fn expr_num(exp: &Spanned<fe::Expr>) -> Result<ExpressionAttributes, SemanticError> {\n\n if let fe::Expr::Num(_) = &exp.node {\n\n return Ok(ExpressionAttributes {\n\n location: Location::Value,\n\n typ: Type::Base(Base::U256),\n\n });\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "semantics/src/traversal/expressions.rs", "rank": 28, "score": 242896.13990502755 }, { "content": "fn expr_bool(exp: &Spanned<fe::Expr>) -> Result<ExpressionAttributes, SemanticError> {\n\n if let fe::Expr::Bool(_) = &exp.node {\n\n return Ok(ExpressionAttributes {\n\n location: Location::Value,\n\n typ: Type::Base(Base::Bool),\n\n });\n\n }\n\n\n\n 
unreachable!()\n\n}\n\n\n", "file_path": "semantics/src/traversal/expressions.rs", "rank": 29, "score": 242896.13990502755 }, { "content": "pub fn arg_list(input: Cursor) -> ParseResult<Vec<Spanned<FuncDefArg>>> {\n\n match input[0] {\n\n Token { string: \")\", .. } => Ok((input, vec![])),\n\n _ => separated(arg_def, op(\",\"), true)(input),\n\n }\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 30, "score": 240093.67230099425 }, { "content": "/// Tokenize the given source code in `source` and filter out tokens not\n\n/// relevant to parsing.\n\npub fn get_parse_tokens(source: &str) -> Result<Vec<Token>, TokenizeError> {\n\n let tokens = tokenize(source)?;\n\n\n\n Ok(tokens\n\n .into_iter()\n\n .filter(|t| t.typ != TokenType::NL && t.typ != TokenType::COMMENT)\n\n .collect())\n\n}\n", "file_path": "parser/src/lib.rs", "rank": 31, "score": 239331.84333137417 }, { "content": "/// Generates an encoding function for any set of type parameters.\n\npub fn encode<T: AbiEncoding>(types: Vec<T>) -> yul::Statement {\n\n // the name of the function we're generating\n\n let func_name = encode_name(&types);\n\n\n\n // create a vector of identifiers and a vector of tuples, which contain\n\n // expressions that correspond to the identifiers.\n\n //\n\n // The identifier vector is injected into the parameter section of our\n\n // encoding function and the expressions are used to reference the parameters\n\n // while encoding.\n\n let (params, typed_params): (Vec<_>, Vec<_>) = types\n\n .iter()\n\n .enumerate()\n\n .map(|(i, typ)| {\n\n let ident = identifier! { (format!(\"val_{}\", i)) };\n\n let expr = identifier_expression! 
{ [ident.clone()] };\n\n (ident, (expr, typ))\n\n })\n\n .unzip();\n\n\n", "file_path": "compiler/src/yul/abi/functions.rs", "rank": 32, "score": 237708.8070390002 }, { "content": "#[allow(clippy::cognitive_complexity)]\n\n#[allow(clippy::trivial_regex)]\n\npub fn tokenize<'a>(input: &'a str) -> Result<Vec<Token<'a>>, TokenizeError> {\n\n // Static values/helpers\n\n let pseudo_token_re = compile_anchored(&get_pseudotoken_pattern());\n\n\n\n let triple_quoted = get_triple_quote_set();\n\n let single_quoted = get_single_quote_set();\n\n\n\n let double3_re = Regex::new(DOUBLE3).unwrap();\n\n let single3_re = Regex::new(SINGLE3).unwrap();\n\n let double_re = Regex::new(DOUBLE).unwrap();\n\n let single_re = Regex::new(SINGLE).unwrap();\n\n\n\n // The ordering of checks matters here. We need to eliminate the possibility of\n\n // triple quote delimiters before looking for single quote delimiters.\n\n let get_contstr_end_re = |token: &str| -> &Regex {\n\n let token_stripped = lstrip_slice(token, \"bBrRuUfF\");\n\n\n\n if token_stripped.starts_with(\"\\\"\\\"\\\"\") {\n\n &double3_re\n\n } else if token_stripped.starts_with(\"'''\") {\n", "file_path": "parser/src/tokenizer/tokenize.rs", "rank": 33, "score": 231663.84872711878 }, { "content": "fn index_map(map: Map, index: Type) -> Result<Type, SemanticError> {\n\n if index != Type::Base(map.key) {\n\n return Err(SemanticError::TypeError);\n\n }\n\n\n\n Ok(*map.value)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::errors::SemanticError;\n\n use crate::namespace::operations;\n\n use crate::namespace::types::{\n\n Array,\n\n Base,\n\n Map,\n\n Type,\n\n };\n\n use rstest::rstest;\n\n\n", "file_path": "semantics/src/namespace/operations.rs", "rank": 34, "score": 231518.89275415867 }, { "content": "fn index_array(array: Array, index: Type) -> Result<Type, SemanticError> {\n\n if index != Type::Base(Base::U256) {\n\n return Err(SemanticError::TypeError);\n\n }\n\n\n\n Ok(Type::Base(array.inner))\n\n}\n\n\n", 
"file_path": "semantics/src/namespace/operations.rs", "rank": 35, "score": 231518.89275415867 }, { "content": "/// Returns all functions that should be available during runtime.\n\npub fn std() -> Vec<yul::Statement> {\n\n vec![\n\n avail(),\n\n alloc(),\n\n alloc_mstoren(),\n\n free(),\n\n ccopy(),\n\n mcopy(),\n\n scopy(),\n\n mloadn(),\n\n sloadn(),\n\n cloadn(),\n\n mstoren(),\n\n sstoren(),\n\n dualkeccak256(),\n\n ceil32(),\n\n ternary(),\n\n ]\n\n}\n\n\n", "file_path": "compiler/src/yul/runtime/functions.rs", "rank": 36, "score": 229680.97648176388 }, { "content": "/// Parse a map of contract ABIs from the input `module`.\n\npub fn module<'a>(module: &'a fe::Module<'a>) -> Result<ModuleABIs, CompileError> {\n\n let mut type_defs = TypeDefs::new();\n\n\n\n module.body.iter().try_fold(ModuleABIs::new(), |mut m, s| {\n\n match &s.node {\n\n fe::ModuleStmt::TypeDef { name, typ } => {\n\n if type_defs.insert(name.node, &typ.node).is_some() {\n\n return Err(CompileError::static_str(\"duplicate type definition\"));\n\n }\n\n }\n\n fe::ModuleStmt::ContractDef { name, body } => {\n\n if m.contracts\n\n .insert(name.node.to_string(), contract_def(&type_defs, body)?)\n\n .is_some()\n\n {\n\n return Err(CompileError::static_str(\"duplicate contract definition\"));\n\n }\n\n }\n\n _ => {}\n\n };\n\n\n\n Ok(m)\n\n })\n\n}\n\n\n", "file_path": "compiler/src/abi/builder.rs", "rank": 37, "score": 228992.7087120831 }, { "content": "pub fn return_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n let (input, return_kw) = name(\"return\")(input)?;\n\n let (input, value) = opt(exprs)(input)?;\n\n\n\n let span = match &value {\n\n Some(exp) => Span::from_pair(return_kw, exp),\n\n None => return_kw.span,\n\n };\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: FuncStmt::Return { value },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 38, "score": 228430.00032127366 }, { "content": "pub fn t_tail(input: Cursor) -> ParseResult<Vec<Tail>> 
{\n\n let (input, mut tails) = many0(call_tail)(input)?;\n\n let (input, last) = alt((attr_tail, index_tail))(input)?;\n\n\n\n tails.push(last);\n\n\n\n Ok((input, tails))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 39, "score": 219812.79018721136 }, { "content": "#[allow(clippy::needless_lifetimes)]\n\npub fn name<'a>(string: &'a str) -> impl Fn(Cursor<'a>) -> ParseResult<&Token> {\n\n verify(\n\n name_token,\n\n move |t| t.string == string,\n\n move |inp, _| ParseError::str(inp, format!(\"expected \\\"{}\\\" name token\", string)),\n\n )\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 40, "score": 217957.16680441902 }, { "content": "pub fn func_def(input: Cursor) -> ParseResult<Spanned<ContractStmt>> {\n\n let (input, qual) = opt(func_qual)(input)?;\n\n let (input, def_kw) = name(\"def\")(input)?;\n\n let (input, name_tok) = name_token(input)?;\n\n\n\n let (input, _) = op(\"(\")(input)?;\n\n let (input, args) = arg_list(input)?;\n\n let (input, _) = op(\")\")(input)?;\n\n\n\n let (input, return_type) = opt(preceded(op(\"->\"), base_type))(input)?;\n\n\n\n let (input, _) = op(\":\")(input)?;\n\n\n\n let (input, body) = block(input)?;\n\n\n\n let last = body.last().unwrap();\n\n let span = match &qual {\n\n Some(qual) => Span::from_pair(qual, last),\n\n None => Span::from_pair(def_kw, last),\n\n };\n", "file_path": "parser/src/parsers.rs", "rank": 41, "score": 217710.80856555287 }, { "content": "/// Parse an event definition statement.\n\npub fn event_def(input: Cursor) -> ParseResult<Spanned<ContractStmt>> {\n\n // \"event\" name \":\" NEWLINE\n\n let (input, event_kw) = name(\"event\")(input)?;\n\n let (input, name_tok) = name_token(input)?;\n\n let (input, _) = op(\":\")(input)?;\n\n let (input, _) = newline_token(input)?;\n\n\n\n // INDENT event_field+ DEDENT\n\n let (input, _) = indent_token(input)?;\n\n let (input, fields) = many1(event_field)(input)?;\n\n let (input, _) = dedent_token(input)?;\n\n\n\n let last_field = 
fields.last().unwrap();\n\n let span = Span::from_pair(event_kw, last_field);\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: ContractStmt::EventDef {\n\n name: name_tok.into(),\n\n fields,\n\n },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 42, "score": 217710.80856555287 }, { "content": "/// Maps a type description node to a fixed size enum type.\n\npub fn type_desc_fixed_size(\n\n scope: Scope,\n\n typ: &Spanned<fe::TypeDesc>,\n\n) -> Result<FixedSize, SemanticError> {\n\n types::type_desc_fixed_size(&scope.module_scope().borrow().defs, &typ.node)\n\n}\n", "file_path": "semantics/src/traversal/types.rs", "rank": 43, "score": 216069.67974057444 }, { "content": "pub fn type_desc_fixed_size(\n\n defs: &HashMap<String, ModuleDef>,\n\n typ: &fe::TypeDesc,\n\n) -> Result<FixedSize, SemanticError> {\n\n match type_desc(defs, typ)? {\n\n Type::Base(base) => Ok(FixedSize::Base(base)),\n\n Type::Array(array) => Ok(FixedSize::Array(array)),\n\n Type::Tuple(tuple) => Ok(FixedSize::Tuple(tuple)),\n\n Type::String(string) => Ok(FixedSize::String(string)),\n\n Type::Map(_) => Err(SemanticError::TypeError),\n\n }\n\n}\n\n\n", "file_path": "semantics/src/namespace/types.rs", "rank": 44, "score": 216060.61307239905 }, { "content": "pub fn slices(input: Cursor) -> ParseResult<Vec<Spanned<Slice>>> {\n\n separated(slice, op(\",\"), true)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 45, "score": 214087.6307580433 }, { "content": "fn sig_keccak256(name: String, params: Vec<String>, size: usize) -> String {\n\n let signature = format!(\"{}({})\", name, params.join(\",\"));\n\n\n\n let mut keccak = Keccak::v256();\n\n let mut selector = [0u8; 32];\n\n\n\n keccak.update(signature.as_bytes());\n\n keccak.finalize(&mut selector);\n\n\n\n format!(\"0x{}\", hex::encode(&selector[0..size]))\n\n}\n", "file_path": "compiler/src/abi/utils.rs", "rank": 46, "score": 213000.26293176095 }, { "content": "fn sig_keccak256(name: String, params: 
Vec<String>, size: usize) -> String {\n\n let signature = format!(\"{}({})\", name, params.join(\",\"));\n\n\n\n let mut keccak = Keccak::v256();\n\n let mut selector = [0u8; 32];\n\n\n\n keccak.update(signature.as_bytes());\n\n keccak.finalize(&mut selector);\n\n\n\n format!(\"0x{}\", hex::encode(&selector[0..size]))\n\n}\n", "file_path": "semantics/src/namespace/events.rs", "rank": 47, "score": 213000.26293176092 }, { "content": "/// Gather context information for a function definition and check for type\n\n/// errors.\n\npub fn func_def(\n\n contract_scope: Shared<ContractScope>,\n\n context: Shared<Context>,\n\n def: &Spanned<fe::ContractStmt>,\n\n) -> Result<(), SemanticError> {\n\n if let fe::ContractStmt::FuncDef {\n\n qual,\n\n name,\n\n args,\n\n return_type,\n\n body,\n\n } = &def.node\n\n {\n\n let function_scope = BlockScope::from_contract_scope(def.span, Rc::clone(&contract_scope));\n\n\n\n let name = name.node.to_string();\n\n let param_types = args\n\n .iter()\n\n .map(|arg| func_def_arg(Rc::clone(&function_scope), arg))\n\n .collect::<Result<Vec<_>, _>>()?;\n", "file_path": "semantics/src/traversal/functions.rs", "rank": 48, "score": 212374.3593137947 }, { "content": "/// Parse an array dimension list e.g. 
\"[2][3]\"\n\npub fn arr_list(input: Cursor) -> ParseResult<Vec<Spanned<usize>>> {\n\n many0(arr_dim)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 50, "score": 211299.77359109948 }, { "content": "pub fn kwargs(input: Cursor) -> ParseResult<Vec<Spanned<CallArg>>> {\n\n separated(kwarg, op(\",\"), false)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 51, "score": 211299.77359109948 }, { "content": "pub fn args(input: Cursor) -> ParseResult<Vec<Spanned<CallArg>>> {\n\n let kw_result = kwargs(input);\n\n if kw_result.is_ok() {\n\n return kw_result;\n\n }\n\n\n\n let (input, first) = expr(input)?;\n\n let (input, rest) = opt(preceded(op(\",\"), args))(input)?;\n\n\n\n let mut results = vec![Spanned {\n\n node: CallArg::Arg(first.node),\n\n span: first.span,\n\n }];\n\n if let Some(mut rest) = rest {\n\n results.append(&mut rest);\n\n }\n\n\n\n Ok((input, results))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 52, "score": 211299.77359109948 }, { "content": "/// Builds a Yul object from a Fe contract.\n\npub fn contract_def(\n\n context: &Context,\n\n stmt: &Spanned<fe::ModuleStmt>,\n\n) -> Result<yul::Object, CompileError> {\n\n if let (Some(attributes), fe::ModuleStmt::ContractDef { name: _, body }) =\n\n (context.get_contract(stmt), &stmt.node)\n\n {\n\n let mut init = None;\n\n let mut user_functions = vec![];\n\n\n\n let mut runtime = {\n\n let mut runtime = runtime_functions::std();\n\n runtime.append(&mut build_runtime_functions(\n\n attributes.runtime_operations.to_owned(),\n\n ));\n\n runtime\n\n };\n\n\n\n for stmt in body.iter() {\n\n if let (Some(attributes), fe::ContractStmt::FuncDef { name, .. 
}) =\n", "file_path": "compiler/src/yul/mappers/contracts.rs", "rank": 53, "score": 209761.0413061463 }, { "content": "/// Builds a contract constructor.\n\n///\n\n/// Takes an optional init function and its parameter types.\n\npub fn build(init: Option<(yul::Statement, Vec<FixedSize>, Vec<yul::Statement>)>) -> yul::Code {\n\n // statements that return the contract code\n\n let deploy_stmts = statements! {\n\n (let size := datasize(\"runtime\"))\n\n (datacopy(0, (dataoffset(\"runtime\")), size))\n\n (return(0, size))\n\n };\n\n\n\n let block = if let Some((init, params, runtime)) = init {\n\n // build a constructor with an init function\n\n\n\n // decode operations for `__init__` parameters\n\n let decoded_params = abi_operations::decode(\n\n params,\n\n expression! { params_start_mem },\n\n AbiDecodeLocation::Memory,\n\n );\n\n\n\n // Build a constructor that runs a user defined init function. Parameters for\n\n // init functions are appended to the end of the initialization code.\n", "file_path": "compiler/src/yul/constructor.rs", "rank": 54, "score": 209490.7201653505 }, { "content": "/// Builds a Yul function definition from a Fe function definition.\n\npub fn func_def(\n\n context: &Context,\n\n def: &Spanned<fe::ContractStmt>,\n\n) -> Result<yul::Statement, CompileError> {\n\n if let (\n\n Some(attributes),\n\n fe::ContractStmt::FuncDef {\n\n qual: _,\n\n name,\n\n args,\n\n return_type: _,\n\n body,\n\n },\n\n ) = (context.get_function(def).to_owned(), &def.node)\n\n {\n\n let function_name = identifier! 
{(name.node)};\n\n let param_names = args.iter().map(|arg| func_def_arg(arg)).collect::<Vec<_>>();\n\n let function_statements = multiple_func_stmt(context, body)?;\n\n\n\n return if attributes.return_type.is_empty_tuple() {\n", "file_path": "compiler/src/yul/mappers/functions.rs", "rank": 55, "score": 208899.07931496506 }, { "content": "/// Returns an expression that encodes the given values and returns a pointer to\n\n/// the encoding.\n\npub fn encode<T: AbiEncoding>(types: Vec<T>, vals: Vec<yul::Expression>) -> yul::Expression {\n\n let func_name = encode_name(&types);\n\n expression! { [func_name]([vals...]) }\n\n}\n\n\n", "file_path": "compiler/src/yul/abi/operations.rs", "rank": 56, "score": 207011.90599715957 }, { "content": "pub fn many0<'a, O, P>(parser: P) -> impl Fn(Cursor<'a>) -> ParseResult<Vec<O>>\n\nwhere\n\n P: Fn(Cursor<'a>) -> ParseResult<O>,\n\n{\n\n move |input| {\n\n if input.is_empty() {\n\n return Err(ParseError::eof(input));\n\n }\n\n\n\n let mut input = input;\n\n let mut results = vec![];\n\n\n\n while let Ok((next_input, next)) = parser(input) {\n\n input = next_input;\n\n results.push(next);\n\n }\n\n\n\n Ok((input, results))\n\n }\n\n}\n\n\n", "file_path": "parser/src/builders.rs", "rank": 57, "score": 206163.9274355386 }, { "content": "pub fn many1<'a, O, P>(parser: P) -> impl Fn(Cursor<'a>) -> ParseResult<Vec<O>>\n\nwhere\n\n P: Fn(Cursor<'a>) -> ParseResult<O>,\n\n{\n\n move |input| match parser(input) {\n\n Ok((input, first)) => {\n\n let mut input = input;\n\n let mut results = vec![first];\n\n\n\n while let Ok((next_input, next)) = parser(input) {\n\n input = next_input;\n\n results.push(next);\n\n }\n\n\n\n Ok((input, results))\n\n }\n\n Err(err) => Err(err.push(\n\n input,\n\n ErrorKind::StaticStr(\"many1: expected at least one occurrence\"),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "parser/src/builders.rs", "rank": 58, "score": 206163.92743553856 }, { "content": "pub fn multiple_func_stmt(\n\n context: &Context,\n\n statements: 
&[Spanned<fe::FuncStmt>],\n\n) -> Result<Vec<yul::Statement>, CompileError> {\n\n statements\n\n .iter()\n\n .map(|statement| func_stmt(context, statement))\n\n .collect::<Result<Vec<_>, _>>()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/functions.rs", "rank": 59, "score": 205401.30150283 }, { "content": "/// Returns an expression that gives size of the encoded values.\n\npub fn encode_size<T: AbiEncoding>(types: Vec<T>, vals: Vec<yul::Expression>) -> yul::Expression {\n\n let mut static_size = 0;\n\n let mut dyn_size = vec![];\n\n\n\n let typed_vals = types.iter().zip(vals);\n\n\n\n for (typ, val) in typed_vals {\n\n match typ.abi_type() {\n\n AbiType::Uint { .. } => static_size += 32,\n\n AbiType::Array { inner, size } => {\n\n let inner_size = match *inner {\n\n AbiType::Uint {\n\n size: AbiUintSize { padded_size, .. },\n\n } => padded_size,\n\n AbiType::Array { .. } => unimplemented!(),\n\n };\n\n match size {\n\n AbiArraySize::Static { size } => static_size += ceil_32(inner_size * size),\n\n AbiArraySize::Dynamic => {\n\n static_size += 64;\n", "file_path": "compiler/src/yul/abi/operations.rs", "rank": 60, "score": 204908.2741207204 }, { "content": "/// Returns the offset at which each head is located in the static section\n\n/// of an encoding and the total size of the static section.\n\npub fn head_offsets<T: AbiEncoding>(types: &[T]) -> (Vec<usize>, usize) {\n\n let mut offsets = vec![];\n\n let mut curr_offset = 0;\n\n\n\n for typ in types {\n\n offsets.push(curr_offset);\n\n\n\n curr_offset += match typ.abi_type() {\n\n AbiType::Array {\n\n size: AbiArraySize::Dynamic { .. },\n\n ..\n\n } => 32,\n\n AbiType::Array {\n\n size: AbiArraySize::Static { size },\n\n inner,\n\n } => match *inner {\n\n AbiType::Array { .. } => unimplemented!(),\n\n AbiType::Uint {\n\n size: AbiUintSize { padded_size, .. },\n\n } => ceil_32(padded_size * size),\n\n },\n\n AbiType::Uint {\n\n size: AbiUintSize { padded_size, .. 
},\n\n } => padded_size,\n\n };\n\n }\n\n\n\n (offsets, curr_offset)\n\n}\n\n\n", "file_path": "compiler/src/yul/abi/utils.rs", "rank": 61, "score": 201687.8114444387 }, { "content": "pub fn for_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n let (input, for_kw) = name(\"for\")(input)?;\n\n let (input, target_expr) = targets(input)?;\n\n let (input, _) = name(\"in\")(input)?;\n\n let (input, iter) = exprs(input)?;\n\n let (input, _) = op(\":\")(input)?;\n\n let (input, body) = block(input)?;\n\n let (input, or_else) = opt(else_block)(input)?;\n\n\n\n let last_stmt = match &or_else {\n\n Some(or_else_body) => or_else_body.last().unwrap(),\n\n None => body.last().unwrap(),\n\n };\n\n let span = Span::from_pair(for_kw, last_stmt);\n\n let or_else = or_else.unwrap_or_else(Vec::new);\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: FuncStmt::For {\n\n target: target_expr,\n\n iter,\n\n body,\n\n or_else,\n\n },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 62, "score": 195603.42517621352 }, { "content": "pub fn while_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n let (input, while_kw) = name(\"while\")(input)?;\n\n let (input, test) = expr(input)?;\n\n let (input, _) = op(\":\")(input)?;\n\n let (input, body) = block(input)?;\n\n let (input, or_else) = opt(else_block)(input)?;\n\n\n\n let last_stmt = match &or_else {\n\n Some(or_else_body) => or_else_body.last().unwrap(),\n\n None => body.last().unwrap(),\n\n };\n\n let span = Span::from_pair(while_kw, last_stmt);\n\n let or_else = or_else.unwrap_or_else(Vec::new);\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: FuncStmt::While {\n\n test,\n\n body,\n\n or_else,\n\n },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 63, "score": 195603.42517621352 }, { "content": "pub fn if_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n if_stmt_builder(\"if\")(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 64, 
"score": 195603.42517621352 }, { "content": "/// Parse the names to be imported by a \"from\" import statement.\n\npub fn from_import_names(input: Cursor) -> ParseResult<Spanned<FromImportNames>> {\n\n alt((\n\n from_import_names_star,\n\n from_import_names_parens,\n\n from_import_names_list,\n\n ))(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 65, "score": 193070.9827900473 }, { "content": "/// Parse an import name with an optional alias in a \"from\" import statement.\n\npub fn from_import_name(input: Cursor) -> ParseResult<Spanned<FromImportName>> {\n\n let (input, name_tok) = name_token(input)?;\n\n let (input, alias) = opt(preceded(name(\"as\"), name_token))(input)?;\n\n\n\n let span = match alias {\n\n Some(alias_tok) => Span::from_pair(name_tok, alias_tok),\n\n None => name_tok.span,\n\n };\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: FromImportName {\n\n name: name_tok.into(),\n\n alias: alias.map(|t| t.into()),\n\n },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 66, "score": 193070.84131023154 }, { "content": "pub fn augassign_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n let (input, target_expr) = target(input)?;\n\n let (input, aug_tok) = alt((\n\n op(\"+=\"),\n\n op(\"-=\"),\n\n op(\"*=\"),\n\n op(\"/=\"),\n\n op(\"%=\"),\n\n op(\"&=\"),\n\n op(\"|=\"),\n\n op(\"^=\"),\n\n op(\"<<=\"),\n\n op(\">>=\"),\n\n op(\"**=\"),\n\n op(\"//=\"),\n\n ))(input)?;\n\n let (input, value) = expr(input)?;\n\n\n\n let span = Span::from_pair(&target_expr, &value);\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 67, "score": 192931.35092955426 }, { "content": "pub fn break_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n keyword_statement(\"break\", || FuncStmt::Break)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 68, "score": 192931.35092955426 }, { "content": "pub fn small_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n alt((\n\n return_stmt,\n\n 
assert_stmt,\n\n emit_stmt,\n\n pass_stmt,\n\n break_stmt,\n\n continue_stmt,\n\n revert_stmt,\n\n vardecl_stmt,\n\n assign_stmt,\n\n augassign_stmt,\n\n map(exprs, |spanned| Spanned {\n\n node: FuncStmt::Expr {\n\n value: spanned.node,\n\n },\n\n span: spanned.span,\n\n }),\n\n ))(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 69, "score": 192931.35092955426 }, { "content": "pub fn compound_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n alt((if_stmt, while_stmt, for_stmt))(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 70, "score": 192931.35092955426 }, { "content": "/// Parse a contract statement.\n\npub fn contract_stmt(input: Cursor) -> ParseResult<Spanned<ContractStmt>> {\n\n alt((contract_field, event_def, func_def))(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 71, "score": 192931.35092955426 }, { "content": "pub fn emit_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n let (input, emit_kw) = name(\"emit\")(input)?;\n\n let (input, value) = expr(input)?;\n\n\n\n let span = Span::from_pair(emit_kw, &value);\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: FuncStmt::Emit { value },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 72, "score": 192931.35092955426 }, { "content": "pub fn elif_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n if_stmt_builder(\"elif\")(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 73, "score": 192931.35092955426 }, { "content": "/// Parse a module statement, such as a contract definition.\n\npub fn module_stmt(input: Cursor) -> ParseResult<Spanned<ModuleStmt>> {\n\n alt((import_stmt, type_def, contract_def))(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 74, "score": 192931.35092955426 }, { "content": "pub fn assert_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n let (input, assert_kw) = name(\"assert\")(input)?;\n\n let (input, test) = expr(input)?;\n\n let 
(input, msg) = opt(preceded(op(\",\"), expr))(input)?;\n\n\n\n let span = match &msg {\n\n Some(msg_expr) => Span::from_pair(assert_kw, msg_expr),\n\n None => Span::from_pair(assert_kw, &test),\n\n };\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: FuncStmt::Assert { test, msg },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 75, "score": 192931.35092955426 }, { "content": "pub fn revert_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n keyword_statement(\"revert\", || FuncStmt::Revert)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 76, "score": 192931.35092955426 }, { "content": "/// Parse an import statement.\n\npub fn import_stmt(input: Cursor) -> ParseResult<Spanned<ModuleStmt>> {\n\n terminated(alt((simple_import, from_import)), newline_token)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 77, "score": 192931.35092955426 }, { "content": "pub fn assign_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n let (input, targets_vec) = many1(terminated(targets, op(\"=\")))(input)?;\n\n let (input, value) = exprs(input)?;\n\n\n\n let first = targets_vec.first().unwrap();\n\n let span = Span::from_pair(first, &value);\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: FuncStmt::Assign {\n\n targets: targets_vec,\n\n value,\n\n },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 78, "score": 192931.35092955426 }, { "content": "pub fn continue_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n keyword_statement(\"continue\", || FuncStmt::Continue)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 79, "score": 192931.35092955426 }, { "content": "pub fn vardecl_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n let (input, target_expr) = target(input)?;\n\n let (input, _) = op(\":\")(input)?;\n\n let (input, typ) = type_desc(input)?;\n\n let (input, value) = opt(preceded(op(\"=\"), expr))(input)?;\n\n\n\n let span = 
match &value {\n\n Some(exp) => Span::from_pair(&target_expr, exp),\n\n None => Span::from_pair(&target_expr, &typ),\n\n };\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: FuncStmt::VarDecl {\n\n target: target_expr,\n\n typ,\n\n value,\n\n },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 80, "score": 192931.35092955426 }, { "content": "pub fn pass_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n keyword_statement(\"pass\", || FuncStmt::Pass)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 81, "score": 192931.35092955426 }, { "content": "/// Parse a map type e.g. \"map<address, bool\".\n\npub fn map_type(input: Cursor) -> ParseResult<Spanned<TypeDesc>> {\n\n alt((map_type_double, map_type_single))(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 82, "score": 192827.40576180248 }, { "content": "/// Parse a type description e.g. \"u256\" or \"map<address, bool>\".\n\npub fn type_desc(input: Cursor) -> ParseResult<Spanned<TypeDesc>> {\n\n alt((map_type, base_type))(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 83, "score": 192827.33773225325 }, { "content": "/// Parse a base type along with an optional array dimension list.\n\n///\n\n/// Example:\n\n/// int128[2][3]\n\npub fn base_type(input: Cursor) -> ParseResult<Spanned<TypeDesc>> {\n\n let (input, base) = name_token(input)?;\n\n let (input, dims) = arr_list(input)?;\n\n\n\n let mut result = Spanned {\n\n node: TypeDesc::Base { base: base.string },\n\n span: base.into(),\n\n };\n\n for dim in dims {\n\n let span = Span::from_pair(&result, &dim);\n\n\n\n result = Spanned {\n\n node: TypeDesc::Array {\n\n typ: Box::new(result),\n\n dimension: dim.node,\n\n },\n\n span,\n\n };\n\n }\n\n\n\n Ok((input, result))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 84, "score": 192827.14628393468 }, { "content": "fn expr_attribute_msg(attr: &Spanned<&str>) -> Result<ExpressionAttributes, SemanticError> {\n\n match 
attr.node {\n\n \"sender\" => Ok(ExpressionAttributes {\n\n location: Location::Value,\n\n typ: Type::Base(Base::Address),\n\n }),\n\n value => Err(SemanticError::UndefinedValue {\n\n value: value.to_string(),\n\n }),\n\n }\n\n}\n\n\n", "file_path": "semantics/src/traversal/expressions.rs", "rank": 85, "score": 192167.11074980954 }, { "content": "fn func_def_arg(arg: &Spanned<fe::FuncDefArg>) -> yul::Identifier {\n\n let name = arg.node.name.node.to_string();\n\n identifier! {(name)}\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/functions.rs", "rank": 86, "score": 191826.69163198606 }, { "content": "pub fn arg_def(input: Cursor) -> ParseResult<Spanned<FuncDefArg>> {\n\n let (input, name_tok) = name_token(input)?;\n\n let (input, _) = op(\":\")(input)?;\n\n let (input, typ) = type_desc(input)?;\n\n\n\n let span = Span::from_pair(name_tok, &typ);\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: FuncDefArg {\n\n name: name_tok.into(),\n\n typ,\n\n },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 87, "score": 190555.99407181435 }, { "content": "/// Parse a list of names to be imported by a \"from\" import statement.\n\npub fn from_import_names_list(input: Cursor) -> ParseResult<Spanned<FromImportNames>> {\n\n let (input, first_name) = from_import_name(input)?;\n\n let (input, mut other_names) = many0(preceded(op(\",\"), from_import_name))(input)?;\n\n let (input, comma_tok) = opt(op(\",\"))(input)?;\n\n\n\n let mut names = vec![first_name];\n\n names.append(&mut other_names);\n\n\n\n let span = {\n\n let first = names.first().unwrap();\n\n match comma_tok {\n\n Some(tok) => Span::from_pair(first, tok),\n\n None => {\n\n let last = names.last().unwrap();\n\n Span::from_pair(first, last)\n\n }\n\n }\n\n };\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: FromImportNames::List(names),\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 88, "score": 190494.4457416441 }, { "content": "/// Parse a 
parenthesized list of names to be imported by a \"from\" import\n\n/// statement.\n\npub fn from_import_names_parens(input: Cursor) -> ParseResult<Spanned<FromImportNames>> {\n\n let (input, l_paren) = op(\"(\")(input)?;\n\n let (input, names) = from_import_names_list(input)?;\n\n let (input, r_paren) = op(\")\")(input)?;\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: names.node,\n\n span: Span::from_pair(l_paren, r_paren),\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 89, "score": 190494.37616938257 }, { "content": "/// Parse a wildcard token (\"*\") in a \"from\" import statement.\n\npub fn from_import_names_star(input: Cursor) -> ParseResult<Spanned<FromImportNames>> {\n\n let (input, star) = op(\"*\")(input)?;\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: FromImportNames::Star,\n\n span: star.span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 90, "score": 190490.16124987157 }, { "content": "/// Parse a map type ending with a right-shift token.\n\n///\n\n/// Example:\n\n/// map<address, map<u256, bool>>\n\npub fn map_type_double(input: Cursor) -> ParseResult<Spanned<TypeDesc>> {\n\n let (input, map_kw_1) = name(\"map\")(input)?;\n\n let (input, _) = op(\"<\")(input)?;\n\n let (input, from_1) = base_type(input)?;\n\n let (input, _) = op(\",\")(input)?;\n\n\n\n let (input, map_kw_2) = name(\"map\")(input)?;\n\n let (input, _) = op(\"<\")(input)?;\n\n let (input, from_2) = base_type(input)?;\n\n let (input, _) = op(\",\")(input)?;\n\n\n\n let (input, to) = type_desc(input)?;\n\n let (input, r_bracket) = op(\">>\")(input)?;\n\n\n\n let inner_map = Spanned {\n\n node: TypeDesc::Map {\n\n from: Box::new(from_2),\n\n to: Box::new(to),\n\n },\n\n span: Span::new(map_kw_2.span.start, r_bracket.span.end - 1),\n", "file_path": "parser/src/parsers.rs", "rank": 91, "score": 190253.66154490408 }, { "content": "/// Parse a map type ending with a greater-than token.\n\n///\n\n/// Example:\n\n/// map< address, map<u256, 
map<bool, int128>> >\n\npub fn map_type_single(input: Cursor) -> ParseResult<Spanned<TypeDesc>> {\n\n let (input, map_kw) = name(\"map\")(input)?;\n\n let (input, _) = op(\"<\")(input)?;\n\n let (input, from) = base_type(input)?;\n\n let (input, _) = op(\",\")(input)?;\n\n let (input, to) = type_desc(input)?;\n\n let (input, r_bracket) = op(\">\")(input)?;\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: TypeDesc::Map {\n\n from: Box::new(from),\n\n to: Box::new(to),\n\n },\n\n span: Span::from_pair(map_kw, r_bracket),\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 92, "score": 190253.60678587324 }, { "content": "/// Parse a name token.\n\npub fn name_token(input: Cursor) -> ParseResult<&Token> {\n\n token(TokenType::NAME)(input)\n\n}\n\n\n\n/// Parse a name token containing a specific string.\n", "file_path": "parser/src/parsers.rs", "rank": 93, "score": 189907.09527975216 }, { "content": "/// Parse a token of a specific type.\n\npub fn token<'a>(typ: TokenType) -> impl Fn(Cursor<'a>) -> ParseResult<&Token> {\n\n verify(\n\n next,\n\n move |t| t.typ == typ,\n\n move |inp, _| ParseError::str(inp, format!(\"expected {:?} token\", typ)),\n\n )\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 94, "score": 189259.9566849168 }, { "content": "pub fn simple_import_name(input: Cursor) -> ParseResult<Spanned<SimpleImportName>> {\n\n let (input, path) = dotted_name(input)?;\n\n let (input, alias) = opt(preceded(name(\"as\"), name_token))(input)?;\n\n\n\n let first = path.first().unwrap();\n\n let last = path.last().unwrap();\n\n let path_span = Span::from_pair(first, last);\n\n\n\n let span = {\n\n match alias {\n\n Some(alias_tok) => Span::from_pair(path_span, alias_tok),\n\n None => path_span,\n\n }\n\n };\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: SimpleImportName {\n\n path,\n\n alias: alias.map(|t| t.into()),\n\n },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 95, "score": 188005.75237231475 
}, { "content": "fn selector(name: String, params: &[FixedSize]) -> yul::Literal {\n\n let params = params\n\n .iter()\n\n .map(|param| param.abi_name())\n\n .collect::<Vec<String>>();\n\n\n\n literal! {(abi_utils::func_selector(name, params))}\n\n}\n\n\n", "file_path": "compiler/src/yul/runtime/abi_dispatcher.rs", "rank": 96, "score": 187175.14436477592 }, { "content": "fn selection(name: String, params: &[FixedSize]) -> yul::Expression {\n\n let decoded_params = abi_operations::decode(\n\n params.to_owned(),\n\n literal_expression! { 4 },\n\n AbiDecodeLocation::Calldata,\n\n );\n\n\n\n let name = identifier! { (name) };\n\n\n\n expression! { [name]([decoded_params...]) }\n\n}\n\n\n", "file_path": "compiler/src/yul/runtime/abi_dispatcher.rs", "rank": 97, "score": 187175.14436477592 }, { "content": "/// Parse an import statement beginning with the \"from\" keyword.\n\npub fn from_import(input: Cursor) -> ParseResult<Spanned<ModuleStmt>> {\n\n alt((from_import_parent_alt, from_import_sub_alt))(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 98, "score": 185079.22804585533 }, { "content": "fn selection_as_statement(name: String, params: &[FixedSize]) -> yul::Statement {\n\n yul::Statement::Expression(selection(name, params))\n\n}\n\n\n", "file_path": "compiler/src/yul/runtime/abi_dispatcher.rs", "rank": 99, "score": 184542.981240206 } ]
Rust
src/image.rs
AlesTsurko/image_uploader
ae6bea76cbbdbc5347924d2ce2ddffc1682f7789
#[cfg(test)] mod tests; mod native_preview_maker; use native_preview_maker::NativePreviewMaker; use bytes::Bytes; use uuid::Uuid; use std::fs::{File, create_dir_all}; use std::io::Write; use crate::{ ImageUploaderResult, IMAGE_NAME, PREVIEW_NAME, }; use failure::Fail; use image::ImageFormat; #[derive(Clone)] pub struct Image { pub content: Bytes, pub image_type: ImageType, pub storage_path: String, pub id: Uuid, preview_maker: Option<Box<PreviewMaker>>, } impl Image { pub fn new(bytes: &Bytes, image_type: &ImageType, storage_path: &str) -> Self { let mut image = Image { id: Uuid::new_v4(), storage_path: storage_path.to_string(), image_type: image_type.clone(), content: bytes.clone(), preview_maker: None, }; let preview_maker = NativePreviewMaker::new(); image.set_preview_maker(Box::new(preview_maker)); image } fn set_preview_maker(&mut self, preview_maker: Box<PreviewMaker>) { self.preview_maker = Some(preview_maker); } pub fn save(&self) -> ImageUploaderResult<()> { self.save_at_path(&self.get_file_path()) } pub fn generate_preview(&self) -> ImageUploaderResult<()> { if let Some(ref preview_maker) = self.preview_maker { let preview = preview_maker.make_preview_from_image(self)?; preview.save_at_path(&self.get_preview_file_path()) } else { Err(ImageError::ErrorMakingPreview.into()) } } fn save_at_path(&self, path: &str) -> ImageUploaderResult<()> { create_dir_all(&self.get_directory_path())?; let mut file = File::create(path)?; Ok(file.write_all(&self.content)?) 
} pub fn get_directory_path(&self) -> String { format!("{}/{}", self.storage_path, self.id) } pub fn get_file_path(&self) -> String { format!("{}/{}.{}", self.get_directory_path(), IMAGE_NAME, self.image_type.to_string()) } pub fn get_preview_file_path(&self) -> String { format!("{}/{}.{}", self.get_directory_path(), PREVIEW_NAME, self.image_type.to_string()) } pub fn guess_type_for_bytes(bytes: &Bytes) -> ImageUploaderResult<ImageType> { let image_format = image::guess_format(bytes)?; Ok(image_format.into()) } } #[derive(Fail, Debug)] pub enum ImageError { #[fail(display = "Can't make preview for image")] ErrorMakingPreview, } #[derive(Debug, PartialEq, Eq, Clone)] pub enum ImageType { Jpeg, Bmp, Gif, Png, Unknown, } impl From<mime::Mime> for ImageType { fn from(t: mime::Mime) -> Self { match t.subtype() { mime::BMP => ImageType::Bmp, mime::GIF => ImageType::Gif, mime::JPEG => ImageType::Jpeg, mime::PNG => ImageType::Png, _ => ImageType::Unknown, } } } impl From<&mime::Mime> for ImageType { fn from(t: &mime::Mime) -> Self { ImageType::from(t.clone()) } } impl From<ImageFormat> for ImageType { fn from(image_format: ImageFormat) -> Self { match image_format { ImageFormat::BMP => ImageType::Bmp, ImageFormat::GIF => ImageType::Gif, ImageFormat::JPEG => ImageType::Jpeg, ImageFormat::PNG => ImageType::Png, _ => ImageType::Unknown, } } } impl ToString for ImageType { fn to_string(&self) -> String { match self { ImageType::Bmp => "bmp".to_string(), ImageType::Gif => "gif".to_string(), ImageType::Jpeg => "jpg".to_string(), ImageType::Png => "png".to_string(), ImageType::Unknown => String::new(), } } } pub trait PreviewMaker { fn make_preview_from_image(&self, image: &Image) -> ImageUploaderResult<Image>; fn box_clone(&self) -> Box<PreviewMaker>; } impl Clone for Box<PreviewMaker> { fn clone(&self) -> Box<PreviewMaker> { self.box_clone() } }
#[cfg(test)] mod tests; mod native_preview_maker; use native_preview_maker::NativePreviewMaker; use bytes::Bytes; use uuid::Uuid; use std::fs::{File, create_dir_all}; use std::io::Write; use crate::{ ImageUploaderResult, IMAGE_NAME, PREVIEW_NAME, }; use failure::Fail; use image::ImageFormat; #[derive(Clone)] pub struct Image { pub content: Bytes, pub image_type: ImageType, pub storage_path: String, pub id: Uuid, preview_maker: Option<Box<PreviewMaker>>, } impl Image { pub fn new(bytes: &Bytes, image_type: &ImageType, storage_path: &str) -> Self { let mut image = Image { id: Uuid::new_v4(), storage_path: storage_path.to_string(), image_type: image_type.clone(), content: bytes.clone(), preview_maker: None, }; let preview_maker = NativePreviewMaker::new(); image.set_preview_maker(Box::new(preview_maker)); image } fn set_preview_maker(&mut self, preview_maker: Box<PreviewMaker>) { self.preview_maker = Some(preview_maker); } pub fn save(&self) -> ImageUploaderResult<()> { self.save_at_path(&self.get_file_path()) } pub fn generate_preview(&self) -> ImageUploaderResult<()> { if let Some(ref preview_maker) = self.preview_maker { let preview = preview_maker.make_preview_from_image(self)?; preview.save_at_path(&self.get_preview_file_path()) } else { Err(ImageError::ErrorMakingPreview.into()) } } fn save_at_path(&self, path: &str) -> ImageUploaderResult<()> { create_dir_all(&self.get_directory_path())?; let mut file = File::create(path)?; Ok(file.write_all(&self.content)?) 
} pub fn get_directory_path(&self) -> String { format!("{}/{}", self.storage_path, self.id) } pub fn get_file_path(&self) -> String { format!("{}/{}.{}", self.get_directory_path(), IMAGE_NAME, self.image_type.to_string()) } pub fn get_preview_file_path(&self) -> String { format!("{}/{}.{}", self.get_directory_path(), PREVIEW_NAME, self.image_type.to_string()) } pub fn guess_type_for_bytes(bytes: &Bytes) -> ImageUploaderResult<ImageType> { let image_format = image::guess_format(bytes)?; Ok(image_format.into()) } } #[derive(Fail, Debug)] pub enum ImageError { #[fail(display = "Can't make preview for image")] ErrorMakingPreview, } #[derive(Debug, PartialEq, Eq, Clone)] pub enum ImageType { Jpeg, Bmp, Gif, Png, Unknown, } impl From<mime::Mime> for ImageType {
} impl From<&mime::Mime> for ImageType { fn from(t: &mime::Mime) -> Self { ImageType::from(t.clone()) } } impl From<ImageFormat> for ImageType { fn from(image_format: ImageFormat) -> Self { match image_format { ImageFormat::BMP => ImageType::Bmp, ImageFormat::GIF => ImageType::Gif, ImageFormat::JPEG => ImageType::Jpeg, ImageFormat::PNG => ImageType::Png, _ => ImageType::Unknown, } } } impl ToString for ImageType { fn to_string(&self) -> String { match self { ImageType::Bmp => "bmp".to_string(), ImageType::Gif => "gif".to_string(), ImageType::Jpeg => "jpg".to_string(), ImageType::Png => "png".to_string(), ImageType::Unknown => String::new(), } } } pub trait PreviewMaker { fn make_preview_from_image(&self, image: &Image) -> ImageUploaderResult<Image>; fn box_clone(&self) -> Box<PreviewMaker>; } impl Clone for Box<PreviewMaker> { fn clone(&self) -> Box<PreviewMaker> { self.box_clone() } }
fn from(t: mime::Mime) -> Self { match t.subtype() { mime::BMP => ImageType::Bmp, mime::GIF => ImageType::Gif, mime::JPEG => ImageType::Jpeg, mime::PNG => ImageType::Png, _ => ImageType::Unknown, } }
function_block-full_function
[ { "content": "#[test]\n\nfn get_preview_file_path() {\n\n let image = init_image();\n\n assert_eq!(format!(\"storage/{}/{}.jpg\", UUID, PREVIEW_NAME), image.get_preview_file_path());\n\n}\n\n\n", "file_path": "src/image/tests.rs", "rank": 0, "score": 112376.14271530061 }, { "content": "#[test]\n\nfn get_file_path() {\n\n let image = init_image();\n\n assert_eq!(format!(\"storage/{}/{}.jpg\", UUID, IMAGE_NAME), image.get_file_path());\n\n}\n\n\n", "file_path": "src/image/tests.rs", "rank": 1, "score": 99002.90394289292 }, { "content": "#[test]\n\nfn image_type_to_string() {\n\n assert_eq!(\"bmp\".to_string(), ImageType::Bmp.to_string());\n\n assert_eq!(\"gif\".to_string(), ImageType::Gif.to_string());\n\n assert_eq!(\"jpg\".to_string(), ImageType::Jpeg.to_string());\n\n assert_eq!(\"png\".to_string(), ImageType::Png.to_string());\n\n}\n\n\n", "file_path": "src/image/tests.rs", "rank": 2, "score": 85986.55945324435 }, { "content": "#[test]\n\nfn get_directory_path() {\n\n let image = init_image();\n\n assert_eq!(format!(\"storage/{}\", UUID), image.get_directory_path());\n\n}\n\n\n", "file_path": "src/image/tests.rs", "rank": 3, "score": 80407.1312890707 }, { "content": "fn init_image() -> Image {\n\n let content = Bytes::from(&b\"This is a test\"[..]);\n\n let id = Uuid::parse_str(UUID).unwrap();\n\n Image {\n\n content,\n\n image_type: ImageType::Jpeg,\n\n storage_path: \"storage\".to_string(),\n\n id, \n\n preview_maker: None,\n\n }\n\n}\n", "file_path": "src/image/tests.rs", "rank": 4, "score": 68328.18744212478 }, { "content": "#[test]\n\nfn image_type_from_mime() {\n\n assert_eq!(ImageType::Bmp, ImageType::from(mime::IMAGE_BMP));\n\n assert_eq!(ImageType::Gif, ImageType::from(mime::IMAGE_GIF));\n\n assert_eq!(ImageType::Jpeg, ImageType::from(mime::IMAGE_JPEG));\n\n assert_eq!(ImageType::Png, ImageType::from(mime::IMAGE_PNG));\n\n}\n\n\n", "file_path": "src/image/tests.rs", "rank": 5, "score": 67279.26585873376 }, { "content": "#[derive(Fail, Debug)]\n\nenum 
NativePreviewMakerError {\n\n #[fail(display = \"Unknown image type: {}\", _0)]\n\n UnknownImageType(String),\n\n}\n\n\n\nimpl Into<Option<ImageFormat>> for ImageType {\n\n fn into(self) -> Option<ImageFormat> {\n\n match self {\n\n ImageType::Bmp => Some(ImageFormat::BMP),\n\n ImageType::Gif => Some(ImageFormat::GIF),\n\n ImageType::Jpeg => Some(ImageFormat::JPEG),\n\n ImageType::Png => Some(ImageFormat::PNG),\n\n ImageType::Unknown => None,\n\n }\n\n }\n\n}\n", "file_path": "src/image/native_preview_maker.rs", "rank": 7, "score": 64294.00822645351 }, { "content": "#[derive(Debug, Fail)]\n\nenum GetImageError {\n\n #[fail(display = \"image with name {} doesn't exist\", _0)]\n\n CheckingImageName(String),\n\n #[fail(display = \"can't get file name for path {}\", _0)]\n\n GettingFileName(String),\n\n}\n", "file_path": "src/get_image_handler.rs", "rank": 8, "score": 49955.054456213926 }, { "content": "use super::*;\n\n\n\nconst UUID: &'static str = \"936da01f-9abd-4d9d-80c7-02af85c822a8\";\n\n\n\n#[test]\n", "file_path": "src/image/tests.rs", "rank": 9, "score": 38788.98676618735 }, { "content": "\n\nimpl PreviewMaker for NativePreviewMaker {\n\n fn make_preview_from_image(&self, image: &Image) -> ImageUploaderResult<Image> {\n\n let image_format = self.get_image_format_for_image_type(&image.image_type)?;\n\n let preview = self.get_preview_for_image_with_format(image, &image_format)?;\n\n let buf = self.dyn_image_with_format_into_buf(&preview, &image_format)?;\n\n\n\n Ok(Image {\n\n content: buf.into(),\n\n image_type: image.image_type.clone(),\n\n storage_path: image.storage_path.clone(),\n\n id: image.id.clone(), \n\n preview_maker: None,\n\n })\n\n }\n\n\n\n fn box_clone(&self) -> Box<PreviewMaker> {\n\n Box::new(self.clone())\n\n }\n\n}\n", "file_path": "src/image/native_preview_maker.rs", "rank": 10, "score": 35086.80692506852 }, { "content": "\n\nimpl NativePreviewMaker {\n\n pub fn new() -> Self {\n\n NativePreviewMaker {}\n\n }\n\n\n\n fn 
get_image_format_for_image_type(&self, image_type: &ImageType) -> ImageUploaderResult<ImageFormat> {\n\n match image_type.clone().into() {\n\n Some(f) => Ok(f),\n\n None => return Err(NativePreviewMakerError::UnknownImageType(image_type.to_string()).into())\n\n }\n\n }\n\n\n\n fn get_preview_for_image_with_format(&self, image: &Image, image_format: &ImageFormat) -> ImageUploaderResult<DynamicImage> {\n\n let dyn_image = load_from_memory_with_format(&image.content, *image_format)?;\n\n Ok(dyn_image.resize_to_fill(PREVIEW_SIZE.0, PREVIEW_SIZE.1, FilterType::Nearest))\n\n }\n\n\n\n fn dyn_image_with_format_into_buf(&self, dyn_image: &DynamicImage, image_format: &ImageFormat) -> ImageUploaderResult<Vec<u8>> {\n\n let mut buf: Vec<u8> = Vec::new();\n\n let image_output_format: ImageOutputFormat = image_format.clone().into();\n\n dyn_image.write_to(&mut buf, image_output_format)?;\n\n Ok(buf)\n\n }\n\n}\n\n\n\n#[derive(Fail, Debug)]\n", "file_path": "src/image/native_preview_maker.rs", "rank": 11, "score": 35086.283432126016 }, { "content": "use crate::{\n\n ImageUploaderResult, \n\n PREVIEW_SIZE,\n\n};\n\nuse super::{\n\n Image,\n\n ImageType,\n\n PreviewMaker,\n\n};\n\nuse image::{\n\n DynamicImage,\n\n FilterType,\n\n ImageFormat,\n\n ImageOutputFormat,\n\n load_from_memory_with_format,\n\n};\n\nuse failure::Fail;\n\n\n\n#[derive(Clone)]\n\npub struct NativePreviewMaker;\n", "file_path": "src/image/native_preview_maker.rs", "rank": 12, "score": 35084.20670251343 }, { "content": "#[derive(Serialize, Debug)]\n\nstruct SuccessResponse {\n\n ids: Vec<String>,\n\n}\n\n\n\n\n\nimpl Into<HttpResponse> for SuccessResponse {\n\n fn into(self) -> HttpResponse {\n\n let success = match serde_json::to_value(self) {\n\n Ok(s) => s,\n\n Err(e) => return HttpResponse::InternalServerError().body(e.to_string()),\n\n };\n\n HttpResponse::Ok().json(success)\n\n }\n\n}\n", "file_path": "src/upload_handler.rs", "rank": 13, "score": 31727.724853995547 }, { "content": "#[derive(Deserialize, 
Debug)]\n\nstruct ExpectedJsonRequest {\n\n data: Vec<String>,\n\n}\n", "file_path": "src/upload_handler/base64_strategy.rs", "rank": 14, "score": 29211.52471576781 }, { "content": " let mut path = PathBuf::from(storage_path);\n\n path.push(id);\n\n Ok(path)\n\n }\n\n\n\n fn check_image_name(&self, dir_entry: &DirEntry, name: &str) -> ImageUploaderResult<()> {\n\n let image_name = name.to_string();\n\n let file_name = self.get_file_stem_from_path(&dir_entry.path())?;\n\n if image_name == file_name {\n\n Ok(())\n\n } else {\n\n Err(GetImageError::CheckingImageName(image_name).into())\n\n }\n\n }\n\n\n\n fn get_file_stem_from_path(&self, path: &PathBuf) -> ImageUploaderResult<String> {\n\n let file_stem_result = match path.file_stem() {\n\n Some(s) => s,\n\n None => return Err(GetImageError::GettingFileName(format!(\"{:?}\", path)).into())\n\n };\n\n\n\n match file_stem_result.to_str() {\n\n Some(s) => Ok(s.to_string()),\n\n None => Err(GetImageError::GettingFileName(format!(\"{:?}\", path)).into())\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Fail)]\n", "file_path": "src/get_image_handler.rs", "rank": 22, "score": 16852.11838154379 }, { "content": "use actix_web::{\n\n HttpRequest,\n\n error,\n\n dev::Handler,\n\n Result as ActixResult,\n\n fs::NamedFile,\n\n};\n\nuse crate::{\n\n AppState,\n\n ImageUploaderResult, \n\n IMAGE_NAME,\n\n PREVIEW_NAME,\n\n};\n\nuse std::path::PathBuf;\n\nuse std::fs::{read_dir, DirEntry};\n\nuse failure::Fail;\n\n\n\npub struct GetImageHandler;\n\n\n\nimpl Handler<AppState> for GetImageHandler {\n", "file_path": "src/get_image_handler.rs", "rank": 23, "score": 16850.50486297706 }, { "content": " type Result = ActixResult<NamedFile>;\n\n\n\n fn handle(&self, req: &HttpRequest<AppState>) -> Self::Result {\n\n let directory_path = self.get_directory_path(req)?;\n\n let is_preview = req.query().contains_key(\"preview\");\n\n\n\n let item = if is_preview { (1, PREVIEW_NAME) } else { (0, IMAGE_NAME) };\n\n if let Some(Ok(dir_entry)) = 
read_dir(directory_path)?.nth(item.0) {\n\n self.check_image_name(&dir_entry, item.1)?;\n\n return Ok(NamedFile::open(dir_entry.path())?)\n\n }\n\n\n\n Err(error::ErrorNotFound(\"Image not found\"))\n\n }\n\n}\n\n\n\nimpl GetImageHandler {\n\n fn get_directory_path(&self, req: &HttpRequest<AppState>) -> ImageUploaderResult<PathBuf> {\n\n let id: String = req.match_info().query(\"id\")?;\n\n let storage_path = &req.state().storage_path;\n", "file_path": "src/get_image_handler.rs", "rank": 24, "score": 16849.899380816045 }, { "content": "[![Build Status](https://travis-ci.org/AlesTsurko/image_uploader.svg?branch=master)](https://travis-ci.org/AlesTsurko/image_uploader)\n\n\n\n# image_uploader\n\nA demo of an image uploader server written in Rust with [actix-web](https://github.com/actix/actix-web).\n\n\n\nSupports:\n\n- JPEG\n\n- BMP\n\n- GIF\n\n- PNG \n\n\n\nThe next methods are allowed:\n\n- send binary data directly (single file upload only);\n\n- using multipart/form-data;\n\n- using JSON request with base64 encoded string;\n\n- from destination URL.\n\n\n\n## Usage\n\n\n\nUsing docker (will run on port `8000`):\n\n```\n\n$ docker-compose up\n\n```\n\n\n\nOr install and:\n\n```\n\n$ image_uploader --help\n\nImage Uploader 0.1.0\n\nAles Tsurko\n\nAn image uploader server demo.\n\n\n\nUSAGE:\n\n image_uploader [OPTIONS] --bind_to <ADDRESS>\n\n\n\nFLAGS:\n\n -h, --help Prints help information\n\n -V, --version Prints version information\n\n\n\nOPTIONS:\n\n -b, --bind_to <ADDRESS> 127.0.0.1:8000 for ex.\n\n -s, --storage_path <PATH> Specifies the path where to store the data. If not specified the default one\n\n (./storage) will be used.\n\n```\n\n\n\n## API\n\n\n\nYou should properly set the \"Content-Type\" field of your request's header. 
The possible values are:\n\n\n\n- `image/format` for direct uploads;\n\n- `application/json` for json requests;\n\n- `multipart/form-data` for form-data.\n\n\n\nFor base64 strategy JSON request body model is:\n\n\n\n| Name | Type | Example |\n\n| ---- | ---- | ------- |\n\n| `data` | `Array<String>` | |\n\n\n\n### `PUT /upload`\n\n\n\n#### Response\n\n\n\n| Name | Type | Example |\n\n| ---- | ---- | ------- |\n\n| `ids` | `Array<String>` | `[\"936da01f-9abd-4d9d-80c7-02af85c822a8\"]` |\n\n\n\n### `PUT /upload?url=:url`\n\n\n\nUploads an image from the given URL.\n\n\n\n#### Response\n\n\n\nThe same as for `PUT /upload`\n\n\n\n### `GET /:id`\n\n\n\nResponses with an image.\n\n\n\n### `GET /:id?preview`\n\n\n\nResponses with an image preview.\n", "file_path": "README.md", "rank": 25, "score": 11096.522826682203 }, { "content": "# Contributor Covenant Code of Conduct\n\n\n\n## Our Pledge\n\n\n\nIn the interest of fostering an open and welcoming environment, we as\n\ncontributors and maintainers pledge to making participation in our project and\n\nour community a harassment-free experience for everyone, regardless of age, body\n\nsize, disability, ethnicity, sex characteristics, gender identity and expression,\n\nlevel of experience, education, socio-economic status, nationality, personal\n\nappearance, race, religion, or sexual identity and orientation.\n\n\n\n## Our Standards\n\n\n\nExamples of behavior that contributes to creating a positive environment\n\ninclude:\n\n\n\n* Using welcoming and inclusive language\n\n* Being respectful of differing viewpoints and experiences\n\n* Gracefully accepting constructive criticism\n\n* Focusing on what is best for the community\n\n* Showing empathy towards other community members\n\n\n\nExamples of unacceptable behavior by participants include:\n\n\n\n* The use of sexualized language or imagery and unwelcome sexual attention or\n\n advances\n\n* Trolling, insulting/derogatory comments, and personal or political attacks\n\n* 
Public or private harassment\n\n* Publishing others' private information, such as a physical or electronic\n\n address, without explicit permission\n\n* Other conduct which could reasonably be considered inappropriate in a\n\n professional setting\n\n\n\n## Our Responsibilities\n\n\n\nProject maintainers are responsible for clarifying the standards of acceptable\n\nbehavior and are expected to take appropriate and fair corrective action in\n\nresponse to any instances of unacceptable behavior.\n\n\n\nProject maintainers have the right and responsibility to remove, edit, or\n\nreject comments, commits, code, wiki edits, issues, and other contributions\n\nthat are not aligned to this Code of Conduct, or to ban temporarily or\n\npermanently any contributor for other behaviors that they deem inappropriate,\n\nthreatening, offensive, or harmful.\n\n\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 26, "score": 10719.377744275931 }, { "content": "## Scope\n\n\n\nThis Code of Conduct applies both within project spaces and in public spaces\n\nwhen an individual is representing the project or its community. Examples of\n\nrepresenting a project or community include using an official project e-mail\n\naddress, posting via an official social media account, or acting as an appointed\n\nrepresentative at an online or offline event. Representation of a project may be\n\nfurther defined and clarified by project maintainers.\n\n\n\n## Enforcement\n\n\n\nInstances of abusive, harassing, or otherwise unacceptable behavior may be\n\nreported by contacting the project team at [email protected]. All\n\ncomplaints will be reviewed and investigated and will result in a response that\n\nis deemed necessary and appropriate to the circumstances. 
The project team is\n\nobligated to maintain confidentiality with regard to the reporter of an incident.\n\nFurther details of specific enforcement policies may be posted separately.\n\n\n\nProject maintainers who do not follow or enforce the Code of Conduct in good\n\nfaith may face temporary or permanent repercussions as determined by other\n\nmembers of the project's leadership.\n\n\n\n## Attribution\n\n\n\nThis Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,\n\navailable at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html\n\n\n\n[homepage]: https://www.contributor-covenant.org\n\n\n\nFor answers to common questions about this code of conduct, see\n\nhttps://www.contributor-covenant.org/faq\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 27, "score": 10718.309646124726 }, { "content": "mod image;\n\nmod upload_handler;\n\nmod get_image_handler;\n\nmod app_state;\n\npub use self::image::{\n\n ImageType,\n\n Image,\n\n};\n\npub use upload_handler::UploadHandler;\n\npub use app_state::AppState;\n\npub use get_image_handler::GetImageHandler;\n\n\n\npub(crate) const IMAGE_NAME: &str = \"00\";\n\npub(crate) const PREVIEW_NAME: &str = \"01\";\n\npub(crate) const PREVIEW_SIZE: (u32, u32) = (100, 100);\n\npub(crate) const MAX_JSON_PAYLOAD_SIZE: usize = 10485760; // 10 MB\n\n\n\npub type ImageUploaderResult<T> = Result<T, failure::Error>;\n", "file_path": "src/lib.rs", "rank": 28, "score": 15.709310341496932 }, { "content": " .and_then(move |body| Ok(MultipartStrategy::save_image_and_preview(&body, &image_type, &state.storage_path)?))\n\n .map_err(error::ErrorInternalServerError);\n\n\n\n Box::new(future_id)\n\n }\n\n\n\n fn check_content_type(field: &multipart::Field<Payload>) -> ActixResult<ImageType> {\n\n let image_type: ImageType = field.content_type().into();\n\n match image_type {\n\n ImageType::Unknown => Err(error::ErrorBadRequest(\"Unknown file format\")),\n\n _ => Ok(image_type)\n\n }\n\n }\n\n\n\n fn 
save_image_and_preview(bytes: &Bytes, image_type: &ImageType, storage_path: &str) -> ActixResult<String> {\n\n let image = Image::new(bytes, image_type, storage_path);\n\n image.save()?;\n\n image.generate_preview()?;\n\n Ok(image.id.to_string())\n\n }\n\n\n\n}\n", "file_path": "src/upload_handler/multipart_strategy.rs", "rank": 29, "score": 15.257380121947744 }, { "content": " let mut ids: Vec<String> = Vec::new();\n\n\n\n for encoded in request.data.iter() {\n\n let image = Base64Strategy::base64_into_image(&encoded, &state)?;\n\n image.save()?;\n\n image.generate_preview()?;\n\n ids.push(image.id.to_string());\n\n }\n\n\n\n Ok(SuccessResponse { ids }.into())\n\n }\n\n\n\n fn base64_into_image(data: &str, state: &AppState) -> ActixResult<Image> {\n\n let bytes: Bytes = base64::decode(data)\n\n .map_err(error::ErrorInternalServerError)?\n\n .into();\n\n let image_type = Base64Strategy::check_image_type(&bytes)?;\n\n Ok(Image::new(&bytes, &image_type, &state.storage_path))\n\n }\n\n\n", "file_path": "src/upload_handler/base64_strategy.rs", "rank": 30, "score": 13.48591350454697 }, { "content": "#[derive(Clone, Debug)]\n\npub struct AppState {\n\n pub storage_path: String,\n\n pub bind_to: String,\n\n}\n", "file_path": "src/app_state.rs", "rank": 31, "score": 12.004208055751505 }, { "content": "\n\n let image = Image::new(body, &image_type, &app_state.storage_path);\n\n image.save()?;\n\n image.generate_preview()?;\n\n\n\n Ok(SuccessResponse { ids: vec![image.id.to_string()] }.into())\n\n }\n\n\n\n fn get_image_type_from_mime_type(mime_type: &Option<mime::Mime>) -> ActixResult<ImageType> {\n\n match mime_type {\n\n Some(mime_type) => Ok(mime_type.into()),\n\n None => Err(error::ErrorBadRequest(\"mime type isn't specified\"))\n\n }\n\n }\n\n\n\n fn check_image_type(image_type: &ImageType) -> ActixResult<()> {\n\n if *image_type == ImageType::Unknown {\n\n return Err(error::ErrorBadRequest(\"unsupported image format\"));\n\n }\n\n Ok(())\n\n }\n\n\n\n}\n", 
"file_path": "src/upload_handler/direct_strategy.rs", "rank": 32, "score": 9.64535757036097 }, { "content": " SuccessResponse,\n\n};\n\nuse bytes::Bytes;\n\n\n\npub struct MultipartStrategy;\n\n\n\nimpl Strategy for MultipartStrategy {\n\n fn handle_request(&self, req: &HttpRequest<AppState>) -> HandlerResult {\n\n let app_state = req.state().clone();\n\n\n\n req.multipart()\n\n .map_err(error::ErrorInternalServerError)\n\n .map(move |item| MultipartStrategy::process_item_with_state(item, app_state.clone()))\n\n .flatten()\n\n .collect()\n\n .map(|ids | SuccessResponse { ids }.into())\n\n .responder()\n\n }\n\n}\n\n\n", "file_path": "src/upload_handler/multipart_strategy.rs", "rank": 33, "score": 9.536940835631915 }, { "content": " fn check_image_type(bytes: &Bytes) -> ActixResult<ImageType> {\n\n let image_type = Image::guess_type_for_bytes(&bytes)?;\n\n if image_type == ImageType::Unknown {\n\n return Err(error::ErrorBadRequest(\"Unknown file format\"));\n\n }\n\n Ok(image_type)\n\n }\n\n\n\n}\n\n\n\n#[derive(Deserialize, Debug)]\n", "file_path": "src/upload_handler/base64_strategy.rs", "rank": 34, "score": 9.206465933717237 }, { "content": "use actix_web::{\n\n AsyncResponder,\n\n HttpMessage,\n\n HttpRequest,\n\n HttpResponse,\n\n error,\n\n error::Result as ActixResult,\n\n};\n\nuse futures::{Future, Stream};\n\nuse crate::{\n\n ImageType, \n\n Image,\n\n AppState,\n\n};\n\nuse super::{\n\n HandlerResult, \n\n Strategy,\n\n SuccessResponse,\n\n};\n\nuse bytes::Bytes;\n", "file_path": "src/upload_handler/direct_strategy.rs", "rank": 35, "score": 8.37852528522895 }, { "content": "use bytes::Bytes;\n\nuse base64;\n\nuse serde_derive::Deserialize;\n\n\n\npub struct Base64Strategy;\n\n\n\nimpl Strategy for Base64Strategy {\n\n fn handle_request(&self, req: &HttpRequest<AppState>) -> HandlerResult {\n\n let app_state = req.state().clone();\n\n\n\n req.json()\n\n .limit(MAX_JSON_PAYLOAD_SIZE)\n\n .from_err()\n\n .and_then(|json_req: ExpectedJsonRequest| 
Base64Strategy::process_json_request_with_state(json_req, app_state))\n\n .responder()\n\n }\n\n}\n\n\n\nimpl Base64Strategy {\n\n fn process_json_request_with_state(request: ExpectedJsonRequest, state: AppState) -> ActixResult<HttpResponse> {\n", "file_path": "src/upload_handler/base64_strategy.rs", "rank": 36, "score": 7.602081300183479 }, { "content": "\n\npub struct DirectStrategy;\n\n\n\nimpl Strategy for DirectStrategy {\n\n fn handle_request(&self, req: &HttpRequest<AppState>) -> HandlerResult {\n\n let mime_type = req.mime_type();\n\n let app_state = req.state().clone();\n\n\n\n req.payload()\n\n .concat2()\n\n .from_err()\n\n .and_then(|body| DirectStrategy::respond_for_body_with_mime_type(app_state, &body, &mime_type?))\n\n .responder()\n\n }\n\n}\n\n\n\nimpl DirectStrategy {\n\n pub fn respond_for_body_with_mime_type(app_state: AppState, body: &Bytes, mime_type: &Option<mime::Mime>) -> ActixResult<HttpResponse> {\n\n let image_type = DirectStrategy::get_image_type_from_mime_type(&mime_type)?;\n\n DirectStrategy::check_image_type(&image_type)?;\n", "file_path": "src/upload_handler/direct_strategy.rs", "rank": 37, "score": 7.010893113028163 }, { "content": "mod direct_strategy;\n\nmod multipart_strategy;\n\nmod url_strategy;\n\nmod base64_strategy;\n\nuse actix_web::{\n\n HttpRequest,\n\n HttpResponse,\n\n HttpMessage,\n\n error,\n\n dev::Handler,\n\n error::Result as ActixResult,\n\n AsyncResponder,\n\n FutureResponse,\n\n};\n\nuse futures::future::{result};\n\nuse direct_strategy::DirectStrategy;\n\nuse multipart_strategy::MultipartStrategy;\n\nuse url_strategy::UrlStrategy;\n\nuse base64_strategy::Base64Strategy;\n\nuse crate::AppState;\n\nuse serde_derive::Serialize;\n\n\n", "file_path": "src/upload_handler.rs", "rank": 38, "score": 6.51840088704297 }, { "content": "use actix_web::{\n\n AsyncResponder,\n\n HttpMessage,\n\n HttpRequest,\n\n HttpResponse,\n\n error,\n\n error::Result as ActixResult,\n\n};\n\nuse futures::Future;\n\nuse crate::{\n\n 
ImageType, \n\n Image,\n\n AppState,\n\n MAX_JSON_PAYLOAD_SIZE,\n\n};\n\nuse super::{\n\n HandlerResult, \n\n Strategy,\n\n SuccessResponse,\n\n};\n", "file_path": "src/upload_handler/base64_strategy.rs", "rank": 39, "score": 6.060988680602587 }, { "content": "use actix_web::{\n\n AsyncResponder,\n\n HttpMessage,\n\n HttpRequest,\n\n error,\n\n error::Result as ActixResult,\n\n multipart,\n\n error::Error,\n\n dev::Payload,\n\n FutureResponse,\n\n};\n\nuse futures::{future::err, Future, Stream};\n\nuse crate::{\n\n ImageType, \n\n Image,\n\n AppState,\n\n};\n\nuse super::{\n\n HandlerResult, \n\n Strategy,\n", "file_path": "src/upload_handler/multipart_strategy.rs", "rank": 40, "score": 5.971017794861023 }, { "content": "impl MultipartStrategy {\n\n fn process_item_with_state(item: multipart::MultipartItem<Payload>, state: AppState) -> Box<Stream<Item = String, Error = Error>> {\n\n match item {\n\n multipart::MultipartItem::Field(field) => Box::new(MultipartStrategy::process_field_with_state(field, state).into_stream()),\n\n multipart::MultipartItem::Nested(nested) => Box::new(\n\n nested.map_err(error::ErrorInternalServerError)\n\n .map(move |nested_item| MultipartStrategy::process_item_with_state(nested_item, state.clone()))\n\n .flatten()\n\n )\n\n }\n\n }\n\n\n\n fn process_field_with_state(field: multipart::Field<Payload>, state: AppState) -> FutureResponse<String> {\n\n let image_type = match MultipartStrategy::check_content_type(&field) {\n\n Ok(t) => t,\n\n Err(e) => return Box::new(err(e))\n\n };\n\n\n\n let future_id = field.concat2()\n\n .from_err::<error::Error>()\n", "file_path": "src/upload_handler/multipart_strategy.rs", "rank": 41, "score": 5.8616152425542385 }, { "content": "use actix_web::{\n\n AsyncResponder,\n\n HttpMessage,\n\n HttpRequest,\n\n client,\n\n client::ClientResponse,\n\n error,\n\n error::Result as ActixResult,\n\n http::StatusCode,\n\n};\n\nuse futures::{future, Future};\n\nuse crate::AppState;\n\nuse super::{\n\n HandlerResult, 
\n\n Strategy,\n\n direct_strategy::DirectStrategy,\n\n};\n\n\n\npub struct UrlStrategy;\n\n\n", "file_path": "src/upload_handler/url_strategy.rs", "rank": 42, "score": 5.569578966127657 }, { "content": "impl Strategy for UrlStrategy {\n\n fn handle_request(&self, req: &HttpRequest<AppState>) -> HandlerResult {\n\n let state = req.state().clone();\n\n if let Some(url) = req.query().get(\"url\") {\n\n UrlStrategy::perform_download_and_respond(&url, state)\n\n } else {\n\n Box::new(future::err(error::ErrorBadRequest(\"Unknown file format\")))\n\n }\n\n }\n\n}\n\n\n\nimpl UrlStrategy {\n\n fn perform_download_and_respond(url: &str, state: AppState) -> HandlerResult {\n\n let request = match client::get(url).finish() {\n\n Ok(r) => r,\n\n Err(e) => return Box::new(future::err(error::ErrorInternalServerError(format!(\"Can't construct request {}\", e))))\n\n };\n\n\n\n request.send()\n\n .from_err()\n", "file_path": "src/upload_handler/url_strategy.rs", "rank": 43, "score": 4.9145180028662105 }, { "content": " } else {\n\n self.choose_content_based_strategy(req)\n\n }\n\n }\n\n\n\n fn choose_content_based_strategy(&self, req: &HttpRequest<AppState>) -> ActixResult<Box<Strategy>> {\n\n let mime = match req.mime_type()? 
{\n\n Some(t) => t,\n\n None => return Err(error::ErrorBadRequest(\"MIME not specified\"))\n\n };\n\n\n\n match (mime.type_(), mime.subtype()) {\n\n (mime::APPLICATION, mime::JSON) => Ok(Box::new(Base64Strategy {})),\n\n (mime::MULTIPART, mime::FORM_DATA) => Ok(Box::new(MultipartStrategy {})),\n\n (mime::IMAGE, _) => Ok(Box::new(DirectStrategy {})),\n\n _ => Err(error::ErrorBadRequest(\"Unsupported MIME type\")),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/upload_handler.rs", "rank": 44, "score": 3.974275099056181 }, { "content": " .and_then(|response| UrlStrategy::process_client_response_with_state(response, state))\n\n .responder()\n\n }\n\n\n\n fn process_client_response_with_state(response: ClientResponse, state: AppState) -> HandlerResult {\n\n if let Err(e) = UrlStrategy::check_status_of_response(&response) {\n\n return Box::new(future::err(e))\n\n }\n\n\n\n let mime_type = response.mime_type();\n\n\n\n response.body()\n\n .from_err()\n\n .and_then(|bytes| DirectStrategy::respond_for_body_with_mime_type(state, &bytes, &mime_type?))\n\n .responder()\n\n }\n\n\n\n fn check_status_of_response(response: &ClientResponse) -> ActixResult<()> {\n\n let status = response.status();\n\n if status != StatusCode::OK {\n\n let err_message = format!(\"Can't get the image: server responses with status code {}\", status);\n\n return Err(error::ErrorInternalServerError(err_message));\n\n }\n\n Ok(())\n\n }\n\n\n\n}\n", "file_path": "src/upload_handler/url_strategy.rs", "rank": 45, "score": 2.397526338853483 } ]
Rust
src/graphics/render_system.rs
BrassLion/rust-pbr
1b406984d27d7477eb4319ac3acbf2916d7ca755
use super::*; use specs::prelude::*; pub struct RenderSystem; pub struct RenderSystemData { depth_texture: Texture, } impl<'a> System<'a> for RenderSystem { type SystemData = ( WriteExpect<'a, RenderState>, ReadExpect<'a, Camera>, ReadExpect<'a, RenderSystemData>, ReadStorage<'a, Light>, ReadStorage<'a, Pose>, ReadStorage<'a, Renderable>, ); fn setup(&mut self, world: &mut World) { Self::SystemData::setup(world); let render_system_data; { let render_state: WriteExpect<RenderState> = world.system_data(); let depth_texture = Texture::new_framebuffer_texture( &render_state.device, render_state.swap_chain_desc.width, render_state.swap_chain_desc.height, wgpu::TextureFormat::Depth32Float, ); render_system_data = RenderSystemData { depth_texture }; } world.insert(render_system_data); } fn run(&mut self, data: Self::SystemData) { let (mut render_state, camera, render_system_data, light, pose, renderable) = data; let frame = render_state .swap_chain .get_next_texture() .expect("Timeout getting texture"); let mut encoder = render_state .device .create_command_encoder(&wgpu::CommandEncoderDescriptor { label: Some("Render Encoder"), }); encoder.begin_render_pass(&wgpu::RenderPassDescriptor { color_attachments: &[wgpu::RenderPassColorAttachmentDescriptor { attachment: &frame.view, resolve_target: None, load_op: wgpu::LoadOp::Clear, store_op: wgpu::StoreOp::Store, clear_color: wgpu::Color::BLACK, }], depth_stencil_attachment: Some(wgpu::RenderPassDepthStencilAttachmentDescriptor { attachment: &render_system_data.depth_texture.view, depth_load_op: wgpu::LoadOp::Clear, depth_store_op: wgpu::StoreOp::Store, clear_depth: 1.0, stencil_load_op: wgpu::LoadOp::Clear, stencil_store_op: wgpu::StoreOp::Store, clear_stencil: 0, }), }); let mut light_positions = Vec::new(); for (pose, _) in (&pose, &light).join() { light_positions.push(pose.model_matrix.isometry.translation.vector); } let lighting_data = LightingBindGroup { position: light_positions[0], _padding: 0, }; for (pose, 
renderable) in (&pose, &renderable).join() { let transform_data = TransformBindGroup { model_matrix: pose.model_matrix.to_homogeneous(), view_matrix: camera.view_matrix.to_homogeneous(), proj_matrix: camera.proj_matrix.to_homogeneous(), camera_world_position: camera.view_matrix.inverse().translation.vector, }; let render_pass_desc = wgpu::RenderPassDescriptor { color_attachments: &[wgpu::RenderPassColorAttachmentDescriptor { attachment: &frame.view, resolve_target: None, load_op: wgpu::LoadOp::Load, store_op: wgpu::StoreOp::Store, clear_color: wgpu::Color::BLACK, }], depth_stencil_attachment: Some(wgpu::RenderPassDepthStencilAttachmentDescriptor { attachment: &render_system_data.depth_texture.view, depth_load_op: wgpu::LoadOp::Load, depth_store_op: wgpu::StoreOp::Store, clear_depth: 1.0, stencil_load_op: wgpu::LoadOp::Clear, stencil_store_op: wgpu::StoreOp::Store, clear_stencil: 0, }), }; renderable.render( &render_state, &render_pass_desc, &mut encoder, &transform_data, &lighting_data, ); } let command_buffer = encoder.finish(); render_state.queue.submit(&[command_buffer]); } }
use super::*; use specs::prelude::*; pub struct RenderSystem; pub struct RenderSystemData { depth_texture: Texture, } impl<'a> System<'a> for RenderSystem { type SystemData = ( WriteExpect<'a, RenderState>, ReadExpect<'a, Camera>, ReadExpect<'a, RenderSystemData>, ReadStorage<'a, Light>, ReadStorage<'a, Pose>, ReadStorage<'a, Renderable>, ); fn setup(&mut self, world: &mut World) { Self::SystemData::setup(world); let render_system_data; { let render_state: WriteExpect<RenderState> = world.system_data(); let depth_texture = Texture::new_framebuffer_texture( &render_state.device, render_state.swap_chain_desc.width, render_state.swap_chain_desc.height, wgpu::TextureFormat::Depth32Float, ); render_system_data = RenderSystemData { depth_texture }; } world.insert(render_system_data); }
}
fn run(&mut self, data: Self::SystemData) { let (mut render_state, camera, render_system_data, light, pose, renderable) = data; let frame = render_state .swap_chain .get_next_texture() .expect("Timeout getting texture"); let mut encoder = render_state .device .create_command_encoder(&wgpu::CommandEncoderDescriptor { label: Some("Render Encoder"), }); encoder.begin_render_pass(&wgpu::RenderPassDescriptor { color_attachments: &[wgpu::RenderPassColorAttachmentDescriptor { attachment: &frame.view, resolve_target: None, load_op: wgpu::LoadOp::Clear, store_op: wgpu::StoreOp::Store, clear_color: wgpu::Color::BLACK, }], depth_stencil_attachment: Some(wgpu::RenderPassDepthStencilAttachmentDescriptor { attachment: &render_system_data.depth_texture.view, depth_load_op: wgpu::LoadOp::Clear, depth_store_op: wgpu::StoreOp::Store, clear_depth: 1.0, stencil_load_op: wgpu::LoadOp::Clear, stencil_store_op: wgpu::StoreOp::Store, clear_stencil: 0, }), }); let mut light_positions = Vec::new(); for (pose, _) in (&pose, &light).join() { light_positions.push(pose.model_matrix.isometry.translation.vector); } let lighting_data = LightingBindGroup { position: light_positions[0], _padding: 0, }; for (pose, renderable) in (&pose, &renderable).join() { let transform_data = TransformBindGroup { model_matrix: pose.model_matrix.to_homogeneous(), view_matrix: camera.view_matrix.to_homogeneous(), proj_matrix: camera.proj_matrix.to_homogeneous(), camera_world_position: camera.view_matrix.inverse().translation.vector, }; let render_pass_desc = wgpu::RenderPassDescriptor { color_attachments: &[wgpu::RenderPassColorAttachmentDescriptor { attachment: &frame.view, resolve_target: None, load_op: wgpu::LoadOp::Load, store_op: wgpu::StoreOp::Store, clear_color: wgpu::Color::BLACK, }], depth_stencil_attachment: Some(wgpu::RenderPassDepthStencilAttachmentDescriptor { attachment: &render_system_data.depth_texture.view, depth_load_op: wgpu::LoadOp::Load, depth_store_op: wgpu::StoreOp::Store, clear_depth: 1.0, 
stencil_load_op: wgpu::LoadOp::Clear, stencil_store_op: wgpu::StoreOp::Store, clear_stencil: 0, }), }; renderable.render( &render_state, &render_pass_desc, &mut encoder, &transform_data, &lighting_data, ); } let command_buffer = encoder.finish(); render_state.queue.submit(&[command_buffer]); }
function_block-full_function
[ { "content": "pub fn build_render_pipeline(\n\n device: &wgpu::Device,\n\n vertex_shader_src: &str,\n\n fragment_shader_src: &str,\n\n bind_group_layouts: &[&wgpu::BindGroupLayout],\n\n vertex_state_desc: wgpu::VertexStateDescriptor,\n\n colour_states: &[wgpu::ColorStateDescriptor],\n\n depth_state: Option<wgpu::DepthStencilStateDescriptor>,\n\n) -> wgpu::RenderPipeline {\n\n // Init shaders.\n\n let mut compiler = shaderc::Compiler::new().unwrap();\n\n let options = shaderc::CompileOptions::new().unwrap();\n\n\n\n let vs_spirv = compiler\n\n .compile_into_spirv(\n\n vertex_shader_src,\n\n shaderc::ShaderKind::Vertex,\n\n \"vertex\",\n\n \"main\",\n\n Some(&options),\n", "file_path": "src/graphics/material_base.rs", "rank": 0, "score": 73708.53854298955 }, { "content": "pub fn run<R: RenderLoopEvent>() {\n\n // Init window.\n\n let event_loop = EventLoop::new();\n\n let window = WindowBuilder::new()\n\n .with_title(\"Rust PBR Example\")\n\n .with_inner_size(winit::dpi::PhysicalSize::new(1920, 1080))\n\n .build(&event_loop)\n\n .unwrap();\n\n\n\n let mut render_system = R::init(&window);\n\n\n\n // Run event loop.\n\n event_loop.run(move |event, _, control_flow| {\n\n *control_flow = ControlFlow::Poll;\n\n match event {\n\n Event::RedrawRequested(_) => {\n\n render_system.render();\n\n }\n\n Event::MainEventsCleared => window.request_redraw(),\n\n Event::WindowEvent {\n", "file_path": "src/graphics/render_loop.rs", "rank": 1, "score": 71727.19206900426 }, { "content": "pub fn create_texture_bind_group(\n\n device: &wgpu::Device,\n\n visibility: wgpu::ShaderStage,\n\n textures: &[&Texture],\n\n) -> (wgpu::BindGroupLayout, wgpu::BindGroup) {\n\n let bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {\n\n label: None,\n\n bindings: textures\n\n .iter()\n\n .enumerate()\n\n .flat_map(|(i, tex)| {\n\n std::iter::once(wgpu::BindGroupLayoutEntry {\n\n binding: (2 * i) as u32,\n\n visibility,\n\n ty: wgpu::BindingType::SampledTexture 
{\n\n dimension: tex.dimension,\n\n component_type: wgpu::TextureComponentType::Float,\n\n multisampled: false,\n\n },\n\n })\n", "file_path": "src/graphics/material_base.rs", "rank": 2, "score": 71453.07828656316 }, { "content": "struct ExampleRenderLoop {\n\n world: World,\n\n dispatcher: Dispatcher<'static, 'static>,\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 3, "score": 55932.83266866213 }, { "content": "pub fn update_uniform_buffer<T>(\n\n device: &wgpu::Device,\n\n uniform_buffer: &wgpu::Buffer,\n\n encoder: &mut wgpu::CommandEncoder,\n\n uniform_data: &T,\n\n) {\n\n // TODO: Replace this with a function.\n\n let uniform_data_bytes = unsafe {\n\n let len = std::mem::size_of_val(uniform_data);\n\n let ptr = (uniform_data as *const _) as *const u8;\n\n std::slice::from_raw_parts(ptr, len)\n\n };\n\n\n\n let staging_buffer =\n\n device.create_buffer_with_data(uniform_data_bytes, wgpu::BufferUsage::COPY_SRC);\n\n\n\n encoder.copy_buffer_to_buffer(\n\n &staging_buffer,\n\n 0,\n\n &uniform_buffer,\n\n 0,\n\n std::mem::size_of::<T>() as wgpu::BufferAddress,\n\n );\n\n}\n\n\n", "file_path": "src/graphics/material_base.rs", "rank": 4, "score": 49630.82107036935 }, { "content": "pub fn create_uniform_buffer<T>(\n\n device: &wgpu::Device,\n\n visibility: wgpu::ShaderStage,\n\n) -> (wgpu::Buffer, wgpu::BindGroup, wgpu::BindGroupLayout) {\n\n let buffer = device.create_buffer(&wgpu::BufferDescriptor {\n\n label: None,\n\n size: std::mem::size_of::<T>() as u64,\n\n usage: wgpu::BufferUsage::UNIFORM | wgpu::BufferUsage::COPY_DST,\n\n });\n\n\n\n let bind_group_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {\n\n bindings: &[wgpu::BindGroupLayoutEntry {\n\n binding: 0,\n\n visibility,\n\n ty: wgpu::BindingType::UniformBuffer { dynamic: false },\n\n }],\n\n label: None,\n\n });\n\n\n\n let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {\n", "file_path": "src/graphics/material_base.rs", "rank": 5, "score": 49630.82107036935 
}, { "content": "pub trait RenderLoopEvent: 'static + Sized {\n\n fn init(window: &Window) -> Self;\n\n\n\n fn resize(&mut self, new_size: winit::dpi::PhysicalSize<u32>);\n\n\n\n fn handle_event(&mut self, window: &Window, event: &WindowEvent);\n\n\n\n fn render(&mut self);\n\n}\n\n\n", "file_path": "src/graphics/render_loop.rs", "rank": 6, "score": 48725.14335588645 }, { "content": "fn main() {\n\n graphics::run::<ExampleRenderLoop>();\n\n}\n", "file_path": "src/main.rs", "rank": 7, "score": 36249.600132986474 }, { "content": "struct RotatingModel;\n\nimpl Component for RotatingModel {\n\n type Storage = VecStorage<Self>;\n\n}\n\n\n\nimpl<'a> System<'a> for RotateObjectSystem {\n\n type SystemData = (\n\n WriteStorage<'a, graphics::Pose>,\n\n ReadStorage<'a, RotatingModel>,\n\n );\n\n\n\n fn run(&mut self, data: Self::SystemData) {\n\n let (mut pose, light) = data;\n\n\n\n for (pose, _) in (&mut pose, &light).join() {\n\n pose.model_matrix.append_rotation_wrt_point_mut(\n\n &nalgebra::UnitQuaternion::new(nalgebra::Vector3::new(0.0, 0.01, 0.0)),\n\n &nalgebra::Point3::new(0.0, 0.0, 0.0),\n\n )\n\n }\n", "file_path": "src/main.rs", "rank": 8, "score": 34844.12288486455 }, { "content": "// System that rotates entities with the RotatingModel component on every frame update.\n\nstruct RotateObjectSystem;\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 33425.78139647828 }, { "content": "pub trait MaterialBase {\n\n fn begin_render_pass<'a>(\n\n &'a self,\n\n device: &wgpu::Device,\n\n encoder: &'a mut wgpu::CommandEncoder,\n\n rp_desc: &'a wgpu::RenderPassDescriptor,\n\n transform_data: &TransformBindGroup,\n\n lighting_data: &LightingBindGroup,\n\n ) -> wgpu::RenderPass<'a>;\n\n}\n\n\n\npub struct TransformBindGroup {\n\n pub model_matrix: nalgebra::Matrix4<f32>,\n\n pub view_matrix: nalgebra::Matrix4<f32>,\n\n pub proj_matrix: nalgebra::Matrix4<f32>,\n\n pub camera_world_position: nalgebra::Vector3<f32>,\n\n}\n\n\n\npub struct LightingBindGroup {\n\n pub 
position: nalgebra::Vector3<f32>,\n\n pub _padding: u32,\n\n}\n\n\n", "file_path": "src/graphics/material_base.rs", "rank": 10, "score": 28239.33809350802 }, { "content": "use specs::prelude::*;\n\n\n\npub struct Light;\n\n\n\nimpl Component for Light {\n\n type Storage = VecStorage<Self>;\n\n}\n\n\n\nimpl Light {}\n", "file_path": "src/graphics/light.rs", "rank": 11, "score": 26574.043106033398 }, { "content": "use specs::prelude::*;\n\n\n\npub struct Pose {\n\n pub model_matrix: nalgebra::Similarity3<f32>,\n\n}\n\n\n\nimpl Component for Pose {\n\n type Storage = VecStorage<Self>;\n\n}\n", "file_path": "src/graphics/pose.rs", "rank": 12, "score": 26573.934503399276 }, { "content": " let rotation_in_world = UnitQuaternion::from_axis_angle(&axis_in_world, angle);\n\n\n\n // Apply rotation around target.\n\n transform.append_rotation_wrt_point_mut(&rotation_in_world, &self.camera_target.into());\n\n\n\n self.view_matrix = transform.inverse();\n\n }\n\n\n\n fn update_camera_position(&mut self, mouse_position: &Vector2<f32>) {\n\n let mut transform = self.view_matrix.inverse();\n\n\n\n let world_camera_up = (transform * self.camera_up).normalize();\n\n let world_camera_right = world_camera_up\n\n .cross(&(transform.translation.vector - self.camera_target))\n\n .normalize();\n\n\n\n let mouse_delta = (mouse_position - self.last_mouse_pos).scale(self.trans_scaling_factor);\n\n\n\n self.camera_target +=\n\n world_camera_up.scale(mouse_delta.y) + world_camera_right.scale(mouse_delta.x);\n", "file_path": "src/graphics/camera.rs", "rank": 13, "score": 26298.483254221625 }, { "content": " transform.translation.vector +=\n\n world_camera_up.scale(mouse_delta.y) + world_camera_right.scale(mouse_delta.x);\n\n\n\n self.view_matrix = transform.inverse();\n\n }\n\n\n\n pub fn handle_event(\n\n &mut self,\n\n window: &winit::window::Window,\n\n event: &winit::event::WindowEvent,\n\n ) {\n\n match event {\n\n winit::event::WindowEvent::MouseInput { button, state, .. 
} => {\n\n if *state == winit::event::ElementState::Pressed {\n\n self.current_button_pressed = Some(*button);\n\n self.is_first_mouse_press = true;\n\n } else {\n\n self.current_button_pressed = None;\n\n }\n\n }\n", "file_path": "src/graphics/camera.rs", "rank": 14, "score": 26296.942232118938 }, { "content": "use nalgebra::*;\n\n\n\npub struct Camera {\n\n pub view_matrix: Isometry3<f32>,\n\n pub proj_matrix: Perspective3<f32>,\n\n camera_up: Vector3<f32>,\n\n camera_target: Vector3<f32>,\n\n\n\n current_button_pressed: Option<winit::event::MouseButton>,\n\n is_first_mouse_press: bool,\n\n last_mouse_pos: Vector2<f32>,\n\n\n\n trans_scaling_factor: f32,\n\n rot_scaling_factor: f32,\n\n zoom_scaling_factor: f32,\n\n zoom_min_distance: f32,\n\n zoom_max_distance: f32,\n\n}\n\n\n\nimpl Camera {\n", "file_path": "src/graphics/camera.rs", "rank": 15, "score": 26296.223770631816 }, { "content": " } else {\n\n point_on_ball.normalize_mut();\n\n }\n\n\n\n point_on_ball\n\n };\n\n\n\n // Calculate rotation in camera space.\n\n let last_pos = get_mouse_pos_on_arcball(mouse_position.x as f32, mouse_position.y as f32);\n\n let cur_pos = get_mouse_pos_on_arcball(self.last_mouse_pos.x, self.last_mouse_pos.y);\n\n\n\n let angle = last_pos.dot(&cur_pos).min(1.0).acos() * self.rot_scaling_factor;\n\n\n\n let axis_in_camera = Unit::new_normalize(last_pos.cross(&cur_pos));\n\n\n\n // Calculate rotation in world space.\n\n let mut transform = self.view_matrix.inverse();\n\n\n\n let axis_in_world = transform * axis_in_camera;\n\n\n", "file_path": "src/graphics/camera.rs", "rank": 16, "score": 26295.901180275254 }, { "content": " pub fn new(\n\n eye: &Point3<f32>,\n\n target: &Point3<f32>,\n\n up: &Vector3<f32>,\n\n aspect_ratio: f32,\n\n fov_y: f32,\n\n z_near: f32,\n\n z_far: f32,\n\n ) -> Self {\n\n let view_matrix = Isometry3::look_at_rh(eye, target, up);\n\n let proj_matrix = Perspective3::new(aspect_ratio, fov_y, z_near, z_far);\n\n\n\n Self {\n\n view_matrix,\n\n 
proj_matrix,\n\n camera_up: *up,\n\n camera_target: Vector3::new(target.x, target.y, target.z),\n\n current_button_pressed: None,\n\n is_first_mouse_press: false,\n\n last_mouse_pos: Vector2::zeros(),\n", "file_path": "src/graphics/camera.rs", "rank": 17, "score": 26294.84597878217 }, { "content": " trans_scaling_factor: 0.01,\n\n rot_scaling_factor: 2.0,\n\n zoom_scaling_factor: 0.05,\n\n zoom_min_distance: 1.0,\n\n zoom_max_distance: 50.0,\n\n }\n\n }\n\n\n\n fn update_camera_zoom(&mut self, zoom_magnitude: f32) {\n\n let mut transform = self.view_matrix.inverse();\n\n\n\n let direction = Unit::new_normalize(transform.translation.vector - self.camera_target);\n\n let direction_scaled = direction.scale(zoom_magnitude * self.zoom_scaling_factor);\n\n let mut new_position = transform.translation.vector + direction_scaled;\n\n\n\n // Clamp to min zoom distance.\n\n if new_position.dot(&direction) < self.zoom_min_distance {\n\n new_position = self.camera_target + direction.scale(self.zoom_min_distance);\n\n }\n\n // Clamp to max zoom distance.\n", "file_path": "src/graphics/camera.rs", "rank": 18, "score": 26293.47493376158 }, { "content": " else if new_position.dot(&direction) > self.zoom_max_distance {\n\n new_position = self.camera_target + direction.scale(self.zoom_max_distance);\n\n }\n\n\n\n transform.translation.vector = new_position;\n\n self.view_matrix = transform.inverse();\n\n }\n\n\n\n fn update_camera_rotation(&mut self, window_size: [f32; 2], mouse_position: &Vector2<f32>) {\n\n let get_mouse_pos_on_arcball = |x, y| {\n\n let mut point_on_ball = Vector3::new(\n\n x as f32 / window_size[0] as f32 * 2.0 - 1.0,\n\n 1.0 - y as f32 / window_size[1] as f32 * 2.0,\n\n 0.0,\n\n );\n\n\n\n let xy_squared = point_on_ball.x * point_on_ball.x + point_on_ball.y * point_on_ball.y;\n\n\n\n if xy_squared <= 1.0 {\n\n point_on_ball.z = (1.0 - xy_squared).sqrt();\n", "file_path": "src/graphics/camera.rs", "rank": 19, "score": 26292.709260973115 }, { "content": " }\n\n 
winit::event::WindowEvent::MouseWheel { delta, .. } => match delta {\n\n winit::event::MouseScrollDelta::PixelDelta(winit::dpi::LogicalPosition {\n\n y,\n\n ..\n\n }) => {\n\n self.update_camera_zoom(*y as f32);\n\n }\n\n _ => {}\n\n },\n\n _ => {}\n\n }\n\n }\n\n}\n", "file_path": "src/graphics/camera.rs", "rank": 20, "score": 26290.27465423141 }, { "content": " winit::event::WindowEvent::CursorMoved { position, .. } => {\n\n let current_mouse_pos = Vector2::new(position.x as f32, position.y as f32);\n\n\n\n if self.is_first_mouse_press {\n\n self.last_mouse_pos = current_mouse_pos;\n\n self.is_first_mouse_press = false;\n\n return;\n\n }\n\n\n\n match self.current_button_pressed {\n\n Some(winit::event::MouseButton::Left) => {\n\n self.update_camera_rotation(window.inner_size().into(), &current_mouse_pos);\n\n }\n\n Some(winit::event::MouseButton::Right) => {\n\n self.update_camera_position(&current_mouse_pos);\n\n }\n\n _ => {}\n\n }\n\n\n\n self.last_mouse_pos = current_mouse_pos;\n", "file_path": "src/graphics/camera.rs", "rank": 21, "score": 26290.250590625958 }, { "content": "pub struct Texture {\n\n _texture: wgpu::Texture,\n\n pub dimension: wgpu::TextureViewDimension,\n\n pub view: wgpu::TextureView,\n\n pub sampler: wgpu::Sampler,\n\n}\n\n\n\nimpl Texture {\n\n pub fn new_texture_from_data(\n\n device: &wgpu::Device,\n\n queue: &wgpu::Queue,\n\n width: u32,\n\n height: u32,\n\n rgba_data: &[u8],\n\n image_format: wgpu::TextureFormat,\n\n wrap_mode: wgpu::AddressMode,\n\n ) -> Self {\n\n // Create texture.\n\n let size = wgpu::Extent3d {\n\n width: width,\n", "file_path": "src/graphics/texture.rs", "rank": 22, "score": 26088.919572739433 }, { "content": " });\n\n\n\n Self {\n\n _texture,\n\n dimension: wgpu::TextureViewDimension::D2,\n\n view,\n\n sampler,\n\n }\n\n }\n\n\n\n pub fn new_cubemap_texture(\n\n device: &wgpu::Device,\n\n queue: &wgpu::Queue,\n\n width: u32,\n\n height: u32,\n\n face_textures: &[Texture],\n\n image_format: 
wgpu::TextureFormat,\n\n mip_levels: u32,\n\n ) -> Self {\n\n // Create texture.\n", "file_path": "src/graphics/texture.rs", "rank": 23, "score": 26088.717057436494 }, { "content": "\n\n pub fn new_texture_from_framebuffer(\n\n device: &wgpu::Device,\n\n queue: &wgpu::Queue,\n\n width: u32,\n\n height: u32,\n\n framebuffer: &Texture,\n\n image_format: wgpu::TextureFormat,\n\n wrap_mode: wgpu::AddressMode,\n\n ) -> Self {\n\n // Create texture.\n\n let size = wgpu::Extent3d {\n\n width: width,\n\n height: height,\n\n depth: 1,\n\n };\n\n let _texture = device.create_texture(&wgpu::TextureDescriptor {\n\n label: None,\n\n size: size,\n\n array_layer_count: 1,\n", "file_path": "src/graphics/texture.rs", "rank": 24, "score": 26087.251072215437 }, { "content": " mag_filter: wgpu::FilterMode::Linear,\n\n min_filter: wgpu::FilterMode::Linear,\n\n mipmap_filter: wgpu::FilterMode::Linear,\n\n lod_min_clamp: -100.0,\n\n lod_max_clamp: 100.0,\n\n compare: wgpu::CompareFunction::LessEqual,\n\n });\n\n\n\n Self {\n\n _texture,\n\n dimension: wgpu::TextureViewDimension::Cube,\n\n view,\n\n sampler,\n\n }\n\n }\n\n\n\n pub fn new_framebuffer_texture(\n\n device: &wgpu::Device,\n\n width: u32,\n\n height: u32,\n", "file_path": "src/graphics/texture.rs", "rank": 25, "score": 26086.599576264973 }, { "content": " mip_level_count: 1,\n\n sample_count: 1,\n\n dimension: wgpu::TextureDimension::D2,\n\n format: image_format,\n\n usage: wgpu::TextureUsage::SAMPLED | wgpu::TextureUsage::COPY_DST,\n\n });\n\n\n\n let mut encoder =\n\n device.create_command_encoder(&wgpu::CommandEncoderDescriptor { label: None });\n\n\n\n encoder.copy_texture_to_texture(\n\n wgpu::TextureCopyView {\n\n texture: &framebuffer._texture,\n\n mip_level: 0,\n\n array_layer: 0,\n\n origin: wgpu::Origin3d::ZERO,\n\n },\n\n wgpu::TextureCopyView {\n\n texture: &_texture,\n\n mip_level: 0,\n", "file_path": "src/graphics/texture.rs", "rank": 26, "score": 26085.139047487133 }, { "content": " image_format: 
wgpu::TextureFormat,\n\n ) -> Self {\n\n let size = wgpu::Extent3d {\n\n width: width,\n\n height: height,\n\n depth: 1,\n\n };\n\n\n\n let desc = wgpu::TextureDescriptor {\n\n label: None,\n\n size,\n\n array_layer_count: 1,\n\n mip_level_count: 1,\n\n sample_count: 1,\n\n dimension: wgpu::TextureDimension::D2,\n\n format: image_format,\n\n usage: wgpu::TextureUsage::OUTPUT_ATTACHMENT\n\n | wgpu::TextureUsage::SAMPLED\n\n | wgpu::TextureUsage::COPY_SRC,\n\n };\n", "file_path": "src/graphics/texture.rs", "rank": 27, "score": 26085.075192468543 }, { "content": " let size = wgpu::Extent3d {\n\n width: width,\n\n height: height,\n\n depth: 1,\n\n };\n\n let _texture = device.create_texture(&wgpu::TextureDescriptor {\n\n label: None,\n\n size: size,\n\n array_layer_count: 6,\n\n mip_level_count: mip_levels,\n\n sample_count: 1,\n\n dimension: wgpu::TextureDimension::D2,\n\n format: image_format,\n\n usage: wgpu::TextureUsage::SAMPLED | wgpu::TextureUsage::COPY_DST,\n\n });\n\n\n\n let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor {\n\n label: Some(\"texture_buffer_copy_encoder\"),\n\n });\n\n\n", "file_path": "src/graphics/texture.rs", "rank": 28, "score": 26084.88258182479 }, { "content": " height: height,\n\n depth: 1,\n\n };\n\n let _texture = device.create_texture(&wgpu::TextureDescriptor {\n\n label: None,\n\n size: size,\n\n array_layer_count: 1,\n\n mip_level_count: 1,\n\n sample_count: 1,\n\n dimension: wgpu::TextureDimension::D2,\n\n format: image_format,\n\n usage: wgpu::TextureUsage::SAMPLED | wgpu::TextureUsage::COPY_DST,\n\n });\n\n\n\n // Upload data to texture.\n\n let buffer = device.create_buffer_with_data(&rgba_data, wgpu::BufferUsage::COPY_SRC);\n\n\n\n let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor {\n\n label: Some(\"texture_buffer_copy_encoder\"),\n\n });\n", "file_path": "src/graphics/texture.rs", "rank": 29, "score": 26084.794500405627 }, { "content": "\n\n let _texture = 
device.create_texture(&desc);\n\n\n\n let view = _texture.create_default_view();\n\n\n\n let sampler = device.create_sampler(&wgpu::SamplerDescriptor {\n\n address_mode_u: wgpu::AddressMode::ClampToEdge,\n\n address_mode_v: wgpu::AddressMode::ClampToEdge,\n\n address_mode_w: wgpu::AddressMode::ClampToEdge,\n\n mag_filter: wgpu::FilterMode::Linear,\n\n min_filter: wgpu::FilterMode::Linear,\n\n mipmap_filter: wgpu::FilterMode::Nearest,\n\n lod_min_clamp: -100.0,\n\n lod_max_clamp: 100.0,\n\n compare: wgpu::CompareFunction::LessEqual,\n\n });\n\n\n\n Self {\n\n _texture,\n\n dimension: wgpu::TextureViewDimension::D2,\n\n view,\n\n sampler,\n\n }\n\n }\n\n}\n", "file_path": "src/graphics/texture.rs", "rank": 30, "score": 26084.30508735367 }, { "content": "\n\n let sampler = device.create_sampler(&wgpu::SamplerDescriptor {\n\n address_mode_u: wrap_mode,\n\n address_mode_v: wrap_mode,\n\n address_mode_w: wrap_mode,\n\n mag_filter: wgpu::FilterMode::Linear,\n\n min_filter: wgpu::FilterMode::Nearest,\n\n mipmap_filter: wgpu::FilterMode::Nearest,\n\n lod_min_clamp: -100.0,\n\n lod_max_clamp: 100.0,\n\n compare: wgpu::CompareFunction::LessEqual,\n\n });\n\n\n\n Self {\n\n _texture,\n\n dimension: wgpu::TextureViewDimension::D2,\n\n view,\n\n sampler,\n\n }\n\n }\n", "file_path": "src/graphics/texture.rs", "rank": 31, "score": 26083.88541899431 }, { "content": " for mip_level in 0..mip_levels as usize {\n\n for i in 0..6 {\n\n encoder.copy_texture_to_texture(\n\n wgpu::TextureCopyView {\n\n texture: &face_textures[mip_level * 6 + i]._texture,\n\n mip_level: 0,\n\n array_layer: 0,\n\n origin: wgpu::Origin3d::ZERO,\n\n },\n\n wgpu::TextureCopyView {\n\n texture: &_texture,\n\n mip_level: mip_level as u32,\n\n array_layer: i as u32,\n\n origin: wgpu::Origin3d::ZERO,\n\n },\n\n wgpu::Extent3d {\n\n width: width >> mip_level,\n\n height: height >> mip_level,\n\n depth: 1,\n\n },\n", "file_path": "src/graphics/texture.rs", "rank": 32, "score": 26083.250776671513 }, { "content": 
"\n\n encoder.copy_buffer_to_texture(\n\n wgpu::BufferCopyView {\n\n buffer: &buffer,\n\n offset: 0,\n\n bytes_per_row: (rgba_data.len() / height as usize) as u32,\n\n rows_per_image: height,\n\n },\n\n wgpu::TextureCopyView {\n\n texture: &_texture,\n\n mip_level: 0,\n\n array_layer: 0,\n\n origin: wgpu::Origin3d::ZERO,\n\n },\n\n size,\n\n );\n\n\n\n queue.submit(&[encoder.finish()]);\n\n\n\n let view = _texture.create_default_view();\n", "file_path": "src/graphics/texture.rs", "rank": 33, "score": 26082.962045596872 }, { "content": " );\n\n }\n\n }\n\n\n\n queue.submit(&[encoder.finish()]);\n\n\n\n let view = _texture.create_view(&wgpu::TextureViewDescriptor {\n\n format: image_format,\n\n dimension: wgpu::TextureViewDimension::Cube,\n\n aspect: wgpu::TextureAspect::All,\n\n base_mip_level: 0,\n\n level_count: mip_levels,\n\n base_array_layer: 0,\n\n array_layer_count: 6,\n\n });\n\n\n\n let sampler = device.create_sampler(&wgpu::SamplerDescriptor {\n\n address_mode_u: wgpu::AddressMode::ClampToEdge,\n\n address_mode_v: wgpu::AddressMode::ClampToEdge,\n\n address_mode_w: wgpu::AddressMode::ClampToEdge,\n", "file_path": "src/graphics/texture.rs", "rank": 34, "score": 26082.574942823194 }, { "content": " array_layer: 0,\n\n origin: wgpu::Origin3d::ZERO,\n\n },\n\n size,\n\n );\n\n\n\n queue.submit(&[encoder.finish()]);\n\n\n\n let view = _texture.create_default_view();\n\n\n\n let sampler = device.create_sampler(&wgpu::SamplerDescriptor {\n\n address_mode_u: wrap_mode,\n\n address_mode_v: wrap_mode,\n\n address_mode_w: wrap_mode,\n\n mag_filter: wgpu::FilterMode::Linear,\n\n min_filter: wgpu::FilterMode::Nearest,\n\n mipmap_filter: wgpu::FilterMode::Nearest,\n\n lod_min_clamp: -100.0,\n\n lod_max_clamp: 100.0,\n\n compare: wgpu::CompareFunction::LessEqual,\n", "file_path": "src/graphics/texture.rs", "rank": 35, "score": 26081.14184019447 }, { "content": "use super::*;\n\nuse specs::prelude::*;\n\n\n\npub struct Renderable {\n\n meshes: Vec<Mesh>,\n\n pub material: 
Box<dyn MaterialBase + Send + Sync>,\n\n}\n\n\n\nimpl Component for Renderable {\n\n type Storage = VecStorage<Self>;\n\n}\n\n\n\nimpl Renderable {\n\n pub fn render<'a>(\n\n &'a self,\n\n render_state: &RenderState,\n\n render_pass_desc: &wgpu::RenderPassDescriptor,\n\n encoder: &mut wgpu::CommandEncoder,\n\n transform_data: &TransformBindGroup,\n\n lighting_data: &LightingBindGroup,\n", "file_path": "src/graphics/renderable.rs", "rank": 36, "score": 25421.04582906163 }, { "content": " ) {\n\n let mut render_pass = self.material.begin_render_pass(\n\n &render_state.device,\n\n encoder,\n\n render_pass_desc,\n\n transform_data,\n\n lighting_data,\n\n );\n\n\n\n for mesh in self.meshes.iter() {\n\n mesh.draw(&mut render_pass);\n\n }\n\n }\n\n\n\n pub fn new(meshes: Vec<Mesh>, material: Box<dyn MaterialBase + Send + Sync>) -> Self {\n\n Self { meshes, material }\n\n }\n\n\n\n pub fn new_from_single_mesh(mesh: Mesh, material: Box<dyn MaterialBase + Send + Sync>) -> Self {\n\n let mut meshes = Vec::new();\n", "file_path": "src/graphics/renderable.rs", "rank": 37, "score": 25416.675926534124 }, { "content": "\n\n meshes.push(mesh);\n\n\n\n Self::new(meshes, material)\n\n }\n\n\n\n fn import_gltf(\n\n device: &wgpu::Device,\n\n sc_desc: &wgpu::SwapChainDescriptor,\n\n queue: &wgpu::Queue,\n\n gltf: &gltf::Document,\n\n buffers: &Vec<gltf::buffer::Data>,\n\n images: &Vec<gltf::image::Data>,\n\n skybox: &Skybox,\n\n ) -> Self {\n\n let mut meshes = Vec::new();\n\n let mut textures = Vec::new();\n\n\n\n for mesh in gltf.meshes() {\n\n meshes.push(Renderable::create_mesh(&device, &mesh, &buffers));\n", "file_path": "src/graphics/renderable.rs", "rank": 38, "score": 25411.371107730763 }, { "content": " }\n\n }\n\n None => MaterialProperty {\n\n texture_id: None,\n\n factor: Some([0.0; 4]),\n\n },\n\n },\n\n irradiance_map: &skybox.irradiance_map,\n\n prefiltered_environment_map: &skybox.prefiltered_environment_map,\n\n brdf_lut: &skybox.brdf_lut,\n\n textures,\n\n };\n\n\n\n 
let material = Box::new(PbrMaterial::new(&device, &sc_desc, &pbr_params));\n\n\n\n Renderable::new(meshes, material)\n\n }\n\n\n\n pub fn new_from_path(\n\n device: &wgpu::Device,\n", "file_path": "src/graphics/renderable.rs", "rank": 39, "score": 25409.076165618753 }, { "content": " sc_desc: &wgpu::SwapChainDescriptor,\n\n queue: &wgpu::Queue,\n\n path: &std::path::Path,\n\n skybox: &Skybox,\n\n ) -> Self {\n\n let (gltf, buffers, images) = gltf::import(path).unwrap();\n\n\n\n Renderable::import_gltf(device, sc_desc, queue, &gltf, &buffers, &images, skybox)\n\n }\n\n\n\n pub fn new_from_glb<'a>(\n\n device: &wgpu::Device,\n\n sc_desc: &wgpu::SwapChainDescriptor,\n\n queue: &wgpu::Queue,\n\n glb_data: &[u8],\n\n skybox: &Skybox,\n\n ) -> Self {\n\n let (gltf, buffers, images) = gltf::import_slice(glb_data.as_ref()).unwrap();\n\n\n\n Renderable::import_gltf(device, sc_desc, queue, &gltf, &buffers, &images, skybox)\n", "file_path": "src/graphics/renderable.rs", "rank": 40, "score": 25408.7478814629 }, { "content": " ]),\n\n },\n\n },\n\n metal_roughness_property: match mat\n\n .pbr_metallic_roughness()\n\n .metallic_roughness_texture()\n\n {\n\n Some(gltf_texture) => {\n\n textures.push(Renderable::create_texture(\n\n &device,\n\n &queue,\n\n &images[gltf_texture.texture().index()],\n\n wgpu::TextureFormat::Rgba8Unorm,\n\n ));\n\n MaterialProperty {\n\n texture_id: Some(textures.len() - 1),\n\n factor: None,\n\n }\n\n }\n\n None => MaterialProperty {\n", "file_path": "src/graphics/renderable.rs", "rank": 41, "score": 25408.216499058544 }, { "content": " texture_id: None,\n\n factor: Some([\n\n 0.0,\n\n mat.pbr_metallic_roughness().metallic_factor(),\n\n mat.pbr_metallic_roughness().roughness_factor(),\n\n 0.0,\n\n ]),\n\n },\n\n },\n\n normal_property: match mat.normal_texture() {\n\n Some(gltf_texture) => {\n\n textures.push(Renderable::create_texture(\n\n &device,\n\n &queue,\n\n &images[gltf_texture.texture().index()],\n\n wgpu::TextureFormat::Rgba8Unorm,\n\n 
));\n\n MaterialProperty {\n\n texture_id: Some(textures.len() - 1),\n\n factor: None,\n", "file_path": "src/graphics/renderable.rs", "rank": 42, "score": 25408.051414767102 }, { "content": " }\n\n let mat = gltf.materials().next().unwrap();\n\n\n\n let pbr_params = PbrBindGroup {\n\n ao_property: match mat.occlusion_texture() {\n\n Some(gltf_texture) => {\n\n textures.push(Renderable::create_texture(\n\n &device,\n\n &queue,\n\n &images[gltf_texture.texture().index()],\n\n wgpu::TextureFormat::Rgba8Unorm,\n\n ));\n\n MaterialProperty {\n\n texture_id: Some(textures.len() - 1),\n\n factor: None,\n\n }\n\n }\n\n None => MaterialProperty {\n\n texture_id: None,\n\n factor: Some([1.0, 1.0, 1.0, 1.0]),\n", "file_path": "src/graphics/renderable.rs", "rank": 43, "score": 25408.0268646937 }, { "content": " emissive_property: match mat.emissive_texture() {\n\n Some(gltf_texture) => {\n\n textures.push(Renderable::create_texture(\n\n &device,\n\n &queue,\n\n &images[gltf_texture.texture().index()],\n\n wgpu::TextureFormat::Rgba8UnormSrgb,\n\n ));\n\n MaterialProperty {\n\n texture_id: Some(textures.len() - 1),\n\n factor: None,\n\n }\n\n }\n\n None => MaterialProperty {\n\n texture_id: None,\n\n factor: Some([\n\n mat.emissive_factor()[0],\n\n mat.emissive_factor()[1],\n\n mat.emissive_factor()[2],\n\n 1.0,\n", "file_path": "src/graphics/renderable.rs", "rank": 44, "score": 25408.0268646937 }, { "content": " },\n\n },\n\n albedo_property: match mat.pbr_metallic_roughness().base_color_texture() {\n\n Some(gltf_texture) => {\n\n textures.push(Renderable::create_texture(\n\n &device,\n\n &queue,\n\n &images[gltf_texture.texture().index()],\n\n wgpu::TextureFormat::Rgba8UnormSrgb,\n\n ));\n\n MaterialProperty {\n\n texture_id: Some(textures.len() - 1),\n\n factor: None,\n\n }\n\n }\n\n None => MaterialProperty {\n\n texture_id: None,\n\n factor: Some(mat.pbr_metallic_roughness().base_color_factor()),\n\n },\n\n },\n", "file_path": "src/graphics/renderable.rs", "rank": 45, 
"score": 25407.97875657182 }, { "content": " }\n\n\n\n fn create_texture(\n\n device: &wgpu::Device,\n\n queue: &wgpu::Queue,\n\n image: &gltf::image::Data,\n\n image_format: wgpu::TextureFormat,\n\n ) -> Texture {\n\n match image.format {\n\n gltf::image::Format::R8G8B8 => {\n\n // Convert RGB to RGBA.\n\n let mut rgba_data = vec![0; (image.width * image.height * 4) as usize];\n\n\n\n for i in 0..(image.width * image.height) as usize {\n\n rgba_data[i * 4 + 0] = image.pixels[i * 3 + 0];\n\n rgba_data[i * 4 + 1] = image.pixels[i * 3 + 1];\n\n rgba_data[i * 4 + 2] = image.pixels[i * 3 + 2];\n\n rgba_data[i * 4 + 3] = 255;\n\n }\n\n\n", "file_path": "src/graphics/renderable.rs", "rank": 46, "score": 25407.019577270366 }, { "content": " Texture::new_texture_from_data(\n\n &device,\n\n &queue,\n\n image.width,\n\n image.height,\n\n rgba_data.as_ref(),\n\n image_format,\n\n wgpu::AddressMode::Repeat,\n\n )\n\n }\n\n gltf::image::Format::R8G8B8A8 => Texture::new_texture_from_data(\n\n &device,\n\n &queue,\n\n image.width,\n\n image.height,\n\n image.pixels.as_ref(),\n\n image_format,\n\n wgpu::AddressMode::Repeat,\n\n ),\n\n _ => panic!(\"Unimplemented tex type\"),\n", "file_path": "src/graphics/renderable.rs", "rank": 47, "score": 25405.519909354203 }, { "content": " }\n\n }\n\n\n\n fn create_mesh(\n\n device: &wgpu::Device,\n\n gltf_mesh: &gltf::Mesh,\n\n buffers: &Vec<gltf::buffer::Data>,\n\n ) -> Mesh {\n\n let mut vertices = Vec::new();\n\n let mut indices = Vec::new();\n\n\n\n for primitive in gltf_mesh.primitives() {\n\n let reader = primitive.reader(|buffer| Some(&buffers[buffer.index()]));\n\n\n\n let pos_iter = reader.read_positions().unwrap();\n\n let norm_iter = reader.read_normals().unwrap();\n\n\n\n let tex_coord_iter: Box<dyn Iterator<Item = [f32; 2]>> = match reader.read_tex_coords(0)\n\n {\n\n Some(tex_coords_iter) => Box::new(tex_coords_iter.into_f32()),\n", "file_path": "src/graphics/renderable.rs", "rank": 48, "score": 25404.767750035684 }, { 
"content": " if let Some(iter) = reader.read_indices() {\n\n for vertex_index in iter.into_u32() {\n\n indices.push(vertex_index);\n\n }\n\n }\n\n\n\n // Calculate tangents.\n\n let mut tangents: Vec<nalgebra::Vector3<f32>> =\n\n vec![nalgebra::Vector3::zeros(); vertices.len()];\n\n let mut bitangents: Vec<nalgebra::Vector3<f32>> =\n\n vec![nalgebra::Vector3::zeros(); vertices.len()];\n\n\n\n for tri_ids in indices.chunks(3) {\n\n let i0 = tri_ids[0] as usize;\n\n let i1 = tri_ids[1] as usize;\n\n let i2 = tri_ids[2] as usize;\n\n\n\n let p0: nalgebra::Vector3<f32> = vertices[i0].position.into();\n\n let p1: nalgebra::Vector3<f32> = vertices[i1].position.into();\n\n let p2: nalgebra::Vector3<f32> = vertices[i2].position.into();\n", "file_path": "src/graphics/renderable.rs", "rank": 49, "score": 25404.671799104555 }, { "content": " tangents[i1] += t;\n\n tangents[i2] += t;\n\n\n\n bitangents[i0] += b;\n\n bitangents[i1] += b;\n\n bitangents[i2] += b;\n\n }\n\n\n\n for (i, vertex) in vertices.iter_mut().enumerate() {\n\n let t = tangents[i];\n\n let b = bitangents[i];\n\n let n: nalgebra::Vector3<f32> = vertex.normal.into();\n\n\n\n let tangent = (t - n.dot(&t) * n).normalize();\n\n let handedness = if n.dot(&t.cross(&b)) > 0.0 { 1.0 } else { -1.0 };\n\n\n\n vertex.tangent = [tangent.x, tangent.y, tangent.z, handedness];\n\n }\n\n }\n\n\n\n Mesh::new(device, vertices.as_slice(), Some(indices.as_slice()))\n\n }\n\n}\n", "file_path": "src/graphics/renderable.rs", "rank": 50, "score": 25404.46115529935 }, { "content": " None => Box::new(std::iter::repeat([0.0; 2])),\n\n };\n\n\n\n let tangent_iter: Box<dyn Iterator<Item = [f32; 4]>> = match reader.read_tangents() {\n\n Some(tangent_iter) => Box::new(tangent_iter),\n\n None => Box::new(std::iter::repeat([0.0; 4])),\n\n };\n\n\n\n for (vert_pos, vert_norm, vert_tex_coord, vert_tangent) in\n\n izip!(pos_iter, norm_iter, tex_coord_iter, tangent_iter)\n\n {\n\n vertices.push(Vertex {\n\n position: vert_pos,\n\n normal: 
vert_norm,\n\n tangent: vert_tangent,\n\n tex_coord: vert_tex_coord,\n\n });\n\n }\n\n\n\n // Read indices.\n", "file_path": "src/graphics/renderable.rs", "rank": 51, "score": 25402.21918914922 }, { "content": "\n\n let w0: nalgebra::Vector2<f32> = vertices[i0].tex_coord.into();\n\n let w1: nalgebra::Vector2<f32> = vertices[i1].tex_coord.into();\n\n let w2: nalgebra::Vector2<f32> = vertices[i2].tex_coord.into();\n\n\n\n let e1 = p1 - p0;\n\n let e2 = p2 - p0;\n\n\n\n let x1 = w1.x - w0.x;\n\n let x2 = w2.x - w0.x;\n\n\n\n let y1 = w1.y - w0.y;\n\n let y2 = w2.y - w0.y;\n\n\n\n let r = 1.0 / (x1 * y2 - x2 * y1);\n\n\n\n let t = (e1 * y2 - e2 * y1) * r;\n\n let b = (e2 * x1 - e1 * x2) * r;\n\n\n\n tangents[i0] += t;\n", "file_path": "src/graphics/renderable.rs", "rank": 52, "score": 25402.21918914922 }, { "content": "use winit::window::Window;\n\n\n\npub struct RenderState {\n\n pub device: wgpu::Device,\n\n pub queue: wgpu::Queue,\n\n surface: wgpu::Surface,\n\n pub swap_chain_desc: wgpu::SwapChainDescriptor,\n\n pub swap_chain: wgpu::SwapChain,\n\n}\n\n\n\nimpl RenderState {\n\n pub async fn new(window: &Window) -> Self {\n\n let size = window.inner_size();\n\n\n\n let surface = wgpu::Surface::create(window);\n\n\n\n let adapter = wgpu::Adapter::request(\n\n &wgpu::RequestAdapterOptions {\n\n power_preference: wgpu::PowerPreference::Default,\n\n compatible_surface: Some(&surface),\n", "file_path": "src/graphics/render_state.rs", "rank": 58, "score": 23877.036325725327 }, { "content": " width: size.width,\n\n height: size.height,\n\n present_mode: wgpu::PresentMode::Fifo,\n\n };\n\n let swap_chain = device.create_swap_chain(&surface, &swap_chain_desc);\n\n\n\n Self {\n\n device,\n\n queue,\n\n surface,\n\n swap_chain_desc,\n\n swap_chain,\n\n }\n\n }\n\n\n\n pub fn resize(&mut self, new_size: winit::dpi::PhysicalSize<u32>) {\n\n self.swap_chain_desc.width = new_size.width;\n\n self.swap_chain_desc.height = new_size.height;\n\n\n\n self.swap_chain = self\n\n 
.device\n\n .create_swap_chain(&self.surface, &self.swap_chain_desc);\n\n }\n\n}\n", "file_path": "src/graphics/render_state.rs", "rank": 60, "score": 23872.593145849918 }, { "content": "use winit::{\n\n event::*,\n\n event_loop::{ControlFlow, EventLoop},\n\n window::{Window, WindowBuilder},\n\n};\n\n\n", "file_path": "src/graphics/render_loop.rs", "rank": 62, "score": 23870.301427396542 }, { "content": " ref event,\n\n window_id,\n\n } if window_id == window.id() => match event {\n\n WindowEvent::CloseRequested => *control_flow = ControlFlow::Exit,\n\n WindowEvent::KeyboardInput {\n\n input:\n\n KeyboardInput {\n\n state: ElementState::Pressed,\n\n virtual_keycode: Some(VirtualKeyCode::Escape),\n\n ..\n\n },\n\n ..\n\n } => *control_flow = ControlFlow::Exit,\n\n WindowEvent::Resized(physical_size) => render_system.resize(*physical_size),\n\n WindowEvent::ScaleFactorChanged { new_inner_size, .. } => {\n\n render_system.resize(**new_inner_size)\n\n }\n\n _ => render_system.handle_event(&window, event),\n\n },\n\n _ => {}\n\n }\n\n });\n\n}\n", "file_path": "src/graphics/render_loop.rs", "rank": 63, "score": 23870.026839889317 }, { "content": " },\n\n wgpu::BackendBit::PRIMARY, // Vulkan + Metal + DX12 + Browser WebGPU\n\n )\n\n .await\n\n .unwrap();\n\n\n\n println!(\"Backend type: {:?}\", adapter.get_info().backend);\n\n\n\n let (device, queue) = adapter\n\n .request_device(&wgpu::DeviceDescriptor {\n\n extensions: wgpu::Extensions {\n\n anisotropic_filtering: false,\n\n },\n\n limits: Default::default(),\n\n })\n\n .await;\n\n\n\n let swap_chain_desc = wgpu::SwapChainDescriptor {\n\n usage: wgpu::TextureUsage::OUTPUT_ATTACHMENT,\n\n format: wgpu::TextureFormat::Bgra8UnormSrgb,\n", "file_path": "src/graphics/render_state.rs", "rank": 64, "score": 23869.682332617173 }, { "content": "pub mod camera;\n\npub mod light;\n\npub mod material_base;\n\npub mod material_hdr;\n\npub mod material_pbr;\n\npub mod material_skybox;\n\npub mod mesh;\n\npub mod pose;\n\npub mod 
render_loop;\n\npub mod render_state;\n\npub mod render_system;\n\npub mod renderable;\n\npub mod skybox;\n\npub mod texture;\n\n\n\npub use camera::*;\n\npub use light::*;\n\npub use material_base::*;\n\npub use material_hdr::*;\n\npub use material_pbr::*;\n", "file_path": "src/graphics/mod.rs", "rank": 65, "score": 20.157341192154718 }, { "content": "pub use material_skybox::*;\n\npub use mesh::*;\n\npub use pose::*;\n\npub use render_loop::*;\n\npub use render_state::*;\n\npub use render_system::*;\n\npub use renderable::*;\n\npub use skybox::*;\n\npub use texture::*;\n", "file_path": "src/graphics/mod.rs", "rank": 66, "score": 17.349408281755366 }, { "content": " world.insert(camera);\n\n\n\n dispatcher.setup(&mut world);\n\n\n\n Self { world, dispatcher }\n\n }\n\n\n\n fn resize(&mut self, new_size: winit::dpi::PhysicalSize<u32>) {\n\n let mut render_state: WriteExpect<graphics::RenderState> = self.world.system_data();\n\n\n\n render_state.resize(new_size);\n\n }\n\n\n\n fn handle_event(&mut self, window: &winit::window::Window, event: &winit::event::WindowEvent) {\n\n match event {\n\n winit::event::WindowEvent::CursorMoved { .. }\n\n | winit::event::WindowEvent::MouseInput { .. }\n\n | winit::event::WindowEvent::MouseWheel { .. 
} => {\n\n let mut camera: WriteExpect<graphics::Camera> = self.world.system_data();\n\n\n", "file_path": "src/main.rs", "rank": 67, "score": 14.580015063346226 }, { "content": "use super::*;\n\nuse nalgebra::*;\n\n\n\npub struct Skybox {\n\n pub environment_texture: Texture,\n\n pub irradiance_map: Texture,\n\n pub prefiltered_environment_map: Texture,\n\n pub brdf_lut: Texture,\n\n}\n\n\n\nimpl Skybox {\n\n pub fn new(\n\n device: &wgpu::Device,\n\n sc_desc: &wgpu::SwapChainDescriptor,\n\n queue: &wgpu::Queue,\n\n hdr_data: &[u8],\n\n ) -> (Skybox, Renderable) {\n\n let hdr_texture = Skybox::create_hdr_texture(device, queue, hdr_data);\n\n\n\n // Create unit cube for projections.\n", "file_path": "src/graphics/skybox.rs", "rank": 68, "score": 14.106447932649484 }, { "content": " 1000.0,\n\n );\n\n\n\n // Create render system.\n\n let mut dispatcher = DispatcherBuilder::new()\n\n .with(RotateObjectSystem, \"rot_system\", &[])\n\n .with(graphics::RenderSystem, \"render_system\", &[\"rot_system\"])\n\n .build();\n\n\n\n // Create world.\n\n let mut world = World::new();\n\n\n\n world.register::<graphics::Renderable>();\n\n world.register::<graphics::Pose>();\n\n world.register::<graphics::Light>();\n\n world.register::<RotatingModel>();\n\n\n\n // Add models to world.\n\n let helmet_data = include_bytes!(\"../res/DamagedHelmet.glb\");\n\n let hdr_data = include_bytes!(\"../res/newport_loft.hdr\");\n", "file_path": "src/main.rs", "rank": 69, "score": 13.878838855492749 }, { "content": "use super::*;\n\n\n\npub struct SkyboxBindGroup {\n\n pub environment_texture: Texture,\n\n}\n\n\n\npub struct SkyboxMaterial {\n\n pub render_pipeline: wgpu::RenderPipeline,\n\n\n\n pub transform_bind_group: wgpu::BindGroup,\n\n pub transform_uniform_buffer: wgpu::Buffer,\n\n\n\n pub params_bind_group: wgpu::BindGroup,\n\n}\n\n\n\nimpl SkyboxMaterial {\n\n pub fn new(\n\n device: &wgpu::Device,\n\n sc_desc: &wgpu::SwapChainDescriptor,\n\n params: &SkyboxBindGroup,\n", "file_path": 
"src/graphics/material_skybox.rs", "rank": 70, "score": 13.216994384107476 }, { "content": " camera.handle_event(window, event);\n\n }\n\n _ => {}\n\n };\n\n }\n\n\n\n fn render(&mut self) {\n\n self.dispatcher.dispatch(&mut self.world);\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 71, "score": 13.213586915186953 }, { "content": "}\n\n\n\npub struct PbrMaterial {\n\n pub render_pipeline: wgpu::RenderPipeline,\n\n pub transform_bind_group: wgpu::BindGroup,\n\n pub lighting_bind_group: wgpu::BindGroup,\n\n pub pbr_factor_bind_group: wgpu::BindGroup,\n\n pub pbr_texture_bind_group: wgpu::BindGroup,\n\n\n\n pub transform_uniform_buffer: wgpu::Buffer,\n\n pub lighting_uniform_buffer: wgpu::Buffer,\n\n}\n\n\n\nimpl PbrMaterial {\n\n pub fn new(\n\n device: &wgpu::Device,\n\n swap_chain_desc: &wgpu::SwapChainDescriptor,\n\n params: &PbrBindGroup,\n\n ) -> Self {\n\n // Init bind groups.\n", "file_path": "src/graphics/material_pbr.rs", "rank": 72, "score": 12.206138518038015 }, { "content": " .with(graphics::Renderable::new_from_glb(\n\n &render_state.device,\n\n &render_state.swap_chain_desc,\n\n &render_state.queue,\n\n include_bytes!(\"../res/BoxTextured.glb\"),\n\n &skybox,\n\n ))\n\n .with(graphics::Pose {\n\n model_matrix: nalgebra::Similarity3::from_parts(\n\n nalgebra::Translation3::new(3.0, 0.0, 0.0),\n\n nalgebra::UnitQuaternion::identity(),\n\n 1.0,\n\n ),\n\n })\n\n .with(graphics::Light {})\n\n .with(RotatingModel)\n\n .build();\n\n\n\n // Pass render state into ECS as last step.\n\n world.insert(render_state);\n", "file_path": "src/main.rs", "rank": 73, "score": 11.87921163793771 }, { "content": "use super::*;\n\n\n\npub struct PbrBindGroup<'a> {\n\n pub textures: Vec<Texture>,\n\n\n\n pub ao_property: MaterialProperty,\n\n pub albedo_property: MaterialProperty,\n\n pub emissive_property: MaterialProperty,\n\n pub metal_roughness_property: MaterialProperty,\n\n pub normal_property: MaterialProperty,\n\n\n\n pub irradiance_map: &'a Texture,\n\n pub 
prefiltered_environment_map: &'a Texture,\n\n pub brdf_lut: &'a Texture,\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct MaterialProperty {\n\n pub factor: Option<[f32; 4]>,\n\n pub texture_id: Option<usize>,\n", "file_path": "src/graphics/material_pbr.rs", "rank": 74, "score": 11.519310892202387 }, { "content": "use super::*;\n\n\n\npub struct HdrCvtMaterial {\n\n pub render_pipeline: wgpu::RenderPipeline,\n\n pub transform_bind_group: wgpu::BindGroup,\n\n pub transform_bind_group_buffer: wgpu::Buffer,\n\n pub cvt_bind_group: wgpu::BindGroup,\n\n}\n\n\n\npub struct HdrConvolveDiffuseMaterial {\n\n pub render_pipeline: wgpu::RenderPipeline,\n\n pub transform_bind_group: wgpu::BindGroup,\n\n pub transform_bind_group_buffer: wgpu::Buffer,\n\n pub convolve_bind_group: wgpu::BindGroup,\n\n}\n\n\n\npub struct HdrConvolveSpecularMaterial {\n\n pub render_pipeline: wgpu::RenderPipeline,\n\n pub transform_bind_group: wgpu::BindGroup,\n\n pub transform_bind_group_buffer: wgpu::Buffer,\n", "file_path": "src/graphics/material_hdr.rs", "rank": 75, "score": 10.681320752347418 }, { "content": " }\n\n}\n\n\n\nimpl graphics::RenderLoopEvent for ExampleRenderLoop {\n\n fn init(window: &winit::window::Window) -> Self {\n\n // Init rendering state.\n\n let render_state = futures::executor::block_on(graphics::RenderState::new(&window));\n\n\n\n // Create camera.\n\n let camera_position = nalgebra::Point3::new(0.0, 0.0, 2.0);\n\n let camera_target = nalgebra::Point3::new(0.0, 0.0, 0.0);\n\n let camera_up = nalgebra::Vector3::y_axis();\n\n\n\n let camera = graphics::Camera::new(\n\n &camera_position,\n\n &camera_target,\n\n &camera_up,\n\n 1920.0 / 1080.0,\n\n std::f32::consts::PI / 180.0 * 100.0,\n\n 0.1,\n", "file_path": "src/main.rs", "rank": 76, "score": 9.782229289076959 }, { "content": "\n\n Some(buffer)\n\n }\n\n };\n\n\n\n let num_indices = match index_data {\n\n None => 0,\n\n Some(data) => data.len(),\n\n };\n\n\n\n Self {\n\n vertex_buffer: vertex_buffer,\n\n index_buffer: 
index_buffer,\n\n num_vertices: vertex_data.len() as u32,\n\n num_indices: num_indices as u32,\n\n }\n\n }\n\n\n\n pub fn draw<'a>(&'a self, render_pass: &mut wgpu::RenderPass<'a>) {\n\n render_pass.set_vertex_buffer(0, &self.vertex_buffer, 0, 0);\n", "file_path": "src/graphics/mesh.rs", "rank": 77, "score": 9.680037760476166 }, { "content": " pbr_defines,\n\n include_str!(\"shaders/pbr.frag\")\n\n ),\n\n &[\n\n &transform_bind_group_layout,\n\n &lighting_bind_group_layout,\n\n &pbr_factor_bind_group_layout,\n\n &pbr_texture_bind_group_layout,\n\n ],\n\n vertex_state_desc,\n\n &colour_states,\n\n depth_state,\n\n );\n\n\n\n Self {\n\n render_pipeline,\n\n transform_bind_group,\n\n lighting_bind_group,\n\n pbr_factor_bind_group,\n\n pbr_texture_bind_group,\n", "file_path": "src/graphics/material_pbr.rs", "rank": 78, "score": 9.65425175985389 }, { "content": " );\n\n material_base::update_uniform_buffer(\n\n device,\n\n &self.transform_uniform_buffer,\n\n encoder,\n\n transform_data,\n\n );\n\n\n\n let mut render_pass = encoder.begin_render_pass(rp_desc);\n\n\n\n render_pass.set_pipeline(&self.render_pipeline);\n\n render_pass.set_bind_group(0, &self.transform_bind_group, &[]);\n\n render_pass.set_bind_group(1, &self.lighting_bind_group, &[]);\n\n render_pass.set_bind_group(2, &self.pbr_factor_bind_group, &[]);\n\n render_pass.set_bind_group(3, &self.pbr_texture_bind_group, &[]);\n\n\n\n render_pass\n\n }\n\n}\n", "file_path": "src/graphics/material_pbr.rs", "rank": 79, "score": 9.248644572618511 }, { "content": " transform_uniform_buffer,\n\n lighting_uniform_buffer,\n\n }\n\n }\n\n}\n\n\n\nimpl MaterialBase for PbrMaterial {\n\n fn begin_render_pass<'a>(\n\n &'a self,\n\n device: &wgpu::Device,\n\n encoder: &'a mut wgpu::CommandEncoder,\n\n rp_desc: &'a wgpu::RenderPassDescriptor,\n\n transform_data: &TransformBindGroup,\n\n lighting_data: &LightingBindGroup,\n\n ) -> wgpu::RenderPass<'a> {\n\n material_base::update_uniform_buffer(\n\n device,\n\n 
&self.lighting_uniform_buffer,\n\n encoder,\n\n lighting_data,\n", "file_path": "src/graphics/material_pbr.rs", "rank": 80, "score": 8.922362664712256 }, { "content": " pub convolve_bind_group: wgpu::BindGroup,\n\n pub roughness_bind_group: wgpu::BindGroup,\n\n pub roughness_bind_group_buffer: wgpu::Buffer,\n\n}\n\n\n\npub struct HdrConvolveBrdfMaterial {\n\n pub render_pipeline: wgpu::RenderPipeline,\n\n pub transform_bind_group: wgpu::BindGroup,\n\n pub transform_bind_group_buffer: wgpu::Buffer,\n\n}\n\n\n\npub struct HdrTransformBindGroup {\n\n pub proj_matrix: nalgebra::Matrix4<f32>,\n\n pub view_matrix: nalgebra::Matrix4<f32>,\n\n}\n\n\n\npub struct HdrCvtBindGroup<'a> {\n\n pub equirectangular_texture: &'a Texture,\n\n}\n\n\n", "file_path": "src/graphics/material_hdr.rs", "rank": 81, "score": 8.918681801552495 }, { "content": " nalgebra::UnitQuaternion::from_euler_angles(\n\n std::f32::consts::FRAC_PI_2,\n\n 0.0,\n\n 0.0,\n\n ),\n\n 1.0,\n\n ),\n\n })\n\n .build();\n\n\n\n world\n\n .create_entity()\n\n .with(skybox_renderable)\n\n .with(graphics::Pose {\n\n model_matrix: nalgebra::Similarity3::identity(),\n\n })\n\n .build();\n\n\n\n world\n\n .create_entity()\n", "file_path": "src/main.rs", "rank": 82, "score": 8.689330691772778 }, { "content": "\n\nimpl MaterialBase for SkyboxMaterial {\n\n fn begin_render_pass<'a>(\n\n &'a self,\n\n device: &wgpu::Device,\n\n encoder: &'a mut wgpu::CommandEncoder,\n\n rp_desc: &'a wgpu::RenderPassDescriptor,\n\n transform_data: &TransformBindGroup,\n\n _lighting_data: &LightingBindGroup,\n\n ) -> wgpu::RenderPass<'a> {\n\n material_base::update_uniform_buffer(\n\n device,\n\n &self.transform_uniform_buffer,\n\n encoder,\n\n transform_data,\n\n );\n\n\n\n let mut render_pass = encoder.begin_render_pass(rp_desc);\n\n\n\n render_pass.set_pipeline(&self.render_pipeline);\n\n render_pass.set_bind_group(0, &self.transform_bind_group, &[]);\n\n render_pass.set_bind_group(1, &self.params_bind_group, &[]);\n\n\n\n render_pass\n\n 
}\n\n}\n", "file_path": "src/graphics/material_skybox.rs", "rank": 83, "score": 8.687219399024684 }, { "content": " view_matrix: Similarity3::identity().to_homogeneous(),\n\n };\n\n\n\n material_base::update_uniform_buffer(\n\n device,\n\n &brdf_mat.transform_bind_group_buffer,\n\n &mut encoder,\n\n &transforms,\n\n );\n\n\n\n let framebuffer =\n\n Texture::new_framebuffer_texture(device, 512, 512, wgpu::TextureFormat::Rg16Float);\n\n\n\n {\n\n let mut render_pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {\n\n color_attachments: &[wgpu::RenderPassColorAttachmentDescriptor {\n\n attachment: &framebuffer.view,\n\n resolve_target: None,\n\n load_op: wgpu::LoadOp::Clear,\n\n store_op: wgpu::StoreOp::Store,\n", "file_path": "src/graphics/skybox.rs", "rank": 84, "score": 8.529284934065284 }, { "content": "pub struct HdrConvolveDiffuseBindGroup<'a> {\n\n pub environment_texture: &'a Texture,\n\n}\n\n\n\npub struct HdrConvolveSpecularBindGroup<'a> {\n\n pub environment_texture: &'a Texture,\n\n pub roughness: f32,\n\n}\n\n\n\nimpl HdrCvtMaterial {\n\n pub fn new(device: &wgpu::Device, params: &HdrCvtBindGroup) -> Self {\n\n // Init bind groups.\n\n // Transform buffers.\n\n let (transform_bind_group_buffer, transform_bind_group, transform_bind_group_layout) =\n\n material_base::create_uniform_buffer::<TransformBindGroup>(\n\n device,\n\n wgpu::ShaderStage::VERTEX,\n\n );\n\n\n\n // Hdr conversion bind group.\n", "file_path": "src/graphics/material_hdr.rs", "rank": 85, "score": 8.503454127960692 }, { "content": "\n\n let (skybox, skybox_renderable) = graphics::Skybox::new(\n\n &render_state.device,\n\n &render_state.swap_chain_desc,\n\n &render_state.queue,\n\n hdr_data,\n\n );\n\n\n\n world\n\n .create_entity()\n\n .with(graphics::Renderable::new_from_glb(\n\n &render_state.device,\n\n &render_state.swap_chain_desc,\n\n &render_state.queue,\n\n helmet_data,\n\n &skybox,\n\n ))\n\n .with(graphics::Pose {\n\n model_matrix: 
nalgebra::Similarity3::from_parts(\n\n nalgebra::Translation3::identity(),\n", "file_path": "src/main.rs", "rank": 86, "score": 8.16784533436058 }, { "content": " clear_color: wgpu::Color::BLACK,\n\n }],\n\n depth_stencil_attachment: None,\n\n });\n\n\n\n render_pass.set_pipeline(&brdf_mat.render_pipeline);\n\n render_pass.set_bind_group(0, &brdf_mat.transform_bind_group, &[]);\n\n\n\n screen_space_quad_mesh.draw(&mut render_pass);\n\n }\n\n\n\n let cmd_buffer = encoder.finish();\n\n\n\n queue.submit(&[cmd_buffer]);\n\n\n\n Texture::new_texture_from_framebuffer(\n\n device,\n\n queue,\n\n 512,\n\n 512,\n\n &framebuffer,\n\n wgpu::TextureFormat::Rg16Float,\n\n wgpu::AddressMode::ClampToEdge,\n\n )\n\n }\n\n}\n", "file_path": "src/graphics/skybox.rs", "rank": 87, "score": 8.036237985876364 }, { "content": " unit_cube_mesh.draw(&mut render_pass);\n\n }\n\n\n\n cubemap_faces.push(cubemap_face);\n\n }\n\n }\n\n\n\n let cmd_buffer = encoder.finish();\n\n\n\n queue.submit(&[cmd_buffer]);\n\n\n\n Texture::new_cubemap_texture(\n\n device,\n\n queue,\n\n 128,\n\n 128,\n\n cubemap_faces.as_slice(),\n\n wgpu::TextureFormat::Rgba16Float,\n\n 5,\n\n )\n", "file_path": "src/graphics/skybox.rs", "rank": 88, "score": 7.9075433873417325 }, { "content": "\n\n unit_cube_mesh.draw(&mut render_pass);\n\n }\n\n\n\n cubemap_faces.push(cubemap_face);\n\n }\n\n\n\n let cmd_buffer = encoder.finish();\n\n\n\n queue.submit(&[cmd_buffer]);\n\n\n\n // Create environment cubemap.\n\n Texture::new_cubemap_texture(\n\n device,\n\n queue,\n\n 512,\n\n 512,\n\n cubemap_faces.as_slice(),\n\n wgpu::TextureFormat::Rgba16Float,\n\n 1,\n", "file_path": "src/graphics/skybox.rs", "rank": 89, "score": 7.718030627290934 }, { "content": " });\n\n\n\n let mut cubemap_faces = Vec::new();\n\n\n\n // Render every cube face.\n\n for i in 0..6 {\n\n let cubemap_face = Texture::new_framebuffer_texture(\n\n device,\n\n 512,\n\n 512,\n\n wgpu::TextureFormat::Rgba16Float,\n\n );\n\n\n\n let transforms = 
HdrTransformBindGroup {\n\n proj_matrix: proj.to_homogeneous(),\n\n view_matrix: views[i].to_homogeneous(),\n\n };\n\n\n\n material_base::update_uniform_buffer(\n\n device,\n", "file_path": "src/graphics/skybox.rs", "rank": 90, "score": 7.538173410313098 }, { "content": " ],\n\n vertex_state_desc,\n\n &colour_states,\n\n None,\n\n );\n\n\n\n Self {\n\n render_pipeline,\n\n transform_bind_group_buffer,\n\n transform_bind_group,\n\n convolve_bind_group,\n\n roughness_bind_group,\n\n roughness_bind_group_buffer,\n\n }\n\n }\n\n}\n\n\n\nimpl HdrConvolveBrdfMaterial {\n\n pub fn new(device: &wgpu::Device) -> Self {\n\n // Init bind groups.\n", "file_path": "src/graphics/material_hdr.rs", "rank": 91, "score": 7.500736026231522 }, { "content": "use super::*;\n\n\n", "file_path": "src/graphics/material_base.rs", "rank": 92, "score": 7.409099184893805 }, { "content": " binding: 0,\n\n visibility: wgpu::ShaderStage::FRAGMENT,\n\n ty: wgpu::BindingType::SampledTexture {\n\n dimension: wgpu::TextureViewDimension::Cube,\n\n component_type: wgpu::TextureComponentType::Float,\n\n multisampled: false,\n\n },\n\n },\n\n wgpu::BindGroupLayoutEntry {\n\n binding: 1,\n\n visibility: wgpu::ShaderStage::FRAGMENT,\n\n ty: wgpu::BindingType::Sampler { comparison: false },\n\n },\n\n wgpu::BindGroupLayoutEntry {\n\n binding: 2,\n\n visibility: wgpu::ShaderStage::FRAGMENT,\n\n ty: wgpu::BindingType::SampledTexture {\n\n dimension: wgpu::TextureViewDimension::Cube,\n\n component_type: wgpu::TextureComponentType::Float,\n\n multisampled: false,\n", "file_path": "src/graphics/material_pbr.rs", "rank": 93, "score": 7.246954344996881 }, { "content": "# Rust Physically Based Renderer\n\n\n\nA physically based renderer written in Rust using wgpu-rs.\n\n\n\nImplements precomputed BRDF, specular, and diffuse image-based lighting methods.\n\n\n\nSupports gLTF and GLB model imports.\n\n\n\n![Example of image-based reflections on helmet model](docs/example_helmet.gif)\n\n![Example of dynamic specular 
highlights](docs/example_lighting.gif)\n\n\n\n\n\n## Getting Started\n\n\n\n### Setup\n\n\n\nTo download all dependencies and run the example program, run:\n\n\n\n```\n\ncargo build --release\n\ncargo run --release\n\n```\n\n\n\n### Usage\n\n\n\nSetup code for an example render loop can be found in `main.rs`.\n\n\n\nModels can be imported from files as a byte stream:\n\n\n\n```\n\ngraphics::Renderable::new_from_glb(\n\n ...,\n\n include_bytes!(\"../res/DamagedHelmet.glb\"),\n\n ...,\n\n);\n\n```\n\n\n\nor from a path:\n\n\n\n```\n\ngraphics::Renderable::new_from_path(\n\n ...,\n\n std::path::Path::new(\"/path/to/model.gltf\"),\n\n ...,\n\n);\n\n```\n", "file_path": "README.md", "rank": 94, "score": 6.89090738004584 }, { "content": " ];\n\n\n\n for (prop_name, property) in pbr_properties.iter() {\n\n match property.texture_id {\n\n None => pbr_factor_values.push(property.factor.unwrap()),\n\n Some(texture_id) => {\n\n pbr_texture_binding_entries.push(wgpu::BindGroupLayoutEntry {\n\n binding: pbr_texture_binding_entries.len() as u32,\n\n visibility: wgpu::ShaderStage::FRAGMENT,\n\n ty: wgpu::BindingType::SampledTexture {\n\n dimension: wgpu::TextureViewDimension::D2,\n\n component_type: wgpu::TextureComponentType::Float,\n\n multisampled: false,\n\n },\n\n });\n\n pbr_texture_binding_entries.push(wgpu::BindGroupLayoutEntry {\n\n binding: pbr_texture_binding_entries.len() as u32,\n\n visibility: wgpu::ShaderStage::FRAGMENT,\n\n ty: wgpu::BindingType::Sampler { comparison: false },\n\n });\n", "file_path": "src/graphics/material_pbr.rs", "rank": 95, "score": 6.724811962085211 }, { "content": " },\n\n },\n\n wgpu::BindGroupLayoutEntry {\n\n binding: 3,\n\n visibility: wgpu::ShaderStage::FRAGMENT,\n\n ty: wgpu::BindingType::Sampler { comparison: false },\n\n },\n\n wgpu::BindGroupLayoutEntry {\n\n binding: 4,\n\n visibility: wgpu::ShaderStage::FRAGMENT,\n\n ty: wgpu::BindingType::SampledTexture {\n\n dimension: wgpu::TextureViewDimension::D2,\n\n component_type: 
wgpu::TextureComponentType::Float,\n\n multisampled: false,\n\n },\n\n },\n\n wgpu::BindGroupLayoutEntry {\n\n binding: 5,\n\n visibility: wgpu::ShaderStage::FRAGMENT,\n\n ty: wgpu::BindingType::Sampler { comparison: false },\n", "file_path": "src/graphics/material_pbr.rs", "rank": 96, "score": 6.502141458620855 }, { "content": " ty: wgpu::BindingType::SampledTexture {\n\n dimension: wgpu::TextureViewDimension::Cube,\n\n component_type: wgpu::TextureComponentType::Float,\n\n multisampled: false,\n\n },\n\n },\n\n wgpu::BindGroupLayoutEntry {\n\n binding: 1,\n\n visibility: wgpu::ShaderStage::FRAGMENT,\n\n ty: wgpu::BindingType::Sampler { comparison: false },\n\n },\n\n ],\n\n label: None,\n\n });\n\n\n\n let params_bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {\n\n layout: &params_bind_group_layout,\n\n bindings: &[\n\n wgpu::Binding {\n\n binding: 0,\n", "file_path": "src/graphics/material_skybox.rs", "rank": 97, "score": 6.459081518338084 }, { "content": " Self {\n\n render_pipeline,\n\n transform_bind_group,\n\n transform_bind_group_buffer,\n\n cvt_bind_group,\n\n }\n\n }\n\n}\n\n\n\nimpl HdrConvolveDiffuseMaterial {\n\n pub fn new(device: &wgpu::Device, params: &HdrConvolveDiffuseBindGroup) -> Self {\n\n // Init bind groups.\n\n // Transform buffers.\n\n let (transform_bind_group_buffer, transform_bind_group, transform_bind_group_layout) =\n\n material_base::create_uniform_buffer::<TransformBindGroup>(\n\n device,\n\n wgpu::ShaderStage::VERTEX,\n\n );\n\n\n\n // Hdr conversion bind group.\n", "file_path": "src/graphics/material_hdr.rs", "rank": 98, "score": 6.284841610084882 }, { "content": " bindings: &[wgpu::Binding {\n\n binding: 0,\n\n resource: wgpu::BindingResource::Buffer {\n\n buffer: &transform_uniform_buffer,\n\n // FYI: you can share a single buffer between bindings.\n\n range: 0..std::mem::size_of::<TransformBindGroup>() as wgpu::BufferAddress,\n\n },\n\n }],\n\n label: Some(\"transform_bind_group\"),\n\n });\n\n\n\n // Material 
bind group.\n\n let mut pbr_factor_values = Vec::new();\n\n let mut pbr_texture_binding_entries = Vec::new();\n\n let mut pbr_texture_bindings = Vec::new();\n\n let mut pbr_defines = \"\".to_owned();\n\n\n\n // Add constant texture bindings.\n\n pbr_texture_binding_entries.extend_from_slice(&[\n\n wgpu::BindGroupLayoutEntry {\n", "file_path": "src/graphics/material_pbr.rs", "rank": 99, "score": 6.236885890444393 } ]
Rust
2019/day12/src/main.rs
dcoles/advent-of-code
4d480934daad60fcdb2112ef66f4115d9cb83ac2
use std::path::Path; use std::{fs, ops}; use std::fmt; use std::collections::HashSet; fn main() { let mut s1 = Simulation::new(read_input("sample1.txt")); s1.simulate_n_steps(10); assert_eq!(179, s1.total_energy()); let mut s2 = Simulation::new(read_input("sample2.txt")); s2.simulate_n_steps(100); assert_eq!(1940, s2.total_energy()); let mut s3 = Simulation::new(read_input("sample2.txt")); s3.simulate_until_repeat(); assert_eq!(4686774924, s3.t); let mut sim1 = Simulation::new(read_input("input.txt")); sim1.simulate_n_steps(1000); println!("Part 1: Total energy of system after 1000 steps: {}", sim1.total_energy()); let mut sim2 = Simulation::new(read_input("input.txt")); sim2.simulate_until_repeat(); println!("Part 2: {}", sim2.t); } fn read_input<T: AsRef<Path>>(path: T) -> Vec<Moon> { let mut coords = Vec::new(); let contents = fs::read_to_string(path).expect("Failed to read input"); for line in contents.lines() { let line = line.trim_start_matches('<').trim_end_matches('>'); let mut split = line.split(','); let x = split.next().expect("Expected x field") .split('=').nth(1).expect("Expected value") .parse::<i32>().expect("Failed to parse value"); let y = split.next().expect("Expected y field") .split('=').nth(1).expect("Expected value") .parse::<i32>().expect("Failed to parse value"); let z = split.next().expect("Expected z field") .split('=').nth(1).expect("Expected value") .parse::<i32>().expect("Failed to parse value"); coords.push(Moon::new(Triple::new(x, y, z))); } coords } struct Simulation { t: usize, initial_state: Vec<Moon>, current_state: Vec<Moon>, } impl Simulation { fn new(moons: Vec<Moon>) -> Simulation { Simulation { t: 0, initial_state: moons.clone(), current_state: moons } } fn simulate_n_steps(&mut self, steps: usize) { while steps != self.t { self.simulate_one_tick(); } } fn simulate_until_repeat(&mut self) { let mut seen = HashSet::new(); let mut t_x = None; let mut t_y = None; let mut t_z = None; for _ in 0.. 
{ let pos_vel_x: Vec<_> = self.current_state.iter().map(|m| ('x', m.pos.x, m.vel.x)).collect(); if t_x.is_none() && !seen.insert(pos_vel_x.clone()) { t_x = Some(self.t); } let pos_vel_y: Vec<_> = self.current_state.iter().map(|m| ('y', m.pos.y, m.vel.y)).collect(); if t_y.is_none() && !seen.insert(pos_vel_y.clone()) { t_y = Some(self.t); } let pos_vel_z: Vec<_> = self.current_state.iter().map(|m| ('z', m.pos.z, m.vel.z)).collect(); if t_z.is_none() && !seen.insert(pos_vel_z.clone()) { t_z = Some(self.t); } if t_x.is_some() && t_y.is_some() && t_z.is_some() { break; } self.simulate_one_tick(); } let t_x = t_x.unwrap(); let t_y = t_y.unwrap(); let t_z = t_z.unwrap(); self.t = t_x * t_y * t_z / gcd(t_x, t_y) / gcd(t_y, t_z) / gcd(t_z, t_x) * gcd(t_x, gcd(t_y, t_z)); self.current_state = self.initial_state.clone(); } fn simulate_one_tick(&mut self) { for i in 0..self.current_state.len() { for j in i+1..self.current_state.len() { let force = self.current_state[i].force(&self.current_state[j]); self.current_state[i].vel += force; self.current_state[j].vel -= force; } } for moon in self.current_state.iter_mut() { moon.tick(); } self.t += 1; } fn total_energy(&self) -> i32 { let mut total_energy = 0; for moon in &self.current_state { total_energy += moon.energy(); } total_energy } } fn gcd(a: usize, b: usize) -> usize { if b == 0 { a } else { gcd(b, a % b) } } #[derive(Clone)] struct Moon { pos: Triple, vel: Triple, } impl Moon { fn new(position: Triple) -> Moon { Moon { pos: position, vel: Triple::new(0, 0, 0) } } fn force(&self, other: &Moon) -> Triple { let dx = if self.pos.x > other.pos.x { -1 } else if self.pos.x < other.pos.x { 1 } else { 0 }; let dy = if self.pos.y > other.pos.y { -1 } else if self.pos.y < other.pos.y { 1 } else { 0 }; let dz = if self.pos.z > other.pos.z { -1 } else if self.pos.z < other.pos.z { 1 } else { 0 }; Triple::new(dx, dy, dz) } fn tick(&mut self) { self.pos += self.vel; } fn energy(&self) -> i32 { let potential = self.pos.x.abs() + 
self.pos.y.abs() + self.pos.z.abs(); let kinetic = self.vel.x.abs() + self.vel.y.abs() + self.vel.z.abs(); potential * kinetic } } #[derive(Hash, Copy, Clone, Eq, PartialEq, Debug)] struct Triple { x: i32, y: i32, z: i32, } impl Triple { const fn new(x: i32, y: i32, z: i32) -> Triple { Triple { x, y, z } } } impl fmt::Display for Triple { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "<{:3}, {:3}, {:3}>", self.x, self.y, self.z) } } impl ops::Add for Triple { type Output = Triple; fn add(self, rhs: Self) -> Self::Output { Triple { x: self.x + rhs.x, y: self.y + rhs.y, z: self.z + rhs.z } } } impl ops::AddAssign for Triple { fn add_assign(&mut self, rhs: Self) { self.x += rhs.x; self.y += rhs.y; self.z += rhs.z; } } impl ops::SubAssign for Triple { fn sub_assign(&mut self, rhs: Self) { self.x -= rhs.x; self.y -= rhs.y; self.z -= rhs.z; } } impl ops::Neg for Triple { type Output = Triple; fn neg(self) -> Self::Output { Triple { x: -self.x, y: -self.y, z: -self.y } } }
use std::path::Path; use std::{fs, ops}; use std::fmt; use std::collections::HashSet; fn main() { let mut s1 = Simulation::new(read_input("sample1.txt")); s1.simulate_n_steps(10); assert_eq!(179, s1.total_energy()); let mut s2 = Simulation::new(read_input("sample2.txt")); s2.simulate_n_steps(100); assert_eq!(1940, s2.total_energy()); let mut s3 = Simulation::new(read_input("sample2.txt")); s3.simulate_until_repeat(); assert_eq!(4686774924, s3.t);
fn read_input<T: AsRef<Path>>(path: T) -> Vec<Moon> { let mut coords = Vec::new(); let contents = fs::read_to_string(path).expect("Failed to read input"); for line in contents.lines() { let line = line.trim_start_matches('<').trim_end_matches('>'); let mut split = line.split(','); let x = split.next().expect("Expected x field") .split('=').nth(1).expect("Expected value") .parse::<i32>().expect("Failed to parse value"); let y = split.next().expect("Expected y field") .split('=').nth(1).expect("Expected value") .parse::<i32>().expect("Failed to parse value"); let z = split.next().expect("Expected z field") .split('=').nth(1).expect("Expected value") .parse::<i32>().expect("Failed to parse value"); coords.push(Moon::new(Triple::new(x, y, z))); } coords } struct Simulation { t: usize, initial_state: Vec<Moon>, current_state: Vec<Moon>, } impl Simulation { fn new(moons: Vec<Moon>) -> Simulation { Simulation { t: 0, initial_state: moons.clone(), current_state: moons } } fn simulate_n_steps(&mut self, steps: usize) { while steps != self.t { self.simulate_one_tick(); } } fn simulate_until_repeat(&mut self) { let mut seen = HashSet::new(); let mut t_x = None; let mut t_y = None; let mut t_z = None; for _ in 0.. 
{ let pos_vel_x: Vec<_> = self.current_state.iter().map(|m| ('x', m.pos.x, m.vel.x)).collect(); if t_x.is_none() && !seen.insert(pos_vel_x.clone()) { t_x = Some(self.t); } let pos_vel_y: Vec<_> = self.current_state.iter().map(|m| ('y', m.pos.y, m.vel.y)).collect(); if t_y.is_none() && !seen.insert(pos_vel_y.clone()) { t_y = Some(self.t); } let pos_vel_z: Vec<_> = self.current_state.iter().map(|m| ('z', m.pos.z, m.vel.z)).collect(); if t_z.is_none() && !seen.insert(pos_vel_z.clone()) { t_z = Some(self.t); } if t_x.is_some() && t_y.is_some() && t_z.is_some() { break; } self.simulate_one_tick(); } let t_x = t_x.unwrap(); let t_y = t_y.unwrap(); let t_z = t_z.unwrap(); self.t = t_x * t_y * t_z / gcd(t_x, t_y) / gcd(t_y, t_z) / gcd(t_z, t_x) * gcd(t_x, gcd(t_y, t_z)); self.current_state = self.initial_state.clone(); } fn simulate_one_tick(&mut self) { for i in 0..self.current_state.len() { for j in i+1..self.current_state.len() { let force = self.current_state[i].force(&self.current_state[j]); self.current_state[i].vel += force; self.current_state[j].vel -= force; } } for moon in self.current_state.iter_mut() { moon.tick(); } self.t += 1; } fn total_energy(&self) -> i32 { let mut total_energy = 0; for moon in &self.current_state { total_energy += moon.energy(); } total_energy } } fn gcd(a: usize, b: usize) -> usize { if b == 0 { a } else { gcd(b, a % b) } } #[derive(Clone)] struct Moon { pos: Triple, vel: Triple, } impl Moon { fn new(position: Triple) -> Moon { Moon { pos: position, vel: Triple::new(0, 0, 0) } } fn force(&self, other: &Moon) -> Triple { let dx = if self.pos.x > other.pos.x { -1 } else if self.pos.x < other.pos.x { 1 } else { 0 }; let dy = if self.pos.y > other.pos.y { -1 } else if self.pos.y < other.pos.y { 1 } else { 0 }; let dz = if self.pos.z > other.pos.z { -1 } else if self.pos.z < other.pos.z { 1 } else { 0 }; Triple::new(dx, dy, dz) } fn tick(&mut self) { self.pos += self.vel; } fn energy(&self) -> i32 { let potential = self.pos.x.abs() + 
self.pos.y.abs() + self.pos.z.abs(); let kinetic = self.vel.x.abs() + self.vel.y.abs() + self.vel.z.abs(); potential * kinetic } } #[derive(Hash, Copy, Clone, Eq, PartialEq, Debug)] struct Triple { x: i32, y: i32, z: i32, } impl Triple { const fn new(x: i32, y: i32, z: i32) -> Triple { Triple { x, y, z } } } impl fmt::Display for Triple { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "<{:3}, {:3}, {:3}>", self.x, self.y, self.z) } } impl ops::Add for Triple { type Output = Triple; fn add(self, rhs: Self) -> Self::Output { Triple { x: self.x + rhs.x, y: self.y + rhs.y, z: self.z + rhs.z } } } impl ops::AddAssign for Triple { fn add_assign(&mut self, rhs: Self) { self.x += rhs.x; self.y += rhs.y; self.z += rhs.z; } } impl ops::SubAssign for Triple { fn sub_assign(&mut self, rhs: Self) { self.x -= rhs.x; self.y -= rhs.y; self.z -= rhs.z; } } impl ops::Neg for Triple { type Output = Triple; fn neg(self) -> Self::Output { Triple { x: -self.x, y: -self.y, z: -self.y } } }
let mut sim1 = Simulation::new(read_input("input.txt")); sim1.simulate_n_steps(1000); println!("Part 1: Total energy of system after 1000 steps: {}", sim1.total_energy()); let mut sim2 = Simulation::new(read_input("input.txt")); sim2.simulate_until_repeat(); println!("Part 2: {}", sim2.t); }
function_block-function_prefix_line
[ { "content": "fn reduce(pair: &mut Number) {\n\n loop {\n\n if explode(pair) {\n\n //println!(\"Explode: {:?}\", pair);\n\n continue;\n\n }\n\n\n\n if split(pair) {\n\n //println!(\"Split: {:?}\", pair);\n\n continue;\n\n }\n\n\n\n break;\n\n }\n\n}\n\n\n", "file_path": "2021/day18/src/main.rs", "rank": 0, "score": 195744.57976950082 }, { "content": "/// Step through program instructions until EOF or program would re-execute an instruction.\n\nfn run_until_loop(cpu: &mut CPU) {\n\n let mut seen = HashSet::new();\n\n while !cpu.is_eof() && !seen.contains(&cpu.pc) {\n\n seen.insert(cpu.pc);\n\n cpu.step().expect(\"Execution failed\");\n\n }\n\n}\n\n\n", "file_path": "2020/day08/src/main.rs", "rank": 1, "score": 193580.60534625285 }, { "content": "fn run_until_all_but_one_crashed(mut world: World) {\n\n println!(\"PART 2\");\n\n println!(\"Initial state\");\n\n world.print();\n\n while world.num_cart_crashed() < world.carts.len() - 1 {\n\n world.tick();\n\n }\n\n println!();\n\n\n\n println!(\"Final state (all but one cart crashed)\");\n\n world.print();\n\n for cart in &world.carts {\n\n println!(\"Cart{} at {},{}\", if cart.crashed { \" [crashed]\" } else { \"\" },\n\n cart.position.0, cart.position.1)\n\n }\n\n}\n\n\n", "file_path": "2018/day13/src/main.rs", "rank": 2, "score": 191492.56755975273 }, { "content": "fn attach_debugger(cpu: &mut IntcodeEmulator) {\n\n // Read from TTY, even if stdin is redirected\n\n let mut tty = match fs::File::open(\"/dev/tty\") {\n\n Err(err) => {\n\n eprintln!(\"ERROR: Could not open TTY: {}\", err);\n\n return;\n\n },\n\n Ok(file) => io::BufReader::new(file),\n\n };\n\n\n\n // Disable debug-tracing while running the debugger\n\n let last_debug = cpu.get_debug();\n\n cpu.set_debug(false);\n\n\n\n // Disassemble first instruction\n\n cpu.print_disassembled();\n\n\n\n let mut last_line = String::new();\n\n loop {\n\n eprint!(\"debug> \");\n", "file_path": "2019/intcode/src/main.rs", "rank": 3, "score": 191492.56755975273 }, { 
"content": "fn run_until_first_crash(mut world: World) {\n\n println!(\"PART 1\");\n\n println!(\"Initial state\");\n\n world.print();\n\n while world.num_cart_crashed() == 0 && world.t < 16 {\n\n world.tick();\n\n }\n\n println!();\n\n\n\n println!(\"Final state (first crash)\");\n\n world.print();\n\n for cart in &world.carts {\n\n println!(\"Cart{} at {},{}\", if cart.crashed { \" [crashed]\" } else { \"\" },\n\n cart.position.0, cart.position.1)\n\n }\n\n}\n\n\n", "file_path": "2018/day13/src/main.rs", "rank": 4, "score": 191492.56755975273 }, { "content": "fn run(program: &mut Vec<usize>) {\n\n for ip in (0..program.len()).step_by(4) {\n\n let (op, x1, x2, x3) = (program[ip], program[ip+1], program[ip+2], program[ip+3]);\n\n match op {\n\n OP_ADD => program[x3] = program[x1] + program[x2],\n\n OP_MUL => program[x3] = program[x1] * program[x2],\n\n OP_HALT => break,\n\n _ => panic!(\"Unknown opcode {}\", op),\n\n }\n\n }\n\n}\n\n\n", "file_path": "2019/day02/src/main.rs", "rank": 5, "score": 191010.7059294841 }, { "content": "fn explode(pair: &mut Number) -> bool {\n\n do_explode(pair, pair.0.len() - 1, 0)\n\n}\n\n\n", "file_path": "2021/day18/src/main.rs", "rank": 6, "score": 191010.7059294841 }, { "content": "fn redistribute(banks: &mut Vec<i32>) {\n\n let mut max_idx = 0;\n\n let mut max = banks[max_idx];\n\n for idx in 0..banks.len() {\n\n if banks[idx] > max {\n\n max_idx = idx;\n\n max = banks[idx];\n\n }\n\n }\n\n\n\n let mut blocks = banks[max_idx];\n\n banks[max_idx] = 0;\n\n\n\n let mut idx = (max_idx + 1) % banks.len();\n\n while blocks > 0 {\n\n banks[idx] += 1;\n\n blocks -= 1;\n\n idx = (idx + 1) % banks.len()\n\n }\n\n}\n", "file_path": "2017/day06/src/main.rs", "rank": 7, "score": 191010.7059294841 }, { "content": "fn split(pair: &mut Number) -> bool {\n\n do_split(pair, pair.0.len() - 1)\n\n}\n\n\n", "file_path": "2021/day18/src/main.rs", "rank": 8, "score": 191010.7059294841 }, { "content": "fn eval_<'a, I>(tokens: &mut I) -> u64\n\nwhere\n\n 
I: Iterator<Item=&'a Token>\n\n{\n\n let mut lhs = match *tokens.next().expect(\"Expected LHS\") {\n\n '(' => eval_(tokens),\n\n t if t.is_numeric() => t.to_digit(10).unwrap() as u64,\n\n x => panic!(\"Unexpected token: {}\", x),\n\n };\n\n\n\n while let Some(&op) = tokens.next() {\n\n let op = match op {\n\n '+' => |a, b| a + b,\n\n '*' => |a, b| a * b,\n\n ')' => return lhs,\n\n x => panic!(\"Unexpected token: {}\", x),\n\n };\n\n\n\n let rhs = match *tokens.next().expect(\"Expected RHS\") {\n\n '(' => eval_(tokens),\n\n t if t.is_numeric() => t.to_digit(10).unwrap() as u64,\n\n x => panic!(\"Unexpected token: {}\", x),\n\n };\n\n\n\n lhs = op(lhs, rhs)\n\n }\n\n\n\n lhs\n\n}\n\n\n", "file_path": "2020/day18/src/main.rs", "rank": 9, "score": 188531.98531639768 }, { "content": "fn dec(val: &mut i32, amount: i32) {\n\n *val -= amount;\n\n}\n", "file_path": "2017/day08/src/main.rs", "rank": 10, "score": 186523.9899700143 }, { "content": "fn inc(val: &mut i32, amount: i32) {\n\n *val += amount;\n\n}\n\n\n", "file_path": "2017/day08/src/main.rs", "rank": 11, "score": 186523.9899700143 }, { "content": "fn rotate(map: &mut HashMap<Pos, char>) {\n\n let width = map.keys().map(|&[x, _]| x).max().unwrap() + 1;\n\n let height = map.keys().map(|&[_, y]| y).max().unwrap() + 1;\n\n\n\n let mut new_map = HashMap::new();\n\n for y in 0..height {\n\n for x in 0..width {\n\n new_map.insert([x, y], map[&[y, height - 1 - x]]);\n\n }\n\n }\n\n\n\n *map = new_map;\n\n}\n\n\n", "file_path": "2020/day20/src/main.rs", "rank": 12, "score": 184507.96067125187 }, { "content": "fn flip(map: &mut HashMap<Pos, char>) {\n\n let width = map.keys().map(|&[x, _]| x).max().unwrap() + 1;\n\n let height = map.keys().map(|&[_, y]| y).max().unwrap() + 1;\n\n\n\n let mut new_map = HashMap::new();\n\n for y in 0..height {\n\n for x in 0..width {\n\n new_map.insert([x, y], map[&[width - 1 - x, y]]);\n\n }\n\n }\n\n\n\n *map = new_map;\n\n}\n\n\n", "file_path": "2020/day20/src/main.rs", "rank": 13, 
"score": 184507.96067125187 }, { "content": "fn do_split(pair: &mut Number, n: usize) -> bool {\n\n for i in 0..2 {\n\n match pair.0[n][i] {\n\n Element::Pair(p) => {\n\n if do_split(pair, p) {\n\n return true;\n\n }\n\n },\n\n Element::Value(ix) if pair.1[ix] >= 10 => {\n\n // Fix up indexes\n\n for m in 0..pair.0.len() {\n\n for i in 0..2 {\n\n match &mut pair.0[m][i] {\n\n Element::Pair(p) => {\n\n if *p > n {\n\n *p += 1;\n\n }\n\n },\n\n Element::Value(n) => {\n\n if *n > ix {\n", "file_path": "2021/day18/src/main.rs", "rank": 14, "score": 184279.9731066496 }, { "content": "fn eval_v<'a, I>(tokens: &mut Peekable<I>) -> u64\n\n where\n\n I: Iterator<Item=&'a Token>\n\n{\n\n match *tokens.next().expect(\"Expected VALUE\") {\n\n '(' => {\n\n let m = eval_m(tokens);\n\n assert_eq!(')', *tokens.next().expect(\"Missing closing )\"));\n\n m\n\n },\n\n t if t.is_numeric() => t.to_digit(10).unwrap() as u64,\n\n x => panic!(\"Unexpected token: {}\", x),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "2020/day18/src/main.rs", "rank": 15, "score": 184264.1642032395 }, { "content": "fn eval_a<'a, I>(tokens: &mut Peekable<I>) -> u64\n\n where\n\n I: Iterator<Item=&'a Token>\n\n{\n\n let mut lhs = eval_v(tokens);\n\n\n\n while let Some(&&op) = tokens.peek() {\n\n if op != '+' {\n\n return lhs;\n\n }\n\n\n\n tokens.next();\n\n lhs += eval_a(tokens);\n\n }\n\n\n\n lhs\n\n\n\n}\n\n\n", "file_path": "2020/day18/src/main.rs", "rank": 16, "score": 184264.1642032395 }, { "content": "fn eval_m<'a, I>(tokens: &mut Peekable<I>) -> u64\n\n where\n\n I: Iterator<Item=&'a Token>\n\n{\n\n let mut lhs = eval_a(tokens);\n\n\n\n while let Some(&&op) = tokens.peek() {\n\n if op != '*' {\n\n return lhs;\n\n }\n\n\n\n tokens.next();\n\n lhs *= eval_a(tokens);\n\n }\n\n\n\n lhs\n\n}\n\n\n", "file_path": "2020/day18/src/main.rs", "rank": 17, "score": 184264.1642032395 }, { "content": "fn rotate(mut coord: Vector, rotation: Rotation) -> Vector 
{\n\n for _ in 0..rotation[0] {\n\n coord = coord.rotate_x();\n\n }\n\n\n\n for _ in 0..rotation[1] {\n\n coord = coord.rotate_y();\n\n }\n\n\n\n for _ in 0..rotation[2] {\n\n coord = coord.rotate_z();\n\n }\n\n\n\n coord\n\n}\n\n\n", "file_path": "2021/day19/src/main.rs", "rank": 18, "score": 182263.94380788718 }, { "content": "fn parse(input: &mut impl Iterator<Item=u32>) -> Node {\n\n // Read header\n\n let n_children = input.next().expect(\"Unexpected EOF\");\n\n let n_metadata = input.next().expect(\"Unexpected EOF\");\n\n\n\n // For each child, parse it\n\n let mut children: Vec<Node> = Vec::new();\n\n for _ in 0..n_children {\n\n children.push(parse(input));\n\n }\n\n\n\n // Read metadata\n\n let mut metadata: Vec<u32> = Vec::new();\n\n for _ in 0..n_metadata {\n\n metadata.push(input.next().expect(\"Unexpected EOF\"));\n\n }\n\n\n\n // Return node\n\n Node { children, metadata }\n\n}\n\n\n", "file_path": "2018/day08/src/main.rs", "rank": 19, "score": 178212.41449925335 }, { "content": "fn do_explode(pair: &mut Number, n: usize, depth: usize) -> bool {\n\n for i in 0..2 {\n\n match pair.0[n][i] {\n\n Element::Pair(new_n) => {\n\n let new_depth = depth + 1;\n\n if new_depth == 4 {\n\n // Explode\n\n let [a, b] = pair.0[new_n];\n\n let ax = a.value().unwrap();\n\n let bx = b.value().unwrap();\n\n let a = pair.1[ax];\n\n pair.1[ax] = 0;\n\n let b = pair.1[bx];\n\n pair.0[n][i] = Element::Value(ax);\n\n\n\n if ax > 0 {\n\n pair.1[ax - 1] += a;\n\n }\n\n\n\n if bx + 1 < pair.1.len() {\n", "file_path": "2021/day18/src/main.rs", "rank": 20, "score": 176236.00236573102 }, { "content": "fn reverse_range(list: &mut Vec<usize>, start: usize, len: usize) {\n\n for n in 0..len/2 {\n\n list.swap((start + n) % SIZE, (start + len - 1 - n) % SIZE);\n\n }\n\n}\n\n\n", "file_path": "2017/day10/src/main.rs", "rank": 21, "score": 172532.23064420378 }, { "content": "fn main() {\n\n // Part 1\n\n assert!(valid1(111111));\n\n assert!(!valid1(223450));\n\n 
assert!(!valid1(123789));\n\n\n\n let count = INPUT.filter(|&n| valid1(n)).count();\n\n println!(\"Part 1: Number of passwords that meet the criteria: {}\", count);\n\n\n\n // Part 2\n\n assert!(valid2(112233));\n\n assert!(!valid2(123444));\n\n assert!(valid2(111122));\n\n\n\n let count = INPUT.filter(|&n| valid2(n)).count();\n\n println!(\"Part 2: Number of passwords that meet the criteria: {}\", count);\n\n}\n\n\n", "file_path": "2019/day04b/src/main.rs", "rank": 22, "score": 168903.72946026447 }, { "content": "fn main() {\n\n // Vector of (obj, orbiting_obj)\n\n let input = read_input(\"input.txt\");\n\n\n\n // Part 1\n\n // Maps object to the object it's orbiting (so we need to flip the ordering)\n\n let obj_orbits: HashMap<_, _> = input.into_iter()\n\n .map(|(obj, orbiting_obj)| (orbiting_obj, obj))\n\n .collect();\n\n\n\n let mut count = 0;\n\n for obj in obj_orbits.keys() {\n\n count += orbital_distances(&obj_orbits, obj).len();\n\n }\n\n\n\n println!(\"Part 1: Count of direct and indirect orbits {}\", count);\n\n\n\n // Part 2\n\n let santa_orbital_dist = orbital_distances(&obj_orbits, SANTA);\n\n let you_orbital_dist = orbital_distances(&obj_orbits, YOU);\n", "file_path": "2019/day06/src/main.rs", "rank": 23, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input(\"input.txt\").expect(\"Failed to read input\");\n\n println!(\"[Part 1] Exits in {} steps\", run1(&input));\n\n println!(\"[Part 2] Exits in {} steps\", run2(&input));\n\n}\n\n\n", "file_path": "2017/day05/src/main.rs", "rank": 24, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let passwords = read_input(\"input.txt\");\n\n\n\n // Part 1\n\n println!(\"Part 1: Number of valid passwords is {}\", passwords.iter().filter(|p| p.valid1()).count());\n\n\n\n // Part 2\n\n println!(\"Part 2: Number of valid passwords is {}\", passwords.iter().filter(|p| p.valid2()).count());\n\n}\n\n\n", "file_path": "2020/day02/src/main.rs", "rank": 25, "score": 
168903.72946026447 }, { "content": "fn main() {\n\n let input = Input::from_file(\"day16/input.txt\").expect(\"failed to read input\");\n\n\n\n // Part 1\n\n println!(\"Part 1: {}\", part1(&input));\n\n\n\n // Part 2\n\n println!(\"Part 2: {}\", part2(&input));\n\n}\n\n\n", "file_path": "2021/day16/src/main.rs", "rank": 26, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let mut map = read_input(\"input.txt\");\n\n for _ in 0..N_ITERATIONS {\n\n map.tick();\n\n }\n\n map.print();\n\n\n\n let mut count_at_rest = 0;\n\n let mut count_hypothetical = 0;\n\n for y in map.ymin..=map.ymax {\n\n for x in 0..WIDTH {\n\n let tile = map.cells[y][x];\n\n if tile == '~' {\n\n count_at_rest += 1;\n\n } else if tile == '|' {\n\n count_hypothetical += 1;\n\n }\n\n }\n\n }\n\n println!(\"Water can reach {} tiles\", count_at_rest + count_hypothetical);\n\n println!(\"There are {} water tiles at rest\", count_at_rest);\n\n\n\n}\n\n\n", "file_path": "2018/day17/src/main.rs", "rank": 27, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input(\"input.txt\");\n\n\n\n // Part 1\n\n assert_eq!(43210,\n\n run_pipeline(&[4, 3, 2, 1, 0],\n\n &Program::new(&[3, 15, 3, 16, 1002, 16, 10, 16, 1, 16, 15, 15, 4, 15, 99, 0, 0]),\n\n false));\n\n assert_eq!(54321,\n\n run_pipeline(&[0, 1, 2, 3, 4],\n\n &Program::new(&[3, 23, 3, 24, 1002, 24, 10, 24, 1002, 23, -1, 23, 101, 5, 23, 23, 1, 24, 23, 23, 4, 23, 99, 0, 0]),\n\n false));\n\n assert_eq!(65210,\n\n run_pipeline(&[1, 0, 4, 3, 2],\n\n &Program::new(&[3, 31, 3, 32, 1002, 32, 10, 32, 1001, 31, -2, 31, 1007, 31, 0, 33, 1002, 33, 7, 33, 1, 33, 31, 31, 1, 32, 31, 31, 4, 31, 99, 0, 0, 0]),\n\n false));\n\n\n\n\n\n let (max_thrust, phase) = find_max(&[0,1,2,3,4], &input, false);\n\n println!(\"Part 1: Max thrust is {} ({:?})\", max_thrust, phase);\n", "file_path": "2019/day07/src/main.rs", "rank": 28, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = 
read_input_from_file(\"day07/input.txt\").expect(\"failed to read input\");\n\n\n\n // Part 1\n\n println!(\"Part 1: {}\", part1(&input));\n\n\n\n // Part 2\n\n println!(\"Part 2: {}\", part2(&input));\n\n}\n\n\n", "file_path": "2021/day07/src/main.rs", "rank": 29, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_to_string(\"input.txt\")\n\n .expect(\"Failed reading input\");\n\n\n\n // Part 1\n\n let lengths: Vec<usize> = input\n\n .trim()\n\n .split(\",\")\n\n .map(|s| s.parse().expect(\"Failed parsing int\"))\n\n .collect();\n\n\n\n let mut list: Vec<_> = (0..SIZE).collect();\n\n let mut cur_pos = 0;\n\n let mut skip_size = 0;\n\n\n\n for &len in &lengths {\n\n reverse_range(&mut list, cur_pos, len);\n\n cur_pos += len + skip_size;\n\n skip_size += 1;\n\n }\n\n\n\n println!(\"Part 1: {}\", list[0] * list[1]);\n\n\n\n // Part 2\n\n println!(\"Part 2: {}\", hash(input.trim()));\n\n}\n\n\n", "file_path": "2017/day10/src/main.rs", "rank": 30, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input(\"input.txt\").expect(\"Failed to read input\");\n\n\n\n println!(\"[Part 1] Number of valid passphrases: {}\", input.iter().filter(|p| valid(&p)).count());\n\n println!(\"[Part 2] Number of valid passphrases: {}\", input.iter().filter(|p| no_anagrams(&p)).count());\n\n}\n\n\n", "file_path": "2017/day04/src/main.rs", "rank": 31, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input(\"input.txt\");\n\n for wire in &input {\n\n println!(\"{}\", wire.iter().map(|m| m.to_string()).collect::<Vec<_>>().join(\",\"));\n\n }\n\n\n\n // Part 1\n\n let mut grid: HashSet<Pos> = HashSet::new();\n\n let mut intersections: Vec<Pos> = Vec::new();\n\n for wire in &input {\n\n // Walk the wire's path\n\n let path = walk(&wire);\n\n\n\n // Check for any intersections\n\n for &pos in &path {\n\n if grid.contains(&pos) {\n\n intersections.push(pos);\n\n }\n\n }\n\n\n", "file_path": "2019/day03/src/main.rs", 
"rank": 32, "score": 168903.72946026447 }, { "content": "fn main() {\n\n // Part 1\n\n let map = Map::from_file(\"input1.txt\");\n\n map.draw();\n\n\n\n let (path, distance) = find_shortest_path(&map, &[ENTRANCE]);\n\n println!(\"Part 1: Shortest path that collects all the keys: {:?} (distance: {})\", path, distance);\n\n\n\n // Part 2\n\n let map = Map::from_file(\"input2.txt\");\n\n map.draw();\n\n\n\n let (path, distance) = find_shortest_path(&map, &[ENTRANCE1, ENTRANCE2, ENTRANCE3, ENTRANCE4]);\n\n println!(\"Part 2: Shortest path that collects all the keys: {:?} (distance: {})\", path, distance);\n\n}\n\n\n", "file_path": "2019/day18/src/main.rs", "rank": 33, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input_from_file(\"day11/input.txt\").expect(\"failed to read input\");\n\n\n\n // Part 1\n\n println!(\"{}\", part1(&input));\n\n\n\n // Part 2\n\n println!(\"{}\", part2(&input));\n\n}\n\n\n", "file_path": "2021/day11/src/main.rs", "rank": 34, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input(\"input.txt\");\n\n\n\n // Part 1\n\n assert_eq!(31, required_ore(&read_input(\"sample1.txt\"), 1));\n\n assert_eq!(165, required_ore(&read_input(\"sample2.txt\"), 1));\n\n assert_eq!(13312, required_ore(&read_input(\"sample3.txt\"), 1));\n\n assert_eq!(180697, required_ore(&read_input(\"sample4.txt\"), 1));\n\n assert_eq!(2210736, required_ore(&read_input(\"sample5.txt\"), 1));\n\n\n\n println!(\"Part 1: Required ore: {}\", required_ore(&input, 1));\n\n\n\n // Part 2\n\n assert_eq!(82892753, maximum_fuel(&read_input(\"sample3.txt\"), TRILLION));\n\n assert_eq!(5586022, maximum_fuel(&read_input(\"sample4.txt\"), TRILLION));\n\n assert_eq!(460664, maximum_fuel(&read_input(\"sample5.txt\"), TRILLION));\n\n\n\n println!(\"Part 2: Maximum fuel: {}\", maximum_fuel(&input, TRILLION));\n\n}\n\n\n", "file_path": "2019/day14/src/main.rs", "rank": 35, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let 
map = Map::from_file(\"input.txt\");\n\n println!(\"Initial state:\");\n\n map.draw(0);\n\n\n\n // Part 1\n\n let first_repeat = simulate(&map);\n\n println!(\"First repeated layout:\");\n\n first_repeat.draw(0);\n\n println!(\"Part 1: Biodiversity rating: {}\", biodiversity_rating(&first_repeat));\n\n\n\n // Part 2\n\n let minutes = 200;\n\n let after = simulate_for(&map, minutes);\n\n println!(\"After {} minutes:\", minutes);\n\n after.draw_all_levels();\n\n println!(\"Part 2: Number of bugs: {}\", after.total_number_of_bugs());\n\n}\n\n\n", "file_path": "2019/day24/src/main.rs", "rank": 36, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let program = emulator::Program::from_file(\"input.txt\").expect(\"Failed to read input\");\n\n\n\n // Part 1\n\n println!(\"Part 1\");\n\n println!(\"══════\");\n\n let map = run(&program, Pos::new(45, 75), (80, 90), false);\n\n println!(\"Number of panels painted at least once: {}\", map.painted.len());\n\n println!();\n\n\n\n // Part 2\n\n println!(\"Part 2\");\n\n println!(\"══════\");\n\n let map = run(&program, Pos::new(1, 1), (80, 8), true);\n\n map.draw();\n\n}\n\n\n", "file_path": "2019/day11/src/main.rs", "rank": 37, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = Input::from_file(\"day14/input.txt\").expect(\"failed to read input\");\n\n\n\n // Part 1\n\n println!(\"Part 1: {}\", part1(&input));\n\n\n\n // Part 2\n\n println!(\"Part 2: {}\", part2(&input));\n\n}\n\n\n", "file_path": "2021/day14/src/main.rs", "rank": 38, "score": 168903.72946026447 }, { "content": "fn main() {\n\n // The --turbo flag skips all animation delays\n\n let turbo = env::args().skip(1).any(|arg| &arg == \"--turbo\");\n\n\n\n // Part 1\n\n let program = Program::from_file(\"input.txt\").expect(\"Failed to read input\");\n\n\n\n let mut droid = Droid::new(&program, ORIGIN);\n\n let mut planner = Planner::new();\n\n\n\n print!(\"\\x1B[2J\"); // Clear screen\n\n print!(\"\\x1B[?25l\"); // Hide cursor\n\n 
print!(\"\\x1B[8;{};{}t\", HEIGHT, WIDTH); // Resize console\n\n\n\n // Part 1: Find the broken O₂ system\n\n let mut o2_system = None;\n\n while let Some(command) = planner.plan(droid.pos) {\n\n // Clear the droid from the map\n\n draw( planner.get_tile(droid.pos), droid.pos);\n\n\n", "file_path": "2019/day15/src/main.rs", "rank": 39, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = fs::read_to_string(\"input.txt\")\n\n .expect(\"Failed to read input\");\n\n println!(\"Checksum: {}\", checksum(&input));\n\n\n\n for line1 in input.lines() {\n\n for line2 in input.lines() {\n\n let common = common(line1, line2);\n\n if common.len() == line1.len() - 1 {\n\n println!(\"Close match! {}\", common)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "2018/day02/src/main.rs", "rank": 40, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = fs::read_to_string(\"input.txt\")\n\n .expect(\"Failed to read input\");\n\n let input = input.trim();\n\n\n\n let result = reduce(&input, None).len();\n\n println!(\"Number of units: {}\", result);\n\n assert_eq!(result, 10450);\n\n\n\n for c in (b'a'..= b'z').map(|c| c as char) {\n\n let result = reduce(&input, Some(c));\n\n println!(\"{}: {}\", c, result.len());\n\n }\n\n}\n\n\n", "file_path": "2018/day05/src/main.rs", "rank": 41, "score": 168903.72946026447 }, { "content": "fn main() {\n\n // Example\n\n assert_eq!(power((3, 5), 8), 4);\n\n assert_eq!(power((122, 79), 57), -5);\n\n assert_eq!(power((217, 196), 39), 0);\n\n assert_eq!(power((101, 153), 71), 4);\n\n\n\n let mut grid = [[0; MAX_SIZE]; MAX_SIZE];\n\n for (y, row) in (1..=300).zip(&mut grid[..]) {\n\n for (x, val) in (1..=300).zip(&mut row[..]) {\n\n *val = power((x, y), SERIAL_NUMBER);\n\n }\n\n }\n\n\n\n // Part 1\n\n\n\n let mut max_coord = (0, 0);\n\n let mut max_power = 0;\n\n for y in 1..=MAX_SIZE - 2 {\n\n for x in 1..=MAX_SIZE - 2 {\n", "file_path": "2018/day11/src/main.rs", "rank": 42, "score": 168903.72946026447 }, { 
"content": "fn main() {\n\n let input = read_input(\"input.txt\");\n\n\n\n // Testing\n\n println!(\"== Testing ==\");\n\n let stdout = run(&vec![3,0,4,0,99], vec![1]);\n\n println!(\"STDOUT: {:?}\", stdout);\n\n assert_eq!(vec![1], stdout);\n\n\n\n // Part 1\n\n println!(\"== Part 1 ==\");\n\n println!(\"STDOUT: {:?}\", run(&input, vec![1]));\n\n\n\n // Part 2\n\n println!(\"== Part 2 ==\");\n\n println!(\"STDOUT: {:?}\", run(&input, vec![5]));\n\n}\n\n\n", "file_path": "2019/day05/src/main.rs", "rank": 43, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input(\"input.txt\");\n\n let layers = split_layers(&input, 25, 6);\n\n let mut counts = count_digits(&layers);\n\n\n\n // Part 1\n\n counts.sort_by_key(|m| *m.get(&0).unwrap_or(&0));\n\n let layer = counts.first().expect(\"No first layer\");\n\n let checksum = layer[&1] * layer[&2];\n\n println!(\"Part 1: Number of 1 digits multiplied by number of 2 digits: {}\", checksum);\n\n\n\n // Part 2\n\n println!(\"Part 2:\");\n\n draw(&layers);\n\n}\n\n\n", "file_path": "2019/day08/src/main.rs", "rank": 44, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let values = parse_input();\n\n\n\n // Part 1\n\n let freq: i32 = values.iter().sum();\n\n println!(\"Summed frequency: {}Hz\", freq);\n\n\n\n // Part 2\n\n let mut freq = 0;\n\n let mut seen = collections::HashSet::new();\n\n for val in values.iter().cycle() {\n\n freq += val;\n\n if seen.contains(&freq) {\n\n break;\n\n };\n\n seen.insert(freq);\n\n }\n\n\n\n println!(\"First repeated frequency: {}Hz\", freq);\n\n}\n\n\n", "file_path": "2018/day01/src/main.rs", "rank": 45, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input(\"input.txt\");\n\n\n\n // Part 1\n\n assert_eq!(2, fuel1(12));\n\n assert_eq!(2, fuel1(14));\n\n assert_eq!(654, fuel1(1969));\n\n assert_eq!(33583, fuel1(100756));\n\n\n\n let total_fuel1: u32 = input.iter().copied().map(fuel1).sum();\n\n println!(\"Part 1: Sum of fuel 
requirements: {}\", total_fuel1);\n\n\n\n // Part 2\n\n assert_eq!(2, fuel(14));\n\n assert_eq!(966, fuel(1969));\n\n assert_eq!(50346, fuel(100756));\n\n\n\n let total_fuel2: u32 = input.iter().copied().map(fuel).sum();\n\n println!(\"Part 2: Sum of fuel requirements: {}\", total_fuel2);\n\n}\n\n\n", "file_path": "2019/day01/src/main.rs", "rank": 46, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input_from_file(\"day10/input.txt\").expect(\"failed to read input\");\n\n\n\n // Part 1\n\n println!(\"Part 1: {}\", part1(&input));\n\n\n\n // Part 2\n\n println!(\"Part 2: {}\", part2(&input));\n\n}\n\n\n", "file_path": "2021/day10/src/main.rs", "rank": 47, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let map = Map::new();\n\n println!(\"Risk level: {}\", map.risk_level(ORIGIN, TARGET));\n\n\n\n // Part 2\n\n plan(&map);\n\n}\n\n\n", "file_path": "2018/day22/src/main.rs", "rank": 48, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input();\n\n let root = parse(&mut input.into_iter());\n\n\n\n println!(\"Metadata sum: {}\", root.metadata_sum());\n\n println!(\"Value: {}\", root.value());\n\n}\n\n\n", "file_path": "2018/day08/src/main.rs", "rank": 49, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = run(\"input.txt\").expect(\"Failed to read input\");\n\n}\n\n\n", "file_path": "2017/day08/src/main.rs", "rank": 50, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let map = Map::from_file(\"day09/input.txt\").expect(\"failed to read input\");\n\n \n\n // Part 1\n\n println!(\"Part 1: {}\", part1(&map));\n\n\n\n // Part 2\n\n println!(\"Part 2: {}\", part2(&map));\n\n}\n\n\n", "file_path": "2021/day09/src/main.rs", "rank": 51, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let mut circle: Vec<u32> = Vec::new();\n\n circle.push(0);\n\n\n\n let mut score: HashMap<u32, u32> = HashMap::new();\n\n\n\n let mut idx: usize = 0;\n\n for n in 1..=TOP_MARBLE 
{\n\n let player = ((n - 1) % N_PLAYERS) + 1;\n\n if n % 23 == 0 {\n\n idx = (idx + circle.len() - 7) % circle.len();\n\n *score.entry(player).or_default() += n + circle.remove(idx);\n\n idx = idx % circle.len();\n\n } else {\n\n idx = (idx + 2) % circle.len();\n\n if idx == 0 {\n\n idx = circle.len();\n\n }\n\n circle.insert(idx, n);\n\n }\n", "file_path": "2018/day09/src/main.rs", "rank": 52, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = Input::from_file(\"day18/input.txt\").expect(\"failed to read input\");\n\n\n\n // Part 1\n\n println!(\"Part 1: {}\", part1(&input));\n\n\n\n // Part 2\n\n println!(\"Part 2: {}\", part2(&input));\n\n}\n\n\n", "file_path": "2021/day18/src/main.rs", "rank": 53, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input(\"input.txt\");\n\n\n\n // Part 1\n\n assert_eq!(3500, test(&vec![1,9,10,3,2,3,11,0,99,30,40,50]));\n\n\n\n let mut program = input.clone();\n\n program[1] = 12;\n\n program[2] = 2;\n\n run(&mut program);\n\n println!(\"Part 1: Position 0 = {}\", program[0]);\n\n\n\n // Part 2\n\n let target = 19690720;\n\n 'outer: for noun in 0..=99usize {\n\n for verb in 0..=99usize {\n\n let mut program = input.clone();\n\n program[1] = noun;\n\n program[2] = verb;\n\n run(&mut program);\n\n\n\n if program[0] == target {\n\n let answer = 100 * noun + verb;\n\n println!(\"Part 2: Inputs {}, {} give {} (answer: {})\", noun, verb, target, answer);\n\n break 'outer;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "2019/day02/src/main.rs", "rank": 54, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let program = Program::from_file(\"input.txt\").expect(\"Failed to read input\");\n\n\n\n // Part 1\n\n let view = get_view(&program);\n\n let map = Map::from_view(&view);\n\n let intersections = map.find_intersections();\n\n let calibration: usize = alignment_parameters(&intersections).into_iter().sum();\n\n\n\n println!(\"Part 1: Sum of alignment parameters: {}\", 
calibration);\n\n\n\n // Part 2\n\n println!(\"Part 2:\");\n\n let mut robot = Robot::new(&program);\n\n robot.set_active(true);\n\n robot.run();\n\n\n\n}\n\n\n", "file_path": "2019/day17/src/main.rs", "rank": 55, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input(\"input.txt\").expect(\"Failed to read input\");\n\n\n\n // Part 1\n\n let mut banks = input.clone();\n\n let mut seen = HashSet::new();\n\n seen.insert(banks.clone());\n\n for n in 1.. {\n\n redistribute(&mut banks);\n\n if seen.contains(&banks) {\n\n println!(\"[Part 1] Redistribution cycles {}\", n);\n\n break;\n\n }\n\n seen.insert(banks.clone());\n\n }\n\n\n\n // Part 2\n\n let seen_state = banks.clone();\n\n for n in 1.. {\n\n redistribute(&mut banks);\n\n if banks == seen_state {\n\n println!(\"[Part 2] Redistribution cycles {}\", n);\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "2017/day06/src/main.rs", "rank": 56, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let (signal, offset) = read_input(\"input.txt\");\n\n\n\n // Part 1\n\n let mut signal1 = signal.clone();\n\n for _ in 0..100 {\n\n signal1 = phase(&signal1);\n\n }\n\n println!(\"Part 1: After 100 phases, the first 8 digits are: {:?}\", &signal1[..8]);\n\n\n\n // Part 2\n\n let real_signal: Vec<i32> = signal.iter().copied().cycle().take(10_000 * signal.len()).collect();\n\n println!(\"Part 2: The first 8 digits of the final output list are: {:?}\", &fft(&real_signal, offset, 100)[..8]);\n\n}\n\n\n", "file_path": "2019/day16/src/main.rs", "rank": 57, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let claims = read_claims();\n\n let mut fabric = vec![vec![0u32; 1000]; 1000];\n\n\n\n let mut total_overlap = 0;\n\n for claim in claims.iter() {\n\n for y in claim.top..claim.top+claim.height {\n\n for x in claim.left..claim.left+claim.width {\n\n // Only count the first overlap\n\n if fabric[y][x] == 1 {\n\n total_overlap += 1;\n\n }\n\n fabric[y][x] += 1;\n\n }\n\n }\n\n }\n\n\n\n 
println!(\"Overlap: {}\", total_overlap);\n\n\n\n for claim in claims.iter() {\n", "file_path": "2018/day03/src/main.rs", "rank": 58, "score": 168903.72946026447 }, { "content": "fn main() {\n\n // Part 1\n\n assert_eq!((Pos::new(3, 4), 8), best_position(&Map::from_file(\"sample0.txt\").find_asteroids()));\n\n assert_eq!((Pos::new(5, 8), 33), best_position(&Map::from_file(\"sample1.txt\").find_asteroids()));\n\n assert_eq!((Pos::new(1, 2), 35), best_position(&Map::from_file(\"sample2.txt\").find_asteroids()));\n\n assert_eq!((Pos::new(6, 3), 41), best_position(&Map::from_file(\"sample3.txt\").find_asteroids()));\n\n assert_eq!((Pos::new(11, 13), 210), best_position(&Map::from_file(\"sample4.txt\").find_asteroids()));\n\n\n\n let map = Map::from_file(\"input.txt\");\n\n let asteroids = map.find_asteroids();\n\n let (pos, count) = best_position(&asteroids);\n\n\n\n println!(\"Part 1: Most visible {} from {}\", count, pos);\n\n\n\n // Part 2\n\n assert_eq!(Pos::new(8, 2),\n\n asteroids_zapped(Pos::new(11, 13), &Map::from_file(\"sample4.txt\").find_asteroids())[199]);\n\n\n\n let zapped = asteroids_zapped(pos, &asteroids);\n\n let asteroid_200 = zapped[199];\n\n\n\n println!(\"Part 2: Index 200 is {} (answer: {})\", asteroid_200, 100 * asteroid_200.x + asteroid_200.y);\n\n}\n\n\n", "file_path": "2019/day10/src/main.rs", "rank": 59, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = Map::from_file(\"day15/example1.txt\").expect(\"failed to read input\");\n\n\n\n // Part 1\n\n println!(\"Part 1: {}\", part1(&input));\n\n\n\n // Part 2\n\n println!(\"Part 2: {}\", part2(&input));\n\n}\n\n\n", "file_path": "2021/day15/src/main.rs", "rank": 60, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let program = Program::from_file(\"input.txt\").expect(\"Failed to read input\");\n\n\n\n let mut network = Network::new();\n\n for addr in 0..N_COMPUTERS {\n\n let network_queue = Rc::clone(&network.queue);\n\n network.add_computer(Computer::new(addr, 
&program, network_queue));\n\n }\n\n\n\n network.run();\n\n\n\n let first_nat_packet = network.first_nat_packet.expect(\"No first\");\n\n println!(\"Part 1: `Y` of first packet sent to address 255: {}\", first_nat_packet.payload[1]);\n\n let nat = network.nat.expect(\"No NAT packet\");\n\n println!(\"Part 2: First `Y` released by NAT twice in a row: {}\", nat.payload[1]);\n\n\n\n}\n\n\n", "file_path": "2019/day23/src/main.rs", "rank": 61, "score": 168903.72946026447 }, { "content": "fn main() {\n\n // Part 1\n\n assert!(valid1(111111));\n\n assert!(!valid1(223450));\n\n assert!(!valid1(123789));\n\n\n\n let count = INPUT.filter(|&n| valid1(n)).count();\n\n println!(\"Part 1: Number of passwords that meet the criteria: {}\", count);\n\n\n\n // Part 2\n\n assert!(valid2(112233));\n\n assert!(!valid2(123444));\n\n assert!(valid2(111122));\n\n\n\n let count = INPUT.filter(|&n| valid2(n)).count();\n\n println!(\"Part 2: Number of passwords that meet the criteria: {}\", count);\n\n}\n\n\n", "file_path": "2019/day04a/src/main.rs", "rank": 62, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let tower = read_input(\"input.txt\").expect(\"Failed to read input\");\n\n\n\n // Part 1\n\n let &bottom = tower.topological_sort().last().unwrap();\n\n println!(\"The bottom program's name is {}\", tower.node(bottom).unwrap());\n\n\n\n // Part 2\n\n for node_idx in tower.topological_sort() {\n\n let adjacent = tower.adjacent(node_idx).unwrap();\n\n if !adjacent.iter().all(|&n| total_weight(&tower, n) == total_weight(&tower, *adjacent.iter().next().unwrap())) {\n\n println!(\"The unbalanced disk is held by {}\", tower.node(node_idx).unwrap());\n\n for adj_idx in adjacent {\n\n let total_weight = total_weight(&tower, adj_idx);\n\n println!(\"- [{}] {}\", total_weight, tower.node(adj_idx).unwrap());\n\n }\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "2017/day07/src/main.rs", "rank": 63, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let map = 
read_input(\"input.txt\");\n\n\n\n println!(\"Part 1: Number of trees encountered is {}\", part1(&map));\n\n println!(\"Part 2: Product of all slopes is {}\", part2(&map));\n\n}\n\n\n", "file_path": "2020/day03/src/main.rs", "rank": 64, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let program = Program::from_file(\"input.txt\").expect(\"Failed to read input\");\n\n\n\n // Part 1\n\n let mut arcade = ArcadeCabinet::new();\n\n arcade.run(&program);\n\n println!(\"Part 1: Tiles on screen: {}\", arcade.state.borrow().n_blocks);\n\n\n\n // Part 2\n\n println!(\"Part 2:\");\n\n let mut arcade = ArcadeCabinet::new();\n\n arcade.freeplay(true);\n\n if env::args().any(|a| a.trim() == \"--turbo\") {\n\n arcade.turbo(true);\n\n } else {\n\n println!();\n\n println!(\" [ You may wish to run this with --turbo ]\");\n\n println!();\n\n thread::sleep(time::Duration::from_secs(4));\n\n }\n\n arcade.run(&program);\n\n\n\n // Be nice and reset the user's terminal\n\n print!(\"\\x1Bc\");\n\n println!(\"Final score: {}\", arcade.state.borrow().score);\n\n}\n\n\n", "file_path": "2019/day13/src/main.rs", "rank": 65, "score": 168903.72946026447 }, { "content": "fn main() {\n\n println!(\"Part 1: {}\", part1());\n\n println!(\"Part 2: {}\", part2());\n\n}\n\n\n", "file_path": "2017/day03/src/main.rs", "rank": 66, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input_from_file(\"day12/input.txt\").expect(\"failed to read input\");\n\n\n\n // Part 1\n\n println!(\"Part 1: {}\", part1(&input));\n\n \n\n // Part 2\n\n println!(\"Part 2: {}\", part2(&input));\n\n}\n\n\n", "file_path": "2021/day12/src/main.rs", "rank": 67, "score": 168903.72946026447 }, { "content": "fn main() {\n\n part1();\n\n println!();\n\n part2();\n\n}\n\n\n", "file_path": "2018/day15/src/main.rs", "rank": 68, "score": 168903.72946026447 }, { "content": "fn main() {\n\n // Part 1\n\n run(10, true);\n\n\n\n // Part 2\n\n run(1000000000 , false);\n\n}\n\n\n", "file_path": 
"2018/day18/src/main.rs", "rank": 69, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = parse_input(\"input.txt\");\n\n\n\n let mut constellations: Vec<Constellation> = Vec::new();\n\n for &point in &input {\n\n if constellations.iter().any(|c| c.contains(&point)) {\n\n continue;\n\n }\n\n\n\n let mut constellation = HashSet::new();\n\n constellation.insert(point);\n\n\n\n let mut edge = Vec::new();\n\n let mut seen = HashSet::new();\n\n edge.push(point);\n\n while ! edge.is_empty() {\n\n let point = edge.pop().unwrap();\n\n seen.insert(point);\n\n\n\n for adj in all_within_range(&input, point) {\n", "file_path": "2018/day25/src/main.rs", "rank": 70, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input(\"input.txt\");\n\n\n\n // Part 1\n\n input.run([0, 0, 0, 0, 0, 0]);\n\n\n\n // Part 2\n\n input.run([1, 0, 0, 0, 0, 0]);\n\n}\n\n\n", "file_path": "2018/day19/src/main.rs", "rank": 71, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let mut world = read_input();\n\n\n\n // Part 1\n\n println!(\"{:>3} [{:>5}] {:>width$}\", \"GEN\", \"SUM\", \"0\", width=OFFSET+1);\n\n while world.generation < GENERATIONS {\n\n world.print();\n\n world.tick();\n\n }\n\n world.print();\n\n\n\n // Part 2\n\n // Empirically we can see that by generation 129 that the plants have reached a steady-state\n\n // where the pattern shifts right by one each generation.\n\n //\n\n // We can use this to get the formula:\n\n // sum = (gen + 36) * 52\n\n //\n\n // Thus at gen 5,000,000,000 the sum of pots with plants is 2,600,000,001,872.\n\n}\n\n\n", "file_path": "2018/day12/src/main.rs", "rank": 72, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let program = Program::from_file(\"input.txt\").expect(\"Failed to read input\");\n\n\n\n // Part 1\n\n let mut pulled = 0;\n\n for y in 0..50 {\n\n for x in 0..50 {\n\n if scan(&program, x, y) == PULLED {\n\n pulled += 1;\n\n print!(\"#\");\n\n } else {\n\n print!(\".\");\n\n 
}\n\n }\n\n println!()\n\n }\n\n println!(\"Part 1: Number of points affected by tractor beam: {}\", pulled);\n\n\n\n // Part 2\n\n let (x0, y0) = fit(&program, WIDTH, HEIGHT);\n", "file_path": "2019/day19/src/main.rs", "rank": 73, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input(\"input.txt\");\n\n let mut map = Map::new();\n\n\n\n let mut stack = Vec::new();\n\n let mut pos = (WIDTH/2, HEIGHT/2);\n\n let mut n_max = 0;\n\n for c in input {\n\n let n = map.get(pos);\n\n match c {\n\n '^' => continue,\n\n 'N' => pos = (pos.0, pos.1 - 1),\n\n 'S' => pos = (pos.0, pos.1 + 1),\n\n 'E' => pos = (pos.0 + 1, pos.1),\n\n 'W' => pos = (pos.0 - 1, pos.1),\n\n '(' => {\n\n stack.push(pos);\n\n continue;\n\n },\n\n '|' => {\n", "file_path": "2018/day20/src/main.rs", "rank": 74, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input(\"input.txt\");\n\n\n\n // Part 1\n\n let mut executor = Executor::new();\n\n executor.run(&input, |ip, _reg| ip == 28); // Break at halt condition\n\n let key = executor.reg[5]; // Grab the expected value\n\n let mut executor = Executor::new();\n\n executor.reg[0] = key;\n\n executor.run(&input, |_,_| false);\n\n println!(\"The lowest non-negative integer value for register 0 that causes a halt is {}\", key);\n\n\n\n // Part 2\n\n let mut last = 0;\n\n let mut seen = HashSet::new();\n\n let mut executor = Executor::new();\n\n executor.run(&input, |ip, reg| {\n\n if ip == 28 {\n\n if seen.contains(&reg[5]) {\n\n return true;\n\n }\n\n seen.insert(reg[5]);\n\n last = reg[5];\n\n }\n\n false\n\n }); // Break at halt condition\n\n println!(\"The lowest non-negative integer value for register 0 that causes a halt after the most instructions is {}\", last);\n\n}\n\n\n", "file_path": "2018/day21/src/main.rs", "rank": 75, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let layers = read_input(\"input.txt\").expect(\"failed to read input\");\n\n\n\n // Part 1\n\n let severity: u32 
= layers.iter()\n\n .filter(|l| l.caught(0))\n\n .map(|l| l.severity())\n\n .sum();\n\n\n\n println!(\"Part 1: Trip severity is {}\", severity);\n\n\n\n // Part 2\n\n for delay in 1.. {\n\n if layers.iter().all(|l| !l.caught(delay)) {\n\n println!(\"Part 2: Delay {} psec\", delay);\n\n break;\n\n }\n\n }\n\n\n\n\n\n}\n\n\n", "file_path": "2017/day13/src/main.rs", "rank": 76, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let mut input = read_input();\n\n\n\n let mut t = 0;\n\n while input[0].distance(&input[1]) > 1 {\n\n for point in &mut input {\n\n point.tick();\n\n }\n\n t += 1;\n\n }\n\n\n\n println!(\"{:?}\", input);\n\n println!(\"After {} ticks:\", t);\n\n draw(&input);\n\n}\n\n\n", "file_path": "2018/day10/src/main.rs", "rank": 77, "score": 168903.72946026447 }, { "content": "fn main() {\n\n // Part 1\n\n println!(\"Part 1: {}\", part1(&TARGET_AREA));\n\n\n\n // Part 2\n\n println!(\"Part 2: {}\", part2(&TARGET_AREA));\n\n}\n\n\n", "file_path": "2021/day17/src/main.rs", "rank": 78, "score": 168903.72946026447 }, { "content": "fn main() {\n\n assert_eq!(part1(9), \"5158916779\");\n\n assert_eq!(part1(5), \"0124515891\");\n\n assert_eq!(part1(18), \"9251071085\");\n\n assert_eq!(part1(2018), \"5941429882\");\n\n println!(\"After {} recipes, the scores of the next ten would be {}.\", INPUT, part1(INPUT as usize));\n\n\n\n assert_eq!(part2(\"51589\"), 9);\n\n assert_eq!(part2(\"01245\"), 5);\n\n assert_eq!(part2(\"92510\"), 18);\n\n assert_eq!(part2(\"59414\"), 2018);\n\n println!(\"{} first appears after {} recipes.\", INPUT, part2(&INPUT.to_string()));\n\n}\n\n\n", "file_path": "2018/day14/src/main.rs", "rank": 79, "score": 168903.72946026447 }, { "content": "fn main() {\n\n part1();\n\n part2();\n\n}\n\n\n", "file_path": "2018/day24/src/main.rs", "rank": 80, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input_from_file(\"day13/input.txt\").expect(\"failed to read input\");\n\n\n\n // Part 1\n\n println!(\"Part 1: 
{}\", part1(&input));\n\n\n\n // Part 2\n\n println!(\"Part 2\");\n\n part2(&input);\n\n}\n\n\n", "file_path": "2021/day13/src/main.rs", "rank": 81, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = parse_input(\"input.txt\").unwrap();\n\n\n\n // Part 1\n\n let mut sum = 0;\n\n for idx in 0..input.len() {\n\n if input[idx] == input[(idx + 1) % input.len()] {\n\n sum += input[idx];\n\n }\n\n }\n\n println!(\"Part 1: Sum = {}\", sum);\n\n\n\n // Part 2\n\n let mut sum = 0;\n\n for idx in 0..input.len() {\n\n if input[idx] == input[(idx + input.len() / 2) % input.len()] {\n\n sum += input[idx];\n\n }\n\n }\n\n println!(\"Part 2: Sum = {}\", sum);\n\n}\n\n\n", "file_path": "2017/day01/src/main.rs", "rank": 82, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let s = read_to_string(\"input.txt\").expect(\"Failed to read string\");\n\n println!(\"{}\", score(&s));\n\n}\n\n\n", "file_path": "2017/day09/src/main.rs", "rank": 83, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let mut grid = Grid::new(WIDTH, HEIGHT);\n\n let input= read_input();\n\n\n\n // Part 1\n\n\n\n let mut infinite_points = HashSet::new();\n\n let mut point_area: HashMap<u8, u32> = HashMap::new();\n\n\n\n for y in 0..grid.height {\n\n for x in 0..grid.width {\n\n let c1 = Coord(x as i32, y as i32);\n\n let mut distance: Vec<(i32, u8)> = Vec::new();\n\n for (n, c2) in (1u8..).zip(&input) {\n\n distance.push((c1.distance(c2), n));\n\n }\n\n distance.sort();\n\n\n\n let n = distance[0].1;\n\n if distance[0].0 != distance[1].0 {\n", "file_path": "2018/day06/src/main.rs", "rank": 84, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let edges = read_input(\"input.txt\").expect(\"Failed to read input\");\n\n\n\n // Part 1\n\n let mut visited = HashSet::new();\n\n visited.insert(0);\n\n let mut edge = Vec::new();\n\n edge.push(0);\n\n\n\n // Do DFS\n\n while let Some(cur) = edge.pop() {\n\n for &adj in edges.get(&cur).unwrap_or(&HashSet::new()) {\n\n 
if !visited.contains(&adj) {\n\n edge.push(adj);\n\n visited.insert(adj);\n\n }\n\n }\n\n }\n\n\n\n println!(\"Number of programs reachable from PID 0: {}\", visited.len());\n", "file_path": "2017/day12/src/main.rs", "rank": 85, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = fs::read_to_string(\"input.txt\")\n\n .expect(\"Failed to read input\");\n\n\n\n let mut input: Vec<_> = input.lines().collect();\n\n input.sort();\n\n\n\n let guard_sleeping = parse_schedule(input);\n\n for (guard, sleeping) in guard_sleeping.iter() {\n\n println!(\"Guard {}: {:?}\", guard, &sleeping[..])\n\n }\n\n\n\n // Part 1\n\n let (&max_guard, max_value) = guard_sleeping.iter()\n\n .map(|(k, v)| (k, v.iter().sum::<u32>()))\n\n .max_by_key(|(_k, v)| *v)\n\n .unwrap();\n\n println!(\"Guard {} slept for {} minutes\", max_guard, max_value);\n\n\n\n let mut minutes_days: HashMap<u32, u32> = HashMap::new();\n", "file_path": "2018/day04/src/main.rs", "rank": 86, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let mut nanobots = read_input(\"input.txt\");\n\n nanobots.sort_by_key(|n| n.r);\n\n\n\n // Part 1\n\n let strongest = nanobots.last().unwrap();\n\n let in_range = nanobots.iter().filter(|n| strongest.in_range(n)).count();\n\n println!(\"Strongest is {:?} with {} in range\", strongest, in_range);\n\n\n\n // Part 2\n\n let n = i32::pow(2, 30);\n\n let cube = Cube::new([-n, -n, -n], u32::pow(2, 31));\n\n search(cube, &nanobots);\n\n}\n\n\n", "file_path": "2018/day23/src/main.rs", "rank": 87, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = parse_input(\"input.txt\").expect(\"Failed to read input\");\n\n\n\n let mut pos = ORIGIN;\n\n let mut max = pos;\n\n for direction in &input {\n\n pos += direction.cube_direction();\n\n if pos.distance(ORIGIN) > max.distance(ORIGIN) {\n\n max = pos;\n\n }\n\n }\n\n\n\n // Part 1\n\n println!(\"Distance: {:?}\", pos.distance(ORIGIN) / 2);\n\n\n\n // Part 2\n\n println!(\"Max Distance: {:?}\", 
max.distance(ORIGIN) / 2);\n\n}\n\n\n", "file_path": "2017/day11/src/main.rs", "rank": 88, "score": 168903.72946026447 }, { "content": "fn main() {\n\n part1();\n\n part2();\n\n}\n\n\n", "file_path": "2018/day16/src/main.rs", "rank": 89, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = Input::from_file(\"day19/input.txt\").expect(\"failed to read input\");\n\n let (rotations, offsets) = solve(&input);\n\n\n\n // Part 1\n\n println!(\"Part 1: {}\", part1(&input.scans, &rotations, &offsets));\n\n\n\n // Part 2\n\n println!(\"Part 2: {}\", part2(&offsets));\n\n}\n\n\n", "file_path": "2021/day19/src/main.rs", "rank": 90, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = parse_input(\"input.txt\").unwrap();\n\n\n\n // Part 1\n\n println!(\"Checksum: {}\", checksum(&input));\n\n\n\n // Part 2\n\n println!(\"Sum of evenly divisible values: {}\", sum_even_divisible(&input));\n\n}\n\n\n", "file_path": "2017/day02/src/main.rs", "rank": 91, "score": 168903.72946026447 }, { "content": "fn main() {\n\n // Part 1\n\n run_until_first_crash(World::from_file(\"input.txt\"));\n\n\n\n // Part 2\n\n run_until_all_but_one_crashed(World::from_file(\"input.txt\"));\n\n}\n\n\n", "file_path": "2018/day13/src/main.rs", "rank": 93, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input(\"input.txt\");\n\n\n\n println!(\"Part 1: {}\", part1(&input));\n\n println!(\"Part 2: {}\", part2(&input));\n\n}\n\n\n", "file_path": "2020/day01/src/main.rs", "rank": 94, "score": 168903.72946026447 }, { "content": "fn main() {\n\n // Part 1\n\n println!(\"Part 1: {}\", part1(PLAYER1, PLAYER2));\n\n\n\n // Part 2\n\n println!(\"Part 2: {}\", part2(PLAYER1, PLAYER2));\n\n}\n\n\n", "file_path": "2021/day21/src/main.rs", "rank": 95, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let techniques = read_input(\"input.txt\");\n\n\n\n // Part 1\n\n let pos = 2019;\n\n for c in 0..M_CARDS {\n\n if 
Shuffle::from_techniques(&techniques, N_CARDS).evaluate(c) == pos {\n\n println!(\"Part 1: Position of card {}: {}\", pos, c);\n\n break;\n\n }\n\n }\n\n\n\n // Part 2\n\n let pos = 2020;\n\n let card = Shuffle::from_techniques(&techniques, M_CARDS).repeat(M_SHUFFLES - 1).evaluate(pos);\n\n println!(\"Part 2: After shuffling {} cards {} times, the card at position {} is: {}\", M_CARDS, M_SHUFFLES, pos, card);\n\n}\n\n\n", "file_path": "2019/day22/src/main.rs", "rank": 96, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = Input::from_file(\"day20/input.txt\").expect(\"failed to read input\");\n\n assert_eq!(input.algorithm.len(), 512);\n\n\n\n // Part 1\n\n println!(\"Part 1: {}\", part1(&input));\n\n\n\n // Part 2\n\n println!(\"Part 2: {}\", part2(&input));\n\n}\n\n\n", "file_path": "2021/day20/src/main.rs", "rank": 97, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let input = read_input_from_file(\"day08/input.txt\").expect(\"failed to read input\");\n\n\n\n // Part 1\n\n println!(\"Part 1: {}\", part1(&input));\n\n\n\n // Part 2\n\n println!(\"Part 2: {}\", part2(&input));\n\n}\n\n\n", "file_path": "2021/day08/src/main.rs", "rank": 98, "score": 168903.72946026447 }, { "content": "fn main() {\n\n let graph = read_input();\n\n\n\n for (step, node) in &graph.nodes {\n\n println!(\"{} -> {:?}\", step, node);\n\n }\n\n\n\n let steps: String = graph.walk(1).into_iter().collect();\n\n println!(\"Steps with one worker: {}\", steps);\n\n\n\n let n = 5;\n\n let steps: String = graph.walk(n).into_iter().collect();\n\n println!(\"Steps with {} workers: {}\", n, steps);\n\n}\n\n\n", "file_path": "2018/day07/src/main.rs", "rank": 99, "score": 168903.72946026447 } ]
Rust
compiler-core/src/io.rs
TristanCacqueray/gleam
6b77fbe92b38b7d0a31fd824caaf66d0e1708be5
pub mod memory; use crate::error::{Error, FileIoAction, FileKind, Result}; use async_trait::async_trait; use debug_ignore::DebugIgnore; use flate2::read::GzDecoder; use std::{ fmt::Debug, fs::ReadDir, io, path::{Path, PathBuf}, process::ExitStatus, }; use tar::{Archive, Entry}; pub trait Utf8Writer: std::fmt::Write { fn str_write(&mut self, str: &str) -> Result<()> { let res = self.write_str(str); self.wrap_result(res) } fn wrap_result<T, E: std::error::Error>(&self, result: Result<T, E>) -> Result<()> { self.convert_err(result.map(|_| ())) } fn convert_err<T, E: std::error::Error>(&self, result: Result<T, E>) -> Result<T>; } impl Utf8Writer for String { fn convert_err<T, E: std::error::Error>(&self, result: Result<T, E>) -> Result<T> { result.map_err(|error| Error::FileIo { action: FileIoAction::WriteTo, kind: FileKind::File, path: PathBuf::from("<in memory>"), err: Some(error.to_string()), }) } } pub trait Writer: std::io::Write + Utf8Writer { fn write(&mut self, bytes: &[u8]) -> Result<(), Error> { let res = std::io::Write::write(self, bytes); self.wrap_result(res) } } #[derive(Debug, PartialEq, Clone)] pub struct OutputFile { pub text: String, pub path: PathBuf, } pub trait FileSystemReader { fn gleam_source_files(&self, dir: &Path) -> Box<dyn Iterator<Item = PathBuf>>; fn gleam_metadata_files(&self, dir: &Path) -> Box<dyn Iterator<Item = PathBuf>>; fn read_dir(&self, path: &Path) -> Result<ReadDir>; fn read(&self, path: &Path) -> Result<String, Error>; fn reader(&self, path: &Path) -> Result<WrappedReader, Error>; fn is_file(&self, path: &Path) -> bool; fn is_directory(&self, path: &Path) -> bool; } pub trait FileSystemIO: FileSystemWriter + FileSystemReader {} pub trait CommandExecutor { fn exec( &self, program: &str, args: &[String], env: &[(&str, String)], cwd: Option<&Path>, ) -> Result<ExitStatus, Error>; } pub trait FileSystemWriter { fn mkdir(&self, path: &Path) -> Result<(), Error>; fn writer(&self, path: &Path) -> Result<WrappedWriter, Error>; fn 
delete(&self, path: &Path) -> Result<(), Error>; fn copy(&self, from: &Path, to: &Path) -> Result<(), Error>; fn copy_dir(&self, from: &Path, to: &Path) -> Result<(), Error>; } #[derive(Debug)] pub struct WrappedReader { path: PathBuf, inner: DebugIgnore<Box<dyn std::io::Read>>, } impl WrappedReader { pub fn new(path: &Path, inner: Box<dyn std::io::Read>) -> Self { Self { path: path.to_path_buf(), inner: DebugIgnore(inner), } } fn read(&mut self, buffer: &mut [u8]) -> std::io::Result<usize> { self.inner.read(buffer) } } impl std::io::Read for WrappedReader { fn read(&mut self, buffer: &mut [u8]) -> std::io::Result<usize> { self.read(buffer) } } #[derive(Debug)] pub struct WrappedWriter { pub path: PathBuf, pub inner: DebugIgnore<Box<dyn std::io::Write>>, } impl Writer for WrappedWriter {} impl Utf8Writer for WrappedWriter { fn convert_err<T, E: std::error::Error>(&self, result: Result<T, E>) -> Result<T> { result.map_err(|error| Error::FileIo { action: FileIoAction::WriteTo, kind: FileKind::File, path: self.path.to_path_buf(), err: Some(error.to_string()), }) } } impl WrappedWriter { pub fn new(path: &Path, inner: Box<dyn std::io::Write>) -> Self { Self { path: path.to_path_buf(), inner: DebugIgnore(inner), } } pub fn write(&mut self, bytes: &[u8]) -> Result<(), Error> { let result = self.inner.write(bytes); self.wrap_result(result) } } impl<'a> std::io::Write for WrappedWriter { fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { self.inner.write(buf) } fn flush(&mut self) -> std::io::Result<()> { self.inner.flush() } } impl<'a> std::fmt::Write for WrappedWriter { fn write_str(&mut self, s: &str) -> std::fmt::Result { self.inner .write(s.as_bytes()) .map(|_| ()) .map_err(|_| std::fmt::Error) } } #[cfg(test)] pub mod test { use super::*; use std::{ cell::RefCell, io::Write, rc::Rc, sync::mpsc::{self, Receiver, Sender}, }; #[derive(Debug, Clone)] pub struct FilesChannel(Sender<(PathBuf, InMemoryFile)>); impl FilesChannel { pub fn new() -> (Self, 
Receiver<(PathBuf, InMemoryFile)>) { let (sender, receiver) = mpsc::channel(); (Self(sender), receiver) } pub fn recv_utf8_files( receiver: &Receiver<(PathBuf, InMemoryFile)>, ) -> Result<Vec<OutputFile>, ()> { receiver .try_iter() .map(|(path, file)| { Ok(OutputFile { path, text: String::from_utf8(file.into_contents()?).map_err(|_| ())?, }) }) .collect() } } impl FileSystemWriter for FilesChannel { fn writer<'a>(&self, path: &'a Path) -> Result<WrappedWriter, Error> { let file = InMemoryFile::new(); let _ = self.0.send((path.to_path_buf(), file.clone())); Ok(WrappedWriter::new(path, Box::new(file))) } fn delete(&self, _path: &Path) -> Result<(), Error> { panic!("FilesChannel does not support deletion") } fn copy(&self, _from: &Path, _to: &Path) -> Result<(), Error> { panic!("FilesChannel does not support copy") } fn mkdir(&self, _path: &Path) -> Result<(), Error> { panic!("FilesChannel does not support mkdir") } fn copy_dir(&self, _from: &Path, _to: &Path) -> Result<(), Error> { panic!("FilesChannel does not support copy_dir") } } impl FileSystemReader for FilesChannel { fn gleam_source_files(&self, _dir: &Path) -> Box<dyn Iterator<Item = PathBuf>> { unimplemented!() } fn gleam_metadata_files(&self, _dir: &Path) -> Box<dyn Iterator<Item = PathBuf>> { unimplemented!() } fn read(&self, _path: &Path) -> Result<String, Error> { unimplemented!() } fn is_file(&self, _path: &Path) -> bool { unimplemented!() } fn reader(&self, _path: &Path) -> Result<WrappedReader, Error> { unimplemented!() } fn is_directory(&self, _path: &Path) -> bool { unimplemented!() } fn read_dir(&self, _path: &Path) -> Result<ReadDir> { unimplemented!() } } impl FileSystemIO for FilesChannel {} #[derive(Debug, Default, Clone)] pub struct InMemoryFile { contents: Rc<RefCell<Vec<u8>>>, } impl InMemoryFile { pub fn new() -> Self { Default::default() } pub fn into_contents(self) -> Result<Vec<u8>, ()> { Rc::try_unwrap(self.contents) .map_err(|_| ()) .map(RefCell::into_inner) } } impl Write for 
InMemoryFile { fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { self.contents.borrow_mut().write(buf) } fn flush(&mut self) -> std::io::Result<()> { self.contents.borrow_mut().flush() } } impl std::fmt::Write for InMemoryFile { fn write_str(&mut self, s: &str) -> std::fmt::Result { self.contents .borrow_mut() .write(s.as_bytes()) .map(|_| ()) .map_err(|_| std::fmt::Error) } } impl Utf8Writer for InMemoryFile { fn convert_err<T, E: std::error::Error>(&self, result: Result<T, E>) -> Result<T> { result.map_err(|error| Error::FileIo { action: FileIoAction::WriteTo, kind: FileKind::File, path: PathBuf::from("<in memory test file>"), err: Some(error.to_string()), }) } } impl Writer for InMemoryFile {} } #[async_trait] pub trait HttpClient { async fn send(&self, request: http::Request<Vec<u8>>) -> Result<http::Response<Vec<u8>>, Error>; } pub trait TarUnpacker { fn io_result_entries<'a>( &self, archive: &'a mut Archive<WrappedReader>, ) -> io::Result<tar::Entries<'a, WrappedReader>>; fn entries<'a>( &self, archive: &'a mut Archive<WrappedReader>, ) -> Result<tar::Entries<'a, WrappedReader>> { tracing::debug!("iterating through tar archive"); self.io_result_entries(archive) .map_err(|e| Error::ExpandTar { error: e.to_string(), }) } fn io_result_unpack( &self, path: &Path, archive: Archive<GzDecoder<Entry<'_, WrappedReader>>>, ) -> io::Result<()>; fn unpack( &self, path: &Path, archive: Archive<GzDecoder<Entry<'_, WrappedReader>>>, ) -> Result<()> { tracing::debug!(path = ?path, "unpacking tar archive"); self.io_result_unpack(path, archive) .map_err(|e| Error::FileIo { action: FileIoAction::WriteTo, kind: FileKind::Directory, path: path.to_path_buf(), err: Some(e.to_string()), }) } }
pub mod memory; use crate::error::{Error, FileIoAction, FileKind, Result}; use async_trait::async_trait; use debug_ignore::DebugIgnore; use flate2::read::GzDecoder; use std::{ fmt::Debug, fs::ReadDir, io, path::{Path, PathBuf}, process::ExitStatus, }; use tar::{Archive, Entry}; pub trait Utf8Writer: std::fmt::Write { fn str_write(&mut self, str: &str) -> Result<()> { let res = self.write_str(str); self.wrap_result(res) } fn wrap_result<T, E: std::error::Error>(&self, result: Result<T, E>) -> Result<()> { self.convert_err(result.map(|_| ())) } fn convert_err<T, E: std::error::Error>(&self, result: Result<T, E>) -> Result<T>; } impl Utf8Writer for String { fn convert_err<T, E: std::error::Error>(&self, result: Result<T, E>) -> Result<T> { result.map_err(|error| Error::FileIo { action: FileIoAction::WriteTo, kind: FileKind::File, path: PathBuf::from("<in memory>"), err: Some(error.to_string()), }) } } pub trait Writer: std::io::Write + Utf8Writer { fn write(&mut self, bytes: &[u8]) -> Result<(), Error> { let res = std::io::Write::write(self, bytes); self.wrap_result(res) } } #[derive(Debug, PartialEq, Clone)] pub struct OutputFile { pub text: String, pub path: PathBuf, } pub trait FileSystemReader { fn gleam_source_files(&self, dir: &Path) -> Box<dyn Iterator<Item = PathBuf>>; fn gleam_metadata_files(&self, dir: &Path) -> Box<dyn Iterator<Item = PathBuf>>; fn read_dir(&self, path: &Path) -> Result<ReadDir>; fn read(&self, path: &Path) -> Result<String, Error>; fn reader(&self, path: &Path) -> Result<WrappedReader, Error>; fn is_file(&self, path: &Path) -> bool; fn is_directory(&self, path: &Path) -> bool; } pub trait FileSystemIO: FileSystemWriter + FileSystemReader {} pub trait CommandExecutor { fn exec( &self, program: &str, args: &[String], env: &[(&str, String)], cwd: Option<&Path>, ) -> Result<ExitStatus, Error>; } pub trait FileSystemWriter { fn mkdir(&self, path: &Path) -> Result<(), Error>; fn writer(&self, path: &Path) -> Result<WrappedWriter, Error>; fn 
delete(&self, path: &Path) -> Result<(), Error>; fn copy(&self, from: &Path, to: &Path) -> Result<(), Error>; fn copy_dir(&self, from: &Path, to: &Path) -> Result<(), Error>; } #[derive(Debug)] pub struct WrappedReader { path: PathBuf, inner: DebugIgnore<Box<dyn std::io::Read>>, } impl WrappedReader { pub fn new(path: &Path, inner: Box<dyn std::io::Read>) -> Self { Self { path: path.to_path_buf(), inner: DebugIgnore(inner), } } fn read(&mut self, buffer: &mut [u8]) -> std::io::Result<usize> { self.inner.read(buffer) } } impl std::io::Read for WrappedReader { fn read(&mut self, buffer: &mut [u8]) -> std::io::Result<usize> { self.read(buffer) } } #[derive(Debug)] pub struct WrappedWriter { pub path: PathBuf, pub inner: DebugIgnore<Box<dyn std::io::Write>>, } impl Writer for WrappedWriter {} impl Utf8Writer for WrappedWriter { fn convert_err<T, E: std::error::Error>(&self, result: Result<T, E>) -> Result<T> { result.map_err(|error| Error::FileIo { action: FileIoAction::WriteTo, kind: FileKind::File, path: self.path.to_path_buf(), err: Some(error.to_string()), }) } } impl WrappedWriter { pub fn new(path: &Path, inner: Box<dyn std::io::Write>) -> Self { Self { path: path.to_path_buf(), inner: DebugIgnore(inner), } } pub fn write(&mut self, bytes: &[u8]) -> Result<(), Error> { let result = self.inner.write(bytes); self.wrap_result(result) } } impl<'a> std::io::Write for WrappedWriter { fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { self.inner.write(buf) } fn flush(&mut self) -> std::io::Result<()> { self.inner.flush() } } impl<'a> std::fmt::Write for WrappedWriter { fn write_str(&mut self, s: &str) -> std::fmt::Result { self.inner .write(s.as_bytes()) .map(|_| ()) .map_err(|_| std::fmt::Error) } } #[cfg(test)] pub mod test { use super::*; use std::{ cell::RefCell, io::Write, rc::Rc, sync::mpsc::{self, Receiver, Sender}, }; #[derive(Debug, Clone)] pub struct FilesChannel(Sender<(PathBuf, InMemoryFile)>); impl FilesChannel { pub fn new() -> (Self, 
Receiver<(PathBuf, InMemoryFile)>) { let (sender, receiver) = mpsc::channel(); (Self(sender), receiver) } pub fn recv_utf8_files( receiver: &Receiver<(PathBuf, InMemoryFile)>, ) -> Result<Vec<OutputFile>, ()> { receiver .try_iter() .map(|(path, file)| { Ok(OutputFile { path, text: String::from_utf8(file.into_contents()?).map_err(|_| ())?, }) }) .collect() } } impl FileSystemWriter for FilesChannel { fn writer<'a>(&self, path: &'a Path) -> Result<WrappedWriter, Error> { let file = InMemoryFile::new(); let _ = self.0.send((path.to_path_buf(), file.clone())); Ok(WrappedWriter::new(path, Box::new(file))) } fn delete(&self, _path: &Path) -> Result<(), Error> { panic!("FilesChannel does not support deletion") } fn copy(&self, _from: &Path, _to: &Path) -> Result<(), Error> { panic!("FilesChannel does not support copy") }
MemoryFile { fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { self.contents.borrow_mut().write(buf) } fn flush(&mut self) -> std::io::Result<()> { self.contents.borrow_mut().flush() } } impl std::fmt::Write for InMemoryFile { fn write_str(&mut self, s: &str) -> std::fmt::Result { self.contents .borrow_mut() .write(s.as_bytes()) .map(|_| ()) .map_err(|_| std::fmt::Error) } } impl Utf8Writer for InMemoryFile { fn convert_err<T, E: std::error::Error>(&self, result: Result<T, E>) -> Result<T> { result.map_err(|error| Error::FileIo { action: FileIoAction::WriteTo, kind: FileKind::File, path: PathBuf::from("<in memory test file>"), err: Some(error.to_string()), }) } } impl Writer for InMemoryFile {} } #[async_trait] pub trait HttpClient { async fn send(&self, request: http::Request<Vec<u8>>) -> Result<http::Response<Vec<u8>>, Error>; } pub trait TarUnpacker { fn io_result_entries<'a>( &self, archive: &'a mut Archive<WrappedReader>, ) -> io::Result<tar::Entries<'a, WrappedReader>>; fn entries<'a>( &self, archive: &'a mut Archive<WrappedReader>, ) -> Result<tar::Entries<'a, WrappedReader>> { tracing::debug!("iterating through tar archive"); self.io_result_entries(archive) .map_err(|e| Error::ExpandTar { error: e.to_string(), }) } fn io_result_unpack( &self, path: &Path, archive: Archive<GzDecoder<Entry<'_, WrappedReader>>>, ) -> io::Result<()>; fn unpack( &self, path: &Path, archive: Archive<GzDecoder<Entry<'_, WrappedReader>>>, ) -> Result<()> { tracing::debug!(path = ?path, "unpacking tar archive"); self.io_result_unpack(path, archive) .map_err(|e| Error::FileIo { action: FileIoAction::WriteTo, kind: FileKind::Directory, path: path.to_path_buf(), err: Some(e.to_string()), }) } }
fn mkdir(&self, _path: &Path) -> Result<(), Error> { panic!("FilesChannel does not support mkdir") } fn copy_dir(&self, _from: &Path, _to: &Path) -> Result<(), Error> { panic!("FilesChannel does not support copy_dir") } } impl FileSystemReader for FilesChannel { fn gleam_source_files(&self, _dir: &Path) -> Box<dyn Iterator<Item = PathBuf>> { unimplemented!() } fn gleam_metadata_files(&self, _dir: &Path) -> Box<dyn Iterator<Item = PathBuf>> { unimplemented!() } fn read(&self, _path: &Path) -> Result<String, Error> { unimplemented!() } fn is_file(&self, _path: &Path) -> bool { unimplemented!() } fn reader(&self, _path: &Path) -> Result<WrappedReader, Error> { unimplemented!() } fn is_directory(&self, _path: &Path) -> bool { unimplemented!() } fn read_dir(&self, _path: &Path) -> Result<ReadDir> { unimplemented!() } } impl FileSystemIO for FilesChannel {} #[derive(Debug, Default, Clone)] pub struct InMemoryFile { contents: Rc<RefCell<Vec<u8>>>, } impl InMemoryFile { pub fn new() -> Self { Default::default() } pub fn into_contents(self) -> Result<Vec<u8>, ()> { Rc::try_unwrap(self.contents) .map_err(|_| ()) .map(RefCell::into_inner) } } impl Write for In
random
[ { "content": "pub fn erlang_files(dir: &Path) -> Result<impl Iterator<Item = PathBuf> + '_> {\n\n Ok(read_dir(dir)?\n\n .flat_map(Result::ok)\n\n .map(|e| e.path())\n\n .filter(|path| {\n\n let extension = path\n\n .extension()\n\n .unwrap_or_default()\n\n .to_str()\n\n .unwrap_or_default();\n\n extension == \"erl\" || extension == \"hrl\"\n\n }))\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 0, "score": 489310.9930223209 }, { "content": "pub fn write(path: &Path, text: &str) -> Result<(), Error> {\n\n write_bytes(path, text.as_bytes())\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 1, "score": 476767.1454914254 }, { "content": "fn std_io_error_kind_text(kind: &std::io::ErrorKind) -> String {\n\n use std::io::ErrorKind;\n\n match kind {\n\n ErrorKind::NotFound => \"Could not find the stdio stream\".to_string(),\n\n ErrorKind::PermissionDenied => \"Permission was denied\".to_string(),\n\n ErrorKind::ConnectionRefused => \"Connection was refused\".to_string(),\n\n ErrorKind::ConnectionReset => \"Connection was reset\".to_string(),\n\n ErrorKind::ConnectionAborted => \"Connection was aborted\".to_string(),\n\n ErrorKind::NotConnected => \"Was not connected\".to_string(),\n\n ErrorKind::AddrInUse => \"The stream was already in use\".to_string(),\n\n ErrorKind::AddrNotAvailable => \"The stream was not available\".to_string(),\n\n ErrorKind::BrokenPipe => \"The pipe was broken\".to_string(),\n\n ErrorKind::AlreadyExists => \"A handle to the stream already exists\".to_string(),\n\n ErrorKind::WouldBlock => {\n\n \"This operation would block when it was requested not to\".to_string()\n\n }\n\n ErrorKind::InvalidInput => \"Some parameter was invalid\".to_string(),\n\n ErrorKind::InvalidData => {\n\n \"The data was invalid. 
Check that the encoding is UTF-8\".to_string()\n\n }\n", "file_path": "compiler-core/src/error.rs", "rank": 2, "score": 466358.5049589857 }, { "content": "pub fn delete_dir(dir: &Path) -> Result<(), Error> {\n\n tracing::debug!(path=?dir, \"deleting_directory\");\n\n if dir.exists() {\n\n std::fs::remove_dir_all(&dir).map_err(|e| Error::FileIo {\n\n action: FileIoAction::Delete,\n\n kind: FileKind::Directory,\n\n path: dir.to_path_buf(),\n\n err: Some(e.to_string()),\n\n })?;\n\n } else {\n\n tracing::debug!(path=?dir, \"directory_did_not_exist_for_deletion\");\n\n }\n\n Ok(())\n\n}\n\n\n\n// pub fn delete(file: &PathBuf) -> Result<(), Error> {\n\n// tracing::debug!(\"Deleting file {:?}\", file);\n\n// if file.exists() {\n\n// std::fs::remove_file(&file).map_err(|e| Error::FileIO {\n\n// action: FileIOAction::Delete,\n\n// kind: FileKind::File,\n\n// path: file.clone(),\n\n// err: Some(e.to_string()),\n\n// })?;\n\n// } else {\n\n// tracing::debug!(\"Did not exist for deletion: {:?}\", file);\n\n// }\n\n// Ok(())\n\n// }\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 3, "score": 465395.40920749295 }, { "content": "pub fn write_bytes(path: &Path, bytes: &[u8]) -> Result<(), Error> {\n\n tracing::debug!(path=?path, \"deleting_directory\");\n\n\n\n let dir_path = path.parent().ok_or_else(|| Error::FileIo {\n\n action: FileIoAction::FindParent,\n\n kind: FileKind::Directory,\n\n path: path.to_path_buf(),\n\n err: None,\n\n })?;\n\n\n\n std::fs::create_dir_all(dir_path).map_err(|e| Error::FileIo {\n\n action: FileIoAction::Create,\n\n kind: FileKind::Directory,\n\n path: dir_path.to_path_buf(),\n\n err: Some(e.to_string()),\n\n })?;\n\n\n\n let mut f = File::create(&path).map_err(|e| Error::FileIo {\n\n action: FileIoAction::Create,\n\n kind: FileKind::File,\n", "file_path": "compiler-cli/src/fs.rs", "rank": 4, "score": 457916.6728504743 }, { "content": "pub fn read_dir(path: impl AsRef<Path> + Debug) -> Result<std::fs::ReadDir, Error> {\n\n 
tracing::debug!(path=?path,\"reading_directory\");\n\n\n\n std::fs::read_dir(&path).map_err(|e| Error::FileIo {\n\n action: FileIoAction::Read,\n\n kind: FileKind::Directory,\n\n path: PathBuf::from(path.as_ref()),\n\n err: Some(e.to_string()),\n\n })\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 5, "score": 457059.5238451341 }, { "content": "fn get_foldername(path: &str) -> Result<String, Error> {\n\n match path {\n\n \".\" => env::current_dir()\n\n .expect(\"invalid folder\")\n\n .file_name()\n\n .and_then(|x| x.to_str())\n\n .map(ToString::to_string)\n\n .ok_or(Error::UnableToFindProjectRoot {\n\n path: path.to_string(),\n\n }),\n\n _ => Path::new(path)\n\n .file_name()\n\n .and_then(|x| x.to_str())\n\n .map(ToString::to_string)\n\n .ok_or(Error::UnableToFindProjectRoot {\n\n path: path.to_string(),\n\n }),\n\n }\n\n}\n", "file_path": "compiler-cli/src/new.rs", "rank": 6, "score": 453220.20712676743 }, { "content": "pub fn copy_dir(path: impl AsRef<Path> + Debug, to: impl AsRef<Path> + Debug) -> Result<(), Error> {\n\n tracing::debug!(from=?path, to=?to, \"copying_directory\");\n\n\n\n // TODO: include the destination in the error message\n\n fs_extra::dir::copy(&path, &to, &fs_extra::dir::CopyOptions::new())\n\n .map_err(|err| Error::FileIo {\n\n action: FileIoAction::Copy,\n\n kind: FileKind::Directory,\n\n path: PathBuf::from(path.as_ref()),\n\n err: Some(err.to_string()),\n\n })\n\n .map(|_| ())\n\n}\n", "file_path": "compiler-cli/src/fs.rs", "rank": 7, "score": 450932.3812715303 }, { "content": "pub fn read(path: impl AsRef<Path> + Debug) -> Result<String, Error> {\n\n tracing::debug!(path=?path,\"reading_file\");\n\n\n\n std::fs::read_to_string(&path).map_err(|err| Error::FileIo {\n\n action: FileIoAction::Read,\n\n kind: FileKind::File,\n\n path: PathBuf::from(path.as_ref()),\n\n err: Some(err.to_string()),\n\n })\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 8, "score": 445199.0702082168 }, { "content": "pub fn 
gleam_files(dir: &Path) -> impl Iterator<Item = PathBuf> + '_ {\n\n walkdir::WalkDir::new(dir)\n\n .follow_links(true)\n\n .into_iter()\n\n .filter_map(Result::ok)\n\n .filter(|e| e.file_type().is_file())\n\n .map(|d| d.into_path())\n\n .filter(move |d| is_gleam_path(d, dir))\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 9, "score": 443280.5497452951 }, { "content": "pub fn pretty(writer: &mut impl Utf8Writer, src: &str) -> Result<()> {\n\n let (module, extra) = crate::parse::parse_module(src).map_err(|error| Error::Parse {\n\n path: PathBuf::from(\"<standard input>\"),\n\n src: src.to_string(),\n\n error,\n\n })?;\n\n let intermediate = Intermediate {\n\n comments: extra\n\n .comments\n\n .iter()\n\n .map(|span| Comment::from((span, src)))\n\n .collect(),\n\n doc_comments: extra\n\n .doc_comments\n\n .iter()\n\n .map(|span| Comment::from((span, src)))\n\n .collect(),\n\n empty_lines: &extra.empty_lines,\n\n module_comments: extra\n\n .module_comments\n\n .iter()\n\n .map(|span| Comment::from((span, src)))\n\n .collect(),\n\n };\n\n\n\n Formatter::with_comments(&intermediate)\n\n .module(&module)\n\n .pretty_print(80, writer)\n\n}\n\n\n", "file_path": "compiler-core/src/format.rs", "rank": 10, "score": 442993.4952464554 }, { "content": "pub fn writer(path: &Path) -> Result<WrappedWriter, Error> {\n\n tracing::debug!(path = ?path, \"opening_file_writer\");\n\n let dir_path = path.parent().ok_or_else(|| Error::FileIo {\n\n action: FileIoAction::FindParent,\n\n kind: FileKind::Directory,\n\n path: path.to_path_buf(),\n\n err: None,\n\n })?;\n\n std::fs::create_dir_all(dir_path).map_err(|e| Error::FileIo {\n\n action: FileIoAction::Create,\n\n kind: FileKind::Directory,\n\n path: dir_path.to_path_buf(),\n\n err: Some(e.to_string()),\n\n })?;\n\n let file = File::create(&path).map_err(|e| Error::FileIo {\n\n action: FileIoAction::Create,\n\n kind: FileKind::File,\n\n path: path.to_path_buf(),\n\n err: Some(e.to_string()),\n\n })?;\n\n 
Ok(WrappedWriter::new(path, Box::new(file)))\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 11, "score": 433130.0951741013 }, { "content": "pub fn gleam_files_excluding_gitignore(dir: &Path) -> impl Iterator<Item = PathBuf> + '_ {\n\n ignore::WalkBuilder::new(dir)\n\n .follow_links(true)\n\n .require_git(false)\n\n .build()\n\n .into_iter()\n\n .filter_map(Result::ok)\n\n .filter(|e| e.file_type().map(|t| t.is_file()).unwrap_or(false))\n\n .map(DirEntry::into_path)\n\n .filter(move |d| is_gleam_path(d, dir))\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 12, "score": 431506.67183675966 }, { "content": "pub fn buffered_reader<P: AsRef<Path> + Debug>(path: P) -> Result<impl BufRead, Error> {\n\n tracing::debug!(path=?path,\"opening_file_buffered_reader\");\n\n let reader = File::open(&path).map_err(|err| Error::FileIo {\n\n action: FileIoAction::Open,\n\n kind: FileKind::File,\n\n path: PathBuf::from(path.as_ref()),\n\n err: Some(err.to_string()),\n\n })?;\n\n Ok(BufReader::new(reader))\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 13, "score": 422330.9046558756 }, { "content": "pub fn copy(path: impl AsRef<Path> + Debug, to: impl AsRef<Path> + Debug) -> Result<(), Error> {\n\n tracing::debug!(from=?path, to=?to, \"copying_file\");\n\n\n\n // TODO: include the destination in the error message\n\n std::fs::copy(&path, &to)\n\n .map_err(|err| Error::FileIo {\n\n action: FileIoAction::Copy,\n\n kind: FileKind::File,\n\n path: PathBuf::from(path.as_ref()),\n\n err: Some(err.to_string()),\n\n })\n\n .map(|_| ())\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 14, "score": 420760.0625161164 }, { "content": "pub fn wrap(text: &str) -> String {\n\n textwrap::fill(text, std::cmp::min(75, textwrap::termwidth()))\n\n}\n", "file_path": "compiler-core/src/error.rs", "rank": 15, "score": 403860.49593077716 }, { "content": "fn contents_tarball(files: &[PathBuf]) -> Result<Vec<u8>, Error> {\n\n let mut contents_tar_gz = 
Vec::new();\n\n {\n\n let mut tarball =\n\n tar::Builder::new(GzEncoder::new(&mut contents_tar_gz, Compression::default()));\n\n for path in files {\n\n add_path_to_tar(&mut tarball, path)?;\n\n }\n\n tarball.finish().map_err(Error::finish_tar)?;\n\n }\n\n tracing::info!(\"Generated contents.tar.gz\");\n\n Ok(contents_tar_gz)\n\n}\n\n\n", "file_path": "compiler-cli/src/publish.rs", "rank": 16, "score": 402129.388751855 }, { "content": "pub fn ask(question: &str) -> Result<String, Error> {\n\n print!(\"{}: \", question);\n\n std::io::stdout().flush().expect(\"ask stdout flush\");\n\n let mut answer = String::new();\n\n let _ = std::io::stdin()\n\n .read_line(&mut answer)\n\n .map_err(|e| Error::StandardIo {\n\n action: StandardIoAction::Read,\n\n err: Some(e.kind()),\n\n })?;\n\n Ok(answer.trim().to_string())\n\n}\n\n\n", "file_path": "compiler-cli/src/cli.rs", "rank": 17, "score": 399728.1746298285 }, { "content": "fn write(path: PathBuf, contents: &str) -> Result<()> {\n\n let mut f = File::create(&path).map_err(|err| Error::FileIo {\n\n kind: FileKind::File,\n\n path: path.clone(),\n\n action: FileIoAction::Create,\n\n err: Some(err.to_string()),\n\n })?;\n\n\n\n f.write_all(contents.as_bytes())\n\n .map_err(|err| Error::FileIo {\n\n kind: FileKind::File,\n\n path,\n\n action: FileIoAction::WriteTo,\n\n err: Some(err.to_string()),\n\n })?;\n\n Ok(())\n\n}\n\n\n", "file_path": "compiler-cli/src/new.rs", "rank": 18, "score": 396655.4422124713 }, { "content": "pub fn mkdir(path: impl AsRef<Path> + Debug) -> Result<(), Error> {\n\n tracing::debug!(path=?path, \"creating_directory\");\n\n\n\n std::fs::create_dir_all(&path).map_err(|err| Error::FileIo {\n\n kind: FileKind::Directory,\n\n path: PathBuf::from(path.as_ref()),\n\n action: FileIoAction::Create,\n\n err: Some(err.to_string()),\n\n })\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 19, "score": 396615.21785712824 }, { "content": "pub fn ask_password(question: &str) -> Result<String, Error> {\n\n 
let prompt = format!(\"{} (will not be printed as you type): \", question);\n\n rpassword::read_password_from_tty(Some(&prompt))\n\n .map_err(|e| Error::StandardIo {\n\n action: StandardIoAction::Read,\n\n err: Some(e.kind()),\n\n })\n\n .map(|s| s.trim().to_string())\n\n}\n\n\n", "file_path": "compiler-cli/src/cli.rs", "rank": 20, "score": 393745.30273621203 }, { "content": "pub fn write_output_under(file: &OutputFile, base: &Path) -> Result<(), Error> {\n\n let OutputFile { path, text } = file;\n\n write(&base.join(path), text)\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 21, "score": 387447.0950230549 }, { "content": "pub fn reader(path: impl AsRef<Path> + Debug) -> Result<WrappedReader, Error> {\n\n tracing::debug!(path=?path,\"opening_file_reader\");\n\n\n\n let reader = File::open(&path).map_err(|err| Error::FileIo {\n\n action: FileIoAction::Open,\n\n kind: FileKind::File,\n\n path: PathBuf::from(path.as_ref()),\n\n err: Some(err.to_string()),\n\n })?;\n\n\n\n Ok(WrappedReader::new(path.as_ref(), Box::new(reader)))\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 23, "score": 378694.91642670246 }, { "content": "fn format_file(problem_files: &mut Vec<Unformatted>, path: PathBuf) -> Result<()> {\n\n let src = crate::fs::read(&path)?;\n\n let mut output = String::new();\n\n gleam_core::format::pretty(&mut output, &src)?;\n\n\n\n if src != output {\n\n problem_files.push(Unformatted {\n\n source: path.clone(),\n\n destination: path,\n\n input: src,\n\n output,\n\n });\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "compiler-cli/src/format.rs", "rank": 24, "score": 372041.32776156405 }, { "content": "pub fn test() -> PathBuf {\n\n PathBuf::from(\"test\")\n\n}\n\n\n", "file_path": "compiler-core/src/paths.rs", "rank": 25, "score": 364783.2763738093 }, { "content": "pub fn run(stdin: bool, check: bool, files: Vec<String>) -> Result<()> {\n\n if stdin {\n\n process_stdin(check)\n\n } else {\n\n process_files(check, files)\n\n }\n\n}\n\n\n", 
"file_path": "compiler-cli/src/format.rs", "rank": 26, "score": 361086.1906299279 }, { "content": "fn is_gleam_path(path: &Path, dir: impl AsRef<Path>) -> bool {\n\n use regex::Regex;\n\n lazy_static! {\n\n static ref RE: Regex = Regex::new(&format!(\n\n \"^({module}{slash})*{module}\\\\.gleam$\",\n\n module = \"[a-z][_a-z0-9]*\",\n\n slash = \"(/|\\\\\\\\)\",\n\n ))\n\n .expect(\"is_gleam_path() RE regex\");\n\n }\n\n\n\n RE.is_match(\n\n path.strip_prefix(dir)\n\n .expect(\"is_gleam_path(): strip_prefix\")\n\n .to_str()\n\n .expect(\"is_gleam_path(): to_str\"),\n\n )\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 27, "score": 359929.47246115404 }, { "content": "pub fn build_deps_package_test(package: &str) -> PathBuf {\n\n build_deps_package(package).join(\"test\")\n\n}\n\n\n", "file_path": "compiler-core/src/paths.rs", "rank": 28, "score": 359559.7609357516 }, { "content": "pub fn read_project_config(config_path: PathBuf) -> Result<PackageConfig, Error> {\n\n let toml = crate::fs::read(&config_path)?;\n\n toml::from_str(&toml).map_err(|e| Error::FileIo {\n\n action: FileIoAction::Parse,\n\n kind: FileKind::File,\n\n path: config_path,\n\n err: Some(e.to_string()),\n\n })\n\n}\n", "file_path": "compiler-cli/src/config.rs", "rank": 29, "score": 358973.9399244308 }, { "content": "pub fn download(new_package: Option<(&str, bool)>) -> Result<Manifest> {\n\n let span = tracing::info_span!(\"download_deps\");\n\n let _enter = span.enter();\n\n let mode = Mode::Dev;\n\n\n\n let http = HttpClient::boxed();\n\n let fs = ProjectIO::boxed();\n\n let downloader = hex::Downloader::new(fs, http, Untar::boxed());\n\n\n\n // Read the project config\n\n let mut config = crate::config::root_config()?;\n\n let project_name = config.name.clone();\n\n\n\n // Insert the new package to add, if it exists\n\n if let Some((package, dev)) = new_package {\n\n let version = hexpm::version::Range::new(\">= 0.0.0\".into());\n\n let _ = if dev {\n\n 
config.dev_dependencies.insert(package.to_string(), version)\n\n } else {\n\n config.dependencies.insert(package.to_string(), version)\n", "file_path": "compiler-cli/src/dependencies.rs", "rank": 30, "score": 354129.3333171647 }, { "content": "pub fn write_output(file: &OutputFile) -> Result<(), Error> {\n\n let OutputFile { path, text } = file;\n\n write(path, text)\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 31, "score": 348201.24759805104 }, { "content": "pub fn write_outputs_under(outputs: &[OutputFile], base: &Path) -> Result<(), Error> {\n\n for file in outputs {\n\n write_output_under(file, base)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 32, "score": 348023.37679400307 }, { "content": "pub fn remove(package: String, version: String) -> Result<(), Error> {\n\n let config = hexpm::Config::new();\n\n let http = HttpClient::new();\n\n\n\n // Start event loop so we can run async functions to call the Hex API\n\n let runtime = tokio::runtime::Runtime::new().expect(\"Unable to start Tokio async runtime\");\n\n\n\n // Get login creds from user\n\n let username = cli::ask(\"https://hex.pm username\")?;\n\n let password = cli::ask_password(\"https://hex.pm password\")?;\n\n\n\n // Authenticate with API\n\n let request = hexpm::create_api_key_request(&username, &password, TOKEN_NAME, &config);\n\n let response = runtime.block_on(http.send(request))?;\n\n let token = hexpm::create_api_key_response(response).map_err(Error::hex)?;\n\n\n\n // Remove docs from API\n\n let request =\n\n hexpm::remove_docs_request(&package, &version, &token, &config).map_err(Error::hex)?;\n\n let response = runtime.block_on(http.send(request))?;\n\n hexpm::remove_docs_response(response).map_err(Error::hex)?;\n\n\n\n // Done!\n\n println!(\n\n \"The docs for {} {} have been removed from HexDocs\",\n\n package, version\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "compiler-cli/src/docs.rs", "rank": 33, "score": 343677.63454457093 }, { 
"content": "fn import_cycle(buffer: &mut Buffer, modules: &[String]) {\n\n use std::io::Write;\n\n use termcolor::{Color, ColorSpec, WriteColor};\n\n\n\n writeln!(\n\n buffer,\n\n \"\n\n ┌─────┐\"\n\n )\n\n .unwrap();\n\n for (index, name) in modules.iter().enumerate() {\n\n if index != 0 {\n\n writeln!(buffer, \" │ ↓\").unwrap();\n\n }\n\n write!(buffer, \" │ \").unwrap();\n\n buffer\n\n .set_color(ColorSpec::new().set_fg(Some(Color::Cyan)))\n\n .unwrap();\n\n writeln!(buffer, \"{}\", name).unwrap();\n\n buffer.set_color(&ColorSpec::new()).unwrap();\n\n }\n\n writeln!(buffer, \" └─────┘\\n\").unwrap();\n\n}\n\n\n", "file_path": "compiler-core/src/error.rs", "rank": 34, "score": 337604.636120574 }, { "content": "pub fn read_and_analyse(root: impl AsRef<Path>) -> Result<(PackageConfig, Vec<Analysed>), Error> {\n\n let project_config = config::read_project_config(root.as_ref().join(\"gleam.toml\"))?;\n\n let mut srcs = vec![];\n\n let root = root.as_ref();\n\n let lib_dir = root.join(\"_build\").join(\"default\").join(\"lib\");\n\n let checkouts_dir = root.join(\"_checkouts\");\n\n let mix_lib_dir = root.join(\"deps\");\n\n\n\n for project_dir in [lib_dir, checkouts_dir, mix_lib_dir]\n\n .iter()\n\n .filter_map(|d| std::fs::read_dir(d).ok())\n\n .flat_map(|d| d.filter_map(Result::ok))\n\n .map(|d| d.path())\n\n .filter(|p| {\n\n p.file_name().and_then(|os_string| os_string.to_str()) != Some(&project_config.name)\n\n })\n\n {\n\n collect_source(project_dir.join(\"src\"), ModuleOrigin::Dependency, &mut srcs)?;\n\n }\n\n\n\n // Collect source code from top level project\n\n collect_source(root.join(\"src\"), ModuleOrigin::Src, &mut srcs)?;\n\n collect_source(root.join(\"test\"), ModuleOrigin::Test, &mut srcs)?;\n\n\n\n // Analyse source\n\n let analysed = gleam_core::project::analysed(srcs)?;\n\n\n\n Ok((project_config, analysed))\n\n}\n\n\n", "file_path": "compiler-cli/src/project.rs", "rank": 35, "score": 337040.5543739899 }, { "content": "pub fn build_docs(package: 
&str) -> PathBuf {\n\n build()\n\n .join(Mode::Dev.to_string())\n\n .join(\"docs\")\n\n .join(package)\n\n}\n\n\n", "file_path": "compiler-core/src/paths.rs", "rank": 36, "score": 332096.22935649427 }, { "content": "fn command_build(stderr: &termcolor::BufferWriter, warnings_as_errors: bool) -> Result<(), Error> {\n\n let mut buffer = stderr.buffer();\n\n let root = Path::new(\"./\");\n\n\n\n // Use new build tool if not in a rebar or mix project\n\n if !root.join(\"rebar.config\").exists() && !root.join(\"mix.exs\").exists() {\n\n return build::main().map(|_| ());\n\n }\n\n\n\n diagnostic::write_title(\n\n &mut buffer,\n\n \"Deprecated rebar3 build command\",\n\n Severity::Warning,\n\n );\n\n buffer\n\n .write_all(wrap(REBAR_DEPRECATION_NOTICE).as_bytes())\n\n .expect(\"rebar deprecation message\");\n\n buffer.flush().expect(\"flush\");\n\n stderr\n\n .print(&buffer)\n", "file_path": "compiler-cli/src/main.rs", "rank": 37, "score": 331839.6628752599 }, { "content": "pub fn command(to_add: String, dev: bool) -> Result<()> {\n\n // Insert the new package into the manifest and perform dependency\n\n // resolution to determine a suitable version\n\n let manifest = crate::dependencies::download(Some((&to_add, dev)))?;\n\n\n\n // Pull the selected version out of the new manifest so we know what it is\n\n let version = manifest\n\n .packages\n\n .into_iter()\n\n .find(|package| package.name == to_add)\n\n .expect(\"Added package not found in resolved manifest\")\n\n .version;\n\n\n\n tracing::info!(version=%version, \"new_package_version_resolved\");\n\n\n\n // Produce a version requirement locked to the major version.\n\n // i.e. 
if 1.2.3 is selected we want ~> 1.2\n\n let range = format!(\"~> {}.{}\", version.major, version.minor);\n\n\n\n // Read gleam.toml so we can insert the new dep into it\n", "file_path": "compiler-cli/src/add.rs", "rank": 38, "score": 331653.3276326371 }, { "content": "pub fn create_tar_archive(outputs: Vec<OutputFile>) -> Result<Vec<u8>, Error> {\n\n tracing::debug!(\"creating_tar_archive\");\n\n\n\n let encoder = flate2::write::GzEncoder::new(vec![], flate2::Compression::default());\n\n let mut builder = tar::Builder::new(encoder);\n\n\n\n for file in outputs {\n\n let mut header = tar::Header::new_gnu();\n\n header.set_path(&file.path).map_err(|e| Error::AddTar {\n\n path: file.path.clone(),\n\n err: e.to_string(),\n\n })?;\n\n header.set_size(file.text.as_bytes().len() as u64);\n\n header.set_cksum();\n\n builder\n\n .append(&header, file.text.as_bytes())\n\n .map_err(|e| Error::AddTar {\n\n path: file.path.clone(),\n\n err: e.to_string(),\n\n })?;\n\n }\n\n\n\n builder\n\n .into_inner()\n\n .map_err(|e| Error::TarFinish(e.to_string()))?\n\n .finish()\n\n .map_err(|e| Error::Gzip(e.to_string()))\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 39, "score": 331297.5435126182 }, { "content": "pub fn write_title(buffer: &mut Buffer, title: &str, severity: Severity) {\n\n use std::io::Write;\n\n use termcolor::{Color, ColorSpec, WriteColor};\n\n let (kind, colour) = match severity {\n\n Severity::Bug => (\"bug\", Color::Red),\n\n Severity::Error => (\"error\", Color::Red),\n\n Severity::Warning => (\"warning\", Color::Yellow),\n\n Severity::Note => (\"note\", Color::Blue),\n\n Severity::Help => (\"help\", Color::Blue),\n\n };\n\n buffer\n\n .set_color(ColorSpec::new().set_bold(true).set_fg(Some(colour)))\n\n .expect(\"write_title_color1\");\n\n write!(buffer, \"{}\", kind).expect(\"write_title_kind\");\n\n buffer\n\n .set_color(ColorSpec::new().set_bold(true))\n\n .expect(\"write_title_color2\");\n\n write!(buffer, \": {}\\n\\n\", 
title).expect(\"write_title_title\");\n\n buffer\n\n .set_color(&ColorSpec::new())\n\n .expect(\"write_title_reset\");\n\n}\n\n\n", "file_path": "compiler-core/src/diagnostic.rs", "rank": 40, "score": 329190.84997758176 }, { "content": "pub fn command(arguments: &[String], which: Which) -> Result<(), Error> {\n\n let config = crate::config::root_config()?;\n\n\n\n // Determine which module to run\n\n let module = match which {\n\n Which::Src => config.name,\n\n Which::Test => format!(\"{}_test\", &config.name),\n\n };\n\n\n\n // Build project so we have bytecode to run\n\n let _ = crate::build::main()?;\n\n\n\n // Don't exit on ctrl+c as it is used by child erlang shell\n\n ctrlc::set_handler(move || {}).expect(\"Error setting Ctrl-C handler\");\n\n\n\n // Prepare the Erlang shell command\n\n let mut command = Command::new(\"erl\");\n\n\n\n // Specify locations of .beam files\n\n let packages = paths::build_packages(Mode::Dev, Target::Erlang);\n", "file_path": "compiler-cli/src/run.rs", "rank": 41, "score": 328558.37130909506 }, { "content": "pub fn build_deps_package(package: &str) -> PathBuf {\n\n packages().join(package)\n\n}\n\n\n", "file_path": "compiler-core/src/paths.rs", "rank": 42, "score": 327229.0726510469 }, { "content": "pub fn package_cache_tarball(package_name: &str, version: &str) -> PathBuf {\n\n packages_cache().join(format!(\"{}-{}.tar\", package_name, version))\n\n}\n\n\n", "file_path": "compiler-core/src/paths.rs", "rank": 44, "score": 325241.23478871485 }, { "content": "fn validate_name(name: &str) -> Result<(), Error> {\n\n if erlang::is_erlang_reserved_word(name) {\n\n Err(Error::InvalidProjectName {\n\n name: name.to_string(),\n\n reason: InvalidProjectNameReason::ErlangReservedWord,\n\n })\n\n } else if erlang::is_erlang_standard_library_module(name) {\n\n Err(Error::InvalidProjectName {\n\n name: name.to_string(),\n\n reason: InvalidProjectNameReason::ErlangStandardLibraryModule,\n\n })\n\n } else if 
parse::lexer::str_to_keyword(name).is_some() {\n\n Err(Error::InvalidProjectName {\n\n name: name.to_string(),\n\n reason: InvalidProjectNameReason::GleamReservedWord,\n\n })\n\n } else if name == \"gleam\" {\n\n Err(Error::InvalidProjectName {\n\n name: name.to_string(),\n\n reason: InvalidProjectNameReason::GleamReservedModule,\n", "file_path": "compiler-cli/src/new.rs", "rank": 45, "score": 324989.2685947643 }, { "content": "pub fn build_deps_package_config(package: &str) -> PathBuf {\n\n build_deps_package(package).join(\"gleam.toml\")\n\n}\n\n\n", "file_path": "compiler-core/src/paths.rs", "rank": 46, "score": 322580.3263352283 }, { "content": "pub fn build_deps_package_src(package: &str) -> PathBuf {\n\n build_deps_package(package).join(\"src\")\n\n}\n\n\n", "file_path": "compiler-core/src/paths.rs", "rank": 47, "score": 322580.3263352283 }, { "content": "pub fn write_project(buffer: &mut Buffer, d: ProjectErrorDiagnostic) {\n\n use std::io::Write;\n\n write_title(buffer, &d.title, Severity::Error);\n\n writeln!(buffer, \"{}\", d.label).expect(\"write_project\");\n\n}\n", "file_path": "compiler-core/src/diagnostic.rs", "rank": 48, "score": 320076.10374485166 }, { "content": "fn validate_root_folder(name: &str) -> Result<(), Error> {\n\n if Path::new(name).exists() {\n\n Err(Error::ProjectRootAlreadyExist {\n\n path: name.to_string(),\n\n })\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "compiler-cli/src/new.rs", "rank": 49, "score": 319743.6250317104 }, { "content": "pub fn change(x: String) -> String {\n\n \"\"\n\n}\n\n\n", "file_path": "compiler-core/src/type_/tests/errors.rs", "rank": 50, "score": 314279.51575116033 }, { "content": "fn process_files(check: bool, files: Vec<String>) -> Result<()> {\n\n if check {\n\n check_files(files)\n\n } else {\n\n format_files(files)\n\n }\n\n}\n\n\n", "file_path": "compiler-cli/src/format.rs", "rank": 51, "score": 314025.4155884038 }, { "content": "fn err<A>(error: ErrorType, location: SrcSpan) -> Result<A, 
Error> {\n\n Err(Error { location, error })\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Error {\n\n pub location: SrcSpan,\n\n pub error: ErrorType,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum ErrorType {\n\n ConflictingEndiannessOptions { existing_endianness: String },\n\n ConflictingSignednessOptions { existing_signed: String },\n\n ConflictingSizeOptions,\n\n ConflictingTypeOptions { existing_type: String },\n\n ConflictingUnitOptions,\n\n FloatWithSize,\n\n InvalidEndianness,\n\n OptionNotAllowedInValue,\n\n SegmentMustHaveSize,\n\n SignednessUsedOnNonInt { typ: String },\n\n TypeDoesNotAllowSize { typ: String },\n\n TypeDoesNotAllowUnit { typ: String },\n\n UnitMustHaveSize,\n\n VariableUtfSegmentInPattern,\n\n}\n", "file_path": "compiler-core/src/bit_string.rs", "rank": 52, "score": 311604.03937389684 }, { "content": "pub fn write_outputs(outputs: &[OutputFile]) -> Result<(), Error> {\n\n for file in outputs {\n\n write_output(file)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 54, "score": 305499.76874695125 }, { "content": "pub fn string(value: &str) -> Document<'_> {\n\n if value.contains('\\n') {\n\n Document::String(value.replace('\\n', r#\"\\n\"#)).surround(\"\\\"\", \"\\\"\")\n\n } else {\n\n value.to_doc().surround(\"\\\"\", \"\\\"\")\n\n }\n\n}\n\n\n", "file_path": "compiler-core/src/javascript/expression.rs", "rank": 55, "score": 303018.7451739751 }, { "content": "pub fn create(options: NewOptions, version: &'static str) -> Result<()> {\n\n let name = if let Some(name) = options.name.clone() {\n\n name\n\n } else {\n\n get_foldername(&options.project_root)?\n\n };\n\n validate_name(&name)?;\n\n validate_root_folder(&name)?;\n\n let creator = Creator::new(options.clone(), name, version);\n\n\n\n creator.run()?;\n\n\n\n let cd_folder = if options.project_root == \".\" {\n\n \"\".to_string()\n\n } else {\n\n format!(\"\\tcd {}\\n\", creator.options.project_root)\n\n };\n\n\n\n println!(\n\n \"Your Gleam 
project {} has been successfully created.\n\nThe project can be compiled and tested by running these commands:\n\n\n\n{}\\tgleam test\n\n\",\n\n creator.project_name, cd_folder,\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "compiler-cli/src/new.rs", "rank": 57, "score": 302132.6465692794 }, { "content": "pub fn unformatted_files(files: Vec<String>) -> Result<Vec<Unformatted>> {\n\n let mut problem_files = Vec::with_capacity(files.len());\n\n\n\n for file_path in files {\n\n let path = PathBuf::from_str(&file_path).map_err(|e| Error::FileIo {\n\n action: FileIoAction::Open,\n\n kind: FileKind::File,\n\n path: PathBuf::from(file_path),\n\n err: Some(e.to_string()),\n\n })?;\n\n\n\n if path.is_dir() {\n\n for path in crate::fs::gleam_files_excluding_gitignore(&path) {\n\n format_file(&mut problem_files, path)?;\n\n }\n\n } else {\n\n format_file(&mut problem_files, path)?;\n\n }\n\n }\n\n\n\n Ok(problem_files)\n\n}\n\n\n", "file_path": "compiler-cli/src/format.rs", "rank": 58, "score": 301889.67309195886 }, { "content": "pub fn unnest(within: &Path) -> PathBuf {\n\n let mut path = PathBuf::new();\n\n for _ in within {\n\n path = path.join(\"..\")\n\n }\n\n path\n\n}\n\n\n", "file_path": "compiler-core/src/paths.rs", "rank": 59, "score": 301088.42291928094 }, { "content": "// TODO: test\n\n// TODO: Don't include git-ignored Erlang files\n\nfn project_files() -> Result<Vec<PathBuf>> {\n\n let src = Path::new(\"src\");\n\n let mut files: Vec<PathBuf> = fs::gleam_files_excluding_gitignore(src)\n\n .chain(fs::erlang_files(src)?)\n\n .collect();\n\n let mut add = |path| {\n\n let path = PathBuf::from(path);\n\n if path.exists() {\n\n files.push(path);\n\n }\n\n };\n\n add(\"README\");\n\n add(\"README.md\");\n\n add(\"README.txt\");\n\n add(\"gleam.toml\");\n\n add(\"LICENSE\");\n\n add(\"LICENCE\");\n\n add(\"LICENSE.md\");\n\n add(\"LICENCE.md\");\n\n add(\"LICENSE.txt\");\n\n add(\"LICENCE.txt\");\n\n Ok(files)\n\n}\n\n\n", "file_path": "compiler-cli/src/publish.rs", 
"rank": 60, "score": 300926.55356738635 }, { "content": "pub fn make_tokenizer(source: &str) -> impl Iterator<Item = LexResult> + '_ {\n\n let nlh = NewlineHandler::new(source.char_indices());\n\n Lexer::new(nlh)\n\n}\n\n\n\n// The newline handler is an iterator which collapses different newline\n\n// types into \\n always.\n\n#[derive(Debug)]\n\npub struct NewlineHandler<T: Iterator<Item = (usize, char)>> {\n\n source: T,\n\n chr0: Option<(usize, char)>,\n\n chr1: Option<(usize, char)>,\n\n}\n\n\n\nimpl<T> NewlineHandler<T>\n\nwhere\n\n T: Iterator<Item = (usize, char)>,\n\n{\n\n pub fn new(source: T) -> Self {\n\n let mut nlh = NewlineHandler {\n", "file_path": "compiler-core/src/parse/lexer.rs", "rank": 61, "score": 299199.1632393294 }, { "content": "pub fn command() -> Result<(), Error> {\n\n // Build project\n\n let _ = crate::build::main()?;\n\n\n\n // Don't exit on ctrl+c as it is used by child erlang shell\n\n ctrlc::set_handler(move || {}).expect(\"Error setting Ctrl-C handler\");\n\n\n\n // Prepare the Erlang shell command\n\n let mut command = Command::new(\"erl\");\n\n\n\n // Print character lists as lists\n\n let _ = command.arg(\"-stdlib\").arg(\"shell_strings\").arg(\"false\");\n\n\n\n // Specify locations of .beam files\n\n let packages = paths::build_packages(Mode::Dev, Target::Erlang);\n\n for entry in crate::fs::read_dir(&packages)?.filter_map(Result::ok) {\n\n let _ = command.arg(\"-pa\").arg(entry.path().join(\"ebin\"));\n\n }\n\n\n\n crate::cli::print_running(\"Erlang shell\");\n\n\n\n // Run the shell\n\n tracing::info!(\"Running OS process {:?}\", command);\n\n let _ = command.status().map_err(|e| Error::ShellCommand {\n\n command: \"erl\".to_string(),\n\n err: Some(e.kind()),\n\n })?;\n\n Ok(())\n\n}\n", "file_path": "compiler-cli/src/shell.rs", "rank": 62, "score": 298698.4651042059 }, { "content": "fn render_markdown(text: &str) -> String {\n\n let mut s = String::with_capacity(text.len() * 3 / 2);\n\n let p = 
pulldown_cmark::Parser::new_ext(text, pulldown_cmark::Options::all());\n\n pulldown_cmark::html::push_html(&mut s, p);\n\n s\n\n}\n\n\n", "file_path": "compiler-core/src/docs.rs", "rank": 63, "score": 296546.3901259809 }, { "content": "pub fn print_added(text: &str) {\n\n print_colourful_prefix(\" Added\", text)\n\n}\n\n\n", "file_path": "compiler-cli/src/cli.rs", "rank": 64, "score": 295117.46001882566 }, { "content": "pub fn print_running(text: &str) {\n\n print_colourful_prefix(\" Running\", text)\n\n}\n\n\n", "file_path": "compiler-cli/src/cli.rs", "rank": 65, "score": 295117.46001882566 }, { "content": "pub fn print_compiling(text: &str) {\n\n print_colourful_prefix(\" Compiling\", text)\n\n}\n\n\n", "file_path": "compiler-cli/src/cli.rs", "rank": 66, "score": 295117.4600188257 }, { "content": "pub fn print_downloading(text: &str) {\n\n print_colourful_prefix(\"Downloading\", text)\n\n}\n\n\n", "file_path": "compiler-cli/src/cli.rs", "rank": 67, "score": 295117.46001882566 }, { "content": "pub fn read_stdin() -> Result<String> {\n\n let mut src = String::new();\n\n let _ = std::io::stdin()\n\n .read_to_string(&mut src)\n\n .map_err(|e| Error::StandardIo {\n\n action: StandardIoAction::Read,\n\n err: Some(e.kind()),\n\n })?;\n\n Ok(src)\n\n}\n", "file_path": "compiler-cli/src/format.rs", "rank": 68, "score": 294477.2147886971 }, { "content": "pub fn parse(input: BitString) -> String {\n\n case input {\n\n <<>> -> 1\n\n <<\"(\":utf8, b:binary>> ->\n\n parse(input)\n\n |> change\n\n }\n\n}\"#\n\n );\n\n}\n", "file_path": "compiler-core/src/type_/tests/errors.rs", "rank": 69, "score": 292244.4358382162 }, { "content": "pub fn get_string(x: Box(String)) { x.inner }\n\n\",\n\n vec![\n\n (\"Box\", \"fn(a) -> Box(a)\"),\n\n (\"get_box\", \"fn(Box(Box(a))) -> Box(a)\"),\n\n (\"get_generic\", \"fn(Box(a)) -> a\"),\n\n (\"get_get_box\", \"fn(Box(Box(a))) -> a\"),\n\n (\"get_int\", \"fn(Box(Int)) -> Int\"),\n\n (\"get_string\", \"fn(Box(String)) -> String\"),\n\n ]\n\n 
);\n\n}\n\n\n", "file_path": "compiler-core/src/type_/tests.rs", "rank": 70, "score": 292218.9533471413 }, { "content": "pub fn print_colourful_prefix(prefix: &str, text: &str) {\n\n let buffer_writer = stdout_buffer_writer();\n\n let mut buffer = buffer_writer.buffer();\n\n buffer\n\n .set_color(\n\n ColorSpec::new()\n\n .set_intense(true)\n\n .set_fg(Some(Color::Magenta)),\n\n )\n\n .expect(\"print_green_prefix\");\n\n write!(buffer, \"{}\", prefix).expect(\"print_green_prefix\");\n\n buffer\n\n .set_color(&ColorSpec::new())\n\n .expect(\"print_green_prefix\");\n\n writeln!(buffer, \" {}\", text).expect(\"print_green_prefix\");\n\n buffer_writer.print(&buffer).expect(\"print_green_prefix\");\n\n}\n\n\n", "file_path": "compiler-cli/src/cli.rs", "rank": 71, "score": 291517.5174736918 }, { "content": "pub fn build_package(mode: Mode, target: Target, package: &str) -> PathBuf {\n\n build_packages(mode, target).join(package)\n\n}\n\n\n", "file_path": "compiler-core/src/paths.rs", "rank": 72, "score": 288285.26379142073 }, { "content": "fn load_libraries(libs: &[PathBuf]) -> Result<HashMap<String, Module>> {\n\n tracing::info!(\"Reading precompiled module metadata files\");\n\n let mut manifests = HashMap::with_capacity(libs.len() * 10);\n\n for lib in libs {\n\n for module in fs::gleam_modules_metadata_paths(lib)? 
{\n\n let reader = fs::buffered_reader(module)?;\n\n let module = metadata::ModuleDecoder::new().read(reader)?;\n\n let _ = manifests.insert(module.name.join(\"/\"), module);\n\n }\n\n }\n\n Ok(manifests)\n\n}\n", "file_path": "compiler-cli/src/compile_package.rs", "rank": 73, "score": 287011.68368584214 }, { "content": "pub fn manifest() -> PathBuf {\n\n PathBuf::from(\"manifest.toml\")\n\n}\n\n\n", "file_path": "compiler-core/src/paths.rs", "rank": 74, "score": 285996.36038997 }, { "content": "pub fn build() -> PathBuf {\n\n PathBuf::from(\"build\")\n\n}\n\n\n", "file_path": "compiler-core/src/paths.rs", "rank": 75, "score": 285996.36038997 }, { "content": "pub fn packages() -> PathBuf {\n\n build().join(\"packages\")\n\n}\n\n\n", "file_path": "compiler-core/src/paths.rs", "rank": 76, "score": 285996.36038997 }, { "content": "pub fn readme() -> PathBuf {\n\n PathBuf::from(\"README.md\")\n\n}\n", "file_path": "compiler-core/src/paths.rs", "rank": 77, "score": 285996.36038997 }, { "content": "pub fn src() -> PathBuf {\n\n PathBuf::from(\"src\")\n\n}\n\n\n", "file_path": "compiler-core/src/paths.rs", "rank": 78, "score": 285996.36038997 }, { "content": "#[cfg(test)]\n\npub fn parse_expression_sequence(src: &str) -> Result<UntypedExpr, ParseError> {\n\n let lex = lexer::make_tokenizer(src);\n\n let mut parser = Parser::new(lex);\n\n let expr = parser.parse_expression_seq();\n\n let expr = parser.ensure_no_errors_or_remaining_input(expr)?;\n\n if let Some((e, _)) = expr {\n\n Ok(e)\n\n } else {\n\n parse_error(ParseErrorType::ExpectedExpr, SrcSpan { start: 0, end: 0 })\n\n }\n\n}\n\n\n\n//\n\n// Parser\n\n//\n\n#[derive(Debug)]\n\npub struct Parser<T: Iterator<Item = LexResult>> {\n\n tokens: T,\n\n lex_errors: Vec<LexicalError>,\n\n tok0: Option<Spanned>,\n", "file_path": "compiler-core/src/parse.rs", "rank": 79, "score": 285118.54190250626 }, { "content": "pub fn packages_toml() -> PathBuf {\n\n packages().join(\"packages.toml\")\n\n}\n\n\n", "file_path": 
"compiler-core/src/paths.rs", "rank": 80, "score": 281553.38306927826 }, { "content": "pub fn root_config() -> PathBuf {\n\n PathBuf::from(\"gleam.toml\")\n\n}\n\n\n", "file_path": "compiler-core/src/paths.rs", "rank": 81, "score": 281553.38306927826 }, { "content": "pub fn build_scripts() -> PathBuf {\n\n build().join(\"scripts\")\n\n}\n\n\n", "file_path": "compiler-core/src/paths.rs", "rank": 82, "score": 281553.38306927826 }, { "content": "pub fn write_diagnostic(mut buffer: &mut Buffer, d: MultiLineDiagnostic, severity: Severity) {\n\n let file = SimpleFile::new(d.file, d.src);\n\n\n\n let labels = d\n\n .labels\n\n .iter()\n\n .map(|l| {\n\n Label::new(l.style, (), (l.location.start)..(l.location.end))\n\n .with_message(l.label.clone())\n\n })\n\n .collect();\n\n\n\n let diagnostic = codespan_reporting::diagnostic::Diagnostic::new(severity)\n\n .with_message(d.title)\n\n .with_labels(labels);\n\n\n\n let config = codespan_reporting::term::Config::default();\n\n emit(&mut buffer, &config, &file, &diagnostic).expect(\"write_diagnostic\");\n\n}\n\n\n\n/// Describes an error encountered while compiling the project (eg. 
a name collision\n\n/// between files).\n\n///\n\n#[derive(Debug)]\n\npub struct ProjectErrorDiagnostic {\n\n pub title: String,\n\n pub label: String,\n\n}\n\n\n", "file_path": "compiler-core/src/diagnostic.rs", "rank": 83, "score": 280252.6956310719 }, { "content": "fn validate_module_name(name: &[String]) -> Result<(), Error> {\n\n if name == [\"gleam\"] {\n\n return Err(Error::ReservedModuleName {\n\n name: name.join(\"/\"),\n\n });\n\n };\n\n for segment in name {\n\n if crate::parse::lexer::str_to_keyword(segment).is_some() {\n\n return Err(Error::KeywordInModuleName {\n\n name: name.join(\"/\"),\n\n keyword: segment.to_string(),\n\n });\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "compiler-core/src/type_.rs", "rank": 84, "score": 277991.7015820458 }, { "content": "pub fn is_erlang_reserved_word(name: &str) -> bool {\n\n matches!(\n\n name,\n\n \"!\" | \"receive\"\n\n | \"bnot\"\n\n | \"div\"\n\n | \"rem\"\n\n | \"band\"\n\n | \"bor\"\n\n | \"bxor\"\n\n | \"bsl\"\n\n | \"bsr\"\n\n | \"not\"\n\n | \"and\"\n\n | \"or\"\n\n | \"xor\"\n\n | \"orelse\"\n\n | \"andalso\"\n\n | \"when\"\n\n | \"end\"\n", "file_path": "compiler-core/src/erlang.rs", "rank": 85, "score": 277564.78223942296 }, { "content": "fn build_hex_tarball(config: &PackageConfig) -> Result<(Vec<u8>, Vec<PathBuf>)> {\n\n let files = project_files()?;\n\n let contents_tar_gz = contents_tarball(&files)?;\n\n let version = \"3\";\n\n let metadata = metadata_config(config, &files);\n\n\n\n // Calculate checksum\n\n let mut hasher = sha2::Sha256::new();\n\n hasher.update(version.as_bytes());\n\n hasher.update(metadata.as_bytes());\n\n hasher.update(contents_tar_gz.as_slice());\n\n let checksum = base16::encode_upper(&hasher.finalize());\n\n tracing::info!(checksum = %checksum, \"Generated Hex package inner checksum\");\n\n\n\n // Build tarball\n\n let mut tarball = Vec::new();\n\n {\n\n let mut tarball = tar::Builder::new(&mut tarball);\n\n add_to_tar(&mut tarball, \"VERSION\", 
version.as_bytes())?;\n\n add_to_tar(&mut tarball, \"metadata.config\", metadata.as_bytes())?;\n\n add_to_tar(&mut tarball, \"contents.tar.gz\", contents_tar_gz.as_slice())?;\n\n add_to_tar(&mut tarball, \"CHECKSUM\", checksum.as_bytes())?;\n\n tarball.finish().map_err(Error::finish_tar)?;\n\n }\n\n tracing::info!(\"Generated package Hex release tarball\");\n\n Ok((tarball, files))\n\n}\n\n\n", "file_path": "compiler-cli/src/publish.rs", "rank": 86, "score": 277325.3900238167 }, { "content": "pub fn default_gleam_cache() -> PathBuf {\n\n dirs::cache_dir()\n\n .expect(\"Failed to determine user cache directory\")\n\n .join(\"gleam\")\n\n}\n\n\n", "file_path": "compiler-core/src/paths.rs", "rank": 87, "score": 277321.41257118864 }, { "content": "pub fn root_config() -> Result<PackageConfig, Error> {\n\n read_project_config(paths::root_config())\n\n}\n\n\n", "file_path": "compiler-cli/src/config.rs", "rank": 88, "score": 276658.75567444426 }, { "content": "pub fn main() { let _ = foo(); 5 }\",\n\n );\n\n}\n\n\n", "file_path": "compiler-core/src/type_/tests.rs", "rank": 89, "score": 275936.77313113655 }, { "content": "//\n\n// Public Interface\n\n//\n\npub fn parse_module(src: &str) -> Result<(UntypedModule, ModuleExtra), ParseError> {\n\n let lex = lexer::make_tokenizer(src);\n\n let mut parser = Parser::new(lex);\n\n let module = parser.parse_module()?;\n\n Ok((module, parser.extra))\n\n}\n\n\n\n//\n\n// Test Interface\n\n//\n", "file_path": "compiler-core/src/parse.rs", "rank": 90, "score": 275074.95322548633 }, { "content": "// Includes shell_default & user_default which are looked for by the erlang shell\n\npub fn is_erlang_standard_library_module(name: &str) -> bool {\n\n matches!(\n\n name,\n\n \"array\"\n\n | \"base64\"\n\n | \"beam_lib\"\n\n | \"binary\"\n\n | \"c\"\n\n | \"calendar\"\n\n | \"dets\"\n\n | \"dict\"\n\n | \"digraph\"\n\n | \"digraph_utils\"\n\n | \"epp\"\n\n | \"erl_anno\"\n\n | \"erl_eval\"\n\n | \"erl_expand_records\"\n\n | 
\"erl_id_trans\"\n\n | \"erl_internal\"\n\n | \"erl_lint\"\n", "file_path": "compiler-core/src/erlang.rs", "rank": 91, "score": 273584.1233989281 }, { "content": "fn metadata_config(config: &PackageConfig, files: &[PathBuf]) -> String {\n\n let metadata = ReleaseMetadata {\n\n name: &config.name,\n\n version: &config.version,\n\n description: &config.description,\n\n files,\n\n licenses: &config.licences,\n\n links: config\n\n .links\n\n .iter()\n\n .map(|l| (l.title.as_str(), l.href.as_str()))\n\n .collect(),\n\n requirements: config\n\n .dependencies\n\n .iter()\n\n .map(|(name, requirement)| ReleaseRequirement { name, requirement })\n\n .collect(),\n\n build_tools: vec![\"gleam\"],\n\n }\n\n .as_erlang();\n\n tracing::info!(contents = ?metadata, \"Generated Hex metadata.config\");\n\n metadata\n\n}\n\n\n", "file_path": "compiler-cli/src/publish.rs", "rank": 92, "score": 273576.70815689396 }, { "content": "pub fn write(buffer: &mut Buffer, d: Diagnostic, severity: Severity) {\n\n let diagnostic = MultiLineDiagnostic {\n\n file: d.file,\n\n src: d.src,\n\n title: d.title,\n\n labels: vec![DiagnosticLabel {\n\n style: LabelStyle::Primary,\n\n location: d.location,\n\n label: d.label,\n\n }],\n\n };\n\n\n\n write_diagnostic(buffer, diagnostic, severity)\n\n}\n\n\n", "file_path": "compiler-core/src/diagnostic.rs", "rank": 93, "score": 273504.4725652977 }, { "content": "fn did_you_mean(name: &str, options: &[String], alt: &'static str) -> String {\n\n // Find best match\n\n options\n\n .iter()\n\n .filter(|&option| option != crate::ast::CAPTURE_VARIABLE)\n\n .sorted()\n\n .min_by_key(|option| strsim::levenshtein(option, name))\n\n .map(|option| format!(\"did you mean `{}`?\", option))\n\n .unwrap_or_else(|| alt.to_string())\n\n}\n\n\n\nimpl Error {\n\n pub fn pretty_string(&self) -> String {\n\n let mut nocolor = Buffer::no_color();\n\n self.pretty(&mut nocolor);\n\n String::from_utf8(nocolor.into_inner()).expect(\"Error printing produced invalid utf8\")\n\n }\n\n\n\n 
pub fn pretty(&self, buf: &mut Buffer) {\n\n use crate::type_::Error as TypeError;\n", "file_path": "compiler-core/src/error.rs", "rank": 94, "score": 270774.56796623016 }, { "content": "pub fn main(arg) {\n\n let _ = list_value\n\n case arg {\n\n #(w, x, y, z) if w == tuple_value && x == string_value && y >. float_value && z == int_value -> 1\n\n _ -> 0\n\n }\n\n}\n\n\"#,\n\n r#\"-module(the_app).\n\n-compile(no_auto_import).\n\n\n\n-export([main/1]).\n\n\n\n-spec main({{integer(), float(), binary()}, binary(), float(), integer()}) -> integer().\n\nmain(Arg) ->\n\n _@1 = [1, 2, 3],\n\n case Arg of\n\n {W, X, Y, Z} when (((W =:= {1, 2.0, <<\"3\"/utf8>>}) andalso (X =:= <<\"constant value\"/utf8>>)) andalso (Y > 3.14)) andalso (Z =:= 42) ->\n\n 1;\n\n\n\n _@2 ->\n\n 0\n\n end.\n\n\"#,\n\n );\n\n\n\n assert_erl!(\n\n r#\"\n\npub const list = [1, 2, 3]\n\n\n", "file_path": "compiler-core/src/erlang/tests.rs", "rank": 95, "score": 270676.4793297963 }, { "content": "}\n\n\n\nimpl CommandExecutor for ProjectIO {\n\n fn exec(\n\n &self,\n\n program: &str,\n\n args: &[String],\n\n env: &[(&str, String)],\n\n cwd: Option<&Path>,\n\n ) -> Result<std::process::ExitStatus, Error> {\n\n tracing::debug!(program=program, args=?args.join(\" \"), env=?env, cwd=?cwd, \"command_exec\");\n\n std::process::Command::new(program)\n\n .args(args)\n\n .envs(env.iter().map(|(a, b)| (a, b)))\n\n .current_dir(cwd.unwrap_or_else(|| Path::new(\"./\")))\n\n .status()\n\n .map_err(|e| Error::ShellCommand {\n\n command: program.to_ascii_uppercase(),\n\n err: Some(e.kind()),\n\n })\n\n }\n\n}\n\n\n\nimpl FileSystemIO for ProjectIO {}\n\n\n", "file_path": "compiler-cli/src/fs.rs", "rank": 98, "score": 60.44134965738682 } ]
Rust
components/pkg-export-container/src/container.rs
Mastercard/habitat-1
de95485cc74b94bcbd2fcd2c1728ea5b575b7844
use crate::{build::BuildRoot, engine::Engine, error::Result, naming::{ImageIdentifiers, Naming}, util}; use failure::SyncFailure; use habitat_common::ui::{Status, UIWriter, UI}; use habitat_core::package::PackageIdent; use handlebars::Handlebars; use std::{fs, path::{Path, PathBuf}, str::FromStr}; #[cfg(unix)] const DOCKERFILE: &str = include_str!("../defaults/Dockerfile.hbs"); #[cfg(windows)] const DOCKERFILE: &str = include_str!("../defaults/Dockerfile_win.hbs"); const BUILD_REPORT: &str = include_str!("../defaults/last_container_export.env.hbs"); const BUILD_REPORT_FILE_NAME: &str = "last_container_export.env"; const OLD_BUILD_REPORT_FILE_NAME: &str = "last_docker_export.env"; pub struct ContainerImage { id: String, name: String, tags: Vec<String>, workdir: PathBuf, expanded_identifiers: Vec<String>, } impl ContainerImage { pub fn workdir(&self) -> &Path { self.workdir.as_path() } pub fn expanded_identifiers(&self) -> &[String] { &self.expanded_identifiers } pub fn name(&self) -> String { self.name.clone() } pub fn tags(&self) -> Vec<String> { self.tags.clone() } pub fn create_report<P: AsRef<Path>>(&self, ui: &mut UI, dst: P) -> Result<()> { let report = Self::report_path(&dst); ui.status(Status::Creating, format!("build report {}", report.display()))?; fs::create_dir_all(&dst)?; let name_tags: Vec<_> = self.tags .iter() .map(|t| format!("{}:{}", &self.name, t)) .collect(); let json = json!({ "id": &self.id, "name": &self.name, "tags": self.tags.join(","), "name_tags": name_tags.join(","), }); util::write_file(&report, &Handlebars::new().template_render(BUILD_REPORT, &json) .map_err(SyncFailure::new)?)?; Self::create_old_report(ui, dst); Ok(()) } fn report_path<P: AsRef<Path>>(dir: P) -> PathBuf { dir.as_ref().join(BUILD_REPORT_FILE_NAME) } fn create_old_report<P: AsRef<Path>>(ui: &mut UI, dst: P) { let current_report = Self::report_path(&dst); let old_report = dst.as_ref().join(OLD_BUILD_REPORT_FILE_NAME); ui.status(Status::Creating, format!("old build report 
'{}' for backwards compatibility; please favor '{}' \ going forward", old_report.display(), current_report.display())) .ok(); if let Err(e) = std::fs::copy(&current_report, &old_report) { error!("Failed to create '{}' for backwards-compatibility purposes; this may safely \ be ignored: {}", old_report.display(), e); } } } pub struct BuildContext(BuildRoot); impl BuildContext { #[cfg(unix)] pub fn from_build_root(build_root: BuildRoot, ui: &mut UI) -> Result<Self> { let context = BuildContext(build_root); context.add_users_and_groups(ui)?; context.create_entrypoint(ui)?; context.create_dockerfile(ui)?; Ok(context) } #[cfg(windows)] pub fn from_build_root(build_root: BuildRoot, ui: &mut UI) -> Result<Self> { let context = BuildContext(build_root); context.create_dockerfile(ui)?; Ok(context) } pub fn destroy(self, ui: &mut UI) -> Result<()> { self.0.destroy(ui) } #[cfg(unix)] fn add_users_and_groups(&self, ui: &mut UI) -> Result<()> { use std::{fs::OpenOptions, io::Write}; let ctx = self.0.ctx(); let (users, groups) = ctx.svc_users_and_groups()?; { let file = "etc/passwd"; let mut f = OpenOptions::new().append(true) .open(ctx.rootfs().join(&file))?; for user in users { ui.status(Status::Creating, format!("user '{}' in /{}", user.name, &file))?; writeln!(f, "{}", user)?; } } { let file = "etc/group"; let mut f = OpenOptions::new().append(true) .open(ctx.rootfs().join(&file))?; for group in groups { ui.status(Status::Creating, format!("group '{}' in /{}", group.name, &file))?; writeln!(f, "{}", group)?; } } Ok(()) } #[cfg(unix)] fn create_entrypoint(&self, ui: &mut UI) -> Result<()> { use habitat_core::util::posix_perm; const INIT_SH: &str = include_str!("../defaults/init.sh.hbs"); ui.status(Status::Creating, "entrypoint script")?; let ctx = self.0.ctx(); let busybox_shell = util::pkg_path_for(&util::busybox_ident()?, ctx.rootfs())?.join("bin/sh"); let json = json!({ "busybox_shell": busybox_shell, "path": ctx.env_path(), "sup_bin": format!("{} sup", 
ctx.bin_path().join("hab").display()), "primary_svc_ident": ctx.primary_svc_ident().to_string(), }); let init = ctx.rootfs().join("init.sh"); util::write_file(&init, &Handlebars::new().template_render(INIT_SH, &json) .map_err(SyncFailure::new)?)?; posix_perm::set_permissions(init.to_string_lossy().as_ref(), 0o0755)?; Ok(()) } fn create_dockerfile(&self, ui: &mut UI) -> Result<()> { ui.status(Status::Creating, "image Dockerfile")?; let ctx = self.0.ctx(); let json = json!({ "base_image": ctx.base_image(), "rootfs": ctx.rootfs().file_name().expect("file_name exists") .to_string_lossy() .as_ref(), "path": ctx.env_path(), "hab_path": util::pkg_path_for( &PackageIdent::from_str("core/hab")?, ctx.rootfs())?.join("bin/hab") .to_string_lossy() .replace("\\", "/"), "exposes": ctx.svc_exposes().join(" "), "multi_layer": ctx.multi_layer(), "primary_svc_ident": ctx.primary_svc_ident().to_string(), "installed_primary_svc_ident": ctx.installed_primary_svc_ident()?.to_string(), "environment": ctx.environment, "packages": self.0.graph().reverse_topological_sort().iter().map(ToString::to_string).collect::<Vec<_>>(), }); util::write_file(self.0.workdir().join("Dockerfile"), &Handlebars::new().template_render(DOCKERFILE, &json) .map_err(SyncFailure::new)?)?; Ok(()) } pub fn export(&self, ui: &mut UI, naming: &Naming, memory: Option<&str>, engine: &dyn Engine) -> Result<ContainerImage> { ui.status(Status::Creating, "image")?; let ident = self.0.ctx().installed_primary_svc_ident()?; let channel = self.0.ctx().channel(); let ImageIdentifiers { name, tags, expanded_identifiers, } = naming.image_identifiers(&ident, &channel)?; let id = engine.build(self.0.workdir(), &expanded_identifiers, memory)?; Ok(ContainerImage { id, name, tags, expanded_identifiers, workdir: self.0.workdir().to_path_buf() }) } }
use crate::{build::BuildRoot, engine::Engine, error::Result, naming::{ImageIdentifiers, Naming}, util}; use failure::SyncFailure; use habitat_common::ui::{Status, UIWriter, UI}; use habitat_core::package::PackageIdent; use handlebars::Handlebars; use std::{fs, path::{Path, PathBuf}, str::FromStr}; #[cfg(unix)] const DOCKERFILE: &str = include_str!("../defaults/Dockerfile.hbs"); #[cfg(windows)] const DOCKERFILE: &str = include_str!("../defaults/Dockerfile_win.hbs"); const BUILD_REPORT: &str = include_str!("../defaults/last_container_export.env.hbs"); const BUILD_REPORT_FILE_NAME: &str = "last_container_export.env"; const OLD_BUILD_REPORT_FILE_NAME: &str = "last_docker_export.env"; pub struct ContainerImage { id: String, name: String, tags: Vec<String>, workdir: PathBuf, expanded_identifiers: Vec<String>, } impl ContainerImage { pub fn workdir(&self) -> &Path { self.workdir.as_path() } pub fn expanded_identifiers(&self) -> &[String] { &self.expanded_identifiers } pub fn name(&self) -> String { self.name.clone() } pub fn tags(&self) -> Vec<String> { self.tags.clone() } pub fn create_report<P: AsRef<Path>>(&self, ui: &mut UI, dst: P) -> Result<()> { let report = Self::report_path(&dst); ui.status(Status::Creating, format!("build report {}", report.display()))?; fs::create_dir_all(&dst)?; let name_tags: Vec<_> = self.tags .iter() .map(|t| format!("{}:{}", &self.name, t)) .collect(); let json = json!({ "id": &self.id, "name": &self.name, "tags": self.tags.join(","), "name_tags": name_tags.join(","), }); util::write_file(&report, &Handlebars::new().template_render(BUILD_REPORT, &json) .map_err(SyncFailure::new)?)?; Self::create_old_report(ui, dst); Ok(()) } fn report_path<P: AsRef<Path>>(dir: P) -> PathBuf { dir.as_ref().join(BUILD_REPORT_FILE_NAME) } fn create_old_report<P: AsRef<Path>>(ui: &mut UI, dst: P) { let current_report = Self::report_path(&dst); let old_report = dst.as_ref().join(OLD_BUILD_REPORT_FILE_NAME); ui.status(Status::Creating, format!("old build report 
'{}' for backwards compatibility; please favor '{}' \ going forward", old_report.display(), current_report.display())) .ok();
} } pub struct BuildContext(BuildRoot); impl BuildContext { #[cfg(unix)] pub fn from_build_root(build_root: BuildRoot, ui: &mut UI) -> Result<Self> { let context = BuildContext(build_root); context.add_users_and_groups(ui)?; context.create_entrypoint(ui)?; context.create_dockerfile(ui)?; Ok(context) } #[cfg(windows)] pub fn from_build_root(build_root: BuildRoot, ui: &mut UI) -> Result<Self> { let context = BuildContext(build_root); context.create_dockerfile(ui)?; Ok(context) } pub fn destroy(self, ui: &mut UI) -> Result<()> { self.0.destroy(ui) } #[cfg(unix)] fn add_users_and_groups(&self, ui: &mut UI) -> Result<()> { use std::{fs::OpenOptions, io::Write}; let ctx = self.0.ctx(); let (users, groups) = ctx.svc_users_and_groups()?; { let file = "etc/passwd"; let mut f = OpenOptions::new().append(true) .open(ctx.rootfs().join(&file))?; for user in users { ui.status(Status::Creating, format!("user '{}' in /{}", user.name, &file))?; writeln!(f, "{}", user)?; } } { let file = "etc/group"; let mut f = OpenOptions::new().append(true) .open(ctx.rootfs().join(&file))?; for group in groups { ui.status(Status::Creating, format!("group '{}' in /{}", group.name, &file))?; writeln!(f, "{}", group)?; } } Ok(()) } #[cfg(unix)] fn create_entrypoint(&self, ui: &mut UI) -> Result<()> { use habitat_core::util::posix_perm; const INIT_SH: &str = include_str!("../defaults/init.sh.hbs"); ui.status(Status::Creating, "entrypoint script")?; let ctx = self.0.ctx(); let busybox_shell = util::pkg_path_for(&util::busybox_ident()?, ctx.rootfs())?.join("bin/sh"); let json = json!({ "busybox_shell": busybox_shell, "path": ctx.env_path(), "sup_bin": format!("{} sup", ctx.bin_path().join("hab").display()), "primary_svc_ident": ctx.primary_svc_ident().to_string(), }); let init = ctx.rootfs().join("init.sh"); util::write_file(&init, &Handlebars::new().template_render(INIT_SH, &json) .map_err(SyncFailure::new)?)?; posix_perm::set_permissions(init.to_string_lossy().as_ref(), 0o0755)?; Ok(()) } fn 
create_dockerfile(&self, ui: &mut UI) -> Result<()> { ui.status(Status::Creating, "image Dockerfile")?; let ctx = self.0.ctx(); let json = json!({ "base_image": ctx.base_image(), "rootfs": ctx.rootfs().file_name().expect("file_name exists") .to_string_lossy() .as_ref(), "path": ctx.env_path(), "hab_path": util::pkg_path_for( &PackageIdent::from_str("core/hab")?, ctx.rootfs())?.join("bin/hab") .to_string_lossy() .replace("\\", "/"), "exposes": ctx.svc_exposes().join(" "), "multi_layer": ctx.multi_layer(), "primary_svc_ident": ctx.primary_svc_ident().to_string(), "installed_primary_svc_ident": ctx.installed_primary_svc_ident()?.to_string(), "environment": ctx.environment, "packages": self.0.graph().reverse_topological_sort().iter().map(ToString::to_string).collect::<Vec<_>>(), }); util::write_file(self.0.workdir().join("Dockerfile"), &Handlebars::new().template_render(DOCKERFILE, &json) .map_err(SyncFailure::new)?)?; Ok(()) } pub fn export(&self, ui: &mut UI, naming: &Naming, memory: Option<&str>, engine: &dyn Engine) -> Result<ContainerImage> { ui.status(Status::Creating, "image")?; let ident = self.0.ctx().installed_primary_svc_ident()?; let channel = self.0.ctx().channel(); let ImageIdentifiers { name, tags, expanded_identifiers, } = naming.image_identifiers(&ident, &channel)?; let id = engine.build(self.0.workdir(), &expanded_identifiers, memory)?; Ok(ContainerImage { id, name, tags, expanded_identifiers, workdir: self.0.workdir().to_path_buf() }) } }
if let Err(e) = std::fs::copy(&current_report, &old_report) { error!("Failed to create '{}' for backwards-compatibility purposes; this may safely \ be ignored: {}", old_report.display(), e); }
if_condition
[]
Rust
crates/core/src/year2021/day05.rs
fornwall/advent-of-code-2019-rs
67bb6a48860f20a773f9e913c7e498c921370eeb
use crate::input::Input; struct Board { claimed_once: [u64; (1000 * 1000) / 64], claimed_multiple: [u64; (1000 * 1000) / 64], } impl Board { const fn new() -> Self { Self { claimed_once: [0; (1000 * 1000) / 64], claimed_multiple: [0; (1000 * 1000) / 64], } } fn claim_square(&mut self, x: i32, y: i32) { let bit_idx = x as usize + y as usize * 1000; let array_idx = bit_idx / 64; let local_bit = 1 << (bit_idx % 64); if self.claimed_once[array_idx] & local_bit == 0 { self.claimed_once[array_idx] |= local_bit; } else { self.claimed_multiple[array_idx] |= local_bit; } } fn claimed_multiple(&self) -> u32 { self.claimed_multiple.iter().map(|&i| i.count_ones()).sum() } fn add_line(&mut self, from_x: u16, from_y: u16, to_x: u16, to_y: u16) { let mut current_x = i32::from(from_x); let mut current_y = i32::from(from_y); let dx = (i32::from(to_x) - current_x).signum(); let dy = (i32::from(to_y) - current_y).signum(); loop { self.claim_square(current_x, current_y); current_x += dx; current_y += dy; if (current_x, current_y) == (i32::from(to_x), i32::from(to_y)) { self.claim_square(current_x, current_y); break; } } } } fn parse_point(s: &str) -> Option<(u16, u16)> { if let Some((Ok(x), Ok(y))) = s .split_once(',') .map(|(x, y)| (x.parse::<u16>(), y.parse::<u16>())) { return Some((x, y)); } None } pub fn solve(input: &mut Input) -> Result<u32, String> { let mut board = Board::new(); for line in input.text.lines() { if let Some((from, to)) = line.split_once(" -> ") { if let Some((from_x, from_y)) = parse_point(from) { if let Some((to_x, to_y)) = parse_point(to) { if from_x < 1000 && from_y < 1000 && to_x < 1000 && to_y < 1000 { let is_straight_line = (from_x == to_x) || (from_y == to_y); let is_diagonal = (i32::from(from_x) - i32::from(to_x)).abs() == (i32::from(from_y) - i32::from(to_y)).abs(); match (is_straight_line, is_diagonal, input.is_part_two()) { (false, false, _) => { return Err(format!( "Line is neither straight nor diagonal: {},{} -> {},{}", from_x, from_y, to_x, to_y 
)); } (true, _, _) | (_, true, true) => { board.add_line(from_x, from_y, to_x, to_y); } (false, true, false) => {} } continue; } } } } return Err( "Input is not in the format 'x1,y1 -> x2,y2' with values in the range [0,1000]" .to_string(), ); } Ok(board.claimed_multiple()) } #[test] pub fn tests() { use crate::input::{test_part_one, test_part_two}; let example = "0,9 -> 5,9 8,0 -> 0,8 9,4 -> 3,4 2,2 -> 2,1 7,0 -> 7,4 6,4 -> 2,0 0,9 -> 2,9 3,4 -> 1,4 0,0 -> 8,8 5,5 -> 8,2"; test_part_one!(example => 5); test_part_two!(example => 12); let real_input = include_str!("day05_input.txt"); test_part_one!(real_input => 7644); test_part_two!(real_input => 18627); }
use crate::input::Input; struct Board { claimed_once: [u64; (1000 * 1000) / 64], claimed_multiple: [u64; (1000 * 1000) / 64], } impl Board { const fn new() -> Self { Self { claimed_once: [0; (1000 * 1000) / 64], claimed_multiple: [0; (1000 * 1000) / 64], } } fn claim_square(&mut self, x: i32, y: i32) { let bit_idx = x as usize + y as usize * 1000; let array_idx = bit_idx / 64; let local_bit = 1 << (bit_idx % 64);
fn claimed_multiple(&self) -> u32 { self.claimed_multiple.iter().map(|&i| i.count_ones()).sum() } fn add_line(&mut self, from_x: u16, from_y: u16, to_x: u16, to_y: u16) { let mut current_x = i32::from(from_x); let mut current_y = i32::from(from_y); let dx = (i32::from(to_x) - current_x).signum(); let dy = (i32::from(to_y) - current_y).signum(); loop { self.claim_square(current_x, current_y); current_x += dx; current_y += dy; if (current_x, current_y) == (i32::from(to_x), i32::from(to_y)) { self.claim_square(current_x, current_y); break; } } } } fn parse_point(s: &str) -> Option<(u16, u16)> { if let Some((Ok(x), Ok(y))) = s .split_once(',') .map(|(x, y)| (x.parse::<u16>(), y.parse::<u16>())) { return Some((x, y)); } None } pub fn solve(input: &mut Input) -> Result<u32, String> { let mut board = Board::new(); for line in input.text.lines() { if let Some((from, to)) = line.split_once(" -> ") { if let Some((from_x, from_y)) = parse_point(from) { if let Some((to_x, to_y)) = parse_point(to) { if from_x < 1000 && from_y < 1000 && to_x < 1000 && to_y < 1000 { let is_straight_line = (from_x == to_x) || (from_y == to_y); let is_diagonal = (i32::from(from_x) - i32::from(to_x)).abs() == (i32::from(from_y) - i32::from(to_y)).abs(); match (is_straight_line, is_diagonal, input.is_part_two()) { (false, false, _) => { return Err(format!( "Line is neither straight nor diagonal: {},{} -> {},{}", from_x, from_y, to_x, to_y )); } (true, _, _) | (_, true, true) => { board.add_line(from_x, from_y, to_x, to_y); } (false, true, false) => {} } continue; } } } } return Err( "Input is not in the format 'x1,y1 -> x2,y2' with values in the range [0,1000]" .to_string(), ); } Ok(board.claimed_multiple()) } #[test] pub fn tests() { use crate::input::{test_part_one, test_part_two}; let example = "0,9 -> 5,9 8,0 -> 0,8 9,4 -> 3,4 2,2 -> 2,1 7,0 -> 7,4 6,4 -> 2,0 0,9 -> 2,9 3,4 -> 1,4 0,0 -> 8,8 5,5 -> 8,2"; test_part_one!(example => 5); test_part_two!(example => 12); let real_input = 
include_str!("day05_input.txt"); test_part_one!(real_input => 7644); test_part_two!(real_input => 18627); }
if self.claimed_once[array_idx] & local_bit == 0 { self.claimed_once[array_idx] |= local_bit; } else { self.claimed_multiple[array_idx] |= local_bit; } }
function_block-function_prefix_line
[ { "content": "#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash)]\n\nstruct State<const SIDE_ROOM_SIZE: usize> {\n\n hallways: [Option<Amphipod>; HALLWAY_SPACES],\n\n /// Indexed by amphipod idx\n\n rooms: [[Option<Amphipod>; SIDE_ROOM_SIZE]; 4],\n\n}\n\n\n\nimpl<const SIDE_ROOM_SIZE: usize> State<SIDE_ROOM_SIZE> {\n\n fn parse(text: &str) -> Self {\n\n let mut rooms = [[Option::None; SIDE_ROOM_SIZE]; 4];\n\n let mut amphipod_count = 0;\n\n for b in text.bytes().filter(|b| b'A' <= *b && *b <= b'D') {\n\n rooms[amphipod_count % 4][amphipod_count / 4] = Some(Amphipod::from_idx(b - b'A'));\n\n amphipod_count += 1;\n\n if SIDE_ROOM_SIZE == 4 && amphipod_count == 4 {\n\n for b in [b'D', b'C', b'B', b'A', b'D', b'B', b'A', b'C'] {\n\n rooms[amphipod_count % 4][amphipod_count / 4] =\n\n Some(Amphipod::from_idx(b - b'A'));\n\n amphipod_count += 1;\n\n }\n\n }\n", "file_path": "crates/core/src/year2021/day23.rs", "rank": 0, "score": 195204.95668273343 }, { "content": "struct Board {\n\n width: u32,\n\n height: u32,\n\n cells: Vec<MapCell>,\n\n visited: Vec<bool>,\n\n round: u16,\n\n full_round: bool,\n\n elves_alive: i32,\n\n elf_died: bool,\n\n goblins_alive: i32,\n\n elf_attack_power: i32,\n\n}\n\n\n\nimpl Board {\n\n fn parse(input_string: &str, elf_attack_power: i32) -> Result<Self, String> {\n\n let width = input_string\n\n .find('\\n')\n\n .ok_or_else(|| \"No line in input\".to_string())? 
as u32;\n\n\n\n let mut elves_alive = 0;\n", "file_path": "crates/core/src/year2018/day15.rs", "rank": 1, "score": 182682.10510376556 }, { "content": "struct Board {\n\n cells: [u8; Self::WIDTH * Self::WIDTH],\n\n num_flashes: u64,\n\n}\n\n\n\nimpl Board {\n\n const WIDTH: usize = 10;\n\n\n\n fn parse(s: &str) -> Result<Self, String> {\n\n let mut board = Self {\n\n cells: [0; Self::WIDTH * Self::WIDTH],\n\n num_flashes: 0,\n\n };\n\n\n\n if s.lines().count() != 10 {\n\n return Err(\"Board is not 10 rows\".to_string());\n\n }\n\n\n\n for (y, line) in s.lines().enumerate() {\n\n if line.len() != 10 {\n", "file_path": "crates/core/src/year2021/day11.rs", "rank": 3, "score": 182682.10510376556 }, { "content": "struct Board {\n\n numbers: [u8; 25],\n\n drawn_numbers_bitmask: u32,\n\n}\n\n\n\nimpl Board {\n\n const fn is_bitmask_set(&self, bitmask: u32) -> bool {\n\n self.drawn_numbers_bitmask & bitmask == bitmask\n\n }\n\n\n\n const fn has_won(&self) -> bool {\n\n #![allow(clippy::unusual_byte_groupings)]\n\n self.is_bitmask_set(0b00000_00000_00000_00000_11111)\n\n || self.is_bitmask_set(0b00000_00000_00000_11111_00000)\n\n || self.is_bitmask_set(0b00000_00000_11111_00000_00000)\n\n || self.is_bitmask_set(0b00000_11111_00000_00000_00000)\n\n || self.is_bitmask_set(0b11111_00000_00000_00000_00000)\n\n || self.is_bitmask_set(0b00001_00001_00001_00001_00001)\n\n || self.is_bitmask_set(0b00010_00010_00010_00010_00010)\n\n || self.is_bitmask_set(0b00100_00100_00100_00100_00100)\n", "file_path": "crates/core/src/year2021/day04.rs", "rank": 4, "score": 182682.10510376556 }, { "content": "fn affected_by_beam(program: &Program, x: i32, y: i32) -> Result<bool, String> {\n\n let mut program_copy = program.clone();\n\n program_copy.input(Word::from(x));\n\n program_copy.input(Word::from(y));\n\n let output = program_copy.run_for_output()?;\n\n if output.is_empty() {\n\n return Err(\"No output produced\".to_string());\n\n } else if output.len() != 1 || !matches!(output[0], 0 | 1) 
{\n\n return Err(\"Invalid output from program (expected only 0 or 1)\".to_string());\n\n }\n\n Ok(output[0] == 1)\n\n}\n\n\n", "file_path": "crates/core/src/year2019/day19.rs", "rank": 5, "score": 172322.807878168 }, { "content": "// Parse input in the format \"A=<211,-141,-45>\".\n\nfn parse_vector(input: &str) -> Option<(i32, i32, i32)> {\n\n if input.len() > 3 {\n\n if let Some(stripped) = input[2..]\n\n .strip_prefix('<')\n\n .and_then(|s| s.strip_suffix('>'))\n\n {\n\n let mut number_parts = stripped.split(',');\n\n let x = number_parts.next()?.parse::<i16>().ok()?;\n\n let y = number_parts.next()?.parse::<i16>().ok()?;\n\n let z = number_parts.next()?.parse::<i16>().ok()?;\n\n return Some((i32::from(x), i32::from(y), i32::from(z)));\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "crates/core/src/year2017/day20.rs", "rank": 6, "score": 168043.1159318182 }, { "content": "pub fn part1_nth(input_string: &str, n: usize) -> Result<u64, String> {\n\n let mut moons = Moons::parse(input_string)?;\n\n for _ in 0..n {\n\n moons.step();\n\n }\n\n Ok(moons.total_energy())\n\n}\n\n\n", "file_path": "crates/core/src/year2019/day12.rs", "rank": 7, "score": 159482.95921872635 }, { "content": "/// The intcode instruction for moving the robot in the specified direction.\n\nfn instruction_for_direction(direction: (i32, i32)) -> Result<Word, String> {\n\n Ok(match direction {\n\n (0, 1) => 1,\n\n (0, -1) => 2,\n\n (-1, 0) => 3,\n\n (1, 0) => 4,\n\n _ => {\n\n return Err(format!(\n\n \"Invalid direction ({},{})\",\n\n direction.0, direction.1\n\n ))\n\n }\n\n })\n\n}\n\n\n", "file_path": "crates/core/src/year2019/day15.rs", "rank": 8, "score": 155376.81374792097 }, { "content": "fn parse_input(input_string: &str) -> Result<(i32, i32), String> {\n\n let (from_str, to_str) = input_string\n\n .trim()\n\n .split_once('-')\n\n .ok_or_else(|| \"Invalid parts - should be in FROM-TO form\".to_string())?;\n\n let from = from_str.parse::<i32>().or(Err(\"Invalid range\"))?;\n\n let to = 
to_str.parse::<i32>().or(Err(\"Invalid range\"))?;\n\n Ok((from, to))\n\n}\n\n\n", "file_path": "crates/core/src/year2019/day04.rs", "rank": 9, "score": 153826.61640670354 }, { "content": "fn convert_params(year: i32, day: i32, part: i32) -> Result<(u16, u8, u8), String> {\n\n Ok((\n\n u16::try_from(year).map_err(|_| format!(\"Invalid year: {}\", year))?,\n\n u8::try_from(day).map_err(|_| format!(\"Invalid day: {}\", day))?,\n\n u8::try_from(part).map_err(|_| format!(\"Invalid part: {}\", part))?,\n\n ))\n\n}\n", "file_path": "crates/java/src/lib.rs", "rank": 10, "score": 151110.8391813564 }, { "content": "fn score_recipe(ingredients: &[Ingredient], teaspoons: &[i32], part2: bool) -> i32 {\n\n if teaspoons.iter().sum::<i32>() != 100 {\n\n return 0;\n\n }\n\n\n\n let mut capacity = 0;\n\n let mut durability = 0;\n\n let mut flavour = 0;\n\n let mut texture = 0;\n\n let mut calories = 0;\n\n\n\n for i in 0..ingredients.len() {\n\n capacity += ingredients[i].capacity * teaspoons[i];\n\n durability += ingredients[i].durability * teaspoons[i];\n\n flavour += ingredients[i].flavour * teaspoons[i];\n\n texture += ingredients[i].texture * teaspoons[i];\n\n calories += ingredients[i].calories * teaspoons[i];\n\n }\n\n\n\n if capacity <= 0 || durability <= 0 || flavour <= 0 || texture <= 0 {\n", "file_path": "crates/core/src/year2015/day15.rs", "rank": 11, "score": 151001.84097054592 }, { "content": "fn is_prime(number: i32) -> bool {\n\n let number_sqrt = f64::from(number).sqrt() as i32;\n\n (2..=number_sqrt).all(|i| number % i != 0)\n\n}\n\n\n", "file_path": "crates/core/src/year2017/day23.rs", "rank": 12, "score": 148907.41115672095 }, { "content": "fn score_autocomplete(autocomplete: &[u8]) -> u64 {\n\n autocomplete.iter().rev().fold(0, |score, c| {\n\n score * 5\n\n + match c {\n\n b')' => 1,\n\n b']' => 2,\n\n b'}' => 3,\n\n _ => 4,\n\n }\n\n })\n\n}\n\n\n", "file_path": "crates/core/src/year2021/day10.rs", "rank": 13, "score": 146906.20957749675 }, { "content": "fn 
run(input_string: &str, initial_color: Color) -> Result<HashMap<(i32, i32), Color>, String> {\n\n let mut program = Program::parse(input_string)?;\n\n let mut painted: HashMap<(i32, i32), Color> = HashMap::new();\n\n let mut position = (0, 0);\n\n let mut current_direction = Direction::Up;\n\n\n\n if initial_color == Color::White {\n\n painted.insert(position, initial_color);\n\n }\n\n\n\n loop {\n\n program.input(*painted.get(&position).unwrap_or(&Color::Black) as Word);\n\n let output = program.run_for_output()?;\n\n\n\n if program.is_halted() {\n\n break;\n\n }\n\n\n\n if output.len() != 2 {\n\n return Err(\"Invalid output length\".to_string());\n", "file_path": "crates/core/src/year2019/day11.rs", "rank": 14, "score": 136849.40560504247 }, { "content": "fn is_bit_mostly_set(numbers: &[u16], bit_idx: usize) -> bool {\n\n numbers.iter().fold(0, |acc, x| {\n\n acc + if x & (1 << bit_idx) == 0 { -1 } else { 1 }\n\n }) >= 0\n\n}\n\n\n", "file_path": "crates/core/src/year2021/day03.rs", "rank": 15, "score": 132376.70867207582 }, { "content": "fn sum_json_value(value: &JsonValue, part2: bool) -> i32 {\n\n match value {\n\n JsonValue::Number(n) => *n,\n\n JsonValue::Array(vec) => vec.iter().map(|value| sum_json_value(value, part2)).sum(),\n\n JsonValue::Object(map) => {\n\n if part2\n\n && map\n\n .values()\n\n .any(|value| value == &JsonValue::String(b\"red\"))\n\n {\n\n 0\n\n } else {\n\n map.values().map(|value| sum_json_value(value, part2)).sum()\n\n }\n\n }\n\n _ => 0,\n\n }\n\n}\n\n\n", "file_path": "crates/core/src/year2015/day12.rs", "rank": 16, "score": 132261.73290438106 }, { "content": "fn parse_digits(input_string: &str) -> Result<Vec<i32>, String> {\n\n let result = input_string\n\n .chars()\n\n .map(|b| {\n\n b.to_digit(10)\n\n .map(|b| b as i32)\n\n .ok_or_else(|| \"Invalid input\".to_string())\n\n })\n\n .collect::<Result<Vec<_>, String>>()?;\n\n if result.len() > 1000 {\n\n return Err(\"Too big input\".to_string());\n\n }\n\n Ok(result)\n\n}\n\n\n", 
"file_path": "crates/core/src/year2019/day16.rs", "rank": 17, "score": 129089.68857348763 }, { "content": "pub fn solve(input: &mut Input) -> Result<i32, String> {\n\n let ship_position = (0, 0);\n\n let ship_direction = (1, 0);\n\n let waypoint = (10, -1);\n\n\n\n let mut entities = [ship_position, ship_direction, waypoint];\n\n let moved_entity_idx = input.part_values(SHIP_POSITION_ENTITY_IDX, WAYPOINT_ENTITY_IDX);\n\n let rotated_entity_idx = input.part_values(SHIP_DIRECTION_ENTITY_IDX, WAYPOINT_ENTITY_IDX);\n\n\n\n let rotation_matrices: [(i32, i32, i32, i32); 3] = [\n\n (0, -1, 1, 0), // R90 / L270\n\n (-1, 0, 0, -1), // R180 / L180\n\n (0, 1, -1, 0), // R270 / L90\n\n ];\n\n\n\n #[cfg(feature = \"visualization\")]\n\n let is_part_one = input.is_part_one();\n\n #[cfg(feature = \"visualization\")]\n\n let mut renderer = Renderer::new(&mut input.painter);\n\n #[cfg(feature = \"visualization\")]\n", "file_path": "crates/core/src/year2020/day12.rs", "rank": 18, "score": 127895.8669378298 }, { "content": "pub fn solve(input: &mut Input) -> Result<i32, String> {\n\n if input.is_part_one() {\n\n let mut distance_to_oxygen = -1;\n\n search_space_ship(input.text, |_, is_oxygen, distance| {\n\n if is_oxygen {\n\n distance_to_oxygen = distance;\n\n }\n\n })?;\n\n Ok(distance_to_oxygen)\n\n } else {\n\n // Contains (pos_x, pos_y).\n\n let mut locations_without_oxygen = HashSet::new();\n\n // Contains ((pos_x, pos_y), distance_from_oxygen).\n\n let mut to_visit = VecDeque::new();\n\n\n\n search_space_ship(input.text, |position, is_oxygen, _| {\n\n if is_oxygen {\n\n to_visit.push_back((position, 0));\n\n } else {\n\n locations_without_oxygen.insert(position);\n", "file_path": "crates/core/src/year2019/day15.rs", "rank": 19, "score": 127895.8669378298 }, { "content": "pub fn solve(input: &mut Input) -> Result<i32, String> {\n\n let mut floor = 0;\n\n for (idx, c) in input.text.chars().enumerate() {\n\n floor += match c {\n\n '(' => 1,\n\n ')' => -1,\n\n _ => {\n\n return 
Err(format!(\"Invalid char at offset {}: '{}'\", idx, c));\n\n }\n\n };\n\n if input.is_part_two() && floor == -1 {\n\n return Ok(idx as i32 + 1);\n\n }\n\n }\n\n Ok(floor)\n\n}\n\n\n", "file_path": "crates/core/src/year2015/day01.rs", "rank": 20, "score": 127895.8669378298 }, { "content": "pub fn solve(input: &mut Input) -> Result<i32, String> {\n\n let mut result = 0;\n\n visit_rooms(input.text, |cost| {\n\n if input.is_part_one() {\n\n result = max(result, cost);\n\n } else if cost >= 1000 {\n\n result += 1;\n\n }\n\n })?;\n\n Ok(result)\n\n}\n\n\n", "file_path": "crates/core/src/year2018/day20.rs", "rank": 21, "score": 127895.8669378298 }, { "content": "pub fn solve(input: &mut Input) -> Result<i32, String> {\n\n let maze = Maze::parse(input.text, input.is_part_one())?;\n\n\n\n let mut to_visit = VecDeque::new();\n\n let mut visited = HashSet::new();\n\n // Contains ((x,y), distance, level):\n\n to_visit.push_back((maze.start_location, 0, 0));\n\n // Contains ((x,y), level):\n\n visited.insert((maze.start_location, 0));\n\n\n\n while let Some((visiting, distance, level)) = to_visit.pop_front() {\n\n let new_distance = distance + 1;\n\n\n\n for (new_location, level_difference) in DIRECTIONS\n\n .iter()\n\n .map(|&(dx, dy)| ((visiting.0 + dx, visiting.1 + dy), 0))\n\n .chain(\n\n if let Some(&(new_location, level_difference)) = maze.portals.get(&visiting) {\n\n Some((new_location, level_difference)).into_iter()\n\n } else {\n", "file_path": "crates/core/src/year2019/day20.rs", "rank": 22, "score": 127895.8669378298 }, { "content": "pub fn solve(input: &mut Input) -> Result<i32, String> {\n\n let mut horizontal_position = 0_i32;\n\n let mut depth = 0_i32;\n\n let mut aim = 0_i32;\n\n\n\n for (line_idx, line) in input.text.lines().enumerate() {\n\n match line\n\n .split_once(' ')\n\n .map(|(d, x)| (d, x.parse::<i16>().map(i32::from), input.part))\n\n {\n\n Some((\"forward\", Ok(amount), _)) => {\n\n horizontal_position += amount;\n\n depth += aim * amount;\n\n 
}\n\n Some((\"down\", Ok(amount), Part::One)) => {\n\n depth += amount;\n\n }\n\n Some((\"down\", Ok(amount), Part::Two)) => {\n\n aim += amount;\n\n }\n", "file_path": "crates/core/src/year2021/day02.rs", "rank": 23, "score": 127895.8669378298 }, { "content": "// https://www.forrestthewoods.com/blog/solving-advent-of-code-in-under-a-second/\n\npub fn solve(input: &mut Input) -> Result<i32, String> {\n\n let bots = Nanobot::parse(input.text)?;\n\n\n\n if input.is_part_one() {\n\n let strongest_bot = bots\n\n .iter()\n\n .max_by(|x, y| x.radius.cmp(&y.radius))\n\n .ok_or(\"No robot specified\")?;\n\n return Ok(bots\n\n .iter()\n\n .filter(|&bot| strongest_bot.is_bot_within_range(bot))\n\n .count() as i32);\n\n }\n\n\n\n let mut octree = Octree::new(&bots);\n\n let origin = Position::new(0, 0, 0);\n\n let mut best_leaf: Option<Rc<RefCell<OctreeNode>>> = None;\n\n\n\n while let Some(leaf) = octree.leaves.pop() {\n\n let mut inner_leaf = leaf.borrow_mut();\n", "file_path": "crates/core/src/year2018/day23.rs", "rank": 24, "score": 127895.8669378298 }, { "content": "pub fn solve(input: &mut Input) -> Result<i32, String> {\n\n let mut id_assigner = IdAssigner::new();\n\n\n\n let mut happiness_changes = Vec::new();\n\n for line in input.text.lines() {\n\n // \"Alice would lose 79 happiness units by sitting next to Carol.\"\n\n let words = line.split(' ').collect::<Vec<_>>();\n\n if words.len() != 11 {\n\n return Err(\"Invalid line not consisting of 11 words\".to_string());\n\n }\n\n\n\n let person_name = words[0];\n\n let happiness_change = words[3]\n\n .parse::<i32>()\n\n .map_err(|_| \"Invalid happiness change\")?\n\n * if words[2] == \"gain\" { 1 } else { -1 };\n\n let other_name = &words[10]\n\n .strip_suffix('.')\n\n .ok_or_else(|| \"Line not ending with a period\".to_string())?;\n\n\n", "file_path": "crates/core/src/year2015/day13.rs", "rank": 25, "score": 127895.8669378298 }, { "content": "pub fn solve(input: &mut Input) -> Result<i32, String> {\n\n let error_mapper 
= |_| \"Invalid number\";\n\n\n\n let mut ingredients = Vec::new();\n\n for line in input.text.lines() {\n\n let words = line.split(' ').collect::<Vec<_>>();\n\n if words.len() != 11 || words.iter().any(|s| s.is_empty()) {\n\n return Err(\"Invalid line not consisting of 11 words\".to_string());\n\n }\n\n\n\n let capacity = words[2][0..words[2].len() - 1]\n\n .parse::<i32>()\n\n .map_err(error_mapper)?;\n\n let durability = words[4][0..words[4].len() - 1]\n\n .parse::<i32>()\n\n .map_err(error_mapper)?;\n\n let flavour = words[6][0..words[6].len() - 1]\n\n .parse::<i32>()\n\n .map_err(error_mapper)?;\n\n let texture = words[8][0..words[8].len() - 1]\n", "file_path": "crates/core/src/year2015/day15.rs", "rank": 26, "score": 127895.8669378298 }, { "content": "pub fn solve(input: &mut Input) -> Result<i32, String> {\n\n const RACE_LENGTH_SECONDS: i32 = 2503;\n\n\n\n let mut reindeers = Vec::new();\n\n for line in input.text.lines() {\n\n // \"Comet can fly 14 km/s for 10 seconds, but then must rest for 127 seconds.\"\n\n let words = line.split(' ').collect::<Vec<_>>();\n\n if words.len() != 15 {\n\n return Err(\"Invalid reindeer line - not 14 words\".to_string());\n\n }\n\n let speed = words[3]\n\n .parse::<i32>()\n\n .map_err(|_| \"Invalid deer speed\".to_string())?;\n\n let speed_duration = words[6]\n\n .parse::<i32>()\n\n .map_err(|_| \"Invalid deer duration\".to_string())?;\n\n let rest_duration = words[13]\n\n .parse::<i32>()\n\n .map_err(|_| \"Invalid rest duration\".to_string())?;\n\n reindeers.push(Reindeer {\n", "file_path": "crates/core/src/year2015/day14.rs", "rank": 27, "score": 127895.8669378298 }, { "content": "pub fn solve(input: &mut Input) -> Result<i32, String> {\n\n let mut grid = Grid::parse(input.text)?;\n\n if input.is_part_one() {\n\n let mut sum = 0;\n\n for y in 0..=grid.target_y {\n\n for x in 0..=grid.target_x {\n\n sum += grid.risk_level(x, y) as i32;\n\n }\n\n }\n\n Ok(sum)\n\n } else {\n\n let mut to_visit = BinaryHeap::new();\n\n let mut 
visited = HashSet::new();\n\n\n\n // (-(cost+heuristic), -cost, x, y, equipment)\n\n to_visit.push((0, 0, 0_i16, 0_i16, Equipment::Torch));\n\n\n\n let heuristic = |x: Coordinate, y: Coordinate, equipment, g: &Grid| -> i32 {\n\n ((x - g.target_x).abs()\n\n + (y - g.target_y).abs()\n", "file_path": "crates/core/src/year2018/day22.rs", "rank": 28, "score": 127895.8669378298 }, { "content": "pub fn solve(input: &mut Input) -> Result<i32, String> {\n\n let mut registers: HashMap<&str, i32> = HashMap::new();\n\n\n\n let mut highest_value = 0;\n\n\n\n for line in input.text.lines() {\n\n let parts: Vec<&str> = line.split(' ').collect();\n\n if parts.len() != 7 {\n\n return Err(\"Invalid input - every line should have 7 words\".to_string());\n\n }\n\n\n\n let condition_register = parts[4];\n\n let &condition_value = registers.get(condition_register).unwrap_or(&0);\n\n let comparison = parts[5];\n\n let compared_with = parts[6]\n\n .parse::<i32>()\n\n .map_err(|_| \"Invalid TODO\".to_string())?;\n\n\n\n if match comparison {\n\n \">\" => condition_value > compared_with,\n", "file_path": "crates/core/src/year2017/day08.rs", "rank": 29, "score": 127895.8669378298 }, { "content": "pub fn solve(input: &mut Input) -> Result<i32, String> {\n\n const MAX_ROUNDS: u16 = 500;\n\n\n\n let mut attack_strength = input.part_values(3, 4);\n\n\n\n loop {\n\n let mut board = Board::parse(input.text, attack_strength)?;\n\n\n\n loop {\n\n if board.round > MAX_ROUNDS {\n\n return Err(format!(\"No solution found in {} rounds\", MAX_ROUNDS));\n\n }\n\n\n\n board.perform_round();\n\n\n\n if input.is_part_two() && board.elf_died {\n\n break;\n\n } else if let Some(outcome) = board.calculate_outcome() {\n\n return Ok(outcome);\n\n }\n\n }\n\n\n\n attack_strength += 1;\n\n }\n\n}\n\n\n", "file_path": "crates/core/src/year2018/day15.rs", "rank": 30, "score": 127895.8669378298 }, { "content": "pub fn solve(input: &mut Input) -> Result<i32, String> {\n\n let mut current_idx = 0_usize;\n\n let 
json_value = parse(input.text.as_bytes(), &mut current_idx)?;\n\n let sum = sum_json_value(&json_value, input.is_part_two());\n\n Ok(sum)\n\n}\n\n\n", "file_path": "crates/core/src/year2015/day12.rs", "rank": 31, "score": 127895.8669378298 }, { "content": "pub fn solve(input: &mut Input) -> Result<i32, String> {\n\n let points = parse_input(input.text)?;\n\n\n\n let (left, top, right, bottom) = points.iter().fold(\n\n (std::i32::MAX, std::i32::MAX, std::i32::MIN, std::i32::MIN),\n\n |(left, top, right, bottom), point| {\n\n (\n\n cmp::min(left, point.x),\n\n cmp::min(top, point.y),\n\n cmp::max(right, point.x),\n\n cmp::max(bottom, point.y),\n\n )\n\n },\n\n );\n\n\n\n if input.is_part_one() {\n\n let mut id_to_count = HashMap::new();\n\n let mut point_ids_with_infinite_area = HashSet::new();\n\n\n\n for y in top..=bottom {\n", "file_path": "crates/core/src/year2018/day06.rs", "rank": 32, "score": 127895.8669378298 }, { "content": "pub fn solve(input: &mut Input) -> Result<i32, String> {\n\n let initial_groups = ArmyGroup::parse(input.text)?;\n\n\n\n if input.is_part_one() {\n\n let groups = execute_battle(initial_groups);\n\n let result = groups.iter().fold(0, |acc, g| acc + g.units);\n\n Ok(result)\n\n } else {\n\n let mut boost = 1;\n\n loop {\n\n let mut groups = initial_groups.clone();\n\n for g in groups.iter_mut() {\n\n if g.immune_system {\n\n g.attack_damage += boost;\n\n }\n\n }\n\n\n\n let groups = execute_battle(groups);\n\n\n\n if groups.iter().all(|g| g.immune_system) {\n\n let result = groups.iter().fold(0, |acc, g| acc + g.units);\n\n return Ok(result);\n\n }\n\n\n\n boost += 1;\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/core/src/year2018/day24.rs", "rank": 33, "score": 127895.8669378298 }, { "content": "pub fn solve(input: &mut Input) -> Result<i32, String> {\n\n let trench = Trench::parse(input.text).ok_or_else(|| \"Unable to parse trench\".to_string())?;\n\n\n\n let mut max_y = 0;\n\n let mut count = 0;\n\n let max_x_to_try = 
*trench.x_range.end();\n\n let min_y_to_try = *trench.y_range.start();\n\n for initial_y_velocity in min_y_to_try..=1000 {\n\n for initial_x_velocity in 1..=max_x_to_try {\n\n if let Some(value) =\n\n probes_ends_in_trench(initial_x_velocity, initial_y_velocity, &trench)\n\n {\n\n max_y = value;\n\n count += 1;\n\n }\n\n }\n\n }\n\n Ok(input.part_values(max_y, count))\n\n}\n\n\n", "file_path": "crates/core/src/year2021/day17.rs", "rank": 34, "score": 127895.8669378298 }, { "content": "/// Using double-width coordinates - see https://www.redblobgames.com/grids/hexagons/\n\npub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let mut black_tiles = HashSet::new();\n\n\n\n for line_str in input.text.lines() {\n\n let mut location = (0_i32, 0_i32);\n\n let mut string_position = 0;\n\n let line = line_str.as_bytes();\n\n while string_position < line.len() {\n\n let first_char = line[string_position];\n\n let diff = match first_char {\n\n b'e' => (2, 0),\n\n b'w' => (-2, 0),\n\n b's' | b'n' => {\n\n string_position += 1;\n\n match (first_char, line.get(string_position)) {\n\n (b'n', Some(b'e')) => (1, 1),\n\n (b'n', Some(b'w')) => (-1, 1),\n\n (b's', Some(b'e')) => (1, -1),\n\n (b's', Some(b'w')) => (-1, -1),\n\n _ => {\n", "file_path": "crates/core/src/year2020/day24.rs", "rank": 35, "score": 127887.67194774636 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let problem_input = ProblemInput::parse(input.text)?;\n\n\n\n let all_opcodes = [\n\n Opcode::Addr,\n\n Opcode::Addi,\n\n Opcode::Mulr,\n\n Opcode::Muli,\n\n Opcode::Banr,\n\n Opcode::Bani,\n\n Opcode::Borr,\n\n Opcode::Bori,\n\n Opcode::Setr,\n\n Opcode::Seti,\n\n Opcode::Gtir,\n\n Opcode::Gtri,\n\n Opcode::Gtrr,\n\n Opcode::Eqir,\n\n Opcode::Eqri,\n\n Opcode::Eqrr,\n", "file_path": "crates/core/src/year2018/day16.rs", "rank": 36, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let mut program = parse(input.text)?;\n\n if 
input.is_part_one() {\n\n // The last three instructions are (as seen with program.pretty_print()):\n\n //\n\n // 28: r4 = (r3 == r0) ? 1 : 0\n\n // 29: goto r4 + 30\n\n // 30: goto 6\n\n //\n\n // which exits on instruction 29 only if r4 is non-zero, which means r0 must equal r3.\n\n //\n\n // Since this is the only place in the program where register 0 is referenced, we can\n\n // set register 0 to the value it's first compared with here to exit as soon as possible.\n\n\n\n #[cfg(feature = \"debug-output\")]\n\n program.pretty_print(\"Initial program\");\n\n let mut loop_count = 0;\n\n while program.instruction_pointer()? != 29 {\n\n program.execute_one_instruction()?;\n\n\n", "file_path": "crates/core/src/year2018/day21.rs", "rank": 37, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let on_error = || \"Invalid input\".to_string();\n\n\n\n let mut parts = input.text.splitn(2, \"\\n\\n\");\n\n let mut player_1_cards =\n\n parse_player_cards(parts.next().ok_or_else(on_error)?).ok_or_else(on_error)?;\n\n let mut player_2_cards =\n\n parse_player_cards(parts.next().ok_or_else(on_error)?).ok_or_else(on_error)?;\n\n\n\n if player_1_cards.len() != player_2_cards.len() {\n\n return Err(on_error());\n\n }\n\n\n\n let winner = play(\n\n &mut player_1_cards,\n\n &mut player_2_cards,\n\n input.is_part_two(),\n\n );\n\n\n\n let winner_cards = match winner {\n", "file_path": "crates/core/src/year2020/day22.rs", "rank": 38, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let mut program = Program::parse(input.text)?;\n\n\n\n if input.is_part_one() {\n\n return program.execute_until_halt(10_000_000);\n\n }\n\n\n\n program.registers.values[0] = 1;\n\n\n\n #[cfg(feature = \"debug-output\")]\n\n program.pretty_print(\"Initial\");\n\n program.optimize();\n\n #[cfg(feature = \"debug-output\")]\n\n program.pretty_print(\"Optimized\");\n\n\n\n if 
program.instructions.len() < 3 {\n\n return Err(\"Too few instructions\".to_string());\n\n }\n\n let register = program.instructions[2].c as usize;\n\n if register > 5 {\n", "file_path": "crates/core/src/year2018/day19.rs", "rank": 39, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let on_error = || \"Invalid input\".to_string();\n\n let map_error = |_| on_error();\n\n\n\n let (rules_str, messages_str) = input.text.split_once(\"\\n\\n\").ok_or_else(on_error)?;\n\n\n\n let mut rules = Rules::parse(rules_str).map_err(map_error)?;\n\n\n\n if input.is_part_two() {\n\n rules.add_line(\"8: 42 | 42 8\").map_err(map_error)?;\n\n rules.add_line(\"11: 42 31 | 42 11 31\").map_err(map_error)?;\n\n }\n\n\n\n Ok(messages_str\n\n .lines()\n\n .filter(|line| rules.matches(line))\n\n .count() as u64)\n\n}\n\n\n", "file_path": "crates/core/src/year2020/day19.rs", "rank": 40, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let instructions = input\n\n .text\n\n .lines()\n\n .map(Instruction::parse)\n\n .collect::<Option<Vec<_>>>()\n\n .ok_or_else(|| \"Invalid input\".to_string())?;\n\n\n\n let input_blocks = extract_input_blocks(&instructions)?;\n\n\n\n let mut model_number = [0; NUM_DIGITS_IN_MODEL_NUMBER];\n\n let mut stack = Vec::new();\n\n\n\n for (block_idx, block) in input_blocks.iter().enumerate() {\n\n // See steps in extract_input_blocks():\n\n // x = 0 if (z % 26 + $X_NUMBER) == w else 1\n\n // z = (z / $Z_DIVISION) * (25 * x + 1) + (w + $Y_NUMBER) * x\n\n // Written out when x is 1:\n\n // z = (z / $Z_DIVISION) * 26 + w + $Y_NUMBER\n\n // Written out when x is 0:\n", "file_path": "crates/core/src/year2021/day24.rs", "rank": 41, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let mut lines = input.text.lines();\n\n\n\n let template = lines.next().ok_or(\"No first line\")?.as_bytes();\n\n if 
template.len() < 2 {\n\n return Err(\"No pairs in the template\".to_string());\n\n }\n\n\n\n lines.next();\n\n\n\n let productions = lines\n\n .map(|line| {\n\n let bytes = line.as_bytes();\n\n if bytes.len() != 7 {\n\n return Err(\"Production not in format 'AB -> C'\".to_string());\n\n }\n\n Ok(((bytes[0], bytes[1]), bytes[6]))\n\n })\n\n .collect::<Result<Vec<_>, _>>()?;\n\n\n", "file_path": "crates/core/src/year2021/day14.rs", "rank": 42, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n const PREAMBLE_LENGTH: usize = 25;\n\n\n\n let numbers = parse_lines::<u64>(input.text)?;\n\n\n\n if numbers.len() <= PREAMBLE_LENGTH {\n\n return Err(format!(\"Too few input numbers ({})\", numbers.len()));\n\n }\n\n\n\n let invalid_number = numbers\n\n .iter()\n\n .enumerate()\n\n .skip(PREAMBLE_LENGTH)\n\n .find_map(|(idx, &number)| {\n\n for j in (idx - PREAMBLE_LENGTH)..idx {\n\n for k in j + 1..idx {\n\n if numbers[j] + numbers[k] == number {\n\n return None;\n\n }\n\n }\n", "file_path": "crates/core/src/year2020/day09.rs", "rank": 43, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let text = input.text.as_bytes();\n\n uncompressed_size(text, input.is_part_two())\n\n}\n\n\n", "file_path": "crates/core/src/year2016/day09.rs", "rank": 44, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let mut grid = Grid::parse(input.text)?;\n\n let w_range = input.part_values(0, 1);\n\n\n\n for _ in 0..6 {\n\n grid.cycle(w_range);\n\n }\n\n\n\n Ok(grid.occupied_coordinates.len() as u64)\n\n}\n\n\n", "file_path": "crates/core/src/year2020/day17.rs", "rank": 45, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n (if input.is_part_one() {\n\n State::<2>::parse(input.text).least_total_energy_to_organize()\n\n } else {\n\n 
State::<4>::parse(input.text).least_total_energy_to_organize()\n\n })\n\n .ok_or_else(|| \"No solution found\".to_string())\n\n}\n\n\n\nconst HALLWAY_SPACES: usize = 7;\n\n\n\n/// [ H0 H1 H2 H3 H4 H5 H6 ]\n\n/// R0 R1 R2 R3\n\n/// R0 R1 R2 R3\n", "file_path": "crates/core/src/year2021/day23.rs", "rank": 46, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let map = CaveMap::parse(input.text)?;\n\n\n\n let visited_once = 1 << map.start_cave_identifier;\n\n let visited_small_twice = false;\n\n Ok(map.search(\n\n visited_once,\n\n visited_small_twice,\n\n map.start_cave_identifier,\n\n map.end_cave_identifier,\n\n input.is_part_two(),\n\n ))\n\n}\n\n\n", "file_path": "crates/core/src/year2021/day12.rs", "rank": 47, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n const MAX_FIELD_VALUE: u32 = 1024;\n\n\n\n let mut parts = input.text.splitn(3, \"\\n\\n\");\n\n let on_error = || \"Invalid input\".to_string();\n\n let map_error = |_| on_error();\n\n\n\n let ticket_fields_str = parts.next().ok_or_else(on_error)?;\n\n let your_ticket_str = parts.next().ok_or_else(on_error)?;\n\n let nearby_tickets_str = parts.next().ok_or_else(on_error)?;\n\n\n\n let mut your_ticket_values = Vec::new();\n\n for part in your_ticket_str\n\n .lines()\n\n .nth(1)\n\n .ok_or_else(on_error)?\n\n .split(',')\n\n {\n\n your_ticket_values.push(part.parse::<u32>().map_err(map_error)?);\n\n }\n", "file_path": "crates/core/src/year2020/day16.rs", "rank": 48, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let mut game = Game::parse(input.text)?;\n\n if input.is_part_one() {\n\n let mut die_roll_count = 0_u64;\n\n let mut p1_score_saved = 0;\n\n let mut p2_score_saved = 0;\n\n loop {\n\n let next_dice_sum = 6 + die_roll_count * 3;\n\n die_roll_count += 3;\n\n game.on_dice_sum(next_dice_sum);\n\n p1_score_saved += 
u32::from(game.player_1_score);\n\n game.player_1_score = 0;\n\n if p1_score_saved >= 1000 {\n\n return Ok(u64::from(p2_score_saved) * die_roll_count);\n\n }\n\n game = game.switch_players();\n\n std::mem::swap(&mut p1_score_saved, &mut p2_score_saved);\n\n }\n\n } else {\n\n let mut outcome_cache = vec![GameOutcome::default(); Game::MAX_POSSIBLE_STATES];\n\n let wins = play_game_part_2(game, &mut outcome_cache);\n\n Ok(std::cmp::max(wins.player_1_wins, wins.player_2_wins))\n\n }\n\n}\n\n\n", "file_path": "crates/core/src/year2021/day21.rs", "rank": 49, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let mut cuboids = input\n\n .text\n\n .lines()\n\n .map(Cuboid::parse)\n\n .collect::<Option<Vec<_>>>()\n\n .ok_or(\"Invalid input\")?;\n\n\n\n if input.is_part_one() {\n\n cuboids.retain(|c| {\n\n !(c.max_corner.x < -50\n\n || c.min_corner.x > 50\n\n || c.max_corner.y < -50\n\n || c.min_corner.y > 50\n\n || c.max_corner.z < -50\n\n || c.min_corner.z > 50)\n\n });\n\n for c in cuboids.iter_mut() {\n\n c.min_corner.x = c.min_corner.x.clamp(-50, 50);\n\n c.max_corner.x = c.max_corner.x.clamp(-50, 50);\n\n c.min_corner.y = c.min_corner.y.clamp(-50, 50);\n\n c.max_corner.y = c.max_corner.y.clamp(-50, 50);\n\n c.min_corner.z = c.min_corner.z.clamp(-50, 50);\n\n c.max_corner.z = c.max_corner.z.clamp(-50, 50);\n\n }\n\n }\n\n\n\n Ok(Cuboid::combine_and_count_enclosed_cubes(&cuboids))\n\n}\n\n\n", "file_path": "crates/core/src/year2021/day22.rs", "rank": 50, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let mut syntax_error_score = 0;\n\n let mut autocomplete_scores = Vec::new();\n\n\n\n 'next_line: for line in input.text.lines() {\n\n let mut expecting = Vec::new();\n\n for c in line.bytes() {\n\n let closing = match c {\n\n b'(' => b')',\n\n b'[' => b']',\n\n b'{' => b'}',\n\n b'<' => b'>',\n\n b')' | b']' | b'}' | b'>' => {\n\n if let Some(expected_closer) = 
expecting.pop() {\n\n if expected_closer != c {\n\n syntax_error_score += score_syntax_error(c);\n\n continue 'next_line;\n\n }\n\n continue;\n\n } else {\n", "file_path": "crates/core/src/year2021/day10.rs", "rank": 51, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n if input.is_part_one() {\n\n solve_with_bit_mask::<BitMaskV1>(input.text, 1000)\n\n } else {\n\n solve_with_bit_mask::<BitMaskV2>(input.text, 100_000)\n\n }\n\n}\n\n\n", "file_path": "crates/core/src/year2020/day14.rs", "rank": 52, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n // Indexed by days left mapping to number of fishes with that many days left:\n\n let mut count_per_day_left = VecDeque::from([0; 9]);\n\n\n\n for day_left_str in input.text.split(',') {\n\n match day_left_str.parse::<u8>() {\n\n Ok(day_left) if day_left <= 8 => {\n\n count_per_day_left[day_left as usize] += 1;\n\n }\n\n _ => {\n\n return Err(\n\n \"Input is not comma-separated list of integers in the range [0,8].\".to_string(),\n\n );\n\n }\n\n }\n\n }\n\n\n\n for _day in 0..input.part_values(80, 256) {\n\n count_per_day_left.rotate_left(1);\n\n // Those with 0 days left have given birth to new ones with 8 days\n\n // left - but we need to also add them back (reset to 6 days left):\n\n count_per_day_left[6] += count_per_day_left[8];\n\n }\n\n\n\n Ok(count_per_day_left.iter().sum())\n\n}\n\n\n", "file_path": "crates/core/src/year2021/day06.rs", "rank": 53, "score": 127883.49469056357 }, { "content": "/// Key properties from the problem description:\n\n///\n\n/// - Each tile is a 8x8 grid.\n\n/// - Each tile edge is 10 bits (so max 1024 distinct values).\n\n/// - The composed image is square.\n\n/// - The outermost edges tile edges won't line up with any other tiles.\n\npub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let tiles = Tile::parse(input.text)?;\n\n\n\n if input.is_part_one() {\n\n return 
Ok(tiles\n\n .iter()\n\n .filter_map(|tile| {\n\n if tile.is_corner() {\n\n Some(u64::from(tile.id))\n\n } else {\n\n None\n\n }\n\n })\n\n .product());\n\n }\n\n\n\n let composed_image_tile_width = (tiles.len() as f64).sqrt() as u8;\n\n let composed_image_pixel_width = composed_image_tile_width * 8;\n\n\n\n let a_corner = *tiles\n", "file_path": "crates/core/src/year2020/day20.rs", "rank": 54, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n if input.is_part_two() {\n\n return Ok(0);\n\n }\n\n\n\n let on_error = || \"Invalid input\".to_string();\n\n\n\n let mut lines = input.text.lines();\n\n\n\n let card_public_key = lines\n\n .next()\n\n .ok_or_else(on_error)?\n\n .parse::<u32>()\n\n .map_err(|_| on_error())?;\n\n\n\n let door_public_key = lines\n\n .next()\n\n .ok_or_else(on_error)?\n\n .parse::<u32>()\n\n .map_err(|_| on_error())?;\n", "file_path": "crates/core/src/year2020/day25.rs", "rank": 55, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let mut parser = Parser::new(input.is_part_two());\n\n input\n\n .text\n\n .lines()\n\n .map(|line| {\n\n parser.reset();\n\n for char in line.bytes() {\n\n parser.consume(char)?;\n\n }\n\n parser.finish()\n\n })\n\n .sum::<Result<CalculatorValue, String>>()\n\n}\n\n\n", "file_path": "crates/core/src/year2020/day18.rs", "rank": 56, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let hex_bytes = input.text.as_bytes();\n\n let mut transmission = Transmission::new(hex_bytes);\n\n if input.is_part_one() {\n\n let mut version_sum = 0_u64;\n\n while let Some(packet) = Packet::parse(&mut transmission) {\n\n version_sum += u64::from(packet.version);\n\n }\n\n Ok(version_sum)\n\n } else {\n\n Packet::parse_and_eval(&mut transmission)\n\n .ok_or_else(|| \"Unable to parse outermost package - check transmission\".to_string())\n\n }\n\n}\n\n\n", "file_path": 
"crates/core/src/year2021/day16.rs", "rank": 57, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n const MAX_STEPS_PART_TWO: usize = 100_000;\n\n\n\n let mut board = Board::parse(input.text)?;\n\n\n\n for step in 1..=input.part_values(100, MAX_STEPS_PART_TWO) {\n\n let before = board.num_flashes;\n\n\n\n board.advance();\n\n\n\n if input.is_part_two() && (board.num_flashes - before) == 100 {\n\n return Ok(step as u64);\n\n }\n\n }\n\n\n\n if input.is_part_two() {\n\n return Err(format!(\n\n \"No simultaneous flash within {} steps\",\n\n MAX_STEPS_PART_TWO\n\n ));\n\n }\n\n\n\n Ok(board.num_flashes)\n\n}\n\n\n", "file_path": "crates/core/src/year2021/day11.rs", "rank": 58, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n let words = input.text.split(' ').collect::<Vec<_>>();\n\n if words.len() != 19 || words[16].is_empty() || words[18].is_empty() {\n\n return Err(\"Invalid input - not expected structure\".to_string());\n\n }\n\n\n\n let wanted_row = words[16][0..(words[16].len() - 1)]\n\n .parse::<u32>()\n\n .map_err(|_| \"Invalid input\")?;\n\n let wanted_col = words[18][0..(words[18].len() - 1)]\n\n .parse::<u32>()\n\n .map_err(|_| \"Invalid input\")?;\n\n\n\n let mut current_code = 20_151_125;\n\n let mut current_row = 1;\n\n let mut current_col = 1;\n\n while (current_row, current_col) != (wanted_row, wanted_col) {\n\n if current_row == 1 {\n\n current_row = 1 + current_col;\n\n current_col = 1;\n\n } else {\n\n current_col += 1;\n\n current_row -= 1;\n\n }\n\n current_code = (current_code * 252_533) % 33_554_393;\n\n }\n\n Ok(current_code)\n\n}\n\n\n", "file_path": "crates/core/src/year2015/day25.rs", "rank": 59, "score": 127883.49469056357 }, { "content": "pub fn solve(input: &mut Input) -> Result<u64, String> {\n\n if input.is_part_one() {\n\n part1_nth(input.text, 1000)\n\n } else {\n\n let mut moons = Moons::parse(input.text)?;\n\n let 
initial_moons = moons.clone();\n\n let mut cycles: [Option<u64>; 3] = [None; 3];\n\n\n\n let mut step = 0;\n\n while cycles.iter().any(Option::is_none) {\n\n moons.step();\n\n step += 1;\n\n\n\n for (i, cycle) in cycles.iter_mut().enumerate() {\n\n if cycle.is_none() {\n\n let mut same = true;\n\n for moon in 0..4 {\n\n if initial_moons.positions[moon][i] != moons.positions[moon][i]\n\n || initial_moons.velocities[moon][i] != moons.velocities[moon][i]\n\n {\n", "file_path": "crates/core/src/year2019/day12.rs", "rank": 60, "score": 127883.49469056357 }, { "content": "fn uncompressed_size(text: &[u8], recursive: bool) -> Result<u64, String> {\n\n let error_mapper_uf8 = |_| \"Invalid input\";\n\n let error_mapper_parse = |_| \"Invalid input\";\n\n let mut start_parenthesis_idx = None;\n\n let mut uncompressed_len = 0_u64;\n\n\n\n let mut i = 0;\n\n while i < text.len() {\n\n let c = text[i];\n\n if c == b'(' {\n\n start_parenthesis_idx = Some(i);\n\n } else if c == b')' {\n\n if let Some(from) = start_parenthesis_idx {\n\n let inside_parenthesis = &text[from + 1..i];\n\n let parts = inside_parenthesis\n\n .split(|&c| c == b'x')\n\n .collect::<Vec<&[u8]>>();\n\n if parts.len() != 2 {\n\n return Err(\"Invalid input\".into());\n\n }\n", "file_path": "crates/core/src/year2016/day09.rs", "rank": 61, "score": 126232.5829096425 }, { "content": "fn all_pairs<T>(elements: &[T]) -> impl Iterator<Item = (&T, &T)> {\n\n (0..elements.len())\n\n .flat_map(move |i| (i + 1..elements.len()).map(move |j| (&elements[i], &elements[j])))\n\n}\n\n\n", "file_path": "crates/core/src/year2021/day19.rs", "rank": 62, "score": 123851.44237032514 }, { "content": "/// Return (max_seen, (x, y)) of station.\n\npub fn determine_station(points: &[(usize, usize)]) -> Result<(usize, (usize, usize)), String> {\n\n points\n\n .iter()\n\n .map(|&this_point| {\n\n let seen_count = points\n\n .iter()\n\n .filter(|&&point| point != this_point)\n\n .fold(HashSet::new(), |mut seen, other_point| {\n\n let mut 
distance_x = other_point.0 as i64 - this_point.0 as i64;\n\n let mut distance_y = other_point.1 as i64 - this_point.1 as i64;\n\n let divisor = gcd(distance_x.abs(), distance_y.abs());\n\n distance_x /= divisor;\n\n distance_y /= divisor;\n\n seen.insert((distance_x, distance_y));\n\n seen\n\n })\n\n .len();\n\n (seen_count, this_point)\n\n })\n\n .max_by_key(|&(seen_count, _)| seen_count)\n\n .ok_or_else(|| \"No points in input\".to_string())\n\n}\n\n\n", "file_path": "crates/core/src/year2019/day10.rs", "rank": 63, "score": 118183.97056545177 }, { "content": "pub fn parse_points(input_string: &str) -> Result<Vec<(usize, usize)>, String> {\n\n for c in input_string.chars() {\n\n if !(c == '#' || c == '.' || c == '\\n') {\n\n return Err(format!(\"Invalid character: {}\", c));\n\n }\n\n }\n\n\n\n let num_asteroides = input_string.chars().filter(|&c| c == '#').count();\n\n if num_asteroides < 201 {\n\n return Err(format!(\n\n \"Too few asteroids - expected at least 201, got {}\",\n\n num_asteroides\n\n ));\n\n }\n\n\n\n Ok(input_string\n\n .lines()\n\n .enumerate()\n\n .flat_map(|(row, line)| {\n\n line.chars()\n\n .enumerate()\n\n .filter_map(move |(col, character)| match character {\n\n '#' => Some((col, row)),\n\n _ => None,\n\n })\n\n })\n\n .collect())\n\n}\n\n\n", "file_path": "crates/core/src/year2019/day10.rs", "rank": 64, "score": 105978.6691382038 }, { "content": "pub fn render(map: &Map, slopes: &[(usize, usize)], painter: &mut PainterRef) {\n\n let mut num_horizontal_repeats = 1;\n\n for slope in slopes {\n\n let num_iterations = map.rows / slope.1;\n\n num_horizontal_repeats = std::cmp::max(\n\n num_horizontal_repeats,\n\n 1 + (slope.0 * num_iterations) / map.cols,\n\n );\n\n }\n\n\n\n let min_x = 0;\n\n let max_x = map.cols * num_horizontal_repeats;\n\n let min_y = 0;\n\n let max_y = map.rows;\n\n\n\n let grid_width = (max_x - min_x + 1) as i32;\n\n let grid_height = (max_y - min_y + 1) as i32;\n\n\n\n painter.set_aspect_ratio(16, 9);\n\n 
painter.end_frame();\n", "file_path": "crates/core/src/year2020/day03_renderer.rs", "rank": 65, "score": 103545.81859961656 }, { "content": "fn is_valid(field_idx: usize, value: &str) -> bool {\n\n fn in_range(string: &str, start: u32, end: u32) -> bool {\n\n (start..=end).contains(&string.parse::<u32>().unwrap_or_default())\n\n }\n\n\n\n match field_idx {\n\n 0 => value.len() == 4 && in_range(value, 1920, 2002),\n\n 1 => value.len() == 4 && in_range(value, 2010, 2020),\n\n 2 => value.len() == 4 && in_range(value, 2020, 2030),\n\n 3 => {\n\n (value.ends_with(\"cm\") && in_range(&value[0..(value.len() - 2)], 150, 193))\n\n || (value.ends_with(\"in\") && in_range(&value[0..(value.len() - 2)], 59, 76))\n\n }\n\n 4 => {\n\n value.starts_with('#')\n\n && value.len() == 7\n\n && value[1..]\n\n .bytes()\n\n .all(|c| matches!(c, b'0'..=b'9' | b'a'..=b'f'))\n\n }\n\n 5 => matches!(value, \"amb\" | \"blu\" | \"brn\" | \"gry\" | \"grn\" | \"hzl\" | \"oth\"),\n\n 6 => value.len() == 9 && value.parse::<u32>().is_ok(),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "crates/core/src/year2020/day04.rs", "rank": 66, "score": 103408.39664007505 }, { "content": "fn parse(input_string: &str) -> Result<usize, String> {\n\n input_string\n\n .parse::<usize>()\n\n .map_err(|e| format!(\"Invalid input - {}\", e))\n\n .and_then(|value| {\n\n if value == 0 {\n\n Err(\"Invalid input 0\".to_string())\n\n } else {\n\n Ok(value)\n\n }\n\n })\n\n}\n\n\n", "file_path": "crates/core/src/year2017/day03.rs", "rank": 67, "score": 103408.39664007505 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n fn is_nice_part_1(string: &&str) -> bool {\n\n if string.contains(\"ab\")\n\n || string.contains(\"cd\")\n\n || string.contains(\"pq\")\n\n || string.contains(\"xy\")\n\n {\n\n return false;\n\n }\n\n\n\n string.chars().filter(|&c| \"aeiou\".contains(c)).count() >= 3\n\n && string\n\n .as_bytes()\n\n .windows(2)\n\n .any(|window| window[0] == window[1])\n\n }\n\n\n\n fn 
is_nice_part_2(string: &&str) -> bool {\n\n fn find_subsequence(haystack: &[u8], needle: &[u8]) -> bool {\n\n haystack\n", "file_path": "crates/core/src/year2015/day05.rs", "rank": 68, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n if input.is_part_one() {\n\n return steps_to_gather_all_keys(\n\n input.text,\n\n #[cfg(feature = \"visualization\")]\n\n &mut input.painter,\n\n #[cfg(feature = \"visualization\")]\n\n 0,\n\n #[cfg(feature = \"visualization\")]\n\n 0,\n\n #[cfg(feature = \"visualization\")]\n\n 0,\n\n #[cfg(feature = \"visualization\")]\n\n 0,\n\n );\n\n }\n\n\n\n let mut map_top_left = String::new();\n\n let mut map_top_right = String::new();\n\n let mut map_bottom_left = String::new();\n", "file_path": "crates/core/src/year2019/day18.rs", "rank": 69, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n let input_polymer = input.text.as_bytes();\n\n let mut new_polymer = Vec::<PolymerUnit>::with_capacity(input_polymer.len());\n\n\n\n let candidates_for_removal = input.part_values(0..1, b'a'..b'z');\n\n\n\n candidates_for_removal\n\n .map(|to_remove_lower| {\n\n new_polymer.clear();\n\n\n\n for &unit in input_polymer\n\n .iter()\n\n .filter(|unit| !unit.eq_ignore_ascii_case(&to_remove_lower))\n\n {\n\n let unit_reacts_with_last = new_polymer\n\n .last()\n\n .map(|&last| destroys_each_other(unit, last))\n\n .unwrap_or(false);\n\n\n\n if unit_reacts_with_last {\n", "file_path": "crates/core/src/year2018/day05.rs", "rank": 70, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n let data = input\n\n .text\n\n .split_whitespace()\n\n .map(|word| {\n\n word.parse::<InputNumber>()\n\n .map_err(|error| format!(\"Invalid input: {}\", error))\n\n })\n\n .collect::<Result<Vec<InputNumber>, _>>()?;\n\n Ok(evaluate_node(&data, 0, input.is_part_one())?.1)\n\n}\n\n\n", "file_path": 
"crates/core/src/year2018/day08.rs", "rank": 71, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n const MAX_ITERATIONS: u32 = 10_000;\n\n\n\n let leave_when_seeing = input.part_values(4, 5);\n\n let part_one = input.is_part_one();\n\n\n\n let rows = input.text.lines().count() as i32;\n\n let cols = input.text.lines().next().ok_or(\"No lines\")?.len() as i32;\n\n if input.text.lines().any(|line| line.len() != cols as usize) {\n\n return Err(\"Not all lines have equal length\".to_string());\n\n }\n\n\n\n if rows * cols > i32::from(u16::MAX) {\n\n return Err(format!(\n\n \"Too big input ({}x{}) - max supported seats is {}\",\n\n cols,\n\n rows,\n\n u16::MAX\n\n ));\n\n }\n", "file_path": "crates/core/src/year2020/day11.rs", "rank": 72, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n let mut grid = Grid::from(input.text)?;\n\n #[cfg(feature = \"debug-output\")]\n\n grid.print(\"Initial\");\n\n\n\n grid.pour_water();\n\n #[cfg(feature = \"debug-output\")]\n\n grid.print(\"After pouring\");\n\n\n\n if input.is_part_one() {\n\n Ok(grid.count_water())\n\n } else {\n\n grid.dry_up();\n\n #[cfg(feature = \"debug-output\")]\n\n grid.print(\"After drying up\");\n\n Ok(grid.count_drained_water())\n\n }\n\n}\n\n\n", "file_path": "crates/core/src/year2018/day17.rs", "rank": 73, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n let mut grid = Grid::parse(input.text)?;\n\n\n\n if input.is_part_one() {\n\n for _ in 0..10 {\n\n grid.advance_minute()?;\n\n }\n\n Ok(grid.resource_value())\n\n } else {\n\n let mut seen = HashMap::new();\n\n\n\n for i in 1..1_000_000_000 {\n\n grid.advance_minute()?;\n\n\n\n let mut hasher = DefaultHasher::new();\n\n grid.cells.hash(&mut hasher);\n\n let hash_value = hasher.finish();\n\n\n\n match seen.entry(hash_value) {\n\n Entry::Occupied(entry) => {\n", "file_path": 
"crates/core/src/year2018/day18.rs", "rank": 74, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n let mut points: Vec<(i32, i32, i32, i32, usize)> = input\n\n .text\n\n .lines()\n\n .enumerate()\n\n .map(|(i, line)| {\n\n let line_number = i + 1;\n\n let parts: Vec<&str> = line.split(',').collect();\n\n if parts.len() != 4 {\n\n return Err(format!(\n\n \"Invalid input at line {} - not 4 comma-separated values\",\n\n line_number\n\n ));\n\n }\n\n let error = |e| format!(\"Invalid input at line {}: {}\", line_number, e);\n\n Ok((\n\n parts[0].parse::<i32>().map_err(error)?,\n\n parts[1].parse::<i32>().map_err(error)?,\n\n parts[2].parse::<i32>().map_err(error)?,\n\n parts[3].parse::<i32>().map_err(error)?,\n", "file_path": "crates/core/src/year2018/day25.rs", "rank": 75, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n let mut tape = HashSet::new();\n\n let mut target_steps = 0;\n\n\n\n let mut states: Vec<State> = Vec::new();\n\n let on_error = || \"Invalid input\".to_string();\n\n\n\n for (count, text) in input.text.split(\"\\n\\n\").enumerate() {\n\n if count == 0 {\n\n target_steps = text\n\n .split(' ')\n\n .nth(8)\n\n .ok_or_else(on_error)?\n\n .parse::<u32>()\n\n .map_err(|_| on_error())?;\n\n } else {\n\n let words: Vec<&str> = text.split(' ').collect();\n\n\n\n if words.len() < 69 {\n\n return Err(on_error());\n", "file_path": "crates/core/src/year2017/day25.rs", "rank": 76, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n let mut result = 0;\n\n for line in input.text.lines() {\n\n let num_chars = line.len();\n\n\n\n // Strip leading and trailing quotes.\n\n if !(line.starts_with('\"') && line.ends_with('\"') && line.len() >= 2) {\n\n return Err(\"Invalid input - not surrounded by quotes\".to_string());\n\n }\n\n let line = &line[1..line.len() - 1].as_bytes();\n\n\n\n let mut idx = 
0;\n\n // In part 2, 6 is for starting and trailing quote:\n\n let mut encoded_size = input.part_values(0, 6);\n\n while idx < line.len() {\n\n encoded_size += 1;\n\n if line[idx] == b'\\\\' {\n\n if idx + 1 == line.len() {\n\n return Err(\"Invalid input\".to_string());\n\n }\n", "file_path": "crates/core/src/year2015/day08.rs", "rank": 77, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n let map = Map::parse(input.text)?;\n\n let slopes = input.part_values(vec![(3, 1)], vec![(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)]);\n\n\n\n #[cfg(feature = \"visualization\")]\n\n render(&map, &slopes, &mut input.painter);\n\n\n\n Ok(slopes.iter().fold(1, |acc, slope| {\n\n let initial_position = (0, 0);\n\n\n\n let trees_seen_now = std::iter::successors(Some(initial_position), |pos| {\n\n let new_pos = (pos.0 + slope.0, pos.1 + slope.1);\n\n if new_pos.1 < map.rows {\n\n Some(new_pos)\n\n } else {\n\n None\n\n }\n\n })\n\n .map(|(x, y)| map.tree_at(x, y))\n\n .filter(|tree| *tree)\n\n .count();\n\n\n\n acc * trees_seen_now\n\n }))\n\n}\n\n\n", "file_path": "crates/core/src/year2020/day03.rs", "rank": 78, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n let anagrams_are_equal = input.is_part_two();\n\n\n\n Ok(input\n\n .text\n\n .lines()\n\n .filter(|passphrase| {\n\n let mut words: Vec<Vec<char>> = passphrase\n\n .split_ascii_whitespace()\n\n .map(|word| {\n\n let mut chars: Vec<char> = word.chars().collect();\n\n if anagrams_are_equal {\n\n chars.sort_unstable();\n\n }\n\n chars\n\n })\n\n .collect();\n\n words.sort();\n\n let initial_len = words.len();\n\n words.dedup();\n\n initial_len == words.len()\n\n })\n\n .count())\n\n}\n\n\n", "file_path": "crates/core/src/year2017/day04.rs", "rank": 79, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n const MAX_DELAY: usize = 10_000_000;\n\n\n\n let layers = 
input.text.lines().count();\n\n let mut scanner_ranges = vec![0; layers];\n\n\n\n for (line_index, line) in input.text.lines().enumerate() {\n\n let error_message = || {\n\n format!(\n\n \"Invalid input at line {}: Not '${{NUMBER}}: ${{NUMBER}}'\",\n\n line_index + 1\n\n )\n\n };\n\n\n\n let parts: Vec<&str> = line.split(\": \").collect();\n\n if parts.len() != 2 {\n\n return Err(error_message());\n\n }\n\n let depth = parts[0].parse::<usize>().map_err(|_| error_message())?;\n\n let range = parts[1].parse::<usize>().map_err(|_| error_message())?;\n", "file_path": "crates/core/src/year2017/day13.rs", "rank": 80, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n let puzzle_input = parse(input.text)?;\n\n if input.is_part_one() {\n\n Square::iter()\n\n .nth(puzzle_input - 1)\n\n .map(|walker| (walker.x.abs() + walker.y.abs()) as usize)\n\n .ok_or_else(|| \"No solution found\".to_string())\n\n } else {\n\n let mut square_values = HashMap::new();\n\n square_values.insert((0, 0), 1);\n\n\n\n Square::iter()\n\n .map(|walker| {\n\n let new_square_value = (-1..=1)\n\n .flat_map(move |dx| (-1..=1).map(move |dy| (dx, dy)))\n\n .map(|(dx, dy)| (walker.x + dx, walker.y + dy))\n\n .filter_map(|neighbor_square| square_values.get(&neighbor_square))\n\n .sum();\n\n square_values.insert((walker.x, walker.y), new_square_value);\n\n new_square_value\n\n })\n\n .find(|&new_square_value| new_square_value > puzzle_input)\n\n .ok_or_else(|| \"No solution found\".to_string())\n\n }\n\n}\n\n\n", "file_path": "crates/core/src/year2017/day03.rs", "rank": 81, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n Ok(parse_lines::<u32>(input.text)?\n\n .windows(input.part_values(2, 4))\n\n .filter(|data| data.last() > data.first())\n\n .count())\n\n}\n\n\n", "file_path": "crates/core/src/year2021/day01.rs", "rank": 82, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut 
Input) -> Result<usize, String> {\n\n let num_programs = input.text.lines().count();\n\n let mut program_groups = DisjointSet::new(num_programs);\n\n\n\n for (line_index, line) in input.text.lines().enumerate() {\n\n let error_message = || {\n\n format!(\n\n \"Invalid input at line {}: Expected 'ID <-> ID[, ID]'\",\n\n line_index + 1\n\n )\n\n };\n\n let parts = line.split(\" <-> \").collect::<Vec<_>>();\n\n if parts.len() != 2 {\n\n return Err(error_message());\n\n }\n\n let first = parts[0].parse::<usize>().map_err(|_| error_message())?;\n\n for other_str in parts[1].split(\", \") {\n\n let other = other_str.parse::<usize>().map_err(|_| error_message())?;\n\n program_groups.join(first, other);\n\n }\n\n }\n\n\n\n Ok(if input.is_part_one() {\n\n program_groups.size(0)\n\n } else {\n\n program_groups.num_groups()\n\n })\n\n}\n\n\n", "file_path": "crates/core/src/year2017/day12.rs", "rank": 83, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n let grid = Grid::parse(input.text)?;\n\n let mut distances: HashMap<(usize, usize), usize> = HashMap::new();\n\n\n\n for from in 0..grid.locations.len() {\n\n 'toloop: for to in 0..grid.locations.len() {\n\n if to <= from {\n\n continue;\n\n }\n\n\n\n let starting_location = grid.locations[from];\n\n let target_location = grid.locations[to];\n\n if starting_location == (0, 0) || target_location == (0, 0) {\n\n return Err(\"Not all digits in grid\".into());\n\n }\n\n\n\n let mut visited = HashSet::new();\n\n let mut to_visit = BinaryHeap::new();\n\n to_visit.push(Reverse((0, 0, starting_location)));\n\n\n", "file_path": "crates/core/src/year2016/day24.rs", "rank": 84, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n Ok(input\n\n .text\n\n .lines()\n\n .filter(if input.is_part_one() {\n\n has_abba\n\n } else {\n\n supports_ssl\n\n })\n\n .count())\n\n}\n\n\n", "file_path": "crates/core/src/year2016/day07.rs", 
"rank": 85, "score": 98989.33426070132 }, { "content": "pub fn solve(input: &mut Input) -> Result<usize, String> {\n\n fn parse_tuple(tuple: &str) -> Option<(u16, u16)> {\n\n tuple.split_once(',').and_then(|(first, second)| {\n\n Some((first.parse::<u16>().ok()?, second.parse::<u16>().ok()?))\n\n })\n\n }\n\n\n\n let mut grid = vec![0_u8; 1_000_000].into_boxed_slice();\n\n for line in input.text.lines() {\n\n let words = line.split(' ').collect::<Vec<&str>>();\n\n let is_toggle = words[0] == \"toggle\";\n\n let expected_word_count = if is_toggle { 4 } else { 5 };\n\n if words.len() != expected_word_count {\n\n return Err(\"Invalid input\".to_string());\n\n }\n\n\n\n let (from, to) = if is_toggle {\n\n (words[1], words[3])\n\n } else {\n\n (words[2], words[4])\n", "file_path": "crates/core/src/year2015/day06.rs", "rank": 86, "score": 98989.33426070132 }, { "content": "fn find_subseq_covering(seq: &[String], subseqs: &[&[String]]) -> Option<VecDeque<usize>> {\n\n if seq.is_empty() {\n\n Some(VecDeque::new())\n\n } else {\n\n for (i, subseq) in subseqs.iter().enumerate() {\n\n if seq.starts_with(subseq) {\n\n if let Some(mut subfind) = find_subseq_covering(&seq[subseq.len()..], subseqs) {\n\n subfind.push_front(i);\n\n return Some(subfind);\n\n }\n\n }\n\n }\n\n\n\n None\n\n }\n\n}\n\n\n", "file_path": "crates/core/src/year2019/day17.rs", "rank": 87, "score": 94243.91607761564 }, { "content": "fn visit_subsets<F>(input: &[u8], subset_size: usize, on_subsete: &mut F)\n\nwhere\n\n F: FnMut(&[u8]),\n\n{\n\n let mut output = vec![0; subset_size];\n\n visit_subset_internal(input, &mut output, 0, 0, on_subsete);\n\n}\n\n\n", "file_path": "crates/core/src/year2015/day24.rs", "rank": 88, "score": 93095.65393205719 }, { "content": "struct Trench {\n\n x_range: RangeInclusive<i16>,\n\n y_range: RangeInclusive<i16>,\n\n}\n\n\n\nimpl Trench {\n\n fn parse(text: &str) -> Option<Self> {\n\n let target_area = if text.len() < 18 {\n\n return None;\n\n } else {\n\n &text[15..]\n\n 
};\n\n let (x_range, y_range) = if let Some((x_range, y_range)) = target_area.split_once(\", y=\") {\n\n (Self::parse_range(x_range)?, Self::parse_range(y_range)?)\n\n } else {\n\n return None;\n\n };\n\n Some(Self { x_range, y_range })\n\n }\n\n\n", "file_path": "crates/core/src/year2021/day17.rs", "rank": 89, "score": 90914.0432002291 }, { "content": "struct Grid {\n\n occupied_coordinates: HashSet<Coordinate>,\n\n active_neighbors_count: HashMap<Coordinate, u8>,\n\n}\n\n\n\nimpl Grid {\n\n fn parse(input: &str) -> Result<Self, String> {\n\n let mut occupied_coordinates = HashSet::with_capacity(2000);\n\n\n\n for (row, line) in input.lines().enumerate() {\n\n if line.len() > 8 || row > 7 {\n\n return Err(\"Bigger than 8x8 input\".into());\n\n }\n\n for (col, b) in line.bytes().enumerate() {\n\n if b == b'#' {\n\n occupied_coordinates.insert((\n\n col as CoordinateComponent,\n\n row as CoordinateComponent,\n\n 0,\n\n 0,\n", "file_path": "crates/core/src/year2020/day17.rs", "rank": 90, "score": 90914.0432002291 }, { "content": "struct Spell {\n\n mana_cost: u8,\n\n damage: u8,\n\n heals: u8,\n\n effect_idx: Option<usize>,\n\n}\n\n\n", "file_path": "crates/core/src/year2015/day22.rs", "rank": 91, "score": 90914.0432002291 }, { "content": "#[derive(Copy, Clone)]\n\nstruct Edge {\n\n /// Bitmask where '#' is set bit, '.' is unset. 
Only 10 first bits used.\n\n bitmask: u16,\n\n /// The tile that matches on the other end.\n\n matching: Option<TileId>,\n\n}\n\n\n\nimpl Edge {\n\n const fn flipped(self) -> Self {\n\n Self {\n\n // Only the first 10 bits of the edge bitmask is used:\n\n bitmask: self.bitmask.reverse_bits() >> 6,\n\n matching: self.matching,\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/core/src/year2020/day20.rs", "rank": 92, "score": 90914.0432002291 }, { "content": "struct Parser {\n\n operand_stack: Vec<CalculatorValue>,\n\n operator_stack: Vec<u8>,\n\n postfix_expression: Vec<u8>,\n\n addition_precedence: u8,\n\n multiplication_precedence: u8,\n\n}\n\n\n\nimpl Parser {\n\n fn new(addition_has_higher_precedence: bool) -> Self {\n\n let (addition_precedence, multiplication_precedence) = if addition_has_higher_precedence {\n\n (2, 1)\n\n } else {\n\n (1, 1)\n\n };\n\n\n\n Self {\n\n operand_stack: Vec::with_capacity(64),\n\n operator_stack: Vec::with_capacity(64),\n\n postfix_expression: Vec::with_capacity(64),\n", "file_path": "crates/core/src/year2020/day18.rs", "rank": 93, "score": 90914.0432002291 }, { "content": "#[derive(Default, Eq, PartialEq, Hash, Clone)]\n\nstruct Effect {\n\n turns: u8,\n\n armor: u8,\n\n damage: u8,\n\n mana: u8,\n\n}\n\n\n", "file_path": "crates/core/src/year2015/day22.rs", "rank": 94, "score": 90914.0432002291 }, { "content": "#[derive(Copy, Clone, Eq, PartialEq, Debug)]\n\nstruct Packet {\n\n version: u8,\n\n type_id: u8,\n\n contents: LengthOrValue,\n\n length: u8,\n\n}\n\n\n\nimpl Packet {\n\n fn parse(transmission: &mut Transmission) -> Option<Self> {\n\n let initial_bit_offset = transmission.bit_offset;\n\n\n\n let version = transmission.next_bits(3)?;\n\n let type_id = transmission.next_bits(3)?;\n\n let length_or_value = if type_id == 4 {\n\n // Literal value packet - a single binary number.\n\n let mut value = 0_u64;\n\n let mut bit_offset = 0_usize;\n\n loop {\n\n let next_five_bits = transmission.next_bits(5)?;\n\n value |= 
u64::from(next_five_bits & 0b1111) << (bit_offset as u64);\n", "file_path": "crates/core/src/year2021/day16.rs", "rank": 95, "score": 90914.0432002291 }, { "content": "#[derive(Copy, Clone)]\n\nstruct Tile {\n\n id: TileId,\n\n /// Indexed by 0,1,3,4 = Top,Right,Bottom,Left.\n\n edges: [Edge; 4],\n\n /// Indexed by row. Lowest bit to the right.\n\n /// Example: The row \"#..#....\" is stored as 0b10010000.\n\n body: [u8; 8],\n\n}\n\n\n\nimpl Tile {\n\n fn parse(input: &str) -> Result<Vec<Self>, String> {\n\n let mut tiles = Vec::new();\n\n for tile_str in input.split(\"\\n\\n\") {\n\n let mut tile_id = 0;\n\n let mut this_edges = [Edge {\n\n bitmask: 0,\n\n matching: None,\n\n }; 4];\n\n let mut body = [0_u8; 8];\n\n\n", "file_path": "crates/core/src/year2020/day20.rs", "rank": 96, "score": 90914.0432002291 }, { "content": "struct Rules {\n\n /// The rules indexed by rule id:s.\n\n rules: Vec<Rule>,\n\n}\n\n\n\nimpl Rules {\n\n fn parse(rules_str: &str) -> Result<Self, ()> {\n\n let mut rules = Self {\n\n rules: vec![Rule::Character(0); 255],\n\n };\n\n for rule_line in rules_str.lines() {\n\n rules.add_line(rule_line)?;\n\n }\n\n Ok(rules)\n\n }\n\n\n\n fn add_line(&mut self, rule_line: &str) -> Result<(), ()> {\n\n let (rule_idx_str, pattern_str) = rule_line.rsplit_once(\": \").ok_or(())?;\n\n\n\n let rule_idx = rule_idx_str.parse::<RuleId>().map_err(|_| ())?;\n", "file_path": "crates/core/src/year2020/day19.rs", "rank": 97, "score": 90914.0432002291 }, { "content": "struct Computer {\n\n registers: [u32; 2],\n\n instructions: Vec<Instruction>,\n\n}\n\n\n\nimpl Computer {\n\n fn parse_register(specifier: &str) -> Result<u8, String> {\n\n Ok(match specifier {\n\n \"a\" | \"a,\" => 0,\n\n \"b\" => 1,\n\n _ => {\n\n return Err(\"Invalid register (not 'a' or 'b')\".to_string());\n\n }\n\n })\n\n }\n\n\n\n fn parse(input: &str) -> Result<Self, String> {\n\n let mut instructions = Vec::new();\n\n\n\n for line in input.lines() {\n", "file_path": 
"crates/core/src/year2015/day23.rs", "rank": 98, "score": 90914.0432002291 }, { "content": "#[derive(Eq, PartialEq, Clone, PartialOrd, Ord)]\n\nstruct State {\n\n spent_mana: u32,\n\n mana_left: u32,\n\n boss_hit_points: u8,\n\n player_hit_points: u8,\n\n effects_remaining_turns: [u8; 3],\n\n}\n\n\n", "file_path": "crates/core/src/year2015/day22.rs", "rank": 99, "score": 90914.0432002291 } ]
Rust
src/encodings/arithmetic_coder/decoder.rs
gcarq/comprs
b23556b78e0813a223bd7dbd09141c94b0a371d1
use std::cmp; use std::io::{Read, Result}; use bitbit::{BitReader, MSB}; use crate::encodings::arithmetic_coder::Symbol; use super::base::ArithmeticCoderBase; use super::FrequencyTable; pub struct ArithmeticDecoder<R: Read> { reader: BitReader<R, MSB>, code: usize, low: usize, high: usize, state_mask: usize, full_range: usize, half_range: usize, quarter_range: usize, minimum_range: usize, maximum_total: usize, } impl<R: Read> ArithmeticCoderBase for ArithmeticDecoder<R> { fn set_low(&mut self, value: usize) { self.low = value } fn set_high(&mut self, value: usize) { self.high = value } #[inline] fn low(&self) -> usize { self.low } #[inline] fn high(&self) -> usize { self.high } #[inline] fn state_mask(&self) -> usize { self.state_mask } #[inline] fn minimum_range(&self) -> usize { self.minimum_range } #[inline] fn quarter_range(&self) -> usize { self.quarter_range } #[inline] fn half_range(&self) -> usize { self.half_range } #[inline] fn full_range(&self) -> usize { self.full_range } #[inline] fn maximum_total(&self) -> usize { self.maximum_total } fn shift(&mut self) -> Result<()> { let bit = if self.reader.read_bit().unwrap_or(false) { 1 } else { 0 }; self.code = ((self.code << 1) & self.state_mask) | bit; Ok(()) } fn underflow(&mut self) { let bit = if self.reader.read_bit().unwrap_or(false) { 1 } else { 0 }; self.code = (self.code & self.half_range) | ((self.code << 1) & (self.state_mask >> 1)) | bit; } } impl<R: Read> ArithmeticDecoder<R> { pub fn new(mut reader: BitReader<R, MSB>, num_bits: usize) -> Result<Self> { let num_state_bits = num_bits; let full_range = 1 << num_state_bits; let half_range = full_range >> 1; let quarter_range = half_range >> 1; let minimum_range = quarter_range + 2; let maximum_total = cmp::min(std::usize::MAX / full_range, minimum_range); let state_mask = full_range - 1; let low = 0; let high = state_mask; let mut code = 0; for _ in 0..num_bits { let bit = if reader.read_bit()? 
{ 1 } else { 0 }; code = code << 1 | bit; } Ok(ArithmeticDecoder { reader, low, high, state_mask, full_range, half_range, quarter_range, minimum_range, maximum_total, code, }) } pub fn read<T: FrequencyTable>(&mut self, freqtable: &mut T) -> Result<Symbol> { let total = freqtable.total(); debug_assert!( total <= self.maximum_total, "cannot decode symbol because total is too large" ); let range = self.high - self.low + 1; let offset = self.code - self.low; let value = ((offset + 1) * total - 1) / range; debug_assert!(value * range / total <= offset); debug_assert!(value < total); let mut start = 0; let mut end = freqtable.get_symbol_limit(); while end - start > 1 { let middle = (start + end) >> 1; if freqtable.get_low(middle) > value { end = middle; } else { start = middle; } } debug_assert_eq!(start + 1, end); let symbol = start; debug_assert!(freqtable.get_low(symbol) * range / total <= offset); debug_assert!(offset < freqtable.get_high(symbol) * range / total); self.update(freqtable, symbol)?; debug_assert!(self.low <= self.code, "code out of range"); debug_assert!(self.code <= self.high, "code out of range"); Ok(symbol) } }
use std::cmp; use std::io::{Read, Result}; use bitbit::{BitReader, MSB}; use crate::encodings::arithmetic_coder::Symbol; use super::base::ArithmeticCoderBase; use super::FrequencyTable; pub struct ArithmeticDecoder<R: Read> { reader: BitReader<R, MSB>, code: usize, low: usize, high: usize, state_mask: usize, full_range: usize, half_range: usize, quarter_range: usize, minimum_range: usize, maximum_total: usize, } impl<R: Read> ArithmeticCoderBase for ArithmeticDecoder<R> { fn set_low(&mut self, value: usize) { self.low = value } fn set_high(&mut self, value: usize) { self.high = value } #[inline] fn low(&self) -> usize { self.low } #[inline] fn high(&self) -> usize { self.high } #[inline] fn state_mask(&self) -> usize { self.state_mask } #[inline] fn minimum_range(&self) -> usize { self.minimum_range } #[inline] fn quarter_range(&self) -> usize { self.quarter_range } #[inline] fn half_range(&self) -> usize { self.half_range } #[inline] fn full_range(&self) -> usize { self.full_range } #[inline] fn maximum_total(&self) -> usize { self.maximum_total } fn shift(&mut self) -> Result<()> { let bit = if self.reader.read_bit().unwrap_or(false) { 1 } else { 0 }; self.code = ((self.code << 1) & self.state_mask) | bit; Ok(()) }
} impl<R: Read> ArithmeticDecoder<R> { pub fn new(mut reader: BitReader<R, MSB>, num_bits: usize) -> Result<Self> { let num_state_bits = num_bits; let full_range = 1 << num_state_bits; let half_range = full_range >> 1; let quarter_range = half_range >> 1; let minimum_range = quarter_range + 2; let maximum_total = cmp::min(std::usize::MAX / full_range, minimum_range); let state_mask = full_range - 1; let low = 0; let high = state_mask; let mut code = 0; for _ in 0..num_bits { let bit = if reader.read_bit()? { 1 } else { 0 }; code = code << 1 | bit; } Ok(ArithmeticDecoder { reader, low, high, state_mask, full_range, half_range, quarter_range, minimum_range, maximum_total, code, }) } pub fn read<T: FrequencyTable>(&mut self, freqtable: &mut T) -> Result<Symbol> { let total = freqtable.total(); debug_assert!( total <= self.maximum_total, "cannot decode symbol because total is too large" ); let range = self.high - self.low + 1; let offset = self.code - self.low; let value = ((offset + 1) * total - 1) / range; debug_assert!(value * range / total <= offset); debug_assert!(value < total); let mut start = 0; let mut end = freqtable.get_symbol_limit(); while end - start > 1 { let middle = (start + end) >> 1; if freqtable.get_low(middle) > value { end = middle; } else { start = middle; } } debug_assert_eq!(start + 1, end); let symbol = start; debug_assert!(freqtable.get_low(symbol) * range / total <= offset); debug_assert!(offset < freqtable.get_high(symbol) * range / total); self.update(freqtable, symbol)?; debug_assert!(self.low <= self.code, "code out of range"); debug_assert!(self.code <= self.high, "code out of range"); Ok(symbol) } }
fn underflow(&mut self) { let bit = if self.reader.read_bit().unwrap_or(false) { 1 } else { 0 }; self.code = (self.code & self.half_range) | ((self.code << 1) & (self.state_mask >> 1)) | bit; }
function_block-full_function
[ { "content": "pub fn encode_pipeline<R: Read>(reader: R) -> Result<Vec<u8>> {\n\n Ok(bincode::serialize(&TData::encode(reader)?).expect(\"unable to serialize data\"))\n\n}\n\n\n", "file_path": "src/encodings/mod.rs", "rank": 0, "score": 155834.14144813723 }, { "content": "pub fn decode_pipeline<R: Read>(reader: R) -> Result<Vec<u8>> {\n\n let data = bincode::deserialize_from::<R, TData>(reader).expect(\"unable to deserialize data\");\n\n data.decode()\n\n}\n", "file_path": "src/encodings/mod.rs", "rank": 1, "score": 155834.14144813723 }, { "content": "fn compress_file<R: Read>(reader: R) -> Result<Vec<u8>> {\n\n println!(\"Compressing file ...\");\n\n let now = Instant::now();\n\n let cursor = Cursor::new(encodings::encode_pipeline(reader)?);\n\n let elapsed = now.elapsed();\n\n println!(\n\n \"elapsed time: {}.{} seconds\",\n\n elapsed.as_secs(),\n\n elapsed.subsec_millis()\n\n );\n\n Ok(cursor.into_inner())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 2, "score": 132438.08309952897 }, { "content": "fn decompress_file<R: Read>(reader: R) -> Result<Vec<u8>> {\n\n println!(\"Decompressing file ...\");\n\n let now = Instant::now();\n\n let result = encodings::decode_pipeline(reader);\n\n let elapsed = now.elapsed();\n\n println!(\n\n \"elapsed time: {}.{} seconds\",\n\n elapsed.as_secs(),\n\n elapsed.subsec_millis()\n\n );\n\n result\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_compression() -> Result<()> {\n\n let test_data = String::from(\"Lorem Ipsum is simply dummy text of the printing and typesetting industry.\n", "file_path": "src/main.rs", "rank": 3, "score": 132438.08309952897 }, { "content": "pub fn apply(data: &[u8]) -> Result<Vec<u8>> {\n\n let len = data.len();\n\n debug!(\"DEBUG:RLE: before: {}\", len);\n\n let mut writer = Cursor::new(Vec::with_capacity(data.len()));\n\n let mut pair = RLEPair::new(1, data[0]);\n\n\n\n for symbol in &data[1..] 
{\n\n if pair.symbol == *symbol {\n\n // Increment counter if symbol is the same\n\n pair.increment();\n\n } else {\n\n // Serialize current pair and create new one\n\n pair.serialize(&mut writer)?;\n\n pair = RLEPair::new(1, *symbol);\n\n }\n\n }\n\n pair.serialize(&mut writer)?;\n\n\n\n let encoded = writer.into_inner();\n\n debug!(\"DEBUG:RLE: after: {}\", encoded.len());\n\n Ok(encoded)\n\n}\n\n\n", "file_path": "src/encodings/rle.rs", "rank": 4, "score": 99284.3470574476 }, { "content": "/// Compress content provided by reader and write compressed data to writer.\n\npub fn apply(data: &[u8]) -> Result<Vec<u8>> {\n\n let mut encoder =\n\n ArithmeticEncoder::new(BitWriter::new(Vec::with_capacity(data.len() / 4)), NUM_BITS);\n\n let mut model = PPMModel::new(ORDER as u8, SYMBOL_LIMIT, ESCAPE_SYMBOL);\n\n let mut history: Vec<Symbol> = Vec::with_capacity(model.order as usize);\n\n\n\n for byte in data {\n\n let symbol = Symbol::from(*byte);\n\n encode_symbol(&mut model, &history, symbol, &mut encoder)?;\n\n model.increment_contexts(&history, symbol);\n\n\n\n if model.order >= 1 {\n\n mutate_history(symbol, model.order, &mut history);\n\n }\n\n }\n\n\n\n // Encode EOF\n\n encode_symbol(&mut model, &history, EOF, &mut encoder)?;\n\n encoder.finish()?;\n\n Ok(encoder.inner_ref().clone())\n\n}\n\n\n", "file_path": "src/encodings/ppm/mod.rs", "rank": 5, "score": 97339.17726562009 }, { "content": "/// Decompress content provided by reader and write restored data to writer.\n\npub fn reduce(data: &[u8]) -> Result<Vec<u8>> {\n\n let mut decoder = ArithmeticDecoder::new(BitReader::new(data), NUM_BITS)?;\n\n let mut model = PPMModel::new(ORDER as u8, SYMBOL_LIMIT, ESCAPE_SYMBOL);\n\n\n\n let mut history: Vec<Symbol> = Vec::with_capacity(model.order as usize);\n\n let mut buffer = Vec::with_capacity(data.len());\n\n\n\n loop {\n\n let symbol = decode_symbol(&mut model, &history, &mut decoder)?;\n\n // Check if EOF symbol has occurred\n\n if symbol == EOF {\n\n break;\n\n 
}\n\n buffer.write_all(&[symbol as u8])?;\n\n model.increment_contexts(&history, symbol);\n\n\n\n if model.order >= 1 {\n\n mutate_history(symbol, model.order, &mut history);\n\n }\n\n }\n\n Ok(buffer)\n\n}\n\n\n\n/// Append current symbol to history or shift back by one\n", "file_path": "src/encodings/ppm/mod.rs", "rank": 6, "score": 97339.17726562009 }, { "content": "fn main() -> Result<()> {\n\n let matches = App::new(\"comprs\")\n\n .version(crate_version!())\n\n .about(\"Experimental playground for compression algorithms in Rust\")\n\n .arg(\n\n Arg::with_name(\"mode\")\n\n .help(\"mode\")\n\n .required(true)\n\n .possible_values(&[\"c\", \"d\", \"compress\", \"decompress\"])\n\n .index(1),\n\n )\n\n .arg(\n\n Arg::with_name(\"file\")\n\n .help(\"Sets the input file to use\")\n\n .required(true)\n\n .index(2),\n\n )\n\n .arg(\n\n Arg::with_name(\"o\")\n\n .short(\"o\")\n", "file_path": "src/main.rs", "rank": 7, "score": 84474.18066215212 }, { "content": "/// Try to use highest order context that exists based on the history suffix. When symbol 256\n\n/// is consumed at a context at any non-negative order, it means \"escape to the next lower order\n\n/// with non-empty context\". 
When symbol 256 is consumed at the order -1 context, it means \"EOF\".\n\nfn decode_symbol<'a, R: Read>(\n\n model: &'a mut PPMModel,\n\n history: &[Symbol],\n\n decoder: &mut ArithmeticDecoder<R>,\n\n) -> Result<Symbol> {\n\n let hist_len = history.len();\n\n for order in (0..=hist_len).rev() {\n\n match traverse_context(&mut model.context, &history[hist_len - order..hist_len]) {\n\n None => {}\n\n Some(ref mut ctx) => {\n\n let symbol = decoder.read(&mut ctx.frequencies)?;\n\n if symbol < EOF {\n\n return Ok(symbol);\n\n }\n\n // Else we read the context escape symbol, so continue decrementing the order\n\n }\n\n }\n\n }\n\n // Logic for order = -1\n\n decoder.read(&mut model.order_minus1_freqs)\n", "file_path": "src/encodings/ppm/mod.rs", "rank": 8, "score": 73734.41212706605 }, { "content": "/// Calculates shannon entropy for the given slice\n\npub fn calc_entropy(data: &[u8]) -> f64 {\n\n let mut occurences = HashMap::new();\n\n for byte in data {\n\n *occurences.entry(byte).or_insert(0) += 1;\n\n }\n\n\n\n let flen = data.len() as f64;\n\n let entropy: f64 = occurences\n\n .values()\n\n .map(|o| f64::from(*o) / flen)\n\n .map(|p| p * p.log2())\n\n .sum();\n\n\n\n -entropy\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 9, "score": 67953.59709440004 }, { "content": "pub fn apply(data: &[u8]) -> Vec<u8> {\n\n // Create alphabet vector\n\n let mut alphabet: Vec<u8> = (0..=255).collect();\n\n\n\n // Iterates over data and encodes each byte with the current alphabet\n\n data.iter().map(|b| encode(*b, &mut alphabet)).collect()\n\n}\n\n\n", "file_path": "src/encodings/mtf.rs", "rank": 10, "score": 66466.17525434235 }, { "content": "pub fn reduce(data: &[u8]) -> Vec<u8> {\n\n let mut decoded: Vec<u8> = Vec::with_capacity(data.len());\n\n\n\n let mut reader = Cursor::new(data);\n\n while let Ok(pair) = RLEPair::deserialize(&mut reader) {\n\n for _ in 0..pair.count.0 {\n\n decoded.push(pair.symbol);\n\n }\n\n }\n\n\n\n decoded\n\n}\n\n\n", "file_path": 
"src/encodings/rle.rs", "rank": 11, "score": 66466.17525434235 }, { "content": "pub fn reduce(data: &[u8]) -> Vec<u8> {\n\n // Create alphabet vector\n\n let mut alphabet: Vec<u8> = (0..=255).collect();\n\n\n\n let mut decoded = Vec::with_capacity(data.len());\n\n for index in data {\n\n let byte = alphabet.remove(*index as usize);\n\n decoded.push(byte);\n\n alphabet.insert(0, byte);\n\n }\n\n\n\n decoded\n\n}\n\n\n", "file_path": "src/encodings/mtf.rs", "rank": 12, "score": 66466.17525434235 }, { "content": "pub fn apply(data: &[u8]) -> Vec<u8> {\n\n // Assume UTF-8 encoding\n\n let file_content = match from_utf8(&data) {\n\n Ok(v) => v,\n\n Err(e) => panic!(\"Invalid UTF-8 sequence: {}\", e),\n\n };\n\n\n\n let mut dictionaries = vec![HashMap::new(); MAX_LENGTH + 1];\n\n for line in file_content.lines() {\n\n for word in line.split(&DELIMS[..]) {\n\n if is_word(word) {\n\n // Populate word occurrences in sub dictionary\n\n *dictionaries[word.len()]\n\n .entry(String::from(word))\n\n .or_insert(0) += 1;\n\n }\n\n }\n\n }\n\n\n\n // Create translation table\n", "file_path": "src/encodings/startransform.rs", "rank": 13, "score": 66466.17525434235 }, { "content": "pub fn apply(data: &[u8]) -> Vec<u8> {\n\n // Create chunks and encode them\n\n let chunks: Vec<BWTChunk> = data\n\n .chunks(CHUNK_SIZE as usize)\n\n .map(BWTChunk::encode)\n\n .collect();\n\n debug!(\"DEBUG:BWT: split up into {} chunks\", chunks.len());\n\n\n\n // Serialize encoded data to u8\n\n bincode::serialize(&BWTData { chunks }).expect(\"unable to serialize data\")\n\n}\n\n\n", "file_path": "src/encodings/bwt.rs", "rank": 14, "score": 66466.17525434235 }, { "content": "pub fn reduce(data: &[u8]) -> Vec<u8> {\n\n // Create chunks and encode them\n\n let data: BWTData = bincode::deserialize(data).expect(\"unable to deserialize data\");\n\n debug!(\"DEBUG:BWT: got {} chunks\", data.chunks.len());\n\n\n\n data.chunks\n\n .into_iter()\n\n .map(BWTChunk::decode)\n\n .flatten()\n\n .collect()\n\n}\n\n\n", 
"file_path": "src/encodings/bwt.rs", "rank": 15, "score": 66466.17525434235 }, { "content": "pub fn print_statistics(input_meta: &Metadata, compressed_meta: &Metadata) {\n\n let input_size = input_meta.len() as f64;\n\n let comp_size = compressed_meta.len() as f64;\n\n println!(\"Compressed Size: {}\", comp_size);\n\n println!(\n\n \"Compress Ratio: {:.1} ({:.2}%)\",\n\n input_size / comp_size,\n\n (1.0 - comp_size / input_size) * 100.0\n\n );\n\n println!(\"Bits per Byte: {:.4}\", comp_size / input_size * 8.0);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::utils::calc_entropy;\n\n\n\n #[test]\n\n fn test_calc_entropy() {\n\n let data: Vec<u8> = String::from(\"Lorem ipsum\").into_bytes();\n\n let result = calc_entropy(&data);\n\n assert_eq!(format!(\"{:.5}\", result), \"3.27761\");\n\n }\n\n}\n", "file_path": "src/utils/mod.rs", "rank": 16, "score": 62608.782217013424 }, { "content": "/// Takes a dictionary vector as input (sorted by word occurrence)\n\nfn encode_dictionary(dict: Vec<(&String, &usize)>, len: usize) -> HashMap<String, String> {\n\n let mut translation_table: HashMap<String, String> = HashMap::with_capacity(dict.len());\n\n\n\n // Create word iterator\n\n let mut words = dict.into_iter().map(|(k, _)| k.clone());\n\n\n\n // translate first word with '*' * len\n\n let mut code = vec!['*'; len];\n\n if let Some(word) = words.next() {\n\n translation_table.insert(word, code.iter().collect());\n\n }\n\n\n\n // Iterate over each index in reverse to place single mutation\n\n for i in (0..len).rev() {\n\n populate_table(&mut translation_table, &mut words, &code, i);\n\n code[i] = '*'\n\n }\n\n\n\n for offset in 1..len {\n\n for ch in ENCODING.chars() {\n", "file_path": "src/encodings/startransform.rs", "rank": 17, "score": 51690.672066931045 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct BWTChunk {\n\n pub data: Vec<u8>,\n\n pub index: u32,\n\n}\n\n\n\nimpl BWTChunk {\n\n pub fn encode(data: &[u8]) -> Self {\n\n let len = 
data.len();\n\n\n\n // Create permutations table\n\n let mut permutations: Vec<Permutation> = (0..len)\n\n .into_par_iter()\n\n .map(|i| Permutation::new(data, i as u32))\n\n .collect();\n\n\n\n permutations.par_sort();\n\n\n\n // Create encoded data by using the last element in each row\n\n let index: u32 = permutations\n\n .par_iter()\n", "file_path": "src/encodings/bwt.rs", "rank": 18, "score": 47120.92534822895 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct BWTData {\n\n pub chunks: Vec<BWTChunk>,\n\n}\n\n\n", "file_path": "src/encodings/bwt.rs", "rank": 19, "score": 47120.92534822895 }, { "content": "struct RLEPair {\n\n pub count: Varint<u32>,\n\n pub symbol: u8,\n\n}\n\n\n\nimpl RLEPair {\n\n pub fn new(count: usize, symbol: u8) -> Self {\n\n RLEPair {\n\n count: Varint(count as u32),\n\n symbol,\n\n }\n\n }\n\n\n\n pub fn increment(&mut self) {\n\n self.count.0 += 1;\n\n }\n\n\n\n pub fn serialize<W: Write>(&self, writer: &mut W) -> Result<()> {\n\n self.count\n\n .serialize(writer)\n", "file_path": "src/encodings/rle.rs", "rank": 20, "score": 47120.92534822895 }, { "content": "struct Permutation<'a> {\n\n data: &'a [u8],\n\n pub index: u32,\n\n}\n\n\n\nimpl<'a> Permutation<'a> {\n\n #[inline]\n\n pub fn new(data: &'a [u8], index: u32) -> Self {\n\n Permutation { data, index }\n\n }\n\n}\n\n\n\nimpl<'a> Index<usize> for Permutation<'a> {\n\n type Output = u8;\n\n\n\n /* 01234567\n\n 0: .ANANAS.\n\n 1: ..ANANAS\n\n 2: S..ANANA\n\n 3: AS..ANAN\n", "file_path": "src/encodings/bwt.rs", "rank": 21, "score": 46548.17885537939 }, { "content": "struct StarTransformData {\n\n pub translation_table: HashMap<String, String>,\n\n pub content: Vec<u8>,\n\n}\n\n\n", "file_path": "src/encodings/startransform.rs", "rank": 22, "score": 46168.57813725958 }, { "content": "struct BWTReconstructData {\n\n pub position: u32,\n\n pub char: u8,\n\n}\n\n\n\nimpl Ord for BWTReconstructData {\n\n #[inline]\n\n fn cmp(&self, other: &BWTReconstructData) -> Ordering {\n\n 
self.char.cmp(&other.char)\n\n }\n\n}\n\n\n\nimpl PartialOrd for BWTReconstructData {\n\n #[inline]\n\n fn partial_cmp(&self, other: &BWTReconstructData) -> Option<Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n\n\n\nimpl PartialEq for BWTReconstructData {\n\n #[inline]\n\n fn eq(&self, other: &BWTReconstructData) -> bool {\n\n self.char == other.char\n\n }\n\n}\n\n\n\nimpl Eq for BWTReconstructData {}\n\n\n", "file_path": "src/encodings/bwt.rs", "rank": 23, "score": 46168.57813725958 }, { "content": "pub trait FrequencyTable {\n\n fn new(num_symbols: u16) -> Self;\n\n fn get(&self, symbol: Symbol) -> usize;\n\n fn get_low(&self, symbol: Symbol) -> usize;\n\n fn get_high(&self, symbol: Symbol) -> usize;\n\n fn get_symbol_limit(&self) -> u16;\n\n fn set(&mut self, symbol: Symbol, frequency: usize);\n\n fn increment(&mut self, symbol: Symbol);\n\n fn total(&self) -> usize;\n\n}\n\n\n\npub struct SimpleFrequencyTable {\n\n pub frequencies: Vec<usize>,\n\n pub total: usize,\n\n}\n\n\n\nimpl SimpleFrequencyTable {\n\n fn cumulative(&self, symbol: Symbol) -> usize {\n\n self.frequencies.iter().take(symbol as usize).sum()\n\n }\n", "file_path": "src/encodings/arithmetic_coder/mod.rs", "rank": 24, "score": 40183.94906764979 }, { "content": "pub trait ArithmeticCoderBase {\n\n fn set_low(&mut self, value: usize);\n\n fn set_high(&mut self, value: usize);\n\n\n\n fn low(&self) -> usize;\n\n fn high(&self) -> usize;\n\n fn state_mask(&self) -> usize;\n\n fn minimum_range(&self) -> usize;\n\n fn quarter_range(&self) -> usize;\n\n fn half_range(&self) -> usize;\n\n fn full_range(&self) -> usize;\n\n fn maximum_total(&self) -> usize;\n\n\n\n fn shift(&mut self) -> Result<()>;\n\n fn underflow(&mut self);\n\n\n\n fn update<T: FrequencyTable>(&mut self, freqtable: &mut T, symbol: Symbol) -> Result<()> {\n\n let (low, high) = (self.low(), self.high());\n\n debug_assert!(low < high, \"low or high out of range\");\n\n\n", "file_path": "src/encodings/arithmetic_coder/base.rs", 
"rank": 25, "score": 39437.75133561726 }, { "content": "fn populate_contexts(\n\n ctx: &mut Context,\n\n history: &[Symbol],\n\n symbol: Symbol,\n\n escape_symbol: Symbol,\n\n symbol_limit: Symbol,\n\n) {\n\n if history.is_empty() {\n\n ctx.frequencies.increment(symbol);\n\n return;\n\n }\n\n\n\n let sym = history[0];\n\n if ctx.sub_ctxs.get(&sym).is_none() {\n\n let mut sub_ctx = Context::new(symbol_limit);\n\n sub_ctx.frequencies.increment(escape_symbol);\n\n ctx.sub_ctxs.insert(sym, sub_ctx);\n\n }\n\n\n\n populate_contexts(\n\n ctx.sub_ctxs.get_mut(&sym).unwrap(),\n\n &history[1..],\n\n symbol,\n\n escape_symbol,\n\n symbol_limit,\n\n );\n\n}\n", "file_path": "src/encodings/ppm/model.rs", "rank": 26, "score": 37688.65185992611 }, { "content": "fn populate_table<I: Iterator>(\n\n translation_table: &mut HashMap<String, String>,\n\n words: &mut I,\n\n code: &[char],\n\n index: usize,\n\n) where\n\n I: Iterator<Item = String>,\n\n{\n\n let mut code = code.to_owned();\n\n for character in ENCODING.chars() {\n\n code[index] = character;\n\n if let Some(word) = words.next() {\n\n translation_table.insert(word.clone(), code.iter().collect());\n\n } else {\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/encodings/startransform.rs", "rank": 27, "score": 35664.37900776737 }, { "content": "fn is_word(word: &str) -> bool {\n\n let len = word.len();\n\n if len < MIN_LENGH || len > MAX_LENGTH {\n\n return false;\n\n }\n\n for ch in word.chars() {\n\n if !VALID.contains(ch) {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "src/encodings/startransform.rs", "rank": 28, "score": 34011.608848937234 }, { "content": "/// Try to use highest order context that exists based on the history suffix, such\n\n/// that the next symbol has non-zero frequency. When symbol 256 is produced at a context\n\n/// at any non-negative order, it means \"escape to the next lower order with non-empty\n\n/// context\". 
When symbol 256 is produced at the order -1 context, it means \"EOF\".\n\nfn encode_symbol<'a, W: Write>(\n\n model: &'a mut PPMModel,\n\n history: &[Symbol],\n\n symbol: Symbol,\n\n encoder: &mut ArithmeticEncoder<W>,\n\n) -> Result<()> {\n\n let hist_len = history.len();\n\n for order in (0..=hist_len).rev() {\n\n match traverse_context(&mut model.context, &history[hist_len - order..hist_len]) {\n\n None => {}\n\n Some(ctx) => {\n\n if symbol != EOF && ctx.frequencies.get(symbol) > 0 {\n\n return encoder.write(&mut ctx.frequencies, symbol);\n\n }\n\n // Else write context escape symbol and continue decrementing the order\n\n encoder.write(&mut ctx.frequencies, EOF)?;\n\n }\n\n }\n\n }\n\n // Logic for order = -1\n\n encoder.write(&mut model.order_minus1_freqs, symbol)\n\n}\n\n\n", "file_path": "src/encodings/ppm/mod.rs", "rank": 29, "score": 33239.05521007351 }, { "content": "fn encode(byte: u8, alphabet: &mut Vec<u8>) -> u8 {\n\n let index = alphabet\n\n .iter()\n\n .position(|&b| b == byte)\n\n .expect(\"byte not found in alphabet\");\n\n\n\n let byte = alphabet.remove(index);\n\n alphabet.insert(0, byte);\n\n index as u8\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{apply, reduce};\n\n\n\n #[test]\n\n fn test_apply() {\n\n let data = String::from(\"bananaaa\").into_bytes();\n\n assert_eq!(apply(&data), vec![98, 98, 110, 1, 1, 1, 0, 0]);\n\n }\n\n\n\n #[test]\n\n fn test_reduce() {\n\n let data = vec![98, 98, 110, 1, 1, 1, 0, 0];\n\n assert_eq!(reduce(&data), String::from(\"bananaaa\").into_bytes());\n\n }\n\n}\n", "file_path": "src/encodings/mtf.rs", "rank": 30, "score": 28762.13359981465 }, { "content": "// TODO: replace with ropey\n\nfn translate_content(content: &str, translation_table: &HashMap<String, String>) -> String {\n\n let mut final_lines: Vec<String> = Vec::new();\n\n for line in content.lines().collect::<Vec<&str>>() {\n\n let mut final_line: String = String::from(line);\n\n for word in line.split(&DELIMS[..]) {\n\n if is_word(word) 
{\n\n // Replace word in final line\n\n println!(\"replacing: {}\", String::from(word));\n\n final_line =\n\n final_line.replace(word, &translation_table.get(&String::from(word)).unwrap());\n\n }\n\n }\n\n final_lines.push(final_line);\n\n }\n\n //TODO: restore original line seperator\n\n final_lines.join(\"\\n\")\n\n}\n", "file_path": "src/encodings/startransform.rs", "rank": 31, "score": 27090.74357609568 }, { "content": "#[inline]\n\nfn mutate_history(symbol: Symbol, order: u8, history: &mut Vec<Symbol>) {\n\n if history.len() >= order as usize {\n\n history.remove(0);\n\n }\n\n history.push(symbol);\n\n}\n\n\n", "file_path": "src/encodings/ppm/mod.rs", "rank": 32, "score": 26628.871283392673 }, { "content": "/// Return highest order context that exists for the given history prefix.\n\nfn traverse_context<'a>(ctx: &'a mut Context, history: &[Symbol]) -> Option<&'a mut Context> {\n\n if history.is_empty() {\n\n return Some(ctx);\n\n }\n\n\n\n match ctx.sub_ctxs.get_mut(&history[0]) {\n\n None => None,\n\n Some(sub) => traverse_context(sub, &history[1..]),\n\n }\n\n}\n\n\n", "file_path": "src/encodings/ppm/mod.rs", "rank": 33, "score": 25465.689969419684 }, { "content": " self.writer.get_ref()\n\n }\n\n}\n\n\n\nimpl<W: Write> ArithmeticCoderBase for ArithmeticEncoder<W> {\n\n fn set_low(&mut self, value: usize) {\n\n self.low = value\n\n }\n\n fn set_high(&mut self, value: usize) {\n\n self.high = value\n\n }\n\n #[inline]\n\n fn low(&self) -> usize {\n\n self.low\n\n }\n\n #[inline]\n\n fn high(&self) -> usize {\n\n self.high\n\n }\n\n #[inline]\n", "file_path": "src/encodings/arithmetic_coder/encoder.rs", "rank": 39, "score": 14.973528197549792 }, { "content": "use std::cmp;\n\nuse std::io::{Result, Write};\n\n\n\nuse bitbit::BitWriter;\n\n\n\nuse crate::encodings::arithmetic_coder::Symbol;\n\n\n\nuse super::base::ArithmeticCoderBase;\n\nuse super::FrequencyTable;\n\n\n\npub struct ArithmeticEncoder<W: Write> {\n\n writer: BitWriter<W>,\n\n num_underflow: 
usize,\n\n num_state_bits: usize,\n\n\n\n low: usize,\n\n high: usize,\n\n state_mask: usize,\n\n full_range: usize,\n\n half_range: usize,\n", "file_path": "src/encodings/arithmetic_coder/encoder.rs", "rank": 40, "score": 13.907906540855949 }, { "content": " .expect(\"unable to serialize varuint\");\n\n writer.write_all(&[self.symbol])\n\n }\n\n\n\n pub fn deserialize<R: Read + Seek>(reader: &mut R) -> Result<Self> {\n\n let count = Varint::deserialize(reader)?;\n\n\n\n let mut buf = [0u8; 1];\n\n reader.read_exact(&mut buf)?;\n\n let symbol = buf[0];\n\n\n\n Ok(RLEPair { count, symbol })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{apply, reduce};\n\n\n\n #[test]\n", "file_path": "src/encodings/rle.rs", "rank": 41, "score": 12.598638724546749 }, { "content": "\n\n // While low and high have the same top bit value, shift them out\n\n let half_range = self.half_range();\n\n let state_mask = self.state_mask();\n\n while ((low ^ high) & half_range) == 0 {\n\n // shift() needs an updated low value\n\n self.set_low(low);\n\n self.shift()?;\n\n low = (low << 1) & state_mask;\n\n high = ((high << 1) & state_mask) | 1;\n\n }\n\n // Now low's top bit must be 0 and high's top bit must be 1\n\n\n\n // While low's top two bits are 01 and high's are 10, delete the second highest bit of both\n\n let quarter_range = self.quarter_range();\n\n while (low & !high & quarter_range) != 0 {\n\n self.underflow();\n\n low = (low << 1) ^ half_range;\n\n high = ((high ^ half_range) << 1) | half_range | 1;\n\n }\n\n self.set_low(low);\n\n self.set_high(high);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/encodings/arithmetic_coder/base.rs", "rank": 43, "score": 12.306869545637632 }, { "content": "use std::io::{Read, Result, Write};\n\n\n\nuse bitbit::{BitReader, BitWriter};\n\n\n\nuse super::arithmetic_coder::decoder::ArithmeticDecoder;\n\nuse super::arithmetic_coder::encoder::ArithmeticEncoder;\n\nuse super::arithmetic_coder::{FrequencyTable, Symbol};\n\n\n\nuse 
self::context::Context;\n\nuse self::model::PPMModel;\n\n\n\npub mod context;\n\npub mod model;\n\n\n\n// TODO: create struct to hold all possible encoding parameters\n\nconst ORDER: u8 = 2;\n\nconst EOF: Symbol = 256;\n\nconst SYMBOL_LIMIT: Symbol = 257;\n\nconst ESCAPE_SYMBOL: Symbol = 256;\n\nconst NUM_BITS: usize = 32;\n\n\n\n/// Compress content provided by reader and write compressed data to writer.\n", "file_path": "src/encodings/ppm/mod.rs", "rank": 44, "score": 12.288009795624909 }, { "content": "impl FrequencyTable for FlatFrequencyTable {\n\n fn new(num_symbols: Symbol) -> Self {\n\n FlatFrequencyTable { num_symbols }\n\n }\n\n\n\n #[inline]\n\n fn get(&self, _symbol: Symbol) -> usize {\n\n 1\n\n }\n\n\n\n #[inline]\n\n fn get_low(&self, symbol: Symbol) -> usize {\n\n symbol as usize\n\n }\n\n\n\n #[inline]\n\n fn get_high(&self, symbol: Symbol) -> usize {\n\n symbol as usize + 1\n\n }\n\n\n", "file_path": "src/encodings/arithmetic_coder/mod.rs", "rank": 45, "score": 11.009603213033747 }, { "content": " quarter_range: usize,\n\n minimum_range: usize,\n\n maximum_total: usize,\n\n}\n\n\n\nimpl<W: Write> ArithmeticEncoder<W> {\n\n pub fn new(writer: BitWriter<W>, num_bits: usize) -> Self {\n\n debug_assert!(num_bits > 0);\n\n debug_assert!(num_bits < 64);\n\n let num_state_bits = num_bits;\n\n let full_range = 1 << num_state_bits;\n\n // The top bit at width num_state_bits, which is 0100...000.\n\n let half_range = full_range >> 1; // Non-zero\n\n // The second highest bit at width num_state_bits, which is 0010...000. This is zero when num_state_bits=1.\n\n let quarter_range = half_range >> 1; // Can be zero\n\n // Minimum range (high+1-low) during coding (non-trivial), which is 0010...010.\n\n let minimum_range = quarter_range + 2; // At least 2\n\n // Maximum allowed total from a frequency table at all times during coding. 
This differs from Java\n\n // and C++ because Python's native bigint avoids constraining the size of intermediate computations.\n\n let maximum_total = cmp::min(std::usize::MAX / full_range, minimum_range);\n", "file_path": "src/encodings/arithmetic_coder/encoder.rs", "rank": 46, "score": 10.417733945085129 }, { "content": "use std::fmt;\n\nuse std::io::{BufReader, Read, Result};\n\n\n\nuse crate::utils::calc_entropy;\n\n\n\npub mod arithmetic_coder;\n\npub mod bwt;\n\npub mod mtf;\n\npub mod ppm;\n\npub mod rle;\n\npub mod startransform;\n\n\n\n#[repr(u8)]\n\n#[derive(Serialize, Deserialize)]\n\npub enum Transform {\n\n BWT,\n\n MTF,\n\n RLE,\n\n ST,\n\n PPM,\n", "file_path": "src/encodings/mod.rs", "rank": 47, "score": 10.364759643367028 }, { "content": " fn maximum_total(&self) -> usize {\n\n self.maximum_total\n\n }\n\n\n\n fn shift(&mut self) -> Result<()> {\n\n let bit = match self.low >> (self.num_state_bits - 1) {\n\n 1 => true,\n\n 0 => false,\n\n _ => panic!(\"shift overflow\"),\n\n };\n\n self.writer.write_bit(bit)?;\n\n\n\n // Write out the saved underflow bits\n\n for _ in 0..self.num_underflow {\n\n self.writer.write_bit(!bit)?;\n\n }\n\n self.num_underflow = 0;\n\n Ok(())\n\n }\n\n\n\n fn underflow(&mut self) {\n\n self.num_underflow += 1;\n\n }\n\n}\n", "file_path": "src/encodings/arithmetic_coder/encoder.rs", "rank": 49, "score": 9.87589873700313 }, { "content": " minimum_range,\n\n maximum_total,\n\n }\n\n }\n\n #[inline]\n\n pub fn write<T: FrequencyTable>(&mut self, freqtable: &mut T, symbol: Symbol) -> Result<()> {\n\n self.update(freqtable, symbol)\n\n }\n\n\n\n /// Terminates the arithmetic coding by flushing any buffered bits, so that the output can be decoded properly.\n\n /// It is important that this method must be called at the end of the each encoding process.\n\n /// Note that this method merely writes data to the underlying output stream but does not close it.\n\n pub fn finish(&mut self) -> Result<()> {\n\n 
self.writer.write_bit(true)?;\n\n self.writer.write_byte(0)\n\n }\n\n\n\n /// Get reference of the inner writer\n\n #[inline]\n\n pub fn inner_ref(&mut self) -> &W {\n", "file_path": "src/encodings/arithmetic_coder/encoder.rs", "rank": 50, "score": 9.238443615106684 }, { "content": "}\n\n\n\nimpl FrequencyTable for SimpleFrequencyTable {\n\n #[inline]\n\n fn new(num_symbols: Symbol) -> Self {\n\n SimpleFrequencyTable {\n\n frequencies: vec![0; num_symbols as usize],\n\n total: 0,\n\n }\n\n }\n\n\n\n #[inline]\n\n fn get(&self, symbol: Symbol) -> usize {\n\n self.frequencies[symbol as usize]\n\n }\n\n\n\n #[inline]\n\n fn get_low(&self, symbol: Symbol) -> usize {\n\n self.cumulative(symbol)\n\n }\n", "file_path": "src/encodings/arithmetic_coder/mod.rs", "rank": 51, "score": 9.152917016042764 }, { "content": " }\n\n\n\n #[inline]\n\n fn total(&self) -> usize {\n\n self.total\n\n }\n\n}\n\n\n\nimpl fmt::Debug for SimpleFrequencyTable {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n fmt.debug_struct(\"SimpleFrequencyTable\")\n\n .field(\"frequencies\", &self.frequencies)\n\n .finish()\n\n }\n\n}\n\n\n\npub struct FlatFrequencyTable {\n\n num_symbols: Symbol,\n\n}\n\n\n", "file_path": "src/encodings/arithmetic_coder/mod.rs", "rank": 52, "score": 8.81655732448348 }, { "content": "use std::io::{Cursor, Read, Result, Seek, Write};\n\n\n\nuse varuint::{Deserializable, Serializable, Varint};\n\n\n", "file_path": "src/encodings/rle.rs", "rank": 54, "score": 7.742266783588951 }, { "content": " // Bit mask of num_state_bits ones, which is 0111...111.\n\n let state_mask = full_range - 1;\n\n\n\n // Low end of this arithmetic coder's current range. Conceptually has an infinite number of trailing 0s.\n\n let low = 0;\n\n // High end of this arithmetic coder's current range. 
Conceptually has an infinite number of trailing 1s.\n\n let high = state_mask;\n\n\n\n let num_underflow = 0;\n\n\n\n ArithmeticEncoder {\n\n writer,\n\n num_state_bits,\n\n num_underflow,\n\n low,\n\n high,\n\n state_mask,\n\n full_range,\n\n half_range,\n\n quarter_range,\n", "file_path": "src/encodings/arithmetic_coder/encoder.rs", "rank": 55, "score": 7.591507367246443 }, { "content": "use std::collections::BTreeMap;\n\n\n\nuse super::super::arithmetic_coder::{FrequencyTable, SimpleFrequencyTable, Symbol};\n\n\n\npub struct Context {\n\n pub frequencies: SimpleFrequencyTable,\n\n pub sub_ctxs: BTreeMap<Symbol, Context>,\n\n}\n\n\n\nimpl Context {\n\n #[inline]\n\n pub fn new(num_symbols: Symbol) -> Self {\n\n Context {\n\n frequencies: SimpleFrequencyTable::new(num_symbols),\n\n sub_ctxs: BTreeMap::new(),\n\n }\n\n }\n\n}\n", "file_path": "src/encodings/ppm/context.rs", "rank": 56, "score": 7.552485601177174 }, { "content": "\n\nimpl TData {\n\n /// Applies given transform methods to reader input\n\n pub fn encode<R: Read>(reader: R) -> Result<TData> {\n\n let mut buffer = Vec::with_capacity(1024 * 1024);\n\n\n\n BufReader::new(reader).read_to_end(&mut buffer)?;\n\n\n\n debug!(\"DEBUG: Size before preprocessing: {}\", &buffer.len());\n\n debug!(\n\n \"DEBUG: File entropy before preprocessing: {:.2}\",\n\n calc_entropy(&buffer)\n\n );\n\n\n\n let transforms = vec![\n\n //Transform::ST, FIXME: currently broken\n\n Transform::BWT,\n\n Transform::MTF,\n\n Transform::PPM,\n\n ];\n", "file_path": "src/encodings/mod.rs", "rank": 57, "score": 7.46849893237624 }, { "content": " debug_assert!(low & self.state_mask() == low, \"low out of range\");\n\n debug_assert!(high & self.state_mask() == high, \"high out of range\");\n\n\n\n let range = high - low + 1;\n\n debug_assert!(self.minimum_range() <= range);\n\n debug_assert!(range <= self.full_range());\n\n\n\n let symlow = freqtable.get_low(symbol);\n\n let symhigh = freqtable.get_high(symbol);\n\n let total = 
freqtable.total();\n\n debug_assert!(symlow != symhigh, \"symbol has zero frequency\");\n\n debug_assert!(\n\n total <= self.maximum_total(),\n\n \"cannot code symbol because total is too large\"\n\n );\n\n\n\n let (mut low, mut high) = (\n\n low + symlow * range / total,\n\n low + symhigh * range / total - 1,\n\n );\n", "file_path": "src/encodings/arithmetic_coder/base.rs", "rank": 58, "score": 7.268513959486044 }, { "content": "\n\n #[inline]\n\n fn get_high(&self, symbol: Symbol) -> usize {\n\n self.cumulative(symbol + 1)\n\n }\n\n\n\n #[inline]\n\n fn get_symbol_limit(&self) -> Symbol {\n\n self.frequencies.len() as Symbol\n\n }\n\n\n\n fn set(&mut self, symbol: Symbol, frequency: usize) {\n\n self.total -= self.frequencies[symbol as usize];\n\n self.frequencies[symbol as usize] = frequency;\n\n self.total += frequency;\n\n }\n\n\n\n fn increment(&mut self, symbol: Symbol) {\n\n self.total += 1;\n\n self.frequencies[symbol as usize] += 1;\n", "file_path": "src/encodings/arithmetic_coder/mod.rs", "rank": 59, "score": 7.014809175611841 }, { "content": "extern crate adler32;\n\nextern crate bincode;\n\nextern crate bitbit;\n\n#[macro_use]\n\nextern crate clap;\n\n#[macro_use]\n\nextern crate log;\n\nextern crate rayon;\n\nextern crate serde;\n\n#[macro_use]\n\nextern crate serde_derive;\n\nextern crate varuint;\n\n\n\nuse std::fs::File;\n\nuse std::io::{BufReader, BufWriter, Cursor, Read, Result, Write};\n\nuse std::time::Instant;\n\n\n\nuse adler32::adler32;\n\nuse clap::{App, Arg};\n\n\n\nuse utils::print_statistics;\n\n\n\nmod encodings;\n\nmod utils;\n\n\n", "file_path": "src/main.rs", "rank": 60, "score": 6.808215012506537 }, { "content": " fn state_mask(&self) -> usize {\n\n self.state_mask\n\n }\n\n #[inline]\n\n fn minimum_range(&self) -> usize {\n\n self.minimum_range\n\n }\n\n #[inline]\n\n fn quarter_range(&self) -> usize {\n\n self.quarter_range\n\n }\n\n #[inline]\n\n fn half_range(&self) -> usize {\n\n self.half_range\n\n }\n\n #[inline]\n\n fn 
full_range(&self) -> usize {\n\n self.full_range\n\n }\n\n #[inline]\n", "file_path": "src/encodings/arithmetic_coder/encoder.rs", "rank": 61, "score": 6.475395741551093 }, { "content": " code[len - offset] = ch;\n\n for i in (0..len - offset).rev() {\n\n populate_table(&mut translation_table, &mut words, &code, i);\n\n code[i] = '*'\n\n }\n\n }\n\n }\n\n\n\n for (key, value) in &translation_table {\n\n println!(\"key: {}, value: {}\", key, value);\n\n }\n\n\n\n debug_assert!(&words.next().is_none());\n\n\n\n translation_table\n\n}\n\n\n", "file_path": "src/encodings/startransform.rs", "rank": 62, "score": 5.835232918084978 }, { "content": " #[inline]\n\n fn get_symbol_limit(&self) -> Symbol {\n\n self.num_symbols\n\n }\n\n\n\n fn set(&mut self, _symbol: Symbol, _frequency: usize) {\n\n unimplemented!()\n\n }\n\n\n\n fn increment(&mut self, _symbol: Symbol) {\n\n unimplemented!()\n\n }\n\n\n\n #[inline]\n\n fn total(&self) -> usize {\n\n self.num_symbols as usize\n\n }\n\n}\n", "file_path": "src/encodings/arithmetic_coder/mod.rs", "rank": 63, "score": 5.606657645466086 }, { "content": "use std::fmt;\n\n\n\nmod base;\n\npub mod decoder;\n\npub mod encoder;\n\n\n\npub type Symbol = u16;\n\n\n", "file_path": "src/encodings/arithmetic_coder/mod.rs", "rank": 64, "score": 5.548069975268488 }, { "content": "use std::io::Result;\n\n\n\nuse crate::encodings::arithmetic_coder::Symbol;\n\n\n\nuse super::FrequencyTable;\n\n\n", "file_path": "src/encodings/arithmetic_coder/base.rs", "rank": 65, "score": 5.505833746985903 }, { "content": "use super::super::arithmetic_coder::{FlatFrequencyTable, FrequencyTable, Symbol};\n\nuse super::context::Context;\n\n\n\npub struct PPMModel {\n\n pub context: Context,\n\n pub order: u8,\n\n pub order_minus1_freqs: FlatFrequencyTable,\n\n symbol_limit: Symbol,\n\n escape_symbol: Symbol,\n\n}\n\n\n\nimpl PPMModel {\n\n pub fn new(order: u8, symbol_limit: Symbol, escape_symbol: Symbol) -> Self {\n\n debug_assert!(escape_symbol < 
symbol_limit);\n\n\n\n let mut context = Context::new(symbol_limit);\n\n context.frequencies.increment(escape_symbol);\n\n PPMModel {\n\n order_minus1_freqs: FlatFrequencyTable::new(symbol_limit),\n\n order,\n", "file_path": "src/encodings/ppm/model.rs", "rank": 66, "score": 5.459835649158701 }, { "content": "use std::collections::{HashMap, HashSet};\n\nuse std::iter::Iterator;\n\nuse std::str::from_utf8;\n\n\n\n// http://www.romjist.ro/content/pdf/08-radescu.pdf\n\n\n\nconst MIN_LENGH: usize = 2;\n\nconst MAX_LENGTH: usize = 22;\n\nconst ENCODING: &str = \"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVXXYZ\";\n\nconst VALID: &str = \"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVXXYZ\";\n\nconst DELIMS: [char; 32] = [\n\n ' ', '\\t', ',', ';', '.', ':', '?', '!', '(', ')', '[', ']', '{', '}', '<', '>', '/', '\\\\',\n\n '#', '`', '\\'', '\"', '|', '=', '-', '&', '_', '‘', '’', '+', '~', '@',\n\n];\n\n\n", "file_path": "src/encodings/startransform.rs", "rank": 67, "score": 4.929504729010932 }, { "content": " /// Decodes self and returns the content as bytes\n\n pub fn decode(self) -> Result<Vec<u8>> {\n\n let mut buffer = self.buffer;\n\n\n\n for transform in self.transforms.iter().rev() {\n\n println!(\" -> {} \", transform);\n\n buffer = match transform {\n\n Transform::BWT => bwt::reduce(&buffer),\n\n Transform::MTF => mtf::reduce(&buffer),\n\n Transform::RLE => rle::reduce(&buffer),\n\n Transform::PPM => ppm::reduce(&buffer)?,\n\n _ => unimplemented!(\"not implemented\"),\n\n };\n\n }\n\n Ok(buffer)\n\n }\n\n}\n\n\n", "file_path": "src/encodings/mod.rs", "rank": 68, "score": 4.660739517191685 }, { "content": " }\n\n \"d\" | \"decompress\" => {\n\n let output_file = input_file.clone().replace(\".comprs\", \".restored\");\n\n let mut reader = BufReader::new(File::open(input_file)?);\n\n let mut writer = BufWriter::new(File::create(&output_file)?);\n\n writer.write_all(&decompress_file(&mut reader)?)?;\n\n }\n\n _ => unreachable!(),\n\n }\n\n 
Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 69, "score": 4.567210112249802 }, { "content": " let mut translation_table = HashMap::new();\n\n\n\n for (i, item) in dictionaries.into_iter().enumerate().skip(1) {\n\n let mut dict: Vec<(&String, &usize)> = item.iter().collect();\n\n dict.sort_by(|a, b| b.1.cmp(a.1));\n\n\n\n // Ignore empty dictionaries\n\n if !dict.is_empty() {\n\n translation_table.extend(encode_dictionary(dict, i));\n\n }\n\n }\n\n\n\n // Verify translation table only holds unique values\n\n debug_assert!({\n\n let mut value_set = HashSet::new();\n\n for value in translation_table.values() {\n\n value_set.insert(value);\n\n }\n\n true\n\n });\n", "file_path": "src/encodings/startransform.rs", "rank": 70, "score": 4.458357048551664 }, { "content": "}\n\n\n\nimpl fmt::Display for Transform {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let printable = match *self {\n\n Transform::BWT => \"BWT\",\n\n Transform::MTF => \"MTF\",\n\n Transform::RLE => \"RLE\",\n\n Transform::ST => \"ST\",\n\n Transform::PPM => \"PPM\",\n\n };\n\n write!(f, \"{}\", printable)\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct TData {\n\n pub transforms: Vec<Transform>,\n\n pub buffer: Vec<u8>,\n\n}\n", "file_path": "src/encodings/mod.rs", "rank": 71, "score": 4.19647346687149 }, { "content": " 4: NAS..ANA\n\n 5: ANAS..AN\n\n 6: NANAS..A\n\n 7: ANANAS..\n\n */\n\n #[inline]\n\n fn index(&self, idx: usize) -> &u8 {\n\n let len = self.data.len();\n\n &self.data[(len - self.index as usize + idx) % len]\n\n }\n\n}\n\n\n\nimpl<'a> Ord for Permutation<'a> {\n\n fn cmp(&self, other: &Permutation<'a>) -> Ordering {\n\n if self.index != other.index {\n\n for i in 0..self.data.len() {\n\n match self[i].cmp(&other[i]) {\n\n Ordering::Equal => continue,\n\n o => return o,\n\n }\n", "file_path": "src/encodings/bwt.rs", "rank": 72, "score": 4.15139562155848 }, { "content": "# comprs\n\n\n\n[![Build 
Status](https://travis-ci.org/gcarq/comprs.svg?branch=master)](https://travis-ci.org/gcarq/comprs) [![Coverage Status](https://coveralls.io/repos/github/gcarq/comprs/badge.svg)](https://coveralls.io/github/gcarq/comprs)\n\n\n\nExperimental playground for compression algorithms in Rust.\n\nPPM and Arithmetic coder are inspired by: [Reference-arithmetic-coding](https://github.com/nayuki/Reference-arithmetic-coding).\n\n\n\nCurrently implemented algorithms:\n\n* [Prediction by Partial Matching](https://en.wikipedia.org/wiki/Prediction_by_partial_matching)\n\n* [Arithmetic coding](https://en.wikipedia.org/wiki/Arithmetic_coding)\n\n* [Burrows-Wheeler transform](https://en.wikipedia.org/wiki/Burrows%E2%80%93Wheeler_transform)\n\n* [Move-to-front transform](https://en.wikipedia.org/wiki/Move-to-front_transform)\n\n* [Run-length encoding](https://en.wikipedia.org/wiki/Run-length_encoding)\n\n\n\n## Usage\n\n\n\n```\n\n comprs 0.1.0\n\nExperimental playground for compression algorithms in Rust\n\n\n\nUSAGE:\n\n comprs [FLAGS] [OPTIONS] <mode> <file>\n\n\n\nFLAGS:\n\n -h, --help Prints help information\n\n -n Skip integrity check\n\n -v Sets the level of verbosity\n\n -V, --version Prints version information\n\n\n\nOPTIONS:\n\n -o <o> Specify compression level [default: 3] [possible values: 0, 1, 2, 3, 4, 5, 6]\n\n\n\nARGS:\n\n <mode> mode [possible values: c, d, compress, decompress]\n\n <file> Sets the input file to use\n\n```\n\n\n\n## Building\n\n\n\n```\n\n$ git clone https://github.com/gcarq/comprs.git\n\n$ cd comprs\n\n$ cargo build --release\n\n```\n\n\n\n## Example\n\n\n\n```\n\n$ wget -O world95.txt https://www.gutenberg.org/files/27560/27560.txt\n\n$ ./target/release/comprs c world95.txt\n\nApplying preprocessors ...\n\n -> BWT\n\n -> MTF\n\nCompressing file ...\n\nCompressed Size: 1683194\n\nCompress Ratio: 5.1 (80.57%)\n\nBits per Byte: 1.5542\n\nVerifying compressed file ...\n\nDecoding preprocessors ...\n\n -> MTF\n\n -> BWT\n\nchecksum is OK - 
1921997141\n\n```\n", "file_path": "README.md", "rank": 73, "score": 4.147287166093401 }, { "content": " let mut verify = true;\n\n if matches.is_present(\"no-verify\") {\n\n verify = false;\n\n };\n\n\n\n match matches.value_of(\"mode\").unwrap() {\n\n \"c\" | \"compress\" => {\n\n let mut reader = BufReader::new(File::open(&input_file)?);\n\n let output_file = format!(\"{}.comprs\", input_file.clone());\n\n debug!(\"DEBUG: Saving output to: {}\", &output_file);\n\n let mut writer = BufWriter::new(File::create(&output_file)?);\n\n\n\n writer.write_all(&compress_file(&mut reader)?)?;\n\n\n\n print_statistics(\n\n &File::open(&input_file)?.metadata()?,\n\n &File::open(&output_file)?.metadata()?,\n\n );\n\n\n\n if !verify {\n", "file_path": "src/main.rs", "rank": 74, "score": 3.6476920529172694 }, { "content": " return Ok(());\n\n }\n\n\n\n println!(\"Verifying compressed file ...\");\n\n\n\n let restored = decompress_file(&mut BufReader::new(File::open(&output_file)?))?;\n\n\n\n // Calculate checksums\n\n let input_checksum = adler32(&mut File::open(&input_file)?)?;\n\n let restored_checksum = adler32(restored.as_slice())?;\n\n\n\n // Sanity check\n\n if input_checksum == restored_checksum {\n\n println!(\"checksum is OK - {}\", restored_checksum);\n\n } else {\n\n panic!(format!(\n\n \"FATAL: checksum does not match! - {}\",\n\n restored_checksum\n\n ));\n\n }\n", "file_path": "src/main.rs", "rank": 75, "score": 3.6324875738009172 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::io::Result;\n\n\n\n use super::{apply, reduce};\n\n\n\n #[test]\n\n fn test_compression() -> Result<()> {\n\n let original: Vec<u8> = String::from(\n\n \"\\\n\n Lorem Ipsum is simply dummy text of the printing and typesetting industry.\\\n\n Lorem Ipsum has been the industry's standard dummy text ever since the 1500s,\\\n\n when an unknown printer took a galley of type and scrambled it to make a type \\\n\n specimen book. 
It has survived not only five centuries, but also the leap into \\\n\n electronic typesetting, remaining essentially unchanged. It was popularised in \\\n\n the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, \\\n\n and more recently with desktop publishing software like Aldus PageMaker including \\\n\n versions of Lorem Ipsum.\",\n", "file_path": "src/encodings/ppm/mod.rs", "rank": 76, "score": 3.557047641928337 }, { "content": " Lorem Ipsum has been the industry's standard dummy text ever since the 1500s,\n\n when an unknown printer took a galley of type and scrambled it to make a type specimen book.\n\n It has survived not only five centuries, but also the leap into electronic typesetting,\n\n remaining essentially unchanged. It was popularised in the 1960s with the release of\n\n Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing\n\n software like Aldus PageMaker including versions of Lorem Ipsum.\n\n It is a long established fact that a reader will be distracted by the readable content\n\n of a page when looking at its layout. The point of using Lorem Ipsum is that it has a\n\n more-or-less normal distribution of letters, as opposed to using 'Content here, content here',\n\n making it look like readable English. Many desktop publishing packages and web page editors now\n\n use Lorem Ipsum as their default model text, and a search for 'lorem ipsum' will uncover many\n\n web sites still in their infancy. Various versions have evolved over the years,\n\n sometimes by accident, sometimes on purpose (injected humour and the like).\n\n It is a long established fact that a reader will be distracted by the\n\n readable content of a page when looking at its layout.\n\n The point of using Lorem Ipsum is that it has a more-or-less normal\n\n distribution of letters, as opposed to using 'Content here, content here',\n\n making it look like readable English. 
Many desktop publishing packages and\n\n web page editors now use Lorem Ipsum as their default model text, and a search for\n\n 'lorem ipsum' will uncover many web sites still in their infancy. Various versions\n", "file_path": "src/main.rs", "rank": 77, "score": 3.2974860062909483 }, { "content": "use std::cmp::Ordering;\n\nuse std::ops::Index;\n\n\n\nuse rayon::iter::{\n\n IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator, ParallelIterator,\n\n};\n\nuse rayon::prelude::ParallelSliceMut;\n\n\n\nconst CHUNK_SIZE: u32 = 1024 * 1024;\n\n\n", "file_path": "src/encodings/bwt.rs", "rank": 78, "score": 3.0334231720242477 }, { "content": " .takes_value(true)\n\n .default_value(\"3\")\n\n .possible_values(&[\"0\", \"1\", \"2\", \"3\", \"4\", \"5\", \"6\"])\n\n .help(\"Specify compression level\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"v\")\n\n .short(\"v\")\n\n .multiple(true)\n\n .help(\"Sets the level of verbosity\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"no-verify\")\n\n .short(\"n\")\n\n .multiple(false)\n\n .help(\"Skip integrity check\"),\n\n )\n\n .get_matches();\n\n\n\n let input_file = String::from(matches.value_of(\"file\").unwrap());\n", "file_path": "src/main.rs", "rank": 79, "score": 3.008358324607978 }, { "content": " symbol_limit,\n\n escape_symbol,\n\n context,\n\n }\n\n }\n\n\n\n pub fn increment_contexts(&mut self, history: &[Symbol], symbol: Symbol) {\n\n let hist_len = history.len();\n\n\n\n debug_assert!(hist_len <= self.order as usize);\n\n debug_assert!(symbol < self.symbol_limit);\n\n\n\n for order in 0..=hist_len {\n\n populate_contexts(\n\n &mut self.context,\n\n &history[hist_len - order..hist_len],\n\n symbol,\n\n self.escape_symbol,\n\n self.symbol_limit,\n\n );\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/encodings/ppm/model.rs", "rank": 80, "score": 2.994728593306233 }, { "content": " ];\n\n\n\n let result = reduce(&test_data);\n\n assert_eq!(result, String::from(\".ANANAS..ANANAS.123\").into_bytes());\n\n }\n\n\n\n 
#[test]\n\n fn test_encode() {\n\n let input: Vec<u8> = String::from(\".ANANAS.\").into_bytes();\n\n let result = BWTChunk::encode(&input);\n\n assert_eq!(result.index, 1);\n\n assert_eq!(\n\n String::from(\"S..NNAAA\"),\n\n String::from_utf8(result.data).unwrap()\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_decode() {\n\n let input: Vec<u8> = String::from(\"S..NNAAA\").into_bytes();\n", "file_path": "src/encodings/bwt.rs", "rank": 81, "score": 2.956949997911872 }, { "content": "use std::collections::HashMap;\n\nuse std::fs::Metadata;\n\n\n\n/// Calculates shannon entropy for the given slice\n", "file_path": "src/utils/mod.rs", "rank": 82, "score": 2.898706191272718 }, { "content": "\n\n table.par_sort();\n\n\n\n // Build decoded content\n\n let mut decoded = Vec::with_capacity(len);\n\n let mut idx: usize = self.index as usize;\n\n for _ in 0..len {\n\n decoded.push(table[idx].char);\n\n idx = table[idx].position as usize;\n\n }\n\n decoded\n\n }\n\n}\n\n\n", "file_path": "src/encodings/bwt.rs", "rank": 83, "score": 2.8589742232159403 }, { "content": "Copyright (c) 2018, Michael Egger\n\nAll rights reserved.\n\n\n\nRedistribution and use in source and binary forms, with or without\n\nmodification, are permitted provided that the following conditions are met:\n\n\n\n* Redistributions of source code must retain the above copyright notice, this\n\n list of conditions and the following disclaimer.\n\n\n\n* Redistributions in binary form must reproduce the above copyright notice,\n\n this list of conditions and the following disclaimer in the documentation\n\n and/or other materials provided with the distribution.\n\n\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\n\nAND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n\nIMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n\nDISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n\nFOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n\nDAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\nSERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n\nCAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n\nOR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n", "file_path": "LICENSE.md", "rank": 84, "score": 2.778506511749906 }, { "content": " have evolved over the years, sometimes by accident, sometimes on purpose\n\n (injected humour and the like). There are many variations of passages of\n\n Lorem Ipsum available, but the majority have suffered alteration in some form,\n\n by injected humour, or randomised words which don't look even slightly believable.\n\n If you are going to use a passage of Lorem Ipsum, you need to be sure there isn't\n\n anything embarrassing hidden in the middle of text. All the Lorem Ipsum generators\n\n on the Internet tend to repeat predefined chunks as necessary,\n\n making this the first true generator on the Internet.\n\n It uses a dictionary of over 200 Latin words,\n\n combined with a handful of model sentence structures,\n\n to generate Lorem Ipsum which looks reasonable. 
The generated Lorem Ipsum is\n\n therefore always free from repetition, injected humour, or non-characteristic words etc.\"\n\n ).into_bytes();\n\n\n\n let compressed = compress_file(test_data.as_slice())?;\n\n let restored = decompress_file(compressed.as_slice())?;\n\n\n\n assert_eq!(restored, test_data);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 85, "score": 2.491693325096212 }, { "content": " )\n\n .into_bytes();\n\n\n\n let intermediate = apply(&original)?;\n\n let restored = reduce(&intermediate)?;\n\n\n\n assert_eq!(original, restored);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/encodings/ppm/mod.rs", "rank": 86, "score": 2.4756457978157798 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::{apply, reduce, BWTChunk};\n\n\n\n #[test]\n\n fn test_apply() {\n\n let test_data = vec![\n\n 01, 00, 00, 00, 00, 00, 00, 00, 19, 00, 00, 00, 00, 00, 00, 00, 83, 83, 51, 46, 46, 49,\n\n 50, 46, 46, 78, 78, 78, 78, 65, 65, 65, 65, 65, 65, 02, 00, 00, 00,\n\n ];\n\n\n\n let data = String::from(\".ANANAS..ANANAS.123\").into_bytes();\n\n assert_eq!(apply(&data), test_data);\n\n }\n\n\n\n #[test]\n\n fn test_reduce() {\n\n let test_data = vec![\n\n 01, 00, 00, 00, 00, 00, 00, 00, 19, 00, 00, 00, 00, 00, 00, 00, 83, 83, 51, 46, 46, 49,\n\n 50, 46, 46, 78, 78, 78, 78, 65, 65, 65, 65, 65, 65, 02, 00, 00, 00,\n", "file_path": "src/encodings/bwt.rs", "rank": 87, "score": 1.729401134302937 }, { "content": "\n\n for transform in &transforms {\n\n println!(\" -> {} \", transform);\n\n buffer = match transform {\n\n Transform::ST => startransform::apply(&buffer),\n\n Transform::BWT => bwt::apply(&buffer),\n\n Transform::MTF => mtf::apply(&buffer),\n\n Transform::RLE => rle::apply(&buffer)?,\n\n Transform::PPM => ppm::apply(&buffer)?,\n\n };\n\n }\n\n\n\n debug!(\"DEBUG: Size after preprocessing: {}\", &buffer.len());\n\n debug!(\n\n \"DEBUG: File entropy after preprocessing: {:.2}\",\n\n calc_entropy(&buffer)\n\n );\n\n Ok(TData { transforms, buffer })\n\n 
}\n\n\n", "file_path": "src/encodings/mod.rs", "rank": 88, "score": 1.4599220397405737 } ]
Rust
www/src/main.rs
noguxun/rust_web_play
8b8b9a8127687ba347a9c27db40debce384a7e2a
#[macro_use] extern crate derive_more; use bytes::BytesMut; use env_logger::{Builder, Env}; use futures::future; use futures::stream::StreamExt; use futures::FutureExt; use handlebars::Handlebars; use http::header::{HeaderMap, HeaderValue}; use http::status::StatusCode; use http::Uri; use hyper::service::{make_service_fn, service_fn}; use hyper::{header, Body, Method, Request, Response, Server}; use log::{debug, error, info, trace, warn}; use percent_encoding::percent_decode_str; use serde::Serialize; use std::error::Error as StdError; use std::io; use std::net::SocketAddr; use std::path::{Path, PathBuf}; use structopt::StructOpt; use tokio::codec::{BytesCodec, FramedRead}; use tokio::fs::File; use tokio::runtime::Runtime; mod ext; fn main() { if let Err(e) = run() { log_error_chain(&e); } } fn log_error_chain(mut e: &dyn StdError) { error!("error: {}", e); while let Some(source) = e.source() { error!("caused by: {}", source); e = source; } } #[derive(Clone, StructOpt)] #[structopt(about = "A basic HTTP file server")] pub struct Config { #[structopt( name = "ADDR", short = "a", long = "addr", parse(try_from_str), default_value = "127.0.0.1:4000" )] addr: SocketAddr, #[structopt(name = "ROOT", parse(from_os_str), default_value = ".")] root_dir: PathBuf, #[structopt(short = "x")] use_extensions: bool, } fn run() -> Result<()> { let env = Env::new().default_filter_or("basic_http_server=info"); Builder::from_env(env) .default_format_module_path(false) .default_format_timestamp(false) .init(); let config = Config::from_args(); info!("basic-http-server {}", env!("CARGO_PKG_VERSION")); info!("addr: http://{}", config.addr); info!("root dir: {}", config.root_dir.display()); info!("extensions: {}", config.use_extensions); let make_service = make_service_fn(|_| { let config = config.clone(); let service = service_fn(move |req| { let config = config.clone(); serve(config, req).map(Ok::<_, Error>) }); future::ok::<_, Error>(service) }); let server = 
Server::bind(&config.addr).serve(make_service); let rt = Runtime::new()?; rt.block_on(server)?; Ok(()) } async fn serve(config: Config, req: Request<Body>) -> Response<Body> { let resp = serve_or_error(config, req).await; let resp = transform_error(resp); resp } async fn serve_or_error(config: Config, req: Request<Body>) -> Result<Response<Body>> { if let Some(resp) = handle_unsupported_request(&req) { return resp; } let resp = serve_file(&req, &config.root_dir).await; let resp = ext::serve(config, req, resp).await; resp } async fn serve_file(req: &Request<Body>, root_dir: &PathBuf) -> Result<Response<Body>> { let maybe_redir_resp = try_dir_redirect(req, &root_dir)?; if let Some(redir_resp) = maybe_redir_resp { return Ok(redir_resp); } let path = local_path_with_maybe_index(req.uri(), &root_dir)?; Ok(respond_with_file(path).await?) } fn try_dir_redirect(req: &Request<Body>, root_dir: &PathBuf) -> Result<Option<Response<Body>>> { if req.uri().path().ends_with("/") { return Ok(None); } debug!("path does not end with /"); let path = local_path_for_request(req.uri(), root_dir)?; if !path.is_dir() { return Ok(None); } let mut new_loc = req.uri().path().to_string(); new_loc.push_str("/"); if let Some(query) = req.uri().query() { new_loc.push_str("?"); new_loc.push_str(query); } info!("redirecting {} to {}", req.uri(), new_loc); Response::builder() .status(StatusCode::FOUND) .header(header::LOCATION, new_loc) .body(Body::empty()) .map(Some) .map_err(Error::from) } async fn respond_with_file(path: PathBuf) -> Result<Response<Body>> { let mime_type = file_path_mime(&path); let file = File::open(path).await?; let meta = file.metadata().await?; let len = meta.len(); let codec = BytesCodec::new(); let stream = FramedRead::new(file, codec); let stream = stream.map(|b| b.map(BytesMut::freeze)); let body = Body::wrap_stream(stream); let resp = Response::builder() .status(StatusCode::OK) .header(header::CONTENT_LENGTH, len as u64) .header(header::CONTENT_TYPE, mime_type.as_ref()) 
.header(header::ACCESS_CONTROL_ALLOW_ORIGIN, "*") .body(body)?; Ok(resp) } fn file_path_mime(file_path: &Path) -> mime::Mime { mime_guess::from_path(file_path).first_or_octet_stream() } fn local_path_with_maybe_index(uri: &Uri, root_dir: &Path) -> Result<PathBuf> { local_path_for_request(uri, root_dir).map(|mut p: PathBuf| { if p.is_dir() { p.push("index.html"); debug!("trying {} for directory URL", p.display()); } else { trace!("trying path as from URL"); } p }) } fn local_path_for_request(uri: &Uri, root_dir: &Path) -> Result<PathBuf> { debug!("raw URI: {}", uri); let request_path = uri.path(); debug!("raw URI to path: {}", request_path); let end = request_path.find('?').unwrap_or(request_path.len()); let request_path = &request_path[0..end]; let decoded = percent_decode_str(&request_path); let request_path = if let Ok(p) = decoded.decode_utf8() { p } else { error!("non utf-8 URL: {}", request_path); return Err(Error::UriNotUtf8); }; let mut path = root_dir.to_owned(); if request_path.starts_with('/') { path.push(&request_path[1..]); } else { warn!("found non-absolute path {}", request_path); return Err(Error::UriNotAbsolute); } debug!("URL · path : {} · {}", uri, path.display()); Ok(path) } fn handle_unsupported_request(req: &Request<Body>) -> Option<Result<Response<Body>>> { get_unsupported_request_message(req) .map(|unsup| make_error_response_from_code_and_headers(unsup.code, unsup.headers)) } struct Unsupported { code: StatusCode, headers: HeaderMap, } fn get_unsupported_request_message(req: &Request<Body>) -> Option<Unsupported> { use std::iter::FromIterator; if req.method() != Method::GET { return Some(Unsupported { code: StatusCode::METHOD_NOT_ALLOWED, headers: HeaderMap::from_iter(vec![(header::ALLOW, HeaderValue::from_static("GET"))]), }); } None } fn transform_error(resp: Result<Response<Body>>) -> Response<Body> { match resp { Ok(r) => r, Err(e) => { let resp = make_error_response(e); match resp { Ok(r) => r, Err(e) => { error!("unexpected internal 
error: {}", e); Response::new(Body::from(format!("unexpected internal error: {}", e))) } } } } } fn make_error_response(e: Error) -> Result<Response<Body>> { let resp = match e { Error::Io(e) => make_io_error_response(e)?, Error::Ext(ext::Error::Io(e)) => make_io_error_response(e)?, e => make_internal_server_error_response(e)?, }; Ok(resp) } fn make_internal_server_error_response(err: Error) -> Result<Response<Body>> { log_error_chain(&err); let resp = make_error_response_from_code(StatusCode::INTERNAL_SERVER_ERROR)?; Ok(resp) } fn make_io_error_response(error: io::Error) -> Result<Response<Body>> { let resp = match error.kind() { io::ErrorKind::NotFound => { debug!("{}", error); make_error_response_from_code(StatusCode::NOT_FOUND)? } _ => make_internal_server_error_response(Error::Io(error))?, }; Ok(resp) } fn make_error_response_from_code(status: StatusCode) -> Result<Response<Body>> { make_error_response_from_code_and_headers(status, HeaderMap::new()) } fn make_error_response_from_code_and_headers( status: StatusCode, headers: HeaderMap, ) -> Result<Response<Body>> { let body = render_error_html(status)?; let resp = html_str_to_response_with_headers(body, status, headers)?; Ok(resp) } fn html_str_to_response(body: String, status: StatusCode) -> Result<Response<Body>> { html_str_to_response_with_headers(body, status, HeaderMap::new()) } fn html_str_to_response_with_headers( body: String, status: StatusCode, headers: HeaderMap, ) -> Result<Response<Body>> { let mut builder = Response::builder(); builder.headers_mut().map(|h| h.extend(headers)); builder .status(status) .header(header::CONTENT_LENGTH, body.len()) .header(header::CONTENT_TYPE, mime::TEXT_HTML.as_ref()) .body(Body::from(body)) .map_err(Error::from) } static HTML_TEMPLATE: &str = include_str!("template.html"); #[derive(Serialize)] struct HtmlCfg { title: String, body: String, } fn render_html(cfg: HtmlCfg) -> Result<String> { let reg = Handlebars::new(); let rendered = reg 
.render_template(HTML_TEMPLATE, &cfg) .map_err(Error::TemplateRender)?; Ok(rendered) } fn render_error_html(status: StatusCode) -> Result<String> { render_html(HtmlCfg { title: format!("{}", status), body: String::new(), }) } pub type Result<T> = std::result::Result<T, Error>; #[derive(Debug, Display)] pub enum Error { #[display(fmt = "Extension error")] Ext(ext::Error), #[display(fmt = "HTTP error")] Http(http::Error), #[display(fmt = "Hyper error")] Hyper(hyper::Error), #[display(fmt = "I/O error")] Io(io::Error), #[display(fmt = "failed to parse IP address")] AddrParse(std::net::AddrParseError), #[display(fmt = "failed to render template")] TemplateRender(handlebars::TemplateRenderError), #[display(fmt = "requested URI is not an absolute path")] UriNotAbsolute, #[display(fmt = "requested URI is not UTF-8")] UriNotUtf8, } impl StdError for Error { fn source(&self) -> Option<&(dyn StdError + 'static)> { use Error::*; match self { Ext(e) => Some(e), Io(e) => Some(e), Http(e) => Some(e), Hyper(e) => Some(e), AddrParse(e) => Some(e), TemplateRender(e) => Some(e), UriNotAbsolute => None, UriNotUtf8 => None, } } } impl From<ext::Error> for Error { fn from(e: ext::Error) -> Error { Error::Ext(e) } } impl From<http::Error> for Error { fn from(e: http::Error) -> Error { Error::Http(e) } } impl From<hyper::Error> for Error { fn from(e: hyper::Error) -> Error { Error::Hyper(e) } } impl From<io::Error> for Error { fn from(e: io::Error) -> Error { Error::Io(e) } }
#[macro_use] extern crate derive_more; use bytes::BytesMut; use env_logger::{Builder, Env}; use futures::future; use futures::stream::StreamExt; use futures::FutureExt; use handlebars::Handlebars; use http::header::{HeaderMap, HeaderValue}; use http::status::StatusCode; use http::Uri; use hyper::service::{make_service_fn, service_fn}; use hyper::{header, Body, Method, Request, Response, Server}; use log::{debug, error, info, trace, warn}; use percent_encoding::percent_decode_str; use serde::Serialize; use std::error::Error as StdError; use std::io; use std::net::SocketAddr; use std::path::{Path, PathBuf}; use structopt::StructOpt; use tokio::codec::{BytesCodec, FramedRead}; use tokio::fs::File; use tokio::runtime::Runtime; mod ext; fn main() { if let Err(e) = run() { log_error_chain(&e); } } fn log_error_chain(mut e: &dyn StdError) { error!("error: {}", e); while let Some(source) = e.source() { error!("caused by: {}", source); e = source; } } #[derive(Clone, StructOpt)] #[structopt(about = "A basic HTTP file server")] pub struct Config { #[structopt( name = "ADDR", short = "a", long = "addr", parse(try_from_str), default_value = "127.0.0.1:4000" )] addr: SocketAddr, #[structopt(name = "ROOT", parse(from_os_str), default_value = ".")] root_dir: PathBuf, #[structopt(short = "x")] use_extensions: bool, } fn run() -> Result<()> { let env = Env::new().default_filter_or("basic_http_server=info"); Builder::from_env(env) .default_format_module_path(false) .default_format_timestamp(false) .init(); let config = Config::from_args(); info!("basic-http-server {}", env!("CARGO_PKG_VERSION")); info!("addr: http://{}", config.addr); info!("root dir: {}", config.root_dir.display()); info!("extensions: {}", config.use_extensions); let make_service = make_service_fn(|_| { let config = config.clone(); let service = service_fn(move |req| { let config = config.clone(); serve(config, req).map(Ok::<_, Error>) }); future::ok::<_, Error>(service) }); let server = 
Server::bind(&config.addr).serve(make_service); let rt = Runtime::new()?; rt.block_on(server)?; Ok(()) } async fn serve(config: Config, req: Request<Body>) -> Response<Body> { let resp = serve_or_error(config, req).await; let resp = transform_error(resp); resp } async fn serve_or_error(config: Config, req: Request<Body>) -> Result<Response<Body>> { if let Some(resp) = handle_unsupported_request(&req) { return resp; } let resp = serve_file(&req, &config.root_dir).await; let resp = ext::serve(config, req, resp).await; resp } async fn serve_file(req: &Request<Body>, root_dir: &PathBuf) -> Result<Response<Body>> { let maybe_redir_resp = try_dir_redirect(req, &root_dir)?; if let Some(redir_resp) = maybe_redir_resp { return Ok(redir_resp); } let path = local_path_with_maybe_index(req.uri(), &root_dir)?; Ok(respond_with_file(path).await?) } fn try_dir_redirect(req: &Request<Body>, root_dir: &PathBuf) -> Result<Option<Response<Body>>> { if req.uri().path().ends_with("/") { return Ok(None); } debug!("path does not end with /"); let path = local_path_for_request(req.uri(), root_dir)?; if !path.is_dir() { return Ok(None); } let mut new_loc = req.uri().path().to_string(); new_loc.push_str("/"); if let Some(query) = req.uri().query() { new_loc.push_str("?"); new_loc.push_str(query); } info!("redirecting {} to {}", req.uri(), new_loc); Response::builder() .status(StatusCode::FOUND) .header(header::LOCATION, new_loc) .body(Body::empty()) .map(Some) .map_err(Error::from) } async fn respond_with_file(path: PathBuf) -> Result<Response<Body>> { let mime_type = file_path_mime(&path); let file = File::open(path).await?; let meta = file.metadata().await?; let len = meta.len(); let codec = BytesCodec::new(); let stream = FramedRead::new(file, codec); let stream = stream.map(|b| b.map(BytesMut::freeze)); let body = Body::wrap_stream(stream); let resp = Response::builder() .status(StatusCode::OK) .header(header::CONTENT_LENGTH, len as u64) .header(header::CONTENT_TYPE, mime_type.as_ref()) 
.header(header::ACCESS_CONTROL_ALLOW_ORIGIN, "*") .body(body)?; Ok(resp) } fn file_path_mime(file_path: &Path) -> mime::Mime { mime_guess::from_path(file_path).first_or_octet_stream() } fn local_path_with_maybe_index(uri: &Uri, root_dir: &Path) -> Result<PathBuf> { local_path_for_request(uri, root_dir).map(|mut p: PathBuf| { if p.is_dir() { p.push("index.html"); debug!("trying {} for directory URL", p.display()); } else { trace!("trying path as from URL"); } p }) } fn local_path_for_request(uri: &Uri, root_dir: &Path) -> Result<PathBuf> { debug!("raw URI: {}", uri); let request_path = uri.path(); debug!("raw URI to path: {}", request_path); let end = request_path.find('?').unwrap_or(request_path.len()); let request_path = &request_path[0..end]; let decoded = percent_decode_str(&request_path); let request_path = if let Ok(p) = decoded.decode_utf8() { p } else { error!("non utf-8 URL: {}", request_path); return Err(Error::UriNotUtf8); }; let mut path = root_dir.to_owned(); if request_path.starts_with('/') { path.push(&request_path[1..]); } else { warn!("found non-absolute path {}", request_path); return Err(Error::UriNotAbsolute); } debug!("URL · path : {} · {}", uri, path.display()); Ok(path) } fn handle_unsupported_request(req: &Request<Body>) -> Option<Result<Response<Body>>> { get_unsupported_request_message(req) .map(|unsup| make_error_response_from_code_and_headers(unsup.code, unsup.headers)) } struct Unsupported { code: StatusCode, headers: HeaderMap, } fn get_unsupported_request_message(req: &Request<Body>) -> Option<Unsupported> { use std::iter::FromIterator; if req.method() != Method::GET { return Some(Unsupported { code: StatusCode::METHOD_NOT_ALLOWED, headers: HeaderMap::from_iter(vec![(header::ALLOW, HeaderValue::from_static("GET"))]), }); } None } fn transform_error(resp: Result<Response<Body>>) -> Response<Body> { match resp { Ok(r) => r, Err(e) => { let resp = make_error_response(e); match resp { Ok(r) => r, Err(e) => { error!("unexpected internal 
error: {}", e); Response::new(Body::from(format!("unexpected internal error: {}", e))) } } } } } fn make_error_response(e: Error) -> Result<Response<Body>> { let resp = match e { Error::Io(e) => make_io_error_response(e)?, Error::Ext(ext::Error::Io(e)) => make_io_error_response(e)?, e => make_internal_server_error_response(e)?, }; Ok(resp) } fn make_internal_server_error_response(err: Error) -> Result<Response<Body>> { log_error_chain(&err); let resp = make_error_response_from_code(StatusCode::INTERNAL_SERVER_ERROR)?; Ok(resp) } fn make_io_error_response(error: io::Error) -> Result<Response<Body>> { let resp =
; Ok(resp) } fn make_error_response_from_code(status: StatusCode) -> Result<Response<Body>> { make_error_response_from_code_and_headers(status, HeaderMap::new()) } fn make_error_response_from_code_and_headers( status: StatusCode, headers: HeaderMap, ) -> Result<Response<Body>> { let body = render_error_html(status)?; let resp = html_str_to_response_with_headers(body, status, headers)?; Ok(resp) } fn html_str_to_response(body: String, status: StatusCode) -> Result<Response<Body>> { html_str_to_response_with_headers(body, status, HeaderMap::new()) } fn html_str_to_response_with_headers( body: String, status: StatusCode, headers: HeaderMap, ) -> Result<Response<Body>> { let mut builder = Response::builder(); builder.headers_mut().map(|h| h.extend(headers)); builder .status(status) .header(header::CONTENT_LENGTH, body.len()) .header(header::CONTENT_TYPE, mime::TEXT_HTML.as_ref()) .body(Body::from(body)) .map_err(Error::from) } static HTML_TEMPLATE: &str = include_str!("template.html"); #[derive(Serialize)] struct HtmlCfg { title: String, body: String, } fn render_html(cfg: HtmlCfg) -> Result<String> { let reg = Handlebars::new(); let rendered = reg .render_template(HTML_TEMPLATE, &cfg) .map_err(Error::TemplateRender)?; Ok(rendered) } fn render_error_html(status: StatusCode) -> Result<String> { render_html(HtmlCfg { title: format!("{}", status), body: String::new(), }) } pub type Result<T> = std::result::Result<T, Error>; #[derive(Debug, Display)] pub enum Error { #[display(fmt = "Extension error")] Ext(ext::Error), #[display(fmt = "HTTP error")] Http(http::Error), #[display(fmt = "Hyper error")] Hyper(hyper::Error), #[display(fmt = "I/O error")] Io(io::Error), #[display(fmt = "failed to parse IP address")] AddrParse(std::net::AddrParseError), #[display(fmt = "failed to render template")] TemplateRender(handlebars::TemplateRenderError), #[display(fmt = "requested URI is not an absolute path")] UriNotAbsolute, #[display(fmt = "requested URI is not UTF-8")] UriNotUtf8, } 
impl StdError for Error { fn source(&self) -> Option<&(dyn StdError + 'static)> { use Error::*; match self { Ext(e) => Some(e), Io(e) => Some(e), Http(e) => Some(e), Hyper(e) => Some(e), AddrParse(e) => Some(e), TemplateRender(e) => Some(e), UriNotAbsolute => None, UriNotUtf8 => None, } } } impl From<ext::Error> for Error { fn from(e: ext::Error) -> Error { Error::Ext(e) } } impl From<http::Error> for Error { fn from(e: http::Error) -> Error { Error::Http(e) } } impl From<hyper::Error> for Error { fn from(e: hyper::Error) -> Error { Error::Hyper(e) } } impl From<io::Error> for Error { fn from(e: io::Error) -> Error { Error::Io(e) } }
match error.kind() { io::ErrorKind::NotFound => { debug!("{}", error); make_error_response_from_code(StatusCode::NOT_FOUND)? } _ => make_internal_server_error_response(Error::Io(error))?, }
if_condition
[ { "content": "fn make_dir_list_body(root_dir: &Path, paths: &[PathBuf]) -> Result<String> {\n\n let mut buf = String::new();\n\n\n\n writeln!(buf, \"<div>\").map_err(Error::WriteInDirList)?;\n\n\n\n let dot_dot = OsStr::new(\"..\");\n\n\n\n for path in paths {\n\n let full_url = path\n\n .strip_prefix(root_dir)\n\n .map_err(Error::StripPrefixInDirList)?;\n\n let maybe_dot_dot = || {\n\n if path.ends_with(\"..\") {\n\n Some(dot_dot)\n\n } else {\n\n None\n\n }\n\n };\n\n if let Some(file_name) = path.file_name().or_else(maybe_dot_dot) {\n\n if let Some(file_name) = file_name.to_str() {\n", "file_path": "www/src/ext.rs", "rank": 5, "score": 168448.86293873462 }, { "content": "fn maybe_convert_mime_type_to_text(req: &Request<Body>, resp: &mut Response<Body>) {\n\n let path = req.uri().path();\n\n let file_name = path.rsplit('/').next();\n\n if let Some(file_name) = file_name {\n\n let mut do_convert = false;\n\n\n\n let ext = file_name.rsplit('.').next();\n\n if let Some(ext) = ext {\n\n if TEXT_EXTENSIONS.contains(&ext) {\n\n do_convert = true;\n\n }\n\n }\n\n\n\n if TEXT_FILES.contains(&file_name) {\n\n do_convert = true;\n\n }\n\n\n\n if do_convert {\n\n use http::header::HeaderValue;\n\n let val =\n", "file_path": "www/src/ext.rs", "rank": 6, "score": 157407.3384861857 }, { "content": "#[wasm_bindgen(start)]\n\npub fn main() -> Result<(), JsValue> {\n\n // Use `web_sys`'s global `window` function to get a handle on the global window object. 
\n\n let window = web_sys::window().expect(\"no global `window` exists\");\n\n let document = window.document().expect(\"should have a document on window\");\n\n let body = document.body().expect(\"document should have a body\");\n\n\n\n // Manufacture the element we're gonna append\n\n let val = document.create_element(\"p\")?;\n\n val.set_inner_html(\"Hello from Rust!\");\n\n\n\n body.append_child(&val)?; \n\n\n\n Ok(())\n\n}\n\n\n\n// Export a simple function\n", "file_path": "src/lib.rs", "rank": 12, "score": 123156.95130902066 }, { "content": "#[wasm_bindgen]\n\npub fn greet(name: &str) {\n\n alert(&format!(\"Hello, {}!\", name));\n\n}\n\n\n\n// Called when the wasm module is instantiated\n\n// https://rustwasm.github.io/docs/wasm-bindgen/examples/without-a-bundler.html\n\n// Shows how to manipulate DOM in the parent html page\n", "file_path": "src/lib.rs", "rank": 16, "score": 97439.36132244598 }, { "content": "/// Draw Mandelbrot set\n\npub fn draw(element: HtmlCanvasElement) -> DrawResult<impl Fn((i32, i32)) -> Option<(f64, f64)>> {\n\n let backend = CanvasBackend::with_canvas_object(element).unwrap();\n\n\n\n let root = backend.into_drawing_area();\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .margin(20)\n\n .x_label_area_size(10)\n\n .y_label_area_size(10)\n\n .build_ranged(-2.1..0.6, -1.2..1.2)?;\n\n\n\n chart\n\n .configure_mesh()\n\n .disable_x_mesh()\n\n .disable_y_mesh()\n\n .draw()?;\n\n\n\n let plotting_area = chart.plotting_area();\n\n\n", "file_path": "src/mandelbrot.rs", "rank": 25, "score": 59665.92176720745 }, { "content": "/// Draw power function f(x) = x^power.\n\npub fn draw(canvas_id: &str, power: i32) -> DrawResult<impl Fn((i32, i32)) -> Option<(f32, f32)>> {\n\n let backend = CanvasBackend::new(canvas_id).expect(\"cannot find canvas\");\n\n let root = backend.into_drawing_area();\n\n let font: FontDesc = (\"sans-serif\", 20.0).into();\n\n\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = 
ChartBuilder::on(&root)\n\n .caption(format!(\"y=x^{}\", power), font)\n\n .x_label_area_size(30)\n\n .y_label_area_size(30)\n\n .build_ranged(-1f32..1f32, -1.2f32..1.2f32)?;\n\n\n\n chart.configure_mesh().x_labels(3).y_labels(3).draw()?;\n\n\n\n chart.draw_series(LineSeries::new(\n\n (-50..=50)\n\n .map(|x| x as f32 / 50.0)\n\n .map(|x| (x, x.powf(power as f32))),\n\n &RED,\n\n ))?;\n\n\n\n root.present()?;\n\n return Ok(chart.into_coord_trans());\n\n}", "file_path": "src/func_plot.rs", "rank": 26, "score": 56058.3595231653 }, { "content": "#[wasm_bindgen]\n\npub fn add(a: u32, b: u32) -> u32 {\n\n a + b\n\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n// https://github.com/rustwasm/wasm-bindgen/blob/master/examples/fetch/src/lib.rs\n\n\n\nuse serde::{Deserialize, Serialize};\n\nuse wasm_bindgen::JsCast;\n\nuse wasm_bindgen_futures::JsFuture;\n\nuse web_sys::{Request, RequestInit, RequestMode, Response};\n\nuse serde_json::Value;\n\n\n\n\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n", "file_path": "src/lib.rs", "rank": 27, "score": 54746.298580263116 }, { "content": "fn mandelbrot_set(\n\n real: Range<f64>,\n\n complex: Range<f64>,\n\n samples: (usize, usize),\n\n max_iter: usize,\n\n) -> impl Iterator<Item = (f64, f64, usize)> {\n\n let step = (\n\n (real.end - real.start) / samples.0 as f64,\n\n (complex.end - complex.start) / samples.1 as f64,\n\n );\n\n return (0..(samples.0 * samples.1)).map(move |k| {\n\n let c = (\n\n real.start + step.0 * (k % samples.0) as f64,\n\n complex.start + step.1 * (k / samples.0) as f64,\n\n );\n\n let mut z = (0.0, 0.0);\n\n let mut cnt = 0;\n\n while cnt < max_iter && z.0 * z.0 + z.1 * z.1 <= 1e10 {\n\n z = (z.0 * z.0 - z.1 * z.1 + c.0, 2.0 * z.0 * z.1 + c.1);\n\n cnt += 1;\n\n }\n\n return (c.0, c.1, cnt);\n\n });\n\n}", "file_path": "src/mandelbrot.rs", "rank": 28, "score": 28488.400005941843 }, { "content": "\n\n match resp {\n\n Ok(mut resp) => {\n\n // Serve source code as plain text to render them in the browser\n\n 
maybe_convert_mime_type_to_text(&req, &mut resp);\n\n Ok(resp)\n\n }\n\n Err(super::Error::Io(e)) => {\n\n // If the requested file was not found, then try doing a directory listing.\n\n if e.kind() == io::ErrorKind::NotFound {\n\n let list_dir_resp = maybe_list_dir(&config.root_dir, &path).await?;\n\n trace!(\"using directory list extension\");\n\n if let Some(f) = list_dir_resp {\n\n Ok(f)\n\n } else {\n\n Err(super::Error::from(e))\n\n }\n\n } else {\n\n Err(super::Error::from(e))\n\n }\n", "file_path": "www/src/ext.rs", "rank": 29, "score": 22068.452704890962 }, { "content": "/// response result from regular file serving, and have the opportunity to\n\n/// replace the response with their own response.\n\npub async fn serve(\n\n config: Config,\n\n req: Request<Body>,\n\n resp: super::Result<Response<Body>>,\n\n) -> super::Result<Response<Body>> {\n\n trace!(\"checking extensions\");\n\n\n\n if !config.use_extensions {\n\n return resp;\n\n }\n\n\n\n let path = super::local_path_for_request(&req.uri(), &config.root_dir)?;\n\n let file_ext = path.extension().and_then(OsStr::to_str).unwrap_or(\"\");\n\n\n\n if file_ext == \"md\" {\n\n trace!(\"using markdown extension\");\n\n return Ok(md_path_to_html(&path).await?);\n\n }\n", "file_path": "www/src/ext.rs", "rank": 30, "score": 22068.308568850578 }, { "content": "//! Developer extensions for basic-http-server\n\n//!\n\n//! This code is not as clean and well-documented as main.rs,\n\n//! but could still be a useful read.\n\n\n\nuse super::{Config, HtmlCfg};\n\nuse comrak::ComrakOptions;\n\nuse futures::{future, StreamExt};\n\nuse http::{Request, Response, StatusCode};\n\nuse hyper::{header, Body};\n\nuse log::{trace, warn};\n\nuse percent_encoding::{utf8_percent_encode, AsciiSet, CONTROLS};\n\nuse std::error::Error as StdError;\n\nuse std::ffi::OsStr;\n\nuse std::fmt::Write;\n\nuse std::io;\n\nuse std::path::{Path, PathBuf};\n\nuse tokio_fs::DirEntry;\n\n\n\n/// The entry point to extensions. 
Extensions are given both the request and the\n", "file_path": "www/src/ext.rs", "rank": 31, "score": 22064.051920273876 }, { "content": " let dents = dents.filter_map(|dent| match dent {\n\n Ok(dent) => future::ready(Some(dent)),\n\n Err(e) => {\n\n warn!(\"directory entry error: {}\", e);\n\n future::ready(None)\n\n }\n\n });\n\n let paths = dents.map(|dent| DirEntry::path(&dent));\n\n let mut paths: Vec<_> = paths.collect().await;\n\n paths.sort();\n\n let paths = Some(up_dir).into_iter().chain(paths);\n\n let paths: Vec<_> = paths.collect();\n\n let html = make_dir_list_body(&root_dir, &paths)?;\n\n let resp = super::html_str_to_response(html, StatusCode::OK)?;\n\n Ok(resp)\n\n}\n\n\n", "file_path": "www/src/ext.rs", "rank": 32, "score": 22062.833729608774 }, { "content": " \"LICENSE-MIT\",\n\n \"Makefile\",\n\n \"rust-toolchain\",\n\n];\n\n\n\n/// Try to treat the path as a directory and list the contents as HTML.\n\nasync fn maybe_list_dir(root_dir: &Path, path: &Path) -> Result<Option<Response<Body>>> {\n\n let meta = tokio::fs::metadata(path).await?;\n\n if meta.is_dir() {\n\n Ok(Some(list_dir(&root_dir, path).await?))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n\n/// List the contents of a directory as HTML.\n\nasync fn list_dir(root_dir: &Path, path: &Path) -> Result<Response<Body>> {\n\n let up_dir = path.join(\"..\");\n\n let path = path.to_owned();\n\n let dents = tokio::fs::read_dir(path).await?;\n", "file_path": "www/src/ext.rs", "rank": 33, "score": 22061.81848656918 }, { "content": " }\n\n r => r,\n\n }\n\n}\n\n\n\n/// Load a markdown file, render to HTML, and return the response.\n\nasync fn md_path_to_html(path: &Path) -> Result<Response<Body>> {\n\n // Render Markdown like GitHub\n\n let mut options = ComrakOptions::default();\n\n options.ext_autolink = true;\n\n options.ext_header_ids = None;\n\n options.ext_table = true;\n\n options.ext_strikethrough = true;\n\n options.ext_tagfilter = true;\n\n options.ext_tasklist = true;\n\n 
options.github_pre_lang = true;\n\n options.ext_header_ids = Some(\"user-content-\".to_string());\n\n\n\n let buf = tokio::fs::read(path).await?;\n\n let s = String::from_utf8(buf).map_err(|_| Error::MarkdownUtf8)?;\n", "file_path": "www/src/ext.rs", "rank": 34, "score": 22057.568390023305 }, { "content": " use Error::*;\n\n\n\n match self {\n\n Engine(e) => Some(e),\n\n Io(e) => Some(e),\n\n Http(e) => Some(e),\n\n MarkdownUtf8 => None,\n\n StripPrefixInDirList(e) => Some(e),\n\n WriteInDirList(e) => Some(e),\n\n }\n\n }\n\n}\n\n\n\nimpl From<super::Error> for Error {\n\n fn from(e: super::Error) -> Error {\n\n Error::Engine(Box::new(e))\n\n }\n\n}\n\n\n\nimpl From<http::Error> for Error {\n", "file_path": "www/src/ext.rs", "rank": 35, "score": 22057.106514042884 }, { "content": " let html = comrak::markdown_to_html(&s, &options);\n\n let cfg = HtmlCfg {\n\n title: String::new(),\n\n body: html,\n\n };\n\n let html = super::render_html(cfg)?;\n\n\n\n Response::builder()\n\n .status(StatusCode::OK)\n\n .header(header::CONTENT_LENGTH, html.len() as u64)\n\n .header(header::CONTENT_TYPE, mime::TEXT_HTML.as_ref())\n\n .body(Body::from(html))\n\n .map_err(Error::from)\n\n}\n\n\n", "file_path": "www/src/ext.rs", "rank": 36, "score": 22055.936861161335 }, { "content": "\n\n #[display(fmt = \"HTTP error\")]\n\n Http(http::Error),\n\n\n\n #[display(fmt = \"I/O error\")]\n\n Io(io::Error),\n\n\n\n // custom \"semantic\" error types\n\n #[display(fmt = \"markdown is not UTF-8\")]\n\n MarkdownUtf8,\n\n\n\n #[display(fmt = \"failed to strip prefix in directory listing\")]\n\n StripPrefixInDirList(std::path::StripPrefixError),\n\n\n\n #[display(fmt = \"formatting error while creating directory listing\")]\n\n WriteInDirList(std::fmt::Error),\n\n}\n\n\n\nimpl StdError for Error {\n\n fn source(&self) -> Option<&(dyn StdError + 'static)> {\n", "file_path": "www/src/ext.rs", "rank": 37, "score": 22055.81615283832 }, { "content": " if let Some(full_url) = full_url.to_str() {\n\n // 
%-encode filenames\n\n // https://url.spec.whatwg.org/#fragment-percent-encode-set\n\n const FRAGMENT_SET: &AsciiSet =\n\n &CONTROLS.add(b' ').add(b'\"').add(b'<').add(b'>').add(b'`');\n\n const PATH_SET: &AsciiSet =\n\n &FRAGMENT_SET.add(b'#').add(b'?').add(b'{').add(b'}');\n\n let full_url = utf8_percent_encode(full_url, &PATH_SET);\n\n\n\n // TODO: Make this a relative URL\n\n writeln!(buf, \"<div><a href='/{}'>{}</a></div>\", full_url, file_name)\n\n .map_err(Error::WriteInDirList)?;\n\n } else {\n\n warn!(\"non-unicode url: {}\", full_url.to_string_lossy());\n\n }\n\n } else {\n\n warn!(\"non-unicode path: {}\", file_name.to_string_lossy());\n\n }\n\n } else {\n\n warn!(\"path without file name: {}\", path.display());\n", "file_path": "www/src/ext.rs", "rank": 38, "score": 22055.335383229165 }, { "content": " }\n\n }\n\n\n\n writeln!(buf, \"</div>\").map_err(Error::WriteInDirList)?;\n\n\n\n let cfg = HtmlCfg {\n\n title: String::new(),\n\n body: buf,\n\n };\n\n\n\n Ok(super::render_html(cfg)?)\n\n}\n\n\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n#[derive(Debug, Display)]\n\npub enum Error {\n\n // blanket \"pass-through\" error types\n\n #[display(fmt = \"engine error\")]\n\n Engine(Box<super::Error>),\n", "file_path": "www/src/ext.rs", "rank": 39, "score": 22055.26047383382 }, { "content": " fn from(e: http::Error) -> Error {\n\n Error::Http(e)\n\n }\n\n}\n\n\n\nimpl From<io::Error> for Error {\n\n fn from(e: io::Error) -> Error {\n\n Error::Io(e)\n\n }\n\n}", "file_path": "www/src/ext.rs", "rank": 40, "score": 22049.24264660094 }, { "content": " HeaderValue::from_str(mime::TEXT_PLAIN.as_ref()).expect(\"mime is valid header\");\n\n resp.headers_mut().insert(header::CONTENT_TYPE, val);\n\n }\n\n }\n\n}\n\n\n\n#[rustfmt::skip]\n\nstatic TEXT_EXTENSIONS: &[&'static str] = &[\n\n \"c\",\n\n \"cc\",\n\n \"cpp\",\n\n \"csv\",\n\n \"fst\",\n\n \"h\",\n\n \"java\",\n\n \"md\",\n\n \"mk\",\n\n \"proto\",\n\n \"py\",\n\n \"rb\",\n", "file_path": 
"www/src/ext.rs", "rank": 41, "score": 22049.000457759594 }, { "content": " \"rs\",\n\n \"rst\",\n\n \"sh\",\n\n \"toml\",\n\n \"yml\",\n\n];\n\n\n\n#[rustfmt::skip]\n\nstatic TEXT_FILES: &[&'static str] = &[\n\n \".gitattributes\",\n\n \".gitignore\",\n\n \".mailmap\",\n\n \"AUTHORS\",\n\n \"CODE_OF_CONDUCT\",\n\n \"CONTRIBUTING\",\n\n \"COPYING\",\n\n \"COPYRIGHT\",\n\n \"Cargo.lock\",\n\n \"LICENSE\",\n\n \"LICENSE-APACHE\",\n", "file_path": "www/src/ext.rs", "rank": 42, "score": 22046.701147506566 }, { "content": " // `resp_value` is a `Response` object.\n\n assert!(resp_value.is_instance_of::<Response>());\n\n let resp: Response = resp_value.dyn_into().unwrap();\n\n\n\n // Convert this other `Promise` into a rust `Future`.\n\n let json = JsFuture::from(resp.json()?).await?;\n\n\n\n // Use serde to parse the JSON into a struct.\n\n let branch_info: Branch = json.into_serde().unwrap();\n\n\n\n // Send the `Branch` struct back to JS as an `Object`.\n\n Ok(JsValue::from_serde(&branch_info).unwrap())\n\n}\n\n\n\n#[wasm_bindgen]\n\npub async fn fetch_fastly(key: String) -> Result<String, String> {\n\n let mut opts = RequestInit::new();\n\n opts.method(\"GET\");\n\n opts.mode(RequestMode::Cors);\n\n\n", "file_path": "src/lib.rs", "rank": 57, "score": 14.434405119864259 }, { "content": " pub email: String,\n\n}\n\n\n\n#[wasm_bindgen]\n\npub async fn fetch_sample(repo: String) -> Result<JsValue, JsValue> {\n\n let mut opts = RequestInit::new();\n\n opts.method(\"GET\");\n\n opts.mode(RequestMode::Cors);\n\n\n\n let url = format!(\"https://api.github.com/repos/{}/branches/master\", repo);\n\n\n\n let request = Request::new_with_str_and_init(&url, &opts)?;\n\n\n\n request\n\n .headers()\n\n .set(\"Accept\", \"application/vnd.github.v3+json\")?;\n\n\n\n let window = web_sys::window().unwrap();\n\n let resp_value = JsFuture::from(window.fetch_with_request(&request)).await?;\n\n\n", "file_path": "src/lib.rs", "rank": 58, "score": 13.476386529065211 }, { "content": "\n\nuse 
web_sys::HtmlCanvasElement;\n\n\n\nmod func_plot;\n\nmod mandelbrot;\n\n\n\n#[global_allocator]\n\nstatic ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;\n\n\n\n/// Type alias for the result of a drawing function.\n\npub type DrawResult<T> = Result<T, Box<dyn std::error::Error>>;\n\n\n\n/// Type used on the JS side to convert screen coordinates to chart\n\n/// coordinates.\n\n#[wasm_bindgen]\n\npub struct Chart {\n\n convert: Box<dyn Fn((i32, i32)) -> Option<(f64, f64)>>,\n\n}\n\n\n\n/// Result of screen to chart coordinates conversion.\n", "file_path": "src/lib.rs", "rank": 59, "score": 12.382141036354568 }, { "content": " let url = \"https://rt.fastly.com/v1/channel/Sm6cqWFUwFk8eNwHL9UTG/ts/0\";\n\n\n\n let request = Request::new_with_str_and_init(&url, &opts).unwrap();\n\n\n\n request\n\n .headers()\n\n .set(\"Fastly-Key\", &key).unwrap();\n\n\n\n let window = web_sys::window().unwrap();\n\n let resp_value = JsFuture::from(window.fetch_with_request(&request)).await.unwrap();\n\n\n\n // `resp_value` is a `Response` object.\n\n assert!(resp_value.is_instance_of::<Response>());\n\n let resp: Response = resp_value.dyn_into().unwrap(); \n\n \n\n let txt_promise = resp.text().unwrap();\n\n\n\n // Convert this other `Promise` into a rust `Future`.\n\n let jsvalue = JsFuture::from(txt_promise).await.unwrap();\n\n\n", "file_path": "src/lib.rs", "rank": 60, "score": 10.346332169663201 }, { "content": "#[wasm_bindgen]\n\npub struct Point {\n\n pub x: f64,\n\n pub y: f64,\n\n}\n\n\n\n#[wasm_bindgen]\n\nimpl Chart {\n\n /// Draw provided power function on the canvas element using it's id.\n\n /// Return `Chart` struct suitable for coordinate conversion.\n\n pub fn power(canvas_id: &str, power: i32) -> Result<Chart, JsValue> {\n\n let map_coord = func_plot::draw(canvas_id, power).map_err(|err| err.to_string())?;\n\n Ok(Chart {\n\n convert: Box::new(move |coord| map_coord(coord).map(|(x, y)| (x.into(), y.into()))),\n\n })\n\n }\n\n\n\n /// Draw Mandelbrot set on the 
provided canvas element.\n\n /// Return `Chart` struct suitable for coordinate conversion.\n\n pub fn mandelbrot(canvas: HtmlCanvasElement) -> Result<Chart, JsValue> {\n", "file_path": "src/lib.rs", "rank": 61, "score": 8.888863885860538 }, { "content": "# Introduction\n\nThis project shows how to start a plain rust wasm application that could be easily deployed to any web server, no npm is required. \n\nVerified on Ubuntu and MacOS\n\n\n\n# Enviroment Setup\n\n```\n\n# Install build essential packages for rust building on Ubuntu\n\n# only needed for Ubuntu\n\nsudo apt install build-essential\n\n\n\n# Install rust toolchain\n\ncurl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh\n\n\n\n# Install wasm-pack \n\ncurl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh\n\n\n\n# Install basic-http-server for simple test\n\ncargo install basic-http-server\n\n\n\n```\n\n\n\n# Build Project\n\n```\n\n# build rust wasm project\n\n# the output will be in the pkg folder\n\ncd rust_wasm_template\n\nwasm-pack build --target web\n\n```\n\n\n\n# Test Rust Wasm Application\n\nRun basic-http-server to server index.html file in www folder. \n\nindex.html imports ../pkg/first_wasm.js, which is web-pack build output. 
\n\n```\n\ncd rust_wasm_template\n\nbasic-http-server --addr 0.0.0.0:4000\n\n```\n\nIn browser, access html file in the www folder\n\nhttp://localhost:4000/www/index.html\n\n\n\n\n\n```\n\nhttps://rustwasm.github.io/wasm-bindgen/examples/fetch.html\n\nhttps://github.com/rustwasm/wasm-bindgen/tree/master/examples/fetch\n\n\n\n```\n\n\n", "file_path": "README.md", "rank": 62, "score": 7.780459094043618 }, { "content": " // Use serde to parse the JSON into a struct.\n\n let rt_data = jsvalue.as_string().unwrap();\n\n\n\n let rt: Value = serde_json::from_str(&rt_data).unwrap();\n\n\n\n let test_data = rt[\"Data\"][0][\"datacenter\"][\"CDG\"][\"requests\"].as_u64().unwrap();\n\n\n\n let s: String = test_data.to_string();\n\n\n\n // Send the `Branch` struct back to JS as an `Object`.\n\n Ok(s)\n\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n// https://github.com/38/plotters/blob/master/examples/wasm-demo/src/lib.rs\n", "file_path": "src/lib.rs", "rank": 63, "score": 7.571435936628445 }, { "content": "use crate::DrawResult;\n\nuse plotters::prelude::*;\n\n\n\n/// Draw power function f(x) = x^power.\n", "file_path": "src/func_plot.rs", "rank": 64, "score": 7.469466278305813 }, { "content": " let map_coord = mandelbrot::draw(canvas).map_err(|err| err.to_string())?;\n\n Ok(Chart {\n\n convert: Box::new(map_coord),\n\n })\n\n }\n\n\n\n /// This function can be used to convert screen coordinates to\n\n /// chart coordinates.\n\n pub fn coord(&self, x: i32, y: i32) -> Option<Point> {\n\n (self.convert)((x, y)).map(|(x, y)| Point { x, y })\n\n }\n\n}", "file_path": "src/lib.rs", "rank": 65, "score": 7.0390436180183755 }, { "content": "use crate::DrawResult;\n\nuse plotters::prelude::*;\n\nuse std::ops::Range;\n\nuse web_sys::HtmlCanvasElement;\n\n\n\n/// Draw Mandelbrot set\n", "file_path": "src/mandelbrot.rs", "rank": 66, "score": 6.880418219251698 }, { "content": "use wasm_bindgen::prelude::*;\n\n\n\n// Declare an JS function\n\n#[wasm_bindgen]\n\nextern {\n\n pub fn alert(s: 
&str);\n\n}\n\n\n\n// Export a function calling a JS function\n\n#[wasm_bindgen]\n", "file_path": "src/lib.rs", "rank": 67, "score": 5.745264374439859 }, { "content": "pub struct Branch {\n\n pub name: String,\n\n pub commit: Commit,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct Commit {\n\n pub sha: String,\n\n pub commit: CommitDetails,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct CommitDetails {\n\n pub author: Signature,\n\n pub committer: Signature,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct Signature {\n\n pub name: String,\n", "file_path": "src/lib.rs", "rank": 68, "score": 5.478562438580817 }, { "content": " let range = plotting_area.get_pixel_range();\n\n let (pw, ph) = (range.0.end - range.0.start, range.1.end - range.1.start);\n\n let (xr, yr) = (chart.x_range(), chart.y_range());\n\n\n\n for (x, y, c) in mandelbrot_set(xr, yr, (pw as usize, ph as usize), 100) {\n\n if c != 100 {\n\n plotting_area.draw_pixel((x, y), &HSLColor(c as f64 / 100.0, 1.0, 0.5))?;\n\n } else {\n\n plotting_area.draw_pixel((x, y), &BLACK)?;\n\n }\n\n }\n\n\n\n root.present()?;\n\n return Ok(Box::new(chart.into_coord_trans()));\n\n}\n\n\n", "file_path": "src/mandelbrot.rs", "rank": 69, "score": 2.4735438234742415 } ]
Rust
src/cooking_book/recipe.rs
totoMauz/Cooking-Book
d80b051d38e68430ec1d391d6815c44fada08ce1
use crate::cooking_book::ingredient::Ingredient; use crate::file_access::persistency; use std::collections::HashMap; use std::collections::HashSet; #[derive(PartialEq, Eq)] pub struct Recipe { pub name: String, pub ingredients: HashMap<Ingredient, (u16, String)>, pub tags: HashSet<String>, } impl Recipe { pub fn new_by_line(line: &str) -> Recipe { let mut values = line.split(';'); let name = String::from(values.next().unwrap()); let mut all_ingredients = persistency::load_ingredients(); let mut ingredients: HashMap<Ingredient, (u16, String)> = HashMap::new(); let mut tags: HashSet<String> = HashSet::new(); for s in values { if s.starts_with('#') { tags.insert(s.to_string()); continue; } let mut ingre_amount = s.split(','); let name = ingre_amount.next().unwrap().to_string(); let amount = match ingre_amount.next() { Some(amount) => amount, None => "0", }; let amount = match amount.parse::<u16>() { Ok(num) => num, Err(_) => 0, }; let unit = match ingre_amount.next() { Some(unit) => unit, None => "", }; if !all_ingredients.contains_key(&name) { Ingredient::persist_new_ingredient(&name, &mut all_ingredients) .unwrap_or_else(|e| eprintln!("{}", e)); } ingredients.insert( all_ingredients.get(&name).unwrap().clone(), (amount, String::from(unit)), ); } return Recipe { name, ingredients, tags, }; } fn to_json(&self) -> String { let mut json: String = String::new(); json.push('{'); json.push_str("\"name\": \""); json.push_str(&self.name); json.push_str("\", "); json.push_str("\"ingredients\": ["); for (i, (a, u)) in &self.ingredients { json.push_str(Recipe::ingredient_to_json(i, a, u).as_str()); } json.push_str("]"); json.push('}'); return json; } fn ingredient_to_json(i: &Ingredient, a: &u16, u: &String) -> String { let mut json: String = String::new(); json.push('{'); json.push_str("\"name\": \""); json.push_str(&i.name); json.push_str("\", "); json.push_str("\"amount\": "); json.push_str(&format!("{}", &a)); json.push_str(", "); json.push_str("\"unit\": \""); 
json.push_str(&u); json.push_str("\""); json.push('}'); return json; } fn get_recipes_by_name<'a>( recipes: &'a HashMap<String, Recipe>, name: &str, ) -> Vec<&'a Recipe> { let mut recipes_by_name: Vec<&'a Recipe> = Vec::new(); for (_n, recipe) in recipes.iter().filter(|(k, _v)| k.contains(name)) { recipes_by_name.push(recipe); } return recipes_by_name; } fn get_recipes_by_ingredients<'a>( recipes: &'a HashMap<String, Recipe>, ingredient_included: &Vec<String>, ingredient_excluding: &Vec<String>, ) -> Vec<&'a Recipe> { let mut recipes_by_ingredient: Vec<&Recipe> = Vec::new(); for (_n, recipe) in recipes { let mut is_included = ingredient_included.is_empty(); let mut is_excluded = false; for (i, (_a, _u)) in &recipe.ingredients { if is_included == false && ingredient_included.contains(&i.name) { is_included = true; } if ingredient_excluding.contains(&i.name) { is_excluded = true; break; } } if is_included == false || is_excluded { continue; } recipes_by_ingredient.push(recipe); } return recipes_by_ingredient; } fn get_recipes_by_tags<'a>( recipes: &'a HashMap<String, Recipe>, tags: &Vec<String>, ) -> Vec<&'a Recipe> { let mut recipes_by_tag: Vec<&Recipe> = Vec::new(); for (_n, recipe) in recipes { for tag in &recipe.tags { if tags.contains(&tag) { recipes_by_tag.push(recipe); break; } } } return recipes_by_tag; } fn split_including_and_excluding(input: Vec<&str>) -> (Vec<String>, Vec<String>) { let mut including: Vec<String> = Vec::new(); let mut excluding: Vec<String> = Vec::new(); for s in input { if s.starts_with('!') { excluding.push(s.chars().skip(1).collect()); } else { including.push(s.to_string()); } } return (including, excluding); } fn unify_tags(input: &str) -> Vec<String> { let mut tags: Vec<String> = Vec::new(); let inputs = input.trim().split(','); for i in inputs { if i.starts_with('#') { tags.push(i.to_string()); } else { let mut tag = String::with_capacity(i.len() + 1); tag.push('#'); tag.push_str(i); tags.push(tag); } } return tags; } } #[cfg(test)] 
mod tests { use super::Recipe; use crate::cooking_book::group::Group; use crate::cooking_book::ingredient::Ingredient; use crate::cooking_book::store::Store; use std::collections::HashMap; use std::collections::HashSet; #[test] fn test_json() { let mut ingredients1: HashMap<Ingredient, (u16, String)> = HashMap::with_capacity(1); let in1 = Ingredient::new_by_name("Ei".to_string()); ingredients1.insert(in1, (1, "Stück".to_string())); let mut tags1: HashSet<String> = HashSet::with_capacity(1); tags1.insert("Frühstück".to_string()); let waffels = Recipe { name: "Waffeln".to_string(), ingredients: ingredients1, tags: tags1, }; assert_eq!(waffels.to_json(), "{\"name\": \"Waffeln\", \"ingredients\": [{\"name\": \"Ei\", \"amount\": 1, \"unit\": \"Stück\"}]}") } #[test] fn test_split_including_and_excluding() { let input: Vec<&str> = vec!["a", "!b"]; let (included, excluded) = Recipe::split_including_and_excluding(input); assert!(included.len() == 1); assert!(included.contains(&"a".to_string())); assert!(!included.contains(&"b".to_string())); assert!(excluded.len() == 1); assert!(!excluded.contains(&"a".to_string())); assert!(excluded.contains(&"b".to_string())); } #[test] fn test_unify_tag() { let tags = "a,#b"; let tags = Recipe::unify_tags(tags); let tag_a = "#a".to_string(); assert!(tags.contains(&tag_a)); let tag_b = "#b".to_string(); assert!(tags.contains(&tag_b)); } fn get_mocks() -> HashMap<String, Recipe> { let mut recipes: HashMap<String, Recipe> = HashMap::with_capacity(2); let name1 = "R1".to_string(); let mut ingredients1: HashMap<Ingredient, (u16, String)> = HashMap::new(); let in1 = Ingredient { name: "A".to_string(), group: Group::Other, preferred_store: Store::Any, }; ingredients1.insert(in1, (1, "unit".to_string())); let in2 = Ingredient { name: "B".to_string(), group: Group::Other, preferred_store: Store::Any, }; ingredients1.insert(in2, (1, "unit".to_string())); let mut tags1: HashSet<String> = HashSet::new(); tags1.insert("1".to_string()); 
tags1.insert("3".to_string()); let r1 = Recipe { name: name1, ingredients: ingredients1, tags: tags1, }; recipes.insert("R1".to_string(), r1); let name2 = "R2".to_string(); let mut ingredients2: HashMap<Ingredient, (u16, String)> = HashMap::new(); let in12 = Ingredient { name: "A".to_string(), group: Group::Other, preferred_store: Store::Any, }; ingredients2.insert(in12, (1, "unit".to_string())); let in22 = Ingredient { name: "C".to_string(), group: Group::Other, preferred_store: Store::Any, }; ingredients2.insert(in22, (1, "unit".to_string())); let mut tags2: HashSet<String> = HashSet::new(); tags2.insert("2".to_string()); tags2.insert("3".to_string()); let r2 = Recipe { name: name2, ingredients: ingredients2, tags: tags2, }; recipes.insert("R2".to_string(), r2); return recipes; } #[test] fn test_by_ingredient_all() { let recipes = self::get_mocks(); let including: Vec<String> = vec!["A".to_string()]; let excluding: Vec<String> = Vec::with_capacity(0); let filtered = Recipe::get_recipes_by_ingredients(&recipes, &including, &excluding); assert!(filtered.contains(&recipes.get("R1").unwrap())); assert!(filtered.contains(&recipes.get("R2").unwrap())); } #[test] fn test_by_ingredient_with_exclude() { let recipes = self::get_mocks(); let including: Vec<String> = Vec::with_capacity(0); let excluding: Vec<String> = vec!["A".to_string()]; let filtered: Vec<&Recipe> = Recipe::get_recipes_by_ingredients(&recipes, &including, &excluding); assert!(!filtered.contains(&recipes.get("R1").unwrap())); assert!(!filtered.contains(&recipes.get("R2").unwrap())); } #[test] fn test_by_ingredient_with_exclude_and_include() { let recipes = self::get_mocks(); let including: Vec<String> = vec!["A".to_string()]; let excluding: Vec<String> = vec!["C".to_string()]; let filtered = Recipe::get_recipes_by_ingredients(&recipes, &including, &excluding); assert!(filtered.contains(&recipes.get("R1").unwrap())); assert!(!filtered.contains(&recipes.get("R2").unwrap())); } #[test] fn 
test_by_ingredient_with_exclude_and_inclu() { let recipes = self::get_mocks(); let including: Vec<String> = vec!["A".to_string()]; let excluding: Vec<String> = vec!["B".to_string(), "C".to_string()]; let filtered = Recipe::get_recipes_by_ingredients(&recipes, &including, &excluding); assert!(!filtered.contains(&recipes.get("R1").unwrap())); assert!(!filtered.contains(&recipes.get("R2").unwrap())); } #[test] fn test_by_name() { let recipes = self::get_mocks(); let recipes_r = Recipe::get_recipes_by_name(&recipes, "R"); assert!(recipes_r.contains(&recipes.get("R1").unwrap())); assert!(recipes_r.contains(&recipes.get("R2").unwrap())); let recipes_1 = Recipe::get_recipes_by_name(&recipes, "1"); assert!(recipes_1.contains(&recipes.get("R1").unwrap())); assert!(!recipes_1.contains(&recipes.get("R2").unwrap())); let recipes_2 = Recipe::get_recipes_by_name(&recipes, "2"); assert!(!recipes_2.contains(&recipes.get("R1").unwrap())); assert!(recipes_2.contains(&recipes.get("R2").unwrap())); } #[test] fn test_by_tag() { let recipes = self::get_mocks(); let mut tags: Vec<String> = vec!["1".to_string()]; let recipes_r = Recipe::get_recipes_by_tags(&recipes, &tags); assert!(recipes_r.contains(&recipes.get("R1").unwrap())); assert!(!recipes_r.contains(&recipes.get("R2").unwrap())); tags.clear(); let tag = "2".to_string(); tags.push(tag); let recipes_1 = Recipe::get_recipes_by_tags(&recipes, &tags); assert!(!recipes_1.contains(&recipes.get("R1").unwrap())); assert!(recipes_1.contains(&recipes.get("R2").unwrap())); tags.clear(); let tag = "3".to_string(); tags.push(tag); let recipes_2 = Recipe::get_recipes_by_tags(&recipes, &tags); assert!(recipes_2.contains(&recipes.get("R1").unwrap())); assert!(recipes_2.contains(&recipes.get("R2").unwrap())); } }
use crate::cooking_book::ingredient::Ingredient; use crate::file_access::persistency; use std::collections::HashMap; use std::collections::HashSet; #[derive(PartialEq, Eq)] pub struct Recipe { pub name: String, pub ingredients: HashMap<Ingredient, (u16, String)>, pub tags: HashSet<String>, } impl Recipe { pub fn new_by_line(line: &str) -> Recipe { let mut values = line.split(';'); let name = String::from(values.next().unwrap()); let mut all_ingredients = persistency::load_ingredients(); let mut ingredients: HashMap<Ingredient, (u16, String)> = HashMap::new(); let mut tags: HashSet<String> = HashSet::new(); for s in values { if s.starts_with('#') { tags.insert(s.to_string()); continue; } let mut ingre_amount = s.split(','); let name = ingre_amount.next().unwrap().to_string(); let amount = match ingre_amount.next() { Some(amount) => amount, None => "0", }; let amount = match amount.parse::<u16>() { Ok(num) => num, Err(_) => 0, }; let unit = match ingre_amount.next() { Some(unit) => unit, None => "", }; if !all_ingredients.contains_key(&name) { Ingredient::persist_new_ingredient(&name, &mut all_ingredients) .unwrap_or_else(|e| eprintln!("{}", e)); } ingredients.insert( all_ingredients.get(&name).unwrap().clone(), (amount, String::from(unit)), ); } return Recipe { name, ingredients, tags, }; } fn to_json(&self) -> String { let mut json: String = String::new(); json.push('{'); json.push_str("\"name\": \""); json.push_str(&self.name); json.push_str("\", "); json.push_str("\"ingredients\": ["); for (i, (a, u)) in &self.ingredients { json.push_str(Recipe::ingredient_to_json(i, a, u).as_str()); } json.push_str("]"); json.push('}'); return json; } fn ingredient_to_json(i: &Ingredient, a: &u16, u: &String) -> String { let mut json: String = String::new(); json.push('{'); json.push_str("\"name\": \""); json.push_str(&i.name); json.push_str("\", "); json.push_str("\"amount\": "); json.push_str(&format!("{}", &a)); json.push_str(", "); json.push_str("\"unit\": \""); 
json.push_str(&u); json.push_str("\""); json.push('}'); return json; } fn get_recipes_by_name<'a>( recipes: &'a HashMap<String, Recipe>, name: &str, ) -> Vec<&'a Recipe> { let mut recipes_by_name: Vec<&'a Recipe> = Vec::new(); for (_n, recipe) in recipes.iter().filter(|(k, _v)| k.contains(name)) { recipes_by_name.push(recipe); } return recipes_by_name; } fn get_recipes_by_ingredients<'a>( recipes: &'a HashMap<String, Recipe>, ingredient_included: &Vec<String>, ingredient_excluding: &Vec<String>, ) -> Vec<&'a Recipe> { let mut recipes_by_ingredient: Vec<&Recipe> = Vec::new(); for (_n, recipe) in recipes { let mut is_included = ingredient_included.is_empty(); let mut is_excluded = false; for (i, (_a, _u)) in &recipe.ingredients { if is_included == false && ingredient_included.contains(&i.name) { is_included = true; } if ingredient_excluding.contains(&i.name) { is_excluded = true; break; } } if is_included == false || is_excluded { continue; } recipes_by_ingredient.push(recipe); } return recipes_by_ingredient; } fn get_recipes_by_tags<'a>( recipes: &'a HashMap<String, Recipe>, tags: &Vec<String>, ) -> Vec<&'a Recipe> { let mut recipes_by_tag: Vec<&Recipe> = Vec::new(); for (_n, recipe) in recipes { for tag in &recipe.tags { if tags.contains(&tag) { recipes_by_tag.push(recipe); break; } } } return recipes_by_tag; } fn split_including_and_excluding(input: Vec<&str>) -> (Vec<String>, Vec<String>) { let mut including: Vec<String> = Vec::new(); let mut excluding: Vec<String> = Vec::new(); for s in input { if s.starts_with('!') { excluding.push(s.chars().skip(1).collect()); } else { including.push(s.to_string()); } } return (including, excluding); } fn unify_tags(input: &str) -> Vec<String> { let mut tags: Vec<String> = Vec::new(); let inputs = input.trim().split(','); for i in inputs { if i.starts_with('#') { tags.push(i.to_string()); } else { let mut tag = String::with_capacity(i.len() + 1); tag.push('#'); tag.push_str(i); tags.push(tag); } } return tags; } } #[cfg(test)] 
mod tests { use super::Recipe; use crate::cooking_book::group::Group; use crate::cooking_book::ingredient::Ingredient; use crate::cooking_book::store::Store; use std::collections::HashMap; use std::collections::HashSet; #[test] fn test_json() {
.insert("Frühstück".to_string()); let waffels = Recipe { name: "Waffeln".to_string(), ingredients: ingredients1, tags: tags1, }; assert_eq!(waffels.to_json(), "{\"name\": \"Waffeln\", \"ingredients\": [{\"name\": \"Ei\", \"amount\": 1, \"unit\": \"Stück\"}]}") } #[test] fn test_split_including_and_excluding() { let input: Vec<&str> = vec!["a", "!b"]; let (included, excluded) = Recipe::split_including_and_excluding(input); assert!(included.len() == 1); assert!(included.contains(&"a".to_string())); assert!(!included.contains(&"b".to_string())); assert!(excluded.len() == 1); assert!(!excluded.contains(&"a".to_string())); assert!(excluded.contains(&"b".to_string())); } #[test] fn test_unify_tag() { let tags = "a,#b"; let tags = Recipe::unify_tags(tags); let tag_a = "#a".to_string(); assert!(tags.contains(&tag_a)); let tag_b = "#b".to_string(); assert!(tags.contains(&tag_b)); } fn get_mocks() -> HashMap<String, Recipe> { let mut recipes: HashMap<String, Recipe> = HashMap::with_capacity(2); let name1 = "R1".to_string(); let mut ingredients1: HashMap<Ingredient, (u16, String)> = HashMap::new(); let in1 = Ingredient { name: "A".to_string(), group: Group::Other, preferred_store: Store::Any, }; ingredients1.insert(in1, (1, "unit".to_string())); let in2 = Ingredient { name: "B".to_string(), group: Group::Other, preferred_store: Store::Any, }; ingredients1.insert(in2, (1, "unit".to_string())); let mut tags1: HashSet<String> = HashSet::new(); tags1.insert("1".to_string()); tags1.insert("3".to_string()); let r1 = Recipe { name: name1, ingredients: ingredients1, tags: tags1, }; recipes.insert("R1".to_string(), r1); let name2 = "R2".to_string(); let mut ingredients2: HashMap<Ingredient, (u16, String)> = HashMap::new(); let in12 = Ingredient { name: "A".to_string(), group: Group::Other, preferred_store: Store::Any, }; ingredients2.insert(in12, (1, "unit".to_string())); let in22 = Ingredient { name: "C".to_string(), group: Group::Other, preferred_store: Store::Any, }; 
ingredients2.insert(in22, (1, "unit".to_string())); let mut tags2: HashSet<String> = HashSet::new(); tags2.insert("2".to_string()); tags2.insert("3".to_string()); let r2 = Recipe { name: name2, ingredients: ingredients2, tags: tags2, }; recipes.insert("R2".to_string(), r2); return recipes; } #[test] fn test_by_ingredient_all() { let recipes = self::get_mocks(); let including: Vec<String> = vec!["A".to_string()]; let excluding: Vec<String> = Vec::with_capacity(0); let filtered = Recipe::get_recipes_by_ingredients(&recipes, &including, &excluding); assert!(filtered.contains(&recipes.get("R1").unwrap())); assert!(filtered.contains(&recipes.get("R2").unwrap())); } #[test] fn test_by_ingredient_with_exclude() { let recipes = self::get_mocks(); let including: Vec<String> = Vec::with_capacity(0); let excluding: Vec<String> = vec!["A".to_string()]; let filtered: Vec<&Recipe> = Recipe::get_recipes_by_ingredients(&recipes, &including, &excluding); assert!(!filtered.contains(&recipes.get("R1").unwrap())); assert!(!filtered.contains(&recipes.get("R2").unwrap())); } #[test] fn test_by_ingredient_with_exclude_and_include() { let recipes = self::get_mocks(); let including: Vec<String> = vec!["A".to_string()]; let excluding: Vec<String> = vec!["C".to_string()]; let filtered = Recipe::get_recipes_by_ingredients(&recipes, &including, &excluding); assert!(filtered.contains(&recipes.get("R1").unwrap())); assert!(!filtered.contains(&recipes.get("R2").unwrap())); } #[test] fn test_by_ingredient_with_exclude_and_inclu() { let recipes = self::get_mocks(); let including: Vec<String> = vec!["A".to_string()]; let excluding: Vec<String> = vec!["B".to_string(), "C".to_string()]; let filtered = Recipe::get_recipes_by_ingredients(&recipes, &including, &excluding); assert!(!filtered.contains(&recipes.get("R1").unwrap())); assert!(!filtered.contains(&recipes.get("R2").unwrap())); } #[test] fn test_by_name() { let recipes = self::get_mocks(); let recipes_r = Recipe::get_recipes_by_name(&recipes, 
"R"); assert!(recipes_r.contains(&recipes.get("R1").unwrap())); assert!(recipes_r.contains(&recipes.get("R2").unwrap())); let recipes_1 = Recipe::get_recipes_by_name(&recipes, "1"); assert!(recipes_1.contains(&recipes.get("R1").unwrap())); assert!(!recipes_1.contains(&recipes.get("R2").unwrap())); let recipes_2 = Recipe::get_recipes_by_name(&recipes, "2"); assert!(!recipes_2.contains(&recipes.get("R1").unwrap())); assert!(recipes_2.contains(&recipes.get("R2").unwrap())); } #[test] fn test_by_tag() { let recipes = self::get_mocks(); let mut tags: Vec<String> = vec!["1".to_string()]; let recipes_r = Recipe::get_recipes_by_tags(&recipes, &tags); assert!(recipes_r.contains(&recipes.get("R1").unwrap())); assert!(!recipes_r.contains(&recipes.get("R2").unwrap())); tags.clear(); let tag = "2".to_string(); tags.push(tag); let recipes_1 = Recipe::get_recipes_by_tags(&recipes, &tags); assert!(!recipes_1.contains(&recipes.get("R1").unwrap())); assert!(recipes_1.contains(&recipes.get("R2").unwrap())); tags.clear(); let tag = "3".to_string(); tags.push(tag); let recipes_2 = Recipe::get_recipes_by_tags(&recipes, &tags); assert!(recipes_2.contains(&recipes.get("R1").unwrap())); assert!(recipes_2.contains(&recipes.get("R2").unwrap())); } }
let mut ingredients1: HashMap<Ingredient, (u16, String)> = HashMap::with_capacity(1); let in1 = Ingredient::new_by_name("Ei".to_string()); ingredients1.insert(in1, (1, "Stück".to_string())); let mut tags1: HashSet<String> = HashSet::with_capacity(1); tags1
function_block-random_span
[ { "content": "#[delete(\"/ingredient/<name>\", format = \"application/json\")]\n\nfn delete_ingredient(name: String) -> String {\n\n let ingredients = persistency::load_ingredients();\n\n\n\n let mut shopping_list = persistency::load_shopping_list();\n\n if ingredients.contains_key(&name) {\n\n let ingredient = ingredients.get(&name).unwrap();\n\n let _ = shopping_list.remove_and_save(&ingredient);\n\n }\n\n return shopping_list.to_json();\n\n}\n\n\n\n/// Returns the shopping list.\n", "file_path": "src/main.rs", "rank": 0, "score": 125178.32364815839 }, { "content": "#[put(\"/ingredient/<name>\", format = \"application/json\")]\n\nfn put_new_ingredient(name: String) -> String {\n\n let mut ingredients = persistency::load_ingredients();\n\n\n\n if !ingredients.contains_key(&name) {\n\n let _ = Ingredient::persist_new_ingredient(&name, &mut ingredients);\n\n }\n\n let ingredient = ingredients.get(&name).unwrap();\n\n let mut shopping_list = persistency::load_shopping_list();\n\n let _ = shopping_list.add_and_save(&ingredient);\n\n\n\n return shopping_list.to_json();\n\n}\n\n\n\n/// Upserts an ingredient.\n\n///\n\n/// #Arguments\n\n///\n\n/// * `name` - The name of the ingredient\n\n/// * `group` - The group of the ingredient\n\n/// * `store` - The store of the ingredient\n", "file_path": "src/main.rs", "rank": 1, "score": 121917.42756630387 }, { "content": "/// Writes all ingredients.\n\n///\n\n/// #Arguments\n\n///\n\n/// * `all_ingredients` The ingredients to write\n\npub fn write_all_ingredients(all_ingredients: &HashMap<String, Ingredient>) -> Result<(), String> {\n\n let file = OpenOptions::new()\n\n .write(true)\n\n .truncate(true)\n\n .open(paths::INGREDIENTS);\n\n\n\n if file.is_ok() {\n\n let mut file = file.unwrap();\n\n for (_k, ingredient) in all_ingredients {\n\n write_ingredient(&ingredient, &mut file).unwrap_or_else(|e| eprintln!(\"{}\", e));\n\n }\n\n return Ok(());\n\n }\n\n return Err(format!(\"Couldn't write to file: {}\", 
file.unwrap_err()));\n\n}\n\n\n", "file_path": "src/file_access/persistency.rs", "rank": 2, "score": 105987.11242571595 }, { "content": "/// Returns all recipes.\n\npub fn load_recipes() -> HashMap<String, Recipe> {\n\n let mut all_recipes: HashMap<String, Recipe> = HashMap::new();\n\n\n\n let content = load_file(paths::RECIPES);\n\n if content.is_none() {\n\n return all_recipes;\n\n }\n\n\n\n for line in content.unwrap().lines() {\n\n if line.starts_with(\"#\") {\n\n continue;\n\n }\n\n\n\n let name = line.split(';').next().unwrap();\n\n\n\n all_recipes.insert(String::from(name), Recipe::new_by_line(line));\n\n }\n\n return all_recipes;\n\n}\n\n\n", "file_path": "src/file_access/persistency.rs", "rank": 3, "score": 104153.89937944227 }, { "content": "/// Returns all ingredients.\n\npub fn load_ingredients() -> HashMap<String, Ingredient> {\n\n let mut all_ingredients: HashMap<String, Ingredient> = HashMap::new();\n\n\n\n let content = load_file(paths::INGREDIENTS);\n\n if content.is_none() {\n\n return all_ingredients;\n\n }\n\n\n\n for line in content.unwrap().lines() {\n\n if line.starts_with(\"#\") {\n\n continue;\n\n }\n\n\n\n let name = line.split(';').next().unwrap();\n\n all_ingredients.insert(name.to_string(), Ingredient::new_by_line(line));\n\n }\n\n return all_ingredients;\n\n}\n\n\n", "file_path": "src/file_access/persistency.rs", "rank": 4, "score": 103878.64282831407 }, { "content": "/// Write a single ingredient.\n\n///\n\n/// #Arguments\n\n///\n\n/// * `new_ingredient` The ingredient to append.\n\npub fn write_single_ingredient(new_ingredient: &Ingredient) -> Result<(), String> {\n\n let file = OpenOptions::new().append(true).open(paths::INGREDIENTS);\n\n\n\n if file.is_ok() {\n\n return write_ingredient(&new_ingredient, &mut file.unwrap());\n\n }\n\n return Err(format!(\"Couldn't open file: {}\", file.unwrap_err()));\n\n}\n\n\n", "file_path": "src/file_access/persistency.rs", "rank": 5, "score": 100397.0611104056 }, { "content": "fn 
load_file(file_name: &str) -> Option<String> {\n\n if Path::new(file_name).is_file() {\n\n return match fs::read_to_string(file_name) {\n\n Ok(c) => Some(c),\n\n Err(_) => None,\n\n };\n\n }\n\n let file = OpenOptions::new()\n\n .write(true)\n\n .create_new(true)\n\n .open(file_name);\n\n\n\n if file.is_err() {\n\n return None;\n\n }\n\n\n\n let mut contents = String::new();\n\n return match file.unwrap().read_to_string(&mut contents) {\n\n Ok(_) => Some(contents),\n\n Err(_) => None,\n\n };\n\n}\n\n\n", "file_path": "src/file_access/persistency.rs", "rank": 6, "score": 98581.72126160123 }, { "content": "fn write_ingredient(ingredient: &Ingredient, file: &mut File) -> Result<(), String> {\n\n if let Err(e) = writeln!(\n\n file,\n\n \"{};{};{}\",\n\n ingredient.name, ingredient.group as i8, ingredient.preferred_store as i8\n\n ) {\n\n return Err(format!(\"Couldn't write to file: {}\", e));\n\n }\n\n return Ok(());\n\n}\n", "file_path": "src/file_access/persistency.rs", "rank": 7, "score": 96701.86697667524 }, { "content": "#[get(\"/ingredient\", format = \"application/json\")]\n\nfn get_ingredient() -> String {\n\n let ingredients = persistency::load_ingredients();\n\n return Ingredient::all_to_json(&ingredients);\n\n}\n\n\n\n/// Adds an ingredient to the shopping list. 
If the ingredients doesn't exist it will be created.\n\n/// Returns the updated shopping list.\n\n///\n\n/// #Arguments\n\n///\n\n/// * `name` - The name of the ingredient to add\n", "file_path": "src/main.rs", "rank": 8, "score": 92344.2403795706 }, { "content": "#[put(\"/ingredient/<name>/<group>/<store>\", format = \"application/json\")]\n\nfn put_update_ingredient(name: String, group: usize, store: usize) {\n\n let mut ingredients = persistency::load_ingredients();\n\n\n\n if ingredients.contains_key(&name) {\n\n let ingredient = ingredients.get_mut(&name).unwrap();\n\n ingredient.set_group(group);\n\n ingredient.set_store(store);\n\n } else {\n\n let new_ingredient = Ingredient {\n\n name: name.to_string(),\n\n group: cooking_book::group::Group::lookup_group_number(group),\n\n preferred_store: cooking_book::store::Store::lookup_store_number(store),\n\n };\n\n ingredients.insert(name, new_ingredient);\n\n }\n\n\n\n let _ = persistency::write_all_ingredients(&ingredients);\n\n}\n\n\n\n/// Removes an ingredient from the shopping list.\n\n/// Returns the updated shopping list.\n\n///\n\n/// #Arguments\n\n///\n\n/// * `name` The name of the ingredient to remove\n", "file_path": "src/main.rs", "rank": 9, "score": 92050.32948011404 }, { "content": "/// Writes the shopping list.\n\n///\n\n/// #Arguments\n\n///\n\n/// * `shopping_list` The shopping list to write.\n\npub fn write_shopping_list(shopping_list: &ShoppingList) -> Result<(), String> {\n\n let file = OpenOptions::new()\n\n .write(true)\n\n .truncate(true)\n\n .open(paths::SHOPPING_LIST);\n\n\n\n if file.is_ok() {\n\n let mut file = file.unwrap();\n\n for (ingredient, amount) in &shopping_list.to_buy {\n\n if let Err(e) = writeln!(file, \"{};{}\", ingredient.name, amount) {\n\n return Err(format!(\"Couldn't write to file: {}\", e));\n\n }\n\n }\n\n }\n\n return Ok(());\n\n}\n\n\n", "file_path": "src/file_access/persistency.rs", "rank": 10, "score": 72453.83547162038 }, { "content": "#[get(\"/store\", format = 
\"application/json\")]\n\nfn get_store() -> String {\n\n return crate::cooking_book::store::Store::all_as_json();\n\n}\n\n\n\n/// Returns the groups.\n", "file_path": "src/main.rs", "rank": 11, "score": 71200.07187523805 }, { "content": "#[get(\"/group\", format = \"application/json\")]\n\nfn get_group() -> String {\n\n return crate::cooking_book::group::Group::all_as_json();\n\n}\n\n\n\n///Returns a list of all ingredients\n", "file_path": "src/main.rs", "rank": 12, "score": 71200.07187523805 }, { "content": "#[get(\"/shopping_list\", format = \"application/json\")]\n\nfn get_shopping_list() -> String {\n\n let shopping_list = persistency::load_shopping_list();\n\n return shopping_list.to_json();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 13, "score": 69224.252671879 }, { "content": "/// Returns the shopping list.\n\npub fn load_shopping_list() -> ShoppingList {\n\n let mut shopping_list = ShoppingList::new();\n\n\n\n let content = load_file(paths::SHOPPING_LIST);\n\n if content.is_none() {\n\n return shopping_list;\n\n }\n\n\n\n let mut all_ingredients = load_ingredients();\n\n\n\n for line in content.unwrap().lines() {\n\n let mut values = line.split(';');\n\n let name = values.next().unwrap().to_string();\n\n\n\n if !all_ingredients.contains_key(&name) {\n\n Ingredient::persist_new_ingredient(&name, &mut all_ingredients)\n\n .unwrap_or_else(|e| eprintln!(\"{}\", e));\n\n }\n\n\n\n let amount = match values.next() {\n", "file_path": "src/file_access/persistency.rs", "rank": 14, "score": 60398.59370720835 }, { "content": "fn main() {\n\n rocket::ignite()\n\n .mount(\n\n \"/\",\n\n routes![\n\n get_store,\n\n get_group,\n\n get_ingredient,\n\n put_new_ingredient,\n\n put_update_ingredient,\n\n delete_ingredient,\n\n get_shopping_list\n\n ],\n\n )\n\n .mount(\"/\", StaticFiles::from(\"web\"))\n\n .launch();\n\n}", "file_path": "src/main.rs", "rank": 15, "score": 40185.63825290671 }, { "content": "use std::cmp::Ordering;\n\nuse 
std::collections::HashMap;\n\nuse std::fmt;\n\n\n\nuse crate::cooking_book::group::Group;\n\nuse crate::cooking_book::store::Store;\n\nuse crate::file_access::persistency;\n\n\n\n/// An ingredient which is in a specific group and has a preferred store\n\n#[derive(Clone, PartialEq, Eq, Hash)]\n\npub struct Ingredient {\n\n pub name: String,\n\n pub group: Group,\n\n pub preferred_store: Store,\n\n}\n\n\n\nimpl Ingredient {\n\n pub fn new_by_line(line: &str) -> Ingredient {\n\n let mut values = line.split(';');\n\n\n", "file_path": "src/cooking_book/ingredient.rs", "rank": 36, "score": 22210.30784384758 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Ingredient;\n\n use crate::cooking_book::group::Group;\n\n use crate::cooking_book::store::Store;\n\n use std::cmp::Ordering;\n\n\n\n #[test]\n\n fn test_to_json() {\n\n let ingredient = Ingredient {\n\n name: \"Gurke\".to_string(),\n\n group: Group::Vegetable,\n\n preferred_store: Store::Any,\n\n };\n\n assert_eq!(\n\n ingredient.to_json(),\n\n \"{\\\"name\\\": \\\"Gurke\\\", \\\"group\\\": \\\"Gemüse\\\", \\\"store\\\": \\\"Überall\\\"}\"\n", "file_path": "src/cooking_book/ingredient.rs", "rank": 37, "score": 22209.47128868217 }, { "content": " pub fn all_to_json(all_ingredients: &HashMap<String, Ingredient>) -> String {\n\n let mut json: String = String::new();\n\n json.push('[');\n\n\n\n let mut keys: Vec<&String> = all_ingredients.keys().collect();\n\n keys.sort();\n\n\n\n let mut is_first: bool = true;\n\n for k in keys {\n\n if !is_first {\n\n json.push_str(\", \");\n\n }\n\n\n\n json.push_str(&all_ingredients.get(k).unwrap().to_json());\n\n is_first = false;\n\n }\n\n\n\n json.push(']');\n\n return json;\n\n }\n", "file_path": "src/cooking_book/ingredient.rs", "rank": 38, "score": 22209.019193441694 }, { "content": "\n\n /// Export this ingredient to JSON.\n\n pub fn to_json(&self) -> String {\n\n let mut json: String = String::new();\n\n json.push('{');\n\n\n\n 
json.push_str(\"\\\"name\\\": \\\"\");\n\n json.push_str(&self.name);\n\n json.push_str(\"\\\", \");\n\n\n\n json.push_str(\"\\\"group\\\": \\\"\");\n\n json.push_str(&format!(\"{}\", &self.group));\n\n json.push_str(\"\\\", \");\n\n\n\n json.push_str(\"\\\"store\\\": \\\"\");\n\n json.push_str(&format!(\"{}\", &self.preferred_store));\n\n json.push_str(\"\\\"\");\n\n\n\n json.push('}');\n\n\n", "file_path": "src/cooking_book/ingredient.rs", "rank": 39, "score": 22207.85045431349 }, { "content": " let name = String::from(values.next().unwrap());\n\n let group = match values.next() {\n\n Some(group) => group.trim(),\n\n None => \"<empty>\",\n\n };\n\n let group: Group = match group.parse::<usize>() {\n\n Ok(num) => Group::lookup_group_number(num),\n\n Err(_) => Group::Other,\n\n };\n\n\n\n let store = match values.next() {\n\n Some(store) => store.trim(),\n\n None => \"<empty>\",\n\n };\n\n let store: Store = match store.parse::<usize>() {\n\n Ok(num) => Store::lookup_store_number(num),\n\n Err(_) => Store::Any,\n\n };\n\n\n\n Ingredient {\n", "file_path": "src/cooking_book/ingredient.rs", "rank": 40, "score": 22207.230723266806 }, { "content": " return json;\n\n }\n\n\n\n /// Returns the list of all ingredients.\n\n pub fn get_all_ingredients() -> Vec<Ingredient> {\n\n let all_ingredients = persistency::load_ingredients();\n\n let mut vec_ingredients: Vec<Ingredient> = Vec::new();\n\n\n\n for (_key, value) in all_ingredients.iter() {\n\n vec_ingredients.push(value.clone());\n\n }\n\n vec_ingredients.sort();\n\n\n\n return vec_ingredients;\n\n }\n\n}\n\n\n\nimpl PartialOrd for Ingredient {\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n Some(self.cmp(other))\n", "file_path": "src/cooking_book/ingredient.rs", "rank": 41, "score": 22205.951781510146 }, { "content": " ///\n\n /// * `name` The name of the new ingredient.\n\n pub fn persist_new_ingredient(\n\n name: &String,\n\n all_ingredients: &mut HashMap<String, Ingredient>,\n\n ) -> Result<(), 
String> {\n\n let new_ingredient = Ingredient::new_by_name(name.to_string());\n\n let result = persistency::write_single_ingredient(&new_ingredient);\n\n if result.is_err() {\n\n return result;\n\n }\n\n all_ingredients.insert(name.to_string(), new_ingredient);\n\n return Ok(());\n\n }\n\n\n\n /// Export the list of all ingredients to JSON.\n\n ///\n\n /// #Arguments\n\n ///\n\n /// ' `all_ingredients` The collection of all ingredients.\n", "file_path": "src/cooking_book/ingredient.rs", "rank": 42, "score": 22205.47834638678 }, { "content": " assert_eq!(ingredient.name, \"Salami\");\n\n assert_eq!(ingredient.group, Group::Other);\n\n }\n\n\n\n #[test]\n\n fn test_new_by_line_only_name_2() {\n\n let ingredient = Ingredient::new_by_line(\"Salami;\");\n\n assert_eq!(ingredient.name, \"Salami\");\n\n assert_eq!(ingredient.group, Group::Other);\n\n }\n\n\n\n #[test]\n\n fn test_new_by_line_only_name_3() {\n\n let ingredient = Ingredient::new_by_line(\"Salami;;\");\n\n assert_eq!(ingredient.name, \"Salami\");\n\n assert_eq!(ingredient.group, Group::Other);\n\n }\n\n\n\n #[test]\n\n fn test_new_by_line_only_group_1() {\n", "file_path": "src/cooking_book/ingredient.rs", "rank": 43, "score": 22204.31459186746 }, { "content": " preferred_store: Store::Any,\n\n };\n\n let i2 = Ingredient {\n\n name: String::from(\"asd\"),\n\n group: Group::Other,\n\n preferred_store: Store::Any,\n\n };\n\n\n\n assert_eq!(i1.cmp(&i2), Ordering::Greater);\n\n }\n\n\n\n #[test]\n\n fn test_sort_name() {\n\n let i1 = Ingredient {\n\n name: String::from(\"asd\"),\n\n group: Group::Vegetable,\n\n preferred_store: Store::Any,\n\n };\n\n let i2 = Ingredient {\n\n name: String::from(\"asc\"),\n", "file_path": "src/cooking_book/ingredient.rs", "rank": 44, "score": 22204.07927083966 }, { "content": " fn test_sort_equal() {\n\n let i1 = Ingredient {\n\n name: String::from(\"asd\"),\n\n group: Group::Other,\n\n preferred_store: Store::Any,\n\n };\n\n let i2 = Ingredient {\n\n name: 
String::from(\"asd\"),\n\n group: Group::Other,\n\n preferred_store: Store::Any,\n\n };\n\n\n\n assert_eq!(i1.cmp(&i2), Ordering::Equal);\n\n }\n\n\n\n #[test]\n\n fn test_sort_group() {\n\n let i1 = Ingredient {\n\n name: String::from(\"asd\"),\n\n group: Group::Vegetable,\n", "file_path": "src/cooking_book/ingredient.rs", "rank": 45, "score": 22204.072396332926 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n fn test_new_by_line_empty_1() {\n\n let ingredient = Ingredient::new_by_line(\"\");\n\n assert_eq!(ingredient.name, \"\");\n\n assert_eq!(ingredient.group, Group::Other);\n\n }\n\n\n\n #[test]\n\n fn test_new_by_line_empty_2() {\n\n let ingredient = Ingredient::new_by_line(\";\");\n\n assert_eq!(ingredient.name, \"\");\n\n assert_eq!(ingredient.group, Group::Other);\n\n }\n\n\n\n #[test]\n\n fn test_new_by_line_only_name_1() {\n\n let ingredient = Ingredient::new_by_line(\"Salami\");\n", "file_path": "src/cooking_book/ingredient.rs", "rank": 46, "score": 22203.993797208117 }, { "content": " let ingredient = Ingredient::new_by_line(\";0\");\n\n assert_eq!(ingredient.name, \"\");\n\n assert_eq!(ingredient.group, Group::Vegetable);\n\n }\n\n\n\n #[test]\n\n fn test_new_by_line_invalid_group_1() {\n\n let ingredient = Ingredient::new_by_line(\"Salami;-1\");\n\n assert_eq!(ingredient.name, \"Salami\");\n\n assert_eq!(ingredient.group, Group::Other);\n\n }\n\n\n\n #[test]\n\n fn test_new_by_line_invalid_group_2() {\n\n let ingredient = Ingredient::new_by_line(\"Salami;asd\");\n\n assert_eq!(ingredient.name, \"Salami\");\n\n assert_eq!(ingredient.group, Group::Other);\n\n }\n\n\n\n #[test]\n", "file_path": "src/cooking_book/ingredient.rs", "rank": 47, "score": 22203.71571494151 }, { "content": " pub fn set_store(&mut self, store: usize) {\n\n self.preferred_store = Store::lookup_store_number(store);\n\n }\n\n\n\n /// Create a new ingredient with only a name. 
Store and Group will fallback to their defaults.\n\n ///\n\n /// #Arguments\n\n ///\n\n /// * `name` The name of the new ingredient.\n\n pub fn new_by_name(name: String) -> Ingredient {\n\n return Ingredient {\n\n name: name.to_string(),\n\n group: Group::Other,\n\n preferred_store: Store::Any,\n\n };\n\n }\n\n /// Create a new ingredient with only a name. Store and Group will fallback to their defaults.\n\n /// The new ingredient will be persisted.\n\n ///\n\n /// #Arguments\n", "file_path": "src/cooking_book/ingredient.rs", "rank": 48, "score": 22203.46410608552 }, { "content": " group: Group::Vegetable,\n\n preferred_store: Store::Any,\n\n };\n\n\n\n assert_eq!(i1.cmp(&i2), Ordering::Greater);\n\n }\n\n\n\n #[test]\n\n fn test_sort_store() {\n\n let i1 = Ingredient {\n\n name: String::from(\"asd\"),\n\n group: Group::Vegetable,\n\n preferred_store: Store::Any,\n\n };\n\n let i2 = Ingredient {\n\n name: String::from(\"asd\"),\n\n group: Group::Vegetable,\n\n preferred_store: Store::DM,\n\n };\n\n\n\n assert_eq!(i1.cmp(&i2), Ordering::Less);\n\n }\n\n}\n", "file_path": "src/cooking_book/ingredient.rs", "rank": 49, "score": 22202.958311620892 }, { "content": " }\n\n}\n\nimpl Ord for Ingredient {\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n let order_store = self.preferred_store.cmp(&other.preferred_store);\n\n\n\n if order_store == Ordering::Equal {\n\n let order_group = self.group.cmp(&other.group);\n\n if order_group == Ordering::Equal {\n\n return self.name.cmp(&other.name);\n\n }\n\n return order_group;\n\n }\n\n return order_store;\n\n }\n\n}\n\n\n\nimpl fmt::Display for Ingredient {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{} [{}]\", &self.name, &self.group)\n", "file_path": "src/cooking_book/ingredient.rs", "rank": 50, "score": 22200.74225151407 }, { "content": " name,\n\n group,\n\n preferred_store: store,\n\n }\n\n }\n\n\n\n /// Change the assigned group\n\n /// \n\n /// #Arguments\n\n /// \n\n /// * `group` the 
encoded group\n\n pub fn set_group(&mut self, group: usize) {\n\n self.group = Group::lookup_group_number(group);\n\n } \n\n \n\n /// Change the assigned store\n\n /// \n\n /// #Arguments\n\n /// \n\n /// * `store` the encoded store\n", "file_path": "src/cooking_book/ingredient.rs", "rank": 51, "score": 22198.02947078245 }, { "content": " json.push_str(&format!(\", \\\"amount\\\": {}\", amount));\n\n }\n\n\n\n json.push('}');\n\n is_first = false;\n\n }\n\n\n\n json.push_str(\"]}}\");\n\n return json;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::ShoppingList;\n\n use crate::cooking_book::group::Group;\n\n use crate::cooking_book::ingredient::Ingredient;\n\n use crate::cooking_book::store::Store;\n\n\n\n #[test]\n", "file_path": "src/cooking_book/shopping_list.rs", "rank": 52, "score": 16.87064030749276 }, { "content": "\n\n assert_eq!(\n\n shopping_list.to_json(),\n\n \"{\\\"Any\\\": {\\\"Anderes\\\": [{\\\"name\\\": \\\"Banane\\\"}, {\\\"name\\\": \\\"Gurke\\\", \\\"amount\\\": 2}]}}\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_to_json_3() {\n\n let ingredient1 = Ingredient {\n\n name: \"Banane\".to_string(),\n\n group: Group::Fruit,\n\n preferred_store: Store::Any,\n\n };\n\n let ingredient2 = Ingredient {\n\n name: \"Gurke\".to_string(),\n\n group: Group::Vegetable,\n\n preferred_store: Store::Any,\n\n };\n\n let mut shopping_list = ShoppingList::new();\n", "file_path": "src/cooking_book/shopping_list.rs", "rank": 53, "score": 15.822700603061389 }, { "content": " }\n\n\n\n if i.group != category {\n\n category = i.group;\n\n json.push_str(\"], \\\"\");\n\n json.push_str(&format!(\"{:}\", category));\n\n json.push_str(\"\\\": [\");\n\n is_first = true;\n\n }\n\n\n\n if !is_first {\n\n json.push_str(\", \");\n\n }\n\n\n\n json.push_str(\"{\\\"name\\\": \\\"\");\n\n json.push_str(&i.name);\n\n json.push_str(\"\\\"\");\n\n\n\n let amount: &u16 = self.to_buy.get(i).unwrap();\n\n if amount > &1u16 {\n", "file_path": "src/cooking_book/shopping_list.rs", 
"rank": 54, "score": 15.39424885054104 }, { "content": "#![feature(proc_macro_hygiene, decl_macro)]\n\n\n\n#[macro_use]\n\nextern crate rocket;\n\nextern crate rocket_contrib;\n\n\n\nuse rocket_contrib::serve::StaticFiles;\n\n\n\nmod cooking_book {\n\n pub mod group;\n\n pub mod ingredient;\n\n pub mod recipe;\n\n pub mod shopping_list;\n\n pub mod store;\n\n}\n\n\n\nmod file_access {\n\n pub mod persistency;\n\n}\n\n\n\nuse crate::cooking_book::ingredient::Ingredient;\n\nuse crate::file_access::persistency;\n\n\n\n/// Returns the stores.\n\n#[get(\"/store\", format = \"application/json\")]\n", "file_path": "src/main.rs", "rank": 55, "score": 14.839291863209686 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::file_access::persistency;\n\nuse crate::Ingredient;\n\n\n\n/// The shopping list\n\n#[derive(PartialEq, Eq)]\n\npub struct ShoppingList {\n\n pub to_buy: HashMap<Ingredient, u16>,\n\n}\n\n\n\nimpl ShoppingList {\n\n pub fn new() -> ShoppingList {\n\n let to_buy: HashMap<Ingredient, u16> = HashMap::new();\n\n return ShoppingList { to_buy };\n\n }\n\n\n\n /// Add an item to the shopping list. 
If the item is already present, the number to buy will be incremented.\n\n /// The updated shopping list will be persisted.\n\n pub fn add_and_save(&mut self, ingredient: &Ingredient) -> Result<(), String> {\n", "file_path": "src/cooking_book/shopping_list.rs", "rank": 56, "score": 14.478976683891304 }, { "content": "use std::collections::HashMap;\n\nuse std::fs;\n\nuse std::fs::File;\n\nuse std::fs::OpenOptions;\n\nuse std::io::prelude::*;\n\nuse std::path::Path;\n\n\n\nuse crate::cooking_book::ingredient::Ingredient;\n\nuse crate::cooking_book::recipe::Recipe;\n\nuse crate::cooking_book::shopping_list::ShoppingList;\n\n\n\nmod paths {\n\n pub const INGREDIENTS: &'static str = \"persistency/ingredients.csv\";\n\n pub const SHOPPING_LIST: &'static str = \"persistency/shoppingList.csv\";\n\n pub const RECIPES: &'static str = \"persistency/recipes.csv\";\n\n}\n\n\n", "file_path": "src/file_access/persistency.rs", "rank": 57, "score": 13.818037881472044 }, { "content": " Some(x) => x,\n\n None => \"\",\n\n };\n\n\n\n let amount = match amount.parse::<u16>() {\n\n Ok(x) => x,\n\n Err(_) => 1,\n\n };\n\n\n\n shopping_list.add_item(all_ingredients.get(&name).unwrap().clone(), amount);\n\n }\n\n\n\n return shopping_list;\n\n}\n\n\n", "file_path": "src/file_access/persistency.rs", "rank": 58, "score": 13.75930267965019 }, { "content": "\n\nimpl Group {\n\n fn get_group_iterator() -> Iter<'static, Group> {\n\n static GROUPS: [Group; 8] = [\n\n Vegetable, Fruit, Freezer, Conserved, Beverage, Baking, Spice, Other,\n\n ];\n\n GROUPS.into_iter()\n\n }\n\n\n\n pub fn all_as_json() -> String {\n\n let mut json = String::new();\n\n json.push_str(\"{\\\"groups\\\": [\");\n\n\n\n let mut is_first: bool = true;\n\n for group in Group::get_group_iterator() {\n\n if is_first {\n\n json.push_str(&format!(\"\\\"{}\\\"\", group));\n\n is_first = false;\n\n } else {\n\n json.push_str(&format!(\", \\\"{}\\\"\", group));\n", "file_path": "src/cooking_book/group.rs", "rank": 59, "score": 
13.640983160224074 }, { "content": " let mut json = String::new();\n\n json.push_str(\"{\\\"stores\\\": [\");\n\n\n\n let mut is_first: bool = true;\n\n for store in Store::get_store_iterator() {\n\n if is_first {\n\n json.push_str(&format!(\"\\\"{}\\\"\", store));\n\n is_first = false;\n\n } else {\n\n json.push_str(&format!(\", \\\"{}\\\"\", store));\n\n }\n\n }\n\n json.push_str(\"]}\");\n\n\n\n return json;\n\n }\n\n\n\n /// Returns the decoded Store.\n\n ///\n\n /// #Arguments\n", "file_path": "src/cooking_book/store.rs", "rank": 60, "score": 13.414389826463392 }, { "content": " let ingredient1 = Ingredient::new_by_name(\"Banane\".to_string());\n\n let ingredient2 = Ingredient::new_by_name(\"Gurke\".to_string());\n\n let mut shopping_list = ShoppingList::new();\n\n shopping_list.add_or_increment(&ingredient1);\n\n shopping_list.add_or_increment(&ingredient2);\n\n\n\n assert_eq!(\n\n shopping_list.to_json(),\n\n \"{\\\"Any\\\": {\\\"Anderes\\\": [{\\\"name\\\": \\\"Banane\\\"}, {\\\"name\\\": \\\"Gurke\\\"}]}}\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_to_json_2() {\n\n let ingredient1 = Ingredient::new_by_name(\"Banane\".to_string());\n\n let ingredient2 = Ingredient::new_by_name(\"Gurke\".to_string());\n\n let mut shopping_list = ShoppingList::new();\n\n shopping_list.add_or_increment(&ingredient1);\n\n shopping_list.add_or_increment(&ingredient2);\n\n shopping_list.add_or_increment(&ingredient2);\n", "file_path": "src/cooking_book/shopping_list.rs", "rank": 61, "score": 13.29405818660873 }, { "content": " self.add_or_increment(ingredient);\n\n return persistency::write_shopping_list(&self);\n\n }\n\n\n\n fn add_or_increment(&mut self, ingredient: &Ingredient) {\n\n let amount = self.to_buy.entry(ingredient.clone()).or_insert(0);\n\n *amount += 1;\n\n }\n\n\n\n /// Add an item with amount to the shopping list.\n\n pub fn add_item(&mut self, ingredient: Ingredient, amount: u16) {\n\n self.to_buy.insert(ingredient, amount);\n\n }\n\n\n\n /// Remove an item from 
the shopping list. The updated shopping list will be persisted.\n\n ///\n\n /// #Arguments\n\n ///\n\n /// * `ingredient` The ingredient to remove from the list.\n\n pub fn remove_and_save(&mut self, ingredient: &Ingredient) -> Result<(), String> {\n", "file_path": "src/cooking_book/shopping_list.rs", "rank": 62, "score": 12.748869610057174 }, { "content": "\n\n let mut json: String = String::new();\n\n json.push_str(\"{\\\"\");\n\n json.push_str(&format!(\"{:?}\", store));\n\n json.push_str(\"\\\": {\\\"\");\n\n\n\n json.push_str(&format!(\"{:}\", category));\n\n json.push_str(\"\\\": [\");\n\n\n\n let mut is_first: bool = true;\n\n for i in keys {\n\n if i.preferred_store != store {\n\n let store = i.preferred_store;\n\n category = i.group;\n\n json.push_str(\"]}, \\\"\");\n\n json.push_str(&format!(\"{:?}\", store));\n\n json.push_str(\"\\\": {\\\"\");\n\n json.push_str(&format!(\"{:}\", category));\n\n json.push_str(\"\\\": [\");\n\n is_first = true;\n", "file_path": "src/cooking_book/shopping_list.rs", "rank": 63, "score": 12.522078559515734 }, { "content": " shopping_list.add_or_increment(&ingredient1);\n\n shopping_list.add_or_increment(&ingredient2);\n\n\n\n assert_eq!(\n\n shopping_list.to_json(),\n\n \"{\\\"Any\\\": {\\\"Gemüse\\\": [{\\\"name\\\": \\\"Gurke\\\"}], \\\"Obst\\\": [{\\\"name\\\": \\\"Banane\\\"}]}}\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_to_json_4() {\n\n let ingredient1 = Ingredient {\n\n name: \"Banane\".to_string(),\n\n group: Group::Other,\n\n preferred_store: Store::Any,\n\n };\n\n let ingredient2 = Ingredient {\n\n name: \"Gurke\".to_string(),\n\n group: Group::Other,\n\n preferred_store: Store::DM,\n", "file_path": "src/cooking_book/shopping_list.rs", "rank": 64, "score": 12.361369011908067 }, { "content": "use self::Store::*;\n\nuse std::fmt;\n\nuse std::slice::Iter;\n\n\n\n/// The available stores for shopping.\n\n#[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Copy, Hash)]\n\npub enum Store {\n\n Rewe = 0,\n\n DM = 
1,\n\n Denz = 2,\n\n Any = -1,\n\n}\n\n\n\nimpl Store {\n\n fn get_store_iterator() -> Iter<'static, Store> {\n\n static STORES: [Store; 4] = [Rewe, DM, Denz, Any];\n\n STORES.into_iter()\n\n }\n\n\n\n pub fn all_as_json() -> String {\n", "file_path": "src/cooking_book/store.rs", "rank": 65, "score": 11.519822497822133 }, { "content": " fn test_add_or_increment() {\n\n let ingredient = Ingredient::new_by_name(\"Banane\".to_string());\n\n let mut shopping_list = ShoppingList::new();\n\n assert!(shopping_list.to_buy.is_empty());\n\n\n\n shopping_list.add_or_increment(&ingredient);\n\n assert!(shopping_list.to_buy.contains_key(&ingredient));\n\n\n\n let mut expected_count: u16 = 1;\n\n assert_eq!(\n\n shopping_list.to_buy.get(&ingredient).unwrap(),\n\n &expected_count\n\n );\n\n\n\n shopping_list.add_or_increment(&ingredient);\n\n expected_count += 1;\n\n assert_eq!(\n\n shopping_list.to_buy.get(&ingredient).unwrap(),\n\n &expected_count\n\n );\n", "file_path": "src/cooking_book/shopping_list.rs", "rank": 66, "score": 11.261861753506503 }, { "content": " };\n\n let mut shopping_list = ShoppingList::new();\n\n shopping_list.add_or_increment(&ingredient1);\n\n shopping_list.add_or_increment(&ingredient2);\n\n\n\n assert_eq!(\n\n shopping_list.to_json(),\n\n \"{\\\"Any\\\": {\\\"Anderes\\\": [{\\\"name\\\": \\\"Banane\\\"}]}, \\\"DM\\\": {\\\"Anderes\\\": [{\\\"name\\\": \\\"Gurke\\\"}]}}\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_to_json_5() {\n\n let shopping_list = ShoppingList::new();\n\n\n\n assert_eq!(shopping_list.to_json(), \"{}\");\n\n }\n\n}\n", "file_path": "src/cooking_book/shopping_list.rs", "rank": 67, "score": 10.784574709602136 }, { "content": " }\n\n\n\n #[test]\n\n fn test_remove() {\n\n let ingredient = Ingredient::new_by_name(\"Banane\".to_string());\n\n let mut shopping_list = ShoppingList::new();\n\n assert!(shopping_list.to_buy.is_empty());\n\n\n\n shopping_list.add_or_increment(&ingredient);\n\n 
assert!(shopping_list.to_buy.contains_key(&ingredient));\n\n\n\n shopping_list.remove(&ingredient);\n\n assert!(shopping_list.to_buy.is_empty());\n\n\n\n shopping_list.remove(&ingredient);\n\n assert!(shopping_list.to_buy.is_empty());\n\n }\n\n\n\n #[test]\n\n fn test_to_json_1() {\n", "file_path": "src/cooking_book/shopping_list.rs", "rank": 68, "score": 10.511840109881764 }, { "content": " self.remove(ingredient);\n\n return persistency::write_shopping_list(&self);\n\n }\n\n\n\n fn remove(&mut self, ingredient: &Ingredient) {\n\n self.to_buy.remove(&ingredient);\n\n }\n\n\n\n /// Exports the shopping list to json.\n\n pub fn to_json(&self) -> String {\n\n let mut keys: Vec<&Ingredient> = self.to_buy.keys().collect();\n\n if keys.is_empty() {\n\n return \"{}\".to_string();\n\n }\n\n\n\n keys.sort();\n\n\n\n let first_entry = keys.first().unwrap();\n\n let store = first_entry.preferred_store;\n\n let mut category = first_entry.group;\n", "file_path": "src/cooking_book/shopping_list.rs", "rank": 69, "score": 10.508136834555845 }, { "content": " }\n\n }\n\n json.push_str(\"]}\");\n\n\n\n return json;\n\n }\n\n\n\n /// Returns the decoded Group.\n\n ///\n\n /// #Arguments\n\n /// * `number` The encoded Group.\n\n pub fn lookup_group_number(number: usize) -> Group {\n\n match number {\n\n 0 => Group::Vegetable,\n\n 1 => Group::Fruit,\n\n 2 => Group::Freezer,\n\n 3 => Group::Conserved,\n\n 4 => Group::Beverage,\n\n 5 => Group::Baking,\n\n 6 => Group::Pasta,\n", "file_path": "src/cooking_book/group.rs", "rank": 70, "score": 9.662055544781143 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Store;\n\n #[test]\n\n fn test_store_size() {\n\n let it = Store::get_store_iterator();\n\n let (size, _asd) = it.size_hint();\n\n assert_eq!(size, 4);\n\n }\n\n\n\n #[test]\n\n fn test_lookup_store() {\n\n assert_eq!(Store::lookup_store_number(0), Store::Rewe);\n\n assert_eq!(Store::lookup_store_number(1), Store::DM);\n\n assert_eq!(Store::lookup_store_number(2), 
Store::Denz);\n\n assert_eq!(Store::lookup_store_number(3), Store::Any);\n\n assert_eq!(Store::lookup_store_number(4), Store::Any);\n\n }\n\n}\n", "file_path": "src/cooking_book/store.rs", "rank": 71, "score": 8.567138435168575 }, { "content": "use self::Group::*;\n\nuse std::fmt;\n\nuse std::slice::Iter;\n\n\n\n/// The groups for ingredients\n\n#[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Copy, Hash)]\n\npub enum Group {\n\n Vegetable = 0,\n\n Fruit = 1,\n\n Freezer = 2,\n\n Conserved = 3,\n\n Beverage = 4,\n\n Baking = 5,\n\n Pasta = 6,\n\n Legume = 7,\n\n Spice = 8,\n\n Snacks = 9,\n\n Sweets = 10,\n\n Other = -1,\n\n}\n", "file_path": "src/cooking_book/group.rs", "rank": 72, "score": 8.525579424462517 }, { "content": " Group::Spice => write!(f, \"Gewürz\"),\n\n Group::Snacks => write!(f, \"Knabberkram\"),\n\n Group::Sweets => write!(f, \"Süßigkeiten\"),\n\n Group::Other => write!(f, \"Anderes\"),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Group;\n\n #[test]\n\n fn test_group_size() {\n\n let it = Group::get_group_iterator();\n\n let (size, _asd) = it.size_hint();\n\n assert_eq!(size, 8);\n\n }\n\n\n\n #[test]\n\n fn test_group_display() {\n", "file_path": "src/cooking_book/group.rs", "rank": 73, "score": 8.197059572357237 }, { "content": " /// * `number` The encoded store.\n\n pub fn lookup_store_number(number: usize) -> Store {\n\n match number {\n\n 0 => Store::Rewe,\n\n 1 => Store::DM,\n\n 2 => Store::Denz,\n\n _ => Store::Any,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Store {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n Store::Rewe => write!(f, \"Rewe\"),\n\n Store::DM => write!(f, \"DM\"),\n\n Store::Denz => write!(f, \"Denz\"),\n\n Store::Any => write!(f, \"Überall\"),\n\n }\n\n }\n", "file_path": "src/cooking_book/store.rs", "rank": 74, "score": 7.0078641048879895 }, { "content": "# Cooking-Book\n\nA small little application to teach myself rust. 
Progress will be slow and the implementation inefficient :)\n\n\n\nEventually this application will allow the user to\n\n* Manage recipes and search them conveniently\n\n* Get some insights into used recipes (calories or price per meal)\n\n* Assign a recipe for a day of week and create a shoping cart out of it. The shopping cart will be grouped by \n\n ingredients group (like vegetable or baking ingredient) to make shopping easier. If provided the shopping cart will \n\n be also grouped by different stores if an ingredient is only available in a special place.\n\n* A WebInterface will allow you to see the shopping cart in the store (where you actually need it) without printing or\n", "file_path": "README.md", "rank": 75, "score": 5.215310738664228 }, { "content": " assert_eq!(&format!(\"{}\", Group::Other), \"Anderes\");\n\n }\n\n\n\n #[test]\n\n fn test_lookup_group() {\n\n assert_eq!(Group::lookup_group_number(0), Group::Vegetable);\n\n assert_eq!(Group::lookup_group_number(1), Group::Fruit);\n\n assert_eq!(Group::lookup_group_number(2), Group::Freezer);\n\n assert_eq!(Group::lookup_group_number(3), Group::Conserved);\n\n assert_eq!(Group::lookup_group_number(4), Group::Beverage);\n\n assert_eq!(Group::lookup_group_number(5), Group::Baking);\n\n assert_eq!(Group::lookup_group_number(6), Group::Pasta);\n\n assert_eq!(Group::lookup_group_number(7), Group::Legume);\n\n assert_eq!(Group::lookup_group_number(8), Group::Spice);\n\n assert_eq!(Group::lookup_group_number(9), Group::Snacks);\n\n assert_eq!(Group::lookup_group_number(10), Group::Sweets);\n\n assert_eq!(Group::lookup_group_number(11), Group::Other);\n\n }\n\n}\n", "file_path": "src/cooking_book/group.rs", "rank": 76, "score": 4.711406982212188 }, { "content": " 7 => Group::Legume,\n\n 8 => Group::Spice,\n\n 9 => Group::Snacks,\n\n 10 => Group::Sweets,\n\n _ => Group::Other,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Group {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n 
Group::Vegetable => write!(f, \"Gemüse\"),\n\n Group::Fruit => write!(f, \"Obst\"),\n\n Group::Freezer => write!(f, \"Kühlung\"),\n\n Group::Conserved => write!(f, \"Konserve\"),\n\n Group::Beverage => write!(f, \"Getränk\"),\n\n Group::Baking => write!(f, \"Backzutat\"),\n\n Group::Pasta => write!(f, \"Nudeln\"),\n\n Group::Legume => write!(f, \"Hülsenfrüchte\"),\n", "file_path": "src/cooking_book/group.rs", "rank": 77, "score": 4.368762160873248 } ]
Rust
foreign-types-macros/src/build.rs
flier/foreign-types
263ab4afdb1268073e1cbbf010768198fae388ec
use proc_macro2::TokenStream; use quote::quote; use syn::{Path, Ident}; use crate::parse::{Input, ForeignType}; fn ref_name(input: &ForeignType) -> Ident { Ident::new(&format!("{}Ref", input.name), input.name.span()) } pub fn build(input: Input) -> TokenStream { let types = input.types.iter().map(|t| build_foreign_type(&input.crate_, t)); quote! { #(#types)* } } fn build_foreign_type(crate_: &Path, input: &ForeignType) -> TokenStream { let decls = build_decls(crate_, input); let oibits = build_oibits(crate_, input); let foreign_impls = build_foreign_impls(crate_, input); let drop_impl = build_drop_impl(crate_, input); let deref_impls = build_deref_impls(crate_, input); let borrow_impls = build_borrow_impls(crate_, input); let as_ref_impls = build_as_ref_impls(crate_, input); let clone_impl = build_clone_impl(crate_, input); let to_owned_impl = build_to_owned_impl(crate_, input); quote! { #decls #oibits #foreign_impls #drop_impl #deref_impls #borrow_impls #as_ref_impls #clone_impl #to_owned_impl } } fn build_decls(crate_: &Path, input: &ForeignType) -> TokenStream { let attrs = &input.attrs; let vis = &input.visibility; let name = &input.name; let generics = &input.generics; let ctype = &input.ctype; let phantom_data = input.phantom_data.as_ref().map(|d| quote!(, #crate_::export::PhantomData<#d>)); let ref_name = ref_name(input); let ref_docs = format!("A borrowed reference to a [`{name}`](struct.{}.html).", name = name); quote! { #(#attrs)* #vis struct #name #generics(#crate_::export::NonNull<#ctype> #phantom_data); #[doc = #ref_docs] #vis struct #ref_name #generics(#crate_::Opaque #phantom_data); } } fn build_oibits(crate_: &Path, input: &ForeignType) -> TokenStream { let oibits = input.oibits.iter().map(|t| build_oibit(crate_, input, t)); quote! 
{ #(#oibits)* } } fn build_oibit(crate_: &Path, input: &ForeignType, oibit: &Ident) -> TokenStream { let name = &input.name; let ref_name = ref_name(input); let (impl_generics, ty_generics, _) = input.generics.split_for_impl(); quote! { unsafe impl #impl_generics #crate_::export::#oibit for #name #ty_generics {} unsafe impl #impl_generics #crate_::export::#oibit for #ref_name #ty_generics {} } } fn build_foreign_impls(crate_: &Path, input: &ForeignType) -> TokenStream { let name = &input.name; let ctype = &input.ctype; let ref_name = ref_name(input); let (impl_generics, ty_generics, _) = input.generics.split_for_impl(); let phantom_data = input.phantom_data.as_ref().map(|_| quote!(, #crate_::export::PhantomData)); quote! { impl #impl_generics #crate_::ForeignType for #name #ty_generics { type CType = #ctype; type Ref = #ref_name #ty_generics; #[inline] unsafe fn from_ptr(ptr: *mut #ctype) -> #name #ty_generics { debug_assert!(!ptr.is_null()); #name(<#crate_::export::NonNull<_>>::new_unchecked(ptr) #phantom_data) } #[inline] fn as_ptr(&self) -> *mut #ctype { <#crate_::export::NonNull<_>>::as_ptr(self.0) } } impl #impl_generics #crate_::ForeignTypeRef for #ref_name #ty_generics { type CType = #ctype; } } } fn build_drop_impl(crate_: &Path, input: &ForeignType) -> TokenStream { let name = &input.name; let drop = &input.drop; let (impl_generics, ty_generics, _) = input.generics.split_for_impl(); quote! { impl #impl_generics #crate_::export::Drop for #name #ty_generics { #[inline] fn drop(&mut self) { unsafe { #drop(#crate_::ForeignType::as_ptr(self)); } } } } } fn build_deref_impls(crate_: &Path, input: &ForeignType) -> TokenStream { let name = &input.name; let ref_name = ref_name(input); let (impl_generics, ty_generics, _) = input.generics.split_for_impl(); quote! 
{ impl #impl_generics #crate_::export::Deref for #name #ty_generics { type Target = #ref_name #ty_generics; #[inline] fn deref(&self) -> &#ref_name #ty_generics { unsafe { #crate_::ForeignTypeRef::from_ptr(#crate_::ForeignType::as_ptr(self)) } } } impl #impl_generics #crate_::export::DerefMut for #name #ty_generics { #[inline] fn deref_mut(&mut self) -> &mut #ref_name #ty_generics { unsafe { #crate_::ForeignTypeRef::from_ptr_mut(#crate_::ForeignType::as_ptr(self)) } } } } } fn build_borrow_impls(crate_: &Path, input: &ForeignType) -> TokenStream { let name = &input.name; let ref_name = ref_name(input); let (impl_generics, ty_generics, _) = input.generics.split_for_impl(); quote! { impl #impl_generics #crate_::export::Borrow<#ref_name #ty_generics> for #name #ty_generics { #[inline] fn borrow(&self) -> &#ref_name #ty_generics { &**self } } impl #impl_generics #crate_::export::BorrowMut<#ref_name #ty_generics> for #name #ty_generics { #[inline] fn borrow_mut(&mut self) -> &mut #ref_name #ty_generics { &mut **self } } } } fn build_as_ref_impls(crate_: &Path, input: &ForeignType) -> TokenStream { let name = &input.name; let ref_name = ref_name(input); let (impl_generics, ty_generics, _) = input.generics.split_for_impl(); quote! { impl #impl_generics #crate_::export::AsRef<#ref_name #ty_generics> for #name #ty_generics { #[inline] fn as_ref(&self) -> &#ref_name #ty_generics { &**self } } impl #impl_generics #crate_::export::AsMut<#ref_name #ty_generics> for #name #ty_generics { #[inline] fn as_mut(&mut self) -> &mut #ref_name #ty_generics { &mut **self } } } } fn build_clone_impl(crate_: &Path, input: &ForeignType) -> TokenStream { let clone = match &input.clone { Some(clone) => clone, None => return quote!(), }; let name = &input.name; let (impl_generics, ty_generics, _) = input.generics.split_for_impl(); quote! 
{ impl #impl_generics #crate_::export::Clone for #name #ty_generics { #[inline] fn clone(&self) -> #name #ty_generics { unsafe { let ptr = #clone(#crate_::ForeignType::as_ptr(self)); #crate_::ForeignType::from_ptr(ptr) } } } } } #[cfg(feature = "std")] fn build_to_owned_impl(crate_: &Path, input: &ForeignType) -> TokenStream { let clone = match &input.clone { Some(clone) => clone, None => return quote!(), }; let name = &input.name; let ref_name = ref_name(input); let (impl_generics, ty_generics, _) = input.generics.split_for_impl(); quote! { impl #impl_generics #crate_::export::ToOwned for #ref_name #ty_generics { type Owned = #name #ty_generics; #[inline] fn to_owned(&self) -> #name #ty_generics { unsafe { let ptr = #clone(#crate_::ForeignTypeRef::as_ptr(self)); #crate_::ForeignType::from_ptr(ptr) } } } } } #[cfg(not(feature = "std"))] fn build_to_owned_impl(_: &Path, _: &ForeignType) -> TokenStream { quote!() }
use proc_macro2::TokenStream; use quote::quote; use syn::{Path, Ident}; use crate::parse::{Input, ForeignType}; fn ref_name(input: &ForeignType) -> Ident { Ident::new(&format!("{}Ref", input.name), input.name.span()) } pub fn build(input: Input) -> TokenStream { let types = input.types.iter().map(|t| build_foreign_type(&input.crate_, t)); quote! { #(#types)* } } fn build_foreign_type(crate_: &Path, input: &ForeignType) -> TokenStream { let decls = build_decls(crate_, input); let oibits = build_oibits(crate_, input); let foreign_impls = build_foreign_impls(crate_, input); let drop_impl = build_drop_impl(crate_, input); let deref_impls = build_deref_impls(crate_, input); let borrow_impls = build_borrow_impls(crate_, input); let as_ref_impls = build_as_ref_impls(crate_, input); let clone_impl = build_clone_impl(crate_, input); let to_owned_impl = build_to_owned_impl(crate_, input); quote! { #decls #oibits #foreign_impls #drop_impl #deref_impls #borrow_impls #as_ref_impls #clone_impl #to_owned_impl } } fn build_decls(crate_: &Path, input: &ForeignType) -> TokenStream { let attrs = &input.attrs; let vis = &input.visibility; let name = &input.name; let generics = &input.generics; let ctype = &input.ctype; let phantom_data = input.phantom_data.as_ref().map(|d| quote!(, #crate_::export::PhantomData<#d>)); let ref_name = ref_name(input); let ref_docs = format!("A borrowed reference to a [`{name}`](struct.{}.html).", name = name); quote! { #(#attrs)* #vis struct #name #generics(#crate_::export::NonNull<#ctype> #phantom_data); #[doc = #ref_docs] #vis struct #ref_name #generics(#crate_::Opaque #phantom_data); } } fn build_oibits(crate_: &Path, input: &ForeignType) -> TokenStream { let oibits = input.oibits.iter().map(|t| build_oibit(crate_, input, t)); quote! 
{ #(#oibits)* } } fn build_oibit(crate_: &Path, input: &ForeignType, oibit: &Ident) -> TokenStream { let name = &input.name; let ref_name = ref_name(input); let (impl_generics, ty_generics, _) = input.generics.split_for_impl(); quote! { unsafe impl #impl_generics #crate_::export::#oibit for #name #ty_generics {} unsafe impl #impl_generics #crate_::export::#oibit for #ref_name #ty_generics {} } } fn build_foreign_impls(crate_: &Path, input: &ForeignType) -> TokenStream { let name = &input.name; let ctype = &input.ctype; let ref_name = ref_name(input); let (impl_generics, ty_generics, _) = input.generics.split_for_impl(); let phantom_data = input.phantom_data.as_ref().map(|_| quote!(, #crate_::export::PhantomData)); quote! { impl #impl_generics #crate_::ForeignType for #name #ty_generics { type CType = #ctype; type Ref = #ref_name #ty_generics; #[inline] unsafe fn from_ptr(ptr: *mut #ctype) -> #name #ty_generics { debug_assert!(!ptr.is_null()); #name(<#crate_::export::NonNull<_>>::new_unchecked(ptr) #phantom_data) } #[inline] fn as_ptr(&self) -> *mut #ctype { <#crate_::export::NonNull<_>>::as_ptr(self.0) } } impl #impl_generics #crate_::ForeignTypeRef for #ref_name #ty_generics { type CType = #ctype; } } } fn build_drop_impl(crate_: &Path, input: &ForeignType) -> TokenStream { let name = &input.name; let drop = &input.drop; let (impl_generics, ty_generics, _) = input.generics.split_for_impl(); quote! { impl #impl_generics #crate_::export::Drop for #name #ty_generics { #[inline] fn drop(&mut self) { unsafe { #drop(#crate_::ForeignType::as_ptr(self)); } } } } } fn build_deref_impls(crate_: &Path, input: &ForeignType) -> TokenStream { let name = &input.name; let ref_name = ref_name(input); let (impl_generics, ty_generics, _) = input.generics.split_for_impl(); quote! 
{ impl #impl_generics #crate_::export::Deref for #name #ty_generics { type Target = #ref_name #ty_generics; #[inline] fn deref(&self) -> &#ref_name #ty_generics { unsafe { #crate_::ForeignTypeRef::from_ptr(#crate_::ForeignType::as_ptr(self)) } } } impl #impl_generics #crate_::export::DerefMut for #name #ty_generics { #[inline] fn deref_mut(&mut self) -> &mut #ref_name #ty_generics { unsafe { #crate_::ForeignTypeRef::from_ptr_mut(#crate_::ForeignType::as_ptr(self)) } } } } } fn build_borrow_impls(crate_: &Path, input: &ForeignType) -> TokenStream { let name = &input.name; let ref_name = ref_name(input); let (impl_generics, ty_generics, _) = input.generics.split_for_impl(); quote! { impl #impl_generics #crate_::export::Borrow<#ref_name #ty_generics> for #name #ty_generics { #[inline] fn borrow(&self) -> &#ref_name #ty_generics { &**self } } impl #impl_generics #crate_::export::BorrowMut<#ref_name #ty_generics> for #name #ty_generics { #[inline] fn borrow_mut(&mut self) -> &mut #ref_name #ty_generics { &mut **self } } } } fn build_as_ref_impls(crate_: &Path, input: &ForeignType) -> TokenStream { let name = &input.name; let ref_name = ref_name(input); let (impl_generics, ty_generics, _) = input.generics.split_for_impl(); quote! { impl #impl_generics #crate_::export::AsRef<#ref_name #ty_generics> for #name #ty_generics { #[inline] fn as_ref(&self) -> &#ref_name #ty_generics { &**self } } impl #impl_generics #crate_::export::AsMut<#ref_name #ty_generics> for #name #ty_generics { #[inline] fn as_mut(&mut self) -> &mut #ref_name #ty_generics { &mut **self } } } }
#[cfg(feature = "std")] fn build_to_owned_impl(crate_: &Path, input: &ForeignType) -> TokenStream { let clone = match &input.clone { Some(clone) => clone, None => return quote!(), }; let name = &input.name; let ref_name = ref_name(input); let (impl_generics, ty_generics, _) = input.generics.split_for_impl(); quote! { impl #impl_generics #crate_::export::ToOwned for #ref_name #ty_generics { type Owned = #name #ty_generics; #[inline] fn to_owned(&self) -> #name #ty_generics { unsafe { let ptr = #clone(#crate_::ForeignTypeRef::as_ptr(self)); #crate_::ForeignType::from_ptr(ptr) } } } } } #[cfg(not(feature = "std"))] fn build_to_owned_impl(_: &Path, _: &ForeignType) -> TokenStream { quote!() }
fn build_clone_impl(crate_: &Path, input: &ForeignType) -> TokenStream { let clone = match &input.clone { Some(clone) => clone, None => return quote!(), }; let name = &input.name; let (impl_generics, ty_generics, _) = input.generics.split_for_impl(); quote! { impl #impl_generics #crate_::export::Clone for #name #ty_generics { #[inline] fn clone(&self) -> #name #ty_generics { unsafe { let ptr = #clone(#crate_::ForeignType::as_ptr(self)); #crate_::ForeignType::from_ptr(ptr) } } } } }
function_block-full_function
[ { "content": "#[proc_macro]\n\npub fn foreign_type_impl(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as Input);\n\n build::build(input).into()\n\n}\n", "file_path": "foreign-types-macros/src/lib.rs", "rank": 5, "score": 107763.6663491709 }, { "content": "fn parse_oibit(input: ParseStream) -> parse::Result<Ident> {\n\n let lookahead = input.lookahead1();\n\n if lookahead.peek(kw::Sync) || lookahead.peek(kw::Send) {\n\n input.parse()\n\n } else {\n\n Err(lookahead.error())\n\n }\n\n}\n\n\n", "file_path": "foreign-types-macros/src/parse.rs", "rank": 14, "score": 91497.83019194324 }, { "content": "fn parse_oibits(input: ParseStream) -> parse::Result<Punctuated<Ident, Token![+]>> {\n\n let mut out = Punctuated::new();\n\n\n\n if input.parse::<Option<Token![:]>>()?.is_some() {\n\n loop {\n\n out.push_value(input.call(parse_oibit)?);\n\n if input.peek(token::Brace) {\n\n break;\n\n }\n\n out.push_punct(input.parse()?);\n\n if input.peek(token::Brace) {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n Ok(out)\n\n}\n\n\n", "file_path": "foreign-types-macros/src/parse.rs", "rank": 15, "score": 85061.54774655026 }, { "content": "fn parse_fn<T>(input: ParseStream) -> parse::Result<ExprPath>\n\nwhere\n\n T: Parse,\n\n{\n\n input.parse::<Token![fn]>()?;\n\n input.parse::<T>()?;\n\n input.parse::<Token![=]>()?;\n\n let path = input.parse()?;\n\n input.parse::<Token![;]>()?;\n\n Ok(path)\n\n}\n\n\n", "file_path": "foreign-types-macros/src/parse.rs", "rank": 17, "score": 62526.78191671613 }, { "content": "fn parse_type<T>(input: ParseStream) -> parse::Result<Type>\n\nwhere\n\n T: Parse,\n\n{\n\n input.parse::<Token![type]>()?;\n\n input.parse::<T>()?;\n\n input.parse::<Token![=]>()?;\n\n let type_ = input.parse()?;\n\n input.parse::<Token![;]>()?;\n\n Ok(type_)\n\n}\n\n\n", "file_path": "foreign-types-macros/src/parse.rs", "rank": 18, "score": 58932.40069361526 }, { "content": "fn parse_phantom_data(input: ParseStream) -> parse::Result<Option<Type>> {\n\n if 
input.peek(Token![type]) && input.peek2(kw::PhantomData) {\n\n input.call(parse_type::<kw::PhantomData>).map(Some)\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "foreign-types-macros/src/parse.rs", "rank": 19, "score": 57426.8413634037 }, { "content": "/// A trait implemented by types which reference borrowed foreign types.\n\npub trait ForeignTypeRef: Sized {\n\n /// The raw C type.\n\n type CType;\n\n\n\n /// Constructs a shared instance of this type from its raw type.\n\n #[inline]\n\n unsafe fn from_ptr<'a>(ptr: *mut Self::CType) -> &'a Self {\n\n debug_assert!(!ptr.is_null());\n\n &*(ptr as *mut _)\n\n }\n\n\n\n /// Constructs a mutable reference of this type from its raw type.\n\n #[inline]\n\n unsafe fn from_ptr_mut<'a>(ptr: *mut Self::CType) -> &'a mut Self {\n\n debug_assert!(!ptr.is_null());\n\n &mut *(ptr as *mut _)\n\n }\n\n\n\n /// Returns a raw pointer to the wrapped value.\n\n #[inline]\n\n fn as_ptr(&self) -> *mut Self::CType {\n\n self as *const _ as *mut _\n\n }\n\n}\n", "file_path": "foreign-types-shared/src/lib.rs", "rank": 20, "score": 57038.19867581611 }, { "content": "fn parse_clone(input: ParseStream) -> parse::Result<Option<ExprPath>> {\n\n if input.peek(Token![fn]) && input.peek2(kw::clone) {\n\n input.call(parse_fn::<kw::clone>).map(Some)\n\n } else {\n\n Ok(None)\n\n }\n\n}\n", "file_path": "foreign-types-macros/src/parse.rs", "rank": 21, "score": 56872.972702810766 }, { "content": "/// A type implemented by wrappers over foreign types.\n\npub trait ForeignType: Sized {\n\n /// The raw C type.\n\n type CType;\n\n\n\n /// The type representing a reference to this type.\n\n type Ref: ForeignTypeRef<CType = Self::CType>;\n\n\n\n /// Constructs an instance of this type from its raw type.\n\n unsafe fn from_ptr(ptr: *mut Self::CType) -> Self;\n\n\n\n /// Returns a raw pointer to the wrapped value.\n\n fn as_ptr(&self) -> *mut Self::CType;\n\n}\n\n\n", "file_path": "foreign-types-shared/src/lib.rs", "rank": 22, "score": 
39840.0566519049 }, { "content": "//! // should be defined there.\n\n//! pub struct Foo(NonNull<foo_sys::FOO>);\n\n//!\n\n//! unsafe impl Sync for FooRef {}\n\n//! unsafe impl Send for FooRef {}\n\n//!\n\n//! unsafe impl Sync for Foo {}\n\n//! unsafe impl Send for Foo {}\n\n//!\n\n//! impl Drop for Foo {\n\n//! fn drop(&mut self) {\n\n//! unsafe { foo_sys::FOO_free(self.as_ptr()) }\n\n//! }\n\n//! }\n\n//!\n\n//! impl ForeignType for Foo {\n\n//! type CType = foo_sys::FOO;\n\n//! type Ref = FooRef;\n\n//!\n\n//! unsafe fn from_ptr(ptr: *mut foo_sys::FOO) -> Foo {\n", "file_path": "foreign-types/src/lib.rs", "rank": 23, "score": 4084.184410806843 }, { "content": "pub use foreign_types_shared::{ForeignType, ForeignTypeRef, Opaque};\n\n\n\n#[doc(hidden)]\n\npub mod export {\n\n pub use core::borrow::{Borrow, BorrowMut};\n\n pub use core::clone::Clone;\n\n pub use core::convert::{AsMut, AsRef};\n\n pub use core::marker::{PhantomData, Send, Sync};\n\n pub use core::ops::{Deref, DerefMut, Drop};\n\n pub use core::ptr::NonNull;\n\n\n\n #[cfg(feature = \"std\")]\n\n pub use std::borrow::ToOwned;\n\n}\n\n\n\n/// A macro to easily define wrappers for foreign types.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n", "file_path": "foreign-types/src/lib.rs", "rank": 24, "score": 4084.1193524601517 }, { "content": "//! unsafe { FooRef::from_ptr(foo_sys::BAR_get_foo(self.as_ptr())) }\n\n//! }\n\n//!\n\n//! fn foo_mut(&mut self) -> &mut FooRef {\n\n//! unsafe { FooRef::from_ptr_mut(foo_sys::BAR_get_foo(self.as_ptr())) }\n\n//! }\n\n//! }\n\n//!\n\n//! # fn main() {}\n\n//! ```\n\n#![no_std]\n\n#![warn(missing_docs)]\n\n#![doc(html_root_url = \"https://docs.rs/foreign-types/0.4\")]\n\n\n\n#[cfg(feature = \"std\")]\n\nextern crate std;\n\n\n\n#[doc(hidden)]\n\npub use foreign_types_macros::foreign_type_impl;\n\n#[doc(inline)]\n", "file_path": "foreign-types/src/lib.rs", "rank": 25, "score": 4083.911742878227 }, { "content": "//! }\n\n//!\n\n//! 
// add in Borrow, BorrowMut, AsRef, AsRefMut, Clone, ToOwned...\n\n//! ```\n\n//!\n\n//! The `foreign_type!` macro can generate this boilerplate for you:\n\n//!\n\n//! ```\n\n//! #[macro_use]\n\n//! extern crate foreign_types;\n\n//!\n\n//! mod foo_sys {\n\n//! pub enum FOO {}\n\n//!\n\n//! extern {\n\n//! pub fn FOO_free(foo: *mut FOO);\n\n//! pub fn FOO_duplicate(foo: *mut FOO) -> *mut FOO; // optional\n\n//! }\n\n//! }\n\n//!\n", "file_path": "foreign-types/src/lib.rs", "rank": 26, "score": 4082.5320107981515 }, { "content": "//! Foo(NonNull::new_unchecked(ptr))\n\n//! }\n\n//!\n\n//! fn as_ptr(&self) -> *mut foo_sys::FOO {\n\n//! self.0.as_ptr()\n\n//! }\n\n//! }\n\n//!\n\n//! impl Deref for Foo {\n\n//! type Target = FooRef;\n\n//!\n\n//! fn deref(&self) -> &FooRef {\n\n//! unsafe { FooRef::from_ptr(self.as_ptr()) }\n\n//! }\n\n//! }\n\n//!\n\n//! impl DerefMut for Foo {\n\n//! fn deref_mut(&mut self) -> &mut FooRef {\n\n//! unsafe { FooRef::from_ptr_mut(self.as_ptr()) }\n\n//! }\n", "file_path": "foreign-types/src/lib.rs", "rank": 27, "score": 4082.26816195682 }, { "content": "/// #[macro_use]\n\n/// extern crate foreign_types;\n\n///\n\n/// # mod openssl_sys { pub type SSL = (); pub unsafe fn SSL_free(_: *mut SSL) {} pub unsafe fn SSL_dup(x: *mut SSL) -> *mut SSL {x} }\n\n/// # mod foo_sys { pub type THING = (); pub unsafe fn THING_free(_: *mut THING) {} }\n\n/// foreign_type! 
{\n\n/// /// Documentation for the owned type.\n\n/// pub type Ssl: Sync + Send {\n\n/// type CType = openssl_sys::SSL;\n\n/// fn drop = openssl_sys::SSL_free;\n\n/// fn clone = openssl_sys::SSL_dup;\n\n/// }\n\n///\n\n/// /// This type immutably borrows other data and has a limited lifetime!\n\n/// pub type Thing<'a>: Send {\n\n/// type CType = foo_sys::THING;\n\n/// type PhantomData = &'a ();\n\n/// fn drop = foo_sys::THING_free;\n\n/// }\n\n/// }\n", "file_path": "foreign-types/src/lib.rs", "rank": 28, "score": 4082.1751484354836 }, { "content": "use foreign_types::foreign_type;\n\n\n\nmod foo_sys {\n\n pub enum FOO {}\n\n\n\n pub unsafe extern \"C\" fn foo_drop(_: *mut FOO) {}\n\n pub unsafe extern \"C\" fn foo_clone(ptr: *mut FOO) -> *mut FOO { ptr }\n\n}\n\n\n\nforeign_type! {\n\n pub type Foo<'a, T>: Sync + Send {\n\n type CType = foo_sys::FOO;\n\n type PhantomData = &'a T;\n\n fn drop = foo_sys::foo_drop;\n\n fn clone = foo_sys::foo_clone;\n\n }\n\n\n\n pub type Foo2 {\n\n type CType = foo_sys::FOO;\n\n fn drop = foo_sys::foo_drop;\n\n }\n\n}\n", "file_path": "foreign-types/tests/test.rs", "rank": 29, "score": 4082.072538177491 }, { "content": "//! pub enum FOO {}\n\n//!\n\n//! extern {\n\n//! pub fn FOO_free(foo: *mut FOO);\n\n//! }\n\n//! }\n\n//!\n\n//! // The borrowed type is a newtype wrapper around an `Opaque` value.\n\n//! //\n\n//! // `FooRef` values never exist; we instead create references to `FooRef`s\n\n//! // from raw C pointers.\n\n//! pub struct FooRef(Opaque);\n\n//!\n\n//! impl ForeignTypeRef for FooRef {\n\n//! type CType = foo_sys::FOO;\n\n//! }\n\n//!\n\n//! // The owned type is simply a newtype wrapper around the raw C type.\n\n//! //\n\n//! // It dereferences to `FooRef`, so methods that do not require ownership\n", "file_path": "foreign-types/src/lib.rs", "rank": 30, "score": 4081.7675719047584 }, { "content": "//! pub fn BAR_get_foo(bar: *mut BAR) -> *mut FOO;\n\n//! }\n\n//! }\n\n//!\n\n//! foreign_type! {\n\n//! /// A Foo.\n\n//! 
pub type Foo: Sync + Send {\n\n//! type CType = foo_sys::FOO;\n\n//! fn drop = foo_sys::FOO_free;\n\n//! }\n\n//!\n\n//! /// A Bar.\n\n//! pub type Bar: Sync + Send {\n\n//! type CType = foo_sys::BAR;\n\n//! fn drop = foo_sys::BAR_free;\n\n//! }\n\n//! }\n\n//!\n\n//! impl BarRef {\n\n//! fn foo(&self) -> &FooRef {\n", "file_path": "foreign-types/src/lib.rs", "rank": 31, "score": 4081.726182012133 }, { "content": "//! }\n\n//! ```\n\n//!\n\n//! The documentation for the C library states that `BAR_get_foo` returns a reference into the `BAR`\n\n//! passed to it, which translates into a reference in Rust. It also says that we're allowed to\n\n//! modify the `FOO`, so we'll define a pair of accessor methods, one immutable and one mutable:\n\n//!\n\n//! ```\n\n//! #[macro_use]\n\n//! extern crate foreign_types;\n\n//!\n\n//! use foreign_types::ForeignTypeRef;\n\n//!\n\n//! mod foo_sys {\n\n//! pub enum FOO {}\n\n//! pub enum BAR {}\n\n//!\n\n//! extern {\n\n//! pub fn FOO_free(foo: *mut FOO);\n\n//! pub fn BAR_free(bar: *mut BAR);\n", "file_path": "foreign-types/src/lib.rs", "rank": 32, "score": 4081.1223612659414 }, { "content": "//! A framework for Rust wrappers over C APIs.\n\n//!\n\n//! Ownership is as important in C as it is in Rust, but the semantics are often implicit. In\n\n//! particular, pointer-to-value is commonly used to pass C values both when transferring ownership\n\n//! or a borrow.\n\n//!\n\n//! This crate provides a framework to define a Rust wrapper over these kinds of raw C APIs in a way\n\n//! that allows ownership semantics to be expressed in an ergonomic manner. The framework takes a\n\n//! dual-type approach similar to APIs in the standard library such as `PathBuf`/`Path` or `String`/\n\n//! `str`. One type represents an owned value and references to the other represent borrowed\n\n//! values.\n\n//!\n\n//! # Examples\n\n//!\n\n//! ```\n\n//! use foreign_types::{ForeignType, ForeignTypeRef, Opaque};\n\n//! use std::ops::{Deref, DerefMut};\n\n//! 
use std::ptr::NonNull;\n\n//!\n\n//! mod foo_sys {\n", "file_path": "foreign-types/src/lib.rs", "rank": 33, "score": 4080.419920039923 }, { "content": "//! }\n\n//!\n\n//! # fn main() {}\n\n//! ```\n\n//!\n\n//! If `fn clone` is specified, then it must take `CType` as an argument and return a copy of it as `CType`.\n\n//! It will be used to implement `Clone`, and if the `std` Cargo feature is enabled, `ToOwned`.\n\n//!\n\n//! Say we then have a separate type in our C API that contains a `FOO`:\n\n//!\n\n//! ```\n\n//! mod foo_sys {\n\n//! pub enum FOO {}\n\n//! pub enum BAR {}\n\n//!\n\n//! extern {\n\n//! pub fn FOO_free(foo: *mut FOO);\n\n//! pub fn BAR_free(bar: *mut BAR);\n\n//! pub fn BAR_get_foo(bar: *mut BAR) -> *mut FOO;\n\n//! }\n", "file_path": "foreign-types/src/lib.rs", "rank": 34, "score": 4078.999038824446 }, { "content": "//! foreign_type! {\n\n//! /// A Foo.\n\n//! pub type Foo\n\n//! : Sync + Send // optional\n\n//! {\n\n//! type CType = foo_sys::FOO;\n\n//! fn drop = foo_sys::FOO_free;\n\n//! fn clone = foo_sys::FOO_duplicate; // optional\n\n//! }\n\n//!\n\n//! /// A Foo with generic parameters.\n\n//! pub type GenericFoo<T> {\n\n//! type CType = foo_sys::FOO;\n\n//! // This type is added as a `PhantomData` field to handle variance\n\n//! // of the parameters. However, it has no impact on trait impls:\n\n//! // `GenericFoo<T>` is always `Clone`, even if `T` is not.\n\n//! type PhantomData = T;\n\n//! fn drop = foo_sys::FOO_free;\n\n//! fn clone = foo_sys::FOO_duplicate;\n\n//! }\n", "file_path": "foreign-types/src/lib.rs", "rank": 35, "score": 4077.842297830122 }, { "content": "///\n\n/// # fn main() {}\n\n/// ```\n\n#[macro_export(local_inner_macros)]\n\nmacro_rules! 
foreign_type {\n\n ($($t:tt)*) => {\n\n $crate::foreign_type_impl!($crate $($t)*);\n\n };\n\n}\n", "file_path": "foreign-types/src/lib.rs", "rank": 36, "score": 4076.662608066833 }, { "content": "# foreign-types\n\n\n\n[![CircleCI](https://circleci.com/gh/sfackler/foreign-types.svg?style=shield)](https://circleci.com/gh/sfackler/foreign-types)\n\n\n\n[Documentation](https://docs.rs/foreign-types)\n\n\n\nA framework for Rust wrappers over C APIs.\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally\n\nsubmitted for inclusion in the work by you, as defined in the Apache-2.0\n\nlicense, shall be dual licensed as above, without any additional terms or\n\nconditions.\n", "file_path": "foreign-types/README.md", "rank": 37, "score": 4022.068078701636 }, { "content": " fn parse(input: ParseStream) -> parse::Result<Input> {\n\n let crate_ = input.parse()?;\n\n let mut types = vec![];\n\n while !input.is_empty() {\n\n types.push(input.parse()?);\n\n }\n\n\n\n Ok(Input { crate_, types })\n\n }\n\n}\n\n\n\npub struct ForeignType {\n\n pub attrs: Vec<Attribute>,\n\n pub visibility: Visibility,\n\n pub name: Ident,\n\n pub generics: Generics,\n\n pub oibits: Punctuated<Ident, Token![+]>,\n\n pub phantom_data: Option<Type>,\n\n pub ctype: Type,\n\n pub drop: ExprPath,\n", "file_path": "foreign-types-macros/src/parse.rs", "rank": 38, "score": 3929.6602841668205 }, { "content": " pub clone: Option<ExprPath>,\n\n}\n\n\n\nimpl Parse for ForeignType {\n\n fn parse(input: ParseStream) -> parse::Result<ForeignType> {\n\n let attrs = input.call(Attribute::parse_outer)?;\n\n let visibility = input.parse()?;\n\n input.parse::<Token![type]>()?;\n\n let name = 
input.parse()?;\n\n let generics = input.parse()?;\n\n let oibits = input.call(parse_oibits)?;\n\n let inner;\n\n braced!(inner in input);\n\n let ctype = inner.call(parse_type::<kw::CType>)?;\n\n let phantom_data = inner.call(parse_phantom_data)?;\n\n let drop = inner.call(parse_fn::<kw::drop>)?;\n\n let clone = inner.call(parse_clone)?;\n\n\n\n Ok(ForeignType {\n\n attrs,\n", "file_path": "foreign-types-macros/src/parse.rs", "rank": 39, "score": 3927.6637128068487 }, { "content": "use syn::parse::{self, Parse, ParseStream};\n\nuse syn::punctuated::Punctuated;\n\nuse syn::token;\n\nuse syn::{braced, Attribute, ExprPath, Generics, Ident, Path, Token, Type, Visibility};\n\n\n\nmod kw {\n\n syn::custom_keyword!(Sync);\n\n syn::custom_keyword!(Send);\n\n syn::custom_keyword!(PhantomData);\n\n syn::custom_keyword!(CType);\n\n syn::custom_keyword!(drop);\n\n syn::custom_keyword!(clone);\n\n}\n\n\n\npub struct Input {\n\n pub crate_: Path,\n\n pub types: Vec<ForeignType>,\n\n}\n\n\n\nimpl Parse for Input {\n", "file_path": "foreign-types-macros/src/parse.rs", "rank": 41, "score": 3925.477225631514 }, { "content": "//! 
Internal crate used by foreign-types\n\n\n\n#![no_std]\n\n#![warn(missing_docs)]\n\n#![doc(html_root_url=\"https://docs.rs/foreign-types-shared/0.2\")]\n\n\n\nuse core::cell::UnsafeCell;\n\nuse core::marker::PhantomData;\n\n\n\n/// An opaque type used to define `ForeignTypeRef` types.\n\n///\n\n/// A type implementing `ForeignTypeRef` should simply be a newtype wrapper around this type.\n\npub struct Opaque(UnsafeCell<PhantomData<*mut ()>>);\n\n\n\n/// A type implemented by wrappers over foreign types.\n", "file_path": "foreign-types-shared/src/lib.rs", "rank": 42, "score": 3924.296325501125 }, { "content": " visibility,\n\n name,\n\n generics,\n\n oibits,\n\n ctype,\n\n phantom_data,\n\n drop,\n\n clone,\n\n })\n\n }\n\n}\n\n\n", "file_path": "foreign-types-macros/src/parse.rs", "rank": 43, "score": 3923.6451930568724 }, { "content": "extern crate proc_macro;\n\n\n\nuse proc_macro::TokenStream;\n\nuse syn::parse_macro_input;\n\n\n\nuse crate::parse::Input;\n\n\n\nmod build;\n\nmod parse;\n\n\n\n#[proc_macro]\n", "file_path": "foreign-types-macros/src/lib.rs", "rank": 44, "score": 3919.8583166264316 }, { "content": "# foreign-types\n\n\n\n[![CircleCI](https://circleci.com/gh/sfackler/foreign-types.svg?style=shield)](https://circleci.com/gh/sfackler/foreign-types)\n\n\n\n[Documentation](https://docs.rs/foreign-types)\n\n\n\nA framework for Rust wrappers over C APIs.\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally\n\nsubmitted for inclusion in the work by you, as defined in the Apache-2.0\n\nlicense, shall be dual licensed as above, without any additional terms or\n\nconditions.\n", "file_path": "README.md", "rank": 45, "score": 
2991.0863095846685 } ]
Rust
batiskaf_derive/tests/sql_result.rs
yakov-bakhmatov/batiskaf
aa5bc46df1e5cab201159a3ef5910fb860a4f301
use rusqlite::types::ToSql; use rusqlite::{Connection, NO_PARAMS}; use batiskaf::SqlResult; use batiskaf_derive::*; #[test] fn test_sql_result() { #[derive(Debug, Eq, PartialEq, SqlResult)] struct Person { pub id: i64, pub name: String, pub age: Option<u32>, } let conn = Connection::open_in_memory().unwrap(); conn.execute( "create table person (id integer primary key, name text not null, age integer)", NO_PARAMS, ) .unwrap(); let mut stmt = conn .prepare("insert into person (name, age) values (:name, :age)") .unwrap(); stmt.execute_named(&[(":name", &"Bob" as &dyn ToSql), (":age", &30)]) .unwrap(); let mut select = conn.prepare("select id, name, age from person").unwrap(); let mut rows = select.query(NO_PARAMS).unwrap(); let row = rows.next().unwrap().unwrap(); let bob = Person::from_row(&row).unwrap(); assert_eq!( Person { id: 1, name: "Bob".to_string(), age: Some(30) }, bob ); } #[test] fn test_rename() { #[derive(Debug, Eq, PartialEq, SqlResult)] struct Person { pub id: i64, #[batiskaf(column = "full_name")] pub name: String, pub age: Option<u32>, } let conn = Connection::open_in_memory().unwrap(); conn.execute( "create table person (id integer primary key, full_name text not null, age integer)", NO_PARAMS, ) .unwrap(); let mut stmt = conn .prepare("insert into person (full_name, age) values (:name, :age)") .unwrap(); stmt.execute_named(&[(":name", &"Bob" as &dyn ToSql), (":age", &30)]) .unwrap(); let mut select = conn .prepare("select id, full_name, age from person") .unwrap(); let mut rows = select.query(NO_PARAMS).unwrap(); let row = rows.next().unwrap().unwrap(); let bob = Person::from_row(&row).unwrap(); assert_eq!( Person { id: 1, name: "Bob".to_string(), age: Some(30) }, bob ); } #[test] fn test_skip() { #[derive(Debug, Eq, PartialEq, SqlResult)] struct Person { pub id: i64, pub name: String, #[batiskaf(skip, default)] pub age: Option<u32>, } let conn = Connection::open_in_memory().unwrap(); conn.execute( "create table person (id integer primary key, name 
text not null, age integer)", NO_PARAMS, ) .unwrap(); let mut stmt = conn .prepare("insert into person (name, age) values (:name, :age)") .unwrap(); stmt.execute_named(&[(":name", &"Bob" as &dyn ToSql), (":age", &30)]) .unwrap(); let mut select = conn.prepare("select id, name, age from person").unwrap(); let mut rows = select.query(NO_PARAMS).unwrap(); let row = rows.next().unwrap().unwrap(); let bob = Person::from_row(&row).unwrap(); assert_eq!( Person { id: 1, name: "Bob".to_string(), age: None }, bob ); } #[test] fn test_default() { #[derive(Debug, Eq, PartialEq, SqlResult)] struct Person { pub id: i64, pub name: String, #[batiskaf(default)] pub age: Option<u32>, } let conn = Connection::open_in_memory().unwrap(); conn.execute( "create table person (id integer primary key, name text not null, age integer)", NO_PARAMS, ) .unwrap(); let mut stmt = conn .prepare("insert into person (name, age) values (:name, :age)") .unwrap(); stmt.execute_named(&[(":name", &"Bob" as &dyn ToSql), (":age", &30)]) .unwrap(); let mut select = conn.prepare("select id, name from person").unwrap(); let mut rows = select.query(NO_PARAMS).unwrap(); let row = rows.next().unwrap().unwrap(); let bob = Person::from_row(&row).unwrap(); assert_eq!( Person { id: 1, name: "Bob".to_string(), age: None }, bob ); } #[test] fn test_default_struct() { #[derive(Debug, Eq, PartialEq, SqlResult)] #[batiskaf(default)] struct Person { pub id: i64, pub name: String, pub age: Option<u32>, } let conn = Connection::open_in_memory().unwrap(); conn.execute( "create table person (id integer primary key, name text not null, age integer)", NO_PARAMS, ) .unwrap(); let mut stmt = conn .prepare("insert into person (name, age) values (:name, :age)") .unwrap(); stmt.execute_named(&[(":name", &"Bob" as &dyn ToSql), (":age", &30)]) .unwrap(); let mut select = conn.prepare("select id, name from person").unwrap(); let mut rows = select.query(NO_PARAMS).unwrap(); let row = rows.next().unwrap().unwrap(); let bob = 
Person::from_row(&row).unwrap(); assert_eq!( Person { id: 1, name: "Bob".to_string(), age: None }, bob ); } #[test] fn test_generic() { #[derive(SqlResult)] struct KeyValue<T> { pub key: String, pub value: T, } let conn = Connection::open_in_memory().unwrap(); conn.execute( "create table key_value (key text not null, value text not null)", NO_PARAMS, ) .unwrap(); let mut stmt = conn .prepare("insert into key_value (key, value) values (:key, :value)") .unwrap(); stmt.execute_named(&[(":key", &"name" as &dyn ToSql), (":value", &"Bob")]) .unwrap(); let mut select = conn.prepare("select key, value from key_value").unwrap(); let mut rows = select.query(NO_PARAMS).unwrap(); let row = rows.next().unwrap().unwrap(); let bob = KeyValue::<String>::from_row(&row).unwrap(); assert_eq!("name".to_string(), bob.key); assert_eq!("Bob".to_string(), bob.value); } #[test] fn test_generic_default() { #[derive(SqlResult)] #[batiskaf(default)] struct KeyValue<T> { pub key: String, pub value: T, } let conn = Connection::open_in_memory().unwrap(); conn.execute( "create table key_value (key text not null, value text not null)", NO_PARAMS, ) .unwrap(); let mut stmt = conn .prepare("insert into key_value (key, value) values (:key, :value)") .unwrap(); stmt.execute_named(&[(":key", &"name" as &dyn ToSql), (":value", &"Bob")]) .unwrap(); let mut select = conn.prepare("select key, value from key_value").unwrap(); let mut rows = select.query(NO_PARAMS).unwrap(); let row = rows.next().unwrap().unwrap(); let bob = KeyValue::<String>::from_row(&row).unwrap(); assert_eq!("name".to_string(), bob.key); assert_eq!("Bob".to_string(), bob.value); } #[test] fn test_custom_from_sql() { #[allow(unused)] #[derive(Debug, Eq, PartialEq)] enum Status { New, Completed, } impl ::rusqlite::types::FromSql for Status { fn column_result( value: ::rusqlite::types::ValueRef, ) -> ::rusqlite::types::FromSqlResult<Self> { match value { ::rusqlite::types::ValueRef::Integer(x) => match x { 1 => Ok(Status::New), 2 => 
Ok(Status::Completed), x => Err(::rusqlite::types::FromSqlError::OutOfRange(x)), }, _ => Err(::rusqlite::types::FromSqlError::InvalidType), } } } #[derive(Debug, Eq, PartialEq, SqlResult)] struct Order { pub id: i64, pub status: Status, } let conn = Connection::open_in_memory().unwrap(); conn.execute( "create table \"order\" (id integer primary key, status integer)", NO_PARAMS, ) .unwrap(); let mut stmt = conn .prepare("insert into \"order\" (status) values (:status)") .unwrap(); stmt.execute_named(&[(":status", &1)]).unwrap(); let mut select = conn.prepare("select id, status from \"order\"").unwrap(); let mut rows = select.query(NO_PARAMS).unwrap(); let row = rows.next().unwrap().unwrap(); let order = Order::from_row(&row).unwrap(); assert_eq!( Order { id: 1, status: Status::New }, order ); }
use rusqlite::types::ToSql; use rusqlite::{Connection, NO_PARAMS}; use batiskaf::SqlResult; use batiskaf_derive::*; #[test] fn test_sql_result() { #[derive(Debug, Eq, PartialEq, SqlResult)] struct Person { pub id: i64, pub name: String, pub age: Option<u32>,
(); let mut stmt = conn .prepare("insert into key_value (key, value) values (:key, :value)") .unwrap(); stmt.execute_named(&[(":key", &"name" as &dyn ToSql), (":value", &"Bob")]) .unwrap(); let mut select = conn.prepare("select key, value from key_value").unwrap(); let mut rows = select.query(NO_PARAMS).unwrap(); let row = rows.next().unwrap().unwrap(); let bob = KeyValue::<String>::from_row(&row).unwrap(); assert_eq!("name".to_string(), bob.key); assert_eq!("Bob".to_string(), bob.value); } #[test] fn test_generic_default() { #[derive(SqlResult)] #[batiskaf(default)] struct KeyValue<T> { pub key: String, pub value: T, } let conn = Connection::open_in_memory().unwrap(); conn.execute( "create table key_value (key text not null, value text not null)", NO_PARAMS, ) .unwrap(); let mut stmt = conn .prepare("insert into key_value (key, value) values (:key, :value)") .unwrap(); stmt.execute_named(&[(":key", &"name" as &dyn ToSql), (":value", &"Bob")]) .unwrap(); let mut select = conn.prepare("select key, value from key_value").unwrap(); let mut rows = select.query(NO_PARAMS).unwrap(); let row = rows.next().unwrap().unwrap(); let bob = KeyValue::<String>::from_row(&row).unwrap(); assert_eq!("name".to_string(), bob.key); assert_eq!("Bob".to_string(), bob.value); } #[test] fn test_custom_from_sql() { #[allow(unused)] #[derive(Debug, Eq, PartialEq)] enum Status { New, Completed, } impl ::rusqlite::types::FromSql for Status { fn column_result( value: ::rusqlite::types::ValueRef, ) -> ::rusqlite::types::FromSqlResult<Self> { match value { ::rusqlite::types::ValueRef::Integer(x) => match x { 1 => Ok(Status::New), 2 => Ok(Status::Completed), x => Err(::rusqlite::types::FromSqlError::OutOfRange(x)), }, _ => Err(::rusqlite::types::FromSqlError::InvalidType), } } } #[derive(Debug, Eq, PartialEq, SqlResult)] struct Order { pub id: i64, pub status: Status, } let conn = Connection::open_in_memory().unwrap(); conn.execute( "create table \"order\" (id integer primary key, status 
integer)", NO_PARAMS, ) .unwrap(); let mut stmt = conn .prepare("insert into \"order\" (status) values (:status)") .unwrap(); stmt.execute_named(&[(":status", &1)]).unwrap(); let mut select = conn.prepare("select id, status from \"order\"").unwrap(); let mut rows = select.query(NO_PARAMS).unwrap(); let row = rows.next().unwrap().unwrap(); let order = Order::from_row(&row).unwrap(); assert_eq!( Order { id: 1, status: Status::New }, order ); }
} let conn = Connection::open_in_memory().unwrap(); conn.execute( "create table person (id integer primary key, name text not null, age integer)", NO_PARAMS, ) .unwrap(); let mut stmt = conn .prepare("insert into person (name, age) values (:name, :age)") .unwrap(); stmt.execute_named(&[(":name", &"Bob" as &dyn ToSql), (":age", &30)]) .unwrap(); let mut select = conn.prepare("select id, name, age from person").unwrap(); let mut rows = select.query(NO_PARAMS).unwrap(); let row = rows.next().unwrap().unwrap(); let bob = Person::from_row(&row).unwrap(); assert_eq!( Person { id: 1, name: "Bob".to_string(), age: Some(30) }, bob ); } #[test] fn test_rename() { #[derive(Debug, Eq, PartialEq, SqlResult)] struct Person { pub id: i64, #[batiskaf(column = "full_name")] pub name: String, pub age: Option<u32>, } let conn = Connection::open_in_memory().unwrap(); conn.execute( "create table person (id integer primary key, full_name text not null, age integer)", NO_PARAMS, ) .unwrap(); let mut stmt = conn .prepare("insert into person (full_name, age) values (:name, :age)") .unwrap(); stmt.execute_named(&[(":name", &"Bob" as &dyn ToSql), (":age", &30)]) .unwrap(); let mut select = conn .prepare("select id, full_name, age from person") .unwrap(); let mut rows = select.query(NO_PARAMS).unwrap(); let row = rows.next().unwrap().unwrap(); let bob = Person::from_row(&row).unwrap(); assert_eq!( Person { id: 1, name: "Bob".to_string(), age: Some(30) }, bob ); } #[test] fn test_skip() { #[derive(Debug, Eq, PartialEq, SqlResult)] struct Person { pub id: i64, pub name: String, #[batiskaf(skip, default)] pub age: Option<u32>, } let conn = Connection::open_in_memory().unwrap(); conn.execute( "create table person (id integer primary key, name text not null, age integer)", NO_PARAMS, ) .unwrap(); let mut stmt = conn .prepare("insert into person (name, age) values (:name, :age)") .unwrap(); stmt.execute_named(&[(":name", &"Bob" as &dyn ToSql), (":age", &30)]) .unwrap(); let mut select = 
conn.prepare("select id, name, age from person").unwrap(); let mut rows = select.query(NO_PARAMS).unwrap(); let row = rows.next().unwrap().unwrap(); let bob = Person::from_row(&row).unwrap(); assert_eq!( Person { id: 1, name: "Bob".to_string(), age: None }, bob ); } #[test] fn test_default() { #[derive(Debug, Eq, PartialEq, SqlResult)] struct Person { pub id: i64, pub name: String, #[batiskaf(default)] pub age: Option<u32>, } let conn = Connection::open_in_memory().unwrap(); conn.execute( "create table person (id integer primary key, name text not null, age integer)", NO_PARAMS, ) .unwrap(); let mut stmt = conn .prepare("insert into person (name, age) values (:name, :age)") .unwrap(); stmt.execute_named(&[(":name", &"Bob" as &dyn ToSql), (":age", &30)]) .unwrap(); let mut select = conn.prepare("select id, name from person").unwrap(); let mut rows = select.query(NO_PARAMS).unwrap(); let row = rows.next().unwrap().unwrap(); let bob = Person::from_row(&row).unwrap(); assert_eq!( Person { id: 1, name: "Bob".to_string(), age: None }, bob ); } #[test] fn test_default_struct() { #[derive(Debug, Eq, PartialEq, SqlResult)] #[batiskaf(default)] struct Person { pub id: i64, pub name: String, pub age: Option<u32>, } let conn = Connection::open_in_memory().unwrap(); conn.execute( "create table person (id integer primary key, name text not null, age integer)", NO_PARAMS, ) .unwrap(); let mut stmt = conn .prepare("insert into person (name, age) values (:name, :age)") .unwrap(); stmt.execute_named(&[(":name", &"Bob" as &dyn ToSql), (":age", &30)]) .unwrap(); let mut select = conn.prepare("select id, name from person").unwrap(); let mut rows = select.query(NO_PARAMS).unwrap(); let row = rows.next().unwrap().unwrap(); let bob = Person::from_row(&row).unwrap(); assert_eq!( Person { id: 1, name: "Bob".to_string(), age: None }, bob ); } #[test] fn test_generic() { #[derive(SqlResult)] struct KeyValue<T> { pub key: String, pub value: T, } let conn = 
Connection::open_in_memory().unwrap(); conn.execute( "create table key_value (key text not null, value text not null)", NO_PARAMS, ) .unwrap
random
[ { "content": "#[derive(Debug, Eq, PartialEq)]\n\nstruct Person {\n\n pub id: i64,\n\n pub name: String,\n\n pub age: Option<u32>,\n\n}\n\n\n\nimpl SqlParam for Person {\n\n fn to_named_params(&self, stmt: &Statement) -> Vec<(&str, &dyn ToSql)> {\n\n let mut params = Vec::new();\n\n if let Ok(Some(_)) = stmt.parameter_index(\":id\") {\n\n params.push((\":id\", &self.id as &dyn ToSql));\n\n }\n\n if let Ok(Some(_)) = stmt.parameter_index(\":name\") {\n\n params.push((\":name\", &self.name as &dyn ToSql));\n\n }\n\n if let Ok(Some(_)) = stmt.parameter_index(\":age\") {\n\n params.push((\":age\", &self.age as &dyn ToSql));\n\n }\n\n params\n\n }\n", "file_path": "batiskaf/src/tests.rs", "rank": 0, "score": 118691.73079266635 }, { "content": "#[test]\n\nfn test_sql_result() {\n\n let conn = Connection::open_in_memory().unwrap();\n\n create_table(&conn);\n\n let mut stmt = conn\n\n .prepare(\"insert into person (name, age) values (:name, :age)\")\n\n .unwrap();\n\n stmt.execute_named(&[(\":name\", &\"Alice\" as &dyn ToSql), (\":age\", &33)])\n\n .unwrap();\n\n let mut select = conn.prepare(\"select id, name, age from person\").unwrap();\n\n let x = select.query_row(NO_PARAMS, Person::from_row).unwrap();\n\n assert_eq!(\n\n Person {\n\n id: 1,\n\n name: \"Alice\".to_string(),\n\n age: Some(33)\n\n },\n\n x\n\n );\n\n}\n\n\n", "file_path": "batiskaf/src/tests.rs", "rank": 1, "score": 83547.64361586946 }, { "content": "#[test]\n\nfn test_delete() {\n\n let conn = Connection::open_in_memory().unwrap();\n\n create_table(&conn);\n\n let mut bob = Person {\n\n id: 0,\n\n name: \"Bob\".to_string(),\n\n age: Some(30),\n\n };\n\n bob.id = conn.insert(\"person\", &bob).unwrap();\n\n let mut select = conn.prepare(\"select id, name, age from person\").unwrap();\n\n assert!(select.exists(NO_PARAMS).unwrap());\n\n conn.delete(\"person\", &bob).unwrap();\n\n assert!(!select.exists(NO_PARAMS).unwrap());\n\n}\n", "file_path": "batiskaf/src/tests.rs", "rank": 4, "score": 
63834.30130965031 }, { "content": "#[test]\n\nfn test_update() {\n\n let conn = Connection::open_in_memory().unwrap();\n\n create_table(&conn);\n\n let mut bob = Person {\n\n id: 0,\n\n name: \"Bob\".to_string(),\n\n age: Some(30),\n\n };\n\n bob.id = conn.insert(\"person\", &bob).unwrap();\n\n bob.name = \"Bob Smith\".to_string();\n\n bob.age = None;\n\n conn.update(\"person\", &bob).unwrap();\n\n let x: Person = conn\n\n .select_one(\"select id, name, age from person\", &[])\n\n .unwrap();\n\n assert_eq!(bob, x);\n\n}\n\n\n", "file_path": "batiskaf/src/tests.rs", "rank": 5, "score": 63834.30130965031 }, { "content": "#[test]\n\nfn test_insert() {\n\n let conn = Connection::open_in_memory().unwrap();\n\n create_table(&conn);\n\n let mut bob = Person {\n\n id: 0,\n\n name: \"Bob\".to_string(),\n\n age: Some(30),\n\n };\n\n bob.id = conn.insert(\"person\", &bob).unwrap();\n\n let x: Person = conn\n\n .select_one(\"select id, name, age from person\", &[])\n\n .unwrap();\n\n assert_eq!(bob, x);\n\n}\n\n\n", "file_path": "batiskaf/src/tests.rs", "rank": 6, "score": 63834.30130965031 }, { "content": "#[test]\n\nfn test_select_one() {\n\n let conn = Connection::open_in_memory().unwrap();\n\n create_table(&conn);\n\n let mut stmt = conn\n\n .prepare(\"insert into person (name, age) values (:name, :age)\")\n\n .unwrap();\n\n stmt.execute_named(&[(\":name\", &\"Alice\" as &dyn ToSql), (\":age\", &33)])\n\n .unwrap();\n\n let x: Person = conn\n\n .select_one(\"select id, name, age from person\", &[])\n\n .unwrap();\n\n assert_eq!(\n\n Person {\n\n id: 1,\n\n name: \"Alice\".to_string(),\n\n age: Some(33)\n\n },\n\n x\n\n );\n\n}\n\n\n", "file_path": "batiskaf/src/tests.rs", "rank": 7, "score": 62402.82025630846 }, { "content": "#[test]\n\nfn test_select_many() {\n\n let conn = Connection::open_in_memory().unwrap();\n\n create_table(&conn);\n\n let alice = Person {\n\n id: 1,\n\n name: \"Alice\".to_string(),\n\n age: Some(33),\n\n };\n\n let bob = Person {\n\n id: 2,\n\n 
name: \"Bob\".to_string(),\n\n age: Some(30),\n\n };\n\n let mut stmt = conn\n\n .prepare(\"insert into person (name, age) values (:name, :age)\")\n\n .unwrap();\n\n stmt.execute_named(&alice.to_named_params(&stmt)).unwrap();\n\n stmt.execute_named(&bob.to_named_params(&stmt)).unwrap();\n\n let people = conn\n\n .select_many(\"select id, name, age from person\", &[])\n\n .unwrap();\n\n assert_eq!(vec![alice, bob], people);\n\n}\n\n\n", "file_path": "batiskaf/src/tests.rs", "rank": 8, "score": 62402.82025630846 }, { "content": "#[test]\n\nfn test_sql_param() {\n\n let conn = Connection::open_in_memory().unwrap();\n\n create_table(&conn);\n\n let mut stmt = conn\n\n .prepare(\"insert into person (name, age) values (:name, :age)\")\n\n .unwrap();\n\n let person = Person {\n\n id: 0,\n\n name: \"Bob\".to_string(),\n\n age: Some(30),\n\n };\n\n let params = person.to_named_params(&stmt);\n\n stmt.execute_named(&params).unwrap();\n\n let mut select = conn.prepare(\"select id, name, age from person\").unwrap();\n\n let x = select\n\n .query_row(NO_PARAMS, |row| Ok((row.get(0)?, row.get(1)?, row.get(2)?)))\n\n .unwrap();\n\n assert_eq!((1, \"Bob\".to_string(), 30), x);\n\n}\n\n\n", "file_path": "batiskaf/src/tests.rs", "rank": 9, "score": 62402.82025630846 }, { "content": "#[test]\n\nfn test_sql_result_2() {\n\n let conn = Connection::open_in_memory().unwrap();\n\n create_table(&conn);\n\n let mut stmt = conn\n\n .prepare(\"insert into person (name, age) values (:name, :age)\")\n\n .unwrap();\n\n stmt.execute_named(&[(\":name\", &\"Alice\" as &dyn ToSql), (\":age\", &33)])\n\n .unwrap();\n\n let mut select = conn.prepare(\"select id, name from person\").unwrap();\n\n let x = select.query_row(NO_PARAMS, Person::from_row).unwrap();\n\n assert_eq!(\n\n Person {\n\n id: 1,\n\n name: \"Alice\".to_string(),\n\n age: None\n\n },\n\n x\n\n );\n\n}\n\n\n", "file_path": "batiskaf/src/tests.rs", "rank": 10, "score": 62402.82025630846 }, { "content": "#[test]\n\nfn test_generic() 
{\n\n #[derive(SqlParam)]\n\n struct KeyValue<T> {\n\n pub key: String,\n\n pub value: T,\n\n }\n\n let conn = Connection::open_in_memory().unwrap();\n\n conn.execute(\n\n \"create table key_value (key text not null, value text not null)\",\n\n NO_PARAMS,\n\n )\n\n .unwrap();\n\n let mut stmt = conn\n\n .prepare(\"insert into key_value (key, value) values (:key, :value)\")\n\n .unwrap();\n\n let kv = KeyValue::<String> {\n\n key: \"name\".to_string(),\n\n value: \"Bob\".to_string(),\n\n };\n\n let params = kv.to_named_params(&stmt);\n\n stmt.execute_named(&params).unwrap();\n\n let mut select = conn.prepare(\"select key, value from key_value\").unwrap();\n\n let x: (String, String) = select\n\n .query_row(NO_PARAMS, |row| Ok((row.get(0)?, row.get(1)?)))\n\n .unwrap();\n\n assert_eq!((\"name\".to_string(), \"Bob\".to_string()), x);\n\n}\n", "file_path": "batiskaf_derive/tests/sql_param.rs", "rank": 15, "score": 61053.71652778344 }, { "content": "#[test]\n\nfn test_simple() {\n\n #[allow(unused)]\n\n #[derive(SqlInsert)]\n\n struct Person {\n\n id: i64,\n\n name: String,\n\n age: Option<u32>,\n\n }\n\n let sql = Person::insert_statement(\"person\");\n\n assert_eq!(\n\n \"INSERT INTO person (id, name, age) VALUES (:id, :name, :age)\",\n\n sql\n\n );\n\n}\n\n\n", "file_path": "batiskaf_derive/tests/sql_insert.rs", "rank": 16, "score": 61053.71652778344 }, { "content": "#[test]\n\nfn test_complex_key() {\n\n #[allow(unused)]\n\n #[derive(SqlDelete)]\n\n struct FieldValue {\n\n #[batiskaf(primary_key)]\n\n x: i64,\n\n #[batiskaf(primary_key)]\n\n y: i64,\n\n value: f64,\n\n }\n\n let sql = FieldValue::delete_statement(\"height\");\n\n assert_eq!(\"DELETE FROM height WHERE x = :x AND y = :y\", sql);\n\n}\n", "file_path": "batiskaf_derive/tests/sql_delete.rs", "rank": 17, "score": 59779.8062460249 }, { "content": "#[test]\n\nfn test_custom_to_sql() {\n\n #[allow(unused)]\n\n enum Status {\n\n New,\n\n Completed,\n\n }\n\n impl ::rusqlite::types::ToSql for Status {\n\n fn 
to_sql(&self) -> ::rusqlite::Result<::rusqlite::types::ToSqlOutput> {\n\n match self {\n\n Status::New => Ok(::rusqlite::types::ToSqlOutput::Owned(\n\n ::rusqlite::types::Value::Integer(1),\n\n )),\n\n Status::Completed => Ok(::rusqlite::types::ToSqlOutput::Owned(\n\n ::rusqlite::types::Value::Integer(2),\n\n )),\n\n }\n\n }\n\n }\n\n #[derive(SqlParam)]\n\n struct Order {\n", "file_path": "batiskaf_derive/tests/sql_param.rs", "rank": 20, "score": 59779.8062460249 }, { "content": "#[test]\n\nfn test_simple_key() {\n\n #[allow(unused)]\n\n #[derive(SqlDelete)]\n\n struct Person {\n\n #[batiskaf(primary_key, autogenerated)]\n\n id: i64,\n\n name: String,\n\n age: Option<u32>,\n\n }\n\n let sql = Person::delete_statement(\"person\");\n\n assert_eq!(\"DELETE FROM person WHERE id = :id\", sql);\n\n}\n\n\n", "file_path": "batiskaf_derive/tests/sql_delete.rs", "rank": 21, "score": 59779.8062460249 }, { "content": "#[test]\n\nfn test_sql_param() {\n\n #[derive(SqlParam)]\n\n struct Person {\n\n pub id: i64,\n\n pub name: String,\n\n pub age: Option<u32>,\n\n }\n\n let conn = Connection::open_in_memory().unwrap();\n\n conn.execute(\n\n \"create table person (id integer primary key, name text not null, age integer)\",\n\n NO_PARAMS,\n\n )\n\n .unwrap();\n\n let mut stmt = conn\n\n .prepare(\"insert into person (name, age) values (:name, :age)\")\n\n .unwrap();\n\n let person = Person {\n\n id: 0,\n\n name: \"Bob\".to_string(),\n\n age: Some(30),\n\n };\n\n let params = person.to_named_params(&stmt);\n\n stmt.execute_named(&params).unwrap();\n\n let mut select = conn.prepare(\"select id, name, age from person\").unwrap();\n\n let x = select\n\n .query_row(NO_PARAMS, |row| Ok((row.get(0)?, row.get(1)?, row.get(2)?)))\n\n .unwrap();\n\n assert_eq!((1, \"Bob\".to_string(), 30), x);\n\n}\n\n\n", "file_path": "batiskaf_derive/tests/sql_param.rs", "rank": 22, "score": 59779.8062460249 }, { "content": "#[test]\n\nfn test_simple_key() {\n\n #[allow(unused)]\n\n 
#[derive(SqlUpdate)]\n\n struct Person {\n\n #[batiskaf(primary_key, autogenerated)]\n\n id: i64,\n\n name: String,\n\n age: Option<u32>,\n\n }\n\n let sql = Person::update_statement(\"person\");\n\n assert_eq!(\n\n \"UPDATE person SET name = :name, age = :age WHERE id = :id\",\n\n sql\n\n );\n\n}\n\n\n", "file_path": "batiskaf_derive/tests/sql_update.rs", "rank": 23, "score": 59779.8062460249 }, { "content": "#[test]\n\nfn test_skip_param() {\n\n #[derive(SqlParam)]\n\n struct Person {\n\n pub id: i64,\n\n pub name: String,\n\n #[batiskaf(skip)]\n\n pub age: Option<u32>,\n\n }\n\n let conn = Connection::open_in_memory().unwrap();\n\n conn.execute(\n\n \"create table person (id integer primary key, name text not null, age integer)\",\n\n NO_PARAMS,\n\n )\n\n .unwrap();\n\n let mut stmt = conn\n\n .prepare(\"insert into person (name, age) values (:name, :age)\")\n\n .unwrap();\n\n let person = Person {\n\n id: 0,\n\n name: \"Bob\".to_string(),\n", "file_path": "batiskaf_derive/tests/sql_param.rs", "rank": 24, "score": 59779.8062460249 }, { "content": "#[test]\n\nfn test_skip_autogenerated() {\n\n #[allow(unused)]\n\n #[derive(SqlInsert)]\n\n struct Person {\n\n #[batiskaf(autogenerated)]\n\n id: i64,\n\n name: String,\n\n age: Option<u32>,\n\n }\n\n let sql = Person::insert_statement(\"person\");\n\n assert_eq!(\"INSERT INTO person (name, age) VALUES (:name, :age)\", sql);\n\n}\n\n\n", "file_path": "batiskaf_derive/tests/sql_insert.rs", "rank": 25, "score": 59779.8062460249 }, { "content": "#[test]\n\nfn test_rename_column() {\n\n #[allow(unused)]\n\n #[derive(SqlInsert)]\n\n struct Request {\n\n id: i64,\n\n #[batiskaf(column = \"type\")]\n\n request_type: String,\n\n data: String,\n\n }\n\n let sql = Request::insert_statement(\"request\");\n\n assert_eq!(\n\n \"INSERT INTO request (id, type, data) VALUES (:id, :type, :data)\",\n\n sql\n\n );\n\n}\n", "file_path": "batiskaf_derive/tests/sql_insert.rs", "rank": 26, "score": 59779.8062460249 }, { "content": 
"#[test]\n\nfn test_skip_column() {\n\n #[allow(unused)]\n\n #[derive(SqlInsert)]\n\n struct Person {\n\n id: i64,\n\n name: String,\n\n #[batiskaf(skip)]\n\n age: Option<u32>,\n\n }\n\n let sql = Person::insert_statement(\"person\");\n\n assert_eq!(\"INSERT INTO person (id, name) VALUES (:id, :name)\", sql);\n\n}\n\n\n", "file_path": "batiskaf_derive/tests/sql_insert.rs", "rank": 27, "score": 59779.8062460249 }, { "content": "#[test]\n\nfn test_complex_key() {\n\n #[allow(unused)]\n\n #[derive(SqlUpdate)]\n\n struct FieldValue {\n\n #[batiskaf(primary_key)]\n\n x: i64,\n\n #[batiskaf(primary_key)]\n\n y: i64,\n\n value: f64,\n\n }\n\n let sql = FieldValue::update_statement(\"height\");\n\n assert_eq!(\n\n \"UPDATE height SET value = :value WHERE x = :x AND y = :y\",\n\n sql\n\n );\n\n}\n", "file_path": "batiskaf_derive/tests/sql_update.rs", "rank": 28, "score": 59779.8062460249 }, { "content": "#[proc_macro_derive(SqlInsert, attributes(batiskaf))]\n\npub fn derive_sql_insert(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n sql_insert::derive(input).into()\n\n}\n\n\n", "file_path": "batiskaf_derive/src/lib.rs", "rank": 29, "score": 58326.84941634798 }, { "content": "#[proc_macro_derive(SqlResult, attributes(batiskaf))]\n\npub fn derive_sql_result(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n sql_result::derive(input).into()\n\n}\n\n\n", "file_path": "batiskaf_derive/src/lib.rs", "rank": 30, "score": 58326.84941634798 }, { "content": "#[proc_macro_derive(SqlUpdate, attributes(batiskaf))]\n\npub fn derive_sql_update(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n sql_update::derive(input).into()\n\n}\n\n\n", "file_path": "batiskaf_derive/src/lib.rs", "rank": 31, "score": 58326.84941634798 }, { "content": "#[proc_macro_derive(SqlParam, attributes(batiskaf))]\n\npub fn derive_sql_param(input: TokenStream) -> 
TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n sql_param::derive(input).into()\n\n}\n\n\n", "file_path": "batiskaf_derive/src/lib.rs", "rank": 32, "score": 58326.84941634798 }, { "content": "#[proc_macro_derive(SqlDelete, attributes(batiskaf))]\n\npub fn derive_sql_delete(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n sql_delete::derive(input).into()\n\n}\n", "file_path": "batiskaf_derive/src/lib.rs", "rank": 33, "score": 58326.84941634798 }, { "content": "pub trait SqlResult: Sized {\n\n fn from_row(row: &Row) -> rusqlite::Result<Self>;\n\n}\n\n\n", "file_path": "batiskaf/src/lib.rs", "rank": 34, "score": 55528.2670468238 }, { "content": "fn create_table(conn: &Connection) {\n\n conn.execute(\n\n \"create table person(id integer primary key, name text not null, age integer)\",\n\n NO_PARAMS,\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "batiskaf/src/tests.rs", "rank": 35, "score": 51957.97828814396 }, { "content": "pub trait SqlInsert {\n\n fn insert_statement(table: &str) -> String;\n\n}\n\n\n", "file_path": "batiskaf/src/lib.rs", "rank": 36, "score": 41688.76621976659 }, { "content": "pub trait SqlDelete {\n\n fn delete_statement(table: &str) -> String;\n\n}\n\n\n", "file_path": "batiskaf/src/lib.rs", "rank": 37, "score": 41688.76621976659 }, { "content": "pub trait SqlParam {\n\n fn to_named_params(&self, stmt: &Statement) -> Vec<(&str, &dyn ToSql)>;\n\n}\n\n\n", "file_path": "batiskaf/src/lib.rs", "rank": 38, "score": 41688.76621976659 }, { "content": "pub trait SqlUpdate {\n\n fn update_statement(table: &str) -> String;\n\n}\n\n\n", "file_path": "batiskaf/src/lib.rs", "rank": 39, "score": 41688.76621976659 }, { "content": "pub trait BatiskafConnection {\n\n fn select_one<T: SqlResult>(\n\n &self,\n\n sql: &str,\n\n params: &[(&str, &dyn ToSql)],\n\n ) -> rusqlite::Result<T>;\n\n fn select_many<T: SqlResult>(\n\n &self,\n\n sql: &str,\n\n params: &[(&str, &dyn ToSql)],\n\n ) -> 
rusqlite::Result<Vec<T>>;\n\n fn insert<T: SqlInsert + SqlParam>(&self, table: &str, value: &T) -> rusqlite::Result<i64>;\n\n fn update<T: SqlUpdate + SqlParam>(&self, table: &str, value: &T) -> rusqlite::Result<usize>;\n\n fn delete<T: SqlDelete + SqlParam>(&self, table: &str, value: &T) -> rusqlite::Result<usize>;\n\n}\n\n\n\nimpl BatiskafConnection for Connection {\n\n fn select_one<T: SqlResult>(\n\n &self,\n\n sql: &str,\n", "file_path": "batiskaf/src/lib.rs", "rank": 40, "score": 41688.76621976659 }, { "content": "fn attributes_to_meta(attrs: &[Attribute]) -> Vec<Meta> {\n\n attrs\n\n .iter()\n\n .filter_map(|a| a.parse_meta().ok())\n\n .filter_map(|m| {\n\n if let Meta::List(meta) = m {\n\n Some(meta)\n\n } else {\n\n None\n\n }\n\n })\n\n .filter(|m| m.ident.to_string().as_str() == \"batiskaf\")\n\n .flat_map(|m| m.nested)\n\n .filter_map(|m| {\n\n if let NestedMeta::Meta(meta) = m {\n\n Some(meta)\n\n } else {\n\n None\n\n }\n\n })\n\n .collect()\n\n}\n", "file_path": "batiskaf_derive/src/column.rs", "rank": 41, "score": 27790.46441377874 }, { "content": "fn add_trait_bounds(mut generics: Generics) -> Generics {\n\n for param in &mut generics.params {\n\n if let GenericParam::Type(ref mut type_param) = *param {\n\n type_param\n\n .bounds\n\n .push(parse_quote!(::rusqlite::types::ToSql));\n\n }\n\n }\n\n generics\n\n}\n", "file_path": "batiskaf_derive/src/sql_param.rs", "rank": 42, "score": 26667.72265788229 }, { "content": "fn add_trait_bounds(mut generics: Generics) -> Generics {\n\n for param in &mut generics.params {\n\n if let GenericParam::Type(ref mut type_param) = *param {\n\n type_param\n\n .bounds\n\n .push(parse_quote!(::rusqlite::types::FromSql));\n\n type_param\n\n .bounds\n\n .push(parse_quote!(::std::default::Default));\n\n }\n\n }\n\n generics\n\n}\n", "file_path": "batiskaf_derive/src/sql_result.rs", "rank": 43, "score": 26667.72265788229 }, { "content": " format!(\n\n \"update {} set name = :name, age = :age where id = :id\",\n\n table\n\n 
)\n\n }\n\n}\n\n\n\nimpl SqlDelete for Person {\n\n fn delete_statement(table: &str) -> String {\n\n format!(\"delete from {} where id = :id\", table)\n\n }\n\n}\n\n\n", "file_path": "batiskaf/src/tests.rs", "rank": 44, "score": 23541.962253860413 }, { "content": "}\n\n\n\nimpl SqlResult for Person {\n\n fn from_row(row: &Row) -> rusqlite::Result<Self> {\n\n Ok(Person {\n\n id: row.get(\"id\")?,\n\n name: row.get(\"name\")?,\n\n age: row.get(\"age\").unwrap_or_default(),\n\n })\n\n }\n\n}\n\n\n\nimpl SqlInsert for Person {\n\n fn insert_statement(table: &str) -> String {\n\n format!(\"insert into {} (name, age) values (:name, :age)\", table)\n\n }\n\n}\n\n\n\nimpl SqlUpdate for Person {\n\n fn update_statement(table: &str) -> String {\n", "file_path": "batiskaf/src/tests.rs", "rank": 45, "score": 23541.654391348206 }, { "content": "use rusqlite::{Connection, NO_PARAMS};\n\n\n\nuse super::*;\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n", "file_path": "batiskaf/src/tests.rs", "rank": 46, "score": 23536.139582815526 }, { "content": " age: Some(30),\n\n };\n\n let params = person.to_named_params(&stmt);\n\n stmt.execute_named(&params).unwrap();\n\n let mut select = conn.prepare(\"select id, name, age from person\").unwrap();\n\n let x: (i64, String, Option<u32>) = select\n\n .query_row(NO_PARAMS, |row| Ok((row.get(0)?, row.get(1)?, row.get(2)?)))\n\n .unwrap();\n\n assert_eq!((1, \"Bob\".to_string(), None), x);\n\n}\n\n\n", "file_path": "batiskaf_derive/tests/sql_param.rs", "rank": 47, "score": 21828.38704178874 }, { "content": " pub id: i64,\n\n pub status: Status,\n\n }\n\n let conn = Connection::open_in_memory().unwrap();\n\n conn.execute(\n\n \"create table \\\"order\\\" (id integer primary key, status integer)\",\n\n NO_PARAMS,\n\n )\n\n .unwrap();\n\n let mut stmt = conn\n\n .prepare(\"insert into \\\"order\\\" (status) values (:status)\")\n\n .unwrap();\n\n let order = Order {\n\n id: 0,\n\n status: Status::New,\n\n };\n\n let params = 
order.to_named_params(&stmt);\n\n stmt.execute_named(&params).unwrap();\n\n let mut select = conn.prepare(\"select id, status from \\\"order\\\"\").unwrap();\n\n let x = select\n\n .query_row(NO_PARAMS, |row| Ok((row.get(0)?, row.get(1)?)))\n\n .unwrap();\n\n assert_eq!((1, 1), x);\n\n}\n\n\n", "file_path": "batiskaf_derive/tests/sql_param.rs", "rank": 54, "score": 21823.03165191188 }, { "content": "use batiskaf::SqlUpdate;\n\nuse batiskaf_derive::*;\n\n\n\n#[test]\n", "file_path": "batiskaf_derive/tests/sql_update.rs", "rank": 55, "score": 21820.960857403046 }, { "content": "use batiskaf::SqlDelete;\n\nuse batiskaf_derive::*;\n\n\n\n#[test]\n", "file_path": "batiskaf_derive/tests/sql_delete.rs", "rank": 56, "score": 21820.960857403046 }, { "content": "use batiskaf::SqlInsert;\n\nuse batiskaf_derive::*;\n\n\n\n#[test]\n", "file_path": "batiskaf_derive/tests/sql_insert.rs", "rank": 57, "score": 21820.960857403046 }, { "content": "use rusqlite::{Connection, NO_PARAMS};\n\n\n\nuse batiskaf::SqlParam;\n\nuse batiskaf_derive::*;\n\n\n\n#[test]\n", "file_path": "batiskaf_derive/tests/sql_param.rs", "rank": 58, "score": 21820.878827000026 }, { "content": "## batiskaf_derive\n\n\n\nБиблиотека *batiskaf* сама по себе довольно бесполезная. 
Эту ситуацию исправляет библиотека *batiskaf_derive*, избавляя программиста от кучи бойлерплейта при помощи магии процедурных макросов.\n\n\n\nБиблиотека умеет выводить реализацию трейтов *batiskaf* для структур с именованными полями.\n\n\n\n### Пример использования\n\n\n\n```rust\n\nuse rusqlite::{Connection, NO_PARAMS};\n\n\n\nuse batiskaf::*;\n\n\n\n#[derive(Debug, SqlParam, SqlResult, SqlInsert, SqlUpdate, SqlDelete)]\n\nstruct Person {\n\n #[batiskaf(primary_key, autogenerated)]\n\n pub id: i64,\n\n #[batiskaf(column = \"full_name\")]\n\n pub name: String,\n\n pub age: Option<u32>,\n\n #[batiskaf(skip, default)]\n\n pub hobby: Option<String>,\n\n}\n\n\n\nfn main() {\n\n let conn = Connection::open_in_memory().unwrap();\n\n conn.execute(\n\n \"create table person (\\\n\n id integer primary key, \\\n\n full_name text not null, \\\n\n age integer\\\n\n )\",\n\n NO_PARAMS,\n\n )\n\n .unwrap();\n\n\n\n let mut bob = Person {\n\n id: 0,\n\n name: \"Bob\".to_string(),\n\n age: Some(30),\n\n hobby: Some(\"wood carving\".to_string()),\n\n };\n\n\n\n bob.id = conn.insert(\"person\", &bob).unwrap();\n\n\n\n bob.age = Some(31);\n\n conn.update(\"person\", &bob).unwrap();\n\n\n\n let stored_bob: Person = conn\n\n .select_one(\"select id, full_name, age from person where id = :id\", &[(\":id\", &bob.id)])\n\n .unwrap();\n\n println!(\"{:?}\", stored_bob); // Person { id: 1, name: \"Bob\", age: Some(31), hobby: None }\n\n\n\n conn.delete(\"person\", &bob).unwrap();\n\n\n\n let nobody: Vec<Person> = conn\n\n .select_many(\"select id, full_name, age from person\", &[])\n\n .unwrap();\n\n println!(\"{}\", nobody.len()); // 0\n\n}\n\n```\n\n\n\n### Арибуты\n\n\n", "file_path": "README.md", "rank": 60, "score": 15.697851317350853 }, { "content": "# Batiskaf\n\n\n\n*Batiskaf* - библиотека, предназначенная для избавления от бойлерплейта при работе с [*rusqlite*](https://crates.io/crates/rusqlite). 
Идея заключается в том, что программист пишет SQL-код, а преобразования структур в параметры для запросов и результаты запросов в структуры выполняет магия библиотеки. Идея честно позаимствована из замечательного фреймворка [MyBatis](http://www.mybatis.org/mybatis-3/).\n\n\n\n\n\n## Подключение\n\n\n\nДобавить в `Cargo.toml`:\n\n```toml\n\nbatiskaf = { git = \"https://github.com/yakov-bakhmatov/batiskaf\", features = [\"derive\"] }\n\n```\n\n*Batiskaf* зависит от [*rusqlite*](https://crates.io/crates/rusqlite) версии 0.17\n\n\n\n## Пример использования\n\n\n\n```rust\n\nuse rusqlite::{Connection, NO_PARAMS};\n\n\n\nuse batiskaf::*;\n\n\n\n#[derive(Debug, SqlParam, SqlResult)]\n\nstruct Person {\n\n pub id: i64,\n\n pub name: String,\n\n pub age: Option<u32>,\n\n}\n\n\n\nfn main() {\n\n let conn = Connection::open_in_memory().unwrap();\n\n conn.execute(\n\n \"create table person (id integer primary key, name text not null, age integer)\",\n\n NO_PARAMS,\n\n )\n\n .unwrap();\n\n\n\n let person = Person {\n\n id: 0,\n\n name: \"Bob\".to_string(),\n\n age: Some(30),\n\n };\n\n\n\n let mut stmt = conn\n\n .prepare(\"insert into person (name, age) values (:name, :age)\")\n\n .unwrap();\n\n let params = person.to_named_params(&stmt);\n\n stmt.execute_named(&params).unwrap();\n\n\n\n let mut select = conn.prepare(\"select id, name, age from person\").unwrap();\n\n let mut rows = select.query(NO_PARAMS).unwrap();\n\n let row = rows.next().unwrap().unwrap();\n\n let bob = Person::from_row(&row).unwrap();\n\n println!(\"{:?}\", bob); // Person { id: 1, name: \"Bob\", age: Some(30) }\n\n}\n\n```\n\n\n\n## Краткое описание batiskaf\n\n\n", "file_path": "README.md", "rank": 61, "score": 14.085941590528009 }, { "content": "use std::collections::{HashMap, HashSet};\n\nuse syn::{self, Attribute, Data, Field, Fields, Lit, Meta, NestedMeta};\n\n\n\n#[derive(Debug, Default)]\n\npub(crate) struct Attributes {\n\n pub word: HashSet<String>,\n\n pub name_value: HashMap<String, 
String>,\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub(crate) struct Column {\n\n pub attrs: Attributes,\n\n}\n\n\n\nimpl Column {\n\n pub fn name(&self) -> String {\n\n match self.attrs.name_value.get(\"column\") {\n\n Some(name) => name.clone(),\n\n None => \"\".to_string(),\n\n }\n", "file_path": "batiskaf_derive/src/column.rs", "rank": 62, "score": 9.941853187549155 }, { "content": " Some(ref ident) => ident.to_string(),\n\n None => \"\".to_string(), // TODO: error\n\n };\n\n attrs.name_value.insert(\"column\".to_string(), name);\n\n }\n\n Column { attrs }\n\n }\n\n}\n\n\n\npub(crate) fn columns(data: &Data) -> Vec<Column> {\n\n match *data {\n\n Data::Struct(ref data) => match data.fields {\n\n Fields::Named(ref fields) => fields.named.iter().map(|f| f.into()).collect(),\n\n _ => unimplemented!(),\n\n },\n\n _ => unimplemented!(),\n\n }\n\n}\n\n\n\npub(crate) fn columns_with_fields<'a>(data: &'a Data, default: bool) -> Vec<(Column, &'a Field)> {\n", "file_path": "batiskaf_derive/src/column.rs", "rank": 63, "score": 7.824245656645051 }, { "content": "use proc_macro2::TokenStream;\n\nuse quote::quote;\n\nuse syn::DeriveInput;\n\n\n\nuse crate::column::{columns, Column};\n\n\n\npub(crate) fn derive(input: DeriveInput) -> TokenStream {\n\n let name = input.ident;\n\n let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();\n\n let cs = columns(&input.data);\n\n let cs: Vec<Column> = cs\n\n .into_iter()\n\n .filter(|c| !c.skip())\n\n .filter(|c| !c.autogenerated())\n\n .collect();\n\n let names: Vec<String> = cs.into_iter().map(|c| c.name()).collect();\n\n let params: Vec<String> = names.iter().map(|c| format!(\":{}\", c)).collect();\n\n let sql = format!(\n\n \"INSERT INTO {{}} ({}) VALUES ({})\",\n\n names.join(\", \"),\n", "file_path": "batiskaf_derive/src/sql_insert.rs", "rank": 64, "score": 7.332099175377617 }, { "content": "use proc_macro2::TokenStream;\n\nuse quote::quote;\n\nuse syn::DeriveInput;\n\n\n\nuse crate::column::{columns, 
Column};\n\n\n\npub(crate) fn derive(input: DeriveInput) -> TokenStream {\n\n let name = input.ident;\n\n let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();\n\n let cs = columns(&input.data);\n\n let cs: Vec<Column> = cs.into_iter().filter(|c| !c.skip()).collect();\n\n let values: Vec<String> = cs\n\n .iter()\n\n .filter(|c| !c.primary_key())\n\n .map(|c| c.name())\n\n .map(|n| format!(\"{} = :{}\", n, n))\n\n .collect();\n\n let keys: Vec<String> = cs\n\n .iter()\n\n .filter(|c| c.primary_key())\n", "file_path": "batiskaf_derive/src/sql_update.rs", "rank": 65, "score": 6.605294768968211 }, { "content": "use rusqlite::types::ToSql;\n\nuse rusqlite::{self, Connection, Row, Statement};\n\n\n\n// Реэкспорт derive-макросов из batiskaf_derive\n\n#[cfg(feature = \"batiskaf_derive\")]\n\n#[allow(unused_imports)]\n\n#[macro_use]\n\nextern crate batiskaf_derive;\n\n#[cfg(feature = \"batiskaf_derive\")]\n\n#[doc(hidden)]\n\npub use batiskaf_derive::*;\n\n\n\n#[cfg(test)]\n\nmod tests;\n\n\n", "file_path": "batiskaf/src/lib.rs", "rank": 66, "score": 6.5219129864395615 }, { "content": "use proc_macro2::TokenStream;\n\nuse quote::quote;\n\nuse syn::DeriveInput;\n\n\n\nuse crate::column::{columns, Column};\n\n\n\npub(crate) fn derive(input: DeriveInput) -> TokenStream {\n\n let name = input.ident;\n\n let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();\n\n let cs = columns(&input.data);\n\n let cs: Vec<Column> = cs.into_iter().filter(|c| !c.skip()).collect();\n\n let keys: Vec<String> = cs\n\n .iter()\n\n .filter(|c| c.primary_key())\n\n .map(|c| c.name())\n\n .map(|n| format!(\"{} = :{}\", n, n))\n\n .collect();\n\n if keys.is_empty() {\n\n return syn::Error::new(\n\n name.span(),\n", "file_path": "batiskaf_derive/src/sql_delete.rs", "rank": 67, "score": 6.4246294130302255 }, { "content": "### trait SqlParam\n\n\n\n```rust\n\nfn to_named_params(&self, stmt: &Statement) -> Vec<(&str, &dyn ToSql)>;\n\n```\n\nФункция 
предназначена для преобразования структуры в именованные параметры SQL-запроса.\n\n\n\n\n\n### trait SqlResult\n\n\n\n```rust\n\nfn from_row(row: &Row) -> rusqlite::Result<Self>;\n\n```\n\nФункция предназначена для преобразования строки результата запроса в структуру.\n\n\n\n\n\n### trait SqlInsert\n\n\n\n```rust\n\nfn insert_statement(table: &str) -> String;\n\n```\n\nВспомогательный trait для уменьшения бойлерплейта; его единственная функция предназначена для генерации SQL-запроса INSERT для создания записи в указанной таблице.\n\n\n\n\n\n### trait SqlUpdate\n\n\n\n```rust\n\nfn update_statement(table: &str) -> String;\n\n```\n\nФункция генерирует SQL-запрос обновления записи в указанной таблице.\n\n\n\n\n\n### trait SqlDelete\n\n\n\n```rust\n\nfn delete_statement(table: &str) -> String;\n\n```\n\nФункция возвращает SQL-запрос удаления записи из указанной таблицы.\n\n\n\n\n", "file_path": "README.md", "rank": 68, "score": 6.055043760446294 }, { "content": " let mut xs: Vec<(Column, &'a Field)> = match *data {\n\n Data::Struct(ref data) => match data.fields {\n\n Fields::Named(ref fields) => fields.named.iter().map(|f| (f.into(), f)).collect(),\n\n _ => unimplemented!(),\n\n },\n\n _ => unimplemented!(),\n\n };\n\n if default {\n\n for x in xs.iter_mut() {\n\n x.0.attrs.word.insert(\"default\".to_string());\n\n }\n\n }\n\n xs\n\n}\n\n\n\npub(crate) fn parse_attributes(attrs: &[Attribute]) -> Attributes {\n\n let mut a = Attributes::default();\n\n let meta: Vec<Meta> = attributes_to_meta(attrs);\n\n for m in meta {\n\n match m {\n", "file_path": "batiskaf_derive/src/column.rs", "rank": 69, "score": 5.545949752605161 }, { "content": " Meta::Word(ident) => {\n\n a.word.insert(ident.to_string());\n\n }\n\n Meta::NameValue(name_value) => {\n\n if let Lit::Str(lit) = name_value.lit {\n\n a.name_value\n\n .insert(name_value.ident.to_string(), lit.value());\n\n }\n\n }\n\n _ => (),\n\n }\n\n }\n\n a\n\n}\n\n\n", "file_path": "batiskaf_derive/src/column.rs", "rank": 70, 
"score": 5.475648636364225 }, { "content": "use proc_macro2::TokenStream;\n\nuse quote::{quote, quote_spanned};\n\nuse syn::spanned::Spanned;\n\nuse syn::{parse_quote, DeriveInput, GenericParam, Generics};\n\n\n\nuse crate::column::{columns_with_fields, parse_attributes};\n\n\n\npub(crate) fn derive(input: DeriveInput) -> TokenStream {\n\n let name = input.ident;\n\n let generics = add_trait_bounds(input.generics);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n let attrs = parse_attributes(&input.attrs);\n\n let cs = columns_with_fields(&input.data, attrs.word.contains(\"default\"));\n\n for (c, f) in cs.iter() {\n\n if c.skip() && !c.default() {\n\n return syn::Error::new(\n\n f.ident.span(),\n\n \"field with `skip` attribute must have `default` attribute\".to_string(),\n\n )\n\n .to_compile_error();\n", "file_path": "batiskaf_derive/src/sql_result.rs", "rank": 71, "score": 5.01527279720904 }, { "content": "use proc_macro2::TokenStream;\n\nuse quote::{quote, quote_spanned};\n\nuse syn::spanned::Spanned;\n\nuse syn::{parse_quote, DeriveInput, Field, GenericParam, Generics};\n\n\n\nuse crate::column::{columns_with_fields, Column};\n\n\n\npub(crate) fn derive(input: DeriveInput) -> TokenStream {\n\n let name = input.ident;\n\n let generics = add_trait_bounds(input.generics);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n let cs = columns_with_fields(&input.data, false);\n\n let cs: Vec<(Column, &Field)> = cs.into_iter().filter(|cf| !cf.0.skip()).collect();\n\n let tokens = cs.iter().map(|cf| {\n\n let name = &cf.1.ident;\n\n let param = &format!(\":{}\", cf.0.name());\n\n quote_spanned! 
{ cf.1.span() =>\n\n if let Ok(Some(_)) = stmt.parameter_index(#param) {\n\n params.push((#param, &self.#name as &dyn ::rusqlite::types::ToSql));\n\n }\n", "file_path": "batiskaf_derive/src/sql_param.rs", "rank": 72, "score": 4.980914814852648 }, { "content": " }\n\n pub fn primary_key(&self) -> bool {\n\n self.attrs.word.contains(\"primary_key\")\n\n }\n\n pub fn autogenerated(&self) -> bool {\n\n self.attrs.word.contains(\"autogenerated\")\n\n }\n\n pub fn skip(&self) -> bool {\n\n self.attrs.word.contains(\"skip\")\n\n }\n\n pub fn default(&self) -> bool {\n\n self.attrs.word.contains(\"default\")\n\n }\n\n}\n\n\n\nimpl From<&Field> for Column {\n\n fn from(f: &Field) -> Self {\n\n let mut attrs = parse_attributes(&f.attrs);\n\n if !attrs.name_value.contains_key(\"column\") {\n\n let name = match f.ident {\n", "file_path": "batiskaf_derive/src/column.rs", "rank": 73, "score": 4.5538644054668485 }, { "content": "### trait BatiskafConnection\n\n\n\nДополняет структуру `rusqlite::Connection` следующими функциями:\n\n\n\n```rust\n\nfn select_one<T: SqlResult>(&self, sql: &str, params: &[(&str, &dyn ToSql)]) -> rusqlite::Result<T>;\n\n```\n\nФункция-обёртка над `rusqlite::Connection::query_row_named`, преобразующая результат запроса в тип `T`.\n\n\n\n```rust\n\nfn select_many<T: SqlResult>(&self, sql: &str, params: &[(&str, &dyn ToSql)]) -> rusqlite::Result<Vec<T>>;\n\n```\n\nФункция-обёртка над `rusqlite::Connection::query_named`, преобразующая все строки результата запроса в тип `T`.\n\n\n\n```rust\n\nfn insert<T: SqlInsert + SqlParam>(&self, table: &str, value: &T) -> rusqlite::Result<i64>;\n\n```\n\nФункция вставляет строку в таблицу `table`. SQL-код выражения `INSERT` генерируется функцией `T::insert_statement`, параметром для запроса является аргумент `value`. 
Функция возвращает `rowid` только что вставленной строки.\n\n\n\n```rust\n\nfn update<T: SqlUpdate + SqlParam>(&self, table: &str, value: &T) -> rusqlite::Result<usize>;\n\n```\n\nФункция изменяет строки в таблице `table` на основании запроса `T::insert_statement` с параметрами `value` и возвращает количество изменённых строк.\n\n\n\n```rust\n\nfn delete<T: SqlDelete + SqlParam>(&self, table: &str, value: &T) -> rusqlite::Result<usize>;\n\n```\n\nФункция удаляет строки из таблицы `table` при помощи запроса `T::delete_statement` и значения `value` и возвращает количество удалённых строк.\n\n\n\n\n", "file_path": "README.md", "rank": 74, "score": 4.082910000456907 }, { "content": " keys.join(\" AND \")\n\n );\n\n quote! {\n\n impl #impl_generics ::batiskaf::SqlUpdate for #name #ty_generics #where_clause {\n\n fn update_statement(table: &str) -> String {\n\n format!(#sql, table)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "batiskaf_derive/src/sql_update.rs", "rank": 75, "score": 3.756232674248813 }, { "content": " params.join(\", \")\n\n );\n\n quote! {\n\n impl #impl_generics ::batiskaf::SqlInsert for #name #ty_generics #where_clause {\n\n fn insert_statement(table: &str) -> String {\n\n format!(#sql, table)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "batiskaf_derive/src/sql_insert.rs", "rank": 76, "score": 3.756232674248813 }, { "content": " format!(\"struct {} must contain `primary_key` field\", name),\n\n )\n\n .to_compile_error();\n\n }\n\n let sql = format!(\"DELETE FROM {{}} WHERE {}\", keys.join(\" AND \"));\n\n quote! {\n\n impl #impl_generics ::batiskaf::SqlDelete for #name #ty_generics #where_clause {\n\n fn delete_statement(table: &str) -> String {\n\n format!(#sql, table)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "batiskaf_derive/src/sql_delete.rs", "rank": 77, "score": 3.738981637337515 }, { "content": " }\n\n }\n\n let tokens = cs.iter().map(|cf| {\n\n let name = &cf.1.ident;\n\n let param = cf.0.name();\n\n if cf.0.skip() {\n\n quote_spanned! 
{ cf.1.span() =>\n\n #name: ::std::default::Default::default()\n\n }\n\n } else if cf.0.default() {\n\n quote_spanned! { cf.1.span() =>\n\n #name: {\n\n let x = row.get(#param);\n\n if let Err(::rusqlite::Error::InvalidColumnName(_)) = x {\n\n ::std::default::Default::default()\n\n } else {\n\n x?\n\n }\n\n }\n\n }\n", "file_path": "batiskaf_derive/src/sql_result.rs", "rank": 78, "score": 2.7815394016887915 }, { "content": " .map(|c| c.name())\n\n .map(|n| format!(\"{} = :{}\", n, n))\n\n .collect();\n\n if values.is_empty() {\n\n return syn::Error::new(\n\n name.span(),\n\n format!(\"struct {} must contain non `primary_key` field\", name),\n\n )\n\n .to_compile_error();\n\n }\n\n if keys.is_empty() {\n\n return syn::Error::new(\n\n name.span(),\n\n format!(\"struct {} must contain `primary_key` field\", name),\n\n )\n\n .to_compile_error();\n\n }\n\n let sql = format!(\n\n \"UPDATE {{}} SET {} WHERE {}\",\n\n values.join(\", \"),\n", "file_path": "batiskaf_derive/src/sql_update.rs", "rank": 79, "score": 2.750852431848287 }, { "content": " params: &[(&str, &dyn ToSql)],\n\n ) -> rusqlite::Result<T> {\n\n self.query_row_named(sql, params, T::from_row)\n\n }\n\n\n\n fn select_many<T: SqlResult>(\n\n &self,\n\n sql: &str,\n\n params: &[(&str, &dyn ToSql)],\n\n ) -> rusqlite::Result<Vec<T>> {\n\n let mut stmt = self.prepare(sql)?;\n\n let mut rows = stmt.query_named(params)?;\n\n let mut result = Vec::new();\n\n while let Some(row) = rows.next()? {\n\n result.push(T::from_row(&row)?);\n\n }\n\n Ok(result)\n\n }\n\n\n\n fn insert<T: SqlInsert + SqlParam>(&self, table: &str, value: &T) -> rusqlite::Result<i64> {\n", "file_path": "batiskaf/src/lib.rs", "rank": 80, "score": 2.7072325929661747 }, { "content": " } else {\n\n quote_spanned! { cf.1.span() =>\n\n #name: row.get(#param)?\n\n }\n\n }\n\n });\n\n quote! 
{\n\n impl #impl_generics ::batiskaf::SqlResult for #name #ty_generics #where_clause {\n\n fn from_row(row: &::rusqlite::Row) -> rusqlite::Result<Self> {\n\n Ok(#name {\n\n #(#tokens),*\n\n })\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "batiskaf_derive/src/sql_result.rs", "rank": 81, "score": 2.613685267821639 }, { "content": " let sql = T::insert_statement(table);\n\n let mut stmt = self.prepare(&sql)?;\n\n let changes = stmt.execute_named(&value.to_named_params(&stmt))?;\n\n match changes {\n\n 1 => Ok(self.last_insert_rowid()),\n\n _ => Err(rusqlite::Error::StatementChangedRows(changes)),\n\n }\n\n }\n\n\n\n fn update<T: SqlUpdate + SqlParam>(&self, table: &str, value: &T) -> rusqlite::Result<usize> {\n\n let sql = T::update_statement(table);\n\n let mut stmt = self.prepare(&sql)?;\n\n stmt.execute_named(&value.to_named_params(&stmt))\n\n }\n\n\n\n fn delete<T: SqlDelete + SqlParam>(&self, table: &str, value: &T) -> rusqlite::Result<usize> {\n\n let sql = T::delete_statement(table);\n\n let mut stmt = self.prepare(&sql)?;\n\n stmt.execute_named(&value.to_named_params(&stmt))\n\n }\n\n}\n", "file_path": "batiskaf/src/lib.rs", "rank": 82, "score": 2.382383768214299 }, { "content": " }\n\n });\n\n quote! 
{\n\n impl #impl_generics ::batiskaf::SqlParam for #name #ty_generics #where_clause {\n\n fn to_named_params(&self, stmt: &::rusqlite::Statement) -> ::std::vec::Vec<(&str, &dyn ::rusqlite::types::ToSql)> {\n\n let mut params = ::std::vec::Vec::new();\n\n #(#tokens)*\n\n params\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "batiskaf_derive/src/sql_param.rs", "rank": 83, "score": 2.202284834268175 }, { "content": "- column\n\n- primary_key\n\n- skip\n\n\n\n*/\n\n\n\nextern crate proc_macro;\n\nuse proc_macro::TokenStream;\n\nuse syn::{self, parse_macro_input, DeriveInput};\n\n\n\nmod column;\n\nmod sql_delete;\n\nmod sql_insert;\n\nmod sql_param;\n\nmod sql_result;\n\nmod sql_update;\n\n\n\n#[proc_macro_derive(SqlParam, attributes(batiskaf))]\n", "file_path": "batiskaf_derive/src/lib.rs", "rank": 84, "score": 2.088126455143296 }, { "content": "#### column = \"column_name\"\n\nПереименование столбца. По-умолчанию название столбца в БД совпадает с названием поля структуры. Атрибут `column` задаёт другое название для соответствующего столбца в таблице.\n\n\n\nПрименяется во всех пяти трейтах.\n\n\n\n#### primary_key\n\nПоле (и соответствующий столбец) является первичным ключом. В том случае, когда этот атрибут указан для нескольких полей, соответствующие столбцы образуют составной первичный ключ. Первичные ключи используются при выводе `SqlUpdate` и `SqlDelete` для идентификации изменяемой (удаляемой) строки.\n\n\n\nУчитывается при генерации `SqlUpdate` и `SqlDelete`. Если ни одно поле не будет иметь этот атрибут, будет ошибка компиляции.\n\n\n\nЕсли все поля имеют атрибут `primary_key`, компиляция трейта `SqlUpdate` завершится с ошибкой.\n\n\n\n#### autogenerated\n\nЗначение соответствуюего столбца является автогенерируемым и пропускается при генерации SQL-кода выражения `INSERT`, возвращаемого функцией `SqlInsert::insert_statement`.\n\n\n\nУчитывается при выводе `SqlInsert`.\n\n\n\n#### skip\n\nПоле не используется в SQL-выражениях. 
Применяется ко всем пяти трейтам.\n\n\n", "file_path": "README.md", "rank": 85, "score": 1.591379561958992 } ]
Rust
core/src/variant_array.rs
sprucely/godot-rust
bea2eb2ad4e46f76c9e029a8680e224cb2a8701a
use sys; use get_api; use Variant; use ToVariant; pub struct VariantArray(pub(crate) sys::godot_array); impl VariantArray { pub fn new() -> Self { VariantArray::default() } pub fn set(&mut self, idx: i32, val: &Variant) { unsafe { (get_api().godot_array_set)(&mut self.0, idx, &val.0) } } pub fn get_val(&mut self, idx: i32) -> Variant { unsafe { Variant((get_api().godot_array_get)(&self.0, idx)) } } pub fn get_ref(&self, idx: i32) -> &Variant { unsafe { Variant::cast_ref( (get_api().godot_array_operator_index_const)(&self.0, idx) ) } } pub fn get_mut_ref(&mut self, idx: i32) -> &mut Variant { unsafe { Variant::cast_mut_ref((get_api().godot_array_operator_index)(&mut self.0, idx)) } } pub fn count(&mut self, val: &Variant) -> i32 { unsafe { (get_api().godot_array_count)(&mut self.0, &val.0) } } pub fn clear(&mut self) { unsafe { (get_api().godot_array_clear)(&mut self.0); } } pub fn remove(&mut self, idx: i32) { unsafe { (get_api().godot_array_remove)(&mut self.0, idx) } } pub fn erase(&mut self, val: &Variant) { unsafe { (get_api().godot_array_erase)(&mut self.0, &val.0) } } pub fn is_empty(&self) -> bool { unsafe { (get_api().godot_array_empty)(&self.0) } } pub fn len(&self) -> i32 { unsafe { (get_api().godot_array_size)(&self.0) } } pub fn push(&mut self, val: &Variant) { unsafe { (get_api().godot_array_push_back)(&mut self.0, &val.0); } } pub fn pop(&mut self) -> Variant { unsafe { Variant((get_api().godot_array_pop_back)(&mut self.0)) } } pub fn push_front(&mut self, val: &Variant) { unsafe { (get_api().godot_array_push_front)(&mut self.0, &val.0); } } pub fn pop_front(&mut self) -> Variant { unsafe { Variant((get_api().godot_array_pop_front)(&mut self.0)) } } pub fn insert(&mut self, at: i32, val: &Variant) { unsafe { (get_api().godot_array_insert)(&mut self.0, at, &val.0) } } pub fn find(&self, what: &Variant, from: i32) -> i32 { unsafe { (get_api().godot_array_find)(&self.0, &what.0, from) } } pub fn contains(&self, what: &Variant) -> bool { unsafe { 
(get_api().godot_array_has)(&self.0, &what.0) } } pub fn resize(&mut self, size: i32) { unsafe { (get_api().godot_array_resize)(&mut self.0, size) } } pub fn rfind(&self, what: &Variant, from: i32) -> i32 { unsafe { (get_api().godot_array_rfind)(&self.0, &what.0, from) } } pub fn find_last(&self, what: &Variant) -> i32 { unsafe { (get_api().godot_array_find_last)(&self.0, &what.0) } } pub fn invert(&mut self) { unsafe { (get_api().godot_array_invert)(&mut self.0) } } pub fn hash(&self) -> i32 { unsafe { (get_api().godot_array_hash)(&self.0) } } pub fn sort(&mut self) { unsafe { (get_api().godot_array_sort)(&mut self.0) } } #[doc(hidden)] pub fn sys(&self) -> *const sys::godot_array { &self.0 } #[doc(hidden)] pub fn from_sys(sys: sys::godot_array) -> Self { VariantArray(sys) } impl_common_methods! { pub fn new_ref(&self) -> VariantArray : godot_array_new_copy; } } impl_basic_traits!( for VariantArray as godot_array { Drop => godot_array_destroy; Default => godot_array_new; } ); impl ToVariant for VariantArray { fn to_variant(&self) -> Variant { Variant::from_array(self) } fn from_variant(variant: &Variant) -> Option<Self> { variant.try_to_array() } } godot_test!(test_array { let foo = Variant::from_str("foo"); let bar = Variant::from_str("bar"); let nope = Variant::from_str("nope"); let mut array = VariantArray::new(); assert!(array.is_empty()); assert_eq!(array.len(), 0); array.push(&foo); array.push(&bar); assert_eq!(array.len(), 2); assert!(array.contains(&foo)); assert!(array.contains(&bar)); assert!(!array.contains(&nope)); array.set(0, &bar); array.set(1, &foo); assert_eq!(array.get_ref(0), &bar); assert_eq!(array.get_ref(1), &foo); array.pop(); array.pop(); let x = Variant::from_i64(42); let y = Variant::from_i64(1337); let z = Variant::from_i64(512); array.insert(0, &x); array.insert(0, &y); array.push_front(&z); array.push_front(&z); assert_eq!(array.find(&y, 0), 2); assert_eq!(array.find_last(&z), 1); assert_eq!(array.find(&nope, 0), -1); array.invert(); 
assert_eq!(array.get_ref(0), &x); array.pop_front(); array.pop_front(); assert_eq!(array.get_ref(0), &z); array.resize(0); assert!(array.is_empty()); array.push(&foo); array.push(&bar); let array2 = array.new_ref(); assert!(array2.contains(&foo)); assert!(array2.contains(&bar)); assert!(!array2.contains(&nope)); });
use sys; use get_api; use Variant; use ToVariant; pub struct VariantArray(pub(crate) sys::godot_array); impl VariantArray { pub fn new() -> Self { VariantArray::default() } pub fn set(&mut self, idx: i32, val: &Variant) { unsafe { (get_api().godot_array_set)(&mut self.0, idx, &val.0) } } pub fn get_val(&mut self, idx: i32) -> Variant { unsafe { Variant((get_api().godot_array_get)(&self.0, idx)) } } pub fn get_ref(&self, id
pub fn get_mut_ref(&mut self, idx: i32) -> &mut Variant { unsafe { Variant::cast_mut_ref((get_api().godot_array_operator_index)(&mut self.0, idx)) } } pub fn count(&mut self, val: &Variant) -> i32 { unsafe { (get_api().godot_array_count)(&mut self.0, &val.0) } } pub fn clear(&mut self) { unsafe { (get_api().godot_array_clear)(&mut self.0); } } pub fn remove(&mut self, idx: i32) { unsafe { (get_api().godot_array_remove)(&mut self.0, idx) } } pub fn erase(&mut self, val: &Variant) { unsafe { (get_api().godot_array_erase)(&mut self.0, &val.0) } } pub fn is_empty(&self) -> bool { unsafe { (get_api().godot_array_empty)(&self.0) } } pub fn len(&self) -> i32 { unsafe { (get_api().godot_array_size)(&self.0) } } pub fn push(&mut self, val: &Variant) { unsafe { (get_api().godot_array_push_back)(&mut self.0, &val.0); } } pub fn pop(&mut self) -> Variant { unsafe { Variant((get_api().godot_array_pop_back)(&mut self.0)) } } pub fn push_front(&mut self, val: &Variant) { unsafe { (get_api().godot_array_push_front)(&mut self.0, &val.0); } } pub fn pop_front(&mut self) -> Variant { unsafe { Variant((get_api().godot_array_pop_front)(&mut self.0)) } } pub fn insert(&mut self, at: i32, val: &Variant) { unsafe { (get_api().godot_array_insert)(&mut self.0, at, &val.0) } } pub fn find(&self, what: &Variant, from: i32) -> i32 { unsafe { (get_api().godot_array_find)(&self.0, &what.0, from) } } pub fn contains(&self, what: &Variant) -> bool { unsafe { (get_api().godot_array_has)(&self.0, &what.0) } } pub fn resize(&mut self, size: i32) { unsafe { (get_api().godot_array_resize)(&mut self.0, size) } } pub fn rfind(&self, what: &Variant, from: i32) -> i32 { unsafe { (get_api().godot_array_rfind)(&self.0, &what.0, from) } } pub fn find_last(&self, what: &Variant) -> i32 { unsafe { (get_api().godot_array_find_last)(&self.0, &what.0) } } pub fn invert(&mut self) { unsafe { (get_api().godot_array_invert)(&mut self.0) } } pub fn hash(&self) -> i32 { unsafe { (get_api().godot_array_hash)(&self.0) } 
} pub fn sort(&mut self) { unsafe { (get_api().godot_array_sort)(&mut self.0) } } #[doc(hidden)] pub fn sys(&self) -> *const sys::godot_array { &self.0 } #[doc(hidden)] pub fn from_sys(sys: sys::godot_array) -> Self { VariantArray(sys) } impl_common_methods! { pub fn new_ref(&self) -> VariantArray : godot_array_new_copy; } } impl_basic_traits!( for VariantArray as godot_array { Drop => godot_array_destroy; Default => godot_array_new; } ); impl ToVariant for VariantArray { fn to_variant(&self) -> Variant { Variant::from_array(self) } fn from_variant(variant: &Variant) -> Option<Self> { variant.try_to_array() } } godot_test!(test_array { let foo = Variant::from_str("foo"); let bar = Variant::from_str("bar"); let nope = Variant::from_str("nope"); let mut array = VariantArray::new(); assert!(array.is_empty()); assert_eq!(array.len(), 0); array.push(&foo); array.push(&bar); assert_eq!(array.len(), 2); assert!(array.contains(&foo)); assert!(array.contains(&bar)); assert!(!array.contains(&nope)); array.set(0, &bar); array.set(1, &foo); assert_eq!(array.get_ref(0), &bar); assert_eq!(array.get_ref(1), &foo); array.pop(); array.pop(); let x = Variant::from_i64(42); let y = Variant::from_i64(1337); let z = Variant::from_i64(512); array.insert(0, &x); array.insert(0, &y); array.push_front(&z); array.push_front(&z); assert_eq!(array.find(&y, 0), 2); assert_eq!(array.find_last(&z), 1); assert_eq!(array.find(&nope, 0), -1); array.invert(); assert_eq!(array.get_ref(0), &x); array.pop_front(); array.pop_front(); assert_eq!(array.get_ref(0), &z); array.resize(0); assert!(array.is_empty()); array.push(&foo); array.push(&bar); let array2 = array.new_ref(); assert!(array2.contains(&foo)); assert!(array2.contains(&bar)); assert!(!array2.contains(&nope)); });
x: i32) -> &Variant { unsafe { Variant::cast_ref( (get_api().godot_array_operator_index_const)(&self.0, idx) ) } }
function_block-function_prefixed
[ { "content": "pub fn generate_bindings(\n\n output: &mut File,\n\n crate_type: Crate,\n\n) -> GeneratorResult {\n\n\n\n let api = Api::new(crate_type);\n\n\n\n writeln!(output, \"use std::os::raw::c_char;\")?;\n\n writeln!(output, \"use std::ptr;\")?;\n\n writeln!(output, \"use std::mem;\")?;\n\n\n\n for class in &api.classes {\n\n if api.namespaces[&class.name] != crate_type {\n\n continue;\n\n }\n\n\n\n generate_class_documentation(output, &api, class)?;\n\n\n\n generate_class_struct(output, class)?;\n\n\n", "file_path": "bindings_generator/src/lib.rs", "rank": 0, "score": 101324.6516821162 }, { "content": "pub fn generate_methods(\n\n output: &mut File,\n\n api: &Api,\n\n method_set: &mut HashSet<String>,\n\n class_name: &str,\n\n is_safe: bool,\n\n is_leaf: bool,\n\n) -> GeneratorResult {\n\n if let Some(class) = api.find_class(class_name) {\n\n 'method:\n\n for method in &class.methods {\n\n let method_name = method.get_name();\n\n\n\n if skip_method(&method_name) {\n\n continue;\n\n }\n\n\n\n let rust_ret_type = if let Some(ty) = method.get_return_type().to_rust() {\n\n ty\n\n } else {\n", "file_path": "bindings_generator/src/methods.rs", "rank": 1, "score": 101324.6516821162 }, { "content": "/// Types that can be converted to and from a `Variant`.\n\npub trait ToVariant: Sized {\n\n fn to_variant(&self) -> Variant;\n\n fn from_variant(variant: &Variant) -> Option<Self>;\n\n}\n\n\n\nimpl ToVariant for () {\n\n fn to_variant(&self) -> Variant {\n\n Variant::new()\n\n }\n\n\n\n fn from_variant(variant: &Variant) -> Option<Self> {\n\n if variant.get_type() == VariantType::Nil {\n\n Some(())\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nmacro_rules! 
impl_to_variant_for_int {\n", "file_path": "core/src/variant.rs", "rank": 2, "score": 100177.77523386452 }, { "content": "pub fn generate_upcast(\n\n output: &mut File,\n\n api: &Api,\n\n base_class_name: &str,\n\n is_pointer_safe: bool,\n\n) -> GeneratorResult {\n\n if let Some(parent) = api.find_class(&base_class_name) {\n\n let snake_name = class_name_to_snake_case(&base_class_name);\n\n if is_pointer_safe {\n\n writeln!(output,\n\nr#\" /// Up-cast.\n\n #[inline]\n\n pub fn to_{snake_name}(&self) -> {name} {{\n\n {addref_if_reference}\n\n {name} {{ this: self.this }}\n\n }}\n\n\"#,\n\n name = parent.name,\n\n snake_name = snake_name,\n\n addref_if_reference = if parent.is_refcounted() {\n", "file_path": "bindings_generator/src/special_methods.rs", "rank": 3, "score": 98627.82420056786 }, { "content": "#[inline]\n\n#[doc(hidden)]\n\npub fn get_api() -> &'static GodotApi {\n\n unsafe { GODOT_API.as_ref().expect(\"API not bound\") }\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\n#[repr(u32)]\n\npub enum GodotError {\n\n Failed = sys::godot_error_GODOT_FAILED,\n\n Unavailable = sys::godot_error_GODOT_ERR_UNAVAILABLE,\n\n Unconfigured = sys::godot_error_GODOT_ERR_UNCONFIGURED,\n\n Unothorized = sys::godot_error_GODOT_ERR_UNAUTHORIZED,\n\n PrameterRange = sys::godot_error_GODOT_ERR_PARAMETER_RANGE_ERROR,\n\n OutOfMemory = sys::godot_error_GODOT_ERR_OUT_OF_MEMORY,\n\n FileNotFound = sys::godot_error_GODOT_ERR_FILE_NOT_FOUND,\n\n FileBadDrive = sys::godot_error_GODOT_ERR_FILE_BAD_DRIVE,\n\n FileBadPath = sys::godot_error_GODOT_ERR_FILE_BAD_PATH,\n\n FileNoPermission = sys::godot_error_GODOT_ERR_FILE_NO_PERMISSION,\n\n FileAlreadyInUse = sys::godot_error_GODOT_ERR_FILE_ALREADY_IN_USE,\n\n FileCantOpen = sys::godot_error_GODOT_ERR_FILE_CANT_OPEN,\n\n FileCantWrite = sys::godot_error_GODOT_ERR_FILE_CANT_WRITE,\n", "file_path": "core/src/lib.rs", "rank": 4, "score": 96201.82334994763 }, { "content": "pub fn generate_class_struct(output: &mut File, class: 
&GodotClass) -> GeneratorResult {\n\n if !class.is_refcounted() {\n\n writeln!(output, \"#[derive(Copy, Clone)]\")?;\n\n }\n\n\n\n writeln!(output,\n\nr#\"#[allow(non_camel_case_types)]\n\n#[derive(Debug)]\n\npub struct {name} {{\n\n #[doc(hidden)]\n\n pub this: *mut sys::godot_object,\n\n}}\n\n\"#,\n\n name = class.name\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "bindings_generator/src/classes.rs", "rank": 5, "score": 91880.02012348859 }, { "content": "pub fn generate_godot_object_impl(output: &mut File, class: &GodotClass) -> GeneratorResult {\n\n writeln!(output,\n\nr#\"\n\nunsafe impl GodotObject for {name} {{\n\n fn class_name() -> &'static str {{\n\n \"{name}\"\n\n }}\n\n\n\n unsafe fn from_sys(obj: *mut sys::godot_object) -> Self {{\n\n {addref_if_reference}\n\n Self {{ this: obj, }}\n\n }}\n\n\n\n unsafe fn to_sys(&self) -> *mut sys::godot_object {{\n\n self.this\n\n }}\n\n}}\n\n\n\nimpl ToVariant for {name} {{\n\n fn to_variant(&self) -> Variant {{ Variant::from_object(self) }}\n", "file_path": "bindings_generator/src/special_methods.rs", "rank": 6, "score": 90164.4618352462 }, { "content": "pub fn generate_free_impl(output: &mut File, api: &Api, class: &GodotClass) -> GeneratorResult {\n\n if class.instanciable && !class.is_pointer_safe() {\n\n writeln!(output,\n\nr#\"impl Free for {name} {{\n\n unsafe fn godot_free(self) {{ self.free() }}\n\n}}\n\n\"#,\n\n name = class.name,\n\n )?;\n\n }\n\n\n\n if class.name == \"Node\" || api.class_inherits(&class, \"Node\") {\n\n writeln!(output,\n\nr#\"impl QueueFree for {name} {{\n\n unsafe fn godot_queue_free(&mut self) {{ self.queue_free() }}\n\n}}\n\n\"#,\n\n name = class.name,\n\n )?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "bindings_generator/src/special_methods.rs", "rank": 7, "score": 86337.55655953517 }, { "content": "pub fn generate_method_impl(output: &mut File, class: &GodotClass, method: &GodotMethod) -> GeneratorResult {\n\n let method_name = method.get_name();\n\n\n\n if 
skip_method(&method_name) {\n\n return Ok(());\n\n }\n\n\n\n let rust_ret_type = if let Some(ty) = method.get_return_type().to_rust() {\n\n ty\n\n } else {\n\n writeln!(output, \"// TODO: missing method {}\", method_name)?;\n\n return Ok(());\n\n };\n\n\n\n let mut params = String::new();\n\n for argument in &method.arguments {\n\n if let Some(ty) = argument.get_type().to_rust() {\n\n fmt::Write::write_fmt(&mut params, format_args!(\", {}: {}\", rust_safe_name(&argument.name), ty)).unwrap();\n\n } else {\n\n writeln!(output, \"// TODO: missing method {}\", method_name)?;\n", "file_path": "bindings_generator/src/methods.rs", "rank": 8, "score": 86337.55655953517 }, { "content": "pub fn official_doc_url(class: &GodotClass) -> String {\n\n format!(\n\n \"https://godot.readthedocs.io/en/3.0/classes/class_{lower_case}.html\",\n\n lower_case = class.name.to_lowercase(),\n\n )\n\n}\n\n\n", "file_path": "bindings_generator/src/documentation.rs", "rank": 9, "score": 78171.88517443187 }, { "content": "pub fn class_doc_link(class: &GodotClass) -> String {\n\n // TODO: link the correct crate\n\n // let subcrate = get_crate(class);\n\n format!(\"[{name}](struct.{name}.html)\", name = class.name)\n\n}\n\n\n", "file_path": "bindings_generator/src/documentation.rs", "rank": 10, "score": 78171.88517443187 }, { "content": "pub fn result_from_sys(err: sys::godot_error) -> GodotResult {\n\n if err == sys::godot_error_GODOT_OK {\n\n return Ok(());\n\n }\n\n\n\n Err(unsafe { mem::transmute(err) })\n\n}\n", "file_path": "core/src/lib.rs", "rank": 11, "score": 76274.04782848325 }, { "content": "pub fn class_name_to_snake_case(name: &str) -> String {\n\n // TODO: this is a quick-n-dirty band-aid, it'd be better to\n\n // programmatically do the right conversion, but to_snake_case\n\n // currently translates \"Node2D\" into \"node2_d\".\n\n match name {\n\n \"SpriteBase3D\" => \"sprite_base_3d\".to_string(),\n\n \"Node2D\" => \"node_2d\".to_string(),\n\n \"CollisionObject2D\" => 
\"collision_object_2d\".to_string(),\n\n \"PhysicsBody2D\" => \"physics_body_2d\".to_string(),\n\n \"VisibilityNotifier2D\" => \"visibility_notifier_2d\".to_string(),\n\n \"Joint2D\" => \"joint_2d\".to_string(),\n\n \"Shape2D\" => \"shape_2d\".to_string(),\n\n \"Physics2DServer\" => \"physics_2d_server\".to_string(),\n\n \"Physics2DDirectBodyState\" => \"physics_2d_direct_body_state\".to_string(),\n\n _ => name.to_snake_case(),\n\n }\n\n}\n", "file_path": "bindings_generator/src/special_methods.rs", "rank": 12, "score": 76247.36167454068 }, { "content": "pub fn get_crate_namespace(crate_type: Crate) -> &'static str {\n\n match crate_type {\n\n Crate::core => \"core\",\n\n Crate::common => \"common\",\n\n Crate::graphics => \"graphics\",\n\n Crate::animation => \"animation\",\n\n Crate::physics => \"physics\",\n\n Crate::network => \"network\",\n\n Crate::audio => \"audio\",\n\n Crate::video => \"video\",\n\n Crate::arvr => \"arvr\",\n\n Crate::input => \"input\",\n\n Crate::ui => \"ui\",\n\n Crate::editor => \"editor\",\n\n Crate::visual_script => \"visual_script\",\n\n Crate::unknown => \"unknown\",\n\n }\n\n}\n\n\n", "file_path": "bindings_generator/src/lib.rs", "rank": 13, "score": 74349.52432859207 }, { "content": "pub fn get_api_json() -> &'static [u8] { include_bytes!(\"../api.json\") }\n\n\n", "file_path": "bindings_generator/src/api.rs", "rank": 14, "score": 72539.15711344092 }, { "content": "pub fn get_namespaces_json() -> &'static [u8] { include_bytes!(\"../namespaces.json\") }\n", "file_path": "bindings_generator/src/api.rs", "rank": 15, "score": 72539.15711344092 }, { "content": "pub fn godot_cast<T>(from: *mut sys::godot_object) -> Option<T>\n\nwhere\n\n T: GodotObject,\n\n{\n\n unsafe {\n\n if !is_class(from, T::class_name()) {\n\n return None;\n\n }\n\n\n\n Some(T::from_sys(from))\n\n }\n\n}\n", "file_path": "core/src/object.rs", "rank": 16, "score": 70724.01431579696 }, { "content": "pub fn get_crate_namespace_opt(crate_type: Option<Crate>) -> 
&'static str {\n\n match crate_type {\n\n Some(ty) => get_crate_namespace(ty),\n\n None => \"\"\n\n }\n\n}\n\n\n", "file_path": "bindings_generator/src/lib.rs", "rank": 17, "score": 69356.39706783659 }, { "content": "pub fn is_class(obj: *mut sys::godot_object, class_name: &str) -> bool {\n\n unsafe {\n\n let api = ::get_api();\n\n let method_bind = ObjectMethodTable::get(api).is_class;\n\n\n\n let mut class_name = (api.godot_string_chars_to_utf8_with_len)(\n\n class_name.as_ptr() as *const _,\n\n class_name.len() as _\n\n );\n\n\n\n let mut argument_buffer = [ptr::null() as *const libc::c_void; 1];\n\n argument_buffer[0] = (&class_name) as *const _ as *const _;\n\n\n\n let mut ret = false;\n\n let ret_ptr = &mut ret as *mut _;\n\n (api.godot_method_bind_ptrcall)(\n\n method_bind,\n\n obj,\n\n argument_buffer.as_mut_ptr() as *mut _,\n\n ret_ptr as *mut _\n\n );\n\n\n\n (api.godot_string_destroy)(&mut class_name);\n\n\n\n ret\n\n }\n\n}\n\n\n", "file_path": "core/src/object.rs", "rank": 18, "score": 68913.64710064582 }, { "content": "pub fn generate_drop(output: &mut File, class: &GodotClass) -> GeneratorResult {\n\n writeln!(output,\n\nr#\"\n\nimpl Drop for {name} {{\n\n fn drop(&mut self) {{\n\n unsafe {{\n\n if object::unref(self.this) {{\n\n (get_api().godot_object_destroy)(self.this);\n\n }}\n\n }}\n\n }}\n\n}}\n\n\"#,\n\n name = class.name\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "bindings_generator/src/special_methods.rs", "rank": 19, "score": 66564.13476862336 }, { "content": "pub fn generate_method_table(output: &mut File, class: &GodotClass) -> GeneratorResult {\n\n writeln!(output, r#\"\n\n#[doc(hidden)]\n\n#[allow(non_camel_case_types)]\n\npub struct {name}MethodTable {{\n\n pub class_constructor: sys::godot_class_constructor,\"#,\n\n name = class.name\n\n )?;\n\n\n\n for method in &class.methods {\n\n let method_name = method.get_name();\n\n if method_name == \"free\" {\n\n continue;\n\n }\n\n writeln!(output, \" pub {}: *mut 
sys::godot_method_bind,\", method_name)?;\n\n }\n\n writeln!(output, r#\"\n\n}}\n\n\n\nimpl {name}MethodTable {{\n", "file_path": "bindings_generator/src/methods.rs", "rank": 20, "score": 66564.13476862336 }, { "content": "pub fn generate_refreference_ctor(output: &mut File, class: &GodotClass) -> GeneratorResult {\n\n writeln!(output,\n\nr#\"\n\n // Constructor\n\n pub fn new() -> Self {{\n\n unsafe {{\n\n let gd_api = get_api();\n\n let ctor = {name}MethodTable::get(gd_api).class_constructor.unwrap();\n\n let obj = ctor();\n\n object::init_ref_count(obj);\n\n\n\n {name} {{\n\n this: obj\n\n }}\n\n }}\n\n }}\n\n\n\n /// Creates a new reference to the same reference-counted object.\n\n pub fn new_ref(&self) -> Self {{\n\n unsafe {{\n", "file_path": "bindings_generator/src/special_methods.rs", "rank": 21, "score": 65041.31795370373 }, { "content": "pub fn generate_dynamic_cast(output: &mut File, class: &GodotClass) -> GeneratorResult {\n\n writeln!(output,\n\nr#\"\n\n /// Generic dynamic cast.\n\n pub {maybe_unsafe}fn cast<T: GodotObject>(&self) -> Option<T> {{\n\n object::godot_cast::<T>(self.this)\n\n }}\n\n\"#,\n\n maybe_unsafe = if class.is_pointer_safe() { \"\" } else { \"unsafe \" },\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "bindings_generator/src/special_methods.rs", "rank": 22, "score": 65041.31795370373 }, { "content": "pub fn generate_singleton_getter(output: &mut File, class: &GodotClass) -> GeneratorResult {\n\n let s_name = if class.name.starts_with(\"_\") {\n\n &class.name[1..]\n\n } else {\n\n class.name.as_ref()\n\n };\n\n\n\n writeln!(output, r#\"\n\n #[inline]\n\n pub fn godot_singleton() -> Self {{\n\n unsafe {{\n\n let this = (get_api().godot_global_get_singleton)(b\"{s_name}\\0\".as_ptr() as *mut _);\n\n\n\n {name} {{\n\n this\n\n }}\n\n }}\n\n }}\"#,\n\n name = class.name,\n\n s_name = s_name\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "bindings_generator/src/special_methods.rs", "rank": 23, "score": 65041.31795370373 }, { 
"content": "pub fn generate_enum(output: &mut File, class: &GodotClass, e: &Enum) -> GeneratorResult {\n\n // TODO: check whether the start of the variant name is\n\n // equal to the end of the enum name and if so don't repeat it\n\n // it. For example ImageFormat::Rgb8 instead of ImageFormat::FormatRgb8.\n\n\n\n let mut values: Vec<(&String, &u32)> = e.values.iter().collect();\n\n values.sort_by(|a, b|{ a.1.cmp(&b.1) });\n\n\n\n writeln!(output,\n\nr#\"#[repr(u32)]\n\n#[allow(non_camel_case_types)]\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\npub enum {class_name}{enum_name} {{\"#,\n\n class_name = class.name, enum_name = e.name\n\n )?;\n\n\n\n for &(key, val) in &values {\n\n // Use lowercase to test because of different CamelCase conventions (Msaa/MSAA, etc.).\n\n let enum_name_without_mode = if e.name.ends_with(\"Mode\") {\n\n e.name[0..(e.name.len() - 4)].to_lowercase()\n", "file_path": "bindings_generator/src/classes.rs", "rank": 24, "score": 64790.93737687151 }, { "content": "pub fn generate_non_refreference_ctor(output: &mut File, class: &GodotClass) -> GeneratorResult {\n\n writeln!(output,\n\nr#\"\n\n /// Constructor.\n\n ///\n\n /// Because this type is not reference counted, the lifetime of the returned object\n\n /// is *not* automatically managed.\n\n /// Immediately after creation, the object is owned by the caller, and can be\n\n /// passed to the engine (in which case the engine will be responsible for\n\n /// destroying the object) or destroyed manually using `{name}::free`.\n\n pub fn new() -> Self {{\n\n unsafe {{\n\n let gd_api = get_api();\n\n let ctor = {name}MethodTable::get(gd_api).class_constructor.unwrap();\n\n let this = ctor();\n\n\n\n {name} {{\n\n this\n\n }}\n\n }}\n", "file_path": "bindings_generator/src/special_methods.rs", "rank": 25, "score": 63599.25002273495 }, { "content": "pub fn generate_class_documentation(output: &mut File, api: &Api, class: &GodotClass) -> GeneratorResult {\n\n let has_parent = 
class.base_class != \"\";\n\n let singleton_str = if class.singleton { \"singleton \" } else { \"\" } ;\n\n let ownership_type = if class.is_refcounted() { \"reference counted\" } else { \"unsafe\" };\n\n if &class.name == \"Reference\" {\n\n writeln!(output, \"/// Base class of all reference-counted types. Inherits `Object`.\")?;\n\n } else if &class.name == \"Object\" {\n\n writeln!(output, \"/// The base class of most Godot classes.\")?;\n\n } else if has_parent {\n\n writeln!(output, r#\"\n\n/// `{api_type} {singleton}class {name}` inherits `{base_class}` ({ownership_type}).\"#,\n\n api_type = class.api_type,\n\n name = class.name,\n\n base_class = class.base_class,\n\n ownership_type = ownership_type,\n\n singleton = singleton_str\n\n )?;\n\n } else {\n\n writeln!(output, r#\"\n\n/// `{api_type} {singleton}class {name}` ({ownership_type}).\"#,\n", "file_path": "bindings_generator/src/documentation.rs", "rank": 26, "score": 61826.0526309831 }, { "content": "fn main() {\n\n let out_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n let mut output = File::create(out_path.join(\"arvr_types.rs\")).unwrap();\n\n\n\n generate_bindings(&mut output, Crate::arvr).unwrap();\n\n}\n", "file_path": "arvr/build.rs", "rank": 27, "score": 61576.8440463018 }, { "content": "fn main() {\n\n let out_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n let mut output = File::create(out_path.join(\"video_types.rs\")).unwrap();\n\n\n\n generate_bindings(&mut output, Crate::video).unwrap();\n\n}\n", "file_path": "video/build.rs", "rank": 28, "score": 61576.8440463018 }, { "content": "fn main() {\n\n let out_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n let mut output = File::create(out_path.join(\"audio_types.rs\")).unwrap();\n\n\n\n generate_bindings(&mut output, Crate::audio).unwrap();\n\n}\n", "file_path": "audio/build.rs", "rank": 29, "score": 61576.8440463018 }, { "content": "fn main() {\n\n let out_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n 
let mut output = File::create(out_path.join(\"ui_types.rs\")).unwrap();\n\n\n\n generate_bindings(&mut output, Crate::ui).unwrap();\n\n}\n", "file_path": "ui/build.rs", "rank": 30, "score": 61576.8440463018 }, { "content": "fn main() {\n\n let out_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n let mut output = File::create(out_path.join(\"common_types.rs\")).unwrap();\n\n\n\n generate_bindings(&mut output, Crate::common).unwrap();\n\n}\n", "file_path": "common/build.rs", "rank": 31, "score": 61576.8440463018 }, { "content": "fn main() {\n\n let out_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n let mut output = File::create(out_path.join(\"network_types.rs\")).unwrap();\n\n\n\n generate_bindings(&mut output, Crate::network).unwrap();\n\n}\n", "file_path": "network/build.rs", "rank": 32, "score": 61576.8440463018 }, { "content": "fn main() {\n\n let out_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n let mut output = File::create(out_path.join(\"core_types.rs\")).unwrap();\n\n\n\n generate_bindings(&mut output, Crate::core).unwrap();\n\n}\n", "file_path": "core/build.rs", "rank": 33, "score": 61576.8440463018 }, { "content": "fn main() {\n\n let out_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n let mut output = File::create(out_path.join(\"input_types.rs\")).unwrap();\n\n\n\n generate_bindings(&mut output, Crate::input).unwrap();\n\n}\n", "file_path": "input/build.rs", "rank": 34, "score": 61576.8440463018 }, { "content": "fn main() {\n\n let dir = env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n let bindings = {\n\n let mut builder = bindgen::Builder::default()\n\n .header(\"godot_headers/gdnative_api_struct.gen.h\")\n\n .whitelisted_type(\"godot.*\")\n\n .whitelisted_function(\"godot.*\")\n\n .whitelisted_var(\"godot.*\")\n\n .whitelisted_type(\"GDNATIVE.*\")\n\n .derive_default(true)\n\n .ignore_functions()\n\n .ctypes_prefix(\"libc\")\n\n .clang_arg(format!(\"-I{}/godot_headers\", dir));\n\n\n\n #[cfg(any(target_os = 
\"macos\", target_os = \"ios\"))]\n\n match osx_include_path() {\n\n Ok(osx_include_path) => {\n\n builder = builder.clang_arg(\"-I\").clang_arg(osx_include_path);\n\n },\n\n _ => {},\n", "file_path": "sys/build.rs", "rank": 35, "score": 61576.8440463018 }, { "content": "fn main() {\n\n let out_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n let mut output = File::create(out_path.join(\"editor_types.rs\")).unwrap();\n\n\n\n generate_bindings(&mut output, Crate::editor).unwrap();\n\n}\n", "file_path": "editor/build.rs", "rank": 36, "score": 61576.8440463018 }, { "content": "fn main() {\n\n let out_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n let mut output = File::create(out_path.join(\"graphics_types.rs\")).unwrap();\n\n\n\n generate_bindings(&mut output, Crate::graphics).unwrap();\n\n}\n", "file_path": "graphics/build.rs", "rank": 37, "score": 61576.8440463018 }, { "content": "fn main() {\n\n let out_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n let mut output = File::create(out_path.join(\"physics_types.rs\")).unwrap();\n\n\n\n generate_bindings(&mut output, Crate::physics).unwrap();\n\n}\n", "file_path": "physics/build.rs", "rank": 38, "score": 61576.8440463018 }, { "content": "fn main() {\n\n let out_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n let mut output = File::create(out_path.join(\"animation_types.rs\")).unwrap();\n\n\n\n generate_bindings(&mut output, Crate::animation).unwrap();\n\n}\n", "file_path": "animation/build.rs", "rank": 39, "score": 61576.8440463018 }, { "content": "fn main() {\n\n let out_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap());\n\n let mut output = File::create(out_path.join(\"visual_script_types.rs\")).unwrap();\n\n\n\n generate_bindings(&mut output, Crate::visual_script).unwrap();\n\n}\n", "file_path": "visual_script/build.rs", "rank": 40, "score": 59725.463249945824 }, { "content": "#[test]\n\nfn color_repr() {\n\n use std::mem::size_of;\n\n assert_eq!(size_of::<Color>(), 
size_of::<sys::godot_color>());\n\n}\n", "file_path": "core/src/color.rs", "rank": 41, "score": 58023.88662464623 }, { "content": "fn list_base_classes(\n\n output: &mut File,\n\n api: &Api,\n\n parent_name: &str,\n\n) -> GeneratorResult {\n\n if let Some(parent) = api.find_class(parent_name) {\n\n let class_link = class_doc_link(&parent);\n\n\n\n writeln!(output, \"/// - {}\", class_link)?;\n\n\n\n if parent.base_class != \"\" {\n\n list_base_classes(output, api, &parent.base_class)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "bindings_generator/src/documentation.rs", "rank": 42, "score": 55002.86188289475 }, { "content": "struct GodotApi {\n\n core(GDNATIVE_API_TYPES_GDNATIVE_CORE, godot_gdnative_core_api_struct) {\n\n pub godot_color_new_rgba: ::std::option::Option<unsafe extern \"C\" fn(r_dest:\n\n *mut godot_color,\n\n p_r:\n\n godot_real,\n\n p_g:\n\n godot_real,\n\n p_b:\n\n godot_real,\n\n p_a:\n\n godot_real)>,\n\n pub godot_color_new_rgb: ::std::option::Option<unsafe extern \"C\" fn(r_dest:\n\n *mut godot_color,\n\n p_r:\n\n godot_real,\n\n p_g:\n\n godot_real,\n\n p_b:\n\n godot_real)>,\n", "file_path": "core/src/internal.rs", "rank": 43, "score": 54385.29306145453 }, { "content": "struct MyClass {\n\n // This field is usually needed in order to call methods of the parent\n\n // class, but in this specific case this never happens, so this field\n\n // is actually not needed here. 
99% of the time you usually need this.\n\n header: NativeInstanceHeader,\n\n\n\n elapsed_time: f64,\n\n}\n\n\n\nimpl MyClass {\n\n fn new(header: NativeInstanceHeader) -> Self {\n\n MyClass {\n\n header: header,\n\n elapsed_time: 0.0,\n\n }\n\n }\n\n\n\n fn _ready(&mut self) {\n\n godot_print!(\"Hello World!\");\n\n }\n", "file_path": "examples/manually_registered/src/lib.rs", "rank": 44, "score": 52962.81916106139 }, { "content": "/// Manually managed Godot classes implementing `free`.\n\npub trait Free {\n\n unsafe fn godot_free(self);\n\n}\n\n\n", "file_path": "core/src/free_on_drop.rs", "rank": 45, "score": 52179.46677613893 }, { "content": "pub trait NativeClass {\n\n fn class_name() -> &'static str;\n\n\n\n fn get_header(&self) -> &NativeInstanceHeader;\n\n\n\n fn as_object(&self) -> &Object {\n\n unsafe {\n\n mem::transmute(self.get_header())\n\n }\n\n }\n\n}\n\n\n\n/// A reference to a rust native script.\n\npub struct NativeRef<T: NativeClass> {\n\n this: *mut sys::godot_object,\n\n _marker: PhantomData<T>,\n\n}\n\n\n\nimpl<T: NativeClass> NativeRef<T> {\n\n\n", "file_path": "core/src/class.rs", "rank": 46, "score": 52179.46677613893 }, { "content": "fn test_constructor() -> bool {\n\n println!(\" -- test_constructor\");\n\n\n\n use gdnative::{GDNativeLibrary, Path2D, FreeOnDrop};\n\n\n\n // Just create an object and call a method as a sanity check for the\n\n // generated constructors.\n\n let lib = GDNativeLibrary::new();\n\n let _ = lib.is_singleton();\n\n\n\n unsafe {\n\n let path = FreeOnDrop::new(Path2D::new());\n\n let _ = path.get_z_index();\n\n }\n\n\n\n return true;\n\n}\n\n\n\ngodot_gdnative_init!();\n\ngodot_nativescript_init!();\n\ngodot_gdnative_terminate!();\n", "file_path": "test/src/lib.rs", "rank": 47, "score": 52051.740672584216 }, { "content": "/// Manually managed Godot classes implementing `queue_free`.\n\npub trait QueueFree {\n\n unsafe fn godot_queue_free(&mut self);\n\n}\n\n\n\n/// A wrapper that automatically frees the object 
when dropped.\n\npub struct FreeOnDrop<T: Free + Clone> {\n\n ptr: T,\n\n}\n\n\n\nimpl<T> FreeOnDrop<T> where T: Free + Clone {\n\n\n\n pub unsafe fn new(ptr: T) -> Self {\n\n FreeOnDrop { ptr }\n\n }\n\n\n\n pub fn forget(self) -> T {\n\n let ptr = self.ptr.clone();\n\n mem::forget(self);\n\n\n\n ptr\n", "file_path": "core/src/free_on_drop.rs", "rank": 48, "score": 50724.68822377066 }, { "content": " }\n\n )\n\n}\n\n\n\nimpl_to_variant_for_int!(i8);\n\nimpl_to_variant_for_int!(i16);\n\nimpl_to_variant_for_int!(i32);\n\nimpl_to_variant_for_int!(i64);\n\n\n\nmacro_rules! godot_uint_impl {\n\n ($ty:ty) => (\n\n impl ToVariant for $ty {\n\n fn to_variant(&self) -> Variant {\n\n unsafe {\n\n let mut ret = sys::godot_variant::default();\n\n (get_api().godot_variant_new_uint)(&mut ret, u64::from(*self));\n\n Variant(ret)\n\n }\n\n }\n\n\n", "file_path": "core/src/variant.rs", "rank": 49, "score": 43877.27244335206 }, { "content": " pub fn from_object<T>(val: &T) -> Variant\n\n where T: GodotObject\n\n {\n\n unsafe {\n\n let api = get_api();\n\n let mut dest = sys::godot_variant::default();\n\n (api.godot_variant_new_object)(&mut dest, val.to_sys());\n\n Variant(dest)\n\n }\n\n }\n\n\n\n /// Creates a `Variant` wrapping a signed integer value.\n\n pub fn from_i64(v: i64) -> Variant {\n\n unsafe {\n\n let api = get_api();\n\n let mut dest = sys::godot_variant::default();\n\n (api.godot_variant_new_int)(&mut dest, v);\n\n Variant(dest)\n\n }\n\n }\n", "file_path": "core/src/variant.rs", "rank": 50, "score": 43876.49035011144 }, { "content": " pub fn from_basis(&Basis) -> Self as sys::godot_basis : godot_variant_new_basis;\n\n /// Creates a `Variant` wrapping a `Color`.\n\n pub fn from_color(&Color) -> Self as sys::godot_color : godot_variant_new_color;\n\n /// Creates a `Variant` wrapping an `Aabb`.\n\n pub fn from_aabb(&Aabb) -> Self as sys::godot_aabb : godot_variant_new_aabb;\n\n );\n\n\n\n variant_constructors_wrap!(\n\n /// Creates a `Variant` wrapping an `Rid`.\n\n 
pub fn from_rid(&Rid) -> Self as sys::godot_rid : godot_variant_new_rid;\n\n /// Creates a `Variant` wrapping a `NodePath`.\n\n pub fn from_node_path(&NodePath) -> Self as sys::godot_node_path : godot_variant_new_node_path;\n\n /// Creates a `Variant` wrapping a `GodotString`.\n\n pub fn from_godot_string(&GodotString) -> Self as sys::godot_string : godot_variant_new_string;\n\n /// Creates an `Variant` wrapping an array of variants.\n\n pub fn from_array(&VariantArray) -> Self as sys::godot_array : godot_variant_new_array;\n\n /// Creates a `Variant` wrapping a byte array.\n\n pub fn from_byte_array(&ByteArray) -> Self as sys::godot_pool_byte_array : godot_variant_new_pool_byte_array;\n\n /// Creates a `Variant` wrapping an array of 32bit signed integers.\n\n pub fn from_int32_array(&Int32Array) -> Self as sys::godot_pool_int_array : godot_variant_new_pool_int_array;\n", "file_path": "core/src/variant.rs", "rank": 51, "score": 43876.48103604091 }, { "content": " /// Creates a `Variant` wrapping an array of 32bit floats.\n\n pub fn from_float32_array(&Float32Array) -> Self as sys::godot_pool_real_array : godot_variant_new_pool_real_array;\n\n /// Creates a `Variant` wrapping an array of godot strings.\n\n pub fn from_string_array(&StringArray) -> Self as sys::godot_pool_string_array : godot_variant_new_pool_string_array;\n\n /// Creates a `Variant` wrapping an array of 2d vectors.\n\n pub fn from_vector2_array(&Vector2Array) -> Self as sys::godot_pool_vector2_array : godot_variant_new_pool_vector2_array;\n\n /// Creates a `Variant` wrapping an array of 3d vectors.\n\n pub fn from_vector3_array(&Vector3Array) -> Self as sys::godot_pool_vector3_array : godot_variant_new_pool_vector3_array;\n\n /// Creates a `Variant` wrapping an array of colors.\n\n pub fn from_color_array(&ColorArray) -> Self as sys::godot_pool_color_array : godot_variant_new_pool_color_array;\n\n /// Creates a `Variant` wrapping a dictionary.\n\n pub fn from_dictionary(&Dictionary) -> Self as 
sys::godot_dictionary : godot_variant_new_dictionary;\n\n );\n\n\n\n /// Creates an empty `Variant`.\n\n pub fn new() -> Self {\n\n unsafe {\n\n let api = get_api();\n\n let mut dest = sys::godot_variant::default();\n\n (api.godot_variant_new_nil)(&mut dest);\n", "file_path": "core/src/variant.rs", "rank": 52, "score": 43876.20684770545 }, { "content": "\n\nimpl ToVariant for f64 {\n\n fn to_variant(&self) -> Variant {\n\n unsafe {\n\n let mut ret = sys::godot_variant::default();\n\n (get_api().godot_variant_new_real)(&mut ret, *self);\n\n Variant(ret)\n\n }\n\n }\n\n\n\n fn from_variant(variant: &Variant) -> Option<Self> {\n\n unsafe {\n\n let api = get_api();\n\n if (api.godot_variant_get_type)(&variant.0) == sys::godot_variant_type_GODOT_VARIANT_TYPE_REAL {\n\n Some((api.godot_variant_as_real)(&variant.0) as Self)\n\n } else {\n\n None\n\n }\n\n }\n\n }\n", "file_path": "core/src/variant.rs", "rank": 53, "score": 43875.785231665956 }, { "content": "impl ToVariant for f32 {\n\n fn to_variant(&self) -> Variant {\n\n unsafe {\n\n let mut ret = sys::godot_variant::default();\n\n (get_api().godot_variant_new_real)(&mut ret, f64::from(*self));\n\n Variant(ret)\n\n }\n\n }\n\n\n\n fn from_variant(variant: &Variant) -> Option<Self> {\n\n unsafe {\n\n let api = get_api();\n\n if (api.godot_variant_get_type)(&variant.0) == sys::godot_variant_type_GODOT_VARIANT_TYPE_REAL {\n\n Some((api.godot_variant_as_real)(&variant.0) as Self)\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n}\n", "file_path": "core/src/variant.rs", "rank": 54, "score": 43875.66446051549 }, { "content": "}\n\n\n\nmacro_rules! variant_from_ref {\n\n ($(impl From<&$Type:ty> : $ctor:ident;)*) => (\n\n $(\n\n impl<'l> From<&'l $Type> for Variant\n\n {\n\n fn from(val: &'l $Type) -> Variant {\n\n Variant::$ctor(val)\n\n }\n\n }\n\n )*\n\n );\n\n}\n\n\n\nmacro_rules! 
variant_from_val {\n\n ($(impl From<$Type:ty> : $ctor:ident;)*) => (\n\n $(\n\n impl From<$Type> for Variant\n\n {\n", "file_path": "core/src/variant.rs", "rank": 55, "score": 43875.4522563791 }, { "content": " ($ty:ty) => (\n\n impl ToVariant for $ty {\n\n fn to_variant(&self) -> Variant {\n\n unsafe {\n\n let mut ret = sys::godot_variant::default();\n\n (get_api().godot_variant_new_int)(&mut ret, i64::from(*self));\n\n Variant(ret)\n\n }\n\n }\n\n\n\n fn from_variant(variant: &Variant) -> Option<Self> {\n\n unsafe {\n\n let api = get_api();\n\n if (api.godot_variant_get_type)(&variant.0) == sys::godot_variant_type_GODOT_VARIANT_TYPE_INT {\n\n Some((api.godot_variant_as_int)(&variant.0) as Self)\n\n } else {\n\n None\n\n }\n\n }\n\n }\n", "file_path": "core/src/variant.rs", "rank": 56, "score": 43875.43041252143 }, { "content": " Variant(sys)\n\n }\n\n}\n\n\n\nimpl_basic_traits!(\n\n for Variant as godot_variant {\n\n Drop => godot_variant_destroy;\n\n Clone => godot_variant_new_copy;\n\n PartialEq => godot_variant_operator_equal;\n\n }\n\n);\n\n\n\nimpl Default for Variant {\n\n fn default() -> Self { Variant::new() }\n\n}\n\n\n\nimpl fmt::Debug for Variant {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n write!(f, \"{:?}({})\", self.get_type(), self.to_string())\n\n }\n", "file_path": "core/src/variant.rs", "rank": 57, "score": 43875.37971010189 }, { "content": " fn from_variant(variant: &Variant) -> Option<Self> {\n\n unsafe {\n\n let api = get_api();\n\n if (api.godot_variant_get_type)(&variant.0) == sys::godot_variant_type_GODOT_VARIANT_TYPE_INT {\n\n Some((api.godot_variant_as_uint)(&variant.0) as Self)\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n }\n\n )\n\n}\n\n\n\ngodot_uint_impl!(u8);\n\ngodot_uint_impl!(u16);\n\ngodot_uint_impl!(u32);\n\ngodot_uint_impl!(u64);\n\n\n\n\n", "file_path": "core/src/variant.rs", "rank": 58, "score": 43875.122026480734 }, { "content": " $(\n\n $(#[$attr])*\n\n pub fn $ctor(val: $Type) -> Variant 
{\n\n unsafe {\n\n let api = get_api();\n\n let mut dest = sys::godot_variant::default();\n\n let gd_val: $GdType = transmute(*val);\n\n (api.$gd_method)(&mut dest, &gd_val);\n\n Variant(dest)\n\n }\n\n }\n\n )*\n\n )\n\n}\n\n\n\nmacro_rules! variant_constructors_wrap {\n\n (\n\n $(\n\n $(#[$attr:meta])*\n\n pub fn $ctor:ident($Type:ty) -> Self as $GdType:ty : $gd_method:ident;\n", "file_path": "core/src/variant.rs", "rank": 59, "score": 43875.05477482625 }, { "content": " fn from(val: $Type) -> Variant {\n\n Variant::$ctor(val)\n\n }\n\n }\n\n )*\n\n );\n\n}\n\n\n\nvariant_from_val!(\n\n impl From<i64> : from_i64;\n\n impl From<u64> : from_u64;\n\n impl From<bool> : from_bool;\n\n);\n\n\n\nvariant_from_ref!(\n\n impl From<&Vector2> : from_vector2;\n\n impl From<&Vector3> : from_vector3;\n\n impl From<&Quat> : from_quat;\n\n impl From<&Plane> : from_plane;\n\n impl From<&Rect2> : from_rect2;\n", "file_path": "core/src/variant.rs", "rank": 60, "score": 43874.72500663085 }, { "content": "\n\n /// Creates a `Variant` wrapping an unsigned integer value.\n\n pub fn from_u64(v: u64) -> Variant {\n\n unsafe {\n\n let api = get_api();\n\n let mut dest = sys::godot_variant::default();\n\n (api.godot_variant_new_uint)(&mut dest, v);\n\n Variant(dest)\n\n }\n\n }\n\n\n\n /// Creates a `Variant` wrapping an boolean.\n\n pub fn from_bool(v: bool) -> Variant {\n\n unsafe {\n\n let api = get_api();\n\n let mut dest = sys::godot_variant::default();\n\n (api.godot_variant_new_bool)(&mut dest, v);\n\n Variant(dest)\n\n }\n\n }\n", "file_path": "core/src/variant.rs", "rank": 61, "score": 43874.48890325277 }, { "content": " fn from(v: &str) -> Variant {\n\n Variant::from_str(v)\n\n }\n\n}\n\n\n\nimpl <T> From<T> for Variant\n\n where T: GodotObject\n\n{\n\n fn from(val: T) -> Variant {\n\n Variant::from_object(&val)\n\n }\n\n}\n\n\n\ngodot_test!(\n\n test_variant_nil {\n\n let nil = Variant::new();\n\n assert_eq!(nil.get_type(), VariantType::Nil);\n\n assert!(nil.is_nil());\n\n\n\n 
assert!(nil.try_to_array().is_none());\n", "file_path": "core/src/variant.rs", "rank": 62, "score": 43874.10165524566 }, { "content": " Variant(dest)\n\n }\n\n }\n\n\n\n /// Creates a `Variant` wrapping a string.\n\n pub fn from_str<S>(s: S) -> Variant\n\n where S: AsRef<str>\n\n {\n\n unsafe {\n\n let api = get_api();\n\n let mut dest = sys::godot_variant::default();\n\n let val = s.as_ref();\n\n let mut godot_s = (api.godot_string_chars_to_utf8_with_len)(val.as_ptr() as *const _, val.len() as _);\n\n (api.godot_variant_new_string)(&mut dest, &godot_s);\n\n (api.godot_string_destroy)(&mut godot_s);\n\n Variant(dest)\n\n }\n\n }\n\n\n\n /// Creates a `Variant` wrapping a Godot object.\n", "file_path": "core/src/variant.rs", "rank": 63, "score": 43874.05679234425 }, { "content": " )*\n\n ) => (\n\n $(\n\n $(#[$attr])*\n\n pub fn $ctor(val: $Type) -> Variant {\n\n unsafe {\n\n let api = get_api();\n\n let mut dest = sys::godot_variant::default();\n\n (api.$gd_method)(&mut dest, &val.0);\n\n Variant(dest)\n\n }\n\n }\n\n )*\n\n )\n\n}\n\n\n\nmacro_rules! 
variant_to_type_transmute {\n\n (\n\n $(\n\n $(#[$to_attr:meta])*\n", "file_path": "core/src/variant.rs", "rank": 64, "score": 43873.85203101783 }, { "content": " }\n\n }\n\n }\n\n\n\n pub(crate) fn cast_ref<'l>(ptr: *const sys::godot_variant) -> &'l Variant {\n\n unsafe { transmute(ptr) }\n\n }\n\n\n\n pub(crate) fn cast_mut_ref<'l>(ptr: *mut sys::godot_variant) -> &'l mut Variant {\n\n unsafe { transmute(ptr) }\n\n }\n\n\n\n /// Returns the internal ffi representation of the variant and consumes\n\n /// the rust object without running the destructor.\n\n ///\n\n /// This should be only used when certain that the receiving side is\n\n /// responsible for running the destructor for the object, otherwise\n\n /// it is leaked.\n\n pub fn forget(self) -> sys::godot_variant {\n\n let v = self.0;\n", "file_path": "core/src/variant.rs", "rank": 65, "score": 43873.617385852725 }, { "content": "use super::*;\n\nuse std::mem::{transmute, forget};\n\nuse std::default::Default;\n\nuse std::fmt;\n\n\n\n// TODO: implement Debug, PartialEq, etc.\n\n\n\n/// A `Variant` can represent many of godot's core types.\n\n///\n\n/// The underlying data can be either stored inline or reference-counted,\n\n/// dependning on the size of the type and whether the it is trivially copyable.\n\npub struct Variant(pub(crate) sys::godot_variant);\n\n\n\nmacro_rules! 
variant_constructors_transmute {\n\n (\n\n $(\n\n $(#[$attr:meta])*\n\n pub fn $ctor:ident($Type:ty) -> Self as $GdType:ty : $gd_method:ident;\n\n )*\n\n ) => (\n", "file_path": "core/src/variant.rs", "rank": 66, "score": 43872.5535772493 }, { "content": " }\n\n }\n\n\n\n pub fn to_string(&self) -> String {\n\n self.to_godot_string().to_string()\n\n }\n\n\n\n pub fn try_to_string(&self) -> Option<String> {\n\n self.try_to_godot_string().map(|s|{ s.to_string() })\n\n }\n\n\n\n /// Returns this variant's type.\n\n pub fn get_type(&self) -> VariantType {\n\n unsafe {\n\n VariantType::from_sys(\n\n (get_api().godot_variant_get_type)(&self.0)\n\n )\n\n }\n\n }\n\n\n", "file_path": "core/src/variant.rs", "rank": 67, "score": 43872.23774455928 }, { "content": " forget(self);\n\n v\n\n }\n\n\n\n // Returns a copy of the internal ffi representation of the variant.\n\n //\n\n // The variant remains owned by the rust wrapper and the receiver of\n\n // the ffi representation should not run its destructor.\n\n #[doc(hidden)]\n\n pub fn to_sys(&self) -> sys::godot_variant {\n\n self.0\n\n }\n\n\n\n #[doc(hidden)]\n\n pub fn sys(&self) -> *const sys::godot_variant {\n\n &self.0\n\n }\n\n\n\n #[doc(hidden)]\n\n pub fn from_sys(sys: sys::godot_variant) -> Self {\n", "file_path": "core/src/variant.rs", "rank": 68, "score": 43871.50701128872 }, { "content": " /// Returns true if this is an empty variant.\n\n pub fn is_nil(&self) -> bool {\n\n self.get_type() == VariantType::Nil\n\n }\n\n\n\n pub fn has_method(&self, method: &GodotString) -> bool {\n\n unsafe {\n\n (get_api().godot_variant_has_method)(&self.0, &method.0)\n\n }\n\n }\n\n\n\n // TODO: return a proper error.\n\n pub fn call(&mut self, method: &GodotString, args: &[Variant]) -> Result<(), ()> {\n\n unsafe {\n\n let api = get_api();\n\n let mut err = sys::godot_variant_call_error::default();\n\n if args.is_empty() {\n\n let mut first = ::std::ptr::null() as *const sys::godot_variant;\n\n (api.godot_variant_call)(\n\n &mut 
self.0,\n", "file_path": "core/src/variant.rs", "rank": 69, "score": 43871.26177748918 }, { "content": "}\n\n\n\nimpl ToVariant for String {\n\n fn to_variant(&self) -> Variant {\n\n Variant::from_str(&self)\n\n }\n\n\n\n fn from_variant(variant: &Variant) -> Option<Self> {\n\n unsafe {\n\n let api = get_api();\n\n if (api.godot_variant_get_type)(&variant.0) == sys::godot_variant_type_GODOT_VARIANT_TYPE_STRING {\n\n let mut gd_variant = (api.godot_variant_as_string)(&variant.0);\n\n let tmp = (api.godot_string_utf8)(&gd_variant);\n\n let ret = ::std::ffi::CStr::from_ptr((api.godot_char_string_get_data)(&tmp) as *const _)\n\n .to_string_lossy()\n\n .into_owned();\n\n (api.godot_string_destroy)(&mut gd_variant);\n\n Some(ret)\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "core/src/variant.rs", "rank": 70, "score": 43871.009018803714 }, { "content": " }\n\n }\n\n\n\n $(#[$try_attr])*\n\n pub fn $try_method(&self) -> Option<$TryType> {\n\n if self.get_type() != VariantType::$TryType {\n\n return None;\n\n }\n\n unsafe {\n\n Some($TryType((get_api().$try_gd_method)(&self.0)))\n\n }\n\n }\n\n )*\n\n )\n\n}\n\n\n\n#[repr(u32)]\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n\npub enum VariantType {\n\n Nil = sys::godot_variant_type_GODOT_VARIANT_TYPE_NIL,\n", "file_path": "core/src/variant.rs", "rank": 71, "score": 43870.74659668236 }, { "content": " pub fn to_color_array(&self) -> ColorArray : godot_variant_as_pool_color_array;\n\n /// Returns `Some(ColorArray)` if this variant is one, `None` otherwise.\n\n pub fn try_to_color_array(&self) -> Option<ColorArray> : godot_variant_as_pool_color_array;\n\n\n\n /// Do a best effort to create a `Dictionary` out of the variant, possibly returning a default value.\n\n pub fn to_dictionary(&self) -> Dictionary : godot_variant_as_dictionary;\n\n /// Returns `Some(Dictionary)` if this variant is one, `None` otherwise.\n\n pub fn try_to_dictionary(&self) -> Option<Dictionary> : 
godot_variant_as_dictionary;\n\n );\n\n\n\n pub fn try_to_object<T>(&self) -> Option<T>\n\n where T: GodotObject\n\n {\n\n unsafe {\n\n let api = get_api();\n\n if (api.godot_variant_get_type)(&self.0) != sys::godot_variant_type_GODOT_VARIANT_TYPE_OBJECT {\n\n return None;\n\n }\n\n let obj = Object::from_sys((api.godot_variant_as_object)(&self.0));\n\n obj.cast::<T>()\n", "file_path": "core/src/variant.rs", "rank": 72, "score": 43870.21991274601 }, { "content": " pub fn $to_method:ident(&self) -> $ToType:ident : $to_gd_method:ident;\n\n $(#[$try_attr:meta])*\n\n pub fn $try_method:ident(&self) -> Option<$TryType:ident> : $try_gd_method:ident;\n\n )*\n\n ) => (\n\n $(\n\n $(#[$to_attr])*\n\n pub fn $to_method(&self) -> $ToType {\n\n unsafe {\n\n transmute((get_api().$to_gd_method)(&self.0))\n\n }\n\n }\n\n\n\n $(#[$try_attr])*\n\n pub fn $try_method(&self) -> Option<$TryType> {\n\n if self.get_type() != VariantType::$TryType {\n\n return None;\n\n }\n\n unsafe {\n\n Some(transmute((get_api().$try_gd_method)(&self.0)))\n", "file_path": "core/src/variant.rs", "rank": 73, "score": 43870.04572190013 }, { "content": " impl From<&Transform> : from_transform;\n\n impl From<&Transform2D> : from_transform2d;\n\n impl From<&Basis> : from_basis;\n\n impl From<&Color> : from_color;\n\n impl From<&Aabb> : from_aabb;\n\n impl From<&String> : from_str;\n\n impl From<&Rid> : from_rid;\n\n impl From<&NodePath> : from_node_path;\n\n impl From<&GodotString> : from_godot_string;\n\n impl From<&Dictionary> : from_dictionary;\n\n impl From<&VariantArray> : from_array;\n\n impl From<&ByteArray> : from_byte_array;\n\n impl From<&Int32Array> : from_int32_array;\n\n impl From<&Float32Array> : from_float32_array;\n\n impl From<&Vector2Array> : from_vector2_array;\n\n impl From<&Vector3Array> : from_vector3_array;\n\n impl From<&ColorArray> : from_color_array;\n\n);\n\n\n\nimpl<'l> From<&'l str> for Variant {\n", "file_path": "core/src/variant.rs", "rank": 74, "score": 43869.737714336 }, { 
"content": " Int32Array = sys::godot_variant_type_GODOT_VARIANT_TYPE_POOL_INT_ARRAY,\n\n Float32Array = sys::godot_variant_type_GODOT_VARIANT_TYPE_POOL_REAL_ARRAY,\n\n StringArray = sys::godot_variant_type_GODOT_VARIANT_TYPE_POOL_STRING_ARRAY,\n\n Vector2Array = sys::godot_variant_type_GODOT_VARIANT_TYPE_POOL_VECTOR2_ARRAY,\n\n Vector3Array = sys::godot_variant_type_GODOT_VARIANT_TYPE_POOL_VECTOR3_ARRAY,\n\n ColorArray = sys::godot_variant_type_GODOT_VARIANT_TYPE_POOL_COLOR_ARRAY,\n\n}\n\n\n\nimpl VariantType {\n\n #[doc(hidden)]\n\n pub fn from_sys(v: sys::godot_variant_type) -> VariantType {\n\n unsafe { transmute(v) }\n\n }\n\n}\n\n\n\n// TODO: Looks like this is missing from the godot_headers bindings.\n\n// It's risky to redefine it here and count on the fact that the integer\n\n// constants will be the same.\n\n#[repr(u32)]\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]\n", "file_path": "core/src/variant.rs", "rank": 75, "score": 43869.42116275122 }, { "content": " pub fn to_rid(&self) -> Rid : godot_variant_as_rid;\n\n /// Returns `Some(Rid)` if this variant is one, `None` otherwise.\n\n pub fn try_to_rid(&self) -> Option<Rid> : godot_variant_as_rid;\n\n\n\n /// Do a best effort to create a `VariantArray` out of the variant, possibly returning a default value.\n\n pub fn to_array(&self) -> VariantArray : godot_variant_as_array;\n\n /// Returns `Some(VariantArray)` if this variant is one, `None` otherwise.\n\n pub fn try_to_array(&self) -> Option<VariantArray> : godot_variant_as_array;\n\n\n\n /// Do a best effort to create a `ByteArray` out of the variant, possibly returning a default value.\n\n pub fn to_byte_array(&self) -> ByteArray : godot_variant_as_pool_byte_array;\n\n /// Returns `Some(ByteArray)` if this variant is one, `None` otherwise.\n\n pub fn try_to_byte_array(&self) -> Option<ByteArray> : godot_variant_as_pool_byte_array;\n\n\n\n /// Do a best effort to create an `Int32Array` out of the variant, possibly returning a default value.\n\n 
pub fn to_int32_array(&self) -> Int32Array : godot_variant_as_pool_int_array;\n\n /// Returns `Some(Int32Array)` if this variant is one, `None` otherwise.\n\n pub fn try_to_int32_array(&self) -> Option<Int32Array> : godot_variant_as_pool_int_array;\n\n\n\n /// Do a best effort to create a `Float32Array` out of the variant, possibly returning a default value.\n", "file_path": "core/src/variant.rs", "rank": 76, "score": 43869.400666657566 }, { "content": " pub fn try_to_basis(&self) -> Option<Basis> : godot_variant_as_basis;\n\n\n\n /// Do a best effort to create a `Color` out of the variant, possibly returning a default value.\n\n pub fn to_color(&self) -> Color : godot_variant_as_color;\n\n /// Returns `Some(Color)` if this variant is one, `None` otherwise.\n\n pub fn try_to_color(&self) -> Option<Color> : godot_variant_as_color;\n\n\n\n /// Do a best effort to create an `Aabb` out of the variant, possibly returning a default value.\n\n pub fn to_aabb(&self) -> Aabb : godot_variant_as_aabb;\n\n /// Returns `Some(Aabb)` if this variant is one, `None` otherwise.\n\n pub fn try_to_aabb(&self) -> Option<Aabb> : godot_variant_as_aabb;\n\n\n\n /// Do a best effort to create a `f64` out of the variant, possibly returning a default value.\n\n pub fn to_f64(&self) -> F64 : godot_variant_as_real;\n\n /// Returns `Some(f64)` if this variant is one, `None` otherwise.\n\n pub fn try_to_f64(&self) -> Option<F64> : godot_variant_as_real;\n\n\n\n /// Do a best effort to create an `i64` out of the variant, possibly returning a default value.\n\n pub fn to_i64(&self) -> I64 : godot_variant_as_int;\n\n /// Returns `Some(i64)` if this variant is one, `None` otherwise.\n", "file_path": "core/src/variant.rs", "rank": 77, "score": 43869.36505347524 }, { "content": " pub fn try_to_plane(&self) -> Option<Plane> : godot_variant_as_plane;\n\n\n\n /// Do a best effort to create a `Rect2` out of the variant, possibly returning a default value.\n\n pub fn to_rect2(&self) -> Rect2 : 
godot_variant_as_rect2;\n\n /// Returns `Some(Rect2)` if this variant is one, `None` otherwise.\n\n pub fn try_to_rect2(&self) -> Option<Rect2> : godot_variant_as_rect2;\n\n\n\n /// Do a best effort to create a `Transform` out of the variant, possibly returning a default value.\n\n pub fn to_transform(&self) -> Transform : godot_variant_as_transform;\n\n /// Returns `Some(Transform)` if this variant is one, `None` otherwise.\n\n pub fn try_to_transform(&self) -> Option<Transform> : godot_variant_as_transform;\n\n\n\n /// Do a best effort to create a `Transform2D` out of the variant, possibly returning a default value.\n\n pub fn to_transform2d(&self) -> Transform2D : godot_variant_as_transform2d;\n\n /// Returns `Some(Transform2D)` if this variant is one, `None` otherwise.\n\n pub fn try_to_transform2d(&self) -> Option<Transform2D> : godot_variant_as_transform2d;\n\n\n\n /// Do a best effort to create a `Basis` out of the variant, possibly returning a default value.\n\n pub fn to_basis(&self) -> Basis : godot_variant_as_basis;\n\n /// Returns `Some(Basis)` if this variant is one, `None` otherwise.\n", "file_path": "core/src/variant.rs", "rank": 78, "score": 43869.247959547196 }, { "content": "\n\n variant_to_type_transmute!(\n\n /// Do a best effort to create a `Vector2` out of the variant, possibly returning a default value.\n\n pub fn to_vector2(&self) -> Vector2 : godot_variant_as_vector2;\n\n /// Returns `Some(Vector2)` if this variant is one, `None` otherwise.\n\n pub fn try_to_vector2(&self) -> Option<Vector2> : godot_variant_as_vector2;\n\n\n\n /// Do a best effort to create a `Vector3` out of the variant, possibly returning a default value.\n\n pub fn to_vector3(&self) -> Vector3 : godot_variant_as_vector3;\n\n /// Returns `Some(Vector3)` if this variant is one, `None` otherwise.\n\n pub fn try_to_vector3(&self) -> Option<Vector3> : godot_variant_as_vector3;\n\n\n\n /// Do a best effort to create a `Quat` out of the variant, possibly returning a default 
value.\n\n pub fn to_quat(&self) -> Quat : godot_variant_as_quat;\n\n /// Returns `Some(Quat)` if this variant is one, `None` otherwise.\n\n pub fn try_to_quat(&self) -> Option<Quat> : godot_variant_as_quat;\n\n\n\n /// Do a best effort to create a `Plane` out of the variant, possibly returning a default value.\n\n pub fn to_plane(&self) -> Plane : godot_variant_as_plane;\n\n /// Returns `Some(Plane)` if this variant is one, `None` otherwise.\n", "file_path": "core/src/variant.rs", "rank": 79, "score": 43869.02070919303 }, { "content": " pub fn try_to_i64(&self) -> Option<I64> : godot_variant_as_int;\n\n\n\n /// Do a best effort to create a `bool` out of the variant, possibly returning a default value.\n\n pub fn to_bool(&self) -> Bool : godot_variant_as_bool;\n\n /// Returns `Some(bool)` if this variant is one, `None` otherwise.\n\n pub fn try_to_bool(&self) -> Option<Bool> : godot_variant_as_bool;\n\n );\n\n\n\n variant_to_type_wrap!(\n\n /// Do a best effort to create a `NodePath` out of the variant, possibly returning a default value.\n\n pub fn to_node_path(&self) -> NodePath : godot_variant_as_node_path;\n\n /// Returns `Some(NodePath)` if this variant is one, `None` otherwise.\n\n pub fn try_to_node_path(&self) -> Option<NodePath> : godot_variant_as_node_path;\n\n\n\n /// Do a best effort to create a `GodotString` out of the variant, possibly returning a default value.\n\n pub fn to_godot_string(&self) -> GodotString : godot_variant_as_string;\n\n /// Returns `Some(GodotString)` if this variant is one, `None` otherwise.\n\n pub fn try_to_godot_string(&self) -> Option<GodotString> : godot_variant_as_string;\n\n\n\n /// Do a best effort to create a `Rid` out of the variant, possibly returning a default value.\n", "file_path": "core/src/variant.rs", "rank": 80, "score": 43868.75299791506 }, { "content": " pub fn to_float32_array(&self) -> Float32Array : godot_variant_as_pool_real_array;\n\n /// Returns `Some(Float32Array)` if this variant is one, `None` 
otherwise.\n\n pub fn try_to_float32_array(&self) -> Option<Float32Array> : godot_variant_as_pool_real_array;\n\n\n\n /// Do a best effort to create a `StringArray` out of the variant, possibly returning a default value.\n\n pub fn to_string_array(&self) -> StringArray : godot_variant_as_pool_string_array;\n\n /// Returns `Some(StringArray)` if this variant is one, `None` otherwise.\n\n pub fn try_to_string_array(&self) -> Option<StringArray> : godot_variant_as_pool_string_array;\n\n\n\n /// Do a best effort to create a `Vector2Array` out of the variant, possibly returning a default value.\n\n pub fn to_vector2_array(&self) -> Vector2Array : godot_variant_as_pool_vector2_array;\n\n /// Returns `Some(Vector2Array)` if this variant is one, `None` otherwise.\n\n pub fn try_to_vector2_array(&self) -> Option<Vector2Array> : godot_variant_as_pool_vector2_array;\n\n\n\n /// Do a best effort to create a `Vector3Array` out of the variant, possibly returning a default value.\n\n pub fn to_vector3_array(&self) -> Vector3Array : godot_variant_as_pool_vector3_array;\n\n /// Returns `Some(Vector3Array)` if this variant is one, `None` otherwise.\n\n pub fn try_to_vector3_array(&self) -> Option<Vector3Array> : godot_variant_as_pool_vector3_array;\n\n\n\n /// Do a best effort to create a `ColorArray` out of the variant, possibly returning a default value.\n", "file_path": "core/src/variant.rs", "rank": 81, "score": 43868.339558292224 }, { "content": " }\n\n }\n\n )*\n\n )\n\n}\n\n\n\nmacro_rules! 
variant_to_type_wrap {\n\n (\n\n $(\n\n $(#[$to_attr:meta])*\n\n pub fn $to_method:ident(&self) -> $ToType:ident : $to_gd_method:ident;\n\n $(#[$try_attr:meta])*\n\n pub fn $try_method:ident(&self) -> Option<$TryType:ident> : $try_gd_method:ident;\n\n )*\n\n ) => (\n\n $(\n\n $(#[$to_attr])*\n\n pub fn $to_method(&self) -> $ToType {\n\n unsafe {\n\n $ToType((get_api().$to_gd_method)(&self.0))\n", "file_path": "core/src/variant.rs", "rank": 82, "score": 43867.643310205814 }, { "content": " Bool = sys::godot_variant_type_GODOT_VARIANT_TYPE_BOOL,\n\n I64 = sys::godot_variant_type_GODOT_VARIANT_TYPE_INT,\n\n F64 = sys::godot_variant_type_GODOT_VARIANT_TYPE_REAL,\n\n GodotString = sys::godot_variant_type_GODOT_VARIANT_TYPE_STRING,\n\n Vector2 = sys::godot_variant_type_GODOT_VARIANT_TYPE_VECTOR2,\n\n Rect2 = sys::godot_variant_type_GODOT_VARIANT_TYPE_RECT2,\n\n Vector3 = sys::godot_variant_type_GODOT_VARIANT_TYPE_VECTOR3,\n\n Transform2D = sys::godot_variant_type_GODOT_VARIANT_TYPE_TRANSFORM2D,\n\n Plane = sys::godot_variant_type_GODOT_VARIANT_TYPE_PLANE,\n\n Quat = sys::godot_variant_type_GODOT_VARIANT_TYPE_QUAT,\n\n Aabb = sys::godot_variant_type_GODOT_VARIANT_TYPE_AABB,\n\n Basis = sys::godot_variant_type_GODOT_VARIANT_TYPE_BASIS,\n\n Transform = sys::godot_variant_type_GODOT_VARIANT_TYPE_TRANSFORM,\n\n Color = sys::godot_variant_type_GODOT_VARIANT_TYPE_COLOR,\n\n NodePath = sys::godot_variant_type_GODOT_VARIANT_TYPE_NODE_PATH,\n\n Rid = sys::godot_variant_type_GODOT_VARIANT_TYPE_RID,\n\n Object = sys::godot_variant_type_GODOT_VARIANT_TYPE_OBJECT,\n\n Dictionary = sys::godot_variant_type_GODOT_VARIANT_TYPE_DICTIONARY,\n\n VariantArray = sys::godot_variant_type_GODOT_VARIANT_TYPE_ARRAY,\n\n ByteArray = sys::godot_variant_type_GODOT_VARIANT_TYPE_POOL_BYTE_ARRAY,\n", "file_path": "core/src/variant.rs", "rank": 83, "score": 43867.44774609747 }, { "content": " &method.0,\n\n &mut first, 0,\n\n &mut err\n\n );\n\n } else {\n\n // TODO: double check that this is safe.\n\n 
let gd_args: &[sys::godot_variant] = transmute(args);\n\n let mut first = &gd_args[0] as *const sys::godot_variant;\n\n (api.godot_variant_call)(\n\n &mut self.0,\n\n &method.0,\n\n &mut first, args.len() as i32,\n\n &mut err\n\n );\n\n }\n\n\n\n if err.error == sys::godot_variant_call_error_error_GODOT_CALL_ERROR_CALL_OK {\n\n Ok(())\n\n } else {\n\n Err(())\n", "file_path": "core/src/variant.rs", "rank": 84, "score": 43867.41285557141 }, { "content": " BitAnd, // = OP_BIT_AND,\n\n BitOr, // = OP_BIT_OR,\n\n BitXor, // = OP_BIT_XOR,\n\n BitNegate, // = OP_BIT_NEGATE,\n\n //logic\n\n And, // = OP_AND,\n\n Or, // = OP_OR,\n\n Xor, // = OP_XOR,\n\n Not, // = OP_NOT,\n\n //containment\n\n In, // = OP_IN,\n\n Max, // = OP_MAX\n\n}\n\n\n\n//fn to_godot_varianty_type(v: VariantType) -> sys::godot_variant_type {\n\n// unsafe { transmute(v) }\n\n//}\n\n\n\n// These aliases are just here so the type name matches the VariantType's variant names\n\n// to make writing macros easier.\n", "file_path": "core/src/variant.rs", "rank": 85, "score": 43866.90671929956 }, { "content": " assert!(v_42.try_to_array().is_none());\n\n\n\n let v_m1 = Variant::from_i64(-1);\n\n assert_eq!(v_m1.get_type(), VariantType::I64);\n\n\n\n assert!(!v_m1.is_nil());\n\n assert_eq!(v_m1.try_to_i64(), Some(-1));\n\n assert!(v_m1.try_to_f64().is_none());\n\n assert!(v_m1.try_to_array().is_none());\n\n }\n\n);\n\n\n\n\n", "file_path": "core/src/variant.rs", "rank": 86, "score": 43865.431973162784 }, { "content": " assert!(nil.try_to_rid().is_none());\n\n assert!(nil.try_to_i64().is_none());\n\n assert!(nil.try_to_bool().is_none());\n\n assert!(nil.try_to_aabb().is_none());\n\n assert!(nil.try_to_vector2().is_none());\n\n assert!(nil.try_to_basis().is_none());\n\n\n\n assert!(!nil.has_method(&GodotString::from_str(\"foo\")));\n\n\n\n let clone = nil.clone();\n\n assert!(clone == nil);\n\n }\n\n\n\n test_variant_i64 {\n\n let v_42 = Variant::from_i64(42);\n\n assert_eq!(v_42.get_type(), 
VariantType::I64);\n\n\n\n assert!(!v_42.is_nil());\n\n assert_eq!(v_42.try_to_i64(), Some(42));\n\n assert!(v_42.try_to_f64().is_none());\n", "file_path": "core/src/variant.rs", "rank": 87, "score": 43865.087580225976 }, { "content": "pub enum VariantOperator {\n\n //comparison\n\n Equal, // = OP_EQUAL,\n\n NotEqual, // = OP_NOT_EQUAL,\n\n Less, // = OP_LESS,\n\n LessEqual, // = OP_LESS_EQUAL,\n\n Greater, // = OP_GREATER,\n\n GreaterEqual, // = OP_GREATER_EQUAL,\n\n //mathematic\n\n Add, // = OP_ADD,\n\n Subtact, // = OP_SUBTRACT,\n\n Multiply, // = OP_MULTIPLY,\n\n Divide, // = OP_DIVIDE,\n\n Negate, // = OP_NEGATE,\n\n Positive, // = OP_POSITIVE,\n\n Module, // = OP_MODULE,\n\n Concat, // = OP_STRING_CONCAT,\n\n //bitwise\n\n ShiftLeft, // = OP_SHIFT_LEFT,\n\n ShiftRight, // = OP_SHIFT_RIGHT,\n", "file_path": "core/src/variant.rs", "rank": 88, "score": 43864.67782852895 }, { "content": "fn skip_method(name: &str) -> bool {\n\n name == \"free\" || name == \"reference\" || name == \"unreference\"\n\n}\n\n\n", "file_path": "bindings_generator/src/methods.rs", "rank": 89, "score": 43587.618931925004 }, { "content": "fn rust_safe_name(name: &str) -> &str {\n\n match name {\n\n \"use\" => \"_use\",\n\n \"type\" => \"_type\",\n\n \"loop\" => \"_loop\",\n\n \"in\" => \"_in\",\n\n \"override\" => \"_override\",\n\n \"where\" => \"_where\",\n\n name => name,\n\n }\n\n}\n\n\n", "file_path": "bindings_generator/src/lib.rs", "rank": 90, "score": 42418.75187604328 }, { "content": "use sys;\n\nuse get_api;\n\nuse Variant;\n\nuse ToVariant;\n\nuse VariantArray;\n\n\n\n/// A reference-counted vector of `i32` that uses Godot's pool allocator.\n\npub struct Int32Array(pub(crate) sys::godot_pool_int_array);\n\n\n\nimpl Int32Array {\n\n /// Creates an empty `Int32Array`.\n\n pub fn new() -> Self { Int32Array::default() }\n\n\n\n /// Creates an array by trying to convert each variant.\n\n ///\n\n /// See `Variant::to_int32_array`.\n\n pub fn from_variant_array(array: &VariantArray) 
-> Self {\n\n unsafe {\n\n let mut result = sys::godot_pool_int_array::default();\n\n (get_api().godot_pool_int_array_new_with_array)(&mut result, &array.0);\n", "file_path": "core/src/int32_array.rs", "rank": 99, "score": 18.248056258129704 } ]
Rust
pipebuilder/src/cli/commands/list.rs
pipebase/pipebuilder
9e5e08ff9526c1e0917034dc49951939beceac01
use super::Cmd; use crate::ops::{ do_app::list_app_metadata, do_build::{list_build_metadata, list_build_snapshot}, do_catalog_schema::{list_catalog_schema_metadata, list_catalog_schema_snapshot}, do_catalogs::{list_catalogs_metadata, list_catalogs_snapshot}, do_manifest::{list_manifest_metadata, list_manifest_snapshot}, do_namespace::list_namespace, do_node::list_node_state, do_project::list_project, print::print_records, }; use pipebuilder_common::{api::client::ApiClient, NodeRole, Result}; use clap::Arg; pub fn cmd() -> Cmd { Cmd::new("list").about("List resource").subcommands(vec![ app(), build(), catalogs(), catalog_schema(), manifest(), node(), namespace(), project(), ]) } pub fn build() -> Cmd { Cmd::new("build") .about("List build metadata given namespace and project id") .args(vec![ Arg::new("namespace") .short('n') .help("Specify namespace") .required(true) .takes_value(true), Arg::new("id") .short('i') .help("Specify project id") .takes_value(true), Arg::new("snapshot") .short('s') .help("Specify build snapshot per project id returned"), ]) } async fn exec_build_snapshot(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let response = list_build_snapshot(&client, namespace.to_owned()).await?; print_records(response.as_slice()); Ok(()) } async fn exec_build_metadata(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let id = args.value_of("id").map(|id| id.to_owned()); let response = list_build_metadata(&client, namespace.to_owned(), id.to_owned()).await?; print_records(response.as_slice()); Ok(()) } pub async fn exec_build(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let is_snapshot = args.is_present("snapshot"); if is_snapshot { return exec_build_snapshot(client, args).await; } return exec_build_metadata(client, args).await; } pub fn node() -> Cmd { Cmd::new("node") .about("List node given role") 
.args(vec![Arg::new("role") .short('r') .help("Specify node role") .takes_value(true)]) } pub async fn exec_node(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let role = args.value_of("role"); let role: Option<NodeRole> = role.map(|role| role.into()); let response = list_node_state(&client, role).await?; print_records(response.as_slice()); Ok(()) } pub fn app() -> Cmd { Cmd::new("app").about("List app metadata").args(vec![ Arg::new("namespace") .short('n') .help("Specify namespace") .required(true) .takes_value(true), Arg::new("id") .short('i') .help("Specify project id") .takes_value(true), ]) } pub async fn exec_app(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let id = args.value_of("id"); let id = id.map(|id| id.to_owned()); let response = list_app_metadata(&client, namespace.to_owned(), id).await?; print_records(response.as_slice()); Ok(()) } pub fn manifest() -> Cmd { Cmd::new("manifest") .about("List manifest metadata") .args(vec![ Arg::new("namespace") .short('n') .help("Specify namespace") .required(true) .takes_value(true), Arg::new("id") .short('i') .help("Specify project id") .takes_value(true), Arg::new("snapshot") .short('s') .help("Specify manifest snapshot returned"), ]) } async fn exec_manifest_snapshot(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let response = list_manifest_snapshot(&client, namespace.to_owned()).await?; print_records(response.as_slice()); Ok(()) } async fn exec_manifest_metadata(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let id = args.value_of("id"); let id = id.map(|id| id.to_owned()); let response = list_manifest_metadata(&client, namespace.to_owned(), id).await?; print_records(response.as_slice()); Ok(()) } pub async fn exec_manifest(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let is_snapshot = 
args.is_present("snapshot"); if is_snapshot { return exec_manifest_snapshot(client, args).await; } return exec_manifest_metadata(client, args).await; } pub fn catalog_schema() -> Cmd { Cmd::new("catalog-schema") .about("List catalog schema metadata") .args(vec![ Arg::new("namespace") .short('n') .help("Specify namespace") .required(true) .takes_value(true), Arg::new("id") .short('i') .help("Specify catalog schema id") .takes_value(true), Arg::new("snapshot") .short('s') .help("Specify catalog schema snapshot returned"), ]) } async fn exec_catalog_schema_snapshot(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let response = list_catalog_schema_snapshot(&client, namespace.to_owned()).await?; print_records(response.as_slice()); Ok(()) } async fn exec_catalog_schema_metadata(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let id = args.value_of("id"); let id = id.map(|id| id.to_owned()); let response = list_catalog_schema_metadata(&client, namespace.to_owned(), id).await?; print_records(response.as_slice()); Ok(()) } pub async fn exec_catalog_schema(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let is_snapshot = args.is_present("snapshot"); if is_snapshot { return exec_catalog_schema_snapshot(client, args).await; } return exec_catalog_schema_metadata(client, args).await; } pub fn catalogs() -> Cmd { Cmd::new("catalogs") .about("List catalogs metadata") .args(vec![ Arg::new("namespace") .short('n') .help("Specify namespace") .required(true) .takes_value(true), Arg::new("id") .short('i') .help("Specify project id") .takes_value(true), Arg::new("snapshot") .short('s') .help("Specify catalogs snapshot returned"), ]) } async fn exec_catalogs_snapshot(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let response = list_catalogs_snapshot(&client, 
namespace.to_owned()).await?; print_records(response.as_slice()); Ok(()) } async fn exec_catalogs_metadata(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let id = args.value_of("id"); let id = id.map(|id| id.to_owned()); let response = list_catalogs_metadata(&client, namespace.to_owned(), id).await?; print_records(response.as_slice()); Ok(()) } pub async fn exec_catalogs(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let is_snapshot = args.is_present("snapshot"); if is_snapshot { return exec_catalogs_snapshot(client, args).await; } return exec_catalogs_metadata(client, args).await; } pub fn namespace() -> Cmd { Cmd::new("namespace").about("List namespace") } pub async fn exec_namespace(client: ApiClient, _args: &clap::ArgMatches) -> Result<()> { let response = list_namespace(&client).await?; print_records(response.as_slice()); Ok(()) } pub fn project() -> Cmd { Cmd::new("project") .about("list project given namespace id") .args(vec![Arg::new("namespace") .short('n') .help("Specify namespace id") .required(true) .takes_value(true)]) } pub async fn exec_project(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let response = list_project(&client, namespace.to_owned()).await?; print_records(response.as_slice()); Ok(()) }
use super::Cmd; use crate::ops::{ do_app::list_app_metadata, do_build::{list_build_metadata, list_build_snapshot}, do_catalog_schema::{list_catalog_schema_metadata, list_catalog_schema_snapshot}, do_catalogs::{list_catalogs_metadata, list_catalogs_snapshot}, do_manifest::{list_manifest_metadata, list_manifest_snapshot}, do_namespace::list_namespace, do_node::list_node_state, do_project::list_project, print::print_records, }; use pipebuilder_common::{api::client::ApiClient, NodeRole, Result}; use clap::Arg; pub fn cmd() -> Cmd { Cmd::new("list").about("List resource").subcommands(vec![ app(), build(), catalogs(), catalog_schema(), manifest(), node(), namespace(), project(), ]) } pub fn build() -> Cmd { Cmd::new("build") .about("List build metadata given namespace and project id") .args(vec![ Arg::new("namespace") .short('n') .help("Specify namespace") .required(true) .takes_value(true), Arg::new("id") .short('i') .help("Specify project id") .takes_value(true), Arg::new("snapshot") .short('s') .help("Specify build snapshot per project id returned"), ]) } async fn exec_build_snapshot(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let response = list_build_snapshot(&client, namespace.to_owned()).await?; print_records(response.as_slice()); Ok(()) } async fn exec_build_metadata(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let id = args.value_of("id").map(|id| id.to_owned()); let response = list_build_metadata(&client, namespace.to_owned(), id.to_owned()).await?; print_records(response.as_slice()); Ok(()) } pub async fn exec_build(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let is_snapshot = args.is_present("snapshot"); if is_snapshot { return exec_build_snapshot(client, args).await; } return exec_build_metadata(client, args).await; } pub fn node() -> Cmd { Cmd::new("node") .about("List node given role") 
.args(vec![Arg::new("role") .short('r') .help("Specify node role") .takes_value(true)]) } pub async fn exec_node(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let role = args.value_of("role"); let role: Option<NodeRole> = role.map(|role| role.into()); let response = list_node_state(&client, role).await?; print_records(response.as_slice()); Ok(()) } pub fn app() -> Cmd { Cmd::new("app").about("List app metadata").args(vec![ Arg::new("namespace") .short('n') .help("Specify namespace") .required(true) .takes_value(true), Arg::new("id") .short('i') .help("Specify project id") .takes_value(true), ]) } pub async fn exec_app(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let id = args.value_of("id"); let id = id.map(|id| id.to_owned()); let response = list_app_metadata(&client, namespace.to_owned(), id).await?; print_records(response.as_slice()); Ok(()) } pub fn manifest() -> Cmd { Cmd::new("manifest") .about("List manifest metadata") .args(vec![ Arg::new("namespace") .short('n') .help("Specify namespace") .required(true) .takes_value(true), Arg::new("id") .short('i') .help("Specify project id") .takes_value(true), Arg::new("snapshot") .short('s') .help("Specify manifest snapshot returned"), ]) }
async fn exec_manifest_metadata(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let id = args.value_of("id"); let id = id.map(|id| id.to_owned()); let response = list_manifest_metadata(&client, namespace.to_owned(), id).await?; print_records(response.as_slice()); Ok(()) } pub async fn exec_manifest(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let is_snapshot = args.is_present("snapshot"); if is_snapshot { return exec_manifest_snapshot(client, args).await; } return exec_manifest_metadata(client, args).await; } pub fn catalog_schema() -> Cmd { Cmd::new("catalog-schema") .about("List catalog schema metadata") .args(vec![ Arg::new("namespace") .short('n') .help("Specify namespace") .required(true) .takes_value(true), Arg::new("id") .short('i') .help("Specify catalog schema id") .takes_value(true), Arg::new("snapshot") .short('s') .help("Specify catalog schema snapshot returned"), ]) } async fn exec_catalog_schema_snapshot(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let response = list_catalog_schema_snapshot(&client, namespace.to_owned()).await?; print_records(response.as_slice()); Ok(()) } async fn exec_catalog_schema_metadata(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let id = args.value_of("id"); let id = id.map(|id| id.to_owned()); let response = list_catalog_schema_metadata(&client, namespace.to_owned(), id).await?; print_records(response.as_slice()); Ok(()) } pub async fn exec_catalog_schema(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let is_snapshot = args.is_present("snapshot"); if is_snapshot { return exec_catalog_schema_snapshot(client, args).await; } return exec_catalog_schema_metadata(client, args).await; } pub fn catalogs() -> Cmd { Cmd::new("catalogs") .about("List catalogs metadata") .args(vec![ Arg::new("namespace") 
.short('n') .help("Specify namespace") .required(true) .takes_value(true), Arg::new("id") .short('i') .help("Specify project id") .takes_value(true), Arg::new("snapshot") .short('s') .help("Specify catalogs snapshot returned"), ]) } async fn exec_catalogs_snapshot(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let response = list_catalogs_snapshot(&client, namespace.to_owned()).await?; print_records(response.as_slice()); Ok(()) } async fn exec_catalogs_metadata(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let id = args.value_of("id"); let id = id.map(|id| id.to_owned()); let response = list_catalogs_metadata(&client, namespace.to_owned(), id).await?; print_records(response.as_slice()); Ok(()) } pub async fn exec_catalogs(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let is_snapshot = args.is_present("snapshot"); if is_snapshot { return exec_catalogs_snapshot(client, args).await; } return exec_catalogs_metadata(client, args).await; } pub fn namespace() -> Cmd { Cmd::new("namespace").about("List namespace") } pub async fn exec_namespace(client: ApiClient, _args: &clap::ArgMatches) -> Result<()> { let response = list_namespace(&client).await?; print_records(response.as_slice()); Ok(()) } pub fn project() -> Cmd { Cmd::new("project") .about("list project given namespace id") .args(vec![Arg::new("namespace") .short('n') .help("Specify namespace id") .required(true) .takes_value(true)]) } pub async fn exec_project(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let response = list_project(&client, namespace.to_owned()).await?; print_records(response.as_slice()); Ok(()) }
async fn exec_manifest_snapshot(client: ApiClient, args: &clap::ArgMatches) -> Result<()> { let namespace = args.value_of("namespace").unwrap(); let response = list_manifest_snapshot(&client, namespace.to_owned()).await?; print_records(response.as_slice()); Ok(()) }
function_block-full_function
[ { "content": "pub fn validate_node_state(state: &NodeState, expected_role: &NodeRole) -> Result<()> {\n\n let actual_role = &state.role;\n\n if actual_role != expected_role {\n\n return Err(invalid_api_request(format!(\n\n \"invalid node state, expect '{}', actual '{}'\",\n\n expected_role.to_string(),\n\n actual_role.to_string()\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "pipebuilder/src/api/validations.rs", "rank": 0, "score": 259252.43012336182 }, { "content": "pub fn project() -> Cmd {\n\n Cmd::new(\"project\")\n\n .about(\"Create project given namespace and project id\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify project id\")\n\n .required(true)\n\n .takes_value(true),\n\n ])\n\n}\n\n\n\npub async fn exec_project(client: ApiClient, args: &clap::ArgMatches) -> Result<()> {\n\n let namespace = args.value_of(\"namespace\").unwrap();\n\n let id = args.value_of(\"id\").unwrap();\n\n let project = create_project(&client, namespace.to_owned(), id.to_owned()).await?;\n\n let projects = vec![project];\n\n print_records(projects.as_slice());\n\n Ok(())\n\n}\n", "file_path": "pipebuilder/src/cli/commands/create.rs", "rank": 2, "score": 253200.7674593312 }, { "content": "pub fn project() -> Cmd {\n\n Cmd::new(\"project\")\n\n .about(\"Delete project given namespace, project id\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify project id\")\n\n .required(true)\n\n .takes_value(true),\n\n ])\n\n}\n\n\n\npub async fn exec_project(client: ApiClient, args: &clap::ArgMatches) -> Result<()> {\n\n let namespace = args.value_of(\"namespace\").unwrap();\n\n let id = args.value_of(\"id\").unwrap();\n\n delete_project(&client, namespace.to_owned(), 
id.to_owned()).await\n\n}\n\n\n", "file_path": "pipebuilder/src/cli/commands/delete.rs", "rank": 3, "score": 253200.7674593312 }, { "content": "pub fn app() -> Cmd {\n\n Cmd::new(\"app\")\n\n .about(\"Pull app binary given namespace, project id and build version\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify project id\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::new(\"version\")\n\n .short('v')\n\n .help(\"Specify app build version\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::new(\"path\")\n", "file_path": "pipebuilder/src/cli/commands/pull.rs", "rank": 4, "score": 253156.77286213986 }, { "content": "pub fn app() -> Cmd {\n\n Cmd::new(\"app\")\n\n .about(\"Delete app binary given namespace, project id and build version, if no build version provide, all app deleted\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify project id\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::new(\"version\")\n\n .short('v')\n\n .help(\"Specify app build version\")\n\n .takes_value(true),\n\n ])\n\n}\n", "file_path": "pipebuilder/src/cli/commands/delete.rs", "rank": 6, "score": 253156.77286213986 }, { "content": "pub fn manifest() -> Cmd {\n\n Cmd::new(\"manifest\")\n\n .about(\"Push manifest given namespace, project id and manifest file\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify project id\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"file\")\n\n .short('f')\n\n .help(\"Specify app manifest file path\")\n\n .required(true)\n\n .takes_value(true),\n\n ])\n", "file_path": 
"pipebuilder/src/cli/commands/push.rs", "rank": 7, "score": 253046.08255867183 }, { "content": "pub fn manifest() -> Cmd {\n\n Cmd::new(\"manifest\")\n\n .about(\"Delete manifest given namespace, project id and manifest version, if no version provide, all manifest deleted\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify project id\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"version\")\n\n .short('v')\n\n .help(\"Specify app manifest version\")\n\n .takes_value(true),\n\n ])\n\n}\n", "file_path": "pipebuilder/src/cli/commands/delete.rs", "rank": 9, "score": 253046.08255867183 }, { "content": "pub fn manifest() -> Cmd {\n\n Cmd::new(\"manifest\")\n\n .about(\"Pull manifest given namespace, project id and manifest version\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify project id\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"version\")\n\n .short('v')\n\n .help(\"Specify app manifest version\")\n\n .required(true)\n\n .takes_value(true),\n\n ])\n", "file_path": "pipebuilder/src/cli/commands/pull.rs", "rank": 10, "score": 253046.08255867183 }, { "content": "pub fn node() -> Cmd {\n\n Cmd::new(\"node\")\n\n .about(\"Shutdown node given id\")\n\n .args(vec![Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify node id\")\n\n .takes_value(true)\n\n .required(true)])\n\n}\n\n\n\npub async fn exec_node(client: ApiClient, args: &clap::ArgMatches) -> Result<()> {\n\n let id = args.value_of(\"id\").unwrap();\n\n let _ = shutdown_node(&client, id.to_owned()).await?;\n\n Ok(())\n\n}\n", "file_path": "pipebuilder/src/cli/commands/shutdown.rs", "rank": 11, "score": 252722.3638682064 }, { "content": "pub fn node() -> Cmd {\n\n Cmd::new(\"node\")\n\n 
.about(\"Deactivate node given id\")\n\n .args(vec![Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify node id\")\n\n .takes_value(true)\n\n .required(true)])\n\n}\n\n\n\npub async fn exec_node(client: ApiClient, args: &clap::ArgMatches) -> Result<()> {\n\n let id = args.value_of(\"id\").unwrap();\n\n let _ = deactivate_node(&client, id.to_owned()).await?;\n\n Ok(())\n\n}\n", "file_path": "pipebuilder/src/cli/commands/deactivate.rs", "rank": 12, "score": 252722.3638682064 }, { "content": "pub fn node() -> Cmd {\n\n Cmd::new(\"node\")\n\n .about(\"Activate node given id\")\n\n .args(vec![Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify node id\")\n\n .takes_value(true)\n\n .required(true)])\n\n}\n\n\n\npub async fn exec_node(client: ApiClient, args: &clap::ArgMatches) -> Result<()> {\n\n let id = args.value_of(\"id\").unwrap();\n\n let _ = activate_node(&client, id.to_owned()).await?;\n\n Ok(())\n\n}\n", "file_path": "pipebuilder/src/cli/commands/activate.rs", "rank": 14, "score": 252722.3638682064 }, { "content": "pub fn build() -> Cmd {\n\n Cmd::new(\"build\")\n\n .about(\"Scan builds at builder\")\n\n .args(vec![Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify builder id\")\n\n .takes_value(true)\n\n .required(true)])\n\n}\n\n\n", "file_path": "pipebuilder/src/cli/commands/scan.rs", "rank": 16, "score": 251830.0142995415 }, { "content": "pub fn build() -> Cmd {\n\n Cmd::new(\"build\")\n\n .about(\"Delete build given namespace, project id and build version, if no build version provide, all build deleted\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify project id\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::new(\"version\")\n\n .short('v')\n\n .help(\"Specify app build version\")\n\n .takes_value(true),\n\n ])\n\n}\n", "file_path": "pipebuilder/src/cli/commands/delete.rs", "rank": 17, "score": 
251830.01429954154 }, { "content": "pub fn build() -> Cmd {\n\n Cmd::new(\"build\")\n\n .about(\"Cancel build given namespace, project id and build version\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify project id\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::new(\"version\")\n\n .short('v')\n\n .help(\"Specify app build version\")\n\n .takes_value(true)\n\n .required(true),\n\n ])\n", "file_path": "pipebuilder/src/cli/commands/cancel.rs", "rank": 18, "score": 251830.01429954154 }, { "content": "pub fn build() -> Cmd {\n\n Cmd::new(\"build\")\n\n .about(\"Get build metadata given namespace, project id and build version\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify project id\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::new(\"version\")\n\n .short('v')\n\n .help(\"Specify app build version\")\n\n .takes_value(true)\n\n .required(true),\n\n ])\n", "file_path": "pipebuilder/src/cli/commands/get.rs", "rank": 19, "score": 251830.01429954154 }, { "content": "pub fn build() -> Cmd {\n\n Cmd::new(\"build\")\n\n .about(\"Create build given namespace and project id\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify project id\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"version\")\n\n .short('v')\n\n .help(\"Specify manifest version\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"target-platform\")\n", "file_path": "pipebuilder/src/cli/commands/create.rs", "rank": 20, "score": 251830.01429954154 }, { "content": "pub fn catalog_schema() -> Cmd {\n\n Cmd::new(\"catalog-schema\")\n\n 
.about(\"Delete catalog schema given namespace, catalog schema id and version, if no version provide, all catalog schema deleted\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify catalog schema id\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"version\")\n\n .short('v')\n\n .help(\"Specify catalog schema version\")\n\n .takes_value(true),\n\n ])\n\n}\n", "file_path": "pipebuilder/src/cli/commands/delete.rs", "rank": 22, "score": 249546.91909234418 }, { "content": "pub fn catalog_schema() -> Cmd {\n\n Cmd::new(\"catalog-schema\")\n\n .about(\"Pull catalog schema given namespace, schema id and version\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify catalog schema id\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"version\")\n\n .short('v')\n\n .help(\"Specify catalog schema version\")\n\n .required(true)\n\n .takes_value(true),\n\n ])\n", "file_path": "pipebuilder/src/cli/commands/pull.rs", "rank": 23, "score": 249546.91909234418 }, { "content": "pub fn catalog_schema() -> Cmd {\n\n Cmd::new(\"catalog-schema\")\n\n .about(\"Push catalog schema given namespace, schema id and schema file\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify schema id\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"file\")\n\n .short('f')\n\n .help(\"Specify catalog schema file path\")\n\n .required(true)\n\n .takes_value(true),\n\n ])\n", "file_path": "pipebuilder/src/cli/commands/push.rs", "rank": 24, "score": 249546.91909234418 }, { "content": "fn validate_role(role: &NodeRole) -> Result<()> {\n\n match role 
{\n\n NodeRole::Undefined => Err(invalid_api_request(String::from(\"undefined node role\"))),\n\n _ => Ok(()),\n\n }\n\n}\n\n\n", "file_path": "pipebuilder/src/api/validations.rs", "rank": 25, "score": 243959.12275883957 }, { "content": "pub fn namespace() -> Cmd {\n\n Cmd::new(\"namespace\")\n\n .about(\"Create namespace given namespace id\")\n\n .args(vec![Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify namespace id\")\n\n .required(true)\n\n .takes_value(true)])\n\n}\n\n\n\npub async fn exec_namespace(client: ApiClient, args: &clap::ArgMatches) -> Result<()> {\n\n let id = args.value_of(\"id\").unwrap();\n\n let namespace = create_namespace(&client, id.to_owned()).await?;\n\n let namespaces = vec![namespace];\n\n print_records(namespaces.as_slice());\n\n Ok(())\n\n}\n\n\n", "file_path": "pipebuilder/src/cli/commands/create.rs", "rank": 26, "score": 226299.0965632876 }, { "content": "pub fn namespace() -> Cmd {\n\n Cmd::new(\"namespace\")\n\n .about(\"Delete namespace given namespace id\")\n\n .args(vec![Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify namespace id\")\n\n .required(true)\n\n .takes_value(true)])\n\n}\n\n\n\npub async fn exec_namespace(client: ApiClient, args: &clap::ArgMatches) -> Result<()> {\n\n let id = args.value_of(\"id\").unwrap();\n\n delete_namespace(&client, id.to_owned()).await\n\n}\n\n\n", "file_path": "pipebuilder/src/cli/commands/delete.rs", "rank": 27, "score": 226299.0965632876 }, { "content": "pub fn build_node_service(config: NodeConfig, lease_id: i64) -> NodeService {\n\n NodeService::new(config, lease_id)\n\n}\n\n\n\npub async fn bootstrap(\n\n config: BaseConfig,\n\n) -> Result<(\n\n Register,\n\n NodeService,\n\n HealthService,\n\n LeaseService,\n\n Receiver<()>,\n\n)> {\n\n info!(\"bootstrap base service\");\n\n // build register\n\n let mut register = build_register(config.register).await?;\n\n // lease grant\n\n let ttl = config.lease.ttl;\n\n let resp = register.lease_grant(ttl as i64).await?;\n\n let lease_id = 
resp.id();\n", "file_path": "pipebuilder_common/src/bootstrap.rs", "rank": 29, "score": 226010.5562816495 }, { "content": "pub fn catalogs() -> Cmd {\n\n Cmd::new(\"catalogs\")\n\n .about(\"Pull catalogs given namespace, project id and version\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify project id\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"version\")\n\n .short('v')\n\n .help(\"Specify catalogs version\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"directory\")\n", "file_path": "pipebuilder/src/cli/commands/pull.rs", "rank": 31, "score": 225749.81946433213 }, { "content": "pub fn catalogs() -> Cmd {\n\n Cmd::new(\"catalogs\")\n\n .about(\"Push catalogs given namespace, project id and catalogs file\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify project id\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"file\")\n\n .short('f')\n\n .help(\"Specify catalogs file path\")\n\n .required(true)\n\n .takes_value(true),\n\n ])\n", "file_path": "pipebuilder/src/cli/commands/push.rs", "rank": 32, "score": 225749.81946433213 }, { "content": "pub fn catalogs() -> Cmd {\n\n Cmd::new(\"catalogs\")\n\n .about(\"Delete catalogs given namespace, project id and version, if no version provide, all catalogs deleted\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify project id\")\n\n .required(true)\n\n .takes_value(true),\n\n Arg::new(\"version\")\n\n .short('v')\n\n .help(\"Specify catalogs version\")\n\n .takes_value(true),\n\n ])\n\n}\n", "file_path": "pipebuilder/src/cli/commands/delete.rs", 
"rank": 33, "score": 225749.81946433213 }, { "content": "pub fn build_cache() -> Cmd {\n\n Cmd::new(\"build-cache\")\n\n .about(\"Scan build caches at builder\")\n\n .args(vec![Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify builder id\")\n\n .takes_value(true)\n\n .required(true)])\n\n}\n\n\n\npub async fn exec_build(client: ApiClient, args: &clap::ArgMatches) -> Result<()> {\n\n let builder_id = args.value_of(\"id\").unwrap();\n\n let builds = scan_build(&client, builder_id).await?;\n\n print_records(builds.as_slice());\n\n Ok(())\n\n}\n\n\n\npub async fn exec_build_cache(client: ApiClient, args: &clap::ArgMatches) -> Result<()> {\n\n let builder_id = args.value_of(\"id\").unwrap();\n\n let caches = scan_build_cache(&client, builder_id).await?;\n\n print_records(caches.as_slice());\n\n Ok(())\n\n}\n", "file_path": "pipebuilder/src/cli/commands/scan.rs", "rank": 34, "score": 222086.13336984484 }, { "content": "pub fn build_log() -> Cmd {\n\n Cmd::new(\"log\")\n\n .about(\"Pull build log given namespace, project id and build version\")\n\n .args(vec![\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::new(\"id\")\n\n .short('i')\n\n .help(\"Specify project id\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::new(\"version\")\n\n .short('v')\n\n .help(\"Specify app build version\")\n\n .takes_value(true)\n\n .required(true),\n\n ])\n", "file_path": "pipebuilder/src/cli/commands/pull.rs", "rank": 35, "score": 222086.13336984484 }, { "content": "pub fn build_cache() -> Cmd {\n\n Cmd::new(\"build-cache\")\n\n .about(\"Delete build-cache given builder id, namespace, project id and target platform\")\n\n .args(vec![\n\n Arg::new(\"builder\")\n\n .short('b')\n\n .help(\"Specify builder id\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::new(\"namespace\")\n\n .short('n')\n\n .help(\"Specify namespace\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::new(\"id\")\n\n 
.short('i')\n\n .help(\"Specify project id\")\n\n .takes_value(true)\n\n .required(true),\n\n Arg::new(\"target-platform\")\n", "file_path": "pipebuilder/src/cli/commands/delete.rs", "rank": 36, "score": 222086.13336984484 }, { "content": "pub fn validate_list_node_state_request(request: &models::ListNodeStateRequest) -> Result<()> {\n\n let role = request.role.as_ref();\n\n let role = match role {\n\n Some(role) => role,\n\n None => return Ok(()),\n\n };\n\n validate_role(role)\n\n}\n\n\n", "file_path": "pipebuilder/src/api/validations.rs", "rank": 37, "score": 206582.906245955 }, { "content": "pub fn cmd() -> Cmd {\n\n Cmd::new(\"activate\")\n\n .about(\"Activate resources\")\n\n .subcommands(vec![node()])\n\n}\n\n\n", "file_path": "pipebuilder/src/cli/commands/activate.rs", "rank": 38, "score": 199826.32100332927 }, { "content": "pub fn cmd() -> Cmd {\n\n Cmd::new(\"pull\").about(\"Pull resource\").subcommands(vec![\n\n app(),\n\n build_log(),\n\n catalogs(),\n\n catalog_schema(),\n\n manifest(),\n\n ])\n\n}\n\n\n", "file_path": "pipebuilder/src/cli/commands/pull.rs", "rank": 39, "score": 199826.32100332927 }, { "content": "pub fn cmd() -> Cmd {\n\n Cmd::new(\"cancel\")\n\n .about(\"Cancel resource\")\n\n .subcommands(vec![build()])\n\n}\n\n\n", "file_path": "pipebuilder/src/cli/commands/cancel.rs", "rank": 40, "score": 199826.32100332927 }, { "content": "pub fn cmd() -> Cmd {\n\n Cmd::new(\"get\")\n\n .about(\"Get resource\")\n\n .subcommands(vec![build()])\n\n}\n\n\n", "file_path": "pipebuilder/src/cli/commands/get.rs", "rank": 41, "score": 199826.32100332927 }, { "content": "pub fn cmd() -> Cmd {\n\n Cmd::new(\"push\").about(\"Push resource\").subcommands(vec![\n\n manifest(),\n\n catalogs(),\n\n catalog_schema(),\n\n ])\n\n}\n\n\n", "file_path": "pipebuilder/src/cli/commands/push.rs", "rank": 42, "score": 199826.32100332927 }, { "content": "pub fn cmd() -> Cmd {\n\n Cmd::new(\"shutdown\")\n\n .about(\"Shutdown resources\")\n\n 
.subcommands(vec![node()])\n\n}\n\n\n", "file_path": "pipebuilder/src/cli/commands/shutdown.rs", "rank": 43, "score": 199826.32100332927 }, { "content": "pub fn cmd() -> Cmd {\n\n Cmd::new(\"delete\")\n\n .about(\"Delete resource\")\n\n .subcommands(vec![\n\n manifest(),\n\n build(),\n\n catalogs(),\n\n catalog_schema(),\n\n app(),\n\n project(),\n\n namespace(),\n\n build_cache(),\n\n ])\n\n}\n\n\n", "file_path": "pipebuilder/src/cli/commands/delete.rs", "rank": 44, "score": 199826.32100332927 }, { "content": "pub fn cmd() -> Cmd {\n\n Cmd::new(\"create\")\n\n .about(\"Create resource\")\n\n .subcommands(vec![build(), namespace(), project()])\n\n}\n\n\n", "file_path": "pipebuilder/src/cli/commands/create.rs", "rank": 46, "score": 199826.32100332927 }, { "content": "pub fn cmd() -> Cmd {\n\n Cmd::new(\"scan\")\n\n .about(\"Scan local node resource\")\n\n .subcommands(vec![build(), build_cache()])\n\n}\n\n\n", "file_path": "pipebuilder/src/cli/commands/scan.rs", "rank": 47, "score": 199826.32100332927 }, { "content": "pub fn cmd() -> Cmd {\n\n Cmd::new(\"deactivate\")\n\n .about(\"Deactivate resources\")\n\n .subcommands(vec![node()])\n\n}\n\n\n", "file_path": "pipebuilder/src/cli/commands/deactivate.rs", "rank": 48, "score": 199826.32100332927 }, { "content": "pub fn cmds() -> Vec<Cmd> {\n\n vec![\n\n activate::cmd(),\n\n create::cmd(),\n\n deactivate::cmd(),\n\n delete::cmd(),\n\n get::cmd(),\n\n list::cmd(),\n\n cancel::cmd(),\n\n scan::cmd(),\n\n pull::cmd(),\n\n push::cmd(),\n\n shutdown::cmd(),\n\n ]\n\n}\n\n\n\n// exec given cmds (action, resource), client and args\n\npub async fn exec(\n\n action: &str,\n\n resource: &str,\n", "file_path": "pipebuilder/src/cli/commands/mod.rs", "rank": 49, "score": 178299.0836075588 }, { "content": "// etcd ops\n\npub fn log_event(event: &Event) -> Result<()> {\n\n if let Some(kv) = event.kv() {\n\n let event = match event.event_type() {\n\n EventType::Delete => \"delete\",\n\n EventType::Put => \"put\",\n\n };\n\n 
info!(\"[event] type: {}, key: {}\", event, kv.key_str()?,);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "pipebuilder_common/src/utils.rs", "rank": 50, "score": 176002.92121587525 }, { "content": "pub fn build_lease_service(config: LeaseConfig, lease_id: i64) -> LeaseService {\n\n LeaseService::new(config, lease_id)\n\n}\n\n\n", "file_path": "pipebuilder_common/src/bootstrap.rs", "rank": 51, "score": 173531.4095227452 }, { "content": "pub fn open_lock_file<P>(path: &P) -> Result<LockFile>\n\nwhere\n\n P: ToOsStr + ?Sized,\n\n{\n\n let file = LockFile::open(path)?;\n\n Ok(file)\n\n}\n\n\n\npub async fn create_directory<P>(path: P) -> Result<()>\n\nwhere\n\n P: AsRef<std::path::Path>,\n\n{\n\n fs::create_dir_all(path).await?;\n\n Ok(())\n\n}\n\n\n\npub async fn reset_directory<P>(path: &P) -> Result<()>\n\nwhere\n\n P: AsRef<std::path::Path>,\n\n{\n", "file_path": "pipebuilder_common/src/utils.rs", "rank": 52, "score": 162577.83179335028 }, { "content": "fn yml_to_json(yml: &str) -> Result<String> {\n\n let value: serde_yaml::Value = serde_yaml::from_str(yml)?;\n\n let json = serde_json::to_string(&value)?;\n\n Ok(json)\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct CatalogsMetadata {\n\n // pull count\n\n pub pulls: u64,\n\n // catalogs file size in byte\n\n pub size: usize,\n\n // created timestamp\n\n pub created: DateTime<Utc>,\n\n}\n\n\n\nimpl BlobResource for CatalogsMetadata {\n\n fn incr_usage(&mut self) {\n\n self.pulls += 1\n\n }\n", "file_path": "pipebuilder_common/src/catalog.rs", "rank": 53, "score": 160474.23297154583 }, { "content": "pub fn cargo_error(cmd: &str, code: i32, msg: String) -> Error {\n\n Error(Box::new(ErrorImpl::Cargo {\n\n cmd: String::from(cmd),\n\n code,\n\n msg,\n\n }))\n\n}\n\n\n", "file_path": "pipebuilder_common/src/errors.rs", "rank": 54, "score": 155790.93258556514 }, { "content": "pub fn deserialize_event<T>(event: &Event) -> Result<Option<(EventType, String, Option<T>)>>\n\nwhere\n\n T: DeserializeOwned,\n\n{\n\n 
if let Some(kv) = event.kv() {\n\n let key = kv.key_str()?;\n\n let value = kv.value();\n\n // in case delete, value is empty\n\n let value = match value.is_empty() {\n\n false => Some(serde_json::from_slice::<T>(value)?),\n\n true => None,\n\n };\n\n return Ok(Some((event.event_type(), key.to_owned(), value)));\n\n }\n\n Ok(None)\n\n}\n\n\n", "file_path": "pipebuilder_common/src/utils.rs", "rank": 55, "score": 149132.96307195746 }, { "content": "// remove '/resource/namespace/' and return id/<suffix> given a key\n\npub fn remove_resource_namespace<'a, R>(origin_key: &'a str, namespace: &str) -> &'a str\n\nwhere\n\n R: Resource,\n\n{\n\n let prefix_key = ResourceKeyBuilder::new()\n\n .resource(R::ty())\n\n .namespace(namespace)\n\n .build();\n\n let pattern = format!(\"{}/\", prefix_key);\n\n origin_key\n\n .strip_prefix(pattern.as_str())\n\n .unwrap_or_else(|| {\n\n panic!(\n\n \"key '{}' not start with '/{}/{}/'\",\n\n origin_key,\n\n R::ty(),\n\n namespace\n\n )\n\n })\n\n}\n\n\n", "file_path": "pipebuilder_common/src/resource.rs", "rank": 56, "score": 145551.3097763868 }, { "content": "pub fn build_error(operation: String, message: String) -> Error {\n\n Error(Box::new(ErrorImpl::Build { operation, message }))\n\n}\n\n\n", "file_path": "pipebuilder_common/src/errors.rs", "rank": 57, "score": 144014.27397073506 }, { "content": "pub fn invalid_catalog_name(reason: String, message: String) -> Error {\n\n Error(Box::new(ErrorImpl::CatalogName { reason, message }))\n\n}\n\n\n", "file_path": "pipebuilder_common/src/errors.rs", "rank": 58, "score": 142632.4954471313 }, { "content": "pub fn copy_directory<P>(from: P, to: P) -> Pin<Box<dyn Future<Output = Result<bool>> + Send>>\n\nwhere\n\n P: AsRef<Path> + Send + 'static,\n\n{\n\n Box::pin(async move {\n\n let mut from_path = PathBuf::new();\n\n from_path.push(from);\n\n let mut to_path = PathBuf::new();\n\n to_path.push(to);\n\n if !from_path.exists() {\n\n warn!(\"copy from path '{}' does not exist\", 
from_path.display());\n\n return Ok(false);\n\n }\n\n // extract last segment from and append to target\n\n let file_name = match from_path.file_name() {\n\n Some(file_name) => file_name,\n\n None => {\n\n warn!(\"file name not found in path '{}'.\", from_path.display());\n\n return Ok(false);\n\n }\n", "file_path": "pipebuilder_common/src/utils.rs", "rank": 59, "score": 141664.51956846943 }, { "content": "// snapshot resource\n\npub trait Snapshot: Default {\n\n fn incr_version(&mut self);\n\n fn get_version(&self) -> u64;\n\n}\n\n\n", "file_path": "pipebuilder_common/src/resource.rs", "rank": 60, "score": 130327.39308325983 }, { "content": " #[doc = \"Generated trait containing gRPC methods that should be implemented for use with NodeServer.\"]\n\n #[async_trait]\n\n pub trait Node: Send + Sync + 'static {\n\n async fn activate(\n\n &self,\n\n request: tonic::Request<super::ActivateRequest>,\n\n ) -> Result<tonic::Response<super::ActivateResponse>, tonic::Status>;\n\n async fn deactivate(\n\n &self,\n\n request: tonic::Request<super::DeactivateRequest>,\n\n ) -> Result<tonic::Response<super::DeactivateResponse>, tonic::Status>;\n\n async fn shutdown(\n\n &self,\n\n request: tonic::Request<super::ShutdownRequest>,\n\n ) -> Result<tonic::Response<super::ShutdownResponse>, tonic::Status>;\n\n async fn status(\n\n &self,\n\n request: tonic::Request<super::StatusRequest>,\n\n ) -> Result<tonic::Response<super::StatusResponse>, tonic::Status>;\n\n }\n\n #[derive(Debug)]\n\n pub struct NodeServer<T: Node> {\n", "file_path": "pipebuilder_common/src/grpc/node.rs", "rank": 61, "score": 130240.41267645053 }, { "content": "pub fn bootstrap(\n\n repository: MockRepository,\n\n) -> Result<(\n\n impl Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone,\n\n mpsc::Receiver<()>,\n\n)> {\n\n let app_directory = PathBuf::from(repository.app);\n\n let catalogs_directory = PathBuf::from(repository.catalogs);\n\n let (shutdown_tx, shutdown_rx) = 
mpsc::channel::<()>(1);\n\n let api = api::filters::api(app_directory, catalogs_directory, shutdown_tx);\n\n Ok((api, shutdown_rx))\n\n}\n", "file_path": "pipebuilder_mock/src/bootstrap.rs", "rank": 62, "score": 123645.57929847023 }, { "content": "fn validate_target_platform(target_platform: &str) -> Result<()> {\n\n if !Build::is_target_platform_support(target_platform) {\n\n return Err(invalid_api_request(format!(\n\n \"target platform '{}' not support\",\n\n target_platform\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n\nasync fn validate_namespace(register: &mut Register, namespace: &str) -> Result<()> {\n\n let key = ResourceKeyBuilder::new()\n\n .resource(ResourceType::Namespace)\n\n .id(namespace)\n\n .build();\n\n let is_exist = register.is_exist(key).await?;\n\n match is_exist {\n\n true => Ok(()),\n\n false => Err(invalid_api_request(format!(\n\n \"invalid namespace '{}'\",\n", "file_path": "pipebuilder/src/api/validations.rs", "rank": 63, "score": 120957.16802608504 }, { "content": "pub fn api_client_error(\n\n status_code: u16,\n\n reason: Option<String>,\n\n message: Option<String>,\n\n) -> Error {\n\n Error(Box::new(ErrorImpl::ApiClient {\n\n status_code,\n\n reason: reason.unwrap_or_default(),\n\n message: message.unwrap_or_default(),\n\n }))\n\n}\n\n\n", "file_path": "pipebuilder_common/src/errors.rs", "rank": 64, "score": 120438.28227060493 }, { "content": "pub fn init_tracing_subscriber() {\n\n let formatter = std::env::var(ENV_FORMATTER).unwrap_or_else(|_| String::from(FULL_FORMATTER));\n\n match formatter.as_str() {\n\n FULL_FORMATTER => init_with_full_formatter(),\n\n PRETTY_FORMATTER => init_with_pretty_formatter(),\n\n JSON_FORMATTER => init_with_json_formatter(),\n\n _ => init_with_full_formatter(),\n\n }\n\n}\n", "file_path": "pipebuilder_common/src/logging.rs", "rank": 65, "score": 120438.28227060493 }, { "content": "pub fn api_server_error(\n\n status_code: u16,\n\n reason: Option<String>,\n\n message: Option<String>,\n\n) -> Error {\n\n 
Error(Box::new(ErrorImpl::ApiServer {\n\n status_code,\n\n reason: reason.unwrap_or_default(),\n\n message: message.unwrap_or_default(),\n\n }))\n\n}\n\n\n", "file_path": "pipebuilder_common/src/errors.rs", "rank": 66, "score": 120438.28227060493 }, { "content": "pub trait ValidateCatalog: VisitCatalog {\n\n fn validate(&self) -> Result<()>;\n\n}\n\n\n\npub struct CatalogSchemaValidator {\n\n pub schema: JSONSchema,\n\n pub instance: Option<serde_json::Value>,\n\n}\n\n\n\nimpl CatalogSchemaValidator {\n\n pub fn from_literal(schema: &str) -> Result<Self> {\n\n let schema = serde_json::from_str(schema)?;\n\n Self::from_json_value(&schema)\n\n }\n\n\n\n pub fn from_buffer(schema: &[u8]) -> Result<Self> {\n\n let schema = serde_json::from_slice(schema)?;\n\n Self::from_json_value(&schema)\n\n }\n\n\n", "file_path": "pipebuilder_common/src/catalog.rs", "rank": 67, "score": 113694.84922599334 }, { "content": "pub trait VisitCatalog {\n\n fn visit(&mut self, c: &Catalog) -> Result<()>;\n\n}\n\n\n", "file_path": "pipebuilder_common/src/catalog.rs", "rank": 68, "score": 113164.62937702413 }, { "content": "pub fn invalid_api_request(message: String) -> Error {\n\n Error(Box::new(ErrorImpl::ApiRequest { message }))\n\n}\n\n\n", "file_path": "pipebuilder_common/src/errors.rs", "rank": 69, "score": 110042.97899278956 }, { "content": "fn main() {\n\n tonic_build::configure()\n\n .out_dir(\"src/grpc\")\n\n .compile(&[\"proto/build.proto\"], &[\"proto\"])\n\n .unwrap();\n\n tonic_build::configure()\n\n .out_dir(\"src/grpc\")\n\n .compile(&[\"proto/health.proto\"], &[\"proto\"])\n\n .unwrap();\n\n tonic_build::configure()\n\n .out_dir(\"src/grpc\")\n\n .compile(&[\"proto/schedule.proto\"], &[\"proto\"])\n\n .unwrap();\n\n tonic_build::configure()\n\n .out_dir(\"src/grpc\")\n\n .compile(&[\"proto/repository.proto\"], &[\"proto\"])\n\n .unwrap();\n\n tonic_build::configure()\n\n .out_dir(\"src/grpc\")\n\n .compile(&[\"proto/node.proto\"], &[\"proto\"])\n\n .unwrap();\n\n}\n", 
"file_path": "pipebuilder_common/build.rs", "rank": 70, "score": 109131.03838656432 }, { "content": "pub fn rpc_not_found(message: &str) -> tonic::Status {\n\n tonic::Status::not_found(message)\n\n}\n", "file_path": "pipebuilder_common/src/errors.rs", "rank": 71, "score": 108454.52231926908 }, { "content": "// rpc status\n\npub fn rpc_internal_error(error: Error) -> tonic::Status {\n\n tonic::Status::internal(format!(\"{:#?}\", error))\n\n}\n\n\n", "file_path": "pipebuilder_common/src/errors.rs", "rank": 72, "score": 107091.76923181882 }, { "content": "pub fn repository_error(operation: String, message: String) -> Error {\n\n Error(Box::new(ErrorImpl::Repository { operation, message }))\n\n}\n\n\n", "file_path": "pipebuilder_common/src/errors.rs", "rank": 73, "score": 105659.23468576046 }, { "content": "// remove '/resource/' and return suffix\n\npub fn remove_resource<R>(origin_key: &'_ str) -> &'_ str\n\nwhere\n\n R: Resource,\n\n{\n\n let prefix_key = ResourceKeyBuilder::new().resource(R::ty()).build();\n\n let pattern = format!(\"{}/\", prefix_key);\n\n origin_key\n\n .strip_prefix(pattern.as_str())\n\n .unwrap_or_else(|| panic!(\"key '{}' not start with '/{}/'\", origin_key, R::ty()))\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub struct BlobDescriptor<'a>(pub &'a str, pub &'a str, pub u64);\n\n\n\nimpl<'a> BlobDescriptor<'a> {\n\n pub fn into_tuple(self) -> (&'a str, &'a str, u64) {\n\n (self.0, self.1, self.2)\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub struct SnapshotDescriptor<'a>(pub &'a str, pub &'a str);\n\n\n\nimpl<'a> SnapshotDescriptor<'a> {\n\n pub fn into_tuple(self) -> (&'a str, &'a str) {\n\n (self.0, self.1)\n\n }\n\n}\n\n\n", "file_path": "pipebuilder_common/src/resource.rs", "rank": 74, "score": 104365.72733946459 }, { "content": "pub fn bootstrap(_config: SchedulerConfig, register: Register) -> SchedulerService {\n\n let manager = ScheduleManager::builder().build();\n\n // start builder watcher\n\n manager.run(register);\n\n 
SchedulerService::new(manager)\n\n}\n", "file_path": "pipebuilder/src/scheduler/bootstrap.rs", "rank": 75, "score": 104342.54659061243 }, { "content": "pub fn append_dot_format_suffix(filename: &str, format: &str) -> String {\n\n format!(\"{}.{}\", filename, format)\n\n}\n\n\n\n// run cmd and collect status and output\n\nasync fn cmd_status_output(mut cmd: Command) -> Result<(i32, String)> {\n\n let output = cmd.output().await?;\n\n match output.status.success() {\n\n true => {\n\n let stderr = String::from_utf8(output.stderr)?;\n\n Ok((0, stderr))\n\n }\n\n false => {\n\n let stderr = String::from_utf8(output.stderr)?;\n\n let err_code = output.status.code().unwrap_or(1);\n\n Ok((err_code, stderr))\n\n }\n\n }\n\n}\n\n\n\n// run cmd and collect status\n\nasync fn cmd_status(mut cmd: Command) -> Result<i32> {\n\n let status = cmd.status().await?;\n\n match status.success() {\n\n true => Ok(0),\n\n false => Ok(status.code().unwrap_or(1)),\n\n }\n\n}\n\n\n", "file_path": "pipebuilder_common/src/utils.rs", "rank": 76, "score": 103069.62661348001 }, { "content": "pub fn hash_distance<T>(t0: &T, t1: &T) -> u64\n\nwhere\n\n T: Hash,\n\n{\n\n let h0 = fnv1a(t0);\n\n let h1 = fnv1a(t1);\n\n if h0 > h1 {\n\n return h0 - h1;\n\n }\n\n h1 - h0\n\n}\n\n\n\n// App cargo.toml\n\n#[derive(Deserialize, Serialize, Debug)]\n\npub struct TomlProject {\n\n name: String,\n\n version: String,\n\n authors: Option<Vec<String>>,\n\n edition: Option<String>,\n\n}\n", "file_path": "pipebuilder_common/src/utils.rs", "rank": 77, "score": 103044.0503646531 }, { "content": "pub fn json_schema_error(operation: String, messages: Vec<String>) -> Error {\n\n Error(Box::new(ErrorImpl::JsonSchema {\n\n operation,\n\n messages,\n\n }))\n\n}\n\n\n", "file_path": "pipebuilder_common/src/errors.rs", "rank": 78, "score": 101771.13038752068 }, { "content": "// prost type conversion\n\npub fn prost_timestamp_to_datetime_utc(timestamp: prost_types::Timestamp) -> DateTime<Utc> {\n\n let secs = 
timestamp.seconds;\n\n let nsecs = timestamp.nanos as u32;\n\n Utc.timestamp(secs, nsecs)\n\n}\n\n\n", "file_path": "pipebuilder_common/src/utils.rs", "rank": 79, "score": 100646.64245841953 }, { "content": "pub fn datetime_utc_to_prost_timestamp(datetime: DateTime<Utc>) -> prost_types::Timestamp {\n\n let seconds = datetime.timestamp();\n\n let nanos = datetime.timestamp_subsec_nanos() as i32;\n\n prost_types::Timestamp { seconds, nanos }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use crate::{copy_directory, read_file, remove_directory, Result};\n\n\n\n #[tokio::test]\n\n async fn test_copy_directory() -> Result<()> {\n\n let from = \"resources/utils/files/from/app\";\n\n let to = \"resources/utils/files/to\";\n\n let aloha = \"resources/utils/files/to/app/file.txt\";\n\n let hello = \"resources/utils/files/to/app/src/file.txt\";\n\n assert!(copy_directory(from, to).await.is_ok());\n\n let buffer = read_file(aloha).await?;\n\n let actual = String::from_utf8(buffer)?;\n\n assert_eq!(\"aloha\", actual.as_str());\n\n let buffer = read_file(hello).await?;\n\n let actual = String::from_utf8(buffer)?;\n\n assert_eq!(\"hello\", actual.as_str());\n\n remove_directory(\"resources/utils/files/to/app\").await\n\n }\n\n}\n", "file_path": "pipebuilder_common/src/utils.rs", "rank": 80, "score": 100646.64245841953 }, { "content": "fn is_non_empty(s: &str) -> bool {\n\n !s.is_empty()\n\n}\n\n\n", "file_path": "pipebuilder_common/src/catalog.rs", "rank": 81, "score": 99782.17457386114 }, { "content": "fn is_snake_lower_case(s: &str) -> bool {\n\n is_snake_case(s, false)\n\n}\n\n\n", "file_path": "pipebuilder_common/src/catalog.rs", "rank": 82, "score": 98265.87811733616 }, { "content": " #[doc = \"Generated trait containing gRPC methods that should be implemented for use with BuilderServer.\"]\n\n #[async_trait]\n\n pub trait Builder: Send + Sync + 'static {\n\n async fn build(\n\n &self,\n\n request: tonic::Request<super::BuildRequest>,\n\n ) -> 
Result<tonic::Response<super::BuildResponse>, tonic::Status>;\n\n async fn cancel_build(\n\n &self,\n\n request: tonic::Request<super::CancelBuildRequest>,\n\n ) -> Result<tonic::Response<super::CancelBuildResponse>, tonic::Status>;\n\n async fn get_build_log(\n\n &self,\n\n request: tonic::Request<super::GetBuildLogRequest>,\n\n ) -> Result<tonic::Response<super::GetBuildLogResponse>, tonic::Status>;\n\n async fn scan_build(\n\n &self,\n\n request: tonic::Request<super::ScanBuildRequest>,\n\n ) -> Result<tonic::Response<super::ScanBuildResponse>, tonic::Status>;\n\n async fn delete_build_cache(\n\n &self,\n\n request: tonic::Request<super::DeleteBuildCacheRequest>,\n", "file_path": "pipebuilder_common/src/grpc/build.rs", "rank": 83, "score": 95593.47238009072 }, { "content": "fn is_snake_case(s: &str, uppercase: bool) -> bool {\n\n // no leading underscore\n\n let mut underscore = true;\n\n let mut initial_char = true;\n\n for c in s.chars() {\n\n if initial_char && !c.is_ascii() {\n\n return false;\n\n }\n\n initial_char = false;\n\n if c.is_numeric() {\n\n underscore = false;\n\n continue;\n\n }\n\n if c.is_ascii() && c.is_ascii_uppercase() == uppercase {\n\n underscore = false;\n\n continue;\n\n }\n\n if c == '_' {\n\n if underscore {\n\n // consecutive underscore\n\n return false;\n\n }\n\n underscore = true;\n\n continue;\n\n }\n\n return false;\n\n }\n\n true\n\n}\n\n\n", "file_path": "pipebuilder_common/src/catalog.rs", "rank": 84, "score": 95138.19682223245 }, { "content": "fn build_header_map(headers: &HashMap<String, String>) -> HeaderMap {\n\n let mut hmap = HeaderMap::new();\n\n for (name, value) in headers {\n\n hmap.insert::<HeaderName>(\n\n name.parse()\n\n .unwrap_or_else(|_| panic!(\"invalid header name '{}'\", name)),\n\n value\n\n .parse()\n\n .unwrap_or_else(|_| panic!(\"invalid header value '{}'\", value)),\n\n );\n\n }\n\n hmap\n\n}\n\n\n\npub struct ApiClient {\n\n client: Client,\n\n endpoint: String,\n\n basic_auth: Option<BasicAuth>,\n\n 
bearer_auth_token: Option<String>,\n\n headers: HeaderMap,\n", "file_path": "pipebuilder_common/src/api/client.rs", "rank": 85, "score": 87979.7894807707 }, { "content": "fn init_with_full_formatter() {\n\n tracing_subscriber::fmt().init()\n\n}\n\n\n", "file_path": "pipebuilder_common/src/logging.rs", "rank": 86, "score": 62111.38785435671 }, { "content": "fn init_with_json_formatter() {\n\n tracing_subscriber::fmt().json().flatten_event(true).init()\n\n}\n\n\n", "file_path": "pipebuilder_common/src/logging.rs", "rank": 87, "score": 62111.38785435671 }, { "content": "fn init_with_pretty_formatter() {\n\n tracing_subscriber::fmt().pretty().init()\n\n}\n\n\n", "file_path": "pipebuilder_common/src/logging.rs", "rank": 88, "score": 62111.38785435671 }, { "content": "pub trait Resource {\n\n fn ty() -> ResourceType;\n\n}\n\n\n", "file_path": "pipebuilder_common/src/resource.rs", "rank": 89, "score": 61985.67617453172 }, { "content": "// metadata + data\n\npub trait BlobResource {\n\n fn incr_usage(&mut self);\n\n fn new(size: usize) -> Self;\n\n}\n", "file_path": "pipebuilder_common/src/resource.rs", "rank": 90, "score": 61173.56284270548 }, { "content": "pub trait PrintHeader {\n\n fn print_header();\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct BuildRequest {\n\n pub namespace: String,\n\n // project id\n\n pub id: String,\n\n pub manifest_version: u64,\n\n pub target_platform: Option<String>,\n\n}\n\n\n\nimpl BuildRequest {\n\n pub fn set_target_platform(&mut self, target_platform: String) {\n\n self.target_platform = Some(target_platform)\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n", "file_path": "pipebuilder_common/src/api/models.rs", "rank": 91, "score": 60381.874973426646 }, { "content": "// cargo ops\n\nfn cargo_binary() -> OsString {\n\n match std::env::var_os(\"CARGO\") {\n\n Some(cargo) => cargo,\n\n None => \"cargo\".to_owned().into(),\n\n }\n\n}\n\n\n\npub async fn cargo_init<S>(path: S) -> Result<()>\n\nwhere\n\n S: 
AsRef<std::ffi::OsStr>,\n\n{\n\n let mut cmd = Command::new(cargo_binary());\n\n cmd.arg(\"init\").arg(path);\n\n let (code, out) = cmd_status_output(cmd).await?;\n\n match code == 0 {\n\n true => Ok(()),\n\n false => Err(cargo_error(\"init\", code, out)),\n\n }\n\n}\n\n\n", "file_path": "pipebuilder_common/src/utils.rs", "rank": 92, "score": 60056.40729717829 }, { "content": "pub trait ScheduleHash<T> {\n\n fn schedule_hash(&self) -> T;\n\n}\n\n\n\n// (namespace, id)\n\npub struct ScheduleDescriptor<'a>(pub &'a str, pub &'a str);\n\n\n\nimpl<'a> ScheduleHash<String> for ScheduleDescriptor<'a> {\n\n fn schedule_hash(&self) -> String {\n\n format!(\"{}/{}\", self.0, self.1)\n\n }\n\n}\n", "file_path": "pipebuilder_common/src/schedule.rs", "rank": 93, "score": 59249.081335345516 }, { "content": "// hash\n\nfn fnv1a<T>(t: &T) -> u64\n\nwhere\n\n T: Hash,\n\n{\n\n let mut hasher = FnvHasher::default();\n\n t.hash(&mut hasher);\n\n hasher.finish()\n\n}\n\n\n", "file_path": "pipebuilder_common/src/utils.rs", "rank": 94, "score": 57079.75532841055 }, { "content": " #[doc = \"Generated trait containing gRPC methods that should be implemented for use with RepositoryServer.\"]\n\n #[async_trait]\n\n pub trait Repository: Send + Sync + 'static {\n\n async fn get_manifest(\n\n &self,\n\n request: tonic::Request<super::GetManifestRequest>,\n\n ) -> Result<tonic::Response<super::GetManifestResponse>, tonic::Status>;\n\n async fn put_manifest(\n\n &self,\n\n request: tonic::Request<super::PutManifestRequest>,\n\n ) -> Result<tonic::Response<super::PutManifestResponse>, tonic::Status>;\n\n async fn delete_manifest(\n\n &self,\n\n request: tonic::Request<super::DeleteManifestRequest>,\n\n ) -> Result<tonic::Response<super::DeleteManifestResponse>, tonic::Status>;\n\n async fn get_app(\n\n &self,\n\n request: tonic::Request<super::GetAppRequest>,\n\n ) -> Result<tonic::Response<super::GetAppResponse>, tonic::Status>;\n\n async fn post_app(\n\n &self,\n\n request: 
tonic::Request<super::PostAppRequest>,\n", "file_path": "pipebuilder_common/src/grpc/repository.rs", "rank": 95, "score": 55872.6719212894 }, { "content": " #[doc = \"Generated trait containing gRPC methods that should be implemented for use with HealthServer.\"]\n\n #[async_trait]\n\n pub trait Health: Send + Sync + 'static {\n\n async fn health(\n\n &self,\n\n request: tonic::Request<super::HealthRequest>,\n\n ) -> Result<tonic::Response<super::HealthResponse>, tonic::Status>;\n\n }\n\n #[derive(Debug)]\n\n pub struct HealthServer<T: Health> {\n\n inner: _Inner<T>,\n\n accept_compression_encodings: (),\n\n send_compression_encodings: (),\n\n }\n\n struct _Inner<T>(Arc<T>);\n\n impl<T: Health> HealthServer<T> {\n\n pub fn new(inner: T) -> Self {\n\n let inner = Arc::new(inner);\n\n let inner = _Inner(inner);\n\n Self {\n\n inner,\n\n accept_compression_encodings: Default::default(),\n", "file_path": "pipebuilder_common/src/grpc/health.rs", "rank": 96, "score": 55872.6719212894 }, { "content": " #[doc = \"Generated trait containing gRPC methods that should be implemented for use with SchedulerServer.\"]\n\n #[async_trait]\n\n pub trait Scheduler: Send + Sync + 'static {\n\n async fn schedule(\n\n &self,\n\n request: tonic::Request<super::ScheduleRequest>,\n\n ) -> Result<tonic::Response<super::ScheduleResponse>, tonic::Status>;\n\n }\n\n #[derive(Debug)]\n\n pub struct SchedulerServer<T: Scheduler> {\n\n inner: _Inner<T>,\n\n accept_compression_encodings: (),\n\n send_compression_encodings: (),\n\n }\n\n struct _Inner<T>(Arc<T>);\n\n impl<T: Scheduler> SchedulerServer<T> {\n\n pub fn new(inner: T) -> Self {\n\n let inner = Arc::new(inner);\n\n let inner = _Inner(inner);\n\n Self {\n\n inner,\n\n accept_compression_encodings: Default::default(),\n", "file_path": "pipebuilder_common/src/grpc/schedule.rs", "rank": 97, "score": 55872.6719212894 }, { "content": " const TEST_CATALOG_YAML: &str = r#\"\n\n---\n\ninterval:\n\n Secs: 1000\n\nticks: 10\n\n\"#;\n\n const 
TEST_CATALOG_SCHEMA: &str = r##\"\n\n{\n\n \"title\": \"test_catalog_schema\",\n\n \"type\": \"object\",\n\n \"definitions\": {\n\n \"interval_in_millis\": {\n\n \"type\": \"object\",\n\n \"properties\": {\n\n \"Millis\": {\n\n \"type\": \"integer\"\n\n }\n\n },\n\n \"required\": [ \"Millis\" ],\n\n \"additionalProperties\": false\n", "file_path": "pipebuilder_common/src/catalog.rs", "rank": 98, "score": 51406.01756493403 }, { "content": "#[cfg(feature = \"itest\")]\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use crate::utils::{\n\n build_api_client, list_api_state, list_builder_state, list_repository_state,\n\n list_scheduler_state, shutdown_ci, wait,\n\n };\n\n use pipebuilder_common::NodeRole;\n\n\n\n const TEST_CLUSTER_READY_MILLIS: u64 = 30000;\n\n\n\n #[tokio::test]\n\n async fn test_node() {\n\n wait(TEST_CLUSTER_READY_MILLIS).await;\n\n let client = build_api_client(\"resources/cli.yml\").await.unwrap();\n\n // validate api\n\n let node_states = list_api_state(&client).await.unwrap();\n\n assert_eq!(1, node_states.len());\n\n let node_state = node_states.get(0).unwrap();\n", "file_path": "e2e/src/node.rs", "rank": 99, "score": 46314.02178322455 } ]
Rust
src/lib.rs
denbeigh2000/rusty-interaction
ee7abe7d1e57dc3de58ef89ab2441c40108b3aad
#![warn(missing_docs)] #[macro_use] mod macros; #[allow(dead_code)] const BASE_URL: &str = "https://discord.com/api/v9"; #[cfg(feature = "types")] pub mod types; #[cfg(feature = "security")] pub mod security; #[cfg(any(feature = "handler", feature = "extended-handler"))] #[cfg_attr(docsrs, doc(cfg(feature = "handler")))] pub mod handler; #[cfg(any(feature = "handler", feature = "extended-handler"))] #[cfg_attr(docsrs, doc(cfg(feature = "handler")))] pub use actix; #[cfg(any(feature = "handler", feature = "extended-handler"))] #[cfg_attr(docsrs, doc(cfg(feature = "handler")))] pub use log; #[cfg(any(feature = "handler", feature = "extended-handler"))] #[cfg_attr(docsrs, doc(cfg(feature = "handler")))] pub use attributes::*; #[cfg(all(test, feature = "security"))] mod tests; pub trait Builder<T> { type Error: std::error::Error; fn build(self) -> Result<T, Self::Error>; } #[macro_export] #[doc(hidden)] macro_rules! expect_successful_api_response { ($response:ident, $succret:expr) => { match $response { Err(e) => { debug!("Discord API request failed: {:#?}", e); Err(HttpError { code: 0, message: format!("{:#?}", e), }) } Ok(r) => { let st = r.status(); if !st.is_success() { let e = format!("{:#?}", r.text().await); debug!("Discord API returned an error: {:#?}", e); Err(HttpError { code: st.as_u16(), message: e, }) } else { $succret } } } }; } #[macro_export] #[doc(hidden)] macro_rules! expect_specific_api_response { ($response:ident, $expres:expr, $succret:expr) => { match $response { Err(e) => { debug!("Discord API request failed: {:#?}", e); Err(HttpError { code: 0, message: format!("{:#?}", e), }) } Ok(r) => { let st = r.status(); if st != $expres { let e = format!("{:#?}", r.text().await); debug!("Discord API returned an error: {:#?}", e); Err(HttpError { code: st.as_u16(), message: e, }) } else { $succret } } } }; } #[macro_export] #[doc(hidden)] macro_rules! 
expect_successful_api_response_and_return { ($response:ident, $struc:ident, $retval:ident, $succret:expr) => { match $response { Err(e) => { debug!("Discord API request failed: {:#?}", e); Err(HttpError { code: 0, message: format!("{:#?}", e), }) } Ok(r) => { let st = r.status(); let text = r.text().await.unwrap(); if !st.is_success() { let e = format!("{:#?}", &text); debug!("Discord API returned an error: {:#?}", e); Err(HttpError { code: st.as_u16(), message: e, }) } else { let a: Result<$struc, serde_json::Error> = serde_json::from_str(&text); match a { Err(e) => { debug!("Failed to decode response: {:#?}", e); debug!("Original response: {:#?}", &text); Err(HttpError { code: 500, message: format!("{:?}", e), }) } Ok($retval) => $succret, } } } } }; }
#![warn(missing_docs)] #[macro_use] mod macros; #[allow(dead_code)] const BASE_URL: &str = "https://discord.com/api/v9"; #[cfg(feature = "types")] pub mod types; #[cfg(feature = "security")] pub mod security; #[cfg(any(feature = "handler", feature = "extended-handler"))] #[cfg_attr(docsrs, doc(cfg(feature = "handler")))] pub mod handler; #[cfg(any(feature = "handler", feature = "extended-handler"))] #[cfg_attr(docsrs, doc(cfg(feature = "handler")))] pub use actix; #[cfg(any(feature = "handler", feature = "extended-handler"))] #[cfg_attr(docsrs, doc(cfg(feature = "handler")))] pub use log; #[cfg(any(feature = "handler", feature = "extended-handler"))] #[cfg_attr(docsrs, doc(cfg(feature = "handler")))] pub use attributes::*; #[cfg(all(test, feature = "security"))] mod tests; pub trait Builder<T> { type Error: std::error::Error; fn build(self) -> Result<T, Self::Err
} else { let a: Result<$struc, serde_json::Error> = serde_json::from_str(&text); match a { Err(e) => { debug!("Failed to decode response: {:#?}", e); debug!("Original response: {:#?}", &text); Err(HttpError { code: 500, message: format!("{:?}", e), }) } Ok($retval) => $succret, } } } } }; }
or>; } #[macro_export] #[doc(hidden)] macro_rules! expect_successful_api_response { ($response:ident, $succret:expr) => { match $response { Err(e) => { debug!("Discord API request failed: {:#?}", e); Err(HttpError { code: 0, message: format!("{:#?}", e), }) } Ok(r) => { let st = r.status(); if !st.is_success() { let e = format!("{:#?}", r.text().await); debug!("Discord API returned an error: {:#?}", e); Err(HttpError { code: st.as_u16(), message: e, }) } else { $succret } } } }; } #[macro_export] #[doc(hidden)] macro_rules! expect_specific_api_response { ($response:ident, $expres:expr, $succret:expr) => { match $response { Err(e) => { debug!("Discord API request failed: {:#?}", e); Err(HttpError { code: 0, message: format!("{:#?}", e), }) } Ok(r) => { let st = r.status(); if st != $expres { let e = format!("{:#?}", r.text().await); debug!("Discord API returned an error: {:#?}", e); Err(HttpError { code: st.as_u16(), message: e, }) } else { $succret } } } }; } #[macro_export] #[doc(hidden)] macro_rules! expect_successful_api_response_and_return { ($response:ident, $struc:ident, $retval:ident, $succret:expr) => { match $response { Err(e) => { debug!("Discord API request failed: {:#?}", e); Err(HttpError { code: 0, message: format!("{:#?}", e), }) } Ok(r) => { let st = r.status(); let text = r.text().await.unwrap(); if !st.is_success() { let e = format!("{:#?}", &text); debug!("Discord API returned an error: {:#?}", e); Err(HttpError { code: st.as_u16(), message: e, })
random
[ { "content": "fn handler(\n\n _attr: TokenStream,\n\n item: TokenStream,\n\n defer_return: quote::__private::TokenStream,\n\n) -> TokenStream {\n\n // There is _probably_ a more efficient way to do what I want to do, but hey I am here\n\n // to learn so why not join me on my quest to create this procedural macro...lol\n\n let mut defer = false;\n\n\n\n // Parse the stream of tokens to something more usable.\n\n let input = syn::parse_macro_input!(item as syn::ItemFn);\n\n\n\n // Let's see if the programmer wants to respond with a deferring acknowlegdement first.\n\n // If so, the end-result needs to be built differently.\n\n for at in &input.attrs {\n\n for seg in at.path.segments.clone() {\n\n if seg.ident == \"defer\" {\n\n defer = true;\n\n }\n\n }\n", "file_path": "attributes/src/lib.rs", "rank": 0, "score": 118441.11971324463 }, { "content": "type HandlerFunction = fn(\n\n &mut InteractionHandler,\n\n Context,\n\n) -> Pin<Box<dyn Future<Output = HandlerResponse> + Send + '_>>;\n\n\n\nmacro_rules! 
match_handler_response {\n\n ($response:ident) => {\n\n\n\n if let Ok(ref __unwrapped_response__) = $response{\n\n match __unwrapped_response__.r#type {\n\n InteractionResponseType::None => {\n\n Ok(HttpResponse::build(StatusCode::NO_CONTENT).finish())\n\n }\n\n InteractionResponseType::DefferedChannelMessageWithSource\n\n | InteractionResponseType::DefferedUpdateMessage => {\n\n /* The use of HTTP code 202 is more appropriate when an Interaction is deffered.\n\n If an application is first sending a deffered channel message response, this usually means the system\n\n is still processing whatever it is doing.\n\n See the spec: https://tools.ietf.org/html/rfc7231#section-6.3.3 */\n\n Ok(HttpResponse::build(StatusCode::ACCEPTED).json(__unwrapped_response__))\n", "file_path": "src/handler.rs", "rank": 1, "score": 117287.72990815145 }, { "content": "/// Verifies an incoming Interaction.\n\n/// This verification is mandatory for every incoming Interaction.\n\n/// See [the developer docs](https://discord.com/developers/docs/interactions/slash-commands#security-and-authorization) for more info\n\npub fn verify_discord_message(\n\n public_key: PublicKey,\n\n signature: &str,\n\n timestamp: &str,\n\n body: &str,\n\n) -> Result<(), ValidationError> {\n\n let signature_bytes = hex::decode(signature)\n\n .map_err(|_| ValidationError::KeyConversionError { name: \"Signature\" })?;\n\n\n\n let signature_bytes =\n\n signature_bytes\n\n .try_into()\n\n .map_err(|_| ValidationError::KeyConversionError {\n\n name: \"Signature Length\",\n\n })?;\n\n\n\n let signature = Signature::new(signature_bytes);\n\n\n\n // Format the data to verify (Timestamp + body)\n\n let msg = format!(\"{}{}\", timestamp, body);\n\n\n\n public_key\n\n .verify(msg.as_bytes(), &signature)\n\n .map_err(|_| ValidationError::InvalidSignatureError)\n\n}\n", "file_path": "src/security.rs", "rank": 2, "score": 107419.90358952581 }, { "content": "/// Convenience procedural macro that allows you to bind an async 
function to the [`InteractionHandler`] for handling component interactions.\n\npub fn component_handler(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let ret = quote!(\n\n ::rusty_interaction::types::interaction::InteractionResponseType::DefferedUpdateMessage\n\n );\n\n\n\n handler(attr, item, ret)\n\n}\n\n\n\n#[proc_macro_attribute]\n", "file_path": "attributes/src/lib.rs", "rank": 3, "score": 100729.66546835285 }, { "content": "/// Simpler header getter from a HTTP request\n\nfn get_header<'a>(req: &'a HttpRequest, header: &str) -> Option<&'a str> {\n\n req.headers().get(header)?.to_str().ok()\n\n}\n", "file_path": "src/handler.rs", "rank": 4, "score": 98712.49164419665 }, { "content": "// This is just here to make the tests work...lol\n\npub fn slash_command_test(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n // There is _probably_ a more efficient way to do what I want to do, but hey I am here\n\n // to learn so why not join me on my quest to create this procedural macro...lol\n\n let mut defer = false;\n\n\n\n // Parse the stream of tokens to something more usable.\n\n let input = syn::parse_macro_input!(item as syn::ItemFn);\n\n\n\n // Let's see if the programmer wants to respond with a deferring acknowlegdement first.\n\n // If so, the end-result needs to be built differently.\n\n for at in &input.attrs {\n\n for seg in at.path.segments.clone() {\n\n if seg.ident == \"defer\" {\n\n defer = true;\n\n }\n\n }\n\n }\n\n\n\n // Ok here comes the fun part\n\n\n", "file_path": "attributes/src/lib.rs", "rank": 5, "score": 98461.08615560099 }, { "content": "/// Send out a deffered channel message response before doing work.\n\npub fn defer(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n item\n\n}\n\n\n\n#[doc(hidden)]\n\n#[proc_macro_attribute]\n\n#[doc(hidden)]\n", "file_path": "attributes/src/lib.rs", "rank": 7, "score": 81622.15329499202 }, { "content": "/// Convenience procedural macro that allows you to bind an async 
function to the [`InteractionHandler`]\n\npub fn slash_command(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let ret = quote!(::rusty_interaction::types::interaction::InteractionResponseType::DefferedChannelMessageWithSource);\n\n\n\n handler(attr, item, ret)\n\n}\n\n\n\n#[proc_macro_attribute]\n", "file_path": "attributes/src/lib.rs", "rank": 8, "score": 79607.12442252616 }, { "content": "// Discord interaction verification test OK 1\n\nfn crypto_verify_test_ok() {\n\n let bytes = hex::decode(TEST_PUB_KEY).unwrap();\n\n\n\n let pbk = PublicKey::from_bytes(&bytes).expect(\"Failed to convert public key.\");\n\n\n\n let res = verify_discord_message(pbk,\n\n \"c41278a0cf22bf8f3061756063cd7ef548a3df23d0ffc5496209aa0ad4d9593343801bf11e099f41bca1afcac2c70734eebafede3dec7aac1caa5d8fade5af0c\",\n\n \"1616343571\",\n\n &String::from(\"{\\\"type\\\" : 1}\"));\n\n\n\n match res {\n\n Err(ValidationError::KeyConversionError { name }) => panic!(\n\n \"One of the keys failed to convert to proper types! Key: {}\",\n\n name\n\n ),\n\n Err(ValidationError::InvalidSignatureError) => {\n\n panic!(\"Unexpected invalidation of signature\")\n\n }\n\n Ok(_) => {\n\n // Good!\n\n }\n\n }\n\n}\n\n\n\n#[test]\n\n#[should_panic]\n", "file_path": "src/tests.rs", "rank": 9, "score": 75554.6332040731 }, { "content": "// Discord interacton verification test invalid 1\n\nfn crypto_verify_test_fail() {\n\n let bytes = hex::decode(TEST_PUB_KEY).unwrap();\n\n let pbk = PublicKey::from_bytes(&bytes).expect(\"Failed to convert public key.\");\n\n\n\n let res = verify_discord_message(pbk,\n\n \"69696969696969696696969696969696969696969696969696969696969696969696969696969696969696969696969696969696969696969696969696696969\",\n\n \"1616343571\",\n\n &String::from(\"{\\\"type\\\" : 1}\"));\n\n\n\n match res {\n\n Err(ValidationError::KeyConversionError { name }) => panic!(\n\n \"One of the keys failed to convert to proper types! 
Key: {}\",\n\n name\n\n ),\n\n Err(ValidationError::InvalidSignatureError) => {\n\n panic!(\"Unexpected invalidation of signature\")\n\n } // This is what it should be!\n\n\n\n Ok(_) => {\n\n // Good!\n", "file_path": "src/tests.rs", "rank": 10, "score": 75554.6332040731 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n// async_trait::async_trait;\n\n\n\n/// Module containing the embed structures\n\npub mod embed;\n\n\n\n/// Module containing all structs for defining application commands\n\npub mod application;\n\n/// Module containing structures for interactions\n\npub mod interaction;\n\n\n\n/// Module containing structures for members/users.\n\npub mod user;\n\n\n\n/// Module containing structures for using [Message Components](https://discord.com/developers/docs/interactions/message-components#what-are-components)\n\npub mod components;\n\n\n\n/// Module containing structures for guilds\n\npub mod guild;\n", "file_path": "src/types/mod.rs", "rank": 11, "score": 61600.28600636365 }, { "content": "//use interaction::{InteractionResponse, Interaction};\n\n\n\n/// Discord's 'snowflake'. It's a 64bit unsigned integer that is mainly used for identifying anything Discord.\n\npub type Snowflake = u64;\n\n\n\n#[doc(hidden)]\n\n#[derive(Clone, Serialize, Deserialize, Debug)]\n\npub struct HttpError {\n\n pub code: u16,\n\n pub message: String,\n\n}\n\n#[doc(hidden)]\n\nimpl HttpError {\n\n pub fn new(code: u16, message: String) -> HttpError {\n\n HttpError { code, message }\n\n }\n\n}\n\n#[doc(hidden)]\n\n#[derive(Clone, Serialize, Deserialize)]\n\npub struct MessageError {\n", "file_path": "src/types/mod.rs", "rank": 12, "score": 61596.7476593488 }, { "content": " pub message: String,\n\n}\n\n#[doc(hidden)]\n\nimpl MessageError {\n\n pub fn new(message: String) -> MessageError {\n\n MessageError { message }\n\n }\n\n}\n\n#[doc(hidden)]\n\nimpl From<HttpError> for MessageError {\n\n fn from(HttpError { message, .. 
}: HttpError) -> MessageError {\n\n MessageError { message }\n\n }\n\n}\n", "file_path": "src/types/mod.rs", "rank": 13, "score": 61593.52822691885 }, { "content": "type AnyMap = Map<dyn CloneAny + Send + Sync>;\n\n\n\n/// Alias for InteractionResponse\n\npub type HandlerResponse = Result<InteractionResponse, std::convert::Infallible>;\n\n\n", "file_path": "src/handler.rs", "rank": 14, "score": 59943.76930749338 }, { "content": "/// Macro that generates an `HttpResponse` containing a message serialized in JSON\n\n#[macro_export]\n\n#[doc(hidden)]\n\nmacro_rules! ERROR_RESPONSE {\n\n ($status:expr, $message:expr) => {{\n\n let emsg = $crate::types::MessageError::new(::std::string::String::from($message));\n\n\n\n Ok(::actix_web::HttpResponse::build(\n\n ::actix_web::http::StatusCode::from_u16($status).unwrap(),\n\n )\n\n .json(emsg))\n\n }};\n\n}\n", "file_path": "src/macros.rs", "rank": 15, "score": 34268.851320354035 }, { "content": "use ed25519_dalek::Verifier;\n\nuse ed25519_dalek::{PublicKey, Signature};\n\n\n\nuse std::convert::TryInto;\n\n\n\n/// If verification failes, it will return the `ValidationError` enum.\n\npub enum ValidationError {\n\n /// For anything related to conversion errors\n\n KeyConversionError {\n\n /// What error?\n\n name: &'static str,\n\n },\n\n /// For invalid keys\n\n InvalidSignatureError,\n\n}\n\n\n\n/// Verifies an incoming Interaction.\n\n/// This verification is mandatory for every incoming Interaction.\n\n/// See [the developer docs](https://discord.com/developers/docs/interactions/slash-commands#security-and-authorization) for more info\n", "file_path": "src/security.rs", "rank": 16, "score": 34243.523338277606 }, { "content": "\n\n#[cfg(feature = \"handler\")]\n\nuse log::error;\n\n\n\nconst TEST_PUB_KEY: &str = \"82d8d97fe0641e68a1b0b11220f05e9ea0539a0cdc002119d4a9e9e025aba1e9\";\n\n/*------------------------------\n\nSECURITY TESTS\n\n*/\n\n#[test]\n\n// Discord interaction verification test OK 1\n", "file_path": 
"src/tests.rs", "rank": 17, "score": 33574.67828240198 }, { "content": "}\n\n#[cfg(all(feature = \"handler\", not(feature = \"extended-handler\")))]\n\nmacro_rules! init_handler {\n\n () => {\n\n InteractionHandler::new(0, TEST_PUB_KEY, None)\n\n };\n\n}\n\n\n\n#[cfg(feature = \"extended-handler\")]\n\nmacro_rules! init_handler {\n\n () => {\n\n InteractionHandler::new(0, TEST_PUB_KEY, Some(&String::new()))\n\n };\n\n}\n\n\n\n#[cfg(feature = \"handler\")]\n\n#[actix_rt::test]\n\n// Request with bad content with no Content-Type header present\n\n// Expected result: Return 400 without panicking\n\nasync fn interactions_no_content_type_header_test() {\n", "file_path": "src/tests.rs", "rank": 18, "score": 33568.571772507785 }, { "content": "use crate::security::*;\n\n\n\nuse ed25519_dalek::PublicKey;\n\n\n\n#[cfg(feature = \"handler\")]\n\nuse crate::handler::InteractionHandler;\n\n#[cfg(feature = \"handler\")]\n\nuse crate::types;\n\n\n\n#[cfg(feature = \"handler\")]\n\nuse crate::types::interaction::{\n\n Context, InteractionResponse, InteractionResponseBuilder, InteractionResponseType,\n\n};\n\n#[cfg(feature = \"handler\")]\n\nuse crate::*;\n\n\n\n#[cfg(feature = \"handler\")]\n\nuse actix_web::{http, test, web, App, HttpRequest};\n\n#[cfg(feature = \"handler\")]\n\nuse std::sync::Mutex;\n", "file_path": "src/tests.rs", "rank": 19, "score": 33566.74821933369 }, { "content": " .header(\"X-Signature-Ed25519\", \"c41278a0cf22bf8f3061756063cd7ef548a3df23d0ffc5496209aa0ad4d9593343801bf11e099f41bca1afcac2c70734eebafede3dec7aac1caa5d8fade5af0c\")\n\n .header(\"X-Signature-Timestamp\", \"1616343571\")\n\n .set_payload(\"{\\\"type\\\" : 1}\")\n\n .to_request();\n\n\n\n let res: types::interaction::InteractionResponse =\n\n test::read_response_json(&mut app, req).await;\n\n\n\n assert_eq!(\n\n res.r#type,\n\n types::interaction::InteractionResponseType::Pong\n\n );\n\n}\n\n\n\n#[cfg(feature = \"handler\")]\n\n#[actix_rt::test]\n\n// Bad content but OK signature test\n\n// 
Expected result: Return 400 with error, don't panic\n\nasync fn interactions_bad_body_test() {\n\n let ih = init_handler!();\n", "file_path": "src/tests.rs", "rank": 20, "score": 33561.08312833054 }, { "content": " return ctx.respond().content(\"TEST\").finish();\n\n}\n\n#[cfg(feature = \"handler\")]\n\n#[allow(unused_must_use)]\n\n#[slash_command_test]\n\nasync fn normal_handle_value_test(ctx: Context) -> InteractionResponse {\n\n let response = ctx.respond().content(\"TEST\").finish();\n\n return response;\n\n}\n\n#[cfg(feature = \"handler\")]\n\n#[allow(unused_must_use)]\n\n#[slash_command_test]\n\nasync fn normal_handle_direct_test(_ctx: Context) -> InteractionResponse {\n\n return InteractionResponseBuilder::default()\n\n .content(\"TEST\")\n\n .finish();\n\n}\n\n#[cfg(feature = \"handler\")]\n\n#[actix_rt::test]\n\nasync fn interactions_normal_handle_test() {\n", "file_path": "src/tests.rs", "rank": 21, "score": 33561.05841573053 }, { "content": " let ih = init_handler!();\n\n\n\n let data = web::Data::new(Mutex::new(ih));\n\n let mut app = interaction_app_init!(data);\n\n\n\n let req = test::TestRequest::post()\n\n .uri(\"/api/discord/interactions\")\n\n .set_payload(\"This is some malformed text { the system : can't really handle }\")\n\n .to_request();\n\n\n\n let res: types::MessageError = test::read_response_json(&mut app, req).await;\n\n\n\n assert_eq!(res.message, \"Bad Content-Type\");\n\n}\n\n\n\n#[cfg(feature = \"handler\")]\n\n#[actix_rt::test]\n\n// Request with bad content with no Content-Type header present\n\n// Expected result: Return 400 without panicking\n\nasync fn interactions_bad_content_type_header_test() {\n", "file_path": "src/tests.rs", "rank": 22, "score": 33560.89635520024 }, { "content": " let ih = init_handler!();\n\n\n\n let data = web::Data::new(Mutex::new(ih));\n\n let mut app = interaction_app_init!(data);\n\n\n\n let req = test::TestRequest::post()\n\n .uri(\"/api/discord/interactions\")\n\n .header(\"Content-Type\", 
\"plain/text\")\n\n .set_payload(\"This is some malformed text { the system : can't really handle }\")\n\n .to_request();\n\n\n\n let res: types::MessageError = test::read_response_json(&mut app, req).await;\n\n\n\n assert_eq!(res.message, \"Bad Content-Type\");\n\n}\n\n\n\n#[cfg(feature = \"handler\")]\n\n#[actix_rt::test]\n\n// Request with missing X-Signature-Ed25519 Header\n\n// Expected result: Return 400 without panicking\n", "file_path": "src/tests.rs", "rank": 23, "score": 33560.42215811607 }, { "content": " .to_request();\n\n\n\n let res: types::MessageError = test::read_response_json(&mut app, req).await;\n\n\n\n assert_eq!(res.message, \"Bad signature data\");\n\n}\n\n\n\n#[cfg(feature = \"handler\")]\n\n#[actix_rt::test]\n\n// Normal ping request\n\n// Expected result: Return 200 with payload\n\nasync fn interactions_ping_test() {\n\n let ih = init_handler!();\n\n\n\n let data = web::Data::new(Mutex::new(ih));\n\n let mut app = interaction_app_init!(data);\n\n\n\n let req = test::TestRequest::post()\n\n .uri(\"/api/discord/interactions\")\n\n .header(\"Content-Type\", \"application/json\")\n", "file_path": "src/tests.rs", "rank": 24, "score": 33560.390316160825 }, { "content": "async fn interactions_no_signature_header_test() {\n\n let ih = init_handler!();\n\n\n\n let data = web::Data::new(Mutex::new(ih));\n\n let mut app = interaction_app_init!(data);\n\n\n\n let req = test::TestRequest::post()\n\n .uri(\"/api/discord/interactions\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(\"X-Signature-Timestamp\", \"1229349\")\n\n .set_payload(\"This is some malformed text { the system : can't really handle }\")\n\n .to_request();\n\n\n\n let res: types::MessageError = test::read_response_json(&mut app, req).await;\n\n\n\n assert_eq!(res.message, \"Bad signature data\");\n\n}\n\n\n\n#[cfg(feature = \"handler\")]\n\n#[actix_rt::test]\n", "file_path": "src/tests.rs", "rank": 25, "score": 33559.907560105756 }, { "content": " let res: 
types::MessageError = test::read_response_json(&mut app, req).await;\n\n\n\n assert_eq!(res.message, \"Bad signature data\");\n\n}\n\n\n\n#[cfg(feature = \"handler\")]\n\n#[actix_rt::test]\n\n// Request with missing a signature that is too long (> 512 bits)\n\n// Expected result: Return 400 without panicking\n\nasync fn interactions_bad_signature_length_too_long_test() {\n\n let ih = init_handler!();\n\n\n\n let data = web::Data::new(Mutex::new(ih));\n\n let mut app = interaction_app_init!(data);\n\n\n\n let req = test::TestRequest::post()\n\n .uri(\"/api/discord/interactions\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(\"X-Signature-Ed25519\", \"6969696969696969669696969696969696969696969696969696969696969696969696969696969696969696969696969696969696969696969696969669696969696969696969696696969696969696969696969696969696969696969\")\n\n .set_payload(\"This is some malformed text { the system : can't really handle }\")\n", "file_path": "src/tests.rs", "rank": 26, "score": 33559.10015414099 }, { "content": "#[cfg(feature = \"handler\")]\n\n#[actix_rt::test]\n\n// Request with missing a signature that is too short (< 512 bits)\n\n// Expected result: Return 400 without panicking\n\nasync fn interactions_bad_signature_length_short_test() {\n\n let ih = init_handler!();\n\n\n\n let data = web::Data::new(Mutex::new(ih));\n\n let mut app = interaction_app_init!(data);\n\n\n\n let req = test::TestRequest::post()\n\n .uri(\"/api/discord/interactions\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(\n\n \"X-Signature-Ed25519\",\n\n \"69696969696969696696969696969696969696969696969696969696969696969\",\n\n )\n\n .set_payload(\"This is some malformed text { the system : can't really handle }\")\n\n .to_request();\n\n\n", "file_path": "src/tests.rs", "rank": 27, "score": 33557.90308247071 }, { "content": " .finish();\n\n\n\n //let expected_res = HttpResponse::build(StatusCode::OK).json(expected_data);\n\n\n\n assert_eq!(res, 
expected_data);\n\n}\n\n#[cfg(feature = \"handler\")]\n\n#[actix_rt::test]\n\nasync fn interactions_normal_from_value_handle_test() {\n\n let mut ih = init_handler!();\n\n\n\n ih.add_global_command(\"test\", normal_handle_direct_test);\n\n\n\n let data = web::Data::new(Mutex::new(ih));\n\n let mut app = interaction_app_init!(data);\n\n\n\n let req = test::TestRequest::post()\n\n .uri(\"/api/discord/interactions\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(\"X-Signature-Ed25519\", \"a27ed2fd0e91da58667bec63d14406e5274a0427edad9530b7d95e9d2b0fc4ee17f74e8a6bd3acd6623a05f1bde9e598fa37f3eedfe479da0a00da7827595e0b\")\n", "file_path": "src/tests.rs", "rank": 28, "score": 33557.73930769453 }, { "content": " //let expected_res = HttpResponse::build(StatusCode::OK).json(expected_data);\n\n\n\n assert_eq!(res, expected_data);\n\n}\n\n\n\n#[cfg(feature = \"handler\")]\n\nuse crate::types::interaction::WebhookMessage;\n\n#[cfg(feature = \"handler\")]\n\n#[slash_command_test]\n\n#[defer]\n\nasync fn deffered_handle_test(ctx: Context) -> InteractionResponse {\n\n return ctx.respond().content(\"TEST\").finish();\n\n}\n\n#[cfg(feature = \"handler\")]\n\n#[slash_command_test]\n\n#[defer]\n\nasync fn deffered_handle_value_test(ctx: Context) -> InteractionResponse {\n\n let response = ctx.respond().content(\"TEST\").finish();\n\n return response;\n\n}\n", "file_path": "src/tests.rs", "rank": 29, "score": 33557.66021971983 }, { "content": " data: None,\n\n };\n\n\n\n //let expected_res = HttpResponse::build(StatusCode::OK).json(expected_data);\n\n\n\n assert_eq!(res, expected_data);\n\n}\n\n#[cfg(feature = \"handler\")]\n\n#[actix_rt::test]\n\nasync fn interactions_deffered_from_direct_value_handle_test() {\n\n let mut ih = init_handler!();\n\n\n\n ih.add_global_command(\"test\", deffered_handle_direct_test);\n\n let data = web::Data::new(Mutex::new(ih));\n\n let mut app = interaction_app_init!(data);\n\n\n\n let req = test::TestRequest::post()\n\n 
.uri(\"/api/discord/interactions\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(\"X-Signature-Ed25519\", \"a27ed2fd0e91da58667bec63d14406e5274a0427edad9530b7d95e9d2b0fc4ee17f74e8a6bd3acd6623a05f1bde9e598fa37f3eedfe479da0a00da7827595e0b\")\n", "file_path": "src/tests.rs", "rank": 30, "score": 33557.61720318683 }, { "content": "#[cfg(feature = \"handler\")]\n\n#[slash_command_test]\n\n#[defer]\n\nasync fn deffered_handle_direct_test(_ctx: Context) -> InteractionResponse {\n\n return InteractionResponseBuilder::default()\n\n .content(\"TEST\")\n\n .finish();\n\n}\n\n#[cfg(feature = \"handler\")]\n\n#[actix_rt::test]\n\nasync fn interactions_deffered_handle_test() {\n\n let mut ih = init_handler!();\n\n\n\n ih.add_global_command(\"test\", deffered_handle_test);\n\n\n\n let data = web::Data::new(Mutex::new(ih));\n\n let mut app = interaction_app_init!(data);\n\n\n\n let req = test::TestRequest::post()\n\n .uri(\"/api/discord/interactions\")\n", "file_path": "src/tests.rs", "rank": 31, "score": 33557.51399432973 }, { "content": "\n\n let data = web::Data::new(Mutex::new(ih));\n\n let mut app = interaction_app_init!(data);\n\n\n\n let req = test::TestRequest::post()\n\n .uri(\"/api/discord/interactions\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(\"X-Signature-Ed25519\", \"51c5defa19cc2471a361c00c87a7f380d9e9d6cd21f05b65d3c223aac0b7d258277a09d0a016108e0be1338d985ed4ce0dae55e5ac93db5957a37ce31d007505\")\n\n .header(\"X-Signature-Timestamp\", \"1616343571\")\n\n .set_payload(\"this is some malformed {\\\"data\\\" : cant handle}\")\n\n .to_request();\n\n\n\n let res = test::call_service(&mut app, req).await;\n\n\n\n assert_eq!(res.status(), http::StatusCode::BAD_REQUEST);\n\n}\n\n#[cfg(feature = \"handler\")]\n\n#[allow(unused_must_use)]\n\n#[slash_command_test]\n\nasync fn normal_handle_test(ctx: Context) -> InteractionResponse {\n", "file_path": "src/tests.rs", "rank": 32, "score": 33556.391762881634 }, { "content": " }\n\n 
}\n\n}\n\n/*-------------------------------\n\nDiscord Interactions API tests (endpoint: /api/discord/interactions)\n\n*/\n\n#[cfg(feature = \"handler\")]\n\nmacro_rules! interaction_app_init {\n\n ($ih: ident) => {\n\n\n\n test::init_service(App::new().app_data($ih.clone()).route(\n\n \"/api/discord/interactions\",\n\n web::post().to(\n\n |data: web::Data<Mutex<InteractionHandler>>, req: HttpRequest, body: String| async move {\n\n data.lock().unwrap().interaction(req, body).await\n\n },\n\n ),\n\n ))\n\n .await;\n\n };\n", "file_path": "src/tests.rs", "rank": 33, "score": 33555.645877552815 }, { "content": " .header(\"Content-Type\", \"application/json\")\n\n .header(\"X-Signature-Ed25519\", \"a27ed2fd0e91da58667bec63d14406e5274a0427edad9530b7d95e9d2b0fc4ee17f74e8a6bd3acd6623a05f1bde9e598fa37f3eedfe479da0a00da7827595e0b\")\n\n .header(\"X-Signature-Timestamp\", \"1616343571\")\n\n .set_payload(\"{\\\"type\\\":2,\\\"token\\\":\\\"awQabcabc\\\",\\\"member\\\":{\\\"user\\\":{\\\"id\\\":\\\"317209107000066050\\\",\\\"username\\\":\\\"C0der\\\",\\\"avatar\\\":\\\"a_d5efa99b3eeaa7dd43acca82f5692432\\\",\\\"discriminator\\\":\\\"1337\\\",\\\"public_flags\\\":131141},\\\"roles\\\":[],\\\"premium_since\\\":null,\\\"permissions\\\":\\\"2147483647\\\",\\\"pending\\\":false,\\\"nick\\\":null,\\\"mute\\\":false,\\\"joined_at\\\":\\\"2017-03-13T19:19:14.040000+00:00\\\",\\\"is_pending\\\":false,\\\"deaf\\\":false},\\\"id\\\":\\\"786008729715212338\\\",\\\"guild_id\\\":\\\"290926798626357999\\\",\\\"data\\\":{\\\"name\\\":\\\"test\\\",\\\"id\\\":\\\"771825006014889984\\\"},\\\"channel_id\\\":\\\"645027906669510667\\\"}\")\n\n .to_request();\n\n\n\n let res: types::interaction::InteractionResponse =\n\n test::read_response_json(&mut app, req).await;\n\n\n\n let expected_data = InteractionResponse {\n\n r#type: InteractionResponseType::DefferedChannelMessageWithSource,\n\n data: None,\n\n };\n\n\n\n //let expected_res = 
HttpResponse::build(StatusCode::OK).json(expected_data);\n\n\n\n assert_eq!(res, expected_data);\n\n}\n\n#[cfg(feature = \"handler\")]\n\n#[actix_rt::test]\n", "file_path": "src/tests.rs", "rank": 34, "score": 33555.496796944615 }, { "content": " .header(\"X-Signature-Timestamp\", \"1616343571\")\n\n .set_payload(\"{\\\"type\\\":2,\\\"token\\\":\\\"awQabcabc\\\",\\\"member\\\":{\\\"user\\\":{\\\"id\\\":\\\"317209107000066050\\\",\\\"username\\\":\\\"C0der\\\",\\\"avatar\\\":\\\"a_d5efa99b3eeaa7dd43acca82f5692432\\\",\\\"discriminator\\\":\\\"1337\\\",\\\"public_flags\\\":131141},\\\"roles\\\":[],\\\"premium_since\\\":null,\\\"permissions\\\":\\\"2147483647\\\",\\\"pending\\\":false,\\\"nick\\\":null,\\\"mute\\\":false,\\\"joined_at\\\":\\\"2017-03-13T19:19:14.040000+00:00\\\",\\\"is_pending\\\":false,\\\"deaf\\\":false},\\\"id\\\":\\\"786008729715212338\\\",\\\"guild_id\\\":\\\"290926798626357999\\\",\\\"data\\\":{\\\"name\\\":\\\"test\\\",\\\"id\\\":\\\"771825006014889984\\\"},\\\"channel_id\\\":\\\"645027906669510667\\\"}\")\n\n .to_request();\n\n\n\n let res: types::interaction::InteractionResponse =\n\n test::read_response_json(&mut app, req).await;\n\n\n\n let expected_data = InteractionResponseBuilder::default()\n\n .content(\"TEST\")\n\n .finish();\n\n\n\n //let expected_res = HttpResponse::build(StatusCode::OK).json(expected_data);\n\n\n\n assert_eq!(res, expected_data);\n\n}\n\n#[cfg(feature = \"handler\")]\n\n#[actix_rt::test]\n\nasync fn interactions_normal_from_direct_call_handle_test() {\n\n let mut ih = init_handler!();\n\n\n", "file_path": "src/tests.rs", "rank": 35, "score": 33554.344118041176 }, { "content": "// Request with missing X-Signature-Timestamp Header\n\n// Expected result: Return 400 without panicking\n\nasync fn interactions_no_timestamp_header_test() {\n\n let ih = init_handler!();\n\n\n\n let data = web::Data::new(Mutex::new(ih));\n\n let mut app = interaction_app_init!(data);\n\n\n\n let req = test::TestRequest::post()\n\n 
.uri(\"/api/discord/interactions\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(\"X-Signature-Ed25519\", \"69696969696969696696969696969696969696969696969696969696969696969696969696969696969696969696969696969696969696969696969696696969\")\n\n .set_payload(\"This is some malformed text { the system : can't really handle }\")\n\n .to_request();\n\n\n\n let res: types::MessageError = test::read_response_json(&mut app, req).await;\n\n\n\n assert_eq!(res.message, \"Bad signature data\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 36, "score": 33554.18101689509 }, { "content": "async fn interactions_deffered_from_value_handle_test() {\n\n let mut ih = init_handler!();\n\n\n\n ih.add_global_command(\"test\", deffered_handle_value_test);\n\n let data = web::Data::new(Mutex::new(ih));\n\n let mut app = interaction_app_init!(data);\n\n\n\n let req = test::TestRequest::post()\n\n .uri(\"/api/discord/interactions\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(\"X-Signature-Ed25519\", \"a27ed2fd0e91da58667bec63d14406e5274a0427edad9530b7d95e9d2b0fc4ee17f74e8a6bd3acd6623a05f1bde9e598fa37f3eedfe479da0a00da7827595e0b\")\n\n .header(\"X-Signature-Timestamp\", \"1616343571\")\n\n .set_payload(\"{\\\"type\\\":2,\\\"token\\\":\\\"awQabcabc\\\",\\\"member\\\":{\\\"user\\\":{\\\"id\\\":\\\"317209107000066050\\\",\\\"username\\\":\\\"C0der\\\",\\\"avatar\\\":\\\"a_d5efa99b3eeaa7dd43acca82f5692432\\\",\\\"discriminator\\\":\\\"1337\\\",\\\"public_flags\\\":131141},\\\"roles\\\":[],\\\"premium_since\\\":null,\\\"permissions\\\":\\\"2147483647\\\",\\\"pending\\\":false,\\\"nick\\\":null,\\\"mute\\\":false,\\\"joined_at\\\":\\\"2017-03-13T19:19:14.040000+00:00\\\",\\\"is_pending\\\":false,\\\"deaf\\\":false},\\\"id\\\":\\\"786008729715212338\\\",\\\"guild_id\\\":\\\"290926798626357999\\\",\\\"data\\\":{\\\"name\\\":\\\"test\\\",\\\"id\\\":\\\"771825006014889984\\\"},\\\"channel_id\\\":\\\"645027906669510667\\\"}\")\n\n .to_request();\n\n\n\n let 
res: types::interaction::InteractionResponse =\n\n test::read_response_json(&mut app, req).await;\n\n\n\n let expected_data = InteractionResponse {\n\n r#type: InteractionResponseType::DefferedChannelMessageWithSource,\n", "file_path": "src/tests.rs", "rank": 37, "score": 33552.25658597744 }, { "content": " let mut ih = init_handler!();\n\n\n\n ih.add_global_command(\"test\", normal_handle_test);\n\n\n\n let data = web::Data::new(Mutex::new(ih));\n\n let mut app = interaction_app_init!(data);\n\n\n\n let req = test::TestRequest::post()\n\n .uri(\"/api/discord/interactions\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(\"X-Signature-Ed25519\", \"a27ed2fd0e91da58667bec63d14406e5274a0427edad9530b7d95e9d2b0fc4ee17f74e8a6bd3acd6623a05f1bde9e598fa37f3eedfe479da0a00da7827595e0b\")\n\n .header(\"X-Signature-Timestamp\", \"1616343571\")\n\n .set_payload(\"{\\\"type\\\":2,\\\"token\\\":\\\"awQabcabc\\\",\\\"member\\\":{\\\"user\\\":{\\\"id\\\":\\\"317209107000066050\\\",\\\"username\\\":\\\"C0der\\\",\\\"avatar\\\":\\\"a_d5efa99b3eeaa7dd43acca82f5692432\\\",\\\"discriminator\\\":\\\"1337\\\",\\\"public_flags\\\":131141},\\\"roles\\\":[],\\\"premium_since\\\":null,\\\"permissions\\\":\\\"2147483647\\\",\\\"pending\\\":false,\\\"nick\\\":null,\\\"mute\\\":false,\\\"joined_at\\\":\\\"2017-03-13T19:19:14.040000+00:00\\\",\\\"is_pending\\\":false,\\\"deaf\\\":false},\\\"id\\\":\\\"786008729715212338\\\",\\\"guild_id\\\":\\\"290926798626357999\\\",\\\"data\\\":{\\\"name\\\":\\\"test\\\",\\\"id\\\":\\\"771825006014889984\\\"},\\\"channel_id\\\":\\\"645027906669510667\\\"}\")\n\n .to_request();\n\n\n\n let res: types::interaction::InteractionResponse =\n\n test::read_response_json(&mut app, req).await;\n\n\n\n let expected_data = InteractionResponseBuilder::default()\n\n .content(\"TEST\")\n", "file_path": "src/tests.rs", "rank": 38, "score": 33551.640575377685 }, { "content": " ih.add_global_command(\"test\", normal_handle_value_test);\n\n\n\n let data = 
web::Data::new(Mutex::new(ih));\n\n let mut app = interaction_app_init!(data);\n\n\n\n let req = test::TestRequest::post()\n\n .uri(\"/api/discord/interactions\")\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(\"X-Signature-Ed25519\", \"a27ed2fd0e91da58667bec63d14406e5274a0427edad9530b7d95e9d2b0fc4ee17f74e8a6bd3acd6623a05f1bde9e598fa37f3eedfe479da0a00da7827595e0b\")\n\n .header(\"X-Signature-Timestamp\", \"1616343571\")\n\n .set_payload(\"{\\\"type\\\":2,\\\"token\\\":\\\"awQabcabc\\\",\\\"member\\\":{\\\"user\\\":{\\\"id\\\":\\\"317209107000066050\\\",\\\"username\\\":\\\"C0der\\\",\\\"avatar\\\":\\\"a_d5efa99b3eeaa7dd43acca82f5692432\\\",\\\"discriminator\\\":\\\"1337\\\",\\\"public_flags\\\":131141},\\\"roles\\\":[],\\\"premium_since\\\":null,\\\"permissions\\\":\\\"2147483647\\\",\\\"pending\\\":false,\\\"nick\\\":null,\\\"mute\\\":false,\\\"joined_at\\\":\\\"2017-03-13T19:19:14.040000+00:00\\\",\\\"is_pending\\\":false,\\\"deaf\\\":false},\\\"id\\\":\\\"786008729715212338\\\",\\\"guild_id\\\":\\\"290926798626357999\\\",\\\"data\\\":{\\\"name\\\":\\\"test\\\",\\\"id\\\":\\\"771825006014889984\\\"},\\\"channel_id\\\":\\\"645027906669510667\\\"}\")\n\n .to_request();\n\n\n\n let res: types::interaction::InteractionResponse =\n\n test::read_response_json(&mut app, req).await;\n\n\n\n let expected_data = InteractionResponseBuilder::default()\n\n .content(\"TEST\")\n\n .finish();\n\n\n", "file_path": "src/tests.rs", "rank": 39, "score": 33549.973212489705 }, { "content": " .header(\"X-Signature-Timestamp\", \"1616343571\")\n\n 
.set_payload(\"{\\\"type\\\":2,\\\"token\\\":\\\"awQabcabc\\\",\\\"member\\\":{\\\"user\\\":{\\\"id\\\":\\\"317209107000066050\\\",\\\"username\\\":\\\"C0der\\\",\\\"avatar\\\":\\\"a_d5efa99b3eeaa7dd43acca82f5692432\\\",\\\"discriminator\\\":\\\"1337\\\",\\\"public_flags\\\":131141},\\\"roles\\\":[],\\\"premium_since\\\":null,\\\"permissions\\\":\\\"2147483647\\\",\\\"pending\\\":false,\\\"nick\\\":null,\\\"mute\\\":false,\\\"joined_at\\\":\\\"2017-03-13T19:19:14.040000+00:00\\\",\\\"is_pending\\\":false,\\\"deaf\\\":false},\\\"id\\\":\\\"786008729715212338\\\",\\\"guild_id\\\":\\\"290926798626357999\\\",\\\"data\\\":{\\\"name\\\":\\\"test\\\",\\\"id\\\":\\\"771825006014889984\\\"},\\\"channel_id\\\":\\\"645027906669510667\\\"}\")\n\n .to_request();\n\n\n\n let res: types::interaction::InteractionResponse =\n\n test::read_response_json(&mut app, req).await;\n\n\n\n let expected_data = InteractionResponse {\n\n r#type: InteractionResponseType::DefferedChannelMessageWithSource,\n\n data: None,\n\n };\n\n\n\n //let expected_res = HttpResponse::build(StatusCode::OK).json(expected_data);\n\n\n\n assert_eq!(res, expected_data);\n\n}\n", "file_path": "src/tests.rs", "rank": 40, "score": 33548.984304944155 }, { "content": "use crate::security::*;\n\n\n\n#[cfg(feature = \"extended-handler\")]\n\nuse crate::types::application::*;\n\n\n\n#[cfg(feature = \"handler\")]\n\nuse crate::types::interaction::*;\n\n\n\n#[cfg(feature = \"extended-handler\")]\n\nuse crate::types::HttpError;\n\nuse crate::types::Snowflake;\n\n#[cfg(feature = \"extended-handler\")]\n\nuse crate::{\n\n expect_specific_api_response, expect_successful_api_response,\n\n expect_successful_api_response_and_return,\n\n};\n\nuse actix_web::http::StatusCode;\n\nuse actix_web::{web, App, HttpRequest, HttpResponse, HttpServer, Result};\n\nuse reqwest::header;\n\nuse reqwest::Client;\n", "file_path": "src/handler.rs", "rank": 41, "score": 33147.87259569461 }, { "content": " /// # use 
rusty_interaction::types::interaction::{Context, InteractionResponse};\n\n /// # use rusty_interaction::handler::InteractionHandler;\n\n /// # use attributes::slash_command;\n\n /// const PUB_KEY: &str = \"my_public_key\";\n\n ///\n\n /// #[slash_command]\n\n /// async fn pong_handler(ctx: Context) -> InteractionResponse {\n\n /// return ctx.respond()\n\n /// .content(\"Pong!\")\n\n /// .build()\n\n /// .unwrap();\n\n /// }\n\n ///\n\n /// #[actix_web::main]\n\n /// async fn main() -> std::io::Result<()> {\n\n ///\n\n /// let mut handle = InteractionHandler::new(PUB_KEY);\n\n /// handle.add_command(\"ping\", pong_handler);\n\n ///\n\n /// return handle.run().await;\n", "file_path": "src/handler.rs", "rank": 42, "score": 33145.852473331186 }, { "content": " /// }\n\n /// ```\n\n pub fn add_global_command(&mut self, name: &'static str, func: HandlerFunction) {\n\n self.global_handles.insert(name, func);\n\n }\n\n\n\n /// Binds an async function to a **component**.\n\n /// Your function must take a [`Context`] as an argument and must return a [`InteractionResponse`].\n\n /// Use the `#[component_handler]` procedural macro for your own convinence.eprintln!\n\n ///\n\n /// # Example\n\n /// ```ignore\n\n /// use rusty_interaction::handler::InteractionHandler;\n\n /// use rusty_interaction::types::components::*;\n\n /// use rusty_interaction::types::interaction::*;\n\n ///\n\n /// #[component_handler]\n\n /// async fn comp_hand(ctx: Context) -> InteractionResponse {\n\n /// return ctx.respond().content(\"Some message content\").build();\n\n /// }\n", "file_path": "src/handler.rs", "rank": 43, "score": 33142.70120264715 }, { "content": " }\n\n /// Binds an async function to a **global** command.\n\n /// Your function must take a [`Context`] as an argument and must return a [`InteractionResponse`].\n\n /// Make sure to use the `#[slash_command]` procedural macro to make it usable for the handler.\n\n ///\n\n /// Like:\n\n /// ```ignore\n\n /// # use 
rusty_interaction::types::interaction::{Context, InteractionResponse};\n\n /// # use attributes::slash_command;\n\n /// #[slash_command]\n\n /// async fn do_work(ctx: Context) -> InteractionResponse {\n\n /// return todo!(\"Do work and return a response\");\n\n /// }\n\n /// ```\n\n ///\n\n /// # Note\n\n /// The handler will first check if a guild-specific handler is available. If not, it will try to match a global command. If that fails too, an error will be returned.\n\n ///\n\n /// # Example\n\n /// ```ignore\n", "file_path": "src/handler.rs", "rank": 44, "score": 33142.21834950684 }, { "content": "/// Used for some functions to define which scope should be manipulated.\n\npub enum ManipulationScope {\n\n /// Only apply changes locally\n\n Local,\n\n /// Apply changes locally and to Discord\n\n All,\n\n /// Only apply changes with Discord\n\n Discord,\n\n}\n\n\n\n#[cfg(feature = \"handler\")]\n\n#[derive(Clone)]\n\n/// The InteractionHandler is the 'thing' that will handle your incoming interactions.\n\n/// It does interaction validation (as required by Discord) and provides a pre-defined actix-web server\n\n/// with [`InteractionHandler::run`] and [`InteractionHandler::run_ssl`]\n\npub struct InteractionHandler {\n\n application_id: Snowflake,\n\n\n\n app_public_key: PublicKey,\n\n client: Client,\n", "file_path": "src/handler.rs", "rank": 45, "score": 33141.86164810534 }, { "content": " \"No associated handler found for {}\",\n\n data.name.as_ref().unwrap().as_str()\n\n );\n\n ERROR_RESPONSE!(501, \"No associated handler found\")\n\n }\n\n }\n\n InteractionType::MessageComponent => {\n\n let data = if let Some(ref data) = interaction.data {\n\n data\n\n } else {\n\n error!(\"Failed to unwrap Interaction!\");\n\n return ERROR_RESPONSE!(500, \"Failed to unwrap\");\n\n };\n\n\n\n if let Some(handler) = self\n\n .component_handles\n\n .get(data.custom_id.as_ref().unwrap().as_str())\n\n {\n\n // construct a Context\n\n let ctx = Context::new(self.client.clone(), 
interaction);\n", "file_path": "src/handler.rs", "rank": 46, "score": 33141.680360413426 }, { "content": "\n\n // Call the handler\n\n let response = handler(self, ctx).await;\n\n\n\n match_handler_response!(response)\n\n } else {\n\n error!(\n\n \"No associated handler found for {}\",\n\n data.custom_id.as_ref().unwrap().as_str()\n\n );\n\n ERROR_RESPONSE!(501, \"No associated handler found\")\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n /// This is a predefined function that starts an `actix_web::HttpServer` and binds `self.interaction` to `/api/discord/interactions`.\n\n /// Note that you'll eventually have to switch to an HTTPS server. This function does not provide this.\n", "file_path": "src/handler.rs", "rank": 47, "score": 33141.06387219391 }, { "content": " .finish();\n\n }\n\n}\n\n\n\n#[cfg(feature = \"handler\")]\n\nimpl InteractionHandler {\n\n /// Initalizes a new `InteractionHandler`\n\n pub fn new(\n\n app_id: Snowflake,\n\n pbk_str: impl AsRef<str>,\n\n token: Option<&String>,\n\n ) -> InteractionHandler {\n\n let pbk_bytes =\n\n hex::decode(pbk_str.as_ref()).expect(\"Failed to parse the public key from hexadecimal\");\n\n\n\n let app_public_key =\n\n PublicKey::from_bytes(&pbk_bytes).expect(\"Failed to parse public key.\");\n\n\n\n if let Some(token) = token {\n\n let mut headers = header::HeaderMap::new();\n", "file_path": "src/handler.rs", "rank": 48, "score": 33140.24816743533 }, { "content": " /// handle.add_command(\"ping\", pong_handler);\n\n /// handle.add_component_handle(\"HEHE\", comp_hand);\n\n /// return handle.run().await;\n\n /// }\n\n /// ```\n\n pub fn add_component_handle(&mut self, custom_id: &'static str, func: HandlerFunction) {\n\n self.component_handles.insert(custom_id, func);\n\n }\n\n\n\n #[cfg(feature = \"extended-handler\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"extended-handler\")))]\n\n /// Register a guild-specific command with Discord!\n\n ///\n\n /// # NOTE\n\n /// Guild-specific commands are not cached or saved 
in any way by the handler.\n\n /// This means that between restarts, updates, crashes, or whatever that causes the app to terminate, the handler 'forgets' which command belonged to which handler.\n\n pub async fn register_guild_handle(\n\n &mut self,\n\n guild: impl Into<Snowflake>,\n\n cmd: &ApplicationCommand,\n", "file_path": "src/handler.rs", "rank": 49, "score": 33139.84673892469 }, { "content": " #[cfg(feature = \"extended-handler\")]\n\n #[cfg_attr(docsrs, doc(cfg(feature = \"extended-handler\")))]\n\n /// Remove a guild handle\n\n pub async fn deregister_guild_handle(\n\n &mut self,\n\n guild: impl Into<Snowflake>,\n\n id: impl Into<Snowflake>,\n\n scope: &ManipulationScope,\n\n ) -> Result<(), HttpError> {\n\n let i = id.into();\n\n let g = guild.into();\n\n\n\n match scope {\n\n ManipulationScope::Local => {\n\n self.guild_handles.remove(&i);\n\n Ok(())\n\n }\n\n ManipulationScope::All | ManipulationScope::Discord => {\n\n let url = format!(\n\n \"{}/applications/{}/guilds/{}/commands/{}\",\n", "file_path": "src/handler.rs", "rank": 50, "score": 33139.33571105472 }, { "content": " // If proper headers are not present reject.\n\n debug!(\n\n \"Incoming interaction rejected, missing headers. Origin: {:?}\",\n\n req.connection_info().realip_remote_addr()\n\n );\n\n return ERROR_RESPONSE!(400, \"Bad signature data\");\n\n }\n\n\n\n // Security checks passed, try deserializing request to Interaction.\n\n match serde_json::from_str::<Interaction>(&body) {\n\n Err(e) => {\n\n // It's probably bad on our end if this code is reached.\n\n error!(\"Failed to decode interaction! 
Error: {}\", e);\n\n debug!(\"Body sent: {}\", body);\n\n return ERROR_RESPONSE!(400, format!(\"Bad body: {}\", e));\n\n }\n\n Ok(interaction) => {\n\n match interaction.r#type {\n\n InteractionType::Ping => {\n\n let response =\n", "file_path": "src/handler.rs", "rank": 51, "score": 33139.2668489577 }, { "content": " #[cfg_attr(docsrs, doc(cfg(feature = \"extended-handler\")))]\n\n /// Override a bunch of permissions for commands in a guild.\n\n pub async fn override_guild_permissions(\n\n &self,\n\n guild_id: impl Into<Snowflake>,\n\n overrides: &[ApplicationCommandPermissionBatch],\n\n ) -> Result<(), HttpError> {\n\n let url = format!(\n\n \"{}/applications/{}/guilds/{}/commands/permissions\",\n\n crate::BASE_URL,\n\n self.application_id,\n\n guild_id.into()\n\n );\n\n\n\n let res = self.client.put(&url).json(overrides).send().await;\n\n\n\n expect_successful_api_response!(res, Ok(()))\n\n }\n\n\n\n #[cfg(feature = \"extended-handler\")]\n", "file_path": "src/handler.rs", "rank": 52, "score": 33138.89805383497 }, { "content": "\n\n global_handles: HashMap<&'static str, HandlerFunction>,\n\n component_handles: HashMap<&'static str, HandlerFunction>,\n\n\n\n // These handles are 'forgotten' every time the app is shutdown (whatever the reason may be).\n\n guild_handles: HashMap<Snowflake, HandlerFunction>,\n\n\n\n /// Field to access data\n\n pub data: AnyMap,\n\n}\n\n\n\n#[cfg(feature = \"handler\")]\n\n// Only here to make Debug less generic, so I can send a reference\n\nimpl fmt::Debug for InteractionHandler {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n return f\n\n .debug_struct(\"InteractionHandler\")\n\n .field(\"app_public_key\", &self.app_public_key)\n\n .field(\"global_handles_len\", &self.global_handles.len())\n\n .field(\"component_handles_len\", &self.component_handles.len())\n", "file_path": "src/handler.rs", "rank": 53, "score": 33138.87378598562 }, { "content": "\n\nuse log::{debug, error};\n\nuse std::fmt;\n\n\n\nuse 
anymap::{any::CloneAny, Map};\n\n\n\nuse ed25519_dalek::PublicKey;\n\n\n\nuse rustls::ServerConfig;\n\n\n\nuse std::{collections::HashMap, future::Future, pin::Pin, sync::Mutex};\n\n\n", "file_path": "src/handler.rs", "rank": 54, "score": 33138.64257350029 }, { "content": " // Call the handler\n\n let response = handler(self, ctx).await;\n\n\n\n match_handler_response!(response)\n\n }\n\n // Welp, nothing found. Check for matches in the global map\n\n else if let Some(handler) = self.global_handles.get(\n\n data.name.as_ref().unwrap().as_str(), /* Don't question it */\n\n ) {\n\n // construct a Context\n\n let ctx = Context::new(self.client.clone(), interaction);\n\n\n\n // Call the handler\n\n let response = handler(self, ctx).await;\n\n\n\n match_handler_response!(response)\n\n }\n\n // Still nothing, return an error\n\n else {\n\n error!(\n", "file_path": "src/handler.rs", "rank": 55, "score": 33138.412241447244 }, { "content": " InteractionResponse::new(InteractionResponseType::Pong, None);\n\n debug!(\"Got a ping, responding with pong.\");\n\n return Ok(HttpResponse::build(StatusCode::OK)\n\n .content_type(\"application/json\")\n\n .json(response));\n\n }\n\n\n\n InteractionType::ApplicationCommand => {\n\n let data = if let Some(ref data) = interaction.data {\n\n data\n\n } else {\n\n error!(\"Failed to unwrap Interaction!\");\n\n return ERROR_RESPONSE!(500, \"Failed to unwrap\");\n\n };\n\n\n\n // Check for matches in guild handler map. 
Unwrapping because this should always contain an ID\n\n if let Some(handler) = self.guild_handles.get(data.id.as_ref().unwrap()) {\n\n // construct a Context\n\n let ctx = Context::new(self.client.clone(), interaction);\n\n\n", "file_path": "src/handler.rs", "rank": 56, "score": 33137.763445768054 }, { "content": "\n\n /// Entry point function for handling `Interactions`\n\n pub async fn interaction(&mut self, req: HttpRequest, body: String) -> Result<HttpResponse> {\n\n // Check for good content type --> must be application/json\n\n\n\n if let Some(ct) = req.headers().get(\"Content-Type\") {\n\n if ct != \"application/json\" {\n\n debug!(\n\n \"Incoming interaction rejected, bad Content-Type specified. Origin: {:?}\",\n\n req.connection_info().realip_remote_addr()\n\n );\n\n return ERROR_RESPONSE!(400, \"Bad Content-Type\");\n\n }\n\n } else {\n\n debug!(\n\n \"Incoming interaction rejected, no Content-Type specified. Origin: {:?}\",\n\n req.connection_info().realip_remote_addr()\n\n );\n\n return ERROR_RESPONSE!(400, \"Bad Content-Type\");\n\n }\n", "file_path": "src/handler.rs", "rank": 57, "score": 33137.186027988966 }, { "content": " #[cfg_attr(docsrs, doc(cfg(feature = \"extended-handler\")))]\n\n /// Add a permission override for a guild command\n\n pub async fn edit_guild_command_permissions(\n\n &self,\n\n guild_id: impl Into<Snowflake>,\n\n appcmd_id: impl Into<Snowflake>,\n\n permission_override: &ApplicationCommandPermission,\n\n ) -> Result<(), HttpError> {\n\n let url = format!(\n\n \"{}/applications/{}/guilds/{}/commands/{}/permissions\",\n\n crate::BASE_URL,\n\n self.application_id,\n\n guild_id.into(),\n\n appcmd_id.into(),\n\n );\n\n\n\n let res = self.client.put(&url).json(permission_override).send().await;\n\n\n\n expect_successful_api_response!(res, Ok(()))\n\n }\n", "file_path": "src/handler.rs", "rank": 58, "score": 33136.93264448461 }, { "content": " }\n\n _ => {\n\n // Send out a response to Discord\n\n let r = 
HttpResponse::build(StatusCode::OK).json(__unwrapped_response__);\n\n\n\n Ok(r)\n\n }\n\n }\n\n }\n\n else{\n\n debug!(\"Responding with 500!\");\n\n Ok(HttpResponse::build(StatusCode::INTERNAL_SERVER_ERROR).finish())\n\n }\n\n\n\n };\n\n}\n\n\n\n#[cfg(feature = \"handler\")]\n\n#[non_exhaustive]\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n", "file_path": "src/handler.rs", "rank": 59, "score": 33136.36415630564 }, { "content": " /// ComponentButtonBuilder::default()\n\n /// .label(\"Delete\")\n\n /// .custom_id(\"DELETE\")\n\n /// .style(ComponentButtonStyle::Danger)\n\n /// .build()\n\n /// .unwrap()\n\n /// )\n\n /// .build()\n\n /// .unwrap()\n\n /// )\n\n /// .build()\n\n /// .unwrap();\n\n\n\n /// return resp;\n\n ///\n\n /// }\n\n /// #[actix_web::main]\n\n /// async fn main() -> std::io::Result<()> {\n\n ///\n\n /// let mut handle = InteractionHandler::new(PUB_KEY);\n", "file_path": "src/handler.rs", "rank": 60, "score": 33136.194546691004 }, { "content": " crate::BASE_URL,\n\n self.application_id,\n\n g,\n\n i\n\n );\n\n\n\n let r = self.client.delete(&url).send().await;\n\n\n\n expect_specific_api_response!(r, StatusCode::NO_CONTENT, {\n\n if scope == &ManipulationScope::All {\n\n self.guild_handles.remove(&i);\n\n }\n\n\n\n Ok(())\n\n })\n\n }\n\n }\n\n }\n\n\n\n #[cfg(feature = \"extended-handler\")]\n", "file_path": "src/handler.rs", "rank": 61, "score": 33133.974577627654 }, { "content": " }\n\n } else {\n\n InteractionHandler {\n\n application_id: app_id,\n\n app_public_key,\n\n client: Client::new(),\n\n global_handles: HashMap::new(),\n\n component_handles: HashMap::new(),\n\n guild_handles: HashMap::new(),\n\n data: AnyMap::new(),\n\n }\n\n }\n\n }\n\n\n\n /// Add some data. 
Data can be accessed by handlers with `InteractionHandler.data`\n\n pub fn add_data<T: Clone>(&mut self, data: T)\n\n where\n\n T: Send + 'static + Sync,\n\n {\n\n self.data.insert(data);\n", "file_path": "src/handler.rs", "rank": 62, "score": 33133.90466083829 }, { "content": " pub async fn run(self, port: u16) -> std::io::Result<()> {\n\n let data = web::Data::new(Mutex::new(self));\n\n HttpServer::new(move || {\n\n App::new().app_data(data.clone()).route(\n\n \"/api/discord/interactions\",\n\n web::post().to(\n\n |data: web::Data<Mutex<InteractionHandler>>, req: HttpRequest, body: String| async move {\n\n data.lock().unwrap().interaction(req, body).await\n\n },\n\n ),\n\n )\n\n })\n\n .bind(format!(\"0.0.0.0:{}\", port))?\n\n .run()\n\n .await\n\n }\n\n\n\n /// Same as [`InteractionHandler::run`] but starts a server with SSL/TLS.\n\n pub async fn run_ssl(self, server_conf: ServerConfig, port: u16) -> std::io::Result<()> {\n\n let data = web::Data::new(Mutex::new(self));\n", "file_path": "src/handler.rs", "rank": 63, "score": 33133.545634610855 }, { "content": "\n\n // Let it panic if there is no valid value\n\n let auth_value = header::HeaderValue::from_str(token.as_str());\n\n\n\n assert!(!auth_value.is_err(), \"Invalid value given at token\");\n\n\n\n let mut auth_value = auth_value.unwrap();\n\n\n\n auth_value.set_sensitive(true);\n\n headers.insert(header::AUTHORIZATION, auth_value);\n\n let new_c = Client::builder().default_headers(headers).build().unwrap();\n\n\n\n InteractionHandler {\n\n application_id: app_id,\n\n app_public_key,\n\n client: new_c,\n\n global_handles: HashMap::new(),\n\n component_handles: HashMap::new(),\n\n guild_handles: HashMap::new(),\n\n data: AnyMap::new(),\n", "file_path": "src/handler.rs", "rank": 64, "score": 33133.25319701753 }, { "content": " func: HandlerFunction,\n\n scope: &ManipulationScope,\n\n ) -> Result<ApplicationCommand, HttpError> {\n\n let g = guild.into();\n\n match scope {\n\n ManipulationScope::Local => {\n\n 
self.guild_handles.insert(g, func);\n\n Ok(cmd.clone())\n\n }\n\n ManipulationScope::Discord | ManipulationScope::All => {\n\n let url = format!(\n\n \"{}/applications/{}/guilds/{}/commands\",\n\n crate::BASE_URL,\n\n self.application_id,\n\n g\n\n );\n\n\n\n let r = self.client.post(&url).json(cmd).send().await;\n\n\n\n expect_successful_api_response_and_return!(r, ApplicationCommand, a, {\n", "file_path": "src/handler.rs", "rank": 65, "score": 33133.02361718927 }, { "content": " ///\n\n /// #[slash_command]\n\n /// async fn spawn_buttons(ctx: Context) -> InteractionResponse {\n\n /// // Let's build our message!\n\n /// let resp = ctx.respond()\n\n /// // Set message content\n\n /// .content(\"Not edited\")\n\n /// // add a component action row using it's builder\n\n /// .add_component_row(\n\n /// ComponentRowBuilder::default()\n\n /// // Add buttons using it's builder\n\n /// .add_button(\n\n /// ComponentButtonBuilder::default()\n\n /// .label(\"Edit\")\n\n /// .custom_id(\"HEHE\")\n\n /// .style(ComponentButtonStyle::Primary)\n\n /// .build()\n\n /// .unwrap()\n\n /// )\n\n /// .add_button(\n", "file_path": "src/handler.rs", "rank": 66, "score": 33131.31055179399 }, { "content": "\n\n let se = get_header(&req, \"X-Signature-Ed25519\");\n\n let st = get_header(&req, \"X-Signature-Timestamp\");\n\n\n\n // TODO: Domain check might be a good one.\n\n\n\n if let Some((se, st)) = se.zip(st) {\n\n // Verify timestamp + body against given signature\n\n if verify_discord_message(self.app_public_key, se, st, &body).is_ok() {\n\n // Signature OK. Continue with interaction processing.\n\n } else {\n\n // Verification failed, reject.\n\n // TODO: Switch error response\n\n debug!(\n\n \"Incoming interaction rejected, invalid signature. 
Origin: {:?}\",\n\n req.connection_info().realip_remote_addr()\n\n );\n\n return ERROR_RESPONSE!(401, \"Invalid request signature\");\n\n }\n\n } else {\n", "file_path": "src/handler.rs", "rank": 67, "score": 33131.1994985273 }, { "content": " if let Some(id) = a.id {\n\n if scope == &ManipulationScope::All {\n\n // Already overwrites current key if it exists, so no need to check.\n\n self.guild_handles.insert(id, func);\n\n }\n\n\n\n Ok(a)\n\n } else {\n\n // Pretty bad if this code reaches...\n\n Err(HttpError {\n\n code: 0,\n\n message: \"Command registration response did not have an ID.\"\n\n .to_string(),\n\n })\n\n }\n\n })\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/handler.rs", "rank": 68, "score": 33131.03948304107 }, { "content": " HttpServer::new(move || {\n\n App::new().app_data(data.clone()).route(\n\n \"/api/discord/interactions\",\n\n web::post().to(\n\n |data: web::Data<Mutex<InteractionHandler>>, req: HttpRequest, body: String| async move {\n\n data.lock().unwrap().interaction(req, body).await\n\n },\n\n ),\n\n )\n\n })\n\n .bind_rustls(format!(\"0.0.0.0:{}\", port), server_conf)?\n\n .run()\n\n .await\n\n }\n\n}\n\n\n", "file_path": "src/handler.rs", "rank": 69, "score": 33130.620937720334 }, { "content": " if let Err(i) = ctx.edit_original(&WebhookMessage::from(__r)).await{\n\n ::rusty_interaction::log::error!(\"Editing original message failed: {:?}\", i);\n\n }\n\n }\n\n }\n\n else{\n\n // Nothing\n\n }\n\n\n\n\n\n });\n\n\n\n return InteractionResponseBuilder::default().respond_type(#defer_return).finish();\n\n\n\n })\n\n }\n\n };\n\n subst_fn.into()\n\n }\n\n}\n\n\n\n#[proc_macro_attribute]\n", "file_path": "attributes/src/lib.rs", "rank": 70, "score": 32207.982399298508 }, { "content": "extern crate proc_macro;\n\n\n\nuse proc_macro::*;\n\n\n\nuse quote::format_ident;\n\nuse quote::quote;\n\n\n\nuse syn::{Expr, ExprReturn, FnArg, ReturnType, Stmt};\n\n\n", "file_path": "attributes/src/lib.rs", "rank": 71, "score": 32204.423428152233 }, { 
"content": " .unwrap_or_else(|| panic!(\"Expected some return value\"));\n\n\n\n let nvec = nbody.to_vec();\n\n\n\n // Now that we have all the information we need, we can finally start building our new function!\n\n // The difference here being that the non-deffered function doesn't have to spawn a new thread that\n\n // does the actual work. Here we need it to reply with a deffered channel message.\n\n let subst_fn = quote! {\n\n #vis fn #fname (#ih_n: &mut InteractionHandler, #ctxname: Context) -> ::std::pin::Pin<::std::boxed::Box<dyn Send + ::std::future::Future<Output = #ret> + '_>>{\n\n Box::pin(async move {\n\n actix::Arbiter::spawn(async move {\n\n #(#nvec)*\n\n if #expra.r#type != InteractionResponseType::Pong && #expra.r#type != InteractionResponseType::None{\n\n if let Err(i) = #ctxname.edit_original(&WebhookMessage::from(#expra)).await{\n\n error!(\"Editing original message failed: {:?}\", i);\n\n }\n\n }\n\n\n\n });\n\n\n\n return InteractionResponseBuilder::default().respond_type(InteractionResponseType::DefferedChannelMessageWithSource).finish();\n\n\n\n })\n\n }\n\n };\n\n subst_fn.into()\n\n }\n\n}\n", "file_path": "attributes/src/lib.rs", "rank": 72, "score": 32201.314238260853 }, { "content": " // Create two functions. One that actually does the work, and one that handles the threading.\n\n\n\n let act_fn = format_ident!(\"__actual_{}\", fname);\n\n\n\n let subst_fn = quote! 
{\n\n fn #act_fn (#ih_n: &mut InteractionHandler, #ctxname: Context) -> ::std::pin::Pin<::std::boxed::Box<dyn Send + ::std::future::Future<Output = #ret> + '_>>{\n\n Box::pin(async move {\n\n #body\n\n })\n\n }\n\n #vis fn #fname (ihd: &mut InteractionHandler, ctx: Context) -> ::std::pin::Pin<::std::boxed::Box<dyn Send + ::std::future::Future<Output = #ret> + '_>>{\n\n Box::pin(async move {\n\n // TODO: Try to do this without cloning.\n\n let mut __ih_c = ihd.clone();\n\n\n\n ::rusty_interaction::actix::Arbiter::spawn(async move {\n\n\n\n let __response = #act_fn (&mut __ih_c, ctx.clone()).await;\n\n if let Ok(__r) = __response{\n\n if __r.r#type != InteractionResponseType::Pong && __r.r#type != InteractionResponseType::None{\n", "file_path": "attributes/src/lib.rs", "rank": 73, "score": 32200.50742164092 }, { "content": " syn::Type::Path(b) => {\n\n for segment in b.path.segments.clone() {\n\n if segment.ident == \"Context\" {\n\n if let syn::Pat::Ident(a) = &*t.pat {\n\n ctxname = Some(a.ident.clone());\n\n break;\n\n }\n\n } else if segment.ident == \"InteractionHandler\" {\n\n panic!(\"Cannot take ownership of `InteractionHandler`. Try using &InteractionHandler!\")\n\n }\n\n }\n\n }\n\n // This might be an &InteractionHandler!\n\n syn::Type::Reference(r) => {\n\n let e = r.elem.clone();\n\n if let syn::Type::Path(w) = &*e {\n\n for segment in w.path.segments.clone() {\n\n if segment.ident == \"InteractionHandler\" {\n\n if let syn::Pat::Ident(a) = &*t.pat {\n\n handlename = Some(a.ident.clone());\n", "file_path": "attributes/src/lib.rs", "rank": 74, "score": 32199.752612313245 }, { "content": " break;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n _ => {\n\n continue;\n\n }\n\n }\n\n }\n\n if let FnArg::Receiver(_) = p {\n\n panic!(\"`self` arguments are not allowed. 
If you need to access data in your function, use the `InteractionHandler.data` field and `InteractionHandler::add_data` method\")\n\n }\n\n }\n\n\n\n if ctxname.is_none() {\n\n panic!(\"Couldn't determine the Context parameter. Make sure you take a `Context` as an argument\");\n\n }\n\n\n", "file_path": "attributes/src/lib.rs", "rank": 75, "score": 32198.9972995912 }, { "content": " let mut ih_n = quote!(_);\n\n\n\n if handlename.is_some() {\n\n ih_n = quote!(#handlename);\n\n }\n\n\n\n // Using quasi-quoting to generate a new function. This is what will be the end function returned to the compiler.\n\n if !defer {\n\n // Build the function\n\n let subst_fn = quote! {\n\n #vis fn #fname (#ih_n: &mut InteractionHandler, #ctxname: Context) -> ::std::pin::Pin<::std::boxed::Box<dyn Send + ::std::future::Future<Output = #ret> + '_>>{\n\n Box::pin(async move {\n\n #body\n\n })\n\n }\n\n };\n\n subst_fn.into()\n\n }\n\n // Deferring is requested, this will require a bit more manipulation.\n\n else {\n", "file_path": "attributes/src/lib.rs", "rank": 76, "score": 32197.762646500232 }, { "content": " ctxname = Some(a.ident.clone());\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n // This might be an &InteractionHandler!\n\n syn::Type::Reference(r) => {\n\n let e = r.elem.clone();\n\n if let syn::Type::Path(w) = &*e {\n\n for segment in w.path.segments.clone() {\n\n if segment.ident == \"InteractionHandler\" {\n\n if let syn::Pat::Ident(a) = &*t.pat {\n\n handlename = Some(a.ident.clone());\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n }\n", "file_path": "attributes/src/lib.rs", "rank": 77, "score": 32196.949492289157 }, { "content": " ReturnType::Type(_a, b) => {\n\n ret = *b.clone();\n\n }\n\n }\n\n\n\n // Find the name of the Context parameter\n\n let mut ctxname: Option<syn::Ident> = None;\n\n let mut handlename: Option<syn::Ident> = None;\n\n // eprintln!(\"{:#?}\", params);\n\n\n\n // I am honestly laughing at this...\n\n // But hey it works! 
:D\n\n for p in params {\n\n if let FnArg::Typed(t) = p {\n\n match &*t.ty {\n\n // This might be a Context\n\n syn::Type::Path(b) => {\n\n for segment in b.path.segments.clone() {\n\n if segment.ident == \"Context\" {\n\n if let syn::Pat::Ident(a) = &*t.pat {\n", "file_path": "attributes/src/lib.rs", "rank": 78, "score": 32196.365312435017 }, { "content": " match ret_sig {\n\n ReturnType::Default => {\n\n panic!(\"Expected an `Result<InteractionResponse, std::convert::Infallible>` return type, but got no return type. Consider adding `-> Result<InteractionResponse, Infallible>` to your function signature.\");\n\n }\n\n ReturnType::Type(_a, b) => {\n\n ret = *b.clone();\n\n }\n\n }\n\n\n\n // Find the name of the Context parameter\n\n let mut ctxname: Option<syn::Ident> = None;\n\n let mut handlename: Option<syn::Ident> = None;\n\n // eprintln!(\"{:#?}\", params);\n\n\n\n // I am honestly laughing at this...\n\n // But hey it works! :D\n\n for p in params {\n\n if let FnArg::Typed(t) = p {\n\n match &*t.ty {\n\n // This might be a Context\n", "file_path": "attributes/src/lib.rs", "rank": 79, "score": 32196.09593958899 }, { "content": " let subst_fn = quote! 
{\n\n #vis fn #fname (#ih_n: &mut InteractionHandler, #ctxname: Context) -> ::std::pin::Pin<::std::boxed::Box<dyn Send + ::std::future::Future<Output = #ret> + '_>>{\n\n Box::pin(async move {\n\n #body\n\n })\n\n }\n\n };\n\n subst_fn.into()\n\n }\n\n // Deferring is requested, this will require a bit more manipulation.\n\n else {\n\n // Find the return statement and split the entire tokenstream there.\n\n let mut ind: Option<usize> = None;\n\n let mut expr: Option<ExprReturn> = None;\n\n for n in 0..body.stmts.len() {\n\n let s = &body.stmts[n];\n\n match s {\n\n Stmt::Expr(Expr::Return(a)) => {\n\n expr = Some(a.clone());\n\n ind = Some(n);\n", "file_path": "attributes/src/lib.rs", "rank": 80, "score": 32195.11884295394 }, { "content": " // Get the function name\n\n let fname = &input.sig.ident;\n\n // Get the visibility (public fn, private fn, etc)\n\n let vis = &input.vis;\n\n\n\n // Get the parameters and return types\n\n let params = &input.sig.inputs;\n\n let ret_sig = &input.sig.output;\n\n\n\n // Must be filled later, but define its type for now.\n\n let ret: syn::Type;\n\n\n\n // Get the function body\n\n let body = &input.block;\n\n\n\n // Check for a proper return type and fill ret if found.\n\n match ret_sig {\n\n ReturnType::Default => {\n\n panic!(\"Expected an `InteractionResponse` return type, but got no return type. Consider adding `-> InteractionResponse` to your function signature.\");\n\n }\n", "file_path": "attributes/src/lib.rs", "rank": 81, "score": 32192.847025955994 }, { "content": " _ => {\n\n continue;\n\n }\n\n }\n\n }\n\n }\n\n\n\n if ctxname.is_none() {\n\n panic!(\"Couldn't determine the Context parameter. Make sure you take a `Context` as an argument\");\n\n }\n\n\n\n let mut ih_n = quote!(_);\n\n\n\n if handlename.is_some() {\n\n ih_n = quote!(#handlename);\n\n }\n\n\n\n // Using quasi-quoting to generate a new function. 
This is what will be the end function returned to the compiler.\n\n if !defer {\n\n // Build the function\n", "file_path": "attributes/src/lib.rs", "rank": 82, "score": 32192.4670201107 }, { "content": " }\n\n\n\n // Ok here comes the fun part\n\n\n\n // Get the function name\n\n let fname = &input.sig.ident;\n\n // Get the visibility (public fn, private fn, etc)\n\n let vis = &input.vis;\n\n\n\n // Get the parameters and return types\n\n let params = &input.sig.inputs;\n\n let ret_sig = &input.sig.output;\n\n\n\n // Must be filled later, but define its type for now.\n\n let ret: syn::Type;\n\n\n\n // Get the function body\n\n let body = &input.block;\n\n\n\n // Check for a proper return type and fill ret if found.\n", "file_path": "attributes/src/lib.rs", "rank": 83, "score": 32192.217183594894 }, { "content": " break;\n\n }\n\n Stmt::Semi(Expr::Return(a), _) => {\n\n expr = Some(a.clone());\n\n ind = Some(n);\n\n break;\n\n }\n\n _ => (),\n\n }\n\n }\n\n let (nbody, _reta) = body.stmts.split_at(ind.unwrap_or_else(|| {\n\n panic!(\n\n \"Could not find return statement in slash-command. 
Explicit returns are required.\"\n\n );\n\n }));\n\n\n\n // Unwrap, unwrap, unwrap, unwrap.\n\n let expra = expr\n\n .unwrap_or_else(|| panic!(\"Expected return\"))\n\n .expr\n", "file_path": "attributes/src/lib.rs", "rank": 84, "score": 32189.62771233098 }, { "content": "use super::Snowflake;\n\n#[cfg(feature = \"handler\")]\n\nuse ::chrono::{DateTime, Utc};\n\n#[cfg(feature = \"handler\")]\n\nuse log::{debug, error};\n\n#[cfg(any(feature = \"handler\", feature = \"extended-handler\"))]\n\nuse reqwest::{Client, StatusCode};\n\n\n\n// ======================\n\n\n\n#[cfg(any(feature = \"handler\", feature = \"extended-handler\"))]\n\n#[derive(Clone, Debug)]\n\n/// A context contains relevant information and useful functions you can use when handling Interactions.\n\npub struct Context {\n\n client: Client,\n\n\n\n /// Resolved user ID of author\n\n pub author_id: Option<Snowflake>,\n\n\n\n /// The [`Interaction`] sent by Discord.\n", "file_path": "src/types/interaction.rs", "rank": 85, "score": 28950.911585990503 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\nuse ::chrono::{DateTime, Utc};\n\nuse serde_with::*;\n\n\n\n#[cfg(feature = \"builder\")]\n\nuse crate::Builder;\n\n#[cfg(feature = \"builder\")]\n\nuse log::warn;\n\n// ======== Structures =========\n\n#[serde_as]\n\n#[skip_serializing_none]\n\n#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]\n\n/// An embed in Discord is a way to display rich content in messages\n\npub struct Embed {\n\n /// Title of the embed\n\n pub title: Option<String>,\n\n /// Description of the embed\n\n pub description: Option<String>,\n\n // Type field is not implemented since it's considered deprecated\n", "file_path": "src/types/embed.rs", "rank": 86, "score": 28946.08538686464 }, { "content": "#[cfg(feature = \"builder\")]\n\nuse std::error;\n\n#[cfg(feature = \"builder\")]\n\nuse std::fmt::{self, Display};\n\n\n\n#[cfg(feature = \"builder\")]\n\nuse log::warn;\n\nuse serde::{Deserialize, 
Serialize};\n\n\n\n#[cfg(feature = \"builder\")]\n\nuse crate::Builder;\n\n\n\nuse serde_repr::*;\n\nuse serde_with::*;\n\n\n\n#[serde_as]\n\n#[skip_serializing_none]\n\n#[derive(Clone, Serialize, Deserialize, PartialEq, Debug)]\n\n/// Message components are a framework for adding interactive elements to the messages your app or bot sends. They're accessible, customizable, and easy to use.\n\npub struct MessageComponent {\n", "file_path": "src/types/components.rs", "rank": 87, "score": 28945.926095802046 }, { "content": "#[cfg(feature = \"extended-handler\")]\n\nuse crate::expect_successful_api_response_and_return;\n\n\n\n#[cfg(feature = \"handler\")]\n\nuse crate::{expect_specific_api_response, expect_successful_api_response};\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse serde_with::*;\n\n\n\nuse serde_repr::*;\n\n\n\nuse super::application::*;\n\nuse super::components::*;\n\nuse super::embed::*;\n\n#[cfg(feature = \"extended-handler\")]\n\nuse super::guild::*;\n\nuse super::user::*;\n\n#[cfg(feature = \"handler\")]\n\nuse super::HttpError;\n", "file_path": "src/types/interaction.rs", "rank": 88, "score": 28945.57082037124 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\nuse serde_with::*;\n\n\n\n#[cfg(feature = \"builder\")]\n\nuse crate::Builder;\n\n\n\nuse super::components::ComponentType;\n\nuse super::user::*;\n\nuse super::Snowflake;\n\nuse serde_repr::*;\n\n\n\n#[serde_as]\n\n#[skip_serializing_none]\n\n#[derive(Clone, Serialize, Deserialize, PartialEq, Debug)]\n\n/// AKA a 'slash command'.\n\npub struct ApplicationCommand {\n\n #[serde_as(as = \"Option<DisplayFromStr>\")]\n\n #[serde(default)]\n\n /// ID of command\n", "file_path": "src/types/application.rs", "rank": 89, "score": 28944.985335699082 }, { "content": " }\n\n}\n\n\n\n#[cfg(feature = \"handler\")]\n\nimpl InteractionResponseBuilder {\n\n fn ret(self) -> InteractionResponse {\n\n InteractionResponse {\n\n r#type: self.r#type,\n\n data: self.data,\n\n }\n\n }\n\n\n\n /// 
Return a pong with no data. Use with caution\n\n pub fn pong(mut self) -> Result<InteractionResponse, std::convert::Infallible> {\n\n self.r#type = InteractionResponseType::Pong;\n\n self.data = None;\n\n self.finish()\n\n }\n\n\n\n /// Return without any data. Use with caution\n", "file_path": "src/types/interaction.rs", "rank": 90, "score": 28944.762434701366 }, { "content": " /// Type of response\n\n pub r#type: InteractionResponseType,\n\n\n\n /// Optional data field\n\n pub data: Option<InteractionApplicationCommandCallbackData>,\n\n}\n\n\n\n#[cfg(feature = \"handler\")]\n\n#[derive(Clone, Debug)]\n\n/// Builder for making a [`InteractionResponse`]\n\n\n\npub struct InteractionResponseBuilder {\n\n #[doc(hidden)]\n\n pub r#type: InteractionResponseType,\n\n #[doc(hidden)]\n\n pub data: Option<InteractionApplicationCommandCallbackData>,\n\n}\n\n\n\nimpl InteractionResponse {\n\n /// Creates a new InteractionResponse\n", "file_path": "src/types/interaction.rs", "rank": 91, "score": 28943.22714744617 }, { "content": "\n\n expect_successful_api_response!(exec, {\n\n // TODO: Update edited fields\n\n Ok(())\n\n })\n\n }\n\n\n\n /// Delete this followup message.\n\n ///\n\n /// If the deletion succeeded, you'll get an `Ok(())`. However, if this somehow fails, it will return `Err(Self)`.\n\n /// That means that if the deletion did not succeed, this reference does not go out of scope.\n\n ///\n\n /// Errors get printed using the [`::log::debug!`] macro\n\n pub async fn delete_message(self) -> Result<(), Self> {\n\n let url = format!(\n\n \"{}/webhooks/{:?}/{}/messages/{:?}\",\n\n crate::BASE_URL,\n\n self.application_id,\n\n self.interaction_token,\n\n self.id\n", "file_path": "src/types/interaction.rs", "rank": 92, "score": 28942.81883369529 }, { "content": "\n\n #[deprecated(since = \"0.1.9\", note = \"Use the `build()` function instead\")]\n\n /// Build the embed. 
You can't use the function after this anymore\n\n pub fn finish(self) -> Embed {\n\n self.obj\n\n }\n\n}\n\n\n\n#[cfg(feature = \"builder\")]\n\nimpl Builder<Embed> for EmbedBuilder {\n\n type Error = std::convert::Infallible;\n\n\n\n fn build(self) -> Result<Embed, Self::Error> {\n\n Ok(self.obj)\n\n }\n\n}\n\n\n\nimpl Default for EmbedFooter {\n\n fn default() -> Self {\n\n Self {\n", "file_path": "src/types/embed.rs", "rank": 93, "score": 28942.804288859836 }, { "content": " pub interaction: Interaction,\n\n}\n\n\n\n#[serde_as]\n\n#[derive(Clone, Serialize, Deserialize, PartialEq, Debug)]\n\n/// The base Interaction structure. When Interactions are received, this structure is wrapped inside a [`Context`]\n\n/// and can be used to get information about the Interaction.\n\npub struct Interaction {\n\n #[serde_as(as = \"Option<DisplayFromStr>\")]\n\n #[serde(default)]\n\n /// The application id of your applicaton\n\n pub application_id: Option<Snowflake>,\n\n\n\n #[serde_as(as = \"Option<DisplayFromStr>\")]\n\n #[serde(default)]\n\n /// Unique id identifying the interaction\n\n pub id: Option<Snowflake>,\n\n /// The type of interaction\n\n pub r#type: InteractionType,\n\n /// Interaction data, if applicable\n", "file_path": "src/types/interaction.rs", "rank": 94, "score": 28942.349082413548 }, { "content": " /// .finish();\n\n /// ```\n\n pub fn respond(&self) -> InteractionResponseBuilder {\n\n let mut b = InteractionResponseBuilder::default();\n\n\n\n // Default to UpdateMessage response type if InteractionType is MessageComponent\n\n if self.interaction.r#type == InteractionType::MessageComponent {\n\n b.r#type = InteractionResponseType::UpdateMessage;\n\n }\n\n\n\n b\n\n }\n\n\n\n /// Edit the original interaction response\n\n ///\n\n /// This takes an [`WebhookMessage`]. 
You can convert an [`InteractionResponse`] using [`WebhookMessage::from`].\n\n pub async fn edit_original(&self, new_content: &WebhookMessage) -> Result<(), HttpError> {\n\n let url = format!(\n\n \"{}/webhooks/{:?}/{}/messages/@original\",\n\n crate::BASE_URL,\n", "file_path": "src/types/interaction.rs", "rank": 95, "score": 28941.91179018741 }, { "content": " pub fn new(\n\n rtype: InteractionResponseType,\n\n data: Option<InteractionApplicationCommandCallbackData>,\n\n ) -> InteractionResponse {\n\n InteractionResponse {\n\n r#type: rtype,\n\n data,\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"handler\")]\n\nimpl Default for InteractionResponseBuilder {\n\n /// This will default to responding with the `InteractionResponseType::CHANNEL_MESSAGE_WITH_SOURCE` response type and no data.\n\n /// Adding data yourself is expected.\n\n fn default() -> Self {\n\n Self {\n\n r#type: InteractionResponseType::ChannelMessageWithSource,\n\n data: None,\n\n }\n", "file_path": "src/types/interaction.rs", "rank": 96, "score": 28941.652815220667 }, { "content": "}\n\n#[cfg(feature = \"handler\")]\n\n#[serde_as]\n\n#[skip_serializing_none]\n\n#[derive(Clone, Serialize, Deserialize, Debug)]\n\n/// Read-only struct representing a Followup message sent by some application.\n\npub struct FollowupMessage {\n\n #[serde_as(as = \"DisplayFromStr\")]\n\n id: Snowflake,\n\n r#type: u8,\n\n content: Option<String>,\n\n embeds: Vec<Embed>,\n\n #[serde_as(as = \"Option<DisplayFromStr>\")]\n\n #[serde(default)]\n\n channel_id: Option<Snowflake>,\n\n author: Option<User>,\n\n tts: bool,\n\n #[serde_as(as = \"DisplayFromStr\")]\n\n timestamp: DateTime<Utc>,\n\n #[serde_as(as = \"Option<DisplayFromStr>\")]\n", "file_path": "src/types/interaction.rs", "rank": 97, "score": 28941.60775611093 }, { "content": " pub components: Option<Vec<MessageComponent>>,\n\n /// Used for files.\n\n pub payload_json: Option<String>,\n\n allowed_mentions: Option<AllowedMentions>,\n\n}\n\n#[cfg(feature = 
\"handler\")]\n\nimpl WebhookMessage {\n\n /// Add text to this WebhookMessage\n\n pub fn content(mut self, content: impl ToString) -> Self {\n\n self.content = Some(content.to_string());\n\n self\n\n }\n\n\n\n /// Add an embed to this WebhookMessage\n\n pub fn add_embed(mut self, embed: Embed) -> Self {\n\n match self.embeds.as_mut() {\n\n None => {\n\n self.embeds = Some(vec![embed]);\n\n }\n\n Some(e) => {\n", "file_path": "src/types/interaction.rs", "rank": 98, "score": 28941.485813204374 }, { "content": " #[serde(default)]\n\n edited_timestamp: Option<DateTime<Utc>>,\n\n flags: u32,\n\n #[serde_as(as = \"DisplayFromStr\")]\n\n application_id: Snowflake,\n\n #[serde_as(as = \"DisplayFromStr\")]\n\n webhook_id: Snowflake,\n\n message_reference: MessageReference,\n\n\n\n #[serde(skip)]\n\n interaction_token: String,\n\n #[serde(skip)]\n\n client: Client,\n\n}\n\n#[cfg(feature = \"handler\")]\n\n/// Getter functions\n\nimpl FollowupMessage {\n\n /// Get the ID of this follow up\n\n pub fn id(&self) -> Snowflake {\n\n self.id\n", "file_path": "src/types/interaction.rs", "rank": 99, "score": 28941.440927497424 } ]
Rust
src/api/schema/components/source.rs
parampavar/vector
83bd797ff6a05fb3246a2442a701db3a85e323b5
use std::cmp; use async_graphql::{Enum, InputObject, Object}; use strum::IntoEnumIterator; use strum_macros::EnumIter; use super::{sink, state, transform, Component}; use crate::{ api::schema::{ filter, metrics::{self, outputs_by_component_key, IntoSourceMetrics, Output}, sort, }, config::{ComponentKey, DataType, OutputId}, filter_check, }; #[derive(Debug, Enum, EnumIter, Eq, PartialEq, Copy, Clone, Ord, PartialOrd)] pub enum SourceOutputType { Log, Metric, Trace, } #[derive(Debug, Clone)] pub struct Data { pub component_key: ComponentKey, pub component_type: String, pub output_type: DataType, pub outputs: Vec<String>, } #[derive(Enum, Copy, Clone, Eq, PartialEq)] pub enum SourcesSortFieldName { ComponentKey, ComponentType, OutputType, } #[derive(Debug, Clone)] pub struct Source(pub Data); impl Source { #[allow(clippy::missing_const_for_fn)] pub fn get_component_key(&self) -> &ComponentKey { &self.0.component_key } pub fn get_component_type(&self) -> &str { self.0.component_type.as_str() } pub fn get_output_types(&self) -> Vec<SourceOutputType> { SourceOutputType::iter() .filter(|s| self.0.output_type.contains(s.into())) .map(Into::into) .collect() } pub fn get_outputs(&self) -> &[String] { self.0.outputs.as_ref() } } impl From<&SourceOutputType> for DataType { fn from(s: &SourceOutputType) -> Self { match s { SourceOutputType::Log => DataType::Log, SourceOutputType::Metric => DataType::Metric, SourceOutputType::Trace => DataType::Trace, } } } impl sort::SortableByField<SourcesSortFieldName> for Source { fn sort(&self, rhs: &Self, field: &SourcesSortFieldName) -> cmp::Ordering { match field { SourcesSortFieldName::ComponentKey => { Ord::cmp(self.get_component_key(), rhs.get_component_key()) } SourcesSortFieldName::ComponentType => { Ord::cmp(self.get_component_type(), rhs.get_component_type()) } SourcesSortFieldName::OutputType => { Ord::cmp(&u8::from(self.0.output_type), &u8::from(rhs.0.output_type)) } } } } #[Object] impl Source { pub async fn component_id(&self) 
-> &str { self.0.component_key.id() } pub async fn component_type(&self) -> &str { self.get_component_type() } pub async fn output_types(&self) -> Vec<SourceOutputType> { self.get_output_types() } pub async fn outputs(&self) -> Vec<Output> { outputs_by_component_key(self.get_component_key(), self.get_outputs()) } pub async fn transforms(&self) -> Vec<transform::Transform> { state::filter_components(|(_component_key, components)| match components { Component::Transform(t) if t.0.inputs.contains(&OutputId::from(&self.0.component_key)) => { Some(t.clone()) } _ => None, }) } pub async fn sinks(&self) -> Vec<sink::Sink> { state::filter_components(|(_component_key, components)| match components { Component::Sink(s) if s.0.inputs.contains(&OutputId::from(&self.0.component_key)) => { Some(s.clone()) } _ => None, }) } pub async fn metrics(&self) -> metrics::SourceMetrics { metrics::by_component_key(&self.0.component_key) .into_source_metrics(self.get_component_type()) } } #[derive(Default, InputObject)] pub(super) struct SourcesFilter { component_id: Option<Vec<filter::StringFilter>>, component_type: Option<Vec<filter::StringFilter>>, output_type: Option<Vec<filter::ListFilter<SourceOutputType>>>, or: Option<Vec<Self>>, } impl filter::CustomFilter<Source> for SourcesFilter { fn matches(&self, source: &Source) -> bool { filter_check!( self.component_id.as_ref().map(|f| f .iter() .all(|f| f.filter_value(&source.get_component_key().to_string()))), self.component_type.as_ref().map(|f| f .iter() .all(|f| f.filter_value(source.get_component_type()))), self.output_type .as_ref() .map(|f| f.iter().all(|f| f.filter_value(source.get_output_types()))) ); true } fn or(&self) -> Option<&Vec<Self>> { self.or.as_ref() } } #[cfg(test)] mod tests { use super::*; use crate::config::DataType; fn source_fixtures() -> Vec<Source> { vec![ Source(Data { component_key: ComponentKey::from("gen1"), component_type: "demo_logs".to_string(), output_type: DataType::Log | DataType::Metric, outputs: 
vec![], }), Source(Data { component_key: ComponentKey::from("gen2"), component_type: "demo_logs".to_string(), output_type: DataType::Log, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen3"), component_type: "demo_logs".to_string(), output_type: DataType::Metric, outputs: vec![], }), ] } #[test] fn filter_output_type() { struct Test { component_id: &'static str, output_types: Vec<SourceOutputType>, } let tests = vec![ Test { component_id: "gen1", output_types: vec![SourceOutputType::Log, SourceOutputType::Metric], }, Test { component_id: "gen2", output_types: vec![SourceOutputType::Log], }, Test { component_id: "gen3", output_types: vec![SourceOutputType::Metric], }, ]; for t in tests { let filter = SourcesFilter { component_id: Some(vec![filter::StringFilter { equals: Some(t.component_id.to_string()), ..Default::default() }]), output_type: Some(vec![filter::ListFilter::<SourceOutputType> { equals: Some(t.output_types), not_equals: None, contains: None, not_contains: None, }]), ..Default::default() }; let sources = filter::filter_items(source_fixtures().into_iter(), &filter); assert_eq!(sources.len(), 1); } } #[test] fn sort_component_id_desc() { let mut sources = source_fixtures(); let fields = vec![sort::SortField::<SourcesSortFieldName> { field: SourcesSortFieldName::ComponentKey, direction: sort::Direction::Desc, }]; sort::by_fields(&mut sources, &fields); for (i, component_id) in ["gen3", "gen2", "gen1"].iter().enumerate() { assert_eq!(sources[i].get_component_key().to_string(), *component_id); } } #[test] fn sort_component_type_asc() { let mut sources = vec![ Source(Data { component_key: ComponentKey::from("gen2"), component_type: "file".to_string(), output_type: DataType::Log | DataType::Metric, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen3"), component_type: "demo_logs".to_string(), output_type: DataType::Log, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen1"), component_type: 
"docker_logs".to_string(), output_type: DataType::Metric, outputs: vec![], }), ]; let fields = vec![sort::SortField::<SourcesSortFieldName> { field: SourcesSortFieldName::ComponentType, direction: sort::Direction::Asc, }]; sort::by_fields(&mut sources, &fields); for (i, component_id) in ["gen3", "gen1", "gen2"].iter().enumerate() { assert_eq!(sources[i].get_component_key().to_string(), *component_id); } } #[test] fn sort_component_type_desc() { let mut sources = vec![ Source(Data { component_key: ComponentKey::from("gen3"), component_type: "file".to_string(), output_type: DataType::Log | DataType::Metric, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen2"), component_type: "demo_logs".to_string(), output_type: DataType::Log, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen1"), component_type: "docker_logs".to_string(), output_type: DataType::Metric, outputs: vec![], }), ]; let fields = vec![sort::SortField::<SourcesSortFieldName> { field: SourcesSortFieldName::ComponentType, direction: sort::Direction::Desc, }]; sort::by_fields(&mut sources, &fields); for (i, component_id) in ["gen3", "gen1", "gen2"].iter().enumerate() { assert_eq!(sources[i].get_component_key().to_string(), *component_id); } } #[test] fn sort_output_type_asc() { let mut sources = vec![ Source(Data { component_key: ComponentKey::from("gen4"), component_type: "demo_trace".to_string(), output_type: DataType::Trace, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen1"), component_type: "demo_logs".to_string(), output_type: DataType::Metric, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen2"), component_type: "file".to_string(), output_type: DataType::Log, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen3"), component_type: "mutliple_type".to_string(), output_type: DataType::Log | DataType::Metric | DataType::Trace, outputs: vec![], }), ]; let fields = 
vec![sort::SortField::<SourcesSortFieldName> { field: SourcesSortFieldName::OutputType, direction: sort::Direction::Asc, }]; sort::by_fields(&mut sources, &fields); for (i, component_id) in ["gen2", "gen1", "gen4", "gen3"].iter().enumerate() { assert_eq!(sources[i].get_component_key().to_string(), *component_id); } } #[test] fn sort_output_type_desc() { let mut sources = vec![ Source(Data { component_key: ComponentKey::from("gen4"), component_type: "demo_trace".to_string(), output_type: DataType::Trace, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen1"), component_type: "demo_logs".to_string(), output_type: DataType::Metric, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen2"), component_type: "file".to_string(), output_type: DataType::Log, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen3"), component_type: "mutliple_type".to_string(), output_type: DataType::Log | DataType::Metric | DataType::Trace, outputs: vec![], }), ]; let fields = vec![sort::SortField::<SourcesSortFieldName> { field: SourcesSortFieldName::OutputType, direction: sort::Direction::Desc, }]; sort::by_fields(&mut sources, &fields); for (i, component_id) in ["gen3", "gen4", "gen1", "gen2"].iter().enumerate() { assert_eq!(sources[i].get_component_key().to_string(), *component_id); } } }
use std::cmp; use async_graphql::{Enum, InputObject, Object}; use strum::IntoEnumIterator; use strum_macros::EnumIter; use super::{sink, state, transform, Component}; use crate::{ api::schema::{ filter, metrics::{self, outputs_by_component_key, IntoSourceMetrics, Output}, sort, }, config::{ComponentKey, DataType, OutputId}, filter_check, }; #[derive(Debug, Enum, EnumIter, Eq, PartialEq, Copy, Clone, Ord, PartialOrd)] pub enum SourceOutputType { Log, Metric, Trace, } #[derive(Debug, Clone)] pub struct Data { pub component_key: ComponentKey, pub component_type: String, pub output_type: DataType, pub outputs: Vec<String>, } #[derive(Enum, Copy, Clone, Eq, PartialEq)] pub enum SourcesSortFieldName { ComponentKey, ComponentType, OutputType, } #[derive(Debug, Clone)] pub struct Source(pub Data); impl Source { #[allow(clippy::missing_const_for_fn)] pub fn get_component_key(&self) -> &ComponentKey { &self.0.component_key } pub fn get_component_type(&self) -> &str { self.0.component_type.as_str() } pub fn get_output_types(&self) -> Vec<SourceOutputType> { SourceOutputType::iter() .filter(|s| self.0.output_type.contains(s.into())) .map(Into::into) .collect() } pub fn get_outputs(&self) -> &[String] { self.0.outputs.as_ref() } } impl From<&SourceOutputType> for DataType { fn from(s: &SourceOutputType) -> Self { match s { SourceOutputType::Log => DataType::Log, SourceOutputType::Metric => DataType::Metric, SourceOutputType::Trace => DataType::Trace, } } } impl sort::SortableByField<SourcesSortFieldName> for Source { fn sort(&self, rhs: &Self, field: &SourcesSortFieldName) -> cmp::Ordering { match field { SourcesSortFieldName::ComponentKey => { Ord::cmp(self.get_component_key(), rhs.get_component_key()) } SourcesSortFieldName::ComponentType => { Ord::cmp(self.get_component_type(), rhs.get_component_type()) } SourcesSortFieldName::OutputType => { Ord::cmp(&u8::from(self.0.output_type), &u8::from(rhs.0.output_type)) } } } } #[Object] impl Source { pub async fn component_id(&self) 
-> &str { self.0.component_key.id() } pub async fn component_type(&self) -> &str { self.get_component_type() } pub async fn output_types(&self) -> Vec<SourceOutputType> { self.get_output_types() } pub async fn outputs(&self) -> Vec<Output> { outputs_by_component_key(self.get_component_key(), self.get_outputs()) } pub async fn transforms(&self) -> Vec<transform::Transform> { state::filter_components(|(_component_key, components)| match components { Component::Transform(t) if t.0.inputs.contains(&OutputId::from(&self.0.component_key)) => { Some(t.clone()) } _ => None, }) } pub async fn sinks(&self) -> Vec<sink::Sink> { state::filter_components(|(_component_key, components)| match components { Component::Sink(s) if s.0.inputs.contains(&OutputId::from(&self.0.component_key)) => { Some(s.clone()) } _ => None, }) } pub async fn metrics(&self) -> metrics::SourceMetrics { metrics::by_component_key(&self.0.component_key) .into_source_metrics(self.get_component_type()) } } #[derive(Default, InputObject)] pub(super) struct SourcesFilter { component_id: Option<Vec<filter::StringFilter>>, component_type: Option<Vec<filter::StringFilter>>, output_type: Option<Vec<filter::ListFilter<SourceOutputType>>>, or: Option<Vec<Self>>, } impl filter::CustomFilter<Source> for SourcesFilter { fn matches(&self, source: &Source) -> bool { filter_check!( self.component_id.as_ref().map(|f| f .iter() .all(|f| f.filter_value(&source.get_component_key().to_string()))), self.component_type.as_ref().map(|f| f .iter() .all(|f| f.filter_value(source.get_component_type()))), self.output_type .as_ref() .map(|f| f.iter().all(|f| f.filter_value(source.get_output_types()))) ); true } fn or(&self) -> Option<&Vec<Self>> { self.or.as_ref() } } #[cfg(test)] mod tests { use super::*; use crate::config::DataType; fn source_fixtures() -> Vec<Source> { vec![ Source(Data { component_key: ComponentKey::from("gen1"), component_type: "demo_logs".to_string(), output_type: DataType::Log | DataType::Metric, outputs: 
vec![], }), Source(Data { component_key: ComponentKey::from("gen2"), component_type: "demo_logs".to_string(), output_type: DataType::Log, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen3"), component_type: "demo_logs".to_string(), output_type: DataType::Metric, outputs: vec![], }), ] } #[test] fn filter_output_type() { struct Test { component_id: &'static str, output_types: Vec<SourceOutputType>, } let tests = vec![ Test { component_id: "gen1", output_types: vec![SourceOutputType::Log, SourceOutputType::Metric], }, Test { component_id: "gen2", output_types: vec![SourceOutputType::Log], }, Test { component_id: "gen3", output_types: vec![SourceOutputType::Metric], }, ]; for t in tests { let filter = SourcesFilter { component_id: Some(vec![filter::StringFilter { equals: Some(t.component_id.to_string()), ..Default::default() }]), output_type: Some(vec![filter::ListFilter::<SourceOutputType> { equals: Some(t.output_types), not_equals: None, contains: None, not_contains: None, }]), ..Default::default() }; let sources = filter::filter_items(source_fixtures().into_iter(), &filter); assert_eq!(sources.len(), 1); } } #[test] fn sort_component_id_desc() { let mut sources = source_fixtures();
sort::by_fields(&mut sources, &fields); for (i, component_id) in ["gen3", "gen2", "gen1"].iter().enumerate() { assert_eq!(sources[i].get_component_key().to_string(), *component_id); } } #[test] fn sort_component_type_asc() { let mut sources = vec![ Source(Data { component_key: ComponentKey::from("gen2"), component_type: "file".to_string(), output_type: DataType::Log | DataType::Metric, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen3"), component_type: "demo_logs".to_string(), output_type: DataType::Log, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen1"), component_type: "docker_logs".to_string(), output_type: DataType::Metric, outputs: vec![], }), ]; let fields = vec![sort::SortField::<SourcesSortFieldName> { field: SourcesSortFieldName::ComponentType, direction: sort::Direction::Asc, }]; sort::by_fields(&mut sources, &fields); for (i, component_id) in ["gen3", "gen1", "gen2"].iter().enumerate() { assert_eq!(sources[i].get_component_key().to_string(), *component_id); } } #[test] fn sort_component_type_desc() { let mut sources = vec![ Source(Data { component_key: ComponentKey::from("gen3"), component_type: "file".to_string(), output_type: DataType::Log | DataType::Metric, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen2"), component_type: "demo_logs".to_string(), output_type: DataType::Log, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen1"), component_type: "docker_logs".to_string(), output_type: DataType::Metric, outputs: vec![], }), ]; let fields = vec![sort::SortField::<SourcesSortFieldName> { field: SourcesSortFieldName::ComponentType, direction: sort::Direction::Desc, }]; sort::by_fields(&mut sources, &fields); for (i, component_id) in ["gen3", "gen1", "gen2"].iter().enumerate() { assert_eq!(sources[i].get_component_key().to_string(), *component_id); } } #[test] fn sort_output_type_asc() { let mut sources = vec![ Source(Data { component_key: 
ComponentKey::from("gen4"), component_type: "demo_trace".to_string(), output_type: DataType::Trace, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen1"), component_type: "demo_logs".to_string(), output_type: DataType::Metric, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen2"), component_type: "file".to_string(), output_type: DataType::Log, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen3"), component_type: "mutliple_type".to_string(), output_type: DataType::Log | DataType::Metric | DataType::Trace, outputs: vec![], }), ]; let fields = vec![sort::SortField::<SourcesSortFieldName> { field: SourcesSortFieldName::OutputType, direction: sort::Direction::Asc, }]; sort::by_fields(&mut sources, &fields); for (i, component_id) in ["gen2", "gen1", "gen4", "gen3"].iter().enumerate() { assert_eq!(sources[i].get_component_key().to_string(), *component_id); } } #[test] fn sort_output_type_desc() { let mut sources = vec![ Source(Data { component_key: ComponentKey::from("gen4"), component_type: "demo_trace".to_string(), output_type: DataType::Trace, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen1"), component_type: "demo_logs".to_string(), output_type: DataType::Metric, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen2"), component_type: "file".to_string(), output_type: DataType::Log, outputs: vec![], }), Source(Data { component_key: ComponentKey::from("gen3"), component_type: "mutliple_type".to_string(), output_type: DataType::Log | DataType::Metric | DataType::Trace, outputs: vec![], }), ]; let fields = vec![sort::SortField::<SourcesSortFieldName> { field: SourcesSortFieldName::OutputType, direction: sort::Direction::Desc, }]; sort::by_fields(&mut sources, &fields); for (i, component_id) in ["gen3", "gen4", "gen1", "gen2"].iter().enumerate() { assert_eq!(sources[i].get_component_key().to_string(), *component_id); } } }
let fields = vec![sort::SortField::<SourcesSortFieldName> { field: SourcesSortFieldName::ComponentKey, direction: sort::Direction::Desc, }];
assignment_statement
[ { "content": "pub fn outputs_by_component_key(component_key: &ComponentKey, outputs: &[String]) -> Vec<Output> {\n\n let metrics = by_component_key(component_key)\n\n .into_iter()\n\n .filter(|m| m.name() == \"component_sent_events_total\")\n\n .collect::<Vec<_>>();\n\n\n\n outputs\n\n .iter()\n\n .map(|output| {\n\n Output::new(\n\n output.clone(),\n\n filter_output_metric(&metrics, output.as_ref()),\n\n )\n\n })\n\n .collect::<Vec<_>>()\n\n}\n\n\n", "file_path": "src/api/schema/metrics/output.rs", "rank": 0, "score": 695229.3390819541 }, { "content": "fn add_collector(collector: &str, mut metrics: Vec<Metric>) -> Vec<Metric> {\n\n for metric in &mut metrics {\n\n metric.insert_tag(\"collector\".into(), collector.into());\n\n }\n\n metrics\n\n}\n\n\n", "file_path": "src/sources/host_metrics/mod.rs", "rank": 1, "score": 606783.2937260248 }, { "content": "/// Return Vec<Metric> based on a component id tag.\n\npub fn by_component_key(component_key: &ComponentKey) -> Vec<Metric> {\n\n get_controller()\n\n .capture_metrics()\n\n .into_iter()\n\n .filter_map(|m| m.tag_matches(\"component_id\", component_key.id()).then(|| m))\n\n .collect()\n\n}\n\n\n", "file_path": "src/api/schema/metrics/filter.rs", "rank": 2, "score": 594106.8328979529 }, { "content": "/// If the closure returns false, then the element is removed\n\nfn retain<T>(vec: &mut Vec<T>, mut retain_filter: impl FnMut(&mut T) -> bool) {\n\n let mut i = 0;\n\n while let Some(data) = vec.get_mut(i) {\n\n if retain_filter(data) {\n\n i += 1;\n\n } else {\n\n let _ = vec.remove(i);\n\n }\n\n }\n\n}\n", "file_path": "src/topology/mod.rs", "rank": 3, "score": 590800.782671236 }, { "content": "/// Loads Log Schema from configurations and sets global schema. 
Once this is\n\n/// done, configurations can be correctly loaded using configured log schema\n\n/// defaults.\n\n///\n\n/// # Errors\n\n///\n\n/// This function will fail if the `builder` fails.\n\n///\n\n/// # Panics\n\n///\n\n/// If deny is set, will panic if schema has already been set.\n\npub fn init_log_schema<F>(builder: F, deny_if_set: bool) -> Result<(), Vec<String>>\n\nwhere\n\n F: FnOnce() -> Result<LogSchema, Vec<String>>,\n\n{\n\n let log_schema = builder()?;\n\n assert!(\n\n !(LOG_SCHEMA.set(log_schema).is_err() && deny_if_set),\n\n \"Couldn't set schema\"\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/vector-core/src/config/log_schema.rs", "rank": 4, "score": 581318.7373640321 }, { "content": "pub fn source_with_data(data: &str) -> (SourceSender, MockSourceConfig) {\n\n let (tx, rx) = SourceSender::new_with_buffer(1);\n\n let source = MockSourceConfig::new_with_data(rx, data);\n\n (tx, source)\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 5, "score": 571368.3149223144 }, { "content": "/// Filter components with the provided `map_func`\n\npub fn filter_components<T>(map_func: impl Fn((&ComponentKey, &Component)) -> Option<T>) -> Vec<T> {\n\n COMPONENTS\n\n .read()\n\n .expect(INVARIANT)\n\n .iter()\n\n .filter_map(map_func)\n\n .collect()\n\n}\n\n\n", "file_path": "src/api/schema/components/state.rs", "rank": 6, "score": 547754.5487537573 }, { "content": "/// Filters components, and returns a clone of transforms\n\npub fn get_transforms() -> Vec<transform::Transform> {\n\n filter_components(|(_, components)| match components {\n\n Component::Transform(t) => Some(t.clone()),\n\n _ => None,\n\n })\n\n}\n\n\n", "file_path": "src/api/schema/components/state.rs", "rank": 7, "score": 535814.5312704435 }, { "content": "// Splits the given input by a separator.\n\n// If the separator is `None`, then it will split on whitespace.\n\npub fn split(input: &str, separator: Option<String>) -> Vec<&str> {\n\n match separator {\n\n Some(separator) => 
input.split(&separator).collect(),\n\n None => input.split_whitespace().collect(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use ordered_float::NotNan;\n\n\n\n use super::*;\n\n use crate::{\n\n config::TransformConfig,\n\n event::{Event, LogEvent, Value},\n\n };\n\n\n\n #[test]\n\n fn generate_config() {\n\n crate::test_util::test_generate_config::<SplitConfig>();\n", "file_path": "src/transforms/split.rs", "rank": 8, "score": 530891.1420269064 }, { "content": "/// Tests if the given metric contains all the given tag names\n\nfn has_tags(metric: &Metric, names: &[&str]) -> bool {\n\n metric\n\n .tags()\n\n .map(|tags| names.iter().all(|name| tags.contains_key(*name)))\n\n .unwrap_or_else(|| names.is_empty())\n\n}\n\n\n", "file_path": "src/test_util/components.rs", "rank": 9, "score": 527410.2450696201 }, { "content": "/// Build a log event for test purposes.\n\n///\n\n/// The implementation is shared, and therefore consistent across all\n\n/// the parsers.\n\npub fn make_log_event(message: &str, timestamp: &str, stream: &str, is_partial: bool) -> LogEvent {\n\n let mut log = LogEvent::default();\n\n\n\n log.insert(\"message\", message);\n\n\n\n let timestamp = DateTime::parse_from_rfc3339(timestamp)\n\n .expect(\"invalid test case\")\n\n .with_timezone(&Utc);\n\n log.insert(\"timestamp\", timestamp);\n\n\n\n log.insert(\"stream\", stream);\n\n\n\n if is_partial {\n\n log.insert(\"_partial\", true);\n\n }\n\n log\n\n}\n\n\n\n/// Build a log event for test purposes.\n\n/// Message can be a not valid UTF-8 string\n", "file_path": "src/sources/kubernetes_logs/parser/test_util.rs", "rank": 10, "score": 525887.261530264 }, { "content": "/// Iterates over all paths in form `a.b[0].c[1]` in alphabetical order.\n\n/// It is implemented as a wrapper around `all_fields` to reduce code\n\n/// duplication.\n\npub fn keys(fields: &BTreeMap<String, Value>) -> impl Iterator<Item = String> + '_ {\n\n all_fields(fields).map(|(k, _)| k)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test 
{\n\n use serde_json::json;\n\n\n\n use super::{super::test::fields_from_json, *};\n\n\n\n #[test]\n\n fn keys_simple() {\n\n let fields = fields_from_json(json!({\n\n \"field2\": 3,\n\n \"field1\": 4,\n\n \"field3\": 5\n\n }));\n\n let expected: Vec<_> = vec![\"field1\", \"field2\", \"field3\"]\n\n .into_iter()\n\n .map(String::from)\n", "file_path": "lib/vector-core/src/event/util/log/keys.rs", "rank": 11, "score": 525126.8217449867 }, { "content": "/// Loads Log Schema from configurations and sets global schema.\n\n/// Once this is done, configurations can be correctly loaded using\n\n/// configured log schema defaults.\n\n/// If deny is set, will panic if schema has already been set.\n\npub fn init_log_schema(config_paths: &[ConfigPath], deny_if_set: bool) -> Result<(), Vec<String>> {\n\n vector_core::config::init_log_schema(\n\n || {\n\n let (builder, _) = load_builder_from_paths(config_paths)?;\n\n Ok(builder.global.log_schema)\n\n },\n\n deny_if_set,\n\n )\n\n}\n\n\n\n#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq)]\n\npub enum ConfigPath {\n\n File(PathBuf, FormatHint),\n\n Dir(PathBuf),\n\n}\n\n\n\nimpl<'a> From<&'a ConfigPath> for &'a PathBuf {\n\n fn from(config_path: &'a ConfigPath) -> &'a PathBuf {\n\n match config_path {\n\n ConfigPath::File(path, _) => path,\n", "file_path": "src/config/mod.rs", "rank": 12, "score": 525060.2634600642 }, { "content": "/// Filters items based on an implementation of `CustomFilter<T>`.\n\npub fn filter_items<Item, Iter, Filter>(items: Iter, f: &Filter) -> Vec<Item>\n\nwhere\n\n Iter: Iterator<Item = Item>,\n\n Filter: CustomFilter<Item>,\n\n{\n\n items.filter(|c| filter_item(c, f)).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::StringFilter;\n\n\n\n #[test]\n\n fn string_equals() {\n\n let value = \"test\";\n\n\n\n let sf = StringFilter {\n\n equals: value.to_string().into(),\n\n ..Default::default()\n\n };\n", "file_path": "src/api/schema/filter.rs", "rank": 13, "score": 523182.66739471897 }, { 
"content": "pub fn random_lines(len: usize) -> impl Iterator<Item = String> {\n\n iter::repeat_with(move || random_string(len))\n\n}\n\n\n", "file_path": "src/test_util/mod.rs", "rank": 14, "score": 521255.8560224207 }, { "content": "pub fn filter_output_metric(metrics: &[Metric], output_name: &str) -> Option<Metric> {\n\n sum_metrics(\n\n metrics\n\n .iter()\n\n .filter(|m| m.tag_matches(\"output\", output_name)),\n\n )\n\n}\n", "file_path": "src/api/schema/metrics/output.rs", "rank": 15, "score": 518221.90765001514 }, { "content": "fn collect_secret_keys(input: &str, keys: &mut HashMap<String, Vec<String>>) {\n\n COLLECTOR.captures_iter(input).for_each(|cap| {\n\n if let (Some(backend), Some(key)) = (cap.get(1), cap.get(2)) {\n\n if let Some(keys) = keys.get_mut(backend.as_str()) {\n\n keys.push(key.as_str().to_string());\n\n } else {\n\n keys.insert(backend.as_str().to_string(), vec![key.as_str().to_string()]);\n\n }\n\n }\n\n });\n\n}\n\n\n", "file_path": "src/config/loading/secret.rs", "rank": 16, "score": 509730.2254155848 }, { "content": "pub fn get_all_metrics(interval: i32) -> impl Stream<Item = Vec<Metric>> {\n\n let controller = get_controller();\n\n let mut interval = tokio::time::interval(Duration::from_millis(interval as u64));\n\n\n\n stream! 
{\n\n loop {\n\n interval.tick().await;\n\n yield controller.capture_metrics()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/api/schema/metrics/filter.rs", "rank": 17, "score": 508679.5885729983 }, { "content": "fn encode_string(output: &mut String, str: &str) {\n\n let needs_quoting = str.chars().any(|c| c.is_whitespace() || c == '\"');\n\n\n\n if needs_quoting {\n\n output.write_char('\"').unwrap();\n\n }\n\n\n\n for c in str.chars() {\n\n match c {\n\n '\\\\' => output.push_str(r#\"\\\\\"#),\n\n '\"' => output.push_str(r#\"\\\"\"#),\n\n '\\n' => output.push_str(r#\"\\\\n\"#),\n\n _ => output.push(c),\n\n }\n\n }\n\n\n\n if needs_quoting {\n\n output.push('\"');\n\n }\n\n}\n\n\n", "file_path": "lib/vector-common/src/encode_key_value.rs", "rank": 18, "score": 506461.56424035213 }, { "content": "/// This helper function validates the presence of `vector_started`-ish metric.\n\npub fn extract_vector_started(metrics: &str) -> bool {\n\n metrics_regex().captures_iter(metrics).any(|captures| {\n\n let metric_name = &captures[\"name\"];\n\n let value = &captures[\"value\"];\n\n metric_name.contains(\"vector_started\") && value == \"1\"\n\n })\n\n}\n\n\n\n/// This helper function performs an HTTP request to the specified URL and\n\n/// extracts the sum of `component_sent_events_total`-ish metrics across all labels.\n\npub async fn get_component_sent_events_total(url: &str) -> Result<u64, Box<dyn std::error::Error>> {\n\n let metrics = load(url).await?;\n\n extract_component_sent_events_total_sum(&metrics)\n\n}\n\n\n\n/// This helper function performs an HTTP request to the specified URL and\n\n/// validates the presence of `vector_started`-ish metric.\n\npub async fn assert_vector_started(url: &str) -> Result<(), Box<dyn std::error::Error>> {\n\n let metrics = load(url).await?;\n\n if !extract_vector_started(&metrics) {\n", "file_path": "lib/k8s-e2e-tests/src/metrics.rs", "rank": 19, "score": 503139.16989832604 }, { "content": "fn encode_string(key: &str, output: &mut 
BytesMut) {\n\n for c in key.chars() {\n\n if \"\\\\, =\".contains(c) {\n\n output.put_u8(b'\\\\');\n\n }\n\n let mut c_buffer: [u8; 4] = [0; 4];\n\n output.put_slice(c.encode_utf8(&mut c_buffer).as_bytes());\n\n }\n\n}\n\n\n\npub(in crate::sinks) fn encode_timestamp(timestamp: Option<DateTime<Utc>>) -> i64 {\n\n if let Some(ts) = timestamp {\n\n ts.timestamp_nanos()\n\n } else {\n\n encode_timestamp(Some(Utc::now()))\n\n }\n\n}\n\n\n\npub(in crate::sinks) fn encode_uri(\n\n endpoint: &str,\n", "file_path": "src/sinks/influxdb/mod.rs", "rank": 20, "score": 502876.792098069 }, { "content": "/// Create config adding fullnameOverride entry. This allows multiple tests\n\n/// to be run against the same cluster without the role names clashing.\n\npub fn config_override_name(name: &str, cleanup: bool) -> String {\n\n let vectordir = if is_multinode() {\n\n format!(\"{}-vector\", name)\n\n } else {\n\n \"vector\".to_string()\n\n };\n\n\n\n let volumeconfig = if is_multinode() {\n\n formatdoc!(\n\n r#\"\n\n dataVolume:\n\n hostPath:\n\n path: /var/lib/{}/\n\n \"#,\n\n vectordir,\n\n )\n\n } else {\n\n String::new()\n\n };\n\n\n", "file_path": "lib/k8s-e2e-tests/src/lib.rs", "rank": 21, "score": 500299.81589285634 }, { "content": "pub fn prepare_input<R: std::io::Read>(mut input: R) -> Result<(String, Vec<String>), Vec<String>> {\n\n let mut source_string = String::new();\n\n input\n\n .read_to_string(&mut source_string)\n\n .map_err(|e| vec![e.to_string()])?;\n\n\n\n let mut vars = std::env::vars().collect::<HashMap<_, _>>();\n\n if !vars.contains_key(\"HOSTNAME\") {\n\n if let Ok(hostname) = crate::get_hostname() {\n\n vars.insert(\"HOSTNAME\".into(), hostname);\n\n }\n\n }\n\n vars::interpolate(&source_string, &vars)\n\n}\n\n\n", "file_path": "src/config/loading/mod.rs", "rank": 22, "score": 500238.81523349194 }, { "content": "/// Performs an in-place sort against a slice of Sortable<T>, with the provided SortField<T>s\n\npub fn by_fields<T: InputType>(f: &mut [impl 
SortableByField<T>], sort_fields: &[SortField<T>]) {\n\n f.sort_by(|a, b| {\n\n sort_fields\n\n .iter()\n\n .fold_while(Ordering::Equal, |cmp, f| match cmp {\n\n Ordering::Equal => {\n\n let cmp = a.sort(b, &f.field);\n\n Continue(match f.direction {\n\n Direction::Desc => cmp.reverse(),\n\n _ => cmp,\n\n })\n\n }\n\n _ => Done(cmp),\n\n })\n\n .into_inner()\n\n });\n\n}\n", "file_path": "src/api/schema/sort.rs", "rank": 23, "score": 499851.838882589 }, { "content": "// This function constructs the effective field selector to use, based on\n\n// the specified configuration.\n\nfn prepare_field_selector(config: &Config) -> crate::Result<String> {\n\n let self_node_name = if config.self_node_name.is_empty()\n\n || config.self_node_name == default_self_node_name_env_template()\n\n {\n\n std::env::var(SELF_NODE_NAME_ENV_KEY).map_err(|_| {\n\n format!(\n\n \"self_node_name config value or {} env var is not set\",\n\n SELF_NODE_NAME_ENV_KEY\n\n )\n\n })?\n\n } else {\n\n config.self_node_name.clone()\n\n };\n\n info!(\n\n message = \"Obtained Kubernetes Node name to collect logs for (self).\",\n\n ?self_node_name\n\n );\n\n\n\n let field_selector = format!(\"spec.nodeName={}\", self_node_name);\n\n\n\n if config.extra_field_selector.is_empty() {\n\n return Ok(field_selector);\n\n }\n\n\n\n Ok(format!(\n\n \"{},{}\",\n\n field_selector, config.extra_field_selector\n\n ))\n\n}\n\n\n", "file_path": "src/sources/kubernetes_logs/mod.rs", "rank": 24, "score": 494922.69920956396 }, { "content": "pub fn transform(suffix: &str, increase: f64) -> MockTransformConfig {\n\n MockTransformConfig::new(suffix.to_owned(), increase)\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 25, "score": 494071.17078058305 }, { "content": "pub fn load_from_str(input: &str, format: Format) -> Result<Config, Vec<String>> {\n\n let (builder, load_warnings) = load_from_inputs(std::iter::once((input.as_bytes(), format)))?;\n\n let (config, build_warnings) = 
builder.build_with_warnings()?;\n\n\n\n for warning in load_warnings.into_iter().chain(build_warnings) {\n\n warn!(\"{}\", warning);\n\n }\n\n\n\n Ok(config)\n\n}\n\n\n", "file_path": "src/config/loading/mod.rs", "rank": 26, "score": 493745.7535137246 }, { "content": "pub fn tests(ignore_cue: bool) -> Vec<Test> {\n\n if ignore_cue {\n\n return vec![];\n\n }\n\n\n\n let dir = fs::canonicalize(\"../../../scripts\").unwrap();\n\n\n\n let output = Command::new(\"bash\")\n\n .current_dir(dir)\n\n .args(&[\"cue.sh\", \"export\", \"-e\", \"remap\"])\n\n .output()\n\n .expect(\"failed to execute process\");\n\n\n\n if output.stdout.is_empty() {\n\n Vec::new()\n\n } else {\n\n let Reference {\n\n examples,\n\n functions,\n\n expressions,\n", "file_path": "lib/vrl/tests/src/docs.rs", "rank": 27, "score": 493634.0413828179 }, { "content": "#[must_use]\n\npub fn contains_name(name: &str) -> bool {\n\n EVENTS_RECORDED.with(|events| events.borrow().iter().any(|event| event.ends_with(name)))\n\n}\n\n\n", "file_path": "lib/vector-common/src/event_test_util.rs", "rank": 28, "score": 488493.90240924613 }, { "content": "fn encode_field<'a>(output: &mut String, key: &str, value: &str, key_value_delimiter: &'a str) {\n\n encode_string(output, key);\n\n output.push_str(key_value_delimiter);\n\n encode_string(output, value);\n\n}\n\n\n", "file_path": "lib/vector-common/src/encode_key_value.rs", "rank": 29, "score": 487140.7837463763 }, { "content": "pub fn compress_distribution(samples: &mut Vec<Sample>) -> Vec<Sample> {\n\n if samples.is_empty() {\n\n return Vec::new();\n\n }\n\n\n\n samples.sort_by(|a, b| a.value.partial_cmp(&b.value).unwrap_or(Ordering::Equal));\n\n\n\n let mut acc = Sample {\n\n value: samples[0].value,\n\n rate: 0,\n\n };\n\n let mut result = Vec::new();\n\n\n\n for sample in samples {\n\n if acc.value == sample.value {\n\n acc.rate += sample.rate;\n\n } else {\n\n result.push(acc);\n\n acc = *sample;\n\n }\n", "file_path": "src/sinks/util/buffer/metrics/mod.rs", 
"rank": 30, "score": 484454.9616812216 }, { "content": "pub fn default_namespace() -> String {\n\n \"mongodb\".to_string()\n\n}\n\n\n\ninventory::submit! {\n\n SourceDescription::new::<MongoDbMetricsConfig>(\"mongodb_metrics\")\n\n}\n\n\n\nimpl_generate_config_from_default!(MongoDbMetricsConfig);\n\n\n\n#[async_trait::async_trait]\n\n#[typetag::serde(name = \"mongodb_metrics\")]\n\nimpl SourceConfig for MongoDbMetricsConfig {\n\n async fn build(&self, mut cx: SourceContext) -> crate::Result<super::Source> {\n\n let namespace = Some(self.namespace.clone()).filter(|namespace| !namespace.is_empty());\n\n\n\n let sources = try_join_all(\n\n self.endpoints\n\n .iter()\n\n .map(|endpoint| MongoDbMetrics::new(endpoint, namespace.clone())),\n", "file_path": "src/sources/mongodb_metrics/mod.rs", "rank": 31, "score": 483397.15830631286 }, { "content": "pub fn default_endpoint() -> String {\n\n \"https://localhost:2113/stats\".to_string()\n\n}\n\n\n\ninventory::submit! {\n\n SourceDescription::new::<EventStoreDbConfig>(\"eventstoredb_metrics\")\n\n}\n\n\n\nimpl_generate_config_from_default!(EventStoreDbConfig);\n\n\n\n#[async_trait::async_trait]\n\n#[typetag::serde(name = \"eventstoredb_metrics\")]\n\nimpl SourceConfig for EventStoreDbConfig {\n\n async fn build(&self, cx: SourceContext) -> crate::Result<super::Source> {\n\n eventstoredb(\n\n self.endpoint.clone(),\n\n self.scrape_interval_secs,\n\n self.default_namespace.clone(),\n\n cx,\n\n )\n", "file_path": "src/sources/eventstoredb_metrics/mod.rs", "rank": 32, "score": 483397.15830631286 }, { "content": "pub fn default_namespace() -> String {\n\n \"apache\".to_string()\n\n}\n\n\n\ninventory::submit! 
{\n\n SourceDescription::new::<ApacheMetricsConfig>(\"apache_metrics\")\n\n}\n\n\n\nimpl GenerateConfig for ApacheMetricsConfig {\n\n fn generate_config() -> toml::Value {\n\n toml::Value::try_from(Self {\n\n endpoints: vec![\"http://localhost:8080/server-status/?auto\".to_owned()],\n\n scrape_interval_secs: default_scrape_interval_secs(),\n\n namespace: default_namespace(),\n\n })\n\n .unwrap()\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n", "file_path": "src/sources/apache_metrics/mod.rs", "rank": 33, "score": 483397.15830631286 }, { "content": "pub fn default_namespace() -> String {\n\n \"nginx\".to_string()\n\n}\n\n\n\ninventory::submit! {\n\n SourceDescription::new::<NginxMetricsConfig>(\"nginx_metrics\")\n\n}\n\n\n\nimpl_generate_config_from_default!(NginxMetricsConfig);\n\n\n\n#[async_trait::async_trait]\n\n#[typetag::serde(name = \"nginx_metrics\")]\n\nimpl SourceConfig for NginxMetricsConfig {\n\n async fn build(&self, mut cx: SourceContext) -> crate::Result<super::Source> {\n\n let tls = TlsSettings::from_options(&self.tls)?;\n\n let http_client = HttpClient::new(tls, &cx.proxy)?;\n\n\n\n let namespace = Some(self.namespace.clone()).filter(|namespace| !namespace.is_empty());\n\n let mut sources = Vec::with_capacity(self.endpoints.len());\n\n for endpoint in self.endpoints.iter() {\n", "file_path": "src/sources/nginx_metrics/mod.rs", "rank": 34, "score": 483397.15830631286 }, { "content": "/// Filters components, and returns a clone of sources\n\npub fn get_sources() -> Vec<source::Source> {\n\n filter_components(|(_, components)| match components {\n\n Component::Source(s) => Some(s.clone()),\n\n _ => None,\n\n })\n\n}\n\n\n", "file_path": "src/api/schema/components/state.rs", "rank": 35, "score": 480021.4480789099 }, { "content": "pub fn compile(mut builder: ConfigBuilder) -> Result<(Config, Vec<String>), Vec<String>> {\n\n let mut errors = Vec::new();\n\n\n\n // component names should not have dots in the configuration file\n\n // but components can 
expand (like route) to have components with a dot\n\n // so this check should be done before expanding components\n\n if let Err(name_errors) = validation::check_names(\n\n builder\n\n .transforms\n\n .keys()\n\n .chain(builder.sources.keys())\n\n .chain(builder.sinks.keys()),\n\n ) {\n\n errors.extend(name_errors);\n\n }\n\n\n\n let expansions = expand_macros(&mut builder)?;\n\n\n\n expand_globs(&mut builder);\n\n\n", "file_path": "src/config/compiler.rs", "rank": 36, "score": 477788.97880328994 }, { "content": "pub fn default_namespace() -> String {\n\n \"awsecs\".to_string()\n\n}\n\n\n\ninventory::submit! {\n\n SourceDescription::new::<AwsEcsMetricsSourceConfig>(\"aws_ecs_metrics\")\n\n}\n\n\n\nimpl AwsEcsMetricsSourceConfig {\n\n fn stats_endpoint(&self) -> String {\n\n match self.version {\n\n Version::V2 => format!(\"{}/stats\", self.endpoint),\n\n _ => format!(\"{}/task/stats\", self.endpoint),\n\n }\n\n }\n\n}\n\n\n\nimpl GenerateConfig for AwsEcsMetricsSourceConfig {\n\n fn generate_config() -> toml::Value {\n\n toml::Value::try_from(Self {\n", "file_path": "src/sources/aws_ecs_metrics/mod.rs", "rank": 37, "score": 476726.933029423 }, { "content": "pub fn default_endpoint() -> String {\n\n env::var(METADATA_URI_V4)\n\n .or_else(|_| env::var(METADATA_URI_V3))\n\n .unwrap_or_else(|_| \"http://169.254.170.2/v2\".into())\n\n}\n\n\n", "file_path": "src/sources/aws_ecs_metrics/mod.rs", "rank": 38, "score": 476726.933029423 }, { "content": "/// Parse the data one way if it looks like a DER file, and the other if\n\n/// it looks like a PEM file. 
For the content to be treated as PEM, it\n\n/// must parse as valid UTF-8 and contain a PEM start marker.\n\nfn der_or_pem<T>(data: Vec<u8>, der_fn: impl Fn(Vec<u8>) -> T, pem_fn: impl Fn(String) -> T) -> T {\n\n // None of these steps cause (re)allocations,\n\n // just parsing and type manipulation\n\n match String::from_utf8(data) {\n\n Ok(text) => match text.find(PEM_START_MARKER) {\n\n Some(_) => pem_fn(text),\n\n None => der_fn(text.into_bytes()),\n\n },\n\n Err(err) => der_fn(err.into_bytes()),\n\n }\n\n}\n\n\n", "file_path": "src/tls/settings.rs", "rank": 39, "score": 470014.44669133937 }, { "content": "pub fn check_names<'a, I: Iterator<Item = &'a ComponentKey>>(names: I) -> Result<(), Vec<String>> {\n\n let errors: Vec<_> = names\n\n .filter(|component_key| component_key.id().contains('.'))\n\n .map(|component_key| {\n\n format!(\n\n \"Component name \\\"{}\\\" should not contain a \\\".\\\"\",\n\n component_key.id()\n\n )\n\n })\n\n .collect();\n\n\n\n if errors.is_empty() {\n\n Ok(())\n\n } else {\n\n Err(errors)\n\n }\n\n}\n\n\n", "file_path": "src/config/validation.rs", "rank": 40, "score": 467167.48310029344 }, { "content": "fn render_metric_field(key: &str, metric: &Metric) -> Option<String> {\n\n match key {\n\n \"name\" => Some(metric.name().into()),\n\n \"namespace\" => metric.namespace().map(Into::into),\n\n _ if key.starts_with(\"tags.\") => {\n\n metric.tags().and_then(|tags| tags.get(&key[5..]).cloned())\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/template.rs", "rank": 41, "score": 464901.0285668309 }, { "content": "fn extract_excluded_containers_for_pod(pod: &Pod) -> impl Iterator<Item = &str> {\n\n let metadata = &pod.metadata;\n\n metadata.annotations.iter().flat_map(|annotations| {\n\n annotations\n\n .iter()\n\n .filter_map(|(key, value)| {\n\n if key != CONTAINER_EXCLUSION_ANNOTATION_KEY {\n\n return None;\n\n }\n\n Some(value)\n\n })\n\n .flat_map(|containers| containers.split(','))\n\n .map(|container| 
container.trim())\n\n })\n\n}\n\n\n", "file_path": "src/sources/kubernetes_logs/k8s_paths_provider.rs", "rank": 42, "score": 464807.6727086825 }, { "content": "// This function constructs the effective label selector to use, based on\n\n// the specified configuration.\n\nfn prepare_label_selector(selector: &str) -> String {\n\n const BUILT_IN: &str = \"vector.dev/exclude!=true\";\n\n\n\n if selector.is_empty() {\n\n return BUILT_IN.to_string();\n\n }\n\n\n\n format!(\"{},{}\", BUILT_IN, selector)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Config;\n\n\n\n #[test]\n\n fn generate_config() {\n\n crate::test_util::test_generate_config::<Config>();\n\n }\n\n\n\n #[test]\n", "file_path": "src/sources/kubernetes_logs/mod.rs", "rank": 43, "score": 464195.2355437624 }, { "content": "fn insert_if_true(tree: &mut BTreeMap<String, Value>, key: &str, value: bool) {\n\n if value {\n\n tree.insert(key.to_owned(), Value::Boolean(true));\n\n }\n\n}\n", "file_path": "lib/value/src/kind/debug.rs", "rank": 44, "score": 463828.89721338026 }, { "content": "fn annotate_from_container(log: &mut LogEvent, fields_spec: &FieldsSpec, container: &Container) {\n\n for (key, val) in [(&fields_spec.container_image, &container.image)].iter() {\n\n if let Some(val) = val {\n\n log.insert(key.as_str(), val.to_owned());\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use k8s_openapi::api::core::v1::PodIP;\n\n use vector_common::assert_event_data_eq;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_annotate_from_metadata() {\n\n let cases = vec![\n\n (\n\n FieldsSpec::default(),\n", "file_path": "src/sources/kubernetes_logs/pod_metadata_annotator.rs", "rank": 45, "score": 463698.75071132835 }, { "content": "pub fn interpolate(input: &str, secrets: &HashMap<String, String>) -> Result<String, Vec<String>> {\n\n let mut errors = Vec::<String>::new();\n\n let output = COLLECTOR\n\n .replace_all(input, |caps: &Captures<'_>| {\n\n caps.get(1)\n\n .and_then(|b| caps.get(2).map(|k| (b, 
k)))\n\n .map(|(b, k)| secrets.get(&format!(\"{}.{}\", b.as_str(), k.as_str())))\n\n .flatten()\n\n .cloned()\n\n .unwrap_or_else(|| {\n\n errors.push(format!(\n\n \"Unable to find secret replacement for {}.\",\n\n caps.get(0).unwrap().as_str()\n\n ));\n\n \"\".to_string()\n\n })\n\n })\n\n .into_owned();\n\n if errors.is_empty() {\n\n Ok(output)\n\n } else {\n\n Err(errors)\n\n }\n\n}\n\n\n", "file_path": "src/config/loading/secret.rs", "rank": 46, "score": 458700.6395484694 }, { "content": "pub fn init(color: bool, json: bool, levels: &str) {\n\n let _ = BUFFER.set(Mutex::new(Some(Vec::new())));\n\n let fmt_filter = tracing_subscriber::filter::Targets::from_str(levels).expect(\n\n \"logging filter targets were not formatted correctly or did not specify a valid level\",\n\n );\n\n\n\n let metrics_layer = metrics_layer_enabled()\n\n .then(|| MetricsLayer::new().with_filter(tracing_subscriber::filter::LevelFilter::INFO));\n\n\n\n let subscriber = tracing_subscriber::registry()\n\n .with(metrics_layer)\n\n .with(BroadcastLayer::new().with_filter(fmt_filter.clone()));\n\n\n\n #[cfg(feature = \"tokio-console\")]\n\n let subscriber = {\n\n let console_layer = console_subscriber::ConsoleLayer::builder()\n\n .with_default_env()\n\n .spawn();\n\n\n\n subscriber.with(console_layer)\n", "file_path": "src/trace.rs", "rank": 47, "score": 456993.99654464616 }, { "content": "fn line_to_events(mut decoder: Decoder, line: String) -> SmallVec<[Event; 1]> {\n\n let parts = line.splitn(8, ' ').collect::<Vec<&str>>();\n\n\n\n let mut events = SmallVec::<[Event; 1]>::new();\n\n\n\n if parts.len() == 8 {\n\n let timestamp = parts[2];\n\n let hostname = parts[3];\n\n let app_name = parts[4];\n\n let proc_id = parts[5];\n\n let message = parts[7];\n\n\n\n let mut buffer = BytesMut::new();\n\n buffer.put(message.as_bytes());\n\n\n\n loop {\n\n match decoder.decode_eof(&mut buffer) {\n\n Ok(Some((decoded, _byte_size))) => {\n\n for mut event in decoded {\n\n if let Event::Log(ref mut log) = 
event {\n", "file_path": "src/sources/heroku_logs.rs", "rank": 48, "score": 455830.9402279042 }, { "content": "pub fn generate_lines_with_stream<Gen: FnMut(usize) -> String>(\n\n generator: Gen,\n\n count: usize,\n\n batch: Option<Arc<BatchNotifier>>,\n\n) -> (Vec<String>, impl Stream<Item = EventArray>) {\n\n let lines = (0..count).map(generator).collect::<Vec<_>>();\n\n let stream = map_batch_stream(stream::iter(lines.clone()).map(LogEvent::from), batch);\n\n (lines, stream)\n\n}\n\n\n", "file_path": "src/test_util/mod.rs", "rank": 49, "score": 455168.1656261484 }, { "content": "pub fn lines_from_file<P: AsRef<Path>>(path: P) -> Vec<String> {\n\n trace!(message = \"Reading file.\", path = %path.as_ref().display());\n\n let mut file = File::open(path).unwrap();\n\n let mut output = String::new();\n\n file.read_to_string(&mut output).unwrap();\n\n output.lines().map(|s| s.to_owned()).collect()\n\n}\n\n\n", "file_path": "src/test_util/mod.rs", "rank": 50, "score": 454972.71832642256 }, { "content": "fn add_hostname(mut event: LogEvent, host_key: &str, hostname: &Option<String>) -> LogEvent {\n\n if let Some(hostname) = hostname {\n\n event.insert(host_key, hostname.clone());\n\n }\n\n\n\n event\n\n}\n\n\n", "file_path": "src/sources/docker_logs.rs", "rank": 51, "score": 452632.23176547367 }, { "content": "/// Converts a field/facet name to the VRL equivalent. Datadog payloads have a `message` field\n\n/// (which is used whenever the default field is encountered. 
Facets are hosted on .custom.*.\n\npub fn normalize_fields<T: AsRef<str>>(value: T) -> Vec<Field> {\n\n let value = value.as_ref();\n\n if value.eq(grammar::DEFAULT_FIELD) {\n\n return DEFAULT_FIELDS\n\n .iter()\n\n .map(|s| Field::Default((*s).to_owned()))\n\n .collect();\n\n }\n\n\n\n let field = match value.replace('@', \"custom.\") {\n\n v if value.starts_with('@') => Field::Facet(v),\n\n v if DEFAULT_FIELDS.contains(&v.as_ref()) => Field::Default(v),\n\n v if RESERVED_ATTRIBUTES.contains(&v.as_ref()) => Field::Reserved(v),\n\n v => Field::Tag(v),\n\n };\n\n\n\n vec![field]\n\n}\n", "file_path": "lib/datadog/search-syntax/src/field.rs", "rank": 52, "score": 452214.5529885504 }, { "content": "fn expand_globs_inner(inputs: &mut Vec<String>, id: &str, candidates: &IndexSet<String>) {\n\n let raw_inputs = std::mem::take(inputs);\n\n for raw_input in raw_inputs {\n\n let matcher = glob::Pattern::new(&raw_input)\n\n .map(InputMatcher::Pattern)\n\n .unwrap_or_else(|error| {\n\n warn!(message = \"Invalid glob pattern for input.\", component_id = %id, %error);\n\n InputMatcher::String(raw_input.to_string())\n\n });\n\n let mut matched = false;\n\n for input in candidates {\n\n if matcher.matches(input) && input != id {\n\n matched = true;\n\n inputs.push(input.clone())\n\n }\n\n }\n\n // If it didn't work as a glob pattern, leave it in the inputs as-is. 
This lets us give\n\n // more accurate error messages about non-existent inputs.\n\n if !matched {\n\n inputs.push(raw_input)\n", "file_path": "src/config/compiler.rs", "rank": 53, "score": 451260.96381840226 }, { "content": "pub fn lines_from_gzip_file<P: AsRef<Path>>(path: P) -> Vec<String> {\n\n trace!(message = \"Reading gzip file.\", path = %path.as_ref().display());\n\n let mut file = File::open(path).unwrap();\n\n let mut gzip_bytes = Vec::new();\n\n file.read_to_end(&mut gzip_bytes).unwrap();\n\n let mut output = String::new();\n\n MultiGzDecoder::new(&gzip_bytes[..])\n\n .read_to_string(&mut output)\n\n .unwrap();\n\n output.lines().map(|s| s.to_owned()).collect()\n\n}\n\n\n", "file_path": "src/test_util/mod.rs", "rank": 54, "score": 449409.447644851 }, { "content": "fn render_template(s: &str, event: &Event) -> Result<String, TransformError> {\n\n let template = Template::try_from(s).map_err(TransformError::TemplateParseError)?;\n\n template\n\n .render_string(event)\n\n .map_err(TransformError::TemplateRenderingError)\n\n}\n\n\n", "file_path": "src/transforms/log_to_metric.rs", "rank": 55, "score": 446024.6788761929 }, { "content": "fn encode_tags(tags: BTreeMap<String, String>, output: &mut BytesMut) {\n\n let original_len = output.len();\n\n // `tags` is already sorted\n\n for (key, value) in tags {\n\n if key.is_empty() || value.is_empty() {\n\n continue;\n\n }\n\n encode_string(&key, output);\n\n output.put_u8(b'=');\n\n encode_string(&value, output);\n\n output.put_u8(b',');\n\n }\n\n\n\n // remove last ','\n\n if output.len() > original_len {\n\n output.truncate(output.len() - 1);\n\n }\n\n}\n\n\n", "file_path": "src/sinks/influxdb/mod.rs", "rank": 56, "score": 444417.36593261256 }, { "content": "fn source_config(source: &str) -> String {\n\n format!(\n\n r#\"\n\ndata_dir = \"${{VECTOR_DATA_DIR}}\"\n\n\n\n[sources.in]\n\n{}\n\n\n\n[sinks.out]\n\n inputs = [\"in\"]\n\n type = \"blackhole\"\n\n\"#,\n\n source\n\n )\n\n}\n\n\n", "file_path": 
"tests/cli.rs", "rank": 57, "score": 444348.64493843133 }, { "content": "fn source_config(source: &str) -> String {\n\n format!(\n\n r#\"\n\ndata_dir = \"${{VECTOR_DATA_DIR}}\"\n\n\n\n[sources.in]\n\n{}\n\n\n\n[sinks.out]\n\n inputs = [\"in\"]\n\n type = \"blackhole\"\n\n\"#,\n\n source\n\n )\n\n}\n\n\n", "file_path": "tests/shutdown.rs", "rank": 58, "score": 444348.6449384313 }, { "content": "/// Remove credentials from endpoint.\n\n/// URI components: https://docs.mongodb.com/manual/reference/connection-string/#components\n\n/// It's not possible to use [url::Url](https://docs.rs/url/2.1.1/url/struct.Url.html) because connection string can have multiple hosts.\n\n/// Would be nice to serialize [ClientOptions][https://docs.rs/mongodb/1.1.1/mongodb/options/struct.ClientOptions.html] to String, but it's not supported.\n\n/// `endpoint` argument would not be required, but field `original_uri` in `ClieotnOptions` is private.\n\n/// `.unwrap()` in function is safe because endpoint was already verified by `ClientOptions`.\n\n/// Based on ClientOptions::parse_uri -- https://github.com/mongodb/mongo-rust-driver/blob/09e1193f93dcd850ebebb7fb82f6ab786fd85de1/src/client/options/mod.rs#L708\n\nfn sanitize_endpoint(endpoint: &str, options: &ClientOptions) -> String {\n\n let mut endpoint = endpoint.to_owned();\n\n if options.credential.is_some() {\n\n let start = endpoint.find(\"://\").unwrap() + 3;\n\n\n\n // Split `username:password@host[:port]` and `/defaultauthdb?<options>`\n\n let pre_slash = match endpoint[start..].find('/') {\n\n Some(index) => {\n\n let mut segments = endpoint[start..].split_at(index);\n\n // If we have databases and options\n\n if segments.1.len() > 1 {\n\n let lstart = start + segments.0.len() + 1;\n\n let post_slash = &segments.1[1..];\n\n // Split `/defaultauthdb` and `?<options>`\n\n if let Some(index) = post_slash.find('?') {\n\n let segments = post_slash.split_at(index);\n\n // If we have options\n\n if segments.1.len() > 1 {\n\n // Remove 
authentication options\n\n let options = segments.1[1..]\n", "file_path": "src/sources/mongodb_metrics/mod.rs", "rank": 59, "score": 439871.8145739099 }, { "content": "/// Returns a stream of `Vec<(Metric, Vec<Metric>)>`, where `Metric` is the\n\n/// total `component_sent_events_total` metric for a component and `Vec<Metric>`\n\n/// is the `component_sent_events_total` metric split by output\n\npub fn component_sent_events_totals_metrics_with_outputs(\n\n interval: i32,\n\n) -> impl Stream<Item = Vec<(Metric, Vec<Metric>)>> {\n\n let mut cache = BTreeMap::new();\n\n\n\n component_to_filtered_metrics(interval, &|m| m.name() == \"component_sent_events_total\").map(\n\n move |map| {\n\n map.into_iter()\n\n .filter_map(|(id, metrics)| {\n\n let outputs = metrics\n\n .iter()\n\n .filter_map(|m| m.tag_value(\"output\"))\n\n .collect::<HashSet<_>>();\n\n\n\n let metric_by_outputs = outputs\n\n .iter()\n\n .filter_map(|output| {\n\n let m = filter_output_metric(metrics.as_ref(), output.as_ref())?;\n\n match m.value() {\n\n MetricValue::Counter { value }\n", "file_path": "src/api/schema/metrics/filter.rs", "rank": 60, "score": 439637.53880699165 }, { "content": "pub fn open_fixture(path: impl AsRef<Path>) -> crate::Result<serde_json::Value> {\n\n let test_file = match File::open(path) {\n\n Ok(file) => file,\n\n Err(e) => return Err(e.into()),\n\n };\n\n let value: serde_json::Value = serde_json::from_reader(test_file)?;\n\n Ok(value)\n\n}\n\n\n", "file_path": "src/test_util/mod.rs", "rank": 61, "score": 439252.56173112843 }, { "content": "/// Gets a name we can use for roles to prevent them conflicting with other tests.\n\n/// Uses the provided namespace as the root.\n\npub fn get_override_name(namespace: &str, suffix: &str) -> String {\n\n format!(\"{}-{}\", namespace, suffix)\n\n}\n\n\n", "file_path": "lib/k8s-e2e-tests/src/lib.rs", "rank": 62, "score": 438039.6258779428 }, { "content": "pub fn get_namespace_appended(namespace: &str, suffix: &str) -> String {\n\n 
format!(\"{}-{}\", namespace, suffix)\n\n}\n\n\n", "file_path": "lib/k8s-e2e-tests/src/lib.rs", "rank": 63, "score": 438025.0508529366 }, { "content": "/// Iterates over all paths in form `a.b[0].c[1]` in alphabetical order\n\n/// and their corresponding values.\n\npub fn all_fields(fields: &BTreeMap<String, Value>) -> FieldsIter {\n\n FieldsIter::new(fields)\n\n}\n\n\n", "file_path": "lib/vector-core/src/event/util/log/all_fields.rs", "rank": 64, "score": 437316.0510013695 }, { "content": "/// Returns all components\n\npub fn get_components() -> Vec<Component> {\n\n filter_components(|(_component_key, components)| Some(components.clone()))\n\n}\n\n\n", "file_path": "src/api/schema/components/state.rs", "rank": 65, "score": 436119.0064595911 }, { "content": "fn annotate_from_metadata(log: &mut LogEvent, fields_spec: &FieldsSpec, metadata: &ObjectMeta) {\n\n for (key, val) in [\n\n (&fields_spec.pod_name, &metadata.name),\n\n (&fields_spec.pod_namespace, &metadata.namespace),\n\n (&fields_spec.pod_uid, &metadata.uid),\n\n ]\n\n .iter()\n\n {\n\n if let Some(val) = val {\n\n log.insert(key.as_str(), val.to_owned());\n\n }\n\n }\n\n\n\n if let Some(owner_references) = &metadata.owner_references {\n\n log.insert(\n\n fields_spec.pod_owner.as_str(),\n\n format!(\"{}/{}\", owner_references[0].kind, owner_references[0].name),\n\n );\n\n }\n\n\n", "file_path": "src/sources/kubernetes_logs/pod_metadata_annotator.rs", "rank": 66, "score": 435140.5742619048 }, { "content": "fn annotate_from_metadata(log: &mut LogEvent, fields_spec: &FieldsSpec, metadata: &ObjectMeta) {\n\n // Calculate and cache the prefix path.\n\n let prefix_path = parse_path(&fields_spec.namespace_labels);\n\n if let Some(labels) = &metadata.labels {\n\n for (key, val) in labels.iter() {\n\n let mut path = prefix_path.clone().segments;\n\n path.push(OwnedSegment::Field(key.clone()));\n\n log.insert(&path, val.to_owned());\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use 
vector_common::assert_event_data_eq;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_annotate_from_metadata() {\n", "file_path": "src/sources/kubernetes_logs/namespace_metadata_annotator.rs", "rank": 67, "score": 435140.5742619048 }, { "content": "/// grpc doesn't like an address without a scheme, so we default to http or https if one isn't\n\n/// specified in the address.\n\npub fn with_default_scheme(address: &str, tls: bool) -> crate::Result<Uri> {\n\n let uri: Uri = address.parse()?;\n\n if uri.scheme().is_none() {\n\n // Default the scheme to http or https.\n\n let mut parts = uri.into_parts();\n\n\n\n parts.scheme = if tls {\n\n Some(\n\n \"https\"\n\n .parse()\n\n .unwrap_or_else(|_| unreachable!(\"https should be valid\")),\n\n )\n\n } else {\n\n Some(\n\n \"http\"\n\n .parse()\n\n .unwrap_or_else(|_| unreachable!(\"http should be valid\")),\n\n )\n\n };\n\n\n", "file_path": "src/sinks/vector/v2/config.rs", "rank": 68, "score": 433429.81382617564 }, { "content": "/// Returns throughput based on a metric and provided `cache` of previous values\n\nfn throughput(metric: &Metric, id: String, cache: &mut BTreeMap<String, f64>) -> Option<f64> {\n\n match metric.value() {\n\n MetricValue::Counter { value } => {\n\n let last = cache.insert(id, *value).unwrap_or(0.00);\n\n let throughput = value - last;\n\n Some(throughput)\n\n }\n\n _ => None,\n\n }\n\n}\n", "file_path": "src/api/schema/metrics/filter.rs", "rank": 69, "score": 432894.81190998474 }, { "content": "pub fn random_map(max_size: usize, field_len: usize) -> HashMap<String, String> {\n\n let size = thread_rng().gen_range(0..max_size);\n\n\n\n (0..size)\n\n .map(move |_| (random_string(field_len), random_string(field_len)))\n\n .collect()\n\n}\n\n\n", "file_path": "src/test_util/mod.rs", "rank": 70, "score": 432862.6877933844 }, { "content": "fn into_value<'a>(iter: impl IntoIterator<Item = (&'a str, Option<String>)>) -> Value {\n\n iter.into_iter()\n\n .map(|(name, value)| {\n\n (\n\n 
name.to_string(),\n\n value.map(|s| s.into()).unwrap_or(Value::Null),\n\n )\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "lib/vrl/stdlib/src/parse_user_agent.rs", "rank": 71, "score": 432167.37892713025 }, { "content": "/// Returns a stream of `Metric`s, collected at the provided millisecond interval.\n\npub fn get_metrics(interval: i32) -> impl Stream<Item = Metric> {\n\n let controller = get_controller();\n\n let mut interval = tokio::time::interval(Duration::from_millis(interval as u64));\n\n\n\n stream! {\n\n loop {\n\n interval.tick().await;\n\n for m in controller.capture_metrics() {\n\n yield m;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/api/schema/metrics/filter.rs", "rank": 72, "score": 431297.7864057728 }, { "content": "#[allow(clippy::needless_pass_by_value)] // impl Path is always a reference\n\npub fn remove<'a>(value: &mut Value, path: impl Path<'a>, prune: bool) -> Option<Value> {\n\n let path_iter = path.segment_iter().peekable();\n\n remove_rec(value, path_iter, prune).map(|(value, _)| value)\n\n}\n\n\n", "file_path": "lib/vector-core/src/event/util/log/remove.rs", "rank": 73, "score": 431182.0227331695 }, { "content": "/// Parses the field_delimiter between the key/value pairs, ignoring surrounding spaces\n\nfn parse_field_delimiter<'a>(field_delimiter: &'a str) -> impl Fn(&'a str) -> SResult<&'a str> {\n\n move |input| {\n\n if field_delimiter == \" \" {\n\n space1(input)\n\n } else {\n\n preceded(space0, tag(field_delimiter))(input)\n\n }\n\n }\n\n}\n\n\n", "file_path": "lib/datadog/grok/src/filters/keyvalue.rs", "rank": 74, "score": 428124.4145215364 }, { "content": "/// Retrns a `Matcher` that returns true if the log event resolves to an array of strings,\n\n/// where at least one string matches the provided `func`.\n\nfn any_string_match<S, F>(field: S, func: F) -> Box<dyn Matcher<LogEvent>>\n\nwhere\n\n S: Into<String>,\n\n F: Fn(Cow<str>) -> bool + Send + Sync + Clone + 'static,\n\n{\n\n any_match(field, move |value| {\n\n let 
bytes = value.coerce_to_bytes();\n\n func(String::from_utf8_lossy(&bytes))\n\n })\n\n}\n\n\n\n//------------------------------------------------------------------------------\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use datadog_filter::{build_matcher, Filter, Resolver};\n\n use datadog_search_syntax::parse;\n\n use serde_json::json;\n\n use vector_core::event::Event;\n\n\n", "file_path": "src/conditions/datadog_search.rs", "rank": 75, "score": 427564.8910388919 }, { "content": "/// Returns a `Matcher` that returns true if the log event resolves to a string which\n\n/// matches the provided `func`.\n\nfn string_match<S, F>(field: S, func: F) -> Box<dyn Matcher<LogEvent>>\n\nwhere\n\n S: Into<String>,\n\n F: Fn(Cow<str>) -> bool + Send + Sync + Clone + 'static,\n\n{\n\n let field = field.into();\n\n\n\n Run::boxed(move |log: &LogEvent| match log.get(field.as_str()) {\n\n Some(Value::Bytes(v)) => func(String::from_utf8_lossy(v)),\n\n _ => false,\n\n })\n\n}\n\n\n", "file_path": "src/conditions/datadog_search.rs", "rank": 76, "score": 427564.34297225036 }, { "content": "fn assert_no_log_lines(output: Vec<u8>) {\n\n let output = String::from_utf8(output).expect(\"Vector output isn't a valid utf8 string\");\n\n\n\n // Assert there are no lines with keywords\n\n let keywords = [\"ERROR\", \"WARN\", \"INFO\", \"DEBUG\", \"TRACE\"];\n\n for line in output.lines() {\n\n let present = keywords.iter().any(|word| line.contains(word));\n\n assert!(!present, \"Log detected in output line: {:?}\", line);\n\n }\n\n}\n\n\n", "file_path": "tests/cli.rs", "rank": 77, "score": 426181.48099384544 }, { "content": "/// To avoid collisions between `output` metric tags, check that a component\n\n/// does not have a named output with the name [`DEFAULT_OUTPUT`]\n\npub fn check_outputs(config: &ConfigBuilder) -> Result<(), Vec<String>> {\n\n let mut errors = Vec::new();\n\n for (key, source) in config.sources.iter() {\n\n let outputs = source.inner.outputs();\n\n if outputs\n\n .iter()\n\n 
.map(|output| output.port.as_deref().unwrap_or(\"\"))\n\n .any(|name| name == DEFAULT_OUTPUT)\n\n {\n\n errors.push(format!(\n\n \"Source {key} cannot have a named output with reserved name: `{DEFAULT_OUTPUT}`\"\n\n ));\n\n }\n\n }\n\n\n\n for (key, transform) in config.transforms.iter() {\n\n let definition = schema::Definition::empty();\n\n if let Err(errs) = transform.inner.validate(&definition) {\n\n errors.extend(errs.into_iter().map(|msg| format!(\"Transform {key} {msg}\")));\n\n }\n", "file_path": "src/config/validation.rs", "rank": 78, "score": 424798.2082672902 }, { "content": "/// Returns the throughput of the 'component_sent_events_total' metric, sampled over `interval` milliseconds,\n\n/// for each component. Within a particular component, throughput per output stream is also included.\n\npub fn component_sent_events_total_throughputs_with_outputs(\n\n interval: i32,\n\n) -> impl Stream<Item = Vec<(ComponentKey, i64, Vec<OutputThroughput>)>> {\n\n let mut cache = BTreeMap::new();\n\n\n\n component_to_filtered_metrics(interval, &|m| m.name() == \"component_sent_events_total\")\n\n .map(move |map| {\n\n map.into_iter()\n\n .filter_map(|(id, metrics)| {\n\n let outputs = metrics\n\n .iter()\n\n .filter_map(|m| m.tag_value(\"output\"))\n\n .collect::<HashSet<_>>();\n\n\n\n let throughput_by_outputs = outputs\n\n .iter()\n\n .filter_map(|output| {\n\n let m = filter_output_metric(metrics.as_ref(), output.as_ref())?;\n\n let throughput =\n\n throughput(&m, format!(\"{}.{}\", id, output), &mut cache)?;\n", "file_path": "src/api/schema/metrics/filter.rs", "rank": 79, "score": 424625.10909653804 }, { "content": "/// Parse the string represented in the specified format.\n\n/// If the format is unknown - fallback to the default format and attempt\n\n/// parsing using that.\n\npub fn deserialize<T>(content: &str, format: Format) -> Result<T, Vec<String>>\n\nwhere\n\n T: de::DeserializeOwned,\n\n{\n\n match format {\n\n Format::Toml => 
toml::from_str(content).map_err(|e| vec![e.to_string()]),\n\n Format::Yaml => serde_yaml::from_str(content).map_err(|e| vec![e.to_string()]),\n\n Format::Json => serde_json::from_str(content).map_err(|e| vec![e.to_string()]),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n /// This test ensures the logic to guess file format from the file path\n\n /// works correctly.\n\n /// Like all other tests, it also demonstrates various cases and how our\n\n /// code behaves when it encounters them.\n\n #[test]\n", "file_path": "src/config/format.rs", "rank": 80, "score": 424484.45363157056 }, { "content": "/// Gets a component by output_id\n\npub fn component_by_output_id(output_id: &OutputId) -> Option<Component> {\n\n filter_components(|(key, component)| {\n\n if key == &output_id.component {\n\n Some(component.clone())\n\n } else {\n\n None\n\n }\n\n })\n\n .pop()\n\n}\n\n\n", "file_path": "src/api/schema/components/state.rs", "rank": 81, "score": 424218.7684232974 }, { "content": "/// This function returns the default value for `self_node_name` variable\n\n/// as it should be at the generated config file.\n\nfn default_self_node_name_env_template() -> String {\n\n format!(\"${{{}}}\", SELF_NODE_NAME_ENV_KEY.to_owned())\n\n}\n\n\n", "file_path": "src/sources/kubernetes_logs/mod.rs", "rank": 82, "score": 423865.65984734765 }, { "content": "pub fn parse(packet: &str) -> Result<Metric, ParseError> {\n\n // https://docs.datadoghq.com/developers/dogstatsd/datagram_shell/#datagram-format\n\n let key_and_body = packet.splitn(2, ':').collect::<Vec<_>>();\n\n if key_and_body.len() != 2 {\n\n return Err(ParseError::Malformed(\n\n \"should be key and body with ':' separator\",\n\n ));\n\n }\n\n let (key, body) = (key_and_body[0], key_and_body[1]);\n\n\n\n let parts = body.split('|').collect::<Vec<_>>();\n\n if parts.len() < 2 {\n\n return Err(ParseError::Malformed(\n\n \"body should have at least two pipe separated components\",\n\n ));\n\n }\n\n\n\n let name 
= sanitize_key(key);\n\n let metric_type = parts[1];\n\n\n", "file_path": "src/sources/statsd/parser.rs", "rank": 83, "score": 423532.3266424499 }, { "content": "pub fn random_string(len: usize) -> String {\n\n thread_rng()\n\n .sample_iter(&Alphanumeric)\n\n .take(len)\n\n .map(char::from)\n\n .collect::<String>()\n\n}\n\n\n", "file_path": "src/test_util/mod.rs", "rank": 84, "score": 421372.5045103633 }, { "content": "pub fn random_updated_events_with_stream<F>(\n\n len: usize,\n\n count: usize,\n\n batch: Option<Arc<BatchNotifier>>,\n\n update_fn: F,\n\n) -> (Vec<Event>, impl Stream<Item = EventArray>)\n\nwhere\n\n F: Fn((usize, Event)) -> Event,\n\n{\n\n let events = (0..count)\n\n .map(|_| Event::from(random_string(len)))\n\n .enumerate()\n\n .map(update_fn)\n\n .collect::<Vec<_>>();\n\n let stream = map_batch_stream(\n\n stream::iter(events.clone()).map(|event| event.into_log()),\n\n batch,\n\n );\n\n (events, stream)\n\n}\n\n\n", "file_path": "src/test_util/mod.rs", "rank": 85, "score": 420602.32240034034 }, { "content": "/// Shared logic for testing parsers.\n\n///\n\n/// Takes a parser builder and a list of test cases.\n\npub fn test_parser<B, L, S>(builder: B, loader: L, cases: Vec<(S, Vec<LogEvent>)>)\n\nwhere\n\n B: Fn() -> Transform,\n\n L: Fn(S) -> Event,\n\n{\n\n for (message, expected) in cases {\n\n let input = loader(message);\n\n let mut parser = (builder)();\n\n let parser = parser.as_function();\n\n\n\n let mut output = OutputBuffer::default();\n\n parser.transform(&mut output, input);\n\n\n\n let expected = expected.into_iter().map(Event::Log).collect::<Vec<_>>();\n\n\n\n vector_common::assert_event_data_eq!(output, expected, \"expected left, actual right\");\n\n }\n\n}\n", "file_path": "src/sources/kubernetes_logs/parser/test_util.rs", "rank": 86, "score": 420595.6220616614 }, { "content": "// This function constructs the patterns we exclude from file watching, created\n\n// from the defaults or user provided configuration.\n\nfn 
prepare_exclude_paths(config: &Config) -> crate::Result<Vec<glob::Pattern>> {\n\n let exclude_paths = config\n\n .exclude_paths_glob_patterns\n\n .iter()\n\n .map(|pattern| {\n\n let pattern = pattern\n\n .to_str()\n\n .ok_or(\"glob pattern is not a valid UTF-8 string\")?;\n\n Ok(glob::Pattern::new(pattern)?)\n\n })\n\n .collect::<crate::Result<Vec<_>>>()?;\n\n\n\n info!(\n\n message = \"Excluding matching files.\",\n\n exclude_paths = ?exclude_paths\n\n .iter()\n\n .map(glob::Pattern::as_str)\n\n .collect::<Vec<_>>()\n\n );\n\n\n\n Ok(exclude_paths)\n\n}\n\n\n", "file_path": "src/sources/kubernetes_logs/mod.rs", "rank": 87, "score": 419417.42083657044 }, { "content": "fn real_glob(pattern: &str) -> impl Iterator<Item = PathBuf> {\n\n glob::glob_with(\n\n pattern,\n\n glob::MatchOptions {\n\n require_literal_separator: true,\n\n ..Default::default()\n\n },\n\n )\n\n .expect(\"the pattern is supposed to always be correct\")\n\n .flat_map(|paths| paths.into_iter())\n\n}\n\n\n", "file_path": "src/sources/kubernetes_logs/k8s_paths_provider.rs", "rank": 88, "score": 419141.45674992213 }, { "content": "pub fn make_test_container<'a>(name: &'a str, command: &'a str) -> Container {\n\n Container {\n\n name: name.to_owned(),\n\n image: Some(BUSYBOX_IMAGE.to_owned()),\n\n command: Some(vec![\"sh\".to_owned()]),\n\n args: Some(vec![\"-c\".to_owned(), command.to_owned()]),\n\n ..Container::default()\n\n }\n\n}\n\n\n", "file_path": "lib/k8s-e2e-tests/src/lib.rs", "rank": 89, "score": 418496.28366091725 }, { "content": "#[inline]\n\nfn parse_delimited<'a>(field_delimiter: &'a str) -> impl Fn(&'a str) -> SResult<&'a str> {\n\n move |input| map(alt((take_until(field_delimiter), rest)), |s: &str| s.trim())(input)\n\n}\n\n\n", "file_path": "lib/datadog/grok/src/filters/keyvalue.rs", "rank": 90, "score": 415956.2480091401 }, { "content": "/// Takes a test name and a future, and uses `rusty_fork` to perform a cross-platform\n\n/// process fork. 
This allows us to test functionality without conflicting with global\n\n/// state that may have been set/mutated from previous tests\n\nfn fork_test<T: std::future::Future<Output = ()>>(test_name: &'static str, fut: T) {\n\n let fork_id = rusty_fork::rusty_fork_id!();\n\n\n\n rusty_fork::fork(\n\n test_name,\n\n fork_id,\n\n |_| {},\n\n |child, f| {\n\n let status = child.wait().expect(\"Couldn't wait for child process\");\n\n\n\n // Copy all output\n\n let mut stdout = io::stdout();\n\n io::copy(f, &mut stdout).expect(\"Couldn't write to stdout\");\n\n\n\n // If the test failed, panic on the parent thread\n\n if !status.success() {\n\n panic!(\"Test failed\");\n\n }\n\n },\n\n || {\n", "file_path": "tests/api.rs", "rank": 91, "score": 411102.2929684083 }, { "content": "pub trait TcpSource: Clone + Send + Sync + 'static\n\nwhere\n\n <<Self as TcpSource>::Decoder as tokio_util::codec::Decoder>::Item: std::marker::Send,\n\n{\n\n // Should be default: `std::io::Error`.\n\n // Right now this is unstable: https://github.com/rust-lang/rust/issues/29661\n\n type Error: From<io::Error>\n\n + StreamDecodingError\n\n + std::fmt::Debug\n\n + std::fmt::Display\n\n + Send\n\n + Unpin;\n\n type Item: Into<SmallVec<[Event; 1]>> + Send + Unpin;\n\n type Decoder: Decoder<Item = (Self::Item, usize), Error = Self::Error> + Send + 'static;\n\n type Acker: TcpSourceAcker + Send;\n\n\n\n fn decoder(&self) -> Self::Decoder;\n\n\n\n fn handle_events(&self, _events: &mut [Event], _host: std::net::SocketAddr) {}\n\n\n", "file_path": "src/sources/util/tcp/mod.rs", "rank": 92, "score": 410360.632720663 }, { "content": "fn emit_received() -> impl Filter<Extract = (), Error = warp::reject::Rejection> + Clone {\n\n warp::any()\n\n .and(warp::header::optional(\"X-Amz-Firehose-Request-Id\"))\n\n .and(warp::header::optional(\"X-Amz-Firehose-Source-Arn\"))\n\n .map(|request_id: Option<String>, source_arn: Option<String>| {\n\n emit!(AwsKinesisFirehoseRequestReceived {\n\n request_id: 
request_id.as_deref(),\n\n source_arn: source_arn.as_deref(),\n\n });\n\n })\n\n .untuple_one()\n\n}\n\n\n", "file_path": "src/sources/aws_kinesis_firehose/filters.rs", "rank": 93, "score": 410007.5673054637 }, { "content": "fn http_access_log_lines() -> impl Iterator<Item = String> {\n\n let mut rng = SmallRng::from_rng(thread_rng()).unwrap();\n\n let code = Uniform::from(200..600);\n\n let year = Uniform::from(2010..2020);\n\n let mday = Uniform::from(1..32);\n\n let hour = Uniform::from(0..24);\n\n let minsec = Uniform::from(0..60);\n\n let size = Uniform::from(10..60); // FIXME\n\n\n\n std::iter::repeat(()).map(move |_| {\n\n let url_size = size.sample(&mut rng);\n\n let browser_size = size.sample(&mut rng);\n\n format!(\"{}.{}.{}.{} - - [{}/Jun/{}:{}:{}:{} -0400] \\\"GET /{} HTTP/1.1\\\" {} {} \\\"-\\\" \\\"Mozilla/5.0 ({})\\\"\",\n\n rng.gen::<u8>(), rng.gen::<u8>(), rng.gen::<u8>(), rng.gen::<u8>(), // IP\n\n year.sample(&mut rng), mday.sample(&mut rng), // date\n\n hour.sample(&mut rng), minsec.sample(&mut rng), minsec.sample(&mut rng), // time\n\n (&mut rng).sample_iter(&Alphanumeric).take(url_size).map(char::from).collect::<String>(), // URL\n\n code.sample(&mut rng), size.sample(&mut rng),\n\n (&mut rng).sample_iter(&Alphanumeric).take(browser_size).map(char::from).collect::<String>(),\n\n )\n", "file_path": "benches/regex.rs", "rank": 94, "score": 408709.5967981749 }, { "content": "pub fn parse(input: &str) -> Vec<&str> {\n\n let simple = is_not::<_, _, (&str, ErrorKind)>(\" \\t[\\\"\");\n\n let string = delimited(\n\n tag(\"\\\"\"),\n\n map(opt(escaped(is_not(\"\\\"\\\\\"), '\\\\', one_of(\"\\\"\\\\\"))), |o| {\n\n o.unwrap_or(\"\")\n\n }),\n\n tag(\"\\\"\"),\n\n );\n\n let bracket = delimited(\n\n tag(\"[\"),\n\n map(opt(escaped(is_not(\"]\\\\\"), '\\\\', one_of(\"]\\\\\"))), |o| {\n\n o.unwrap_or(\"\")\n\n }),\n\n tag(\"]\"),\n\n );\n\n\n\n // fall back to returning the rest of the input, if any\n\n let remainder = verify(rest, |s: &str| 
!s.is_empty());\n\n let field = alt((bracket, string, simple, remainder));\n", "file_path": "lib/vector-common/src/tokenize.rs", "rank": 95, "score": 408166.760200088 }, { "content": "#[cfg(test)]\n\npub fn reset_early_buffer() -> Option<Vec<LogEvent>> {\n\n get_early_buffer().replace(Vec::new())\n\n}\n\n\n", "file_path": "src/trace.rs", "rank": 96, "score": 407945.76633986656 }, { "content": "/// Creates a file with given content\n\npub fn create_file(config: &str) -> PathBuf {\n\n let path = temp_file();\n\n overwrite_file(path.clone(), config);\n\n path\n\n}\n\n\n", "file_path": "tests/support/mod.rs", "rank": 97, "score": 406783.9874198957 }, { "content": "/// An iterator with a single \"message\" element\n\npub fn all_fields_non_object_root(value: &Value) -> FieldsIter {\n\n FieldsIter::non_object(value)\n\n}\n\n\n", "file_path": "lib/vector-core/src/event/util/log/all_fields.rs", "rank": 98, "score": 406695.54091100703 }, { "content": "fn parse_number<'a>(field_delimiter: &'a str) -> impl Fn(&'a str) -> SResult<Value> {\n\n move |input| {\n\n map(\n\n terminated(\n\n double,\n\n peek(alt((\n\n parse_field_delimiter(field_delimiter),\n\n parse_end_of_input(),\n\n ))),\n\n ),\n\n |v| {\n\n if ((v as i64) as f64 - v).abs() == 0.0 {\n\n // can be safely converted to Integer without precision loss\n\n Value::Integer(v as i64)\n\n } else {\n\n Value::Float(NotNan::new(v).expect(\"not a float\"))\n\n }\n\n },\n\n )(input)\n\n .map_err(|e| match e {\n\n // double might return Failure(an unrecoverable error) - make it recoverable\n\n nom::Err::Failure(_) => nom::Err::Error((input, nom::error::ErrorKind::Float)),\n\n e => e,\n\n })\n\n }\n\n}\n\n\n", "file_path": "lib/datadog/grok/src/filters/keyvalue.rs", "rank": 99, "score": 404294.62252526614 } ]
Rust
src/dma/ifc.rs
lucab/efm32hg-pac
ca4230132f7ce087da064f6d7d613e262fef8e62
#[doc = "Writer for register IFC"] pub type W = crate::W<u32, super::IFC>; #[doc = "Register IFC `reset()`'s with value 0"] impl crate::ResetValue for super::IFC { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Write proxy for field `CH0DONE`"] pub struct CH0DONE_W<'a> { w: &'a mut W, } impl<'a> CH0DONE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Write proxy for field `CH1DONE`"] pub struct CH1DONE_W<'a> { w: &'a mut W, } impl<'a> CH1DONE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Write proxy for field `CH2DONE`"] pub struct CH2DONE_W<'a> { w: &'a mut W, } impl<'a> CH2DONE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Write proxy for field `CH3DONE`"] pub struct CH3DONE_W<'a> { w: &'a mut W, } impl<'a> CH3DONE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field 
bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Write proxy for field `CH4DONE`"] pub struct CH4DONE_W<'a> { w: &'a mut W, } impl<'a> CH4DONE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Write proxy for field `CH5DONE`"] pub struct CH5DONE_W<'a> { w: &'a mut W, } impl<'a> CH5DONE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5); self.w } } #[doc = "Write proxy for field `ERR`"] pub struct ERR_W<'a> { w: &'a mut W, } impl<'a> ERR_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl W { #[doc = "Bit 0 - DMA Channel 0 Complete Interrupt Flag Clear"] #[inline(always)] pub fn ch0done(&mut self) -> CH0DONE_W { CH0DONE_W { w: self } } #[doc = "Bit 1 - DMA Channel 1 Complete 
Interrupt Flag Clear"] #[inline(always)] pub fn ch1done(&mut self) -> CH1DONE_W { CH1DONE_W { w: self } } #[doc = "Bit 2 - DMA Channel 2 Complete Interrupt Flag Clear"] #[inline(always)] pub fn ch2done(&mut self) -> CH2DONE_W { CH2DONE_W { w: self } } #[doc = "Bit 3 - DMA Channel 3 Complete Interrupt Flag Clear"] #[inline(always)] pub fn ch3done(&mut self) -> CH3DONE_W { CH3DONE_W { w: self } } #[doc = "Bit 4 - DMA Channel 4 Complete Interrupt Flag Clear"] #[inline(always)] pub fn ch4done(&mut self) -> CH4DONE_W { CH4DONE_W { w: self } } #[doc = "Bit 5 - DMA Channel 5 Complete Interrupt Flag Clear"] #[inline(always)] pub fn ch5done(&mut self) -> CH5DONE_W { CH5DONE_W { w: self } } #[doc = "Bit 31 - DMA Error Interrupt Flag Clear"] #[inline(always)] pub fn err(&mut self) -> ERR_W { ERR_W { w: self } } }
#[doc = "Writer for register IFC"] pub type W = crate::W<u32, super::IFC>; #[doc = "Register IFC `reset()`'s with value 0"] impl crate::ResetValue for super::IFC { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Write proxy for field `CH0DONE`"] pub struct CH0DONE_W<'a> { w: &'a mut W, } impl<'a> CH0DONE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((valu
raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Write proxy for field `CH4DONE`"] pub struct CH4DONE_W<'a> { w: &'a mut W, } impl<'a> CH4DONE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Write proxy for field `CH5DONE`"] pub struct CH5DONE_W<'a> { w: &'a mut W, } impl<'a> CH5DONE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5); self.w } } #[doc = "Write proxy for field `ERR`"] pub struct ERR_W<'a> { w: &'a mut W, } impl<'a> ERR_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl W { #[doc = "Bit 0 - DMA Channel 0 Complete Interrupt Flag Clear"] #[inline(always)] pub fn ch0done(&mut self) -> CH0DONE_W { CH0DONE_W { w: self } } #[doc = "Bit 1 - DMA Channel 1 Complete Interrupt Flag Clear"] #[inline(always)] pub fn ch1done(&mut self) -> CH1DONE_W { CH1DONE_W { w: self } 
} #[doc = "Bit 2 - DMA Channel 2 Complete Interrupt Flag Clear"] #[inline(always)] pub fn ch2done(&mut self) -> CH2DONE_W { CH2DONE_W { w: self } } #[doc = "Bit 3 - DMA Channel 3 Complete Interrupt Flag Clear"] #[inline(always)] pub fn ch3done(&mut self) -> CH3DONE_W { CH3DONE_W { w: self } } #[doc = "Bit 4 - DMA Channel 4 Complete Interrupt Flag Clear"] #[inline(always)] pub fn ch4done(&mut self) -> CH4DONE_W { CH4DONE_W { w: self } } #[doc = "Bit 5 - DMA Channel 5 Complete Interrupt Flag Clear"] #[inline(always)] pub fn ch5done(&mut self) -> CH5DONE_W { CH5DONE_W { w: self } } #[doc = "Bit 31 - DMA Error Interrupt Flag Clear"] #[inline(always)] pub fn err(&mut self) -> ERR_W { ERR_W { w: self } } }
e as u32) & 0x01); self.w } } #[doc = "Write proxy for field `CH1DONE`"] pub struct CH1DONE_W<'a> { w: &'a mut W, } impl<'a> CH1DONE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Write proxy for field `CH2DONE`"] pub struct CH2DONE_W<'a> { w: &'a mut W, } impl<'a> CH2DONE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Write proxy for field `CH3DONE`"] pub struct CH3DONE_W<'a> { w: &'a mut W, } impl<'a> CH3DONE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "src/generic.rs", "rank": 0, "score": 154756.45537591208 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "build.rs", "rank": 1, "score": 69025.14900813346 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "src/generic.rs", "rank": 2, "score": 61088.910934378815 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "src/generic.rs", "rank": 3, "score": 61077.6467886195 }, { "content": "#[doc = 
\"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `EXT`\"]\n\npub struct EXT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> EXT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u16) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0xffff) | ((value as u32) & 0xffff);\n\n self.w\n", "file_path": "src/gpio/ifc.rs", "rank": 5, "score": 51979.36064778866 }, { "content": "#[doc = \"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/timer0/ifc.rs", "rank": 6, "score": 51975.84001476406 }, { "content": "#[doc = \"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/timer1/ifc.rs", "rank": 7, "score": 51975.84001476406 }, { "content": "#[doc = \"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC 
`reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/timer2/ifc.rs", "rank": 8, "score": 51975.84001476406 }, { "content": "#[doc = \"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/rtc/ifc.rs", "rank": 9, "score": 51975.84001476406 }, { "content": "#[doc = \"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `EDGE`\"]\n\npub struct EDGE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> EDGE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/vcmp/ifc.rs", "rank": 10, "score": 51974.77622789976 }, { "content": "#[doc = \"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = 
\"Write proxy for field `TXC`\"]\n\npub struct TXC_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXC_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/leuart0/ifc.rs", "rank": 11, "score": 51974.77622789976 }, { "content": "#[doc = \"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `EDGE`\"]\n\npub struct EDGE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> EDGE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/acmp0/ifc.rs", "rank": 12, "score": 51974.77622789976 }, { "content": "#[doc = \"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `DONE`\"]\n\npub struct DONE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DONE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/aes/ifc.rs", "rank": 13, "score": 51974.77622789976 }, { "content": "#[doc = \"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `TXC`\"]\n\npub struct TXC_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXC_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n 
pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/usart1/ifc.rs", "rank": 14, "score": 51974.77622789976 }, { "content": "#[doc = \"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `HFRCORDY`\"]\n\npub struct HFRCORDY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> HFRCORDY_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/cmu/ifc.rs", "rank": 15, "score": 51974.77622789976 }, { "content": "#[doc = \"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `START`\"]\n\npub struct START_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> START_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/i2c0/ifc.rs", "rank": 16, "score": 51974.77622789976 }, { "content": "#[doc = \"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `TXC`\"]\n\npub struct TXC_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXC_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/usart0/ifc.rs", "rank": 17, "score": 51974.77622789976 }, { "content": 
"#[doc = \"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `SINGLE`\"]\n\npub struct SINGLE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SINGLE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/adc0/ifc.rs", "rank": 18, "score": 51974.77622789976 }, { "content": "#[doc = \"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ERASE`\"]\n\npub struct ERASE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ERASE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/msc/ifc.rs", "rank": 19, "score": 51974.77622789976 }, { "content": "#[doc = \"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `UF`\"]\n\npub struct UF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> UF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/pcnt0/ifc.rs", "rank": 20, "score": 51974.77622789976 }, { "content": "#[doc = \"Writer for register IFC\"]\n\npub type W = crate::W<u32, super::IFC>;\n\n#[doc = \"Register IFC `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for 
super::IFC {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `VREGOSH`\"]\n\npub struct VREGOSH_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> VREGOSH_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/usb/ifc.rs", "rank": 21, "score": 51974.77622789976 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `WRITE`\"]\n\npub struct WRITE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> WRITE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/msc/ifc.rs", "rank": 22, "score": 51956.91766127076 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `OF`\"]\n\npub struct OF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/pcnt0/ifc.rs", "rank": 23, "score": 51956.25746930182 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 
11);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CCF`\"]\n\npub struct CCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/usart1/ifc.rs", "rank": 24, "score": 51956.25746930182 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CCF`\"]\n\npub struct CCF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CCF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/usart0/ifc.rs", "rank": 25, "score": 51956.25746930182 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `TXOF`\"]\n\npub struct TXOF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXOF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/i2c0/ifc.rs", "rank": 26, "score": 51956.25746930182 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write 
proxy for field `RXOF`\"]\n\npub struct RXOF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXOF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/usart0/ifc.rs", "rank": 27, "score": 51955.89346306214 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ADDR`\"]\n\npub struct ADDR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADDR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/i2c0/ifc.rs", "rank": 28, "score": 51955.89346306214 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `DIRCNG`\"]\n\npub struct DIRCNG_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DIRCNG_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/pcnt0/ifc.rs", "rank": 29, "score": 51955.89346306214 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CC0`\"]\n\npub struct CC0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CC0_W<'a> {\n\n #[doc = r\"Sets the field 
bit\"]\n", "file_path": "src/timer2/ifc.rs", "rank": 30, "score": 51955.89346306214 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CHOF`\"]\n\npub struct CHOF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CHOF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/msc/ifc.rs", "rank": 31, "score": 51955.89346306214 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `LFRCORDY`\"]\n\npub struct LFRCORDY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LFRCORDY_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/cmu/ifc.rs", "rank": 33, "score": 51955.89346306214 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `BITO`\"]\n\npub struct BITO_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BITO_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/i2c0/ifc.rs", "rank": 34, "score": 51955.89346306214 }, { "content": " self.bit(true)\n\n 
}\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `SINGLEOF`\"]\n\npub struct SINGLEOF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SINGLEOF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/adc0/ifc.rs", "rank": 35, "score": 51955.89346306214 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `RXUF`\"]\n\npub struct RXUF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXUF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/leuart0/ifc.rs", "rank": 36, "score": 51955.89346306214 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CC0`\"]\n\npub struct CC0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CC0_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/timer1/ifc.rs", "rank": 37, "score": 51955.89346306214 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n 
self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CC0`\"]\n\npub struct CC0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CC0_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/timer0/ifc.rs", "rank": 38, "score": 51955.89346306214 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `COMP1`\"]\n\npub struct COMP1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP1_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/rtc/ifc.rs", "rank": 39, "score": 51955.89346306214 }, { "content": " self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `RXOF`\"]\n\npub struct RXOF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXOF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/usart1/ifc.rs", "rank": 40, "score": 51955.89346306214 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field 
`BUSHOLD`\"]\n\npub struct BUSHOLD_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BUSHOLD_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "src/i2c0/ifc.rs", "rank": 41, "score": 51955.802008132625 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `SSM`\"]\n\npub struct SSM_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SSM_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "src/usart1/ifc.rs", "rank": 42, "score": 51955.802008132625 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `SSM`\"]\n\npub struct SSM_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SSM_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "src/usart0/ifc.rs", "rank": 43, "score": 51955.802008132625 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field 
`RXFULL`\"]\n\npub struct RXFULL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXFULL_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/usart0/ifc.rs", "rank": 44, "score": 51955.46715525851 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `WARMUP`\"]\n\npub struct WARMUP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> WARMUP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/vcmp/ifc.rs", "rank": 45, "score": 51955.46715525851 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `COMP0`\"]\n\npub struct COMP0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP0_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/rtc/ifc.rs", "rank": 46, "score": 51955.46715525851 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `VREGOSL`\"]\n\npub struct VREGOSL_W<'a> {\n\n w: 
&'a mut W,\n\n}\n\nimpl<'a> VREGOSL_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/usb/ifc.rs", "rank": 48, "score": 51955.46715525851 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `HFXORDY`\"]\n\npub struct HFXORDY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> HFXORDY_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/cmu/ifc.rs", "rank": 49, "score": 51955.46715525851 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `UF`\"]\n\npub struct UF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> UF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/timer0/ifc.rs", "rank": 50, "score": 51955.46715525851 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `UF`\"]\n\npub struct UF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> UF_W<'a> {\n\n #[doc = r\"Sets the 
field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/timer2/ifc.rs", "rank": 51, "score": 51955.46715525851 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `SCAN`\"]\n\npub struct SCAN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SCAN_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/adc0/ifc.rs", "rank": 52, "score": 51955.46715525851 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `RXUF`\"]\n\npub struct RXUF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXUF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/i2c0/ifc.rs", "rank": 53, "score": 51955.46715525851 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `RXOF`\"]\n\npub struct RXOF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXOF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn 
set_bit(self) -> &'a mut W {\n", "file_path": "src/leuart0/ifc.rs", "rank": 54, "score": 51955.46715525851 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `RSTART`\"]\n\npub struct RSTART_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RSTART_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/i2c0/ifc.rs", "rank": 55, "score": 51955.46715525851 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `WARMUP`\"]\n\npub struct WARMUP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> WARMUP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/acmp0/ifc.rs", "rank": 56, "score": 51955.46715525851 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `UF`\"]\n\npub struct UF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> UF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": 
"src/timer1/ifc.rs", "rank": 57, "score": 51955.46715525851 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `RXFULL`\"]\n\npub struct RXFULL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RXFULL_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n", "file_path": "src/usart1/ifc.rs", "rank": 58, "score": 51955.46715525851 }, { "content": "#[doc = \"Write proxy for field `TXUF`\"]\n\npub struct TXUF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXUF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n", "file_path": "src/usart0/ifc.rs", "rank": 59, "score": 51955.03735424423 }, { "content": "#[doc = \"Write proxy for field `CALRDY`\"]\n\npub struct CALRDY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CALRDY_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n", 
"file_path": "src/cmu/ifc.rs", "rank": 60, "score": 51955.03735424423 }, { "content": "#[doc = \"Write proxy for field `ICBOF0`\"]\n\npub struct ICBOF0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ICBOF0_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n", "file_path": "src/timer1/ifc.rs", "rank": 61, "score": 51955.03735424423 }, { "content": "#[doc = \"Write proxy for field `TXUF`\"]\n\npub struct TXUF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXUF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n", "file_path": "src/usart1/ifc.rs", "rank": 62, "score": 51955.03735424423 }, { "content": "#[doc = \"Write proxy for field `NACK`\"]\n\npub struct NACK_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> NACK_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 
0x01) << 7);\n\n self.w\n", "file_path": "src/i2c0/ifc.rs", "rank": 64, "score": 51955.03735424423 }, { "content": "#[doc = \"Write proxy for field `ICBOF0`\"]\n\npub struct ICBOF0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ICBOF0_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n", "file_path": "src/timer0/ifc.rs", "rank": 65, "score": 51955.03735424423 }, { "content": "#[doc = \"Write proxy for field `FERR`\"]\n\npub struct FERR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> FERR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);\n\n self.w\n", "file_path": "src/leuart0/ifc.rs", "rank": 66, "score": 51955.03735424423 }, { "content": "#[doc = \"Write proxy for field `ICBOF0`\"]\n\npub struct ICBOF0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ICBOF0_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & 
!(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n", "file_path": "src/timer2/ifc.rs", "rank": 67, "score": 51955.03735424423 }, { "content": "impl<'a> CC1_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CC2`\"]\n\npub struct CC2_W<'a> {\n", "file_path": "src/timer0/ifc.rs", "rank": 68, "score": 51954.92293668349 }, { "content": "impl<'a> AUXOF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `TCC`\"]\n\npub struct TCC_W<'a> {\n", "file_path": "src/pcnt0/ifc.rs", "rank": 69, "score": 51954.92293668349 }, { "content": "impl<'a> RXUF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for 
field `TXOF`\"]\n\npub struct TXOF_W<'a> {\n", "file_path": "src/usart1/ifc.rs", "rank": 70, "score": 51954.92293668349 }, { "content": "impl<'a> RXUF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `TXOF`\"]\n\npub struct TXOF_W<'a> {\n", "file_path": "src/usart0/ifc.rs", "rank": 71, "score": 51954.92293668349 }, { "content": "impl<'a> CLTO_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `SSTOP`\"]\n\npub struct SSTOP_W<'a> {\n", "file_path": "src/i2c0/ifc.rs", "rank": 72, "score": 51954.92293668349 }, { "content": "impl<'a> TXOF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field 
`PERR`\"]\n\npub struct PERR_W<'a> {\n", "file_path": "src/leuart0/ifc.rs", "rank": 73, "score": 51954.92293668349 }, { "content": "impl<'a> CC1_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CC2`\"]\n\npub struct CC2_W<'a> {\n", "file_path": "src/timer1/ifc.rs", "rank": 75, "score": 51954.92293668349 }, { "content": "impl<'a> CC1_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CC2`\"]\n\npub struct CC2_W<'a> {\n", "file_path": "src/timer2/ifc.rs", "rank": 76, "score": 51954.92293668349 }, { "content": "impl<'a> TXC_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ACK`\"]\n\npub 
struct ACK_W<'a> {\n", "file_path": "src/i2c0/ifc.rs", "rank": 77, "score": 51954.92293668349 }, { "content": "impl<'a> LFXORDY_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `AUXHFRCORDY`\"]\n\npub struct AUXHFRCORDY_W<'a> {\n", "file_path": "src/cmu/ifc.rs", "rank": 78, "score": 51954.92293668349 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `AUXOF`\"]\n\npub struct AUXOF_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/pcnt0/ifc.rs", "rank": 79, "score": 51952.605584436926 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CC1`\"]\n\npub struct CC1_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/timer1/ifc.rs", "rank": 80, "score": 
51952.605584436926 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `SCANOF`\"]\n\npub struct SCANOF_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/adc0/ifc.rs", "rank": 81, "score": 51952.605584436926 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `TXOF`\"]\n\npub struct TXOF_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/leuart0/ifc.rs", "rank": 82, "score": 51952.605584436926 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `RXUF`\"]\n\npub struct RXUF_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/usart0/ifc.rs", "rank": 83, "score": 51952.605584436926 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n 
#[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CC1`\"]\n\npub struct CC1_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/timer2/ifc.rs", "rank": 84, "score": 51952.605584436926 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CLTO`\"]\n\npub struct CLTO_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/i2c0/ifc.rs", "rank": 85, "score": 51952.605584436926 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `TXC`\"]\n\npub struct TXC_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/i2c0/ifc.rs", "rank": 86, "score": 51952.605584436926 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n 
#[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `LFXORDY`\"]\n\npub struct LFXORDY_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/cmu/ifc.rs", "rank": 87, "score": 51952.605584436926 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `RXUF`\"]\n\npub struct RXUF_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/usart1/ifc.rs", "rank": 88, "score": 51952.605584436926 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CC1`\"]\n\npub struct CC1_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/timer0/ifc.rs", "rank": 89, "score": 51952.605584436926 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n 
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Write proxy for field `CMOF`\"]\n\npub struct CMOF_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "src/msc/ifc.rs", "rank": 91, "score": 51952.605584436926 }, { "content": " }\n\n}\n\n#[doc = \"Write proxy for field `CALOF`\"]\n\npub struct CALOF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CALOF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/cmu/ifc.rs", "rank": 92, "score": 51951.25995239304 }, { "content": " }\n\n}\n\n#[doc = \"Write proxy for field `ICBOF1`\"]\n\npub struct ICBOF1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ICBOF1_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/timer0/ifc.rs", "rank": 93, "score": 51951.25995239304 }, { "content": " }\n\n}\n\n#[doc = \"Write proxy for field `ICBOF1`\"]\n\npub struct ICBOF1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ICBOF1_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": 
"src/timer1/ifc.rs", "rank": 94, "score": 51951.25995239304 }, { "content": " }\n\n}\n\n#[doc = \"Write proxy for field `MPAF`\"]\n\npub struct MPAF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MPAF_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/leuart0/ifc.rs", "rank": 96, "score": 51951.25995239304 }, { "content": " }\n\n}\n\n#[doc = \"Write proxy for field `PERR`\"]\n\npub struct PERR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PERR_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/usart1/ifc.rs", "rank": 97, "score": 51951.25995239304 }, { "content": " }\n\n}\n\n#[doc = \"Write proxy for field `MSTOP`\"]\n\npub struct MSTOP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MSTOP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/i2c0/ifc.rs", "rank": 98, "score": 51951.25995239304 }, { "content": " }\n\n}\n\n#[doc = \"Write proxy for field `PERR`\"]\n\npub struct PERR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PERR_W<'a> {\n\n #[doc = 
r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n", "file_path": "src/usart0/ifc.rs", "rank": 99, "score": 51951.25995239304 } ]
Rust
src/dispatcher/sink.rs
majacQ/CorTeX
7957f91131f52c9f13b77a890ec2cdcc213c7b8e
use std::collections::HashMap; use std::error::Error; use std::fs::File; use std::io; use std::io::ErrorKind; use std::io::Write; use std::ops::Deref; use std::path::Path; use std::sync::Arc; use std::sync::Mutex; use time; use crate::dispatcher::server; use crate::helpers; use crate::helpers::{TaskProgress, TaskReport, TaskStatus}; use crate::models::{Service, WorkerMetadata}; pub struct Sink { pub port: usize, pub queue_size: usize, pub message_size: usize, pub backend_address: String, } impl Sink { pub fn start( &self, services_arc: &Arc<Mutex<HashMap<String, Option<Service>>>>, progress_queue_arc: &Arc<Mutex<HashMap<i64, TaskProgress>>>, done_queue_arc: &Arc<Mutex<Vec<TaskReport>>>, job_limit: Option<usize>, ) -> Result<(), Box<dyn Error>> { let context = zmq::Context::new(); let sink = context.socket(zmq::PULL)?; let address = format!("tcp://*:{}", self.port); assert!(sink.bind(&address).is_ok()); let mut sink_job_count: usize = 0; loop { let mut recv_msg = zmq::Message::new(); let mut identity_msg = zmq::Message::new(); let mut taskid_msg = zmq::Message::new(); let mut service_msg = zmq::Message::new(); sink.recv(&mut identity_msg, 0)?; let identity = identity_msg.as_str().unwrap_or("_worker_"); sink.recv(&mut service_msg, 0)?; let service_name = service_msg.as_str().unwrap_or("_unknown_"); sink.recv(&mut taskid_msg, 0)?; let taskid_str = taskid_msg.as_str().unwrap_or("-1"); let taskid = taskid_str.parse::<i64>().unwrap_or(-1); sink_job_count += 1; let mut total_incoming = 0; let request_time = time::get_time(); println!( "sink {:?}: incoming result for {:?}, worker {:?}, taskid: {}", sink_job_count, service_name, identity, taskid ); if let Some(task_progress) = server::pop_progress_task(progress_queue_arc, taskid) { let task = task_progress.task; match server::get_service(service_name, services_arc) { None => { return Err(Box::new(io::Error::new( ErrorKind::Other, "TODO: Server::get_service found nothing.", ))); }, Some(service) => { if service.id == 
task.service_id { if service.id == 1 { sink.recv(&mut recv_msg, 0)?; let done_report = TaskReport { task: task.clone(), status: TaskStatus::NoProblem, messages: Vec::new(), }; server::push_done_queue(done_queue_arc, done_report); } else { match Path::new(&task.entry.clone()).parent() { None => { println!("-- Error TODO: Path::new(&task.entry).parent() failed."); }, Some(recv_dir) => { match recv_dir.to_str() { None => { println!("-- Error TODO: recv_dir.to_str() failed"); }, Some(recv_dir_str) => { let recv_dir_string = recv_dir_str.to_string(); let recv_pathname = recv_dir_string + "/" + &service.name + ".zip"; let recv_path = Path::new(&recv_pathname); { let mut file = match File::create(recv_path) { Ok(f) => f, Err(e) => { println!("-- Error TODO: File::create(recv_path): {:?}", e); continue; }, }; while sink.recv(&mut recv_msg, 0).is_ok() { match file.write(recv_msg.deref()) { Ok(written_bytes) => total_incoming += written_bytes, Err(e) => { println!( "-- Error TODO: file.write(recv_msg.deref()) failed: {:?}", e ); break; }, }; match sink.get_rcvmore() { Ok(true) => {}, _ => break, /* println!("Error TODO: sink.get_rcvmore failed: * {:?}", e); */ }; } drop(file); } let done_report = helpers::generate_report(task, recv_path); server::push_done_queue(done_queue_arc, done_report); }, } }, } } WorkerMetadata::record_received( identity.to_string(), service.id, taskid, self.backend_address.clone(), )?; } else { println!( "-- Mismatch between requested service id {:?} and task's service id {:?} for task {:?}, discarding response", service.id, task.service_id, taskid ); while sink.recv(&mut recv_msg, 0).is_ok() { if !sink.get_rcvmore()? { break; } } } }, }; } else { println!("-- No such task id found in dispatcher queue: {:?}", taskid); while sink.recv(&mut recv_msg, 0).is_ok() { if !sink.get_rcvmore()? 
{ break; } } } let responded_time = time::get_time(); let request_duration = (responded_time - request_time).num_milliseconds(); println!( "sink {}: message size: {}, took {}ms.", sink_job_count, total_incoming, request_duration ); if let Some(limit_number) = job_limit { if sink_job_count >= limit_number { println!( "sink {}: job limit reached, terminating Sink thread...", limit_number ); break; } } } Ok(()) } }
use std::collections::HashMap; use std::error::Error; use std::fs::File; use std::io; use std::io::ErrorKind; use std::io::Write; use std::ops::Deref; use std::path::Path; use std::sync::Arc; use std::sync::Mutex; use time; use crate::dispatcher::server; use crate::helpers; use crate::helpers::{TaskProgress, TaskReport, TaskStatus}; use crate::models::{Service, WorkerMetadata}; pub struct Sink { pub port: usize, pub queue_size: usize, pub message_size: usize, pub backend_address: String, } impl Sink {
}
pub fn start( &self, services_arc: &Arc<Mutex<HashMap<String, Option<Service>>>>, progress_queue_arc: &Arc<Mutex<HashMap<i64, TaskProgress>>>, done_queue_arc: &Arc<Mutex<Vec<TaskReport>>>, job_limit: Option<usize>, ) -> Result<(), Box<dyn Error>> { let context = zmq::Context::new(); let sink = context.socket(zmq::PULL)?; let address = format!("tcp://*:{}", self.port); assert!(sink.bind(&address).is_ok()); let mut sink_job_count: usize = 0; loop { let mut recv_msg = zmq::Message::new(); let mut identity_msg = zmq::Message::new(); let mut taskid_msg = zmq::Message::new(); let mut service_msg = zmq::Message::new(); sink.recv(&mut identity_msg, 0)?; let identity = identity_msg.as_str().unwrap_or("_worker_"); sink.recv(&mut service_msg, 0)?; let service_name = service_msg.as_str().unwrap_or("_unknown_"); sink.recv(&mut taskid_msg, 0)?; let taskid_str = taskid_msg.as_str().unwrap_or("-1"); let taskid = taskid_str.parse::<i64>().unwrap_or(-1); sink_job_count += 1; let mut total_incoming = 0; let request_time = time::get_time(); println!( "sink {:?}: incoming result for {:?}, worker {:?}, taskid: {}", sink_job_count, service_name, identity, taskid ); if let Some(task_progress) = server::pop_progress_task(progress_queue_arc, taskid) { let task = task_progress.task; match server::get_service(service_name, services_arc) { None => { return Err(Box::new(io::Error::new( ErrorKind::Other, "TODO: Server::get_service found nothing.", ))); }, Some(service) => { if service.id == task.service_id { if service.id == 1 { sink.recv(&mut recv_msg, 0)?; let done_report = TaskReport { task: task.clone(), status: TaskStatus::NoProblem, messages: Vec::new(), }; server::push_done_queue(done_queue_arc, done_report); } else { match Path::new(&task.entry.clone()).parent() { None => { println!("-- Error TODO: Path::new(&task.entry).parent() failed."); }, Some(recv_dir) => { match recv_dir.to_str() { None => { println!("-- Error TODO: recv_dir.to_str() failed"); }, Some(recv_dir_str) => { let 
recv_dir_string = recv_dir_str.to_string(); let recv_pathname = recv_dir_string + "/" + &service.name + ".zip"; let recv_path = Path::new(&recv_pathname); { let mut file = match File::create(recv_path) { Ok(f) => f, Err(e) => { println!("-- Error TODO: File::create(recv_path): {:?}", e); continue; }, }; while sink.recv(&mut recv_msg, 0).is_ok() { match file.write(recv_msg.deref()) { Ok(written_bytes) => total_incoming += written_bytes, Err(e) => { println!( "-- Error TODO: file.write(recv_msg.deref()) failed: {:?}", e ); break; }, }; match sink.get_rcvmore() { Ok(true) => {}, _ => break, /* println!("Error TODO: sink.get_rcvmore failed: * {:?}", e); */ }; } drop(file); } let done_report = helpers::generate_report(task, recv_path); server::push_done_queue(done_queue_arc, done_report); }, } }, } } WorkerMetadata::record_received( identity.to_string(), service.id, taskid, self.backend_address.clone(), )?; } else { println!( "-- Mismatch between requested service id {:?} and task's service id {:?} for task {:?}, discarding response", service.id, task.service_id, taskid ); while sink.recv(&mut recv_msg, 0).is_ok() { if !sink.get_rcvmore()? { break; } } } }, }; } else { println!("-- No such task id found in dispatcher queue: {:?}", taskid); while sink.recv(&mut recv_msg, 0).is_ok() { if !sink.get_rcvmore()? { break; } } } let responded_time = time::get_time(); let request_duration = (responded_time - request_time).num_milliseconds(); println!( "sink {}: message size: {}, took {}ms.", sink_job_count, total_incoming, request_duration ); if let Some(limit_number) = job_limit { if sink_job_count >= limit_number { println!( "sink {}: job limit reached, terminating Sink thread...", limit_number ); break; } } } Ok(()) }
function_block-full_function
[ { "content": "/// Utility functions, until they find a better place\n\npub fn utf_truncate(input: &mut String, maxsize: usize) {\n\n let mut utf_maxsize = input.len();\n\n if utf_maxsize >= maxsize {\n\n {\n\n let mut char_iter = input.char_indices();\n\n while utf_maxsize >= maxsize {\n\n utf_maxsize = match char_iter.next_back() {\n\n Some((index, _)) => index,\n\n _ => 0,\n\n };\n\n }\n\n } // Extra {} wrap to limit the immutable borrow of char_indices()\n\n input.truncate(utf_maxsize);\n\n }\n\n // eliminate null characters if any\n\n *input = input.replace(\"\\x00\", \"\");\n\n}\n\n\n", "file_path": "src/helpers.rs", "rank": 0, "score": 123546.72156384544 }, { "content": "/// Provide a `NamedFile` for an entry\n\npub fn serve_entry(service_name: String, entry_id: usize) -> Result<NamedFile, NotFound<String>> {\n\n let backend = Backend::default();\n\n match Task::find(entry_id as i64, &backend.connection) {\n\n Ok(task) => {\n\n let entry = task.entry;\n\n let zip_path = match service_name.as_str() {\n\n \"import\" => entry,\n\n _ => STRIP_NAME_REGEX.replace(&entry, \"\").to_string() + \"/\" + &service_name + \".zip\",\n\n };\n\n if zip_path.is_empty() {\n\n Err(NotFound(format!(\n\n \"Service {:?} does not have a result\n\n for entry {:?}\",\n\n service_name, entry_id\n\n )))\n\n } else {\n\n NamedFile::open(&zip_path).map_err(|_| NotFound(\"Invalid Zip at path\".to_string()))\n\n }\n\n },\n\n Err(e) => Err(NotFound(format!(\"Task not found: {}\", e))),\n\n }\n\n}\n\n\n", "file_path": "src/frontend/concerns.rs", "rank": 1, "score": 120443.58073023043 }, { "content": "/// TODO: Is this outdated?\n\n/// Maps a regular string into a URI-encoded one\n\npub fn uri_escape(param: Option<String>) -> Option<String> {\n\n match param {\n\n None => None,\n\n Some(param_pure) => {\n\n let mut param_encoded: String =\n\n percent_encoding::utf8_percent_encode(&param_pure, percent_encoding::NON_ALPHANUMERIC)\n\n .collect::<String>();\n\n // TODO: This could/should be done 
faster by using lazy_static!\n\n for &(original, replacement) in &[\n\n (\":\", \"%3A\"),\n\n (\"/\", \"%2F\"),\n\n (\"\\\\\", \"%5C\"),\n\n (\"$\", \"%24\"),\n\n (\".\", \"%2E\"),\n\n (\"!\", \"%21\"),\n\n (\"@\", \"%40\"),\n\n ] {\n\n param_encoded = param_encoded.replace(original, replacement);\n\n }\n\n // if param_pure != param_encoded {\n\n // println!(\"Encoded {:?} to {:?}\", param_pure, param_encoded);\n\n // } else {\n\n // println!(\"No encoding needed: {:?}\", param_pure);\n\n // }\n\n Some(param_encoded)\n\n },\n\n }\n\n}\n", "file_path": "src/frontend/helpers.rs", "rank": 2, "score": 111876.48932138484 }, { "content": "/// TODO: Is this outdated?\n\n/// Maps a URI-encoded string into its regular plain text form\n\npub fn uri_unescape(param: Option<&str>) -> Option<String> {\n\n match param {\n\n None => None,\n\n Some(param_encoded) => {\n\n let mut param_decoded: String = param_encoded.to_owned();\n\n // TODO: This could/should be done faster by using lazy_static!\n\n for &(original, replacement) in &[\n\n (\"%3A\", \":\"),\n\n (\"%2F\", \"/\"),\n\n (\"%24\", \"$\"),\n\n (\"%2E\", \".\"),\n\n (\"%21\", \"!\"),\n\n (\"%40\", \"@\"),\n\n ] {\n\n param_decoded = param_decoded.replace(original, replacement);\n\n }\n\n Some(\n\n percent_encoding::percent_decode(param_decoded.as_bytes())\n\n .decode_utf8_lossy()\n\n .into_owned(),\n\n )\n\n },\n\n }\n\n}\n", "file_path": "src/frontend/helpers.rs", "rank": 3, "score": 93321.61796514448 }, { "content": "fn since_string(then: SystemTime, is_fresh: &mut bool) -> String {\n\n let now = SystemTime::now();\n\n let since_duration = now.duration_since(then).unwrap();\n\n let secs = since_duration.as_secs();\n\n if secs < 60 {\n\n *is_fresh = true;\n\n format!(\"{} seconds ago\", secs)\n\n } else if secs < 3_600 {\n\n format!(\"{} minutes ago\", secs / 60)\n\n } else if secs < 86_400 {\n\n format!(\"{} hours ago\", secs / 3_600)\n\n } else {\n\n format!(\"{} days ago\", secs / 86_400)\n\n }\n\n}\n\n\n\nimpl 
WorkerMetadata {\n\n /// Update the metadata for a worker which was just dispatched to\n\n pub fn record_dispatched(\n\n name: String,\n", "file_path": "src/models/worker_metadata.rs", "rank": 4, "score": 83684.5515166762 }, { "content": "/// Generates a `TaskReport`, given the path to a result archive from a `CorTeX` processing job\n\n/// Expects a \"cortex.log\" file in the archive, following the `LaTeXML` messaging conventions\n\npub fn generate_report(task: Task, result: &Path) -> TaskReport {\n\n // println!(\"Preparing report for {:?}, result at {:?}\",self.entry, result);\n\n let mut messages = Vec::new();\n\n let mut status = TaskStatus::Fatal; // Fatal by default\n\n {\n\n // -- Archive::Reader, trying to localize (to .drop asap)\n\n // Let's open the archive file and find the cortex.log file:\n\n let log_name = \"cortex.log\";\n\n match Reader::new()\n\n .unwrap_or_else(|_| panic!(\"Could not create libarchive Reader struct\"))\n\n .support_filter_all()\n\n .support_format_all()\n\n .open_filename(result.to_str().unwrap_or_default(), BUFFER_SIZE)\n\n {\n\n Err(e) => {\n\n println!(\"Error TODO: Couldn't open archive_reader: {:?}\", e);\n\n },\n\n Ok(archive_reader) => {\n\n while let Ok(entry) = archive_reader.next_header() {\n\n if entry.pathname() != log_name {\n", "file_path": "src/helpers.rs", "rank": 5, "score": 65991.47811906732 }, { "content": "/// Adds a task report to a shared report queue\n\npub fn push_done_queue(reports_arc: &Arc<Mutex<Vec<TaskReport>>>, report: TaskReport) {\n\n let mut reports = reports_arc\n\n .lock()\n\n .unwrap_or_else(|_| panic!(\"Failed to obtain Mutex lock in push_done_queue\"));\n\n if reports.len() > 10_000 {\n\n panic!(\n\n \"Done queue is too large: {:?} tasks. 
Stop the sink!\",\n\n reports.len()\n\n );\n\n }\n\n reports.push(report)\n\n}\n\n\n", "file_path": "src/dispatcher/server.rs", "rank": 6, "score": 65810.5891371707 }, { "content": "/// A minimalistic ORM trait for `CorTeX` data items\n\npub trait CortexInsertable {\n\n /// Creates a new item given a connection\n\n fn create(&self, connection: &PgConnection) -> Result<usize, Error>;\n\n}\n\n\n", "file_path": "src/concerns/orm.rs", "rank": 7, "score": 58494.08104198027 }, { "content": "/// Prepare a configurable report for a <corpus,server> pair\n\npub fn serve_report(\n\n corpus_name: String,\n\n service_name: String,\n\n severity: Option<String>,\n\n category: Option<String>,\n\n what: Option<String>,\n\n params: Option<Form<ReportParams>>,\n\n) -> Result<Template, NotFound<String>> {\n\n let report_start = time::get_time();\n\n let mut context = TemplateContext::default();\n\n let mut global = HashMap::new();\n\n let backend = Backend::default();\n\n\n\n let corpus_name = corpus_name.to_lowercase();\n\n let service_name = service_name.to_lowercase();\n\n let corpus_result = Corpus::find_by_name(&corpus_name, &backend.connection);\n\n if let Ok(corpus) = corpus_result {\n\n let service_result = Service::find_by_name(&service_name, &backend.connection);\n\n if let Ok(service) = service_result {\n\n // Metadata in all reports\n", "file_path": "src/frontend/concerns.rs", "rank": 8, "score": 58494.08104198027 }, { "content": "/// Rerun a filtered subset of tasks for a <corpus,service> pair\n\npub fn serve_rerun(\n\n corpus_name: String,\n\n service_name: String,\n\n severity: Option<String>,\n\n category: Option<String>,\n\n what: Option<String>,\n\n rr: Json<RerunRequestParams>,\n\n) -> Result<Accepted<String>, NotFound<String>> {\n\n let token = rr.token.clone();\n\n let description = rr.description.clone();\n\n let config = load_config();\n\n let corpus_name = corpus_name.to_lowercase();\n\n let service_name = service_name.to_lowercase();\n\n\n\n // Ensure we're 
given a valid rerun token to rerun, or anyone can wipe the cortex results\n\n // let token = safe_data_to_string(data).unwrap_or_else(|_| UNKNOWN.to_string()); // reuse old\n\n // code by setting data to the String\n\n let user_opt = config.rerun_tokens.get(&token);\n\n let user = match user_opt {\n\n None => return Err(NotFound(\"Access Denied\".to_string())), /* TODO: response.\n", "file_path": "src/frontend/concerns.rs", "rank": 9, "score": 58494.08104198027 }, { "content": "/// A minimalistic ORM trait for `CorTeX` data items\n\npub trait CortexDeletable {\n\n /// Creates a new item given a connection\n\n fn delete_by(&self, connection: &PgConnection, field: &str) -> Result<usize, Error>;\n\n}\n", "file_path": "src/concerns/orm.rs", "rank": 10, "score": 58494.08104198027 }, { "content": "/// Log actor trait, assumes already Identifiable (for id())\n\npub trait LogRecord {\n\n /// Owner Task's id accessor\n\n fn task_id(&self) -> i64;\n\n /// Category accessor\n\n fn category(&self) -> &str;\n\n /// What accessor\n\n fn what(&self) -> &str;\n\n /// Details accessor\n\n fn details(&self) -> &str;\n\n /// Details setter\n\n fn set_details(&mut self, new_details: String);\n\n /// Severity accessor\n\n fn severity(&self) -> &str;\n\n /// Implements the fmt::Debug fmt\n\n fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n writeln!(\n\n f,\n\n \"{}(category: {},\\n\\twhat: {},\\n\\tdetails: {})\",\n\n self.severity(),\n\n self.category(),\n", "file_path": "src/models/messages.rs", "rank": 11, "score": 58494.08104198027 }, { "content": "/// Constructs the default Backend struct for testing\n\npub fn testdb() -> Backend {\n\n dotenv().ok();\n\n Backend {\n\n connection: connection_at(TEST_DB_ADDRESS),\n\n }\n\n}\n", "file_path": "src/backend.rs", "rank": 12, "score": 57862.402248452665 }, { "content": "/// Serves an entry as a `Template` instance to be preview via a client-side asset renderer\n\npub fn serve_entry_preview(\n\n corpus_name: String,\n\n 
service_name: String,\n\n entry_name: String,\n\n) -> Result<Template, NotFound<String>> {\n\n let report_start = time::get_time();\n\n let corpus_name = corpus_name.to_lowercase();\n\n let mut context = TemplateContext::default();\n\n let mut global = HashMap::new();\n\n let backend = Backend::default();\n\n\n\n let corpus_result = Corpus::find_by_name(&corpus_name, &backend.connection);\n\n if let Ok(corpus) = corpus_result {\n\n let service_result = Service::find_by_name(&service_name, &backend.connection);\n\n if let Ok(service) = service_result {\n\n // Assemble the Download URL from where we will gather the page contents\n\n // First, we need the taskid\n\n let task = match Task::find_by_name(&entry_name, &corpus, &service, &backend.connection) {\n\n Ok(t) => t,\n\n Err(e) => return Err(NotFound(e.to_string())),\n", "file_path": "src/frontend/concerns.rs", "rank": 13, "score": 57308.85198871899 }, { "content": "/// Persists a shared vector of reports to the Task store\n\npub fn mark_done_arc(\n\n backend: &Backend,\n\n reports_arc: &Arc<Mutex<Vec<TaskReport>>>,\n\n) -> Result<bool, String>\n\n{\n\n // Important: hold the mutex lock for the entirety of the mark_done process,\n\n // so that it gets poisoned if the DB runs away and the thread panics\n\n // we want the entire dispatcher to panic if this thread panics.\n\n let mut mutex_guard = reports_arc\n\n .lock()\n\n .unwrap_or_else(|_| panic!(\"Failed to obtain Mutex lock in drain_shared_vec\"));\n\n\n\n let reports : Vec<TaskReport> = (*mutex_guard).drain(..).collect();\n\n if !reports.is_empty() {\n\n let request_time = time::get_time();\n\n let mut success = false;\n\n if let Err(e) = backend.mark_done(&reports) {\n\n println!(\"-- mark_done attempt failed: {:?}\", e);\n\n // DB persist failed, retry\n\n let mut retries = 0;\n", "file_path": "src/dispatcher/server.rs", "rank": 14, "score": 57308.85198871899 }, { "content": "/// A standalone worker loop for invalidating stale cache entries, mostly for 
CorTeX's frontend\n\n/// report pages\n\npub fn cache_worker() {\n\n let redis_client = match redis::Client::open(\"redis://127.0.0.1/\") {\n\n Ok(client) => client,\n\n _ => panic!(\"Redis connection failed, please boot up redis and restart the frontend!\"),\n\n };\n\n let mut redis_connection = match redis_client.get_connection() {\n\n Ok(conn) => conn,\n\n _ => panic!(\"Redis connection failed, please boot up redis and restart the frontend!\"),\n\n };\n\n let mut queued_cache: HashMap<String, usize> = HashMap::new();\n\n loop {\n\n // Keep a fresh backend connection on each invalidation pass.\n\n let backend = Backend::default();\n\n let mut global_stub: HashMap<String, String> = HashMap::new();\n\n // each corpus+service (non-import)\n\n for corpus in &backend.corpora() {\n\n if let Ok(services) = corpus.select_services(&backend.connection) {\n\n for service in &services {\n\n if service.name == \"import\" {\n\n continue;\n", "file_path": "src/frontend/cached/worker.rs", "rank": 15, "score": 57308.85198871899 }, { "content": "/// Task reruns by a variety of selector granularity\n\npub trait MarkRerun {\n\n /// Most-specific rerun query, via both category and what filter\n\n fn mark_rerun_by_what(\n\n mark: i32,\n\n corpus_id: i32,\n\n service_id: i32,\n\n rerun_category: &str,\n\n rerun_what: &str,\n\n connection: &PgConnection,\n\n ) -> Result<usize, Error>;\n\n /// Mid-specificity `category`-filtered reruns\n\n fn mark_rerun_by_category(\n\n mark: i32,\n\n corpus_id: i32,\n\n service_id: i32,\n\n rerun_category: &str,\n\n connection: &PgConnection,\n\n ) -> Result<usize, Error>;\n\n}\n\n\n", "file_path": "src/concerns/mark_rerun.rs", "rank": 16, "score": 57308.85198871899 }, { "content": "/// Generate a random integer useful for temporary DB marks\n\npub fn random_mark() -> i32 {\n\n let mut rng = thread_rng();\n\n let mark_rng: u16 = rng.gen();\n\n i32::from(mark_rng)\n\n}\n\n\n", "file_path": "src/helpers.rs", "rank": 17, "score": 56583.17089770401 }, { 
"content": "/// Cached proxy over `Backend::task_report`\n\npub fn task_report(\n\n global: &mut HashMap<String, String>,\n\n corpus: &Corpus,\n\n service: &Service,\n\n severity: Option<String>,\n\n category: Option<String>,\n\n what: Option<String>,\n\n params: &Option<Form<ReportParams>>,\n\n) -> Vec<HashMap<String, String>>\n\n{\n\n let all_messages = match params {\n\n None => false,\n\n Some(ref params) => *params.all.as_ref().unwrap_or(&false),\n\n };\n\n let offset = match params {\n\n None => 0,\n\n Some(ref params) => *params.offset.as_ref().unwrap_or(&0),\n\n };\n\n let page_size = match params {\n\n None => 100,\n", "file_path": "src/frontend/cached/task_report.rs", "rank": 18, "score": 56207.389404333 }, { "content": "/// Loads the global `FrontendConfig` from config.json\n\npub fn load_config() -> FrontendConfig {\n\n let mut config_file = match File::open(\"config.json\") {\n\n Ok(cfg) => cfg,\n\n Err(e) => panic!(\n\n \"You need a well-formed JSON config.json file to run the frontend. Error: {}\",\n\n e\n\n ),\n\n };\n\n let mut config_buffer = String::new();\n\n match config_file.read_to_string(&mut config_buffer) {\n\n Ok(_) => {},\n\n Err(e) => panic!(\n\n \"You need a well-formed JSON config.json file to run the frontend. Error: {}\",\n\n e\n\n ),\n\n };\n\n\n\n match serde_json::from_str(&config_buffer) {\n\n Ok(decoded) => decoded,\n\n Err(e) => panic!(\n\n \"You need a well-formed JSON config.json file to run the frontend. 
Error: {}\",\n\n e\n\n ),\n\n }\n\n}\n", "file_path": "src/frontend/helpers.rs", "rank": 19, "score": 54292.40937441429 }, { "content": "pub fn register_event(e: Event) {}\n", "file_path": "src/dispatcher/metadata.rs", "rank": 20, "score": 53740.32514902725 }, { "content": "/// Constructs a Backend at a given address\n\npub fn from_address(address: &str) -> Backend {\n\n Backend {\n\n connection: connection_at(address),\n\n }\n\n}\n\n\n\n/// Options container for relevant fields in requesting a `(corpus, service)` rerun\n\npub struct RerunOptions<'a> {\n\n /// corpus to rerun\n\n pub corpus: &'a Corpus,\n\n /// service to rerun\n\n pub service: &'a Service,\n\n /// optionally, severity level filter\n\n pub severity_opt: Option<String>,\n\n /// optionally, category level filter\n\n pub category_opt: Option<String>,\n\n /// optionally, what level filter\n\n pub what_opt: Option<String>,\n\n /// optionally, owner of the rerun (default is \"admin\")\n\n pub owner_opt: Option<String>,\n", "file_path": "src/backend.rs", "rank": 21, "score": 52298.09611662633 }, { "content": "/// Constructs a new Task store representation from a Postgres DB address\n\npub fn connection_at(address: &str) -> PgConnection {\n\n PgConnection::establish(address).unwrap_or_else(|_| panic!(\"Error connecting to {}\", address))\n\n}\n", "file_path": "src/backend.rs", "rank": 22, "score": 51196.63353224035 }, { "content": "/// Maps a cortex message severity into a bootstrap class for color highlight\n\npub fn severity_highlight(severity: &str) -> &str {\n\n match severity {\n\n // Bootstrap highlight classes\n\n \"no_problem\" => \"success\",\n\n \"warning\" => \"warning\",\n\n \"error\" => \"error\",\n\n \"fatal\" => \"danger\",\n\n \"invalid\" => \"info\",\n\n _ => \"info\",\n\n }\n\n}\n", "file_path": "src/frontend/helpers.rs", "rank": 23, "score": 50170.36018050831 }, { "content": "/// Helper for generating a random i32 in a range, to avoid loading the rng crate + boilerplate\n\npub fn 
rand_in_range(from: u16, to: u16) -> u16 {\n\n let mut rng = thread_rng();\n\n let mark_rng: u16 = rng.gen_range(from..=to);\n\n mark_rng\n\n}\n", "file_path": "src/helpers.rs", "rank": 24, "score": 48802.45551622486 }, { "content": "/// Auto-generates a URI-encoded \"foo_uri\" entry for each \"foo\" label associated with a clickable\n\n/// link (for Tera templates)\n\npub fn decorate_uri_encodings(context: &mut TemplateContext) {\n\n for inner_vec in &mut [\n\n &mut context.corpora,\n\n &mut context.services,\n\n &mut context.entries,\n\n &mut context.categories,\n\n &mut context.whats,\n\n ] {\n\n if let Some(ref mut inner_vec_data) = **inner_vec {\n\n for subhash in inner_vec_data {\n\n let mut uri_decorations = vec![];\n\n for (subkey, subval) in subhash.iter() {\n\n uri_decorations.push((\n\n subkey.to_string() + \"_uri\",\n\n uri_escape(Some(subval.to_string())).unwrap(),\n\n ));\n\n }\n\n for (decoration_key, decoration_val) in uri_decorations {\n\n subhash.insert(decoration_key, decoration_val);\n\n }\n", "file_path": "src/frontend/helpers.rs", "rank": 25, "score": 48314.55317019502 }, { "content": "/// Getter for a `Service` stored inside an `Arc<Mutex<HashMap>`, with no DB access\n\npub fn get_service<S: ::std::hash::BuildHasher>(\n\n service_name: &str,\n\n services: &Arc<Mutex<HashMap<String, Option<Service>, S>>>,\n\n) -> Option<Service>\n\n{\n\n let services = services\n\n .lock()\n\n .unwrap_or_else(|_| panic!(\"Failed to obtain Mutex lock in get_service\"));\n\n match services.get(service_name) {\n\n None => None, // TODO: Handle errors\n\n Some(service) => service.clone(),\n\n }\n\n}\n", "file_path": "src/dispatcher/server.rs", "rank": 26, "score": 47045.56631968075 }, { "content": "/// Pushes a new task on the progress queue\n\npub fn push_progress_task<S: ::std::hash::BuildHasher>(\n\n progress_queue_arc: &Arc<Mutex<HashMap<i64, TaskProgress, S>>>,\n\n progress_task: TaskProgress,\n\n)\n\n{\n\n let mut progress_queue = progress_queue_arc\n\n 
.lock()\n\n .unwrap_or_else(|_| panic!(\"Failed to obtain Mutex lock in push_progress_task\"));\n\n // NOTE: This constant should be adjusted if you expect a fringe of more than 10,000 jobs\n\n // I am using this as a workaround for the inability to catch thread panic!() calls.\n\n if progress_queue.len() > 10_000 {\n\n panic!(\n\n \"Progress queue is too large: {:?} tasks. Stop the ventilator!\",\n\n progress_queue.len()\n\n );\n\n }\n\n progress_queue.insert(progress_task.task.id, progress_task);\n\n}\n\n\n", "file_path": "src/dispatcher/server.rs", "rank": 27, "score": 46203.85003807167 }, { "content": "/// Check for, remove and return any expired tasks from the progress queue\n\npub fn timeout_progress_tasks<S: ::std::hash::BuildHasher>(\n\n progress_queue_arc: &Arc<Mutex<HashMap<i64, TaskProgress, S>>>,\n\n) -> Vec<TaskProgress> {\n\n let mut progress_queue = progress_queue_arc\n\n .lock()\n\n .unwrap_or_else(|_| panic!(\"Failed to obtain Mutex lock in timeout_progress_tasks\"));\n\n let now = time::get_time().sec;\n\n let expired_keys = progress_queue\n\n .iter()\n\n .filter(|&(_, v)| v.expected_at() < now)\n\n .map(|(k, _)| *k)\n\n .collect::<Vec<_>>();\n\n let mut expired_tasks = Vec::new();\n\n for key in expired_keys {\n\n match progress_queue.remove(&key) {\n\n None => {},\n\n Some(task_progress) => expired_tasks.push(task_progress),\n\n }\n\n }\n\n expired_tasks\n\n}\n\n\n", "file_path": "src/dispatcher/server.rs", "rank": 28, "score": 46203.85003807167 }, { "content": "/// Pops the next task from the progress queue\n\npub fn pop_progress_task<S: ::std::hash::BuildHasher>(\n\n progress_queue_arc: &Arc<Mutex<HashMap<i64, TaskProgress, S>>>,\n\n taskid: i64,\n\n) -> Option<TaskProgress>\n\n{\n\n if taskid < 0 {\n\n // Mock ids are to be skipped\n\n return None;\n\n }\n\n let mut progress_queue = progress_queue_arc\n\n .lock()\n\n .unwrap_or_else(|_| panic!(\"Failed to obtain Mutex lock in pop_progress_task\"));\n\n 
progress_queue.remove(&taskid)\n\n}\n\n\n", "file_path": "src/dispatcher/server.rs", "rank": 29, "score": 46203.85003807167 }, { "content": "pub fn list_corpora(connection: &PgConnection) -> Vec<Corpus> {\n\n corpora::table\n\n .order(corpora::name.asc())\n\n .load(connection)\n\n .unwrap_or_default()\n\n}", "file_path": "src/backend/corpora_aggregate.rs", "rank": 30, "score": 46203.85003807167 }, { "content": "/// Memoized getter for a `Service` record from the backend\n\npub fn get_sync_service<S: ::std::hash::BuildHasher>(\n\n service_name: &str,\n\n services: &Arc<Mutex<HashMap<String, Option<Service>, S>>>,\n\n backend: &Backend,\n\n) -> Option<Service>\n\n{\n\n let mut services = services\n\n .lock()\n\n .unwrap_or_else(|_| panic!(\"Failed to obtain Mutex lock in get_sync_services\"));\n\n services\n\n .entry(service_name.to_string())\n\n .or_insert_with(\n\n || match Service::find_by_name(service_name, &backend.connection) {\n\n Ok(s) => Some(s),\n\n _ => None,\n\n },\n\n )\n\n .clone()\n\n}\n\n\n", "file_path": "src/dispatcher/server.rs", "rank": 31, "score": 46203.85003807167 }, { "content": "/// Returns an open file handle to the task's entry\n\npub fn prepare_input_stream(task: &Task) -> Result<File, io::Error> {\n\n let entry_path = Path::new(&task.entry);\n\n File::open(entry_path)\n\n}\n\n\n", "file_path": "src/helpers.rs", "rank": 32, "score": 44074.132545183966 }, { "content": "/// Parses a log string which follows the `LaTeXML` convention\n\n/// (described at [the Manual](http://dlmf.nist.gov/LaTeXML/manual/errorcodes/index.html))\n\npub fn parse_log(task_id: i64, log: &str) -> Vec<NewTaskMessage> {\n\n let mut messages: Vec<NewTaskMessage> = Vec::new();\n\n let mut in_details_mode = false;\n\n\n\n for line in log.lines() {\n\n // Skip empty lines\n\n if line.is_empty() {\n\n continue;\n\n }\n\n // If we have found a message header and we're collecting details:\n\n if in_details_mode {\n\n // If the line starts with tab, we are indeed reading in 
details\n\n if line.starts_with('\\t') {\n\n // Append details line to the last message\n\n let mut last_message = messages.pop().unwrap_or_else(|| {\n\n panic!(\"parse_log tried to parse details without having a log message, invalid log file?\")\n\n });\n\n let mut truncated_details = last_message.details().to_string() + \"\\n\" + line;\n\n utf_truncate(&mut truncated_details, 2000);\n\n last_message.set_details(truncated_details);\n", "file_path": "src/helpers.rs", "rank": 33, "score": 42541.43892922827 }, { "content": "/// Transfer the data contained within `Reader` to a `Writer`, assuming it was a single file\n\npub fn single_file_transfer(tex_target: &str, reader: &Reader, writer: &mut Writer) {\n\n // In a \"raw\" read, we don't know the data size in advance. So we bite the\n\n // bullet and read the usually tiny tex file in memory,\n\n // obtaining a size estimate\n\n let mut raw_data = Vec::new();\n\n while let Ok(chunk) = reader.read_data(BUFFER_SIZE) {\n\n raw_data.extend(chunk.into_iter());\n\n }\n\n let mut ok_header = false;\n\n match writer.write_header_new(tex_target, raw_data.len() as i64) {\n\n Ok(_) => {\n\n ok_header = true;\n\n },\n\n Err(e) => {\n\n println!(\"Couldn't write header: {:?}\", e);\n\n },\n\n }\n\n if ok_header {\n\n match writer.write_data(raw_data) {\n\n Ok(_) => {},\n\n Err(e) => println!(\"Failed to write data to {:?} because {:?}\", tex_target, e),\n\n };\n\n }\n\n}\n", "file_path": "src/importer.rs", "rank": 34, "score": 41635.66206249462 }, { "content": "fn aux_stats_compute_percentages(stats_hash: &mut HashMap<String, f64>, total_given: Option<f64>) {\n\n // Compute percentages, now that we have a total\n\n let total: f64 = 1.0_f64.max(match total_given {\n\n None => {\n\n let total_entry = stats_hash.get_mut(\"total\").unwrap();\n\n *total_entry\n\n },\n\n Some(total_num) => total_num,\n\n });\n\n let stats_keys = stats_hash\n\n .iter()\n\n .map(|(k, _)| k.clone())\n\n .collect::<Vec<_>>();\n\n for stats_key in stats_keys 
{\n\n {\n\n let key_percent_value: f64 =\n\n 100.0 * (*stats_hash.get_mut(&stats_key).unwrap() as f64 / total as f64);\n\n let key_percent_rounded: f64 = (key_percent_value * 100.0).round() as f64 / 100.0;\n\n let key_percent_name = stats_key + \"_percent\";\n\n stats_hash.insert(key_percent_name, key_percent_rounded);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/backend/reports.rs", "rank": 35, "score": 40347.86793845831 }, { "content": "use crate::importer::Importer;\n\nuse crate::models::{Corpus, NewCorpus, Task};\n\nuse pericortex::worker::Worker;\n\n\n\n/// `Worker` for initializing/importing a new corpus into `CorTeX`\n\n#[derive(Debug, Clone)]\n\npub struct InitWorker {\n\n /// name of the service (\"init\")\n\n pub service: String,\n\n /// version, as usual\n\n pub version: f32,\n\n /// message size, as usual\n\n pub message_size: usize,\n\n /// full URL (including port) to task source/dispatcher\n\n pub source: String,\n\n /// full URL (including port) to task sink/receiver\n\n pub sink: String,\n\n /// address to the Task store backend\n\n /// (special case, only for the init service, third-party workers can't access the Task store\n\n /// directly)\n", "file_path": "src/worker.rs", "rank": 46, "score": 21.82116246402805 }, { "content": "/// Manager struct responsible for dispatching and receiving tasks\n\npub struct TaskManager {\n\n /// port for requesting/dispatching jobs\n\n pub source_port: usize,\n\n /// port for responding/receiving results\n\n pub result_port: usize,\n\n /// the size of the dispatch queue\n\n /// (also the batch size for Task store queue requests)\n\n pub queue_size: usize,\n\n /// size of an individual message chunk sent via zeromq\n\n /// (keep this small to avoid large RAM use, increase to reduce network bandwidth)\n\n pub message_size: usize,\n\n /// address for the Task store postgres endpoint\n\n pub backend_address: String,\n\n}\n\n\n\nimpl Default for TaskManager {\n\n fn default() -> TaskManager {\n\n TaskManager {\n\n 
source_port: 51695,\n", "file_path": "src/dispatcher/manager.rs", "rank": 47, "score": 21.1344574080765 }, { "content": " result_port: 51696,\n\n queue_size: 100,\n\n message_size: 100_000,\n\n backend_address: DEFAULT_DB_ADDRESS.to_string(),\n\n }\n\n }\n\n}\n\n\n\nimpl TaskManager {\n\n /// Starts a new manager, spinning of dispatch/sink servers, listening on the specified ports\n\n pub fn start(&self, job_limit: Option<usize>) -> Result<(), Error> {\n\n // We'll use some local memoization shared between source and sink:\n\n let services: HashMap<String, Option<Service>> = HashMap::new();\n\n let progress_queue: HashMap<i64, TaskProgress> = HashMap::new();\n\n let done_queue: Vec<TaskReport> = Vec::new();\n\n\n\n let services_arc = Arc::new(Mutex::new(services));\n\n let progress_queue_arc = Arc::new(Mutex::new(progress_queue));\n\n let done_queue_arc = Arc::new(Mutex::new(done_queue));\n\n\n", "file_path": "src/dispatcher/manager.rs", "rank": 48, "score": 19.37941558182992 }, { "content": "use crate::backend;\n\nuse crate::dispatcher::server;\n\nuse crate::helpers::TaskReport;\n\nuse std::error::Error;\n\nuse std::sync::Arc;\n\nuse std::sync::Mutex;\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\n/// Specifies the binding and operation parameters for a thread that saves finalized tasks to the DB\n\npub struct Finalize {\n\n /// the DB address to bind on\n\n pub backend_address: String,\n\n /// Maximum number of jobs before manager termination (optional)\n\n pub job_limit: Option<usize>,\n\n}\n\n\n\nimpl Finalize {\n\n /// Start the finalize loop, checking for new completed tasks every second\n\n pub fn start(&self, done_queue_arc: &Arc<Mutex<Vec<TaskReport>>>) -> Result<(), Box<dyn Error>> {\n", "file_path": "src/dispatcher/finalize.rs", "rank": 49, "score": 18.695781429702368 }, { "content": " pub queue_size: usize,\n\n /// size of an individual message chunk sent via zeromq\n\n /// (keep this small to avoid large RAM use, increase to reduce network 
bandwidth)\n\n pub message_size: usize,\n\n /// address for the Task store postgres endpoint\n\n pub backend_address: String,\n\n}\n\n\n\nimpl Ventilator {\n\n /// Starts a new dispatch `Server` (ZMQ Ventilator), to serve tasks to processing workers.\n\n /// The ventilator shares state with other manager threads via queues for tasks in progress,\n\n /// as well as a queue for completed tasks pending persisting to disk.\n\n /// A job limit can be provided as a termination condition for the sink server.\n\n pub fn start(\n\n &self,\n\n services_arc: &Arc<Mutex<HashMap<String, Option<Service>>>>,\n\n progress_queue_arc: &Arc<Mutex<HashMap<i64, TaskProgress>>>,\n\n done_queue_arc: &Arc<Mutex<Vec<TaskReport>>>,\n\n job_limit: Option<usize>,\n\n ) -> Result<(), Box<dyn Error>> {\n", "file_path": "src/dispatcher/ventilator.rs", "rank": 50, "score": 17.301520743340674 }, { "content": "use std::collections::HashMap;\n\nuse std::io::Read;\n\nuse std::sync::Arc;\n\nuse std::sync::Mutex;\n\nuse time;\n\n\n\nuse crate::backend;\n\nuse crate::dispatcher::server;\n\nuse crate::helpers;\n\nuse crate::helpers::{NewTaskMessage, TaskProgress, TaskReport, TaskStatus};\n\nuse crate::models::{Service, WorkerMetadata};\n\nuse std::error::Error;\n\nuse zmq::SNDMORE;\n\n\n\n/// Specifies the binding and operation parameters for a ZMQ ventilator component\n\npub struct Ventilator {\n\n /// port to listen on\n\n pub port: usize,\n\n /// the size of the dispatch queue\n\n /// (also the batch size for Task store queue requests)\n", "file_path": "src/dispatcher/ventilator.rs", "rank": 51, "score": 17.238247980162313 }, { "content": " /// warning tasks in run\n\n pub warning: i32,\n\n /// no_problem tasks in run\n\n pub no_problem: i32,\n\n /// in_progress tasks in run\n\n pub in_progress: i32,\n\n /// start time of run, formatted for a report\n\n pub start_time: String,\n\n /// end time of run, formatted for a report\n\n pub end_time: String,\n\n /// initiator of the run\n\n pub owner: 
String,\n\n /// description of the run\n\n pub description: String,\n\n}\n\nimpl RunMetadata {\n\n /// f32 type cast for the run frequency fields\n\n pub fn field_f32(&self, field: &str) -> f32 {\n\n let field_i32 = match field {\n\n \"invalid\" => self.invalid,\n", "file_path": "src/models/historical_runs.rs", "rank": 52, "score": 16.584354083969348 }, { "content": "const BUFFER_SIZE: usize = 10_240;\n\n\n\n/// Struct for performing corpus imports into `CorTeX`\n\npub struct Importer {\n\n /// a `Corpus` to be imported, containing all relevant metadata\n\n pub corpus: Corpus,\n\n /// a `Backend` on which to persist the import into the Task store\n\n pub backend: Backend,\n\n /// the current working directory, to resolve relative paths\n\n pub cwd: PathBuf,\n\n}\n\nimpl Default for Importer {\n\n fn default() -> Importer {\n\n let default_backend = Backend::default();\n\n let name = \"default\";\n\n default_backend\n\n .add(&NewCorpus {\n\n path: \".\".to_string(),\n\n name: name.to_string(),\n\n complex: false,\n", "file_path": "src/importer.rs", "rank": 53, "score": 16.54119006628186 }, { "content": " /// percent to total\n\n pub percent: f32,\n\n /// total number of jobs\n\n pub total: i32,\n\n /// start time of run, formatted for a report\n\n pub start_time: String,\n\n /// end time of run, formatted for a report\n\n pub end_time: String,\n\n /// initiator of the run\n\n pub owner: String,\n\n /// description of the run\n\n pub description: String,\n\n}\n\nimpl RunMetadataStack {\n\n /// Transforms to a vega-lite Stack -near representation\n\n pub fn transform(runs_meta: &[RunMetadata]) -> Vec<RunMetadataStack> {\n\n let mut start_time_guard = HashSet::new();\n\n let mut runs_meta_vega = Vec::new();\n\n for run in runs_meta.iter() {\n\n // Avoid adding more than one run at a given start_time for the vega metadata stack,\n", "file_path": "src/models/historical_runs.rs", "rank": 54, "score": 16.03670134631802 }, { "content": " /// time of last dispatched task\n\n 
pub time_last_dispatch: SystemTime,\n\n /// time of last returned job result\n\n pub time_last_return: Option<SystemTime>,\n\n /// identity of this worker, usually hostname:pid\n\n pub name: String,\n\n}\n\n\n\nimpl From<WorkerMetadata> for HashMap<String, String> {\n\n fn from(worker: WorkerMetadata) -> HashMap<String, String> {\n\n let mut wh = HashMap::new();\n\n wh.insert(\"id\".to_string(), worker.id.to_string());\n\n wh.insert(\"service_id\".to_string(), worker.service_id.to_string());\n\n wh.insert(\n\n \"last_dispatched_task_id\".to_string(),\n\n worker.last_dispatched_task_id.to_string(),\n\n );\n\n wh.insert(\n\n \"last_returned_task_id\".to_string(),\n\n match worker.last_returned_task_id {\n", "file_path": "src/models/worker_metadata.rs", "rank": 55, "score": 15.764572044917427 }, { "content": " pub start_time: NaiveDateTime,\n\n /// end timestamp of run, i.e. timestamp of next run initiation\n\n pub end_time: Option<NaiveDateTime>,\n\n /// owner who initiated the run\n\n pub owner: String,\n\n /// description of the purpose of this run\n\n pub description: String,\n\n}\n\n\n\n#[derive(Debug, Serialize, Clone)]\n\n/// A JSON-friendly data structure, used for the frontend reports\n\npub struct RunMetadata {\n\n /// total tasks in run\n\n pub total: i32,\n\n /// invalid tasks in run\n\n pub invalid: i32,\n\n /// fatak tasks in run\n\n pub fatal: i32,\n\n /// error tasks in run\n\n pub error: i32,\n", "file_path": "src/models/historical_runs.rs", "rank": 56, "score": 15.602662309978763 }, { "content": " fn create(&self, connection: &PgConnection) -> Result<usize, Error> {\n\n insert_into(services::table)\n\n .values(self)\n\n .execute(connection)\n\n }\n\n}\n\n\n\nimpl Service {\n\n /// ORM-like until diesel.rs introduces finders for more fields\n\n pub fn find_by_name(name_query: &str, connection: &PgConnection) -> Result<Service, Error> {\n\n use crate::schema::services::name;\n\n services::table\n\n .filter(name.eq(name_query))\n\n 
.get_result(connection)\n\n }\n\n\n\n /// Returns a hash representation of the `Service`, usually for frontend reports\n\n pub fn to_hash(&self) -> HashMap<String, String> {\n\n let mut hm = HashMap::new();\n\n hm.insert(\"id\".to_string(), self.id.to_string());\n", "file_path": "src/models/services.rs", "rank": 57, "score": 15.491845675627095 }, { "content": " .execute(connection)\n\n }\n\n}\n\n\n\n/// Insertable `Corpus` struct\n\n#[derive(Insertable)]\n\n#[table_name = \"corpora\"]\n\npub struct NewCorpus {\n\n /// file system path to corpus root\n\n /// (a corpus is held in a single top-level directory)\n\n pub path: String,\n\n /// a human-readable name for this corpus\n\n pub name: String,\n\n /// are we using multiple files to represent a document entry?\n\n /// (if unsure, always use \"true\")\n\n pub complex: bool,\n\n /// frontend-facing description of the corpus, maybe allow markdown here?\n\n pub description: String,\n\n}\n\nimpl Default for NewCorpus {\n", "file_path": "src/models/corpora.rs", "rank": 58, "score": 15.259662465557497 }, { "content": " pub backend_address: String,\n\n /// thread-local unique identifier\n\n pub identity: String,\n\n}\n\nimpl Default for InitWorker {\n\n fn default() -> InitWorker {\n\n InitWorker {\n\n service: \"init\".to_string(),\n\n version: 0.1,\n\n message_size: 100_000,\n\n source: \"tcp://localhost:51695\".to_string(),\n\n sink: \"tcp://localhost:51696\".to_string(),\n\n backend_address: DEFAULT_DB_ADDRESS.to_string(),\n\n identity: String::new(),\n\n }\n\n }\n\n}\n\nimpl Worker for InitWorker {\n\n fn get_service(&self) -> &str { &self.service }\n\n fn get_source_address(&self) -> Cow<str> { Cow::Borrowed(&self.source) }\n", "file_path": "src/worker.rs", "rank": 59, "score": 15.199882546054395 }, { "content": " /// time of last ventilator dispatch to the service\n\n pub last_dispatched_task_id: i64,\n\n /// time of last sink job received from the service\n\n pub last_returned_task_id: Option<i64>,\n\n /// 
dispatch totals\n\n pub total_dispatched: i32,\n\n /// return totals\n\n pub total_returned: i32,\n\n /// first registered ventilator request for this worker, coincides with insertion in DB\n\n pub first_seen: SystemTime,\n\n /// first time seen in the current dispatcher session\n\n pub session_seen: Option<SystemTime>,\n\n /// time of last dispatched task\n\n pub time_last_dispatch: SystemTime,\n\n /// time of last returned job result\n\n pub time_last_return: Option<SystemTime>,\n\n /// identity of this worker, usually hostname:pid\n\n pub name: String,\n\n}\n\n\n", "file_path": "src/models/worker_metadata.rs", "rank": 60, "score": 14.681820051117764 }, { "content": " /// auto-incremented postgres id\n\n pub id: i32,\n\n /// file system path to corpus root\n\n /// (a corpus is held in a single top-level directory)\n\n pub path: String,\n\n /// a human-readable name for this corpus\n\n pub name: String,\n\n /// are we using multiple files to represent a document entry?\n\n /// (if unsure, always use \"true\")\n\n pub complex: bool,\n\n /// a human-readable description of the corpus, maybe allow markdown here?\n\n pub description: String,\n\n}\n\n\n\nimpl Corpus {\n\n /// ORM-like until diesel.rs introduces finders for more fields\n\n pub fn find_by_name(name_query: &str, connection: &PgConnection) -> Result<Self, Error> {\n\n use crate::schema::corpora::name;\n\n corpora::table.filter(name.eq(name_query)).first(connection)\n\n }\n", "file_path": "src/models/corpora.rs", "rank": 61, "score": 14.658170408372964 }, { "content": "impl CortexInsertable for NewHistoricalRun {\n\n fn create(&self, connection: &PgConnection) -> Result<usize, Error> {\n\n insert_into(historical_runs::table)\n\n .values(self)\n\n .execute(connection)\n\n }\n\n}\n\n\n\nimpl HistoricalRun {\n\n /// Obtain all historical runs for a given `(Corpus, Service)` pair\n\n pub fn find_by(\n\n corpus: &Corpus,\n\n service: &Service,\n\n connection: &PgConnection,\n\n ) -> Result<Vec<HistoricalRun>, 
Error>\n\n {\n\n use crate::schema::historical_runs::dsl::{corpus_id, service_id, start_time};\n\n let runs: Vec<HistoricalRun> = historical_runs::table\n\n .filter(corpus_id.eq(corpus.id))\n\n .filter(service_id.eq(service.id))\n", "file_path": "src/models/historical_runs.rs", "rank": 62, "score": 13.797588969677648 }, { "content": "impl Task {\n\n /// Delete task by entry\n\n pub fn delete_by_entry(&self, connection: &PgConnection) -> Result<usize, Error> {\n\n use crate::schema::tasks::dsl::entry;\n\n delete(tasks::table.filter(entry.eq(&self.entry))).execute(connection)\n\n }\n\n\n\n /// Delete all tasks matching this task's service id\n\n pub fn delete_by_service_id(&self, connection: &PgConnection) -> Result<usize, Error> {\n\n use crate::schema::tasks::dsl::service_id;\n\n delete(tasks::table.filter(service_id.eq(&self.service_id))).execute(connection)\n\n }\n\n\n\n /// Delete task by id\n\n pub fn delete_by_id(&self, connection: &PgConnection) -> Result<usize, Error> {\n\n use crate::schema::tasks::dsl::id;\n\n delete(tasks::table.filter(id.eq(self.id))).execute(connection)\n\n }\n\n\n\n /// Find task by id, error if none\n", "file_path": "src/models/tasks.rs", "rank": 63, "score": 13.758483166851128 }, { "content": "impl NewTask {\n\n fn delete_by_entry(&self, connection: &PgConnection) -> Result<usize, Error> {\n\n use crate::schema::tasks::dsl::entry;\n\n delete(tasks::table.filter(entry.eq(&self.entry))).execute(connection)\n\n }\n\n fn delete_by_service_id(&self, connection: &PgConnection) -> Result<usize, Error> {\n\n use crate::schema::tasks::dsl::service_id;\n\n delete(tasks::table.filter(service_id.eq(&self.service_id))).execute(connection)\n\n }\n\n /// Creates the task unless already present in the DB (entry conflict)\n\n pub fn create_if_new(&self, connection: &PgConnection) -> Result<usize, Error> {\n\n insert_into(tasks::table)\n\n .values(self)\n\n .on_conflict_do_nothing()\n\n .execute(connection)\n\n }\n\n}\n", "file_path": 
"src/models/tasks.rs", "rank": 64, "score": 12.54480306471846 }, { "content": "pub struct NewService {\n\n /// a human-readable name\n\n pub name: String,\n\n /// a floating-point number to mark the current version (e.g. 0.01)\n\n pub version: f32,\n\n /// the expected input format (e.g. tex)\n\n pub inputformat: String,\n\n /// the produced output format (e.g. html)\n\n pub outputformat: String,\n\n // pub xpath : String,\n\n // pub resource : String,\n\n /// prerequisite input conversion service, if any\n\n pub inputconverter: Option<String>,\n\n /// is this service requiring more than the main textual content of a document?\n\n /// mark \"true\" if unsure\n\n pub complex: bool,\n\n /// a human-readable description\n\n pub description: String,\n\n}\n\nimpl CortexInsertable for NewService {\n", "file_path": "src/models/services.rs", "rank": 65, "score": 12.458619408222521 }, { "content": "use std::collections::HashMap;\n\nuse std::thread;\n\nuse std::time::SystemTime;\n\n\n\nuse diesel::pg::PgConnection;\n\nuse diesel::result::Error;\n\nuse diesel::*;\n\nuse diesel::{insert_into, update};\n\n\n\nuse serde::Serialize;\n\n\n\nuse crate::backend;\n\nuse crate::schema::worker_metadata;\n\n\n\n#[derive(Insertable, Debug)]\n\n#[table_name = \"worker_metadata\"]\n\n/// Metadata collection for workers, updated by the dispatcher upon zmq transactions\n\npub struct NewWorkerMetadata {\n\n /// associated service for this worker metadata set\n\n pub service_id: i32,\n", "file_path": "src/models/worker_metadata.rs", "rank": 66, "score": 12.362246628748464 }, { "content": " );\n\n }\n\n\n\n if fetched_report.len() >= page_size as usize {\n\n global.insert(\"offset_max_false\".to_string(), \"true\".to_string());\n\n }\n\n global.insert(\n\n \"next_offset\".to_string(),\n\n (from_offset + page_size).to_string(),\n\n );\n\n\n\n global.insert(\"offset\".to_string(), offset.to_string());\n\n global.insert(\"page_size\".to_string(), page_size.to_string());\n\n 
global.insert(\"to_offset\".to_string(), to_offset.to_string());\n\n global.insert(\"report_time\".to_string(), time_val);\n\n\n\n fetched_report\n\n}\n", "file_path": "src/frontend/cached/task_report.rs", "rank": 67, "score": 11.92806997623212 }, { "content": " pub token: String,\n\n /// a plain text description for the purpose of the rerun\n\n pub description: String,\n\n}\n\n\n\n/// Global configuration for the frontend executable, read in at boot\n\n#[derive(Deserialize, Serialize, Debug, Clone)]\n\npub struct FrontendConfig {\n\n /// a captcha secret registered with google\n\n pub captcha_secret: String,\n\n /// a list of known password-like tokens that allow users to trigger reruns\n\n pub rerun_tokens: HashMap<String, String>,\n\n}\n\n\n\n/// A backend-retrieved report used for filling in Tera-templated pages\n\n#[derive(Serialize, Default)]\n\npub struct TemplateContext {\n\n /// global data, as per Rocket examples\n\n pub global: HashMap<String, String>,\n\n /// tabular data for reporting on corpora\n", "file_path": "src/frontend/params.rs", "rank": 68, "score": 11.885021991802503 }, { "content": " }\n\n }\n\n}\n\nimpl CortexInsertable for NewTaskMessage {\n\n fn create(&self, connection: &PgConnection) -> Result<usize, Error> {\n\n use crate::helpers::NewTaskMessage::*;\n\n match *self {\n\n Info(ref record) => record.create(connection),\n\n Warning(ref record) => record.create(connection),\n\n Error(ref record) => record.create(connection),\n\n Fatal(ref record) => record.create(connection),\n\n Invalid(ref record) => record.create(connection),\n\n }\n\n }\n\n}\n\n\n\nimpl NewTaskMessage {\n\n /// Instantiates an appropriate insertable LogRecord object based on the raw message components\n\n pub fn new(\n\n task_id: i64,\n", "file_path": "src/helpers.rs", "rank": 69, "score": 11.632645080210443 }, { "content": "#[derive(Identifiable, Queryable, Clone, Debug, Serialize)]\n\n#[table_name = \"worker_metadata\"]\n\n/// Metadata collection for workers, 
updated by the dispatcher upon zmq transactions\n\npub struct WorkerMetadata {\n\n /// task primary key, auto-incremented by postgresql\n\n pub id: i32,\n\n /// associated service for this worker metadata set\n\n pub service_id: i32,\n\n /// time of last ventilator dispatch to the service\n\n pub last_dispatched_task_id: i64,\n\n /// time of last sink job received from the service\n\n pub last_returned_task_id: Option<i64>,\n\n /// dispatch totals\n\n pub total_dispatched: i32,\n\n /// return totals\n\n pub total_returned: i32,\n\n /// first registered ventilator request for this worker, coincides with insertion in DB\n\n pub first_seen: SystemTime,\n\n /// first time seen in the current dispatcher session\n\n pub session_seen: Option<SystemTime>,\n", "file_path": "src/models/worker_metadata.rs", "rank": 70, "score": 11.574999580124212 }, { "content": " pub task: Task,\n\n /// time of entering the job queue / first dispatch\n\n pub created_at: i64,\n\n /// number of dispatch retries\n\n pub retries: i64,\n\n}\n\nimpl TaskProgress {\n\n /// What is the latest admissible time for this task to be completed?\n\n pub fn expected_at(&self) -> i64 { self.created_at + ((self.retries + 1) * 3600) }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\n/// Completed task, with its processing status and report messages\n\npub struct TaskReport {\n\n /// the `Task` we are reporting on\n\n pub task: Task,\n\n /// the reported processing status\n\n pub status: TaskStatus,\n\n /// a vector of `TaskMessage` log entries\n\n pub messages: Vec<NewTaskMessage>,\n", "file_path": "src/helpers.rs", "rank": 71, "score": 11.413728355177156 }, { "content": " fn default() -> Self {\n\n NewCorpus {\n\n name: \"mock corpus\".to_string(),\n\n path: \".\".to_string(),\n\n complex: true,\n\n description: String::new(),\n\n }\n\n }\n\n}\n\nimpl CortexInsertable for NewCorpus {\n\n fn create(&self, connection: &PgConnection) -> Result<usize, Error> {\n\n 
insert_into(corpora::table).values(self).execute(connection)\n\n }\n\n}\n", "file_path": "src/models/corpora.rs", "rank": 72, "score": 11.208337644271387 }, { "content": " /// optionally, description of the rerun (default is \"rerun\")\n\n pub description_opt: Option<String>,\n\n}\n\n\n\n/// Instance methods\n\nimpl Backend {\n\n /// Insert a vector of new `NewTask` tasks into the Task store\n\n /// For example, on import, or when a new service is activated on a corpus\n\n pub fn mark_imported(&self, imported_tasks: &[NewTask]) -> Result<usize, Error> {\n\n mark::mark_imported(&self.connection, imported_tasks)\n\n }\n\n /// Insert a vector of `TaskReport` reports into the Task store, also marking their tasks as\n\n /// completed with the correct status code.\n\n pub fn mark_done(&self, reports: &[TaskReport]) -> Result<(), Error> {\n\n mark::mark_done(&self.connection, reports)\n\n }\n\n /// Given a complex selector, of a `Corpus`, `Service`, and the optional `severity`, `category`\n\n /// and `what` mark all matching tasks to be rerun\n\n pub fn mark_rerun(&self, options: RerunOptions) -> Result<(), Error> {\n\n mark::mark_rerun(&self.connection, options)\n", "file_path": "src/backend.rs", "rank": 73, "score": 10.926418146303325 }, { "content": " /// ORM-like until diesel.rs introduces finders for more fields\n\n pub fn find_by_path(path_query: &str, connection: &PgConnection) -> Result<Self, Error> {\n\n use crate::schema::corpora::path;\n\n corpora::table.filter(path.eq(path_query)).first(connection)\n\n }\n\n /// Return a hash representation of the corpus, usually for frontend reports\n\n pub fn to_hash(&self) -> HashMap<String, String> {\n\n let mut hm = HashMap::new();\n\n hm.insert(\"name\".to_string(), self.name.clone());\n\n hm.insert(\"path\".to_string(), self.path.clone());\n\n hm.insert(\"description\".to_string(), self.description.clone());\n\n hm\n\n }\n\n\n\n /// Return a vector of services currently activated on this corpus\n\n pub fn 
select_services(&self, connection: &PgConnection) -> Result<Vec<Service>, Error> {\n\n use crate::schema::tasks::dsl::{corpus_id, service_id};\n\n let corpus_service_ids_query = tasks::table\n\n .select(service_id)\n\n .distinct()\n", "file_path": "src/models/corpora.rs", "rank": 74, "score": 10.915077013011441 }, { "content": " /// Generic delete method, uses primary \"id\" field\n\n pub fn delete<Model: CortexDeletable>(&self, object: &Model) -> Result<usize, Error> {\n\n object.delete_by(&self.connection, \"id\")\n\n }\n\n /// Delete all entries matching the \"field\" value of a given object\n\n pub fn delete_by<Model: CortexDeletable>(\n\n &self,\n\n object: &Model,\n\n field: &str,\n\n ) -> Result<usize, Error>\n\n {\n\n object.delete_by(&self.connection, field)\n\n }\n\n /// Generic addition method, attempting to insert in the DB a Task store datum\n\n /// applicable for any struct implementing the `CortexORM` trait\n\n /// (for example `Corpus`, `Service`, `Task`)\n\n pub fn add<Model: CortexInsertable>(&self, object: &Model) -> Result<usize, Error> {\n\n object.create(&self.connection)\n\n }\n\n\n", "file_path": "src/backend.rs", "rank": 75, "score": 10.902206116388454 }, { "content": "use diesel::*;\n\nuse dotenv::dotenv;\n\nuse std::collections::HashMap;\n\n\n\nuse crate::concerns::{CortexDeletable, CortexInsertable};\n\nuse crate::helpers::{TaskReport, TaskStatus};\n\nuse crate::models::{Corpus, NewTask, Service, Task};\n\n\n\n/// The production database postgresql address, set from the .env configuration file\n\npub const DEFAULT_DB_ADDRESS: &str = dotenv!(\"DATABASE_URL\");\n\n/// The test database postgresql address, set from the .env configuration file\n\npub const TEST_DB_ADDRESS: &str = dotenv!(\"TEST_DATABASE_URL\");\n\n\n\n/// Provides an interface to the Postgres task store\n\npub struct Backend {\n\n /// The Diesel PgConnection object\n\n pub connection: PgConnection,\n\n}\n\nimpl Default for Backend {\n\n fn default() -> Self {\n\n 
dotenv().ok();\n\n let connection = connection_at(DEFAULT_DB_ADDRESS);\n\n\n\n Backend { connection }\n\n }\n\n}\n\n\n\n/// Constructs a new Task store representation from a Postgres DB address\n", "file_path": "src/backend.rs", "rank": 76, "score": 10.838801198944896 }, { "content": " fn get_sink_address(&self) -> Cow<str> { Cow::Borrowed(&self.sink) }\n\n fn get_identity(&self) -> &str { &self.identity }\n\n fn set_identity(&mut self, identity: String) { self.identity = identity; }\n\n fn message_size(&self) -> usize { self.message_size }\n\n\n\n fn convert(&self, path_opt: &Path) -> Result<File, Box<dyn Error>> {\n\n let path = path_opt.to_str().unwrap().to_string();\n\n let name = path\n\n .rsplit_once('/')\n\n .map(|x| x.1)\n\n .unwrap_or(&path)\n\n .to_lowercase(); // TODO: this is Unix path only\n\n let backend = backend::from_address(&self.backend_address);\n\n let corpus = NewCorpus {\n\n name,\n\n path: path.clone(),\n\n complex: true,\n\n description: String::new(),\n\n };\n\n // Add the new corpus.\n", "file_path": "src/worker.rs", "rank": 77, "score": 10.315953837505024 }, { "content": " fn set_details(&mut self, new_details: String) { self.details = new_details; }\n\n fn severity(&self) -> &str { \"warning\" }\n\n}\n\nimpl LogRecord for NewLogWarning {\n\n fn task_id(&self) -> i64 { self.task_id }\n\n fn category(&self) -> &str { &self.category }\n\n fn what(&self) -> &str { &self.what }\n\n fn details(&self) -> &str { &self.details }\n\n fn set_details(&mut self, new_details: String) { self.details = new_details; }\n\n fn severity(&self) -> &str { \"warning\" }\n\n}\n\nimpl CortexInsertable for NewLogWarning {\n\n fn create(&self, connection: &PgConnection) -> Result<usize, Error> {\n\n insert_into(log_warnings::table)\n\n .values(self)\n\n .execute(connection)\n\n }\n\n}\n\nimpl LogRecord for LogError {\n\n fn task_id(&self) -> i64 { self.task_id }\n", "file_path": "src/models/messages.rs", "rank": 78, "score": 10.203719616952787 }, { "content": 
"//! Aggregate methods, to be used by backend\n\nuse crate::helpers::TaskStatus;\n\nuse crate::models::{Service, Task};\n\nuse crate::schema::tasks;\n\nuse diesel::pg::PgConnection;\n\nuse diesel::result::Error;\n\nuse diesel::*;\n\nuse rand::{thread_rng, Rng};\n\n\n\npub(crate) fn fetch_tasks(\n\n connection: &PgConnection,\n\n service: &Service,\n\n queue_size: usize,\n\n) -> Result<Vec<Task>, Error>\n\n{\n\n use crate::schema::tasks::dsl::{service_id, status};\n\n let mut rng = thread_rng();\n\n let mark: u16 = 1 + rng.gen::<u16>();\n\n\n\n let mut marked_tasks: Vec<Task> = Vec::new();\n", "file_path": "src/backend/tasks_aggregate.rs", "rank": 79, "score": 10.105107010204256 }, { "content": " backend\n\n .add(&NewTask {\n\n entry: corpus_path.clone(),\n\n service_id: 1, // Init service always has id 1\n\n corpus_id: 1,\n\n status: TaskStatus::TODO.raw(),\n\n })\n\n .unwrap();\n\n\n\n // Let us thread out a ventilator on a special port\n\n // Start up a ventilator/sink pair\n\n thread::spawn(move || {\n\n let manager = TaskManager {\n\n source_port: 5757,\n\n result_port: 5758,\n\n queue_size: 100_000,\n\n message_size: 100,\n\n backend_address: DEFAULT_DB_ADDRESS.to_string(),\n\n };\n\n assert!(manager.start(Some(1)).is_ok());\n", "file_path": "examples/tex_to_html_import.rs", "rank": 80, "score": 10.083035979139298 }, { "content": "}\n\nimpl LogRecord for LogFatal {\n\n fn task_id(&self) -> i64 { self.task_id }\n\n fn category(&self) -> &str { &self.category }\n\n fn what(&self) -> &str { &self.what }\n\n fn details(&self) -> &str { &self.details }\n\n fn set_details(&mut self, new_details: String) { self.details = new_details; }\n\n fn severity(&self) -> &str { \"fatal\" }\n\n}\n\nimpl LogRecord for NewLogFatal {\n\n fn task_id(&self) -> i64 { self.task_id }\n\n fn category(&self) -> &str { &self.category }\n\n fn what(&self) -> &str { &self.what }\n\n fn details(&self) -> &str { &self.details }\n\n fn set_details(&mut self, new_details: String) { self.details 
= new_details; }\n\n fn severity(&self) -> &str { \"fatal\" }\n\n}\n\nimpl CortexInsertable for NewLogFatal {\n\n fn create(&self, connection: &PgConnection) -> Result<usize, Error> {\n\n insert_into(log_fatals::table)\n", "file_path": "src/models/messages.rs", "rank": 81, "score": 10.042158354593742 }, { "content": " \"total\" => self.total,\n\n \"fatal\" => self.fatal,\n\n \"error\" => self.error,\n\n \"warning\" => self.warning,\n\n \"no_problem\" => self.no_problem,\n\n \"in_progress\" => self.in_progress,\n\n _ => unimplemented!(),\n\n };\n\n field_i32 as f32\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Clone)]\n\n/// A JSON-friendly data structure, used for vega-lite Stack figures\n\n/// https://vega.github.io/vega-lite/docs/stack.html\n\npub struct RunMetadataStack {\n\n /// type of messages\n\n pub severity: String,\n\n /// raw severity index\n\n pub severity_numeric: i32,\n", "file_path": "src/models/historical_runs.rs", "rank": 82, "score": 9.928375351885826 }, { "content": " .values(self)\n\n .execute(connection)\n\n }\n\n}\n\nimpl LogRecord for LogInvalid {\n\n fn task_id(&self) -> i64 { self.task_id }\n\n fn category(&self) -> &str { &self.category }\n\n fn what(&self) -> &str { &self.what }\n\n fn details(&self) -> &str { &self.details }\n\n fn set_details(&mut self, new_details: String) { self.details = new_details; }\n\n fn severity(&self) -> &str { \"invalid\" }\n\n}\n\nimpl LogRecord for NewLogInvalid {\n\n fn task_id(&self) -> i64 { self.task_id }\n\n fn category(&self) -> &str { &self.category }\n\n fn what(&self) -> &str { &self.what }\n\n fn details(&self) -> &str { &self.details }\n\n fn set_details(&mut self, new_details: String) { self.details = new_details; }\n\n fn severity(&self) -> &str { \"invalid\" }\n\n}\n\nimpl CortexInsertable for NewLogInvalid {\n\n fn create(&self, connection: &PgConnection) -> Result<usize, Error> {\n\n insert_into(log_invalids::table)\n\n .values(self)\n\n .execute(connection)\n\n }\n\n}", "file_path": 
"src/models/messages.rs", "rank": 83, "score": 9.809808719765174 }, { "content": " fn category(&self) -> &str { &self.category }\n\n fn what(&self) -> &str { &self.what }\n\n fn details(&self) -> &str { &self.details }\n\n fn set_details(&mut self, new_details: String) { self.details = new_details; }\n\n fn severity(&self) -> &str { \"error\" }\n\n}\n\nimpl LogRecord for NewLogError {\n\n fn task_id(&self) -> i64 { self.task_id }\n\n fn category(&self) -> &str { &self.category }\n\n fn what(&self) -> &str { &self.what }\n\n fn details(&self) -> &str { &self.details }\n\n fn set_details(&mut self, new_details: String) { self.details = new_details; }\n\n fn severity(&self) -> &str { \"error\" }\n\n}\n\nimpl CortexInsertable for NewLogError {\n\n fn create(&self, connection: &PgConnection) -> Result<usize, Error> {\n\n insert_into(log_errors::table)\n\n .values(self)\n\n .execute(connection)\n\n }\n", "file_path": "src/models/messages.rs", "rank": 84, "score": 9.809453139696139 }, { "content": "impl TaskStatus {\n\n /// Maps the enumeration into the raw ints for the Task store\n\n pub fn raw(&self) -> i32 {\n\n match *self {\n\n TaskStatus::TODO => 0,\n\n TaskStatus::NoProblem => -1,\n\n TaskStatus::Warning => -2,\n\n TaskStatus::Error => -3,\n\n TaskStatus::Fatal => -4,\n\n TaskStatus::Invalid => -5,\n\n TaskStatus::Blocked(x) | TaskStatus::Queued(x) => x,\n\n }\n\n }\n\n /// Maps the enumeration into the raw severity string for the Task store logs / frontend reports\n\n pub fn to_key(&self) -> String {\n\n match *self {\n\n TaskStatus::NoProblem => \"no_problem\",\n\n TaskStatus::Warning => \"warning\",\n\n TaskStatus::Error => \"error\",\n\n TaskStatus::Fatal => \"fatal\",\n", "file_path": "src/helpers.rs", "rank": 85, "score": 9.802387713898367 }, { "content": " \"session_seen\".to_string(),\n\n match worker.session_seen {\n\n Some(session_seen) => since_string(session_seen, &mut fresh),\n\n None => String::new(),\n\n },\n\n );\n\n\n\n wh.insert(\n\n 
\"time_last_dispatch\".to_string(),\n\n since_string(worker.time_last_dispatch, &mut fresh),\n\n );\n\n wh.insert(\n\n \"time_last_return\".to_string(),\n\n match worker.time_last_return {\n\n Some(time_last_return) => since_string(time_last_return, &mut fresh),\n\n None => String::new(),\n\n },\n\n );\n\n wh.insert(\n\n \"fresh\".to_string(),\n\n if fresh { \"fresh\" } else { \"stale\" }.to_string(),\n\n );\n\n wh.insert(\"name\".to_string(), worker.name);\n\n wh\n\n }\n\n}\n\n\n", "file_path": "src/models/worker_metadata.rs", "rank": 86, "score": 9.772762258785434 }, { "content": " pub name: String,\n\n /// a floating-point number to mark the current version (e.g. 0.01)\n\n pub version: f32,\n\n /// the expected input format (e.g. tex)\n\n pub inputformat: String,\n\n /// the produced output format (e.g. html)\n\n pub outputformat: String,\n\n // pub xpath : String,\n\n // pub resource : String,\n\n /// prerequisite input conversion service, if any\n\n pub inputconverter: Option<String>,\n\n /// is this service requiring more than the main textual content of a document?\n\n /// mark \"true\" if unsure\n\n pub complex: bool,\n\n /// a human-readable description\n\n pub description: String,\n\n}\n\n/// Insertable struct for `Service`\n\n#[derive(Insertable, Clone, Debug)]\n\n#[table_name = \"services\"]\n", "file_path": "src/models/services.rs", "rank": 87, "score": 9.736177373362306 }, { "content": " pub corpora: Option<Vec<HashMap<String, String>>>,\n\n /// tabular data for reporting on services\n\n pub services: Option<Vec<HashMap<String, String>>>,\n\n /// tabular data for reporting on entries\n\n pub entries: Option<Vec<HashMap<String, String>>>,\n\n /// tabular data for reporting on message `categories`\n\n pub categories: Option<Vec<HashMap<String, String>>>,\n\n /// tabular data for reporting on message `whats`\n\n pub whats: Option<Vec<HashMap<String, String>>>,\n\n /// tabular data for reporting on workers\n\n pub workers: Option<Vec<HashMap<String, 
String>>>,\n\n /// tabular data for reporting on rerun history\n\n pub history: Option<Vec<RunMetadata>>,\n\n /// serialized data for easy plotting of rerun history\n\n pub history_serialized: Option<String>,\n\n}\n", "file_path": "src/frontend/params.rs", "rank": 88, "score": 9.716616447074188 }, { "content": " .order(start_time.desc())\n\n .get_results(connection)?;\n\n Ok(runs)\n\n }\n\n\n\n /// Obtain a currently ongoing run entry for a `(Corpus, Service)` pair, if any\n\n pub fn find_current(\n\n corpus: &Corpus,\n\n service: &Service,\n\n connection: &PgConnection,\n\n ) -> Result<Option<HistoricalRun>, Error>\n\n {\n\n use crate::schema::historical_runs::dsl::{corpus_id, end_time, service_id};\n\n historical_runs::table\n\n .filter(corpus_id.eq(corpus.id))\n\n .filter(service_id.eq(service.id))\n\n .filter(end_time.is_null())\n\n .first(connection)\n\n .optional()\n\n }\n", "file_path": "src/models/historical_runs.rs", "rank": 89, "score": 9.601750695901014 }, { "content": "impl LogRecord for NewLogInfo {\n\n fn task_id(&self) -> i64 { self.task_id }\n\n fn category(&self) -> &str { &self.category }\n\n fn what(&self) -> &str { &self.what }\n\n fn details(&self) -> &str { &self.details }\n\n fn set_details(&mut self, new_details: String) { self.details = new_details; }\n\n fn severity(&self) -> &str { \"info\" }\n\n}\n\nimpl CortexInsertable for NewLogInfo {\n\n fn create(&self, connection: &PgConnection) -> Result<usize, Error> {\n\n insert_into(log_infos::table)\n\n .values(self)\n\n .execute(connection)\n\n }\n\n}\n\nimpl LogRecord for LogWarning {\n\n fn task_id(&self) -> i64 { self.task_id }\n\n fn category(&self) -> &str { &self.category }\n\n fn what(&self) -> &str { &self.what }\n\n fn details(&self) -> &str { &self.details }\n", "file_path": "src/models/messages.rs", "rank": 90, "score": 9.484435339557315 }, { "content": "// Copyright 2015-2018 Deyan Ginev. 
See the LICENSE\n\n// file at the top-level directory of this distribution.\n\n//\n\n// Licensed under the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>.\n\n// This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\n//! Backend models and traits for the `CorTeX` \"Task store\"\n\n\n\nmod tasks;\n\npub use tasks::*;\n\n\n\nmod messages;\n\npub use messages::*;\n\n\n\nmod worker_metadata;\n\npub use worker_metadata::*;\n\n\n\nmod services;\n\npub use services::*;\n\n\n\nmod corpora;\n\npub use corpora::*;\n\n\n\nmod historical_runs;\n\npub use historical_runs::*;\n", "file_path": "src/models.rs", "rank": 91, "score": 9.458875500928468 }, { "content": " pub id: i64,\n\n /// owner task's id\n\n pub task_id: i64,\n\n /// mid-level description (open set)\n\n pub category: String,\n\n /// low-level description (open set)\n\n pub what: String,\n\n /// technical details of the message (e.g. localization info)\n\n pub details: String,\n\n}\n\n#[derive(Insertable, Clone, Debug)]\n\n#[table_name = \"log_invalids\"]\n\n/// A new, insertable, invalid message\n\npub struct NewLogInvalid {\n\n /// owner task's id\n\n pub task_id: i64,\n\n /// mid-level description (open set)\n\n pub category: String,\n\n /// low-level description (open set)\n\n pub what: String,\n\n /// technical details of the message (e.g. 
localization info)\n\n pub details: String,\n\n}\n\n\n\n/// Log actor trait, assumes already Identifiable (for id())\n", "file_path": "src/models/messages.rs", "rank": 92, "score": 9.428200036457138 }, { "content": " page_size,\n\n });\n\n if what.is_none() && severity != Some(\"no_problem\".to_string()) {\n\n let report_json: String = serde_json::to_string(&fetched_report).unwrap();\n\n // don't cache the task list pages\n\n\n\n if let Some(ref mut rc) = redis_connection {\n\n rc.set::<String, String, ()>(cache_key, report_json)\n\n .unwrap();\n\n }\n\n\n\n if let Some(ref mut rc) = redis_connection {\n\n rc.set::<String, String, ()>(cache_key_time, time_val.clone())\n\n .unwrap();\n\n }\n\n }\n\n } else {\n\n // Get the report time, so that the user knows where the data is coming from\n\n time_val = if let Some(ref mut rc) = redis_connection {\n\n match rc.get(cache_key_time) {\n", "file_path": "src/frontend/cached/task_report.rs", "rank": 93, "score": 9.387495559848183 }, { "content": " /// mid-level description (open set)\n\n pub category: String,\n\n /// low-level description (open set)\n\n pub what: String,\n\n /// technical details of the message (e.g. localization info)\n\n pub details: String,\n\n}\n\n\n\n#[derive(Identifiable, Queryable, AsChangeset, Associations, Clone, Debug)]\n\n#[belongs_to(Task)]\n\n/// A warning message, as per the `LaTeXML` convention\n\npub struct LogWarning {\n\n /// task primary key, auto-incremented by postgresql\n\n pub id: i64,\n\n /// owner task's id\n\n pub task_id: i64,\n\n /// mid-level description (open set)\n\n pub category: String,\n\n /// low-level description (open set)\n\n pub what: String,\n", "file_path": "src/models/messages.rs", "rank": 94, "score": 9.319218865923602 }, { "content": " /// low-level description (open set)\n\n pub what: String,\n\n /// technical details of the message (e.g. 
localization info)\n\n pub details: String,\n\n}\n\n\n\n#[derive(Identifiable, Queryable, AsChangeset, Associations, Clone, Debug)]\n\n#[belongs_to(Task)]\n\n/// A fatal message, as per the `LaTeXML` convention\n\npub struct LogFatal {\n\n /// task primary key, auto-incremented by postgresql\n\n pub id: i64,\n\n /// owner task's id\n\n pub task_id: i64,\n\n /// mid-level description (open set)\n\n pub category: String,\n\n /// low-level description (open set)\n\n pub what: String,\n\n /// technical details of the message (e.g. localization info)\n\n pub details: String,\n", "file_path": "src/models/messages.rs", "rank": 95, "score": 9.319218865923602 }, { "content": "use crate::schema::{log_errors, log_fatals, log_infos, log_invalids, log_warnings, tasks};\n\nuse diesel::pg::PgConnection;\n\nuse diesel::result::Error;\n\nuse diesel::*;\n\n\n\nuse super::RerunOptions;\n\nuse crate::concerns::{CortexInsertable, MarkRerun};\n\nuse crate::helpers::{random_mark, TaskReport, TaskStatus};\n\nuse crate::models::{\n\n Corpus, HistoricalRun, LogError, LogFatal, LogInfo, LogInvalid, LogRecord, LogWarning,\n\n NewHistoricalRun, NewTask, Service,\n\n};\n\n\n\npub(crate) fn mark_imported(\n\n connection: &PgConnection,\n\n imported_tasks: &[NewTask],\n\n) -> Result<usize, Error> {\n\n // Insert, but only if the task is new (allow for extension calls with the same method)\n\n insert_into(tasks::table)\n\n .values(imported_tasks)\n", "file_path": "src/backend/mark.rs", "rank": 96, "score": 9.317874796911545 }, { "content": " .execute(&backend.connection)\n\n .unwrap_or(0);\n\n },\n\n }\n\n });\n\n Ok(())\n\n }\n\n /// Update the metadata for a worker which was just received from\n\n pub fn record_received(\n\n identity: String,\n\n service_id: i32,\n\n last_returned_task_id: i64,\n\n backend_address: String,\n\n ) -> Result<(), Error>\n\n {\n\n let now = SystemTime::now();\n\n let _ = thread::spawn(move || {\n\n let backend = backend::from_address(&backend_address);\n\n if let 
Ok(data) = WorkerMetadata::find_by_name(&identity, service_id, &backend.connection) {\n\n let session_seen = match data.session_seen {\n", "file_path": "src/models/worker_metadata.rs", "rank": 97, "score": 9.296970478218629 }, { "content": "//! CORS capabilities for the Rocket frontend\n\nuse rocket::fairing::{Fairing, Info, Kind};\n\nuse rocket::http::Header;\n\nuse std::io::Cursor;\n\n/// Rocket solution for Cross-origin resource sharing\n\npub struct CORS();\n\n\n\nimpl Fairing for CORS {\n\n fn info(&self) -> Info {\n\n Info {\n\n name: \"Add CORS headers to requests\",\n\n kind: Kind::Response,\n\n }\n\n }\n\n\n\n fn on_response(&self, request: &rocket::Request, response: &mut rocket::Response) {\n\n if request.method() == rocket::http::Method::Options\n\n || response.content_type() == Some(rocket::http::ContentType::JSON)\n\n {\n\n response.set_header(Header::new(\"Access-Control-Allow-Origin\", \"*\"));\n", "file_path": "src/frontend/cors.rs", "rank": 98, "score": 9.27645710922452 }, { "content": "//! Cache-backed logic, currently based on Redis\n\npub mod task_report;\n\npub mod worker;\n\n\n\npub use task_report::task_report;\n\npub use worker::cache_worker;\n", "file_path": "src/frontend/cached/mod.rs", "rank": 99, "score": 9.27458127179573 } ]
Rust
egui_glium/src/backend.rs
katyo/egui
02db9ee5835a522ddf04308f259025388abf0185
use crate::{window_settings::WindowSettings, *}; use egui::Color32; #[cfg(target_os = "windows")] use glium::glutin::platform::windows::WindowBuilderExtWindows; use std::time::Instant; #[cfg(feature = "persistence")] const EGUI_MEMORY_KEY: &str = "egui"; #[cfg(feature = "persistence")] const WINDOW_KEY: &str = "window"; #[cfg(feature = "persistence")] fn deserialize_window_settings(storage: &Option<Box<dyn epi::Storage>>) -> Option<WindowSettings> { epi::get_value(&**storage.as_ref()?, WINDOW_KEY) } #[cfg(not(feature = "persistence"))] fn deserialize_window_settings(_: &Option<Box<dyn epi::Storage>>) -> Option<WindowSettings> { None } #[cfg(feature = "persistence")] fn deserialize_memory(storage: &Option<Box<dyn epi::Storage>>) -> Option<egui::Memory> { epi::get_value(&**storage.as_ref()?, EGUI_MEMORY_KEY) } #[cfg(not(feature = "persistence"))] fn deserialize_memory(_: &Option<Box<dyn epi::Storage>>) -> Option<egui::Memory> { None } impl epi::TextureAllocator for Painter { fn alloc_srgba_premultiplied( &mut self, size: (usize, usize), srgba_pixels: &[Color32], ) -> egui::TextureId { let id = self.alloc_user_texture(); self.set_user_texture(id, size, srgba_pixels); id } fn free(&mut self, id: egui::TextureId) { self.free_user_texture(id) } } struct RequestRepaintEvent; struct GliumRepaintSignal( std::sync::Mutex<glutin::event_loop::EventLoopProxy<RequestRepaintEvent>>, ); impl epi::RepaintSignal for GliumRepaintSignal { fn request_repaint(&self) { self.0.lock().unwrap().send_event(RequestRepaintEvent).ok(); } } #[cfg(target_os = "windows")] fn window_builder_drag_and_drop( window_builder: glutin::window::WindowBuilder, enable: bool, ) -> glutin::window::WindowBuilder { window_builder.with_drag_and_drop(enable) } #[cfg(not(target_os = "windows"))] fn window_builder_drag_and_drop( window_builder: glutin::window::WindowBuilder, _enable: bool, ) -> glutin::window::WindowBuilder { window_builder } fn create_display( app: &dyn epi::App, native_options: 
&epi::NativeOptions, window_settings: Option<WindowSettings>, window_icon: Option<glutin::window::Icon>, event_loop: &glutin::event_loop::EventLoop<RequestRepaintEvent>, ) -> glium::Display { let mut window_builder = glutin::window::WindowBuilder::new() .with_always_on_top(native_options.always_on_top) .with_decorations(native_options.decorated) .with_resizable(native_options.resizable) .with_title(app.name()) .with_transparent(native_options.transparent) .with_window_icon(window_icon); window_builder = window_builder_drag_and_drop(window_builder, native_options.drag_and_drop_support); let initial_size_points = native_options.initial_window_size; if let Some(window_settings) = &window_settings { window_builder = window_settings.initialize_size(window_builder); } else if let Some(initial_size_points) = initial_size_points { window_builder = window_builder.with_inner_size(glutin::dpi::LogicalSize { width: initial_size_points.x as f64, height: initial_size_points.y as f64, }); } let context_builder = glutin::ContextBuilder::new() .with_depth_buffer(0) .with_srgb(true) .with_stencil_buffer(0) .with_vsync(true); let display = glium::Display::new(window_builder, context_builder, &event_loop).unwrap(); if let Some(window_settings) = &window_settings { window_settings.restore_positions(&display); } display } #[cfg(not(feature = "persistence"))] fn create_storage(_app_name: &str) -> Option<Box<dyn epi::Storage>> { None } #[cfg(feature = "persistence")] fn create_storage(app_name: &str) -> Option<Box<dyn epi::Storage>> { if let Some(proj_dirs) = directories_next::ProjectDirs::from("", "", app_name) { let data_dir = proj_dirs.data_dir().to_path_buf(); if let Err(err) = std::fs::create_dir_all(&data_dir) { eprintln!( "Saving disabled: Failed to create app path at {:?}: {}", data_dir, err ); None } else { let mut config_dir = data_dir; config_dir.push("app.ron"); let storage = crate::persistence::FileStorage::from_path(config_dir); Some(Box::new(storage)) } } else { 
eprintln!("Saving disabled: Failed to find path to data_dir."); None } } fn integration_info( display: &glium::Display, previous_frame_time: Option<f32>, ) -> epi::IntegrationInfo { epi::IntegrationInfo { web_info: None, prefer_dark_mode: None, cpu_usage: previous_frame_time, seconds_since_midnight: seconds_since_midnight(), native_pixels_per_point: Some(native_pixels_per_point(&display)), } } fn load_icon(icon_data: epi::IconData) -> Option<glutin::window::Icon> { glutin::window::Icon::from_rgba(icon_data.rgba, icon_data.width, icon_data.height).ok() } pub fn run(mut app: Box<dyn epi::App>, nativve_options: epi::NativeOptions) -> ! { let mut storage = create_storage(app.name()); #[cfg(feature = "http")] let http = std::sync::Arc::new(crate::http::GliumHttp {}); let window_settings = deserialize_window_settings(&storage); let event_loop = glutin::event_loop::EventLoop::with_user_event(); let icon = nativve_options.icon_data.clone().and_then(load_icon); let display = create_display(&*app, &nativve_options, window_settings, icon, &event_loop); let repaint_signal = std::sync::Arc::new(GliumRepaintSignal(std::sync::Mutex::new( event_loop.create_proxy(), ))); let mut egui = EguiGlium::new(&display); *egui.ctx().memory() = deserialize_memory(&storage).unwrap_or_default(); { let (ctx, painter) = egui.ctx_and_painter_mut(); let mut app_output = epi::backend::AppOutput::default(); let mut frame = epi::backend::FrameBuilder { info: integration_info(&display, None), tex_allocator: painter, #[cfg(feature = "http")] http: http.clone(), output: &mut app_output, repaint_signal: repaint_signal.clone(), } .build(); app.setup(&ctx, &mut frame, storage.as_deref()); } let mut previous_frame_time = None; let mut is_focused = true; #[cfg(feature = "persistence")] let mut last_auto_save = Instant::now(); if app.warm_up_enabled() { let saved_memory = egui.ctx().memory().clone(); egui.ctx().memory().set_everything_is_visible(true); egui.begin_frame(&display); let (ctx, painter) = 
egui.ctx_and_painter_mut(); let mut app_output = epi::backend::AppOutput::default(); let mut frame = epi::backend::FrameBuilder { info: integration_info(&display, None), tex_allocator: painter, #[cfg(feature = "http")] http: http.clone(), output: &mut app_output, repaint_signal: repaint_signal.clone(), } .build(); app.update(&ctx, &mut frame); let _ = egui.end_frame(&display); *egui.ctx().memory() = saved_memory; egui.ctx().clear_animations(); } event_loop.run(move |event, _, control_flow| { let mut redraw = || { if !is_focused { std::thread::sleep(std::time::Duration::from_millis(10)); } let frame_start = std::time::Instant::now(); egui.begin_frame(&display); let (ctx, painter) = egui.ctx_and_painter_mut(); let mut app_output = epi::backend::AppOutput::default(); let mut frame = epi::backend::FrameBuilder { info: integration_info(&display, previous_frame_time), tex_allocator: painter, #[cfg(feature = "http")] http: http.clone(), output: &mut app_output, repaint_signal: repaint_signal.clone(), } .build(); app.update(ctx, &mut frame); let (needs_repaint, shapes) = egui.end_frame(&display); let frame_time = (Instant::now() - frame_start).as_secs_f64() as f32; previous_frame_time = Some(frame_time); { use glium::Surface as _; let mut target = display.draw(); let clear_color = app.clear_color(); target.clear_color( clear_color[0], clear_color[1], clear_color[2], clear_color[3], ); egui.paint(&display, &mut target, shapes); target.finish().unwrap(); } { let epi::backend::AppOutput { quit, window_size } = app_output; if let Some(window_size) = window_size { display.gl_window().window().set_inner_size( glutin::dpi::PhysicalSize { width: (egui.ctx().pixels_per_point() * window_size.x).round(), height: (egui.ctx().pixels_per_point() * window_size.y).round(), } .to_logical::<f32>(native_pixels_per_point(&display) as f64), ); } *control_flow = if quit { glutin::event_loop::ControlFlow::Exit } else if needs_repaint { display.gl_window().window().request_redraw(); 
glutin::event_loop::ControlFlow::Poll } else { glutin::event_loop::ControlFlow::Wait }; } #[cfg(feature = "persistence")] if let Some(storage) = &mut storage { let now = Instant::now(); if now - last_auto_save > app.auto_save_interval() { epi::set_value( storage.as_mut(), WINDOW_KEY, &WindowSettings::from_display(&display), ); epi::set_value(storage.as_mut(), EGUI_MEMORY_KEY, &*egui.ctx().memory()); app.save(storage.as_mut()); storage.flush(); last_auto_save = now; } } }; match event { glutin::event::Event::RedrawEventsCleared if cfg!(windows) => redraw(), glutin::event::Event::RedrawRequested(_) if !cfg!(windows) => redraw(), glutin::event::Event::WindowEvent { event, .. } => { if let glutin::event::WindowEvent::Focused(new_focused) = event { is_focused = new_focused; } egui.on_event(event, control_flow); display.gl_window().window().request_redraw(); } glutin::event::Event::LoopDestroyed => { app.on_exit(); #[cfg(feature = "persistence")] if let Some(storage) = &mut storage { epi::set_value( storage.as_mut(), WINDOW_KEY, &WindowSettings::from_display(&display), ); epi::set_value(storage.as_mut(), EGUI_MEMORY_KEY, &*egui.ctx().memory()); app.save(storage.as_mut()); storage.flush(); } } glutin::event::Event::UserEvent(RequestRepaintEvent) => { display.gl_window().window().request_redraw(); } _ => (), } }); }
use crate::{window_settings::WindowSettings, *}; use egui::Color32; #[cfg(target_os = "windows")] use glium::glutin::platform::windows::WindowBuilderExtWindows; use std::time::Instant; #[cfg(feature = "persistence")] const EGUI_MEMORY_KEY: &str = "egui"; #[cfg(feature = "persistence")] const WINDOW_KEY: &str = "window"; #[cfg(feature = "persistence")] fn deserialize_window_settings(storage: &Option<Box<dyn epi::Storage>>) -> Option<WindowSettings> { epi::get_value(&**storage.as_ref()?, WINDOW_KEY) } #[cfg(not(feature = "persistence"))] fn deserialize_window_settings(_: &Option<Box<dyn epi::Storage>>) -> Option<WindowSettings> { None } #[cfg(feature = "persistence")] fn deserialize_memory(storage: &Option<Box<dyn epi::Storage>>) -> Option<egui::Memory> { epi::get_value(&**storage.as_ref()?, EGUI_MEMORY_KEY) } #[cfg(not(feature = "persistence"))] fn deserialize_memory(_: &Option<Box<dyn epi::Storage>>) -> Option<egui::Memory> { None } impl epi::TextureAllocator for Painter { fn alloc_srgba_premultiplied( &mut self, size: (usize, usize), srgba_pixels: &[Color32], ) -> egui::TextureId { let id = self.alloc_user_texture(); self.set_user_texture(id, size, srgba_pixels); id } fn free(&mut self, id: egui::TextureId) { self.free_user_texture(id) } } struct RequestRepaintEvent; struct GliumRepaintSignal( std::sync::Mutex<glutin::event_loop::EventLoopProxy<RequestRepaintEvent>>, ); impl epi::RepaintSignal for GliumRepaintSignal { fn request_repaint(&self) { self.0.lock().unwrap().send_event(RequestRepaintEvent).ok(); } } #[cfg(target_os = "windows")] fn window_builder_drag_and_drop( window_builder: glutin::window::WindowBuilder, enable: bool, ) -> glutin::window::WindowBuilder { window_builder.with_drag_and_drop(enable) } #[cfg(not(target_os = "windows"))] fn window_builder_drag_and_drop( window_builder: glutin::window::WindowBuilder, _enable: bool, ) -> glutin::window::WindowBuilder { window_builder } fn create_display( app: &dyn epi::App, native_options: 
&epi::NativeOptions, window_settings: Option<WindowSettings>, window_icon: Option<glutin::window::Icon>, event_loop: &glutin::event_loop::EventLoop<RequestRepaintEvent>, ) -> glium::Display { let mut window_builder = glutin::window::WindowBuilder::new() .with_always_on_top(native_options.always_on_top) .with_decorations(native_options.decorated) .with_resizable(native_options.resizable) .with_title(app.name()) .with_transparent(native_options.transparent) .with_window_icon(window_icon); window_builder = window_builder_drag_and_drop(window_builder, native_options.drag_and_drop_support); let initial_size_points = native_options.initial_window_size; if let Some(window_settings) = &window_settings { window_builder = window_settings.initialize_size(window_builder); } else if let Some(initial_size_points) = initial_size_points { window_builder = window_builder.with_inner_size(glutin::dpi::LogicalSize { width: initial_size_points.x as f64, height: initial_size_points.y as f64, }); } let context_builder = glutin::ContextBuilder::ne
#[cfg(not(feature = "persistence"))] fn create_storage(_app_name: &str) -> Option<Box<dyn epi::Storage>> { None } #[cfg(feature = "persistence")] fn create_storage(app_name: &str) -> Option<Box<dyn epi::Storage>> { if let Some(proj_dirs) = directories_next::ProjectDirs::from("", "", app_name) { let data_dir = proj_dirs.data_dir().to_path_buf(); if let Err(err) = std::fs::create_dir_all(&data_dir) { eprintln!( "Saving disabled: Failed to create app path at {:?}: {}", data_dir, err ); None } else { let mut config_dir = data_dir; config_dir.push("app.ron"); let storage = crate::persistence::FileStorage::from_path(config_dir); Some(Box::new(storage)) } } else { eprintln!("Saving disabled: Failed to find path to data_dir."); None } } fn integration_info( display: &glium::Display, previous_frame_time: Option<f32>, ) -> epi::IntegrationInfo { epi::IntegrationInfo { web_info: None, prefer_dark_mode: None, cpu_usage: previous_frame_time, seconds_since_midnight: seconds_since_midnight(), native_pixels_per_point: Some(native_pixels_per_point(&display)), } } fn load_icon(icon_data: epi::IconData) -> Option<glutin::window::Icon> { glutin::window::Icon::from_rgba(icon_data.rgba, icon_data.width, icon_data.height).ok() } pub fn run(mut app: Box<dyn epi::App>, nativve_options: epi::NativeOptions) -> ! 
{ let mut storage = create_storage(app.name()); #[cfg(feature = "http")] let http = std::sync::Arc::new(crate::http::GliumHttp {}); let window_settings = deserialize_window_settings(&storage); let event_loop = glutin::event_loop::EventLoop::with_user_event(); let icon = nativve_options.icon_data.clone().and_then(load_icon); let display = create_display(&*app, &nativve_options, window_settings, icon, &event_loop); let repaint_signal = std::sync::Arc::new(GliumRepaintSignal(std::sync::Mutex::new( event_loop.create_proxy(), ))); let mut egui = EguiGlium::new(&display); *egui.ctx().memory() = deserialize_memory(&storage).unwrap_or_default(); { let (ctx, painter) = egui.ctx_and_painter_mut(); let mut app_output = epi::backend::AppOutput::default(); let mut frame = epi::backend::FrameBuilder { info: integration_info(&display, None), tex_allocator: painter, #[cfg(feature = "http")] http: http.clone(), output: &mut app_output, repaint_signal: repaint_signal.clone(), } .build(); app.setup(&ctx, &mut frame, storage.as_deref()); } let mut previous_frame_time = None; let mut is_focused = true; #[cfg(feature = "persistence")] let mut last_auto_save = Instant::now(); if app.warm_up_enabled() { let saved_memory = egui.ctx().memory().clone(); egui.ctx().memory().set_everything_is_visible(true); egui.begin_frame(&display); let (ctx, painter) = egui.ctx_and_painter_mut(); let mut app_output = epi::backend::AppOutput::default(); let mut frame = epi::backend::FrameBuilder { info: integration_info(&display, None), tex_allocator: painter, #[cfg(feature = "http")] http: http.clone(), output: &mut app_output, repaint_signal: repaint_signal.clone(), } .build(); app.update(&ctx, &mut frame); let _ = egui.end_frame(&display); *egui.ctx().memory() = saved_memory; egui.ctx().clear_animations(); } event_loop.run(move |event, _, control_flow| { let mut redraw = || { if !is_focused { std::thread::sleep(std::time::Duration::from_millis(10)); } let frame_start = std::time::Instant::now(); 
egui.begin_frame(&display); let (ctx, painter) = egui.ctx_and_painter_mut(); let mut app_output = epi::backend::AppOutput::default(); let mut frame = epi::backend::FrameBuilder { info: integration_info(&display, previous_frame_time), tex_allocator: painter, #[cfg(feature = "http")] http: http.clone(), output: &mut app_output, repaint_signal: repaint_signal.clone(), } .build(); app.update(ctx, &mut frame); let (needs_repaint, shapes) = egui.end_frame(&display); let frame_time = (Instant::now() - frame_start).as_secs_f64() as f32; previous_frame_time = Some(frame_time); { use glium::Surface as _; let mut target = display.draw(); let clear_color = app.clear_color(); target.clear_color( clear_color[0], clear_color[1], clear_color[2], clear_color[3], ); egui.paint(&display, &mut target, shapes); target.finish().unwrap(); } { let epi::backend::AppOutput { quit, window_size } = app_output; if let Some(window_size) = window_size { display.gl_window().window().set_inner_size( glutin::dpi::PhysicalSize { width: (egui.ctx().pixels_per_point() * window_size.x).round(), height: (egui.ctx().pixels_per_point() * window_size.y).round(), } .to_logical::<f32>(native_pixels_per_point(&display) as f64), ); } *control_flow = if quit { glutin::event_loop::ControlFlow::Exit } else if needs_repaint { display.gl_window().window().request_redraw(); glutin::event_loop::ControlFlow::Poll } else { glutin::event_loop::ControlFlow::Wait }; } #[cfg(feature = "persistence")] if let Some(storage) = &mut storage { let now = Instant::now(); if now - last_auto_save > app.auto_save_interval() { epi::set_value( storage.as_mut(), WINDOW_KEY, &WindowSettings::from_display(&display), ); epi::set_value(storage.as_mut(), EGUI_MEMORY_KEY, &*egui.ctx().memory()); app.save(storage.as_mut()); storage.flush(); last_auto_save = now; } } }; match event { glutin::event::Event::RedrawEventsCleared if cfg!(windows) => redraw(), glutin::event::Event::RedrawRequested(_) if !cfg!(windows) => redraw(), 
glutin::event::Event::WindowEvent { event, .. } => { if let glutin::event::WindowEvent::Focused(new_focused) = event { is_focused = new_focused; } egui.on_event(event, control_flow); display.gl_window().window().request_redraw(); } glutin::event::Event::LoopDestroyed => { app.on_exit(); #[cfg(feature = "persistence")] if let Some(storage) = &mut storage { epi::set_value( storage.as_mut(), WINDOW_KEY, &WindowSettings::from_display(&display), ); epi::set_value(storage.as_mut(), EGUI_MEMORY_KEY, &*egui.ctx().memory()); app.save(storage.as_mut()); storage.flush(); } } glutin::event::Event::UserEvent(RequestRepaintEvent) => { display.gl_window().window().request_redraw(); } _ => (), } }); }
w() .with_depth_buffer(0) .with_srgb(true) .with_stencil_buffer(0) .with_vsync(true); let display = glium::Display::new(window_builder, context_builder, &event_loop).unwrap(); if let Some(window_settings) = &window_settings { window_settings.restore_positions(&display); } display }
function_block-function_prefixed
[ { "content": "// A wrapper that allows the more idiomatic usage pattern: `ui.add(toggle(&mut my_bool))`\n\n/// iOS-style toggle switch.\n\n///\n\n/// ## Example:\n\n/// ``` ignore\n\n/// ui.add(toggle(&mut my_bool));\n\n/// ```\n\npub fn toggle(on: &mut bool) -> impl egui::Widget + '_ {\n\n move |ui: &mut egui::Ui| toggle_ui(ui, on)\n\n}\n\n\n", "file_path": "egui_demo_lib/src/apps/demo/toggle_switch.rs", "rank": 0, "score": 360966.55477544933 }, { "content": "pub fn drag_source(ui: &mut Ui, id: Id, body: impl FnOnce(&mut Ui)) {\n\n let is_being_dragged = ui.memory().is_being_dragged(id);\n\n\n\n if !is_being_dragged {\n\n let response = ui.scope(body).response;\n\n\n\n // Check for drags:\n\n let response = ui.interact(response.rect, id, Sense::drag());\n\n if response.hovered() {\n\n ui.output().cursor_icon = CursorIcon::Grab;\n\n }\n\n } else {\n\n ui.output().cursor_icon = CursorIcon::Grabbing;\n\n\n\n // Paint the body to a new layer:\n\n let layer_id = LayerId::new(Order::Tooltip, id);\n\n let response = ui.with_layer_id(layer_id, body).response;\n\n\n\n // Now we move the visuals of the body to where the mouse is.\n\n // Normally you need to decide a location for a widget first,\n", "file_path": "egui_demo_lib/src/apps/demo/drag_and_drop.rs", "rank": 1, "score": 348729.6925786222 }, { "content": "fn set_open(open: &mut BTreeSet<String>, key: &'static str, is_open: bool) {\n\n if is_open {\n\n if !open.contains(key) {\n\n open.insert(key.to_owned());\n\n }\n\n } else {\n\n open.remove(key);\n\n }\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n\n\n/// A menu bar in which you can select different demo windows to show.\n\n#[derive(Default)]\n\n#[cfg_attr(feature = \"persistence\", derive(serde::Deserialize, serde::Serialize))]\n\n#[cfg_attr(feature = \"persistence\", serde(default))]\n\npub struct DemoWindows {\n\n demos: Demos,\n\n tests: Tests,\n\n}\n", "file_path": 
"egui_demo_lib/src/apps/demo/demo_app_windows.rs", "rank": 2, "score": 339235.1637072341 }, { "content": "/// Install event listeners to register different input events\n\n/// and start running the given app.\n\npub fn start(canvas_id: &str, app: Box<dyn epi::App>) -> Result<AppRunnerRef, JsValue> {\n\n let backend = WebBackend::new(canvas_id)?;\n\n let mut runner = AppRunner::new(backend, app)?;\n\n runner.warm_up()?;\n\n start_runner(runner)\n\n}\n\n\n", "file_path": "egui_web/src/backend.rs", "rank": 3, "score": 322914.22500620066 }, { "content": "fn ui_color(ui: &mut Ui, srgba: &mut Color32, text: &str) {\n\n ui.horizontal(|ui| {\n\n ui.color_edit_button_srgba(srgba);\n\n ui.label(text);\n\n });\n\n}\n", "file_path": "egui/src/style.rs", "rank": 4, "score": 317253.6214381762 }, { "content": "#[allow(dead_code)]\n\nfn toggle_ui_compact(ui: &mut egui::Ui, on: &mut bool) -> egui::Response {\n\n let desired_size = ui.spacing().interact_size.y * egui::vec2(2.0, 1.0);\n\n let (rect, mut response) = ui.allocate_exact_size(desired_size, egui::Sense::click());\n\n if response.clicked() {\n\n *on = !*on;\n\n response.mark_changed();\n\n }\n\n response.widget_info(|| egui::WidgetInfo::selected(egui::WidgetType::Checkbox, *on, \"\"));\n\n\n\n let how_on = ui.ctx().animate_bool(response.id, *on);\n\n let visuals = ui.style().interact_selectable(&response, *on);\n\n let rect = rect.expand(visuals.expansion);\n\n let radius = 0.5 * rect.height();\n\n ui.painter()\n\n .rect(rect, radius, visuals.bg_fill, visuals.bg_stroke);\n\n let circle_x = egui::lerp((rect.left() + radius)..=(rect.right() - radius), how_on);\n\n let center = egui::pos2(circle_x, rect.center().y);\n\n ui.painter()\n\n .circle(center, 0.75 * radius, visuals.bg_fill, visuals.fg_stroke);\n\n\n\n response\n\n}\n\n\n", "file_path": "egui_demo_lib/src/apps/demo/toggle_switch.rs", "rank": 5, "score": 316645.4966603942 }, { "content": "/// iOS-style toggle switch:\n\n///\n\n/// ``` text\n\n/// _____________\n\n/// / 
/.....\\\n\n/// | |.......|\n\n/// \\_______\\_____/\n\n/// ```\n\n///\n\n/// ## Example:\n\n/// ``` ignore\n\n/// toggle_ui(ui, &mut my_bool);\n\n/// ```\n\npub fn toggle_ui(ui: &mut egui::Ui, on: &mut bool) -> egui::Response {\n\n // Widget code can be broken up in four steps:\n\n // 1. Decide a size for the widget\n\n // 2. Allocate space for it\n\n // 3. Handle interactions with the widget (if any)\n\n // 4. Paint the widget\n\n\n\n // 1. Deciding widget size:\n\n // You can query the `ui` how much space is available,\n\n // but in this example we have a fixed size widget based on the height of a standard button:\n\n let desired_size = ui.spacing().interact_size.y * egui::vec2(2.0, 1.0);\n\n\n\n // 2. Allocating space:\n\n // This is where we get a region of the screen assigned.\n\n // We also tell the Ui to sense clicks in the allocated region.\n\n let (rect, mut response) = ui.allocate_exact_size(desired_size, egui::Sense::click());\n\n\n\n // 3. Interact: Time to check for clicks!\n\n if response.clicked() {\n\n *on = !*on;\n", "file_path": "egui_demo_lib/src/apps/demo/toggle_switch.rs", "rank": 6, "score": 312810.6851733003 }, { "content": "/// Show a tooltip at the current pointer position (if any).\n\n///\n\n/// Most of the time it is easier to use [`Response::on_hover_ui`].\n\n///\n\n/// See also [`show_tooltip_text`].\n\n///\n\n/// ```\n\n/// # let mut ui = egui::Ui::__test();\n\n/// if ui.ui_contains_pointer() {\n\n/// egui::show_tooltip(ui.ctx(), egui::Id::new(\"my_tooltip\"), |ui| {\n\n/// ui.label(\"Helpful text\");\n\n/// });\n\n/// }\n\n/// ```\n\npub fn show_tooltip(ctx: &CtxRef, id: Id, add_contents: impl FnOnce(&mut Ui)) {\n\n show_tooltip_at_pointer(ctx, id, add_contents)\n\n}\n\n\n", "file_path": "egui/src/containers/popup.rs", "rank": 7, "score": 304588.87378303986 }, { "content": "#[allow(clippy::needless_pass_by_value)]\n\nfn menu_impl<'c>(ui: &mut Ui, title: impl ToString, add_contents: Box<dyn FnOnce(&mut Ui) + 'c>) {\n\n let title = 
title.to_string();\n\n let bar_id = ui.id();\n\n let menu_id = bar_id.with(&title);\n\n\n\n let mut bar_state = BarState::load(ui.ctx(), &bar_id);\n\n\n\n let mut button = Button::new(title);\n\n\n\n if bar_state.open_menu == Some(menu_id) {\n\n button = button.fill(Some(ui.visuals().selection.bg_fill));\n\n }\n\n\n\n let button_response = ui.add(button);\n\n if button_response.clicked() {\n\n // Toggle\n\n if bar_state.open_menu == Some(menu_id) {\n\n bar_state.open_menu = None;\n\n } else {\n\n bar_state.open_menu = Some(menu_id);\n", "file_path": "egui/src/menu.rs", "rank": 8, "score": 304289.9839322878 }, { "content": "fn color_slider_1d(ui: &mut Ui, value: &mut f32, color_at: impl Fn(f32) -> Color32) -> Response {\n\n #![allow(clippy::identity_op)]\n\n\n\n let desired_size = vec2(\n\n ui.spacing().slider_width,\n\n ui.spacing().interact_size.y * 2.0,\n\n );\n\n let (rect, response) = ui.allocate_at_least(desired_size, Sense::click_and_drag());\n\n\n\n if let Some(mpos) = response.interact_pointer_pos() {\n\n *value = remap_clamp(mpos.x, rect.left()..=rect.right(), 0.0..=1.0);\n\n }\n\n\n\n let visuals = ui.style().interact(&response);\n\n\n\n background_checkers(ui.painter(), rect); // for alpha:\n\n\n\n {\n\n // fill color:\n\n let mut mesh = Mesh::default();\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 9, "score": 302882.3808328073 }, { "content": "pub fn show_tooltip_at_pointer(ctx: &CtxRef, id: Id, add_contents: impl FnOnce(&mut Ui)) {\n\n let suggested_pos = ctx\n\n .input()\n\n .pointer\n\n .hover_pos()\n\n .map(|pointer_pos| pointer_pos + vec2(16.0, 16.0));\n\n show_tooltip_at(ctx, id, suggested_pos, add_contents)\n\n}\n\n\n", "file_path": "egui/src/containers/popup.rs", "rank": 10, "score": 301387.3714055398 }, { "content": "fn clock_button(ui: &mut egui::Ui, seconds_since_midnight: f64) -> egui::Response {\n\n let time = seconds_since_midnight;\n\n let time = format!(\n\n \"{:02}:{:02}:{:02}.{:02}\",\n\n (time % (24.0 * 60.0 * 60.0) / 
3600.0).floor(),\n\n (time % (60.0 * 60.0) / 60.0).floor(),\n\n (time % 60.0).floor(),\n\n (time % 1.0 * 100.0).floor()\n\n );\n\n\n\n ui.add(egui::Button::new(time).text_style(egui::TextStyle::Monospace))\n\n}\n\n\n", "file_path": "egui_demo_lib/src/wrap_app.rs", "rank": 11, "score": 300697.994672282 }, { "content": "// A wrapper that allows the more idiomatic usage pattern: `ui.add(...)`\n\n/// Password entry field with ability to toggle character hiding.\n\n///\n\n/// ## Example:\n\n/// ``` ignore\n\n/// ui.add(password(&mut password));\n\n/// ```\n\npub fn password(text: &mut String) -> impl egui::Widget + '_ {\n\n move |ui: &mut egui::Ui| password_ui(ui, text)\n\n}\n\n\n", "file_path": "egui_demo_lib/src/apps/demo/password.rs", "rank": 12, "score": 295583.35845603026 }, { "content": "fn color_text_ui(ui: &mut Ui, color: impl Into<Color32>) {\n\n let color = color.into();\n\n ui.horizontal(|ui| {\n\n let [r, g, b, a] = color.to_array();\n\n ui.label(format!(\n\n \"RGBA (premultiplied): rgba({}, {}, {}, {})\",\n\n r, g, b, a\n\n ));\n\n\n\n if ui.button(\"📋\").on_hover_text(\"Click to copy\").clicked() {\n\n ui.output().copied_text = format!(\"{}, {}, {}, {}\", r, g, b, a);\n\n }\n\n });\n\n}\n\n\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 13, "score": 295288.74463059823 }, { "content": "fn doc_link_label<'a>(title: &'a str, search_term: &'a str) -> impl egui::Widget + 'a {\n\n let label = format!(\"{}:\", title);\n\n let url = format!(\"https://docs.rs/egui?search={}\", search_term);\n\n move |ui: &mut egui::Ui| {\n\n ui.hyperlink_to(label, url).on_hover_ui(|ui| {\n\n ui.horizontal_wrapped(|ui| {\n\n ui.label(\"Search egui docs for\");\n\n ui.code(search_term);\n\n });\n\n })\n\n }\n\n}\n", "file_path": "egui_demo_lib/src/apps/demo/widget_gallery.rs", "rank": 14, "score": 294091.4753698681 }, { "content": "pub fn adjust_colors(shape: &mut Shape, adjust_color: &impl Fn(&mut Color32)) {\n\n #![allow(clippy::match_same_arms)]\n\n match shape {\n\n 
Shape::Noop => {}\n\n Shape::Vec(shapes) => {\n\n for shape in shapes {\n\n adjust_colors(shape, adjust_color)\n\n }\n\n }\n\n Shape::Circle { fill, stroke, .. } => {\n\n adjust_color(fill);\n\n adjust_color(&mut stroke.color);\n\n }\n\n Shape::LineSegment { stroke, .. } => {\n\n adjust_color(&mut stroke.color);\n\n }\n\n Shape::Path { fill, stroke, .. } => {\n\n adjust_color(fill);\n\n adjust_color(&mut stroke.color);\n\n }\n", "file_path": "epaint/src/shape_transform.rs", "rank": 16, "score": 293616.7485875408 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\npub fn start_web(canvas_id: &str, app: Box<dyn epi::App>) -> Result<(), wasm_bindgen::JsValue> {\n\n egui_web::start(canvas_id, app)?;\n\n Ok(())\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n// When compiling natively\n\n\n\n/// Call from `fn main` like this: `eframe::run_native(Box::new(MyEguiApp::default()))`\n", "file_path": "eframe/src/lib.rs", "rank": 17, "score": 291722.0070521347 }, { "content": "pub fn show_tooltip_under(ctx: &CtxRef, id: Id, rect: &Rect, add_contents: impl FnOnce(&mut Ui)) {\n\n show_tooltip_at(\n\n ctx,\n\n id,\n\n Some(rect.left_bottom() + vec2(-2.0, 4.0)),\n\n add_contents,\n\n )\n\n}\n\n\n", "file_path": "egui/src/containers/popup.rs", "rank": 18, "score": 291388.06479400897 }, { "content": "pub fn canvas_size_in_points(canvas_id: &str) -> egui::Vec2 {\n\n let canvas = canvas_element(canvas_id).unwrap();\n\n let pixels_per_point = native_pixels_per_point();\n\n egui::vec2(\n\n canvas.width() as f32 / pixels_per_point,\n\n canvas.height() as f32 / pixels_per_point,\n\n )\n\n}\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 19, "score": 289420.57466608967 }, { "content": "fn huge_content_painter(ui: &mut egui::Ui) {\n\n // This is similar to the other demo, but is fully manual, for when you want to do custom painting.\n\n ui.label(\"A lot of rows, but only the visible ones are painted, so performance is still good:\");\n\n 
ui.add_space(4.0);\n\n\n\n let text_style = TextStyle::Body;\n\n let row_height = ui.fonts()[text_style].row_height() + ui.spacing().item_spacing.y;\n\n let num_rows = 10_000;\n\n\n\n ScrollArea::auto_sized().show_viewport(ui, |ui, viewport| {\n\n ui.set_height(row_height * num_rows as f32);\n\n\n\n let first_item = (viewport.min.y / row_height).floor().at_least(0.0) as usize;\n\n let last_item = (viewport.max.y / row_height).ceil() as usize + 1;\n\n let last_item = last_item.at_most(num_rows);\n\n\n\n for i in first_item..last_item {\n\n let indentation = (i % 100) as f32;\n\n let x = ui.min_rect().left() + indentation;\n\n let y = ui.min_rect().top() + i as f32 * row_height;\n", "file_path": "egui_demo_lib/src/apps/demo/scrolling.rs", "rank": 20, "score": 288360.2676407694 }, { "content": "fn tint_shape_towards(shape: &mut Shape, target: Color32) {\n\n epaint::shape_transform::adjust_colors(shape, &|color| {\n\n *color = crate::color::tint_color_towards(*color, target);\n\n });\n\n}\n", "file_path": "egui/src/painter.rs", "rank": 21, "score": 287912.1880229051 }, { "content": "pub fn resize_canvas_to_screen_size(canvas_id: &str, max_size_points: egui::Vec2) -> Option<()> {\n\n let canvas = canvas_element(canvas_id)?;\n\n\n\n let screen_size_points = screen_size_in_native_points()?;\n\n let pixels_per_point = native_pixels_per_point();\n\n\n\n let max_size_pixels = pixels_per_point * max_size_points;\n\n\n\n let canvas_size_pixels = pixels_per_point * screen_size_points;\n\n let canvas_size_pixels = canvas_size_pixels.min(max_size_pixels);\n\n let canvas_size_points = canvas_size_pixels / pixels_per_point;\n\n\n\n // Make sure that the height and width are always even numbers.\n\n // otherwise, the page renders blurry on some platforms.\n\n // See https://github.com/emilk/egui/issues/103\n\n fn round_to_even(v: f32) -> f32 {\n\n (v / 2.0).round() * 2.0\n\n }\n\n\n\n canvas\n", "file_path": "egui_web/src/lib.rs", "rank": 22, "score": 276582.642012057 }, { 
"content": "fn numbered_point(ui: &mut Ui, width: f32, number: &str) -> Response {\n\n let row_height = ui.fonts()[TextStyle::Body].row_height();\n\n let (rect, response) = ui.allocate_exact_size(vec2(width, row_height), Sense::hover());\n\n let text = format!(\"{}.\", number);\n\n let text_color = ui.visuals().strong_text_color();\n\n ui.painter().text(\n\n rect.right_center(),\n\n Align2::RIGHT_CENTER,\n\n text,\n\n TextStyle::Body,\n\n text_color,\n\n );\n\n response\n\n}\n", "file_path": "egui_demo_lib/src/easy_mark/easy_mark_viewer.rs", "rank": 23, "score": 272896.47496102424 }, { "content": "/// Combined into one function (rather than two) to make it easier\n\n/// for the borrow checker.\n\ntype GetSetValue<'a> = Box<dyn 'a + FnMut(Option<f64>) -> f64>;\n\n\n", "file_path": "egui/src/widgets/slider.rs", "rank": 26, "score": 265308.42958655104 }, { "content": "/// Construct a top level menu in a menu bar. This would be e.g. \"File\", \"Edit\" etc.\n\npub fn menu(ui: &mut Ui, title: impl ToString, add_contents: impl FnOnce(&mut Ui)) {\n\n menu_impl(ui, title, Box::new(add_contents))\n\n}\n\n\n", "file_path": "egui/src/menu.rs", "rank": 27, "score": 263800.40303584654 }, { "content": "/// Web sends all keys as strings, so it is up to us to figure out if it is\n\n/// a real text input or the name of a key.\n\nfn should_ignore_key(key: &str) -> bool {\n\n let is_function_key = key.starts_with('F') && key.len() > 1;\n\n is_function_key\n\n || matches!(\n\n key,\n\n \"Alt\"\n\n | \"ArrowDown\"\n\n | \"ArrowLeft\"\n\n | \"ArrowRight\"\n\n | \"ArrowUp\"\n\n | \"Backspace\"\n\n | \"CapsLock\"\n\n | \"ContextMenu\"\n\n | \"Control\"\n\n | \"Delete\"\n\n | \"End\"\n\n | \"Enter\"\n\n | \"Esc\"\n\n | \"Escape\"\n\n | \"Help\"\n", "file_path": "egui_web/src/lib.rs", "rank": 28, "score": 262829.1202214684 }, { "content": "/// Combined into one function (rather than two) to make it easier\n\n/// for the borrow checker.\n\ntype GetSetValue<'a> = Box<dyn 'a + 
FnMut(Option<f64>) -> f64>;\n\n\n", "file_path": "egui/src/widgets/drag_value.rs", "rank": 29, "score": 261563.94890106376 }, { "content": "fn canvas_origin(canvas_id: &str) -> egui::Pos2 {\n\n let rect = canvas_element(canvas_id)\n\n .unwrap()\n\n .get_bounding_client_rect();\n\n egui::Pos2::new(rect.left() as f32, rect.top() as f32)\n\n}\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 30, "score": 259869.32287913992 }, { "content": "fn next_word_boundary_char_index(it: impl Iterator<Item = char>, mut index: usize) -> usize {\n\n let mut it = it.skip(index);\n\n if let Some(_first) = it.next() {\n\n index += 1;\n\n\n\n if let Some(second) = it.next() {\n\n index += 1;\n\n for next in it {\n\n if is_word_char(next) != is_word_char(second) {\n\n break;\n\n }\n\n index += 1;\n\n }\n\n }\n\n }\n\n index\n\n}\n\n\n", "file_path": "egui/src/widgets/text_edit.rs", "rank": 31, "score": 259672.60738944708 }, { "content": "fn show_menu_bar(ui: &mut Ui) {\n\n trace!(ui);\n\n use egui::*;\n\n\n\n menu::bar(ui, |ui| {\n\n menu::menu(ui, \"File\", |ui| {\n\n if ui.button(\"Organize windows\").clicked() {\n\n ui.ctx().memory().reset_areas();\n\n }\n\n if ui\n\n .button(\"Clear egui memory\")\n\n .on_hover_text(\"Forget scroll, positions, sizes etc\")\n\n .clicked()\n\n {\n\n *ui.ctx().memory() = Default::default();\n\n }\n\n });\n\n });\n\n}\n", "file_path": "egui_demo_lib/src/apps/demo/demo_app_windows.rs", "rank": 32, "score": 258403.17191499908 }, { "content": "/// Password entry field with ability to toggle character hiding.\n\n///\n\n/// ## Example:\n\n/// ``` ignore\n\n/// password_ui(ui, &mut password);\n\n/// ```\n\npub fn password_ui(ui: &mut egui::Ui, text: &mut String) -> egui::Response {\n\n // This widget has its own state — show or hide password characters.\n\n\n\n // 1. 
Declare state struct\n\n // This struct represents the state of this widget.\n\n // It must implement at least `Clone` and be `'static`.\n\n // If you use the `persistence` feature, it also must implement `serde::{Deserialize, Serialize}`.\n\n // You should prefer creating custom newtype structs or enums like this, to avoid `TypeId`\n\n // intersection errors, especially when you use `Memory::data` without `Id`.\n\n #[derive(Clone, Copy, Default)]\n\n struct State(bool);\n\n\n\n // 2. Create id\n\n let id = ui.id().with(\"show_password\");\n\n\n\n // 3. Get state for this widget\n\n // You can read more about available `Memory` functions in the documentation of `egui::Memory`\n\n // struct and `egui::any` module.\n\n // You should get state by value, not by reference to avoid borrowing of `Memory`.\n\n let mut plaintext = *ui.memory().id_data_temp.get_or_default::<State>(id);\n", "file_path": "egui_demo_lib/src/apps/demo/password.rs", "rank": 33, "score": 252446.23075380962 }, { "content": "fn vertex_gradient(ui: &mut Ui, bg_fill: Color32, gradient: &Gradient) -> Response {\n\n use egui::epaint::*;\n\n let (rect, response) = ui.allocate_at_least(GRADIENT_SIZE, Sense::hover());\n\n if bg_fill != Default::default() {\n\n let mut mesh = Mesh::default();\n\n mesh.add_colored_rect(rect, bg_fill);\n\n ui.painter().add(Shape::mesh(mesh));\n\n }\n\n {\n\n let n = gradient.0.len();\n\n assert!(n >= 2);\n\n let mut mesh = Mesh::default();\n\n for (i, &color) in gradient.0.iter().enumerate() {\n\n let t = i as f32 / (n as f32 - 1.0);\n\n let x = lerp(rect.x_range(), t);\n\n mesh.colored_vertex(pos2(x, rect.top()), color);\n\n mesh.colored_vertex(pos2(x, rect.bottom()), color);\n\n if i < n - 1 {\n\n let i = i as u32;\n\n mesh.add_triangle(2 * i, 2 * i + 1, 2 * i + 2);\n\n mesh.add_triangle(2 * i + 1, 2 * i + 2, 2 * i + 3);\n\n }\n\n }\n\n ui.painter().add(Shape::mesh(mesh));\n\n }\n\n response\n\n}\n\n\n", "file_path": "egui_demo_lib/src/apps/color_test.rs", "rank": 34, 
"score": 248823.12617685698 }, { "content": "fn huge_content_lines(ui: &mut egui::Ui) {\n\n ui.label(\n\n \"A lot of rows, but only the visible ones are layed out, so performance is still good:\",\n\n );\n\n ui.add_space(4.0);\n\n\n\n let text_style = TextStyle::Body;\n\n let row_height = ui.fonts()[text_style].row_height();\n\n let num_rows = 10_000;\n\n ScrollArea::auto_sized().show_rows(ui, row_height, num_rows, |ui, row_range| {\n\n for row in row_range {\n\n let text = format!(\"This is row {}/{}\", row + 1, num_rows);\n\n ui.label(text);\n\n }\n\n });\n\n}\n\n\n", "file_path": "egui_demo_lib/src/apps/demo/scrolling.rs", "rank": 35, "score": 248713.33700807445 }, { "content": "/// Show a button to switch to/from dark/light mode (globally).\n\nfn dark_light_mode_switch(ui: &mut egui::Ui) {\n\n let style: egui::Style = (*ui.ctx().style()).clone();\n\n let new_visuals = style.visuals.light_dark_small_toggle_button(ui);\n\n if let Some(visuals) = new_visuals {\n\n ui.ctx().set_visuals(visuals);\n\n }\n\n}\n", "file_path": "egui_demo_lib/src/wrap_app.rs", "rank": 36, "score": 248713.33700807445 }, { "content": "fn contrast_color(color: impl Into<Rgba>) -> Color32 {\n\n if color.into().intensity() < 0.5 {\n\n Color32::WHITE\n\n } else {\n\n Color32::BLACK\n\n }\n\n}\n\n\n\n/// Number of vertices per dimension in the color sliders.\n\n/// We need at least 6 for hues, and more for smooth 2D areas.\n\n/// Should always be a multiple of 6 to hit the peak hues in HSV/HSL (every 60°).\n\nconst N: u32 = 6 * 6;\n\n\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 37, "score": 246311.5714569121 }, { "content": "fn ui_url(ui: &mut egui::Ui, frame: &mut epi::Frame<'_>, url: &mut String) -> Option<String> {\n\n let mut trigger_fetch = false;\n\n\n\n ui.horizontal(|ui| {\n\n ui.label(\"URL:\");\n\n trigger_fetch |= ui.text_edit_singleline(url).lost_focus();\n\n trigger_fetch |= ui.button(\"GET\").clicked();\n\n });\n\n\n\n if frame.is_web() {\n\n ui.label(\"HINT: 
paste the url of this page into the field above!\");\n\n }\n\n\n\n ui.horizontal(|ui| {\n\n if ui.button(\"Source code for this example\").clicked() {\n\n *url = format!(\n\n \"https://raw.githubusercontent.com/emilk/egui/master/{}\",\n\n file!()\n\n );\n\n trigger_fetch = true;\n", "file_path": "egui_demo_lib/src/apps/http_app.rs", "rank": 38, "score": 245982.62084855838 }, { "content": "/// Show some text at the current pointer position (if any).\n\n///\n\n/// Most of the time it is easier to use [`Response::on_hover_text`].\n\n///\n\n/// See also [`show_tooltip`].\n\n///\n\n/// ```\n\n/// # let mut ui = egui::Ui::__test();\n\n/// if ui.ui_contains_pointer() {\n\n/// egui::show_tooltip_text(ui.ctx(), egui::Id::new(\"my_tooltip\"), \"Helpful text\");\n\n/// }\n\n/// ```\n\npub fn show_tooltip_text(ctx: &CtxRef, id: Id, text: impl ToString) {\n\n show_tooltip(ctx, id, |ui| {\n\n ui.add(crate::widgets::Label::new(text));\n\n })\n\n}\n\n\n", "file_path": "egui/src/containers/popup.rs", "rank": 39, "score": 242624.22818555374 }, { "content": "pub fn show_color(ui: &mut Ui, color: impl Into<Hsva>, desired_size: Vec2) -> Response {\n\n show_hsva(ui, color.into(), desired_size)\n\n}\n\n\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 40, "score": 241135.79364103195 }, { "content": "fn get(get_set_value: &mut GetSetValue<'_>) -> f64 {\n\n (get_set_value)(None)\n\n}\n\n\n", "file_path": "egui/src/widgets/slider.rs", "rank": 41, "score": 238743.48356824665 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\n#[wasm_bindgen]\n\npub fn start(canvas_id: &str) -> Result<(), wasm_bindgen::JsValue> {\n\n let app = egui_demo_lib::WrapApp::default();\n\n eframe::start_web(canvas_id, Box::new(app))\n\n}\n", "file_path": "egui_demo_app/src/lib.rs", "rank": 42, "score": 237806.8867111604 }, { "content": "pub fn handle_output(output: &egui::Output, runner: &mut AppRunner) {\n\n let egui::Output {\n\n cursor_icon,\n\n open_url,\n\n copied_text,\n\n needs_repaint: _, // 
handled elsewhere\n\n events: _, // we ignore these (TODO: accessibility screen reader)\n\n text_cursor_pos,\n\n } = output;\n\n\n\n set_cursor_icon(*cursor_icon);\n\n if let Some(open) = open_url {\n\n crate::open_url(&open.url, open.new_tab);\n\n }\n\n\n\n #[cfg(web_sys_unstable_apis)]\n\n if !copied_text.is_empty() {\n\n set_clipboard_text(copied_text);\n\n }\n\n\n\n #[cfg(not(web_sys_unstable_apis))]\n\n let _ = copied_text;\n\n\n\n if &runner.last_text_cursor_pos != text_cursor_pos {\n\n move_text_cursor(text_cursor_pos, runner.canvas_id());\n\n runner.last_text_cursor_pos = *text_cursor_pos;\n\n }\n\n}\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 43, "score": 236560.6685883052 }, { "content": "/// return true on change\n\nfn color_picker_hsva_2d(ui: &mut Ui, hsva: &mut Hsva, alpha: Alpha) -> bool {\n\n let mut hsvag = HsvaGamma::from(*hsva);\n\n color_picker_hsvag_2d(ui, &mut hsvag, alpha);\n\n let new_hasva = Hsva::from(hsvag);\n\n if *hsva == new_hasva {\n\n false\n\n } else {\n\n *hsva = new_hasva;\n\n true\n\n }\n\n}\n\n\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 44, "score": 235781.53758690617 }, { "content": "fn get(get_set_value: &mut GetSetValue<'_>) -> f64 {\n\n (get_set_value)(None)\n\n}\n\n\n", "file_path": "egui/src/widgets/drag_value.rs", "rank": 45, "score": 235578.55734017253 }, { "content": "#[cfg(feature = \"ron\")]\n\npub fn set_value<T: serde::Serialize>(storage: &mut dyn Storage, key: &str, value: &T) {\n\n storage.set_string(\n\n key,\n\n ron::ser::to_string_pretty(value, Default::default()).unwrap(),\n\n );\n\n}\n\n\n\n/// [`Storage`] key used for app\n\npub const APP_KEY: &str = \"app\";\n\n\n\n// ----------------------------------------------------------------------------\n\n\n\n#[cfg(feature = \"http\")]\n\n/// `epi` supports simple HTTP requests with [`Frame::http_fetch`].\n\npub mod http {\n\n /// A simple http requests.\n\n pub struct Request {\n\n /// \"GET\", …\n\n pub method: String,\n\n /// 
https://…\n", "file_path": "epi/src/lib.rs", "rank": 46, "score": 235194.6611436407 }, { "content": "pub fn open_url(url: &str, new_tab: bool) -> Option<()> {\n\n let name = if new_tab { \"_blank\" } else { \"_self\" };\n\n\n\n web_sys::window()?\n\n .open_with_url_and_target(url, name)\n\n .ok()?;\n\n Some(())\n\n}\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 47, "score": 235174.48837923416 }, { "content": "// Move text agent to text cursor's position, on desktop/laptop,\n\n// candidate window moves following text element (agent),\n\n// so it appears that the IME candidate window moves with text cursor.\n\n// On mobile devices, there is no need to do that.\n\nfn move_text_cursor(cursor: &Option<egui::Pos2>, canvas_id: &str) -> Option<()> {\n\n let style = text_agent().style();\n\n // Note: movint agent on mobile devices will lead to unpredictable scroll.\n\n if is_mobile() == Some(false) {\n\n cursor.as_ref().and_then(|&egui::Pos2 { x, y }| {\n\n let canvas = canvas_element(canvas_id)?;\n\n let y = y + (canvas.scroll_top() + canvas.offset_top()) as f32;\n\n let x = x + (canvas.scroll_left() + canvas.offset_left()) as f32;\n\n // Canvas is translated 50% horizontally in html.\n\n let x = x - canvas.offset_width() as f32 / 2.0;\n\n style.set_property(\"position\", \"absolute\").ok()?;\n\n style.set_property(\"top\", &(y.to_string() + \"px\")).ok()?;\n\n style.set_property(\"left\", &(x.to_string() + \"px\")).ok()\n\n })\n\n } else {\n\n style.set_property(\"position\", \"absolute\").ok()?;\n\n style.set_property(\"top\", \"0px\").ok()?;\n\n style.set_property(\"left\", \"0px\").ok()\n\n }\n\n}\n", "file_path": "egui_web/src/lib.rs", "rank": 48, "score": 235093.24110361806 }, { "content": "fn manipulate_agent(canvas_id: &str, latest_cursor: Option<egui::Pos2>) -> Option<()> {\n\n use wasm_bindgen::JsCast;\n\n use web_sys::HtmlInputElement;\n\n let window = web_sys::window()?;\n\n let document = window.document()?;\n\n let input: HtmlInputElement = 
document.get_element_by_id(AGENT_ID)?.dyn_into().unwrap();\n\n let cutsor_txt = document.body()?.style().get_property_value(\"cursor\").ok()?;\n\n let style = canvas_element(canvas_id)?.style();\n\n if cutsor_txt == cursor_web_name(egui::CursorIcon::Text) {\n\n input.set_hidden(false);\n\n input.focus().ok()?;\n\n // Panning canvas so that text edit is shown at 30%\n\n // Only on touch screens, when keyboard popups\n\n if let Some(p) = latest_cursor {\n\n let inner_height = window.inner_height().ok()?.as_f64()? as f32;\n\n let current_rel = p.y / inner_height;\n\n\n\n if current_rel > 0.5 {\n\n // probably below the keyboard\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 49, "score": 235087.45123272602 }, { "content": "fn color_button(ui: &mut Ui, color: Color32) -> Response {\n\n let size = ui.spacing().interact_size;\n\n let (rect, response) = ui.allocate_exact_size(size, Sense::click());\n\n response.widget_info(|| WidgetInfo::new(WidgetType::ColorButton));\n\n let visuals = ui.style().interact(&response);\n\n let rect = rect.expand(visuals.expansion);\n\n\n\n background_checkers(ui.painter(), rect);\n\n\n\n let left_half = Rect::from_min_max(rect.left_top(), rect.center_bottom());\n\n let right_half = Rect::from_min_max(rect.center_top(), rect.right_bottom());\n\n ui.painter().rect_filled(left_half, 0.0, color);\n\n ui.painter().rect_filled(right_half, 0.0, color.to_opaque());\n\n\n\n let corner_radius = visuals.corner_radius.at_most(2.0);\n\n ui.painter()\n\n .rect_stroke(rect, corner_radius, (2.0, visuals.bg_fill)); // fill is intentional!\n\n\n\n response\n\n}\n\n\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 50, "score": 234837.2240239883 }, { "content": "fn is_integer(f: f64) -> bool {\n\n f.round() == f\n\n}\n\n\n", "file_path": "emath/src/smart_aim.rs", "rank": 51, "score": 234703.56424220616 }, { "content": "/// The menu bar goes well in a [`TopBottomPanel::top`],\n\n/// but can also be placed in a `Window`.\n\n/// In the latter case 
you may want to wrap it in `Frame`.\n\npub fn bar<R>(ui: &mut Ui, add_contents: impl FnOnce(&mut Ui) -> R) -> InnerResponse<R> {\n\n ui.horizontal(|ui| {\n\n let mut style = (**ui.style()).clone();\n\n style.spacing.button_padding = vec2(2.0, 0.0);\n\n // style.visuals.widgets.active.bg_fill = Color32::TRANSPARENT;\n\n style.visuals.widgets.active.bg_stroke = Stroke::none();\n\n // style.visuals.widgets.hovered.bg_fill = Color32::TRANSPARENT;\n\n style.visuals.widgets.hovered.bg_stroke = Stroke::none();\n\n style.visuals.widgets.inactive.bg_fill = Color32::TRANSPARENT;\n\n style.visuals.widgets.inactive.bg_stroke = Stroke::none();\n\n ui.set_style(style);\n\n\n\n // Take full width and fixed height:\n\n let height = ui.spacing().interact_size.y;\n\n ui.set_min_size(vec2(ui.available_width(), height));\n\n\n\n add_contents(ui)\n\n })\n\n}\n\n\n", "file_path": "egui/src/menu.rs", "rank": 52, "score": 233170.95379217158 }, { "content": "fn set(get_set_value: &mut GetSetValue<'_>, value: f64) {\n\n (get_set_value)(Some(value));\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n\n", "file_path": "egui/src/widgets/slider.rs", "rank": 53, "score": 231534.7866664477 }, { "content": "pub fn stroke_ui(ui: &mut crate::Ui, stroke: &mut epaint::Stroke, text: &str) {\n\n let epaint::Stroke { width, color } = stroke;\n\n ui.horizontal(|ui| {\n\n ui.add(DragValue::new(width).speed(0.1).clamp_range(0.0..=5.0))\n\n .on_hover_text(\"Width\");\n\n ui.color_edit_button_srgba(color);\n\n ui.label(text);\n\n\n\n // stroke preview:\n\n let (_id, stroke_rect) = ui.allocate_space(ui.spacing().interact_size);\n\n let left = stroke_rect.left_center();\n\n let right = stroke_rect.right_center();\n\n ui.painter().line_segment([left, right], (*width, *color));\n\n });\n\n}\n\n\n\npub(crate) fn shadow_ui(ui: &mut Ui, shadow: &mut epaint::Shadow, text: &str) {\n\n let epaint::Shadow { extrusion, color } = shadow;\n\n ui.horizontal(|ui| {\n\n 
ui.label(text);\n\n ui.add(\n\n DragValue::new(extrusion)\n\n .speed(1.0)\n\n .clamp_range(0.0..=100.0),\n\n )\n\n .on_hover_text(\"Extrusion\");\n\n ui.color_edit_button_srgba(color);\n\n });\n\n}\n", "file_path": "egui/src/widgets/mod.rs", "rank": 54, "score": 230052.67091009644 }, { "content": "fn set(get_set_value: &mut GetSetValue<'_>, value: f64) {\n\n (get_set_value)(Some(value));\n\n}\n\n\n\n/// A numeric value that you can change by dragging the number. More compact than a [`Slider`].\n\n///\n\n/// ```\n\n/// # let ui = &mut egui::Ui::__test();\n\n/// # let mut my_f32: f32 = 0.0;\n\n/// ui.add(egui::DragValue::new(&mut my_f32).speed(0.1));\n\n/// ```\n\n#[must_use = \"You should put this widget in an ui with `ui.add(widget);`\"]\n\npub struct DragValue<'a> {\n\n get_set_value: GetSetValue<'a>,\n\n speed: f64,\n\n prefix: String,\n\n suffix: String,\n\n clamp_range: RangeInclusive<f64>,\n\n min_decimals: usize,\n\n max_decimals: Option<usize>,\n", "file_path": "egui/src/widgets/drag_value.rs", "rank": 55, "score": 228498.36838967144 }, { "content": "/// Shows a button with the given color.\n\n/// If the user clicks the button, a full color picker is shown.\n\npub fn color_edit_button_srgba(ui: &mut Ui, srgba: &mut Color32, alpha: Alpha) -> Response {\n\n // To ensure we keep hue slider when `srgba` is gray we store the\n\n // full `Hsva` in a cache:\n\n\n\n let mut hsva = ui\n\n .ctx()\n\n .memory()\n\n .data_temp\n\n .get_or_default::<Cache<Color32, Hsva>>()\n\n .get(srgba)\n\n .cloned()\n\n .unwrap_or_else(|| Hsva::from(*srgba));\n\n\n\n let response = color_edit_button_hsva(ui, &mut hsva, alpha);\n\n\n\n *srgba = Color32::from(hsva);\n\n\n\n ui.ctx()\n\n .memory()\n\n .data_temp\n\n .get_mut_or_default::<Cache<Color32, Hsva>>()\n\n .set(*srgba, hsva);\n\n\n\n response\n\n}\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 56, "score": 227843.23924181954 }, { "content": "#[derive(Clone, Hash, PartialEq, Eq)]\n\nstruct Gradient(pub 
Vec<Color32>);\n\n\n\nimpl Gradient {\n\n pub fn one_color(srgba: Color32) -> Self {\n\n Self(vec![srgba, srgba])\n\n }\n\n pub fn texture_gradient(left: Color32, right: Color32) -> Self {\n\n Self(vec![left, right])\n\n }\n\n pub fn ground_truth_linear_gradient(left: Color32, right: Color32) -> Self {\n\n let left = Rgba::from(left);\n\n let right = Rgba::from(right);\n\n\n\n let n = 255;\n\n Self(\n\n (0..=n)\n\n .map(|i| {\n\n let t = i as f32 / n as f32;\n\n Color32::from(lerp(left..=right, t))\n\n })\n", "file_path": "egui_demo_lib/src/apps/color_test.rs", "rank": 57, "score": 226220.88101730158 }, { "content": "fn insert_text<S: TextBuffer>(ccursor: &mut CCursor, text: &mut S, text_to_insert: &str) {\n\n ccursor.index += text.insert_text(text_to_insert, ccursor.index);\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n\n", "file_path": "egui/src/widgets/text_edit.rs", "rank": 58, "score": 225067.48676136092 }, { "content": "pub fn pos_from_mouse_event(canvas_id: &str, event: &web_sys::MouseEvent) -> egui::Pos2 {\n\n let canvas = canvas_element(canvas_id).unwrap();\n\n let rect = canvas.get_bounding_client_rect();\n\n egui::Pos2 {\n\n x: event.client_x() as f32 - rect.left() as f32,\n\n y: event.client_y() as f32 - rect.top() as f32,\n\n }\n\n}\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 59, "score": 223474.80200238156 }, { "content": "fn bullet_point(ui: &mut Ui, width: f32) -> Response {\n\n let row_height = ui.fonts()[TextStyle::Body].row_height();\n\n let (rect, response) = ui.allocate_exact_size(vec2(width, row_height), Sense::hover());\n\n ui.painter().circle_filled(\n\n rect.center(),\n\n rect.height() / 8.0,\n\n ui.visuals().strong_text_color(),\n\n );\n\n response\n\n}\n\n\n", "file_path": "egui_demo_lib/src/easy_mark/easy_mark_viewer.rs", "rank": 60, "score": 223049.21831997443 }, { "content": "/// Parse and display a VERY simple and small subset of Markdown.\n\npub fn easy_mark(ui: &mut Ui, 
easy_mark: &str) {\n\n easy_mark_it(ui, easy_mark::Parser::new(easy_mark))\n\n}\n\n\n", "file_path": "egui_demo_lib/src/easy_mark/easy_mark_viewer.rs", "rank": 61, "score": 219850.02314647942 }, { "content": "fn pointer_pressed_on_area(ctx: &Context, layer_id: LayerId) -> bool {\n\n if let Some(pointer_pos) = ctx.input().pointer.interact_pos() {\n\n ctx.input().pointer.any_pressed() && ctx.layer_id_at(pointer_pos) == Some(layer_id)\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "egui/src/containers/area.rs", "rank": 62, "score": 218672.10704781325 }, { "content": "fn byte_index_from_char_index(s: &str, char_index: usize) -> usize {\n\n for (ci, (bi, _)) in s.char_indices().enumerate() {\n\n if ci == char_index {\n\n return bi;\n\n }\n\n }\n\n s.len()\n\n}\n\n\n", "file_path": "egui/src/widgets/text_edit.rs", "rank": 63, "score": 218439.40681386055 }, { "content": "/// Alternative to `FileStorage`\n\npub fn read_memory(ctx: &egui::Context, memory_file_path: impl AsRef<std::path::Path>) {\n\n let memory: Option<egui::Memory> = read_ron(memory_file_path);\n\n if let Some(memory) = memory {\n\n *ctx.memory() = memory;\n\n }\n\n}\n\n\n", "file_path": "egui_glium/src/persistence.rs", "rank": 64, "score": 217754.79442753355 }, { "content": "fn pixel_test(ui: &mut Ui) {\n\n ui.label(\"Each subsequent square should be one physical pixel larger than the previous. They should be exactly one physical pixel apart. 
They should be perfectly aligned to the pixel grid.\");\n\n\n\n let pixels_per_point = ui.ctx().pixels_per_point();\n\n let num_squares: u32 = 8;\n\n let size_pixels = Vec2::new(\n\n ((num_squares + 1) * (num_squares + 2) / 2) as f32,\n\n num_squares as f32,\n\n );\n\n let size_points = size_pixels / pixels_per_point + Vec2::splat(2.0);\n\n let (response, painter) = ui.allocate_painter(size_points, Sense::hover());\n\n\n\n let mut cursor_pixel = Pos2::new(\n\n response.rect.min.x * pixels_per_point,\n\n response.rect.min.y * pixels_per_point,\n\n )\n\n .ceil();\n\n for size in 1..=num_squares {\n\n let rect_points = Rect::from_min_size(\n\n Pos2::new(\n\n cursor_pixel.x / pixels_per_point,\n\n cursor_pixel.y / pixels_per_point,\n\n ),\n\n Vec2::splat(size as f32) / pixels_per_point,\n\n );\n\n painter.rect_filled(rect_points, 0.0, egui::Color32::WHITE);\n\n cursor_pixel.x += (1 + size) as f32;\n\n }\n\n}\n", "file_path": "egui_demo_lib/src/apps/color_test.rs", "rank": 65, "score": 216640.03894308745 }, { "content": "fn push_touches(runner: &mut AppRunner, phase: egui::TouchPhase, event: &web_sys::TouchEvent) {\n\n let canvas_origin = canvas_origin(runner.canvas_id());\n\n for touch_idx in 0..event.changed_touches().length() {\n\n if let Some(touch) = event.changed_touches().item(touch_idx) {\n\n runner.input.raw.events.push(egui::Event::Touch {\n\n device_id: egui::TouchDeviceId(0),\n\n id: egui::TouchId::from(touch.identifier()),\n\n phase,\n\n pos: pos_from_touch(canvas_origin, &touch),\n\n force: touch.force(),\n\n });\n\n }\n\n }\n\n}\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 66, "score": 216477.30058051075 }, { "content": "#[cfg_attr(feature = \"persistence\", derive(serde::Deserialize, serde::Serialize))]\n\n#[cfg_attr(feature = \"persistence\", serde(default))]\n\nstruct Demos {\n\n #[cfg_attr(feature = \"persistence\", serde(skip))]\n\n demos: Vec<Box<dyn Demo>>,\n\n\n\n open: BTreeSet<String>,\n\n}\n\n\n\nimpl Default for Demos {\n\n fn default() 
-> Self {\n\n Self::from_demos(vec![\n\n Box::new(super::dancing_strings::DancingStrings::default()),\n\n Box::new(super::drag_and_drop::DragAndDropDemo::default()),\n\n Box::new(super::font_book::FontBook::default()),\n\n Box::new(super::MiscDemoWindow::default()),\n\n Box::new(super::multi_touch::MultiTouch::default()),\n\n Box::new(super::painting::Painting::default()),\n\n Box::new(super::plot_demo::PlotDemo::default()),\n\n Box::new(super::scrolling::Scrolling::default()),\n\n Box::new(super::sliders::Sliders::default()),\n\n Box::new(super::widget_gallery::WidgetGallery::default()),\n", "file_path": "egui_demo_lib/src/apps/demo/demo_app_windows.rs", "rank": 67, "score": 214181.04234089382 }, { "content": "#[cfg_attr(feature = \"persistence\", derive(serde::Deserialize, serde::Serialize))]\n\n#[cfg_attr(feature = \"persistence\", serde(default))]\n\nstruct Tests {\n\n #[cfg_attr(feature = \"persistence\", serde(skip))]\n\n demos: Vec<Box<dyn Demo>>,\n\n\n\n open: BTreeSet<String>,\n\n}\n\n\n\nimpl Default for Tests {\n\n fn default() -> Self {\n\n Self::from_demos(vec![\n\n Box::new(super::tests::CursorTest::default()),\n\n Box::new(super::tests::IdTest::default()),\n\n Box::new(super::tests::InputTest::default()),\n\n Box::new(super::layout_test::LayoutTest::default()),\n\n Box::new(super::tests::ManualLayoutTest::default()),\n\n Box::new(super::tests::TableTest::default()),\n\n ])\n\n }\n\n}\n\n\n", "file_path": "egui_demo_lib/src/apps/demo/demo_app_windows.rs", "rank": 68, "score": 214181.04234089382 }, { "content": "fn demo_ui(ui: &mut Ui) {\n\n ui.monospace(\"Example widgets:\");\n\n for _ in 0..3 {\n\n ui.label(\"label\");\n\n }\n\n for _ in 0..3 {\n\n let mut dummy = false;\n\n ui.checkbox(&mut dummy, \"checkbox\");\n\n }\n\n for _ in 0..3 {\n\n let _ = ui.button(\"button\");\n\n }\n\n}\n", "file_path": "egui_demo_lib/src/apps/demo/layout_test.rs", "rank": 69, "score": 213362.08541572472 }, { "content": "pub fn label(ui: &mut Ui, alloc_info: 
&epaint::stats::AllocInfo, what: &str) -> Response {\n\n ui.add(Label::new(alloc_info.format(what)).wrap(false))\n\n}\n\n\n\nimpl Widget for &mut epaint::TessellationOptions {\n\n fn ui(self, ui: &mut Ui) -> Response {\n\n ui.vertical(|ui| {\n\n let epaint::TessellationOptions {\n\n pixels_per_point: _,\n\n aa_size: _,\n\n anti_alias,\n\n coarse_tessellation_culling,\n\n debug_paint_clip_rects,\n\n debug_paint_text_rects,\n\n debug_ignore_clip_rects,\n\n } = self;\n\n ui.checkbox(anti_alias, \"Antialias\");\n\n ui.checkbox(\n\n coarse_tessellation_culling,\n\n \"Do coarse culling in the tessellator\",\n", "file_path": "egui/src/introspection.rs", "rank": 70, "score": 209782.01921158266 }, { "content": "fn close_button(ui: &mut Ui, rect: Rect) -> Response {\n\n let close_id = ui.auto_id_with(\"window_close_button\");\n\n let response = ui.interact(rect, close_id, Sense::click());\n\n ui.expand_to_include_rect(response.rect);\n\n\n\n let visuals = ui.style().interact(&response);\n\n let rect = rect.shrink(2.0).expand(visuals.expansion);\n\n let stroke = visuals.fg_stroke;\n\n ui.painter()\n\n .line_segment([rect.left_top(), rect.right_bottom()], stroke);\n\n ui.painter()\n\n .line_segment([rect.right_top(), rect.left_bottom()], stroke);\n\n response\n\n}\n", "file_path": "egui/src/containers/window.rs", "rank": 71, "score": 208876.68056340236 }, { "content": "fn cursor_web_name(cursor: egui::CursorIcon) -> &'static str {\n\n match cursor {\n\n egui::CursorIcon::Alias => \"alias\",\n\n egui::CursorIcon::AllScroll => \"all-scroll\",\n\n egui::CursorIcon::Cell => \"cell\",\n\n egui::CursorIcon::ContextMenu => \"context-menu\",\n\n egui::CursorIcon::Copy => \"copy\",\n\n egui::CursorIcon::Crosshair => \"crosshair\",\n\n egui::CursorIcon::Default => \"default\",\n\n egui::CursorIcon::Grab => \"grab\",\n\n egui::CursorIcon::Grabbing => \"grabbing\",\n\n egui::CursorIcon::Help => \"help\",\n\n egui::CursorIcon::Move => \"move\",\n\n egui::CursorIcon::NoDrop => 
\"no-drop\",\n\n egui::CursorIcon::None => \"none\",\n\n egui::CursorIcon::NotAllowed => \"not-allowed\",\n\n egui::CursorIcon::PointingHand => \"pointer\",\n\n egui::CursorIcon::Progress => \"progress\",\n\n egui::CursorIcon::ResizeHorizontal => \"ew-resize\",\n\n egui::CursorIcon::ResizeNeSw => \"nesw-resize\",\n\n egui::CursorIcon::ResizeNwSe => \"nwse-resize\",\n\n egui::CursorIcon::ResizeVertical => \"ns-resize\",\n\n egui::CursorIcon::Text => \"text\",\n\n egui::CursorIcon::VerticalText => \"vertical-text\",\n\n egui::CursorIcon::Wait => \"wait\",\n\n egui::CursorIcon::ZoomIn => \"zoom-in\",\n\n egui::CursorIcon::ZoomOut => \"zoom-out\",\n\n }\n\n}\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 72, "score": 208341.0772207473 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\npub fn run_native(app: Box<dyn epi::App>, native_options: epi::NativeOptions) -> ! {\n\n egui_glium::run(app, native_options)\n\n}\n", "file_path": "eframe/src/lib.rs", "rank": 74, "score": 208286.40714627694 }, { "content": "#[test]\n\nfn test_egui_zero_window_size() {\n\n let mut demo_windows = crate::DemoWindows::default();\n\n let mut ctx = egui::CtxRef::default();\n\n let raw_input = egui::RawInput {\n\n screen_rect: Some(egui::Rect::from_min_max(egui::Pos2::ZERO, egui::Pos2::ZERO)),\n\n ..Default::default()\n\n };\n\n\n\n const NUM_FRAMES: usize = 5;\n\n for _ in 0..NUM_FRAMES {\n\n ctx.begin_frame(raw_input.clone());\n\n demo_windows.ui(&ctx);\n\n let (_output, shapes) = ctx.end_frame();\n\n let clipped_meshes = ctx.tessellate(shapes);\n\n assert!(clipped_meshes.is_empty(), \"There should be nothing to show\");\n\n }\n\n}\n", "file_path": "egui_demo_lib/src/lib.rs", "rank": 75, "score": 206952.0516445308 }, { "content": "/// Web sends all all keys as strings, so it is up to us to figure out if it is\n\n/// a real text input or the name of a key.\n\npub fn translate_key(key: &str) -> Option<egui::Key> {\n\n match key {\n\n \"ArrowDown\" => Some(egui::Key::ArrowDown),\n\n 
\"ArrowLeft\" => Some(egui::Key::ArrowLeft),\n\n \"ArrowRight\" => Some(egui::Key::ArrowRight),\n\n \"ArrowUp\" => Some(egui::Key::ArrowUp),\n\n\n\n \"Esc\" | \"Escape\" => Some(egui::Key::Escape),\n\n \"Tab\" => Some(egui::Key::Tab),\n\n \"Backspace\" => Some(egui::Key::Backspace),\n\n \"Enter\" => Some(egui::Key::Enter),\n\n \"Space\" | \" \" => Some(egui::Key::Space),\n\n\n\n \"Help\" | \"Insert\" => Some(egui::Key::Insert),\n\n \"Delete\" => Some(egui::Key::Delete),\n\n \"Home\" => Some(egui::Key::Home),\n\n \"End\" => Some(egui::Key::End),\n\n \"PageUp\" => Some(egui::Key::PageUp),\n\n \"PageDown\" => Some(egui::Key::PageDown),\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 76, "score": 206617.37982126974 }, { "content": "/// where to put the zero cutoff for logarithmic sliders\n\n/// that crosses zero ?\n\nfn logaritmic_zero_cutoff(min: f64, max: f64) -> f64 {\n\n assert!(min < 0.0 && 0.0 < max);\n\n\n\n let min_magnitude = if min == -INFINITY {\n\n INF_RANGE_MAGNITUDE\n\n } else {\n\n min.abs().log10().abs()\n\n };\n\n let max_magnitude = if max == INFINITY {\n\n INF_RANGE_MAGNITUDE\n\n } else {\n\n max.log10().abs()\n\n };\n\n\n\n let cutoff = min_magnitude / (min_magnitude + max_magnitude);\n\n crate::egui_assert!(0.0 <= cutoff && cutoff <= 1.0);\n\n cutoff\n\n}\n", "file_path": "egui/src/widgets/slider.rs", "rank": 78, "score": 204516.80205442465 }, { "content": "fn is_paste_command(modifiers: egui::Modifiers, keycode: VirtualKeyCode) -> bool {\n\n (modifiers.command && keycode == VirtualKeyCode::V)\n\n || (cfg!(target_os = \"windows\") && modifiers.shift && keycode == VirtualKeyCode::Insert)\n\n}\n\n\n", "file_path": "egui_glium/src/lib.rs", "rank": 79, "score": 202331.41935148288 }, { "content": "fn is_cut_command(modifiers: egui::Modifiers, keycode: VirtualKeyCode) -> bool {\n\n (modifiers.command && keycode == VirtualKeyCode::X)\n\n || (cfg!(target_os = \"windows\") && modifiers.shift && keycode == VirtualKeyCode::Delete)\n\n}\n\n\n", 
"file_path": "egui_glium/src/lib.rs", "rank": 80, "score": 202331.41935148288 }, { "content": "fn is_copy_command(modifiers: egui::Modifiers, keycode: VirtualKeyCode) -> bool {\n\n (modifiers.command && keycode == VirtualKeyCode::C)\n\n || (cfg!(target_os = \"windows\") && modifiers.ctrl && keycode == VirtualKeyCode::Insert)\n\n}\n\n\n", "file_path": "egui_glium/src/lib.rs", "rank": 81, "score": 202331.41935148288 }, { "content": "fn megabytes(size: usize) -> String {\n\n format!(\"{:.2} MB\", size as f64 / 1e6)\n\n}\n", "file_path": "epaint/src/stats.rs", "rank": 82, "score": 201299.49303948836 }, { "content": "fn clamp_to_range(x: f64, range: RangeInclusive<f64>) -> f64 {\n\n x.clamp(\n\n range.start().min(*range.end()),\n\n range.start().max(*range.end()),\n\n )\n\n}\n", "file_path": "egui/src/widgets/drag_value.rs", "rank": 84, "score": 200041.65198707805 }, { "content": "/// Round a value to the given number of decimal places.\n\npub fn round_to_decimals(value: f64, decimal_places: usize) -> f64 {\n\n // This is a stupid way of doing this, but stupid works.\n\n format!(\"{:.*}\", decimal_places, value)\n\n .parse()\n\n .unwrap_or(value)\n\n}\n\n\n", "file_path": "emath/src/lib.rs", "rank": 85, "score": 199885.30282058133 }, { "content": "pub fn canvas_element_or_die(canvas_id: &str) -> web_sys::HtmlCanvasElement {\n\n crate::canvas_element(canvas_id)\n\n .unwrap_or_else(|| panic!(\"Failed to find canvas with id '{}'\", canvas_id))\n\n}\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 86, "score": 199696.05116695928 }, { "content": "fn range_log10(min: f64, max: f64, spec: &SliderSpec) -> (f64, f64) {\n\n assert!(spec.logarithmic);\n\n assert!(min <= max);\n\n\n\n if min == 0.0 && max == INFINITY {\n\n (spec.smallest_positive.log10(), INF_RANGE_MAGNITUDE)\n\n } else if min == 0.0 {\n\n if spec.smallest_positive < max {\n\n (spec.smallest_positive.log10(), max.log10())\n\n } else {\n\n (max.log10() - INF_RANGE_MAGNITUDE, max.log10())\n\n }\n\n } else if max 
== INFINITY {\n\n if min < spec.largest_finite {\n\n (min.log10(), spec.largest_finite.log10())\n\n } else {\n\n (min.log10(), min.log10() + INF_RANGE_MAGNITUDE)\n\n }\n\n } else {\n\n (min.log10(), max.log10())\n\n }\n\n}\n\n\n", "file_path": "egui/src/widgets/slider.rs", "rank": 87, "score": 199024.07573185937 }, { "content": "fn background_checkers(painter: &Painter, rect: Rect) {\n\n let rect = rect.shrink(0.5); // Small hack to avoid the checkers from peeking through the sides\n\n if !rect.is_positive() {\n\n return;\n\n }\n\n\n\n let mut top_color = Color32::from_gray(128);\n\n let mut bottom_color = Color32::from_gray(32);\n\n let checker_size = Vec2::splat(rect.height() / 2.0);\n\n let n = (rect.width() / checker_size.x).round() as u32;\n\n\n\n let mut mesh = Mesh::default();\n\n for i in 0..n {\n\n let x = lerp(rect.left()..=rect.right(), i as f32 / (n as f32));\n\n mesh.add_colored_rect(\n\n Rect::from_min_size(pos2(x, rect.top()), checker_size),\n\n top_color,\n\n );\n\n mesh.add_colored_rect(\n\n Rect::from_min_size(pos2(x, rect.center().y), checker_size),\n\n bottom_color,\n\n );\n\n std::mem::swap(&mut top_color, &mut bottom_color);\n\n }\n\n painter.add(Shape::mesh(mesh));\n\n}\n\n\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 88, "score": 198847.34378679795 }, { "content": "#[cfg_attr(feature = \"persistence\", derive(serde::Deserialize, serde::Serialize))]\n\n#[cfg_attr(feature = \"persistence\", serde(default))]\n\nstruct BoxPainting {\n\n size: Vec2,\n\n corner_radius: f32,\n\n stroke_width: f32,\n\n num_boxes: usize,\n\n}\n\n\n\nimpl Default for BoxPainting {\n\n fn default() -> Self {\n\n Self {\n\n size: vec2(64.0, 32.0),\n\n corner_radius: 5.0,\n\n stroke_width: 2.0,\n\n num_boxes: 1,\n\n }\n\n }\n\n}\n\n\n\nimpl BoxPainting {\n\n pub fn ui(&mut self, ui: &mut Ui) {\n", "file_path": "egui_demo_lib/src/apps/demo/misc_demo_window.rs", "rank": 89, "score": 198740.00906251118 }, { "content": 
"#[derive(PartialEq)]\n\n#[cfg_attr(feature = \"persistence\", derive(serde::Deserialize, serde::Serialize))]\n\n#[cfg_attr(feature = \"persistence\", serde(default))]\n\nstruct ColorWidgets {\n\n srgba_unmul: [u8; 4],\n\n srgba_premul: [u8; 4],\n\n rgba_unmul: [f32; 4],\n\n rgba_premul: [f32; 4],\n\n}\n\n\n\nimpl Default for ColorWidgets {\n\n fn default() -> Self {\n\n // Approximately the same color.\n\n ColorWidgets {\n\n srgba_unmul: [0, 255, 183, 127],\n\n srgba_premul: [0, 187, 140, 127],\n\n rgba_unmul: [0.0, 1.0, 0.5, 0.5],\n\n rgba_premul: [0.0, 0.5, 0.25, 0.5],\n\n }\n\n }\n\n}\n\n\n\nimpl ColorWidgets {\n", "file_path": "egui_demo_lib/src/apps/demo/misc_demo_window.rs", "rank": 90, "score": 198739.92119447875 }, { "content": "pub fn canvas_element(canvas_id: &str) -> Option<web_sys::HtmlCanvasElement> {\n\n use wasm_bindgen::JsCast;\n\n let document = web_sys::window()?.document()?;\n\n let canvas = document.get_element_by_id(canvas_id)?;\n\n canvas.dyn_into::<web_sys::HtmlCanvasElement>().ok()\n\n}\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 91, "score": 197062.21253988205 }, { "content": "fn main() {\n\n\\tprintln!(\\\"Hello world!\\\");\n\n}\n\n\"\n\n .to_owned(),\n\n }\n\n }\n\n}\n\n\n\nimpl Widgets {\n\n pub fn ui(&mut self, ui: &mut Ui) {\n\n let Self {\n\n angle,\n\n password,\n\n lock_focus,\n\n code_snippet,\n\n } = self;\n\n ui.vertical_centered(|ui| {\n\n ui.add(crate::__egui_github_link_file_line!());\n\n });\n", "file_path": "egui_demo_lib/src/apps/demo/misc_demo_window.rs", "rank": 92, "score": 196168.01153632754 }, { "content": "pub fn read_ron<T>(ron_path: impl AsRef<Path>) -> Option<T>\n\nwhere\n\n T: serde::de::DeserializeOwned,\n\n{\n\n match std::fs::File::open(ron_path) {\n\n Ok(file) => {\n\n let reader = std::io::BufReader::new(file);\n\n match ron::de::from_reader(reader) {\n\n Ok(value) => Some(value),\n\n Err(err) => {\n\n eprintln!(\"ERROR: Failed to parse RON: {}\", err);\n\n None\n\n }\n\n }\n\n }\n\n Err(_err) => 
{\n\n // File probably doesn't exist. That's fine.\n\n None\n\n }\n\n }\n\n}\n\n// ----------------------------------------------------------------------------\n\n\n", "file_path": "egui_glium/src/persistence.rs", "rank": 93, "score": 195626.4040136522 }, { "content": "/// If context is running under mobile device?\n\nfn is_mobile() -> Option<bool> {\n\n let user_agent = web_sys::window()?.navigator().user_agent().ok()?;\n\n let is_mobile = MOBILE_DEVICE.iter().any(|&name| user_agent.contains(name));\n\n Some(is_mobile)\n\n}\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 94, "score": 194695.24855534278 }, { "content": "fn translate_cursor(cursor_icon: egui::CursorIcon) -> Option<glutin::window::CursorIcon> {\n\n match cursor_icon {\n\n CursorIcon::None => None,\n\n\n\n CursorIcon::Alias => Some(glutin::window::CursorIcon::Alias),\n\n CursorIcon::AllScroll => Some(glutin::window::CursorIcon::AllScroll),\n\n CursorIcon::Cell => Some(glutin::window::CursorIcon::Cell),\n\n CursorIcon::ContextMenu => Some(glutin::window::CursorIcon::ContextMenu),\n\n CursorIcon::Copy => Some(glutin::window::CursorIcon::Copy),\n\n CursorIcon::Crosshair => Some(glutin::window::CursorIcon::Crosshair),\n\n CursorIcon::Default => Some(glutin::window::CursorIcon::Default),\n\n CursorIcon::Grab => Some(glutin::window::CursorIcon::Grab),\n\n CursorIcon::Grabbing => Some(glutin::window::CursorIcon::Grabbing),\n\n CursorIcon::Help => Some(glutin::window::CursorIcon::Help),\n\n CursorIcon::Move => Some(glutin::window::CursorIcon::Move),\n\n CursorIcon::NoDrop => Some(glutin::window::CursorIcon::NoDrop),\n\n CursorIcon::NotAllowed => Some(glutin::window::CursorIcon::NotAllowed),\n\n CursorIcon::PointingHand => Some(glutin::window::CursorIcon::Hand),\n\n CursorIcon::Progress => Some(glutin::window::CursorIcon::Progress),\n\n CursorIcon::ResizeHorizontal => Some(glutin::window::CursorIcon::EwResize),\n", "file_path": "egui_glium/src/lib.rs", "rank": 95, "score": 194413.6363692753 }, { "content": 
"pub fn local_storage_set(key: &str, value: &str) {\n\n local_storage().map(|storage| storage.set_item(key, value));\n\n}\n\n\n", "file_path": "egui_web/src/lib.rs", "rank": 96, "score": 193161.50595268048 }, { "content": "fn show_hsva(ui: &mut Ui, color: Hsva, desired_size: Vec2) -> Response {\n\n let (rect, response) = ui.allocate_at_least(desired_size, Sense::hover());\n\n background_checkers(ui.painter(), rect);\n\n if true {\n\n let left = Rect::from_min_max(rect.left_top(), rect.center_bottom());\n\n let right = Rect::from_min_max(rect.center_top(), rect.right_bottom());\n\n ui.painter().rect_filled(left, 0.0, color);\n\n ui.painter().rect_filled(right, 0.0, color.to_opaque());\n\n } else {\n\n ui.painter().add(Shape::Rect {\n\n rect,\n\n corner_radius: 2.0,\n\n fill: color.into(),\n\n stroke: Stroke::new(3.0, color.to_opaque()),\n\n });\n\n }\n\n response\n\n}\n\n\n", "file_path": "egui/src/widgets/color_picker.rs", "rank": 98, "score": 192108.63993429567 }, { "content": "use crate::*;\n\n\n\npub use egui::{pos2, Color32};\n\n\n\n// ----------------------------------------------------------------------------\n\n\n\npub struct WebBackend {\n\n egui_ctx: egui::CtxRef,\n\n painter: Box<dyn Painter>,\n\n previous_frame_time: Option<f32>,\n\n frame_start: Option<f64>,\n\n}\n\n\n\nimpl WebBackend {\n\n pub fn new(canvas_id: &str) -> Result<Self, JsValue> {\n\n let ctx = egui::CtxRef::default();\n\n\n\n let painter: Box<dyn Painter> =\n\n if let Ok(webgl2_painter) = webgl2::WebGl2Painter::new(canvas_id) {\n\n console_log(\"Using WebGL2 backend\");\n", "file_path": "egui_web/src/backend.rs", "rank": 99, "score": 45.520976384193574 } ]
Rust
rust_dev_preview/cross_service/detect_faces/src/main.rs
grjan7/aws-doc-sdk-examples
4a9ce4ee5c9a8808fd6c905dbef25d3674ef1dd5
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ use aws_config::meta::region::RegionProviderChain; use std::error::Error; use std::path::Path; use structopt::StructOpt; #[derive(Debug)] struct Person { from_left: f32, age_range: String, gender: String, emotion: String, } #[derive(Debug, StructOpt)] struct Opt { #[structopt(short, long)] bucket: String, #[structopt(short, long)] filename: String, #[structopt(short, long)] region: Option<String>, #[structopt(short, long)] verbose: bool, } async fn save_bucket( client: &aws_sdk_s3::Client, body: aws_sdk_s3::types::ByteStream, bucket: &str, content_type: &str, key: &str, ) -> Result<(), aws_sdk_s3::Error> { client .put_object() .body(body) .bucket(bucket) .content_type(content_type) .key(key) .send() .await?; println!("Added file to bucket."); println!(); Ok(()) } async fn describe_faces( verbose: bool, client: &aws_sdk_rekognition::Client, image: aws_sdk_rekognition::model::Image, ) -> Result<(), aws_sdk_rekognition::Error> { let resp = client .detect_faces() .image(image) .attributes(aws_sdk_rekognition::model::Attribute::All) .send() .await?; let mut persons: Vec<Person> = vec![]; for detail in resp.face_details.unwrap_or_default() { if verbose { println!("{:?}", detail); println!(); } let age = detail.age_range.unwrap(); let mut range: String = age.low.unwrap_or_default().to_string().to_owned(); range.push('-'); range.push_str(&age.high.unwrap_or_default().to_string()); let mut e: String = String::from(""); let mut confidence = 0.0; for emotion in detail.emotions.unwrap_or_default() { let c = emotion.confidence.unwrap_or_default(); if c > confidence { confidence = c; e = String::from(emotion.r#type.unwrap().as_ref()); } } let p = Person { from_left: detail.bounding_box.unwrap().left.unwrap_or_default(), age_range: range, gender: String::from(detail.gender.unwrap().value.unwrap().as_ref()), emotion: e, }; persons.push(p); } persons.sort_by(|a, b| 
a.from_left.partial_cmp(&b.from_left).unwrap()); if !verbose { println!("Face details (from left):"); println!(); for p in persons { println!("From left: {}", p.from_left); println!("Age range: {}", p.age_range); println!("Gender: {}", p.gender); println!("Emotion: {}", p.emotion); println!(); } } Ok(()) } #[tokio::main] async fn main() -> Result<(), Box<dyn Error>> { tracing_subscriber::fmt::init(); let Opt { bucket, filename, region, verbose, } = Opt::from_args(); let mut content_type = String::new(); let path = Path::new(&filename); let extension: &str = path.extension().unwrap().to_str().unwrap(); match extension { "jpg" => content_type.push_str("image/jpg"), "jpeg" => content_type.push_str("image/jpg"), "png" => content_type.push_str("image/png"), _ => { println!(); println!("{} is not a JPG, JPEG, or PNG file!", filename); println!(); return Ok(()); } } let s3_region = region.clone(); let rek_region = region.clone(); let rek_region_provider = RegionProviderChain::first_try(s3_region.map(aws_sdk_rekognition::Region::new)) .or_default_provider() .or_else(aws_sdk_rekognition::Region::new("us-west-2")); let s3_region_provider = RegionProviderChain::first_try(rek_region.map(aws_sdk_s3::Region::new)) .or_default_provider() .or_else(aws_sdk_s3::Region::new("us-west-2")); println!(); if verbose { println!( "Rekognition client version: {}", aws_sdk_rekognition::PKG_VERSION ); println!("S3 client version: {}", aws_sdk_s3::PKG_VERSION); println!("Bucket: {}", bucket); println!("Filename: {}", filename); println!( "Region: {}", s3_region_provider.region().await.unwrap().as_ref() ); println!(); } let s3_shared_config = aws_config::from_env() .region(s3_region_provider) .load() .await; let s3_client = aws_sdk_s3::Client::new(&s3_shared_config); let rek_shared_config = aws_config::from_env() .region(rek_region_provider) .load() .await; let rek_client = aws_sdk_rekognition::Client::new(&rek_shared_config); let body = aws_sdk_s3::types::ByteStream::from_path(path).await; let 
key: String = String::from("uploads/") + &filename; save_bucket(&s3_client, body.unwrap(), &bucket, &content_type, &filename) .await .unwrap(); let s3_obj = aws_sdk_rekognition::model::S3Object::builder() .bucket(bucket) .name(key) .build(); let s3_img = aws_sdk_rekognition::model::Image::builder() .s3_object(s3_obj) .build(); describe_faces(verbose, &rek_client, s3_img).await.unwrap(); Ok(()) }
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * SPDX-License-Identifier: Apache-2.0. */ use aws_config::meta::region::RegionProviderChain; use std::error::Error; use std::path::Path; use structopt::StructOpt; #[derive(Debug)] struct Person { from_left: f32, age_range: String, gender: String, emotion: String, } #[derive(Debug, StructOpt)] struct Opt { #[structopt(short, long)] bucket: String, #[structopt(short, long)] filename: String, #[structopt(short, long)] region: Option<String>, #[structopt(short, long)] verbose: bool, } async fn save_bucket( client: &aws_sdk_s3::Client, body: aws_sdk_s3::types::ByteStream, bucket: &str, content_type: &str, key: &str, ) -> Result<(), aws_sdk_s3::Error> { client .put_object() .body(body) .bucket(bucket) .content_type(content_type) .key(key) .send() .await?; println!("Added file to bucket."); println!(); Ok(()) } async fn describe_faces( verbose: bool, client: &aws_sdk_rekognition::Client, image: aws_sdk_rekognition::model::Image, ) -> Result<(), aws_sdk_rekognition::Error> { let resp = client .detect_faces() .image(image) .attribut
.region(s3_region_provider) .load() .await; let s3_client = aws_sdk_s3::Client::new(&s3_shared_config); let rek_shared_config = aws_config::from_env() .region(rek_region_provider) .load() .await; let rek_client = aws_sdk_rekognition::Client::new(&rek_shared_config); let body = aws_sdk_s3::types::ByteStream::from_path(path).await; let key: String = String::from("uploads/") + &filename; save_bucket(&s3_client, body.unwrap(), &bucket, &content_type, &filename) .await .unwrap(); let s3_obj = aws_sdk_rekognition::model::S3Object::builder() .bucket(bucket) .name(key) .build(); let s3_img = aws_sdk_rekognition::model::Image::builder() .s3_object(s3_obj) .build(); describe_faces(verbose, &rek_client, s3_img).await.unwrap(); Ok(()) }
es(aws_sdk_rekognition::model::Attribute::All) .send() .await?; let mut persons: Vec<Person> = vec![]; for detail in resp.face_details.unwrap_or_default() { if verbose { println!("{:?}", detail); println!(); } let age = detail.age_range.unwrap(); let mut range: String = age.low.unwrap_or_default().to_string().to_owned(); range.push('-'); range.push_str(&age.high.unwrap_or_default().to_string()); let mut e: String = String::from(""); let mut confidence = 0.0; for emotion in detail.emotions.unwrap_or_default() { let c = emotion.confidence.unwrap_or_default(); if c > confidence { confidence = c; e = String::from(emotion.r#type.unwrap().as_ref()); } } let p = Person { from_left: detail.bounding_box.unwrap().left.unwrap_or_default(), age_range: range, gender: String::from(detail.gender.unwrap().value.unwrap().as_ref()), emotion: e, }; persons.push(p); } persons.sort_by(|a, b| a.from_left.partial_cmp(&b.from_left).unwrap()); if !verbose { println!("Face details (from left):"); println!(); for p in persons { println!("From left: {}", p.from_left); println!("Age range: {}", p.age_range); println!("Gender: {}", p.gender); println!("Emotion: {}", p.emotion); println!(); } } Ok(()) } #[tokio::main] async fn main() -> Result<(), Box<dyn Error>> { tracing_subscriber::fmt::init(); let Opt { bucket, filename, region, verbose, } = Opt::from_args(); let mut content_type = String::new(); let path = Path::new(&filename); let extension: &str = path.extension().unwrap().to_str().unwrap(); match extension { "jpg" => content_type.push_str("image/jpg"), "jpeg" => content_type.push_str("image/jpg"), "png" => content_type.push_str("image/png"), _ => { println!(); println!("{} is not a JPG, JPEG, or PNG file!", filename); println!(); return Ok(()); } } let s3_region = region.clone(); let rek_region = region.clone(); let rek_region_provider = RegionProviderChain::first_try(s3_region.map(aws_sdk_rekognition::Region::new)) .or_default_provider() 
.or_else(aws_sdk_rekognition::Region::new("us-west-2")); let s3_region_provider = RegionProviderChain::first_try(rek_region.map(aws_sdk_s3::Region::new)) .or_default_provider() .or_else(aws_sdk_s3::Region::new("us-west-2")); println!(); if verbose { println!( "Rekognition client version: {}", aws_sdk_rekognition::PKG_VERSION ); println!("S3 client version: {}", aws_sdk_s3::PKG_VERSION); println!("Bucket: {}", bucket); println!("Filename: {}", filename); println!( "Region: {}", s3_region_provider.region().await.unwrap().as_ref() ); println!(); } let s3_shared_config = aws_config::from_env()
random
[ { "content": "#[derive(Debug, StructOpt)]\n\nstruct Opt {\n\n /// The AWS Region.\n\n #[structopt(short, long)]\n\n region: Option<String>,\n\n\n\n /// The data to be uploaded to S3.\n\n #[structopt(short, long)]\n\n body: String,\n\n\n\n /// The name of the bucket.\n\n #[structopt(short, long)]\n\n bucket: String,\n\n\n\n /// The object key.\n\n #[structopt(short, long)]\n\n object: String,\n\n\n\n /// How long in seconds before the presigned request should expire.\n\n #[structopt(short, long, default_value = \"900\")]\n\n expires_in: u64,\n", "file_path": "rust_dev_preview/sending-presigned-requests/src/main.rs", "rank": 0, "score": 290135.61541089794 }, { "content": "/// This function demonstrates how you can convert a presigned request into a cURL command\n\n/// that you can run from your terminal of choice.\n\n///\n\n/// _NOTE:_ This only prints out the command, it's up to you to copy-paste it and run it.\n\nfn print_as_curl_request(presigned_req: &PresignedRequest, body: Option<&str>) {\n\n println!(\n\n \"curl -X {} {} \\\\\",\n\n presigned_req.method(),\n\n presigned_req.uri()\n\n );\n\n\n\n if let Some(body) = body {\n\n println!(\"-d '{}' \\\\\", body);\n\n }\n\n\n\n for (name, value) in presigned_req.headers() {\n\n // This value conversion method is naïve and will drop values that aren't valid UTF8\n\n // It's only here for demonstration purposes; Don't use this unless you're confident\n\n // that your header values are valid UTF-8\n\n println!(\n\n \"-H '{}: {}' \\\\\",\n\n name,\n\n value.to_str().unwrap_or_default().to_string()\n\n )\n", "file_path": "rust_dev_preview/sending-presigned-requests/src/main.rs", "rank": 1, "score": 282244.9115392559 }, { "content": "// snippet-start:[detect_labels-get_exif_data.rust.main]\n\nfn get_exif_data(filename: &str) -> Edata {\n\n let height: String = \"\".to_owned();\n\n let width: String = \"\".to_owned();\n\n let created: String = \"\".to_owned();\n\n let mut edata = Edata {\n\n height,\n\n width,\n\n 
created,\n\n };\n\n\n\n let file = std::fs::File::open(&filename).unwrap();\n\n let mut bufreader = std::io::BufReader::new(&file);\n\n let exifreader = exif::Reader::new();\n\n\n\n match exifreader.read_from_container(&mut bufreader) {\n\n Ok(exif) => {\n\n println!(\"{}\", &filename);\n\n\n\n for f in exif.fields() {\n\n // Get EXIF values for image width, height, and when image was created.\n", "file_path": "rust_dev_preview/cross_service/detect_labels/src/main.rs", "rank": 2, "score": 241805.326103641 }, { "content": "fn pcm_data(audio_file: &str) -> Vec<u8> {\n\n let reader = hound::WavReader::open(audio_file).unwrap();\n\n let samples_result: hound::Result<Vec<i16>> = reader.into_samples::<i16>().collect();\n\n\n\n let mut pcm: Vec<u8> = Vec::new();\n\n for sample in samples_result.unwrap() {\n\n pcm.put_i16_le(sample);\n\n }\n\n pcm\n\n}\n", "file_path": "rust_dev_preview/transcribestreaming/src/main.rs", "rank": 3, "score": 240064.5106666316 }, { "content": " private String key;\n", "file_path": "javav2/usecases/creating_photo_analyzer_async/src/main/java/com/example/photo/BucketItem.java", "rank": 4, "score": 238846.0109535093 }, { "content": " private static final Region region = Region.US_EAST_1; // change to the region where you want to create your resources\n", "file_path": "javav2/usecases/create_amazon_personalize_app/src/main/java/com/amazonaws/personalize/client/demo/movielens/PersonalizeDemoOnMovieLens20M.java", "rank": 5, "score": 237629.35080320944 }, { "content": " private final String bucket;\n", "file_path": "javav2/usecases/create_amazon_personalize_app/src/main/java/com/amazonaws/personalize/client/resource/DatasetImportJobManager.java", "rank": 6, "score": 228650.65774284658 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct Opt {\n\n /// The AWS Region.\n\n #[structopt(short, long)]\n\n region: Option<String>,\n\n\n\n /// The name of the audio file.\n\n #[structopt(short, long)]\n\n audio_file: String,\n\n\n\n /// Whether to display 
additional information.\n\n #[structopt(short, long)]\n\n verbose: bool,\n\n}\n\n\n\nconst CHUNK_SIZE: usize = 8192;\n\n\n\n/// Transcribes an audio file to text.\n\n/// # Arguments\n\n///\n\n/// * `-a AUDIO_FILE` - The name of the audio file.\n", "file_path": "rust_dev_preview/transcribestreaming/src/main.rs", "rank": 7, "score": 228072.2260569695 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct Opt {\n\n /// The AWS Region.\n\n #[structopt(short, long)]\n\n region: Option<String>,\n\n\n\n /// The name of the bucket.\n\n #[structopt(short, long)]\n\n bucket: String,\n\n\n\n /// The bucket prefix.\n\n #[structopt(short, long)]\n\n prefix: String,\n\n\n\n /// Whether to display additional information.\n\n #[structopt(short, long)]\n\n verbose: bool,\n\n}\n\n\n\n// The two testing approaches imported as modules below\n\nmod enums;\n", "file_path": "rust_dev_preview/testing/src/intro.rs", "rank": 8, "score": 228072.2260569695 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct Opt {\n\n /// The AWS Region.\n\n #[structopt(short, long)]\n\n region: Option<String>,\n\n\n\n /// Whether to display additional information.\n\n #[structopt(short, long)]\n\n verbose: bool,\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Error> {\n\n // snippet-start:[logging.rust.main-logger-init]\n\n env_logger::init();\n\n // snippet-end:[logging.rust.main-logger-init]\n\n\n\n let Opt { region, verbose } = Opt::from_args();\n\n\n\n let region_provider = RegionProviderChain::first_try(region.map(Region::new))\n\n .or_default_provider()\n", "file_path": "rust_dev_preview/logging/logger/src/main.rs", "rank": 9, "score": 226204.88503274508 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct Opt {\n\n /// The AWS Region.\n\n #[structopt(short, long)]\n\n region: Option<String>,\n\n\n\n /// Whether to display additional information.\n\n #[structopt(short, long)]\n\n verbose: bool,\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Error> {\n\n // 
snippet-start:[tracing.rust.main-tracing-init]\n\n tracing_subscriber::fmt::init();\n\n // snippet-end:[tracing.rust.main-tracing-init]\n\n\n\n let Opt { region, verbose } = Opt::from_args();\n\n\n\n let region_provider = RegionProviderChain::first_try(region.map(Region::new))\n\n .or_default_provider()\n", "file_path": "rust_dev_preview/logging/tracing/src/main.rs", "rank": 10, "score": 226204.88503274508 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct Opt {\n\n /// The AWS Region.\n\n #[structopt(short, long)]\n\n region: Option<String>,\n\n\n\n /// The file containing the input text.\n\n #[structopt(short, long)]\n\n filename: String,\n\n\n\n /// The name of the job.\n\n #[structopt(short, long)]\n\n job_name: String,\n\n\n\n /// The Amazon Simple Storage Service (Amazon S3) bucket to which the MP3\n\n /// file produced by Polly is uploaded.\n\n #[structopt(short, long)]\n\n bucket: String,\n\n\n\n /// Whether to display additional information.\n\n #[structopt(short, long)]\n", "file_path": "rust_dev_preview/cross_service/telephone/src/main.rs", "rank": 11, "score": 224388.299737937 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct Opt {\n\n /// The S3 bucket.\n\n #[structopt(short, long)]\n\n bucket: String,\n\n\n\n /// The filename.\n\n #[structopt(short, long)]\n\n filename: String,\n\n\n\n /// The AWS Region.\n\n #[structopt(short, long)]\n\n region: Option<String>,\n\n\n\n /// The DynamoDB table.\n\n #[structopt(short, long)]\n\n table: String,\n\n\n\n /// Whether to display additional information.\n\n #[structopt(short, long)]\n\n verbose: bool,\n\n}\n\n\n", "file_path": "rust_dev_preview/cross_service/detect_labels/src/main.rs", "rank": 13, "score": 222620.42855076745 }, { "content": "func (m *mockIAMClient) GetAccessKeyLastUsed(input *iam.GetAccessKeyLastUsedInput) (*iam.GetAccessKeyLastUsedOutput, error) {\n\n // Check that required inputs exist\n\n if input.AccessKeyId == nil || *input.AccessKeyId == \"\" {\n\n return nil, 
errors.New(\"GetAccessKeyLastUsedInput.AccessKeyId is nil or an empty string\")\n\n }\n\n\n\n resp := iam.GetAccessKeyLastUsedOutput{\n\n AccessKeyLastUsed: &iam.AccessKeyLastUsed{\n\n LastUsedDate: aws.Time(time.Now()),\n\n Region: aws.String(\"REGION\"),\n\n ServiceName: aws.String(\"SERVICE-NAME\"),\n\n },\n\n UserName: aws.String(\"MrMagoo\"),\n\n }\n\n return &resp, nil\n", "file_path": "go/iam/AccessKeyLastUsed/AccessKeyLastUsed_test.go", "rank": 15, "score": 210231.24003774568 }, { "content": " private static SSECustomerKey SSE_KEY;\n", "file_path": "java/example_code/s3/src/main/java/aws/example/s3/ServerSideEncryptionUsingClientSideEncryptionKey.java", "rank": 16, "score": 206375.79913146936 }, { "content": " private static KeyGenerator KEY_GENERATOR;\n", "file_path": "java/example_code/s3/src/main/java/aws/example/s3/ServerSideEncryptionUsingClientSideEncryptionKey.java", "rank": 17, "score": 206375.79913146936 }, { "content": " private static AmazonS3 S3_CLIENT;\n", "file_path": "java/example_code/s3/src/main/java/aws/example/s3/ServerSideEncryptionUsingClientSideEncryptionKey.java", "rank": 18, "score": 206363.96324699372 }, { "content": "var configFileName = \"config.json\"\n", "file_path": "go/sqs/SendReceiveLongPolling/SendReceiveLongPolling_test.go", "rank": 19, "score": 203266.91303963197 }, { "content": "type mockIAMClient struct {\n\n iamiface.IAMAPI\n", "file_path": "go/iam/AccessKeyLastUsed/AccessKeyLastUsed_test.go", "rank": 20, "score": 203036.59010310262 }, { "content": "public class ServerSideEncryptionUsingClientSideEncryptionKey {\n\n private static SSECustomerKey SSE_KEY;\n\n private static AmazonS3 S3_CLIENT;\n\n private static KeyGenerator KEY_GENERATOR;\n\n\n\n public static void main(String[] args) throws IOException, NoSuchAlgorithmException {\n\n Regions clientRegion = Regions.DEFAULT_REGION;\n\n String bucketName = \"*** Bucket name ***\";\n\n String keyName = \"*** Key name ***\";\n\n String uploadFileName = \"*** File path 
***\";\n\n String targetKeyName = \"*** Target key name ***\";\n\n\n\n // Create an encryption key.\n\n KEY_GENERATOR = KeyGenerator.getInstance(\"AES\");\n\n KEY_GENERATOR.init(256, new SecureRandom());\n\n SSE_KEY = new SSECustomerKey(KEY_GENERATOR.generateKey());\n\n\n\n try {\n\n S3_CLIENT = AmazonS3ClientBuilder.standard()\n\n .withCredentials(new ProfileCredentialsProvider())\n\n .withRegion(clientRegion)\n\n .build();\n\n\n\n // Upload an object.\n\n uploadObject(bucketName, keyName, new File(uploadFileName));\n\n\n\n // Download the object.\n\n downloadObject(bucketName, keyName);\n\n\n\n // Verify that the object is properly encrypted by attempting to retrieve it\n\n // using the encryption key.\n\n retrieveObjectMetadata(bucketName, keyName);\n\n\n\n // Copy the object into a new object that also uses SSE-C.\n\n copyObject(bucketName, keyName, targetKeyName);\n\n } catch (AmazonServiceException e) {\n\n // The call was transmitted successfully, but Amazon S3 couldn't process \n\n // it, so it returned an error response.\n\n e.printStackTrace();\n\n } catch (SdkClientException e) {\n\n // Amazon S3 couldn't be contacted for a response, or the client\n\n // couldn't parse the response from Amazon S3.\n\n e.printStackTrace();\n\n }\n\n }\n\n\n\n private static void uploadObject(String bucketName, String keyName, File file) {\n\n PutObjectRequest putRequest = new PutObjectRequest(bucketName, keyName, file).withSSECustomerKey(SSE_KEY);\n\n S3_CLIENT.putObject(putRequest);\n\n System.out.println(\"Object uploaded\");\n\n }\n\n\n\n private static void downloadObject(String bucketName, String keyName) throws IOException {\n\n GetObjectRequest getObjectRequest = new GetObjectRequest(bucketName, keyName).withSSECustomerKey(SSE_KEY);\n\n S3Object object = S3_CLIENT.getObject(getObjectRequest);\n\n\n\n System.out.println(\"Object content: \");\n\n displayTextInputStream(object.getObjectContent());\n\n }\n\n\n\n private static void retrieveObjectMetadata(String 
bucketName, String keyName) {\n\n GetObjectMetadataRequest getMetadataRequest = new GetObjectMetadataRequest(bucketName, keyName)\n\n .withSSECustomerKey(SSE_KEY);\n\n ObjectMetadata objectMetadata = S3_CLIENT.getObjectMetadata(getMetadataRequest);\n\n System.out.println(\"Metadata retrieved. Object size: \" + objectMetadata.getContentLength());\n\n }\n\n\n\n private static void copyObject(String bucketName, String keyName, String targetKeyName)\n\n throws NoSuchAlgorithmException {\n\n // Create a new encryption key for target so that the target is saved using SSE-C.\n\n SSECustomerKey newSSEKey = new SSECustomerKey(KEY_GENERATOR.generateKey());\n\n\n\n CopyObjectRequest copyRequest = new CopyObjectRequest(bucketName, keyName, bucketName, targetKeyName)\n\n .withSourceSSECustomerKey(SSE_KEY)\n\n .withDestinationSSECustomerKey(newSSEKey);\n\n\n\n S3_CLIENT.copyObject(copyRequest);\n\n System.out.println(\"Object copied\");\n\n }\n\n\n\n private static void displayTextInputStream(S3ObjectInputStream input) throws IOException {\n\n // Read one line at a time from the input stream and display each line.\n\n BufferedReader reader = new BufferedReader(new InputStreamReader(input));\n\n String line;\n\n while ((line = reader.readLine()) != null) {\n\n System.out.println(line);\n\n }\n\n System.out.println();\n\n }\n", "file_path": "java/example_code/s3/src/main/java/aws/example/s3/ServerSideEncryptionUsingClientSideEncryptionKey.java", "rank": 21, "score": 200578.0144412161 }, { "content": " public static void main(String[] args) throws IOException, NoSuchAlgorithmException {\n\n Regions clientRegion = Regions.DEFAULT_REGION;\n\n String bucketName = \"*** Bucket name ***\";\n\n String keyName = \"*** Key name ***\";\n\n String uploadFileName = \"*** File path ***\";\n\n String targetKeyName = \"*** Target key name ***\";\n\n\n\n // Create an encryption key.\n\n KEY_GENERATOR = KeyGenerator.getInstance(\"AES\");\n\n KEY_GENERATOR.init(256, new SecureRandom());\n\n SSE_KEY 
= new SSECustomerKey(KEY_GENERATOR.generateKey());\n\n\n\n try {\n\n S3_CLIENT = AmazonS3ClientBuilder.standard()\n\n .withCredentials(new ProfileCredentialsProvider())\n\n .withRegion(clientRegion)\n\n .build();\n\n\n\n // Upload an object.\n\n uploadObject(bucketName, keyName, new File(uploadFileName));\n\n\n\n // Download the object.\n\n downloadObject(bucketName, keyName);\n\n\n\n // Verify that the object is properly encrypted by attempting to retrieve it\n\n // using the encryption key.\n\n retrieveObjectMetadata(bucketName, keyName);\n\n\n\n // Copy the object into a new object that also uses SSE-C.\n\n copyObject(bucketName, keyName, targetKeyName);\n\n } catch (AmazonServiceException e) {\n\n // The call was transmitted successfully, but Amazon S3 couldn't process \n\n // it, so it returned an error response.\n\n e.printStackTrace();\n\n } catch (SdkClientException e) {\n\n // Amazon S3 couldn't be contacted for a response, or the client\n\n // couldn't parse the response from Amazon S3.\n\n e.printStackTrace();\n\n }\n", "file_path": "java/example_code/s3/src/main/java/aws/example/s3/ServerSideEncryptionUsingClientSideEncryptionKey.java", "rank": 22, "score": 199201.5629546388 }, { "content": " private static void uploadObject(String bucketName, String keyName, File file) {\n\n PutObjectRequest putRequest = new PutObjectRequest(bucketName, keyName, file).withSSECustomerKey(SSE_KEY);\n\n S3_CLIENT.putObject(putRequest);\n\n System.out.println(\"Object uploaded\");\n", "file_path": "java/example_code/s3/src/main/java/aws/example/s3/ServerSideEncryptionUsingClientSideEncryptionKey.java", "rank": 23, "score": 197843.87429844696 }, { "content": " private static void downloadObject(String bucketName, String keyName) throws IOException {\n\n GetObjectRequest getObjectRequest = new GetObjectRequest(bucketName, keyName).withSSECustomerKey(SSE_KEY);\n\n S3Object object = S3_CLIENT.getObject(getObjectRequest);\n\n\n\n System.out.println(\"Object content: \");\n\n 
displayTextInputStream(object.getObjectContent());\n", "file_path": "java/example_code/s3/src/main/java/aws/example/s3/ServerSideEncryptionUsingClientSideEncryptionKey.java", "rank": 24, "score": 197843.87429844696 }, { "content": " private static void copyObject(String bucketName, String keyName, String targetKeyName)\n\n throws NoSuchAlgorithmException {\n\n // Create a new encryption key for target so that the target is saved using SSE-C.\n\n SSECustomerKey newSSEKey = new SSECustomerKey(KEY_GENERATOR.generateKey());\n\n\n\n CopyObjectRequest copyRequest = new CopyObjectRequest(bucketName, keyName, bucketName, targetKeyName)\n\n .withSourceSSECustomerKey(SSE_KEY)\n\n .withDestinationSSECustomerKey(newSSEKey);\n\n\n\n S3_CLIENT.copyObject(copyRequest);\n\n System.out.println(\"Object copied\");\n", "file_path": "java/example_code/s3/src/main/java/aws/example/s3/ServerSideEncryptionUsingClientSideEncryptionKey.java", "rank": 25, "score": 197843.87429844696 }, { "content": "# @param s3_client [Aws::S3::Client] An initialized Amazon S3 client.\n\n# @param bucket_name [String] The bucket's name.\n\n# @return [Boolean] true if all operations succeed; otherwise, false.\n\n# @example\n\n# s3_client = Aws::S3::Client.new(region: 'us-west-2')\n\n# exit 1 unless list_bucket_objects?(s3_client, 'doc-example-bucket')\n\ndef list_bucket_objects?(s3_client, bucket_name)\n\n puts \"Accessing the bucket named '#{bucket_name}'...\"\n\n objects = s3_client.list_objects_v2(\n\n bucket: bucket_name,\n\n max_keys: 50\n\n )\n\n\n\n if objects.count.positive?\n\n puts \"The object keys in this bucket are (first 50 objects):\"\n\n objects.contents.each do |object|\n\n puts object.key\n\n end\n\n else\n\n puts \"No objects found in this bucket.\"\n\n end\n\n\n\n return true\n\nrescue StandardError => e\n\n puts \"Error while accessing the bucket named '#{bucket_name}': #{e.message}\"\n\n return false\n\nend\n\n\n", "file_path": "ruby/example_code/s3/auth_request_object_keys.rb", "rank": 
26, "score": 197111.69099121788 }, { "content": " private static void retrieveObjectMetadata(String bucketName, String keyName) {\n\n GetObjectMetadataRequest getMetadataRequest = new GetObjectMetadataRequest(bucketName, keyName)\n\n .withSSECustomerKey(SSE_KEY);\n\n ObjectMetadata objectMetadata = S3_CLIENT.getObjectMetadata(getMetadataRequest);\n\n System.out.println(\"Metadata retrieved. Object size: \" + objectMetadata.getContentLength());\n", "file_path": "java/example_code/s3/src/main/java/aws/example/s3/ServerSideEncryptionUsingClientSideEncryptionKey.java", "rank": 27, "score": 196504.56742691327 }, { "content": " private static void displayTextInputStream(S3ObjectInputStream input) throws IOException {\n\n // Read one line at a time from the input stream and display each line.\n\n BufferedReader reader = new BufferedReader(new InputStreamReader(input));\n\n String line;\n\n while ((line = reader.readLine()) != null) {\n\n System.out.println(line);\n\n }\n\n System.out.println();\n", "file_path": "java/example_code/s3/src/main/java/aws/example/s3/ServerSideEncryptionUsingClientSideEncryptionKey.java", "rank": 28, "score": 195183.2715429098 }, { "content": " Bucket string `json:\"Bucket\"`\n", "file_path": "go/s3/CustomClient/CustomHttpClient_test.go", "rank": 29, "score": 193415.51078713895 }, { "content": " Key string `json:\"Key\"`\n", "file_path": "go/s3/CreateBucketAndObject/CreateBucketAndObject_test.go", "rank": 30, "score": 192172.7010694663 }, { "content": " public void send(byte[] attachment, String emailAddress) throws MessagingException, IOException {\n\n\n\n MimeMessage message = null;\n\n Session session = Session.getDefaultInstance(new Properties());\n\n\n\n // Create a new MimeMessage object.\n\n message = new MimeMessage(session);\n\n\n\n // Add subject, from and to lines.\n\n message.setSubject(subject, \"UTF-8\");\n\n message.setFrom(new InternetAddress(sender));\n\n message.setRecipients(Message.RecipientType.TO, 
InternetAddress.parse(emailAddress));\n\n\n\n // Create a multipart/alternative child container.\n\n MimeMultipart msgBody = new MimeMultipart(\"alternative\");\n\n\n\n // Create a wrapper for the HTML and text parts.\n\n MimeBodyPart wrap = new MimeBodyPart();\n\n\n\n // Define the text part.\n\n MimeBodyPart textPart = new MimeBodyPart();\n\n textPart.setContent(bodyText, \"text/plain; charset=UTF-8\");\n\n\n\n // Define the HTML part.\n\n MimeBodyPart htmlPart = new MimeBodyPart();\n\n htmlPart.setContent(bodyHTML, \"text/html; charset=UTF-8\");\n\n\n\n // Add the text and HTML parts to the child container.\n\n msgBody.addBodyPart(textPart);\n\n msgBody.addBodyPart(htmlPart);\n\n\n\n // Add the child container to the wrapper object.\n\n wrap.setContent(msgBody);\n\n\n\n // Create a multipart/mixed parent container.\n\n MimeMultipart msg = new MimeMultipart(\"mixed\");\n\n\n\n // Add the parent container to the message.\n\n message.setContent(msg);\n\n\n\n // Add the multipart/alternative part to the message.\n\n msg.addBodyPart(wrap);\n\n\n\n // Define the attachment\n\n MimeBodyPart att = new MimeBodyPart();\n\n DataSource fds = new ByteArrayDataSource(attachment, \"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet\");\n\n att.setDataHandler(new DataHandler(fds));\n\n\n\n String reportName = \"PhotoReport.xls\";\n\n att.setFileName(reportName);\n\n\n\n // Add the attachment to the message.\n\n msg.addBodyPart(att);\n\n\n\n // Try to send the email.\n\n try {\n\n System.out.println(\"Attempting to send an email through Amazon SES \" + \"using the AWS SDK for Java...\");\n\n\n\n Region region = Region.US_WEST_2;\n\n SesClient client = SesClient.builder()\n\n // .credentialsProvider(EnvironmentVariableCredentialsProvider.create())\n\n .region(region)\n\n .build();\n\n\n\n ByteArrayOutputStream outputStream = new ByteArrayOutputStream();\n\n message.writeTo(outputStream);\n\n\n\n ByteBuffer buf = ByteBuffer.wrap(outputStream.toByteArray());\n\n 
byte[] arr = new byte[buf.remaining()];\n\n buf.get(arr);\n\n\n\n SdkBytes data = SdkBytes.fromByteArray(arr);\n\n RawMessage rawMessage = RawMessage.builder()\n\n .data(data)\n\n .build();\n\n\n\n SendRawEmailRequest rawEmailRequest = SendRawEmailRequest.builder()\n\n .rawMessage(rawMessage)\n\n .build();\n\n\n\n client.sendRawEmail(rawEmailRequest);\n\n\n\n } catch (SesException e) {\n\n System.err.println(e.awsErrorDetails().errorMessage());\n\n System.exit(1);\n\n }\n\n System.out.println(\"Email sent with attachment\");\n", "file_path": "javav2/usecases/creating_photo_analyzer_async/src/main/java/com/example/photo/SendMessages.java", "rank": 31, "score": 191394.5477446934 }, { "content": " public static String region = \"us-east-1\";\n", "file_path": "java/example_code/pinpoint/pinpoint_send_sms_message_api.java", "rank": 32, "score": 189788.29386733667 }, { "content": " public static String region = \"us-west-2\";\n", "file_path": "java/example_code/pinpoint/pinpoint_send_email_message_api.java", "rank": 33, "score": 189788.29386733667 }, { "content": "var configFileName = \"config.json\"\n", "file_path": "gov2/iam/AccessKeyLastUsed/AccessKeyLastUsedv2_test.go", "rank": 34, "score": 187471.33379775862 }, { "content": " static final String region = \"us-west-2\";\n", "file_path": "java/example_code/pinpoint-email/pinpoint_send_email_message_email_api.java", "rank": 35, "score": 187285.00989893312 }, { "content": " public static AmazonRedshift client;\n", "file_path": "java/example_code/redshift/ListAndPurchaseReservedNodeOffering.java", "rank": 36, "score": 186074.692033426 }, { "content": " private static String key = \"\";\n", "file_path": "javav2/example_code/dynamodbasync/src/test/java/DynamoDBAsyncTest.java", "rank": 37, "score": 184917.51996788417 }, { "content": " static final String region = \"us-east-1\";\n", "file_path": "java/example_code/pinpoint-sms-voice/pinpoint_send_voice_message_sms_voice_api.java", "rank": 38, "score": 184878.96285168282 }, { 
"content": " private String key;\n", "file_path": "javav2/usecases/video_analyzer_application/src/main/java/com/example/video/BucketItem.java", "rank": 39, "score": 184671.49793363048 }, { "content": " private String key;\n", "file_path": "javav2/usecases/creating_photo_analyzer_app/src/main/java/com/example/photo/BucketItem.java", "rank": 40, "score": 183505.42777763747 }, { "content": " private String key;\n", "file_path": "javav2/usecases/creating_lambda_tag_assets/src/main/java/com/example/tags/BucketItem.java", "rank": 41, "score": 183505.42777763747 }, { "content": " public static final String bucket = \"bucket\";\n", "file_path": "java/example_code/rekognition/rekognition-collection-java-search-face-matching-image-collection.java", "rank": 42, "score": 182552.95481502975 }, { "content": " private static final Region REGION = Region.US_EAST_1;\n", "file_path": "javav2/example_code/transcribe/src/main/java/com/amazonaws/transcribestreaming/TranscribeStreamingDemoFile.java", "rank": 43, "score": 181529.995064966 }, { "content": " private static TranscribeStreamingAsyncClient client;\n", "file_path": "javav2/example_code/transcribe/src/main/java/com/amazonaws/transcribestreaming/TranscribeStreamingDemoFile.java", "rank": 44, "score": 181351.4274627212 }, { "content": " private String bodyText = \"Hello,\\r\\n\" + \"Please see the attached file for the analyzed photos report.\";\n", "file_path": "javav2/usecases/creating_photo_analyzer_async/src/main/java/com/example/photo/SendMessages.java", "rank": 45, "score": 180250.7099339847 }, { "content": " private String bodyHTML = \"<html>\" + \"<head></head>\" + \"<body>\" + \"<h1>Hello!</h1>\"\n", "file_path": "javav2/usecases/creating_photo_analyzer_async/src/main/java/com/example/photo/SendMessages.java", "rank": 46, "score": 180244.25268950596 }, { "content": " public void setKey(String key) {\n\n this.key = key ;\n", "file_path": 
"javav2/usecases/creating_photo_analyzer_async/src/main/java/com/example/photo/BucketItem.java", "rank": 47, "score": 179966.19017265885 }, { "content": " public String getKey() {\n\n return this.key ;\n", "file_path": "javav2/usecases/creating_photo_analyzer_async/src/main/java/com/example/photo/BucketItem.java", "rank": 48, "score": 179966.19017265885 }, { "content": "package com.example.s3.async;\n", "file_path": "javav2/example_code/s3/src/main/java/com/example/s3/async/CreateBucketAsync.java", "rank": 49, "score": 179481.31251320965 }, { "content": " private static final String S3_BUCKET = PREFIX.toLowerCase() + \"-tutorial-bucket\";\n", "file_path": "javav2/usecases/create_amazon_personalize_app/src/main/java/com/amazonaws/personalize/client/demo/movielens/PersonalizeDemoOnMovieLens20M.java", "rank": 50, "score": 179021.2627464674 }, { "content": "const REGION = \"REGION\"; //e.g. \"us-east-1\"\n", "file_path": "javascriptv3/example_code/lambda/src/s3-bucket-setup.ts", "rank": 51, "score": 178686.61137353646 }, { "content": "const REGION = \"REGION\"; //e.g. \"us-east-1\"\n", "file_path": "javascriptv3/example_code/dynamodb/src/libs/ddbClient.js", "rank": 52, "score": 178672.78971672335 }, { "content": "const REGION = \"REGION\";\n", "file_path": "javascriptv3/example_code/glacier/src/libs/glacierClient.js", "rank": 53, "score": 178672.78971672335 }, { "content": "const REGION = \"REGION\";\n", "file_path": "javascriptv3/example_code/kinesis/src/libs/kinesisClient.js", "rank": 54, "score": 178672.78971672335 }, { "content": "const REGION = \"REGION\";\n", "file_path": "javascriptv3/example_code/mediaconvert/src/libs/emcClient.js", "rank": 55, "score": 178672.78971672335 }, { "content": "const REGION = \"REGION\"; //e.g. \"us-east-1\"\n", "file_path": "javascriptv3/example_code/sqs/src/libs/sqsClient.js", "rank": 56, "score": 178672.78971672335 }, { "content": "const REGION = \"REGION\"; //e.g. 
\"us-east-1\"\n", "file_path": "javascriptv3/example_code/ses/src/libs/sesClient.js", "rank": 57, "score": 178672.78971672335 }, { "content": "const REGION = \"REGION\"; // For example, \"us-east-1\".\n", "file_path": "javascriptv3/example_code/iam/src/libs/iamClient.js", "rank": 58, "score": 178672.78971672335 }, { "content": "const REGION = \"REGION\";\n", "file_path": "javascriptv3/example_code/pinpoint/src/libs/pinClient.js", "rank": 59, "score": 178672.78971672335 }, { "content": "export const REGION = \"REGION\"; // For example, \"us-east-1\".\n", "file_path": "javascriptv3/example_code/iam/scenarios/libs/iamClient.js", "rank": 60, "score": 178672.78971672335 }, { "content": "const REGION = \"REGION\";\n", "file_path": "javascriptv3/example_code/redshift/src/libs/redshiftClient.js", "rank": 61, "score": 178672.78971672335 }, { "content": "const REGION = \"REGION\"; //e.g. \"us-east-1\"\n", "file_path": "javascriptv3/example_code/transcribe/src/libs/transcribeClient.js", "rank": 62, "score": 178672.78971672335 }, { "content": "const REGION = \"REGION\"; //e.g. \"us-east-1\"\n", "file_path": "javascriptv3/example_code/sts/src/libs/stsClient.js", "rank": 63, "score": 178672.78971672335 }, { "content": "const REGION = \"REGION\";\n", "file_path": "javascriptv3/example_code/secrets/src/libs/secretsClient.js", "rank": 64, "score": 178672.78971672335 }, { "content": "const REGION = \"REGION\"; //e.g. \"us-east-1\"\n", "file_path": "javascriptv3/example_code/s3/src/libs/s3Client.js", "rank": 65, "score": 178672.78971672335 }, { "content": "const REGION = \"REGION\"; //e.g. \"us-east-1\"\n", "file_path": "javascriptv3/example_code/ec2/src/libs/ec2Client.js", "rank": 66, "score": 178672.78971672335 }, { "content": "const REGION = \"REGION\"; //e.g. \"us-east-1\"\n", "file_path": "javascriptv3/example_code/nodegetstarted/src/libs/s3Client.js", "rank": 67, "score": 178672.78971672335 }, { "content": "const REGION = \"REGION\"; //e.g. 
\"us-east-1\"\n", "file_path": "javascriptv3/example_code/sns/src/libs/snsClient.js", "rank": 68, "score": 178672.78971672335 }, { "content": " public static void ensurePersonalizePermissionsOnS3Bucket(S3Client s3, String bucket) {\n\n final String bucketPolicy = BUCKET_POLICY_TEMPLATE.replace(\"{bucket}\", bucket);\n\n //System.out.println(\"Bucket policy: \" + bucketPolicy);\n\n\n\n PutBucketPolicyRequest policyRequest = PutBucketPolicyRequest.builder()\n\n .bucket(bucket)\n\n .policy(bucketPolicy)\n\n .build();\n\n s3.putBucketPolicy(policyRequest);\n", "file_path": "javav2/usecases/create_amazon_personalize_app/src/main/java/com/amazonaws/personalize/client/demo/movielens/DemoUtils.java", "rank": 69, "score": 178235.90675291469 }, { "content": "# @param sqs_client [Aws::SQS::Client] An initialized Amazon SQS client.\n\n# @param queue_url [String] The URL of the queue.\n\n# @param message_body [String] The contents of the message to be sent.\n\n# @return [Boolean] true if the message was sent; otherwise, false.\n\n# @example\n\n# exit 1 unless message_sent?(\n\n# Aws::SQS::Client.new(region: 'us-west-2'),\n\n# 'https://sqs.us-west-2.amazonaws.com/111111111111/my-queue',\n\n# 'This is my message.'\n\n# )\n\ndef message_sent?(sqs_client, queue_url, message_body)\n\n sqs_client.send_message(\n\n queue_url: queue_url,\n\n message_body: message_body\n\n )\n\n true\n\nrescue StandardError => e\n\n puts \"Error sending message: #{e.message}\"\n\n false\n\nend\n\n\n", "file_path": "ruby/example_code/sqs/sqs-ruby-example-send-message.rb", "rank": 70, "score": 177969.17903282164 }, { "content": "const REGION = \"REGION\";\n", "file_path": "javascriptv3/example_code/mediaconvert/src/libs/emcClientGet.js", "rank": 71, "score": 177163.8818272953 }, { "content": "const REGION = \"REGION\"; //e.g. 
\"us-east-1\"\n", "file_path": "javascriptv3/example_code/cloudwatch/src/libs/cloudWatchClient.js", "rank": 72, "score": 177163.8818272953 }, { "content": "const REGION = \"REGION\"; //e.g. \"us-east-1\"\n", "file_path": "javascriptv3/example_code/eventbridge/src/libs/eventBridgeClient.js", "rank": 73, "score": 177163.8818272953 }, { "content": "const REGION = \"REGION\"; //e.g. \"us-east-1\"\n", "file_path": "javascriptv3/example_code/dynamodb/src/libs/ddbDocClient.js", "rank": 74, "score": 177163.8818272953 }, { "content": "const REGION = \"REGION\";\n", "file_path": "javascriptv3/example_code/codebuild/src/libs/codeBuildClient.js", "rank": 75, "score": 177156.8276326652 }, { "content": "const REGION = \"eu-west-1\";\n", "file_path": "javascriptv3/example_code/codecommit/src/libs/codeCommitClient.js", "rank": 76, "score": 177156.8276326652 }, { "content": "const REGION = \"REGION\"; //e.g. \"us-east-1\"\n", "file_path": "javascriptv3/example_code/lambda/src/s3-bucket-setup-non-modular.ts", "rank": 77, "score": 175705.4481806056 }, { "content": "const REGION = \"REGION\"; //e.g. 
\"us-east-1\"\n", "file_path": "javascriptv3/example_code/polly/general-examples/src/libs/pollyClient.js", "rank": 78, "score": 175691.9653667655 }, { "content": "export const REGION = \"REGION\"; //For example, \"us-east-1\".\n", "file_path": "javascriptv3/example_code/s3/scenarios/s3_basics/libs/s3Client.js", "rank": 79, "score": 175691.9653667655 }, { "content": "const REGION = \"REGION\"; // e.g., 'us-east-2'\n", "file_path": "javascriptv3/example_code/cross-services/lambda-for-browser/libs/lambdaClient.js", "rank": 80, "score": 175691.9653667655 }, { "content": "const REGION = \"REGION\"; // e.g., 'us-east-2'\n", "file_path": "javascriptv3/example_code/cross-services/lambda-for-browser/libs/ddbClient.js", "rank": 81, "score": 175691.9653667655 }, { "content": "export const REGION = \"REGION\"; // For example, \"us-east-1\".\n", "file_path": "javascriptv3/example_code/dynamodb/scenarios/dynamodb_basics/libs/ddbClient.js", "rank": 82, "score": 175691.9653667655 }, { "content": "const REGION = \"REGION\";\n", "file_path": "javascriptv3/example_code/cross-services/photo_analyzer/libs/rekognitionClient.js", "rank": 83, "score": 175684.9111721354 }, { "content": "const REGION = \"REGION\";\n", "file_path": "javascriptv3/example_code/cross-services/photo_analyzer/libs/s3Client.js", "rank": 84, "score": 175684.9111721354 }, { "content": "const REGION = \"REGION\";\n", "file_path": "javascriptv3/example_code/cross-services/photo_analyzer/libs/sesClient.js", "rank": 85, "score": 175684.9111721354 }, { "content": "const REGION = \"REGION\"; //e.g. \"us-east-1\"\n", "file_path": "javascriptv3/example_code/cross-services/lambda-for-browser/libs/ddbDocClient.js", "rank": 86, "score": 174255.69651958146 }, { "content": "const REGION = \"REGION\"; //e.g. 
\"us-east-1\"\n", "file_path": "javascriptv3/example_code/cloudwatch-events/src/libs/cloudWatchEventsClient.js", "rank": 87, "score": 174255.69651958146 }, { "content": "const REGION = \"REGION\"; // e.g., 'us-east-2'\n", "file_path": "javascriptv3/example_code/cross-services/photo-analyzer-ppe/libs/rekognitionClient.js", "rank": 88, "score": 174255.69651958146 }, { "content": "const REGION = \"REGION\"; //e.g. \"us-east-1\"\n", "file_path": "javascriptv3/example_code/cloudwatch-logs/src/libs/cloudWatchLogsClient.js", "rank": 89, "score": 174255.69651958146 }, { "content": "const REGION = \"REGION\"; // e.g., 'us-east-2'\n", "file_path": "javascriptv3/example_code/cross-services/photo-analyzer-ppe/libs/sesClient.js", "rank": 90, "score": 174255.69651958146 }, { "content": "const REGION = \"REGION\"; //e.g. \"us-east-1\"\n", "file_path": "javascriptv3/example_code/rekognition/estimate-age-example/src/libs/rekognitionClient.js", "rank": 91, "score": 174255.69651958146 }, { "content": "const REGION = \"REGION\"; //e.g. 
\"us-east-1\"\n", "file_path": "javascriptv3/example_code/cross-services/photo-analyzer-ppe/libs/s3Client.js", "rank": 92, "score": 174255.69651958146 }, { "content": "const REGION = \"REGION\"; // e.g., 'us-east-2'\n", "file_path": "javascriptv3/example_code/lambda/lambda_create_function/src/libs/lambaClient.js", "rank": 93, "score": 174255.69651958146 }, { "content": "const REGION = \"REGION\"; // For example, \"us-east-1\".\n", "file_path": "javascriptv3/example_code/dynamodb/scenarios/dynamodb_basics/libs/ddbDocClient.js", "rank": 94, "score": 174255.69651958146 }, { "content": "const REGION = \"REGION\";\n", "file_path": "javascriptv3/example_code/cross-services/video-analyzer/src/libs/rekognitionClient.js", "rank": 95, "score": 174248.6423249514 }, { "content": "const REGION = \"REGION\";\n", "file_path": "javascriptv3/example_code/cross-services/sns-sample-app/libs/snsClient.js", "rank": 96, "score": 174248.6423249514 }, { "content": "const REGION = \"REGION\";\n", "file_path": "javascriptv3/example_code/cross-services/lex-bot/src/libs/lexClient.js", "rank": 97, "score": 174248.6423249514 }, { "content": "const REGION = \"REGION\";\n", "file_path": "javascriptv3/example_code/cross-services/message-app/js/libs/sqsClient.js", "rank": 98, "score": 174248.6423249514 }, { "content": "const REGION = \"REGION\";\n", "file_path": "javascriptv3/example_code/cross-services/video-analyzer/src/libs/s3Client.js", "rank": 99, "score": 174248.6423249514 } ]
Rust
src/ir/layout.rs
birdbrainswagtrain/combinatorio
89240facff6bc0b341164580c29721ddcaf812d8
use std::collections::HashMap; use rand::Rng; use crate::{common::ConnectType, disjoint_set::DisjointSet}; use super::{IRArg, IRModule, IRNode, WireColor}; #[derive(Debug)] struct WireNet { color: WireColor, connections: Vec<(u32,ConnectType)> } const MAX_DIST: f32 = 9.0; fn square_dist(a: (f32,f32), b: (f32,f32)) -> f32 { let x = a.0 - b.0; let y = a.1 - b.1; x * x + y * y } fn check_dist(sq_dist: f32) -> bool { return sq_dist <= MAX_DIST * MAX_DIST; } impl WireNet { fn to_links(&self, module: &IRModule, out: &mut Vec<WireLink>) -> bool { let mut subnet_ids = DisjointSet::new(self.connections.len()); loop { let mut link_count = 0; for id_a in 0..self.connections.len() { let net_id_a = subnet_ids.get(id_a); for id_b in (id_a+1)..self.connections.len() { let net_id_b = subnet_ids.get(id_b); if net_id_a == net_id_b { continue; } let pos_a = module.get_true_pos(self.connections[id_a].0).unwrap(); let pos_b = module.get_true_pos(self.connections[id_b].0).unwrap(); if !check_dist(square_dist(pos_a,pos_b)) { continue; } link_count += 1; subnet_ids.merge(net_id_a, net_id_b); out.push(WireLink{ color: self.color, a: self.connections[id_a].clone(), b: self.connections[id_b].clone() }); break; } } if subnet_ids.count_sets() == 1 { return true; } if link_count == 0 { return false; } } } fn correct(&self, module: &mut IRModule) { const MIN_FRACTION: f32 = 0.0; const MAX_FRACTION: f32 = 1.0; fn lerp_pos(start: (i32,i32), end: (i32,i32), f: f32) -> (i32,i32) { let x = start.0 + ((end.0 - start.0) as f32 * f).round() as i32; let y = start.1 + ((end.1 - start.1) as f32 * f).round() as i32; (x,y) } let mut rng = rand::thread_rng(); let mut positions = Vec::new(); for (id,_) in &self.connections { let pos = module.grid.get_pos_for(*id).unwrap(); positions.push(pos); } let mid_pos = positions[rng.gen_range(0..positions.len())]; for (id,_) in &self.connections { if !module.can_move(*id) { continue; } let base_pos = module.grid.get_pos_for(*id).unwrap(); if base_pos == mid_pos { 
continue; } let fraction = MIN_FRACTION + (MAX_FRACTION - MIN_FRACTION) * rng.gen::<f32>(); let new_pos = lerp_pos(base_pos, mid_pos, fraction); if new_pos == base_pos { continue; } if module.grid.is_cell_reserved(new_pos) { continue; } if let Some(old_id) = module.grid.get_id_at(new_pos) { if !module.can_move(old_id) { continue; } self.shift_chain(module, new_pos, base_pos); } assert_eq!(module.grid.get_id_at(new_pos),None); module.grid.set(new_pos, *id); } } fn shift_chain(&self, module: &mut IRModule, start_pos: (i32,i32), end_pos: (i32,i32)) { fn move_toward(a: (i32,i32), b: (i32,i32)) -> (i32,i32) { let (ax,ay) = a; let (bx,by) = b; if ax > bx { (ax-1,ay) } else if ax < bx { (ax+1,ay) } else { if ay > by { (ax,ay-1) } else if ay < by { (ax,ay+1) } else { panic!("can not move, a == b"); } } } let mut target_pos = end_pos; let mut source_pos = move_toward(target_pos,start_pos); loop { if let Some(id) = module.grid.get_id_at(source_pos) { if module.can_move(id) { module.grid.set(target_pos, id); target_pos = source_pos; if target_pos == start_pos { return; } } } source_pos = move_toward(source_pos,start_pos); } } } #[derive(Debug)] pub struct WireLink { pub color: WireColor, pub a: (u32,ConnectType), pub b: (u32,ConnectType) } #[derive(Default)] struct NetRegistry { map: HashMap<(u32,ConnectType,WireColor),usize>, list: Vec<WireNet> } impl NetRegistry { fn add_link(&mut self, src_arg: &IRArg, dest_id: u32, module: &IRModule) { if let IRArg::Link(src_id,color) = src_arg { if let IRNode::MultiDriver(args) = module.nodes.get(*src_id as usize) { for arg in args { let fixed_arg = if let IRArg::Link(src_id,_) = arg { IRArg::Link(*src_id, *color) } else { panic!("raw constants not permitted in multi-driver"); }; self.add_link(&fixed_arg, dest_id, module); } return; } let src_key = (*src_id,ConnectType::Out,*color); let dest_key = (dest_id,ConnectType::In,*color); let src_net_exists = self.map.contains_key(&src_key); let dest_net_exists = 
self.map.contains_key(&dest_key); if src_net_exists && dest_net_exists { let net_id_1 = *self.map.get(&src_key).unwrap(); let net_id_2 = *self.map.get(&dest_key).unwrap(); if net_id_1 == net_id_2 { return; } panic!("both exist {} {}",net_id_1,net_id_2); } else if src_net_exists { let net_id = *self.map.get(&src_key).unwrap(); let net = &mut self.list[net_id]; net.connections.push((dest_id,ConnectType::In)); self.map.insert(dest_key, net_id); } else if dest_net_exists { let net_id = *self.map.get(&dest_key).unwrap(); let net = &mut self.list[net_id]; net.connections.push((*src_id,ConnectType::Out)); self.map.insert(src_key, net_id); } else { let net = WireNet{ color: *color, connections: vec!((*src_id,ConnectType::Out),(dest_id,ConnectType::In)) }; let net_id = self.list.len(); self.list.push(net); self.map.insert(src_key, net_id); self.map.insert(dest_key, net_id); } } } fn to_links(&self, module: &IRModule, priority_check_list: &Vec<u32>) -> Result< Vec<WireLink>, Vec<u32> > { let mut failed = Vec::new(); let mut out = Vec::new(); for i in priority_check_list { let net = &self.list[*i as usize]; if !net.to_links(module, &mut out) { failed.push(*i); } } if failed.len() > 0 { return Err(failed); } out.clear(); for (i, net) in self.list.iter().enumerate() { if !net.to_links(module, &mut out) { failed.push(i as u32); } } if failed.len() > 0 { Err(failed) } else { Ok(out) } } } #[derive(Default, Debug)] pub struct Grid { cell_map: HashMap<(i32,i32),u32>, node_positions: Vec<Option<(i32,i32)>>, approx_w: i32 } impl Grid { fn init(&mut self, size: usize) { self.approx_w = ((size as f32 / 2.0).sqrt() * 2.0).ceil() as i32; self.node_positions.resize(size, None); } pub fn get_pos_for(&self, id: u32) -> Option<(i32,i32)> { if (id as usize) < self.node_positions.len() { return self.node_positions[id as usize]; } None } fn is_cell_filled(&self, key: (i32,i32)) -> bool { self.cell_map.get(&key).is_some() } fn is_cell_reserved(&self, key: (i32,i32)) -> bool { let x = 
key.0.rem_euclid(18); let y = key.1.rem_euclid(9); y == 0 && x <= 1 } fn get_id_at(&self, key: (i32,i32)) -> Option<u32> { self.cell_map.get(&key).map(|x| *x) } fn set(&mut self, key: (i32,i32), val: u32) { if let Some(current_id) = self.cell_map.get(&key) { self.node_positions[*current_id as usize] = None; } if let Some(current_pos) = self.node_positions[val as usize] { self.cell_map.remove(&current_pos); } self.cell_map.insert(key,val); self.node_positions[val as usize] = Some(key); } fn add_input(&mut self, id: u32, port_count: i32) { let mut x = -port_count/2; let y = 1; loop { if !self.is_cell_filled((x,y)) { self.set((x,y), id); return; } x += 1; } } fn add_output(&mut self, id: u32, port_count: i32) { let mut x = -port_count/2; let y = 1; loop { if !self.is_cell_filled((x,y)) { break; } x += 1; } x += 1; loop { if !self.is_cell_filled((x,y)) { self.set((x,y), id); return; } x += 1; } } fn add_node(&mut self, id: u32) { let base_x = -self.approx_w/2; let mut y = 2; loop { let wind_dir = (y & 1) == 1; for offset_x in 0..self.approx_w { let x = if wind_dir { base_x + offset_x } else { -base_x - offset_x }; if !self.is_cell_filled((x,y)) && !self.is_cell_reserved((x,y)) { self.set((x,y), id); return; } } y += 1; } } } impl IRModule { pub fn layout_nodes(&mut self) { let mut networks: NetRegistry = Default::default(); self.grid.init(self.nodes.len()); print!("Layout... 
"); for (i,node) in self.nodes.iter().enumerate() { match node { IRNode::Input(_) => { self.grid.add_input(i as u32, self.port_count); }, IRNode::Constant(_) => { self.grid.add_node(i as u32); }, IRNode::Output(_,arg) => { self.grid.add_output(i as u32, self.port_count); networks.add_link(arg, i as u32, self); }, IRNode::BinOp(lhs,_,rhs) => { self.grid.add_node(i as u32); networks.add_link(lhs, i as u32, self); networks.add_link(rhs, i as u32, self); }, IRNode::BinOpCmpGate(lhs,_,_,gated) => { self.grid.add_node(i as u32); networks.add_link(lhs, i as u32, self); networks.add_link(gated, i as u32, self); }, IRNode::BinOpSame(arg,_) => { self.grid.add_node(i as u32); networks.add_link(arg, i as u32, self); }, IRNode::MultiDriver(_) => (), IRNode::Removed => (), _ => panic!("Node {:?} is not supported at this stage.",node) } } let mut pass_n = 1; let mut priority_list = Vec::new(); loop { let res = networks.to_links(&self, &priority_list); if let Err(bad_nets) = res { for net_id in &bad_nets { networks.list[*net_id as usize].correct(self); } priority_list = bad_nets; } else { self.links = res.unwrap(); break; } pass_n += 1; } println!("Done in {} passes.",pass_n); } fn can_move(&self, id: u32) -> bool { match self.nodes.get(id as usize) { IRNode::Input(..) | IRNode::Output(..) => false, _ => true } } }
use std::collections::HashMap; use rand::Rng; use crate::{common::ConnectType, disjoint_set::DisjointSet}; use super::{IRArg, IRModule, IRNode, WireColor}; #[derive(Debug)] struct WireNet { color: WireColor, connections: Vec<(u32,ConnectType)> } const MAX_DIST: f32 = 9.0; fn square_dist(a: (f32,f32), b: (f32,f32)) -> f32 { let x = a.0 - b.0; let y = a.1 - b.1; x * x + y * y } fn check_dist(sq_dist: f32) -> bool { return sq_dist <= MAX_DIST * MAX_DIST; } impl WireNet { fn to_links(&self, module: &IRModule, out: &mut Vec<WireLink>) -> bool { let mut subnet_ids = DisjointSet::new(self.connections.len()); loop { let mut link_count = 0; for id_a in 0..self.connections.len() { let net_id_a = subnet_ids.get(id_a); for id_b in (id_a+1)..self.connections.len() { let net_id_b = subnet_ids.get(id_b); if net_id_a == net_id_b { continue; } let pos_a = module.get_true_pos(self.connections[id_a].0).unwrap(); let pos_b = module.get_true_pos(self.connections[id_b].0).unwrap(); if !check_dist(square_dist(pos_a,
self.shift_chain(module, new_pos, base_pos); } assert_eq!(module.grid.get_id_at(new_pos),None); module.grid.set(new_pos, *id); } } fn shift_chain(&self, module: &mut IRModule, start_pos: (i32,i32), end_pos: (i32,i32)) { fn move_toward(a: (i32,i32), b: (i32,i32)) -> (i32,i32) { let (ax,ay) = a; let (bx,by) = b; if ax > bx { (ax-1,ay) } else if ax < bx { (ax+1,ay) } else { if ay > by { (ax,ay-1) } else if ay < by { (ax,ay+1) } else { panic!("can not move, a == b"); } } } let mut target_pos = end_pos; let mut source_pos = move_toward(target_pos,start_pos); loop { if let Some(id) = module.grid.get_id_at(source_pos) { if module.can_move(id) { module.grid.set(target_pos, id); target_pos = source_pos; if target_pos == start_pos { return; } } } source_pos = move_toward(source_pos,start_pos); } } } #[derive(Debug)] pub struct WireLink { pub color: WireColor, pub a: (u32,ConnectType), pub b: (u32,ConnectType) } #[derive(Default)] struct NetRegistry { map: HashMap<(u32,ConnectType,WireColor),usize>, list: Vec<WireNet> } impl NetRegistry { fn add_link(&mut self, src_arg: &IRArg, dest_id: u32, module: &IRModule) { if let IRArg::Link(src_id,color) = src_arg { if let IRNode::MultiDriver(args) = module.nodes.get(*src_id as usize) { for arg in args { let fixed_arg = if let IRArg::Link(src_id,_) = arg { IRArg::Link(*src_id, *color) } else { panic!("raw constants not permitted in multi-driver"); }; self.add_link(&fixed_arg, dest_id, module); } return; } let src_key = (*src_id,ConnectType::Out,*color); let dest_key = (dest_id,ConnectType::In,*color); let src_net_exists = self.map.contains_key(&src_key); let dest_net_exists = self.map.contains_key(&dest_key); if src_net_exists && dest_net_exists { let net_id_1 = *self.map.get(&src_key).unwrap(); let net_id_2 = *self.map.get(&dest_key).unwrap(); if net_id_1 == net_id_2 { return; } panic!("both exist {} {}",net_id_1,net_id_2); } else if src_net_exists { let net_id = *self.map.get(&src_key).unwrap(); let net = &mut self.list[net_id]; 
net.connections.push((dest_id,ConnectType::In)); self.map.insert(dest_key, net_id); } else if dest_net_exists { let net_id = *self.map.get(&dest_key).unwrap(); let net = &mut self.list[net_id]; net.connections.push((*src_id,ConnectType::Out)); self.map.insert(src_key, net_id); } else { let net = WireNet{ color: *color, connections: vec!((*src_id,ConnectType::Out),(dest_id,ConnectType::In)) }; let net_id = self.list.len(); self.list.push(net); self.map.insert(src_key, net_id); self.map.insert(dest_key, net_id); } } } fn to_links(&self, module: &IRModule, priority_check_list: &Vec<u32>) -> Result< Vec<WireLink>, Vec<u32> > { let mut failed = Vec::new(); let mut out = Vec::new(); for i in priority_check_list { let net = &self.list[*i as usize]; if !net.to_links(module, &mut out) { failed.push(*i); } } if failed.len() > 0 { return Err(failed); } out.clear(); for (i, net) in self.list.iter().enumerate() { if !net.to_links(module, &mut out) { failed.push(i as u32); } } if failed.len() > 0 { Err(failed) } else { Ok(out) } } } #[derive(Default, Debug)] pub struct Grid { cell_map: HashMap<(i32,i32),u32>, node_positions: Vec<Option<(i32,i32)>>, approx_w: i32 } impl Grid { fn init(&mut self, size: usize) { self.approx_w = ((size as f32 / 2.0).sqrt() * 2.0).ceil() as i32; self.node_positions.resize(size, None); } pub fn get_pos_for(&self, id: u32) -> Option<(i32,i32)> { if (id as usize) < self.node_positions.len() { return self.node_positions[id as usize]; } None } fn is_cell_filled(&self, key: (i32,i32)) -> bool { self.cell_map.get(&key).is_some() } fn is_cell_reserved(&self, key: (i32,i32)) -> bool { let x = key.0.rem_euclid(18); let y = key.1.rem_euclid(9); y == 0 && x <= 1 } fn get_id_at(&self, key: (i32,i32)) -> Option<u32> { self.cell_map.get(&key).map(|x| *x) } fn set(&mut self, key: (i32,i32), val: u32) { if let Some(current_id) = self.cell_map.get(&key) { self.node_positions[*current_id as usize] = None; } if let Some(current_pos) = self.node_positions[val as usize] { 
self.cell_map.remove(&current_pos); } self.cell_map.insert(key,val); self.node_positions[val as usize] = Some(key); } fn add_input(&mut self, id: u32, port_count: i32) { let mut x = -port_count/2; let y = 1; loop { if !self.is_cell_filled((x,y)) { self.set((x,y), id); return; } x += 1; } } fn add_output(&mut self, id: u32, port_count: i32) { let mut x = -port_count/2; let y = 1; loop { if !self.is_cell_filled((x,y)) { break; } x += 1; } x += 1; loop { if !self.is_cell_filled((x,y)) { self.set((x,y), id); return; } x += 1; } } fn add_node(&mut self, id: u32) { let base_x = -self.approx_w/2; let mut y = 2; loop { let wind_dir = (y & 1) == 1; for offset_x in 0..self.approx_w { let x = if wind_dir { base_x + offset_x } else { -base_x - offset_x }; if !self.is_cell_filled((x,y)) && !self.is_cell_reserved((x,y)) { self.set((x,y), id); return; } } y += 1; } } } impl IRModule { pub fn layout_nodes(&mut self) { let mut networks: NetRegistry = Default::default(); self.grid.init(self.nodes.len()); print!("Layout... 
"); for (i,node) in self.nodes.iter().enumerate() { match node { IRNode::Input(_) => { self.grid.add_input(i as u32, self.port_count); }, IRNode::Constant(_) => { self.grid.add_node(i as u32); }, IRNode::Output(_,arg) => { self.grid.add_output(i as u32, self.port_count); networks.add_link(arg, i as u32, self); }, IRNode::BinOp(lhs,_,rhs) => { self.grid.add_node(i as u32); networks.add_link(lhs, i as u32, self); networks.add_link(rhs, i as u32, self); }, IRNode::BinOpCmpGate(lhs,_,_,gated) => { self.grid.add_node(i as u32); networks.add_link(lhs, i as u32, self); networks.add_link(gated, i as u32, self); }, IRNode::BinOpSame(arg,_) => { self.grid.add_node(i as u32); networks.add_link(arg, i as u32, self); }, IRNode::MultiDriver(_) => (), IRNode::Removed => (), _ => panic!("Node {:?} is not supported at this stage.",node) } } let mut pass_n = 1; let mut priority_list = Vec::new(); loop { let res = networks.to_links(&self, &priority_list); if let Err(bad_nets) = res { for net_id in &bad_nets { networks.list[*net_id as usize].correct(self); } priority_list = bad_nets; } else { self.links = res.unwrap(); break; } pass_n += 1; } println!("Done in {} passes.",pass_n); } fn can_move(&self, id: u32) -> bool { match self.nodes.get(id as usize) { IRNode::Input(..) | IRNode::Output(..) => false, _ => true } } }
pos_b)) { continue; } link_count += 1; subnet_ids.merge(net_id_a, net_id_b); out.push(WireLink{ color: self.color, a: self.connections[id_a].clone(), b: self.connections[id_b].clone() }); break; } } if subnet_ids.count_sets() == 1 { return true; } if link_count == 0 { return false; } } } fn correct(&self, module: &mut IRModule) { const MIN_FRACTION: f32 = 0.0; const MAX_FRACTION: f32 = 1.0; fn lerp_pos(start: (i32,i32), end: (i32,i32), f: f32) -> (i32,i32) { let x = start.0 + ((end.0 - start.0) as f32 * f).round() as i32; let y = start.1 + ((end.1 - start.1) as f32 * f).round() as i32; (x,y) } let mut rng = rand::thread_rng(); let mut positions = Vec::new(); for (id,_) in &self.connections { let pos = module.grid.get_pos_for(*id).unwrap(); positions.push(pos); } let mid_pos = positions[rng.gen_range(0..positions.len())]; for (id,_) in &self.connections { if !module.can_move(*id) { continue; } let base_pos = module.grid.get_pos_for(*id).unwrap(); if base_pos == mid_pos { continue; } let fraction = MIN_FRACTION + (MAX_FRACTION - MIN_FRACTION) * rng.gen::<f32>(); let new_pos = lerp_pos(base_pos, mid_pos, fraction); if new_pos == base_pos { continue; } if module.grid.is_cell_reserved(new_pos) { continue; } if let Some(old_id) = module.grid.get_id_at(new_pos) { if !module.can_move(old_id) { continue; }
random
[ { "content": "fn update_color_for_arg(arg: &mut IRArg, forbid_color: WireColor, out_color_counts: &mut Vec<ColorCounts>) -> WireColor {\n\n match arg {\n\n IRArg::Link(parent,color) => {\n\n assert_eq!(*color,WireColor::None);\n\n\n\n let counts = &mut out_color_counts[*parent as usize];\n\n\n\n let red_picked = match forbid_color {\n\n WireColor::Red => false,\n\n WireColor::Green => true,\n\n WireColor::None => counts.red >= counts.green\n\n };\n\n\n\n *color = if red_picked {\n\n counts.red += 1;\n\n WireColor::Red\n\n } else {\n\n counts.green += 1;\n\n WireColor::Green\n\n };\n", "file_path": "src/ir/select_colors.rs", "rank": 1, "score": 106235.13538996913 }, { "content": "fn add_arg(arg: &IRArg, saved: &mut Vec<bool>, stack: &mut Vec<usize>) {\n\n if let IRArg::Link(id,_) = arg {\n\n if !saved[*id as usize] {\n\n saved[*id as usize] = true;\n\n stack.push(*id as usize);\n\n }\n\n }\n\n}\n\n\n\nimpl IRModule {\n\n pub fn prune(&mut self) {\n\n let mut saved = Vec::new();\n\n saved.resize(self.nodes.len(), false);\n\n\n\n let mut stack: Vec<usize> = Vec::new();\n\n\n\n // save inputs, add outputs to stack\n\n for (i,node) in self.nodes.iter().enumerate() {\n\n match node {\n\n IRNode::Input(..) 
=> {\n", "file_path": "src/ir/opt/tree_prune.rs", "rank": 3, "score": 83847.61865691331 }, { "content": "fn make_pos(arg: (f32,f32)) -> Position {\n\n Position{x: arg.0, y: arg.1}\n\n}\n\n\n", "file_path": "src/ir/to_blueprint.rs", "rank": 4, "score": 82760.34616268385 }, { "content": "#[derive(Default,Clone)]\n\nstruct ColorCounts {\n\n red: i32,\n\n green: i32\n\n}\n\n\n", "file_path": "src/ir/select_colors.rs", "rank": 5, "score": 76847.71001712384 }, { "content": "fn parse_expr<'a>(parser: &mut Parser<'a>) -> Expr<'a> {\n\n\n\n let mut expr_stack: Vec<Expr> = Vec::new();\n\n let mut op_stack: Vec<BinOp> = Vec::new();\n\n let mut ternary = false;\n\n\n\n expr_stack.push(parse_leaf(parser));\n\n\n\n loop {\n\n // try parsing an operator, or end the expression\n\n let next_tok = parser.peek();\n\n\n\n let new_op = match next_tok {\n\n LexToken::OpAdd => BinOp::Add,\n\n LexToken::OpSub => BinOp::Sub,\n\n LexToken::OpMul => BinOp::Mul,\n\n LexToken::OpDiv => BinOp::Div,\n\n LexToken::OpMod => BinOp::Mod,\n\n LexToken::OpPower => BinOp::Power,\n\n\n", "file_path": "src/parser.rs", "rank": 7, "score": 63290.56872945506 }, { "content": "fn parse_leaf<'a>(parser: &mut Parser<'a>) -> Expr<'a> {\n\n let tok = parser.next();\n\n\n\n match tok {\n\n LexToken::Ident(id) => {\n\n if parser.peek() == LexToken::OpParenOpen {\n\n let mut sub_args = Vec::new();\n\n parser.take(LexToken::OpParenOpen);\n\n\n\n if parser.peek() == LexToken::OpParenClose {\n\n parser.take(LexToken::OpParenClose);\n\n } else {\n\n loop {\n\n sub_args.push(parse_expr(parser));\n\n if parser.take_comma_or_close_paren() {\n\n break;\n\n }\n\n }\n\n }\n\n Expr::SubModule(id.to_owned(),sub_args)\n", "file_path": "src/parser.rs", "rank": 8, "score": 63290.56872945506 }, { "content": "fn parse_stmt<'a>(parser: &mut Parser<'a>) -> Statement<'a> {\n\n let tok = parser.next();\n\n match tok {\n\n LexToken::KeyOutput => {\n\n let mut out_args = Vec::new();\n\n parser.take(LexToken::OpParenOpen);\n\n // Don't 
worry about the empty case, why output nothing?\n\n loop {\n\n out_args.push(parse_expr(parser));\n\n if parser.take_comma_or_close_paren() {\n\n break;\n\n }\n\n }\n\n Statement::Output(out_args)\n\n },\n\n LexToken::KeyLet => {\n\n if parser.peek() == LexToken::OpParenOpen {\n\n let mut idents = Vec::new();\n\n parser.take(LexToken::OpParenOpen);\n\n // Don't worry about the empty case, why have zero vars?\n", "file_path": "src/parser.rs", "rank": 9, "score": 63290.56872945506 }, { "content": "fn get_circuit_id(ent_type: &str, connect_type: ConnectType) -> u32 {\n\n match ent_type {\n\n \"constant-combinator\" | \"medium-electric-pole\" => 1,\n\n \"arithmetic-combinator\" | \"decider-combinator\" => match connect_type {\n\n ConnectType::In => 1,\n\n ConnectType::Out => 2\n\n },\n\n _ => panic!(\"can't get circuit id for: {}\",ent_type)\n\n }\n\n}\n\n\n", "file_path": "src/ir/to_blueprint.rs", "rank": 10, "score": 53634.63806638164 }, { "content": "/// We narrow from 64 bit constants so we can use both signed and unsigned 32 bit constants.\n\nfn narrow_constant(x: i64) -> i32 {\n\n if let Ok(n) = x.try_into() {\n\n n\n\n } else if let Ok(n) = x.try_into() {\n\n let n: u32 = n;\n\n n as i32\n\n } else {\n\n panic!(\"constant too wide: {}\",x)\n\n }\n\n}\n\n\n", "file_path": "src/ir/mod.rs", "rank": 11, "score": 51269.85853019594 }, { "content": "#[derive(RustEmbed)]\n\n#[folder = \"assets\"]\n\nstruct Asset;\n\n\n", "file_path": "src/assets.rs", "rank": 12, "score": 48603.01212529039 }, { "content": "fn main() {\n\n\n\n let options = CmdOptions::parse();\n\n\n\n // TODO allow users to override or add additional symbols\n\n let symbols_json = assets::get_asset_string(\"symbols.json\").expect(\"failed to load symbol defintions\");\n\n symbols::load_symbols(&symbols_json);\n\n\n\n \n\n let settings = Rc::new(CompileSettings{\n\n fold_constants: !(options.no_fold || options.no_opt),\n\n prune: !(options.no_prune || options.no_opt),\n\n main_mod_name: 
options.mod_name\n\n });\n\n\n\n let mut modules = HashMap::new();\n\n let mut constants = HashMap::new();\n\n\n\n // Load prelude\n\n {\n", "file_path": "src/main.rs", "rank": 13, "score": 47053.064931099885 }, { "content": "#[derive(Debug,Serialize,Deserialize)]\n\nstruct BlueprintWrapper {\n\n blueprint: Blueprint\n\n}\n\n\n\n#[derive(Debug,Serialize,Deserialize)]\n\npub struct Blueprint {\n\n pub entities: Vec<Entity>\n\n}\n\n\n\n#[derive(Debug,Serialize,Deserialize)]\n\npub struct Entity {\n\n pub entity_number: u32,\n\n pub name: String,\n\n pub position: Position,\n\n pub direction: u32, // usually 4 for us\n\n pub control_behavior: ControlBehavior,\n\n pub connections: Option<HashMap<u32,Connections>> // key = circuit id\n\n}\n\n\n\n#[derive(Debug,Serialize,Deserialize)]\n", "file_path": "src/blueprint.rs", "rank": 14, "score": 47039.79742152554 }, { "content": "#[derive(Debug,Serialize,Deserialize)]\n\nstruct ParseSymbol{\n\n id: String,\n\n signal: Signal\n\n}\n\n\n\nstatic SYMBOL_INFO: OnceCell<SymbolInfo> = OnceCell::new();\n\n\n", "file_path": "src/symbols.rs", "rank": 15, "score": 47039.79742152554 }, { "content": "#[derive(CmdParser)]\n\n#[clap(version = \"1.0.0\", author = \"cogg <[email protected]>\")]\n\nstruct CmdOptions {\n\n /// The source file to compile.\n\n filename: String,\n\n #[clap(default_value = \"main\")]\n\n /// The name of the top-level module to generate a blueprint for.\n\n mod_name: String,\n\n\n\n #[clap(long)]\n\n /// Disable all optimizations.\n\n no_opt: bool,\n\n #[clap(long)]\n\n /// Disable constant folding.\n\n no_fold: bool,\n\n #[clap(long)]\n\n /// Disable pruning unused combinators.\n\n no_prune: bool,\n\n\n\n #[clap(long)]\n\n rom_offset: Option<u32>\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct CompileSettings {\n\n fold_constants: bool,\n\n prune: bool,\n\n main_mod_name: String\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 16, "score": 47039.79742152554 }, { "content": "struct SymbolInfo {\n\n signals: 
Vec<Signal>,\n\n ident_map: HashMap<String, u32>\n\n}\n\n\n", "file_path": "src/symbols.rs", "rank": 17, "score": 47039.79742152554 }, { "content": "struct Parser<'a> {\n\n lexer: Peekable<Lexer<'a>>\n\n}\n\n\n\nimpl<'a> Parser<'a> {\n\n fn new(lexer: Lexer<'a>) -> Self {\n\n Self{lexer: lexer.peekable()}\n\n }\n\n\n\n fn take(&mut self, tok: LexToken) {\n\n let present = self.next();\n\n if present != tok {\n\n panic!(\"Expected {:?}, found {:?}.\",tok,present);\n\n }\n\n }\n\n\n\n fn take_ident(&mut self) -> &'a str {\n\n let present = self.next();\n\n if let LexToken::Ident(ident_str) = present {\n\n ident_str\n", "file_path": "src/parser.rs", "rank": 18, "score": 46003.665426927306 }, { "content": "#[derive(Default,Debug)]\n\nstruct NodeList {\n\n nodes: Vec<IRNode>,\n\n debug_names: Vec<String>\n\n}\n\n\n\nimpl NodeList {\n\n pub fn iter(&self) -> core::slice::Iter<IRNode> {\n\n self.nodes.iter()\n\n }\n\n\n\n pub fn iter_mut(&mut self) -> core::slice::IterMut<IRNode> {\n\n self.nodes.iter_mut()\n\n }\n\n\n\n pub fn iter_debug(&self) -> std::iter::Zip<core::slice::Iter<IRNode>,core::slice::Iter<String>> {\n\n self.nodes.iter().zip(self.debug_names.iter())\n\n }\n\n\n\n pub fn get(&self, index: usize) -> &IRNode {\n\n return &self.nodes[index];\n", "file_path": "src/ir/mod.rs", "rank": 19, "score": 45643.00861778129 }, { "content": "struct BlueprintBuilder {\n\n entities: Vec<Entity>\n\n}\n\n\n", "file_path": "src/ir/to_blueprint.rs", "rank": 21, "score": 45643.00861778129 }, { "content": "// Consumes a list of AST modules and returns the IR for the final module.\n\n// Runs checks on the modules. 
May panic if an error is encountered.\n\npub fn build_ir(\n\n parse_mods: Vec<ParseItem>,\n\n settings: Rc<CompileSettings>,\n\n modules: &mut HashMap<String,IRModule>,\n\n constants: &mut HashMap<String,i64>\n\n) {\n\n for p_item in parse_mods {\n\n match p_item {\n\n ParseItem::Constant(name,num) => {\n\n if constants.insert(name.to_owned(), num).is_some() {\n\n panic!(\"Duplicate constant definition for '{}'.\",name);\n\n }\n\n },\n\n ParseItem::Module(p_mod) => {\n\n let mut ir = IRModule::new(p_mod.name.to_owned(), settings.clone());\n\n ir.arg_types = p_mod.arg_types;\n\n ir.ret_types = p_mod.ret_types;\n\n \n\n if ir.arg_types.len() != p_mod.arg_names.len() {\n\n panic!(\"The number of args does not match the number of types. This should never happen.\");\n", "file_path": "src/ir/mod.rs", "rank": 22, "score": 40446.56875886969 }, { "content": "pub fn load_symbols(json: &str) {\n\n let symbols: Vec<ParseSymbol> = serde_json::from_str(&json).expect(\"bad json\");\n\n let mut info = SymbolInfo{signals:Vec::new(),ident_map:HashMap::new()};\n\n for symbol in symbols {\n\n let index = info.signals.len() as u32;\n\n info.signals.push(symbol.signal);\n\n info.ident_map.insert(symbol.id.to_uppercase(),index);\n\n }\n\n SYMBOL_INFO.set(info).ok();\n\n}\n\n\n", "file_path": "src/symbols.rs", "rank": 23, "score": 35897.55252682153 }, { "content": "#[allow(unused)]\n\npub fn read_blueprint(blueprint: &str) -> Blueprint {\n\n assert_eq!(blueprint.chars().next(),Some('0'),\"bad version\");\n\n let (_,b64) = blueprint.split_at(1);\n\n let data = base64::decode(b64 ).expect(\"bad base64\");\n\n\n\n let mut decomp = ZlibDecoder::new(&*data);\n\n let mut json = String::new();\n\n decomp.read_to_string(&mut json).unwrap();\n\n println!(\"=> {}\",json);\n\n \n\n let wrapper: BlueprintWrapper = serde_json::from_str(&json).expect(\"bad json\");\n\n wrapper.blueprint\n\n}\n\n\n", "file_path": "src/blueprint.rs", "rank": 24, "score": 33634.33077917391 }, { "content": "pub fn 
write_blueprint(blueprint: Blueprint) -> String {\n\n let wrapper = BlueprintWrapper{blueprint};\n\n let json = serde_json::to_string(&wrapper).expect(\"serialize failed\");\n\n //println!(\"=> {}\",json);\n\n\n\n let mut compress = ZlibEncoder::new(Vec::new(), Compression::best());\n\n compress.write_all(json.as_bytes()).unwrap();\n\n let data = compress.finish().unwrap();\n\n let b64 = base64::encode(data);\n\n format!(\"0{}\",b64)\n\n}\n\n\n", "file_path": "src/blueprint.rs", "rank": 25, "score": 33634.33077917391 }, { "content": "pub fn symbol_index_from_identifier(ident: &str) -> u32 {\n\n let info = SYMBOL_INFO.get().expect(\"symbol info not loaded\");\n\n if let Some(n) = info.ident_map.get(&ident.to_uppercase()) {\n\n *n\n\n } else {\n\n panic!(\"Signal name '{}' does not exist.\",ident);\n\n }\n\n}\n", "file_path": "src/symbols.rs", "rank": 26, "score": 32704.019031700504 }, { "content": "// TODO bidirectional mapping\n\npub fn signal_from_symbol_index(index: u32) -> Signal {\n\n let info = SYMBOL_INFO.get().expect(\"symbol info not loaded\");\n\n info.signals[index as usize].clone()\n\n}\n\n\n", "file_path": "src/symbols.rs", "rank": 27, "score": 32704.019031700504 }, { "content": "pub fn get_asset_string(filename: &str) -> Option<String> {\n\n // Since rust_embed uses cows and directly reads the FS in debug mode,\n\n // we unfortunately can't just hand out references to the static data.\n\n if let Some(file) = Asset::get(filename) {\n\n std::str::from_utf8(file.data.as_ref()).ok().map(|x| x.to_owned())\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "src/assets.rs", "rank": 28, "score": 30815.733278001717 }, { "content": "pub fn parse<'a>(source: &'a str) -> Vec<ParseItem<'a>> {\n\n\n\n let lexer = Lexer::new(source);\n\n let mut parser = Parser::new(lexer);\n\n\n\n // Module declaration\n\n let mut results = Vec::new();\n\n while !parser.is_eof() {\n\n if parser.peek() == LexToken::KeyConst {\n\n parser.take(LexToken::KeyConst);\n\n\n\n let name = 
parser.take_ident();\n\n parser.take(LexToken::OpAssign);\n\n \n\n let expr = parse_expr(&mut parser);\n\n\n\n parser.take(LexToken::OpSemicolon);\n\n\n\n if let Expr::Constant(num) = expr {\n\n results.push(ParseItem::Constant(name,num));\n", "file_path": "src/parser.rs", "rank": 29, "score": 29388.070399778328 }, { "content": "pub fn make_rom(data: &[u8], offset: u32) -> String {\n\n \n\n let mut result = String::from(\"mod main(addr: $A) -> $X { output(match(addr) {\\n\");\n\n\n\n for (index,x) in data.iter().enumerate() {\n\n result.push_str(&format!(\"\\t {} => {},\",index+offset as usize,x));\n\n if index % ENTRIES_PER_LINE == ENTRIES_PER_LINE-1 {\n\n result.push('\\n');\n\n }\n\n }\n\n\n\n result.push_str(\"})}\\n\");\n\n\n\n println!(\"{}\",result);\n\n result\n\n}\n", "file_path": "src/rom_generator.rs", "rank": 30, "score": 29155.593764384997 }, { "content": " \n\n *color\n\n },\n\n IRArg::Constant(_) => WireColor::None\n\n }\n\n}\n\n\n\nimpl IRModule {\n\n /// Is the argument connected to a module input?\n\n fn is_input(&self, arg: &IRArg) -> bool {\n\n match arg {\n\n IRArg::Link(parent,_) => {\n\n let node = self.nodes.get(*parent as usize);\n\n if let IRNode::Input(_) = node {\n\n true\n\n } else if let IRNode::MultiDriver(args) = node {\n\n args.iter().any(|x| self.is_input(x))\n\n } else {\n\n false\n\n }\n", "file_path": "src/ir/select_colors.rs", "rank": 31, "score": 26339.18117268322 }, { "content": "use std::collections::HashMap;\n\n\n\nuse super::{IRArg, IRModule, IRNode, WireColor};\n\n\n\n\n\n#[derive(Default,Clone)]\n", "file_path": "src/ir/select_colors.rs", "rank": 32, "score": 26337.369526828355 }, { "content": " update_color_for_arg(arg, forbid_color, &mut out_color_counts);\n\n },\n\n IRNode::Output(_,arg) => {\n\n // Force red outputs\n\n update_color_for_arg(arg , WireColor::Green, &mut out_color_counts);\n\n },\n\n IRNode::MultiDriver(_) => (), // use colors determined by downstream nodes\n\n _ => panic!(\"Node {:?} is not supported 
at this stage.\",node)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/ir/select_colors.rs", "rank": 33, "score": 26337.289354555025 }, { "content": " for i in 0..self.nodes.len() {\n\n let node = self.nodes.get_mut(i);\n\n match node {\n\n IRNode::Input(_) => (),\n\n IRNode::Constant(_) => (),\n\n IRNode::Removed => (),\n\n IRNode::BinOp(lhs,_,rhs) |\n\n IRNode::BinOpCmpGate(lhs,_,_,rhs) => {\n\n let input_side = inputs.get(&i);\n\n let forbid_color = match input_side {\n\n Some(0) => WireColor::Green,\n\n Some(1) => WireColor::Red,\n\n _ => WireColor::None\n\n };\n\n let forbid_color = update_color_for_arg(lhs, forbid_color, &mut out_color_counts);\n\n update_color_for_arg(rhs , forbid_color, &mut out_color_counts);\n\n },\n\n IRNode::BinOpSame(arg,_) => {\n\n let input_side = inputs.get(&i);\n\n let forbid_color = if input_side.is_some() { WireColor::Green } else { WireColor::None };\n", "file_path": "src/ir/select_colors.rs", "rank": 34, "score": 26334.91397201434 }, { "content": " },\n\n IRArg::Constant(_) => false\n\n }\n\n }\n\n\n\n pub fn select_colors(&mut self) {\n\n let mut out_color_counts: Vec<ColorCounts> = Vec::new();\n\n out_color_counts.resize(self.nodes.len(), Default::default());\n\n\n\n let mut inputs = HashMap::new();\n\n\n\n for i in 0..self.nodes.len() {\n\n let node = self.nodes.get(i);\n\n match node {\n\n IRNode::BinOp(lhs,_,rhs) |\n\n IRNode::BinOpCmpGate(lhs,_,_,rhs) => {\n\n let li = self.is_input(lhs);\n\n let ri = self.is_input(rhs);\n\n\n\n if li && ri {\n", "file_path": "src/ir/select_colors.rs", "rank": 35, "score": 26334.5822036518 }, { "content": " panic!(\"CANNOT COLOR BOTH ARGS RED: {:?} {:?}\",lhs,rhs);\n\n }\n\n\n\n if li {\n\n inputs.insert(i, 0);\n\n }\n\n\n\n if ri {\n\n inputs.insert(i, 1);\n\n }\n\n },\n\n IRNode::BinOpSame(arg,_) => {\n\n if self.is_input(arg) {\n\n inputs.insert(i, 0);\n\n }\n\n },\n\n _ => ()\n\n }\n\n }\n\n\n", "file_path": "src/ir/select_colors.rs", "rank": 36, "score": 26330.901856094886 }, { 
"content": "use std::collections::HashMap;\n\n\n\nuse crate::{blueprint::{ArithmeticConditions, Blueprint, Connection, ControlBehavior, DeciderConditions, Entity, Filter, Position, Signal}};\n\nuse crate::symbols::signal_from_symbol_index;\n\n\n\nuse super::{IRArg, IRModule, IRNode, WireColor};\n\nuse crate::common::ConnectType;\n\n\n", "file_path": "src/ir/to_blueprint.rs", "rank": 38, "score": 10.969193545884863 }, { "content": " let b_circuit_id = get_circuit_id(&self.entities[b.0-1].name, b.1);\n\n\n\n // TODO clean this up a bit -- add a getter for Connections that returns the list for a color\n\n if color == WireColor::Red {\n\n self.entities[a.0-1].connections.as_mut().unwrap()\n\n .entry(a_circuit_id).or_default()\n\n .red.get_or_insert_with(|| Vec::new())\n\n .push(Connection{entity_id: b.0 as u32, circuit_id: Some(b_circuit_id)});\n\n\n\n self.entities[b.0-1].connections.as_mut().unwrap()\n\n .entry(b_circuit_id).or_default()\n\n .red.get_or_insert_with(|| Vec::new())\n\n .push(Connection{entity_id: a.0 as u32, circuit_id: Some(a_circuit_id)});\n\n } else if color == WireColor::Green {\n\n self.entities[a.0-1].connections.as_mut().unwrap()\n\n .entry(a_circuit_id).or_default()\n\n .green.get_or_insert_with(|| Vec::new())\n\n .push(Connection{entity_id: b.0 as u32, circuit_id: Some(b_circuit_id)});\n\n\n\n self.entities[b.0-1].connections.as_mut().unwrap()\n", "file_path": "src/ir/to_blueprint.rs", "rank": 39, "score": 9.687073238296605 }, { "content": " .entry(b_circuit_id).or_default()\n\n .green.get_or_insert_with(|| Vec::new())\n\n .push(Connection{entity_id: a.0 as u32, circuit_id: Some(a_circuit_id)});\n\n } else {\n\n panic!(\"no wire color in blueprint gen\");\n\n }\n\n }\n\n\n\n fn get_bounds(&self) -> [f32;4] {\n\n let mut max_x = f32::MIN;\n\n let mut min_x = f32::MAX;\n\n let mut max_y = max_x;\n\n let mut min_y = min_x;\n\n for ent in &self.entities {\n\n max_x = max_x.max(ent.position.x);\n\n min_x = min_x.min(ent.position.x);\n\n max_y = 
max_y.max(ent.position.y);\n\n min_y = min_y.min(ent.position.y);\n\n }\n\n [min_x,min_y,max_x,max_y]\n", "file_path": "src/ir/to_blueprint.rs", "rank": 40, "score": 9.3598738652491 }, { "content": "// This is not really an optimization pass, but it happens during optimization.\n\n// It expands Gates and makes sure some requirements for conversion to combinators are satasfied.\n\n\n\nuse crate::{common::BinOp, ir::WireColor};\n\n\n\nuse super::super::{IRModule, IRNode, IRArg};\n\n\n\nimpl IRModule {\n\n pub fn fix_nodes(&mut self) {\n\n for i in 0..self.nodes.len() {\n\n let node = self.nodes.get(i).clone();\n\n match node {\n\n IRNode::BinOp(lhs,op,rhs) => {\n\n if lhs.is_link() && lhs == rhs {\n\n // 1. fix same-arg binops\n\n self.nodes.update(i, IRNode::BinOpSame(lhs.clone(),op.clone()));\n\n } else if op.is_compare() {\n\n // 2. fix comparisons (lhs cannot be constant)\n\n if lhs.is_link() {\n\n // fine as-is\n", "file_path": "src/ir/opt/fix_nodes.rs", "rank": 41, "score": 9.124010221492854 }, { "content": " fn add_pole(&mut self, pos: (f32,f32), substation: bool) -> usize {\n\n let id = self.entities.len()+1;\n\n self.entities.push(Entity{\n\n entity_number: id as u32,\n\n name: if substation { \"substation\" } else { \"medium-electric-pole\"}.to_owned(),\n\n position: make_pos(pos),\n\n direction: 4,\n\n\n\n connections: Some(HashMap::new()),\n\n control_behavior: ControlBehavior{\n\n arithmetic_conditions: None,\n\n decider_conditions: None,\n\n filters: None\n\n }\n\n });\n\n id\n\n }\n\n\n\n fn add_arithmetic(&mut self, pos: (f32,f32), operation: String, lhs: SymbolOrConstant, rhs: SymbolOrConstant, out_symbol: u32) -> usize {\n\n let id = self.entities.len()+1;\n", "file_path": "src/ir/to_blueprint.rs", "rank": 42, "score": 8.875685351180646 }, { "content": " OpQuestion,\n\n\n\n OpNotBitwise,\n\n OpNotLogical,\n\n\n\n OpParenOpen,\n\n OpParenClose,\n\n OpBraceOpen,\n\n OpBraceClose,\n\n}\n\n\n\nimpl<'a> LexToken<'a> {\n\n fn ident_or_keyword(ident: &'a 
str) -> Self {\n\n match ident {\n\n \"mod\" => Self::KeyMod,\n\n \"output\" => Self::KeyOutput,\n\n \"let\" => Self::KeyLet,\n\n \"match\" => Self::KeyMatch,\n\n \"const\" => Self::KeyConst,\n\n \"use\" => panic!(\"'{}' is a keyword reserved for future use.\",ident),\n", "file_path": "src/lexer.rs", "rank": 45, "score": 8.273168861459997 }, { "content": "\n\n connections: Some(HashMap::new()),\n\n control_behavior: ControlBehavior{\n\n decider_conditions: Some(DeciderConditions{\n\n comparator,\n\n constant,\n\n first_signal,\n\n second_signal,\n\n output_signal,\n\n copy_count_from_input\n\n }),\n\n arithmetic_conditions: None,\n\n filters: None\n\n }\n\n });\n\n id\n\n }\n\n\n\n fn add_link(&mut self, color: WireColor, a: (usize,ConnectType), b: (usize,ConnectType)) {\n\n let a_circuit_id = get_circuit_id(&self.entities[a.0-1].name, a.1);\n", "file_path": "src/ir/to_blueprint.rs", "rank": 47, "score": 7.877274138402452 }, { "content": " let cond = self.fix_const(&cond);\n\n let gated = self.fix_const(&gated);\n\n\n\n // If gated == 0, this gate has no effect.\n\n if let IRArg::Constant(const_gated) = gated {\n\n if const_gated == 0 {\n\n self.nodes.update(index, IRNode::Constant(0));\n\n changes += 1;\n\n continue;\n\n }\n\n }\n\n\n\n // If cond is constant, evaluate to gated value or 0.\n\n if let IRArg::Constant(const_cond) = cond {\n\n let cond_bool = const_cond != 0;\n\n self.nodes.update(index, if cond_bool == check {\n\n self.clone_arg(&gated)\n\n } else {\n\n IRNode::Constant(0)\n\n });\n", "file_path": "src/ir/opt/constant_folding.rs", "rank": 48, "score": 7.58184520063012 }, { "content": "\n\nuse super::super::{IRModule, IRNode, IRArg};\n\n\n\nimpl IRModule {\n\n // Attempts to covert constant nodes to constant args.\n\n fn fix_const(&self, arg: &IRArg) -> IRArg {\n\n if let IRArg::Link(id,_) = arg {\n\n let node = self.nodes.get(*id as usize);\n\n if let IRNode::Constant(n) = node {\n\n return IRArg::Constant(*n);\n\n }\n\n }\n\n arg.clone()\n\n 
}\n\n\n\n fn clone_arg(&self, arg: &IRArg) -> IRNode {\n\n if let IRArg::Link(id,_) = arg {\n\n self.nodes.get(*id as usize).clone()\n\n } else if let IRArg::Constant(n) = arg {\n\n IRNode::Constant(*n)\n", "file_path": "src/ir/opt/constant_folding.rs", "rank": 50, "score": 7.287424329790811 }, { "content": " } else {\n\n panic!();\n\n }\n\n }\n\n\n\n pub fn fold_constants(&mut self) {\n\n // Doing this iteratively might be kinda dumb, but because of the\n\n // order of our nodes, we should usually finish in only a couple passes.\n\n loop {\n\n let mut changes = 0;\n\n for index in 0..self.nodes.len() {\n\n let node = self.nodes.get(index).clone();\n\n match node {\n\n IRNode::Input(..) | IRNode::Constant(..) => (),\n\n IRNode::Output(id,arg) => {\n\n self.nodes.update(index, IRNode::Output(id,self.fix_const(&arg)));\n\n },\n\n IRNode::BinOp(lhs,op,rhs) => {\n\n let lhs = self.fix_const(&lhs);\n\n let rhs = self.fix_const(&rhs);\n", "file_path": "src/ir/opt/constant_folding.rs", "rank": 51, "score": 7.197723802442408 }, { "content": " self.debug_names.push(name);\n\n }\n\n\n\n pub fn set(&mut self, index: usize, node: IRNode, name: String) {\n\n self.nodes[index] = node;\n\n self.debug_names[index] = name;\n\n }\n\n\n\n pub fn update(&mut self, index: usize, node: IRNode) {\n\n self.nodes[index] = node;\n\n }\n\n}\n\n\n\nimpl IRModule {\n\n fn new(name: String, settings: Rc<CompileSettings>) -> Self {\n\n IRModule{\n\n name,\n\n settings,\n\n port_count: 0,\n\n bindings: HashMap::new(),\n", "file_path": "src/ir/mod.rs", "rank": 53, "score": 7.056757850658786 }, { "content": "\n\nimpl<'a> Lexer<'a> {\n\n pub fn new(string: &str) -> Lexer {\n\n Lexer{\n\n chars: string.chars()\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for Lexer<'a> {\n\n type Item = LexToken<'a>;\n\n \n\n fn next(&mut self) -> Option<Self::Item> {\n\n loop {\n\n let parse_str = self.chars.as_str();\n\n \n\n return if let Some(c) = self.chars.next() {\n\n if c.is_ascii_alphabetic() || c == '_' {\n\n 
let token_end = parse_str.find(|c: char| !c.is_ascii_alphanumeric() && c != '_')\n\n .unwrap_or(parse_str.len());\n", "file_path": "src/lexer.rs", "rank": 55, "score": 6.920016383866926 }, { "content": " }\n\n\n\n for link in &self.links {\n\n let (a_id,a_ty) = link.a.clone();\n\n let (b_id,b_ty) = link.b.clone();\n\n\n\n builder.add_link(link.color, \n\n (ent_ids[a_id as usize],a_ty),\n\n (ent_ids[b_id as usize],b_ty)\n\n );\n\n }\n\n\n\n let [x_min,_y_min,x_max,y_max] = builder.get_bounds();\n\n\n\n let mut x_pole_start = 0;\n\n let mut x_pole_end = 0;\n\n let y_pole_start = 0;\n\n let mut y_pole_end = 0;\n\n while 0.5 + x_pole_start as f32 * 18.0 - 9.0 > x_min {\n\n x_pole_start -= 1;\n", "file_path": "src/ir/to_blueprint.rs", "rank": 56, "score": 6.907069812179641 }, { "content": "use std::{collections::HashMap, rc::Rc};\n\nuse std::convert::TryInto;\n\n\n\nuse crate::{CompileSettings, common::{BinOp, UnaryOp}};\n\nuse crate::parser::{Expr, ParseItem, Statement};\n\n\n\nuse self::layout::{Grid, WireLink};\n\n\n\nmod select_colors;\n\nmod select_symbols;\n\nmod layout;\n\nmod to_blueprint;\n\nmod opt;\n\n\n\n#[derive(Debug)]\n\npub struct IRModule {\n\n name: String,\n\n settings: Rc<CompileSettings>,\n\n port_count: i32,\n\n bindings: HashMap<String,IRArg>,\n", "file_path": "src/ir/mod.rs", "rank": 57, "score": 6.831064493205144 }, { "content": " changes += 1;\n\n continue;\n\n }\n\n\n\n self.nodes.update(index,IRNode::Gate(cond,check,gated));\n\n },\n\n IRNode::MultiDriver(args) => {\n\n let mut const_sum: i32 = 0;\n\n let mut filtered_args: Vec<IRArg> = args.clone();\n\n filtered_args.retain(|arg| {\n\n let arg = self.fix_const(arg);\n\n\n\n if let IRArg::Constant(n) = arg {\n\n const_sum = const_sum.wrapping_add(n);\n\n false\n\n } else {\n\n true\n\n }\n\n });\n\n\n", "file_path": "src/ir/opt/constant_folding.rs", "rank": 58, "score": 6.795342670315532 }, { "content": " }\n\n\n\n fn finish(self) -> Blueprint {\n\n Blueprint{\n\n entities: self.entities\n\n 
}\n\n }\n\n}\n\n\n\nimpl IRModule {\n\n\n\n fn get_arg_symbol_or_const(&self, arg: &IRArg) -> SymbolOrConstant {\n\n match arg {\n\n IRArg::Link(id,_) => {\n\n SymbolOrConstant::Symbol(self.out_symbols[*id as usize])\n\n },\n\n IRArg::Constant(n) => SymbolOrConstant::Constant(*n)\n\n }\n\n }\n\n\n", "file_path": "src/ir/to_blueprint.rs", "rank": 59, "score": 6.724582448700957 }, { "content": " } else {\n\n if let Expr::SubModule(name,args) = expr {\n\n self.add_submodule(module_table, constant_table, name,args,Some(&out_slots));\n\n } else {\n\n panic!(\"multi-assignment can only be used with sub-modules\");\n\n }\n\n }\n\n },\n\n _ => panic!(\"todo handle stmt {:?}\",stmt)\n\n }\n\n }\n\n\n\n fn add_node(&mut self, node: IRNode, name: String, slot: Option<u32>) -> IRArg {\n\n if let Some(slot) = slot {\n\n assert_eq!(*self.nodes.get(slot as usize),IRNode::PlaceHolder);\n\n self.nodes.set(slot as usize, node, name);\n\n IRArg::Link(slot, WireColor::None)\n\n } else {\n\n self.nodes.push(node,name);\n\n IRArg::Link(self.nodes.len() as u32 - 1, WireColor::None)\n", "file_path": "src/ir/mod.rs", "rank": 60, "score": 6.67762311111353 }, { "content": "\n\nuse crate::disjoint_set::DisjointSet;\n\n\n\nuse super::{IRModule, IRNode, IRArg};\n\n\n\n#[derive(Debug)]\n", "file_path": "src/ir/select_symbols.rs", "rank": 62, "score": 6.503717069650665 }, { "content": "use crate::lexer::{Lexer, LexToken};\n\nuse crate::common::{BinOp,UnaryOp};\n\nuse crate::symbols;\n\n\n\nuse std::iter::Peekable;\n\n\n\n#[derive(Debug)]\n\npub struct Module<'a> {\n\n pub name: &'a str,\n\n pub arg_names: Vec<&'a str>,\n\n pub stmts: Vec<Statement<'a>>,\n\n pub arg_types: Vec<Option<u32>>,\n\n pub ret_types: Option<Vec<Option<u32>>>\n\n}\n\n\n\npub enum ParseItem<'a> {\n\n Module(Module<'a>),\n\n Constant(&'a str,i64)\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 63, "score": 6.48503821009645 }, { "content": "\n\nuse super::super::{IRModule, IRNode, IRArg};\n\n\n", "file_path": 
"src/ir/opt/tree_prune.rs", "rank": 64, "score": 6.432349107678195 }, { "content": "use std::str::FromStr;\n\n\n\n#[derive(Debug,PartialEq,Clone,Copy)]\n\npub enum LexToken<'a> {\n\n Ident(&'a str),\n\n Symbol(&'a str),\n\n Number(i64),\n\n\n\n KeyMod,\n\n KeyOutput,\n\n KeyLet,\n\n KeyConst,\n\n KeyMatch,\n\n\n\n OpAdd,\n\n OpSub,\n\n OpMul,\n\n OpDiv,\n\n OpMod,\n\n OpPower,\n", "file_path": "src/lexer.rs", "rank": 65, "score": 6.312881820298012 }, { "content": "\n\n fn add_constant(&mut self, pos: (f32,f32), symbol: u32, count: i32) -> usize {\n\n let id = self.entities.len()+1;\n\n let signal = signal_from_symbol_index(symbol);\n\n self.entities.push(Entity{\n\n entity_number: id as u32,\n\n name: \"constant-combinator\".to_owned(),\n\n position: make_pos(pos),\n\n direction: 4,\n\n\n\n connections: Some(HashMap::new()),\n\n control_behavior: ControlBehavior{\n\n arithmetic_conditions: None,\n\n decider_conditions: None,\n\n filters: Some(vec!(Filter{index:1,count,signal}))\n\n }\n\n });\n\n id\n\n }\n\n\n", "file_path": "src/ir/to_blueprint.rs", "rank": 68, "score": 6.193085166663373 }, { "content": " }\n\n }\n\n\n\n fn next(&mut self) -> LexToken<'a> {\n\n self.lexer.next().expect(\"Expected token, found EOF.\")\n\n }\n\n\n\n fn peek(&mut self) -> LexToken<'a> {\n\n *self.lexer.peek().expect(\"Expected token, found EOF.\")\n\n }\n\n\n\n fn is_eof(&mut self) -> bool {\n\n self.lexer.peek().is_none() \n\n }\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 69, "score": 5.960216298611259 }, { "content": " let arg = IRArg::Link(self.nodes.len() as u32 - 1, WireColor::None);\n\n\n\n if self.bindings.insert((*var_name).to_owned(), arg ).is_some() {\n\n panic!(\"Module '{}': Duplicate variable binding '{}'.\",self.name,var_name);\n\n }\n\n }\n\n },\n\n _ => ()\n\n }\n\n }\n\n\n\n fn add_stmt(&mut self, stmt: &Statement, module_table: &HashMap<String, IRModule>, constant_table: &HashMap<String,i64>) {\n\n if self.outputs_set {\n\n panic!(\"Module '{}': No statements 
may appear after output(...).\",self.name);\n\n }\n\n match stmt {\n\n Statement::Output(out_exprs) => {\n\n if let Some(ret_types) = &self.ret_types {\n\n if out_exprs.len() != ret_types.len() {\n\n panic!(\"The number of returned values does not match the type signature.\");\n", "file_path": "src/ir/mod.rs", "rank": 70, "score": 5.953609140967963 }, { "content": " } else {\n\n panic!(\"constants can currently only be literal values, sorry\")\n\n }\n\n continue;\n\n }\n\n\n\n parser.take(LexToken::KeyMod);\n\n let mod_name = parser.take_ident();\n\n let mut mod_args = Vec::new();\n\n let mut mod_stmts = Vec::new();\n\n \n\n // Arguments\n\n let mut arg_types = Vec::new();\n\n parser.take(LexToken::OpParenOpen);\n\n if parser.peek() != LexToken::OpParenClose {\n\n loop {\n\n mod_args.push(parser.take_ident());\n\n let ty = if parser.peek() == LexToken::OpColon {\n\n parser.take(LexToken::OpColon);\n\n parser.take_symbol()\n", "file_path": "src/parser.rs", "rank": 71, "score": 5.8431468235529405 }, { "content": " }),\n\n decider_conditions: None,\n\n filters: None\n\n }\n\n });\n\n id\n\n }\n\n\n\n fn add_decider(&mut self, pos: (f32,f32), comparator: String, lhs_symbol: u32, rhs: SymbolOrConstant, out_symbol: u32, copy_count_from_input: bool) -> usize {\n\n let id = self.entities.len()+1;\n\n\n\n let first_signal = signal_from_symbol_index(lhs_symbol);\n\n let (second_signal,constant) = rhs.unpack();\n\n let output_signal = Some(signal_from_symbol_index(out_symbol));\n\n\n\n self.entities.push(Entity{\n\n entity_number: id as u32,\n\n name: \"decider-combinator\".to_owned(),\n\n position: make_pos(pos),\n\n direction: 4,\n", "file_path": "src/ir/to_blueprint.rs", "rank": 72, "score": 5.803790223066239 }, { "content": " (x, base_y + offset_y)\n\n })\n\n }\n\n\n\n fn add_args(&mut self, arg_names: &Vec<&str>) {\n\n self.port_count += arg_names.len() as i32;\n\n for (i,arg_name) in arg_names.iter().enumerate() {\n\n self.nodes.push(IRNode::Input(i as 
u32),format!(\"arg {}\",self.name));\n\n if self.bindings.insert((*arg_name).to_owned(), IRArg::Link(i as u32,WireColor::None) ).is_some() {\n\n panic!(\"Module '{}': Duplicate argument '{}'.\",self.name,arg_name);\n\n }\n\n }\n\n }\n\n\n\n /// Run in its own pass before add_stmt\n\n fn add_stmt_bindings(&mut self, stmt: &Statement) {\n\n match stmt {\n\n Statement::VarBinding(idents,_expr) => {\n\n for var_name in idents {\n\n self.nodes.push(IRNode::PlaceHolder,\"placeholder\".to_owned());\n", "file_path": "src/ir/mod.rs", "rank": 74, "score": 5.1479966720690555 }, { "content": "pub enum UnaryOp {\n\n Negate, // - compiled as 0-x\n\n Plus, // + compiled as x+0 and used for buffering and fixing constraints\n\n NotBitwise, // ~ compiled as x^-1\n\n NotLogical, // ! compiled as x==0\n\n}\n\n\n\n#[derive(Debug,Hash,PartialEq,Eq,Clone)]\n\npub enum ConnectType {\n\n In,\n\n Out\n\n}\n", "file_path": "src/common.rs", "rank": 75, "score": 5.075563094453411 }, { "content": " let dbg_b = self.nodes.get_debug(*b as usize);\n\n println!(\"First: ({}) != ({})\",dbg_a,dbg_b);\n\n panic!(\"Conflicting equality and inequality constraints.\");\n\n }\n\n if pinned_symbols[set_a] && pinned_symbols[set_b] {\n\n if self.out_symbols[set_a] == self.out_symbols[set_b] {\n\n panic!(\"Conflicting inequality and type signature constraints.\");\n\n }\n\n }\n\n },\n\n _ => ()\n\n }\n\n }\n\n\n\n // Fix inequalities\n\n let mut pass_num = 1;\n\n let mut errors = 0;\n\n loop {\n\n for cons in &constraints {\n\n match cons {\n", "file_path": "src/ir/select_symbols.rs", "rank": 76, "score": 4.889584353241874 }, { "content": " parser.take(LexToken::OpParenClose);\n\n Some(vec!())\n\n } else {\n\n let mut result = vec!(parser.take_symbol());\n\n while !parser.take_comma_or_close_paren() {\n\n result.push(parser.take_symbol());\n\n }\n\n Some(result)\n\n }\n\n } else {\n\n Some(vec!(parser.take_symbol()))\n\n }\n\n } else {\n\n None\n\n };\n\n\n\n parser.take(LexToken::OpBraceOpen);\n\n loop {\n\n 
let stmt = parse_stmt(&mut parser);\n\n match stmt {\n", "file_path": "src/parser.rs", "rank": 77, "score": 4.714512824561779 }, { "content": " let prelude_source = assets::get_asset_string(\"std/prelude.cdl\").expect(\"failed to load prelude\");\n\n let prelude_parsed = crate::parser::parse(&prelude_source);\n\n ir::build_ir(prelude_parsed, settings.clone(), &mut modules, &mut constants);\n\n }\n\n\n\n let source = if let Some(rom_offset) = options.rom_offset {\n\n let bytes = std::fs::read(options.filename).expect(\"failed to read file\");\n\n crate::rom_generator::make_rom(&bytes,rom_offset)\n\n } else {\n\n std::fs::read_to_string(options.filename).expect(\"failed to read file\")\n\n };\n\n\n\n // Load main source file\n\n {\n\n let parse_results = crate::parser::parse(&source);\n\n ir::build_ir(parse_results, settings.clone(), &mut modules, &mut constants);\n\n }\n\n\n\n if let Some(ir_mod) = modules.get_mut(&settings.main_mod_name) {\n\n ir_mod.select_colors();\n", "file_path": "src/main.rs", "rank": 81, "score": 4.3036839309747865 }, { "content": " } else {\n\n panic!(\"Expected ident, found {:?}.\",present);\n\n }\n\n }\n\n\n\n fn take_symbol(&mut self) -> Option<u32> {\n\n let present = self.next();\n\n if let LexToken::Symbol(symbol_str) = present {\n\n Some(symbols::symbol_index_from_identifier(symbol_str))\n\n } else {\n\n panic!(\"Expected symbol, found {:?}.\",present);\n\n }\n\n }\n\n\n\n fn take_comma_or_close_paren(&mut self) -> bool {\n\n let present = self.next();\n\n match present {\n\n LexToken::OpComma => false,\n\n LexToken::OpParenClose => true,\n\n _ => panic!(\"Expected comma or close paren, found {:?}.\",present)\n", "file_path": "src/parser.rs", "rank": 82, "score": 4.254212794163062 }, { "content": " self.add_node(IRNode::MultiDriver(vec!(result)),format!(\"submod result\"), desired_slot)\n\n } else {\n\n result\n\n }\n\n } else {\n\n panic!(\"Module '{}': Submodule '{}' is not defined.\",self.name,name);\n\n }*/\n\n }\n\n }\n\n 
}\n\n\n\n fn add_submodule(&mut self, module_table: &HashMap<String, IRModule>, constant_table: &HashMap<String, i64>,\n\n mod_name: &str, inputs: &Vec<Expr>, out_slots: Option<&Vec<u32>>\n\n ) -> Option<IRArg> {\n\n\n\n let args: Vec<_> = inputs.iter().map(|arg| self.add_expr(arg, module_table, constant_table, None)).collect();\n\n if let Some(submod) = module_table.get(mod_name) {\n\n let offset = self.nodes.len() as u32;\n\n let mut results: Vec<Option<IRArg>> = Vec::new();\n\n for (node,debug_name) in submod.nodes.iter_debug() {\n", "file_path": "src/ir/mod.rs", "rank": 83, "score": 4.199296917648437 }, { "content": "use serde::{Serialize, Deserialize};\n\n\n\nuse once_cell::sync::OnceCell;\n\nuse std::collections::HashMap;\n\nuse crate::blueprint::Signal;\n\n\n", "file_path": "src/symbols.rs", "rank": 84, "score": 4.186657297542288 }, { "content": " add_arg(lhs, &mut saved, &mut stack);\n\n add_arg(rhs, &mut saved, &mut stack);\n\n },\n\n IRNode::BinOpSame(arg,_) => {\n\n add_arg(arg, &mut saved, &mut stack);\n\n },\n\n IRNode::MultiDriver(args) => {\n\n for arg in args {\n\n add_arg(arg, &mut saved, &mut stack);\n\n }\n\n },\n\n IRNode::Gate(arg1,_,arg2) => {\n\n add_arg(arg1, &mut saved, &mut stack);\n\n add_arg(arg2, &mut saved, &mut stack);\n\n },\n\n IRNode::BinOpCmpGate(arg1,_,_,arg2) => {\n\n add_arg(arg1, &mut saved, &mut stack);\n\n add_arg(arg2, &mut saved, &mut stack);\n\n },\n\n _ => panic!(\"todo prune {:?}\",node)\n", "file_path": "src/ir/opt/tree_prune.rs", "rank": 86, "score": 4.072115237668893 }, { "content": " for arg in args {\n\n if self.detect_short_cycles(base_index,arg) {\n\n return true;\n\n }\n\n }\n\n }\n\n }\n\n false\n\n }\n\n\n\n /// IIRC the point of this is to add nodes close to a specific index, to prevent spaghetti\n\n fn add_node_at(&mut self, i: usize, node: IRNode, name: String) -> IRArg {\n\n let mut offset = 0;\n\n while offset >= i && i + offset < self.nodes.len() {\n\n if let Some(IRNode::Removed) = self.nodes.try_get(i 
+ offset) {\n\n self.nodes.set(i+offset, node, name);\n\n return IRArg::Link((i + offset) as u32,WireColor::None);\n\n }\n\n if offset >= i {\n\n if let Some(IRNode::Removed) = self.nodes.try_get(i - offset) {\n", "file_path": "src/ir/opt/fix_nodes.rs", "rank": 87, "score": 4.062618307900337 }, { "content": " }\n\n while 0.5 + x_pole_end as f32 * 18.0 + 9.0 < x_max {\n\n x_pole_end += 1;\n\n }\n\n while 0.5 + y_pole_end as f32 * 18.0 + 9.0 < y_max {\n\n y_pole_end += 1;\n\n }\n\n\n\n for y in y_pole_start..=y_pole_end {\n\n for x in x_pole_start..=x_pole_end {\n\n builder.add_pole((0.5+18.0*x as f32,0.5+18.0*y as f32), true);\n\n }\n\n }\n\n\n\n builder.finish()\n\n }\n\n}\n", "file_path": "src/ir/to_blueprint.rs", "rank": 88, "score": 4.000575006857273 }, { "content": "use std::{collections::HashMap, io::prelude::*};\n\nuse flate2::{Compression, read::ZlibDecoder, write::ZlibEncoder};\n\nuse serde::{Serialize, Deserialize};\n\n\n\n#[allow(unused)]\n", "file_path": "src/blueprint.rs", "rank": 89, "score": 3.8393796350882567 }, { "content": " } else if rhs.is_link() {\n\n self.nodes.update(i,IRNode::BinOp(rhs.clone(),op.flip(),lhs.clone()));\n\n } else if let IRArg::Constant(n) = lhs {\n\n let op = op.clone();\n\n let rhs = rhs.clone();\n\n\n\n let lhs_const = self.add_node_at(i,IRNode::Constant(n),format!(\"const {}\",n));\n\n self.nodes.update(i, IRNode::BinOp(lhs_const,op,rhs));\n\n }\n\n }\n\n },\n\n // 3. 
fix multi-driver (constant args not permitted)\n\n IRNode::MultiDriver(args) => {\n\n let mut args = args.clone();\n\n for j in 0..args.len() {\n\n if let IRArg::Constant(x) = args[j] {\n\n args[j] = self.add_node_at(i, IRNode::Constant(x),format!(\"merged md const {}\",x))\n\n }\n\n }\n\n self.nodes.update(i,IRNode::MultiDriver(args));\n", "file_path": "src/ir/opt/fix_nodes.rs", "rank": 90, "score": 3.6974884577098632 }, { "content": " }\n\n\n\n pub fn get_mut(&mut self, index: usize) -> &mut IRNode {\n\n return &mut self.nodes[index];\n\n }\n\n\n\n pub fn get_debug(&self, index: usize) -> &str {\n\n return &self.debug_names[index];\n\n }\n\n\n\n pub fn try_get(&self, index: usize) -> Option<&IRNode> {\n\n return self.nodes.get(index);\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.nodes.len()\n\n }\n\n\n\n pub fn push(&mut self, node: IRNode, name: String) {\n\n self.nodes.push(node);\n", "file_path": "src/ir/mod.rs", "rank": 91, "score": 3.604387754658695 }, { "content": " }\n\n }\n\n\n\n fn add_expr(&mut self, expr: &Expr, module_table: &HashMap<String, IRModule>, constant_table: &HashMap<String,i64>, desired_slot: Option<u32>) -> IRArg {\n\n let expr_string = format!(\"{:?}\",expr);\n\n match expr {\n\n Expr::Ident(name) => {\n\n if let Some(arg) = self.bindings.get(*name) {\n\n if desired_slot.is_some() {\n\n // hack to make assignments work properly\n\n let arg = arg.clone();\n\n return self.add_node(IRNode::MultiDriver(vec!(arg)),expr_string,desired_slot);\n\n }\n\n arg.clone()\n\n } else if let Some(num) = constant_table.get(*name) {\n\n return self.add_node(IRNode::Constant(narrow_constant(*num)), expr_string, desired_slot);\n\n } else {\n\n panic!(\"Module '{}': '{}' is not defined.\",self.name,name);\n\n }\n\n },\n", "file_path": "src/ir/mod.rs", "rank": 92, "score": 3.572091553625515 }, { "content": "pub struct Filter {\n\n pub signal: Signal,\n\n pub count: i32,\n\n pub index: u32\n\n}\n\n\n\n#[derive(Debug,Serialize,Deserialize)]\n\npub struct 
Position{\n\n pub x: f32,\n\n pub y: f32\n\n}\n", "file_path": "src/blueprint.rs", "rank": 93, "score": 3.5290250071443956 }, { "content": " \n\n let num = i64::from_str(token_str).expect(\"failed to parse int\");\n\n \n\n Some(LexToken::Number(num))\n\n }\n\n } else if c.is_ascii_whitespace() {\n\n // TODO line handling?\n\n // Probably use another iterator wrapper if we want that?\n\n // skip\n\n continue;\n\n } else if c == '$' {\n\n let parse_str = self.chars.as_str();\n\n\n\n let token_end = parse_str.find(|c: char| !c.is_ascii_alphanumeric() && c != '_')\n\n .unwrap_or(parse_str.len());\n\n let token_str = &parse_str[0..token_end];\n\n let remainder_str = &parse_str[token_end..];\n\n\n\n self.chars = remainder_str.chars();\n\n\n", "file_path": "src/lexer.rs", "rank": 94, "score": 3.515358497195106 }, { "content": " println!(\"LINKS:\");\n\n for link in &self.links {\n\n println!(\" {:?}\",link);\n\n }\n\n }\n\n\n\n fn get_true_pos(&self, id: u32) -> Option<(f32,f32)> {\n\n self.grid.get_pos_for(id).map(|pos|{\n\n let x = pos.0 as f32;\n\n let base_y = pos.1 as f32 * 2.0;\n\n let node = self.nodes.get(id as usize);\n\n let offset_y = match node {\n\n IRNode::BinOp(..) |\n\n IRNode::BinOpCmpGate(..) |\n\n IRNode::BinOpSame(..) => 0.5,\n\n IRNode::Input(..) |\n\n IRNode::Output(..) |\n\n IRNode::Constant(..) 
=> 0.0,\n\n _ => panic!(\"todo offset {:?}\",node)\n\n };\n", "file_path": "src/ir/mod.rs", "rank": 95, "score": 3.420413601289721 }, { "content": " pub cat: String,\n\n pub name: String\n\n}\n\n\n\n#[derive(Debug,Serialize,Deserialize,Default)]\n\npub struct Connections {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub red: Option<Vec<Connection>>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub green: Option<Vec<Connection>>,\n\n}\n\n\n\n#[derive(Debug,Serialize,Deserialize)]\n\npub struct Connection {\n\n pub entity_id: u32,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub circuit_id: Option<u32> // input = 1, output = 2\n\n}\n\n\n\n#[derive(Debug,Serialize,Deserialize)]\n", "file_path": "src/blueprint.rs", "rank": 96, "score": 3.413767919922786 }, { "content": " }\n\n } else {\n\n panic!(\"Module '{}': Submodule '{}' is not defined.\",self.name,mod_name);\n\n }\n\n }\n\n\n\n fn add_node_from_submodule(&mut self, node: &IRNode, old_name: &str, offset: u32, inputs: &Vec<IRArg>) -> Option<(u32,IRArg)> {\n\n \n\n let offset_arg = |arg: &IRArg| {\n\n if let IRArg::Link(n,c) = arg {\n\n IRArg::Link(*n + offset, *c)\n\n } else {\n\n arg.clone()\n\n }\n\n };\n\n\n\n let adjusted = match node {\n\n // HACK: Input abuses multi-drivers to proxy signals.\n\n IRNode::Input(n) => {\n\n let arg = &inputs[*n as usize];\n", "file_path": "src/ir/mod.rs", "rank": 97, "score": 3.3874103406535503 }, { "content": " for i in 0..self.nodes.len() {\n\n let node = self.nodes.get(i);\n\n if let IRNode::MultiDriver(args) = node {\n\n for arg in args {\n\n if self.detect_short_cycles(i,arg) {\n\n panic!(\"Short cycle detected. 
TODO add more useful information.\");\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn detect_short_cycles(&self, base_index: usize, arg: &IRArg) -> bool {\n\n if let IRArg::Link(target_index,_) = arg {\n\n let target_index = *target_index as usize;\n\n if target_index == base_index {\n\n return true;\n\n }\n\n let node = self.nodes.get(target_index);\n\n if let IRNode::MultiDriver(args) = node {\n", "file_path": "src/ir/opt/fix_nodes.rs", "rank": 98, "score": 3.383815084154411 } ]
Rust
summarize/src/main.rs
bugadani/measureme
03a352fca3e9bcf7faeb16218ba5b61345bddd97
//! `summarize` — command-line tool for analyzing `measureme` profiling data.
//!
//! Three subcommands: `summarize` (tabulate a single profile), `diff`
//! (compare two profiles), and `aggregate` (combine several profiles).

#[macro_use]
extern crate prettytable;

use analyzeme::ProfilingData;
use std::error::Error;
use std::fs::File;
use std::io::{BufReader, BufWriter};
use std::{path::PathBuf, time::Duration};

use prettytable::{Cell, Row, Table};
use serde::Serialize;
use structopt::StructOpt;

mod aggregate;
mod analysis;
mod diff;
mod query_data;
mod signed_duration;

use query_data::Results;

// NOTE: no `///` doc comments are added to these StructOpt types or their
// fields on purpose — structopt turns doc comments into CLI help text, which
// would change the program's observable output.

// Options for the `aggregate` subcommand.
#[derive(StructOpt, Debug)]
struct AggregateOpt {
    files: Vec<PathBuf>,
}

// Options for the `diff` subcommand.
#[derive(StructOpt, Debug)]
struct DiffOpt {
    base: PathBuf,
    change: PathBuf,

    #[structopt(short = "e", long = "exclude")]
    exclude: Vec<String>,

    #[structopt(long = "json")]
    json: bool,
}

// Options for the `summarize` subcommand.
#[derive(StructOpt, Debug)]
struct SummarizeOpt {
    file_prefix: PathBuf,

    #[structopt(long = "json")]
    json: bool,

    #[structopt(short = "pa", long = "percent-above", default_value = "0.0")]
    percent_above: f64,
}

#[derive(StructOpt, Debug)]
enum Opt {
    #[structopt(name = "aggregate")]
    Aggregate(AggregateOpt),

    #[structopt(name = "diff")]
    Diff(DiffOpt),

    #[structopt(name = "summarize")]
    Summarize(SummarizeOpt),
}

/// Loads analysis results either from a pre-computed JSON dump or by
/// analyzing raw profiling data.
///
/// BUGFIX: the original check was `file.ends_with("json")`, but
/// `Path::ends_with` compares whole path *components*, so it only matched a
/// file literally named `json`. Checking the extension recognizes e.g.
/// `results.json` correctly.
fn process_results(file: &PathBuf) -> Result<Results, Box<dyn Error + Send + Sync>> {
    if file.extension().map_or(false, |ext| ext == "json") {
        let reader = BufReader::new(File::open(&file)?);

        let results: Results = serde_json::from_reader(reader)?;
        Ok(results)
    } else {
        let data = ProfilingData::new(&file)?;
        Ok(analysis::perform_analysis(data))
    }
}

/// Serializes `results` as JSON next to `file`, replacing its extension
/// with `.json`.
fn write_results_json(
    file: &PathBuf,
    results: impl Serialize,
) -> Result<(), Box<dyn Error + Send + Sync>> {
    let file = BufWriter::new(File::create(file.with_extension("json"))?);
    serde_json::to_writer(file, &results)?;
    Ok(())
}

/// `aggregate` subcommand: load every profile and combine them.
fn aggregate(opt: AggregateOpt) -> Result<(), Box<dyn Error + Send + Sync>> {
    let profiles = opt
        .files
        .into_iter()
        .map(|file| ProfilingData::new(&file))
        .collect::<Result<Vec<_>, _>>()?;

    aggregate::aggregate_profiles(profiles);

    Ok(())
}

/// `diff` subcommand: print (or dump as JSON) the per-query differences
/// between a base profile and a changed profile.
fn diff(opt: DiffOpt) -> Result<(), Box<dyn Error + Send + Sync>> {
    let base = process_results(&opt.base)?;
    let change = process_results(&opt.change)?;

    let results = diff::calculate_diff(base, change);

    if opt.json {
        write_results_json(&opt.change, results)?;
        return Ok(());
    }

    let mut table = Table::new();

    table.add_row(row!(
        "Item",
        "Self Time",
        "Self Time Change",
        "Time",
        "Time Change",
        "Item count",
        "Cache hits",
        "Blocked time",
        "Incremental load time"
    ));

    for query_data in results.query_data {
        // Skip rows matching any of the user-supplied exclusion substrings.
        let exclude = opt.exclude.iter().any(|e| query_data.label.contains(e));
        if exclude {
            continue;
        }

        table.add_row(row![
            query_data.label,
            format!("{:.2?}", query_data.self_time),
            format!("{:+.2}%", query_data.self_time_change),
            format!("{:.2?}", query_data.time),
            format!("{:+.2}%", query_data.time_change),
            format!("{:+}", query_data.invocation_count),
            format!("{:+}", query_data.number_of_cache_hits),
            format!("{:.2?}", query_data.blocked_time),
            format!("{:.2?}", query_data.incremental_load_time),
        ]);
    }

    table.printstd();

    println!("Total cpu time: {:?}", results.total_time);

    Ok(())
}

/// `summarize` subcommand: print a table of per-query timings sorted by
/// self-time, optionally filtered to rows whose self-time is at least
/// `--percent-above` percent of the total.
fn summarize(opt: SummarizeOpt) -> Result<(), Box<dyn Error + Send + Sync>> {
    let data = ProfilingData::new(&opt.file_prefix)?;

    let mut results = analysis::perform_analysis(data);

    if opt.json {
        write_results_json(&opt.file_prefix, &results)?;
        return Ok(());
    }

    // Validate the user-provided threshold before using it.
    let percent_above = opt.percent_above;
    if percent_above > 100.0 {
        eprintln!("Percentage of total time cannot be more than 100.0");
        std::process::exit(1);
    } else if percent_above < 0.0 {
        eprintln!("Percentage of total time cannot be less than 0.0");
        std::process::exit(1);
    }

    // Sort descending by self-time.
    results
        .query_data
        .sort_by(|l, r| r.self_time.cmp(&l.self_time));

    let mut table = Table::new();

    // Only show the optional columns if at least one row has data for them.
    let mut has_cache_hits = false;
    let mut has_blocked_time = false;
    let mut has_incremental_load_time = false;

    let duration_zero = Duration::from_secs(0);
    for r in &results.query_data {
        if r.number_of_cache_hits > 0 {
            has_cache_hits = true;
        }
        if r.blocked_time > duration_zero {
            has_blocked_time = true;
        }
        if r.incremental_load_time > duration_zero {
            has_incremental_load_time = true;
        }
        // All three flags set — no need to scan further.
        if has_cache_hits && has_blocked_time && has_incremental_load_time {
            break;
        }
    }

    let columns = &[
        ("Item", true),
        ("Self time", true),
        ("% of total time", true),
        ("Time", true),
        ("Item count", true),
        ("Cache hits", has_cache_hits),
        ("Blocked time", has_blocked_time),
        ("Incremental load time", has_incremental_load_time),
    ];

    // Keeps only the cells whose `show` flag is set.
    fn filter_cells(cells: &[(&str, bool)]) -> Vec<Cell> {
        cells
            .iter()
            .filter(|(_, show)| *show)
            .map(|(cell, _)| Cell::new(cell))
            .collect()
    }

    table.add_row(Row::new(filter_cells(columns)));

    let total_time = results.total_time.as_nanos() as f64;
    let mut percent_total_time: f64 = 0.0;

    for query_data in results.query_data {
        let curr_percent = (query_data.self_time.as_nanos() as f64) / total_time * 100.0;
        // Rows are sorted by self-time, so once one row falls below the
        // threshold every following row does too.
        if curr_percent < percent_above {
            break;
        }

        percent_total_time += curr_percent;

        table.add_row(Row::new(filter_cells(&[
            (&query_data.label, true),
            (&format!("{:.2?}", query_data.self_time), true),
            (&format!("{:.3}", curr_percent), true),
            (&format!("{:.2?}", query_data.time), true),
            (&format!("{}", query_data.invocation_count), true),
            (
                &format!("{}", query_data.number_of_cache_hits),
                has_cache_hits,
            ),
            (
                &format!("{:.2?}", query_data.blocked_time),
                has_blocked_time,
            ),
            (
                &format!("{:.2?}", query_data.incremental_load_time),
                has_incremental_load_time,
            ),
        ])));
    }

    table.printstd();

    println!("Total cpu time: {:?}", results.total_time);

    if percent_above != 0.0 {
        println!(
            "Filtered results account for {:.3}% of total time.",
            percent_total_time
        );
    }

    Ok(())
}

fn main() -> Result<(), Box<dyn Error + Send + Sync>> {
    let opt = Opt::from_args();

    match opt {
        Opt::Summarize(opt) => summarize(opt),
        Opt::Diff(opt) => diff(opt),
        Opt::Aggregate(opt) => aggregate(opt),
    }
}
#[macro_use] extern crate prettytable; use analyzeme::ProfilingData; use std::error::Error; use std::fs::File; use std::io::{BufReader, BufWriter}; use std::{path::PathBuf, time::Duration}; use prettytable::{Cell, Row, Table}; use serde::Serialize; use structopt::StructOpt; mod aggregate; mod analysis; mod diff; mod query_data; mod signed_duration; use query_data::Results; #[derive(StructOpt, Debug)] struct AggregateOpt { files: Vec<PathBuf>, } #[derive(StructOpt, Debug)] struct DiffOpt { base: PathBuf, change: PathBuf, #[structopt(short = "e", long = "exclude")] exclude: Vec<String>, #[structopt(long = "json")] json: bool, } #[derive(StructOpt, Debug)] struct SummarizeOpt { file_prefix: PathBuf, #[structopt(long = "json")] json: bool, #[structopt(short = "pa", long = "percent-above", default_value = "0.0")] percent_above: f64, } #[derive(StructOpt, Debug)] enum Opt { #[structopt(name = "aggregate")] Aggregate(AggregateOpt), #[structopt(name = "diff")] Diff(DiffOpt), #[structopt(name = "summarize")] Summarize(SummarizeOpt), } fn process_results(file: &PathBuf) -> Result<Results, Box<dyn Error + Send + Sync>> { if file.ends_with("json") { let reader = BufReader::new(File::open(&file)?); let results: Results = serde_json::from_reader(reader)?; Ok(results) } else { let data = ProfilingData::new(&file)?; Ok(analysis::perform_analysis(data)) } }
fn aggregate(opt: AggregateOpt) -> Result<(), Box<dyn Error + Send + Sync>> { let profiles = opt .files .into_iter() .map(|file| ProfilingData::new(&file)) .collect::<Result<Vec<_>, _>>()?; aggregate::aggregate_profiles(profiles); Ok(()) } fn diff(opt: DiffOpt) -> Result<(), Box<dyn Error + Send + Sync>> { let base = process_results(&opt.base)?; let change = process_results(&opt.change)?; let results = diff::calculate_diff(base, change); if opt.json { write_results_json(&opt.change, results)?; return Ok(()); } let mut table = Table::new(); table.add_row(row!( "Item", "Self Time", "Self Time Change", "Time", "Time Change", "Item count", "Cache hits", "Blocked time", "Incremental load time" )); for query_data in results.query_data { let exclude = opt.exclude.iter().any(|e| query_data.label.contains(e)); if exclude { continue; } table.add_row(row![ query_data.label, format!("{:.2?}", query_data.self_time), format!("{:+.2}%", query_data.self_time_change), format!("{:.2?}", query_data.time), format!("{:+.2}%", query_data.time_change), format!("{:+}", query_data.invocation_count), format!("{:+}", query_data.number_of_cache_hits), format!("{:.2?}", query_data.blocked_time), format!("{:.2?}", query_data.incremental_load_time), ]); } table.printstd(); println!("Total cpu time: {:?}", results.total_time); Ok(()) } fn summarize(opt: SummarizeOpt) -> Result<(), Box<dyn Error + Send + Sync>> { let data = ProfilingData::new(&opt.file_prefix)?; let mut results = analysis::perform_analysis(data); if opt.json { write_results_json(&opt.file_prefix, &results)?; return Ok(()); } let percent_above = opt.percent_above; if percent_above > 100.0 { eprintln!("Percentage of total time cannot be more than 100.0"); std::process::exit(1); } else if percent_above < 0.0 { eprintln!("Percentage of total time cannot be less than 0.0"); std::process::exit(1); } results .query_data .sort_by(|l, r| r.self_time.cmp(&l.self_time)); let mut table = Table::new(); let mut has_cache_hits = false; let mut 
has_blocked_time = false; let mut has_incremental_load_time = false; let duration_zero = Duration::from_secs(0); for r in &results.query_data { if r.number_of_cache_hits > 0 { has_cache_hits = true; } if r.blocked_time > duration_zero { has_blocked_time = true; } if r.incremental_load_time > duration_zero { has_incremental_load_time = true; } if has_cache_hits && has_blocked_time && has_incremental_load_time { break; } } let columns = &[ ("Item", true), ("Self time", true), ("% of total time", true), ("Time", true), ("Item count", true), ("Cache hits", has_cache_hits), ("Blocked time", has_blocked_time), ("Incremental load time", has_incremental_load_time), ]; fn filter_cells(cells: &[(&str, bool)]) -> Vec<Cell> { cells .iter() .filter(|(_, show)| *show) .map(|(cell, _)| Cell::new(cell)) .collect() } table.add_row(Row::new(filter_cells(columns))); let total_time = results.total_time.as_nanos() as f64; let mut percent_total_time: f64 = 0.0; for query_data in results.query_data { let curr_percent = (query_data.self_time.as_nanos() as f64) / total_time * 100.0; if curr_percent < percent_above { break; } percent_total_time = percent_total_time + curr_percent; table.add_row(Row::new(filter_cells(&[ (&query_data.label, true), (&format!("{:.2?}", query_data.self_time), true), (&format!("{:.3}", curr_percent), true), (&format!("{:.2?}", query_data.time), true), (&format!("{}", query_data.invocation_count), true), ( &format!("{}", query_data.number_of_cache_hits), has_cache_hits, ), ( &format!("{:.2?}", query_data.blocked_time), has_blocked_time, ), ( &format!("{:.2?}", query_data.incremental_load_time), has_incremental_load_time, ), ]))); } table.printstd(); println!("Total cpu time: {:?}", results.total_time); if percent_above != 0.0 { println!( "Filtered results account for {:.3}% of total time.", percent_total_time ); } Ok(()) } fn main() -> Result<(), Box<dyn Error + Send + Sync>> { let opt = Opt::from_args(); match opt { Opt::Summarize(opt) => summarize(opt), 
Opt::Diff(opt) => diff(opt), Opt::Aggregate(opt) => aggregate(opt), } }
fn write_results_json( file: &PathBuf, results: impl Serialize, ) -> Result<(), Box<dyn Error + Send + Sync>> { let file = BufWriter::new(File::create(file.with_extension("json"))?); serde_json::to_writer(file, &results)?; Ok(()) }
function_block-full_function
[ { "content": "fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {\n\n let opt = Opt::from_args();\n\n\n\n let chrome_file = BufWriter::new(fs::File::create(\"chrome_profiler.json\")?);\n\n let mut serializer = serde_json::Serializer::new(chrome_file);\n\n\n\n let mut seq = serializer.serialize_seq(None)?;\n\n\n\n let dir_paths = file_prefixes_in_dir(&opt)?;\n\n\n\n for file_prefix in opt.file_prefix.iter().chain(dir_paths.iter()) {\n\n let data = ProfilingData::new(&file_prefix)?;\n\n\n\n let thread_to_collapsed_thread = generate_thread_to_collapsed_thread_mapping(&opt, &data);\n\n\n\n // Chrome does not seem to like how many QueryCacheHit events we generate\n\n // only handle Interval events for now\n\n for event in data.iter().filter(|e| !e.timestamp.is_instant()) {\n\n let duration = event.duration().unwrap();\n\n if let Some(minimum_duration) = opt.minimum_duration {\n", "file_path": "crox/src/main.rs", "rank": 5, "score": 241848.8706183175 }, { "content": "fn main() -> Result<(), Box<dyn Error + Send + Sync>> {\n\n let opt = Opt::from_args();\n\n\n\n let data = ProfilingData::new(&opt.file_prefix)?;\n\n\n\n if let Some(global_start_time) = data.iter().map(|e| e.timestamp.start()).min() {\n\n for event in data.iter() {\n\n if let Some(thread_id) = opt.thread_id {\n\n if event.thread_id != thread_id {\n\n continue;\n\n }\n\n }\n\n print_event(&event.to_event(), global_start_time);\n\n }\n\n } else {\n\n eprintln!(\"No events.\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "mmview/src/main.rs", "rank": 6, "score": 241267.7671445525 }, { "content": "fn main() -> Result<(), Box<dyn Error + Send + Sync>> {\n\n let opt = Opt::from_args();\n\n\n\n let profiling_data = ProfilingData::new(&opt.file_prefix)?;\n\n\n\n let recorded_stacks = collapse_stacks(&profiling_data)\n\n .iter()\n\n .map(|(unique_stack, count)| format!(\"{} {}\", unique_stack, count))\n\n .collect::<Vec<_>>();\n\n\n\n let file = BufWriter::new(File::create(\"rustc.svg\")?);\n\n 
let mut flamegraph_options = FlamegraphOptions::default();\n\n\n\n from_lines(\n\n &mut flamegraph_options,\n\n recorded_stacks.iter().map(|s| s.as_ref()),\n\n file,\n\n )\n\n .expect(\n\n \"unable to generate a flamegraph \\\n\n from the collapsed stack data\",\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": "flamegraph/src/main.rs", "rank": 7, "score": 241267.7671445525 }, { "content": "fn percentage_change(base: Duration, change: Duration) -> f64 {\n\n let nanos = change.as_nanos() as i128 - base.as_nanos() as i128;\n\n nanos as f64 / base.as_nanos() as f64 * 100.0\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct Results {\n\n pub query_data: Vec<QueryData>,\n\n pub total_time: Duration,\n\n}\n\n\n\n// For now this is only needed for tests it seems\n\n#[cfg(test)]\n\nimpl Results {\n\n pub fn query_data_by_label(&self, label: &str) -> &QueryData {\n\n self.query_data.iter().find(|qd| qd.label == label).unwrap()\n\n }\n\n}\n", "file_path": "summarize/src/query_data.rs", "rank": 8, "score": 240642.55541320686 }, { "content": "fn main() -> Result<(), Box<dyn Error + Send + Sync>> {\n\n let opt = Opt::from_args();\n\n\n\n let profiling_data = ProfilingData::new(&opt.file_prefix)?;\n\n\n\n let recorded_stacks = collapse_stacks(&profiling_data);\n\n\n\n let mut file = BufWriter::new(File::create(\"out.stacks_folded\")?);\n\n\n\n //now that we've got all of the recorded data, print the results to the output file\n\n for (unique_stack, count) in recorded_stacks {\n\n writeln!(file, \"{} {}\", unique_stack, count)?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "stack_collapse/src/main.rs", "rank": 9, "score": 237175.15632270233 }, { "content": "pub fn calculate_diff(base: Results, change: Results) -> DiffResults {\n\n #[inline]\n\n fn sd(d: Duration) -> SignedDuration {\n\n d.into()\n\n }\n\n\n\n let base_data = build_query_lookup(&base.query_data);\n\n let change_data = build_query_lookup(&change.query_data);\n\n\n\n let mut all_labels = 
FxHashSet::with_capacity_and_hasher(\n\n base.query_data.len() + change.query_data.len(),\n\n Default::default(),\n\n );\n\n for query_data in base.query_data.iter().chain(&change.query_data) {\n\n all_labels.insert(&query_data.label[..]);\n\n }\n\n\n\n let mut query_data: Vec<_> = all_labels\n\n .iter()\n\n .map(|l| {\n", "file_path": "summarize/src/diff.rs", "rank": 10, "score": 233766.56890043968 }, { "content": "fn file_prefixes_in_dir(opt: &Opt) -> Result<Vec<PathBuf>, std::io::Error> {\n\n let mut result = Vec::new();\n\n if let Some(dir_path) = &opt.dir {\n\n for entry in fs::read_dir(dir_path)? {\n\n let entry = entry?;\n\n let path = entry.path();\n\n if path.extension().filter(|e| *e == \"events\").is_some() {\n\n result.push(path)\n\n }\n\n }\n\n }\n\n Ok(result)\n\n}\n\n\n", "file_path": "crox/src/main.rs", "rank": 11, "score": 231846.99320615848 }, { "content": "/// Collects accumulated summary data for the given ProfilingData.\n\n///\n\n/// The main result we are interested in is the query \"self-time\". This is the\n\n/// time spent computing the result of a query `q` minus the time spent in\n\n/// any other queries that `q` might have called. This \"self-time\" can be\n\n/// computed by looking at invocation stacks as follows:\n\n///\n\n/// When we encounter a query provider event, we first add its entire duration\n\n/// to the self-time counter of the query. Then, when we encounter a direct\n\n/// child of that query provider event, we subtract the duration of the child\n\n/// from the self-time counter of the query. Thus, after we've encountered all\n\n/// direct children we'll end up with the self-time.\n\n///\n\n/// For example, take the following query invocation trace:\n\n///\n\n/// <== q4 ==>\n\n/// <== q2 ==> <====== q3 ======>\n\n/// <===================== q1 =====================>\n\n/// -------------------------------------------------------> time\n\n///\n\n/// Query `q1` calls `q2` and later `q3`, which in turn calls `q4`. 
In order\n\n/// to get the self-time of `q1`, we take it's entire duration and subtract the\n\n/// durations of `q2` and `q3`. We do not subtract the duration of `q4` because\n\n/// that is already accounted for by the duration of `q3`.\n\n///\n\n/// The function below uses an algorithm that computes the self-times of all\n\n/// queries in a single pass over the profiling data. As the algorithm walks the\n\n/// data, it needs to keep track of invocation stacks. Because interval events\n\n/// occur in the stream at their \"end\" time, a parent event comes after all\n\n/// child events. For this reason we have to walk the events in *reverse order*.\n\n/// This way we always encounter the parent before its children, which makes it\n\n/// simple to keep an up-to-date stack of invocations.\n\n///\n\n/// The algorithm goes as follows:\n\n///\n\n/// ```\n\n/// for event in profiling_data.reversed()\n\n/// // Keep the stack up-to-date by popping all events that\n\n/// // don't contain the current event. After this loop, the\n\n/// // parent of the current event will be the top of the stack.\n\n/// while !stack.top.is_ancestor_of(event)\n\n/// stack.pop()\n\n///\n\n/// // Update the parents self-time if needed\n\n/// let parent = stack.top()\n\n/// if parent.is_some()\n\n/// self_time_for(parent) -= event.duration\n\n///\n\n/// // Update the self-time for the current-event\n\n/// self_time_for(event) += event.duration\n\n///\n\n/// // Push the current event onto the stack\n\n/// stack.push(event)\n\n/// ```\n\n///\n\n/// Here is an example of what updating the stack looks like:\n\n///\n\n/// ```\n\n/// <--e2--> <--e3-->\n\n/// <-----------e1----------->\n\n/// ```\n\n///\n\n/// In the event stream this shows up as something like:\n\n///\n\n/// ```\n\n/// [\n\n/// { label=e2, start= 5, end=10 },\n\n/// { label=e3, start=15, end=20 },\n\n/// { label=e1, start= 0, end=25 },\n\n/// ]\n\n/// ```\n\n///\n\n/// because events are emitted in the order of their end timestamps. 
So, as we\n\n/// walk backwards, we\n\n///\n\n/// 1. encounter `e1`, push it onto the stack, then\n\n/// 2. encounter `e3`, the stack contains `e1`, but that is fine since the\n\n/// time-interval of `e1` includes the time interval of `e3`. `e3` goes onto\n\n/// the stack and then we\n\n/// 3. encounter `e2`. The stack is `[e1, e3]`, but now `e3` needs to be popped\n\n/// because we are past its range, so we pop `e3` and push `e2`.\n\n///\n\n/// Why is popping done in a `while` loop? consider the following\n\n///\n\n/// ```\n\n/// <-e4->\n\n/// <--e2--> <--e3-->\n\n/// <-----------e1----------->\n\n/// ```\n\n///\n\n/// This looks as follows in the stream:\n\n///\n\n/// ```\n\n/// [\n\n/// { label=e2, start= 5, end=10 },\n\n/// { label=e4, start=17, end=19 },\n\n/// { label=e3, start=15, end=20 },\n\n/// { label=e1, start= 0, end=25 },\n\n/// ]\n\n/// ```\n\n///\n\n/// In this case when we encounter `e2`, the stack is `[e1, e3, e4]`, and both\n\n/// `e4` and `e3` need to be popped in the same step.\n\npub fn perform_analysis(data: ProfilingData) -> Results {\n\n struct PerThreadState<'a> {\n\n stack: Vec<Event<'a>>,\n\n start: SystemTime,\n\n end: SystemTime,\n\n }\n\n\n\n let mut query_data = FxHashMap::<String, QueryData>::default();\n\n let mut threads = FxHashMap::<_, PerThreadState>::default();\n\n\n\n let mut record_event_data = |label: &Cow<'_, str>, f: &dyn Fn(&mut QueryData)| {\n\n if let Some(data) = query_data.get_mut(&label[..]) {\n\n f(data);\n\n } else {\n\n let mut data = QueryData::new(label.clone().into_owned());\n\n f(&mut data);\n\n query_data.insert(label.clone().into_owned(), data);\n\n }\n\n };\n\n\n", "file_path": "summarize/src/analysis.rs", "rank": 12, "score": 205558.7368111111 }, { "content": "#[derive(Clone, Debug, PartialEq, Eq)]\n\nenum SamplePoint<E> {\n\n Start(E),\n\n End(E),\n\n Instant(E),\n\n}\n\n\n\nimpl<E> SamplePoint<E> {\n\n fn event(&self) -> &E {\n\n match self {\n\n SamplePoint::Start(event) | SamplePoint::End(event) | 
SamplePoint::Instant(event) => {\n\n event\n\n }\n\n }\n\n }\n\n\n\n fn map_event<E2>(self, f: impl FnOnce(E) -> E2) -> SamplePoint<E2> {\n\n match self {\n\n SamplePoint::Start(event) => SamplePoint::Start(f(event)),\n\n SamplePoint::End(event) => SamplePoint::End(f(event)),\n\n SamplePoint::Instant(event) => SamplePoint::Instant(f(event)),\n", "file_path": "summarize/src/aggregate.rs", "rank": 15, "score": 185835.09421824248 }, { "content": "#[derive(Clone, Debug, PartialEq, Eq)]\n\nstruct SampleInterval<E> {\n\n start: SamplePoint<E>,\n\n end: SamplePoint<E>,\n\n}\n\n\n\nimpl<E> SampleInterval<E> {\n\n fn map_event<E2>(self, f: impl Copy + FnOnce(E) -> E2) -> SampleInterval<E2> {\n\n SampleInterval {\n\n start: self.start.map_event(f),\n\n end: self.end.map_event(f),\n\n }\n\n }\n\n}\n\n\n\nimpl SampleInterval<WithParent<Event<'_>>> {\n\n fn duration(&self) -> Duration {\n\n self.end\n\n .timestamp()\n\n .duration_since(self.start.timestamp())\n\n .unwrap()\n\n }\n\n}\n\n\n", "file_path": "summarize/src/aggregate.rs", "rank": 16, "score": 184161.75329495742 }, { "content": "fn system_time_from_nanos<'de, D>(deserializer: D) -> Result<SystemTime, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let duration_from_epoch = Duration::from_nanos(u64::deserialize(deserializer)?);\n\n Ok(UNIX_EPOCH\n\n .checked_add(duration_from_epoch)\n\n .expect(\"a time that can be represented as SystemTime\"))\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Metadata {\n\n #[serde(deserialize_with = \"system_time_from_nanos\")]\n\n pub start_time: SystemTime,\n\n pub process_id: u32,\n\n pub cmd: String,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ProfilingData {\n", "file_path": "analyzeme/src/profiling_data.rs", "rank": 20, "score": 150203.2983338083 }, { "content": "fn build_query_lookup(query_data: &[QueryData]) -> FxHashMap<&str, usize> {\n\n let mut lookup = FxHashMap::with_capacity_and_hasher(query_data.len(), Default::default());\n\n for i in 0..query_data.len() 
{\n\n lookup.insert(&query_data[i].label[..], i);\n\n }\n\n\n\n lookup\n\n}\n\n\n", "file_path": "summarize/src/diff.rs", "rank": 21, "score": 150051.00698064806 }, { "content": "pub fn aggregate_profiles(profiles: Vec<ProfilingData>) {\n\n let aggregated_sample_intervals = AggregatedSampleIntervals::new(\n\n profiles\n\n .iter()\n\n .map(|data| SamplePoints::new(data.iter().map(|event| event.to_event())).intervals()),\n\n );\n\n\n\n let mut intervals_count = 0;\n\n\n\n // FIXME(eddyb) make the `10` configurable at runtime (i.e. with a flag)\n\n let mut durations = Extrema::new(10);\n\n let mut variances = Extrema::new(10);\n\n\n\n for interval in aggregated_sample_intervals.rev() {\n\n intervals_count += 1;\n\n\n\n durations.add_range(\n\n interval.min_duration..=interval.max_duration(),\n\n &interval.descriptions,\n\n );\n", "file_path": "summarize/src/aggregate.rs", "rank": 22, "score": 144078.25703911093 }, { "content": "pub fn strip_file_header(data: &[u8]) -> &[u8] {\n\n &data[FILE_HEADER_SIZE..]\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::{PageTag, SerializationSinkBuilder};\n\n\n\n #[test]\n\n fn roundtrip() {\n\n let data_sink = SerializationSinkBuilder::new_in_memory().new_sink(PageTag::Events);\n\n\n\n write_file_header(&mut data_sink.as_std_write(), FILE_MAGIC_EVENT_STREAM).unwrap();\n\n\n\n let data = data_sink.into_bytes();\n\n\n\n verify_file_header(&data, FILE_MAGIC_EVENT_STREAM, None, \"test\").unwrap();\n\n }\n\n\n", "file_path": "measureme/src/file_header.rs", "rank": 23, "score": 135494.91045940988 }, { "content": "struct AggregatedSampleInterval<'a> {\n\n descriptions: SampleInterval<WithParent<EventDescription<'a>>>,\n\n\n\n min_duration: Duration,\n\n duration_variance: Variance<Duration>,\n\n}\n\n\n\nimpl AggregatedSampleInterval<'_> {\n\n fn max_duration(&self) -> Duration {\n\n self.min_duration + self.duration_variance.range_size\n\n }\n\n}\n\n\n", "file_path": "summarize/src/aggregate.rs", "rank": 24, 
"score": 132901.71956780265 }, { "content": "struct AggregatedSampleIntervals<I> {\n\n sample_intervals_per_profile: Vec<I>,\n\n}\n\n\n\nimpl<'a, I: BackwardsIterator<Item = SampleInterval<WithParent<Event<'a>>>>>\n\n AggregatedSampleIntervals<I>\n\n{\n\n fn new(sample_intervals_per_profile: impl Iterator<Item = I>) -> Self {\n\n AggregatedSampleIntervals {\n\n sample_intervals_per_profile: sample_intervals_per_profile.collect(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, I: BackwardsIterator<Item = SampleInterval<WithParent<Event<'a>>>>> BackwardsIterator\n\n for AggregatedSampleIntervals<I>\n\n{\n\n type Item = AggregatedSampleInterval<'a>;\n\n fn next_back(&mut self) -> Option<Self::Item> {\n\n match self.sample_intervals_per_profile.get_mut(0)?.next_back() {\n", "file_path": "summarize/src/aggregate.rs", "rank": 25, "score": 132901.71956780265 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\n\nstruct Variance<T> {\n\n /// The size of the range of possible values, i.e. `max - min`.\n\n range_size: T,\n\n}\n\n\n", "file_path": "summarize/src/aggregate.rs", "rank": 26, "score": 127109.68555664746 }, { "content": "// This struct reflects what filenames were in old versions of measureme. 
It is\n\n// used only for giving helpful error messages now if a user tries to load old\n\n// data.\n\nstruct ProfilerFiles {\n\n pub events_file: PathBuf,\n\n pub string_data_file: PathBuf,\n\n pub string_index_file: PathBuf,\n\n}\n\n\n\nimpl ProfilerFiles {\n\n fn new<P: AsRef<Path>>(path_stem: P) -> ProfilerFiles {\n\n ProfilerFiles {\n\n events_file: path_stem.as_ref().with_extension(\"events\"),\n\n string_data_file: path_stem.as_ref().with_extension(\"string_data\"),\n\n string_index_file: path_stem.as_ref().with_extension(\"string_index\"),\n\n }\n\n }\n\n}\n\n\n\n#[rustfmt::skip]\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "analyzeme/src/profiling_data.rs", "rank": 27, "score": 127081.41593925853 }, { "content": "#[derive(StructOpt, Debug)]\n\nstruct Opt {\n\n file_prefix: PathBuf,\n\n}\n\n\n", "file_path": "flamegraph/src/main.rs", "rank": 28, "score": 126164.54856916296 }, { "content": "#[derive(StructOpt, Debug)]\n\nstruct Opt {\n\n #[structopt(required_unless = \"dir\")]\n\n file_prefix: Vec<PathBuf>,\n\n /// all event trace files in dir will be merged to one chrome_profiler.json file\n\n #[structopt(long = \"dir\")]\n\n dir: Option<PathBuf>,\n\n /// collapse threads without overlapping events\n\n #[structopt(long = \"collapse-threads\")]\n\n collapse_threads: bool,\n\n /// filter out events with shorter duration (in microseconds)\n\n #[structopt(long = \"minimum-duration\")]\n\n minimum_duration: Option<u128>,\n\n}\n\n\n", "file_path": "crox/src/main.rs", "rank": 29, "score": 126164.54856916296 }, { "content": "#[derive(StructOpt, Debug)]\n\nstruct Opt {\n\n file_prefix: PathBuf,\n\n\n\n /// Filter to events which occured on the specified thread id\n\n #[structopt(short = \"t\", long = \"thread-id\")]\n\n thread_id: Option<u32>,\n\n}\n\n\n", "file_path": "mmview/src/main.rs", "rank": 30, "score": 126164.54856916296 }, { "content": "fn as_micros<S: Serializer>(d: &Duration, s: S) -> Result<S::Ok, S::Error> {\n\n let v = 
(d.as_secs() * 1_000_000) + (d.subsec_nanos() as u64 / 1_000);\n\n s.serialize_u64(v)\n\n}\n\n\n", "file_path": "crox/src/main.rs", "rank": 31, "score": 125026.57205350109 }, { "content": "#[derive(StructOpt, Debug)]\n\nstruct Opt {\n\n file_prefix: PathBuf,\n\n}\n\n\n", "file_path": "stack_collapse/src/main.rs", "rank": 32, "score": 123803.5260576591 }, { "content": "struct SampleIntervals<I: BackwardsIterator> {\n\n last_sample_point: Option<I::Item>,\n\n\n\n sample_points: I,\n\n}\n\n\n\nimpl<I: BackwardsIterator> SampleIntervals<I> {\n\n fn new(mut sample_points: I) -> Self {\n\n SampleIntervals {\n\n last_sample_point: sample_points.next_back(),\n\n\n\n sample_points,\n\n }\n\n }\n\n}\n\n\n\nimpl<E: Clone, I: BackwardsIterator<Item = SamplePoint<E>>> BackwardsIterator\n\n for SampleIntervals<I>\n\n{\n\n type Item = SampleInterval<E>;\n", "file_path": "summarize/src/aggregate.rs", "rank": 33, "score": 119212.95780122027 }, { "content": "struct SamplePoints<'a, I: DoubleEndedIterator<Item = Event<'a>>> {\n\n /// This analysis only works with deterministic runs, which precludes parallelism,\n\n /// so we just have to find the *only* thread's ID and require there is no other.\n\n expected_thread_id: u32,\n\n\n\n rev_events: std::iter::Peekable<std::iter::Rev<I>>,\n\n stack: Vec<Event<'a>>,\n\n}\n\n\n\nimpl<'a, I: DoubleEndedIterator<Item = Event<'a>>> SamplePoints<'a, I> {\n\n fn new(events: I) -> Self {\n\n let mut rev_events = events.rev().peekable();\n\n SamplePoints {\n\n // The `0` default doesn't matter, if there are no events.\n\n expected_thread_id: rev_events.peek().map_or(0, |event| event.thread_id),\n\n\n\n rev_events,\n\n stack: vec![],\n\n }\n\n }\n", "file_path": "summarize/src/aggregate.rs", "rank": 34, "score": 106362.57143252056 }, { "content": "fn mk_filestem(file_name_stem: &str) -> PathBuf {\n\n let mut path = PathBuf::new();\n\n\n\n path.push(\"test-tmp\");\n\n path.push(\"end_to_end_serialization\");\n\n path.push(file_name_stem);\n\n\n\n 
path\n\n}\n\n\n", "file_path": "analyzeme/src/testing_common.rs", "rank": 35, "score": 97771.67574125716 }, { "content": "#[must_use]\n\npub fn verify_file_header(\n\n bytes: &[u8],\n\n expected_magic: &[u8; 4],\n\n diagnostic_file_path: Option<&Path>,\n\n stream_tag: &str,\n\n) -> Result<(), Box<dyn Error + Send + Sync>> {\n\n // The implementation here relies on FILE_HEADER_SIZE to have the value 8.\n\n // Let's make sure this assumption cannot be violated without being noticed.\n\n assert_eq!(FILE_HEADER_SIZE, 8);\n\n\n\n let diagnostic_file_path = diagnostic_file_path.unwrap_or(Path::new(\"<in-memory>\"));\n\n\n\n if bytes.len() < FILE_HEADER_SIZE {\n\n let msg = format!(\n\n \"Error reading {} stream in file `{}`: Expected file to contain at least `{:?}` bytes but found `{:?}` bytes\",\n\n stream_tag,\n\n diagnostic_file_path.display(),\n\n FILE_HEADER_SIZE,\n\n bytes.len()\n\n );\n", "file_path": "measureme/src/file_header.rs", "rank": 36, "score": 93377.25660826672 }, { "content": "pub fn write_file_header(\n\n s: &mut dyn std::io::Write,\n\n file_magic: &[u8; 4],\n\n) -> Result<(), Box<dyn Error + Send + Sync>> {\n\n // The implementation here relies on FILE_HEADER_SIZE to have the value 8.\n\n // Let's make sure this assumption cannot be violated without being noticed.\n\n assert_eq!(FILE_HEADER_SIZE, 8);\n\n\n\n s.write_all(file_magic).map_err(Box::new)?;\n\n s.write_all(&CURRENT_FILE_FORMAT_VERSION.to_le_bytes())\n\n .map_err(Box::new)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "measureme/src/file_header.rs", "rank": 37, "score": 93373.16402038111 }, { "content": "// Generate some profiling data. 
This is the part that would run in rustc.\n\nfn generate_profiling_data(\n\n filestem: &Path,\n\n num_stacks: usize,\n\n num_threads: usize,\n\n) -> Vec<Event<'static>> {\n\n let profiler = Arc::new(Profiler::new(Path::new(filestem)).unwrap());\n\n\n\n let event_id_virtual = EventId::from_label(StringId::new_virtual(42));\n\n let event_id_builder = EventIdBuilder::new(&profiler);\n\n\n\n let event_ids: Vec<(StringId, EventId)> = vec![\n\n (\n\n profiler.alloc_string(\"Generic\"),\n\n EventId::from_label(profiler.alloc_string(\"SomeGenericActivity\")),\n\n ),\n\n (profiler.alloc_string(\"Query\"), event_id_virtual),\n\n (\n\n profiler.alloc_string(\"QueryWithArg\"),\n\n event_id_builder.from_label_and_arg(\n\n profiler.alloc_string(\"AQueryWithArg\"),\n", "file_path": "analyzeme/src/testing_common.rs", "rank": 38, "score": 86868.0892126265 }, { "content": "fn check_profiling_data(\n\n actual_events: &mut dyn Iterator<Item = Event<'_>>,\n\n expected_events: &mut dyn Iterator<Item = Event<'_>>,\n\n num_expected_events: usize,\n\n) {\n\n let mut count = 0;\n\n\n\n // This assertion makes sure that the ExactSizeIterator impl works as expected.\n\n assert_eq!(\n\n (num_expected_events, Some(num_expected_events)),\n\n actual_events.size_hint()\n\n );\n\n\n\n let actual_events_per_thread = collect_events_per_thread(actual_events);\n\n let expected_events_per_thread = collect_events_per_thread(expected_events);\n\n\n\n let thread_ids: Vec<_> = actual_events_per_thread.keys().collect();\n\n assert_eq!(\n\n thread_ids,\n\n expected_events_per_thread.keys().collect::<Vec<_>>()\n", "file_path": "analyzeme/src/testing_common.rs", "rank": 39, "score": 86864.3151334018 }, { "content": "// String IDs in the table data are encoded in big endian format, while string\n\n// IDs in the index are encoded in little endian format. 
Don't mix the two up.\n\nfn decode_string_ref_from_data(bytes: &[u8]) -> StringId {\n\n // The code below assumes we use a 5-byte encoding for string\n\n // refs, where the first byte is STRING_REF_TAG and the\n\n // following 4 bytes are a little-endian u32 string ID value.\n\n assert!(bytes[0] == STRING_REF_TAG);\n\n assert!(STRING_REF_ENCODED_SIZE == 5);\n\n\n\n let id = u32::from_le_bytes(bytes[1..5].try_into().unwrap());\n\n StringId::new(id)\n\n}\n\n\n\n/// Read-only version of the string table\n\n#[derive(Debug)]\n\npub struct StringTable {\n\n // TODO: Replace with something lazy\n\n string_data: Vec<u8>,\n\n index: FxHashMap<StringId, Addr>,\n\n}\n\n\n\nimpl StringTable {\n", "file_path": "analyzeme/src/stringtable.rs", "rank": 40, "score": 75738.23429753832 }, { "content": "fn event_index_to_addr(event_index: usize) -> usize {\n\n FILE_HEADER_SIZE + event_index * mem::size_of::<RawEvent>()\n\n}\n\n\n", "file_path": "analyzeme/src/profiling_data.rs", "rank": 41, "score": 75731.35936568909 }, { "content": "/// Collect a map of all stacks and how many nanoseconds are spent in each.\n\n/// Uses a variation of the algorithm in `summarize`.\n\n// Original implementation provided by @andjo403 in\n\n// https://github.com/michaelwoerister/measureme/pull/1\n\npub fn collapse_stacks<'a>(profiling_data: &ProfilingData) -> FxHashMap<String, u64> {\n\n let mut counters = FxHashMap::default();\n\n let mut threads = FxHashMap::<_, PerThreadState<'_>>::default();\n\n\n\n for current_event in profiling_data\n\n .iter()\n\n .rev()\n\n .filter(|e| !e.timestamp.is_instant())\n\n {\n\n let thread = threads\n\n .entry(current_event.thread_id)\n\n .or_insert(PerThreadState {\n\n stack: Vec::new(),\n\n stack_id: \"rustc\".to_owned(),\n\n start: current_event.timestamp.start(),\n\n end: current_event.timestamp.end(),\n\n total_event_time_nanos: 0,\n\n });\n\n\n\n thread.start = cmp::min(thread.start, current_event.timestamp.start());\n", "file_path": 
"analyzeme/src/stack_collapse.rs", "rank": 42, "score": 75156.50741742256 }, { "content": "use crate::query_data::{QueryData, QueryDataDiff, Results};\n\nuse crate::signed_duration::SignedDuration;\n\nuse rustc_hash::{FxHashMap, FxHashSet};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::time::Duration;\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct DiffResults {\n\n pub query_data: Vec<QueryDataDiff>,\n\n pub total_time: SignedDuration,\n\n}\n\n\n", "file_path": "summarize/src/diff.rs", "rank": 43, "score": 72938.91143998505 }, { "content": " let b = base_data.get(l).map(|i| &base.query_data[*i]);\n\n let c = change_data.get(l).map(|i| &change.query_data[*i]);\n\n\n\n match (b, c) {\n\n (Some(b), Some(c)) => c.clone() - b.clone(),\n\n (Some(b), None) => b.invert(),\n\n (None, Some(c)) => c.as_query_data_diff(),\n\n (None, None) => unreachable!(),\n\n }\n\n })\n\n .collect();\n\n\n\n query_data.sort_by(|l, r| r.self_time.duration.cmp(&l.self_time.duration));\n\n\n\n DiffResults {\n\n query_data,\n\n total_time: sd(change.total_time) - sd(base.total_time),\n\n }\n\n}\n", "file_path": "summarize/src/diff.rs", "rank": 44, "score": 72938.15252869579 }, { "content": "\n\n#[rustfmt::skip]\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::time::Duration;\n\n use analyzeme::ProfilingDataBuilder;\n\n\n\n #[test]\n\n fn total_time_and_nesting() {\n\n let mut b = ProfilingDataBuilder::new();\n\n\n\n b.interval(QUERY_EVENT_KIND, \"q1\", 0, 100, 200, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"q2\", 0, 110, 190, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"q3\", 0, 120, 180, |_| {});\n\n });\n\n });\n\n\n\n let results = perform_analysis(b.into_profiling_data());\n\n\n", "file_path": "summarize/src/analysis.rs", "rank": 45, "score": 72539.82495697732 }, { "content": "use crate::query_data::{QueryData, Results};\n\nuse analyzeme::{Event, ProfilingData, Timestamp};\n\nuse measureme::rustc::*;\n\nuse rustc_hash::FxHashMap;\n\nuse std::borrow::Cow;\n\nuse 
std::time::SystemTime;\n\n\n\n/// Collects accumulated summary data for the given ProfilingData.\n\n///\n\n/// The main result we are interested in is the query \"self-time\". This is the\n\n/// time spent computing the result of a query `q` minus the time spent in\n\n/// any other queries that `q` might have called. This \"self-time\" can be\n\n/// computed by looking at invocation stacks as follows:\n\n///\n\n/// When we encounter a query provider event, we first add its entire duration\n\n/// to the self-time counter of the query. Then, when we encounter a direct\n\n/// child of that query provider event, we subtract the duration of the child\n\n/// from the self-time counter of the query. Thus, after we've encountered all\n\n/// direct children we'll end up with the self-time.\n\n///\n", "file_path": "summarize/src/analysis.rs", "rank": 46, "score": 72537.1641795487 }, { "content": "\n\n let results = perform_analysis(b.into_profiling_data());\n\n\n\n assert_eq!(results.total_time, Duration::from_nanos(200));\n\n\n\n assert_eq!(results.query_data_by_label(\"e1\").self_time, Duration::from_nanos(100));\n\n assert_eq!(results.query_data_by_label(\"e2\").self_time, Duration::from_nanos(50));\n\n assert_eq!(results.query_data_by_label(\"e3\").self_time, Duration::from_nanos(30));\n\n assert_eq!(results.query_data_by_label(\"e4\").self_time, Duration::from_nanos(20));\n\n\n\n assert_eq!(results.query_data_by_label(\"e1\").invocation_count, 1);\n\n assert_eq!(results.query_data_by_label(\"e2\").invocation_count, 1);\n\n assert_eq!(results.query_data_by_label(\"e3\").invocation_count, 1);\n\n assert_eq!(results.query_data_by_label(\"e4\").invocation_count, 1);\n\n }\n\n\n\n #[test]\n\n fn same_event_multiple_times() {\n\n // <--e3--> <--e3-->\n\n // <---e2---> <---e2--->\n", "file_path": "summarize/src/analysis.rs", "rank": 47, "score": 72534.58727983681 }, { "content": " // T2: <------q1 (loading)------>\n\n // T3: <----q1 (loading)---->\n\n // 0 30 40 100\n\n\n\n 
let mut b = ProfilingDataBuilder::new();\n\n\n\n b.interval(INCREMENTAL_LOAD_RESULT_EVENT_KIND, \"q1\", 1, 0, 100, |_| {});\n\n b.interval(INCREMENTAL_LOAD_RESULT_EVENT_KIND, \"q1\", 2, 30, 100, |_| {});\n\n b.interval(INCREMENTAL_LOAD_RESULT_EVENT_KIND, \"q1\", 3, 40, 100, |_| {});\n\n\n\n let results = perform_analysis(b.into_profiling_data());\n\n\n\n assert_eq!(results.total_time, Duration::from_nanos(230));\n\n\n\n assert_eq!(results.query_data_by_label(\"q1\").self_time, Duration::from_nanos(230));\n\n assert_eq!(results.query_data_by_label(\"q1\").incremental_load_time, Duration::from_nanos(230));\n\n assert_eq!(results.query_data_by_label(\"q1\").time, Duration::from_nanos(230));\n\n }\n\n}\n", "file_path": "summarize/src/analysis.rs", "rank": 48, "score": 72534.48957126042 }, { "content": "\n\n let mut b = ProfilingDataBuilder::new();\n\n\n\n b.interval(QUERY_EVENT_KIND, \"e1\", 0, 100, 200, |_| {});\n\n b.interval(QUERY_EVENT_KIND, \"e2\", 0, 200, 300, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e3\", 0, 200, 250, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e4\", 0, 200, 220, |_| {});\n\n });\n\n });\n\n\n\n let results = perform_analysis(b.into_profiling_data());\n\n\n\n assert_eq!(results.total_time, Duration::from_nanos(200));\n\n\n\n assert_eq!(results.query_data_by_label(\"e1\").self_time, Duration::from_nanos(100));\n\n assert_eq!(results.query_data_by_label(\"e2\").self_time, Duration::from_nanos(50));\n\n assert_eq!(results.query_data_by_label(\"e3\").self_time, Duration::from_nanos(30));\n\n assert_eq!(results.query_data_by_label(\"e4\").self_time, Duration::from_nanos(20));\n\n\n\n assert_eq!(results.query_data_by_label(\"e1\").invocation_count, 1);\n", "file_path": "summarize/src/analysis.rs", "rank": 49, "score": 72534.37089601962 }, { "content": " b.interval(QUERY_EVENT_KIND, \"e1\", 0, 200, 300, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e1\", 0, 220, 280, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e1\", 0, 240, 260, |_| {});\n\n });\n\n });\n\n\n\n 
let results = perform_analysis(b.into_profiling_data());\n\n\n\n assert_eq!(results.total_time, Duration::from_nanos(100));\n\n\n\n assert_eq!(results.query_data_by_label(\"e1\").self_time, Duration::from_nanos(100));\n\n assert_eq!(results.query_data_by_label(\"e1\").invocation_count, 3);\n\n assert_eq!(results.query_data_by_label(\"e1\").time, Duration::from_nanos(180));\n\n }\n\n\n\n #[test]\n\n fn query_blocked() {\n\n // T1: <---------------q1--------------->\n\n // T2: <------q1 (blocked)------>\n\n // T3: <----q1 (blocked)---->\n", "file_path": "summarize/src/analysis.rs", "rank": 50, "score": 72534.19545290498 }, { "content": " // 0 30 40 100\n\n\n\n let mut b = ProfilingDataBuilder::new();\n\n\n\n b.interval(QUERY_EVENT_KIND, \"q1\", 1, 0, 100, |_| {});\n\n b.interval(QUERY_BLOCKED_EVENT_KIND, \"q1\", 2, 30, 100, |_| {});\n\n b.interval(QUERY_BLOCKED_EVENT_KIND, \"q1\", 3, 40, 100, |_| {});\n\n\n\n let results = perform_analysis(b.into_profiling_data());\n\n\n\n assert_eq!(results.total_time, Duration::from_nanos(230));\n\n\n\n assert_eq!(results.query_data_by_label(\"q1\").self_time, Duration::from_nanos(230));\n\n assert_eq!(results.query_data_by_label(\"q1\").blocked_time, Duration::from_nanos(130));\n\n assert_eq!(results.query_data_by_label(\"q1\").time, Duration::from_nanos(230));\n\n }\n\n\n\n #[test]\n\n fn query_incr_loading_time() {\n\n // T1: <---------------q1 (loading)----->\n", "file_path": "summarize/src/analysis.rs", "rank": 51, "score": 72534.18545899371 }, { "content": " b.interval(QUERY_EVENT_KIND, \"e2\", 0, 220, 280, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e3\", 0, 240, 260, |_| {});\n\n });\n\n });\n\n\n\n // Thread 1 -- continued\n\n b.interval(QUERY_EVENT_KIND, \"e1\", 1, 210, 310, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e2\", 1, 230, 290, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e3\", 1, 250, 270, |_| {});\n\n });\n\n });\n\n\n\n let results = perform_analysis(b.into_profiling_data());\n\n\n\n assert_eq!(results.total_time, 
Duration::from_nanos(400));\n\n\n\n assert_eq!(results.query_data_by_label(\"e1\").self_time, Duration::from_nanos(160));\n\n assert_eq!(results.query_data_by_label(\"e2\").self_time, Duration::from_nanos(160));\n\n assert_eq!(results.query_data_by_label(\"e3\").self_time, Duration::from_nanos(80));\n\n\n", "file_path": "summarize/src/analysis.rs", "rank": 52, "score": 72534.11796986299 }, { "content": " // <--------e1--------><--------e1-------->\n\n // 100 200 300\n\n\n\n let mut b = ProfilingDataBuilder::new();\n\n\n\n b.interval(QUERY_EVENT_KIND, \"e1\", 0, 100, 200, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e2\", 0, 120, 180, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e3\", 0, 140, 160, |_| {});\n\n });\n\n });\n\n\n\n b.interval(QUERY_EVENT_KIND, \"e1\", 0, 200, 300, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e2\", 0, 220, 280, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e3\", 0, 240, 260, |_| {});\n\n });\n\n });\n\n\n\n let results = perform_analysis(b.into_profiling_data());\n\n\n\n assert_eq!(results.total_time, Duration::from_nanos(200));\n", "file_path": "summarize/src/analysis.rs", "rank": 53, "score": 72533.26270273604 }, { "content": " b.instant(QUERY_CACHE_HIT_EVENT_KIND, \"y\", 0, 230);\n\n\n\n b.interval(QUERY_EVENT_KIND, \"e3\", 0, 240, 260, |b| {\n\n b.instant(QUERY_CACHE_HIT_EVENT_KIND, \"x\", 0, 241);\n\n b.instant(QUERY_CACHE_HIT_EVENT_KIND, \"y\", 0, 242);\n\n b.instant(QUERY_CACHE_HIT_EVENT_KIND, \"x\", 0, 243);\n\n b.instant(QUERY_CACHE_HIT_EVENT_KIND, \"y\", 0, 244);\n\n });\n\n\n\n b.instant(QUERY_CACHE_HIT_EVENT_KIND, \"x\", 0, 270);\n\n });\n\n\n\n b.instant(QUERY_CACHE_HIT_EVENT_KIND, \"x\", 0, 290);\n\n });\n\n\n\n let results = perform_analysis(b.into_profiling_data());\n\n\n\n assert_eq!(results.total_time, Duration::from_nanos(100));\n\n\n\n assert_eq!(results.query_data_by_label(\"e1\").self_time, Duration::from_nanos(40));\n", "file_path": "summarize/src/analysis.rs", "rank": 54, "score": 72532.75690490744 }, { "content": " 
INCREMENTAL_LOAD_RESULT_EVENT_KIND => {\n\n record_event_data(&current_event.label, &|data| {\n\n data.self_time += current_event_duration;\n\n data.time += current_event_duration;\n\n data.incremental_load_time += current_event_duration;\n\n });\n\n }\n\n\n\n _ => {\n\n // Data sources other than rustc will use their own event kinds so just\n\n // treat this like a GENERIC_ACTIVITY except that we don't track cache\n\n // misses since those may not apply to all data sources.\n\n record_event_data(&current_event.label, &|data| {\n\n data.self_time += current_event_duration;\n\n data.time += current_event_duration;\n\n data.invocation_count += 1;\n\n });\n\n }\n\n };\n\n\n", "file_path": "summarize/src/analysis.rs", "rank": 55, "score": 72532.72417777791 }, { "content": " record_event_data(\n\n &current_top.label,\n\n &|data| match &current_top.event_kind[..] {\n\n QUERY_EVENT_KIND | GENERIC_ACTIVITY_EVENT_KIND => {\n\n data.self_time -= current_event_duration;\n\n }\n\n INCREMENTAL_LOAD_RESULT_EVENT_KIND => {\n\n data.self_time -= current_event_duration;\n\n data.incremental_load_time -= current_event_duration;\n\n }\n\n _ => {\n\n // Data sources other than rustc will use their own event kinds so\n\n // just treat this like a GENERIC_ACTIVITY except that we don't\n\n // track cache misses since those may not apply to all data sources.\n\n data.self_time -= current_event_duration;\n\n }\n\n },\n\n );\n\n }\n\n\n", "file_path": "summarize/src/analysis.rs", "rank": 56, "score": 72532.60305841317 }, { "content": " assert_eq!(results.query_data_by_label(\"e2\").self_time, Duration::from_nanos(40));\n\n assert_eq!(results.query_data_by_label(\"e3\").self_time, Duration::from_nanos(20));\n\n\n\n assert_eq!(results.query_data_by_label(\"e1\").invocation_count, 1);\n\n assert_eq!(results.query_data_by_label(\"e2\").invocation_count, 1);\n\n assert_eq!(results.query_data_by_label(\"e3\").invocation_count, 1);\n\n\n\n 
assert_eq!(results.query_data_by_label(\"x\").number_of_cache_hits, 5);\n\n assert_eq!(results.query_data_by_label(\"y\").number_of_cache_hits, 3);\n\n }\n\n\n\n #[test]\n\n fn stack_of_same_events() {\n\n // <--e1-->\n\n // <-----e1----->\n\n // <--------e1-------->\n\n // 100 200\n\n\n\n let mut b = ProfilingDataBuilder::new();\n\n\n", "file_path": "summarize/src/analysis.rs", "rank": 57, "score": 72532.47716320542 }, { "content": "\n\n assert_eq!(results.query_data_by_label(\"e1\").self_time, Duration::from_nanos(80));\n\n assert_eq!(results.query_data_by_label(\"e2\").self_time, Duration::from_nanos(80));\n\n assert_eq!(results.query_data_by_label(\"e3\").self_time, Duration::from_nanos(40));\n\n\n\n assert_eq!(results.query_data_by_label(\"e1\").invocation_count, 2);\n\n assert_eq!(results.query_data_by_label(\"e2\").invocation_count, 2);\n\n assert_eq!(results.query_data_by_label(\"e3\").invocation_count, 2);\n\n }\n\n\n\n #[test]\n\n fn multiple_threads() {\n\n // <--e3--> <--e3-->\n\n // <---e2---> <---e2--->\n\n // <--------e1--------><--------e1-------->\n\n // T0 100 200 300\n\n //\n\n // <--e3--> <--e3-->\n\n // <---e2---> <---e2--->\n\n // <--------e1--------><--------e1-------->\n", "file_path": "summarize/src/analysis.rs", "rank": 58, "score": 72532.17722151516 }, { "content": " assert_eq!(results.total_time, Duration::from_nanos(100));\n\n\n\n // 10ns in the beginning and 10ns in the end\n\n assert_eq!(results.query_data_by_label(\"q1\").self_time, Duration::from_nanos(20));\n\n // 10ns in the beginning and 10ns in the end, again\n\n assert_eq!(results.query_data_by_label(\"q2\").self_time, Duration::from_nanos(20));\n\n // 60ns of uninterupted self-time\n\n assert_eq!(results.query_data_by_label(\"q3\").self_time, Duration::from_nanos(60));\n\n\n\n assert_eq!(results.query_data_by_label(\"q1\").invocation_count, 1);\n\n assert_eq!(results.query_data_by_label(\"q2\").invocation_count, 1);\n\n 
assert_eq!(results.query_data_by_label(\"q3\").invocation_count, 1);\n\n }\n\n\n\n #[test]\n\n fn events_with_same_starting_time() {\n\n // <--e4-->\n\n // <---e3--->\n\n // <--------e1--------><--------e2-------->\n\n // 100 200 300\n", "file_path": "summarize/src/analysis.rs", "rank": 59, "score": 72532.16705447882 }, { "content": " assert_eq!(results.query_data_by_label(\"e1\").invocation_count, 4);\n\n assert_eq!(results.query_data_by_label(\"e2\").invocation_count, 4);\n\n assert_eq!(results.query_data_by_label(\"e3\").invocation_count, 4);\n\n }\n\n\n\n #[test]\n\n fn instant_events() {\n\n // xyxy\n\n // y <--e3--> x\n\n // x <-----e2-----> x\n\n // <--------e1-------->\n\n // 100 200\n\n\n\n let mut b = ProfilingDataBuilder::new();\n\n\n\n b.interval(QUERY_EVENT_KIND, \"e1\", 0, 200, 300, |b| {\n\n b.instant(QUERY_CACHE_HIT_EVENT_KIND, \"x\", 0, 210);\n\n\n\n b.interval(QUERY_EVENT_KIND, \"e2\", 0, 220, 280, |b| {\n\n\n", "file_path": "summarize/src/analysis.rs", "rank": 60, "score": 72531.53282815685 }, { "content": " assert_eq!(results.query_data_by_label(\"e2\").invocation_count, 1);\n\n assert_eq!(results.query_data_by_label(\"e3\").invocation_count, 1);\n\n assert_eq!(results.query_data_by_label(\"e4\").invocation_count, 1);\n\n }\n\n\n\n #[test]\n\n fn events_with_same_end_time() {\n\n // <--e4-->\n\n // <---e3--->\n\n // <--------e1--------><--------e2-------->\n\n // 100 200 300\n\n\n\n let mut b = ProfilingDataBuilder::new();\n\n\n\n b.interval(QUERY_EVENT_KIND, \"e1\", 0, 100, 200, |_| {});\n\n b.interval(QUERY_EVENT_KIND, \"e2\", 0, 200, 300, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e3\", 0, 250, 300, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e4\", 0, 280, 300, |_| {});\n\n });\n\n });\n", "file_path": "summarize/src/analysis.rs", "rank": 61, "score": 72531.39562225471 }, { "content": " // Update the start and end times for thread\n\n thread.start = std::cmp::min(thread.start, start);\n\n thread.end = std::cmp::max(thread.end, end);\n\n\n\n // 
Bring the stack up-to-date\n\n thread.stack.push(current_event)\n\n }\n\n }\n\n }\n\n\n\n let total_time = threads\n\n .values()\n\n .map(|t| t.end.duration_since(t.start).unwrap())\n\n .sum();\n\n\n\n Results {\n\n query_data: query_data.drain().map(|(_, value)| value).collect(),\n\n total_time,\n\n }\n\n}\n", "file_path": "summarize/src/analysis.rs", "rank": 62, "score": 72529.62710166209 }, { "content": "/// For example, take the following query invocation trace:\n\n///\n\n/// <== q4 ==>\n\n/// <== q2 ==> <====== q3 ======>\n\n/// <===================== q1 =====================>\n\n/// -------------------------------------------------------> time\n\n///\n\n/// Query `q1` calls `q2` and later `q3`, which in turn calls `q4`. In order\n\n/// to get the self-time of `q1`, we take it's entire duration and subtract the\n\n/// durations of `q2` and `q3`. We do not subtract the duration of `q4` because\n\n/// that is already accounted for by the duration of `q3`.\n\n///\n\n/// The function below uses an algorithm that computes the self-times of all\n\n/// queries in a single pass over the profiling data. As the algorithm walks the\n\n/// data, it needs to keep track of invocation stacks. Because interval events\n\n/// occur in the stream at their \"end\" time, a parent event comes after all\n\n/// child events. For this reason we have to walk the events in *reverse order*.\n\n/// This way we always encounter the parent before its children, which makes it\n\n/// simple to keep an up-to-date stack of invocations.\n\n///\n", "file_path": "summarize/src/analysis.rs", "rank": 63, "score": 72528.47508976067 }, { "content": " // Update counters for the current event\n\n match &current_event.event_kind[..] 
{\n\n QUERY_EVENT_KIND | GENERIC_ACTIVITY_EVENT_KIND => {\n\n record_event_data(&current_event.label, &|data| {\n\n data.self_time += current_event_duration;\n\n data.time += current_event_duration;\n\n data.number_of_cache_misses += 1;\n\n data.invocation_count += 1;\n\n });\n\n }\n\n\n\n QUERY_BLOCKED_EVENT_KIND => {\n\n record_event_data(&current_event.label, &|data| {\n\n data.self_time += current_event_duration;\n\n data.time += current_event_duration;\n\n data.blocked_time += current_event_duration;\n\n data.invocation_count += 1;\n\n });\n\n }\n\n\n", "file_path": "summarize/src/analysis.rs", "rank": 64, "score": 72528.152428273 }, { "content": " for current_event in data\n\n .iter()\n\n .rev()\n\n .map(|lightweight_event| lightweight_event.to_event())\n\n {\n\n match current_event.timestamp {\n\n Timestamp::Instant(_) => {\n\n if &current_event.event_kind[..] == QUERY_CACHE_HIT_EVENT_KIND {\n\n record_event_data(&current_event.label, &|data| {\n\n data.number_of_cache_hits += 1;\n\n data.invocation_count += 1;\n\n });\n\n }\n\n }\n\n Timestamp::Interval { start, end } => {\n\n // This is an interval event\n\n let thread =\n\n threads\n\n .entry(current_event.thread_id)\n\n .or_insert_with(|| PerThreadState {\n", "file_path": "summarize/src/analysis.rs", "rank": 65, "score": 72527.6934389096 }, { "content": " // T1 100 200 300\n\n\n\n let mut b = ProfilingDataBuilder::new();\n\n\n\n // Thread 0\n\n b.interval(QUERY_EVENT_KIND, \"e1\", 0, 100, 200, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e2\", 0, 120, 180, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e3\", 0, 140, 160, |_| {});\n\n });\n\n });\n\n\n\n // Thread 1 -- the same as thread 0 with a slight time offset\n\n b.interval(QUERY_EVENT_KIND, \"e1\", 1, 110, 210, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e2\", 1, 130, 190, |b| {\n\n b.interval(QUERY_EVENT_KIND, \"e3\", 1, 150, 170, |_| {});\n\n });\n\n });\n\n\n\n // Thread 0 -- continued\n\n b.interval(QUERY_EVENT_KIND, \"e1\", 0, 200, 300, |b| {\n", 
"file_path": "summarize/src/analysis.rs", "rank": 66, "score": 72525.9708659988 }, { "content": "/// The algorithm goes as follows:\n\n///\n\n/// ```\n\n/// for event in profiling_data.reversed()\n\n/// // Keep the stack up-to-date by popping all events that\n\n/// // don't contain the current event. After this loop, the\n\n/// // parent of the current event will be the top of the stack.\n\n/// while !stack.top.is_ancestor_of(event)\n\n/// stack.pop()\n\n///\n\n/// // Update the parents self-time if needed\n\n/// let parent = stack.top()\n\n/// if parent.is_some()\n\n/// self_time_for(parent) -= event.duration\n\n///\n\n/// // Update the self-time for the current-event\n\n/// self_time_for(event) += event.duration\n\n///\n\n/// // Push the current event onto the stack\n\n/// stack.push(event)\n", "file_path": "summarize/src/analysis.rs", "rank": 67, "score": 72525.84200182218 }, { "content": "/// [\n\n/// { label=e2, start= 5, end=10 },\n\n/// { label=e4, start=17, end=19 },\n\n/// { label=e3, start=15, end=20 },\n\n/// { label=e1, start= 0, end=25 },\n\n/// ]\n\n/// ```\n\n///\n\n/// In this case when we encounter `e2`, the stack is `[e1, e3, e4]`, and both\n\n/// `e4` and `e3` need to be popped in the same step.\n", "file_path": "summarize/src/analysis.rs", "rank": 68, "score": 72523.44115658132 }, { "content": "/// ```\n\n///\n\n/// Here is an example of what updating the stack looks like:\n\n///\n\n/// ```\n\n/// <--e2--> <--e3-->\n\n/// <-----------e1----------->\n\n/// ```\n\n///\n\n/// In the event stream this shows up as something like:\n\n///\n\n/// ```\n\n/// [\n\n/// { label=e2, start= 5, end=10 },\n\n/// { label=e3, start=15, end=20 },\n\n/// { label=e1, start= 0, end=25 },\n\n/// ]\n\n/// ```\n\n///\n\n/// because events are emitted in the order of their end timestamps. So, as we\n", "file_path": "summarize/src/analysis.rs", "rank": 69, "score": 72523.44115658132 }, { "content": "/// walk backwards, we\n\n///\n\n/// 1. 
encounter `e1`, push it onto the stack, then\n\n/// 2. encounter `e3`, the stack contains `e1`, but that is fine since the\n\n/// time-interval of `e1` includes the time interval of `e3`. `e3` goes onto\n\n/// the stack and then we\n\n/// 3. encounter `e2`. The stack is `[e1, e3]`, but now `e3` needs to be popped\n\n/// because we are past its range, so we pop `e3` and push `e2`.\n\n///\n\n/// Why is popping done in a `while` loop? consider the following\n\n///\n\n/// ```\n\n/// <-e4->\n\n/// <--e2--> <--e3-->\n\n/// <-----------e1----------->\n\n/// ```\n\n///\n\n/// This looks as follows in the stream:\n\n///\n\n/// ```\n", "file_path": "summarize/src/analysis.rs", "rank": 70, "score": 72523.44115658132 }, { "content": " stack: Vec::new(),\n\n start,\n\n end,\n\n });\n\n\n\n // Pop all events from the stack that are not parents of the\n\n // current event.\n\n while let Some(current_top) = thread.stack.last().cloned() {\n\n if current_top.contains(&current_event) {\n\n break;\n\n }\n\n\n\n thread.stack.pop();\n\n }\n\n\n\n let current_event_duration = current_event.duration().unwrap();\n\n\n\n // If there is something on the stack, subtract the current\n\n // interval from it.\n\n if let Some(current_top) = thread.stack.last() {\n", "file_path": "summarize/src/analysis.rs", "rank": 71, "score": 72523.44115658132 }, { "content": "use measureme::rustc::*;\n\nuse std::borrow::Cow;\n\nuse std::collections::BTreeMap;\n\nuse std::fmt;\n\nuse std::time::{Duration, SystemTime};\n\n\n\n// FIXME(eddyb) move this into `analyzeme`.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct EventDescription<'a> {\n\n pub event_kind: Cow<'a, str>,\n\n pub label: Cow<'a, str>,\n\n pub additional_data: Vec<Cow<'a, str>>,\n\n}\n\n\n\nimpl<'a> From<Event<'a>> for EventDescription<'a> {\n\n fn from(e: Event<'a>) -> Self {\n\n EventDescription {\n\n event_kind: e.event_kind,\n\n label: e.label,\n\n additional_data: e.additional_data,\n", "file_path": "summarize/src/aggregate.rs", 
"rank": 72, "score": 72440.53054841104 }, { "content": " // string tables, or even enforce that the string tables\n\n // of each profile are themselves identical.\n\n assert_eq!(\n\n descriptions,\n\n interval.map_event(WithParent::<EventDescription>::from),\n\n \"`summarize aggregate` requires identical sequences of events\"\n\n );\n\n\n\n duration\n\n }),\n\n );\n\n\n\n let (mut min_duration, mut max_duration) = {\n\n let first = durations_across_profiles.next().unwrap();\n\n (first, first)\n\n };\n\n for duration in durations_across_profiles {\n\n min_duration = min_duration.min(duration);\n\n max_duration = max_duration.max(duration);\n\n }\n", "file_path": "summarize/src/aggregate.rs", "rank": 73, "score": 72439.17860164012 }, { "content": "\n\n Some(AggregatedSampleInterval {\n\n descriptions,\n\n\n\n min_duration,\n\n duration_variance: Variance {\n\n range_size: max_duration - min_duration,\n\n },\n\n })\n\n }\n\n None => {\n\n for leftover_intervals in self.sample_intervals_per_profile.iter_mut() {\n\n assert_eq!(\n\n leftover_intervals.next_back(),\n\n None,\n\n \"`summarize aggregate` requires identical sequences of events\"\n\n );\n\n }\n\n None\n\n }\n", "file_path": "summarize/src/aggregate.rs", "rank": 74, "score": 72437.26851601913 }, { "content": "}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct WithParent<T> {\n\n this: T,\n\n parent: Option<T>,\n\n}\n\n\n\nimpl<'a> From<WithParent<Event<'a>>> for WithParent<EventDescription<'a>> {\n\n fn from(e: WithParent<Event<'a>>) -> Self {\n\n WithParent {\n\n this: e.this.into(),\n\n parent: e.parent.map(|e| e.into()),\n\n }\n\n }\n\n}\n\n\n\n// FIXME(eddyb) should all these variants have `E` in them? seems un-DRY\n", "file_path": "summarize/src/aggregate.rs", "rank": 75, "score": 72436.5512819207 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl fmt::Display for EventDescription<'_> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match &self.event_kind[..] 
{\n\n QUERY_EVENT_KIND | GENERIC_ACTIVITY_EVENT_KIND => {}\n\n _ => write!(f, \"{} \", self.event_kind)?,\n\n }\n\n\n\n write!(f, \"`{}(\", self.label)?;\n\n for (i, arg) in self.additional_data.iter().enumerate() {\n\n if i > 0 {\n\n write!(f, \", \")?;\n\n }\n\n write!(f, \"{}\", arg)?;\n\n }\n\n write!(f, \")`\")\n\n }\n", "file_path": "summarize/src/aggregate.rs", "rank": 76, "score": 72436.12160628912 }, { "content": "\n\n fn intervals(self) -> SampleIntervals<Self> {\n\n SampleIntervals::new(self)\n\n }\n\n}\n\n\n\nimpl<'a, I: DoubleEndedIterator<Item = Event<'a>>> BackwardsIterator for SamplePoints<'a, I> {\n\n type Item = SamplePoint<WithParent<Event<'a>>>;\n\n fn next_back(&mut self) -> Option<Self::Item> {\n\n let sample_point = match self.rev_events.peek() {\n\n Some(peeked_event) => {\n\n assert_eq!(\n\n peeked_event.thread_id, self.expected_thread_id,\n\n \"more than one thread is not supported in `summarize aggregate`\"\n\n );\n\n match self.stack.last() {\n\n // Make sure to first leave any events in the stack that succeed\n\n // this one (note that because we're `peek`-ing, this will keep\n\n // getting hit until we run out of stack entries to leave).\n\n Some(top_event) if !top_event.contains(peeked_event) => {\n", "file_path": "summarize/src/aggregate.rs", "rank": 77, "score": 72434.74885056017 }, { "content": " Some(interval) => {\n\n let first_duration = interval.duration();\n\n let descriptions = interval.map_event(WithParent::<EventDescription>::from);\n\n\n\n // FIXME(eddyb) maybe extract this part into an `Iterator` impl? 
but it\n\n // would be hard to return an interable that doesn't allocate nor borrow\n\n // the iterator (whereas here `durations_across_profiles` borrows\n\n // `self.sample_intervals_per_profile`)\n\n let mut durations_across_profiles = std::iter::once(first_duration).chain(\n\n self.sample_intervals_per_profile[1..].iter_mut().map(|it| {\n\n let interval = it\n\n .next_back()\n\n .expect(\"`summarize aggregate` requires identical sequences of events\");\n\n\n\n let duration = interval.duration();\n\n\n\n // Ensure we don't allow profiles that differ in event details.\n\n // FIXME(eddyb) this may be expensive (and is redundant\n\n // for every event, shared by adjacent intervals), there\n\n // should be a cheaper way to compare strings across\n", "file_path": "summarize/src/aggregate.rs", "rank": 78, "score": 72434.640640088 }, { "content": " for (variance, descriptions) in variances.smallest {\n\n println!(\n\n \" ±{} ns: {}\",\n\n variance.range_size.as_nanos() as f64 / 2.0,\n\n describe(descriptions)\n\n );\n\n }\n\n println!();\n\n println!(\"Largest {} variances:\", variances.largest.len());\n\n for (variance, descriptions) in variances.largest {\n\n println!(\n\n \" ±{} ns: {}\",\n\n variance.range_size.as_nanos() as f64 / 2.0,\n\n describe(descriptions)\n\n );\n\n }\n\n}\n", "file_path": "summarize/src/aggregate.rs", "rank": 79, "score": 72434.36607460355 }, { "content": " let count = descriptions.count();\n\n format!(\n\n \"{} occurrences, or {:.2}%\",\n\n count,\n\n (count as f64) / (intervals_count as f64) * 100.0\n\n )\n\n }\n\n };\n\n\n\n println!(\"Smallest {} durations:\", durations.smallest.len());\n\n for (duration, descriptions) in durations.smallest {\n\n println!(\" {} ns: {}\", duration.as_nanos(), describe(descriptions));\n\n }\n\n println!(\"\");\n\n println!(\"Largest {} durations:\", durations.largest.len());\n\n for (duration, descriptions) in durations.largest {\n\n println!(\" {} ns: {}\", duration.as_nanos(), 
describe(descriptions));\n\n }\n\n println!(\"\");\n\n println!(\"Smallest {} variances:\", variances.smallest.len());\n", "file_path": "summarize/src/aggregate.rs", "rank": 80, "score": 72433.98842114044 }, { "content": "mod backwards_iter {\n\n // HACK(eddyb) like `DoubleEndedIterator`, but without a (forwards) `Iterator`.\n\n // This is needed because of how events are stored in \"postorder\",\n\n // i.e. an interval event follows all events nested in it, meaning\n\n // that most analyses we want to do can only be done in reverse.\n", "file_path": "summarize/src/aggregate.rs", "rank": 81, "score": 72433.59517482707 }, { "content": " assert_eq!(inner.parent.as_ref(), Some(&outer.this));\n\n format!(\"in {}, after {}\", outer.this, inner.this)\n\n }\n\n\n\n (SamplePoint::End(first), SamplePoint::Start(second))\n\n | (SamplePoint::Instant(first), SamplePoint::Start(second))\n\n | (SamplePoint::End(first), SamplePoint::Instant(second))\n\n | (SamplePoint::Instant(first), SamplePoint::Instant(second)) => {\n\n assert_eq!(first.parent, second.parent);\n\n if let Some(parent) = &first.parent {\n\n format!(\n\n \"in {},\\n between {}\\n and {}\\n\",\n\n parent, first.this, second.this\n\n )\n\n } else {\n\n format!(\"between {} and {}\", first.this, second.this)\n\n }\n\n }\n\n }\n\n } else {\n", "file_path": "summarize/src/aggregate.rs", "rank": 82, "score": 72430.2739217841 }, { "content": " };\n\n\n\n // HACK(eddyb) this works around `SamplePoint::End` having pushed itself\n\n // onto the stack, so its parent isn't the top of the stack anymore.\n\n let parent = match sample_point {\n\n SamplePoint::End(_) => {\n\n if self.stack.len() >= 2 {\n\n Some(&self.stack[self.stack.len() - 2])\n\n } else {\n\n None\n\n }\n\n }\n\n SamplePoint::Start(_) | SamplePoint::Instant(_) => self.stack.last(),\n\n };\n\n\n\n Some(sample_point.map_event(|this| WithParent {\n\n this,\n\n parent: parent.cloned(),\n\n }))\n\n }\n\n}\n\n\n", "file_path": "summarize/src/aggregate.rs", "rank": 
83, "score": 72430.2739217841 }, { "content": " fn next_back(&mut self) -> Option<Self::Item> {\n\n let start = self.sample_points.next_back()?;\n\n // FIXME(eddyb) make this cloning cheaper (somehow?)\n\n let end = self.last_sample_point.replace(start.clone())?;\n\n\n\n Some(SampleInterval { start, end })\n\n }\n\n}\n\n\n\n// FIXME(eddyb) extend this with more statistical information, rather\n\n// than assuming uniform distribution inside the range (`min..=max`).\n", "file_path": "summarize/src/aggregate.rs", "rank": 84, "score": 72430.2739217841 }, { "content": " variances.add(interval.duration_variance, &interval.descriptions);\n\n }\n\n\n\n let describe =\n\n |descriptions: ExtremaSources<SampleInterval<WithParent<EventDescription<'_>>>>| {\n\n if let ExtremaSources::One(description) = descriptions {\n\n match (description.start, description.end) {\n\n (SamplePoint::Start(start), SamplePoint::End(end)) => {\n\n assert_eq!(start, end);\n\n start.this.to_string()\n\n }\n\n\n\n (SamplePoint::Start(outer), SamplePoint::Start(inner))\n\n | (SamplePoint::Start(outer), SamplePoint::Instant(inner)) => {\n\n assert_eq!(inner.parent.as_ref(), Some(&outer.this));\n\n format!(\"in {}, before {}\", outer.this, inner.this)\n\n }\n\n\n\n (SamplePoint::End(inner), SamplePoint::End(outer))\n\n | (SamplePoint::Instant(inner), SamplePoint::End(outer)) => {\n", "file_path": "summarize/src/aggregate.rs", "rank": 85, "score": 72430.2739217841 }, { "content": " Largest,\n\n }\n\n\n\n for which in &[Which::Smallest, Which::Largest] {\n\n let (map, &value) = match which {\n\n Which::Smallest => (&mut self.smallest, range.start()),\n\n Which::Largest => (&mut self.largest, range.end()),\n\n };\n\n if map.len() < self.limit {\n\n map.entry(value).or_default().add(source);\n\n } else {\n\n let least_extreme = match which {\n\n Which::Smallest => map.keys().rev().next().copied().unwrap(), // `max(smallest)`\n\n Which::Largest => map.keys().next().copied().unwrap(), // `min(largest)`\n\n 
};\n\n let less_extreme = match which {\n\n Which::Smallest => value > least_extreme, // `value > max(smallest)`\n\n Which::Largest => value < least_extreme, // `value < min(largest)`\n\n };\n\n if !less_extreme {\n", "file_path": "summarize/src/aggregate.rs", "rank": 86, "score": 72430.2739217841 }, { "content": " map.entry(value).or_default().add(source);\n\n\n\n if map.len() > self.limit {\n\n map.remove(&least_extreme);\n\n }\n\n\n\n assert_eq!(map.len(), self.limit);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "summarize/src/aggregate.rs", "rank": 87, "score": 72430.2739217841 }, { "content": " }\n\n }\n\n}\n\n\n\n// FIXME(eddyb) move this somewhere else\n\n// (counterpoint: tracking \"sources\" of values is too specific)\n\npub struct Extrema<T, S = ()> {\n\n /// Number of `smallest`/`largest` values to keep track of.\n\n limit: usize,\n\n\n\n pub smallest: BTreeMap<T, ExtremaSources<S>>,\n\n pub largest: BTreeMap<T, ExtremaSources<S>>,\n\n}\n\n\n\npub enum ExtremaSources<S> {\n\n Empty,\n\n One(S),\n\n Count(usize),\n\n}\n\n\n", "file_path": "summarize/src/aggregate.rs", "rank": 88, "score": 72430.2739217841 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl SamplePoint<WithParent<Event<'_>>> {\n\n fn timestamp(&self) -> SystemTime {\n\n match (self, self.event().this.timestamp) {\n\n (SamplePoint::Start(_), Timestamp::Interval { start, .. }) => start,\n\n (SamplePoint::End(_), Timestamp::Interval { end, .. 
}) => end,\n\n (SamplePoint::Instant(_), Timestamp::Instant(time)) => time,\n\n _ => panic!(\n\n \"SamplePoint::timestamp: event timestamp doesn't match \\\n\n `SamplePoint` variant, in `SamplePoint::{:?}`\",\n\n self\n\n ),\n\n }\n\n }\n\n}\n\n\n", "file_path": "summarize/src/aggregate.rs", "rank": 89, "score": 72430.2739217841 }, { "content": " }\n\n}\n\n\n\nimpl<T: Copy + Ord, S: Clone> Extrema<T, S> {\n\n pub fn new(limit: usize) -> Self {\n\n Extrema {\n\n limit,\n\n\n\n smallest: BTreeMap::new(),\n\n largest: BTreeMap::new(),\n\n }\n\n }\n\n\n\n pub fn add(&mut self, value: T, source: &S) {\n\n self.add_range(value..=value, source)\n\n }\n\n\n\n pub fn add_range(&mut self, range: std::ops::RangeInclusive<T>, source: &S) {\n\n enum Which {\n\n Smallest,\n", "file_path": "summarize/src/aggregate.rs", "rank": 90, "score": 72430.2739217841 }, { "content": "impl<S> Default for ExtremaSources<S> {\n\n fn default() -> Self {\n\n ExtremaSources::Empty\n\n }\n\n}\n\n\n\nimpl<S: Clone> ExtremaSources<S> {\n\n pub fn count(&self) -> usize {\n\n match *self {\n\n ExtremaSources::Empty => 0,\n\n ExtremaSources::One(_) => 1,\n\n ExtremaSources::Count(count) => count,\n\n }\n\n }\n\n\n\n pub fn add(&mut self, source: &S) {\n\n *self = match self {\n\n ExtremaSources::Empty => ExtremaSources::One(source.clone()),\n\n _ => ExtremaSources::Count(self.count() + 1),\n\n };\n", "file_path": "summarize/src/aggregate.rs", "rank": 91, "score": 72430.2739217841 }, { "content": " SamplePoint::Start(self.stack.pop().unwrap())\n\n }\n\n\n\n _ => {\n\n let event = self.rev_events.next().unwrap();\n\n match event.timestamp {\n\n Timestamp::Interval { .. 
} => {\n\n // Now entering this new event.\n\n self.stack.push(event.clone());\n\n SamplePoint::End(event)\n\n }\n\n\n\n Timestamp::Instant(_) => SamplePoint::Instant(event),\n\n }\n\n }\n\n }\n\n }\n\n\n\n // Ran out of events, but we might still have stack entries to leave.\n\n None => SamplePoint::Start(self.stack.pop()?),\n", "file_path": "summarize/src/aggregate.rs", "rank": 92, "score": 72430.2739217841 }, { "content": "// Process some profiling data. This is the part that would run in a\n\n// post processing tool.\n\nfn process_profiling_data(filestem: &Path, expected_events: &[Event<'static>]) {\n\n let profiling_data = ProfilingData::new(filestem).unwrap();\n\n\n\n check_profiling_data(\n\n &mut profiling_data.iter().map(|e| e.to_event()),\n\n &mut expected_events.iter().cloned(),\n\n expected_events.len(),\n\n );\n\n check_profiling_data(\n\n &mut profiling_data.iter().rev().map(|e| e.to_event()),\n\n &mut expected_events.iter().rev().cloned(),\n\n expected_events.len(),\n\n );\n\n}\n\n\n", "file_path": "analyzeme/src/testing_common.rs", "rank": 93, "score": 70770.89070845193 }, { "content": " label: self.label.clone(),\n\n time: invert(self.time),\n\n time_change: -100.0,\n\n self_time: invert(self.self_time),\n\n self_time_change: -100.0,\n\n number_of_cache_misses: -(self.number_of_cache_misses as i64),\n\n number_of_cache_hits: -(self.number_of_cache_hits as i64),\n\n invocation_count: -(self.invocation_count as i64),\n\n blocked_time: invert(self.blocked_time),\n\n incremental_load_time: invert(self.incremental_load_time),\n\n }\n\n }\n\n\n\n pub fn as_query_data_diff(&self) -> QueryDataDiff {\n\n QueryDataDiff {\n\n label: self.label.clone(),\n\n time: self.time.into(),\n\n time_change: std::f64::INFINITY,\n\n self_time: self.self_time.into(),\n\n self_time_change: std::f64::INFINITY,\n", "file_path": "summarize/src/query_data.rs", "rank": 94, "score": 69766.40413047261 }, { "content": "use crate::signed_duration::SignedDuration;\n\nuse 
serde::{Deserialize, Serialize};\n\nuse std::ops::Sub;\n\nuse std::time::Duration;\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug)]\n\npub struct QueryData {\n\n pub label: String,\n\n pub time: Duration,\n\n pub self_time: Duration,\n\n pub number_of_cache_misses: usize,\n\n pub number_of_cache_hits: usize,\n\n pub invocation_count: usize,\n\n pub blocked_time: Duration,\n\n pub incremental_load_time: Duration,\n\n}\n\n\n\nimpl QueryData {\n\n pub fn new(label: String) -> QueryData {\n\n QueryData {\n", "file_path": "summarize/src/query_data.rs", "rank": 95, "score": 69764.01212152424 }, { "content": " number_of_cache_misses: self.number_of_cache_misses as i64,\n\n number_of_cache_hits: self.number_of_cache_hits as i64,\n\n invocation_count: self.invocation_count as i64,\n\n blocked_time: self.blocked_time.into(),\n\n incremental_load_time: self.incremental_load_time.into(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct QueryDataDiff {\n\n pub label: String,\n\n pub time: SignedDuration,\n\n pub time_change: f64,\n\n pub self_time: SignedDuration,\n\n pub self_time_change: f64,\n\n pub number_of_cache_misses: i64,\n\n pub number_of_cache_hits: i64,\n\n pub invocation_count: i64,\n\n pub blocked_time: SignedDuration,\n", "file_path": "summarize/src/query_data.rs", "rank": 96, "score": 69762.4081209651 }, { "content": " pub incremental_load_time: SignedDuration,\n\n}\n\n\n\nimpl Sub for QueryData {\n\n type Output = QueryDataDiff;\n\n\n\n fn sub(self, rhs: QueryData) -> QueryDataDiff {\n\n #[inline(always)]\n\n fn sd(d: Duration) -> SignedDuration {\n\n d.into()\n\n }\n\n\n\n #[inline(always)]\n\n fn i(u: usize) -> i64 {\n\n u as i64\n\n }\n\n\n\n QueryDataDiff {\n\n label: self.label,\n\n time: sd(self.time) - sd(rhs.time),\n", "file_path": "summarize/src/query_data.rs", "rank": 97, "score": 69760.07304663336 }, { "content": " label,\n\n time: Duration::from_nanos(0),\n\n self_time: Duration::from_nanos(0),\n\n 
number_of_cache_misses: 0,\n\n number_of_cache_hits: 0,\n\n invocation_count: 0,\n\n blocked_time: Duration::from_nanos(0),\n\n incremental_load_time: Duration::from_nanos(0),\n\n }\n\n }\n\n\n\n pub fn invert(&self) -> QueryDataDiff {\n\n fn invert(d: Duration) -> SignedDuration {\n\n SignedDuration {\n\n duration: d,\n\n is_positive: false,\n\n }\n\n }\n\n\n\n QueryDataDiff {\n", "file_path": "summarize/src/query_data.rs", "rank": 98, "score": 69758.80312198024 }, { "content": " time_change: percentage_change(rhs.time, self.time),\n\n self_time: sd(self.self_time) - sd(rhs.self_time),\n\n self_time_change: percentage_change(rhs.self_time, self.self_time),\n\n number_of_cache_misses: i(self.number_of_cache_misses) - i(rhs.number_of_cache_misses),\n\n number_of_cache_hits: i(self.number_of_cache_hits) - i(rhs.number_of_cache_hits),\n\n invocation_count: i(self.invocation_count) - i(rhs.invocation_count),\n\n blocked_time: sd(self.blocked_time) - sd(rhs.blocked_time),\n\n incremental_load_time: sd(self.incremental_load_time) - sd(rhs.incremental_load_time),\n\n }\n\n }\n\n}\n\n\n", "file_path": "summarize/src/query_data.rs", "rank": 99, "score": 69755.60727998406 } ]
Rust
src/config.rs
bekicot/cigale
3cf366b02237d43f9a4bf202de93adf8de3d17ba
use crate::events::events::{EventProvider, Result}; use chrono::prelude::*; use gtk::prelude::*; use regex::Regex; use serde_derive::{Deserialize, Serialize}; use std::borrow::Cow; use std::collections::hash_map::*; use std::fs::File; use std::io::{Read, Write}; #[cfg(unix)] use std::os::unix::fs::PermissionsExt; use std::path::{Path, PathBuf}; use std::*; #[derive(Deserialize, Serialize, Clone, Copy, Debug, PartialEq)] pub enum PrevNextDaySkipWeekends { Skip, DontSkip, } impl Default for PrevNextDaySkipWeekends { fn default() -> Self { PrevNextDaySkipWeekends::Skip } } #[derive(Deserialize, Serialize, Clone, Debug)] pub struct Config { #[serde(default)] pub prefer_dark_theme: bool, #[serde(default)] pub prev_next_day_skip_weekends: PrevNextDaySkipWeekends, pub git: HashMap<String, crate::events::git::GitConfig>, pub email: HashMap<String, crate::events::email::EmailConfig>, pub ical: HashMap<String, crate::events::ical::IcalConfig>, pub redmine: HashMap<String, crate::events::redmine::RedmineConfig>, #[serde(default)] pub gitlab: HashMap<String, crate::events::gitlab::GitlabConfig>, #[serde(default)] pub stackexchange: HashMap<String, crate::events::stackexchange::StackExchangeConfig>, } impl Config { pub fn config_path() -> Result<PathBuf> { let config_folder = Self::config_folder()?; Ok(config_folder.join("config.toml")) } pub fn default_config() -> Config { Config { git: HashMap::new(), email: HashMap::new(), ical: HashMap::new(), redmine: HashMap::new(), gitlab: HashMap::new(), stackexchange: HashMap::new(), prefer_dark_theme: false, prev_next_day_skip_weekends: PrevNextDaySkipWeekends::Skip, } } fn read_config_file() -> Result<Config> { let config_file = Self::config_path()?; if !config_file.is_file() { return Ok(Self::default_config()); } let mut contents = String::new(); File::open(config_file)?.read_to_string(&mut contents)?; let r = toml::from_str(&contents)?; Ok(r) } pub fn read_config() -> Config { Config::read_config_file().unwrap_or_else(|e| { let 
dialog = gtk::MessageDialog::new( None::<&gtk::Window>, gtk::DialogFlags::all(), gtk::MessageType::Error, gtk::ButtonsType::Close, "Error loading the configuration", ); dialog.set_secondary_text(Some(&format!( "{}: {:}", Config::config_path() .ok() .map(|p| p.to_string_lossy().to_string()) .unwrap_or_else(|| "".to_string()), e ))); let _r = dialog.run(); dialog.close(); Config::default_config() }) } fn save_config_file(&self) -> Result<()> { let mut file = File::create(Self::config_path()?)?; file.write_all(toml::to_string_pretty(self)?.as_bytes())?; Ok(()) } pub fn save_config(&self, parent_win: &gtk::Window) { self.save_config_file().unwrap_or_else(|e| { let dialog = gtk::MessageDialog::new( Some(parent_win), gtk::DialogFlags::all(), gtk::MessageType::Error, gtk::ButtonsType::Close, "Error saving the configuration", ); dialog.set_secondary_text(Some(&format!("{}", e))); let _r = dialog.run(); dialog.close(); }); } #[cfg(unix)] fn set_private_folder(path: &Path) -> Result<()> { let mut p = File::open(path)?.metadata()?.permissions(); p.set_mode(0o700); fs::set_permissions(path, p)?; Ok(()) } #[cfg(not(unix))] fn set_private_folder(_path: &PathBuf) -> Result<()> { Ok(()) } pub fn config_folder() -> Result<PathBuf> { let home_dir = dirs::home_dir().expect("Can't find your home folder?"); let config_folder = home_dir.join(".cigale"); if !config_folder.is_dir() { fs::create_dir(&config_folder)?; Self::set_private_folder(&config_folder)?; } Ok(config_folder) } fn get_cache_path(event_provider: &dyn EventProvider, config_name: &str) -> Result<PathBuf> { let config_folder = Self::config_folder()?; Ok(config_folder.join(format!( "{}_{}.cache", event_provider.name(), Self::sanitize_for_filename(config_name) ))) } pub fn sanitize_for_filename(str: &str) -> Cow<str> { let re = Regex::new(r"[^A-Za-z0-9]").unwrap(); re.replace_all(str, "_") } pub fn get_cached_contents( event_provider: &dyn EventProvider, config_name: &str, date: &DateTime<Local>, ) -> Result<Option<String>> { 
let cache_file = Self::get_cache_path(event_provider, config_name)?; if !cache_file.exists() { return Ok(None); } let metadata = std::fs::metadata(&cache_file)?; let file_date: DateTime<Local> = DateTime::from(metadata.modified()?); if file_date >= *date { let mut contents = String::new(); File::open(cache_file)?.read_to_string(&mut contents)?; Ok(Some(contents)) } else { log::debug!( "{} {} cache too old, refetching", event_provider.name(), config_name ); Ok(None) } } pub fn write_to_cache( event_provider: &dyn EventProvider, config_name: &str, contents: &str, ) -> Result<()> { let mut file = File::create(Self::get_cache_path(event_provider, config_name)?)?; file.write_all(contents.as_bytes())?; Ok(()) } } #[test] fn it_properly_escapes_filenames() { assert_eq!( "simPleN123ame", Config::sanitize_for_filename("simPleN123ame") ); assert_eq!( "simPle_N___12_____3am_e", Config::sanitize_for_filename("simPle N!()12č>/\\*3amée") ); }
use crate::events::events::{EventProvider, Result}; use chrono::prelude::*; use gtk::prelude::*; use regex::Regex; use serde_derive::{Deserialize, Serialize}; use std::borrow::Cow; use std::collections::hash_map::*; use std::fs::File; use std::io::{Read, Write}; #[cfg(unix)] use std::os::unix::fs::PermissionsExt; use std::path::{Path, PathBuf}; use std::*; #[derive(Deserialize, Serialize, Clone, Copy, Debug, PartialEq)] pub enum PrevNextDaySkipWeekends { Skip, DontSkip, } impl Default for PrevNextDaySkipWeekends { fn default() -> Self { PrevNextDaySkipWeekends::Skip } } #[derive(Deserialize, Serialize, Clone, Debug)] pub struct Config { #[serde(default)] pub prefer_dark_theme: bool, #[serde(default)] pub prev_next_day_skip_weekends: PrevNextDaySkipWeekends, pub git: HashMap<String, crate::events::git::GitConfig>, pub email: HashMap<String, crate::events::email::EmailConfig>, pub ical: HashMap<String, crate::events::ical::IcalConfig>, pub redmine: HashMap<String, crate::events::redmine::RedmineConfig>, #[serde(default)] pub gitlab: HashMap<String, crate::events::gitlab::GitlabConfig>, #[serde(default)] pub stackexchange: HashMap<String, crate::events::stackexchange::StackExchangeConfig>, } impl Config { pub fn config_path() -> Result<PathBuf> { let config_folder = Self::config_folder()?; Ok(config_folder.join("config.toml")) } pub fn default_config() -> Config { Config { git: HashMap::new(), email: HashMap::new(), ical: HashMap::new(), redmine: HashMap::new(), gitlab: HashMap::new(), stackexchange: HashMap::new(), prefer_dark_theme: false, prev_next_day_skip_weekends: PrevNextDaySkipWeekends::Skip, } } fn read_config_file() -> Result<Config> { let config_file = Self::config_path()?; if !config_file.is_file() { return Ok(Self::default_config()); } let mut contents = String::new(); File::open(config_file)?.read_to_string(&mut contents)?; let r = toml::from_str(&contents)?; Ok(r) } pub fn read_config() -> Config { Config::read_config_file().unwrap_or_else(|e| { let 
dialog = gtk::MessageDialog::new( None::<&gtk::Window>, gtk::DialogFlags::all(), gtk::MessageType::Error, gtk::ButtonsType::Close, "Error loading the configuration", ); dialog.set_secondary_text(Some(&format!( "{}: {:}", Config::config_path() .ok() .map(|p| p.to_string_lossy().to_string()) .unwrap_or_else(|| "".to_string()), e ))); let _r = dialog.run(); dialog.close(); Config::default_config() }) } fn save_config_file(&self) -> Result<()> { let mut file = File::create(Self::config_path()?)?; file.write_all(toml::to_string_pretty(self)?.as_bytes())?; Ok(()) } pub fn save_config(&self, parent_win: &gtk::Window) { self.save_config_file().unwrap_or_else(|e| { let dialog = gtk::MessageDialog::new( Some(parent_win), gtk::DialogFlags::all(), gtk::MessageType::Error, gtk::ButtonsType::Close, "Error saving the configuration", ); dialog.set_secondary_text(Some(&format!("{}", e))); let _r = dialog.run(); dialog.close(); }); } #[cfg(unix)] fn set_private_folder(path: &Path) -> Result<()> { let mut p = File::open(path)?.metadata()?.permissions(); p.set_mode(0o700); fs::set_permissions(path, p)?; Ok(()) } #[cfg(not(unix))] fn set_private_folder(_path: &PathBuf) -> Result<()> { Ok(()) } pub fn config_folder() -> Result<PathBuf> { let home_dir = dirs::home_dir().expect("Can't find your home folder?"); let config_folder = home_dir.join(".cigale"); if !config_folder.is_dir() { fs::create_dir(&config_folder)?; Self::set_private_folder(&config_folder)?; } Ok(config_folder) } fn get_cache_path(event_provider: &dyn EventProvider, config_n
pub fn sanitize_for_filename(str: &str) -> Cow<str> { let re = Regex::new(r"[^A-Za-z0-9]").unwrap(); re.replace_all(str, "_") } pub fn get_cached_contents( event_provider: &dyn EventProvider, config_name: &str, date: &DateTime<Local>, ) -> Result<Option<String>> { let cache_file = Self::get_cache_path(event_provider, config_name)?; if !cache_file.exists() { return Ok(None); } let metadata = std::fs::metadata(&cache_file)?; let file_date: DateTime<Local> = DateTime::from(metadata.modified()?); if file_date >= *date { let mut contents = String::new(); File::open(cache_file)?.read_to_string(&mut contents)?; Ok(Some(contents)) } else { log::debug!( "{} {} cache too old, refetching", event_provider.name(), config_name ); Ok(None) } } pub fn write_to_cache( event_provider: &dyn EventProvider, config_name: &str, contents: &str, ) -> Result<()> { let mut file = File::create(Self::get_cache_path(event_provider, config_name)?)?; file.write_all(contents.as_bytes())?; Ok(()) } } #[test] fn it_properly_escapes_filenames() { assert_eq!( "simPleN123ame", Config::sanitize_for_filename("simPleN123ame") ); assert_eq!( "simPle_N___12_____3am_e", Config::sanitize_for_filename("simPle N!()12č>/\\*3amée") ); }
ame: &str) -> Result<PathBuf> { let config_folder = Self::config_folder()?; Ok(config_folder.join(format!( "{}_{}.cache", event_provider.name(), Self::sanitize_for_filename(config_name) ))) }
function_block-function_prefixed
[ { "content": "pub fn get_all_events(config: Config, day: Date<Local>) -> Result<Vec<Event>> {\n\n let start = Instant::now();\n\n let eps = get_event_providers();\n\n let configs_to_fetch: Vec<(&Box<dyn EventProvider>, &String)> = eps\n\n .iter()\n\n .flat_map(|ep| {\n\n ep.get_config_names(&config)\n\n .into_iter()\n\n .map(move |cfg_name| (ep, cfg_name))\n\n })\n\n .collect();\n\n\n\n // use rayon's par_iter to fetch in parallel from multiple\n\n // event sources -- it's not CPU bound, but some sources\n\n // go to the network and parallelization helps a lot.\n\n // maybe I should force the size of the rayon's thread pool:\n\n // https://docs.rs/rayon/1.3.0/rayon/struct.ThreadPoolBuilder.html#method.build_global\n\n // because I think currently rayon will tie it to the number\n\n // of cores of the machine, but in our case it's really independent\n\n // as the tasks are IO-bound. Possibly I should enforce let's say\n", "file_path": "src/events/events.rs", "rank": 0, "score": 143596.37951868496 }, { "content": "pub fn get_event_providers() -> Vec<Box<dyn EventProvider>> {\n\n vec![\n\n Box::new(Git),\n\n Box::new(Email),\n\n Box::new(Ical),\n\n Box::new(Redmine),\n\n Box::new(Gitlab),\n\n Box::new(StackExchange),\n\n ]\n\n}\n\n\n", "file_path": "src/events/events.rs", "rank": 1, "score": 108625.18324890514 }, { "content": "#[derive(Deserialize, Serialize, Clone, Debug)]\n\nstruct GitlabNote {\n\n body: String,\n\n #[serde(rename = \"type\")]\n\n note_type: Option<String>,\n\n noteable_type: String,\n\n position: Option<GitlabPosition>,\n\n noteable_iid: Option<usize>,\n\n}\n\n\n", "file_path": "src/events/gitlab.rs", "rank": 2, "score": 91730.20891918505 }, { "content": "#[derive(Deserialize, Serialize, Clone, Debug)]\n\nstruct GitlabEvent {\n\n project_id: ProjectId,\n\n action_name: String,\n\n target_type: Option<String>,\n\n target_title: Option<String>,\n\n created_at: DateTime<Local>,\n\n note: Option<GitlabNote>,\n\n target_iid: 
Option<usize>,\n\n}\n\n\n", "file_path": "src/events/gitlab.rs", "rank": 3, "score": 91730.20891918505 }, { "content": "#[derive(Deserialize, Serialize, Clone, Debug)]\n\nstruct GitlabPosition {\n\n new_path: String,\n\n new_line: Option<usize>,\n\n}\n\n\n", "file_path": "src/events/gitlab.rs", "rank": 4, "score": 91730.20891918505 }, { "content": "#[derive(Deserialize, Serialize, Clone, Debug)]\n\nstruct GitlabProject {\n\n id: ProjectId,\n\n web_url: String,\n\n}\n\n\n\nimpl Gitlab {\n\n fn noteable_type_desc(note_type: &str) -> String {\n\n if note_type == \"MergeRequest\" {\n\n \"Merge Request\".to_string()\n\n } else {\n\n note_type.to_string()\n\n }\n\n }\n\n\n\n fn build_openinbrowser_link(\n\n project_infos: &HashMap<ProjectId, String>,\n\n evt: &GitlabEvent,\n\n ) -> Cow<'static, str> {\n\n if let Some(iid) = evt.note.as_ref().unwrap().noteable_iid {\n\n Cow::from(format!(\n", "file_path": "src/events/gitlab.rs", "rank": 5, "score": 91730.20891918505 }, { "content": "#[test]\n\nfn it_parses_ical_dates_correctly() {\n\n assert_eq!(\n\n FixedOffset::east(3600).ymd(2020, 2, 9).and_hms(15, 30, 50),\n\n Ical::parse_ical_date(&\"20200209T143050Z\".to_string()).unwrap()\n\n );\n\n // in practice entries with time which don't contain the timezone inline\n\n // have a separate ical entry: Property{name=DTSTART, params: {TZID: .., value: ..}}\n\n // but for now i'll just assume local time.\n\n assert_eq!(\n\n FixedOffset::east(3600).ymd(2020, 2, 9).and_hms(14, 30, 50),\n\n Ical::parse_ical_date(&\"20200209T143050\".to_string()).unwrap()\n\n );\n\n assert_eq!(\n\n FixedOffset::east(7200).ymd(2014, 3, 31).and_hms(0, 0, 0),\n\n Ical::parse_ical_date(&\"20140331\".to_string()).unwrap()\n\n );\n\n}\n", "file_path": "src/events/ical.rs", "rank": 6, "score": 86075.3458965367 }, { "content": "enum ActivityData {\n\n Done(Vec<Event>),\n\n ReachedEndOfPage(Option<String>), // link to the previous page or None if no previous\n\n}\n\n\n", "file_path": "src/events/redmine.rs", 
"rank": 7, "score": 84140.00272952967 }, { "content": "#[test]\n\nfn it_can_extract_two_short_emails() {\n\n let mut buf = vec![0; BUF_SIZE as usize];\n\n let file = File::open(\"tests/two_short_emails.txt\").unwrap();\n\n let mut reader = BufReader::new(file);\n\n let cur_pos_end = reader.seek(SeekFrom::End(0)).unwrap();\n\n let mut parsing_state = ParsingState {\n\n reader: &mut reader,\n\n bytes_left: cur_pos_end,\n\n };\n\n\n\n let email = Email::read_next_mail(&mut buf, &mut parsing_state)\n\n .unwrap()\n\n .unwrap();\n\n assert_eq!(\"From b\\nbye a\\n\", String::from_utf8(email).unwrap());\n\n assert_eq!(11, parsing_state.bytes_left);\n\n\n\n let email2 = Email::read_next_mail(&mut buf, &mut parsing_state)\n\n .unwrap()\n\n .unwrap();\n\n assert_eq!(\"From a\\nhi b\", String::from_utf8(email2).unwrap());\n\n\n\n let email3 = Email::read_next_mail(&mut buf, &mut parsing_state).unwrap();\n\n assert_eq!(true, email3.is_none());\n\n}\n\n\n", "file_path": "src/events/email.rs", "rank": 8, "score": 84083.30111625946 }, { "content": "#[test]\n\nfn it_parses_multiple_email_date_formats() {\n\n let expected = FixedOffset::east(7200).ymd(2013, 9, 27).and_hms(20, 46, 35);\n\n assert_eq!(\n\n expected,\n\n Email::parse_email_date(\"Sep 27 20:46:35 2013\").unwrap()\n\n );\n\n assert_eq!(\n\n expected,\n\n Email::parse_email_date(\"Fri, 27 Sep 2013 20:46:35 +0200\").unwrap()\n\n );\n\n let expected2 = FixedOffset::east(3600).ymd(2014, 11, 3).and_hms(7, 54, 9);\n\n assert_eq!(\n\n expected2,\n\n Email::parse_email_date(\"Mon Nov 3 07:54:09 2014\").unwrap() // notice the extra space\n\n );\n\n let expected2 = FixedOffset::east(3600).ymd(2014, 11, 3).and_hms(7, 54, 9);\n\n assert_eq!(\n\n expected2,\n\n Email::parse_email_date(\"Mon, 3 Nov 2014 07:54:09 +0100 (CET)\").unwrap()\n\n );\n\n assert_eq!(\n\n expected2,\n\n Email::parse_email_date(\"Mon, 3 Nov 2014 07:54:09 +0100\").unwrap()\n\n );\n\n assert_eq!(\n\n expected,\n\n Email::parse_email_date(\"Fri, 27 Sep 2013 
18:46:35 GMT\").unwrap()\n\n );\n\n}\n", "file_path": "src/events/email.rs", "rank": 9, "score": 84083.30111625946 }, { "content": "#[derive(Debug)]\n\nstruct ProviderError {\n\n pub provider_name: &'static str,\n\n pub config_name: String,\n\n pub err: Box<dyn Error + Send + Sync>,\n\n}\n\n\n\nimpl fmt::Display for ProviderError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n f,\n\n \"{} - {}: {}\",\n\n self.provider_name, self.config_name, self.err\n\n )\n\n }\n\n}\n\n\n\nimpl Error for ProviderError {}\n\n\n\n/// lets us know from which event source the error came\n\nimpl ProviderError {\n", "file_path": "src/events/events.rs", "rank": 10, "score": 83939.29951608818 }, { "content": "#[derive(Debug)]\n\nstruct LocaleInfo {\n\n date_format: &'static str,\n\n today_translation: &'static str,\n\n}\n\n\n\nimpl LocaleInfo {\n\n fn new(date_format: &'static str, today_translation: &'static str) -> LocaleInfo {\n\n LocaleInfo {\n\n date_format,\n\n today_translation,\n\n }\n\n }\n\n}\n\n\n\nimpl Redmine {\n\n fn parse_date(locale_info: &LocaleInfo, date_str: &str) -> Result<Date<Local>> {\n\n log::debug!(\n\n \"parse_date: parsing {}, locale: {:?}\",\n\n date_str,\n\n locale_info\n", "file_path": "src/events/redmine.rs", "rank": 11, "score": 83245.6600656867 }, { "content": "struct ParsingState<'a> {\n\n bytes_left: u64,\n\n reader: &'a mut BufReader<File>,\n\n}\n\n\n\nimpl Email {\n\n // re-reading the buffer from the file for each new email, but i rely on the bufreader too\n\n fn read_next_mail(\n\n buf: &mut Vec<u8>,\n\n parsing_state: &mut ParsingState,\n\n ) -> Result<Option<Vec<u8>>> {\n\n let mut email_contents: Vec<u8> = vec![];\n\n let mut separator_idx = 0;\n\n\n\n loop {\n\n if parsing_state.bytes_left == 0 {\n\n return Ok(None);\n\n }\n\n let cur_buf = Email::read_into_buffer(buf, parsing_state)?;\n\n\n", "file_path": "src/events/email.rs", "rank": 13, "score": 80814.73060931783 }, { "content": "fn main() {\n\n let client = 
reqwest::blocking::ClientBuilder::new()\n\n .timeout(Duration::from_secs(30))\n\n .connect_timeout(Duration::from_secs(30))\n\n .connection_verbose(true)\n\n .build()\n\n .unwrap();\n\n\n\n println!(\"Fetching the redmine source...\");\n\n let zip_bytes = client\n\n .get(\"https://github.com/redmine/redmine/archive/master.zip\")\n\n .send()\n\n .unwrap()\n\n .error_for_status()\n\n .unwrap()\n\n .bytes()\n\n .unwrap();\n\n\n\n println!(\"Unzipping the redmine source...\");\n\n let reader = std::io::Cursor::new(zip_bytes);\n", "file_path": "helpers/redmine_locales/src/main.rs", "rank": 14, "score": 78722.30087983643 }, { "content": "#[test]\n\nfn it_parses_slovenian_dates_correctly() {\n\n let sl = &Redmine::redmine_locales()[\"sl\"];\n\n assert_eq!(\n\n NaiveDate::from_ymd(2020, 3, 23),\n\n Redmine::parse_date(sl, \"23.03.2020\").unwrap().naive_local()\n\n );\n\n}\n\n\n", "file_path": "src/events/redmine.rs", "rank": 15, "score": 76716.8507787556 }, { "content": "#[test]\n\nfn it_parses_iso_times_correctly() {\n\n assert_eq!(\n\n NaiveTime::from_hms(13, 30, 0),\n\n Redmine::parse_time(\"13:30\").unwrap()\n\n );\n\n}\n", "file_path": "src/events/redmine.rs", "rank": 16, "score": 76716.8507787556 }, { "content": "#[test]\n\nfn it_parses_iso_dates_correctly() {\n\n let en_gb = &Redmine::redmine_locales()[\"en-GB\"];\n\n assert_eq!(\n\n NaiveDate::from_ymd(2020, 3, 23),\n\n Redmine::parse_date(en_gb, \"2020-03-23\")\n\n .unwrap()\n\n .naive_local()\n\n );\n\n}\n\n\n", "file_path": "src/events/redmine.rs", "rank": 17, "score": 76716.8507787556 }, { "content": "#[test]\n\nfn it_parses_us_dates_correctly() {\n\n let en_gb = &Redmine::redmine_locales()[\"en\"];\n\n assert_eq!(\n\n NaiveDate::from_ymd(2020, 3, 23),\n\n Redmine::parse_date(en_gb, \"03/23/2020\")\n\n .unwrap()\n\n .naive_local()\n\n );\n\n}\n\n\n", "file_path": "src/events/redmine.rs", "rank": 18, "score": 76716.8507787556 }, { "content": "#[test]\n\nfn it_parses_us_times_correctly() {\n\n assert_eq!(\n\n 
NaiveTime::from_hms(13, 30, 0),\n\n Redmine::parse_time(\"01:30 PM\").unwrap()\n\n );\n\n}\n\n\n", "file_path": "src/events/redmine.rs", "rank": 19, "score": 76716.8507787556 }, { "content": "pub fn left_align_menu(menu: &gtk::ModelButton) {\n\n if let Some(label) = menu\n\n .child()\n\n .and_then(|c| c.dynamic_cast::<gtk::Label>().ok())\n\n {\n\n label.set_xalign(0.0);\n\n label.set_hexpand(true);\n\n }\n\n}\n\n\n\n#[widget]\n\nimpl Widget for WinTitleBar {\n\n fn init_view(&mut self) {\n\n let vbox = gtk::BoxBuilder::new()\n\n .margin(10)\n\n .orientation(gtk::Orientation::Vertical)\n\n .build();\n\n let preferences_btn = gtk::ModelButtonBuilder::new().label(\"Preferences\").build();\n\n left_align_menu(&preferences_btn);\n\n relm::connect!(\n", "file_path": "src/widgets/wintitlebar.rs", "rank": 20, "score": 75146.89018584265 }, { "content": "#[test]\n\nfn it_can_get_events_for_the_cigale_repo() {\n\n let git_cfg_map = vec![\n\n (REPO_FOLDER_KEY, \".\".to_string()),\n\n (COMMIT_AUTHOR_KEY, \"Emmanuel Touzery\".to_string()),\n\n ]\n\n .into_iter()\n\n .collect();\n\n let mut config = Config::default_config();\n\n Git.add_config_values(&mut config, \"test\".to_string(), git_cfg_map);\n\n let expected_fst = Event::new(\n\n \"Git\",\n\n crate::icons::Icon::CODE_BRANCH,\n\n NaiveTime::from_hms(17, 1, 35),\n\n \"include the icons in the binary\".to_string(),\n\n \"include the icons in the binary\\n\".to_string(),\n\n EventBody::Markup(\n\n r#\"<a href=\"https://github.com/emmanueltouzery/cigale/commit//1225b0a0efceb2f9b8862fd1cd03bf5dc6cb54d4\">Open in browser</a>\n\n\n\n<span font-family=\"monospace\">master\n\n\n", "file_path": "src/events/git.rs", "rank": 21, "score": 74988.98451700709 }, { "content": "#[test]\n\nfn it_rejects_a_non_remote_github_url() {\n\n assert_eq!(\n\n None,\n\n Git::get_commit_display_url_github(\"https://mycompany.com/emmanueltouzery/cigale.git\")\n\n .unwrap()\n\n );\n\n}\n\n\n", "file_path": "src/events/git.rs", "rank": 22, "score": 
74988.98451700709 }, { "content": "#[test]\n\nfn it_parses_http_remote_github_url() {\n\n assert_eq!(\n\n Some(\"https://github.com/emmanueltouzery/cigale/commit/\".to_string()),\n\n Git::get_commit_display_url_github(\"https://github.com/emmanueltouzery/cigale.git\")\n\n .unwrap()\n\n );\n\n}\n\n\n", "file_path": "src/events/git.rs", "rank": 23, "score": 74988.98451700709 }, { "content": "#[test]\n\nfn it_parses_ssh_remote_github_url() {\n\n assert_eq!(\n\n Some(\"https://github.com/emmanueltouzery/cigale/commit/\".to_string()),\n\n Git::get_commit_display_url_github(\"[email protected]:emmanueltouzery/cigale.git\").unwrap()\n\n );\n\n}\n\n\n", "file_path": "src/events/git.rs", "rank": 24, "score": 74988.98451700709 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-changed=src/icons.gresource\");\n\n let target_foldername = format!(\"fontawesome-free-{}-desktop\", FONTAWESOME_VERSION);\n\n if !Path::new(&target_foldername).exists() {\n\n fetch_fontawesome_icons(&target_foldername);\n\n }\n\n let status = Command::new(\"glib-compile-resources\")\n\n .arg(\"src/icons.gresource\")\n\n .arg(\"--target=src/icons.bin\")\n\n .spawn()\n\n .expect(\"Failed running glib-compile-resources\")\n\n .wait()\n\n .unwrap();\n\n assert!(status.success());\n\n}\n\n\n", "file_path": "build.rs", "rank": 25, "score": 52633.91088169803 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n let res_bytes = include_bytes!(\"icons.bin\");\n\n let data = glib::Bytes::from(&res_bytes[..]);\n\n let resource = gio::Resource::from_data(&data).unwrap();\n\n gio::resources_register(&resource);\n\n\n\n widgets::win::Win::run(()).unwrap();\n\n}\n", "file_path": "src/main.rs", "rank": 26, "score": 51299.22950044868 }, { "content": "pub trait EventProvider: Sync {\n\n // TODO this could get derived automatically through a procedural macro\n\n fn get_config_fields(&self) -> Vec<(&'static str, ConfigType)>;\n\n\n\n fn get_config_names<'a>(&self, config: &'a Config) -> Vec<&'a 
String>;\n\n\n\n // TODO this could get derived automatically through a procedural macro\n\n fn get_config_values(\n\n &self,\n\n config: &Config,\n\n config_name: &str,\n\n ) -> HashMap<&'static str, String>;\n\n\n\n fn add_config_values(\n\n &self,\n\n config: &mut Config,\n\n config_name: String,\n\n config_values: HashMap<&'static str, String>,\n\n );\n\n\n", "file_path": "src/events/events.rs", "rank": 27, "score": 44827.02517138535 }, { "content": "fn fetch_fontawesome_icons(target_foldername: &str) {\n\n let fontawesome_url = format!(\n\n \"https://registry.npmjs.org/@fortawesome/fontawesome-free/-/fontawesome-free-{}.tgz\",\n\n FONTAWESOME_VERSION\n\n );\n\n let mut resp = reqwest::blocking::get(&fontawesome_url).expect(\"request failed\");\n\n let mut out = File::create(\"fontawesome.tgz\").expect(\"failed to create file\");\n\n std::io::copy(&mut resp, &mut out).expect(\"failed to copy content\");\n\n let mut archive = tar::Archive::new(GzDecoder::new(\n\n File::open(\"fontawesome.tgz\").expect(\"open archive\"),\n\n ));\n\n archive.unpack(\".\").expect(\"Failed extracting\");\n\n fs::rename(\"package\", target_foldername).expect(\"folder rename\");\n\n fs::remove_file(\"fontawesome.tgz\").expect(\"remove tgz\");\n\n}\n", "file_path": "build.rs", "rank": 28, "score": 42641.74798313394 }, { "content": "use super::events::{ConfigType, Event, EventBody, EventProvider, Result};\n\nuse crate::config::Config;\n\nuse crate::icons::*;\n\nuse chrono::prelude::*;\n\nuse core::time::Duration;\n\nuse ical::parser::ical::component::IcalEvent;\n\nuse std::collections::HashMap;\n\n\n\n#[derive(serde_derive::Deserialize, serde_derive::Serialize, Clone, Debug)]\n\npub struct IcalConfig {\n\n pub ical_url: String,\n\n}\n\n\n\nimpl Ical {\n\n fn get_property_value<'a>(event: &'a IcalEvent, name: &str) -> Option<&'a str> {\n\n event\n\n .properties\n\n .iter()\n\n .find(|p| p.name == name)\n\n .and_then(|s| s.value.as_deref())\n", "file_path": "src/events/ical.rs", "rank": 
39, "score": 34865.05615446217 }, { "content": " let mut result = vec![];\n\n for line in reader {\n\n // the ical library's error type doesn't implement std::error::Error conversion\n\n // so it complicates using the '?' operator in our case\n\n match line {\n\n Ok(l) => {\n\n for event in l.events {\n\n Ical::add_event_if_in_range(\n\n &event,\n\n &day_start,\n\n &next_day_start,\n\n &mut result,\n\n );\n\n }\n\n }\n\n Err(_) => {\n\n return Err(Box::new(std::io::Error::new(\n\n std::io::ErrorKind::Other,\n\n \"Ical error\",\n\n )))\n\n }\n\n }\n\n }\n\n Ok(result)\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/events/ical.rs", "rank": 40, "score": 34861.3587854073 }, { "content": " .ok()\n\n })\n\n }\n\n\n\n fn fetch_ical(config_name: &str, ical_url: &str) -> Result<String> {\n\n let r = reqwest::blocking::ClientBuilder::new()\n\n .timeout(Duration::from_secs(30))\n\n .connect_timeout(Duration::from_secs(30))\n\n .build()?\n\n .get(ical_url)\n\n .send()?\n\n .error_for_status()?\n\n .text()?;\n\n Config::write_to_cache(&Ical, config_name, &r)?;\n\n Ok(r)\n\n }\n\n\n\n fn add_event_if_in_range(\n\n event: &IcalEvent,\n\n day_start: &DateTime<Local>,\n", "file_path": "src/events/ical.rs", "rank": 41, "score": 34854.95991329085 }, { "content": " ical_url: config_values.remove(URL_KEY).unwrap(),\n\n },\n\n );\n\n }\n\n\n\n fn get_events(\n\n &self,\n\n config: &Config,\n\n config_name: &str,\n\n day: Date<Local>,\n\n ) -> Result<Vec<Event>> {\n\n let ical_config = &config.ical[config_name];\n\n let day_start = day.and_hms(0, 0, 0);\n\n let next_day_start = day_start + chrono::Duration::days(1);\n\n let ical_text = match Config::get_cached_contents(&Ical, config_name, &next_day_start)? 
{\n\n Some(t) => Ok(t),\n\n None => Ical::fetch_ical(config_name, &ical_config.ical_url),\n\n }?;\n\n let bytes = ical_text.as_bytes();\n\n let reader = ical::IcalParser::new(std::io::BufReader::new(bytes));\n", "file_path": "src/events/ical.rs", "rank": 42, "score": 34853.173651567085 }, { "content": " &self,\n\n _cur_values: &HashMap<&'static str, String>,\n\n _field_name: &'static str,\n\n ) -> Result<Vec<String>> {\n\n Ok(Vec::new())\n\n }\n\n\n\n fn remove_config(&self, config: &mut Config, config_name: String) {\n\n config.ical.remove(&config_name);\n\n }\n\n\n\n fn add_config_values(\n\n &self,\n\n config: &mut Config,\n\n config_name: String,\n\n mut config_values: HashMap<&'static str, String>,\n\n ) {\n\n config.ical.insert(\n\n config_name,\n\n IcalConfig {\n", "file_path": "src/events/ical.rs", "rank": 43, "score": 34852.59454488835 }, { "content": " .cloned()\n\n .unwrap_or_else(|| \"\".to_string()),\n\n ),\n\n extra_info,\n\n )\n\n }\n\n}\n\n\n\nconst URL_KEY: &str = \"Ical URL\";\n\n\n\npub struct Ical;\n\n\n\nimpl EventProvider for Ical {\n\n fn get_config_fields(&self) -> Vec<(&'static str, ConfigType)> {\n\n vec![(URL_KEY, ConfigType::Text(\"\"))]\n\n }\n\n\n\n fn name(&self) -> &'static str {\n\n \"Ical\"\n\n }\n", "file_path": "src/events/ical.rs", "rank": 44, "score": 34852.45988336606 }, { "content": " next_day_start: &DateTime<Local>,\n\n result: &mut Vec<Event>,\n\n ) {\n\n let start = Ical::get_property_value(event, \"DTSTART\");\n\n let end = Ical::get_property_value(event, \"DTEND\");\n\n let summary = Ical::get_property_value_any(event, &[\"SUMMARY\", \"DESCRIPTION\", \"LOCATION\"]);\n\n match (\n\n start.and_then(Ical::parse_ical_date),\n\n end.and_then(Ical::parse_ical_date),\n\n summary,\n\n ) {\n\n (Some(st), end_dt, Some(summ)) => {\n\n if st >= *day_start && st < *next_day_start {\n\n result.push(Ical::build_event(summ, st, end_dt));\n\n }\n\n }\n\n _ => println!(\"Skipping event without start or summary: {:?}\", event),\n\n }\n\n 
}\n\n\n", "file_path": "src/events/ical.rs", "rank": 45, "score": 34847.732964575596 }, { "content": "\n\n fn default_icon(&self) -> Icon {\n\n Icon::CALENDAR_ALT\n\n }\n\n\n\n fn get_config_names<'a>(&self, config: &'a Config) -> Vec<&'a String> {\n\n config.ical.keys().collect()\n\n }\n\n\n\n fn get_config_values(\n\n &self,\n\n config: &Config,\n\n config_name: &str,\n\n ) -> HashMap<&'static str, String> {\n\n vec![(URL_KEY, config.ical[config_name].ical_url.to_string())]\n\n .into_iter()\n\n .collect()\n\n }\n\n\n\n fn field_values(\n", "file_path": "src/events/ical.rs", "rank": 46, "score": 34843.65081243222 }, { "content": " }\n\n\n\n fn get_property_value_any<'a>(event: &'a IcalEvent, names: &[&str]) -> Option<&'a str> {\n\n names\n\n .iter()\n\n .find(|n| Ical::get_property_value(event, n).is_some())\n\n .and_then(|n| Ical::get_property_value(event, n))\n\n }\n\n\n\n fn parse_ical_date(ical_date_str: &str) -> Option<DateTime<Local>> {\n\n Utc.datetime_from_str(ical_date_str, \"%Y%m%dT%H%M%SZ\")\n\n .ok()\n\n .map(DateTime::from)\n\n .or_else(|| Local.datetime_from_str(ical_date_str, \"%Y%m%dT%H%M%S\").ok())\n\n .or_else(|| {\n\n // pure laziness from me here. 
that chrono function wants a time component,\n\n // i give it a time component.\n\n // Otherwise not the same as earlier: we assume local time not UTC here.\n\n Local\n\n .datetime_from_str(format!(\"{}T00:00:00\", ical_date_str).as_str(), \"%Y%m%dT%T\")\n", "file_path": "src/events/ical.rs", "rank": 47, "score": 34842.50225783486 }, { "content": " fn build_event(summ: &str, st: DateTime<Local>, end_dt: Option<DateTime<Local>>) -> Event {\n\n let summary = summ.replace(\"\\\\,\", \",\");\n\n let extra_info = end_dt.map(|e| {\n\n let duration = e - st;\n\n format!(\n\n \"End: {}; duration: {}:{:02}\",\n\n e.format(\"%H:%M\"),\n\n duration.num_hours(),\n\n duration.num_minutes() % 60\n\n )\n\n });\n\n Event::new(\n\n \"Ical\",\n\n Icon::CALENDAR_ALT,\n\n st.time(),\n\n summary.to_string(),\n\n summary,\n\n EventBody::PlainText(\n\n extra_info\n\n .as_ref()\n", "file_path": "src/events/ical.rs", "rank": 48, "score": 34837.65023184811 }, { "content": "// as far I know the official stackexchange API does not expose user votes\n\n// https://stackapps.com/questions/4725/list-of-votes-by-authenticated-users\n\n// https://meta.stackexchange.com/questions/288217/how-could-i-get-my-own-vote-activity-from-api\n\n// so I have to scrap the website\n\n// my understand is that scraping is acceptable if there is no alternative:\n\n// https://meta.stackexchange.com/a/446/218504\n\nuse super::events::{ConfigType, Event, EventBody, EventProvider, Result, WordWrapMode};\n\nuse crate::config::Config;\n\nuse crate::icons::*;\n\nuse chrono::prelude::*;\n\nuse core::time::Duration;\n\nuse std::collections::HashMap;\n\n\n\n#[derive(serde_derive::Deserialize, serde_derive::Serialize, Clone, Debug)]\n\npub struct StackExchangeConfig {\n\n pub exchange_site_url: String,\n\n pub username: String,\n\n pub password: String,\n\n}\n\n\n", "file_path": "src/events/stackexchange.rs", "rank": 49, "score": 34738.59636200998 }, { "content": " url_path: &str,\n\n ) -> Result<String> {\n\n 
log::debug!(\n\n \"getting {}\",\n\n &format!(\"{}{}\", stackexchange_config.exchange_site_url, url_path)\n\n );\n\n let resp = client\n\n .get(&format!(\n\n \"{}{}\",\n\n stackexchange_config.exchange_site_url, url_path\n\n ))\n\n .send()?\n\n .error_for_status()?;\n\n\n\n let html = resp.text()?;\n\n log::debug!(\n\n \"{}{}: got back html {}\",\n\n stackexchange_config.exchange_site_url,\n\n url_path,\n\n html\n", "file_path": "src/events/stackexchange.rs", "rank": 50, "score": 34734.572764619625 }, { "content": "pub struct StackExchange;\n\nconst EXCHANGE_SITE_URL: &str = \"Stack Exchange site url\";\n\nconst USERNAME_KEY: &str = \"username\";\n\nconst PASSWORD_KEY: &str = \"password\";\n\n\n\nimpl StackExchange {\n\n fn url_escape(msg: &str) -> String {\n\n msg.replace(\"/\", \"%2f\").replace(\":\", \"%3a\")\n\n }\n\n\n\n fn login(\n\n client: &reqwest::blocking::Client,\n\n stackexchange_config: &StackExchangeConfig,\n\n ) -> Result<String> {\n\n let home_body = Self::html_get(\n\n client,\n\n stackexchange_config,\n\n &format!(\n\n \"/users/login?ssrc=head&returnurl={}\",\n\n Self::url_escape(&stackexchange_config.exchange_site_url)\n", "file_path": "src/events/stackexchange.rs", "rank": 51, "score": 34733.94932766827 }, { "content": " );\n\n }\n\n\n\n fn remove_config(&self, config: &mut Config, config_name: String) {\n\n config.stackexchange.remove(&config_name);\n\n }\n\n\n\n fn get_events(\n\n &self,\n\n config: &Config,\n\n config_name: &str,\n\n day: Date<Local>,\n\n ) -> Result<Vec<Event>> {\n\n log::debug!(\"stackexchange::get_events\");\n\n let stackexchange_config = &config.stackexchange[config_name];\n\n let day_start = day.and_hms(0, 0, 0);\n\n let next_day_start = day_start + chrono::Duration::days(1);\n\n\n\n let votes_page_html =\n\n match Config::get_cached_contents(&StackExchange, config_name, &next_day_start)? 
{\n", "file_path": "src/events/stackexchange.rs", "rank": 52, "score": 34731.47479088483 }, { "content": " (\"ssrc\", \"head\"),\n\n (\"fkey\", fkey),\n\n (\"email\", &stackexchange_config.username),\n\n (\"password\", &stackexchange_config.password),\n\n (\"oauth_version\", \"\"),\n\n (\"oauth_server\", \"\"),\n\n ])\n\n .send()?\n\n .error_for_status()?;\n\n let html = resp.text()?;\n\n if html.contains(\"Human verification\") && html.contains(\"Are you a human being?\") {\n\n Err(\"Login rejected: human verification failed\".into())\n\n } else {\n\n Ok(html)\n\n }\n\n }\n\n\n\n fn html_get(\n\n client: &reqwest::blocking::Client,\n\n stackexchange_config: &StackExchangeConfig,\n", "file_path": "src/events/stackexchange.rs", "rank": 53, "score": 34729.3109421218 }, { "content": " Some(t) => Ok(t),\n\n None => Self::get_votes_page_html(config_name, stackexchange_config),\n\n }?;\n\n\n\n Self::get_votes(\n\n &votes_page_html,\n\n stackexchange_config,\n\n day_start,\n\n next_day_start,\n\n )\n\n }\n\n}\n", "file_path": "src/events/stackexchange.rs", "rank": 54, "score": 34728.52373075356 }, { "content": " ),\n\n )?;\n\n let doc = scraper::Html::parse_document(&home_body);\n\n let sel_fkey = scraper::Selector::parse(\"input[name=fkey]\").unwrap();\n\n let fkey_node = doc\n\n .select(&sel_fkey)\n\n .next()\n\n .ok_or(\"login: can't find fkey?\")?;\n\n let fkey = fkey_node\n\n .value()\n\n .attr(\"value\")\n\n .ok_or(\"login: can't find fkey value?\")?;\n\n\n\n let resp = client\n\n .post(&format!(\n\n \"{}/users/login?ssrc=head&returnurl={}\",\n\n stackexchange_config.exchange_site_url,\n\n Self::url_escape(&stackexchange_config.exchange_site_url)\n\n ))\n\n .form(&[\n", "file_path": "src/events/stackexchange.rs", "rank": 55, "score": 34728.08959259224 }, { "content": " &format!(\"{}?tab=votes\", userpage_link),\n\n )?;\n\n\n\n Config::write_to_cache(&StackExchange, config_name, &votes_page_html)?;\n\n Ok(votes_page_html)\n\n }\n\n\n\n fn get_votes(\n\n 
votes_page_html: &str,\n\n stackexchange_config: &StackExchangeConfig,\n\n day_start: DateTime<Local>,\n\n next_day_start: DateTime<Local>,\n\n ) -> Result<Vec<Event>> {\n\n let doc = scraper::Html::parse_document(votes_page_html);\n\n let sel_vote_urls = scraper::Selector::parse(\n\n \"table.history-table a.answer-hyperlink,table.history-table a.question-hyperlink\",\n\n )\n\n .unwrap();\n\n let sel_vote_dates = scraper::Selector::parse(\"table.history-table div.date\").unwrap();\n\n let mut vote_urls = doc.select(&sel_vote_urls);\n", "file_path": "src/events/stackexchange.rs", "rank": 56, "score": 34727.21223186415 }, { "content": " stackexchange_config: &StackExchangeConfig,\n\n ) -> Result<String> {\n\n let client = reqwest::blocking::ClientBuilder::new()\n\n .cookie_store(true)\n\n .user_agent(format!(\n\n \"Cigale/{} (https://github.com/emmanueltouzery/cigale)\",\n\n env!(\"CARGO_PKG_VERSION\")\n\n ))\n\n .timeout(Duration::from_secs(30))\n\n .connect_timeout(Duration::from_secs(30))\n\n .connection_verbose(true)\n\n .build()?;\n\n\n\n let html = Self::login(&client, stackexchange_config)?;\n\n\n\n let userpage_link = Self::get_user_page_url(&html)?;\n\n\n\n let votes_page_html = Self::html_get(\n\n &client,\n\n stackexchange_config,\n", "file_path": "src/events/stackexchange.rs", "rank": 57, "score": 34727.01360661926 }, { "content": " config.stackexchange.keys().collect()\n\n }\n\n\n\n fn get_config_fields(&self) -> Vec<(&'static str, ConfigType)> {\n\n vec![\n\n (\n\n EXCHANGE_SITE_URL,\n\n ConfigType::Text(\"https://stackoverflow.com\"),\n\n ),\n\n (USERNAME_KEY, ConfigType::Text(\"\")),\n\n (PASSWORD_KEY, ConfigType::Password),\n\n ]\n\n }\n\n\n\n fn field_values(\n\n &self,\n\n _cur_values: &HashMap<&'static str, String>,\n\n _field_name: &'static str,\n\n ) -> Result<Vec<String>> {\n\n Ok(Vec::new())\n", "file_path": "src/events/stackexchange.rs", "rank": 58, "score": 34725.16602879287 }, { "content": " 
config.stackexchange[config_name].password.to_string(),\n\n ),\n\n ]\n\n .into_iter()\n\n .collect()\n\n }\n\n\n\n fn add_config_values(\n\n &self,\n\n config: &mut Config,\n\n config_name: String,\n\n mut config_values: HashMap<&'static str, String>,\n\n ) {\n\n config.stackexchange.insert(\n\n config_name,\n\n StackExchangeConfig {\n\n exchange_site_url: config_values.remove(EXCHANGE_SITE_URL).unwrap(),\n\n username: config_values.remove(USERNAME_KEY).unwrap(),\n\n password: config_values.remove(PASSWORD_KEY).unwrap(),\n\n },\n", "file_path": "src/events/stackexchange.rs", "rank": 59, "score": 34724.46198589123 }, { "content": " if let (Some(date), Some((title, Some(link)))) = (date, title_link) {\n\n Some((date, title, link))\n\n } else {\n\n None\n\n }\n\n })\n\n .filter(|(date, _, _)| date >= &day_start && date < &next_day_start)\n\n .map(|(date, title, link)| {\n\n Event::new(\n\n \"S.Exch\",\n\n Icon::THUMBS_UP,\n\n date.time(),\n\n title.clone(),\n\n format!(\"Vote: {}\", title),\n\n EventBody::Markup(\n\n format!(\n\n \"<a href=\\\"{}{}\\\">Open in the browser</a>\\n\\nStack Exchange vote: {}\",\n\n stackexchange_config.exchange_site_url,\n\n link,\n\n stackexchange_config.exchange_site_url\n", "file_path": "src/events/stackexchange.rs", "rank": 60, "score": 34722.25349360686 }, { "content": " );\n\n Ok(html)\n\n }\n\n\n\n fn get_user_page_url(html: &str) -> Result<String> {\n\n let doc = scraper::Html::parse_document(html);\n\n let sel_userpage = scraper::Selector::parse(\"a.my-profile.js-gps-track\").unwrap();\n\n let userpage_node = doc\n\n .select(&sel_userpage)\n\n .next()\n\n .ok_or(\"Can't find the user page link\")?;\n\n userpage_node\n\n .value()\n\n .attr(\"href\")\n\n .map(|s| s.to_string())\n\n .ok_or_else(|| \"Can't find the link to the user page\".into())\n\n }\n\n\n\n fn get_votes_page_html(\n\n config_name: &str,\n", "file_path": "src/events/stackexchange.rs", "rank": 61, "score": 34721.67700836228 }, { "content": " }\n\n\n\n fn 
get_config_values(\n\n &self,\n\n config: &Config,\n\n config_name: &str,\n\n ) -> HashMap<&'static str, String> {\n\n vec![\n\n (\n\n EXCHANGE_SITE_URL,\n\n config.stackexchange[config_name]\n\n .exchange_site_url\n\n .to_string(),\n\n ),\n\n (\n\n USERNAME_KEY,\n\n config.stackexchange[config_name].username.to_string(),\n\n ),\n\n (\n\n PASSWORD_KEY,\n", "file_path": "src/events/stackexchange.rs", "rank": 62, "score": 34721.40350926799 }, { "content": " ),\n\n WordWrapMode::WordWrap,\n\n ),\n\n Some(\"Vote\".to_string()),\n\n )\n\n })\n\n .collect())\n\n }\n\n}\n\n\n\nimpl EventProvider for StackExchange {\n\n fn name(&self) -> &'static str {\n\n \"StackExchange\"\n\n }\n\n\n\n fn default_icon(&self) -> Icon {\n\n Icon::THUMBS_UP\n\n }\n\n\n\n fn get_config_names<'a>(&self, config: &'a Config) -> Vec<&'a String> {\n", "file_path": "src/events/stackexchange.rs", "rank": 63, "score": 34720.13675804525 }, { "content": " let sel_date_brick = scraper::Selector::parse(\"div.date_brick\").unwrap();\n\n Ok(doc\n\n .select(&sel_vote_dates)\n\n .filter_map(|date_node| {\n\n let title_link = vote_urls\n\n .next()\n\n .map(|n| (n.inner_html(), n.value().attr(\"href\")));\n\n let date_str = date_node.value().attr(\"title\").or_else(|| {\n\n date_node\n\n .select(&sel_date_brick)\n\n .next()\n\n .and_then(|n| n.value().attr(\"title\"))\n\n });\n\n let date: Option<DateTime<Local>> = date_str\n\n .and_then(|d_str| {\n\n DateTime::parse_from_str(&d_str.replace(\"Z\", \"+00\"), \"%Y-%m-%d %H:%M:%S%#z\")\n\n .ok()\n\n })\n\n .map(DateTime::from);\n\n log::debug!(\"{:?} - {:?}\", date_str, date);\n", "file_path": "src/events/stackexchange.rs", "rank": 64, "score": 34718.450316105576 }, { "content": "use super::events::{ConfigType, Event, EventBody, EventProvider, Result};\n\nuse crate::config::Config;\n\nuse crate::icons::*;\n\nuse chrono::prelude::*;\n\nuse std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::io::*;\n\n\n\nconst BUF_SIZE: u64 = 4096;\n\n\n\n// let mut 
separator_bytes = \"\\nFrom \".to_string().into_bytes();\n\n// separator_bytes.reverse();\n\n// could use lazy_static! but a dependency for that...\n\nconst SEPARATOR_BYTES: [u8; 6] = [b' ', b'm', b'o', b'r', b'F', b'\\n'];\n\n\n\n#[derive(serde_derive::Deserialize, serde_derive::Serialize, Clone, Debug)]\n\npub struct EmailConfig {\n\n pub mbox_file_path: String, // Path\n\n}\n\n\n", "file_path": "src/events/email.rs", "rank": 65, "score": 34650.17347384831 }, { "content": " Some(d) => result.push(Email::email_to_event(&email_contents, &d)?),\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct Email;\n\n\n\nconst MBOX_FILE_PATH_KEY: &str = \"Mbox file path\";\n\n\n\nimpl EventProvider for Email {\n\n fn get_config_fields(&self) -> Vec<(&'static str, ConfigType)> {\n\n vec![(MBOX_FILE_PATH_KEY, ConfigType::File)]\n\n }\n\n\n\n fn name(&self) -> &'static str {\n\n \"Email\"\n\n }\n", "file_path": "src/events/email.rs", "rank": 66, "score": 34642.36670946288 }, { "content": " config.email.insert(\n\n config_name,\n\n EmailConfig {\n\n mbox_file_path: config_values.remove(MBOX_FILE_PATH_KEY).unwrap(),\n\n },\n\n );\n\n }\n\n\n\n fn get_events(\n\n &self,\n\n config: &Config,\n\n config_name: &str,\n\n day: Date<Local>,\n\n ) -> Result<Vec<Event>> {\n\n let email_config = &config.email[config_name];\n\n let day_start = day.and_hms(0, 0, 0);\n\n let next_day_start = day_start + chrono::Duration::days(1);\n\n let mut buf = vec![0; BUF_SIZE as usize];\n\n let file = File::open(&email_config.mbox_file_path)?;\n\n // i \"double buffer\". 
probably OK.\n", "file_path": "src/events/email.rs", "rank": 67, "score": 34637.7438785366 }, { "content": " Self::find_message_body(p, email_date)?\n\n }\n\n Some(p) => p.get_body()?,\n\n None => {\n\n return Err(\n\n format!(\"Email of {}: can't find a text/plain part\", email_date).into(),\n\n )\n\n }\n\n }\n\n } else {\n\n email_contents.get_body()?\n\n };\n\n Ok(r)\n\n }\n\n\n\n fn email_to_event(\n\n email_contents: &mailparse::ParsedMail,\n\n email_date: &DateTime<Local>,\n\n ) -> Result<Event> {\n\n let message_body = Self::find_message_body(email_contents, email_date)?;\n", "file_path": "src/events/email.rs", "rank": 68, "score": 34631.80784172702 }, { "content": " fn read_emails_until_day_start(\n\n buf: &mut Vec<u8>,\n\n day_start: &DateTime<Local>,\n\n parsing_state: &mut ParsingState,\n\n ) -> Result<Vec<Event>> {\n\n // now read the emails i'm interested in.\n\n // i'll read one-too-many email bodies (and I'll read\n\n // a header for the second time right now) but no biggie\n\n let mut result = vec![];\n\n loop {\n\n // the nest match doesn't look too great to my haskeller's eyes,\n\n // but i tried to carry the value through options,\n\n // as is done in find_first_mail_sent_before(), and it looked worse.\n\n match Email::read_next_mail(buf, parsing_state)? 
{\n\n None => return Ok(result),\n\n Some(email_bytes) => {\n\n let email_contents = mailparse::parse_mail(&email_bytes)?;\n\n let email_date = Email::parse_email_headers_date(&email_contents.headers);\n\n match email_date.filter(|d| d >= day_start) {\n\n None => return Ok(result),\n", "file_path": "src/events/email.rs", "rank": 69, "score": 34631.44483238683 }, { "content": " }\n\n\n\n fn field_values(\n\n &self,\n\n _cur_values: &HashMap<&'static str, String>,\n\n _field_name: &'static str,\n\n ) -> Result<Vec<String>> {\n\n Ok(Vec::new())\n\n }\n\n\n\n fn remove_config(&self, config: &mut Config, config_name: String) {\n\n config.email.remove(&config_name);\n\n }\n\n\n\n fn add_config_values(\n\n &self,\n\n config: &mut Config,\n\n config_name: String,\n\n mut config_values: HashMap<&'static str, String>,\n\n ) {\n", "file_path": "src/events/email.rs", "rank": 70, "score": 34630.348090030886 }, { "content": "\n\n fn default_icon(&self) -> Icon {\n\n Icon::ENVELOPE\n\n }\n\n\n\n fn get_config_names<'a>(&self, config: &'a Config) -> Vec<&'a String> {\n\n config.email.keys().collect()\n\n }\n\n\n\n fn get_config_values(\n\n &self,\n\n config: &Config,\n\n config_name: &str,\n\n ) -> HashMap<&'static str, String> {\n\n vec![(\n\n MBOX_FILE_PATH_KEY,\n\n config.email[config_name].mbox_file_path.to_string(),\n\n )]\n\n .into_iter()\n\n .collect()\n", "file_path": "src/events/email.rs", "rank": 71, "score": 34629.58394192102 }, { "content": "\n\n // skip emails which are newer than the date i'm interested in.\n\n // remember we're reading from the end.\n\n // it's ok to just read headers for now (I just want the date)\n\n fn find_first_mail_sent_before(\n\n buf: &mut Vec<u8>,\n\n parsing_state: &mut ParsingState,\n\n next_day_start: &DateTime<Local>,\n\n ) -> Result<Option<(Vec<u8>, DateTime<Local>)>> {\n\n loop {\n\n let email_bytes = Email::read_next_mail(buf, parsing_state)?;\n\n let email_headers = email_bytes\n\n .as_ref()\n\n .map(|bytes| 
mailparse::parse_headers(bytes))\n\n .transpose()?;\n\n let email_date = email_headers.and_then(|h| Email::parse_email_headers_date(&h.0));\n\n match email_date {\n\n None => {\n\n return Ok(None); // no more emails\n\n }\n", "file_path": "src/events/email.rs", "rank": 72, "score": 34629.28404352563 }, { "content": " let event_body = Email::get_header_val(&email_contents.headers, \"To\")\n\n .map(|t| format!(\"To: {}\\n\", t))\n\n .unwrap_or_else(|| \"\".to_string())\n\n + &Email::get_header_val(&email_contents.headers, \"Cc\")\n\n .map(|c| format!(\"Cc: {}\\n\\n\", c))\n\n .unwrap_or_else(|| \"\".to_string())\n\n + &message_body;\n\n let email_subject = Email::get_header_val(&email_contents.headers, \"Subject\")\n\n .unwrap_or_else(|| \"-\".to_string());\n\n Ok(Event::new(\n\n \"Email\",\n\n Icon::ENVELOPE,\n\n email_date.time(),\n\n email_subject.clone(),\n\n email_subject,\n\n EventBody::PlainText(event_body),\n\n Email::get_header_val(&email_contents.headers, \"To\"),\n\n ))\n\n }\n\n\n", "file_path": "src/events/email.rs", "rank": 73, "score": 34627.53215640334 }, { "content": " email_contents.reverse();\n\n parsing_state.bytes_left -= (i + 1) as u64;\n\n parsing_state\n\n .reader\n\n .seek(SeekFrom::Start(parsing_state.bytes_left))?;\n\n return Ok(Some(email_contents));\n\n }\n\n if byte_matches {\n\n separator_idx += 1;\n\n } else {\n\n separator_idx = 0;\n\n }\n\n }\n\n email_contents.extend(cur_buf.iter());\n\n parsing_state.bytes_left -= cur_buf.len() as u64;\n\n }\n\n }\n\n\n\n fn read_into_buffer<'a>(\n\n buf: &'a mut Vec<u8>,\n", "file_path": "src/events/email.rs", "rank": 74, "score": 34627.42870095417 }, { "content": " Some(date) if date < *next_day_start => {\n\n // first date before my end date\n\n return Ok(Some((email_bytes.unwrap(), date)));\n\n }\n\n Some(_) => {} // email, but after my end date\n\n }\n\n }\n\n }\n\n\n\n fn find_message_body(\n\n email_contents: &mailparse::ParsedMail,\n\n email_date: &DateTime<Local>,\n\n ) -> Result<String> 
{\n\n let r = if email_contents.subparts.len() > 1 {\n\n let part = email_contents.subparts.iter().find(|p| {\n\n p.ctype.mimetype.contains(\"text/plain\")\n\n || p.ctype.mimetype.contains(\"multipart/alternative\")\n\n });\n\n match part {\n\n Some(p) if p.ctype.mimetype.contains(\"multipart/alternative\") => {\n", "file_path": "src/events/email.rs", "rank": 75, "score": 34627.4256381309 }, { "content": " let mut reader = BufReader::new(file);\n\n let cur_pos_end = reader.seek(SeekFrom::End(0))?;\n\n let mut parsing_state = ParsingState {\n\n reader: &mut reader,\n\n bytes_left: cur_pos_end,\n\n };\n\n // we go from the end. so we first search for an email sent\n\n // _before_ the end date we're interested in.\n\n let first_mail =\n\n Email::find_first_mail_sent_before(&mut buf, &mut parsing_state, &next_day_start)?;\n\n if let Some((email_bytes, email_date)) = first_mail {\n\n if email_date < day_start {\n\n // no emails match\n\n return Ok(vec![]);\n\n }\n\n let email_contents = mailparse::parse_mail(&email_bytes)?;\n\n // read until the first email sent before\n\n // the start date we're interested in.\n\n let mut emails =\n\n Email::read_emails_until_day_start(&mut buf, &day_start, &mut parsing_state)?;\n", "file_path": "src/events/email.rs", "rank": 76, "score": 34627.16265106707 }, { "content": " // add the first email now (append is faster than prepend, and sorting is done later)\n\n emails.push(Email::email_to_event(&email_contents, &email_date)?);\n\n Ok(emails)\n\n } else {\n\n // no emails match\n\n Ok(vec![])\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/events/email.rs", "rank": 77, "score": 34626.13096366636 }, { "content": " for i in 0..cur_buf.len() {\n\n let cur = cur_buf[i];\n\n let byte_matches = cur == SEPARATOR_BYTES[separator_idx];\n\n let mut matches = false;\n\n if byte_matches && separator_idx == SEPARATOR_BYTES.len() - 1 {\n\n // matching in the middle of the file.\n\n // not interested in the extra \\n so I take only [0..i]\n\n matches = 
true;\n\n email_contents.extend(cur_buf[0..i].iter());\n\n } else if separator_idx == SEPARATOR_BYTES.len() - 2\n\n && parsing_state.bytes_left as usize - (i + 1) == 0\n\n {\n\n // hit the beginning of the file (bytes_left - (i+1) == 0)\n\n // => we don't require the leading \\n from the separator bytes\n\n // do collect the current letter too [0..(i+1)]\n\n matches = true;\n\n email_contents.extend(cur_buf[0..=i].iter());\n\n }\n\n if matches {\n\n // found the marker for the beginning of the email\n", "file_path": "src/events/email.rs", "rank": 78, "score": 34623.16775006226 }, { "content": " parsing_state: &mut ParsingState,\n\n ) -> Result<&'a [u8]> {\n\n let cur_buf = if parsing_state.bytes_left as usize > buf.len() {\n\n &mut buf[0..] // can fill in the whole buffer\n\n } else {\n\n &mut buf[0..parsing_state.bytes_left as usize] // less than BUF_SIZE left to read\n\n };\n\n parsing_state\n\n .reader\n\n .seek(SeekFrom::Current(-(cur_buf.len() as i64)))?;\n\n parsing_state.reader.read_exact(cur_buf)?;\n\n // reading moved us back after the buffer => get back where we were\n\n parsing_state\n\n .reader\n\n .seek(SeekFrom::Current(-(cur_buf.len() as i64)))?;\n\n cur_buf.reverse(); // we'll read from end to beginning\n\n Ok(cur_buf)\n\n }\n\n\n\n fn get_header_val(headers: &[mailparse::MailHeader], header_name: &str) -> Option<String> {\n", "file_path": "src/events/email.rs", "rank": 79, "score": 34621.44856228226 }, { "content": " &dt_str[..(dt_str.len() - 6)]\n\n } else {\n\n dt_str\n\n }\n\n }\n\n\n\n fn parse_email_date(dt_str: &str) -> Option<DateTime<Local>> {\n\n DateTime::parse_from_rfc2822(dt_str)\n\n .ok()\n\n .or_else(|| {\n\n DateTime::parse_from_str(\n\n Email::drop_string_tz_if_present(dt_str),\n\n \"%a, %-d %b %Y %T %z\",\n\n )\n\n .ok()\n\n })\n\n .map(DateTime::from)\n\n .or_else(|| Local.datetime_from_str(dt_str, \"%b %d %T %Y\").ok())\n\n .or_else(|| Local.datetime_from_str(dt_str, \"%a %b %e %T %Y\").ok())\n\n }\n", "file_path": 
"src/events/email.rs", "rank": 80, "score": 34621.421089680865 }, { "content": " headers\n\n .iter()\n\n // TODO change to Result::contains when it stabilizes\n\n .find(|h| h.get_key() == header_name)\n\n .map(|h| h.get_value())\n\n }\n\n\n\n fn parse_email_headers_date(headers: &[mailparse::MailHeader]) -> Option<DateTime<Local>> {\n\n Email::get_header_val(headers, \"Date\").and_then(|d_str| Email::parse_email_date(&d_str))\n\n }\n\n\n\n // some date strings end with \" (CET)\" timezone specifiers, but rust\n\n // format strings can't parse that:\n\n // %Z _Formatting only_: Local time zone name.\n\n // often we don't need them, so drop them.\n\n // this function is dumb, will dump the final 6 bytes if the\n\n // string is long enough. don't want to add a regex lib\n\n // dependency, don't feel like doing it more precisely.\n\n fn drop_string_tz_if_present(dt_str: &str) -> &str {\n\n if dt_str.len() > 6 {\n", "file_path": "src/events/email.rs", "rank": 81, "score": 34619.90043338444 }, { "content": "use super::events::{ConfigType, Event, EventBody, EventProvider, Result, WordWrapMode};\n\nuse crate::config::Config;\n\nuse crate::icons::*;\n\nuse chrono::prelude::*;\n\nuse git2::{Commit, Repository};\n\nuse regex::Regex;\n\nuse std::collections::{HashMap, HashSet};\n\n\n\n// git2 revwalk\n\n// https://github.com/rust-lang/git2-rs/blob/master/examples/log.rs\n\n\n\n#[derive(serde_derive::Deserialize, serde_derive::Serialize, Clone, Debug)]\n\npub struct GitConfig {\n\n pub repo_folder: String, // Path\n\n pub commit_author: String,\n\n}\n\n\n\nimpl Git {\n\n fn git2_time_to_datetime(time: git2::Time) -> DateTime<Local> {\n\n Utc.timestamp(time.seconds(), 0).with_timezone(&Local)\n", "file_path": "src/events/git.rs", "rank": 82, "score": 34544.65887853213 }, { "content": " fn get_events(\n\n &self,\n\n config: &Config,\n\n config_name: &str,\n\n day: Date<Local>,\n\n ) -> Result<Vec<Event>> {\n\n let git_config = &config.git[config_name];\n\n let day_start = 
day.and_hms(0, 0, 0);\n\n let next_day_start = day_start + chrono::Duration::days(1);\n\n let repo = Repository::open(&git_config.repo_folder)?;\n\n let mut all_commits = HashMap::new();\n\n let commit_display_url = Self::get_commit_display_url(&repo, config)?;\n\n log::info!(\"gitlab commit display url: {:?}\", commit_display_url);\n\n for branch in repo\n\n .branches(Some(git2::BranchType::Local))?\n\n .filter_map(|b| b.ok())\n\n {\n\n if let Some(branch_oid) = branch.0.get().target() {\n\n let branch_name = branch.0.name().ok().flatten().map(|s| s.to_string());\n\n let branch_head = repo.find_commit(branch_oid)?;\n", "file_path": "src/events/git.rs", "rank": 83, "score": 34534.08717643698 }, { "content": "\n\n fn get_commit_extra_info<'a>(diff: &git2::Diff<'a>) -> Option<String> {\n\n // not done here. i want to get the list of files and copy the\n\n // getcommitExtraInfo algo from the cigale haskell version.\n\n let mut files_touched = vec![];\n\n let mut file_cb = |diff_delta: git2::DiffDelta<'_>, _count| {\n\n if let Some(path) = diff_delta.new_file().path() {\n\n files_touched.push(path.to_owned());\n\n }\n\n if let Some(path) = diff_delta.old_file().path() {\n\n files_touched.push(path.to_owned());\n\n }\n\n true\n\n };\n\n diff.foreach(&mut file_cb, None, None, None).ok()?;\n\n Some(Git::get_files_root(&files_touched))\n\n }\n\n\n\n // common prefix to all the files\n\n fn get_files_root(files: &[std::path::PathBuf]) -> String {\n", "file_path": "src/events/git.rs", "rank": 84, "score": 34534.040958147125 }, { "content": " common_prefix.join(\"/\")\n\n }\n\n\n\n // collaborate with the gitlab plugin... 
if this repo matches a configured\n\n // gitlab event source, then we can build a URL to open the commit in the\n\n // browser in the gitlab GUI.\n\n fn get_commit_display_url(repo: &Repository, config: &Config) -> Result<Option<String>> {\n\n let origin_url = match repo\n\n .find_remote(\"origin\")\n\n .ok()\n\n .and_then(|r| r.url().map(|url| url.to_string()))\n\n {\n\n Some(v) => v,\n\n None => return Ok(None),\n\n };\n\n\n\n let url = Self::get_commit_display_url_gitlab(&origin_url, config)?.or_else(|| {\n\n Self::get_commit_display_url_github(&origin_url)\n\n .ok()\n\n .flatten()\n", "file_path": "src/events/git.rs", "rank": 85, "score": 34532.538539158115 }, { "content": " &self,\n\n cur_values: &HashMap<&'static str, String>,\n\n field_name: &'static str,\n\n ) -> Result<Vec<String>> {\n\n // for the 'commit author' combo box, we offer the list\n\n // of authors for the repo. This is quite slow though,\n\n // hopefully there is a faster way?\n\n // https://stackoverflow.com/questions/60464449/get-the-list-of-authors-in-a-git-repository-efficiently-with-libgit2\n\n let git_path = cur_values\n\n .get(REPO_FOLDER_KEY)\n\n .map(|s| s.as_str())\n\n .unwrap_or_else(|| \"\");\n\n if field_name != COMMIT_AUTHOR_KEY || git_path.is_empty() {\n\n return Ok(Vec::new());\n\n }\n\n let repo = Repository::open(&git_path)?;\n\n let mut revwalk = repo.revwalk()?;\n\n revwalk.push_head()?;\n\n let mut authors: Vec<String> = revwalk\n\n .map(|r| {\n", "file_path": "src/events/git.rs", "rank": 86, "score": 34530.57667952516 }, { "content": " Git::get_commit_extra_info(&d),\n\n ),\n\n };\n\n Event::new(\n\n \"Git\",\n\n Icon::CODE_BRANCH,\n\n commit_date.time(),\n\n c.summary().unwrap_or(\"\").to_string(),\n\n contents_header,\n\n EventBody::Markup(contents, WordWrapMode::NoWordWrap),\n\n extra_details,\n\n )\n\n }\n\n}\n\n\n\npub struct Git;\n\nconst REPO_FOLDER_KEY: &str = \"Repository folder\";\n\nconst COMMIT_AUTHOR_KEY: &str = \"Commit Author\";\n\n\n\nimpl EventProvider 
for Git {\n", "file_path": "src/events/git.rs", "rank": 87, "score": 34530.34325405784 }, { "content": " });\n\n Ok(url)\n\n }\n\n\n\n fn get_commit_display_url_gitlab(origin_url: &str, config: &Config) -> Result<Option<String>> {\n\n // we have the repo origin url, something like [email protected]:afc/afc.git\n\n // and we also have the gitlab URL, something like https://gitlab.lit-transit.com/\n\n // find out whether the origin URL contains the gitlab URL minus the protocol\n\n let url_protocol_regex = Regex::new(r\"^[a-z]+://\").unwrap();\n\n let matching_gitlab_cfg = match config.gitlab.iter().find(|(_, v)| {\n\n origin_url.contains(\n\n &url_protocol_regex\n\n .replace_all(&v.gitlab_url, \"\")\n\n .to_string(),\n\n )\n\n }) {\n\n Some((_, v)) => v,\n\n None => return Ok(None),\n\n };\n\n\n", "file_path": "src/events/git.rs", "rank": 88, "score": 34529.86473406604 }, { "content": "\n\n fn add_config_values(\n\n &self,\n\n config: &mut Config,\n\n config_name: String,\n\n mut config_values: HashMap<&'static str, String>,\n\n ) {\n\n config.git.insert(\n\n config_name,\n\n GitConfig {\n\n repo_folder: config_values.remove(REPO_FOLDER_KEY).unwrap(),\n\n commit_author: config_values.remove(COMMIT_AUTHOR_KEY).unwrap(),\n\n },\n\n );\n\n }\n\n\n\n fn remove_config(&self, config: &mut Config, config_name: String) {\n\n config.git.remove(&config_name);\n\n }\n\n\n", "file_path": "src/events/git.rs", "rank": 89, "score": 34529.192969678144 }, { "content": " // [email protected]:afc/afc.git => afc/afc [keep between ':' and '.git']\n\n let gitlab_projectname_regex = Regex::new(r\":(.*?)\\.git\").unwrap();\n\n let gitlab_project_name = match gitlab_projectname_regex.captures_iter(origin_url).next() {\n\n Some(v) => v[1].to_string(),\n\n None => return Ok(None),\n\n };\n\n\n\n // combine the URL from the gitlab config plus the project name\n\n // that we extracted from the repo upstream URL to get the URL\n\n // to display a commit.\n\n // (the commit sha will have to be 
appended to this URL)\n\n Ok(Some(format!(\n\n \"{}/{}/commit/\",\n\n matching_gitlab_cfg.gitlab_url, gitlab_project_name\n\n )))\n\n }\n\n\n\n // autodetect github repos & offer to open the commits in the browser\n\n fn get_commit_display_url_github(origin_url: &str) -> Result<Option<String>> {\n\n let github_projectname_regex =\n", "file_path": "src/events/git.rs", "rank": 90, "score": 34528.69365097212 }, { "content": " .collect();\n\n let mut result = all_commits\n\n .iter()\n\n .flat_map(|(branch, commits)| {\n\n let rrepo = &repo;\n\n let cdu = &commit_display_url;\n\n commits\n\n .iter()\n\n .filter(move |c| branch == \"master\" || !master_commit_ids.contains(&c.id()))\n\n .map(move |c| Self::build_event(c, rrepo, branch, cdu))\n\n })\n\n .collect::<Vec<Event>>();\n\n result.sort_by_key(|e| e.event_time); // need to sort for the dedup to work\n\n result.dedup_by(|e1, e2| {\n\n // deduplicate identical commits seen in different branches\n\n // (the body will be different since we put the branch name there)\n\n e1.event_time == e2.event_time\n\n && e1.event_contents_header == e2.event_contents_header\n\n && e1.event_info == e2.event_info\n\n });\n\n Ok(result)\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/events/git.rs", "rank": 91, "score": 34526.370195281976 }, { "content": " let branch_head_date = Git::git2_time_to_datetime(branch_head.time());\n\n if branch_head_date < day_start {\n\n // early abort: quite a lot faster than starting a useless revwalk\n\n continue;\n\n }\n\n let mut revwalk = repo.revwalk()?;\n\n revwalk.set_sorting(/*git2::Sort::REVERSE |*/ git2::Sort::TIME)?;\n\n revwalk.push(branch_oid)?;\n\n let mut commits: Vec<Commit> = revwalk\n\n .map(|r| {\n\n let oid = r?;\n\n repo.find_commit(oid)\n\n })\n\n .filter_map(|c| match c {\n\n Ok(commit) => Some(commit),\n\n Err(e) => {\n\n println!(\"Error walking the revisions {}, skipping\", e);\n\n None\n\n }\n\n })\n", "file_path": "src/events/git.rs", "rank": 92, "score": 34526.19514834986 }, { 
"content": " Cargo.lock | 11 ++++++-----\n\n Cargo.toml | 1 +\n\n src/events/email.rs | 6 +++---\n\n src/events/events.rs | 6 +++---\n\n src/events/git.rs | 6 +++---\n\n src/events/ical.rs | 6 +++---\n\n src/events/redmine.rs | 6 +++---\n\n src/icons.rs | 57 ++++++++++++++++++++++++++++++++++++++++++++++-----------\n\n src/widgets/addeventsourcedlg.rs | 7 ++-----\n\n src/widgets/datepicker.rs | 6 +++---\n\n src/widgets/eventsource.rs | 4 ++--\n\n 11 files changed, 75 insertions(+), 41 deletions(-)\n\n</span>\"#\n\n .to_string(),\n\n WordWrapMode::NoWordWrap,\n\n ),\n\n Some(\"\".to_string()),\n\n );\n\n let actual = Git\n\n .get_events(&config, \"test\", Local.ymd(2020, 2, 25))\n\n .unwrap();\n\n assert_eq!(2, actual.len());\n\n assert_eq!(expected_fst, *actual.first().unwrap());\n\n}\n", "file_path": "src/events/git.rs", "rank": 93, "score": 34525.722284269286 }, { "content": " let oid = r?;\n\n repo.find_commit(oid)\n\n })\n\n .filter_map(|c| match c {\n\n Ok(commit) => Some(commit),\n\n Err(e) => {\n\n println!(\"Error walking the revisions {}, skipping\", e);\n\n None\n\n }\n\n })\n\n .fold(HashSet::new(), |mut sofar, cur| {\n\n if let Some(name) = cur.author().name() {\n\n sofar.insert(name.to_string());\n\n }\n\n sofar\n\n })\n\n .into_iter()\n\n .collect();\n\n authors.sort();\n\n Ok(authors)\n", "file_path": "src/events/git.rs", "rank": 94, "score": 34525.50351020445 }, { "content": " }\n\n\n\n fn get_config_values(\n\n &self,\n\n config: &Config,\n\n config_name: &str,\n\n ) -> HashMap<&'static str, String> {\n\n vec![\n\n (\n\n REPO_FOLDER_KEY,\n\n config.git[config_name].repo_folder.to_string(),\n\n ),\n\n (\n\n COMMIT_AUTHOR_KEY,\n\n config.git[config_name].commit_author.to_string(),\n\n ),\n\n ]\n\n .into_iter()\n\n .collect()\n\n }\n", "file_path": "src/events/git.rs", "rank": 95, "score": 34525.38960820099 }, { "content": " Regex::new(r\"^git@github\\.com:(?P<reponame1>.*)\\.git$|^https://github\\.com/(?P<reponame2>.*)\\.git$\").unwrap();\n\n let 
github_project_name = github_projectname_regex\n\n .captures_iter(origin_url)\n\n .next()\n\n .and_then(|v| v.name(\"reponame1\").or_else(|| v.name(\"reponame2\")))\n\n .map(|m| m.as_str());\n\n\n\n Ok(github_project_name.map(|n| format!(\"https://github.com/{}/commit/\", n)))\n\n }\n\n\n\n fn build_event(\n\n c: &Commit,\n\n repo: &Repository,\n\n branch: &str,\n\n commit_display_url: &Option<String>,\n\n ) -> Event {\n\n let commit_date = Git::git2_time_to_datetime(c.time());\n\n let diff = Git::get_commit_diff(repo, c);\n\n let contents_header = c.summary().unwrap_or(\"\").to_string();\n\n let base_msg = c.message().unwrap_or(\"\");\n", "file_path": "src/events/git.rs", "rank": 96, "score": 34525.1201950201 }, { "content": " fn get_config_fields(&self) -> Vec<(&'static str, ConfigType)> {\n\n vec![\n\n (REPO_FOLDER_KEY, ConfigType::Folder),\n\n (COMMIT_AUTHOR_KEY, ConfigType::Combo),\n\n ]\n\n }\n\n\n\n fn name(&self) -> &'static str {\n\n \"Git\"\n\n }\n\n\n\n fn default_icon(&self) -> Icon {\n\n Icon::CODE_BRANCH\n\n }\n\n\n\n fn get_config_names<'a>(&self, config: &'a Config) -> Vec<&'a String> {\n\n config.git.keys().collect()\n\n }\n\n\n\n fn field_values(\n", "file_path": "src/events/git.rs", "rank": 97, "score": 34525.0321683431 }, { "content": " let paths_for_each_file: Vec<Vec<&str>> = files\n\n .iter()\n\n .filter_map(|f| f.iter().map(|c| c.to_str()).collect())\n\n .collect();\n\n let shortest_path = paths_for_each_file\n\n .iter()\n\n .map(|chars| chars.len())\n\n .min()\n\n .unwrap_or(0);\n\n let mut common_prefix = vec![];\n\n for idx in 0..shortest_path {\n\n let first_component = paths_for_each_file[0][idx];\n\n if !paths_for_each_file\n\n .iter()\n\n .all(|chars| chars[idx] == first_component)\n\n {\n\n break;\n\n }\n\n common_prefix.push(first_component);\n\n }\n", "file_path": "src/events/git.rs", "rank": 98, "score": 34524.780296882396 }, { "content": " let message_contents =\n\n 
glib::markup_escape_text(base_msg.strip_prefix(&contents_header).unwrap_or(base_msg))\n\n .to_string();\n\n let open_in_browser = match commit_display_url {\n\n Some(cdu) => format!(\"<a href=\\\"{}/{}\\\">Open in browser</a>\", cdu, c.id()),\n\n None => \"\".to_string(),\n\n };\n\n let (contents, extra_details) = match diff {\n\n None => (\n\n format!(\"{}\\n{}\\n{}\", open_in_browser, branch, message_contents),\n\n None,\n\n ),\n\n Some(d) => (\n\n format!(\n\n \"{}\\n\\n{}\\n<span font-family=\\\"monospace\\\">{}\\n\\n{}</span>\",\n\n open_in_browser,\n\n branch,\n\n message_contents,\n\n &Git::get_commit_full_diffstr(&d).unwrap_or_else(|| \"\".to_string())\n\n ),\n", "file_path": "src/events/git.rs", "rank": 99, "score": 34519.96626433284 } ]
Rust
src/foreign_key/create.rs
samsamai/sea-query
744a8a859fda2b05b471e88500036ef3b8d92798
use crate::{ backend::SchemaBuilder, prepare::*, types::*, ForeignKeyAction, SchemaStatementBuilder, TableForeignKey, }; #[derive(Debug, Clone)] pub struct ForeignKeyCreateStatement { pub(crate) foreign_key: TableForeignKey, } impl Default for ForeignKeyCreateStatement { fn default() -> Self { Self::new() } } impl ForeignKeyCreateStatement { pub fn new() -> Self { Self { foreign_key: Default::default(), } } pub fn name(mut self, name: &str) -> Self { self.foreign_key.name(name); self } #[deprecated( since = "0.10.2", note = "Please use the [`ForeignKeyCreateStatement::from`] and [`ForeignKeyCreateStatement::to`]" )] pub fn table<T: 'static, R: 'static>(mut self, table: T, ref_table: R) -> Self where T: Iden, R: Iden, { self.foreign_key.from_tbl(table); self.foreign_key.to_tbl(ref_table); self } #[deprecated( since = "0.10.2", note = "Please use the [`ForeignKeyCreateStatement::from`] and [`ForeignKeyCreateStatement::to`]" )] pub fn col<T: 'static, R: 'static>(mut self, column: T, ref_column: R) -> Self where T: Iden, R: Iden, { self.foreign_key.from_col(column); self.foreign_key.to_col(ref_column); self } pub fn from<T, C>(mut self, table: T, columns: C) -> Self where T: IntoIden, C: IdenList, { self.foreign_key.from_tbl(table); for col in columns.into_iter() { self.foreign_key.from_col(col); } self } pub fn to<T, C>(mut self, table: T, columns: C) -> Self where T: IntoIden, C: IdenList, { self.foreign_key.to_tbl(table); for col in columns.into_iter() { self.foreign_key.to_col(col); } self } pub fn from_tbl<T>(mut self, table: T) -> Self where T: IntoIden, { self.foreign_key.from_tbl(table); self } pub fn to_tbl<R>(mut self, ref_table: R) -> Self where R: IntoIden, { self.foreign_key.to_tbl(ref_table); self } pub fn from_col<T>(mut self, column: T) -> Self where T: IntoIden, { self.foreign_key.from_col(column); self } pub fn to_col<R>(mut self, ref_column: R) -> Self where R: IntoIden, { self.foreign_key.to_col(ref_column); self } pub fn on_delete(mut self, action: 
ForeignKeyAction) -> Self { self.foreign_key.on_delete(action); self } pub fn on_update(mut self, action: ForeignKeyAction) -> Self { self.foreign_key.on_update(action); self } pub fn get_foreign_key(&self) -> &TableForeignKey { &self.foreign_key } } impl SchemaStatementBuilder for ForeignKeyCreateStatement { fn build<T: SchemaBuilder>(&self, schema_builder: T) -> String { let mut sql = SqlWriter::new(); schema_builder.prepare_foreign_key_create_statement(self, &mut sql); sql.result() } fn build_any(&self, schema_builder: &dyn SchemaBuilder) -> String { let mut sql = SqlWriter::new(); schema_builder.prepare_foreign_key_create_statement(self, &mut sql); sql.result() } }
use crate::{ backend::SchemaBuilder, prepare::*, types::*, ForeignKeyAction, SchemaStatementBuilder, TableForeignKey, }; #[derive(Debug, Clone)] pub struct ForeignKeyCreateStatement { pub(crate) foreign_key: TableForeignKey, } impl Default for ForeignKeyCreateStatement { fn default() -> Self { Self::new() } } impl ForeignKeyCreateStatement { pub fn new() -> Self { Self { foreign_key: Default::default(), } } pub fn name(mut self, name: &str) -> Self { self.foreign_key.name(name); self } #[deprecated( since = "0.10.2", note = "Please use the [`ForeignKeyCreateStatement::from`] and [`ForeignKeyCreateStatement::to`]" )] pub fn table<T: 'static, R: 'static>(mut self, table: T, ref_table: R) -> Self where
] and [`ForeignKeyCreateStatement::to`]" )] pub fn col<T: 'static, R: 'static>(mut self, column: T, ref_column: R) -> Self where T: Iden, R: Iden, { self.foreign_key.from_col(column); self.foreign_key.to_col(ref_column); self } pub fn from<T, C>(mut self, table: T, columns: C) -> Self where T: IntoIden, C: IdenList, { self.foreign_key.from_tbl(table); for col in columns.into_iter() { self.foreign_key.from_col(col); } self } pub fn to<T, C>(mut self, table: T, columns: C) -> Self where T: IntoIden, C: IdenList, { self.foreign_key.to_tbl(table); for col in columns.into_iter() { self.foreign_key.to_col(col); } self } pub fn from_tbl<T>(mut self, table: T) -> Self where T: IntoIden, { self.foreign_key.from_tbl(table); self } pub fn to_tbl<R>(mut self, ref_table: R) -> Self where R: IntoIden, { self.foreign_key.to_tbl(ref_table); self } pub fn from_col<T>(mut self, column: T) -> Self where T: IntoIden, { self.foreign_key.from_col(column); self } pub fn to_col<R>(mut self, ref_column: R) -> Self where R: IntoIden, { self.foreign_key.to_col(ref_column); self } pub fn on_delete(mut self, action: ForeignKeyAction) -> Self { self.foreign_key.on_delete(action); self } pub fn on_update(mut self, action: ForeignKeyAction) -> Self { self.foreign_key.on_update(action); self } pub fn get_foreign_key(&self) -> &TableForeignKey { &self.foreign_key } } impl SchemaStatementBuilder for ForeignKeyCreateStatement { fn build<T: SchemaBuilder>(&self, schema_builder: T) -> String { let mut sql = SqlWriter::new(); schema_builder.prepare_foreign_key_create_statement(self, &mut sql); sql.result() } fn build_any(&self, schema_builder: &dyn SchemaBuilder) -> String { let mut sql = SqlWriter::new(); schema_builder.prepare_foreign_key_create_statement(self, &mut sql); sql.result() } }
T: Iden, R: Iden, { self.foreign_key.from_tbl(table); self.foreign_key.to_tbl(ref_table); self } #[deprecated( since = "0.10.2", note = "Please use the [`ForeignKeyCreateStatement::from`
random
[ { "content": "/// Escape a SQL string literal\n\npub fn escape_string(string: &str) -> String {\n\n string\n\n .replace(\"\\\\\", \"\\\\\\\\\")\n\n .replace(\"\\\"\", \"\\\\\\\"\")\n\n .replace(\"'\", \"\\\\'\")\n\n .replace(\"\\0\", \"\\\\0\")\n\n .replace(\"\\x08\", \"\\\\b\")\n\n .replace(\"\\x09\", \"\\\\t\")\n\n .replace(\"\\x1a\", \"\\\\z\")\n\n .replace(\"\\n\", \"\\\\n\")\n\n .replace(\"\\r\", \"\\\\r\")\n\n}\n\n\n", "file_path": "src/value.rs", "rank": 0, "score": 141440.89902203993 }, { "content": "/// Unescape a SQL string literal\n\npub fn unescape_string(input: &str) -> String {\n\n let mut escape = false;\n\n let mut output = String::new();\n\n for c in input.chars() {\n\n if !escape && c == '\\\\' {\n\n escape = true;\n\n } else if escape {\n\n write!(\n\n output,\n\n \"{}\",\n\n match c {\n\n '0' => '\\0',\n\n 'b' => '\\x08',\n\n 't' => '\\x09',\n\n 'z' => '\\x1a',\n\n 'n' => '\\n',\n\n 'r' => '\\r',\n\n c => c,\n\n }\n\n )\n", "file_path": "src/value.rs", "rank": 1, "score": 141440.8990220399 }, { "content": "pub trait IntoTableRef {\n\n fn into_table_ref(self) -> TableRef;\n\n}\n\n\n\n/// Unary operator\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum UnOper {\n\n Not,\n\n}\n\n\n\n/// Binary operator\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum BinOper {\n\n And,\n\n Or,\n\n Like,\n\n NotLike,\n\n Is,\n\n IsNot,\n\n In,\n", "file_path": "src/types.rs", "rank": 2, "score": 132674.3555468421 }, { "content": "pub fn inject_parameters<I>(sql: &str, params: I, query_builder: &dyn QueryBuilder) -> String\n\nwhere\n\n I: IntoIterator<Item = Value>,\n\n{\n\n let params: Vec<Value> = params.into_iter().collect();\n\n let tokenizer = Tokenizer::new(sql);\n\n let tokens: Vec<Token> = tokenizer.iter().collect();\n\n let mut counter = 0;\n\n let mut output = Vec::new();\n\n let mut i = 0;\n\n while i < tokens.len() {\n\n let token = &tokens[i];\n\n match token {\n\n Token::Punctuation(mark) => {\n\n if (mark.as_ref(), false) == 
query_builder.placeholder() {\n\n output.push(query_builder.value_to_string(&params[counter]));\n\n counter += 1;\n\n i += 1;\n\n continue;\n\n } else if (mark.as_ref(), true) == query_builder.placeholder() && i + 1 < tokens.len() {\n", "file_path": "src/prepare.rs", "rank": 3, "score": 131868.8128351442 }, { "content": "pub trait ValueType: ValueTypeDefault {\n\n fn unwrap(v: Value) -> Self;\n\n\n\n fn type_name() -> &'static str;\n\n}\n\n\n", "file_path": "src/value.rs", "rank": 4, "score": 129141.09557304908 }, { "content": "pub trait ValueTypeDefault {\n\n fn default() -> Self;\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Values(pub Vec<Value>);\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum ValueTuple {\n\n One(Value),\n\n Two(Value, Value),\n\n Three(Value, Value, Value),\n\n}\n\n\n", "file_path": "src/value.rs", "rank": 5, "score": 128780.33168981408 }, { "content": "pub trait TypeBuilder {\n\n /// Translate [`TypeCreateStatement`] into database specific SQL statement.\n\n fn prepare_type_create_statement(\n\n &self,\n\n create: &TypeCreateStatement,\n\n sql: &mut SqlWriter,\n\n collector: &mut dyn FnMut(Value),\n\n );\n\n\n\n /// Translate [`TypeDropStatement`] into database specific SQL statement.\n\n fn prepare_type_drop_statement(\n\n &self,\n\n drop: &TypeDropStatement,\n\n sql: &mut SqlWriter,\n\n collector: &mut dyn FnMut(Value),\n\n );\n\n\n\n /// Translate [`TypeAlterStatement`] into database specific SQL statement.\n\n fn prepare_type_alter_statement(\n\n &self,\n", "file_path": "src/extension/postgres/types.rs", "rank": 6, "score": 102572.66668816278 }, { "content": "pub trait IntoIden {\n\n fn into_iden(self) -> DynIden;\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 7, "score": 100416.0745261393 }, { "content": "/// Identifier in query\n\npub trait Iden {\n\n fn prepare(&self, s: &mut dyn fmt::Write, q: char) {\n\n write!(s, \"{}\", q).unwrap();\n\n self.unquoted(s);\n\n write!(s, \"{}\", q).unwrap();\n\n }\n\n\n\n fn 
to_string(&self) -> String {\n\n let s = &mut String::new();\n\n self.unquoted(s);\n\n s.to_owned()\n\n }\n\n\n\n fn unquoted(&self, s: &mut dyn fmt::Write);\n\n}\n\n\n\npub type DynIden = SeaRc<dyn Iden>;\n\n\n", "file_path": "src/types.rs", "rank": 8, "score": 100416.0745261393 }, { "content": "pub trait IdenList {\n\n type IntoIter: Iterator<Item = DynIden>;\n\n\n\n fn into_iter(self) -> Self::IntoIter;\n\n}\n\n\n\nimpl fmt::Debug for dyn Iden {\n\n fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n self.unquoted(formatter);\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Column references\n\n#[derive(Debug, Clone)]\n\npub enum ColumnRef {\n\n Column(DynIden),\n\n TableColumn(DynIden, DynIden),\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 9, "score": 97014.36841704887 }, { "content": "pub trait IntoColumnRef {\n\n fn into_column_ref(self) -> ColumnRef;\n\n}\n\n\n\n/// Table references\n\n#[allow(clippy::large_enum_variant)]\n\n#[derive(Debug, Clone)]\n\npub enum TableRef {\n\n Table(DynIden),\n\n SchemaTable(DynIden, DynIden),\n\n TableAlias(DynIden, DynIden),\n\n SchemaTableAlias(DynIden, DynIden, DynIden),\n\n SubQuery(SelectStatement, DynIden),\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 10, "score": 97014.36841704887 }, { "content": "#[test]\n\nfn alter_5() {\n\n assert_eq!(\n\n Type::alter()\n\n .name(Font::Table)\n\n .rename_value(Font::Variant, Font::Language)\n\n .to_string(PostgresQueryBuilder),\n\n r#\"ALTER TYPE \"font\" RENAME VALUE 'variant' TO 'language'\"#\n\n )\n\n}\n", "file_path": "tests/postgres/types.rs", "rank": 11, "score": 93590.05017291759 }, { "content": "#[test]\n\nfn alter_4() {\n\n assert_eq!(\n\n Type::alter()\n\n .name(Font::Table)\n\n .rename_to(Alias::new(\"typeface\"))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"ALTER TYPE \"font\" RENAME TO 'typeface'\"#\n\n )\n\n}\n\n\n", "file_path": "tests/postgres/types.rs", "rank": 12, "score": 93590.05017291759 }, { "content": "#[test]\n\nfn drop_3() {\n\n assert_eq!(\n\n 
Type::drop()\n\n .if_exists()\n\n .name(Font::Table)\n\n .cascade()\n\n .to_string(PostgresQueryBuilder),\n\n r#\"DROP TYPE IF EXISTS \"font\" CASCADE\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/types.rs", "rank": 13, "score": 93590.05017291759 }, { "content": "#[test]\n\nfn create_1() {\n\n assert_eq!(\n\n Type::create()\n\n .as_enum(Font::Table)\n\n .values(vec![Font::Name, Font::Variant, Font::Language])\n\n .to_string(PostgresQueryBuilder),\n\n r#\"CREATE TYPE \"font\" AS ENUM ('name', 'variant', 'language')\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/types.rs", "rank": 14, "score": 93590.05017291759 }, { "content": "#[test]\n\nfn drop_2() {\n\n assert_eq!(\n\n Type::drop()\n\n .name(Font::Table)\n\n .to_string(PostgresQueryBuilder),\n\n r#\"DROP TYPE \"font\"\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/types.rs", "rank": 15, "score": 93590.05017291759 }, { "content": "#[test]\n\nfn alter_3() {\n\n assert_eq!(\n\n Type::alter()\n\n .name(Font::Table)\n\n .add_value(Alias::new(\"weight\"))\n\n .after(Font::Variant)\n\n .to_string(PostgresQueryBuilder),\n\n r#\"ALTER TYPE \"font\" ADD VALUE 'weight' AFTER 'variant'\"#\n\n )\n\n}\n\n\n", "file_path": "tests/postgres/types.rs", "rank": 16, "score": 93590.05017291759 }, { "content": "#[test]\n\nfn drop_1() {\n\n assert_eq!(\n\n Type::drop()\n\n .if_exists()\n\n .name(Font::Table)\n\n .restrict()\n\n .to_string(PostgresQueryBuilder),\n\n r#\"DROP TYPE IF EXISTS \"font\" RESTRICT\"#\n\n )\n\n}\n\n\n", "file_path": "tests/postgres/types.rs", "rank": 17, "score": 93590.05017291759 }, { "content": "#[test]\n\nfn alter_2() {\n\n assert_eq!(\n\n Type::alter()\n\n .name(Font::Table)\n\n .add_value(Alias::new(\"weight\"))\n\n .before(Font::Variant)\n\n .to_string(PostgresQueryBuilder),\n\n r#\"ALTER TYPE \"font\" ADD VALUE 'weight' BEFORE 'variant'\"#\n\n )\n\n}\n\n\n", "file_path": "tests/postgres/types.rs", "rank": 18, "score": 93590.05017291759 }, { "content": "#[test]\n\nfn alter_1() {\n\n 
assert_eq!(\n\n Type::alter()\n\n .name(Font::Table)\n\n .add_value(Alias::new(\"weight\"))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"ALTER TYPE \"font\" ADD VALUE 'weight'\"#\n\n )\n\n}\n", "file_path": "tests/postgres/types.rs", "rank": 19, "score": 93590.05017291759 }, { "content": "#[test]\n\n#[should_panic(expected = \"Sqlite not support modifying table column\")]\n\nfn alter_2() {\n\n Table::alter()\n\n .table(Font::Table)\n\n .modify_column(ColumnDef::new(Alias::new(\"new_col\")).double())\n\n .to_string(SqliteQueryBuilder);\n\n}\n\n\n", "file_path": "tests/sqlite/table.rs", "rank": 20, "score": 93103.44845017622 }, { "content": "#[test]\n\n#[should_panic(expected = \"Sqlite not support dropping table column\")]\n\nfn alter_4() {\n\n Table::alter()\n\n .table(Font::Table)\n\n .drop_column(Alias::new(\"new_column\"))\n\n .to_string(SqliteQueryBuilder);\n\n}\n\n\n", "file_path": "tests/sqlite/table.rs", "rank": 21, "score": 93103.44845017622 }, { "content": "#[test]\n\nfn alter_1() {\n\n assert_eq!(\n\n Table::alter()\n\n .table(Font::Table)\n\n .add_column(\n\n ColumnDef::new(Alias::new(\"new_col\"))\n\n .integer()\n\n .not_null()\n\n .default(100)\n\n )\n\n .to_string(PostgresQueryBuilder),\n\n r#\"ALTER TABLE \"font\" ADD COLUMN \"new_col\" integer NOT NULL DEFAULT 100\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/table.rs", "rank": 22, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn create_1() {\n\n assert_eq!(\n\n Table::create()\n\n .table(Glyph::Table)\n\n .col(\n\n ColumnDef::new(Glyph::Id)\n\n .integer_len(11)\n\n .not_null()\n\n .auto_increment()\n\n .primary_key()\n\n )\n\n .col(ColumnDef::new(Glyph::Aspect).double().not_null())\n\n .col(ColumnDef::new(Glyph::Image).text())\n\n .engine(\"InnoDB\")\n\n .character_set(\"utf8mb4\")\n\n .collate(\"utf8mb4_unicode_ci\")\n\n .to_string(MysqlQueryBuilder),\n\n vec![\n\n \"CREATE TABLE `glyph` (\",\n\n \"`id` int(11) NOT NULL AUTO_INCREMENT PRIMARY KEY,\",\n\n \"`aspect` double NOT 
NULL,\",\n\n \"`image` text\",\n\n \") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci\",\n\n ]\n\n .join(\" \")\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/table.rs", "rank": 23, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn create_5() {\n\n assert_eq!(\n\n Table::create()\n\n .table(Glyph::Table)\n\n .col(ColumnDef::new(Glyph::Id).integer().not_null())\n\n .index(Index::create().unique().name(\"idx-glyph-id\").col(Glyph::Id))\n\n .to_string(MysqlQueryBuilder),\n\n vec![\n\n \"CREATE TABLE `glyph` (\",\n\n \"`id` int NOT NULL,\",\n\n \"UNIQUE KEY `idx-glyph-id` (`id`)\",\n\n \")\",\n\n ]\n\n .join(\" \")\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/table.rs", "rank": 24, "score": 93099.1820176565 }, { "content": "#[test]\n\n#[should_panic(expected = \"No alter option found\")]\n\nfn alter_6() {\n\n Table::alter().to_string(MysqlQueryBuilder);\n\n}\n", "file_path": "tests/mysql/table.rs", "rank": 25, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn alter_5() {\n\n assert_eq!(\n\n Table::rename()\n\n .table(Font::Table, Alias::new(\"font_new\"))\n\n .to_string(MysqlQueryBuilder),\n\n \"RENAME TABLE `font` TO `font_new`\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/table.rs", "rank": 26, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn alter_2() {\n\n assert_eq!(\n\n Table::alter()\n\n .table(Font::Table)\n\n .modify_column(\n\n ColumnDef::new(Alias::new(\"new_col\"))\n\n .big_integer()\n\n .default(999)\n\n )\n\n .to_string(PostgresQueryBuilder),\n\n vec![\n\n r#\"ALTER TABLE \"font\"\"#,\n\n r#\"ALTER COLUMN \"new_col\" TYPE bigint,\"#,\n\n r#\"ALTER COLUMN \"new_col\" SET DEFAULT 999\"#,\n\n ]\n\n .join(\" \")\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/table.rs", "rank": 27, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn truncate_1() {\n\n assert_eq!(\n\n Table::truncate()\n\n .table(Font::Table)\n\n .to_string(MysqlQueryBuilder),\n\n \"TRUNCATE TABLE `font`\"\n\n );\n\n}\n\n\n", "file_path": 
"tests/mysql/table.rs", "rank": 28, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn alter_3() {\n\n assert_eq!(\n\n Table::alter()\n\n .table(Font::Table)\n\n .rename_column(Alias::new(\"new_col\"), Alias::new(\"new_column\"))\n\n .to_string(MysqlQueryBuilder),\n\n \"ALTER TABLE `font` RENAME COLUMN `new_col` TO `new_column`\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/table.rs", "rank": 29, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn create_4() {\n\n assert_eq!(\n\n Table::create()\n\n .table(Glyph::Table)\n\n .col(ColumnDef::new(Glyph::Image).custom(Glyph::Aspect))\n\n .to_string(PostgresQueryBuilder),\n\n vec![r#\"CREATE TABLE \"glyph\" (\"#, r#\"\"image\" aspect\"#, r#\")\"#,].join(\" \")\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/table.rs", "rank": 30, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn alter_4() {\n\n assert_eq!(\n\n Table::alter()\n\n .table(Font::Table)\n\n .drop_column(Alias::new(\"new_column\"))\n\n .to_string(MysqlQueryBuilder),\n\n \"ALTER TABLE `font` DROP COLUMN `new_column`\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/table.rs", "rank": 31, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn create_2() {\n\n assert_eq!(\n\n Table::create()\n\n .table(Font::Table)\n\n .col(\n\n ColumnDef::new(Font::Id)\n\n .integer()\n\n .not_null()\n\n .primary_key()\n\n .auto_increment()\n\n )\n\n .col(ColumnDef::new(Font::Name).string().not_null())\n\n .col(ColumnDef::new(Font::Variant).string().not_null())\n\n .col(ColumnDef::new(Font::Language).string().not_null())\n\n .to_string(SqliteQueryBuilder),\n\n vec![\n\n \"CREATE TABLE `font` (\",\n\n \"`id` integer NOT NULL PRIMARY KEY AUTOINCREMENT,\",\n\n \"`name` text NOT NULL,\",\n\n \"`variant` text NOT NULL,\",\n\n \"`language` text NOT NULL\",\n\n \")\",\n\n ]\n\n .join(\" \")\n\n );\n\n}\n\n\n", "file_path": "tests/sqlite/table.rs", "rank": 32, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn create_1() {\n\n assert_eq!(\n\n 
Table::create()\n\n .table(Glyph::Table)\n\n .col(\n\n ColumnDef::new(Glyph::Id)\n\n .integer()\n\n .not_null()\n\n .auto_increment()\n\n .primary_key()\n\n )\n\n .col(ColumnDef::new(Glyph::Aspect).double().not_null())\n\n .col(ColumnDef::new(Glyph::Image).text())\n\n .to_string(SqliteQueryBuilder),\n\n vec![\n\n \"CREATE TABLE `glyph` (\",\n\n \"`id` integer NOT NULL PRIMARY KEY AUTOINCREMENT,\",\n\n \"`aspect` real NOT NULL,\",\n\n \"`image` text\",\n\n \")\",\n\n ]\n\n .join(\" \")\n\n );\n\n}\n\n\n", "file_path": "tests/sqlite/table.rs", "rank": 33, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn alter_3() {\n\n assert_eq!(\n\n Table::alter()\n\n .table(Font::Table)\n\n .rename_column(Alias::new(\"new_col\"), Alias::new(\"new_column\"))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"ALTER TABLE \"font\" RENAME COLUMN \"new_col\" TO \"new_column\"\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/table.rs", "rank": 34, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn create_1() {\n\n assert_eq!(\n\n Table::create()\n\n .table(Glyph::Table)\n\n .col(\n\n ColumnDef::new(Glyph::Id)\n\n .integer()\n\n .not_null()\n\n .auto_increment()\n\n .primary_key()\n\n )\n\n .col(ColumnDef::new(Glyph::Aspect).double().not_null())\n\n .col(ColumnDef::new(Glyph::Image).text())\n\n .to_string(PostgresQueryBuilder),\n\n vec![\n\n r#\"CREATE TABLE \"glyph\" (\"#,\n\n r#\"\"id\" serial NOT NULL PRIMARY KEY,\"#,\n\n r#\"\"aspect\" double precision NOT NULL,\"#,\n\n r#\"\"image\" text\"#,\n\n r#\")\"#,\n\n ]\n\n .join(\" \")\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/table.rs", "rank": 35, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn alter_3() {\n\n assert_eq!(\n\n Table::alter()\n\n .table(Font::Table)\n\n .rename_column(Alias::new(\"new_col\"), Alias::new(\"new_column\"))\n\n .to_string(SqliteQueryBuilder),\n\n \"ALTER TABLE `font` RENAME COLUMN `new_col` TO `new_column`\"\n\n );\n\n}\n\n\n", "file_path": "tests/sqlite/table.rs", "rank": 36, 
"score": 93099.1820176565 }, { "content": "#[test]\n\n#[should_panic(expected = \"No alter option found\")]\n\nfn alter_6() {\n\n Table::alter().to_string(SqliteQueryBuilder);\n\n}\n", "file_path": "tests/sqlite/table.rs", "rank": 37, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn create_3() {\n\n assert_eq!(\n\n Table::create()\n\n .table(Char::Table)\n\n .if_not_exists()\n\n .col(\n\n ColumnDef::new(Char::Id)\n\n .integer()\n\n .not_null()\n\n .primary_key()\n\n .auto_increment()\n\n )\n\n .col(ColumnDef::new(Char::FontSize).integer().not_null())\n\n .col(ColumnDef::new(Char::Character).string_len(255).not_null())\n\n .col(ColumnDef::new(Char::SizeW).integer().not_null())\n\n .col(ColumnDef::new(Char::SizeH).integer().not_null())\n\n .col(ColumnDef::new(Char::FontId).integer().default(Value::Null))\n\n .foreign_key(\n\n ForeignKey::create()\n\n .name(\"FK_2e303c3a712662f1fc2a4d0aad6\")\n", "file_path": "tests/postgres/table.rs", "rank": 38, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn create_3() {\n\n assert_eq!(\n\n Table::create()\n\n .table(Char::Table)\n\n .if_not_exists()\n\n .col(\n\n ColumnDef::new(Char::Id)\n\n .integer()\n\n .not_null()\n\n .auto_increment()\n\n .primary_key()\n\n )\n\n .col(ColumnDef::new(Char::FontSize).integer().not_null())\n\n .col(ColumnDef::new(Char::Character).string().not_null())\n\n .col(ColumnDef::new(Char::SizeW).integer().not_null())\n\n .col(ColumnDef::new(Char::SizeH).integer().not_null())\n\n .col(ColumnDef::new(Char::FontId).integer().default(Value::Null))\n\n .foreign_key(\n\n ForeignKey::create()\n\n .from(Char::Table, Char::FontId)\n", "file_path": "tests/sqlite/table.rs", "rank": 39, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn alter_1() {\n\n assert_eq!(\n\n Table::alter()\n\n .table(Font::Table)\n\n .add_column(\n\n ColumnDef::new(Alias::new(\"new_col\"))\n\n .integer()\n\n .not_null()\n\n .default(100)\n\n )\n\n .to_string(MysqlQueryBuilder),\n\n \"ALTER TABLE `font` ADD COLUMN 
`new_col` int NOT NULL DEFAULT 100\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/table.rs", "rank": 40, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn alter_5() {\n\n assert_eq!(\n\n Table::rename()\n\n .table(Font::Table, Alias::new(\"font_new\"))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"ALTER TABLE \"font\" RENAME TO \"font_new\"\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/table.rs", "rank": 41, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn create_2() {\n\n assert_eq!(\n\n Table::create()\n\n .table(Font::Table)\n\n .col(\n\n ColumnDef::new(Font::Id)\n\n .integer_len(11)\n\n .not_null()\n\n .auto_increment()\n\n .primary_key()\n\n )\n\n .col(ColumnDef::new(Font::Name).string().not_null())\n\n .col(ColumnDef::new(Font::Variant).string_len(255).not_null())\n\n .col(ColumnDef::new(Font::Language).string_len(1024).not_null())\n\n .engine(\"InnoDB\")\n\n .character_set(\"utf8mb4\")\n\n .collate(\"utf8mb4_unicode_ci\")\n\n .to_string(MysqlQueryBuilder),\n\n vec![\n\n \"CREATE TABLE `font` (\",\n\n \"`id` int(11) NOT NULL AUTO_INCREMENT PRIMARY KEY,\",\n\n \"`name` varchar(255) NOT NULL,\",\n\n \"`variant` varchar(255) NOT NULL,\",\n\n \"`language` varchar(1024) NOT NULL\",\n\n \") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci\",\n\n ]\n\n .join(\" \")\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/table.rs", "rank": 42, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn alter_5() {\n\n assert_eq!(\n\n Table::rename()\n\n .table(Font::Table, Alias::new(\"font_new\"))\n\n .to_string(SqliteQueryBuilder),\n\n \"ALTER TABLE `font` RENAME TO `font_new`\"\n\n );\n\n}\n\n\n", "file_path": "tests/sqlite/table.rs", "rank": 43, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn create_2() {\n\n assert_eq!(\n\n Table::create()\n\n .table(Font::Table)\n\n .col(\n\n ColumnDef::new(Font::Id)\n\n .integer()\n\n .not_null()\n\n .primary_key()\n\n .auto_increment()\n\n )\n\n 
.col(ColumnDef::new(Font::Name).string().not_null())\n\n .col(ColumnDef::new(Font::Variant).string_len(255).not_null())\n\n .col(ColumnDef::new(Font::Language).string_len(255).not_null())\n\n .to_string(PostgresQueryBuilder),\n\n vec![\n\n r#\"CREATE TABLE \"font\" (\"#,\n\n r#\"\"id\" serial NOT NULL PRIMARY KEY,\"#,\n\n r#\"\"name\" varchar NOT NULL,\"#,\n\n r#\"\"variant\" varchar(255) NOT NULL,\"#,\n\n r#\"\"language\" varchar(255) NOT NULL\"#,\n\n r#\")\"#,\n\n ]\n\n .join(\" \")\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/table.rs", "rank": 44, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn alter_1() {\n\n assert_eq!(\n\n Table::alter()\n\n .table(Font::Table)\n\n .add_column(\n\n ColumnDef::new(Alias::new(\"new_col\"))\n\n .integer()\n\n .not_null()\n\n .default(99)\n\n )\n\n .to_string(SqliteQueryBuilder),\n\n \"ALTER TABLE `font` ADD COLUMN `new_col` integer NOT NULL DEFAULT 99\"\n\n );\n\n}\n\n\n", "file_path": "tests/sqlite/table.rs", "rank": 45, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn create_5() {\n\n assert_eq!(\n\n Table::create()\n\n .table(Glyph::Table)\n\n .col(ColumnDef::new(Glyph::Image).json())\n\n .col(ColumnDef::new(Glyph::Aspect).json_binary())\n\n .to_string(PostgresQueryBuilder),\n\n vec![\n\n r#\"CREATE TABLE \"glyph\" (\"#,\n\n r#\"\"image\" json,\"#,\n\n r#\"\"aspect\" jsonb\"#,\n\n r#\")\"#,\n\n ]\n\n .join(\" \")\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/table.rs", "rank": 46, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn alter_2() {\n\n assert_eq!(\n\n Table::alter()\n\n .table(Font::Table)\n\n .modify_column(\n\n ColumnDef::new(Alias::new(\"new_col\"))\n\n .big_integer()\n\n .default(999)\n\n )\n\n .to_string(MysqlQueryBuilder),\n\n \"ALTER TABLE `font` MODIFY COLUMN `new_col` bigint DEFAULT 999\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/table.rs", "rank": 47, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn truncate_1() {\n\n assert_eq!(\n\n Table::truncate()\n\n 
.table(Font::Table)\n\n .to_string(SqliteQueryBuilder),\n\n \"TRUNCATE TABLE `font`\"\n\n );\n\n}\n\n\n", "file_path": "tests/sqlite/table.rs", "rank": 48, "score": 93099.1820176565 }, { "content": "#[test]\n\n#[should_panic(expected = \"No alter option found\")]\n\nfn alter_6() {\n\n Table::alter().to_string(PostgresQueryBuilder);\n\n}\n", "file_path": "tests/postgres/table.rs", "rank": 49, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn create_6() {\n\n assert_eq!(\n\n Table::create()\n\n .table(Glyph::Table)\n\n .col(\n\n ColumnDef::new(Glyph::Id)\n\n .integer()\n\n .not_null()\n\n .extra(\"ANYTHING I WANT TO SAY\".to_owned())\n\n )\n\n .to_string(PostgresQueryBuilder),\n\n vec![\n\n r#\"CREATE TABLE \"glyph\" (\"#,\n\n r#\"\"id\" integer NOT NULL ANYTHING I WANT TO SAY\"#,\n\n r#\")\"#,\n\n ]\n\n .join(\" \")\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/table.rs", "rank": 50, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn alter_4() {\n\n assert_eq!(\n\n Table::alter()\n\n .table(Font::Table)\n\n .drop_column(Alias::new(\"new_column\"))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"ALTER TABLE \"font\" DROP COLUMN \"new_column\"\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/table.rs", "rank": 51, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn truncate_1() {\n\n assert_eq!(\n\n Table::truncate()\n\n .table(Font::Table)\n\n .to_string(PostgresQueryBuilder),\n\n r#\"TRUNCATE TABLE \"font\"\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/table.rs", "rank": 52, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn drop_1() {\n\n assert_eq!(\n\n Table::drop()\n\n .table(Glyph::Table)\n\n .table(Char::Table)\n\n .cascade()\n\n .to_string(SqliteQueryBuilder),\n\n \"DROP TABLE `glyph`, `character` CASCADE\"\n\n );\n\n}\n\n\n", "file_path": "tests/sqlite/table.rs", "rank": 53, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn create_4() {\n\n assert_eq!(\n\n Table::create()\n\n .table(Glyph::Table)\n\n .col(\n\n 
ColumnDef::new(Glyph::Id)\n\n .integer()\n\n .not_null()\n\n .extra(\"ANYTHING I WANT TO SAY\".to_owned())\n\n )\n\n .to_string(MysqlQueryBuilder),\n\n vec![\n\n \"CREATE TABLE `glyph` (\",\n\n \"`id` int NOT NULL ANYTHING I WANT TO SAY\",\n\n \")\",\n\n ]\n\n .join(\" \")\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/table.rs", "rank": 54, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn drop_1() {\n\n assert_eq!(\n\n Table::drop()\n\n .table(Glyph::Table)\n\n .table(Char::Table)\n\n .cascade()\n\n .to_string(PostgresQueryBuilder),\n\n r#\"DROP TABLE \"glyph\", \"character\" CASCADE\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/table.rs", "rank": 55, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn create_3() {\n\n assert_eq!(\n\n Table::create()\n\n .table(Char::Table)\n\n .if_not_exists()\n\n .col(\n\n ColumnDef::new(Char::Id)\n\n .integer_len(11)\n\n .not_null()\n\n .auto_increment()\n\n .primary_key()\n\n )\n\n .col(ColumnDef::new(Char::FontSize).integer_len(11).not_null())\n\n .col(ColumnDef::new(Char::Character).string_len(255).not_null())\n\n .col(ColumnDef::new(Char::SizeW).integer_len(11).not_null())\n\n .col(ColumnDef::new(Char::SizeH).integer_len(11).not_null())\n\n .col(\n\n ColumnDef::new(Char::FontId)\n\n .integer_len(11)\n\n .default(Value::Null)\n", "file_path": "tests/mysql/table.rs", "rank": 56, "score": 93099.1820176565 }, { "content": "#[test]\n\nfn drop_1() {\n\n assert_eq!(\n\n Table::drop()\n\n .table(Glyph::Table)\n\n .table(Char::Table)\n\n .cascade()\n\n .to_string(MysqlQueryBuilder),\n\n \"DROP TABLE `glyph`, `character` CASCADE\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/table.rs", "rank": 57, "score": 93099.1820176565 }, { "content": "#[allow(clippy::many_single_char_names)]\n\n#[cfg(feature = \"with-json\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"with-json\")))]\n\npub fn sea_value_to_json_value(v: &Value) -> Json {\n\n match v {\n\n Value::Null => Json::Null,\n\n Value::Bool(b) => Json::Bool(*b),\n\n 
Value::TinyInt(v) => (*v).into(),\n\n Value::SmallInt(v) => (*v).into(),\n\n Value::Int(v) => (*v).into(),\n\n Value::BigInt(v) => (*v).into(),\n\n Value::TinyUnsigned(v) => (*v).into(),\n\n Value::SmallUnsigned(v) => (*v).into(),\n\n Value::Unsigned(v) => (*v).into(),\n\n Value::BigUnsigned(v) => (*v).into(),\n\n Value::Float(v) => (*v).into(),\n\n Value::Double(v) => (*v).into(),\n\n Value::String(s) => Json::String(s.as_ref().clone()),\n\n Value::Bytes(s) => Json::String(from_utf8(s).unwrap().to_string()),\n\n Value::Json(v) => v.as_ref().clone(),\n\n #[cfg(feature = \"with-chrono\")]\n\n Value::DateTime(v) => v.format(\"%Y-%m-%d %H:%M:%S\").to_string().into(),\n\n #[cfg(feature = \"with-rust_decimal\")]\n", "file_path": "src/value.rs", "rank": 58, "score": 89325.15784676057 }, { "content": "#[cfg(feature = \"with-json\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"with-json\")))]\n\npub fn json_value_to_sea_value(v: &Json) -> Value {\n\n match v {\n\n Json::Null => Value::Null,\n\n Json::Bool(v) => Value::Int(v.to_owned().into()),\n\n Json::Number(v) => {\n\n if v.is_f64() {\n\n Value::Double(v.as_f64().unwrap())\n\n } else if v.is_i64() {\n\n Value::BigInt(v.as_i64().unwrap())\n\n } else if v.is_u64() {\n\n Value::BigUnsigned(v.as_u64().unwrap())\n\n } else {\n\n unreachable!()\n\n }\n\n }\n\n Json::String(v) => Value::String(Box::new(v.clone())),\n\n Json::Array(_) => panic!(\"Json::Array is not supported\"),\n\n Json::Object(v) => Value::Json(Box::new(Json::Object(v.clone()))),\n\n }\n\n}\n\n\n\n/// Convert value to json value\n", "file_path": "src/value.rs", "rank": 59, "score": 89321.1922802278 }, { "content": "#[proc_macro_derive(Iden, attributes(iden, method))]\n\npub fn derive_iden(input: TokenStream) -> TokenStream {\n\n let DeriveInput {\n\n ident, data, attrs, ..\n\n } = parse_macro_input!(input);\n\n\n\n let table_name = match get_iden_attr(&attrs) {\n\n Some(lit) => quote! 
{ #lit },\n\n None => {\n\n let normalized = ident.to_string().to_snake_case();\n\n quote! { #normalized }\n\n }\n\n };\n\n\n\n // Currently we only support enums and unit structs\n\n let variants =\n\n match data {\n\n syn::Data::Enum(DataEnum { variants, .. }) => variants,\n\n syn::Data::Struct(DataStruct {\n\n fields: Fields::Unit,\n\n ..\n", "file_path": "sea-query-derive/src/lib.rs", "rank": 60, "score": 81337.8511435943 }, { "content": "pub trait TableBuilder: IndexBuilder + ForeignKeyBuilder + QuotedBuilder {\n\n /// Translate [`TableCreateStatement`] into SQL statement.\n\n fn prepare_table_create_statement(&self, create: &TableCreateStatement, sql: &mut SqlWriter) {\n\n write!(sql, \"CREATE TABLE \").unwrap();\n\n\n\n if create.if_not_exists {\n\n write!(sql, \"IF NOT EXISTS \").unwrap();\n\n }\n\n\n\n if let Some(table) = &create.table {\n\n table.prepare(sql, self.quote());\n\n }\n\n\n\n write!(sql, \" ( \").unwrap();\n\n let mut count = 0;\n\n\n\n for column_def in create.columns.iter() {\n\n if count > 0 {\n\n write!(sql, \", \").unwrap();\n\n }\n", "file_path": "src/backend/table_builder.rs", "rank": 61, "score": 80086.37121803136 }, { "content": "#[derive(Debug)]\n\nstruct CharacterStruct {\n\n id: i32,\n\n character: String,\n\n font_size: i32,\n\n}\n\n\n\nimpl From<Row> for CharacterStruct {\n\n fn from(row: Row) -> Self {\n\n Self {\n\n id: row.get(\"id\"),\n\n character: row.get(\"character\"),\n\n font_size: row.get(\"font_size\"),\n\n }\n\n }\n\n}\n", "file_path": "examples/postgres/src/main.rs", "rank": 62, "score": 75380.92437468993 }, { "content": "#[derive(Debug)]\n\nstruct CharacterStruct {\n\n id: i32,\n\n character: String,\n\n font_size: i32,\n\n}\n\n\n\nimpl From<&Row<'_>> for CharacterStruct {\n\n fn from(row: &Row) -> Self {\n\n Self {\n\n id: row.get_unwrap(\"id\"),\n\n character: row.get_unwrap(\"character\"),\n\n font_size: row.get_unwrap(\"font_size\"),\n\n }\n\n }\n\n}\n", "file_path": "examples/rusqlite/src/main.rs", "rank": 
63, "score": 75380.92437468993 }, { "content": "#[derive(sqlx::FromRow, Debug)]\n\nstruct CharacterStruct {\n\n id: i32,\n\n character: String,\n\n font_size: i32,\n\n}\n", "file_path": "examples/sqlx_mysql/src/main.rs", "rank": 64, "score": 74096.16372902368 }, { "content": "#[derive(Debug)]\n\nstruct DocumentStruct {\n\n id: i32,\n\n json_field: serde_json::Value,\n\n timestamp: NaiveDateTime,\n\n}\n\n\n\nimpl From<Row> for DocumentStruct {\n\n fn from(row: Row) -> Self {\n\n Self {\n\n id: row.get(\"id\"),\n\n json_field: row.get(\"json_field\"),\n\n timestamp: row.get(\"timestamp\"),\n\n }\n\n }\n\n}\n", "file_path": "examples/postgres_json/src/main.rs", "rank": 65, "score": 74096.16372902368 }, { "content": "#[derive(sqlx::FromRow, Debug)]\n\nstruct CharacterStruct {\n\n id: i32,\n\n character: String,\n\n font_size: i32,\n\n}\n", "file_path": "examples/sqlx_sqlite/src/main.rs", "rank": 66, "score": 74096.16372902368 }, { "content": "#[derive(sqlx::FromRow, Debug)]\n\nstruct CharacterStruct {\n\n id: i32,\n\n character: String,\n\n font_size: i32,\n\n}\n", "file_path": "examples/sqlx_postgres/src/main.rs", "rank": 67, "score": 74096.16372902368 }, { "content": "pub trait SchemaBuilder: TableBuilder + IndexBuilder + ForeignKeyBuilder {}\n\n\n", "file_path": "src/backend/mod.rs", "rank": 68, "score": 70255.67386150213 }, { "content": "pub trait IntoValueTuple {\n\n fn into_value_tuple(self) -> ValueTuple;\n\n}\n\n\n\nimpl Value {\n\n pub fn unwrap<T>(self) -> T\n\n where\n\n T: ValueType,\n\n {\n\n T::unwrap(self)\n\n }\n\n}\n\n\n\nimpl Default for Value {\n\n fn default() -> Self {\n\n Self::Null\n\n }\n\n}\n\n\n\nmacro_rules! 
type_to_value {\n", "file_path": "src/value.rs", "rank": 69, "score": 60885.534745390694 }, { "content": "pub trait IntoCondition {\n\n fn into_condition(self) -> Condition;\n\n}\n\n\n\npub type Cond = Condition;\n\n\n\n/// Represents anything that can be passed to an [`Condition::any`] or [`Condition::all`]'s [`Condition::add`] method.\n\n///\n\n/// The arguments are automatically converted to the right enum.\n\n#[derive(Debug, Clone)]\n\npub enum ConditionExpression {\n\n Condition(Condition),\n\n SimpleExpr(SimpleExpr),\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum ConditionHolderContents {\n\n Empty,\n\n Chain(Vec<LogicalChainOper>),\n\n Condition(Condition),\n", "file_path": "src/query/condition.rs", "rank": 70, "score": 60885.534745390694 }, { "content": "pub trait SchemaStatementBuilder {\n\n /// Build corresponding SQL statement for certain database backend and return SQL string\n\n fn build<T: SchemaBuilder>(&self, schema_builder: T) -> String;\n\n\n\n /// Build corresponding SQL statement for certain database backend and return SQL string\n\n fn build_any(&self, schema_builder: &dyn SchemaBuilder) -> String;\n\n\n\n /// Build corresponding SQL statement for certain database backend and return SQL string\n\n fn to_string<T: SchemaBuilder>(&self, schema_builder: T) -> String {\n\n self.build(schema_builder)\n\n }\n\n}\n", "file_path": "src/schema.rs", "rank": 71, "score": 59327.182469796164 }, { "content": "pub trait IntoIndexColumn {\n\n fn into_index_column(self) -> IndexColumn;\n\n}\n\n\n\nimpl Default for TableIndex {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl<I> IntoIndexColumn for I\n\nwhere\n\n I: IntoIden,\n\n{\n\n fn into_index_column(self) -> IndexColumn {\n\n IndexColumn {\n\n name: self.into_iden(),\n\n prefix: None,\n\n order: None,\n\n }\n", "file_path": "src/index/common.rs", "rank": 72, "score": 59327.182469796164 }, { "content": "pub trait OrderedStatement {\n\n #[doc(hidden)]\n\n // Implementation for the 
trait.\n\n fn add_order_by(&mut self, order: OrderExpr) -> &mut Self;\n\n\n\n /// Order by column.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use sea_query::{*, tests_cfg::*};\n\n ///\n\n /// let query = Query::select()\n\n /// .column(Glyph::Aspect)\n\n /// .from(Glyph::Table)\n\n /// .and_where(Expr::expr(Expr::col(Glyph::Aspect).if_null(0)).gt(2))\n\n /// .order_by(Glyph::Image, Order::Desc)\n\n /// .order_by((Glyph::Table, Glyph::Aspect), Order::Asc)\n\n /// .to_owned();\n\n ///\n", "file_path": "src/query/ordered.rs", "rank": 73, "score": 59327.182469796164 }, { "content": "pub trait QuotedBuilder {\n\n /// The type of quote the builder uses.\n\n fn quote(&self) -> char;\n\n}\n", "file_path": "src/backend/mod.rs", "rank": 74, "score": 59327.182469796164 }, { "content": "pub trait ConditionalStatement {\n\n /// And where condition. This cannot be mixed with [`ConditionalStatement::or_where`].\n\n /// Calling `or_where` after `and_where` will panic.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use sea_query::{*, tests_cfg::*};\n\n ///\n\n /// let query = Query::select()\n\n /// .column(Glyph::Image)\n\n /// .from(Glyph::Table)\n\n /// .and_where(Expr::tbl(Glyph::Table, Glyph::Aspect).is_in(vec![3, 4]))\n\n /// .and_where(Expr::tbl(Glyph::Table, Glyph::Image).like(\"A%\"))\n\n /// .to_owned();\n\n ///\n\n /// assert_eq!(\n\n /// query.to_string(MysqlQueryBuilder),\n\n /// r#\"SELECT `image` FROM `glyph` WHERE `glyph`.`aspect` IN (3, 4) AND `glyph`.`image` LIKE 'A%'\"#\n\n /// );\n", "file_path": "src/query/condition.rs", "rank": 75, "score": 59327.182469796164 }, { "content": "pub trait QueryStatementBuilder {\n\n /// Build corresponding SQL statement for certain database backend and return SQL string\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use sea_query::{*, tests_cfg::*};\n\n ///\n\n /// let query = Query::select()\n\n /// .column(Glyph::Aspect)\n\n /// .from(Glyph::Table)\n\n /// 
.and_where(Expr::expr(Expr::col(Glyph::Aspect).if_null(0)).gt(2))\n\n /// .order_by(Glyph::Image, Order::Desc)\n\n /// .order_by_tbl(Glyph::Table, Glyph::Aspect, Order::Asc)\n\n /// .to_string(MysqlQueryBuilder);\n\n ///\n\n /// assert_eq!(\n\n /// query,\n\n /// r#\"SELECT `aspect` FROM `glyph` WHERE IFNULL(`aspect`, 0) > 2 ORDER BY `image` DESC, `glyph`.`aspect` ASC\"#\n\n /// );\n", "file_path": "src/query/traits.rs", "rank": 76, "score": 57897.14091070363 }, { "content": "fn main() {\n\n println!(\"Default field names\");\n\n assert_eq!(dbg!(Iden::to_string(&User::Table)), \"user\");\n\n assert_eq!(dbg!(Iden::to_string(&User::Id)), \"id\");\n\n assert_eq!(dbg!(Iden::to_string(&User::FirstName)), \"first_name\");\n\n assert_eq!(dbg!(Iden::to_string(&User::LastName)), \"last_name\");\n\n assert_eq!(dbg!(Iden::to_string(&User::Email)), \"email\");\n\n\n\n println!(\"Custom field names\");\n\n assert_eq!(dbg!(Iden::to_string(&Custom::Table)), \"user\");\n\n assert_eq!(dbg!(Iden::to_string(&Custom::Id)), \"my_id\");\n\n assert_eq!(dbg!(Iden::to_string(&Custom::FirstName)), \"name\");\n\n assert_eq!(dbg!(Iden::to_string(&Custom::LastName)), \"surname\");\n\n assert_eq!(\n\n dbg!(Iden::to_string(&Custom::Email(\n\n \"[email protected]\".to_owned()\n\n ))),\n\n \"EMail\"\n\n );\n\n assert_eq!(\n", "file_path": "examples/derive.rs", "rank": 77, "score": 57570.67734093517 }, { "content": "pub trait PostgresDriver<'a> {\n\n fn as_params(&'a self) -> Vec<&'a (dyn ToSql + Sync)>;\n\n}\n\n\n\nimpl ToSql for Value {\n\n fn to_sql(\n\n &self,\n\n ty: &Type,\n\n out: &mut BytesMut,\n\n ) -> Result<IsNull, Box<dyn Error + Sync + Send>> {\n\n match self {\n\n Value::Null => None::<bool>.to_sql(ty, out),\n\n Value::Bool(v) => v.to_sql(ty, out),\n\n Value::TinyInt(v) => v.to_sql(ty, out),\n\n Value::SmallInt(v) => v.to_sql(ty, out),\n\n Value::Int(v) => v.to_sql(ty, out),\n\n Value::BigInt(v) => v.to_sql(ty, out),\n\n Value::TinyUnsigned(v) => (*v as u32).to_sql(ty, out),\n\n 
Value::SmallUnsigned(v) => (*v as u32).to_sql(ty, out),\n\n Value::Unsigned(v) => v.to_sql(ty, out),\n", "file_path": "src/driver/postgres.rs", "rank": 78, "score": 56300.776823146225 }, { "content": "#[test]\n\nfn derive_3() {\n\n #[derive(Debug, Iden)]\n\n enum Something {\n\n // ...the Table can also be overwritten like this\n\n #[iden = \"something_else\"]\n\n Table,\n\n Id,\n\n AssetName,\n\n UserId,\n\n }\n\n\n\n println!(\"Single custom field name\");\n\n assert_eq!(Iden::to_string(&Something::Table), \"something_else\");\n\n assert_eq!(Iden::to_string(&Something::Id), \"id\");\n\n assert_eq!(Iden::to_string(&Something::AssetName), \"asset_name\");\n\n assert_eq!(Iden::to_string(&Something::UserId), \"user_id\");\n\n}\n\n\n", "file_path": "tests/derive/mod.rs", "rank": 79, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn drop_1() {\n\n assert_eq!(\n\n Index::drop()\n\n .name(\"idx-glyph-aspect\")\n\n .table(Glyph::Table)\n\n .to_string(MysqlQueryBuilder),\n\n \"DROP INDEX `idx-glyph-aspect` ON `glyph`\"\n\n );\n\n}\n", "file_path": "tests/mysql/index.rs", "rank": 80, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn select_5() {\n\n assert_eq!(\n\n Query::select()\n\n .column((Glyph::Table, Glyph::Image))\n\n .from(Glyph::Table)\n\n .and_where(Expr::tbl(Glyph::Table, Glyph::Aspect).is_in(vec![3, 4]))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `glyph`.`image` FROM `glyph` WHERE `glyph`.`aspect` IN (3, 4)\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 81, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn select_12() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Glyph::Aspect,\n\n ])\n\n .from(Glyph::Table)\n\n .and_where(Expr::expr(Expr::col(Glyph::Aspect).if_null(0)).gt(2))\n\n .order_by_columns(vec![\n\n (Glyph::Id, Order::Asc),\n\n (Glyph::Aspect, Order::Desc),\n\n ])\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `aspect` FROM `glyph` WHERE IFNULL(`aspect`, 0) > 2 ORDER BY `id` ASC, `aspect` 
DESC\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 82, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn select_3() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Char::Character, Char::SizeW, Char::SizeH\n\n ])\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::SizeW).eq(3))\n\n .and_where(Expr::col(Char::SizeH).eq(4))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character`, `size_w`, `size_h` FROM `character` WHERE `size_w` = 3 AND `size_h` = 4\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 83, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn create_4() {\n\n assert_eq!(\n\n Index::create()\n\n .index_type(IndexType::Hash)\n\n .name(\"idx-glyph-image\")\n\n .table(Glyph::Table)\n\n .col(Glyph::Image)\n\n .to_string(MysqlQueryBuilder),\n\n \"CREATE INDEX `idx-glyph-image` ON `glyph` USING HASH (`image`)\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/index.rs", "rank": 84, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn select_7() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Glyph::Aspect,])\n\n .from(Glyph::Table)\n\n .and_where(Expr::expr(Expr::col(Glyph::Aspect).if_null(0)).gt(2))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `aspect` FROM `glyph` WHERE IFNULL(`aspect`, 0) > 2\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 85, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn select_8() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Char::Character,\n\n ])\n\n .from(Char::Table)\n\n .left_join(Font::Table, Expr::tbl(Char::Table, Char::FontId).equals(Font::Table, Font::Id))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character` FROM `character` LEFT JOIN `font` ON `character`.`font_id` = `font`.`id`\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 86, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn derive_2() {\n\n #[derive(Debug, Iden)]\n\n // Outer iden attributes overrides what's used 
for \"Table\"...\n\n #[iden = \"user\"]\n\n enum Custom {\n\n Table,\n\n #[iden = \"my_id\"]\n\n Id,\n\n #[iden = \"name\"]\n\n FirstName,\n\n #[iden = \"surname\"]\n\n LastName,\n\n // Custom casing if needed\n\n #[iden = \"EMail\"]\n\n Email,\n\n }\n\n\n\n println!(\"Custom field names\");\n\n assert_eq!(Iden::to_string(&Custom::Table), \"user\");\n\n assert_eq!(Iden::to_string(&Custom::Id), \"my_id\");\n\n assert_eq!(Iden::to_string(&Custom::FirstName), \"name\");\n\n assert_eq!(Iden::to_string(&Custom::LastName), \"surname\");\n\n assert_eq!(Iden::to_string(&Custom::Email), \"EMail\");\n\n}\n\n\n", "file_path": "tests/derive/mod.rs", "rank": 87, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn derive_1() {\n\n #[derive(Debug, Iden)]\n\n enum User {\n\n Table,\n\n Id,\n\n FirstName,\n\n LastName,\n\n Email,\n\n }\n\n\n\n println!(\"Default field names\");\n\n assert_eq!(Iden::to_string(&User::Table), \"user\");\n\n assert_eq!(Iden::to_string(&User::Id), \"id\");\n\n assert_eq!(Iden::to_string(&User::FirstName), \"first_name\");\n\n assert_eq!(Iden::to_string(&User::LastName), \"last_name\");\n\n assert_eq!(Iden::to_string(&User::Email), \"email\");\n\n}\n\n\n", "file_path": "tests/derive/mod.rs", "rank": 88, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn create_1() {\n\n assert_eq!(\n\n Index::create()\n\n .name(\"idx-glyph-aspect\")\n\n .table(Glyph::Table)\n\n .col(Glyph::Aspect)\n\n .to_string(MysqlQueryBuilder),\n\n \"CREATE INDEX `idx-glyph-aspect` ON `glyph` (`aspect`)\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/index.rs", "rank": 89, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn select_9() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Char::Character,\n\n ])\n\n .from(Char::Table)\n\n .left_join(Font::Table, Expr::tbl(Char::Table, Char::FontId).equals(Font::Table, Font::Id))\n\n .inner_join(Glyph::Table, Expr::tbl(Char::Table, Char::Character).equals(Glyph::Table, Glyph::Image))\n\n 
.to_string(MysqlQueryBuilder),\n\n \"SELECT `character` FROM `character` LEFT JOIN `font` ON `character`.`font_id` = `font`.`id` INNER JOIN `glyph` ON `character`.`character` = `glyph`.`image`\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 90, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn select_6() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Glyph::Aspect,])\n\n .exprs(vec![Expr::col(Glyph::Image).max(),])\n\n .from(Glyph::Table)\n\n .group_by_columns(vec![Glyph::Aspect,])\n\n .and_having(Expr::col(Glyph::Aspect).gt(2))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `aspect`, MAX(`image`) FROM `glyph` GROUP BY `aspect` HAVING `aspect` > 2\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 91, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn select_11() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Glyph::Aspect,\n\n ])\n\n .from(Glyph::Table)\n\n .and_where(Expr::expr(Expr::col(Glyph::Aspect).if_null(0)).gt(2))\n\n .order_by(Glyph::Image, Order::Desc)\n\n .order_by((Glyph::Table, Glyph::Aspect), Order::Asc)\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `aspect` FROM `glyph` WHERE IFNULL(`aspect`, 0) > 2 ORDER BY `image` DESC, `glyph`.`aspect` ASC\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 92, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn select_10() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Char::Character,\n\n ])\n\n .from(Char::Table)\n\n .left_join(Font::Table,\n\n Expr::tbl(Char::Table, Char::FontId).equals(Font::Table, Font::Id)\n\n .and(Expr::tbl(Char::Table, Char::FontId).equals(Font::Table, Font::Id))\n\n )\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character` FROM `character` LEFT JOIN `font` ON (`character`.`font_id` = `font`.`id`) AND (`character`.`font_id` = `font`.`id`)\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 93, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn 
derive_4() {\n\n #[derive(Debug, Iden)]\n\n pub struct SomeType;\n\n\n\n #[derive(Debug, Iden)]\n\n #[iden = \"another_name\"]\n\n pub struct CustomName;\n\n\n\n println!(\"Unit structs\");\n\n assert_eq!(Iden::to_string(&SomeType), \"some_type\");\n\n assert_eq!(Iden::to_string(&CustomName), \"another_name\");\n\n}\n", "file_path": "tests/derive/mod.rs", "rank": 94, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn create_2() {\n\n assert_eq!(\n\n Index::create()\n\n .unique()\n\n .name(\"idx-glyph-aspect-image\")\n\n .table(Glyph::Table)\n\n .col(Glyph::Aspect)\n\n .col(Glyph::Image)\n\n .to_string(MysqlQueryBuilder),\n\n \"CREATE UNIQUE INDEX `idx-glyph-aspect-image` ON `glyph` (`aspect`, `image`)\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/index.rs", "rank": 95, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn select_4() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Glyph::Image])\n\n .from_subquery(\n\n Query::select()\n\n .columns(vec![Glyph::Image, Glyph::Aspect])\n\n .from(Glyph::Table)\n\n .take(),\n\n Alias::new(\"subglyph\")\n\n )\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `image` FROM (SELECT `image`, `aspect` FROM `glyph`) AS `subglyph`\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 96, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn select_2() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character, Char::SizeW, Char::SizeH])\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::SizeW).eq(3))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character`, `size_w`, `size_h` FROM `character` WHERE `size_w` = 3\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 97, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn create_3() {\n\n assert_eq!(\n\n Index::create()\n\n .full_text()\n\n .name(\"idx-glyph-image\")\n\n .table(Glyph::Table)\n\n .col(Glyph::Image)\n\n .to_string(MysqlQueryBuilder),\n\n \"CREATE FULLTEXT INDEX `idx-glyph-image` ON `glyph` 
(`image`)\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/index.rs", "rank": 98, "score": 55764.25355810333 }, { "content": "#[test]\n\nfn select_1() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character, Char::SizeW, Char::SizeH])\n\n .from(Char::Table)\n\n .limit(10)\n\n .offset(100)\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character`, `size_w`, `size_h` FROM `character` LIMIT 10 OFFSET 100\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 99, "score": 55764.25355810333 } ]
Rust
flowc/src/lib/model/process.rs
andrewdavidmackenzie/flow
484de720caa19684f0d96e9cbc925254ed4168e3
use serde_derive::{Deserialize, Serialize}; use crate::model::flow::Flow; use crate::model::function::Function; use crate::model::name::{HasName, Name}; use crate::model::route::{HasRoute, Route}; #[derive(Serialize, Deserialize, Debug, Clone)] #[allow(clippy::large_enum_variant)] #[serde(untagged)] pub enum Process { FlowProcess(Flow), FunctionProcess(Function), } impl HasName for Process { fn name(&self) -> &Name { match self { Process::FlowProcess(flow) => flow.name(), Process::FunctionProcess(function) => function.name(), } } fn alias(&self) -> &Name { match self { Process::FlowProcess(flow) => flow.alias(), Process::FunctionProcess(function) => function.alias(), } } } impl HasRoute for Process { fn route(&self) -> &Route { match self { Process::FlowProcess(ref flow) => flow.route(), Process::FunctionProcess(ref function) => function.route(), } } fn route_mut(&mut self) -> &mut Route { match self { Process::FlowProcess(ref mut flow) => flow.route_mut(), Process::FunctionProcess(ref mut function) => function.route_mut(), } } } #[cfg(test)] mod test { use url::Url; use flowcore::deserializers::deserializer::get_deserializer; use flowcore::errors::*; use crate::model::process::Process; use crate::model::process::Process::FlowProcess; fn toml_from_str(content: &str) -> Result<Process> { let url = Url::parse("file:///fake.toml").expect("Could not parse URL"); let deserializer = get_deserializer::<Process>(&url).expect("Could not get deserializer"); deserializer.deserialize(content, Some(&url)) } fn yaml_from_str(content: &str) -> Result<Process> { let url = Url::parse("file:///fake.yaml").expect("Could not parse URL"); let deserializer = get_deserializer::<Process>(&url).expect("Could not get deserializer"); deserializer.deserialize(content, Some(&url)) } fn json_from_str(content: &str) -> Result<Process> { let url = Url::parse("file:///fake.json").expect("Could not parse URL"); let deserializer = get_deserializer::<Process>(&url).expect("Could not get 
deserializer"); deserializer.deserialize(content, Some(&url)) } #[test] fn flow_with_partial_metadata() { let flow_description = " flow: hello-world-simple-toml metadata: version: '1.1.1' authors: ['unknown <[email protected]>'] "; match yaml_from_str(&flow_description.replace("'", "\"")) { Ok(FlowProcess(flow)) => { assert_eq!(flow.metadata.name, String::default()); assert_eq!(flow.metadata.version, "1.1.1".to_string()); assert_eq!( flow.metadata.authors, vec!("unknown <[email protected]>".to_string()) ); } _ => panic!("Deserialization didn't detect a flow"), } } #[test] fn simple_context_loads() { let flow_description = "\ flow = 'hello-world-simple-toml' [[process]] alias = 'message' source = 'lib://flowstdlib/data/buffer.toml' input.default = {once = 'hello'} [[process]] alias = 'print' source = 'lib://flowruntime/stdio/stdout.toml' [[connection]] from = 'message' to = 'print' "; assert!(toml_from_str(flow_description).is_ok()); } #[test] fn flow_errors_on_unknown_fields() { let flow_description = "\ flow = 'hello-world-simple-toml' foo = 'true' [[bar]] bar = 'true' "; assert!(toml_from_str(flow_description).is_err()); } #[test] fn function_errors_on_unknown_fields() { let flow_description = "\ function = 'hello-world-simple-toml' [[output]] foo = 'true' [[bar]] bar = 'true' "; assert!(toml_from_str(flow_description).is_err()); } #[test] fn default_optional_values() { let flow_description = "\ flow = 'test' "; match toml_from_str(flow_description) { Ok(FlowProcess(flow)) => { assert_eq!(flow.metadata.version, String::default()); assert_eq!(flow.metadata.authors, Vec::<String>::default()); } _ => panic!(), } } #[test] fn flow_has_metadata() { let flow_description = "\ flow = 'test' [metadata] name = \"me\" version = \"1.1.1\" description = \"ok\" authors = [\"Andrew <[email protected]>\"] "; match toml_from_str(flow_description) { Ok(FlowProcess(flow)) => { assert_eq!(flow.metadata.name, "me".to_string()); assert_eq!(flow.metadata.version, "1.1.1".to_string()); 
assert_eq!(flow.metadata.description, "ok".to_string()); assert_eq!( flow.metadata.authors, vec!("Andrew <[email protected]>".to_string()) ); } Ok(_) => panic!("Deserialization didn't detect a flow"), Err(e) => panic!("Deserialization error: {:?}", e), } } #[test] fn flow_has_partial_metadata() { let flow_description = "\ flow = 'test' [metadata] version = \"1.1.1\" "; match toml_from_str(flow_description) { Ok(FlowProcess(flow)) => { assert_eq!(flow.metadata.name, String::default()); assert_eq!(flow.metadata.version, "1.1.1".to_string()); assert_eq!(flow.metadata.description, String::default()); } Ok(_) => panic!("Deserialization didn't detect a flow"), Err(e) => panic!("Deserialization error: {:?}", e), } } #[test] fn flow_with_function_from_lib() { let flow_description = "\ flow = 'use-library-function' [[process]] alias = 'print' source = 'lib://flowstdlib/stdio/stdout.toml' "; assert!(toml_from_str(flow_description).is_ok()); } #[test] fn flow_with_unknown_lib_key() { let flow_description = "\ flow = 'use-library-function' [[process]] alias = 'print' lib = 'lib://fake/stdio/stdout.toml' "; assert!(toml_from_str(flow_description).is_err()); } #[test] fn flow_with_function_without_source() { let flow_description = "\ flow = 'use-library-function' [[process]] alias = 'print' "; assert!(toml_from_str(flow_description).is_err()); } #[test] fn load_fails_if_no_alias() { let flow_description = "\ [[process]] source = 'lib://flowstdlib/stdio/stdout.toml' "; assert!(toml_from_str(flow_description).is_err()); } #[test] fn function_parses() { let function_definition = "\ function = 'stdout' source = 'stdout.rs' [[input]] name = 'stdout' type = 'String'"; assert!(toml_from_str(function_definition).is_ok()); } #[test] fn function_lacks_name() { let function_definition = "\ source = 'stdout.rs' [[input]] name = 'stdout' type = 'String'"; assert!(toml_from_str(function_definition).is_err()); } #[test] fn function_lacks_implementation() { let function_definition = "\ function 
= 'stdout' [[input]] name = 'stdout' type = 'String'"; assert!(toml_from_str(function_definition).is_err()); } #[test] fn simplest_context_loads() { let flow_description = "{ 'flow': 'hello-world-simple-toml', 'process': [ { 'alias': 'print', 'source': 'lib://flowruntime/stdio/stdout.toml', 'input': { 'default': { 'once': 'hello' } } } ] }"; let flow = json_from_str(&flow_description.replace("'", "\"")); assert!(flow.is_ok()); } #[test] fn simple_context_loads_from_json() { let flow_description = "{ 'flow': 'hello-world-simple-toml', 'process': [ { 'alias': 'message', 'source': 'lib://flowstdlib/data/buffer.toml', 'input': { 'default': { 'once': 'hello' } } }, { 'alias': 'print', 'source': 'lib://flowruntime/stdio/stdout.toml' } ], 'connection': [ { 'from': 'message', 'to': 'print' } ] }"; let flow = json_from_str(&flow_description.replace("'", "\"")); assert!(flow.is_ok()); } #[test] fn invalid_context_fails() { let flow_description = "{ 'flow': 'hello-world-simple-toml', 'process': [ { 'alias': 'message', 'source': 'lib://flowstdlib/data/buffer.toml', 'input': { 'default': { 'once': 'hello' } } }, { 'alias': 'print', 'source': 'lib://flowruntime/stdio/stdout.toml' } ], 'connection': [ {\ 'from': 'message' } ] }"; let flow = json_from_str(&flow_description.replace("'", "\"")); assert!(flow.is_err()); } }
use serde_derive::{Deserialize, Serialize}; use crate::model::flow::Flow; use crate::model::function::Function; use crate::model::name::{HasName, Name}; use crate::model::route::{HasRoute, Route}; #[derive(Serialize, Deserialize, Debug, Clone)] #[allow(clippy::large_enum_variant)] #[serde(untagged)] pub enum Process { FlowProcess(Flow), FunctionProcess(Function), } impl HasName for Process { fn name(&self) -> &Name { match self { Process::FlowProcess(flow) => flow.name(), Process::FunctionProcess(function) => function.name(), } } fn alias(&self) -> &Name { match self { Process::FlowProcess(flow) => flow.alias(), Process::FunctionProcess(function) => function.alias(), } } } impl HasRoute for Process { fn route(&self) -> &Route {
} fn route_mut(&mut self) -> &mut Route { match self { Process::FlowProcess(ref mut flow) => flow.route_mut(), Process::FunctionProcess(ref mut function) => function.route_mut(), } } } #[cfg(test)] mod test { use url::Url; use flowcore::deserializers::deserializer::get_deserializer; use flowcore::errors::*; use crate::model::process::Process; use crate::model::process::Process::FlowProcess; fn toml_from_str(content: &str) -> Result<Process> { let url = Url::parse("file:///fake.toml").expect("Could not parse URL"); let deserializer = get_deserializer::<Process>(&url).expect("Could not get deserializer"); deserializer.deserialize(content, Some(&url)) } fn yaml_from_str(content: &str) -> Result<Process> { let url = Url::parse("file:///fake.yaml").expect("Could not parse URL"); let deserializer = get_deserializer::<Process>(&url).expect("Could not get deserializer"); deserializer.deserialize(content, Some(&url)) } fn json_from_str(content: &str) -> Result<Process> { let url = Url::parse("file:///fake.json").expect("Could not parse URL"); let deserializer = get_deserializer::<Process>(&url).expect("Could not get deserializer"); deserializer.deserialize(content, Some(&url)) } #[test] fn flow_with_partial_metadata() { let flow_description = " flow: hello-world-simple-toml metadata: version: '1.1.1' authors: ['unknown <[email protected]>'] "; match yaml_from_str(&flow_description.replace("'", "\"")) { Ok(FlowProcess(flow)) => { assert_eq!(flow.metadata.name, String::default()); assert_eq!(flow.metadata.version, "1.1.1".to_string()); assert_eq!( flow.metadata.authors, vec!("unknown <[email protected]>".to_string()) ); } _ => panic!("Deserialization didn't detect a flow"), } } #[test] fn simple_context_loads() { let flow_description = "\ flow = 'hello-world-simple-toml' [[process]] alias = 'message' source = 'lib://flowstdlib/data/buffer.toml' input.default = {once = 'hello'} [[process]] alias = 'print' source = 'lib://flowruntime/stdio/stdout.toml' [[connection]] from = 
'message' to = 'print' "; assert!(toml_from_str(flow_description).is_ok()); } #[test] fn flow_errors_on_unknown_fields() { let flow_description = "\ flow = 'hello-world-simple-toml' foo = 'true' [[bar]] bar = 'true' "; assert!(toml_from_str(flow_description).is_err()); } #[test] fn function_errors_on_unknown_fields() { let flow_description = "\ function = 'hello-world-simple-toml' [[output]] foo = 'true' [[bar]] bar = 'true' "; assert!(toml_from_str(flow_description).is_err()); } #[test] fn default_optional_values() { let flow_description = "\ flow = 'test' "; match toml_from_str(flow_description) { Ok(FlowProcess(flow)) => { assert_eq!(flow.metadata.version, String::default()); assert_eq!(flow.metadata.authors, Vec::<String>::default()); } _ => panic!(), } } #[test] fn flow_has_metadata() { let flow_description = "\ flow = 'test' [metadata] name = \"me\" version = \"1.1.1\" description = \"ok\" authors = [\"Andrew <[email protected]>\"] "; match toml_from_str(flow_description) { Ok(FlowProcess(flow)) => { assert_eq!(flow.metadata.name, "me".to_string()); assert_eq!(flow.metadata.version, "1.1.1".to_string()); assert_eq!(flow.metadata.description, "ok".to_string()); assert_eq!( flow.metadata.authors, vec!("Andrew <[email protected]>".to_string()) ); } Ok(_) => panic!("Deserialization didn't detect a flow"), Err(e) => panic!("Deserialization error: {:?}", e), } } #[test] fn flow_has_partial_metadata() { let flow_description = "\ flow = 'test' [metadata] version = \"1.1.1\" "; match toml_from_str(flow_description) { Ok(FlowProcess(flow)) => { assert_eq!(flow.metadata.name, String::default()); assert_eq!(flow.metadata.version, "1.1.1".to_string()); assert_eq!(flow.metadata.description, String::default()); } Ok(_) => panic!("Deserialization didn't detect a flow"), Err(e) => panic!("Deserialization error: {:?}", e), } } #[test] fn flow_with_function_from_lib() { let flow_description = "\ flow = 'use-library-function' [[process]] alias = 'print' source = 
'lib://flowstdlib/stdio/stdout.toml' "; assert!(toml_from_str(flow_description).is_ok()); } #[test] fn flow_with_unknown_lib_key() { let flow_description = "\ flow = 'use-library-function' [[process]] alias = 'print' lib = 'lib://fake/stdio/stdout.toml' "; assert!(toml_from_str(flow_description).is_err()); } #[test] fn flow_with_function_without_source() { let flow_description = "\ flow = 'use-library-function' [[process]] alias = 'print' "; assert!(toml_from_str(flow_description).is_err()); } #[test] fn load_fails_if_no_alias() { let flow_description = "\ [[process]] source = 'lib://flowstdlib/stdio/stdout.toml' "; assert!(toml_from_str(flow_description).is_err()); } #[test] fn function_parses() { let function_definition = "\ function = 'stdout' source = 'stdout.rs' [[input]] name = 'stdout' type = 'String'"; assert!(toml_from_str(function_definition).is_ok()); } #[test] fn function_lacks_name() { let function_definition = "\ source = 'stdout.rs' [[input]] name = 'stdout' type = 'String'"; assert!(toml_from_str(function_definition).is_err()); } #[test] fn function_lacks_implementation() { let function_definition = "\ function = 'stdout' [[input]] name = 'stdout' type = 'String'"; assert!(toml_from_str(function_definition).is_err()); } #[test] fn simplest_context_loads() { let flow_description = "{ 'flow': 'hello-world-simple-toml', 'process': [ { 'alias': 'print', 'source': 'lib://flowruntime/stdio/stdout.toml', 'input': { 'default': { 'once': 'hello' } } } ] }"; let flow = json_from_str(&flow_description.replace("'", "\"")); assert!(flow.is_ok()); } #[test] fn simple_context_loads_from_json() { let flow_description = "{ 'flow': 'hello-world-simple-toml', 'process': [ { 'alias': 'message', 'source': 'lib://flowstdlib/data/buffer.toml', 'input': { 'default': { 'once': 'hello' } } }, { 'alias': 'print', 'source': 'lib://flowruntime/stdio/stdout.toml' } ], 'connection': [ { 'from': 'message', 'to': 'print' } ] }"; let flow = 
json_from_str(&flow_description.replace("'", "\"")); assert!(flow.is_ok()); } #[test] fn invalid_context_fails() { let flow_description = "{ 'flow': 'hello-world-simple-toml', 'process': [ { 'alias': 'message', 'source': 'lib://flowstdlib/data/buffer.toml', 'input': { 'default': { 'once': 'hello' } } }, { 'alias': 'print', 'source': 'lib://flowruntime/stdio/stdout.toml' } ], 'connection': [ {\ 'from': 'message' } ] }"; let flow = json_from_str(&flow_description.replace("'", "\"")); assert!(flow.is_err()); } }
match self { Process::FlowProcess(ref flow) => flow.route(), Process::FunctionProcess(ref function) => function.route(), }
if_condition
[ { "content": "pub fn route_or_route_array<'de, D>(deserializer: D) -> Result<Vec<Route>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct StringOrVec(PhantomData<Vec<Route>>);\n\n\n\n impl<'de> de::Visitor<'de> for StringOrVec {\n\n type Value = Vec<Route>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"Route or list of Routes\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n Ok(vec![Route::from(value)])\n\n }\n\n\n", "file_path": "flowc/src/lib/model/route_array_serde.rs", "rank": 0, "score": 167676.36278174788 }, { "content": "#[allow(clippy::ptr_arg)]\n\nfn node_from_io_route(route: &Route, name: &Name, io_set: &IOSet) -> (String, String) {\n\n let label = if !io_set.find(route) {\n\n name.to_string()\n\n } else {\n\n \"\".to_string()\n\n };\n\n\n\n if name.is_empty() || io_set.find(route) {\n\n (route.to_string(), label)\n\n } else {\n\n let length_without_io_name = route.len() - name.len() - 1; // 1 for '/'\n\n (\n\n route.to_string()[..length_without_io_name].to_string(),\n\n label,\n\n )\n\n }\n\n}\n\n\n", "file_path": "flowc/src/lib/dumper/dump_dot.rs", "rank": 1, "score": 160062.46270071977 }, { "content": "pub fn process_refs_to_dot(\n\n flow: &Flow,\n\n tables: &GenerationTables,\n\n output_dir: &Path,\n\n) -> Result<String> {\n\n let mut output = String::new();\n\n\n\n // Do the same for all subprocesses referenced from this one\n\n for process_ref in &flow.process_refs {\n\n let process = flow\n\n .subprocesses\n\n .get(process_ref.alias())\n\n .ok_or(\"Could not find process named in process_ref\")?;\n\n match process {\n\n FlowProcess(ref subflow) => {\n\n // create cluster sub graph\n\n output.push_str(&format!(\n\n \"\\nsubgraph cluster_{} {{\\n\",\n\n str::replace(&subflow.alias.to_string(), \"-\", \"_\")\n\n ));\n", "file_path": "flowc/src/lib/dumper/dump_dot.rs", "rank": 2, "score": 157505.29171754915 }, { 
"content": "/// Implement the `FlowImpl` derive macro\n\npub fn flow_impl_derive(input: TokenStream) -> TokenStream {\n\n // Construct a representation of Rust code as a syntax tree that we can manipulate\n\n let ast = syn::parse(input).unwrap();\n\n\n\n // Build the trait implementation\n\n impl_flow_impl(&ast)\n\n}\n\n\n", "file_path": "flow_impl_derive/src/lib.rs", "rank": 3, "score": 149607.92289155605 }, { "content": "/// Return a Deserializer based on the file extension of the resource referred to from `url` input\n\npub fn get_deserializer<'a, T>(url: &'a Url) -> Result<Box<dyn Deserializer<'a, T> + 'a>>\n\nwhere\n\n T: DeserializeOwned + 'static,\n\n{\n\n match get_file_extension(url) {\n\n Some(ext) => match ext {\n\n \"toml\" => Ok(Box::new(TomlDeserializer::new())),\n\n \"yaml\" | \"yml\" => Ok(Box::new(YamlDeserializer::new())),\n\n \"json\" => Ok(Box::new(JsonDeserializer::new())),\n\n _ => bail!(\"Unknown file extension so cannot determine which deserializer to use\"),\n\n },\n\n None => bail!(\"No file extension so cannot determine which deserializer to use\"),\n\n }\n\n}\n\n\n", "file_path": "flowcore/src/deserializers/deserializer.rs", "rank": 4, "score": 143716.1480216439 }, { "content": "/// Construct two look-up tables that can be used to find the index of a function in the functions table,\n\n/// and the index of it's input - using the input route or it's output route\n\npub fn create_routes_table(tables: &mut GenerationTables) {\n\n for function in &mut tables.functions {\n\n // Add inputs to functions to the table as a possible source of connections from a\n\n // job that completed using this function\n\n for (input_number, input) in function.get_inputs().iter().enumerate() {\n\n tables.sources.insert(\n\n input.route().clone(),\n\n (Input(input_number), function.get_id()),\n\n );\n\n }\n\n\n\n // Add any output routes it has to the source routes table\n\n for output in function.get_outputs() {\n\n tables.sources.insert(\n\n 
output.route().clone(),\n\n (Output(output.name().to_string()), function.get_id()),\n\n );\n\n }\n\n\n\n // Add any inputs it has to the destination routes table\n\n for (input_index, input) in function.get_inputs().iter().enumerate() {\n\n tables.destination_routes.insert(\n\n input.route().clone(),\n\n (function.get_id(), input_index, function.get_flow_id()),\n\n );\n\n }\n\n }\n\n}\n\n\n", "file_path": "flowc/src/lib/compiler/connector.rs", "rank": 5, "score": 139856.94835519843 }, { "content": "fn load_flow(test_dir: &Path, test_name: &str, search_path: Simpath) -> Process {\n\n let test_flow = format!(\"{}.toml\", test_name);\n\n let mut flow_file = test_dir.to_path_buf();\n\n flow_file.push(test_flow);\n\n loader::load(\n\n &helper::absolute_file_url_from_relative_path(&flow_file.to_string_lossy()),\n\n &MetaProvider::new(search_path),\n\n &mut HashSet::<(Url, Url)>::new(),\n\n )\n\n .unwrap()\n\n}\n\n\n", "file_path": "flowc/tests/flowc-execution_tests.rs", "rank": 6, "score": 125879.735244271 }, { "content": "/// Load a `Flow` definition from a `Url`, recursively loading all sub-processes referenced.\n\n///\n\n/// The return is a `Result` containing the `Process`, or a `String` describing the error\n\n/// found while loading.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use flowcore::lib_provider::Provider;\n\n/// use flowcore::errors::Result;\n\n/// use std::env;\n\n/// use url::Url;\n\n/// use std::collections::HashSet;\n\n///\n\n/// // Clients need to provide a Provider of content for the loader as flowlibc is independent of\n\n/// // file systems and io.\n\n/// struct DummyProvider;\n\n///\n\n/// // A Provider must implement the `Provider` trait, with the methods to `resolve` a URL and to\n\n/// // `get` the contents for parsing.\n\n/// impl Provider for DummyProvider {\n\n/// fn resolve_url(&self, url: &Url, default_filename: &str, _ext: &[&str]) -> Result<(Url, Option<String>)> {\n\n/// // Just fake the url resolution in this example\n\n/// 
Ok((url.clone(), None))\n\n/// }\n\n///\n\n/// fn get_contents(&self, url: &Url) -> Result<Vec<u8>> {\n\n/// // Return the simplest flow definition possible - ignoring the url passed in\n\n/// Ok(\"flow = \\\"test\\\"\".as_bytes().to_owned())\n\n/// }\n\n/// }\n\n///\n\n/// // Create an instance of the `DummyProvider`\n\n/// let dummy_provider = DummyProvider{};\n\n///\n\n/// // keep track of the source Urls loaded for this flow\n\n/// let mut source_urls = HashSet::<(Url, Url)>::new();\n\n///\n\n/// // load the flow from `url = file:///example.toml` using the `dummy_provider`\n\n/// flowclib::compiler::loader::load(&Url::parse(\"file:///example.toml\").unwrap(), &dummy_provider, &mut source_urls).unwrap();\n\n/// ```\n\npub fn load(\n\n url: &Url,\n\n provider: &dyn Provider,\n\n #[cfg(feature = \"debugger\")] source_urls: &mut HashSet<(Url, Url)>,\n\n) -> Result<Process> {\n\n trace!(\"load()\");\n\n load_process(\n\n &Route::default(),\n\n &Name::default(),\n\n 0,\n\n &mut 0,\n\n url,\n\n provider,\n\n &HashMap::new(),\n\n #[cfg(feature = \"debugger\")]\n\n source_urls,\n\n 0,\n\n )\n\n}\n\n\n", "file_path": "flowc/src/lib/compiler/loader.rs", "rank": 7, "score": 125855.165143677 }, { "content": "pub fn start_executors(\n\n number_of_executors: usize,\n\n job_rx: &Arc<Mutex<Receiver<Job>>>,\n\n job_tx: &Sender<Job>,\n\n) {\n\n for executor_number in 0..number_of_executors {\n\n create_executor(\n\n format!(\"Executor #{}\", executor_number),\n\n job_rx.clone(),\n\n job_tx.clone(),\n\n ); // clone of Arcs and Sender OK\n\n }\n\n}\n\n\n\n/*\n\n Replace the standard panic hook with one that just outputs the file and line of any process's\n\n run-time panic.\n\n*/\n", "file_path": "flowr/src/lib/execution.rs", "rank": 8, "score": 125838.168482108 }, { "content": "/// Paths in the manifest are relative to the location of the manifest file, to make the file\n\n/// and associated files relocatable (and maybe packaged into a ZIP etc). 
So we use manifest_url\n\n/// as the location other file paths are made relative to.\n\npub fn create_manifest(\n\n flow: &Flow,\n\n debug_symbols: bool,\n\n manifest_url: &Url,\n\n tables: &GenerationTables,\n\n #[cfg(feature = \"debugger\")] source_urls: HashSet<(Url, Url)>,\n\n) -> Result<FlowManifest> {\n\n info!(\"Writing flow manifest to '{}'\", manifest_url);\n\n\n\n let mut manifest = FlowManifest::new(MetaData::from(flow));\n\n\n\n // Generate run-time Function struct for each of the compile-time functions\n\n for function in &tables.functions {\n\n manifest.add_function(function_to_runtimefunction(\n\n manifest_url,\n\n function,\n\n debug_symbols,\n\n )?);\n\n }\n\n\n\n manifest.set_lib_references(&tables.libs);\n\n #[cfg(feature = \"debugger\")]\n\n manifest.set_source_urls(source_urls);\n\n\n\n Ok(manifest)\n\n}\n\n\n", "file_path": "flowc/src/lib/generator/generate.rs", "rank": 9, "score": 123615.3077483715 }, { "content": "pub fn set_panic_hook() {\n\n panic::set_hook(Box::new(|panic_info| {\n\n /* Only available on 'nightly'\n\n if let Some(message) = panic_info.message() {\n\n error!(\"Message: {:?}\", message);\n\n }\n\n */\n\n\n\n if let Some(location) = panic_info.location() {\n\n error!(\n\n \"Panic in file '{}' at line {}\",\n\n location.file(),\n\n location.line()\n\n );\n\n }\n\n }));\n\n}\n\n\n", "file_path": "flowr/src/lib/execution.rs", "rank": 10, "score": 123611.25873882773 }, { "content": "/// Return a `LibraryManifest` for the run-time functions\n\npub fn get_manifest(\n\n server_connection: Arc<Mutex<ServerConnection<ServerMessage, ClientMessage>>>,\n\n) -> Result<LibraryManifest> {\n\n let metadata = MetaData {\n\n name: \"flowruntime\".into(),\n\n version: \"0.1.0\".into(),\n\n description: \"Flow Runtime functions\".into(),\n\n authors: vec![\"Andrew Mackenzie\".to_string()],\n\n };\n\n let lib_url = Url::parse(\"lib://flowruntime\")?;\n\n let mut manifest = LibraryManifest::new(lib_url, metadata);\n\n\n\n 
manifest.locators.insert(\n\n Url::parse(\"lib://flowruntime/args/get/get\")\n\n .chain_err(|| \"Could not parse url\")\n\n .chain_err(|| \"Could not parse url\")?,\n\n Native(Arc::new(args::get::Get {\n\n server_connection: server_connection.clone(),\n\n })),\n\n );\n", "file_path": "flowr/src/lib/flowruntime/mod.rs", "rank": 11, "score": 123611.25873882773 }, { "content": "/// dump a flow's functions graph as a .dot file to visualize dependencies\n\n///\n\n///\n\n/// # Example\n\n/// ```\n\n/// use std::env;\n\n/// use url::Url;\n\n/// use flowcore::lib_provider::{Provider, MetaProvider};\n\n/// use flowcore::errors::Result;\n\n/// use flowclib::model::process::Process::FlowProcess;\n\n/// use std::collections::HashSet;\n\n/// use simpath::Simpath;\n\n///\n\n/// let lib_search_path = Simpath::new(\"FLOW_LIB_PATH\");\n\n/// let provider = MetaProvider::new(lib_search_path);\n\n/// let mut url = url::Url::from_file_path(env::current_dir().unwrap()).unwrap();\n\n/// url = url.join(\"samples/hello-world/context.toml\").unwrap();\n\n///\n\n/// let mut source_urls = HashSet::<(Url, Url)>::new();\n\n///\n\n/// if let Ok(FlowProcess(mut flow)) = flowclib::compiler::loader::load(&url,\n\n/// &provider,\n\n/// &mut source_urls) {\n\n/// let tables = flowclib::compiler::compile::compile(&mut flow).unwrap();\n\n/// let output_dir = tempdir::TempDir::new(\"flow\").unwrap().into_path();\n\n///\n\n/// flowclib::dumper::dump_tables::dump_functions(&flow, &tables, &output_dir).unwrap();\n\n/// }\n\n/// ```\n\npub fn dump_functions(\n\n flow: &Flow,\n\n tables: &GenerationTables,\n\n output_dir: &Path,\n\n) -> std::io::Result<()> {\n\n functions_to_dot(flow, tables, output_dir)?;\n\n\n\n let mut writer = create_output_file(output_dir, \"functions\", \"dump\")?;\n\n info!(\"\\tGenerating functions.dump\");\n\n dump_table(tables.functions.iter(), &mut writer)\n\n}\n\n\n", "file_path": "flowc/src/lib/dumper/dump_tables.rs", "rank": 12, "score": 121511.88083511457 }, { "content": "/// 
Dump a human readable representation of loaded flow definition (in a `Flow` structure) to a\n\n/// file in the specified output directory\n\n///\n\n/// # Example\n\n/// ```\n\n/// use std::env;\n\n/// use url::Url;\n\n/// use flowcore::lib_provider::{Provider, MetaProvider};\n\n/// use flowcore::errors::Result;\n\n/// use flowclib::model::process::Process::FlowProcess;\n\n/// use tempdir::TempDir;\n\n/// use std::collections::HashSet;\n\n/// use simpath::Simpath;\n\n///\n\n/// let lib_search_path = Simpath::new(\"FLOW_LIB_PATH\");\n\n/// let provider = MetaProvider::new(lib_search_path);\n\n///\n\n/// let mut url = url::Url::from_file_path(env::current_dir().unwrap()).unwrap();\n\n/// url = url.join(\"samples/hello-world/context.toml\").unwrap();\n\n///\n\n/// let mut source_urls = HashSet::<(Url, Url)>::new();\n\n/// if let Ok(FlowProcess(mut flow)) = flowclib::compiler::loader::load(&url,\n\n/// &provider,\n\n/// &mut source_urls) {\n\n///\n\n/// // strip off filename so output_dir is where the context.toml file resides\n\n/// let output_dir = TempDir::new(\"flow\").unwrap().into_path();\n\n///\n\n/// // dump the flows compiler data and dot graph into files alongside the 'context.toml'\n\n/// flowclib::dumper::dump_flow::dump_flow(&flow, &output_dir, &provider, true, true).unwrap();\n\n/// }\n\n/// ```\n\npub fn dump_flow(\n\n flow: &Flow,\n\n target_dir: &Path,\n\n provider: &dyn Provider,\n\n dump_files: bool,\n\n dot_files: bool,\n\n) -> Result<()> {\n\n info!(\n\n \"=== Dumper: Dumping flow hierarchy to '{}'\",\n\n target_dir.display()\n\n );\n\n _dump_flow(flow, 0, target_dir, provider, dump_files, dot_files)?;\n\n info!(\"Dump complete\");\n\n Ok(())\n\n}\n\n\n", "file_path": "flowc/src/lib/dumper/dump_flow.rs", "rank": 13, "score": 121511.70756232143 }, { "content": "/// Generate a manifest for the flow in JSON that can be used to run it using 'flowr'\n\n// TODO this is tied to being a file:// - generalize this to write to a URL, moving the code\n\n// TODO 
into the provider and implementing for file and http\n\npub fn write_flow_manifest(\n\n flow: Flow,\n\n debug_symbols: bool,\n\n destination: &Path,\n\n tables: &GenerationTables,\n\n #[cfg(feature = \"debugger\")] source_urls: HashSet<(Url, Url)>,\n\n) -> Result<PathBuf> {\n\n let mut filename = destination.to_path_buf();\n\n filename.push(DEFAULT_MANIFEST_FILENAME.to_string());\n\n filename.set_extension(\"json\");\n\n let mut manifest_file =\n\n File::create(&filename).chain_err(|| \"Could not create manifest file\")?;\n\n let manifest_url =\n\n Url::from_file_path(&filename).map_err(|_| \"Could not parse Url from file path\")?;\n\n let manifest = create_manifest(\n\n &flow,\n\n debug_symbols,\n\n &manifest_url,\n\n tables,\n\n #[cfg(feature = \"debugger\")]\n", "file_path": "flowc/src/lib/generator/generate.rs", "rank": 14, "score": 121507.57208455712 }, { "content": "pub fn pixel_to_point(\n\n size: (usize, usize),\n\n pixel: (usize, usize),\n\n upper_left: Complex<f64>,\n\n lower_right: Complex<f64>,\n\n) -> Complex<f64> {\n\n let width = lower_right.re - upper_left.re;\n\n let height = upper_left.im - lower_right.im;\n\n\n\n Complex {\n\n re: upper_left.re + (pixel.0 as f64 * (width / size.0 as f64)),\n\n im: upper_left.im - (pixel.1 as f64 * (height / size.1 as f64)),\n\n // This is subtraction as pixel.1 increases as we go down,\n\n // but the imaginary component increases as we go up.\n\n }\n\n}\n\n\n\n/// Given the row and column of a pixel in the output image, return the\n\n/// corresponding point on the complex plane.\n\n///\n", "file_path": "samples/mandlebrot/pixel_to_point/pixel_to_point.rs", "rank": 15, "score": 121502.49290541143 }, { "content": "/// Compile a function's implementation to wasm and modify implementation to point to the wasm file\n\n/// Checks the timestamps of the source and wasm files and only recompiles if wasm file is out of date\n\npub fn compile_implementation(\n\n target_dir: &Path,\n\n function: &mut Function,\n\n 
native_only: bool,\n\n #[cfg(feature = \"debugger\")] source_urls: &mut HashSet<(Url, Url)>,\n\n) -> Result<(PathBuf, bool)> {\n\n let mut built = false;\n\n\n\n let (source_path, wasm_destination) = get_paths(target_dir, function)?;\n\n\n\n #[cfg(feature = \"debugger\")]\n\n source_urls.insert((\n\n Url::from_file_path(&source_path).map_err(|_| \"Could not create Url from file path\")?,\n\n Url::from_file_path(&wasm_destination)\n\n .map_err(|_| \"Could not create Url from file path\")?,\n\n ));\n\n\n\n let (missing, out_of_date) = out_of_date(&source_path, &wasm_destination)?;\n\n\n\n if missing || out_of_date {\n", "file_path": "flowc/src/lib/compiler/compile_wasm.rs", "rank": 16, "score": 121502.49290541143 }, { "content": "pub fn main() -> io::Result<()>{\n\n let bin_path = env::current_exe()?;\n\n println!(\n\n \"'{}' version {}\",\n\n env!(\"CARGO_CRATE_NAME\"),\n\n env!(\"CARGO_PKG_VERSION\")\n\n );\n\n println!(\"For more details see: {}\", env!(\"CARGO_PKG_HOMEPAGE\"));\n\n println!(\n\n \"'{}' binary located at '{}'\",\n\n env!(\"CARGO_CRATE_NAME\"),\n\n bin_path.display()\n\n );\n\n\n\n let bin_directory = bin_path.parent().ok_or_else(||\n\n io::Error::new( io::ErrorKind::Other, \"Could not get directory where 'flowstdlib' binary is located\"))?;\n\n check_flow_lib_path(bin_directory);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "flowstdlib/main.rs", "rank": 17, "score": 121354.78129948952 }, { "content": "/// Trait implemented by objects that have a Name\n\npub trait HasName {\n\n /// Return a reference to the name of the struct implementing this trait\n\n fn name(&self) -> &Name;\n\n /// Return a reference to the alias (also a Name type) of the struct implementing this trait\n\n fn alias(&self) -> &Name;\n\n}\n\n\n\nimpl Validate for Name {\n\n fn validate(&self) -> Result<()> {\n\n // Names cannot be numbers as they can be confused with array indexes for Array outputs\n\n if self.parse::<usize>().is_ok() {\n\n bail!(\n\n \"Name '{}' cannot be a number, 
they are reserved for array indexes\",\n\n self\n\n );\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "flowc/src/lib/model/name.rs", "rank": 18, "score": 120679.35088344241 }, { "content": "/// A trait implemented by objects that have Routes\n\npub trait HasRoute {\n\n /// Return a reference to the Route of the struct that implements this trait\n\n fn route(&self) -> &Route;\n\n /// Return a mutable reference to the Route of the struct that implements this trait\n\n fn route_mut(&mut self) -> &mut Route;\n\n}\n\n\n", "file_path": "flowc/src/lib/model/route.rs", "rank": 19, "score": 120595.06329278766 }, { "content": "/// All deserializers have to implement this trait for content deserialization, plus a method\n\n/// to return their name to be able to inform the user of which deserializer was used\n\npub trait Deserializer<'a, T: Deserialize<'a>> {\n\n /// Deserialize the supplied `content` that was loaded from `url` into a `P`\n\n fn deserialize(&self, contents: &'a str, url: Option<&Url>) -> Result<T>;\n\n /// Return the name of the serializer implementing this trait\n\n fn name(&self) -> &str;\n\n}\n\n\n", "file_path": "flowcore/src/deserializers/deserializer.rs", "rank": 20, "score": 119802.0493168147 }, { "content": "pub fn write_flow_to_dot(\n\n flow: &Flow,\n\n dot_file: &mut dyn Write,\n\n output_dir: &Path,\n\n) -> std::io::Result<()> {\n\n dot_file.write_all(digraph_wrapper_start(flow).as_bytes())?;\n\n\n\n let mut contents = String::new();\n\n\n\n // Inputs\n\n contents.push_str(&add_input_set(flow.inputs(), flow.route(), false));\n\n\n\n // Outputs\n\n contents.push_str(&add_output_set(flow.outputs(), flow.route(), false));\n\n\n\n // Process References\n\n contents.push_str(\"\\n\\t// Process References\\n\");\n\n for process_ref in &flow.process_refs {\n\n let process = flow.subprocesses.get(process_ref.alias()).ok_or_else(|| {\n\n std::io::Error::new(\n", "file_path": "flowc/src/lib/dumper/dump_dot.rs", "rank": 21, "score": 119502.71212674963 }, { 
"content": "/// Create a file at the specified `output_path`, `filename` and `extension` that output will be dumped to\n\npub fn create_output_file(\n\n output_path: &Path,\n\n filename: &str,\n\n extension: &str,\n\n) -> std::io::Result<File> {\n\n let mut output_file = PathBuf::from(filename);\n\n output_file.set_extension(extension);\n\n let mut output_file_path = output_path.to_path_buf();\n\n output_file_path.push(&output_file);\n\n File::create(&output_file_path)\n\n}\n\n\n\n/*\n\n Create a directed graph named after the flow, adding functions grouped in sub-clusters\n\n*/\n", "file_path": "flowc/src/lib/dumper/dump_tables.rs", "rank": 22, "score": 119502.71212674963 }, { "content": "/// Some structs with Routes will be able to have their route set by using parent route\n\npub trait SetRoute {\n\n /// Set the routes in fields of this struct based on the route of it's parent.\n\n fn set_routes_from_parent(&mut self, parent: &Route);\n\n}\n\n\n\n/// structs with IOs will be able to have the IOs routes set by using parent route\n", "file_path": "flowc/src/lib/model/route.rs", "rank": 23, "score": 118565.88645857517 }, { "content": "/// return the version number of the library as a string of the form \"M.m.p\"\n\n///\n\n/// - M is a one or two digit Major version number\n\n/// - m is a one or two digit Minor version number\n\n/// - p is a one or two digit Patch version number\n\npub fn version() -> &'static str {\n\n VERSION\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn can_get_version() {\n\n assert!(!version().is_empty());\n\n }\n\n}", "file_path": "flowc/src/lib/info.rs", "rank": 24, "score": 116772.60321911795 }, { "content": "/// Return the version number of the `flowrlib` library\n\npub fn version() -> &'static str {\n\n VERSION\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn can_get_version() {\n\n assert!(!version().is_empty());\n\n }\n\n}", "file_path": "flowr/src/lib/info.rs", "rank": 25, 
"score": 116772.60321911795 }, { "content": "#[allow(clippy::upper_case_acronyms)]\n\npub trait SetIORoutes {\n\n /// Set the route and IO type of IOs in this struct based on parent's route\n\n fn set_io_routes_from_parent(&mut self, parent: &Route, io_type: IOType);\n\n}\n\n\n\nimpl fmt::Display for Route {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl From<&str> for Route {\n\n fn from(string: &str) -> Self {\n\n Route(string.to_string())\n\n }\n\n}\n\n\n\nimpl From<String> for Route {\n\n fn from(string: String) -> Self {\n\n Route(string)\n", "file_path": "flowc/src/lib/model/route.rs", "rank": 26, "score": 116611.20305923151 }, { "content": "pub fn set_lib_search_path_to_project() -> Simpath {\n\n let mut lib_search_path = Simpath::new(\"lib_search_path\");\n\n\n\n // Add the root directory of the project so that 'flowstdlib' can be found\n\n let root_str = Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .parent()\n\n .expect(\"Could not get project root dir\");\n\n lib_search_path.add_directory(root_str.to_str().unwrap());\n\n\n\n // Add the parent directory of 'flowruntime' which is in flowr/src/lib so it can be found\n\n let runtime_parent = root_str.join(\"flowr/src/lib\");\n\n lib_search_path.add_directory(runtime_parent.to_str().unwrap());\n\n\n\n lib_search_path\n\n}\n\n\n", "file_path": "flowc/tests/helper.rs", "rank": 27, "score": 115796.08215049595 }, { "content": "/// Return the LibraryManifest for this library\n\npub fn get_manifest() -> Result<LibraryManifest> {\n\n let metadata = MetaData {\n\n name: env!(\\\"CARGO_PKG_NAME\\\").into(),\n\n version: env!(\\\"CARGO_PKG_VERSION\\\").into(),\n\n description: env!(\\\"CARGO_PKG_DESCRIPTION\\\").into(),\n\n authors: env!(\\\"CARGO_PKG_AUTHORS\\\")\n\n .split(':')\n\n .map(|s| s.to_string())\n\n .collect(),\n\n };\n\n let lib_url = Url::parse(&format!(\\\"lib://{}\\\", metadata.name))?;\n\n let mut manifest = LibraryManifest::new(lib_url, 
metadata);\\n\n\n\";\n\n\n\n/// Generate a manifest for the library in rust format for static linking into a runtime binary\n", "file_path": "flowc/src/lib/compiler/rust_manifest.rs", "rank": 28, "score": 108959.31565218588 }, { "content": "pub fn set_lib_search_path_flowstdlib_on_web() -> Simpath {\n\n let mut lib_search_path = Simpath::new(\"lib_search_path\");\n\n\n\n // Add the parent directory of 'flowruntime' which is in flowr/src/lib so `lib://flowruntime/*` references\n\n // can be found\n\n // let root_str = Path::new(env!(\"CARGO_MANIFEST_DIR\")).parent().expect(\"Could not get project root dir\");\n\n // let runtime_parent = root_str.join(\"flowr/src/lib\");\n\n // lib_search_path.add_directory(runtime_parent.to_str().unwrap());\n\n lib_search_path.add_url(&Url::parse(\"https://raw.githubusercontent.com/andrewdavidmackenzie/flow/master/flowr/src/lib/flowruntime\")\n\n .expect(\"Could not parse the url for Simpath\"));\n\n\n\n // Add the url of 'flowstdlib' on the web, so `lib://flowstdlib/*` references can be found\n\n lib_search_path.add_url(\n\n &Url::parse(\n\n \"https://raw.githubusercontent.com/andrewdavidmackenzie/flow/master/flowstdlib\",\n\n )\n\n .expect(\"Could not parse the url for Simpath\"),\n\n );\n\n\n\n lib_search_path\n\n}\n\n\n", "file_path": "flowc/tests/flowc-sample_loading_tests.rs", "rank": 29, "score": 108733.66274011762 }, { "content": "pub fn optimize(tables: &mut GenerationTables) {\n\n while remove_dead_processes(tables) {}\n\n}\n\n\n", "file_path": "flowc/src/lib/compiler/optimizer.rs", "rank": 30, "score": 108083.60181413188 }, { "content": "pub fn absolute_file_url_from_relative_path(path: &str) -> Url {\n\n let flow_root = Path::new(env!(\"CARGO_MANIFEST_DIR\")).parent().unwrap();\n\n Url::from_directory_path(flow_root)\n\n .unwrap()\n\n .join(path)\n\n .unwrap()\n\n}\n", "file_path": "flowc/tests/helper.rs", "rank": 31, "score": 106277.89259121957 }, { "content": "#[test]\n\nfn dead_process_and_connected_process_removed() 
{\n\n let meta_provider = MetaProvider::new(helper::set_lib_search_path_to_project());\n\n let path = helper::absolute_file_url_from_relative_path(\"flowc/tests/test-flows/dead-process-and-connected-process/dead-process-and-connected-process.toml\");\n\n let process = loader::load(&path, &meta_provider, &mut HashSet::<(Url, Url)>::new()).unwrap();\n\n if let FlowProcess(ref flow) = process {\n\n match compile::compile(&flow) {\n\n Ok(_tables) => panic!(\"Flow should not compile when it has no side-effects\"),\n\n Err(e) => assert_eq!(\"Flow has no side-effects\", e.description()),\n\n }\n\n // assert!(\n\n // tables.functions.is_empty(),\n\n // \"Incorrect number of functions after optimization\"\n\n // );\n\n // // And the connection are all gone also\n\n // assert_eq!(\n\n // tables.collapsed_connections.len(),\n\n // 0,\n\n // \"Incorrect number of connections after optimization\"\n\n // );\n\n } else {\n\n panic!(\"Process loaded was not a flow\");\n\n }\n\n}\n\n\n", "file_path": "flowc/tests/flowc-compiler_tests.rs", "rank": 32, "score": 105799.90304176763 }, { "content": "/// Generate SVG files from any .dot file found below the `root_dir` using the `dot` graphviz\n\n/// executable, if it is found on the system within the `$PATH` variable of the user\n\npub fn generate_svgs(root_dir: &Path) -> Result<()> {\n\n if let Ok(FoundType::File(dot)) = Simpath::new(\"PATH\").find_type(\"dot\", FileType::File) {\n\n println!(\"Generating .dot.svg files from .dot files, using 'dot' command from $PATH\");\n\n\n\n let mut dot_command = Command::new(dot);\n\n let options = MatchOptions {\n\n case_sensitive: false,\n\n ..Default::default()\n\n };\n\n\n\n let pattern = format!(\"{}/**/*.dot\", root_dir.to_string_lossy());\n\n\n\n for path in glob_with(&pattern, options)?.flatten() {\n\n let dot_child = dot_command\n\n .args(vec![\"-Tsvg\", \"-O\", &path.to_string_lossy()])\n\n .stdin(Stdio::inherit())\n\n .stdout(Stdio::inherit())\n\n .stderr(Stdio::inherit())\n\n 
.spawn()?;\n\n\n", "file_path": "flowc/src/lib/dumper/dump_flow.rs", "rank": 33, "score": 104563.03945260431 }, { "content": "pub fn index_functions(functions: &mut Vec<Function>) {\n\n for (index, function) in functions.iter_mut().enumerate() {\n\n function.set_id(index);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use url::Url;\n\n\n\n use flowcore::output_connection::{OutputConnection, Source};\n\n\n\n use crate::model::function::Function;\n\n use crate::model::io::IO;\n\n use crate::model::name::Name;\n\n use crate::model::route::Route;\n\n\n\n #[test]\n\n fn empty_index_test() {\n\n super::index_functions(&mut vec![]);\n", "file_path": "flowc/src/lib/compiler/gatherer.rs", "rank": 34, "score": 103956.35582234219 }, { "content": "/// Take a hierarchical flow definition in memory and compile it, generating a manifest for execution\n\n/// of the flow, including references to libraries required.\n\npub fn compile(flow: &Flow) -> Result<GenerationTables> {\n\n trace!(\"compile()\");\n\n let mut tables = GenerationTables::new();\n\n\n\n info!(\"=== Compiler phase: Gathering\");\n\n gatherer::gather_functions_and_connections(flow, &mut tables)?;\n\n info!(\"=== Compiler phase: Collapsing connections\");\n\n tables.collapsed_connections = connector::collapse_connections(&tables.connections);\n\n info!(\"=== Compiler phase: Optimizing\");\n\n optimizer::optimize(&mut tables);\n\n info!(\"=== Compiler phase: Indexing\");\n\n gatherer::index_functions(&mut tables.functions);\n\n info!(\"=== Compiler phase: Calculating routes tables\");\n\n connector::create_routes_table(&mut tables);\n\n info!(\"=== Compiler phase: Checking connections\");\n\n checker::check_connections(&mut tables)?;\n\n info!(\"=== Compiler phase: Checking processes\");\n\n checker::check_function_inputs(&mut tables)?;\n\n info!(\"=== Compiler phase: Checking flow has side-effects\");\n\n checker::check_side_effects(&mut tables)?;\n", "file_path": "flowc/src/lib/compiler/compile.rs", "rank": 35, 
"score": 103956.35582234219 }, { "content": "/// Given an output directory, return a PathBuf to the rust format manifest that should be\n\n/// generated inside it\n\npub fn manifest_filename(base_dir: &Path) -> PathBuf {\n\n let mut filename = base_dir.to_path_buf();\n\n filename.push(DEFAULT_LIB_RUST_MANIFEST_FILENAME.to_string());\n\n filename\n\n}\n", "file_path": "flowc/src/lib/compiler/rust_manifest.rs", "rank": 36, "score": 102920.21413569522 }, { "content": "/// Given an output directory, return a PathBuf to the json format manifest that should be\n\n/// generated inside it\n\npub fn manifest_filename(base_dir: &Path) -> PathBuf {\n\n let mut filename = base_dir.to_path_buf();\n\n filename.push(DEFAULT_LIB_JSON_MANIFEST_FILENAME.to_string());\n\n filename.set_extension(\"json\");\n\n filename\n\n}\n", "file_path": "flowc/src/lib/compiler/json_manifest.rs", "rank": 37, "score": 102920.21413569522 }, { "content": "/// Take the original table of connections as gathered from the flow hierarchy, and for each one\n\n/// follow it through any intermediate connections (sub-flow boundaries) to arrive at the final\n\n/// destination. 
Then create a new direct connection from source to destination and add that\n\n/// to the table of \"collapsed\" connections which will be used to configure the outputs of the\n\n/// functions.\n\npub fn collapse_connections(original_connections: &[Connection]) -> Vec<Connection> {\n\n let mut collapsed_connections: Vec<Connection> = Vec::new();\n\n\n\n debug!(\n\n \"Working on collapsing {} flow connections\",\n\n original_connections.len()\n\n );\n\n\n\n for connection in original_connections {\n\n // All collapsed connections must start and end at a Function, so we only build\n\n // them starting at ones that begin at a Function's IO\n\n if *connection.from_io().io_type() == IOType::FunctionIO {\n\n debug!(\n\n \"Trying to create connection from function output at '{}' (level={})\",\n\n connection.from_io().route(),\n\n connection.level()\n\n );\n\n if *connection.to_io().io_type() == IOType::FunctionIO {\n\n debug!(\n\n \"\\tFound direct connection to function input at '{}'\",\n", "file_path": "flowc/src/lib/compiler/connector.rs", "rank": 38, "score": 102240.98954124129 }, { "content": "pub fn check_connections(tables: &mut GenerationTables) -> Result<()> {\n\n check_for_competing_inputs(tables)?;\n\n\n\n Ok(())\n\n}\n\n\n\n/*\n\n Check for a problems that lead to competition for inputs causing input overflow:\n\n - A single function has two output connections to the same destination input\n\n - a function connects to an input that has a constant initializer\n\n*/\n", "file_path": "flowc/src/lib/compiler/checker.rs", "rank": 39, "score": 102237.25497826072 }, { "content": "/// Go through all connections, finding:\n\n/// - source process (process id and output route connection is from)\n\n/// - destination process (process id and input number the connection is to)\n\n///\n\n/// Then add an output route to the source process's output routes vector\n\n/// (according to each function's output route in the original description plus each connection from\n\n/// that 
route, which could be to multiple destinations)\n\npub fn prepare_function_connections(tables: &mut GenerationTables) -> Result<()> {\n\n debug!(\"Setting output routes on processes\");\n\n for connection in &tables.collapsed_connections {\n\n if let Some((source, source_id)) = get_source(&tables.sources, connection.from_io().route())\n\n {\n\n if let Some(&(destination_function_id, destination_input_index, destination_flow_id)) =\n\n tables.destination_routes.get(connection.to_io().route())\n\n {\n\n if let Some(source_function) = tables.functions.get_mut(source_id) {\n\n debug!(\n\n \"Connection: from '{}' to '{}'\",\n\n &connection.from_io().route(),\n\n &connection.to_io().route()\n\n );\n\n debug!(\" Source output route = '{}' --> Destination: Process ID = {}, Input number = {}\",\n\n source, destination_function_id, destination_input_index);\n\n\n\n let output_conn = OutputConnection::new(\n\n source,\n\n destination_function_id,\n", "file_path": "flowc/src/lib/compiler/connector.rs", "rank": 40, "score": 100610.55533434308 }, { "content": "/// Check that some impure function producing a side effect is called or return an error\n\npub fn check_side_effects(tables: &mut GenerationTables) -> Result<()> {\n\n for function in &tables.functions {\n\n if function.is_impure() {\n\n return Ok(());\n\n }\n\n }\n\n\n\n bail!(\"Flow has no side-effects\")\n\n}\n\n\n", "file_path": "flowc/src/lib/compiler/checker.rs", "rank": 41, "score": 100598.67736681784 }, { "content": "/// Check that all Functions have connections to all their inputs or return an error\n\npub fn check_function_inputs(tables: &mut GenerationTables) -> Result<()> {\n\n for function in &tables.functions {\n\n for input in function.get_inputs() {\n\n match input.get_initializer() {\n\n None => {\n\n if !connection_to(tables, input.route()) {\n\n bail!(\"Input at route '{}' is not used\", input.route());\n\n }\n\n }\n\n Some(Always(_)) => {\n\n // Has a constant initializer and there is another\n\n // 
connections to this input then flag that as an error\n\n if connection_to(tables, input.route()) {\n\n bail!(\"Input at route '{}' has a 'constant' initializer and a connection to it\",\n\n input.route());\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "flowc/src/lib/compiler/checker.rs", "rank": 42, "score": 100598.67736681784 }, { "content": "#[test]\n\nfn invalid_process() {\n\n let meta_provider = MetaProvider::new(helper::set_lib_search_path_to_project());\n\n let path = helper::absolute_file_url_from_relative_path(\n\n \"flowc/tests/test-flows/invalid-process/invalid-process.toml\",\n\n );\n\n if loader::load(&path, &meta_provider, &mut HashSet::<(Url, Url)>::new()).is_ok() {\n\n panic!(\"invalid.toml should not load successfully\");\n\n }\n\n}\n\n\n", "file_path": "flowc/tests/flowc-loader_tests.rs", "rank": 43, "score": 100489.97170588003 }, { "content": "#[allow(clippy::too_many_arguments)]\n\nfn load_process(\n\n parent_route: &Route,\n\n alias: &Name,\n\n parent_flow_id: usize,\n\n flow_count: &mut usize,\n\n url: &Url,\n\n provider: &dyn Provider,\n\n initializations: &HashMap<String, InputInitializer>,\n\n #[cfg(feature = \"debugger\")] source_urls: &mut HashSet<(Url, Url)>,\n\n level: usize,\n\n) -> Result<Process> {\n\n trace!(\"load_process()\");\n\n\n\n let (resolved_url, lib_ref) = provider\n\n .resolve_url(url, \"context\", &[\"toml\"])\n\n .chain_err(|| format!(\"Could not resolve the url: '{}'\", url))?;\n\n if &resolved_url != url {\n\n debug!(\"Source URL '{}' resolved to: '{}'\", url, resolved_url);\n\n }\n\n\n", "file_path": "flowc/src/lib/compiler/loader.rs", "rank": 44, "score": 100489.97170588003 }, { "content": "fn impl_flow_impl(ast: &syn::DeriveInput) -> TokenStream {\n\n let name = &ast.ident;\n\n let gen = quote! 
{\n\n use std::os::raw::c_void;\n\n\n\n // Allocate a chunk of memory of `size` bytes in wasm module\n\n #[cfg(target_arch = \"wasm32\")]\n\n #[no_mangle]\n\n pub extern \"C\" fn alloc(size: usize) -> *mut c_void {\n\n use std::mem;\n\n let mut buf = Vec::with_capacity(size);\n\n let ptr = buf.as_mut_ptr();\n\n mem::forget(buf);\n\n return ptr as *mut c_void;\n\n }\n\n\n\n // Wrapper function for running a wasm implementation\n\n #[cfg(target_arch = \"wasm32\")]\n\n #[no_mangle]\n\n pub extern \"C\" fn run_wasm(input_data_ptr: *mut c_void, input_data_length: i32) -> i32 {\n", "file_path": "flow_impl_derive/src/lib.rs", "rank": 45, "score": 100116.62903946787 }, { "content": "/// Get the file extension of the resource referred to by `url`\n\nfn get_file_extension(url: &Url) -> Option<&str> {\n\n url.path_segments()?.last()?.rsplit_once('.').map(|t| t.1)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use serde_derive::{Deserialize, Serialize};\n\n use url::Url;\n\n\n\n use super::get_deserializer;\n\n use super::get_file_extension;\n\n\n\n #[derive(Serialize, Deserialize, Debug, Clone)]\n\n #[serde(untagged)]\n\n pub enum TestStruct {\n\n /// The process is actually a `Flow`\n\n FlowProcess(String),\n\n /// The process is actually a `Function`\n\n FunctionProcess(String),\n\n }\n", "file_path": "flowcore/src/deserializers/deserializer.rs", "rank": 46, "score": 98733.26168565883 }, { "content": "/// Try to determine if 'c' is in the Mandlebrot set, using at most 'limit' iterations to decide\n\n/// If 'c' is not a member, return 'Some(i)', where 'i' is the number of iterations it took for 'c'\n\n/// to leave the circle of radius two centered on the origin.\n\n/// If 'c' seems to be a member (more precisely, if we reached the iteration limit without being\n\n/// able to prove that 'c' is not a member) return 'None'\n\npub fn escapes(c: Complex<f64>, limit: u64) -> u64 {\n\n if c.norm_sqr() > 4.0 {\n\n return 0;\n\n }\n\n\n\n let mut z = c;\n\n\n\n for i in 1..limit {\n\n z 
= z * z + c;\n\n if z.norm_sqr() > 4.0 {\n\n return i;\n\n }\n\n }\n\n\n\n limit\n\n}\n\n\n\n/*\n\n Given the row and column of a pixel in the output image, return the\n\n corresponding point on the complex plane.\n", "file_path": "samples/mandlebrot/render_pixel/render_pixel.rs", "rank": 47, "score": 98422.0895247917 }, { "content": "#[cfg(feature = \"debugger\")]\n\n#[test]\n\n#[serial]\n\nfn debug_print_args() {\n\n let search_path = helper::set_lib_search_path_to_project();\n\n execute_test(\"debug-print-args\", search_path, false);\n\n}\n\n\n", "file_path": "flowc/tests/flowc-execution_tests.rs", "rank": 48, "score": 98360.92043254578 }, { "content": "#[test]\n\nfn same_name_flow_ids() {\n\n let meta_provider = MetaProvider::new(helper::set_lib_search_path_to_project());\n\n let path = helper::absolute_file_url_from_relative_path(\n\n \"flowc/tests/test-flows/same-name-parent/same-name-parent.toml\",\n\n );\n\n let process = loader::load(&path, &meta_provider, &mut HashSet::<(Url, Url)>::new()).unwrap();\n\n if let FlowProcess(ref flow) = process {\n\n let tables = compile::compile(flow).unwrap();\n\n\n\n // print function in context flow should have flow_id = 0\n\n let print_function = tables\n\n .functions\n\n .iter()\n\n .find(|f| f.alias() == &Name::from(\"print\"))\n\n .unwrap();\n\n assert_eq!(\n\n print_function.get_flow_id(),\n\n 0,\n\n \"print function in context should have flow_id = 0\"\n\n );\n", "file_path": "flowc/tests/flowc-compiler_tests.rs", "rank": 49, "score": 98347.26327451409 }, { "content": "#[test]\n\nfn same_name_input_and_output() {\n\n let meta_provider = MetaProvider::new(helper::set_lib_search_path_to_project());\n\n let path = helper::absolute_file_url_from_relative_path(\n\n \"flowc/tests/test-flows/same-name-parent/same-name-parent.toml\",\n\n );\n\n let process = loader::load(&path, &meta_provider, &mut HashSet::<(Url, Url)>::new()).unwrap();\n\n if let FlowProcess(ref flow) = process {\n\n let tables = 
compile::compile(flow).unwrap();\n\n // If done correctly there should only be two connections\n\n // args -> buffer, and buffer -> print\n\n assert_eq!(4, tables.collapsed_connections.len());\n\n } else {\n\n panic!(\"Process loaded was not a flow\");\n\n }\n\n}\n\n\n", "file_path": "flowc/tests/flowc-compiler_tests.rs", "rank": 50, "score": 98347.26327451409 }, { "content": "fn load_process_refs(\n\n flow: &mut Flow,\n\n flow_count: &mut usize,\n\n provider: &dyn Provider,\n\n #[cfg(feature = \"debugger\")] source_urls: &mut HashSet<(Url, Url)>,\n\n level: usize,\n\n) -> Result<()> {\n\n for process_ref in &mut flow.process_refs {\n\n let subprocess_url = flow\n\n .source_url\n\n .join(&process_ref.source)\n\n .map_err(|e| e.to_string())?;\n\n let process = load_process(\n\n &flow.route,\n\n process_ref.alias(),\n\n flow.id,\n\n flow_count,\n\n &subprocess_url,\n\n provider,\n\n &process_ref.initializations,\n", "file_path": "flowc/src/lib/compiler/loader.rs", "rank": 51, "score": 98260.97864363296 }, { "content": "#[test]\n\nfn dead_process_removed() {\n\n let meta_provider = MetaProvider::new(helper::set_lib_search_path_to_project());\n\n let path = helper::absolute_file_url_from_relative_path(\n\n \"flowc/tests/test-flows/dead-process/dead-process.toml\",\n\n );\n\n let process = loader::load(&path, &meta_provider, &mut HashSet::<(Url, Url)>::new()).unwrap();\n\n if let FlowProcess(ref flow) = process {\n\n let tables = compile::compile(flow).unwrap();\n\n // Dead value should be removed - currently can't assume that args function can be removed\n\n assert_eq!(\n\n tables.functions.len(),\n\n 1,\n\n \"Incorrect number of functions after optimization\"\n\n );\n\n assert_eq!(\n\n tables.functions.get(0).unwrap().get_id(),\n\n 0,\n\n \"Function indexes do not start at 0\"\n\n );\n\n // And the connection to it also\n\n assert_eq!(\n\n tables.collapsed_connections.len(),\n\n 0,\n\n \"Incorrect number of connections after optimization\"\n\n );\n\n } else {\n\n 
panic!(\"Process loaded was not a flow\");\n\n }\n\n}\n\n\n", "file_path": "flowc/tests/flowc-compiler_tests.rs", "rank": 52, "score": 98260.97864363296 }, { "content": "/// Run the cargo build to compile wasm from function source\n\npub fn run(implementation_source_path: &Path, wasm_destination: &Path) -> Result<()> {\n\n let mut cargo_manifest_path = implementation_source_path.to_path_buf();\n\n cargo_manifest_path.set_file_name(\"Cargo.toml\");\n\n\n\n // Create a temp directory for building in. To avoid the corner case where the TempDir\n\n // maybe on another FS from the destination (preventing renaming) I create it under the\n\n // destination directory - but it will be cleaned up when `build_dir` goes out of scope\n\n let build_dir = TempDir::new_in(\n\n wasm_destination\n\n .parent()\n\n .ok_or(\"Could not create temp dir for WASM building\")?,\n\n \"flow\",\n\n )\n\n .chain_err(|| \"Error creating new TempDir for compiling in\")?\n\n .into_path();\n\n\n\n cargo_test(cargo_manifest_path.clone(), build_dir.clone())?;\n\n cargo_build(\n\n cargo_manifest_path,\n\n &build_dir,\n", "file_path": "flowc/src/lib/compiler/cargo_build.rs", "rank": 53, "score": 95511.92688472162 }, { "content": "#[test]\n\nfn root_flow_takes_name_from_file() {\n\n let meta_provider = MetaProvider::new(helper::set_lib_search_path_to_project());\n\n // Relative path from project root to the test file\n\n let url =\n\n helper::absolute_file_url_from_relative_path(\"flowc/tests/test-flows/names/names.toml\");\n\n\n\n match loader::load(&url, &meta_provider, &mut HashSet::<(Url, Url)>::new()) {\n\n Ok(FlowProcess(flow)) => assert_eq!(flow.name, Name::from(\"names\")),\n\n _ => panic!(\"Flow could not be loaded\"),\n\n }\n\n}\n\n\n\n/*\n\n This tests that an initializer on an input to a flow process is passed onto function processes\n\n inside the flow, via a connection from the flow input to the function input\n\n*/\n", "file_path": "flowc/tests/flowc-loader_tests.rs", "rank": 54, "score": 
94230.41367935305 }, { "content": "/// Generate a manifest for the library in JSON that can be used to load it using 'flowr'\n\npub fn write(lib_manifest: &LibraryManifest, json_manifest_filename: &Path) -> Result<()> {\n\n let mut manifest_file = File::create(&json_manifest_filename)?;\n\n\n\n manifest_file.write_all(\n\n serde_json::to_string_pretty(lib_manifest)\n\n .chain_err(|| \"Could not pretty format the library manifest JSON contents\")?\n\n .as_bytes(),\n\n )?;\n\n\n\n info!(\n\n \"Generated library JSON manifest at '{}'\",\n\n json_manifest_filename.display()\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "flowc/src/lib/compiler/json_manifest.rs", "rank": 55, "score": 94089.17350898968 }, { "content": "/// This module is responsible for parsing the flow tree and gathering information into a set of\n\n/// flat tables that the compiler can use for code generation.\n\npub fn gather_functions_and_connections(flow: &Flow, tables: &mut GenerationTables) -> Result<()> {\n\n // Add Connections from this flow hierarchy to the connections table\n\n let mut connections = flow.connections.clone();\n\n tables.connections.append(&mut connections);\n\n\n\n // Do the same for all subprocesses referenced from this one\n\n for subprocess in &flow.subprocesses {\n\n match subprocess.1 {\n\n FlowProcess(ref flow) => {\n\n gather_functions_and_connections(flow, tables)?; // recurse\n\n }\n\n FunctionProcess(ref function) => {\n\n // Add Functions from this flow to the table of functions\n\n tables.functions.push(function.clone());\n\n }\n\n }\n\n }\n\n\n\n // Add the library references of this flow into the tables list\n\n let lib_refs = &flow.lib_references;\n\n tables.libs.extend(lib_refs.iter().cloned());\n\n\n\n Ok(())\n\n}\n\n\n\n/*\n\n Give each function a unique index that will later be used to indicate where outputs get sent\n\n to, and used in code generation.\n\n*/\n", "file_path": "flowc/src/lib/compiler/gatherer.rs", "rank": 56, "score": 93726.91806913225 }, { 
"content": "/// Accept an optional string (URL or filename) and from it create an absolute path URL with correct\n\n/// scheme. This allows specifying of full URL (http, file etc) as well as file paths relative\n\n/// to the working directory.\n\n///\n\n/// Depending on the parameter passed in:\n\n/// - None --> Return the Current Working Directory (CWD)\n\n/// - Some(absolute path) --> Return the absolute path passed in\n\n/// - Some(relative path) --> Join the CWD with the relative path and return the resulting\n\n/// absolute path.\n\n///\n\n/// Returns a full URL with appropriate scheme (depending on the original scheme passed in),\n\n/// and an absolute path.\n\n///\n\npub fn url_from_string(base_url: &Url, string: Option<&str>) -> Result<Url> {\n\n match string {\n\n None => {\n\n info!(\"No url specified, so using: '{}'\", base_url);\n\n Ok(base_url.clone())\n\n }\n\n Some(url_string) => base_url\n\n .join(url_string)\n\n .chain_err(|| format!(\"Problem joining url '{}' with '{}'\", base_url, url_string)),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::env;\n\n use std::path::PathBuf;\n\n use std::str::FromStr;\n\n\n\n use url::Url;\n\n\n", "file_path": "flowcore/src/url_helper.rs", "rank": 57, "score": 93626.69150993666 }, { "content": "fn execute(mut job: Job, job_tx: &Sender<Job>, name: &str) -> Result<()> {\n\n // Run the job and catch the execution result\n\n trace!(\"Job #{}:\\tExecuting on '{}'\", job.job_id, name);\n\n let result = job.implementation.run(&job.input_set);\n\n\n\n job.result = result;\n\n job_tx\n\n .send(job)\n\n .chain_err(|| \"Error sending job result back after execution\")\n\n}\n", "file_path": "flowr/src/lib/execution.rs", "rank": 58, "score": 93442.21588761015 }, { "content": "/// load a Wasm module from the specified Url.\n\npub fn load(provider: &dyn Provider, source_url: &Url) -> Result<WasmExecutor> {\n\n let (resolved_url, _) = provider\n\n .resolve_url(source_url, DEFAULT_WASM_FILENAME, &[\"wasm\"])\n\n 
.chain_err(|| \"Could not resolve url for manifest while attempting to load manifest\")?;\n\n let content = provider.get_contents(&resolved_url).chain_err(|| {\n\n format!(\n\n \"Could not fetch content from url '{}' for loading wasm\",\n\n resolved_url\n\n )\n\n })?;\n\n let module = Module::from_buffer(content).chain_err(|| {\n\n format!(\n\n \"Could not create Wasm Module from content in '{}'\",\n\n resolved_url\n\n )\n\n })?;\n\n\n\n let module_ref = ModuleInstance::new(&module, &ImportsBuilder::default())\n\n .chain_err(|| \"Could not create new ModuleInstance when loading WASM content\")?\n\n .assert_no_start();\n", "file_path": "flowr/src/lib/wasm/wasmi.rs", "rank": 59, "score": 92133.07067684506 }, { "content": "/// load a Wasm module from the specified Url.\n\npub fn load(provider: &dyn Provider, source_url: &Url) -> Result<WasmExecutor> {\n\n let (resolved_url, _) = provider\n\n .resolve_url(source_url, DEFAULT_WASM_FILENAME, &[\"wasm\"])\n\n .chain_err(|| \"Could not resolve url for manifest while attempting to load manifest\")?;\n\n let content = provider.get_contents(&resolved_url).chain_err(|| {\n\n format!(\n\n \"Could not fetch content from url '{}' for loading wasm\",\n\n resolved_url\n\n )\n\n })?;\n\n\n\n let mut store: Store<()> = Store::default();\n\n let module = Module::new(store.engine(), content)\n\n .map_err(|e| format!(\"Could not create WASM Module: {}\", e))?;\n\n let instance = Instance::new(&mut store, &module, &[])\n\n .map_err(|e| format!(\"Could not create WASM Instance: {}\", e))?;\n\n let memory = instance\n\n .get_memory(&mut store, \"memory\")\n\n .ok_or(\"Could not get WASM linear memory\")?;\n\n let implementation = instance\n", "file_path": "flowr/src/lib/wasm/wasmtime.rs", "rank": 60, "score": 92133.07067684506 }, { "content": "/// Parse the string 's' as a coordinate pair, like \"400x600\" or \"1.0,0.5\"\n\n/// Specifically, 's' should have the form <left><sep><right> where <sep> is the character given by\n\n/// the 
'separator' argument, and <left> and <right> are both strings that can be parsed\n\n/// by 'T::from_str'.\n\n/// If 's' has the proper form, return 'Some<(x,y)>'.\n\n/// If 's' doesn't parse correctly, return None.\n\npub fn parse_pair<T: FromStr>(s: &str, separator: &str) -> Option<(T, T)> {\n\n match s.find(separator) {\n\n None => None,\n\n Some(index) => {\n\n match (T::from_str(&s[..index]), T::from_str(&s[index + 1..])) {\n\n (Ok(l), Ok(r)) => Some((l, r)),\n\n _ => None\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::parse_pair;\n\n\n\n #[test]\n\n fn test_parse_pair() {\n\n assert_eq!(parse_pair::<i32>(\"\", \",\"), None);\n\n assert_eq!(parse_pair::<i32>(\"10,\", \",\"), None);\n\n assert_eq!(parse_pair::<i32>(\",10\", \",\"), None);\n\n assert_eq!(parse_pair::<i32>(\"10,20\", \",\"), Some((10, 20)));\n\n assert_eq!(parse_pair::<i32>(\"10,20xy\", \",\"), None);\n\n assert_eq!(parse_pair::<f64>(\"0.5x\", \",\"), None);\n\n assert_eq!(parse_pair::<f64>(\"0.5x1.5\", \"x\"), Some((0.5, 1.5)));\n\n }\n\n}\n\n\n\n\n", "file_path": "samples/mandlebrot/parse_pair.rs", "rank": 61, "score": 90926.95113814346 }, { "content": "/// Dump the compiler tables of a loaded flow in human readable format to a specified\n\n/// output directory.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use std::env;\n\n/// use url::Url;\n\n/// use flowcore::lib_provider::{Provider, MetaProvider};\n\n/// use flowcore::errors::Result;\n\n/// use flowclib::model::process::Process::FlowProcess;\n\n/// use std::collections::HashSet;\n\n/// use simpath::Simpath;\n\n///\n\n/// let lib_search_path = Simpath::new(\"FLOW_LIB_PATH\");\n\n/// let provider = MetaProvider::new(lib_search_path);\n\n///\n\n/// let mut url = url::Url::from_file_path(env::current_dir().unwrap()).unwrap();\n\n/// url = url.join(\"samples/hello-world/context.toml\").unwrap();\n\n///\n\n/// let mut source_urls = HashSet::<(Url, Url)>::new();\n\n///\n\n/// if let Ok(FlowProcess(mut flow)) = 
flowclib::compiler::loader::load(&url,\n\n/// &provider,\n\n/// &mut source_urls) {\n\n/// let tables = flowclib::compiler::compile::compile(&mut flow).unwrap();\n\n/// let output_dir = tempdir::TempDir::new(\"flow\").unwrap().into_path();\n\n///\n\n/// let tables = flowclib::compiler::compile::compile(&mut flow).unwrap();\n\n/// let output_dir = tempdir::TempDir::new(\"dumper\").unwrap().into_path();\n\n///\n\n/// flowclib::dumper::dump_tables::dump_tables(&tables, &output_dir).unwrap();\n\n/// }\n\n/// ```\n\n///\n\npub fn dump_tables(tables: &GenerationTables, output_dir: &Path) -> std::io::Result<()> {\n\n info!(\"=== Dumper: Dumping tables to '{}'\", output_dir.display());\n\n\n\n let mut writer = create_output_file(output_dir, \"connections\", \"dump\")?;\n\n info!(\"\\tGenerating connections.dump\");\n\n writer.write_all(serde_json::to_string_pretty(&tables.connections)?.as_bytes())?;\n\n\n\n writer = create_output_file(output_dir, \"source_routes\", \"dump\")?;\n\n info!(\"\\tGenerating source_routes.dump\");\n\n writer.write_all(serde_json::to_string_pretty(&tables.sources)?.as_bytes())?;\n\n\n\n writer = create_output_file(output_dir, \"destination_routes\", \"dump\")?;\n\n info!(\"\\tGenerating destination_routes.dump\");\n\n writer.write_all(serde_json::to_string_pretty(&tables.destination_routes)?.as_bytes())?;\n\n\n\n writer = create_output_file(output_dir, \"collapsed_connections\", \"dump\")?;\n\n info!(\"\\tGenerating collapsed_connections.dump\");\n\n writer.write_all(serde_json::to_string_pretty(&tables.collapsed_connections)?.as_bytes())?;\n\n\n\n writer = create_output_file(output_dir, \"libs\", \"dump\")?;\n\n info!(\"\\tGenerating libs.dump\");\n\n writer.write_all(serde_json::to_string_pretty(&tables.libs)?.as_bytes())\n\n}\n\n\n", "file_path": "flowc/src/lib/dumper/dump_tables.rs", "rank": 62, "score": 89346.7426862792 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\npub fn write(lib_root: &Path, lib_manifest: &LibraryManifest, 
filename: &Path) -> Result<()> {\n\n // Create the file we will be writing to\n\n let mut manifest_file = File::create(&filename)?;\n\n\n\n // Create the list of top level modules\n\n let mut modules = HashSet::<&str>::new();\n\n for module_url in lib_manifest.locators.keys() {\n\n let module_name = module_url\n\n .path_segments()\n\n .chain_err(|| \"Could not get path segments\")?\n\n .into_iter()\n\n .next()\n\n .chain_err(|| \"Could not get first path segment\")?;\n\n\n\n modules.insert(module_name);\n\n }\n\n\n\n // generate their pub mod statements, specifying a path in the original source directory\n\n for module in modules {\n\n manifest_file.write_all(format!(\"\\n/// functions from module '{}'\", module).as_bytes())?;\n", "file_path": "flowc/src/lib/compiler/rust_manifest.rs", "rank": 63, "score": 89337.73606584551 }, { "content": "/// load library metadata from the given url using the provider.\n\n/// Currently it uses the `package` table of Cargo.toml as a source but it could\n\n/// easily use another file as along as it has the required fields to satisfy `MetaData` struct\n\npub fn load_metadata(url: &Url, provider: &dyn Provider) -> Result<(MetaData, LibType)> {\n\n trace!(\"Loading Metadata\");\n\n let (resolved_url, _) = provider\n\n .resolve_url(url, \"Cargo\", &[\"toml\"])\n\n .chain_err(|| format!(\"Could not resolve the url: '{}'\", url))?;\n\n\n\n if &resolved_url != url {\n\n debug!(\"Source URL '{}' resolved to: '{}'\", url, resolved_url);\n\n }\n\n\n\n let contents = provider\n\n .get_contents(&resolved_url)\n\n .chain_err(|| format!(\"Could not get contents of resolved url: '{}'\", resolved_url))?;\n\n let content = String::from_utf8(contents).chain_err(|| \"Could not read UTF8 contents\")?;\n\n\n\n let deserializer = get_deserializer::<Cargo>(&resolved_url)?;\n\n\n\n let cargo: Cargo = deserializer.deserialize(&content, Some(&resolved_url))?;\n\n\n\n Ok((cargo.package, LibType::RustLib))\n\n}\n\n\n\n/*\n\n Load sub-processes from the 
process_refs in a flow\n\n*/\n", "file_path": "flowc/src/lib/compiler/loader.rs", "rank": 64, "score": 87921.906409462 }, { "content": "fn create_executor(name: String, job_rx: Arc<Mutex<Receiver<Job>>>, job_tx: Sender<Job>) {\n\n let builder = thread::Builder::new();\n\n let _ = builder.spawn(move || {\n\n set_panic_hook();\n\n\n\n loop {\n\n let _ = get_and_execute_job(&job_rx, &job_tx, &name);\n\n }\n\n });\n\n}\n\n\n", "file_path": "flowr/src/lib/execution.rs", "rank": 65, "score": 87810.77151439234 }, { "content": "fn connection_to(tables: &GenerationTables, input: &Route) -> bool {\n\n for connection in &tables.collapsed_connections {\n\n if connection.to_io().route() == input {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n", "file_path": "flowc/src/lib/compiler/checker.rs", "rank": 66, "score": 84188.82308738896 }, { "content": "fn remove_dead_processes(tables: &mut GenerationTables) -> bool {\n\n let mut processes_to_remove = vec![];\n\n let mut connections_to_remove = vec![];\n\n\n\n for (index, function) in tables.functions.iter().enumerate() {\n\n if dead_function(&tables.collapsed_connections, function) {\n\n debug!(\n\n \"Function #{} '{}' @ '{}' has no connection from it, so it will be removed\",\n\n index,\n\n function.alias(),\n\n function.route()\n\n );\n\n processes_to_remove.push(index);\n\n\n\n let removed_route = function.route();\n\n // remove connections to and from the process\n\n for (conn_index, connection) in tables.collapsed_connections.iter().enumerate() {\n\n if connection\n\n .from_io()\n\n .route()\n", "file_path": "flowc/src/lib/compiler/optimizer.rs", "rank": 67, "score": 83341.8904508578 }, { "content": "fn output_name_to_port<T: Hash>(t: &T) -> &str {\n\n OUTPUT_PORTS[index_from_name(t, OUTPUT_PORTS.len())]\n\n}\n\n\n", "file_path": "flowc/src/lib/dumper/dump_dot.rs", "rank": 68, "score": 82524.49401237855 }, { "content": "fn input_name_to_port<T: Hash>(t: &T) -> &str {\n\n INPUT_PORTS[index_from_name(t, 
INPUT_PORTS.len())]\n\n}\n\n\n", "file_path": "flowc/src/lib/dumper/dump_dot.rs", "rank": 69, "score": 82524.49401237855 }, { "content": "fn get(test_dir: &Path, file_name: &str) -> String {\n\n let mut expected_file = test_dir.to_path_buf();\n\n expected_file.push(file_name);\n\n let mut f = File::open(&expected_file).unwrap();\n\n let mut buffer = Vec::new();\n\n f.read_to_end(&mut buffer).unwrap();\n\n String::from_utf8(buffer).unwrap()\n\n}\n\n\n", "file_path": "flowc/tests/flowc-execution_tests.rs", "rank": 70, "score": 82524.49401237855 }, { "content": "fn index_from_name<T: Hash>(t: &T, length: usize) -> usize {\n\n let mut s = DefaultHasher::new();\n\n t.hash(&mut s);\n\n let index = s.finish() % length as u64;\n\n index as usize\n\n}\n\n\n", "file_path": "flowc/src/lib/dumper/dump_dot.rs", "rank": 71, "score": 79413.06756822605 }, { "content": "fn test_args(test_dir: &Path, test_name: &str) -> Vec<String> {\n\n let test_args = format!(\"{}.args\", test_name);\n\n let mut args_file = test_dir.to_path_buf();\n\n args_file.push(test_args);\n\n let f = File::open(&args_file).unwrap();\n\n let f = BufReader::new(f);\n\n\n\n let mut args = Vec::new();\n\n for line in f.lines() {\n\n args.push(line.unwrap());\n\n }\n\n args\n\n}\n\n\n", "file_path": "flowc/tests/flowc-execution_tests.rs", "rank": 72, "score": 78534.88641255134 }, { "content": "fn execute_test(test_name: &str, search_path: Simpath, client_server: bool) {\n\n let mut root_dir = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n root_dir.pop();\n\n let test_dir = root_dir.join(&format!(\"flowc/tests/test-flows/{}\", test_name));\n\n\n\n if let FlowProcess(ref flow) = load_flow(&test_dir, test_name, search_path) {\n\n let tables = compile::compile(flow).unwrap();\n\n let out_dir = test_dir.clone();\n\n let manifest_path = write_manifest(flow, true, out_dir, test_name, &tables).unwrap();\n\n\n\n let test_args = test_args(&test_dir, test_name);\n\n let input = get(&test_dir, &format!(\"{}.stdin\", 
test_name));\n\n let (actual_stdout, actual_stderr) =\n\n execute_flow(manifest_path, test_args, input, client_server);\n\n let expected_output = get(&test_dir, &format!(\"{}.expected\", test_name));\n\n assert!(actual_stderr.is_empty(), \"{}\", actual_stderr);\n\n assert_eq!(\n\n expected_output, actual_stdout,\n\n \"Flow output did not match that in .expected file\"\n\n );\n\n }\n\n}\n\n\n", "file_path": "flowc/tests/flowc-execution_tests.rs", "rank": 73, "score": 77038.24711339852 }, { "content": "#[derive(Debug, Clone)]\n\nenum BlockType {\n\n OutputBlocked,\n\n // Cannot run and send it's Output as a destination Input is full\n\n UnreadySender, // Has to send output to an empty Input for other process to be able to run\n\n}\n\n\n", "file_path": "flowr/src/lib/debugger.rs", "rank": 74, "score": 74623.81759292504 }, { "content": "#[allow(clippy::ptr_arg)]\n\nfn add_output_set(output_set: &IOSet, from: &Route, connect_subflow: bool) -> String {\n\n let mut string = String::new();\n\n\n\n string.push_str(\"\\n\\t// Outputs\\n\\t{ rank=sink\\n\");\n\n for output in output_set {\n\n // Only add output if it's not got the same route as it's function i.e. 
it's not the default output\n\n if output.route() != from {\n\n // Add an entry for each output using it's route\n\n string.push_str(&format!(\"\\t\\\"{}\\\" [label=\\\"{}\\\", shape=invhouse, style=filled, fillcolor=black, fontcolor=white];\\n\",\n\n output.route(), output.name()));\n\n\n\n if connect_subflow {\n\n // and connect the output to the sub-flow\n\n let output_port = output_name_to_port(output.name());\n\n string.push_str(&format!(\n\n \"\\t\\\"{}\\\":{} -> \\\"{}\\\"[style=invis, headtooltip=\\\"{}\\\"];\\n\",\n\n from,\n\n output_port,\n\n output.route(),\n\n output.name()\n\n ));\n\n }\n\n }\n\n }\n\n string.push_str(\"\\t}\\n\");\n\n\n\n string\n\n}\n\n\n", "file_path": "flowc/src/lib/dumper/dump_dot.rs", "rank": 75, "score": 73497.9399748062 }, { "content": "#[allow(clippy::ptr_arg)]\n\nfn add_input_set(input_set: &IOSet, to: &Route, connect_subflow: bool) -> String {\n\n let mut string = String::new();\n\n\n\n string.push_str(\"\\n\\t// Inputs\\n\\t{ rank=source\\n\");\n\n for input in input_set {\n\n // Avoid creating extra points to connect to for default input\n\n if input.route() != to {\n\n // Add an entry for each input using it's route\n\n string.push_str(&format!(\n\n \"\\t\\\"{}\\\" [label=\\\"{}\\\", shape=house, style=filled, fillcolor=white];\\n\",\n\n input.route(),\n\n input.name()\n\n ));\n\n\n\n if connect_subflow {\n\n // and connect the input to the sub-flow\n\n string.push_str(&format!(\n\n \"\\t\\\"{}\\\" -> \\\"{}\\\":n [style=invis, headtooltip=\\\"{}\\\"];\\n\",\n\n input.route(),\n\n to,\n", "file_path": "flowc/src/lib/dumper/dump_dot.rs", "rank": 76, "score": 73497.9399748062 }, { "content": "/// A content provider is responsible with interfacing with the environment and doing IO\n\n/// or what is required to supply content related with flows - isolating other libraries\n\n/// from the File SSystem or IO. 
It must implement the `Provider` trait\n\npub trait Provider {\n\n /// Take a URL and uses it to determine a url where actual content can be read from\n\n /// using some provider specific logic. This may involve looking for default files in a\n\n /// directory (a file provider) or a server path (an http provider), or it may involve\n\n /// translating a library URL into a real on where content can be found.\n\n fn resolve_url(\n\n &self,\n\n url: &Url,\n\n default_file: &str,\n\n extensions: &[&str],\n\n ) -> Result<(Url, Option<String>)>;\n\n\n\n /// Fetches content from a URL. It resolves the URL internally before attempting to\n\n /// fetch actual content\n\n fn get_contents(&self, url: &Url) -> Result<Vec<u8>>;\n\n}\n\n\n\nconst FILE_PROVIDER: &dyn Provider = &FileProvider as &dyn Provider;\n\nconst HTTP_PROVIDER: &dyn Provider = &HttpProvider as &dyn Provider;\n\n\n", "file_path": "flowcore/src/lib_provider.rs", "rank": 77, "score": 69443.91924801105 }, { "content": "/// `Find` trait is implemented by a number of object types to help find a sub-object\n\n/// using it's Name or Route\n\npub trait Find {\n\n /// Find a sub-object using it's Route\n\n fn find(&self, route: &Route) -> bool;\n\n /// Find a sub-object (Input) using it's name and set the input initializer on it\n\n fn find_by_name_and_set_initializer(\n\n &mut self,\n\n name: &Name,\n\n initial_value: &Option<InputInitializer>,\n\n ) -> Result<IO>;\n\n\n\n /// Find a sub-object (Input) using it's Route and set the input initializer on it\n\n fn find_by_route_and_set_initializer(\n\n &mut self,\n\n route: &Route,\n\n initial_value: &Option<InputInitializer>,\n\n ) -> Result<IO>;\n\n}\n\n\n\nimpl Find for IOSet {\n\n fn find(&self, route: &Route) -> bool {\n", "file_path": "flowc/src/lib/model/io.rs", "rank": 78, "score": 68272.5754425028 }, { "content": "/// Many structs in the model implement the `Validate` method which is used to check the\n\n/// description deserialized from file obeys some 
additional constraints that cannot be expressed\n\n/// in the struct definition in `serde`\n\npub trait Validate {\n\n /// Validate that a deserialized model data structure is valid for use\n\n fn validate(&self) -> Result<()>;\n\n}\n\n\n", "file_path": "flowc/src/lib/compiler/loader.rs", "rank": 79, "score": 68267.55127654846 }, { "content": "/// Trait that is used on multiple objects to get their data type\n\npub trait HasDataType {\n\n /// Return a reference to the datatype of the object implementing this trait\n\n fn datatype(&self) -> &DataType;\n\n}\n\n\n\nimpl DataType {\n\n /// Determine if a datatype specified in a flow is a valid datatype or not\n\n pub fn valid(&self) -> Result<()> {\n\n // Split the type hierarchy and check all levels are valid\n\n let type_levels = self.split('/');\n\n\n\n for type_level in type_levels {\n\n if !DATA_TYPES.contains(&type_level) {\n\n bail!(\"Type '{}' is invalid\", &self);\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n /// Return if this datatype is an array or not\n", "file_path": "flowc/src/lib/model/datatype.rs", "rank": 80, "score": 67143.13051441257 }, { "content": "/// An implementation runs with an array of inputs and returns a value (or null) and a\n\n/// bool indicating if it should be ran again.\n\n///\n\n/// Any 'implementation' of a function must implement this trait\n\n///\n\n/// # Examples\n\n///\n\n/// Here is an example implementation of this trait:\n\n///\n\n/// ```\n\n/// use flowcore::{Implementation, RUN_AGAIN, RunAgain};\n\n/// use serde_json::Value;\n\n/// use serde_json::json;\n\n///\n\n/// #[derive(Debug)]\n\n/// pub struct Compare;\n\n///\n\n/// /*\n\n/// A compare implementation that takes two numbers and outputs the comparisons between them\n\n/// */\n\n/// impl Implementation for Compare {\n\n/// fn run(&self, mut inputs: &[Value]) -> (Option<Value>, RunAgain) {\n\n/// let left = inputs[0].as_i64().unwrap();\n\n/// let right = inputs[1].as_i64().unwrap();\n\n///\n\n/// let output = json!({\n\n/// 
\"equal\" : left == right,\n\n/// \"lt\" : left < right,\n\n/// \"gt\" : left > right,\n\n/// \"lte\" : left <= right,\n\n/// \"gte\" : left >= right\n\n/// });\n\n///\n\n/// (None, RUN_AGAIN)\n\n/// }\n\n/// }\n\n/// ```\n\npub trait Implementation: Sync + Send {\n\n /// The `run` method is used to execute the implementation\n\n fn run(&self, inputs: &[Value]) -> (Option<Value>, RunAgain);\n\n}\n", "file_path": "flowcore/src/lib.rs", "rank": 81, "score": 64069.40411272524 }, { "content": "fn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n\n\n if args.len() != 5 {\n\n writeln!(std::io::stderr(), \"Usage: {} FILE PIXELS UPPERLEFT LOWERRIGHT\", args[0]).unwrap();\n\n writeln!(std::io::stderr(), \"Example: {} mandel.png 1000x750 -1.2,0.35 -1,0.20\", args[0]).unwrap();\n\n std::process::exit(1);\n\n }\n\n\n\n let _executable_name = &args[0];\n\n\n\n let filename = PathBuf::from(&args[1]);\n\n\n\n let bounds = parse_pair::parse_pair(&args[2], \"x\").expect(\"error parsing image dimensions\");\n\n\n\n let upper_left_args: (f64, f64) = parse_pair::parse_pair(&args[3], \",\").expect(\"error parsing upper left corner point\");\n\n let upper_left = Complex { re: upper_left_args.0, im: upper_left_args.1};\n\n\n\n let lower_right_args = parse_pair::parse_pair(&args[4], \",\").expect(\"error parsing lower rightcorner point\");\n\n let lower_right = Complex{ re: lower_right_args.0, im: lower_right_args.1};\n\n\n\n let mut pixels = vec![0; bounds.0 * bounds.1 * 3];\n\n\n\n render(&mut pixels, bounds, upper_left, lower_right);\n\n\n\n write_bitmap(&filename, &pixels, bounds);\n\n}\n\n\n", "file_path": "samples/mandlebrot/main.rs", "rank": 82, "score": 63019.071944377254 }, { "content": "#[test]\n\n#[serial]\n\nfn args() {\n\n let search_path = helper::set_lib_search_path_to_project();\n\n execute_test(\"args\", search_path, false);\n\n}\n\n\n", "file_path": "flowc/tests/flowc-execution_tests.rs", "rank": 83, "score": 60469.21537586387 }, { "content": 
"#[test]\n\nfn args() {\n\n let meta_provider = MetaProvider::new(helper::set_lib_search_path_to_project());\n\n let path =\n\n helper::absolute_file_url_from_relative_path(\"flowc/tests/test-flows/args/args.toml\");\n\n let process = loader::load(&path, &meta_provider, &mut HashSet::<(Url, Url)>::new()).unwrap();\n\n if let FlowProcess(ref flow) = process {\n\n let _tables = compile::compile(flow).unwrap();\n\n } else {\n\n panic!(\"Process loaded was not a flow\");\n\n }\n\n}\n\n\n", "file_path": "flowc/tests/flowc-compiler_tests.rs", "rank": 84, "score": 60463.94298993139 }, { "content": "#[test]\n\n#[serial]\n\nfn print_args() {\n\n let search_path = helper::set_lib_search_path_to_project();\n\n execute_test(\"print-args\", search_path, false);\n\n}\n\n\n", "file_path": "flowc/tests/flowc-execution_tests.rs", "rank": 85, "score": 59299.01449969893 }, { "content": "#[test]\n\n#[serial]\n\nfn array_input() {\n\n let search_path = helper::set_lib_search_path_to_project();\n\n execute_test(\"array-input\", search_path, false);\n\n}\n\n\n", "file_path": "flowc/tests/flowc-execution_tests.rs", "rank": 86, "score": 59299.01449969893 }, { "content": "#[test]\n\n#[serial]\n\nfn duplicate_connection() {\n\n let search_path = helper::set_lib_search_path_to_project();\n\n execute_test(\"duplicate-connection\", search_path, false);\n\n}\n\n\n", "file_path": "flowc/tests/flowc-execution_tests.rs", "rank": 87, "score": 59299.01449969893 }, { "content": "#[test]\n\n#[serial]\n\nfn two_destinations() {\n\n let search_path = helper::set_lib_search_path_to_project();\n\n execute_test(\"two-destinations\", search_path, false);\n\n}\n\n\n", "file_path": "flowc/tests/flowc-execution_tests.rs", "rank": 88, "score": 59299.01449969893 }, { "content": "#[test]\n\n#[serial]\n\nfn args_json() {\n\n let search_path = helper::set_lib_search_path_to_project();\n\n execute_test(\"args_json\", search_path, false);\n\n}\n\n\n", "file_path": "flowc/tests/flowc-execution_tests.rs", "rank": 89, 
"score": 59299.01449969893 }, { "content": "#[test]\n\n#[serial]\n\nfn hello_world() {\n\n let search_path = helper::set_lib_search_path_to_project();\n\n execute_test(\"hello-world\", search_path, false);\n\n}\n\n\n", "file_path": "flowc/tests/flowc-execution_tests.rs", "rank": 90, "score": 59299.01449969893 }, { "content": "#[test]\n\n#[serial]\n\nfn double_connection() {\n\n let search_path = helper::set_lib_search_path_to_project();\n\n execute_test(\"double-connection\", search_path, false);\n\n}\n\n\n", "file_path": "flowc/tests/flowc-execution_tests.rs", "rank": 91, "score": 59299.01449969893 }, { "content": "#[test]\n\n#[serial]\n\nfn line_echo() {\n\n let search_path = helper::set_lib_search_path_to_project();\n\n execute_test(\"line-echo\", search_path, false);\n\n}\n\n\n", "file_path": "flowc/tests/flowc-execution_tests.rs", "rank": 92, "score": 59299.01449969893 }, { "content": "#[test]\n\nfn invalid_toml() {\n\n let meta_provider = MetaProvider::new(helper::set_lib_search_path_to_project());\n\n let path = helper::absolute_file_url_from_relative_path(\"flowc/tests/test-flows/invalid.toml\");\n\n if loader::load(&path, &meta_provider, &mut HashSet::<(Url, Url)>::new()).is_ok() {\n\n panic!(\"invalid.toml should not load successfully\");\n\n }\n\n}\n\n\n", "file_path": "flowc/tests/flowc-loader_tests.rs", "rank": 93, "score": 59293.74211376646 }, { "content": "#[test]\n\nfn context_with_io() {\n\n let meta_provider = MetaProvider::new(helper::set_lib_search_path_to_project());\n\n let path = helper::absolute_file_url_from_relative_path(\n\n \"flowc/tests/test-flows/context_with_io/context_with_io.toml\",\n\n );\n\n let process = loader::load(&path, &meta_provider, &mut HashSet::<(Url, Url)>::new()).unwrap();\n\n if let FlowProcess(ref flow) = process {\n\n if compile::compile(flow).is_ok() {\n\n // flow loaded, but has ios\n\n assert!(!flow.inputs().is_empty());\n\n assert!(!flow.outputs().is_empty());\n\n }\n\n } else {\n\n panic!(\"Process loaded was 
not a flow\");\n\n }\n\n}\n\n\n", "file_path": "flowc/tests/flowc-compiler_tests.rs", "rank": 94, "score": 59293.74211376646 }, { "content": "fn get_source(\n\n source_routes: &HashMap<Route, (Source, usize)>,\n\n from_route: &Route,\n\n) -> Option<(Source, usize)> {\n\n let mut source_route = from_route.clone();\n\n let mut sub_route = Route::from(\"\");\n\n\n\n // Look for a function/output or function/input with a route that matches what we are looking for\n\n // popping off sub-structure sub-path segments until none left\n\n loop {\n\n match source_routes.get(&source_route) {\n\n Some((Output(io_sub_route), function_index)) => {\n\n return if io_sub_route.is_empty() {\n\n Some((Source::Output(format!(\"{}\", sub_route)), *function_index))\n\n } else {\n\n Some((\n\n Source::Output(format!(\"/{}{}\", io_sub_route, sub_route)),\n\n *function_index,\n\n ))\n\n }\n", "file_path": "flowc/src/lib/compiler/connector.rs", "rank": 95, "score": 59293.74211376646 }, { "content": "fn function_to_runtimefunction(\n\n manifest_url: &Url,\n\n function: &Function,\n\n debug_symbols: bool,\n\n) -> Result<RuntimeFunction> {\n\n #[cfg(feature = \"debugger\")]\n\n let name = if debug_symbols {\n\n function.alias().to_string()\n\n } else {\n\n \"\".to_string()\n\n };\n\n\n\n #[cfg(feature = \"debugger\")]\n\n let route = if debug_symbols {\n\n function.route().to_string()\n\n } else {\n\n \"\".to_string()\n\n };\n\n\n\n // make the location of implementation relative to the output directory if it is under it\n", "file_path": "flowc/src/lib/generator/generate.rs", "rank": 96, "score": 59293.74211376646 }, { "content": "fn get_and_execute_job(\n\n job_rx: &Arc<Mutex<Receiver<Job>>>,\n\n job_tx: &Sender<Job>,\n\n name: &str,\n\n) -> Result<()> {\n\n let guard = job_rx\n\n .lock()\n\n .map_err(|e| format!(\"Error locking receiver to get job: '{}'\", e))?;\n\n let job = guard\n\n .recv()\n\n .map_err(|e| format!(\"Error receiving job for execution: '{}'\", e))?;\n\n execute(job, job_tx, 
name)\n\n}\n\n\n", "file_path": "flowr/src/lib/execution.rs", "rank": 97, "score": 59293.74211376646 }, { "content": "#[test]\n\nfn malformed_connection() {\n\n let meta_provider = MetaProvider::new(helper::set_lib_search_path_to_project());\n\n let path = helper::absolute_file_url_from_relative_path(\n\n \"flowc/tests/test-flows/malformed-connection.toml\",\n\n );\n\n if loader::load(&path, &meta_provider, &mut HashSet::<(Url, Url)>::new()).is_ok() {\n\n panic!(\"malformed-connection.toml should not load successfully\");\n\n }\n\n}\n\n\n", "file_path": "flowc/tests/flowc-loader_tests.rs", "rank": 98, "score": 59293.74211376646 }, { "content": "#[test]\n\nfn load_library() {\n\n let meta_provider = MetaProvider::new(helper::set_lib_search_path_to_project());\n\n let path = helper::absolute_file_url_from_relative_path(\"flowc/tests/test_libs/Cargo.toml\");\n\n loader::load_metadata(&path, &meta_provider).unwrap();\n\n}\n", "file_path": "flowc/tests/flowc-loader_tests.rs", "rank": 99, "score": 59293.74211376646 } ]
Rust
src/jsonrpc.rs
silvanshade/tower-lsp
4da888ddf92969ff7ba200d20c1080dd83cf0c07
pub use self::error::{Error, ErrorCode}; pub use self::router::{FromParams, IntoResponse, Method}; pub(crate) use self::router::Router; use std::borrow::Cow; use std::fmt::{self, Debug, Display, Formatter}; use lsp_types::NumberOrString; use serde::de::{self, Deserializer}; use serde::ser::Serializer; use serde::{Deserialize, Serialize}; use serde_json::Value; mod error; mod router; pub type Result<T> = std::result::Result<T, Error>; #[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)] #[serde(untagged)] pub enum Id { Number(i64), String(String), Null, } impl Default for Id { fn default() -> Self { Id::Null } } impl Display for Id { fn fmt(&self, f: &mut Formatter) -> fmt::Result { match self { Id::Number(id) => Display::fmt(id, f), Id::String(id) => Debug::fmt(id, f), Id::Null => f.write_str("null"), } } } impl From<i64> for Id { fn from(n: i64) -> Self { Id::Number(n) } } impl From<&'_ str> for Id { fn from(s: &'_ str) -> Self { Id::String(s.to_string()) } } impl From<String> for Id { fn from(s: String) -> Self { Id::String(s) } } impl From<NumberOrString> for Id { fn from(num_or_str: NumberOrString) -> Self { match num_or_str { NumberOrString::Number(num) => Id::Number(num as i64), NumberOrString::String(s) => Id::String(s), } } } fn deserialize_some<'de, T, D>(deserializer: D) -> std::result::Result<Option<T>, D::Error> where T: Deserialize<'de>, D: Deserializer<'de>, { T::deserialize(deserializer).map(Some) } #[derive(Clone, Debug, PartialEq, Deserialize, Serialize)] pub struct Request { jsonrpc: Version, #[serde(default)] method: Cow<'static, str>, #[serde(default, deserialize_with = "deserialize_some")] #[serde(skip_serializing_if = "Option::is_none")] params: Option<Value>, #[serde(default, deserialize_with = "deserialize_some")] #[serde(skip_serializing_if = "Option::is_none")] id: Option<Id>, } impl Request { pub fn build<M>(method: M) -> RequestBuilder where M: Into<Cow<'static, str>>, { RequestBuilder { method: method.into(), params: 
None, id: None, } } pub(crate) fn from_request<R>(id: Id, params: R::Params) -> Self where R: lsp_types::request::Request, { Request { jsonrpc: Version, method: R::METHOD.into(), params: Some(serde_json::to_value(params).unwrap()), id: Some(id), } } pub(crate) fn from_notification<N>(params: N::Params) -> Self where N: lsp_types::notification::Notification, { Request { jsonrpc: Version, method: N::METHOD.into(), params: Some(serde_json::to_value(params).unwrap()), id: None, } } #[inline] pub fn method(&self) -> &str { self.method.as_ref() } #[inline] pub fn id(&self) -> Option<&Id> { self.id.as_ref() } #[inline] pub fn params(&self) -> Option<&Value> { self.params.as_ref() } #[inline] pub fn into_parts(self) -> (Cow<'static, str>, Option<Id>, Option<Value>) { (self.method, self.id, self.params) } } impl Display for Request { fn fmt(&self, f: &mut Formatter) -> fmt::Result { let mut w = WriterFormatter { inner: f }; serde_json::to_writer(&mut w, self).map_err(|_| fmt::Error) } } struct WriterFormatter<'a, 'b: 'a> { inner: &'a mut Formatter<'b>, } impl<'a, 'b> std::io::Write for WriterFormatter<'a, 'b> { fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { fn io_error<E>(_: E) -> std::io::Error { std::io::Error::new(std::io::ErrorKind::Other, "fmt error") } let s = std::str::from_utf8(buf).map_err(io_error)?; self.inner.write_str(s).map_err(io_error)?; Ok(buf.len()) } fn flush(&mut self) -> std::io::Result<()> { Ok(()) } } #[derive(Debug)] pub struct RequestBuilder { method: Cow<'static, str>, params: Option<Value>, id: Option<Id>, } impl RequestBuilder { pub fn id<I: Into<Id>>(mut self, id: I) -> Self { self.id = Some(id.into()); self } pub fn params<V: Into<Value>>(mut self, params: V) -> Self { self.params = Some(params.into()); self } pub fn finish(self) -> Request { Request { jsonrpc: Version, method: self.method, params: self.params, id: self.id, } } } #[derive(Clone, PartialEq, Deserialize, Serialize)] pub struct Response { jsonrpc: Version, 
#[serde(flatten)] kind: ResponseKind, id: Id, } impl Response { #[inline] pub const fn from_ok(id: Id, result: Value) -> Self { Response { jsonrpc: Version, kind: ResponseKind::Ok { result }, id, } } #[inline] pub const fn from_error(id: Id, error: Error) -> Self { Response { jsonrpc: Version, kind: ResponseKind::Err { error }, id, } } pub fn from_parts(id: Id, body: Result<Value>) -> Self { match body { Ok(result) => Response::from_ok(id, result), Err(error) => Response::from_error(id, error), } } pub fn into_parts(self) -> (Id, Result<Value>) { match self.kind { ResponseKind::Ok { result } => (self.id, Ok(result)), ResponseKind::Err { error } => (self.id, Err(error)), } } #[inline] pub const fn is_ok(&self) -> bool { matches!(self.kind, ResponseKind::Ok { .. }) } #[inline] pub const fn is_error(&self) -> bool { !self.is_ok() } #[inline] pub const fn result(&self) -> Option<&Value> { match &self.kind { ResponseKind::Ok { result } => Some(result), _ => None, } } #[inline] pub const fn error(&self) -> Option<&Error> { match &self.kind { ResponseKind::Err { error } => Some(error), _ => None, } } #[inline] pub const fn id(&self) -> &Id { &self.id } } impl Debug for Response { fn fmt(&self, f: &mut Formatter) -> fmt::Result { let mut d = f.debug_struct("Response"); d.field("jsonrpc", &self.jsonrpc); match &self.kind { ResponseKind::Ok { result } => d.field("result", result), ResponseKind::Err { error } => d.field("error", error), }; d.field("id", &self.id).finish() } } #[derive(Clone, PartialEq, Deserialize, Serialize)] #[serde(untagged)] enum ResponseKind { Ok { result: Value }, Err { error: Error }, } #[derive(Deserialize, Serialize)] #[cfg_attr(test, derive(Debug, PartialEq))] #[serde(untagged)] pub(crate) enum Message { Response(Response), Request(Request), } #[derive(Clone, Copy, Debug, PartialEq)] pub(crate) struct Version; impl<'de> Deserialize<'de> for Version { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: 
Deserializer<'de>, { match Cow::<'de, str>::deserialize(deserializer)?.as_ref() { "2.0" => Ok(Version), _ => Err(de::Error::custom("expected JSON-RPC version \"2.0\"")), } } } impl Serialize for Version { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer, { "2.0".serialize(serializer) } } pub(crate) fn not_initialized_error() -> Error { Error { code: ErrorCode::ServerError(-32002), message: "Server not initialized".to_string(), data: None, } } #[cfg(test)] mod tests { use serde_json::json; use super::*; #[test] fn incoming_from_str_or_value() { let v = json!({"jsonrpc":"2.0","method":"initialize","params":{"capabilities":{}},"id":0}); let from_str: Message = serde_json::from_str(&v.to_string()).unwrap(); let from_value: Message = serde_json::from_value(v).unwrap(); assert_eq!(from_str, from_value); } #[test] fn outgoing_from_str_or_value() { let v = json!({"jsonrpc":"2.0","result":{},"id":1}); let from_str: Message = serde_json::from_str(&v.to_string()).unwrap(); let from_value: Message = serde_json::from_value(v).unwrap(); assert_eq!(from_str, from_value); } #[test] fn parses_incoming_message() { let server_request = json!({"jsonrpc":"2.0","method":"initialize","params":{"capabilities":{}},"id":0}); let incoming = serde_json::from_value(server_request).unwrap(); assert!(matches!(incoming, Message::Request(_))); let server_notif = json!({"jsonrpc":"2.0","method":"initialized","params":{}}); let incoming = serde_json::from_value(server_notif).unwrap(); assert!(matches!(incoming, Message::Request(_))); let client_request = json!({"jsonrpc":"2.0","id":0,"result":[null]}); let incoming = serde_json::from_value(client_request).unwrap(); assert!(matches!(incoming, Message::Response(_))); } #[test] fn parses_outgoing_message() { let client_request = json!({"jsonrpc":"2.0","method":"workspace/configuration","params":{"scopeUri":null,"section":"foo"},"id":0}); let outgoing = serde_json::from_value(client_request).unwrap(); 
assert!(matches!(outgoing, Message::Request(_))); let client_notif = json!({"jsonrpc":"2.0","method":"window/logMessage","params":{"message":"foo","type":0}}); let outgoing = serde_json::from_value(client_notif).unwrap(); assert!(matches!(outgoing, Message::Request(_))); let server_response = json!({"jsonrpc":"2.0","id":0,"result":[null]}); let outgoing = serde_json::from_value(server_response).unwrap(); assert!(matches!(outgoing, Message::Response(_))); } #[test] fn parses_invalid_server_request() { let unknown_method = json!({"jsonrpc":"2.0","method":"foo"}); let incoming = serde_json::from_value(unknown_method).unwrap(); assert!(matches!(incoming, Message::Request(_))); let unknown_method_with_id = json!({"jsonrpc":"2.0","method":"foo","id":0}); let incoming = serde_json::from_value(unknown_method_with_id).unwrap(); assert!(matches!(incoming, Message::Request(_))); let missing_method = json!({"jsonrpc":"2.0"}); let incoming = serde_json::from_value(missing_method).unwrap(); assert!(matches!(incoming, Message::Request(_))); let missing_method_with_id = json!({"jsonrpc":"2.0","id":0}); let incoming = serde_json::from_value(missing_method_with_id).unwrap(); assert!(matches!(incoming, Message::Request(_))); } #[test] fn accepts_null_request_id() { let request_id: Id = serde_json::from_value(json!(null)).unwrap(); assert_eq!(request_id, Id::Null); } #[test] fn accepts_negative_integer_request_id() { let request_id: Id = serde_json::from_value(json!(-1)).unwrap(); assert_eq!(request_id, Id::Number(-1)); } }
pub use self::error::{Error, ErrorCode}; pub use self::router::{FromParams, IntoResponse, Method}; pub(crate) use self::router::Router; use std::borrow::Cow; use std::fmt::{self, Debug, Display, Formatter}; use lsp_types::NumberOrString; use serde::de::{self, Deserializer}; use serde::ser::Serializer; use serde::{Deserialize, Serialize}; use serde_json::Value; mod error; mod router; pub type Result<T> = std::result::Result<T, Error>; #[derive(Clone, Debug, Eq, Hash, PartialEq, Deserialize, Serialize)] #[serde(untagged)] pub enum Id { Number(i64), String(String), Null, } impl Default for Id { fn default() -> Self { Id::Null } } impl Display for Id {
} impl From<i64> for Id { fn from(n: i64) -> Self { Id::Number(n) } } impl From<&'_ str> for Id { fn from(s: &'_ str) -> Self { Id::String(s.to_string()) } } impl From<String> for Id { fn from(s: String) -> Self { Id::String(s) } } impl From<NumberOrString> for Id { fn from(num_or_str: NumberOrString) -> Self { match num_or_str { NumberOrString::Number(num) => Id::Number(num as i64), NumberOrString::String(s) => Id::String(s), } } } fn deserialize_some<'de, T, D>(deserializer: D) -> std::result::Result<Option<T>, D::Error> where T: Deserialize<'de>, D: Deserializer<'de>, { T::deserialize(deserializer).map(Some) } #[derive(Clone, Debug, PartialEq, Deserialize, Serialize)] pub struct Request { jsonrpc: Version, #[serde(default)] method: Cow<'static, str>, #[serde(default, deserialize_with = "deserialize_some")] #[serde(skip_serializing_if = "Option::is_none")] params: Option<Value>, #[serde(default, deserialize_with = "deserialize_some")] #[serde(skip_serializing_if = "Option::is_none")] id: Option<Id>, } impl Request { pub fn build<M>(method: M) -> RequestBuilder where M: Into<Cow<'static, str>>, { RequestBuilder { method: method.into(), params: None, id: None, } } pub(crate) fn from_request<R>(id: Id, params: R::Params) -> Self where R: lsp_types::request::Request, { Request { jsonrpc: Version, method: R::METHOD.into(), params: Some(serde_json::to_value(params).unwrap()), id: Some(id), } } pub(crate) fn from_notification<N>(params: N::Params) -> Self where N: lsp_types::notification::Notification, { Request { jsonrpc: Version, method: N::METHOD.into(), params: Some(serde_json::to_value(params).unwrap()), id: None, } } #[inline] pub fn method(&self) -> &str { self.method.as_ref() } #[inline] pub fn id(&self) -> Option<&Id> { self.id.as_ref() } #[inline] pub fn params(&self) -> Option<&Value> { self.params.as_ref() } #[inline] pub fn into_parts(self) -> (Cow<'static, str>, Option<Id>, Option<Value>) { (self.method, self.id, self.params) } } impl Display for Request { 
fn fmt(&self, f: &mut Formatter) -> fmt::Result { let mut w = WriterFormatter { inner: f }; serde_json::to_writer(&mut w, self).map_err(|_| fmt::Error) } } struct WriterFormatter<'a, 'b: 'a> { inner: &'a mut Formatter<'b>, } impl<'a, 'b> std::io::Write for WriterFormatter<'a, 'b> { fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> { fn io_error<E>(_: E) -> std::io::Error { std::io::Error::new(std::io::ErrorKind::Other, "fmt error") } let s = std::str::from_utf8(buf).map_err(io_error)?; self.inner.write_str(s).map_err(io_error)?; Ok(buf.len()) } fn flush(&mut self) -> std::io::Result<()> { Ok(()) } } #[derive(Debug)] pub struct RequestBuilder { method: Cow<'static, str>, params: Option<Value>, id: Option<Id>, } impl RequestBuilder { pub fn id<I: Into<Id>>(mut self, id: I) -> Self { self.id = Some(id.into()); self } pub fn params<V: Into<Value>>(mut self, params: V) -> Self { self.params = Some(params.into()); self } pub fn finish(self) -> Request { Request { jsonrpc: Version, method: self.method, params: self.params, id: self.id, } } } #[derive(Clone, PartialEq, Deserialize, Serialize)] pub struct Response { jsonrpc: Version, #[serde(flatten)] kind: ResponseKind, id: Id, } impl Response { #[inline] pub const fn from_ok(id: Id, result: Value) -> Self { Response { jsonrpc: Version, kind: ResponseKind::Ok { result }, id, } } #[inline] pub const fn from_error(id: Id, error: Error) -> Self { Response { jsonrpc: Version, kind: ResponseKind::Err { error }, id, } } pub fn from_parts(id: Id, body: Result<Value>) -> Self { match body { Ok(result) => Response::from_ok(id, result), Err(error) => Response::from_error(id, error), } } pub fn into_parts(self) -> (Id, Result<Value>) { match self.kind { ResponseKind::Ok { result } => (self.id, Ok(result)), ResponseKind::Err { error } => (self.id, Err(error)), } } #[inline] pub const fn is_ok(&self) -> bool { matches!(self.kind, ResponseKind::Ok { .. 
}) } #[inline] pub const fn is_error(&self) -> bool { !self.is_ok() } #[inline] pub const fn result(&self) -> Option<&Value> { match &self.kind { ResponseKind::Ok { result } => Some(result), _ => None, } } #[inline] pub const fn error(&self) -> Option<&Error> { match &self.kind { ResponseKind::Err { error } => Some(error), _ => None, } } #[inline] pub const fn id(&self) -> &Id { &self.id } } impl Debug for Response { fn fmt(&self, f: &mut Formatter) -> fmt::Result { let mut d = f.debug_struct("Response"); d.field("jsonrpc", &self.jsonrpc); match &self.kind { ResponseKind::Ok { result } => d.field("result", result), ResponseKind::Err { error } => d.field("error", error), }; d.field("id", &self.id).finish() } } #[derive(Clone, PartialEq, Deserialize, Serialize)] #[serde(untagged)] enum ResponseKind { Ok { result: Value }, Err { error: Error }, } #[derive(Deserialize, Serialize)] #[cfg_attr(test, derive(Debug, PartialEq))] #[serde(untagged)] pub(crate) enum Message { Response(Response), Request(Request), } #[derive(Clone, Copy, Debug, PartialEq)] pub(crate) struct Version; impl<'de> Deserialize<'de> for Version { fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> where D: Deserializer<'de>, { match Cow::<'de, str>::deserialize(deserializer)?.as_ref() { "2.0" => Ok(Version), _ => Err(de::Error::custom("expected JSON-RPC version \"2.0\"")), } } } impl Serialize for Version { fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> where S: Serializer, { "2.0".serialize(serializer) } } pub(crate) fn not_initialized_error() -> Error { Error { code: ErrorCode::ServerError(-32002), message: "Server not initialized".to_string(), data: None, } } #[cfg(test)] mod tests { use serde_json::json; use super::*; #[test] fn incoming_from_str_or_value() { let v = json!({"jsonrpc":"2.0","method":"initialize","params":{"capabilities":{}},"id":0}); let from_str: Message = serde_json::from_str(&v.to_string()).unwrap(); let from_value: Message = 
serde_json::from_value(v).unwrap(); assert_eq!(from_str, from_value); } #[test] fn outgoing_from_str_or_value() { let v = json!({"jsonrpc":"2.0","result":{},"id":1}); let from_str: Message = serde_json::from_str(&v.to_string()).unwrap(); let from_value: Message = serde_json::from_value(v).unwrap(); assert_eq!(from_str, from_value); } #[test] fn parses_incoming_message() { let server_request = json!({"jsonrpc":"2.0","method":"initialize","params":{"capabilities":{}},"id":0}); let incoming = serde_json::from_value(server_request).unwrap(); assert!(matches!(incoming, Message::Request(_))); let server_notif = json!({"jsonrpc":"2.0","method":"initialized","params":{}}); let incoming = serde_json::from_value(server_notif).unwrap(); assert!(matches!(incoming, Message::Request(_))); let client_request = json!({"jsonrpc":"2.0","id":0,"result":[null]}); let incoming = serde_json::from_value(client_request).unwrap(); assert!(matches!(incoming, Message::Response(_))); } #[test] fn parses_outgoing_message() { let client_request = json!({"jsonrpc":"2.0","method":"workspace/configuration","params":{"scopeUri":null,"section":"foo"},"id":0}); let outgoing = serde_json::from_value(client_request).unwrap(); assert!(matches!(outgoing, Message::Request(_))); let client_notif = json!({"jsonrpc":"2.0","method":"window/logMessage","params":{"message":"foo","type":0}}); let outgoing = serde_json::from_value(client_notif).unwrap(); assert!(matches!(outgoing, Message::Request(_))); let server_response = json!({"jsonrpc":"2.0","id":0,"result":[null]}); let outgoing = serde_json::from_value(server_response).unwrap(); assert!(matches!(outgoing, Message::Response(_))); } #[test] fn parses_invalid_server_request() { let unknown_method = json!({"jsonrpc":"2.0","method":"foo"}); let incoming = serde_json::from_value(unknown_method).unwrap(); assert!(matches!(incoming, Message::Request(_))); let unknown_method_with_id = json!({"jsonrpc":"2.0","method":"foo","id":0}); let incoming = 
serde_json::from_value(unknown_method_with_id).unwrap(); assert!(matches!(incoming, Message::Request(_))); let missing_method = json!({"jsonrpc":"2.0"}); let incoming = serde_json::from_value(missing_method).unwrap(); assert!(matches!(incoming, Message::Request(_))); let missing_method_with_id = json!({"jsonrpc":"2.0","id":0}); let incoming = serde_json::from_value(missing_method_with_id).unwrap(); assert!(matches!(incoming, Message::Request(_))); } #[test] fn accepts_null_request_id() { let request_id: Id = serde_json::from_value(json!(null)).unwrap(); assert_eq!(request_id, Id::Null); } #[test] fn accepts_negative_integer_request_id() { let request_id: Id = serde_json::from_value(json!(-1)).unwrap(); assert_eq!(request_id, Id::Number(-1)); } }
fn fmt(&self, f: &mut Formatter) -> fmt::Result { match self { Id::Number(id) => Display::fmt(id, f), Id::String(id) => Debug::fmt(id, f), Id::Null => f.write_str("null"), } }
function_block-full_function
[]
Rust
packages/yew-macro/src/hook/mod.rs
WorldSEnder/yew
b580bd4c2f6ec948e7a7fd92a34eb983ea8d0924
use proc_macro2::{Span, TokenStream}; use proc_macro_error::emit_error; use quote::quote; use syn::parse::{Parse, ParseStream}; use syn::{ parse_file, parse_quote, visit_mut, Attribute, Ident, ItemFn, LitStr, ReturnType, Signature, }; mod body; mod lifetime; mod signature; pub use body::BodyRewriter; use signature::HookSignature; #[derive(Clone)] pub struct HookFn { inner: ItemFn, } impl Parse for HookFn { fn parse(input: ParseStream) -> syn::Result<Self> { let func: ItemFn = input.parse()?; let sig = func.sig.clone(); if sig.asyncness.is_some() { emit_error!(sig.asyncness, "async functions can't be hooks"); } if sig.constness.is_some() { emit_error!(sig.constness, "const functions can't be hooks"); } if sig.abi.is_some() { emit_error!(sig.abi, "extern functions can't be hooks"); } if sig.unsafety.is_some() { emit_error!(sig.unsafety, "unsafe functions can't be hooks"); } if !sig.ident.to_string().starts_with("use_") { emit_error!(sig.ident, "hooks must have a name starting with `use_`"); } Ok(Self { inner: func }) } } impl HookFn { fn doc_attr(&self) -> Attribute { let vis = &self.inner.vis; let sig = &self.inner.sig; let sig_s = quote! { #vis #sig { __yew_macro_dummy_function_body__ } } .to_string(); let sig_file = parse_file(&sig_s).unwrap(); let sig_formatted = prettyplease::unparse(&sig_file); let literal = LitStr::new( &format!( r#" # Note When used in function components and hooks, this hook is equivalent to: ``` {} ``` "#, sig_formatted.replace( "__yew_macro_dummy_function_body__", "/* implementation omitted */" ) ), Span::mixed_site(), ); parse_quote!(#[doc = #literal]) } } pub fn hook_impl(hook: HookFn) -> syn::Result<TokenStream> { let doc_attr = hook.doc_attr(); let HookFn { inner: original_fn } = hook; let ItemFn { ref vis, ref sig, ref block, ref attrs, } = original_fn; let mut block = *block.clone(); let hook_sig = HookSignature::rewrite(sig); let Signature { ref fn_token, ref ident, ref inputs, output: ref hook_return_type, ref generics, .. 
} = hook_sig.sig; let output_type = &hook_sig.output_type; let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); let call_generics = hook_sig.call_generics(); let ctx_ident = Ident::new("_ctx", Span::mixed_site()); let mut body_rewriter = BodyRewriter::new(ctx_ident.clone()); visit_mut::visit_block_mut(&mut body_rewriter, &mut block); let inner_fn_ident = Ident::new("inner_fn", Span::mixed_site()); let input_args = hook_sig.input_args(); let inner_fn_rt = match &sig.output { ReturnType::Default => None, ReturnType::Type(rarrow, _) => Some(quote! { #rarrow #output_type }), }; let inner_fn = quote! { fn #inner_fn_ident #generics (#ctx_ident: &mut ::yew::functional::HookContext, #inputs) #inner_fn_rt #where_clause #block }; let inner_type_impl = if hook_sig.needs_boxing { let hook_lifetime = &hook_sig.hook_lifetime; let hook_lifetime_plus = quote! { #hook_lifetime + }; let boxed_inner_ident = Ident::new("boxed_inner", Span::mixed_site()); let boxed_fn_type = quote! { ::std::boxed::Box<dyn #hook_lifetime_plus ::std::ops::FnOnce(&mut ::yew::functional::HookContext) #inner_fn_rt> }; quote! { let #boxed_inner_ident = ::std::boxed::Box::new( move |#ctx_ident: &mut ::yew::functional::HookContext| #inner_fn_rt { #inner_fn_ident (#ctx_ident, #(#input_args,)*) } ) as #boxed_fn_type; ::yew::functional::BoxedHook::<#hook_lifetime, #output_type>::new(#boxed_inner_ident) } } else { let input_types = hook_sig.input_types(); let args_ident = Ident::new("args", Span::mixed_site()); let hook_struct_name = Ident::new("HookProvider", Span::mixed_site()); let phantom_types = hook_sig.phantom_types(); let phantom_lifetimes = hook_sig.phantom_lifetimes(); quote! 
{ struct #hook_struct_name #generics #where_clause { _marker: ::std::marker::PhantomData<( #(#phantom_types,)* #(#phantom_lifetimes,)* )>, #args_ident: (#(#input_types,)*), } #[automatically_derived] impl #impl_generics ::yew::functional::Hook for #hook_struct_name #ty_generics #where_clause { type Output = #output_type; fn run(mut self, #ctx_ident: &mut ::yew::functional::HookContext) -> Self::Output { let (#(#input_args,)*) = self.#args_ident; #inner_fn_ident #call_generics (#ctx_ident, #(#input_args,)*) } } #[automatically_derived] impl #impl_generics #hook_struct_name #ty_generics #where_clause { fn new(#inputs) -> Self { #hook_struct_name { _marker: ::std::marker::PhantomData, #args_ident: (#(#input_args,)*), } } } #hook_struct_name #call_generics ::new(#(#input_args,)*) } }; let output = quote! { #[cfg(not(doctest))] #(#attrs)* #doc_attr #vis #fn_token #ident #generics (#inputs) #hook_return_type #where_clause { #inner_fn #inner_type_impl } #[cfg(doctest)] #original_fn }; Ok(output) }
use proc_macro2::{Span, TokenStream}; use proc_macro_error::emit_error; use quote::quote; use syn::parse::{Parse, ParseStream}; use syn::{ parse_file, parse_quote, visit_mut, Attribute, Ident, ItemFn, LitStr, ReturnType, Signature, }; mod body; mod lifetime; mod signature; pub use body::BodyRewriter; use signature::HookSignature; #[derive(Clone)] pub struct HookFn { inner: ItemFn, } impl Parse for HookFn { fn parse(input: ParseStream) -> syn::Result<Self> { let func: ItemFn = input.parse()?; let sig = func.sig.clone(); if sig.asyncness.is_some() { emit_error!(sig.asyncness, "async functions can't be hooks"); } if sig.constness.is_some() { emit_error!(sig.constness, "const functions can't be hooks"); } if sig.abi.is_some() { emit_error!(sig.abi, "extern functions can't be hooks"); } if sig.unsafety.is_some() { emit_error!(sig.unsafety, "unsafe functions can't be hooks"); } if !sig.ident.to_string().starts_with("use_") { emit_error!(sig.ident, "hooks must have a name starting with `use_`"); } Ok(Self { inner: func }) } } impl HookFn { fn doc_attr(&self) -> Attribute { let vis = &self.inner.vis; let sig = &self.inner.sig; let sig_s = quote! { #vis #sig { __yew_macro_dummy_function_body__ } } .to_string(); let sig_file = parse_file(&sig_s).unwrap(); let sig_formatted = prettyplease::unparse(&sig_file); let literal = LitStr::new( &format!( r#" # Note When used in function components and hooks, this hook is equivalent to: ``` {} ``` "#, sig_formatted.replace( "__yew_macro_dummy_function_body__", "/* implementation omitted */" ) ), Span::mixed_site(), ); parse_quote!(#[doc = #literal]) } } pub fn hook_impl(hook: HookFn) -> syn::Result<TokenStream> { let doc_attr = hook.doc_attr(); let HookFn { inner: original_fn } = hook; let ItemFn { ref vis, ref sig, ref block, ref attrs, } =
original_fn; let mut block = *block.clone(); let hook_sig = HookSignature::rewrite(sig); let Signature { ref fn_token, ref ident, ref inputs, output: ref hook_return_type, ref generics, .. } = hook_sig.sig; let output_type = &hook_sig.output_type; let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); let call_generics = hook_sig.call_generics(); let ctx_ident = Ident::new("_ctx", Span::mixed_site()); let mut body_rewriter = BodyRewriter::new(ctx_ident.clone()); visit_mut::visit_block_mut(&mut body_rewriter, &mut block); let inner_fn_ident = Ident::new("inner_fn", Span::mixed_site()); let input_args = hook_sig.input_args(); let inner_fn_rt = match &sig.output { ReturnType::Default => None, ReturnType::Type(rarrow, _) => Some(quote! { #rarrow #output_type }), }; let inner_fn = quote! { fn #inner_fn_ident #generics (#ctx_ident: &mut ::yew::functional::HookContext, #inputs) #inner_fn_rt #where_clause #block }; let inner_type_impl = if hook_sig.needs_boxing { let hook_lifetime = &hook_sig.hook_lifetime; let hook_lifetime_plus = quote! { #hook_lifetime + }; let boxed_inner_ident = Ident::new("boxed_inner", Span::mixed_site()); let boxed_fn_type = quote! { ::std::boxed::Box<dyn #hook_lifetime_plus ::std::ops::FnOnce(&mut ::yew::functional::HookContext) #inner_fn_rt> }; quote! { let #boxed_inner_ident = ::std::boxed::Box::new( move |#ctx_ident: &mut ::yew::functional::HookContext| #inner_fn_rt { #inner_fn_ident (#ctx_ident, #(#input_args,)*) } ) as #boxed_fn_type; ::yew::functional::BoxedHook::<#hook_lifetime, #output_type>::new(#boxed_inner_ident) } } else { let input_types = hook_sig.input_types(); let args_ident = Ident::new("args", Span::mixed_site()); let hook_struct_name = Ident::new("HookProvider", Span::mixed_site()); let phantom_types = hook_sig.phantom_types(); let phantom_lifetimes = hook_sig.phantom_lifetimes(); quote! 
{ struct #hook_struct_name #generics #where_clause { _marker: ::std::marker::PhantomData<( #(#phantom_types,)* #(#phantom_lifetimes,)* )>, #args_ident: (#(#input_types,)*), } #[automatically_derived] impl #impl_generics ::yew::functional::Hook for #hook_struct_name #ty_generics #where_clause { type Output = #output_type; fn run(mut self, #ctx_ident: &mut ::yew::functional::HookContext) -> Self::Output { let (#(#input_args,)*) = self.#args_ident; #inner_fn_ident #call_generics (#ctx_ident, #(#input_args,)*) } } #[automatically_derived] impl #impl_generics #hook_struct_name #ty_generics #where_clause { fn new(#inputs) -> Self { #hook_struct_name { _marker: ::std::marker::PhantomData, #args_ident: (#(#input_args,)*), } } } #hook_struct_name #call_generics ::new(#(#input_args,)*) } }; let output = quote! { #[cfg(not(doctest))] #(#attrs)* #doc_attr #vis #fn_token #ident #generics (#inputs) #hook_return_type #where_clause { #inner_fn #inner_type_impl } #[cfg(doctest)] #original_fn }; Ok(output) }
function_block-function_prefix_line
[ { "content": "/// This hook is used to manually force a function component to re-render.\n\n///\n\n/// Try to use more specialized hooks, such as [`use_state`] and [`use_reducer`].\n\n/// This hook should only be used when your component depends on external state where you\n\n/// can't subscribe to changes, or as a low-level primitive to enable such a subscription-based\n\n/// approach.\n\n///\n\n/// For example, a large externally managed cache, such as a app-wide cache for GraphQL data\n\n/// should not rerender every component whenever new data arrives, but only those where a query\n\n/// changed.\n\n///\n\n/// If the state of your component is not shared, you should need to use this hook.\n\n///\n\n/// # Example\n\n///\n\n/// This example implements a silly, manually updated display of the current time. The component\n\n/// is rerendered every time the button is clicked. You should usually use a timeout and `use_state`\n\n/// to automatically trigger a re-render every second without having to use this hook.\n\n///\n\n/// ```rust\n\n/// # use yew::prelude::*;\n\n///\n\n/// #[function_component]\n\n/// fn ManuallyUpdatedDate() -> Html {\n\n/// let trigger = use_force_update();\n\n/// let onclick = use_state(move || Callback::from(move |_| trigger.force_update()));\n\n/// let last_update = js_sys::Date::new_0().to_utc_string();\n\n/// html! 
{\n\n/// <div>\n\n/// <button onclick={&*onclick}>{\"Update now!\"}</button>\n\n/// <p>{\"Last updated: \"}{last_update}</p>\n\n/// </div>\n\n/// }\n\n/// }\n\n/// ```\n\n///\n\n/// [`use_state`]: super::use_state()\n\n/// [`use_reducer`]: super::use_reducer()\n\npub fn use_force_update() -> impl Hook<Output = UseForceUpdate> {\n\n struct UseRerenderHook;\n\n\n\n impl Hook for UseRerenderHook {\n\n type Output = UseForceUpdate;\n\n\n\n fn run(self, ctx: &mut HookContext) -> Self::Output {\n\n UseForceUpdate {\n\n trigger: ctx.re_render.clone(),\n\n }\n\n }\n\n }\n\n\n\n UseRerenderHook\n\n}\n", "file_path": "packages/yew/src/functional/hooks/use_force_update.rs", "rank": 1, "score": 380400.271581555 }, { "content": "#[hook]\n\npub fn use_node_ref() -> NodeRef {\n\n (*use_state(NodeRef::default)).clone()\n\n}\n", "file_path": "packages/yew/src/functional/hooks/use_ref.rs", "rank": 2, "score": 349153.5897611202 }, { "content": "#[hook]\n\npub fn use_route<R>() -> Option<R>\n\nwhere\n\n R: Routable + 'static,\n\n{\n\n let navigator = use_navigator()?;\n\n let location = use_location()?;\n\n let path = navigator.strip_basename(location.path().into());\n\n\n\n R::recognize(&path)\n\n}\n", "file_path": "packages/yew-router/src/hooks.rs", "rank": 3, "score": 332669.05301629216 }, { "content": "/// Map IntoIterator<Item=Into<T>> to Iterator<Item=T>\n\npub fn into_node_iter<IT, T, R>(it: IT) -> impl Iterator<Item = R>\n\nwhere\n\n IT: IntoIterator<Item = T>,\n\n T: Into<R>,\n\n{\n\n it.into_iter().map(|n| n.into())\n\n}\n\n\n\n/// A special type necessary for flattening components returned from nested html macros.\n\n#[derive(Debug)]\n\npub struct NodeSeq<IN, OUT>(Vec<OUT>, PhantomData<IN>);\n\n\n\nimpl<IN: Into<OUT>, OUT> From<IN> for NodeSeq<IN, OUT> {\n\n fn from(val: IN) -> Self {\n\n Self(vec![val.into()], PhantomData::default())\n\n }\n\n}\n\n\n\nimpl<IN: Into<OUT>, OUT> From<Vec<IN>> for NodeSeq<IN, OUT> {\n\n fn from(val: Vec<IN>) -> Self {\n", "file_path": 
"packages/yew/src/utils/mod.rs", "rank": 4, "score": 323971.7808629667 }, { "content": "pub fn function_component_impl(\n\n name: FunctionComponentName,\n\n mut component: FunctionComponent,\n\n) -> syn::Result<TokenStream> {\n\n component.merge_component_name(name)?;\n\n\n\n let func = print_fn(&component);\n\n\n\n let into_comp_generics = component.create_into_component_generics();\n\n let component_attrs = component.filter_attrs_for_component_struct();\n\n let component_impl_attrs = component.filter_attrs_for_component_impl();\n\n let phantom_generics = component.phantom_generics();\n\n let component_name = component.component_name();\n\n let fn_name = component.inner_fn_ident();\n\n\n\n let FunctionComponent {\n\n props_type,\n\n generics,\n\n vis,\n\n ..\n", "file_path": "packages/yew-macro/src/function_component.rs", "rank": 5, "score": 315374.9963596102 }, { "content": "#[proc_macro_error::proc_macro_error]\n\n#[proc_macro_attribute]\n\npub fn function_component(attr: TokenStream, item: TokenStream) -> proc_macro::TokenStream {\n\n let item = parse_macro_input!(item as FunctionComponent);\n\n let attr = parse_macro_input!(attr as FunctionComponentName);\n\n\n\n function_component_impl(attr, item)\n\n .unwrap_or_else(|err| err.to_compile_error())\n\n .into()\n\n}\n\n\n", "file_path": "packages/yew-macro/src/lib.rs", "rank": 6, "score": 314281.31420921866 }, { "content": "#[hook]\n\npub fn use_mut_ref<T: 'static, F>(init_fn: F) -> Rc<RefCell<T>>\n\nwhere\n\n F: FnOnce() -> T,\n\n{\n\n use_memo(|_| RefCell::new(init_fn()), ())\n\n}\n\n\n\n/// This hook is used for obtaining a [`NodeRef`].\n\n/// It persists across renders.\n\n///\n\n/// It is important to note that you do not get notified of state changes.\n\n///\n\n/// # Example\n\n/// ```rust\n\n/// # use wasm_bindgen::{prelude::Closure, JsCast};\n\n/// # use yew::{\n\n/// # function_component, html, use_effect_with_deps, use_node_ref,\n\n/// # Html,\n\n/// # };\n\n/// # use web_sys::{Event, HtmlElement};\n", 
"file_path": "packages/yew/src/functional/hooks/use_ref.rs", "rank": 7, "score": 311996.6600734791 }, { "content": "#[hook]\n\npub fn use_some_macro_inner(val: &str) -> String {\n\n use_state(|| val.to_owned()).to_string()\n\n}\n\n\n\nmacro_rules! use_some_macro {\n\n () => {\n\n use_some_macro_inner(\"default str\")\n\n };\n\n ($t: tt) => {\n\n use_some_macro_inner($t)\n\n };\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/hook_attr/hook_macro-fail.rs", "rank": 8, "score": 311468.8049927794 }, { "content": "#[proc_macro_error::proc_macro_error]\n\n#[proc_macro_attribute]\n\npub fn hook(attr: TokenStream, item: TokenStream) -> proc_macro::TokenStream {\n\n let item = parse_macro_input!(item as HookFn);\n\n\n\n if let Some(m) = proc_macro2::TokenStream::from(attr).into_iter().next() {\n\n return syn::Error::new_spanned(m, \"hook attribute does not accept any arguments\")\n\n .into_compile_error()\n\n .into();\n\n }\n\n\n\n hook_impl(item)\n\n .unwrap_or_else(|err| err.to_compile_error())\n\n .into()\n\n}\n", "file_path": "packages/yew-macro/src/lib.rs", "rank": 9, "score": 306871.23662122793 }, { "content": "#[wasm_bindgen(start)]\n\npub fn start() {\n\n let document = window().unwrap().document().unwrap();\n\n let mount_el = document.query_selector(\"#main\").unwrap().unwrap();\n\n yew::start_app_in_element::<App>(mount_el);\n\n}\n", "file_path": "tools/benchmark-struct/src/lib.rs", "rank": 10, "score": 298604.81999922456 }, { "content": "#[wasm_bindgen(start)]\n\npub fn start() {\n\n let document = window().unwrap().document().unwrap();\n\n let mount_el = document.query_selector(\"#main\").unwrap().unwrap();\n\n yew::start_app_in_element::<App>(mount_el);\n\n}\n", "file_path": "tools/benchmark-hooks/src/lib.rs", "rank": 11, "score": 298406.2699655198 }, { "content": "#[::yew::functional::hook]\n\npub fn use_some_macro_inner(val: &str) -> ::std::string::String {\n\n let state = ::yew::functional::use_state(|| ::std::borrow::ToOwned::to_owned(val));\n\n 
::std::string::ToString::to_string(&*state)\n\n}\n\n\n\nmacro_rules! use_some_macro {\n\n () => {\n\n use_some_macro_inner(\"default str\")\n\n };\n\n ($t: tt) => {\n\n use_some_macro_inner($t)\n\n };\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/hook_attr/hook_macro-pass.rs", "rank": 12, "score": 297819.32470477186 }, { "content": "#[::yew::prelude::hook]\n\nfn use_a_const<const N: u32>() -> u32 {\n\n N\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/hook_attr/hook-const-generic-pass.rs", "rank": 13, "score": 291646.83903783315 }, { "content": "#[::yew::prelude::hook]\n\nfn use_impl_fn<T, U>(_callback: impl ::std::prelude::rust_2021::Fn(&T) -> &U) {}\n\n\n", "file_path": "packages/yew-macro/tests/hook_attr/hook-impl-trait-pass.rs", "rank": 14, "score": 284719.1964217171 }, { "content": "#[hook]\n\npub fn use_effect<F, D>(f: F)\n\nwhere\n\n F: FnOnce() -> D + 'static,\n\n D: FnOnce() + 'static,\n\n{\n\n use_effect_base(|_| f(), (), |_, _| true);\n\n}\n\n\n\n/// This hook is similar to [`use_effect`] but it accepts dependencies.\n\n///\n\n/// Whenever the dependencies are changed, the effect callback is called again.\n\n/// To detect changes, dependencies must implement `PartialEq`.\n\n/// Note that the destructor also runs when dependencies change.\n", "file_path": "packages/yew/src/functional/hooks/use_effect.rs", "rank": 15, "score": 278242.9808997677 }, { "content": "/// A trait that is implemented on hooks.\n\n///\n\n/// Hooks are defined via the [`#[hook]`](crate::functional::hook) macro. 
It provides rewrites to hook invocations\n\n/// and ensures that hooks can only be called at the top-level of a function component or a hook.\n\n/// Please refer to its documentation on how to implement hooks.\n\npub trait Hook {\n\n /// The return type when a hook is run.\n\n type Output;\n\n\n\n /// Runs the hook inside current state, returns output upon completion.\n\n fn run(self, ctx: &mut HookContext) -> Self::Output;\n\n}\n\n\n\n/// The blanket implementation of boxed hooks.\n\n#[doc(hidden)]\n\n#[allow(missing_debug_implementations, missing_docs)]\n\npub struct BoxedHook<'hook, T> {\n\n inner: Box<dyn 'hook + FnOnce(&mut HookContext) -> T>,\n\n}\n\n\n\nimpl<'hook, T> BoxedHook<'hook, T> {\n\n #[allow(missing_docs)]\n\n pub fn new(inner: Box<dyn 'hook + FnOnce(&mut HookContext) -> T>) -> Self {\n\n Self { inner }\n\n }\n", "file_path": "packages/yew/src/functional/hooks/mod.rs", "rank": 16, "score": 273721.7143089541 }, { "content": "#[hook]\n\npub fn use_reducer<T, F>(init_fn: F) -> UseReducerHandle<T>\n\nwhere\n\n T: Reducible + 'static,\n\n F: FnOnce() -> T,\n\n{\n\n use_reducer_base(init_fn, |_, _| true)\n\n}\n\n\n\n/// [`use_reducer`] but only re-renders when `prev_state != next_state`.\n\n///\n\n/// This requires the state to implement [`PartialEq`] in addition to the [`Reducible`] trait\n\n/// required by [`use_reducer`].\n", "file_path": "packages/yew/src/functional/hooks/use_reducer.rs", "rank": 17, "score": 271563.4756634898 }, { "content": "#[hook]\n\npub fn use_state<T, F>(init_fn: F) -> UseStateHandle<T>\n\nwhere\n\n T: 'static,\n\n F: FnOnce() -> T,\n\n{\n\n let handle = use_reducer(move || UseStateReducer { value: init_fn() });\n\n\n\n UseStateHandle { inner: handle }\n\n}\n\n\n\n/// [`use_state`] but only re-renders when `prev_state != next_state`.\n\n///\n\n/// This hook requires the state to implement [`PartialEq`].\n", "file_path": "packages/yew/src/functional/hooks/use_state.rs", "rank": 18, "score": 271563.47566348984 }, { "content": 
"#[function_component(Redirect)]\n\npub fn redirect<R>(props: &RedirectProps<R>) -> Html\n\nwhere\n\n R: Routable + 'static,\n\n{\n\n let history = use_navigator().expect_throw(\"failed to read history.\");\n\n\n\n let target_route = props.to.clone();\n\n use_effect(move || {\n\n history.push(target_route.clone());\n\n\n\n || {}\n\n });\n\n\n\n Html::default()\n\n}\n", "file_path": "packages/yew-router/src/components/redirect.rs", "rank": 19, "score": 270115.75217702775 }, { "content": "#[hook]\n\npub fn use_bool_toggle(default: bool) -> UseBoolToggleHandle {\n\n let state = use_state_eq(|| default);\n\n\n\n let toggle = {\n\n let state = state.clone();\n\n Rc::new(move || state.set(!*state))\n\n };\n\n\n\n UseBoolToggleHandle {\n\n value: state,\n\n toggle,\n\n }\n\n}\n", "file_path": "examples/function_todomvc/src/hooks/use_bool_toggle.rs", "rank": 20, "score": 270019.136422536 }, { "content": "#[hook]\n\npub fn use_reducer_eq<T, F>(init_fn: F) -> UseReducerHandle<T>\n\nwhere\n\n T: Reducible + PartialEq + 'static,\n\n F: FnOnce() -> T,\n\n{\n\n use_reducer_base(init_fn, T::ne)\n\n}\n", "file_path": "packages/yew/src/functional/hooks/use_reducer.rs", "rank": 21, "score": 268644.8863682927 }, { "content": "#[hook]\n\npub fn use_state_eq<T, F>(init_fn: F) -> UseStateHandle<T>\n\nwhere\n\n T: PartialEq + 'static,\n\n F: FnOnce() -> T,\n\n{\n\n let handle = use_reducer_eq(move || UseStateReducer { value: init_fn() });\n\n\n\n UseStateHandle { inner: handle }\n\n}\n\n\n\n/// State handle for the [`use_state`] hook.\n\npub struct UseStateHandle<T> {\n\n inner: UseReducerHandle<UseStateReducer<T>>,\n\n}\n\n\n\nimpl<T: fmt::Debug> fmt::Debug for UseStateHandle<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"UseStateHandle\")\n\n .field(\"value\", &format!(\"{:?}\", self.inner.value))\n\n .finish()\n", "file_path": "packages/yew/src/functional/hooks/use_state.rs", "rank": 22, "score": 268644.8863682927 }, { "content": 
"#[derive(Clone, Properties, PartialEq)]\n\nstruct Props {\n\n a: usize,\n\n}\n\n\n\n#[function_component(Comp)]\n\nextern \"C\" fn comp(props: &Props) -> Html {\n\n html! {\n\n <p>\n\n { props.a }\n\n </p>\n\n }\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/extern-fail.rs", "rank": 23, "score": 265021.4143899144 }, { "content": "#[derive(Clone, Properties, PartialEq)]\n\nstruct Props {\n\n a: usize,\n\n}\n\n\n\n#[function_component(Comp)]\n\nasync fn comp(props: &Props) -> Html {\n\n html! {\n\n <p>\n\n { props.a }\n\n </p>\n\n }\n\n}\n\n\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/async-fail.rs", "rank": 24, "score": 265021.4143899144 }, { "content": "#[derive(Clone, Properties, PartialEq)]\n\nstruct Props {\n\n a: usize,\n\n}\n\n\n\n#[function_component(Comp)]\n\nconst fn comp(props: &Props) -> Html {\n\n html! {\n\n <p>\n\n { props.a }\n\n </p>\n\n }\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/const-fail.rs", "rank": 25, "score": 265008.4300521292 }, { "content": "fn print_fn(func_comp: &FunctionComponent) -> TokenStream {\n\n let name = func_comp.inner_fn_ident();\n\n let FunctionComponent {\n\n ref fn_token,\n\n ref attrs,\n\n ref block,\n\n ref return_type,\n\n ref generics,\n\n ref arg,\n\n ..\n\n } = func_comp;\n\n let mut block = *block.clone();\n\n let (impl_generics, _ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n // We use _ctx here so if the component does not use any hooks, the usused_vars lint will not\n\n // be triggered.\n\n let ctx_ident = Ident::new(\"_ctx\", Span::mixed_site());\n\n\n\n let mut body_rewriter = BodyRewriter::new(ctx_ident.clone());\n\n visit_mut::visit_block_mut(&mut body_rewriter, &mut block);\n", "file_path": "packages/yew-macro/src/function_component.rs", "rank": 26, "score": 264430.8922094438 }, { "content": "/// The base function of [`use_reducer`] and [`use_reducer_eq`]\n\nfn use_reducer_base<'hook, T>(\n\n init_fn: 
impl 'hook + FnOnce() -> T,\n\n should_render_fn: fn(&T, &T) -> bool,\n\n) -> impl 'hook + Hook<Output = UseReducerHandle<T>>\n\nwhere\n\n T: Reducible + 'static,\n\n{\n\n struct HookProvider<'hook, T, F>\n\n where\n\n T: Reducible + 'static,\n\n F: 'hook + FnOnce() -> T,\n\n {\n\n _marker: PhantomData<&'hook ()>,\n\n\n\n init_fn: F,\n\n should_render_fn: fn(&T, &T) -> bool,\n\n }\n\n\n\n impl<'hook, T, F> Hook for HookProvider<'hook, T, F>\n\n where\n", "file_path": "packages/yew/src/functional/hooks/use_reducer.rs", "rank": 27, "score": 262524.79174664785 }, { "content": "#[derive(Clone, Properties, PartialEq)]\n\nstruct Props {\n\n a: usize,\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/generic-lifetime-fail.rs", "rank": 28, "score": 260641.96801803107 }, { "content": "#[derive(::yew::prelude::Properties, ::std::prelude::rust_2021::PartialEq,)]\n\nstruct Props {\n\n a: usize,\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/no-name-default-pass.rs", "rank": 29, "score": 260636.92393584712 }, { "content": "#[derive(Clone, Properties, PartialEq)]\n\nstruct Props {\n\n a: usize,\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/bad-name-fail.rs", "rank": 30, "score": 260636.92393584712 }, { "content": "struct Ctx;\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/hook_location-pass.rs", "rank": 31, "score": 260243.2782273697 }, { "content": "#[derive(Debug, PartialEq, Clone)]\n\nstruct Ctx;\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/hook_location-fail.rs", "rank": 32, "score": 260243.2782273697 }, { "content": "#[::yew::functional::hook]\n\nfn use_as_is<'a>(input: &'a ()) -> &'a () {\n\n input\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/hook_attr/hook-lifetime-pass.rs", "rank": 33, "score": 259201.5112979235 }, { "content": "#[hook]\n\npub fn use_context<T: Clone + PartialEq + 'static>() -> Option<T> {\n\n struct UseContext<T: 
Clone + PartialEq + 'static> {\n\n context: Option<(T, ContextHandle<T>)>,\n\n }\n\n\n\n let scope = use_component_scope();\n\n\n\n let val = use_state(|| -> Option<T> { None });\n\n let state = {\n\n let val_dispatcher = val.setter();\n\n use_memo(\n\n move |_| UseContext {\n\n context: scope.context::<T>(Callback::from(move |m| {\n\n val_dispatcher.clone().set(Some(m));\n\n })),\n\n },\n\n (),\n\n )\n\n };\n\n\n\n // we fallback to initial value if it was not updated.\n\n (*val)\n\n .clone()\n\n .or_else(move || state.context.as_ref().map(|m| m.0.clone()))\n\n}\n", "file_path": "packages/yew/src/functional/hooks/use_context.rs", "rank": 34, "score": 258952.88087679545 }, { "content": "fn main() {}\n", "file_path": "packages/yew-macro/tests/function_component_attr/async-fail.rs", "rank": 35, "score": 257812.34335339014 }, { "content": "fn main() {}\n", "file_path": "packages/yew-macro/tests/function_component_attr/extern-fail.rs", "rank": 36, "score": 257812.34335339014 }, { "content": "fn main() {}\n", "file_path": "packages/yew-macro/tests/function_component_attr/const-fail.rs", "rank": 37, "score": 257799.3590156049 }, { "content": "#[::yew::prelude::function_component(App)]\n\npub fn app() -> ::yew::prelude::Html {\n\n ::yew::prelude::html! { <Comp /> } // No generics here.\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/with-defaulted-type-param-pass.rs", "rank": 38, "score": 257197.0786883626 }, { "content": "#[::yew::function_component(ConstGenerics)]\n\nfn const_generics<const N: ::std::primitive::i32>() -> ::yew::Html {\n\n ::yew::html! 
{\n\n <div>\n\n { N }\n\n </div>\n\n }\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/generic-pass.rs", "rank": 39, "score": 256970.02905230317 }, { "content": "#[hook]\n\npub fn use_effect_with_deps<T, F, D>(f: F, deps: T)\n\nwhere\n\n T: PartialEq + 'static,\n\n F: FnOnce(&T) -> D + 'static,\n\n D: FnOnce() + 'static,\n\n{\n\n use_effect_base(f, deps, |lhs, rhs| lhs != rhs)\n\n}\n", "file_path": "packages/yew/src/functional/hooks/use_effect.rs", "rank": 40, "score": 256759.0966421225 }, { "content": "#[derive(Clone, Properties, PartialEq)]\n\nstruct Props {\n\n a: usize,\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/lifetime-props-param-fail.rs", "rank": 41, "score": 256463.34944113452 }, { "content": "#[function_component(Comp)]\n\nstruct Test;\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/applied-to-non-fn-fail.rs", "rank": 42, "score": 255559.52979835743 }, { "content": "#[derive(Clone, Properties, PartialEq)]\n\nstruct Props {\n\n a: usize,\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/applied-to-non-fn-fail.rs", "rank": 43, "score": 255548.48281801512 }, { "content": "#[::yew::prelude::hook]\n\nfn use_some_string(a: impl ::std::convert::Into<::std::string::String>) -> ::std::string::String {\n\n a.into()\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/hook_attr/hook-impl-trait-pass.rs", "rank": 44, "score": 253597.11760680156 }, { "content": "fn main() {}\n", "file_path": "packages/yew-macro/tests/function_component_attr/generic-lifetime-fail.rs", "rank": 45, "score": 253447.02923245818 }, { "content": "fn main() {}\n", "file_path": "packages/yew-macro/tests/function_component_attr/bad-name-fail.rs", "rank": 46, "score": 253441.98515027427 }, { "content": "fn main() {\n\n let _ = ::yew::prelude::html! 
{\n\n <Comp a={0} />\n\n };\n\n}\n", "file_path": "packages/yew-macro/tests/function_component_attr/no-name-default-pass.rs", "rank": 47, "score": 253441.98515027427 }, { "content": "#[inline]\n\nfn with<R>(f: impl FnOnce(&mut Scheduler) -> R) -> R {\n\n thread_local! {\n\n /// This is a global scheduler suitable to schedule and run any tasks.\n\n ///\n\n /// Exclusivity of mutable access is controlled by only accessing it through a set of public\n\n /// functions.\n\n static SCHEDULER: RefCell<Scheduler> = Default::default();\n\n }\n\n\n\n SCHEDULER.with(|s| f(&mut *s.borrow_mut()))\n\n}\n\n\n", "file_path": "packages/yew/src/scheduler.rs", "rank": 48, "score": 253404.00714916422 }, { "content": "fn main() {}\n", "file_path": "packages/yew-macro/tests/function_component_attr/hook_location-pass.rs", "rank": 49, "score": 253048.3394417968 }, { "content": "fn main() {}\n", "file_path": "packages/yew-macro/tests/function_component_attr/hook_location-fail.rs", "rank": 50, "score": 253048.3394417968 }, { "content": "#[hook]\n\npub fn use_callback<IN, OUT, F, D>(f: F, deps: D) -> Callback<IN, OUT>\n\nwhere\n\n IN: 'static,\n\n OUT: 'static,\n\n F: Fn(IN) -> OUT + 'static,\n\n D: PartialEq + 'static,\n\n{\n\n (*use_memo(move |_| Callback::from(f), deps)).clone()\n\n}\n", "file_path": "packages/yew/src/functional/hooks/use_callback.rs", "rank": 51, "score": 253024.46555643232 }, { "content": "#[derive(Clone, Properties, PartialEq)]\n\nstruct Props {\n\n a: usize,\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/mut-ref-props-param-fail.rs", "rank": 52, "score": 252428.09909538526 }, { "content": "#[hook]\n\npub fn use_memo<T, F, D>(f: F, deps: D) -> Rc<T>\n\nwhere\n\n T: 'static,\n\n F: FnOnce(&D) -> T,\n\n D: 'static + PartialEq,\n\n{\n\n let val = use_state(|| -> RefCell<Option<Rc<T>>> { RefCell::new(None) });\n\n let last_deps = use_state(|| -> RefCell<Option<D>> { RefCell::new(None) });\n\n\n\n let mut val = val.borrow_mut();\n\n let mut 
last_deps = last_deps.borrow_mut();\n\n\n\n match (\n\n val.as_ref(),\n\n last_deps.as_ref().and_then(|m| (m != &deps).then(|| ())),\n\n ) {\n\n // Previous value exists and last_deps == deps\n\n (Some(m), None) => m.clone(),\n\n _ => {\n\n let new_val = Rc::new(f(&deps));\n\n *last_deps = Some(deps);\n\n\n\n *val = Some(new_val.clone());\n\n\n\n new_val\n\n }\n\n }\n\n}\n", "file_path": "packages/yew/src/functional/hooks/use_memo.rs", "rank": 53, "score": 249639.25265551326 }, { "content": "fn main() {}\n", "file_path": "packages/yew-macro/tests/function_component_attr/lifetime-props-param-fail.rs", "rank": 54, "score": 249282.0069768207 }, { "content": "#[function_component(component)]\n\nfn component(props: &Props) -> Html {\n\n html! {\n\n <p>\n\n { props.a }\n\n </p>\n\n }\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/bad-name-fail.rs", "rank": 55, "score": 247731.07647142492 }, { "content": "#[wasm_bindgen(start)]\n\npub fn start() {\n\n use js_sys::{global, Reflect};\n\n\n\n if Reflect::has(&global(), &JsValue::from_str(\"window\")).unwrap() {\n\n yew::Renderer::<App>::new().render();\n\n } else {\n\n agent::Worker::register();\n\n }\n\n}\n", "file_path": "examples/web_worker_fib/src/lib.rs", "rank": 56, "score": 247608.08141935174 }, { "content": "/// Some attributes on the original struct are to be preserved and added to the builder struct,\n\n/// in order to avoid warnings (sometimes reported as errors) in the output.\n\nfn should_preserve_attr(attr: &Attribute) -> bool {\n\n // #[cfg(...)]: does not usually appear in macro inputs, but rust-analyzer seems to generate it sometimes.\n\n // If not preserved, results in \"no-such-field\" errors generating the field setter for `build`\n\n // #[allow(...)]: silences warnings from clippy, such as dead_code etc.\n\n // #[deny(...)]: enable additional warnings from clippy\n\n let path = &attr.path;\n\n path.is_ident(\"allow\") || path.is_ident(\"deny\") || 
path.is_ident(\"cfg\")\n\n}\n\n\n\nimpl Parse for DerivePropsInput {\n\n fn parse(input: ParseStream) -> Result<Self> {\n\n let input: DeriveInput = input.parse()?;\n\n let prop_fields = match input.data {\n\n syn::Data::Struct(data) => match data.fields {\n\n syn::Fields::Named(fields) => {\n\n let mut prop_fields: Vec<PropField> = fields\n\n .named\n\n .into_iter()\n\n .map(|f| f.try_into())\n\n .collect::<Result<Vec<PropField>>>()?;\n", "file_path": "packages/yew-macro/src/derive_props/mod.rs", "rank": 57, "score": 247484.03934662172 }, { "content": "#[function_component]\n\nfn Comp() -> Html {\n\n if let Some(_m) = use_context::<Ctx>() {\n\n use_context::<Ctx>().unwrap();\n\n todo!()\n\n }\n\n\n\n let _ = || {\n\n use_context::<Ctx>().unwrap();\n\n todo!()\n\n };\n\n\n\n for _ in 0..10 {\n\n use_context::<Ctx>().unwrap();\n\n }\n\n\n\n while let Some(_m) = use_context::<Ctx>() {\n\n use_context::<Ctx>().unwrap();\n\n }\n\n\n\n match use_context::<Ctx>() {\n", "file_path": "packages/yew-macro/tests/function_component_attr/hook_location-fail.rs", "rank": 58, "score": 246710.29348182704 }, { "content": "#[function_component]\n\npub fn Nav() -> Html {\n\n let navbar_active = use_state_eq(|| false);\n\n\n\n let toggle_navbar = {\n\n let navbar_active = navbar_active.clone();\n\n\n\n Callback::from(move |_| {\n\n navbar_active.set(!*navbar_active);\n\n })\n\n };\n\n\n\n let active_class = if !*navbar_active { \"is-active\" } else { \"\" };\n\n\n\n html! 
{\n\n <nav class=\"navbar is-primary\" role=\"navigation\" aria-label=\"main navigation\">\n\n <div class=\"navbar-brand\">\n\n <h1 class=\"navbar-item is-size-3\">{ \"Yew Blog\" }</h1>\n\n\n\n <button class={classes!(\"navbar-burger\", \"burger\", active_class)}\n\n aria-label=\"menu\" aria-expanded=\"false\"\n", "file_path": "examples/function_router/src/components/nav.rs", "rank": 59, "score": 246268.32571398775 }, { "content": "fn main() {}\n", "file_path": "packages/yew-macro/tests/function_component_attr/mut-ref-props-param-fail.rs", "rank": 60, "score": 245259.84694100608 }, { "content": "#[function_component]\n\npub fn App() -> Html {\n\n let state = use_reducer(State::reset);\n\n let sec_past = use_state(|| 0_u32);\n\n let sec_past_timer: Rc<RefCell<Option<Interval>>> = use_mut_ref(|| None);\n\n let flip_back_timer: Rc<RefCell<Option<Timeout>>> = use_mut_ref(|| None);\n\n let sec_past_time = *sec_past;\n\n\n\n use_effect_with_deps(\n\n move |state| {\n\n // game reset\n\n if state.status == Status::Ready {\n\n sec_past.set(0);\n\n }\n\n // game start\n\n else if *sec_past == 0 && state.last_card.is_some() {\n\n let sec_past = sec_past.clone();\n\n let mut sec = *sec_past;\n\n *sec_past_timer.borrow_mut() = Some(Interval::new(1000, move || {\n\n sec += 1;\n\n sec_past.set(sec);\n", "file_path": "examples/function_memory_game/src/components/app.rs", "rank": 61, "score": 242679.30269183347 }, { "content": "#[hook]\n\npub fn use_location() -> Option<Location> {\n\n Some(use_context::<LocationContext>()?.location())\n\n}\n\n\n\n/// A hook to access the current route.\n\n///\n\n/// This hook will return [`None`] if there's no available location or none of the routes match.\n\n///\n\n/// # Note\n\n///\n\n/// If your `Routable` has a `#[not_found]` route, you can use `.unwrap_or_default()` instead of\n\n/// `.unwrap()` to unwrap.\n", "file_path": "packages/yew-router/src/hooks.rs", "rank": 62, "score": 240215.3415555901 }, { "content": "#[hook]\n\npub fn 
use_navigator() -> Option<Navigator> {\n\n use_context::<NavigatorContext>().map(|m| m.navigator())\n\n}\n\n\n\n/// A hook to access the current [`Location`].\n", "file_path": "packages/yew-router/src/hooks.rs", "rank": 63, "score": 240215.3415555901 }, { "content": "#[function_component(InfoFooter)]\n\npub fn info_footer() -> Html {\n\n html! {\n\n <footer class=\"info\">\n\n <p>{ \"Double-click to edit a todo\" }</p>\n\n <p>{ \"Written by \" }<a href=\"https://github.com/Yoroshikun/\" target=\"_blank\">{ \"Drew Hutton <Yoroshi>\" }</a></p>\n\n <p>{ \"Part of \" }<a href=\"http://todomvc.com/\" target=\"_blank\">{ \"TodoMVC\" }</a></p>\n\n </footer>\n\n }\n\n}\n", "file_path": "examples/function_todomvc/src/components/info_footer.rs", "rank": 64, "score": 239228.76762201474 }, { "content": "#[::yew::prelude::function_component(Comp)]\n\npub fn comp<A = ()>(_props: &CompProps<A>) -> ::yew::prelude::Html {\n\n ::std::todo!()\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/with-defaulted-type-param-pass.rs", "rank": 65, "score": 236948.73671440542 }, { "content": "#[function_component]\n\npub fn Logo() -> Html {\n\n html! {\n\n <h1 class=\"logo\">\n\n <a href=\"https://examples.yew.rs/function_memory_game\" target=\"_blank\">{\"Memory\"}</a>\n\n </h1>\n\n }\n\n}\n", "file_path": "examples/function_memory_game/src/components/score_board_logo.rs", "rank": 66, "score": 235909.57037130356 }, { "content": "#[function_component(let)]\n\nfn comp(props: &Props) -> Html {\n\n html! {\n\n <p>\n\n { props.a }\n\n </p>\n\n }\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/bad-name-fail.rs", "rank": 67, "score": 235600.28394859785 }, { "content": "#[function_component(x, y, z)]\n\nfn comp_2(props: &Props) -> Html {\n\n html! 
{\n\n <p>\n\n { props.a }\n\n </p>\n\n }\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/bad-name-fail.rs", "rank": 68, "score": 235594.73489524936 }, { "content": "#[function_component(124)]\n\nfn comp_3(props: &Props) -> Html {\n\n html! {\n\n <p>\n\n { props.a }\n\n </p>\n\n }\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/bad-name-fail.rs", "rank": 69, "score": 235594.73489524936 }, { "content": "#[::yew::prelude::function_component]\n\nfn Comp() -> ::yew::prelude::Html {\n\n ::yew::prelude::use_context::<Ctx>().unwrap();\n\n\n\n if let ::std::prelude::rust_2021::Some(_m) = ::yew::prelude::use_context::<Ctx>() {\n\n ::std::todo!()\n\n }\n\n\n\n let _ctx = { ::yew::prelude::use_context::<Ctx>() };\n\n\n\n match ::yew::prelude::use_context::<Ctx>() {\n\n ::std::prelude::rust_2021::Some(_) => {\n\n ::std::todo!()\n\n }\n\n ::std::prelude::rust_2021::None => {\n\n ::std::todo!()\n\n }\n\n }\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/hook_location-pass.rs", "rank": 70, "score": 235222.4158598129 }, { "content": "#[function_component(Comp)]\n\nfn comp<'a>(props: &'a Props) -> Html {\n\n html! {\n\n <p>\n\n { props.a }\n\n </p>\n\n }\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/generic-lifetime-fail.rs", "rank": 71, "score": 232718.49644819455 }, { "content": "#[function_component]\n\npub fn Links(props: &Props) -> Html {\n\n const LINKS_PER_SIDE: usize = 3;\n\n\n\n let Props {\n\n page, total_pages, ..\n\n } = *props;\n\n\n\n let pages_prev = page.checked_sub(1).unwrap_or_default() as usize;\n\n let pages_next = (total_pages - page) as usize;\n\n\n\n let links_left = LINKS_PER_SIDE.min(pages_prev)\n\n // if there are less than `LINKS_PER_SIDE` to the right, we add some more on the left.\n\n + LINKS_PER_SIDE.checked_sub(pages_next).unwrap_or_default();\n\n let links_right = 2 * LINKS_PER_SIDE - links_left;\n\n\n\n html! 
{\n\n <>\n\n <RenderLinks range={ 1..page } len={pages_prev} max_links={links_left} props={props.clone()} />\n\n <RenderLink to_page={page} props={props.clone()} />\n\n <RenderLinks range={ page + 1..total_pages + 1 } len={pages_next} max_links={links_right} props={props.clone()} />\n\n </>\n\n }\n\n}\n\n\n", "file_path": "examples/function_router/src/components/pagination.rs", "rank": 72, "score": 232261.74738008782 }, { "content": "#[function_component]\n\npub fn Pagination(props: &Props) -> Html {\n\n html! {\n\n <nav class=\"pagination is-right\" role=\"navigation\" aria-label=\"pagination\">\n\n <RelNavButtons ..{props.clone()} />\n\n <ul class=\"pagination-list\">\n\n <Links ..{props.clone()} />\n\n </ul>\n\n </nav>\n\n }\n\n}\n", "file_path": "examples/function_router/src/components/pagination.rs", "rank": 73, "score": 232261.74738008782 }, { "content": "struct UseReducer<T>\n\nwhere\n\n T: Reducible,\n\n{\n\n current_state: Rc<RefCell<Rc<T>>>,\n\n\n\n dispatch: DispatchFn<T>,\n\n}\n\n\n\n/// State handle for [`use_reducer`] and [`use_reducer_eq`] hook\n\npub struct UseReducerHandle<T>\n\nwhere\n\n T: Reducible,\n\n{\n\n value: Rc<T>,\n\n dispatch: DispatchFn<T>,\n\n}\n\n\n\nimpl<T> UseReducerHandle<T>\n\nwhere\n", "file_path": "packages/yew/src/functional/hooks/use_reducer.rs", "rank": 74, "score": 229332.8850949011 }, { "content": "#[function_component]\n\npub fn Chessboard(props: &Props) -> Html {\n\n html! {\n\n <div class=\"chess-board\">\n\n { for props.cards.iter().map(|card|\n\n html! 
{\n\n <ChessboardCard card={card.clone()} on_flip={&props.on_flip} />\n\n }\n\n ) }\n\n </div>\n\n }\n\n}\n", "file_path": "examples/function_memory_game/src/components/chessboard.rs", "rank": 75, "score": 228942.30291791252 }, { "content": "#[function_component]\n\npub fn Filter(props: &FilterProps) -> Html {\n\n let filter = props.filter;\n\n\n\n let cls = if props.selected {\n\n \"selected\"\n\n } else {\n\n \"not-selected\"\n\n };\n\n\n\n let onset_filter = {\n\n let onset_filter = props.onset_filter.clone();\n\n move |_| onset_filter.emit(filter)\n\n };\n\n\n\n html! {\n\n <li>\n\n <a class={cls}\n\n href={props.filter.as_href()}\n\n onclick={onset_filter}\n\n >\n\n { props.filter }\n\n </a>\n\n </li>\n\n }\n\n}\n", "file_path": "examples/function_todomvc/src/components/filter.rs", "rank": 76, "score": 228942.30291791254 }, { "content": "#[function_component(Entry)]\n\npub fn entry(props: &EntryProps) -> Html {\n\n let id = props.entry.id;\n\n let mut class = Classes::from(\"todo\");\n\n\n\n // We use the `use_bool_toggle` hook and set the default value to `false`\n\n // as the default we are not editing the the entry. 
When we want to edit the\n\n // entry we can call the toggle method on the `UseBoolToggleHandle`\n\n // which will trigger a re-render with the toggle value being `true` for that\n\n // render and after that render the value of toggle will be flipped back to\n\n // its default (`false`).\n\n // We are relying on the behavior of `onblur` and `onkeypress` to cause\n\n // another render so that this component will render again with the\n\n // default value of toggle.\n\n let edit_toggle = use_bool_toggle(false);\n\n let is_editing = *edit_toggle;\n\n\n\n if is_editing {\n\n class.push(\"editing\");\n\n }\n\n\n", "file_path": "examples/function_todomvc/src/components/entry.rs", "rank": 77, "score": 228942.1779293171 }, { "content": "#[derive(PartialEq, Debug, Default, Clone)]\n\nstruct NodeRefInner {\n\n node: Option<Node>,\n\n link: Option<NodeRef>,\n\n}\n\n\n\nimpl NodeRef {\n\n /// Get the wrapped Node reference if it exists\n\n pub fn get(&self) -> Option<Node> {\n\n let inner = self.0.borrow();\n\n inner.node.clone().or_else(|| inner.link.as_ref()?.get())\n\n }\n\n\n\n /// Try converting the node reference into another form\n\n pub fn cast<INTO: AsRef<Node> + From<JsValue>>(&self) -> Option<INTO> {\n\n let node = self.get();\n\n node.map(Into::into).map(INTO::from)\n\n }\n\n\n\n /// Place a Node in a reference for later use\n\n pub(crate) fn set(&self, node: Option<Node>) {\n", "file_path": "packages/yew/src/html/mod.rs", "rank": 78, "score": 228728.33964412974 }, { "content": "#[function_component(Comp)]\n\nfn comp(props: &'static Props) -> Html {\n\n html! 
{\n\n <p>\n\n { props.a }\n\n </p>\n\n }\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/lifetime-props-param-fail.rs", "rank": 79, "score": 226762.2715888549 }, { "content": "struct UseStateReducer<T> {\n\n value: T,\n\n}\n\n\n\nimpl<T> Reducible for UseStateReducer<T> {\n\n type Action = T;\n\n fn reduce(self: Rc<Self>, action: Self::Action) -> Rc<Self> {\n\n Rc::new(Self { value: action })\n\n }\n\n}\n\n\n\nimpl<T> PartialEq for UseStateReducer<T>\n\nwhere\n\n T: PartialEq,\n\n{\n\n fn eq(&self, rhs: &Self) -> bool {\n\n self.value == rhs.value\n\n }\n\n}\n\n\n", "file_path": "packages/yew/src/functional/hooks/use_state.rs", "rank": 80, "score": 226029.93734452012 }, { "content": "#[function_component]\n\npub fn AuthorCard(props: &Props) -> Html {\n\n let seed = props.seed;\n\n\n\n let author = use_reducer_eq(|| AuthorState {\n\n inner: Author::generate_from_seed(seed),\n\n });\n\n\n\n {\n\n let author_dispatcher = author.dispatcher();\n\n use_effect_with_deps(\n\n move |seed| {\n\n author_dispatcher.dispatch(*seed);\n\n\n\n || {}\n\n },\n\n seed,\n\n );\n\n }\n\n\n\n let author = &author.inner;\n", "file_path": "examples/function_router/src/components/author_card.rs", "rank": 81, "score": 225746.39749668652 }, { "content": "#[function_component]\n\npub fn PostCard(props: &Props) -> Html {\n\n let seed = props.seed;\n\n\n\n let post = use_reducer_eq(|| PostMetaState {\n\n inner: PostMeta::generate_from_seed(seed),\n\n });\n\n\n\n {\n\n let post_dispatcher = post.dispatcher();\n\n use_effect_with_deps(\n\n move |seed| {\n\n post_dispatcher.dispatch(*seed);\n\n\n\n || {}\n\n },\n\n seed,\n\n );\n\n }\n\n\n\n let post = &post.inner;\n", "file_path": "examples/function_router/src/components/post_card.rs", "rank": 82, "score": 225746.39749668655 }, { "content": "#[function_component]\n\npub fn ProgressDelay(props: &Props) -> Html {\n\n let Props { duration_ms, .. 
} = props.clone();\n\n\n\n let value = {\n\n let props = props.clone();\n\n use_reducer(move || ValueState {\n\n start: Instant::now(),\n\n value: 0.0,\n\n\n\n props,\n\n })\n\n };\n\n\n\n {\n\n let value = value.clone();\n\n use_effect_with_deps(\n\n move |_| {\n\n let interval = (duration_ms / RESOLUTION).min(MIN_INTERVAL_MS);\n\n let interval =\n\n Interval::new(interval as u32, move || value.dispatch(ValueAction::Tick));\n", "file_path": "examples/function_router/src/components/progress_delay.rs", "rank": 83, "score": 225746.39749668652 }, { "content": "#[function_component]\n\npub fn RelNavButtons(props: &Props) -> Html {\n\n let Props {\n\n page,\n\n total_pages,\n\n route_to_page: to,\n\n } = props.clone();\n\n\n\n html! {\n\n <>\n\n <Link<Route, PageQuery>\n\n classes={classes!(\"pagination-previous\")}\n\n disabled={page==1}\n\n query={Some(PageQuery{page: page - 1})}\n\n to={to.clone()}\n\n >\n\n { \"Previous\" }\n\n </Link<Route, PageQuery>>\n\n <Link<Route, PageQuery>\n\n classes={classes!(\"pagination-next\")}\n\n disabled={page==total_pages}\n", "file_path": "examples/function_router/src/components/pagination.rs", "rank": 84, "score": 225746.39749668652 }, { "content": "fn main() {}\n", "file_path": "packages/yew-macro/tests/hook_attr/hook-lifetime-pass.rs", "rank": 85, "score": 225563.57074218034 }, { "content": "#[::yew::prelude::function_component]\n\nfn Comp(props: &Props) -> ::yew::prelude::Html {\n\n ::yew::prelude::html! {\n\n <p>\n\n { props.a }\n\n </p>\n\n }\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/no-name-default-pass.rs", "rank": 86, "score": 225395.3279720206 }, { "content": "fn strip_braces(block: ExprBlock) -> syn::Result<Expr> {\n\n match block {\n\n ExprBlock {\n\n block: Block { mut stmts, .. 
},\n\n ..\n\n } if stmts.len() == 1 => {\n\n let stmt = stmts.remove(0);\n\n match stmt {\n\n Stmt::Expr(expr) => Ok(expr),\n\n Stmt::Semi(_expr, semi) => Err(syn::Error::new_spanned(\n\n semi,\n\n \"only an expression may be assigned as a property. Consider removing this semicolon\",\n\n )),\n\n _ => Err(syn::Error::new_spanned(\n\n stmt,\n\n \"only an expression may be assigned as a property\",\n\n ))\n\n }\n\n }\n\n block => Ok(Expr::Block(block)),\n\n }\n\n}\n\n\n", "file_path": "packages/yew-macro/src/props/prop.rs", "rank": 87, "score": 224458.07684825838 }, { "content": "#[function_component(Comp)]\n\nfn comp(props: &mut Props) -> Html {\n\n html! {\n\n <p>\n\n { props.a }\n\n </p>\n\n }\n\n}\n\n\n", "file_path": "packages/yew-macro/tests/function_component_attr/mut-ref-props-param-fail.rs", "rank": 88, "score": 223281.86831082933 }, { "content": "#[function_component]\n\npub fn ChessboardCard(props: &Props) -> Html {\n\n let Props { card, on_flip } = props.clone();\n\n let Card { flipped, name, id } = card;\n\n\n\n let get_link_by_cardname = {\n\n match name {\n\n CardName::EightBall => \"public/8-ball.png\",\n\n CardName::Kronos => \"public/kronos.png\",\n\n CardName::BakedPotato => \"public/baked-potato.png\",\n\n CardName::Dinosaur => \"public/dinosaur.png\",\n\n CardName::Rocket => \"public/rocket.png\",\n\n CardName::SkinnyUnicorn => \"public/skinny-unicorn.png\",\n\n CardName::ThatGuy => \"public/that-guy.png\",\n\n CardName::Zeppelin => \"public/zeppelin.png\",\n\n }\n\n .to_string()\n\n };\n\n\n\n let onclick = move |e: MouseEvent| {\n\n e.stop_propagation();\n", "file_path": "examples/function_memory_game/src/components/chessboard_card.rs", "rank": 89, "score": 222667.2605180167 }, { "content": "#[function_component]\n\npub fn ScoreBoard(props: &Props) -> Html {\n\n let Props {\n\n best_score,\n\n unresolved_card_pairs,\n\n } = props.clone();\n\n html! 
{\n\n <div class=\"score-board\">\n\n <Logo />\n\n <GameProgress {unresolved_card_pairs} />\n\n <BestScore {best_score} />\n\n </div>\n\n }\n\n}\n", "file_path": "examples/function_memory_game/src/components/score_board.rs", "rank": 90, "score": 222667.2605180167 }, { "content": "#[function_component]\n\npub fn RenderLink(props: &RenderLinkProps) -> Html {\n\n let RenderLinkProps { to_page, props } = props.clone();\n\n\n\n let Props {\n\n page,\n\n route_to_page,\n\n ..\n\n } = props;\n\n\n\n let is_current_class = if to_page == page { \"is-current\" } else { \"\" };\n\n\n\n html! {\n\n <li>\n\n <Link<Route, PageQuery>\n\n classes={classes!(\"pagination-link\", is_current_class)}\n\n to={route_to_page}\n\n query={Some(PageQuery{page: to_page})}\n\n >\n\n { to_page }\n\n </Link<Route, PageQuery>>\n\n </li>\n\n }\n\n}\n\n\n", "file_path": "examples/function_router/src/components/pagination.rs", "rank": 91, "score": 222667.2605180167 }, { "content": "#[function_component]\n\npub fn RenderLinks(props: &RenderLinksProps) -> Html {\n\n let RenderLinksProps {\n\n range,\n\n len,\n\n max_links,\n\n props,\n\n } = props.clone();\n\n\n\n let mut range = range;\n\n\n\n if len > max_links {\n\n let last_link =\n\n html! {<RenderLink to_page={range.next_back().unwrap()} props={props.clone()} />};\n\n // remove 1 for the ellipsis and 1 for the last link\n\n let links = range\n\n .take(max_links - 2)\n\n .map(|page| html! {<RenderLink to_page={page} props={props.clone()} />});\n\n html! 
{\n\n <>\n\n { for links }\n", "file_path": "examples/function_router/src/components/pagination.rs", "rank": 92, "score": 222667.2605180167 }, { "content": "#[function_component(EntryEdit)]\n\npub fn entry_edit(props: &EntryEditProps) -> Html {\n\n let id = props.entry.id;\n\n\n\n let target_input_value = |e: &Event| {\n\n let input: HtmlInputElement = e.target_unchecked_into();\n\n input.value()\n\n };\n\n\n\n let onblur = {\n\n let edit = props.onedit.clone();\n\n\n\n move |e: FocusEvent| {\n\n let value = target_input_value(&e);\n\n edit.emit((id, value))\n\n }\n\n };\n\n\n\n let onkeypress = {\n\n let edit = props.onedit.clone();\n\n\n", "file_path": "examples/function_todomvc/src/components/entry.rs", "rank": 93, "score": 222667.0133065526 }, { "content": "#[hook]\n\npub fn use_bridge<T, F>(on_output: F) -> UseBridgeHandle<T>\n\nwhere\n\n T: Bridged,\n\n F: Fn(T::Output) + 'static,\n\n{\n\n let on_output = Rc::new(on_output);\n\n\n\n let on_output_clone = on_output.clone();\n\n let on_output_ref = use_mut_ref(move || on_output_clone);\n\n\n\n // Refresh the callback on every render.\n\n {\n\n let mut on_output_ref = on_output_ref.borrow_mut();\n\n *on_output_ref = on_output;\n\n }\n\n\n\n let bridge = use_mut_ref(move || {\n\n T::bridge({\n\n Rc::new(move |output| {\n\n let on_output = on_output_ref.borrow().clone();\n", "file_path": "packages/yew-agent/src/hooks.rs", "rank": 94, "score": 222481.24541829363 }, { "content": "fn main() {}\n", "file_path": "packages/yew-macro/tests/hook_attr/hook-const-generic-pass.rs", "rank": 95, "score": 222280.37641641105 }, { "content": "fn main() {}\n", "file_path": "packages/yew-macro/tests/hook_attr/hook-impl-trait-pass.rs", "rank": 96, "score": 222247.59855429103 }, { "content": "/// A trait that implements a reducer function of a type.\n\npub trait Reducible {\n\n /// The action type of the reducer.\n\n type Action;\n\n\n\n /// The reducer function.\n\n fn reduce(self: Rc<Self>, action: Self::Action) -> 
Rc<Self>;\n\n}\n\n\n", "file_path": "packages/yew/src/functional/hooks/use_reducer.rs", "rank": 97, "score": 220169.74681296054 }, { "content": "#[function_component]\n\npub fn GameProgress(props: &Props) -> Html {\n\n html! {\n\n <div class=\"game-progress\">\n\n <span>{\"Cards not Matched\"}</span>\n\n <h2>{ props.unresolved_card_pairs }</h2>\n\n </div>\n\n }\n\n}\n", "file_path": "examples/function_memory_game/src/components/score_board_progress.rs", "rank": 98, "score": 219698.60726050072 }, { "content": "#[function_component(HeaderInput)]\n\npub fn header_input(props: &HeaderInputProps) -> Html {\n\n let onkeypress = {\n\n let onadd = props.onadd.clone();\n\n\n\n move |e: KeyboardEvent| {\n\n if e.key() == \"Enter\" {\n\n let input: HtmlInputElement = e.target_unchecked_into();\n\n let value = input.value();\n\n\n\n input.set_value(\"\");\n\n onadd.emit(value);\n\n }\n\n }\n\n };\n\n\n\n html! {\n\n <input\n\n class=\"new-todo\"\n\n placeholder=\"What needs to be done?\"\n\n {onkeypress}\n\n />\n\n }\n\n}\n", "file_path": "examples/function_todomvc/src/components/header_input.rs", "rank": 99, "score": 219698.36004903662 } ]
Rust
crates/cargo-platform/src/lib.rs
moxian/cargo
6e1ca924a67dd1ac89c33f294ef26b5c43b89168
use std::fmt; use std::str::FromStr; mod cfg; mod error; pub use cfg::{Cfg, CfgExpr}; pub use error::{ParseError, ParseErrorKind}; #[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)] pub enum Platform { Name(String), Cfg(CfgExpr), } impl Platform { pub fn matches(&self, name: &str, cfg: &[Cfg]) -> bool { match *self { Platform::Name(ref p) => p == name, Platform::Cfg(ref p) => p.matches(cfg), } } fn validate_named_platform(name: &str) -> Result<(), ParseError> { if let Some(ch) = name .chars() .find(|&c| !(c.is_alphanumeric() || c == '_' || c == '-' || c == '.')) { if name.chars().any(|c| c == '(') { return Err(ParseError::new( name, ParseErrorKind::InvalidTarget( "unexpected `(` character, cfg expressions must start with `cfg(`" .to_string(), ), )); } return Err(ParseError::new( name, ParseErrorKind::InvalidTarget(format!( "unexpected character {} in target name", ch )), )); } Ok(()) } pub fn check_cfg_attributes(&self, warnings: &mut Vec<String>) { fn check_cfg_expr(expr: &CfgExpr, warnings: &mut Vec<String>) { match *expr { CfgExpr::Not(ref e) => check_cfg_expr(e, warnings), CfgExpr::All(ref e) | CfgExpr::Any(ref e) => { for e in e { check_cfg_expr(e, warnings); } } CfgExpr::Value(ref e) => match e { Cfg::Name(name) => match name.as_str() { "test" | "debug_assertions" | "proc_macro" => warnings.push(format!( "Found `{}` in `target.'cfg(...)'.dependencies`. \ This value is not supported for selecting dependencies \ and will not work as expected. \ To learn more visit \ https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#platform-specific-dependencies", name )), _ => (), }, Cfg::KeyPair(name, _) => match name.as_str() { "feature" => warnings.push(String::from( "Found `feature = ...` in `target.'cfg(...)'.dependencies`. \ This key is not supported for selecting dependencies \ and will not work as expected. 
\ Use the [features] section instead: \ https://doc.rust-lang.org/cargo/reference/manifest.html#the-features-section" )), _ => (), }, } } } if let Platform::Cfg(cfg) = self { check_cfg_expr(cfg, warnings); } } } impl serde::Serialize for Platform { fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { self.to_string().serialize(s) } } impl<'de> serde::Deserialize<'de> for Platform { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de>, { let s = String::deserialize(deserializer)?; FromStr::from_str(&s).map_err(serde::de::Error::custom) } } impl FromStr for Platform { type Err = ParseError; fn from_str(s: &str) -> Result<Platform, ParseError> { if s.starts_with("cfg(") && s.ends_with(')') { let s = &s[4..s.len() - 1]; s.parse().map(Platform::Cfg) } else { Platform::validate_named_platform(s)?; Ok(Platform::Name(s.to_string())) } } } impl fmt::Display for Platform { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { Platform::Name(ref n) => n.fmt(f), Platform::Cfg(ref e) => write!(f, "cfg({})", e), } } }
use std::fmt; use std::str::FromStr; mod cfg; mod error; pub use cfg::{Cfg, CfgExpr}; pub use error::{ParseError, ParseErrorKind}; #[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Clone, Debug)] pub enum Platform { Name(String), Cfg(CfgExpr), } impl Platform { pub fn matches(&self, name: &str, cfg: &[Cfg]) -> bool { match *self { Platform::Name(ref p) => p == name, Platform::Cfg(ref p) => p.matches(cfg), } } fn validate_named_platform(name: &str) -> Result<(), ParseError> { if let Some(ch) = name .chars() .find(|&c| !(c.is_alphanumeric() || c == '_' || c == '-' || c == '.')) { if name.chars().any(|c| c == '(') { return Err(ParseError::new( name, ParseErrorKind::InvalidTarget( "unexpected `(` character, cfg expressions must start with `cfg(`" .to_string(), ), )); } return
; } Ok(()) } pub fn check_cfg_attributes(&self, warnings: &mut Vec<String>) { fn check_cfg_expr(expr: &CfgExpr, warnings: &mut Vec<String>) { match *expr { CfgExpr::Not(ref e) => check_cfg_expr(e, warnings), CfgExpr::All(ref e) | CfgExpr::Any(ref e) => { for e in e { check_cfg_expr(e, warnings); } } CfgExpr::Value(ref e) => match e { Cfg::Name(name) => match name.as_str() { "test" | "debug_assertions" | "proc_macro" => warnings.push(format!( "Found `{}` in `target.'cfg(...)'.dependencies`. \ This value is not supported for selecting dependencies \ and will not work as expected. \ To learn more visit \ https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#platform-specific-dependencies", name )), _ => (), }, Cfg::KeyPair(name, _) => match name.as_str() { "feature" => warnings.push(String::from( "Found `feature = ...` in `target.'cfg(...)'.dependencies`. \ This key is not supported for selecting dependencies \ and will not work as expected. \ Use the [features] section instead: \ https://doc.rust-lang.org/cargo/reference/manifest.html#the-features-section" )), _ => (), }, } } } if let Platform::Cfg(cfg) = self { check_cfg_expr(cfg, warnings); } } } impl serde::Serialize for Platform { fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { self.to_string().serialize(s) } } impl<'de> serde::Deserialize<'de> for Platform { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de>, { let s = String::deserialize(deserializer)?; FromStr::from_str(&s).map_err(serde::de::Error::custom) } } impl FromStr for Platform { type Err = ParseError; fn from_str(s: &str) -> Result<Platform, ParseError> { if s.starts_with("cfg(") && s.ends_with(')') { let s = &s[4..s.len() - 1]; s.parse().map(Platform::Cfg) } else { Platform::validate_named_platform(s)?; Ok(Platform::Name(s.to_string())) } } } impl fmt::Display for Platform { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { 
Platform::Name(ref n) => n.fmt(f), Platform::Cfg(ref e) => write!(f, "cfg({})", e), } } }
Err(ParseError::new( name, ParseErrorKind::InvalidTarget(format!( "unexpected character {} in target name", ch )), ))
call_expression
[ { "content": "/// Check the base requirements for a package name.\n\n///\n\n/// This can be used for other things than package names, to enforce some\n\n/// level of sanity. Note that package names have other restrictions\n\n/// elsewhere. `cargo new` has a few restrictions, such as checking for\n\n/// reserved names. crates.io has even more restrictions.\n\npub fn validate_package_name(name: &str, what: &str, help: &str) -> CargoResult<()> {\n\n if let Some(ch) = name\n\n .chars()\n\n .find(|ch| !ch.is_alphanumeric() && *ch != '_' && *ch != '-')\n\n {\n\n failure::bail!(\"Invalid character `{}` in {}: `{}`{}\", ch, what, name, help);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cargo/util/mod.rs", "rank": 0, "score": 405935.48464160395 }, { "content": "pub fn is_bad_artifact_name(name: &str) -> bool {\n\n [\"deps\", \"examples\", \"build\", \"incremental\"]\n\n .iter()\n\n .any(|&reserved| reserved == name)\n\n}\n\n\n\nimpl Layout {\n\n /// Calculate the paths for build output, lock the build directory, and return as a Layout.\n\n ///\n\n /// This function will block if the directory is already locked.\n\n ///\n\n /// `dest` should be the final artifact directory name. 
Currently either\n\n /// \"debug\" or \"release\".\n\n pub fn new(\n\n ws: &Workspace<'_>,\n\n target: Option<CompileTarget>,\n\n dest: &str,\n\n ) -> CargoResult<Layout> {\n\n let mut root = ws.target_dir();\n\n if let Some(target) = target {\n", "file_path": "src/cargo/core/compiler/layout.rs", "rank": 1, "score": 398484.7500896567 }, { "content": "fn is_ident_start(ch: char) -> bool {\n\n ch == '_' || ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z')\n\n}\n\n\n", "file_path": "crates/cargo-platform/src/cfg.rs", "rank": 2, "score": 382169.2365675988 }, { "content": "fn check_has_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) -> bool {\n\n path.as_ref().join(\"bin\").join(exe(name)).is_file()\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/install.rs", "rank": 3, "score": 341284.5484393853 }, { "content": "/// Used by `cargo install` tests to assert an executable binary\n\n/// has been installed. Example usage:\n\n///\n\n/// assert_has_installed_exe(cargo_home(), \"foo\");\n\npub fn assert_has_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) {\n\n assert!(check_has_installed_exe(path, name));\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/install.rs", "rank": 4, "score": 340851.9775917809 }, { "content": "pub fn assert_has_not_installed_exe<P: AsRef<Path>>(path: P, name: &'static str) {\n\n assert!(!check_has_installed_exe(path, name));\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/install.rs", "rank": 5, "score": 340846.4542508173 }, { "content": "/// Compares a line with an expected pattern.\n\n/// - Use `[..]` as a wildcard to match 0 or more characters on the same line\n\n/// (similar to `.*` in a regex).\n\n/// - Use `[EXE]` to optionally add `.exe` on Windows (empty string on other\n\n/// platforms).\n\n/// - There is a wide range of macros (such as `[COMPILING]` or `[WARNING]`)\n\n/// to match cargo's \"status\" output and allows you to ignore the alignment.\n\n/// See `substitute_macros` for a complete 
list of macros.\n\n/// - `[ROOT]` the path to the test directory's root\n\n/// - `[CWD]` is the working directory of the process that was run.\n\npub fn lines_match(expected: &str, mut actual: &str) -> bool {\n\n let expected = substitute_macros(expected);\n\n for (i, part) in expected.split(\"[..]\").enumerate() {\n\n match actual.find(part) {\n\n Some(j) => {\n\n if i == 0 && j != 0 {\n\n return false;\n\n }\n\n actual = &actual[j + part.len()..];\n\n }\n\n None => return false,\n\n }\n\n }\n\n actual.is_empty() || expected.ends_with(\"[..]\")\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 6, "score": 340177.26433964865 }, { "content": "pub fn values(args: &ArgMatches<'_>, name: &str) -> Vec<String> {\n\n args._values_of(name)\n\n}\n\n\n", "file_path": "src/cargo/util/command_prelude.rs", "rank": 7, "score": 337280.30963793315 }, { "content": "fn is_ident_rest(ch: char) -> bool {\n\n is_ident_start(ch) || ('0' <= ch && ch <= '9')\n\n}\n\n\n\nimpl<'a> Token<'a> {\n\n fn classify(&self) -> &'static str {\n\n match *self {\n\n Token::LeftParen => \"`(`\",\n\n Token::RightParen => \"`)`\",\n\n Token::Ident(..) => \"an identifier\",\n\n Token::Comma => \"`,`\",\n\n Token::Equals => \"`=`\",\n\n Token::String(..) 
=> \"a string\",\n\n }\n\n }\n\n}\n", "file_path": "crates/cargo-platform/src/cfg.rs", "rank": 8, "score": 334960.2107556708 }, { "content": "pub fn dep_req_kind(name: &str, req: &str, kind: Kind, public: bool) -> Dependency {\n\n let mut dep = dep_req(name, req);\n\n dep.set_kind(kind);\n\n dep.set_public(public);\n\n dep\n\n}\n\n\n", "file_path": "crates/resolver-tests/src/lib.rs", "rank": 9, "score": 334501.9905562325 }, { "content": "pub fn values_os(args: &ArgMatches<'_>, name: &str) -> Vec<OsString> {\n\n args._values_of_os(name)\n\n}\n\n\n\n#[derive(PartialEq, PartialOrd, Eq, Ord)]\n\npub enum CommandInfo {\n\n BuiltIn { name: String, about: Option<String> },\n\n External { name: String, path: PathBuf },\n\n}\n\n\n\nimpl CommandInfo {\n\n pub fn name(&self) -> &str {\n\n match self {\n\n CommandInfo::BuiltIn { name, .. } => name,\n\n CommandInfo::External { name, .. } => name,\n\n }\n\n }\n\n}\n", "file_path": "src/cargo/util/command_prelude.rs", "rank": 10, "score": 327978.36968656245 }, { "content": "pub fn write_if_changed<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> CargoResult<()> {\n\n (|| -> CargoResult<()> {\n\n let contents = contents.as_ref();\n\n let mut f = OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .create(true)\n\n .open(&path)?;\n\n let mut orig = Vec::new();\n\n f.read_to_end(&mut orig)?;\n\n if orig != contents {\n\n f.set_len(0)?;\n\n f.seek(io::SeekFrom::Start(0))?;\n\n f.write_all(contents)?;\n\n }\n\n Ok(())\n\n })()\n\n .chain_err(|| format!(\"failed to write `{}`\", path.as_ref().display()))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cargo/util/paths.rs", "rank": 11, "score": 325828.3239179555 }, { "content": "/// Variant of `lines_match` that applies normalization to the strings.\n\npub fn normalized_lines_match(expected: &str, actual: &str, cwd: Option<&Path>) -> bool {\n\n let expected = normalize_matcher(expected, cwd);\n\n let actual = normalize_matcher(actual, cwd);\n\n lines_match(&expected, 
&actual)\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 12, "score": 315724.04053499294 }, { "content": "pub fn dep(name: &str) -> Dependency {\n\n dep_req(name, \"*\")\n\n}\n", "file_path": "crates/resolver-tests/src/lib.rs", "rank": 13, "score": 313791.9152092099 }, { "content": "pub fn dep_loc(name: &str, location: &str) -> Dependency {\n\n let url = location.into_url().unwrap();\n\n let master = GitReference::Branch(\"master\".to_string());\n\n let source_id = SourceId::for_git(&url, master).unwrap();\n\n Dependency::parse_no_deprecated(name, Some(\"1.0.0\"), source_id).unwrap()\n\n}\n", "file_path": "crates/resolver-tests/src/lib.rs", "rank": 14, "score": 312555.0333744931 }, { "content": "pub fn pkg_loc(name: &str, loc: &str) -> Summary {\n\n let link = if name.ends_with(\"-sys\") {\n\n Some(name)\n\n } else {\n\n None\n\n };\n\n Summary::new(\n\n pkg_id_loc(name, loc),\n\n Vec::new(),\n\n &BTreeMap::<String, Vec<String>>::new(),\n\n link,\n\n false,\n\n )\n\n .unwrap()\n\n}\n\n\n", "file_path": "crates/resolver-tests/src/lib.rs", "rank": 15, "score": 312555.03337449307 }, { "content": "pub fn dep_req(name: &str, req: &str) -> Dependency {\n\n Dependency::parse_no_deprecated(name, Some(req), registry_loc()).unwrap()\n\n}\n", "file_path": "crates/resolver-tests/src/lib.rs", "rank": 16, "score": 312555.0333744931 }, { "content": "/// Whether or not this running in a Continuous Integration environment.\n\npub fn is_ci() -> bool {\n\n std::env::var(\"CI\").is_ok() || std::env::var(\"TF_BUILD\").is_ok()\n\n}\n", "file_path": "src/cargo/util/mod.rs", "rank": 17, "score": 310709.3915052826 }, { "content": "pub fn exe(name: &str) -> String {\n\n format!(\"{}{}\", name, EXE_SUFFIX)\n\n}\n", "file_path": "crates/cargo-test-support/src/install.rs", "rank": 18, "score": 309203.0714506065 }, { "content": "pub fn basic_manifest(name: &str, version: &str) -> String {\n\n format!(\n\n r#\"\n\n [package]\n\n name = \"{}\"\n\n version = \"{}\"\n\n 
authors = []\n\n \"#,\n\n name, version\n\n )\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 19, "score": 308685.98894586554 }, { "content": "/// Get the filename for a library.\n\n///\n\n/// `kind` should be one of: \"lib\", \"rlib\", \"staticlib\", \"dylib\", \"proc-macro\"\n\n///\n\n/// For example, dynamic library named \"foo\" would return:\n\n/// - macOS: \"libfoo.dylib\"\n\n/// - Windows: \"foo.dll\"\n\n/// - Unix: \"libfoo.so\"\n\npub fn get_lib_filename(name: &str, kind: &str) -> String {\n\n let prefix = get_lib_prefix(kind);\n\n let extension = get_lib_extension(kind);\n\n format!(\"{}{}.{}\", prefix, name, extension)\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/paths.rs", "rank": 20, "score": 304987.5989000721 }, { "content": "pub fn pkg_id(name: &str) -> PackageId {\n\n PackageId::new(name, \"1.0.0\", registry_loc()).unwrap()\n\n}\n\n\n", "file_path": "crates/resolver-tests/src/lib.rs", "rank": 21, "score": 304835.0436775106 }, { "content": "pub fn generate_url(name: &str) -> Url {\n\n Url::from_file_path(generate_path(name)).ok().unwrap()\n\n}\n", "file_path": "crates/cargo-test-support/src/registry.rs", "rank": 22, "score": 304835.0436775106 }, { "content": "pub fn assert_match(expected: &str, actual: &str) {\n\n if !normalized_lines_match(expected, actual, None) {\n\n panic!(\n\n \"Did not find expected:\\n{}\\nActual:\\n{}\\n\",\n\n expected, actual\n\n );\n\n }\n\n}\n\n\n", "file_path": "tests/testsuite/config.rs", "rank": 23, "score": 301073.20040525624 }, { "content": "pub fn basic_lib_manifest(name: &str) -> String {\n\n format!(\n\n r#\"\n\n [package]\n\n\n\n name = \"{}\"\n\n version = \"0.5.0\"\n\n authors = [\"[email protected]\"]\n\n\n\n [lib]\n\n\n\n name = \"{}\"\n\n \"#,\n\n name, name\n\n )\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 24, "score": 300672.26780588203 }, { "content": "pub fn generate_path(name: &str) -> PathBuf {\n\n paths::root().join(name)\n\n}\n", 
"file_path": "crates/cargo-test-support/src/registry.rs", "rank": 25, "score": 300672.26780588203 }, { "content": "pub fn basic_bin_manifest(name: &str) -> String {\n\n format!(\n\n r#\"\n\n [package]\n\n\n\n name = \"{}\"\n\n version = \"0.5.0\"\n\n authors = [\"[email protected]\"]\n\n\n\n [[bin]]\n\n\n\n name = \"{}\"\n\n \"#,\n\n name, name\n\n )\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 26, "score": 300672.26780588203 }, { "content": "// Generates a project layout inside our fake home dir\n\npub fn project_in_home(name: &str) -> ProjectBuilder {\n\n ProjectBuilder::new(paths::home().join(name))\n\n}\n\n\n\n// === Helpers ===\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 27, "score": 300672.26780588203 }, { "content": "pub fn subcommand(name: &'static str) -> App {\n\n SubCommand::with_name(name).settings(&[\n\n AppSettings::UnifiedHelpMessage,\n\n AppSettings::DeriveDisplayOrder,\n\n AppSettings::DontCollapseArgsInUsage,\n\n ])\n\n}\n\n\n\n// Determines whether or not to gate `--profile` as unstable when resolving it.\n\npub enum ProfileChecking {\n\n Checked,\n\n Unchecked,\n\n}\n\n\n", "file_path": "src/cargo/util/command_prelude.rs", "rank": 28, "score": 300546.71478495706 }, { "content": "pub fn assert_error<E: Borrow<failure::Error>>(error: E, msgs: &str) {\n\n let causes = error\n\n .borrow()\n\n .iter_chain()\n\n .map(|e| e.to_string())\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\");\n\n assert_match(msgs, &causes);\n\n}\n\n\n", "file_path": "tests/testsuite/config.rs", "rank": 29, "score": 297993.8853458604 }, { "content": "pub fn generate_alt_dl_url(name: &str) -> String {\n\n let base = Url::from_file_path(generate_path(name)).ok().unwrap();\n\n format!(\"{}/{{crate}}/{{version}}/{{crate}}-{{version}}.crate\", base)\n\n}\n\n\n\n/// A builder for creating a new package in a registry.\n\n///\n\n/// This uses \"source replacement\" using an automatically generated\n\n/// `.cargo/config` file to ensure 
that dependencies will use these packages\n\n/// instead of contacting crates.io. See `source-replacement.md` for more\n\n/// details on how source replacement works.\n\n///\n\n/// Call `publish` to finalize and create the package.\n\n///\n\n/// If no files are specified, an empty `lib.rs` file is automatically created.\n\n///\n\n/// The `Cargo.toml` file is automatically generated based on the methods\n\n/// called on `Package` (for example, calling `dep()` will add to the\n\n/// `[dependencies]` automatically). You may also specify a `Cargo.toml` file\n\n/// to override the generated one.\n", "file_path": "crates/cargo-test-support/src/registry.rs", "rank": 30, "score": 296700.6088750446 }, { "content": "pub fn create_dir_all(p: impl AsRef<Path>) -> CargoResult<()> {\n\n _create_dir_all(p.as_ref())\n\n}\n\n\n", "file_path": "src/cargo/util/paths.rs", "rank": 31, "score": 288868.6138812895 }, { "content": "pub fn remove_file<P: AsRef<Path>>(p: P) -> CargoResult<()> {\n\n _remove_file(p.as_ref())\n\n}\n\n\n", "file_path": "src/cargo/util/paths.rs", "rank": 32, "score": 285769.7746113171 }, { "content": "pub fn remove_dir<P: AsRef<Path>>(p: P) -> CargoResult<()> {\n\n _remove_dir(p.as_ref())\n\n}\n\n\n", "file_path": "src/cargo/util/paths.rs", "rank": 33, "score": 285769.7746113171 }, { "content": "pub fn remove_dir_all<P: AsRef<Path>>(p: P) -> CargoResult<()> {\n\n _remove_dir_all(p.as_ref())\n\n}\n\n\n", "file_path": "src/cargo/util/paths.rs", "rank": 34, "score": 285769.7746113171 }, { "content": "pub fn names<P: ToPkgId>(names: &[P]) -> Vec<PackageId> {\n\n names.iter().map(|name| name.to_pkgid()).collect()\n\n}\n\n\n", "file_path": "crates/resolver-tests/src/lib.rs", "rank": 35, "score": 285242.36836824927 }, { "content": "pub fn dep_kind(name: &str, kind: Kind) -> Dependency {\n\n dep(name).set_kind(kind).clone()\n\n}\n\n\n", "file_path": "crates/resolver-tests/src/lib.rs", "rank": 36, "score": 284458.9743628629 }, { "content": "/// Create a new tag in the git 
repository.\n\npub fn tag(repo: &git2::Repository, name: &str) {\n\n let head = repo.head().unwrap().target().unwrap();\n\n t!(repo.tag(\n\n name,\n\n &t!(repo.find_object(head, None)),\n\n &t!(repo.signature()),\n\n \"make a new tag\",\n\n false\n\n ));\n\n}\n", "file_path": "crates/cargo-test-support/src/git.rs", "rank": 37, "score": 284458.9743628629 }, { "content": "pub fn write_config_at(path: impl AsRef<Path>, contents: &str) {\n\n let path = paths::root().join(path.as_ref());\n\n fs::create_dir_all(path.parent().unwrap()).unwrap();\n\n fs::write(path, contents).unwrap();\n\n}\n\n\n", "file_path": "tests/testsuite/config.rs", "rank": 38, "score": 282100.15883373824 }, { "content": "pub fn opt(name: &'static str, help: &'static str) -> Arg<'static, 'static> {\n\n Arg::with_name(name).long(name).help(help)\n\n}\n\n\n", "file_path": "src/cargo/util/command_prelude.rs", "rank": 39, "score": 278154.8825755961 }, { "content": "pub fn fix_maybe_exec_rustc() -> CargoResult<bool> {\n\n let lock_addr = match env::var(FIX_ENV) {\n\n Ok(s) => s,\n\n Err(_) => return Ok(false),\n\n };\n\n\n\n let args = FixArgs::get();\n\n trace!(\"cargo-fix as rustc got file {:?}\", args.file);\n\n let rustc = args.rustc.as_ref().expect(\"fix wrapper rustc was not set\");\n\n\n\n let mut fixes = FixedCrate::default();\n\n if let Some(path) = &args.file {\n\n trace!(\"start rustfixing {:?}\", path);\n\n fixes = rustfix_crate(&lock_addr, rustc.as_ref(), path, &args)?;\n\n }\n\n\n\n // Ok now we have our final goal of testing out the changes that we applied.\n\n // If these changes went awry and actually started to cause the crate to\n\n // *stop* compiling then we want to back them out and continue to print\n\n // warnings to the user.\n", "file_path": "src/cargo/ops/fix.rs", "rank": 40, "score": 278131.469644334 }, { "content": "pub fn loc_names(names: &[(&'static str, &'static str)]) -> Vec<PackageId> {\n\n names\n\n .iter()\n\n .map(|&(name, loc)| pkg_id_loc(name, loc))\n\n 
.collect()\n\n}\n\n\n\n/// By default `Summary` and `Dependency` have a very verbose `Debug` representation.\n\n/// This replaces with a representation that uses constructors from this file.\n\n///\n\n/// If `registry_strategy` is improved to modify more fields\n\n/// then this needs to update to display the corresponding constructor.\n\npub struct PrettyPrintRegistry(pub Vec<Summary>);\n\n\n\nimpl fmt::Debug for PrettyPrintRegistry {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"vec![\")?;\n\n for s in &self.0 {\n\n if s.dependencies().is_empty() {\n\n write!(f, \"pkg!((\\\"{}\\\", \\\"{}\\\")),\", s.name(), s.version())?;\n", "file_path": "crates/resolver-tests/src/lib.rs", "rank": 41, "score": 277010.1781166265 }, { "content": "pub fn is_simple_exit_code(code: i32) -> bool {\n\n // Typical unix exit codes are 0 to 127.\n\n // Windows doesn't have anything \"typical\", and is a\n\n // 32-bit number (which appears signed here, but is really\n\n // unsigned). However, most of the interesting NTSTATUS\n\n // codes are very large. 
This is just a rough\n\n // approximation of which codes are \"normal\" and which\n\n // ones are abnormal termination.\n\n code >= 0 && code <= 127\n\n}\n\n\n", "file_path": "src/cargo/util/errors.rs", "rank": 42, "score": 275189.0482812647 }, { "content": "/// Create a new git repository with a project.\n\npub fn new<F>(name: &str, callback: F) -> Project\n\nwhere\n\n F: FnOnce(ProjectBuilder) -> ProjectBuilder,\n\n{\n\n new_repo(name, callback).0\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/git.rs", "rank": 43, "score": 273832.85383104417 }, { "content": "/// Checks the result of a crate publish.\n\npub fn validate_upload(expected_json: &str, expected_crate_name: &str, expected_files: &[&str]) {\n\n let new_path = registry::api_path().join(\"api/v1/crates/new\");\n\n _validate_upload(\n\n &new_path,\n\n expected_json,\n\n expected_crate_name,\n\n expected_files,\n\n &[],\n\n );\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/publish.rs", "rank": 44, "score": 272525.26033760386 }, { "content": "fn check_name(name: &str, opts: &NewOptions) -> CargoResult<()> {\n\n // If --name is already used to override, no point in suggesting it\n\n // again as a fix.\n\n let name_help = match opts.name {\n\n Some(_) => \"\",\n\n None => \"\\nuse --name to override crate name\",\n\n };\n\n\n\n // Ban keywords + test list found at\n\n // https://doc.rust-lang.org/grammar.html#keywords\n\n let blacklist = [\n\n \"abstract\", \"alignof\", \"as\", \"become\", \"box\", \"break\", \"const\", \"continue\", \"crate\", \"do\",\n\n \"else\", \"enum\", \"extern\", \"false\", \"final\", \"fn\", \"for\", \"if\", \"impl\", \"in\", \"let\", \"loop\",\n\n \"macro\", \"match\", \"mod\", \"move\", \"mut\", \"offsetof\", \"override\", \"priv\", \"proc\", \"pub\",\n\n \"pure\", \"ref\", \"return\", \"self\", \"sizeof\", \"static\", \"struct\", \"super\", \"test\", \"trait\",\n\n \"true\", \"type\", \"typeof\", \"unsafe\", \"unsized\", \"use\", \"virtual\", \"where\", \"while\", 
\"yield\",\n\n ];\n\n if blacklist.contains(&name) || (opts.kind.is_bin() && compiler::is_bad_artifact_name(name)) {\n\n failure::bail!(\n\n \"The name `{}` cannot be used as a crate name{}\",\n", "file_path": "src/cargo/ops/cargo_new.rs", "rank": 45, "score": 271418.75435946754 }, { "content": "fn pkg(name: &str, vers: &str) {\n\n Package::new(name, vers)\n\n .file(\"src/lib.rs\", \"\")\n\n .file(\n\n \"src/main.rs\",\n\n &format!(\"extern crate {}; fn main() {{}}\", name),\n\n )\n\n .publish();\n\n}\n\n\n", "file_path": "tests/testsuite/install.rs", "rank": 46, "score": 267694.15599476814 }, { "content": "fn pkg(name: &str, vers: &str) {\n\n Package::new(name, vers)\n\n .file(\"src/main.rs\", \"fn main() {{}}\")\n\n .publish();\n\n}\n\n\n", "file_path": "tests/testsuite/concurrent.rs", "rank": 47, "score": 267694.15599476814 }, { "content": "pub fn display_causes(error: &Error) -> String {\n\n error\n\n .iter_chain()\n\n .map(|e| e.to_string())\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\\nCaused by:\\n \")\n\n}\n", "file_path": "src/cargo/util/errors.rs", "rank": 48, "score": 267308.58654153615 }, { "content": "pub fn save_credentials(cfg: &Config, token: String, registry: Option<String>) -> CargoResult<()> {\n\n // If 'credentials.toml' exists, we should write to that, otherwise\n\n // use the legacy 'credentials'. There's no need to print the warning\n\n // here, because it would already be printed at load time.\n\n let home_path = cfg.home_path.clone().into_path_unlocked();\n\n let filename = match cfg.get_file_path(&home_path, \"credentials\", false)? 
{\n\n Some(path) => match path.file_name() {\n\n Some(filename) => Path::new(filename).to_owned(),\n\n None => Path::new(\"credentials\").to_owned(),\n\n },\n\n None => Path::new(\"credentials\").to_owned(),\n\n };\n\n\n\n let mut file = {\n\n cfg.home_path.create_dir()?;\n\n cfg.home_path\n\n .open_rw(filename, cfg, \"credentials' config file\")?\n\n };\n\n\n\n let (key, mut value) = {\n", "file_path": "src/cargo/util/config/mod.rs", "rank": 49, "score": 266601.3472777948 }, { "content": "fn bad<T>(s: &str, err: &str)\n\nwhere\n\n T: FromStr + fmt::Display,\n\n T::Err: fmt::Display,\n\n{\n\n let e = match T::from_str(s) {\n\n Ok(cfg) => panic!(\"expected `{}` to not parse but got {}\", s, cfg),\n\n Err(e) => e.to_string(),\n\n };\n\n assert!(\n\n e.contains(err),\n\n \"when parsing `{}`,\\n\\\"{}\\\" not contained \\\n\n inside: {}\",\n\n s,\n\n err,\n\n e\n\n );\n\n}\n\n\n", "file_path": "crates/cargo-platform/tests/test_cfg.rs", "rank": 50, "score": 265227.9949610041 }, { "content": "// Helper for publishing a package.\n\nfn pkg(name: &str, vers: &str) {\n\n Package::new(name, vers)\n\n .file(\n\n \"src/main.rs\",\n\n r#\"fn main() { println!(\"{}\", env!(\"CARGO_PKG_VERSION\")) }\"#,\n\n )\n\n .publish();\n\n}\n\n\n", "file_path": "tests/testsuite/install_upgrade.rs", "rank": 51, "score": 264119.04974536784 }, { "content": "pub fn parse(toml: &str, file: &Path, config: &Config) -> CargoResult<toml::Value> {\n\n let first_error = match toml.parse() {\n\n Ok(ret) => return Ok(ret),\n\n Err(e) => e,\n\n };\n\n\n\n let mut second_parser = toml::de::Deserializer::new(toml);\n\n second_parser.set_require_newline_after_table(false);\n\n if let Ok(ret) = toml::Value::deserialize(&mut second_parser) {\n\n let msg = format!(\n\n \"\\\n\nTOML file found which contains invalid syntax and will soon not parse\n\nat `{}`.\n\n\n\nThe TOML spec requires newlines after table definitions (e.g., `[a] b = 1` is\n\ninvalid), but this file has a table header which does not have a 
newline after\n\nit. A newline needs to be added and this warning will soon become a hard error\n\nin the future.\",\n\n file.display()\n\n );\n", "file_path": "src/cargo/util/toml/mod.rs", "rank": 52, "score": 263063.8966456015 }, { "content": "fn get_cfgs() -> Vec<Cfg> {\n\n let output = Command::new(\"rustc\")\n\n .arg(\"--print=cfg\")\n\n .output()\n\n .expect(\"rustc failed to run\");\n\n let stdout = String::from_utf8(output.stdout).unwrap();\n\n stdout\n\n .lines()\n\n .map(|line| Cfg::from_str(line).unwrap())\n\n .collect()\n\n}\n", "file_path": "crates/cargo-platform/examples/matches.rs", "rank": 53, "score": 261145.97777576523 }, { "content": "fn set_not_readonly(p: &Path) -> io::Result<bool> {\n\n let mut perms = p.metadata()?.permissions();\n\n if !perms.readonly() {\n\n return Ok(false);\n\n }\n\n perms.set_readonly(false);\n\n fs::set_permissions(p, perms)?;\n\n Ok(true)\n\n}\n\n\n", "file_path": "src/cargo/util/paths.rs", "rank": 54, "score": 258732.49349620275 }, { "content": "#[cargo_test]\n\nfn error_from_deep_recursion() -> Result<(), fmt::Error> {\n\n let mut big_macro = String::new();\n\n writeln!(big_macro, \"macro_rules! m {{\")?;\n\n for i in 0..130 {\n\n writeln!(big_macro, \"({}) => {{ m!({}); }};\", i, i + 1)?;\n\n }\n\n writeln!(big_macro, \"}}\")?;\n\n writeln!(big_macro, \"m!(0);\")?;\n\n\n\n let p = project().file(\"src/lib.rs\", &big_macro).build();\n\n p.cargo(\"check --message-format=json\")\n\n .with_status(101)\n\n .with_stdout_contains(\n\n \"[..]\\\"message\\\":\\\"recursion limit reached while expanding the macro `m`\\\"[..]\",\n\n )\n\n .run();\n\n\n\n Ok(())\n\n}\n", "file_path": "tests/testsuite/check.rs", "rank": 55, "score": 258257.20260392653 }, { "content": "/// Check that the given package name/version has the following bins listed in\n\n/// the trackers. 
Also verifies that both trackers are in sync and valid.\n\n/// Pass in an empty `bins` list to assert that the package is *not* installed.\n\nfn validate_trackers(name: &str, version: &str, bins: &[&str]) {\n\n let v1 = load_crates1();\n\n let v1_table = v1.get(\"v1\").unwrap().as_table().unwrap();\n\n let v2 = load_crates2();\n\n let v2_table = v2[\"installs\"].as_object().unwrap();\n\n assert_eq!(v1_table.len(), v2_table.len());\n\n // Convert `bins` to a BTreeSet.\n\n let bins: BTreeSet<String> = bins\n\n .iter()\n\n .map(|b| format!(\"{}{}\", b, env::consts::EXE_SUFFIX))\n\n .collect();\n\n // Check every entry matches between v1 and v2.\n\n for (pkg_id_str, v1_bins) in v1_table {\n\n let pkg_id: PackageId = toml::Value::from(pkg_id_str.to_string())\n\n .try_into()\n\n .unwrap();\n\n let v1_bins: BTreeSet<String> = v1_bins\n\n .as_array()\n\n .unwrap()\n\n .iter()\n", "file_path": "tests/testsuite/install_upgrade.rs", "rank": 56, "score": 257198.88730570461 }, { "content": "pub fn needs_custom_http_transport(config: &Config) -> CargoResult<bool> {\n\n Ok(http_proxy_exists(config)?\n\n || *config.http_config()? != Default::default()\n\n || env::var_os(\"HTTP_TIMEOUT\").is_some())\n\n}\n\n\n", "file_path": "src/cargo/ops/registry.rs", "rank": 57, "score": 256310.8254574777 }, { "content": "// Several test fail on windows if the user does not have permission to\n\n// create symlinks (the `SeCreateSymbolicLinkPrivilege`). Instead of\n\n// disabling these test on Windows, use this function to test whether we\n\n// have permission, and return otherwise. 
This way, we still don't run these\n\n// tests most of the time, but at least we do if the user has the right\n\n// permissions.\n\n// This function is derived from libstd fs tests.\n\npub fn got_symlink_permission() -> bool {\n\n if cfg!(unix) {\n\n return true;\n\n }\n\n let link = paths::root().join(\"some_hopefully_unique_link_name\");\n\n let target = paths::root().join(\"nonexisting_target\");\n\n\n\n match symlink_file(&target, &link) {\n\n Ok(_) => true,\n\n // ERROR_PRIVILEGE_NOT_HELD = 1314\n\n Err(ref err) if err.raw_os_error() == Some(1314) => false,\n\n Err(_) => true,\n\n }\n\n}\n\n\n", "file_path": "tests/testsuite/config.rs", "rank": 58, "score": 255973.1484192899 }, { "content": "/// Resolves all dependencies for a package using an optional previous instance.\n\n/// of resolve to guide the resolution process.\n\n///\n\n/// This also takes an optional hash set, `to_avoid`, which is a list of package\n\n/// IDs that should be avoided when consulting the previous instance of resolve\n\n/// (often used in pairings with updates).\n\n///\n\n/// The previous resolve normally comes from a lock file. This function does not\n\n/// read or write lock files from the filesystem.\n\n///\n\n/// `specs` may be empty, which indicates it should resolve all workspace\n\n/// members. 
In this case, `opts.all_features` must be `true`.\n\n///\n\n/// If `register_patches` is true, then entries from the `[patch]` table in\n\n/// the manifest will be added to the given `PackageRegistry`.\n\npub fn resolve_with_previous<'cfg>(\n\n registry: &mut PackageRegistry<'cfg>,\n\n ws: &Workspace<'cfg>,\n\n opts: ResolveOpts,\n\n previous: Option<&Resolve>,\n\n to_avoid: Option<&HashSet<PackageId>>,\n\n specs: &[PackageIdSpec],\n\n register_patches: bool,\n\n) -> CargoResult<Resolve> {\n\n assert!(\n\n !specs.is_empty() || opts.all_features,\n\n \"no specs requires all_features\"\n\n );\n\n\n\n // We only want one Cargo at a time resolving a crate graph since this can\n\n // involve a lot of frobbing of the global caches.\n\n let _lock = ws.config().acquire_package_cache_lock()?;\n\n\n\n // Here we place an artificial limitation that all non-registry sources\n\n // cannot be locked at more than one revision. This means that if a Git\n", "file_path": "src/cargo/ops/resolve.rs", "rank": 59, "score": 255917.28811455012 }, { "content": "/// This is a little complicated.\n\n/// This should return false if:\n\n/// - this is an artifact of the rustc distribution process for \"stable\" or for \"beta\"\n\n/// - this is an `#[test]` that does not opt in with `enable_nightly_features`\n\n/// - this is a integration test that uses `ProcessBuilder`\n\n/// that does not opt in with `masquerade_as_nightly_cargo`\n\n/// This should return true if:\n\n/// - this is an artifact of the rustc distribution process for \"nightly\"\n\n/// - this is being used in the rustc distribution process internally\n\n/// - this is a cargo executable that was built from source\n\n/// - this is an `#[test]` that called `enable_nightly_features`\n\n/// - this is a integration test that uses `ProcessBuilder`\n\n/// that called `masquerade_as_nightly_cargo`\n\npub fn nightly_features_allowed() -> bool {\n\n if ENABLE_NIGHTLY_FEATURES.with(|c| c.get()) {\n\n return true;\n\n }\n\n match 
&channel()[..] {\n\n \"nightly\" | \"dev\" => NIGHTLY_FEATURES_ALLOWED.with(|c| c.get()),\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/cargo/core/features.rs", "rank": 60, "score": 252177.4115826965 }, { "content": "pub fn is_nightly() -> bool {\n\n env::var(\"CARGO_TEST_DISABLE_NIGHTLY\").is_err()\n\n && RUSTC\n\n .with(|r| r.verbose_version.contains(\"-nightly\") || r.verbose_version.contains(\"-dev\"))\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 61, "score": 252165.68030294465 }, { "content": "/// Create a new git repository with a project.\n\n/// Returns both the Project and the git Repository.\n\npub fn new_repo<F>(name: &str, callback: F) -> (Project, git2::Repository)\n\nwhere\n\n F: FnOnce(ProjectBuilder) -> ProjectBuilder,\n\n{\n\n let mut git_project = project().at(name);\n\n git_project = callback(git_project);\n\n let git_project = git_project.build();\n\n\n\n let repo = init(&git_project.root());\n\n add(&repo);\n\n commit(&repo);\n\n (git_project, repo)\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/git.rs", "rank": 62, "score": 251768.32347783542 }, { "content": "pub fn pkgid(ws: &Workspace<'_>, spec: Option<&str>) -> CargoResult<PackageIdSpec> {\n\n let resolve = match ops::load_pkg_lockfile(ws)? 
{\n\n Some(resolve) => resolve,\n\n None => failure::bail!(\"a Cargo.lock must exist for this command\"),\n\n };\n\n\n\n let pkgid = match spec {\n\n Some(spec) => PackageIdSpec::query_str(spec, resolve.iter())?,\n\n None => ws.current()?.package_id(),\n\n };\n\n Ok(PackageIdSpec::from_package_id(pkgid))\n\n}\n", "file_path": "src/cargo/ops/cargo_pkgid.rs", "rank": 63, "score": 251765.42149537176 }, { "content": "/// Hardlink (file) or symlink (dir) src to dst if possible, otherwise copy it.\n\n///\n\n/// If the destination already exists, it is removed before linking.\n\npub fn link_or_copy(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> CargoResult<()> {\n\n let src = src.as_ref();\n\n let dst = dst.as_ref();\n\n _link_or_copy(src, dst)\n\n}\n\n\n", "file_path": "src/cargo/util/paths.rs", "rank": 64, "score": 251196.8983013754 }, { "content": "pub fn get_lib_prefix(kind: &str) -> &str {\n\n match kind {\n\n \"lib\" | \"rlib\" => \"lib\",\n\n \"staticlib\" | \"dylib\" | \"proc-macro\" => {\n\n if cfg!(windows) {\n\n \"\"\n\n } else {\n\n \"lib\"\n\n }\n\n }\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/paths.rs", "rank": 65, "score": 250831.6385264306 }, { "content": "pub fn get_lib_extension(kind: &str) -> &str {\n\n match kind {\n\n \"lib\" | \"rlib\" => \"rlib\",\n\n \"staticlib\" => {\n\n if cfg!(windows) {\n\n \"lib\"\n\n } else {\n\n \"a\"\n\n }\n\n }\n\n \"dylib\" | \"proc-macro\" => {\n\n if cfg!(windows) {\n\n \"dll\"\n\n } else if cfg!(target_os = \"macos\") {\n\n \"dylib\"\n\n } else {\n\n \"so\"\n\n }\n\n }\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/paths.rs", "rank": 66, "score": 250831.6385264306 }, { "content": "fn pl_manifest(name: &str, version: &str, extra: &str) -> String {\n\n format!(\n\n r#\"\n\n [package]\n\n name = \"{}\"\n\n version = \"{}\"\n\n authors = []\n\n license = \"MIT\"\n\n description = \"foo\"\n\n documentation = \"foo\"\n\n homepage = 
\"foo\"\n\n repository = \"foo\"\n\n\n\n {}\n\n \"#,\n\n name, version, extra\n\n )\n\n}\n\n\n", "file_path": "tests/testsuite/publish_lockfile.rs", "rank": 67, "score": 249464.10096357844 }, { "content": "#[cfg(not(windows))]\n\npub fn symlink_supported() -> bool {\n\n true\n\n}\n\n\n\n/// The error message for ENOENT.\n\n///\n\n/// It's generally not good to match against OS error messages, but I think\n\n/// this one is relatively stable.\n\n#[cfg(windows)]\n\npub const NO_SUCH_FILE_ERR_MSG: &str = \"The system cannot find the file specified. (os error 2)\";\n\n#[cfg(not(windows))]\n\npub const NO_SUCH_FILE_ERR_MSG: &str = \"No such file or directory (os error 2)\";\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 68, "score": 248565.78052738338 }, { "content": "/// Returns `true` if the local filesystem has low-resolution mtimes.\n\npub fn is_coarse_mtime() -> bool {\n\n // If the filetime crate is being used to emulate HFS then\n\n // return `true`, without looking at the actual hardware.\n\n cfg!(emulate_second_only_system) ||\n\n // This should actually be a test that `$CARGO_TARGET_DIR` is on an HFS\n\n // filesystem, (or any filesystem with low-resolution mtimes). 
However,\n\n // that's tricky to detect, so for now just deal with CI.\n\n cfg!(target_os = \"macos\") && is_ci()\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 69, "score": 248565.4511503769 }, { "content": "pub fn clippy_is_available() -> bool {\n\n if let Err(e) = process(\"clippy-driver\").arg(\"-V\").exec_with_output() {\n\n eprintln!(\"clippy-driver not available, skipping clippy test\");\n\n eprintln!(\"{:?}\", e);\n\n false\n\n } else {\n\n true\n\n }\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 70, "score": 248559.26544780613 }, { "content": "pub fn disabled() -> bool {\n\n // First, disable if `./configure` requested so.\n\n match env::var(\"CFG_DISABLE_CROSS_TESTS\") {\n\n Ok(ref s) if *s == \"1\" => return true,\n\n _ => {}\n\n }\n\n\n\n // Right now, the Windows bots cannot cross compile due to the Mingw setup,\n\n // so we disable ourselves on all but macOS/Linux setups where the rustc\n\n // install script ensures we have both architectures.\n\n if !(cfg!(target_os = \"macos\") || cfg!(target_os = \"linux\") || cfg!(target_env = \"msvc\")) {\n\n return true;\n\n }\n\n\n\n // It's not particularly common to have a cross-compilation setup, so\n\n // try to detect that before we fail a bunch of tests through no fault\n\n // of the user.\n\n static CAN_RUN_CROSS_TESTS: AtomicBool = AtomicBool::new(false);\n\n static CHECK: Once = Once::new();\n\n\n", "file_path": "crates/cargo-test-support/src/cross_compile.rs", "rank": 71, "score": 248559.26544780613 }, { "content": "/// Resolve the standard library dependencies.\n\npub fn resolve_std<'cfg>(\n\n ws: &Workspace<'cfg>,\n\n crates: &[String],\n\n) -> CargoResult<(PackageSet<'cfg>, Resolve)> {\n\n let src_path = detect_sysroot_src_path(ws)?;\n\n let to_patch = [\n\n \"rustc-std-workspace-core\",\n\n \"rustc-std-workspace-alloc\",\n\n \"rustc-std-workspace-std\",\n\n ];\n\n let patches = to_patch\n\n .iter()\n\n .map(|&name| {\n\n let source_path = 
SourceId::for_path(&src_path.join(\"src\").join(\"tools\").join(name))?;\n\n let dep = Dependency::parse_no_deprecated(name, None, source_path)?;\n\n Ok(dep)\n\n })\n\n .collect::<CargoResult<Vec<_>>>()?;\n\n let crates_io_url = crate::sources::CRATES_IO_INDEX.parse().unwrap();\n\n let mut patch = HashMap::new();\n", "file_path": "src/cargo/core/compiler/standard_lib.rs", "rank": 72, "score": 248502.89471238165 }, { "content": "pub fn write_config(config: &str) {\n\n write_config_at(paths::root().join(\".cargo/config\"), config);\n\n}\n\n\n", "file_path": "tests/testsuite/config.rs", "rank": 73, "score": 247215.76905672718 }, { "content": "#[test]\n\nfn cfg_matches() {\n\n assert!(e!(foo).matches(&[c!(bar), c!(foo), c!(baz)]));\n\n assert!(e!(any(foo)).matches(&[c!(bar), c!(foo), c!(baz)]));\n\n assert!(e!(any(foo, bar)).matches(&[c!(bar)]));\n\n assert!(e!(any(foo, bar)).matches(&[c!(foo)]));\n\n assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)]));\n\n assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)]));\n\n assert!(e!(not(foo)).matches(&[c!(bar)]));\n\n assert!(e!(not(foo)).matches(&[]));\n\n assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(bar)]));\n\n assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo), c!(bar)]));\n\n\n\n assert!(!e!(foo).matches(&[]));\n\n assert!(!e!(foo).matches(&[c!(bar)]));\n\n assert!(!e!(foo).matches(&[c!(fo)]));\n\n assert!(!e!(any(foo)).matches(&[]));\n\n assert!(!e!(any(foo)).matches(&[c!(bar)]));\n\n assert!(!e!(any(foo)).matches(&[c!(bar), c!(baz)]));\n\n assert!(!e!(all(foo)).matches(&[c!(bar), c!(baz)]));\n\n assert!(!e!(all(foo, bar)).matches(&[c!(bar)]));\n\n assert!(!e!(all(foo, bar)).matches(&[c!(foo)]));\n\n assert!(!e!(all(foo, bar)).matches(&[]));\n\n assert!(!e!(not(bar)).matches(&[c!(bar)]));\n\n assert!(!e!(not(bar)).matches(&[c!(baz), c!(bar)]));\n\n assert!(!e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo)]));\n\n}\n\n\n", "file_path": "crates/cargo-platform/tests/test_cfg.rs", 
"rank": 74, "score": 246174.20446013336 }, { "content": "fn render_filename<P: AsRef<Path>>(path: P, basedir: Option<&str>) -> CargoResult<String> {\n\n let path = path.as_ref();\n\n let relpath = match basedir {\n\n None => path,\n\n Some(base) => match path.strip_prefix(base) {\n\n Ok(relpath) => relpath,\n\n _ => path,\n\n },\n\n };\n\n relpath\n\n .to_str()\n\n .ok_or_else(|| internal(\"path not utf-8\"))\n\n .map(|f| f.replace(\" \", \"\\\\ \"))\n\n}\n\n\n", "file_path": "src/cargo/core/compiler/output_depinfo.rs", "rank": 75, "score": 245038.89632654248 }, { "content": "pub fn lev_distance(me: &str, t: &str) -> usize {\n\n if me.is_empty() {\n\n return t.chars().count();\n\n }\n\n if t.is_empty() {\n\n return me.chars().count();\n\n }\n\n\n\n let mut dcol = (0..=t.len()).collect::<Vec<_>>();\n\n let mut t_last = 0;\n\n\n\n for (i, sc) in me.chars().enumerate() {\n\n let mut current = i;\n\n dcol[0] = current + 1;\n\n\n\n for (j, tc) in t.chars().enumerate() {\n\n let next = dcol[j + 1];\n\n\n\n if sc == tc {\n\n dcol[j + 1] = current;\n", "file_path": "src/cargo/util/lev_distance.rs", "rank": 76, "score": 245014.05595328152 }, { "content": "/// Prepare for work when a package starts to build\n\npub fn prepare_init<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<()> {\n\n let new1 = cx.files().fingerprint_dir(unit);\n\n\n\n // Doc tests have no output, thus no fingerprint.\n\n if !new1.exists() && !unit.mode.is_doc_test() {\n\n paths::create_dir_all(&new1)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cargo/core/compiler/fingerprint.rs", "rank": 77, "score": 244601.92519536937 }, { "content": "fn maybe_spurious(err: &Error) -> bool {\n\n for e in err.iter_chain() {\n\n if let Some(git_err) = e.downcast_ref::<git2::Error>() {\n\n match git_err.class() {\n\n git2::ErrorClass::Net | git2::ErrorClass::Os => return true,\n\n _ => (),\n\n }\n\n }\n\n if let Some(curl_err) = e.downcast_ref::<curl::Error>() {\n\n if 
curl_err.is_couldnt_connect()\n\n || curl_err.is_couldnt_resolve_proxy()\n\n || curl_err.is_couldnt_resolve_host()\n\n || curl_err.is_operation_timedout()\n\n || curl_err.is_recv_error()\n\n || curl_err.is_http2_stream_error()\n\n || curl_err.is_ssl_connect_error()\n\n {\n\n return true;\n\n }\n\n }\n\n if let Some(not_200) = e.downcast_ref::<HttpNot200>() {\n\n if 500 <= not_200.code && not_200.code < 600 {\n\n return true;\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/cargo/util/network.rs", "rank": 78, "score": 244208.10126452643 }, { "content": "fn has_crlf_line_endings(s: &str) -> bool {\n\n // Only check the first line.\n\n if let Some(lf) = s.find('\\n') {\n\n s[..lf].ends_with('\\r')\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/cargo/ops/lockfile.rs", "rank": 79, "score": 244025.03506284073 }, { "content": "/// Helper for executing binaries installed by cargo.\n\nfn installed_process(name: &str) -> Execs {\n\n static NEXT_ID: AtomicUsize = AtomicUsize::new(0);\n\n thread_local!(static UNIQUE_ID: usize = NEXT_ID.fetch_add(1, Ordering::SeqCst));\n\n\n\n // This copies the executable to a unique name so that it may be safely\n\n // replaced on Windows. See Project::rename_run for details.\n\n let src = installed_exe(name);\n\n let dst = installed_exe(&UNIQUE_ID.with(|my_id| format!(\"{}-{}\", name, my_id)));\n\n // Note: Cannot use copy. 
On Linux, file descriptors may be left open to\n\n // the executable as other tests in other threads are constantly spawning\n\n // new processes (see https://github.com/rust-lang/cargo/pull/5557 for\n\n // more).\n\n fs::rename(&src, &dst)\n\n .unwrap_or_else(|e| panic!(\"Failed to rename `{:?}` to `{:?}`: {}\", src, dst, e));\n\n // Leave behind a fake file so that reinstall duplicate check works.\n\n fs::write(src, \"\").unwrap();\n\n let p = process(dst);\n\n execs().with_process_builder(p)\n\n}\n\n\n", "file_path": "tests/testsuite/install_upgrade.rs", "rank": 80, "score": 243962.07536964398 }, { "content": "/// Determines if a `unit` is up-to-date, and if not prepares necessary work to\n\n/// update the persisted fingerprint.\n\n///\n\n/// This function will inspect `unit`, calculate a fingerprint for it, and then\n\n/// return an appropriate `Job` to run. The returned `Job` will be a noop if\n\n/// `unit` is considered \"fresh\", or if it was previously built and cached.\n\n/// Otherwise the `Job` returned will write out the true fingerprint to the\n\n/// filesystem, to be executed after the unit's work has completed.\n\n///\n\n/// The `force` flag is a way to force the `Job` to be \"dirty\", or always\n\n/// update the fingerprint. **Beware using this flag** because it does not\n\n/// transitively propagate throughout the dependency graph, it only forces this\n\n/// one unit which is very unlikely to be what you want unless you're\n\n/// exclusively talking about top-level units.\n\npub fn prepare_target<'a, 'cfg>(\n\n cx: &mut Context<'a, 'cfg>,\n\n unit: &Unit<'a>,\n\n force: bool,\n\n) -> CargoResult<Job> {\n\n let _p = profile::start(format!(\n\n \"fingerprint: {} / {}\",\n\n unit.pkg.package_id(),\n\n unit.target.name()\n\n ));\n\n let bcx = cx.bcx;\n\n let new = cx.files().fingerprint_dir(unit);\n\n let loc = new.join(&filename(cx, unit));\n\n\n\n debug!(\"fingerprint at: {}\", loc.display());\n\n\n\n // Figure out if this unit is up to date. 
After calculating the fingerprint\n\n // compare it to an old version, if any, and attempt to print diagnostic\n\n // information about failed comparisons to aid in debugging.\n\n let fingerprint = calculate(cx, unit)?;\n", "file_path": "src/cargo/core/compiler/fingerprint.rs", "rank": 81, "score": 243528.0497047968 }, { "content": "fn check_token(expected_token: &str, registry: Option<&str>) -> bool {\n\n let credentials = cargo_home().join(\"credentials\");\n\n assert!(credentials.is_file());\n\n\n\n let mut contents = String::new();\n\n File::open(&credentials)\n\n .unwrap()\n\n .read_to_string(&mut contents)\n\n .unwrap();\n\n let toml: toml::Value = contents.parse().unwrap();\n\n\n\n let token = match (registry, toml) {\n\n // A registry has been provided, so check that the token exists in a\n\n // table for the registry.\n\n (Some(registry), toml::Value::Table(table)) => table\n\n .get(\"registries\")\n\n .and_then(|registries_table| registries_table.get(registry))\n\n .and_then(|registry_table| match registry_table.get(\"token\") {\n\n Some(&toml::Value::String(ref token)) => Some(token.as_str().to_string()),\n\n _ => None,\n", "file_path": "tests/testsuite/login.rs", "rank": 82, "score": 242409.8504073692 }, { "content": "fn pkg_id_loc(name: &str, loc: &str) -> PackageId {\n\n let remote = loc.into_url();\n\n let master = GitReference::Branch(\"master\".to_string());\n\n let source_id = SourceId::for_git(&remote.unwrap(), master).unwrap();\n\n\n\n PackageId::new(name, \"1.0.0\", source_id).unwrap()\n\n}\n\n\n", "file_path": "crates/resolver-tests/src/lib.rs", "rank": 83, "score": 242117.39038963115 }, { "content": "/// Display a list of installed binaries.\n\npub fn install_list(dst: Option<&str>, config: &Config) -> CargoResult<()> {\n\n let root = resolve_root(dst, config)?;\n\n let tracker = InstallTracker::load(config, &root)?;\n\n for (k, v) in tracker.all_installed_bins() {\n\n println!(\"{}:\", k);\n\n for bin in v {\n\n println!(\" {}\", bin);\n\n 
}\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cargo/ops/cargo_install.rs", "rank": 84, "score": 241975.20221505864 }, { "content": "pub fn path2url<P: AsRef<Path>>(p: P) -> Url {\n\n Url::from_file_path(p).ok().unwrap()\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 85, "score": 241970.08152878328 }, { "content": "/// Computes several maps in `Context`:\n\n/// - `build_scripts`: A map that tracks which build scripts each package\n\n/// depends on.\n\n/// - `build_explicit_deps`: Dependency statements emitted by build scripts\n\n/// from a previous run.\n\n/// - `build_script_outputs`: Pre-populates this with any overridden build\n\n/// scripts.\n\n///\n\n/// The important one here is `build_scripts`, which for each `(package,\n\n/// kind)` stores a `BuildScripts` object which contains a list of\n\n/// dependencies with build scripts that the unit should consider when\n\n/// linking. For example this lists all dependencies' `-L` flags which need to\n\n/// be propagated transitively.\n\n///\n\n/// The given set of units to this function is the initial set of\n\n/// targets/profiles which are being built.\n\npub fn build_map<'b, 'cfg>(cx: &mut Context<'b, 'cfg>, units: &[Unit<'b>]) -> CargoResult<()> {\n\n let mut ret = HashMap::new();\n\n for unit in units {\n\n build(&mut ret, cx, unit)?;\n\n }\n\n cx.build_scripts\n\n .extend(ret.into_iter().map(|(k, v)| (k, Arc::new(v))));\n\n return Ok(());\n\n\n\n // Recursive function to build up the map we're constructing. 
This function\n\n // memoizes all of its return values as it goes along.\n\n fn build<'a, 'b, 'cfg>(\n\n out: &'a mut HashMap<Unit<'b>, BuildScripts>,\n\n cx: &mut Context<'b, 'cfg>,\n\n unit: &Unit<'b>,\n\n ) -> CargoResult<&'a BuildScripts> {\n\n // Do a quick pre-flight check to see if we've already calculated the\n\n // set of dependencies.\n\n if out.contains_key(unit) {\n\n return Ok(&out[unit]);\n", "file_path": "src/cargo/core/compiler/custom_build.rs", "rank": 86, "score": 241312.09895419353 }, { "content": "fn installed_exe(name: &str) -> PathBuf {\n\n cargo_home().join(\"bin\").join(exe(name))\n\n}\n\n\n", "file_path": "tests/testsuite/install_upgrade.rs", "rank": 87, "score": 240353.68910130113 }, { "content": "pub fn process_error(\n\n msg: &str,\n\n status: Option<ExitStatus>,\n\n output: Option<&Output>,\n\n) -> ProcessError {\n\n let exit = match status {\n\n Some(s) => status_to_string(s),\n\n None => \"never executed\".to_string(),\n\n };\n\n let mut desc = format!(\"{} ({})\", &msg, exit);\n\n\n\n if let Some(out) = output {\n\n match str::from_utf8(&out.stdout) {\n\n Ok(s) if !s.trim().is_empty() => {\n\n desc.push_str(\"\\n--- stdout\\n\");\n\n desc.push_str(s);\n\n }\n\n Ok(..) | Err(..) 
=> {}\n\n }\n\n match str::from_utf8(&out.stderr) {\n", "file_path": "src/cargo/util/errors.rs", "rank": 88, "score": 239955.0612221415 }, { "content": "pub fn dylib_path_envvar() -> &'static str {\n\n if cfg!(windows) {\n\n \"PATH\"\n\n } else if cfg!(target_os = \"macos\") {\n\n // When loading and linking a dynamic library or bundle, dlopen\n\n // searches in LD_LIBRARY_PATH, DYLD_LIBRARY_PATH, PWD, and\n\n // DYLD_FALLBACK_LIBRARY_PATH.\n\n // In the Mach-O format, a dynamic library has an \"install path.\"\n\n // Clients linking against the library record this path, and the\n\n // dynamic linker, dyld, uses it to locate the library.\n\n // dyld searches DYLD_LIBRARY_PATH *before* the install path.\n\n // dyld searches DYLD_FALLBACK_LIBRARY_PATH only if it cannot\n\n // find the library in the install path.\n\n // Setting DYLD_LIBRARY_PATH can easily have unintended\n\n // consequences.\n\n //\n\n // Also, DYLD_LIBRARY_PATH appears to have significant performance\n\n // penalty starting in 10.13. 
Cargo's testsuite ran more than twice as\n\n // slow with it on CI.\n\n \"DYLD_FALLBACK_LIBRARY_PATH\"\n\n } else {\n\n \"LD_LIBRARY_PATH\"\n\n }\n\n}\n\n\n", "file_path": "src/cargo/util/paths.rs", "rank": 89, "score": 239813.67421469226 }, { "content": "/// Prepares a `Work` that executes the target as a custom build script.\n\npub fn prepare<'a, 'cfg>(cx: &mut Context<'a, 'cfg>, unit: &Unit<'a>) -> CargoResult<Job> {\n\n let _p = profile::start(format!(\n\n \"build script prepare: {}/{}\",\n\n unit.pkg,\n\n unit.target.name()\n\n ));\n\n\n\n let key = (unit.pkg.package_id(), unit.kind);\n\n\n\n if cx.build_script_outputs.lock().unwrap().contains_key(&key) {\n\n // The output is already set, thus the build script is overridden.\n\n fingerprint::prepare_target(cx, unit, false)\n\n } else {\n\n build_work(cx, unit)\n\n }\n\n}\n\n\n", "file_path": "src/cargo/core/compiler/custom_build.rs", "rank": 90, "score": 238480.31330582628 }, { "content": "fn good<T>(s: &str, expected: T)\n\nwhere\n\n T: FromStr + PartialEq + fmt::Debug,\n\n T::Err: fmt::Display,\n\n{\n\n let c = match T::from_str(s) {\n\n Ok(c) => c,\n\n Err(e) => panic!(\"failed to parse `{}`: {}\", s, e),\n\n };\n\n assert_eq!(c, expected);\n\n}\n\n\n", "file_path": "crates/cargo-platform/tests/test_cfg.rs", "rank": 91, "score": 237330.89772748595 }, { "content": "pub fn internal<S: fmt::Display>(error: S) -> failure::Error {\n\n _internal(&error)\n\n}\n\n\n", "file_path": "src/cargo/util/errors.rs", "rank": 92, "score": 236979.20899551726 }, { "content": "pub fn build_unit_dependencies<'a, 'cfg>(\n\n bcx: &'a BuildContext<'a, 'cfg>,\n\n resolve: &'a Resolve,\n\n std_resolve: Option<&'a Resolve>,\n\n roots: &[Unit<'a>],\n\n std_roots: &[Unit<'a>],\n\n) -> CargoResult<UnitGraph<'a>> {\n\n let mut state = State {\n\n bcx,\n\n downloads: bcx.packages.enable_download()?,\n\n waiting_on_download: HashSet::new(),\n\n unit_dependencies: HashMap::new(),\n\n package_cache: HashMap::new(),\n\n usr_resolve: 
resolve,\n\n std_resolve,\n\n is_std: false,\n\n };\n\n\n\n let std_unit_deps = calc_deps_of_std(&mut state, std_roots)?;\n\n\n", "file_path": "src/cargo/core/compiler/unit_dependencies.rs", "rank": 93, "score": 236486.47910136313 }, { "content": "pub fn main_file(println: &str, deps: &[&str]) -> String {\n\n let mut buf = String::new();\n\n\n\n for dep in deps.iter() {\n\n buf.push_str(&format!(\"extern crate {};\\n\", dep));\n\n }\n\n\n\n buf.push_str(\"fn main() { println!(\");\n\n buf.push_str(println);\n\n buf.push_str(\"); }\\n\");\n\n\n\n buf\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/lib.rs", "rank": 94, "score": 236045.0794886785 }, { "content": "// Helper for testing dep-info files in the fingerprint dir.\n\nfn assert_deps(project: &Project, fingerprint: &str, test_cb: impl Fn(&Path, &[(u8, &str)])) {\n\n let mut files = project\n\n .glob(fingerprint)\n\n .map(|f| f.expect(\"unwrap glob result\"))\n\n // Filter out `.json` entries.\n\n .filter(|f| f.extension().is_none());\n\n let info_path = files\n\n .next()\n\n .unwrap_or_else(|| panic!(\"expected 1 dep-info file at {}, found 0\", fingerprint));\n\n assert!(files.next().is_none(), \"expected only 1 dep-info file\");\n\n let dep_info = fs::read(&info_path).unwrap();\n\n let deps: Vec<(u8, &str)> = dep_info\n\n .split(|&x| x == 0)\n\n .filter(|x| !x.is_empty())\n\n .map(|p| {\n\n (\n\n p[0],\n\n std::str::from_utf8(&p[1..]).expect(\"expected valid path\"),\n\n )\n\n })\n\n .collect();\n\n test_cb(&info_path, &deps);\n\n}\n\n\n", "file_path": "tests/testsuite/dep_info.rs", "rank": 95, "score": 235212.9996450039 }, { "content": "pub fn alternate_arch() -> &'static str {\n\n match env::consts::ARCH {\n\n \"x86\" => \"x86_64\",\n\n \"x86_64\" => \"x86\",\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "crates/cargo-test-support/src/cross_compile.rs", "rank": 96, "score": 233123.95963807797 }, { "content": "fn bin_already_exists(explicit: bool, rellocation: &str) {\n\n let path = 
paths::root().join(\"foo\");\n\n fs::create_dir_all(&path.join(\"src\")).unwrap();\n\n\n\n let sourcefile_path = path.join(rellocation);\n\n\n\n let content = br#\"\n\n fn main() {\n\n println!(\"Hello, world 2!\");\n\n }\n\n \"#;\n\n\n\n File::create(&sourcefile_path)\n\n .unwrap()\n\n .write_all(content)\n\n .unwrap();\n\n\n\n if explicit {\n\n cargo_process(\"init --bin --vcs none\")\n\n .env(\"USER\", \"foo\")\n", "file_path": "tests/testsuite/init.rs", "rank": 97, "score": 233053.33248141827 }, { "content": "pub fn join_paths<T: AsRef<OsStr>>(paths: &[T], env: &str) -> CargoResult<OsString> {\n\n let err = match env::join_paths(paths.iter()) {\n\n Ok(paths) => return Ok(paths),\n\n Err(e) => e,\n\n };\n\n let paths = paths.iter().map(Path::new).collect::<Vec<_>>();\n\n let err = failure::Error::from(err);\n\n let explain = Internal::new(failure::format_err!(\n\n \"failed to join path array: {:?}\",\n\n paths\n\n ));\n\n let err = failure::Error::from(err.context(explain));\n\n let more_explain = format!(\n\n \"failed to join search paths together\\n\\\n\n Does ${} have an unterminated quote character?\",\n\n env\n\n );\n\n Err(err.context(more_explain).into())\n\n}\n\n\n", "file_path": "src/cargo/util/paths.rs", "rank": 98, "score": 231765.52925409024 } ]
Rust
linkerd/proxy/transport/src/tls/accept.rs
olix0r/linkerd2-proxy
2e6357ff040474b5e3e2a19d671c8fcdb4e8daa6
use super::{conditional_accept, Conditional, PeerIdentity, ReasonForNoPeerName}; use crate::io::{BoxedIo, PrefixedIo}; use crate::listen::Addrs; use bytes::BytesMut; use futures::prelude::*; use linkerd2_dns_name as dns; use linkerd2_error::Error; use linkerd2_identity as identity; use linkerd2_stack::{layer, NewService}; pub use rustls::ServerConfig as Config; use std::{ pin::Pin, sync::Arc, task::{Context, Poll}, time::Duration, }; use tokio::{ io::{self, AsyncReadExt}, net::TcpStream, }; use tower::util::ServiceExt; use tracing::{debug, trace, warn}; pub trait HasConfig { fn tls_server_name(&self) -> identity::Name; fn tls_server_config(&self) -> Arc<Config>; } pub fn empty_config() -> Arc<Config> { let verifier = rustls::NoClientAuth::new(); Arc::new(Config::new(verifier)) } #[derive(Clone, Debug)] pub struct Meta { pub peer_identity: PeerIdentity, pub addrs: Addrs, } pub type Connection = (Meta, BoxedIo); #[derive(Clone, Debug)] pub struct DetectTls<I, A> { local_identity: Conditional<I>, inner: A, timeout: Duration, } #[derive(Clone, Debug)] pub struct DetectTimeout(()); #[derive(Clone, Debug)] pub struct AcceptTls<I, A> { addrs: Addrs, local_identity: Conditional<I>, inner: A, timeout: Duration, } const PEEK_CAPACITY: usize = 512; const BUFFER_CAPACITY: usize = 8192; impl<I: HasConfig, M> DetectTls<I, M> { pub fn new(local_identity: Conditional<I>, inner: M, timeout: Duration) -> Self { Self { local_identity, inner, timeout, } } pub fn layer( local_identity: Conditional<I>, timeout: Duration, ) -> impl layer::Layer<M, Service = Self> where I: Clone, { layer::mk(move |inner| Self::new(local_identity.clone(), inner, timeout)) } } impl<I, M> NewService<Addrs> for DetectTls<I, M> where I: HasConfig + Clone, M: NewService<Meta> + Clone, { type Service = AcceptTls<I, M>; fn new_service(&mut self, addrs: Addrs) -> Self::Service { AcceptTls { addrs, local_identity: self.local_identity.clone(), inner: self.inner.clone(), timeout: self.timeout, } } } impl<I: 
HasConfig, M, A> tower::Service<TcpStream> for AcceptTls<I, M> where M: NewService<Meta, Service = A> + Clone + Send + 'static, A: tower::Service<BoxedIo, Response = ()> + Send + 'static, A::Error: Into<Error>, A::Future: Send, { type Response = (); type Error = Error; type Future = Pin<Box<dyn Future<Output = Result<(), Error>> + Send + 'static>>; fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { Poll::Ready(Ok(())) } fn call(&mut self, tcp: TcpStream) -> Self::Future { let addrs = self.addrs.clone(); let mut new_accept = self.inner.clone(); match self.local_identity.as_ref() { Conditional::Some(local) => { let config = local.tls_server_config(); let name = local.tls_server_name(); let timeout = tokio::time::delay_for(self.timeout); Box::pin(async move { let (peer_identity, io) = tokio::select! { res = detect(config, name, tcp) => { res? } () = timeout => { return Err(DetectTimeout(()).into()); } }; let meta = Meta { peer_identity, addrs, }; new_accept .new_service(meta) .oneshot(io) .err_into::<Error>() .await }) } Conditional::None(reason) => { let meta = Meta { peer_identity: Conditional::None(reason), addrs, }; let svc = new_accept.new_service(meta); Box::pin(svc.oneshot(BoxedIo::new(tcp)).err_into::<Error>()) } } } } pub async fn detect( tls_config: Arc<Config>, local_id: identity::Name, mut tcp: TcpStream, ) -> io::Result<(PeerIdentity, BoxedIo)> { const NO_TLS_META: PeerIdentity = Conditional::None(ReasonForNoPeerName::NoTlsFromRemote); let mut buf = [0u8; PEEK_CAPACITY]; let sz = tcp.peek(&mut buf).await?; debug!(sz, "Peeked bytes from TCP stream"); match conditional_accept::match_client_hello(&buf, &local_id) { conditional_accept::Match::Matched => { trace!("Identified matching SNI via peek"); let (peer_id, tls) = handshake(tls_config, tcp).await?; return Ok((peer_id, BoxedIo::new(tls))); } conditional_accept::Match::NotMatched => { trace!("Not a matching TLS ClientHello"); return Ok((NO_TLS_META, BoxedIo::new(tcp))); } 
conditional_accept::Match::Incomplete => {} } debug!("Attempting to buffer TLS ClientHello after incomplete peek"); let mut buf = BytesMut::with_capacity(BUFFER_CAPACITY); debug!(buf.capacity = %buf.capacity(), "Reading bytes from TCP stream"); while tcp.read_buf(&mut buf).await? != 0 { debug!(buf.len = %buf.len(), "Read bytes from TCP stream"); match conditional_accept::match_client_hello(buf.as_ref(), &local_id) { conditional_accept::Match::Matched => { trace!("Identified matching SNI via buffered read"); let (peer_id, tls) = handshake(tls_config.clone(), PrefixedIo::new(buf.freeze(), tcp)).await?; return Ok((peer_id, BoxedIo::new(tls))); } conditional_accept::Match::NotMatched => break, conditional_accept::Match::Incomplete => { if buf.capacity() == 0 { warn!("Buffer insufficient for TLS ClientHello"); break; } } } } trace!("Could not read TLS ClientHello via buffering"); let io = BoxedIo::new(PrefixedIo::new(buf.freeze(), tcp)); Ok((NO_TLS_META, io)) } async fn handshake<T>( tls_config: Arc<Config>, io: T, ) -> io::Result<(PeerIdentity, tokio_rustls::server::TlsStream<T>)> where T: io::AsyncRead + io::AsyncWrite + Unpin, { let tls = tokio_rustls::TlsAcceptor::from(tls_config) .accept(io) .await?; let peer_id = client_identity(&tls) .map(Conditional::Some) .unwrap_or_else(|| Conditional::None(ReasonForNoPeerName::NoPeerIdFromRemote)); trace!(peer.identity = ?peer_id, "Accepted TLS connection"); Ok((peer_id, tls)) } fn client_identity<S>(tls: &tokio_rustls::server::TlsStream<S>) -> Option<identity::Name> { use rustls::Session; use webpki::GeneralDNSNameRef; let (_io, session) = tls.get_ref(); let certs = session.get_peer_certificates()?; let c = certs.first().map(rustls::Certificate::as_ref)?; let end_cert = webpki::EndEntityCert::from(c).ok()?; let dns_names = end_cert.dns_names().ok()?; match dns_names.first()? 
{ GeneralDNSNameRef::DNSName(n) => Some(identity::Name::from(dns::Name::from(n.to_owned()))), GeneralDNSNameRef::Wildcard(_) => { None } } } impl HasConfig for identity::CrtKey { fn tls_server_name(&self) -> identity::Name { identity::CrtKey::tls_server_name(self) } fn tls_server_config(&self) -> Arc<Config> { identity::CrtKey::tls_server_config(self) } } impl std::fmt::Display for DetectTimeout { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "TLS detection timeout") } } impl std::error::Error for DetectTimeout {} impl Into<std::net::SocketAddr> for &'_ Meta { fn into(self) -> std::net::SocketAddr { (&self.addrs).into() } }
use super::{conditional_accept, Conditional, PeerIdentity, ReasonForNoPeerName}; use crate::io::{BoxedIo, PrefixedIo}; use crate::listen::Addrs; use bytes::BytesMut; use futures::prelude::*; use linkerd2_dns_name as dns; use linkerd2_error::Error; use linkerd2_identity as identity; use linkerd2_stack::{layer, NewService}; pub use rustls::ServerConfig as Config; use std::{ pin::Pin, sync::Arc, task::{Context, Poll}, time::Duration, }; use tokio::{ io::{self, AsyncReadExt}, net::TcpStream, }; use tower::util::ServiceExt; use tracing::{debug, trace, warn}; pub trait HasConfig { fn tls_server_name(&self) -> identity::Name; fn tls_server_config(&self) -> Arc<Config>; } pub fn empty_config() -> Arc<Config> { let verifier = rustls::NoClientAuth::new(); Arc::new(Config::new(verifier)) } #[derive(Clone, Debug)] pub struct Meta { pub peer_identity: PeerIdentity, pub addrs: Addrs, } pub type Connection = (Meta, BoxedIo); #[derive(Clone, Debug)] pub struct DetectTls<I, A> { local_identity: Conditional<I>, inner: A, timeout: Duration, } #[derive(Clone, Debug)] pub struct DetectTimeout(()); #[derive(Clone, Debug)] pub struct AcceptTls<I, A> { addrs: Addrs, local_identity: Conditional<I>, inner: A, timeout: Duration, } const PEEK_CAPACITY: usize = 512; const BUFFER_CAPACITY: usize = 8192; impl<I: HasConfig, M> DetectTls<I, M> { pub fn new(local_identity: Conditional<I>, inner: M, timeout: Duration) -> Self { Self { local_identity, inner, timeout, } } pub fn layer( local_identity: Conditional<I>, timeout: Duration, ) -> impl layer::Layer<M, Service = Self> where I: Clone, { layer::mk(move |inner| Self::new(local_identity.clone(), inner, timeout)) } } impl<I, M> NewService<Addrs> for DetectTls<I, M> where I: HasConfig + Clone, M: NewService<Meta> + Clone, { type Service = AcceptTls<I, M>; fn new_service(&mut self, addrs: Addrs) -> Self::Service { AcceptTls { addrs, local_identity: self.local_identity.clone(), inner: self.inner.clone(), timeout: self.timeout, } } } impl<I: 
HasConfig, M, A> tower::Service<TcpStream> for AcceptTls<I, M> where M: NewService<Meta, Service = A> + Clone + Send + 'static, A: tower::Service<BoxedIo, Response = ()> + Send + 'static, A::Error: Into<Error>, A::Future: Send, { type Response = (); type Error = Error; type Future = Pin<Box<dyn Future<Output = Result<(), Error>> + Send + 'static>>; fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { Poll::Ready(Ok(())) } fn call(&mut self, tcp: TcpStream) -> Self::Future { let addrs = self.addrs.clone(); let mut new_accept = self.inner.clone(); match self.local_identity.as_ref() { Conditional::Some(local) => { let config = local.tls_server_config(); let name = local.tls_server_name(); let timeout = tokio::time::delay_for(self.timeout); Box::pin(async move { let (peer_identity, io) = tokio::select! { res = detect(config, name, tcp) => { res? } () = timeout => { return Err(DetectTimeout(()).into
handshake(tls_config.clone(), PrefixedIo::new(buf.freeze(), tcp)).await?; return Ok((peer_id, BoxedIo::new(tls))); } conditional_accept::Match::NotMatched => break, conditional_accept::Match::Incomplete => { if buf.capacity() == 0 { warn!("Buffer insufficient for TLS ClientHello"); break; } } } } trace!("Could not read TLS ClientHello via buffering"); let io = BoxedIo::new(PrefixedIo::new(buf.freeze(), tcp)); Ok((NO_TLS_META, io)) } async fn handshake<T>( tls_config: Arc<Config>, io: T, ) -> io::Result<(PeerIdentity, tokio_rustls::server::TlsStream<T>)> where T: io::AsyncRead + io::AsyncWrite + Unpin, { let tls = tokio_rustls::TlsAcceptor::from(tls_config) .accept(io) .await?; let peer_id = client_identity(&tls) .map(Conditional::Some) .unwrap_or_else(|| Conditional::None(ReasonForNoPeerName::NoPeerIdFromRemote)); trace!(peer.identity = ?peer_id, "Accepted TLS connection"); Ok((peer_id, tls)) } fn client_identity<S>(tls: &tokio_rustls::server::TlsStream<S>) -> Option<identity::Name> { use rustls::Session; use webpki::GeneralDNSNameRef; let (_io, session) = tls.get_ref(); let certs = session.get_peer_certificates()?; let c = certs.first().map(rustls::Certificate::as_ref)?; let end_cert = webpki::EndEntityCert::from(c).ok()?; let dns_names = end_cert.dns_names().ok()?; match dns_names.first()? { GeneralDNSNameRef::DNSName(n) => Some(identity::Name::from(dns::Name::from(n.to_owned()))), GeneralDNSNameRef::Wildcard(_) => { None } } } impl HasConfig for identity::CrtKey { fn tls_server_name(&self) -> identity::Name { identity::CrtKey::tls_server_name(self) } fn tls_server_config(&self) -> Arc<Config> { identity::CrtKey::tls_server_config(self) } } impl std::fmt::Display for DetectTimeout { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "TLS detection timeout") } } impl std::error::Error for DetectTimeout {} impl Into<std::net::SocketAddr> for &'_ Meta { fn into(self) -> std::net::SocketAddr { (&self.addrs).into() } }
()); } }; let meta = Meta { peer_identity, addrs, }; new_accept .new_service(meta) .oneshot(io) .err_into::<Error>() .await }) } Conditional::None(reason) => { let meta = Meta { peer_identity: Conditional::None(reason), addrs, }; let svc = new_accept.new_service(meta); Box::pin(svc.oneshot(BoxedIo::new(tcp)).err_into::<Error>()) } } } } pub async fn detect( tls_config: Arc<Config>, local_id: identity::Name, mut tcp: TcpStream, ) -> io::Result<(PeerIdentity, BoxedIo)> { const NO_TLS_META: PeerIdentity = Conditional::None(ReasonForNoPeerName::NoTlsFromRemote); let mut buf = [0u8; PEEK_CAPACITY]; let sz = tcp.peek(&mut buf).await?; debug!(sz, "Peeked bytes from TCP stream"); match conditional_accept::match_client_hello(&buf, &local_id) { conditional_accept::Match::Matched => { trace!("Identified matching SNI via peek"); let (peer_id, tls) = handshake(tls_config, tcp).await?; return Ok((peer_id, BoxedIo::new(tls))); } conditional_accept::Match::NotMatched => { trace!("Not a matching TLS ClientHello"); return Ok((NO_TLS_META, BoxedIo::new(tcp))); } conditional_accept::Match::Incomplete => {} } debug!("Attempting to buffer TLS ClientHello after incomplete peek"); let mut buf = BytesMut::with_capacity(BUFFER_CAPACITY); debug!(buf.capacity = %buf.capacity(), "Reading bytes from TCP stream"); while tcp.read_buf(&mut buf).await? != 0 { debug!(buf.len = %buf.len(), "Read bytes from TCP stream"); match conditional_accept::match_client_hello(buf.as_ref(), &local_id) { conditional_accept::Match::Matched => { trace!("Identified matching SNI via buffered read"); let (peer_id, tls) =
random
[ { "content": "pub fn is_loop(err: &(dyn std::error::Error + 'static)) -> bool {\n\n err.is::<LoopPrevented>() || err.source().map(is_loop).unwrap_or(false)\n\n}\n\n\n\nimpl std::fmt::Display for LoopPrevented {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(\n\n f,\n\n \"outbound requests must not target localhost:{}\",\n\n self.port\n\n )\n\n }\n\n}\n\n\n\nimpl std::error::Error for LoopPrevented {}\n", "file_path": "linkerd/app/outbound/src/tcp/connect.rs", "rank": 0, "score": 437351.44097811304 }, { "content": "pub fn layer<N, S, Req>() -> impl layer::Layer<N, Service = NewSplit<N, S, Req>> + Clone {\n\n // This RNG doesn't need to be cryptographically secure. Small and fast is\n\n // preferable.\n\n let rng = SmallRng::from_entropy();\n\n layer::mk(move |inner| NewSplit {\n\n inner,\n\n rng: rng.clone(),\n\n _service: PhantomData,\n\n })\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct NewSplit<N, S, Req> {\n\n inner: N,\n\n rng: SmallRng,\n\n _service: PhantomData<fn(Req) -> S>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Split<T, N, S, Req> {\n\n inner: Inner<T, N, S, Req>,\n\n}\n\n\n", "file_path": "linkerd/service-profiles/src/split.rs", "rank": 1, "score": 372661.63139316073 }, { "content": "fn set_route_timeout(route: &mut http::Route, timeout: Result<Duration, Duration>) {\n\n match timeout {\n\n Ok(dur) => {\n\n route.set_timeout(dur);\n\n }\n\n Err(_) => {\n\n warn!(\"route timeout is negative: {:?}\", route);\n\n }\n\n }\n\n}\n\n\n", "file_path": "linkerd/service-profiles/src/client.rs", "rank": 2, "score": 370378.9534214692 }, { "content": "/// Determintes whether the given `input` looks like the start of a TLS\n\n/// connection that the proxy should terminate.\n\n///\n\n/// The determination is made based on whether the input looks like (the start\n\n/// of) a valid ClientHello that a reasonable TLS client might send, and the\n\n/// SNI matches the given identity.\n\n///\n\n/// XXX: Once the TLS record header is 
matched, the determination won't be\n\n/// made until the entire TLS record including the entire ClientHello handshake\n\n/// message is available. TODO: Reject non-matching inputs earlier.\n\n///\n\n/// This assumes that the ClientHello is small and is sent in a single TLS\n\n/// record, which is what all reasonable implementations do. (If they were not\n\n/// to, they wouldn't interoperate with picky servers.)\n\npub fn match_client_hello(input: &[u8], identity: &identity::Name) -> Match {\n\n let r = untrusted::Input::from(input).read_all(untrusted::EndOfInput, |input| {\n\n let r = extract_sni(input);\n\n input.skip_to_end(); // Ignore anything after what we parsed.\n\n r\n\n });\n\n match r {\n\n Ok(Some(sni)) => {\n\n let m = identity::Name::from_hostname(sni.as_slice_less_safe())\n\n .map(|sni| {\n\n if sni == *identity {\n\n Match::Matched\n\n } else {\n\n Match::NotMatched\n\n }\n\n })\n\n .unwrap_or(Match::NotMatched);\n\n trace!(\n\n \"match_client_hello: parsed correctly up to SNI; matches: {:?}\",\n\n m\n", "file_path": "linkerd/proxy/transport/src/tls/conditional_accept.rs", "rank": 3, "score": 368485.4843044691 }, { "content": "pub fn is_discovery_rejected(err: &(dyn std::error::Error + 'static)) -> bool {\n\n if let Some(status) = err.downcast_ref::<tonic::Status>() {\n\n status.code() == tonic::Code::InvalidArgument\n\n } else if let Some(err) = err.source() {\n\n is_discovery_rejected(err)\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "linkerd/app/core/src/lib.rs", "rank": 4, "score": 354555.25211305503 }, { "content": "pub fn init_log_compat() -> Result<(), Error> {\n\n tracing_log::LogTracer::init().map_err(Error::from)\n\n}\n\n\n", "file_path": "linkerd/tracing/src/lib.rs", "rank": 5, "score": 351361.750849467 }, { "content": "/// Initialize tracing and logging with the value of the `ENV_LOG`\n\n/// environment variable as the verbosity-level filter.\n\npub fn init() -> Result<Handle, Error> {\n\n let log_level = 
env::var(ENV_LOG_LEVEL).unwrap_or(DEFAULT_LOG_LEVEL.to_string());\n\n if let \"OFF\" = log_level.to_uppercase().trim() {\n\n return Ok(Handle(Inner::Disabled));\n\n }\n\n\n\n let log_format = env::var(ENV_LOG_FORMAT).unwrap_or(DEFAULT_LOG_FORMAT.to_string());\n\n let (dispatch, handle) = with_filter_and_format(log_level, log_format);\n\n\n\n // Set up log compatibility.\n\n init_log_compat()?;\n\n // Set the default subscriber.\n\n tracing::dispatcher::set_global_default(dispatch)?;\n\n\n\n Ok(handle)\n\n}\n\n\n", "file_path": "linkerd/tracing/src/lib.rs", "rank": 6, "score": 349373.81432890415 }, { "content": " /// This trait is private, since its purpose is for creating a dynamic trait\n\n /// object, but doing so without care can to lead not getting vectored\n\n /// writes.\n\n ///\n\n /// Instead, use the concrete `BoxedIo` type.\n\n pub trait Io: AsyncRead + AsyncWrite + PeerAddr + Send {\n\n /// This method is to allow using `Async::polL_read_buf` even through a\n\n /// trait object.\n\n fn poll_read_buf_erased(\n\n self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n buf: &mut dyn BufMut,\n\n ) -> Poll<usize>;\n\n\n\n /// This method is to allow using `Async::poll_write_buf` even through a\n\n /// trait object.\n\n fn poll_write_buf_erased(\n\n self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n buf: &mut dyn Buf,\n\n ) -> Poll<usize>;\n\n }\n\n\n\n impl Io for tokio::net::TcpStream {\n\n fn poll_write_buf_erased(\n", "file_path": "linkerd/io/src/lib.rs", "rank": 7, "score": 343184.17510193685 }, { "content": "pub fn http_request_authority_addr<B>(req: &http::Request<B>) -> Result<Addr, addr::Error> {\n\n req.uri()\n\n .authority()\n\n .ok_or(addr::Error::InvalidHost)\n\n .and_then(|a| Addr::from_authority_and_default_port(a, DEFAULT_PORT))\n\n}\n\n\n", "file_path": "linkerd/app/core/src/lib.rs", "rank": 8, "score": 337700.96026269556 }, { "content": "pub fn http_request_host_addr<B>(req: &http::Request<B>) -> Result<Addr, addr::Error> {\n\n use 
crate::proxy::http::h1;\n\n\n\n h1::authority_from_host(req)\n\n .ok_or(addr::Error::InvalidHost)\n\n .and_then(|a| Addr::from_authority_and_default_port(&a, DEFAULT_PORT))\n\n}\n", "file_path": "linkerd/app/core/src/lib.rs", "rank": 9, "score": 337700.96026269556 }, { "content": "pub fn http_request_l5d_override_dst_addr<B>(req: &http::Request<B>) -> Result<Addr, addr::Error> {\n\n proxy::http::authority_from_header(req, DST_OVERRIDE_HEADER)\n\n .ok_or_else(|| {\n\n tracing::trace!(\"{} not in request headers\", DST_OVERRIDE_HEADER);\n\n addr::Error::InvalidHost\n\n })\n\n .and_then(|a| Addr::from_authority_and_default_port(&a, DEFAULT_PORT))\n\n}\n\n\n", "file_path": "linkerd/app/core/src/lib.rs", "rank": 10, "score": 331985.3424980681 }, { "content": "type ConnectFn<E> = Box<dyn FnMut(E) -> ConnectFuture + Send>;\n\n\n\npub type ConnectFuture = Pin<Box<dyn Future<Output = Result<io::Mock, Error>> + Send + 'static>>;\n\n\n\n#[derive(Clone)]\n\npub struct Connect<E> {\n\n endpoints: Arc<Mutex<HashMap<SocketAddr, ConnectFn<E>>>>,\n\n}\n\n\n\nimpl<E> tower::Service<E> for Connect<E>\n\nwhere\n\n E: Clone + fmt::Debug + Into<SocketAddr>,\n\n{\n\n type Response = io::Mock;\n\n type Future = Instrumented<ConnectFuture>;\n\n type Error = Error;\n\n\n\n fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n Poll::Ready(Ok(()))\n\n }\n", "file_path": "linkerd/app/test/src/connect.rs", "rank": 11, "score": 329250.99740964774 }, { "content": "pub fn layer<M, N: Clone, R>(\n\n new_route: N,\n\n) -> impl layer::Layer<M, Service = NewRouteRequest<M, N, R>> {\n\n // This is saved so that the same `Arc`s are used and cloned instead of\n\n // calling `Route::default()` every time.\n\n let default = Route::default();\n\n layer::mk(move |inner| NewRouteRequest {\n\n inner,\n\n new_route: new_route.clone(),\n\n default: default.clone(),\n\n _route: PhantomData,\n\n })\n\n}\n\n\n\npub struct NewRouteRequest<M, N, R> {\n\n inner: M,\n\n new_route: N,\n\n 
default: Route,\n\n _route: PhantomData<R>,\n\n}\n", "file_path": "linkerd/service-profiles/src/http/route_request.rs", "rank": 12, "score": 327479.1026507217 }, { "content": "pub fn tcp(addr: SocketAddr) -> tcp::TcpClient {\n\n tcp::client(addr)\n\n}\n\npub struct Client {\n\n authority: String,\n\n /// This is a future that completes when the associated connection for\n\n /// this Client has been dropped.\n\n running: Running,\n\n tx: Sender,\n\n task: JoinHandle<()>,\n\n version: http::Version,\n\n tls: Option<TlsConfig>,\n\n}\n\n\n\nimpl Client {\n\n fn new(addr: SocketAddr, authority: String, r: Run, tls: Option<TlsConfig>) -> Client {\n\n let v = match r {\n\n Run::Http1 { .. } => http::Version::HTTP_11,\n\n Run::Http2 => http::Version::HTTP_2,\n\n };\n", "file_path": "linkerd/app/integration/src/client.rs", "rank": 13, "score": 323934.4329859836 }, { "content": "/// Load a `App` by reading ENV variables.\n\npub fn parse_config<S: Strings>(strings: &S) -> Result<super::Config, EnvError> {\n\n // Parse all the environment variables. 
`parse` will log any errors so\n\n // defer returning any errors until all of them have been parsed.\n\n let outbound_listener_addr = parse(strings, ENV_OUTBOUND_LISTEN_ADDR, parse_socket_addr);\n\n let inbound_listener_addr = parse(strings, ENV_INBOUND_LISTEN_ADDR, parse_socket_addr);\n\n let admin_listener_addr = parse(strings, ENV_ADMIN_LISTEN_ADDR, parse_socket_addr);\n\n\n\n let inbound_dispatch_timeout = parse(strings, ENV_INBOUND_DISPATCH_TIMEOUT, parse_duration);\n\n let inbound_connect_timeout = parse(strings, ENV_INBOUND_CONNECT_TIMEOUT, parse_duration);\n\n\n\n let outbound_dispatch_timeout = parse(strings, ENV_OUTBOUND_DISPATCH_TIMEOUT, parse_duration);\n\n let outbound_connect_timeout = parse(strings, ENV_OUTBOUND_CONNECT_TIMEOUT, parse_duration);\n\n\n\n let inbound_accept_keepalive = parse(strings, ENV_INBOUND_ACCEPT_KEEPALIVE, parse_duration);\n\n let outbound_accept_keepalive = parse(strings, ENV_OUTBOUND_ACCEPT_KEEPALIVE, parse_duration);\n\n\n\n let inbound_connect_keepalive = parse(strings, ENV_INBOUND_CONNECT_KEEPALIVE, parse_duration);\n\n let outbound_connect_keepalive = parse(strings, ENV_OUTBOUND_CONNECT_KEEPALIVE, parse_duration);\n\n\n\n #[cfg(feature = \"mock-orig-dst\")]\n", "file_path": "linkerd/app/src/env.rs", "rank": 14, "score": 310801.423359091 }, { "content": "type UpdateStream = Pin<Box<dyn Stream<Item = Result<Update<()>, Error>> + Send + Sync + 'static>>;\n\n\n\nimpl<T: Into<Addr>> tower::Service<T> for DnsResolve {\n\n type Response = UpdateStream;\n\n type Error = Error;\n\n type Future = Pin<Box<dyn Future<Output = Result<UpdateStream, Error>> + Send + 'static>>;\n\n\n\n fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n Poll::Ready(Ok(()))\n\n }\n\n\n\n fn call(&mut self, target: T) -> Self::Future {\n\n // If the target address is `localhost.`, skip DNS resolution and use\n\n // 127.0.0.1.\n\n let addr = match target.into() {\n\n Addr::Name(na) if na.is_localhost() => {\n\n 
SocketAddr::from(([127, 0, 0, 1], na.port())).into()\n\n }\n\n addr => addr,\n\n };\n", "file_path": "linkerd/proxy/dns-resolve/src/lib.rs", "rank": 15, "score": 309977.06677918765 }, { "content": "fn http_status(error: &(dyn std::error::Error + 'static)) -> StatusCode {\n\n if let Some(HttpError { http, .. }) = error.downcast_ref::<HttpError>() {\n\n *http\n\n } else if error.is::<ResponseTimeout>() {\n\n http::StatusCode::GATEWAY_TIMEOUT\n\n } else if error.is::<FailFastError>() {\n\n http::StatusCode::SERVICE_UNAVAILABLE\n\n } else if error.is::<tower::timeout::error::Elapsed>() {\n\n http::StatusCode::SERVICE_UNAVAILABLE\n\n } else if error.is::<IdentityRequired>() {\n\n http::StatusCode::FORBIDDEN\n\n } else if let Some(source) = error.source() {\n\n http_status(source)\n\n } else {\n\n http::StatusCode::BAD_GATEWAY\n\n }\n\n}\n\n\n", "file_path": "linkerd/app/core/src/errors.rs", "rank": 16, "score": 308202.0742701563 }, { "content": "pub fn connect<E: fmt::Debug>() -> connect::Connect<E> {\n\n connect::Connect::new()\n\n}\n\n\n", "file_path": "linkerd/app/test/src/lib.rs", "rank": 17, "score": 308025.80163074646 }, { "content": "type BoxError = Box<dyn std::error::Error + Send + Sync>;\n\n\n", "file_path": "linkerd/app/integration/src/server.rs", "rank": 18, "score": 305647.6628142493 }, { "content": "pub fn layer<C, R: Clone>(\n\n recover: R,\n\n) -> impl layer::Layer<C, Service = NewReconnect<C, R>> + Clone {\n\n layer::mk(move |connect| NewReconnect {\n\n connect,\n\n recover: recover.clone(),\n\n })\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct NewReconnect<C, R> {\n\n connect: C,\n\n recover: R,\n\n}\n\n\n\nimpl<C, R, T> NewService<T> for NewReconnect<C, R>\n\nwhere\n\n R: Recover + Clone,\n\n C: tower::Service<T> + Clone,\n\n{\n\n type Service = service::Service<T, R, C>;\n\n\n\n fn new_service(&mut self, target: T) -> Self::Service {\n\n service::Service::new(target, self.connect.clone(), self.recover.clone())\n\n }\n\n}\n", "file_path": 
"linkerd/reconnect/src/lib.rs", "rank": 19, "score": 303655.6792592926 }, { "content": "pub fn client(addr: SocketAddr) -> TcpClient {\n\n TcpClient { addr }\n\n}\n\n\n", "file_path": "linkerd/app/integration/src/tcp.rs", "rank": 20, "score": 285524.8720915321 }, { "content": "/// A mockable source for address info, i.e., for tests.\n\npub trait OrigDstAddr: Clone {\n\n fn orig_dst_addr(&self, socket: &TcpStream) -> Option<SocketAddr>;\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Bind<O: OrigDstAddr = NoOrigDstAddr> {\n\n bind_addr: SocketAddr,\n\n keepalive: Option<Duration>,\n\n orig_dst_addr: O,\n\n}\n\n\n\npub type Connection = (Addrs, TcpStream);\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Addrs {\n\n local: SocketAddr,\n\n peer: SocketAddr,\n\n orig_dst: Option<SocketAddr>,\n\n}\n\n\n", "file_path": "linkerd/proxy/transport/src/listen.rs", "rank": 21, "score": 282484.1389732477 }, { "content": "/// Attempt to split_to the given index. If there are not enough bytes then\n\n/// Err is returned and the given Bytes is not modified.\n\nfn try_split_to(buf: &mut Bytes, n: usize) -> Result<Bytes, InsufficientBytes> {\n\n if buf.len() >= n {\n\n Ok(buf.split_to(n))\n\n } else {\n\n Err(InsufficientBytes)\n\n }\n\n}\n", "file_path": "linkerd/trace-context/src/propagation.rs", "rank": 22, "score": 282222.28955152957 }, { "content": "/// Produces a PeakEWMA balancer that uses connect latency (and pending\n\n/// connections) as its load metric.\n\npub fn layer<T, D>(\n\n default_rtt: Duration,\n\n decay: Duration,\n\n) -> impl tower::layer::Layer<D, Service = Balance<PeakEwmaDiscover<D, CompleteOnResponse>, T>> + Clone\n\nwhere\n\n D: Discover,\n\n D::Key: Hash,\n\n D::Service: tower::Service<T>,\n\n <D::Service as tower::Service<T>>::Error: Into<Error>,\n\n{\n\n let rng = SmallRng::from_entropy();\n\n layer::mk(move |discover| {\n\n let loaded =\n\n PeakEwmaDiscover::new(discover, default_rtt, decay, CompleteOnResponse::default());\n\n Balance::from_rng(loaded, 
rng.clone()).expect(\"RNG must be valid\")\n\n })\n\n}\n", "file_path": "linkerd/proxy/tcp/src/balance.rs", "rank": 23, "score": 276575.87927822507 }, { "content": "pub fn connect(keepalive: Option<Duration>) -> Stack<Connect> {\n\n Stack(Connect::new(keepalive))\n\n}\n\n\n", "file_path": "linkerd/app/core/src/svc.rs", "rank": 24, "score": 276079.6650337455 }, { "content": "fn parse_addr(s: &str) -> Result<Addr, ParseError> {\n\n Addr::from_str(s).map_err(|e| {\n\n error!(\"Not a valid address: {}\", s);\n\n ParseError::AddrError(e)\n\n })\n\n}\n\n\n", "file_path": "linkerd/app/src/env.rs", "rank": 25, "score": 275234.654242501 }, { "content": "pub fn layer() -> respond::RespondLayer<NewRespond> {\n\n respond::RespondLayer::new(NewRespond(()))\n\n}\n\n\n\n#[derive(Clone, Default)]\n\npub struct Metrics(metrics::Registry<Label>);\n\n\n\npub type MetricsLayer = metrics::RecordErrorLayer<LabelError, Label>;\n\n\n\n/// Error metric labels.\n\n#[derive(Copy, Clone, Debug)]\n\npub struct LabelError(super::metrics::Direction);\n\n\n\npub type Label = (super::metrics::Direction, Reason);\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub struct HttpError {\n\n http: http::StatusCode,\n\n grpc: Code,\n\n message: &'static str,\n", "file_path": "linkerd/app/core/src/errors.rs", "rank": 26, "score": 273688.18195699377 }, { "content": "pub fn default_config(orig_dst: SocketAddr) -> Config {\n\n Config {\n\n allow_discovery: IpMatch::new(Some(IpNet::from_str(\"0.0.0.0/0\").unwrap())).into(),\n\n proxy: config::ProxyConfig {\n\n server: config::ServerConfig {\n\n bind: listen::Bind::new(SocketAddr::new(LOCALHOST.into(), 0), None)\n\n .with_orig_dst_addr(orig_dst.into()),\n\n h2_settings: h2::Settings::default(),\n\n },\n\n connect: config::ConnectConfig {\n\n keepalive: None,\n\n timeout: Duration::from_secs(1),\n\n backoff: exp_backoff::ExponentialBackoff::new(\n\n Duration::from_millis(100),\n\n Duration::from_millis(500),\n\n 0.1,\n\n )\n\n .unwrap(),\n\n h2_settings: 
h2::Settings::default(),\n\n },\n\n buffer_capacity: 10_000,\n\n cache_max_idle_age: Duration::from_secs(60),\n\n dispatch_timeout: Duration::from_secs(3),\n\n max_in_flight_requests: 10_000,\n\n detect_protocol_timeout: Duration::from_secs(3),\n\n },\n\n }\n\n}\n", "file_path": "linkerd/app/outbound/src/test_util.rs", "rank": 28, "score": 268314.12679716054 }, { "content": "pub fn layer<A, B>(default_rtt: Duration, decay: Duration) -> Layer<A, B> {\n\n Layer {\n\n decay,\n\n default_rtt,\n\n rng: SmallRng::from_entropy(),\n\n _marker: PhantomData,\n\n }\n\n}\n\n\n\nimpl<A, B> Clone for Layer<A, B> {\n\n fn clone(&self) -> Self {\n\n Self {\n\n decay: self.decay,\n\n default_rtt: self.default_rtt,\n\n rng: self.rng.clone(),\n\n _marker: PhantomData,\n\n }\n\n }\n\n}\n\n\n", "file_path": "linkerd/proxy/http/src/balance.rs", "rank": 29, "score": 267171.26201653224 }, { "content": "fn set_keepalive_or_warn(tcp: &TcpStream, ka: Option<Duration>) {\n\n if let Err(e) = tcp.set_keepalive(ka) {\n\n tracing::warn!(\"failed to set keepalive: {}\", e);\n\n }\n\n}\n", "file_path": "linkerd/proxy/transport/src/lib.rs", "rank": 30, "score": 267052.53953864425 }, { "content": "pub trait PeerAddr {\n\n fn peer_addr(&self) -> SocketAddr;\n\n}\n\n\n\nimpl PeerAddr for tokio::net::TcpStream {\n\n fn peer_addr(&self) -> SocketAddr {\n\n tokio::net::TcpStream::peer_addr(self).expect(\"TcpStream must have a peer address\")\n\n }\n\n}\n\n\n\nimpl<T: PeerAddr> PeerAddr for tokio_rustls::client::TlsStream<T> {\n\n fn peer_addr(&self) -> SocketAddr {\n\n self.get_ref().0.peer_addr()\n\n }\n\n}\n\n\n\nimpl<T: PeerAddr> PeerAddr for tokio_rustls::server::TlsStream<T> {\n\n fn peer_addr(&self) -> SocketAddr {\n\n self.get_ref().0.peer_addr()\n\n }\n", "file_path": "linkerd/io/src/lib.rs", "rank": 31, "score": 262069.09055091263 }, { "content": "pub fn layer<T, G, F, M>(\n\n get_profile: G,\n\n filter: F,\n\n) -> impl layer::Layer<M, Service = Service<F, G, M>> + Clone\n\nwhere\n\n F: 
FilterRequest<T> + Clone,\n\n G: GetProfile<F::Request> + Clone,\n\n{\n\n let get_profile = RecoverDefault::new(RequestFilter::new(filter, get_profile.into_service()));\n\n layer::mk(move |inner| Discover {\n\n get_profile: get_profile.clone(),\n\n inner,\n\n })\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Discover<G, M> {\n\n get_profile: G,\n\n inner: M,\n\n}\n", "file_path": "linkerd/service-profiles/src/discover.rs", "rank": 32, "score": 260752.88186487491 }, { "content": "/// Like `skip_vector` for vectors with `u8` lengths.\n\nfn skip_vector_u8(input: &mut untrusted::Reader<'_>) -> Result<(), untrusted::EndOfInput> {\n\n let length = input.read_byte()?;\n\n input.skip(usize::from(length))\n\n}\n\n\n", "file_path": "linkerd/proxy/transport/src/tls/conditional_accept.rs", "rank": 33, "score": 259032.68504649098 }, { "content": "/// Like `read_vector` except the contents are ignored.\n\nfn skip_vector(input: &mut untrusted::Reader<'_>) -> Result<bool, untrusted::EndOfInput> {\n\n let r = read_vector(input, |input| {\n\n input.skip_to_end();\n\n Ok(Some(()))\n\n });\n\n r.map(|r| r.is_some())\n\n}\n\n\n", "file_path": "linkerd/proxy/transport/src/tls/conditional_accept.rs", "rank": 34, "score": 254944.29927791833 }, { "content": "/// Read a big-endian-encoded `u16`.\n\nfn read_u16(input: &mut untrusted::Reader<'_>) -> Result<u16, untrusted::EndOfInput> {\n\n let hi = input.read_byte()?;\n\n let lo = input.read_byte()?;\n\n Ok(u16::from(hi) << 8 | u16::from(lo))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n /// From `cargo run --example tlsclient -- --http example.com`\n\n static VALID_EXAMPLE_COM: &[u8] = include_bytes!(\"testdata/example-com-client-hello.bin\");\n\n\n\n #[test]\n\n fn matches() {\n\n check_all_prefixes(Match::Matched, \"example.com\", VALID_EXAMPLE_COM);\n\n }\n\n\n\n #[test]\n\n fn mismatch_different_sni() {\n", "file_path": "linkerd/proxy/transport/src/tls/conditional_accept.rs", "rank": 35, "score": 254944.29927791833 
}, { "content": "pub fn layers() -> Layers<Identity> {\n\n Layers(Identity::new())\n\n}\n\n\n", "file_path": "linkerd/app/core/src/svc.rs", "rank": 36, "score": 254671.6471687126 }, { "content": "pub fn tcp() -> tcp::TcpServer {\n\n tcp::server()\n\n}\n\n\n\npub struct Server {\n\n routes: HashMap<String, Route>,\n\n version: Run,\n\n tls: Option<Arc<ServerConfig>>,\n\n}\n\n\n\npub struct Listening {\n\n pub addr: SocketAddr,\n\n pub(super) drain: drain::Signal,\n\n pub(super) conn_count: Arc<AtomicUsize>,\n\n pub(super) task: Option<JoinHandle<Result<(), io::Error>>>,\n\n}\n\n\n", "file_path": "linkerd/app/integration/src/server.rs", "rank": 37, "score": 253992.31144130623 }, { "content": "/// A middleware type that cannot exert backpressure.\n\n///\n\n/// Typically used to modify requests or responses.\n\npub trait Proxy<Req, S: tower::Service<Self::Request>> {\n\n /// The type of request sent to the inner `S`-typed service.\n\n type Request;\n\n\n\n /// The type of response returned to callers.\n\n type Response;\n\n\n\n /// The error type returned to callers.\n\n type Error: Into<Error>;\n\n\n\n /// The Future type returned to callers.\n\n type Future: Future<Output = Result<Self::Response, Self::Error>>;\n\n\n\n /// Usually invokes `S::call`, potentially modifying requests or responses.\n\n fn proxy(&self, inner: &mut S, req: Req) -> Self::Future;\n\n\n\n /// Wraps an `S` typed service with the proxy.\n\n fn wrap_service(self, inner: S) -> ProxyService<Self, S>\n\n where\n\n Self: Sized,\n", "file_path": "linkerd/stack/src/proxy.rs", "rank": 38, "score": 253636.5967494634 }, { "content": "pub fn io() -> io::Builder {\n\n io::Builder::new()\n\n}\n\n\n\n/// By default, disable logging in modules that are expected to error in tests.\n\nconst DEFAULT_LOG: &'static str = \"error,\\\n\n linkerd2_proxy_http=off,\\\n\n linkerd2_proxy_transport=off\";\n\n\n", "file_path": "linkerd/app/test/src/lib.rs", "rank": 39, "score": 251255.0024059692 }, { "content": "/// An 
error recovery strategy.\n\npub trait Recover<E: Into<Error> = Error> {\n\n type Backoff: Stream<Item = ()>;\n\n\n\n /// Given an E-typed error, determine if the error is recoverable.\n\n ///\n\n /// If it is, a backoff stream is returned. When the backoff becomes ready,\n\n /// it signals that the caller should retry its operation. If the backoff is\n\n /// polled agian, it is assumed that the operation failed and a new (possibly\n\n /// longer) backoff is initated.\n\n ///\n\n /// If the error is not recoverable, it is returned immediately.\n\n fn recover(&self, err: E) -> Result<Self::Backoff, E>;\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Default)]\n\npub struct Immediately(());\n\n\n\n// === impl Recover ===\n\n\n\nimpl<E, B, F> Recover<E> for F\n", "file_path": "linkerd/error/src/recover.rs", "rank": 40, "score": 250902.51532890013 }, { "content": "pub trait HasConfig {\n\n fn tls_client_config(&self) -> Arc<Config>;\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct ConnectLayer<L>(super::Conditional<L>);\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Connect<L, C> {\n\n local: super::Conditional<L>,\n\n inner: C,\n\n}\n\n\n\npub type Connection = BoxedIo;\n\n\n\n/// A socket that is in the process of connecting.\n\n#[pin_project]\n\npub struct ConnectFuture<L, F: TryFuture> {\n\n #[pin]\n\n state: ConnectState<L, F>,\n\n}\n", "file_path": "linkerd/proxy/transport/src/tls/client.rs", "rank": 41, "score": 248562.7478527311 }, { "content": "// Establishes connections to remote peers (for both TCP forwarding and HTTP\n\n// proxying).\n\npub fn stack<P>(\n\n config: &ConnectConfig,\n\n server_port: u16,\n\n local_identity: tls::Conditional<identity::Local>,\n\n metrics: &metrics::Proxy,\n\n) -> impl tower::Service<\n\n Endpoint<P>,\n\n Error = Error,\n\n Future = impl Send,\n\n Response = impl tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static,\n\n> + Unpin\n\n + Clone\n\n + Send {\n\n svc::connect(config.keepalive)\n\n // Initiates mTLS if the 
target is configured with identity.\n\n .push(tls::client::ConnectLayer::new(local_identity))\n\n // Limits the time we wait for a connection to be established.\n\n .push_timeout(config.timeout)\n\n .push(metrics.transport.layer_connect())\n\n .push_request_filter(PreventLoop { port: server_port })\n\n .into_inner()\n\n}\n\n\n", "file_path": "linkerd/app/outbound/src/tcp/connect.rs", "rank": 43, "score": 246972.59843404568 }, { "content": "fn parse_duration(s: &str) -> Result<Duration, ParseError> {\n\n use regex::Regex;\n\n\n\n let re = Regex::new(r\"^\\s*(\\d+)(ms|s|m|h|d)?\\s*$\").expect(\"duration regex\");\n\n\n\n let cap = re.captures(s).ok_or(ParseError::NotADuration)?;\n\n\n\n let magnitude = parse_number(&cap[1])?;\n\n match cap.get(2).map(|m| m.as_str()) {\n\n None if magnitude == 0 => Ok(Duration::from_secs(0)),\n\n Some(\"ms\") => Ok(Duration::from_millis(magnitude)),\n\n Some(\"s\") => Ok(Duration::from_secs(magnitude)),\n\n Some(\"m\") => Ok(Duration::from_secs(magnitude * 60)),\n\n Some(\"h\") => Ok(Duration::from_secs(magnitude * 60 * 60)),\n\n Some(\"d\") => Ok(Duration::from_secs(magnitude * 60 * 60 * 24)),\n\n _ => Err(ParseError::NotADuration),\n\n }\n\n}\n\n\n", "file_path": "linkerd/app/src/env.rs", "rank": 44, "score": 245507.6779053674 }, { "content": "fn into_bytes(id: trace_context::Id, size: usize) -> Result<Vec<u8>, IdLengthError> {\n\n let bytes: Vec<u8> = id.into();\n\n if bytes.len() == size {\n\n Ok(bytes)\n\n } else {\n\n let actual_size = bytes.len();\n\n Err(IdLengthError {\n\n id: bytes,\n\n expected_size: size,\n\n actual_size,\n\n })\n\n }\n\n}\n\n\n", "file_path": "linkerd/app/core/src/spans.rs", "rank": 45, "score": 243743.22388533677 }, { "content": "#[derive(Clone)]\n\nstruct Target(SocketAddr, Conditional<Name>);\n\n\n", "file_path": "linkerd/proxy/transport/tests/tls_accept.rs", "rank": 46, "score": 243424.6240830373 }, { "content": " pub trait Tap: Clone {\n\n type TapRequestPayload: TapPayload;\n\n type TapResponse: 
TapResponse<TapPayload = Self::TapResponsePayload>;\n\n type TapResponsePayload: TapPayload;\n\n\n\n /// Returns `true` as l\n\n fn can_tap_more(&self) -> bool;\n\n\n\n /// Initiate a tap, if it matches.\n\n ///\n\n /// If the tap cannot be initialized, for instance because the tap has\n\n /// completed or been canceled, then `None` is returned.\n\n fn tap<B: HttpBody, I: super::Inspect>(\n\n &mut self,\n\n req: &http::Request<B>,\n\n inspect: &I,\n\n ) -> Option<(Self::TapRequestPayload, Self::TapResponse)>;\n\n }\n\n\n", "file_path": "linkerd/proxy/tap/src/lib.rs", "rank": 47, "score": 242509.11574323347 }, { "content": "fn write_zero() -> io::Error {\n\n io::Error::new(io::ErrorKind::WriteZero, \"write zero bytes\")\n\n}\n\n\n\nimpl CopyBuf {\n\n fn new() -> Self {\n\n CopyBuf {\n\n buf: Box::new([0; 4096]),\n\n read_pos: 0,\n\n write_pos: 0,\n\n }\n\n }\n\n\n\n fn reset(&mut self) {\n\n debug_assert_eq!(self.read_pos, self.write_pos);\n\n self.read_pos = 0;\n\n self.write_pos = 0;\n\n }\n\n}\n\n\n", "file_path": "linkerd/duplex/src/lib.rs", "rank": 48, "score": 241690.23081332218 }, { "content": "pub fn parse_identity_config<S: Strings>(\n\n strings: &S,\n\n) -> Result<Option<(ControlAddr, identity::certify::Config)>, EnvError> {\n\n let control = parse_control_addr(strings, ENV_IDENTITY_SVC_BASE);\n\n let ta = parse(strings, ENV_IDENTITY_TRUST_ANCHORS, |ref s| {\n\n identity::TrustAnchors::from_pem(s).ok_or(ParseError::InvalidTrustAnchors)\n\n });\n\n let dir = parse(strings, ENV_IDENTITY_DIR, |ref s| Ok(PathBuf::from(s)));\n\n let tok = parse(strings, ENV_IDENTITY_TOKEN_FILE, |ref s| {\n\n identity::TokenSource::if_nonempty_file(s.to_string()).map_err(|e| {\n\n error!(\"Could not read {}: {}\", ENV_IDENTITY_TOKEN_FILE, e);\n\n ParseError::InvalidTokenSource\n\n })\n\n });\n\n let li = parse(strings, ENV_IDENTITY_IDENTITY_LOCAL_NAME, parse_identity);\n\n let min_refresh = parse(strings, ENV_IDENTITY_MIN_REFRESH, parse_duration);\n\n let max_refresh = 
parse(strings, ENV_IDENTITY_MAX_REFRESH, parse_duration);\n\n\n\n let disabled = strings\n\n .get(ENV_IDENTITY_DISABLED)?\n", "file_path": "linkerd/app/src/env.rs", "rank": 49, "score": 240094.94980829037 }, { "content": "fn parse_socket_addr(s: &str) -> Result<SocketAddr, ParseError> {\n\n match parse_addr(s)? {\n\n Addr::Socket(a) => Ok(a),\n\n _ => {\n\n error!(\"Expected IP:PORT; found: {}\", s);\n\n Err(ParseError::HostIsNotAnIpAddress)\n\n }\n\n }\n\n}\n\n\n", "file_path": "linkerd/app/src/env.rs", "rank": 50, "score": 239331.7430363518 }, { "content": "/// Creates a response for an error.\n\npub trait Respond<Rsp, E = Error> {\n\n type Response;\n\n fn respond(&self, response: Result<Rsp, E>) -> Result<Self::Response, E>;\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct RespondLayer<N> {\n\n new_respond: N,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct RespondService<N, S> {\n\n new_respond: N,\n\n inner: S,\n\n}\n\n\n\n#[pin_project]\n\n#[derive(Debug)]\n\npub struct RespondFuture<R, F> {\n\n respond: R,\n", "file_path": "linkerd/error-respond/src/lib.rs", "rank": 51, "score": 238101.03437518352 }, { "content": "pub fn forward<P, I, C>(\n\n connect: C,\n\n) -> impl svc::NewService<\n\n Endpoint<P>,\n\n Service = impl tower::Service<I, Response = (), Error = Error, Future = impl Send + 'static>\n\n + Clone\n\n + Send\n\n + 'static,\n\n> + Clone\n\n + Send\n\n + 'static\n\nwhere\n\n P: Clone + Send + 'static,\n\n I: io::AsyncRead + io::AsyncWrite + io::PeerAddr + std::fmt::Debug + Unpin + Send + 'static,\n\n C: tower::Service<Endpoint<P>, Error = Error> + Unpin + Clone + Send + Sync + 'static,\n\n C::Response: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static,\n\n C::Future: Unpin + Send,\n\n{\n\n svc::stack(connect)\n\n .push_make_thunk()\n\n .push_on_response(svc::layer::mk(super::Forward::new))\n\n .instrument(|_: &Endpoint<P>| debug_span!(\"tcp.forward\"))\n\n .check_new_service::<Endpoint<P>, I>()\n\n 
.into_inner()\n\n}\n\n\n\n/// A connection policy that fails connections that target the outbound listener.\n", "file_path": "linkerd/app/outbound/src/tcp/connect.rs", "rank": 52, "score": 237158.73737860064 }, { "content": "pub fn with_name(name: &str) -> Profile {\n\n use std::convert::TryFrom;\n\n let name = dns::Name::try_from(name.as_bytes()).expect(\"non-ascii characters in DNS name! 😢\");\n\n Profile {\n\n name: Some(name),\n\n ..Default::default()\n\n }\n\n}\n", "file_path": "linkerd/app/test/src/profile.rs", "rank": 53, "score": 234951.61762012978 }, { "content": "// Generates a new span id, writes it to the request in the appropriate\n\n// propagation format and returns the generated span id.\n\npub fn increment_span_id<B>(request: &mut http::Request<B>, context: &TraceContext) -> Id {\n\n match context.propagation {\n\n Propagation::Grpc => increment_grpc_span_id(request, context),\n\n Propagation::Http => increment_http_span_id(request),\n\n }\n\n}\n\n\n", "file_path": "linkerd/trace-context/src/propagation.rs", "rank": 54, "score": 234629.6565292622 }, { "content": "pub trait Lazy<V>: Clone {\n\n fn value(&self) -> V;\n\n}\n\n\n\n/// Wraps an HTTP `Service` so that the `T -typed value` is cloned into\n\n/// each request's extensions.\n\n#[derive(Clone, Debug)]\n\npub struct Layer<L, V> {\n\n lazy: L,\n\n _marker: PhantomData<fn() -> V>,\n\n}\n\n\n\npub struct Insert<S, L, V> {\n\n inner: S,\n\n lazy: L,\n\n _marker: PhantomData<fn() -> V>,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct FnLazy<F>(F);\n\n\n\n#[derive(Clone, Debug)]\n\npub struct ValLazy<V>(V);\n\n\n", "file_path": "linkerd/proxy/http/src/insert.rs", "rank": 55, "score": 234429.22635669235 }, { "content": "pub fn parse_control_addr_disable_identity<S: Strings>(\n\n strings: &S,\n\n base: &str,\n\n) -> Result<Option<ControlAddr>, EnvError> {\n\n let a = parse(strings, &format!(\"{}_ADDR\", base), parse_addr)?;\n\n let identity = 
tls::Conditional::None(tls::ReasonForNoPeerName::LocalIdentityDisabled);\n\n Ok(a.map(|addr| ControlAddr { addr, identity }))\n\n}\n\n\n", "file_path": "linkerd/app/src/env.rs", "rank": 56, "score": 233459.0293939079 }, { "content": "pub fn client(addr: SocketAddr) -> Client {\n\n let api = pb::tap_client::TapClient::new(SyncSvc(client::http2(addr, \"localhost\")));\n\n Client { api }\n\n}\n\n\n", "file_path": "linkerd/app/integration/src/tap.rs", "rank": 57, "score": 232402.3561085531 }, { "content": "pub fn http2_tls<T: Into<String>>(addr: SocketAddr, auth: T, tls: TlsConfig) -> Client {\n\n Client::new(addr, auth.into(), Run::Http2, Some(tls))\n\n}\n\n\n", "file_path": "linkerd/app/integration/src/client.rs", "rank": 58, "score": 232122.934410053 }, { "content": "pub fn http1_tls<T: Into<String>>(addr: SocketAddr, auth: T, tls: TlsConfig) -> Client {\n\n Client::new(\n\n addr,\n\n auth.into(),\n\n Run::Http1 {\n\n absolute_uris: false,\n\n },\n\n Some(tls),\n\n )\n\n}\n\n\n", "file_path": "linkerd/app/integration/src/client.rs", "rank": 59, "score": 232122.934410053 }, { "content": "pub fn layer<C, B>(\n\n h2_settings: h2::Settings,\n\n) -> impl layer::Layer<C, Service = MakeClient<C, B>> + Copy {\n\n layer::mk(move |connect: C| MakeClient {\n\n connect,\n\n h2_settings,\n\n _marker: PhantomData,\n\n })\n\n}\n\n\n\nimpl From<crate::Version> for Settings {\n\n fn from(v: crate::Version) -> Self {\n\n match v {\n\n crate::Version::Http1 => Self::Http1,\n\n crate::Version::H2 => Self::H2,\n\n }\n\n }\n\n}\n\n\n\n// === impl MakeClient ===\n", "file_path": "linkerd/proxy/http/src/client.rs", "rank": 60, "score": 230485.14567600464 }, { "content": "pub fn layer<H>(header: H) -> Layer<H>\n\nwhere\n\n H: IntoHeaderName + Clone,\n\n{\n\n Layer { header }\n\n}\n\n\n\nimpl<H, M> tower::layer::Layer<M> for Layer<H>\n\nwhere\n\n H: IntoHeaderName + Clone,\n\n{\n\n type Service = MakeSvc<H, M>;\n\n\n\n fn layer(&self, inner: M) -> Self::Service {\n\n MakeSvc {\n\n header: 
self.header.clone(),\n\n inner,\n\n }\n\n }\n\n}\n", "file_path": "linkerd/proxy/http/src/header_from_target.rs", "rank": 61, "score": 229722.98494431243 }, { "content": "pub fn identity_from_header<B, K>(req: &http::Request<B>, header: K) -> Option<identity::Name>\n\nwhere\n\n K: AsHeaderName,\n\n{\n\n header_value_from_request(req, header, |s: &str| {\n\n identity::Name::from_hostname(s.as_bytes()).ok()\n\n })\n\n}\n\n\n", "file_path": "linkerd/proxy/http/src/lib.rs", "rank": 62, "score": 229707.8703011846 }, { "content": "/// Creates an error responder for a request.\n\npub trait NewRespond<Req, Rsp, E = Error> {\n\n type Response;\n\n type Respond: Respond<Rsp, E, Response = Self::Response>;\n\n\n\n fn new_respond(&self, req: &Req) -> Self::Respond;\n\n}\n\n\n", "file_path": "linkerd/error-respond/src/lib.rs", "rank": 63, "score": 229154.90397477587 }, { "content": "pub fn layer<F, V>(f: F) -> Layer<FnLazy<F>, V>\n\nwhere\n\n F: Fn() -> V + Clone,\n\n V: Send + Sync + 'static,\n\n{\n\n Layer::new(FnLazy(f))\n\n}\n\n\n\n// === impl Layer ===\n\n\n\nimpl<L, V> Layer<L, V>\n\nwhere\n\n L: Lazy<V>,\n\n V: Send + Sync + 'static,\n\n{\n\n pub fn new(lazy: L) -> Self {\n\n Self {\n\n lazy,\n\n _marker: PhantomData,\n\n }\n", "file_path": "linkerd/proxy/http/src/insert.rs", "rank": 64, "score": 228230.45325466496 }, { "content": "/// Implement on targets to determine if a service has a timeout.\n\npub trait HasTimeout {\n\n fn timeout(&self) -> Option<Duration>;\n\n}\n\n\n\n/// An HTTP-specific optional timeout layer.\n\n///\n\n/// The stack target must implement `HasTimeout`, and if a duration is\n\n/// specified for the target, a timeout is applied waiting for HTTP responses.\n\n///\n\n/// Timeout errors are translated into `http::Response`s with appropiate\n\n/// status codes.\n\n#[derive(Clone, Debug, Default)]\n\npub struct MakeTimeoutLayer(());\n\n\n\n#[derive(Clone, Debug)]\n\npub struct MakeTimeout<M> {\n\n inner: M,\n\n}\n\n\n\n#[pin_project]\n", "file_path": 
"linkerd/proxy/http/src/timeout.rs", "rank": 65, "score": 224994.96630356475 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Allow(NameMatch);\n\n\n\nimpl Config {\n\n pub fn build<O, P, S>(\n\n self,\n\n outbound: O,\n\n profiles: P,\n\n local_id: tls::PeerIdentity,\n\n ) -> impl svc::NewService<\n\n inbound::Target,\n\n Service = impl tower::Service<\n\n http::Request<http::boxed::Payload>,\n\n Response = http::Response<http::boxed::Payload>,\n\n Error = impl Into<Error>,\n\n Future = impl Send,\n\n > + Send\n\n + 'static,\n\n > + Clone\n\n + Send\n\n where\n", "file_path": "linkerd/app/gateway/src/config.rs", "rank": 66, "score": 224569.19778101868 }, { "content": "pub fn trace_subscriber() -> (Dispatch, app_core::trace::Handle) {\n\n use std::env;\n\n let log_level = env::var(\"LINKERD2_PROXY_LOG\")\n\n .or_else(|_| env::var(\"RUST_LOG\"))\n\n .unwrap_or_else(|_| DEFAULT_LOG.to_owned());\n\n env::set_var(\"RUST_LOG\", &log_level);\n\n env::set_var(\"LINKERD2_PROXY_LOG\", &log_level);\n\n let log_format = env::var(\"LINKERD2_PROXY_LOG_FORMAT\").unwrap_or_else(|_| \"PLAIN\".to_string());\n\n env::set_var(\"LINKERD2_PROXY_LOG_FORMAT\", &log_format);\n\n // This may fail, since the global log compat layer may have been\n\n // initialized by another test.\n\n let _ = app_core::trace::init_log_compat();\n\n app_core::trace::with_filter_and_format(&log_level, &log_format)\n\n}\n\n\n", "file_path": "linkerd/app/test/src/lib.rs", "rank": 67, "score": 223906.84492151378 }, { "content": "pub fn destination_add(addr: SocketAddr) -> pb::Update {\n\n destination_add_hinted(addr, Hint::Unknown)\n\n}\n\n\n", "file_path": "linkerd/app/integration/src/controller.rs", "rank": 68, "score": 223762.55943831993 }, { "content": "#[pin_project]\n\n#[derive(Debug)]\n\nstruct Inner<T> {\n\n buf: BytesMut,\n\n\n\n #[pin]\n\n io: T,\n\n}\n\n\n", "file_path": "linkerd/io/src/peek.rs", "rank": 69, "score": 220244.91921013437 }, { "content": "fn set_env(name: &str, cmd: &mut Command) {\n\n 
let value = match cmd.output() {\n\n Ok(output) => String::from_utf8(output.stdout).unwrap(),\n\n Err(err) => {\n\n println!(\"cargo:warning={}\", err);\n\n \"\".to_string()\n\n }\n\n };\n\n println!(\"cargo:rustc-env={}={}\", name, value);\n\n}\n\n\n", "file_path": "linkerd/app/core/build.rs", "rank": 70, "score": 219597.67879875976 }, { "content": "pub fn layer(metrics: HttpRouteRetry) -> NewRetryLayer<NewRetry> {\n\n NewRetryLayer::new(NewRetry::new(metrics))\n\n}\n\n\n", "file_path": "linkerd/app/core/src/retry.rs", "rank": 71, "score": 217605.18625332997 }, { "content": "pub trait Sensor {\n\n fn record_read(&mut self, sz: usize);\n\n fn record_write(&mut self, sz: usize);\n\n fn record_close(&mut self, eos: Option<Errno>);\n\n fn record_error<T>(&mut self, op: Poll<T>) -> Poll<T>;\n\n}\n\n\n\n/// Wraps a transport with telemetry.\n\n#[pin_project]\n\n#[derive(Debug)]\n\npub struct SensorIo<T, S> {\n\n #[pin]\n\n io: T,\n\n\n\n sensor: S,\n\n}\n\n\n\n// === impl SensorIo ===\n\n\n\nimpl<T, S: Sensor> SensorIo<T, S> {\n", "file_path": "linkerd/io/src/sensor.rs", "rank": 72, "score": 217000.7065891256 }, { "content": "pub fn layer<T, E, R, N>(\n\n resolve: R,\n\n watchdog: Duration,\n\n) -> impl layer::Layer<N, Service = Stack<E, R, N>> + Clone\n\nwhere\n\n T: Clone,\n\n for<'t> &'t T: Into<std::net::SocketAddr>,\n\n R: Resolve<Addr, Error = Error, Endpoint = Metadata> + Clone,\n\n R::Future: Send + 'static,\n\n R::Resolution: Send + 'static,\n\n EndpointFromMetadata: map_endpoint::MapEndpoint<T, Metadata, Out = E>,\n\n ResolveStack<R>: Resolve<T, Endpoint = E> + Clone,\n\n N: NewService<E>,\n\n{\n\n const ENDPOINT_BUFFER_CAPACITY: usize = 1_000;\n\n\n\n let resolve = new_resolve(resolve);\n\n layer::mk(move |new_endpoint| {\n\n let endpoints = discover::resolve(new_endpoint, resolve.clone());\n\n Buffer::new(ENDPOINT_BUFFER_CAPACITY, watchdog, endpoints)\n", "file_path": "linkerd/app/outbound/src/resolve.rs", "rank": 73, "score": 216510.4257071257 }, { 
"content": "pub fn new() -> (Registry, Layer, grpc::Server) {\n\n let registry = Registry::new();\n\n let layer = Layer::new(registry.clone());\n\n let server = grpc::Server::new(registry.clone());\n\n (registry, layer, server)\n\n}\n\n\n", "file_path": "linkerd/proxy/tap/src/lib.rs", "rank": 74, "score": 216510.42570712572 }, { "content": "pub trait LabelError<E> {\n\n type Labels: FmtLabels + Hash + Eq;\n\n\n\n fn label_error(&self, error: &E) -> Self::Labels;\n\n}\n\n\n\n/// Produces layers and reports results.\n\n#[derive(Debug)]\n\npub struct Registry<K: Hash + Eq> {\n\n errors: Arc<Mutex<IndexMap<K, Counter>>>,\n\n}\n\n\n\nimpl<K: Hash + Eq> Registry<K> {\n\n pub fn layer<L>(&self, label: L) -> RecordErrorLayer<L, K> {\n\n RecordErrorLayer::new(label, self.errors.clone())\n\n }\n\n}\n\n\n\nimpl<K: Hash + Eq> Default for Registry<K> {\n\n fn default() -> Self {\n", "file_path": "linkerd/error-metrics/src/lib.rs", "rank": 75, "score": 215779.2992033864 }, { "content": "/// Immediately and infalliby creates (usually) a Service.\n\npub trait NewService<T> {\n\n type Service;\n\n\n\n fn new_service(&mut self, target: T) -> Self::Service;\n\n}\n\n\n\n/// A Layer that modifies inner `MakeService`s to be exposd as a `NewService`.\n\n#[derive(Clone, Copy, Debug, Default)]\n\npub struct FromMakeServiceLayer(());\n\n\n\n/// Modifies inner `MakeService`s to be exposd as a `NewService`.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct FromMakeService<S> {\n\n make_service: S,\n\n}\n\n\n\n/// Modifies inner `MakeService`s to be exposd as a `NewService`.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct IntoMakeService<N> {\n\n new_service: N,\n", "file_path": "linkerd/stack/src/new_service.rs", "rank": 76, "score": 215622.7786818186 }, { "content": "fn stack_labels(name: &'static str) -> metrics::StackLabels {\n\n metrics::StackLabels::outbound(name)\n\n}\n\n\n", "file_path": "linkerd/app/outbound/src/lib.rs", "rank": 77, "score": 214960.52250324166 }, { "content": "fn 
stack_labels(name: &'static str) -> metrics::StackLabels {\n\n metrics::StackLabels::inbound(name)\n\n}\n\n\n\n// === impl SkipByPort ===\n\n\n\nimpl From<indexmap::IndexSet<u16>> for SkipByPort {\n\n fn from(ports: indexmap::IndexSet<u16>) -> Self {\n\n SkipByPort(ports.into())\n\n }\n\n}\n\n\n\nimpl svc::stack::Switch<listen::Addrs> for SkipByPort {\n\n fn use_primary(&self, t: &listen::Addrs) -> bool {\n\n !self.0.contains(&t.target_addr().port())\n\n }\n\n}\n", "file_path": "linkerd/app/inbound/src/lib.rs", "rank": 78, "score": 214960.52250324166 }, { "content": "trait CallBox: 'static {\n\n fn call_box(\n\n self: Box<Self>,\n\n sock: TcpStream,\n\n ) -> Pin<Box<dyn Future<Output = ()> + Send + 'static>>;\n\n}\n\n\n\nimpl<F> CallBox for F\n\nwhere\n\n F: FnOnce(TcpStream) -> Pin<Box<dyn Future<Output = ()> + Send + 'static>> + 'static,\n\n{\n\n fn call_box(\n\n self: Box<Self>,\n\n sock: TcpStream,\n\n ) -> Pin<Box<dyn Future<Output = ()> + Send + 'static>> {\n\n (*self)(sock)\n\n }\n\n}\n\n\n\npub struct TcpServer {\n", "file_path": "linkerd/app/integration/src/tcp.rs", "rank": 79, "score": 214763.5165749921 }, { "content": "pub fn server() -> TcpServer {\n\n TcpServer {\n\n accepts: VecDeque::new(),\n\n }\n\n}\n\n\n\npub struct TcpClient {\n\n addr: SocketAddr,\n\n}\n\n\n", "file_path": "linkerd/app/integration/src/tcp.rs", "rank": 81, "score": 211986.75825847778 }, { "content": "pub trait SpanSink {\n\n fn try_send(&mut self, span: Span) -> Result<(), Error>;\n\n}\n\n\n\nimpl SpanSink for mpsc::Sender<Span> {\n\n fn try_send(&mut self, span: Span) -> Result<(), Error> {\n\n self.try_send(span).map_err(Into::into)\n\n }\n\n}\n\n\n\n// === impl Id ===\n\n\n\nimpl Id {\n\n fn new_span_id<R: Rng>(rng: &mut R) -> Self {\n\n let mut bytes = vec![0; SPAN_ID_LEN];\n\n rng.fill(bytes.as_mut_slice());\n\n Self(bytes)\n\n }\n\n}\n\n\n", "file_path": "linkerd/trace-context/src/lib.rs", "rank": 82, "score": 211161.6144384746 }, { "content": "pub fn stack<S>(inner: S) -> 
Stack<S> {\n\n Stack(inner)\n\n}\n\n\n", "file_path": "linkerd/app/core/src/svc.rs", "rank": 83, "score": 210463.793862382 }, { "content": "pub fn destination_add_hinted(addr: SocketAddr, hint: Hint) -> pb::Update {\n\n destination_add_labeled(addr, hint, HashMap::new(), HashMap::new())\n\n}\n\n\n", "file_path": "linkerd/app/integration/src/controller.rs", "rank": 84, "score": 210304.05471080265 }, { "content": "#[pin_project]\n\nstruct Inner<S, R>\n\nwhere\n\n S: GrpcService<BoxBody>,\n\n R: Recover,\n\n{\n\n service: DestinationClient<S>,\n\n recover: R,\n\n #[pin]\n\n state: State<R::Backoff>,\n\n request: api::GetDestination,\n\n}\n\n\n", "file_path": "linkerd/service-profiles/src/client.rs", "rank": 85, "score": 210141.21899634882 }, { "content": "pub fn with_filter_and_format(\n\n filter: impl AsRef<str>,\n\n format: impl AsRef<str>,\n\n) -> (Dispatch, Handle) {\n\n let filter = filter.as_ref();\n\n\n\n // Set up the subscriber\n\n let filter = tracing_subscriber::EnvFilter::new(filter);\n\n let formatter = tracing_subscriber::fmt::format()\n\n .with_timer(Uptime::starting_now())\n\n .with_thread_ids(true);\n\n\n\n let (dispatch, level, tasks) = match format.as_ref().to_uppercase().as_ref() {\n\n \"JSON\" => {\n\n let (tasks, tasks_layer) = TasksLayer::<format::JsonFields>::new();\n\n let (filter, level) = tracing_subscriber::reload::Layer::new(filter);\n\n let dispatch = tracing_subscriber::registry()\n\n .with(tasks_layer)\n\n .with(\n\n tracing_subscriber::fmt::layer()\n", "file_path": "linkerd/tracing/src/lib.rs", "rank": 86, "score": 209949.97836707812 }, { "content": "pub fn identity() -> identity::Controller {\n\n identity::Controller::new()\n\n}\n\n\n\npub type Labels = HashMap<String, String>;\n\n\n\npub type DstReceiver = mpsc::UnboundedReceiver<Result<pb::Update, grpc::Status>>;\n\n\n\n#[derive(Clone, Debug)]\n\npub struct DstSender(mpsc::UnboundedSender<Result<pb::Update, grpc::Status>>);\n\n\n\npub type ProfileReceiver = 
mpsc::UnboundedReceiver<Result<pb::DestinationProfile, grpc::Status>>;\n\n\n\n#[derive(Clone, Debug)]\n\npub struct ProfileSender(mpsc::UnboundedSender<Result<pb::DestinationProfile, grpc::Status>>);\n\n\n\n#[derive(Clone, Debug, Default)]\n\npub struct Controller {\n\n expect_dst_calls: Arc<Mutex<VecDeque<Dst>>>,\n\n expect_profile_calls: Arc<Mutex<VecDeque<(pb::GetDestination, ProfileReceiver)>>>,\n", "file_path": "linkerd/app/integration/src/controller.rs", "rank": 87, "score": 208564.2047484388 }, { "content": "pub fn destination_add_tls(addr: SocketAddr, local_id: &str) -> pb::Update {\n\n pb::Update {\n\n update: Some(pb::update::Update::Add(pb::WeightedAddrSet {\n\n addrs: vec![pb::WeightedAddr {\n\n addr: Some(net::TcpAddress {\n\n ip: Some(ip_conv(addr.ip())),\n\n port: u32::from(addr.port()),\n\n }),\n\n tls_identity: Some(pb::TlsIdentity {\n\n strategy: Some(pb::tls_identity::Strategy::DnsLikeIdentity(\n\n pb::tls_identity::DnsLikeIdentity {\n\n name: local_id.into(),\n\n },\n\n )),\n\n }),\n\n ..Default::default()\n\n }],\n\n ..Default::default()\n\n })),\n\n }\n\n}\n\n\n", "file_path": "linkerd/app/integration/src/controller.rs", "rank": 88, "score": 208383.30289512995 }, { "content": "pub trait HasPeerIdentity {\n\n fn peer_identity(&self) -> PeerIdentity;\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]\n\npub enum ReasonForNoPeerName {\n\n /// The destination service didn't give us the identity, which is its way\n\n /// of telling us that we shouldn't do TLS for this endpoint.\n\n NotProvidedByServiceDiscovery,\n\n\n\n /// No TLS is wanted because the connection is a loopback connection which\n\n /// doesn't need or support TLS.\n\n Loopback,\n\n\n\n // The connection was insecure.\n\n NoTlsFromRemote,\n\n\n\n // Identity was not provided by the remote peer.\n\n NoPeerIdFromRemote,\n\n\n", "file_path": "linkerd/proxy/transport/src/tls/mod.rs", "rank": 89, "score": 208383.26610260297 }, { "content": "/// Make a `Layer` from a 
closure.\n\npub fn mk<F>(f: F) -> LayerFn<F> {\n\n LayerFn(f)\n\n}\n\n\n\n/// Make a `Layer` from a closure.\n\n#[derive(Clone, Copy, Debug)]\n\npub struct LayerFn<F>(F);\n\n\n\nimpl<F, S, Out> Layer<S> for LayerFn<F>\n\nwhere\n\n F: Fn(S) -> Out,\n\n{\n\n type Service = Out;\n\n\n\n fn layer(&self, inner: S) -> Self::Service {\n\n (self.0)(inner)\n\n }\n\n}\n", "file_path": "linkerd/stack/src/layer.rs", "rank": 90, "score": 207308.56201470122 }, { "content": "pub fn new<T: Into<String>>(addr: SocketAddr, auth: T) -> Client {\n\n http2(addr, auth.into())\n\n}\n\n\n", "file_path": "linkerd/app/integration/src/client.rs", "rank": 91, "score": 206131.90891108295 }, { "content": "pub fn http2<T: Into<String>>(addr: SocketAddr, auth: T) -> Client {\n\n Client::new(addr, auth.into(), Run::Http2, None)\n\n}\n\n\n", "file_path": "linkerd/app/integration/src/client.rs", "rank": 92, "score": 206131.90891108295 }, { "content": "pub fn http1<T: Into<String>>(addr: SocketAddr, auth: T) -> Client {\n\n Client::new(\n\n addr,\n\n auth.into(),\n\n Run::Http1 {\n\n absolute_uris: false,\n\n },\n\n None,\n\n )\n\n}\n\n\n", "file_path": "linkerd/app/integration/src/client.rs", "rank": 93, "score": 206131.90891108295 }, { "content": "pub trait CloneRequest<Req> {\n\n fn clone_request(req: &Req) -> Option<Req>;\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct NewRetry<C = ()> {\n\n metrics: HttpRouteRetry,\n\n _clone_request: PhantomData<C>,\n\n}\n\n\n\npub struct Retry<C = ()> {\n\n metrics: Handle,\n\n budget: Arc<Budget>,\n\n response_classes: profiles::http::ResponseClasses,\n\n _clone_request: PhantomData<C>,\n\n}\n\n\n\nimpl NewRetry {\n\n pub fn new(metrics: HttpRouteRetry) -> Self {\n\n Self {\n", "file_path": "linkerd/app/core/src/retry.rs", "rank": 94, "score": 204716.17988010118 }, { "content": "/// A strategy for creating spans based on a service's target.\n\npub trait GetSpan<T> {\n\n fn get_span(&self, target: &T) -> tracing::Span;\n\n}\n\n\n\n/// A middleware that 
instruments tracing for stacks.\n\n#[derive(Clone, Debug)]\n\npub struct InstrumentMakeLayer<G> {\n\n get_span: G,\n\n}\n\n\n\n/// Instruments a `MakeService` or `NewService` stack.\n\n#[derive(Clone, Debug)]\n\npub struct InstrumentMake<G, M> {\n\n get_span: G,\n\n make: M,\n\n}\n\n\n\n/// Instruments a service produced by `InstrumentMake`.\n\n#[pin_project]\n\n#[derive(Clone, Debug)]\n", "file_path": "linkerd/stack/tracing/src/lib.rs", "rank": 95, "score": 204531.89055021928 }, { "content": "/// Watches a destination's Profile.\n\npub trait GetProfile<T> {\n\n type Error: Into<Error>;\n\n type Future: Future<Output = Result<Option<Receiver>, Self::Error>>;\n\n\n\n fn get_profile(&mut self, target: T) -> Self::Future;\n\n\n\n fn into_service(self) -> GetProfileService<Self>\n\n where\n\n Self: Sized,\n\n {\n\n GetProfileService(self)\n\n }\n\n}\n\n\n\nimpl<T, S> GetProfile<T> for S\n\nwhere\n\n S: tower::Service<T, Response = Option<Receiver>> + Clone,\n\n S::Error: Into<Error>,\n\n{\n\n type Error = S::Error;\n", "file_path": "linkerd/service-profiles/src/lib.rs", "rank": 96, "score": 204378.16248728638 }, { "content": "pub fn client_with_auth<T: Into<String>>(addr: SocketAddr, auth: T) -> Client {\n\n let api = pb::tap_client::TapClient::new(SyncSvc(client::http2(addr, auth)));\n\n Client { api }\n\n}\n\n\n\npub struct Client {\n\n api: pb::tap_client::TapClient<SyncSvc>,\n\n}\n\n\n\nimpl Client {\n\n pub async fn observe(\n\n &mut self,\n\n req: ObserveBuilder,\n\n ) -> Pin<Box<dyn Stream<Item = Result<pb::TapEvent, tonic::Status>> + Send + Sync>> {\n\n let req = tonic::Request::new(req.0);\n\n match self.api.observe(req).await {\n\n Ok(rsp) => Box::pin(rsp.into_inner()),\n\n Err(e) => Box::pin(stream::once(async move { Err(e) })),\n\n }\n\n }\n", "file_path": "linkerd/app/integration/src/tap.rs", "rank": 97, "score": 204152.0665850868 }, { "content": "/// Returns a mock HTTP router that asserts that the HTTP router is never used.\n\npub fn no_http() -> NoHttp 
{\n\n NoHttp\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct NoHttp;\n\n\n\nimpl<T: std::fmt::Debug> svc::NewService<T> for NoHttp {\n\n type Service = Self;\n\n fn new_service(&mut self, target: T) -> Self::Service {\n\n panic!(\"the HTTP router should not be used in this test, but we tried to build a service for {:?}\", target)\n\n }\n\n}\n\n\n\nimpl svc::Service<http::Request<http::boxed::Payload>> for NoHttp {\n\n type Response = http::Response<http::boxed::Payload>;\n\n type Error = Error;\n\n type Future = futures::future::Ready<Result<Self::Response, Self::Error>>;\n\n fn poll_ready(\n\n &mut self,\n\n _: &mut std::task::Context<'_>,\n\n ) -> std::task::Poll<Result<(), Self::Error>> {\n\n panic!(\"http services should not be used in this test!\")\n\n }\n\n\n\n fn call(&mut self, _: http::Request<http::boxed::Payload>) -> Self::Future {\n\n panic!(\"http services should not be used in this test!\")\n\n }\n\n}\n", "file_path": "linkerd/app/test/src/service.rs", "rank": 98, "score": 203275.03309347818 }, { "content": "/// This sends `GET http://foo.com/ HTTP/1.1` instead of just `GET / HTTP/1.1`.\n\npub fn http1_absolute_uris<T: Into<String>>(addr: SocketAddr, auth: T) -> Client {\n\n Client::new(\n\n addr,\n\n auth.into(),\n\n Run::Http1 {\n\n absolute_uris: true,\n\n },\n\n None,\n\n )\n\n}\n\n\n", "file_path": "linkerd/app/integration/src/client.rs", "rank": 99, "score": 202236.57819818385 } ]
Rust
testing/jormungandr-integration-tests/src/jormungandr/mempool/fragments_dump.rs
rmourey26/jormungandr
e5d13409b931a58aee3ea72a5729a99f068b6043
use crate::common::jormungandr::{starter::Role, Starter}; use crate::common::{jormungandr::ConfigurationBuilder, startup}; use assert_fs::fixture::PathChild; use assert_fs::TempDir; use chain_impl_mockchain::chaintypes::ConsensusVersion; use jormungandr_lib::interfaces::InitialUTxO; use jormungandr_lib::interfaces::PersistentLog; use jormungandr_lib::interfaces::{BlockDate, Mempool}; use jormungandr_testing_utils::testing::fragments::FragmentExporter; use jormungandr_testing_utils::testing::fragments::PersistentLogViewer; use jormungandr_testing_utils::testing::{ node::time, FragmentGenerator, FragmentSender, FragmentSenderSetup, FragmentVerifier, MemPoolCheck, }; use jormungandr_testing_utils::testing::{AdversaryFragmentSender, AdversaryFragmentSenderSetup}; use jortestkit::prelude::Wait; use std::fs::metadata; use std::path::Path; use std::thread::sleep; use std::time::Duration; #[test] pub fn dump_send_correct_fragments() { let temp_dir = TempDir::new().unwrap(); let dump_folder = temp_dir.child("dump"); let persistent_log_path = temp_dir.child("persistent_log"); let receiver = startup::create_new_account_address(); let sender = startup::create_new_account_address(); let jormungandr = startup::start_bft( vec![&sender, &receiver], ConfigurationBuilder::new() .with_slots_per_epoch(60) .with_slot_duration(1) .with_explorer() .with_mempool(Mempool { pool_max_entries: 1_000_000usize.into(), log_max_entries: 1_000_000usize.into(), persistent_log: Some(PersistentLog { dir: persistent_log_path.path().to_path_buf(), }), }), ) .unwrap(); let fragment_sender = FragmentSender::new( jormungandr.genesis_block_hash(), jormungandr.fees(), FragmentSenderSetup::dump_into(dump_folder.path().to_path_buf()), ); let time_era = jormungandr.time_era(); let mut fragment_generator = FragmentGenerator::new( sender, receiver, jormungandr.to_remote(), time_era.slots_per_epoch(), 2, 2, fragment_sender, ); fragment_generator.prepare(BlockDate::new(1, 0)); let verifier = FragmentVerifier; 
time::wait_for_epoch(1, jormungandr.rest()); let wait = Wait::new(Duration::from_secs(1), 25); verifier .wait_until_all_processed(wait, &jormungandr) .unwrap(); let mem_checks: Vec<MemPoolCheck> = fragment_generator.send_all().unwrap(); verifier .wait_and_verify_all_are_in_block(Duration::from_secs(2), mem_checks, &jormungandr) .unwrap(); assert_all_fragment_are_persisted(dump_folder.path(), persistent_log_path.path()); } #[test] pub fn dump_send_invalid_fragments() { let temp_dir = TempDir::new().unwrap(); let dump_folder = temp_dir.child("dump"); let persistent_log_path = temp_dir.child("persistent_log"); let receiver = startup::create_new_account_address(); let mut sender = startup::create_new_account_address(); let jormungandr = startup::start_bft( vec![&sender, &receiver], ConfigurationBuilder::new() .with_slots_per_epoch(60) .with_slot_duration(1) .with_explorer() .with_mempool(Mempool { pool_max_entries: 1_000_000usize.into(), log_max_entries: 1_000_000usize.into(), persistent_log: Some(PersistentLog { dir: persistent_log_path.path().to_path_buf(), }), }), ) .unwrap(); let adversary_sender = AdversaryFragmentSender::new( jormungandr.genesis_block_hash(), jormungandr.fees(), AdversaryFragmentSenderSetup::dump_into(dump_folder.path().to_path_buf(), false), ); adversary_sender .send_transactions_with_invalid_counter(10, &mut sender, &receiver, &jormungandr) .unwrap(); sleep(Duration::from_secs(1)); assert_all_fragment_are_persisted(dump_folder.path(), persistent_log_path.path()); } fn assert_all_fragment_are_persisted<P: AsRef<Path>, R: AsRef<Path>>(left: P, right: R) { let exporter = FragmentExporter::new(left.as_ref().to_path_buf()).unwrap(); let fragments = exporter.read_as_bytes().unwrap(); let persistent_log_viewer = PersistentLogViewer::new(right.as_ref().to_path_buf()); assert_eq!(fragments.len(), persistent_log_viewer.get_all().len()); assert_eq!(fragments, persistent_log_viewer.get_bin()); } #[test] pub fn non_existing_folder() { let temp_dir = 
TempDir::new().unwrap(); let dump_folder = temp_dir.child("dump"); let persistent_log_path = dump_folder.child("persistent_log"); let receiver = startup::create_new_account_address(); let sender = startup::create_new_account_address(); let _jormungandr = startup::start_bft( vec![&sender, &receiver], ConfigurationBuilder::new() .with_slots_per_epoch(60) .with_slot_duration(1) .with_explorer() .with_mempool(Mempool { pool_max_entries: 1_000_000usize.into(), log_max_entries: 1_000_000usize.into(), persistent_log: Some(PersistentLog { dir: persistent_log_path.path().to_path_buf(), }), }), ) .unwrap(); let path = persistent_log_path.path(); assert!(path.exists()); assert!(metadata(path).unwrap().is_dir()); assert!(std::fs::read_dir(&path).unwrap().count() > 0); } #[test] pub fn invalid_folder() { let temp_dir = TempDir::new().unwrap(); let dump_folder = temp_dir.child("dump"); let persistent_log_path = dump_folder.child("/dev/null/foo::///;log"); let config = ConfigurationBuilder::new() .with_mempool(Mempool { pool_max_entries: 1_000_000usize.into(), log_max_entries: 1_000_000usize.into(), persistent_log: Some(PersistentLog { dir: persistent_log_path.path().to_path_buf(), }), }) .build(&temp_dir); Starter::new() .config(config) .start_fail("failed to open persistent log file"); } #[test] pub fn fragment_which_reached_mempool_should_be_persisted() { let temp_dir = TempDir::new().unwrap(); let dump_folder = temp_dir.child("dump_folder"); let persistent_log_path = temp_dir.child("persistent_log"); let receiver = startup::create_new_account_address(); let mut sender = startup::create_new_account_address(); let jormungandr = startup::start_bft( vec![&sender, &receiver], ConfigurationBuilder::new() .with_slots_per_epoch(60) .with_slot_duration(3) .with_explorer() .with_mempool(Mempool { pool_max_entries: 1usize.into(), log_max_entries: 1000usize.into(), persistent_log: Some(PersistentLog { dir: persistent_log_path.path().to_path_buf(), }), }), ) .unwrap(); let 
adversary_sender = AdversaryFragmentSender::new( jormungandr.genesis_block_hash(), jormungandr.fees(), AdversaryFragmentSenderSetup::dump_into(dump_folder.path().to_path_buf(), false), ); adversary_sender .send_transactions_with_invalid_counter(10, &mut sender, &receiver, &jormungandr) .unwrap(); sleep(Duration::from_secs(1)); assert_all_fragment_are_persisted(dump_folder.path(), persistent_log_path.path()); } #[test] pub fn fragment_which_is_not_in_fragment_log_should_be_persisted() { let temp_dir = TempDir::new().unwrap(); let dump_folder = temp_dir.child("dump_folder"); let persistent_log_path = temp_dir.child("persistent_log"); let receiver = startup::create_new_account_address(); let mut sender = startup::create_new_account_address(); let jormungandr = startup::start_bft( vec![&sender, &receiver], ConfigurationBuilder::new() .with_slots_per_epoch(60) .with_slot_duration(3) .with_explorer() .with_mempool(Mempool { pool_max_entries: 1000usize.into(), log_max_entries: 1usize.into(), persistent_log: Some(PersistentLog { dir: persistent_log_path.path().to_path_buf(), }), }), ) .unwrap(); let adversary_sender = AdversaryFragmentSender::new( jormungandr.genesis_block_hash(), jormungandr.fees(), AdversaryFragmentSenderSetup::dump_into(dump_folder.path().to_path_buf(), false), ); adversary_sender .send_transactions_with_invalid_counter(10, &mut sender, &receiver, &jormungandr) .unwrap(); sleep(Duration::from_secs(1)); assert_all_fragment_are_persisted(dump_folder.path(), persistent_log_path.path()); } #[test] pub fn pending_fragment_should_be_persisted() { let temp_dir = TempDir::new().unwrap(); let dump_folder = temp_dir.child("dump_folder"); let persistent_log_path = temp_dir.child("persistent_log"); let receiver = startup::create_new_account_address(); let mut sender = startup::create_new_account_address(); let jormungandr = startup::start_bft( vec![&sender, &receiver], ConfigurationBuilder::new() .with_slots_per_epoch(5) .with_slot_duration(60) .with_explorer() 
.with_mempool(Mempool { pool_max_entries: 10usize.into(), log_max_entries: 10usize.into(), persistent_log: Some(PersistentLog { dir: persistent_log_path.path().to_path_buf(), }), }), ) .unwrap(); let fragment_sender = FragmentSender::new( jormungandr.genesis_block_hash(), jormungandr.fees(), FragmentSenderSetup::dump_into(dump_folder.path().to_path_buf()), ); fragment_sender .send_transaction(&mut sender, &receiver, &jormungandr, 1.into()) .unwrap(); sleep(Duration::from_secs(1)); let persistent_log_viewer = PersistentLogViewer::new(persistent_log_path.path().to_path_buf()); assert_eq!(1, persistent_log_viewer.get_all().len()); let fragment_logs = jormungandr.rest().fragment_logs().unwrap(); assert_eq!(fragment_logs.len(), 1); assert!(fragment_logs.values().next().unwrap().is_pending()); } #[test] pub fn node_should_pickup_log_after_restart() { let temp_dir = TempDir::new().unwrap(); let dump_folder = temp_dir.child("dump_folder"); let persistent_log_path = temp_dir.child("persistent_log"); let receiver = startup::create_new_account_address(); let mut sender = startup::create_new_account_address(); let config = ConfigurationBuilder::new() .with_slots_per_epoch(60) .with_slot_duration(3) .with_explorer() .with_mempool(Mempool { pool_max_entries: 1usize.into(), log_max_entries: 1000usize.into(), persistent_log: Some(PersistentLog { dir: persistent_log_path.path().to_path_buf(), }), }) .with_block0_consensus(ConsensusVersion::Bft) .with_funds(vec![ InitialUTxO { address: sender.address(), value: 1_000_000.into(), }, InitialUTxO { address: receiver.address(), value: 1_000_000.into(), }, ]) .build(&temp_dir); let jormungandr = Starter::new() .config(config.clone()) .role(Role::Leader) .start() .unwrap(); let adversary_sender = AdversaryFragmentSender::new( jormungandr.genesis_block_hash(), jormungandr.fees(), AdversaryFragmentSenderSetup::dump_into(dump_folder.path().to_path_buf(), false), ); adversary_sender .send_transactions_with_invalid_counter(10, &mut sender, 
&receiver, &jormungandr) .unwrap(); sleep(Duration::from_secs(1)); jormungandr.stop(); let jormungandr = Starter::new() .temp_dir(temp_dir) .config(config) .role(Role::Leader) .start() .unwrap(); let adversary_sender = AdversaryFragmentSender::new( jormungandr.genesis_block_hash(), jormungandr.fees(), AdversaryFragmentSenderSetup::dump_into(dump_folder.path().to_path_buf(), false), ); adversary_sender .send_transactions_with_invalid_counter(10, &mut sender, &receiver, &jormungandr) .unwrap(); sleep(Duration::from_secs(1)); let persistent_log_viewer = PersistentLogViewer::new(persistent_log_path.path().to_path_buf()); assert_eq!(20, persistent_log_viewer.get_all().len()); }
use crate::common::jormungandr::{starter::Role, Starter}; use crate::common::{jormungandr::ConfigurationBuilder, startup}; use assert_fs::fixture::PathChild; use assert_fs::TempDir; use chain_impl_mockchain::chaintypes::ConsensusVersion; use jormungandr_lib::interfaces::InitialUTxO; use jormungandr_lib::interfaces::PersistentLog; use jormungandr_lib::interfaces::{BlockDate, Mempool}; use jormungandr_testing_utils::testing::fragments::FragmentExporter; use jormungandr_testing_utils::testing::fragments::PersistentLogViewer; use jormungandr_testing_utils::testing::{ node::time, FragmentGenerator, FragmentSender, FragmentSenderSetup, FragmentVerifier, MemPoolCheck, }; use jormungandr_testing_utils::testing::{AdversaryFragmentSender, AdversaryFragmentSenderSetup}; use jortestkit::prelude::Wait; use std::fs::metadata; use std::path::Path; use std::thread::sleep; use std::time::Duration; #[test] pub fn dump_send_correct_fragments() { let temp_dir = TempDir::new().unwrap(); let dump_folder = temp_dir.child("dump"); let persistent_log_path = temp_dir.child("persistent_log"); let receiver = startup::create_new_account_address(); let sender = startup::create_new_account_address(); let jormungandr = startup::start_bft( vec![&sender, &receiver], ConfigurationBuilder::new() .with_slots_per_epoch(60) .with_slot_duration(1) .with_explorer() .with_mempool(Mempool { pool_max_entries: 1_000_000usize.into(), log_max_entries: 1_000_000usize.into(), persistent_log: Some(PersistentLog { dir: persistent_log_path.path().to_path_buf(), }), }), ) .unwrap(); let fragment_sender = FragmentSender::new( jormungandr.genesis_block_hash(), jormungandr.fees(), FragmentSenderSetup::dump_into(dump_folder.path().to_path_buf()), ); let time_era = jormungandr.time_era(); let mut fragment_generator = FragmentGenerator::new( sender, receiver, jormungandr.to_remote(), time_era.slots_per_epoch(), 2, 2, fragment_sender, ); fragment_generator.prepare(BlockDate::new(1, 0)); let verifier = FragmentVerifier; 
time::wait_for_epoch(1, jormungandr.rest()); let wait = Wait::new(Duration::from_secs(1), 25); verifier .wait_until_all_processed(wait, &jormungandr) .unwrap(); let mem_checks: Vec<MemPoolCheck> = fragment_generator.send_all().unwrap(); verifier .wait_and_verify_all_are_in_block(Duration::from_secs(2), mem_checks, &jormungandr) .unwrap(); assert_all_fragment_are_persisted(dump_folder.path(), persistent_log_path.path()); } #[test] pub fn dump_send_invalid_fragments() { let temp_dir = TempDir::new().unwrap(); let dump_folder = temp_dir.child("dump"); let persistent_log_path = temp_dir.child("persistent_log"); let receiver = startup::create_new_account_address(); let mut sender = startup::create_new_account_address(); let jormungandr = startup::start_bft( vec![&sender, &receiver], ConfigurationBuilder::new() .with_slots_per_epoch(60) .with_slot_duration(1) .with_explorer() .with_mempool(Mempool { pool_max_entries: 1_000_000usize.into(), log_max_entries: 1_000_000usize.into(), persistent_log: Some(PersistentLog { dir: persistent_log_path.path().to_path_buf(), }), }), ) .unwrap(); let adversary_sender = AdversaryFragmentSender::new( jormungandr.genesis_block_hash(), jormungandr.fees(), AdversaryFragmentSenderSetup::dump_into(dump_folder.path().to_path_buf(), false), ); adversary_sender .send_transactions_with_invalid_counter(10, &mut sender, &receiver, &jormungandr) .unwrap(); sleep(Duration::from_secs(1)); assert_all_fragment_are_persisted(dump_folder.path(), persistent_log_path.path()); } fn assert_all_fragment_are_persisted<P: AsRef<Path>, R: AsRef<Path>>(left: P, right: R) { let exporter = FragmentExporter::new(left.as_ref().to_path_buf()).unwrap(); let fragments = exporter.read_as_bytes().unwrap(); let persistent_log_viewer = PersistentLogViewer::new(right.as_ref().to_path_buf()); assert_eq!(fragments.len(), persistent_log_viewer.get_all().len()); assert_eq!(fragments, persistent_log_viewer.get_bin()); } #[test] pub fn non_existing_folder() { let temp_dir = 
TempDir::new().unwrap(); let dump_folder = temp_dir.child("dump"); let persistent_log_path = dump_folder.child("persistent_log"); let receiver = startup::create_new_account_address(); let sender = startup::create_new_account_address(); let _jormungandr = startup::start_bft( vec![&sender, &receiver], ConfigurationBuilder::new() .with_slots_per_epoch(60) .with_slot_duration(1) .with_explorer() .with_mempool(Mempool { pool_max_entries: 1_000_000usize.into(), log_max_entries: 1_000_000usize.into(), persistent_log: Some(PersistentLog { dir: persistent_log_path.path().to_path_buf(), }), }), ) .unwrap(); let path = persistent_log_path.path(); assert!(path.exists()); assert!(metadata(path).unwrap().is_dir()); assert!(std::fs::read_dir(&path).unwrap().count() > 0); } #[test] pub fn invalid_folder() { let temp_dir = TempDir::new().unwrap(); let dump_folder = temp_dir.child("dump"); let persistent_log_path = dump_folder.child("/dev/null/foo::///;log"); let config = ConfigurationBuilder::new() .with_mempool(Mempool { pool_max_entries: 1_000_000usize.into(), log_max_entries: 1_000_000usize.into(), persistent_log: Some(PersistentLog { dir: persistent_log_path.path().to_path_buf(), }), }) .build(&temp_dir); Starter::new() .config(config) .start_fail("failed to open persistent log file"); } #[test] pub fn fragment_which_reached_mempool_should_be_persisted() { let temp_dir = TempDir::new().unwrap(); let dump_folder = temp_dir.child("dump_folder"); let persistent_log_path = temp_dir.child("persistent_log"); let receiver = startup::create_new_account_address(); let mut sender = startup::create_new_account_address(); let jormungandr = startup::start_bft( vec![&sender, &receiver], ConfigurationBuilder::new() .with_slots_per_epoch(60) .with_slot_duration(3) .with_explorer() .with_mempool(Mempool { pool_max_entries: 1usize.into(), log_max_entries: 1000usize.into(), persistent_log: Some(PersistentLog { dir: persistent_log_path.path().to_
#[test] pub fn fragment_which_is_not_in_fragment_log_should_be_persisted() { let temp_dir = TempDir::new().unwrap(); let dump_folder = temp_dir.child("dump_folder"); let persistent_log_path = temp_dir.child("persistent_log"); let receiver = startup::create_new_account_address(); let mut sender = startup::create_new_account_address(); let jormungandr = startup::start_bft( vec![&sender, &receiver], ConfigurationBuilder::new() .with_slots_per_epoch(60) .with_slot_duration(3) .with_explorer() .with_mempool(Mempool { pool_max_entries: 1000usize.into(), log_max_entries: 1usize.into(), persistent_log: Some(PersistentLog { dir: persistent_log_path.path().to_path_buf(), }), }), ) .unwrap(); let adversary_sender = AdversaryFragmentSender::new( jormungandr.genesis_block_hash(), jormungandr.fees(), AdversaryFragmentSenderSetup::dump_into(dump_folder.path().to_path_buf(), false), ); adversary_sender .send_transactions_with_invalid_counter(10, &mut sender, &receiver, &jormungandr) .unwrap(); sleep(Duration::from_secs(1)); assert_all_fragment_are_persisted(dump_folder.path(), persistent_log_path.path()); } #[test] pub fn pending_fragment_should_be_persisted() { let temp_dir = TempDir::new().unwrap(); let dump_folder = temp_dir.child("dump_folder"); let persistent_log_path = temp_dir.child("persistent_log"); let receiver = startup::create_new_account_address(); let mut sender = startup::create_new_account_address(); let jormungandr = startup::start_bft( vec![&sender, &receiver], ConfigurationBuilder::new() .with_slots_per_epoch(5) .with_slot_duration(60) .with_explorer() .with_mempool(Mempool { pool_max_entries: 10usize.into(), log_max_entries: 10usize.into(), persistent_log: Some(PersistentLog { dir: persistent_log_path.path().to_path_buf(), }), }), ) .unwrap(); let fragment_sender = FragmentSender::new( jormungandr.genesis_block_hash(), jormungandr.fees(), FragmentSenderSetup::dump_into(dump_folder.path().to_path_buf()), ); fragment_sender .send_transaction(&mut sender, 
&receiver, &jormungandr, 1.into()) .unwrap(); sleep(Duration::from_secs(1)); let persistent_log_viewer = PersistentLogViewer::new(persistent_log_path.path().to_path_buf()); assert_eq!(1, persistent_log_viewer.get_all().len()); let fragment_logs = jormungandr.rest().fragment_logs().unwrap(); assert_eq!(fragment_logs.len(), 1); assert!(fragment_logs.values().next().unwrap().is_pending()); } #[test] pub fn node_should_pickup_log_after_restart() { let temp_dir = TempDir::new().unwrap(); let dump_folder = temp_dir.child("dump_folder"); let persistent_log_path = temp_dir.child("persistent_log"); let receiver = startup::create_new_account_address(); let mut sender = startup::create_new_account_address(); let config = ConfigurationBuilder::new() .with_slots_per_epoch(60) .with_slot_duration(3) .with_explorer() .with_mempool(Mempool { pool_max_entries: 1usize.into(), log_max_entries: 1000usize.into(), persistent_log: Some(PersistentLog { dir: persistent_log_path.path().to_path_buf(), }), }) .with_block0_consensus(ConsensusVersion::Bft) .with_funds(vec![ InitialUTxO { address: sender.address(), value: 1_000_000.into(), }, InitialUTxO { address: receiver.address(), value: 1_000_000.into(), }, ]) .build(&temp_dir); let jormungandr = Starter::new() .config(config.clone()) .role(Role::Leader) .start() .unwrap(); let adversary_sender = AdversaryFragmentSender::new( jormungandr.genesis_block_hash(), jormungandr.fees(), AdversaryFragmentSenderSetup::dump_into(dump_folder.path().to_path_buf(), false), ); adversary_sender .send_transactions_with_invalid_counter(10, &mut sender, &receiver, &jormungandr) .unwrap(); sleep(Duration::from_secs(1)); jormungandr.stop(); let jormungandr = Starter::new() .temp_dir(temp_dir) .config(config) .role(Role::Leader) .start() .unwrap(); let adversary_sender = AdversaryFragmentSender::new( jormungandr.genesis_block_hash(), jormungandr.fees(), AdversaryFragmentSenderSetup::dump_into(dump_folder.path().to_path_buf(), false), ); adversary_sender 
.send_transactions_with_invalid_counter(10, &mut sender, &receiver, &jormungandr) .unwrap(); sleep(Duration::from_secs(1)); let persistent_log_viewer = PersistentLogViewer::new(persistent_log_path.path().to_path_buf()); assert_eq!(20, persistent_log_viewer.get_all().len()); }
path_buf(), }), }), ) .unwrap(); let adversary_sender = AdversaryFragmentSender::new( jormungandr.genesis_block_hash(), jormungandr.fees(), AdversaryFragmentSenderSetup::dump_into(dump_folder.path().to_path_buf(), false), ); adversary_sender .send_transactions_with_invalid_counter(10, &mut sender, &receiver, &jormungandr) .unwrap(); sleep(Duration::from_secs(1)); assert_all_fragment_are_persisted(dump_folder.path(), persistent_log_path.path()); }
function_block-function_prefixed
[ { "content": "pub fn read_persistent_fragment_logs_from_file_path(\n\n entries: impl Iterator<Item = PathBuf>,\n\n) -> io::Result<impl Iterator<Item = Result<PersistentFragmentLog, DeserializeError>>> {\n\n let mut handles = Vec::new();\n\n for entry in entries {\n\n handles.push(FileFragments::from_path(entry)?);\n\n }\n\n Ok(handles.into_iter().flatten())\n\n}\n\n\n", "file_path": "jormungandr-lib/src/interfaces/fragment_log_persistent.rs", "rank": 1, "score": 434398.22217350954 }, { "content": "pub fn list_persistent_fragment_log_files_from_folder_path(\n\n folder: &Path,\n\n) -> io::Result<impl Iterator<Item = PathBuf>> {\n\n let mut entries: Vec<_> = fs::read_dir(folder)?\n\n .filter_map(|entry| match entry {\n\n Ok(entry) => Some(folder.join(entry.path())),\n\n _ => None,\n\n })\n\n .collect();\n\n entries.sort();\n\n Ok(entries.into_iter())\n\n}\n\n\n", "file_path": "jormungandr-lib/src/interfaces/fragment_log_persistent.rs", "rank": 3, "score": 428008.3045393358 }, { "content": "pub fn load_persistent_fragments_logs_from_folder_path(\n\n folder: &Path,\n\n) -> io::Result<impl Iterator<Item = Result<PersistentFragmentLog, DeserializeError>>> {\n\n read_persistent_fragment_logs_from_file_path(\n\n list_persistent_fragment_log_files_from_folder_path(folder)?,\n\n )\n\n}\n", "file_path": "jormungandr-lib/src/interfaces/fragment_log_persistent.rs", "rank": 5, "score": 391484.25606439926 }, { "content": "pub fn compare_schema<P: AsRef<Path>>(actual_schema_path: P) {\n\n let expected_schema_path =\n\n PathBuf::from_str(\"./jormungandr-testing-utils/resources/explorer/graphql/schema.graphql\")\n\n .unwrap();\n\n\n\n if !file::have_the_same_content(actual_schema_path.as_ref(), &expected_schema_path) {\n\n file::copy_file(actual_schema_path.as_ref(), &expected_schema_path, true);\n\n println!(\"discrepancies detected, already replaced file with new content. 
Please commit to update schema\");\n\n }\n\n}\n", "file_path": "testing/jormungandr-testing-utils/src/testing/node/explorer/mod.rs", "rank": 7, "score": 387547.34212056146 }, { "content": "#[test]\n\npub fn test_mempool_log_max_entries_only_one_fragment() {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let receiver = startup::create_new_account_address();\n\n let mut sender = startup::create_new_account_address();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_funds(vec![\n\n InitialUTxO {\n\n address: sender.address(),\n\n value: 100.into(),\n\n },\n\n InitialUTxO {\n\n address: receiver.address(),\n\n value: 100.into(),\n\n },\n\n ])\n\n .with_slot_duration(1)\n\n .with_mempool(Mempool {\n\n pool_max_entries: 1.into(),\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/bft/mempool/v0.rs", "rank": 8, "score": 374174.5480228802 }, { "content": "#[test]\n\npub fn test_mempool_log_max_entries_only_one_fragment() {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let receiver = startup::create_new_account_address();\n\n let mut sender = startup::create_new_account_address();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_funds(vec![\n\n InitialUTxO {\n\n address: sender.address(),\n\n value: 100.into(),\n\n },\n\n InitialUTxO {\n\n address: receiver.address(),\n\n value: 100.into(),\n\n },\n\n ])\n\n .with_slot_duration(1)\n\n .with_mempool(Mempool {\n\n pool_max_entries: 1.into(),\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/bft/mempool/v1.rs", "rank": 9, "score": 374174.5480228802 }, { "content": "pub fn write_block0_config(block0_config: &Block0Configuration, output_file: &ChildPath) {\n\n let content = serde_yaml::to_string(&block0_config).unwrap();\n\n output_file.write_str(&content).unwrap();\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/common/startup/mod.rs", "rank": 11, "score": 368720.2172116365 }, { "content": "/// open the given file path as a writable 
stream, or stdout if no path\n\n/// provided\n\npub fn open_file_write<P: AsRef<Path>>(path: &Option<P>) -> Result<impl Write, Error> {\n\n match path {\n\n Some(path) => {\n\n let writer = std::fs::OpenOptions::new()\n\n .create(true)\n\n .write(true)\n\n .read(false)\n\n .append(false)\n\n .truncate(true)\n\n .open(path)?;\n\n Ok(Box::new(writer) as Box<dyn Write>)\n\n }\n\n None => Ok(Box::new(stdout()) as Box<dyn Write>),\n\n }\n\n}\n\n\n", "file_path": "jcli/src/jcli_lib/utils/io.rs", "rank": 12, "score": 367036.28698326607 }, { "content": "pub fn assert_accepted_rejected(\n\n accepted: Vec<FragmentId>,\n\n rejected: Vec<(FragmentId, FragmentRejectionReason)>,\n\n result: Result<Vec<MemPoolCheck>, RestError>,\n\n) -> Vec<MemPoolCheck> {\n\n match result.err().unwrap() {\n\n RestError::NonSuccessErrorCode {\n\n checks,\n\n status,\n\n response,\n\n } => {\n\n let summary: FragmentsProcessingSummary = serde_json::from_str(&response).unwrap();\n\n if !rejected.is_empty() {\n\n assert_eq!(status, reqwest::StatusCode::BAD_REQUEST);\n\n }\n\n assert_eq!(summary.accepted, accepted);\n\n assert_eq!(\n\n summary\n\n .rejected\n\n .iter()\n", "file_path": "testing/jormungandr-testing-utils/src/testing/node/verifier/fragment_log.rs", "rank": 13, "score": 365880.16984342644 }, { "content": "/// open the given file path as a readable stream, or stdin if no path\n\n/// provided\n\npub fn open_file_read<P: AsRef<Path>>(path: &Option<P>) -> Result<impl BufRead, Error> {\n\n match path {\n\n Some(path) => {\n\n let reader = std::fs::OpenOptions::new()\n\n .create(false)\n\n .write(false)\n\n .read(true)\n\n .append(false)\n\n .open(path)?;\n\n Ok(Box::new(BufReader::new(reader)) as Box<dyn BufRead>)\n\n }\n\n None => Ok(Box::new(BufReader::new(stdin())) as Box<dyn BufRead>),\n\n }\n\n}\n\n\n", "file_path": "jcli/src/jcli_lib/utils/io.rs", "rank": 14, "score": 362983.3097996581 }, { "content": "pub fn wait_for_epoch(target_epoch_id: u32, mut rest: JormungandrRest) {\n\n 
rest.enable_logger();\n\n\n\n while get_current_date(&mut rest).epoch() < target_epoch_id {\n\n std::thread::sleep(std::time::Duration::from_secs(1));\n\n }\n\n}\n\n\n", "file_path": "testing/jormungandr-testing-utils/src/testing/node/time.rs", "rank": 15, "score": 358674.82364102895 }, { "content": "pub fn wait_for_date(target_block_date: BlockDate, mut rest: JormungandrRest) {\n\n rest.enable_logger();\n\n\n\n while get_current_date(&mut rest) < target_block_date {\n\n std::thread::sleep(std::time::Duration::from_secs(1));\n\n }\n\n}\n\n\n", "file_path": "testing/jormungandr-testing-utils/src/testing/node/time.rs", "rank": 16, "score": 354798.0333884564 }, { "content": "#[test]\n\npub fn persistent_log_load_test() {\n\n let mut faucet = startup::create_new_account_address();\n\n\n\n let temp_dir = TempDir::new().unwrap();\n\n let persistent_log_path = temp_dir.child(\"fragment_dump\");\n\n\n\n let jormungandr = startup::start_bft(\n\n vec![&faucet],\n\n ConfigurationBuilder::new()\n\n .with_slots_per_epoch(60)\n\n .with_slot_duration(1)\n\n .with_explorer()\n\n .with_mempool(Mempool {\n\n pool_max_entries: 1_000_000usize.into(),\n\n log_max_entries: 1_000_000usize.into(),\n\n persistent_log: Some(PersistentLog {\n\n dir: persistent_log_path.path().to_path_buf(),\n\n }),\n\n }),\n\n )\n", "file_path": "testing/jormungandr-integration-tests/src/non_functional/persistent_log.rs", "rank": 17, "score": 350932.17162349494 }, { "content": "#[test]\n\npub fn test_mempool_log_max_entries_equals_0() {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let receiver = startup::create_new_account_address();\n\n let mut sender = startup::create_new_account_address();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_funds(vec![\n\n InitialUTxO {\n\n address: sender.address(),\n\n value: 100.into(),\n\n },\n\n InitialUTxO {\n\n address: receiver.address(),\n\n value: 100.into(),\n\n },\n\n ])\n\n .with_slot_duration(1)\n\n .with_mempool(Mempool {\n\n pool_max_entries: 
0.into(),\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/bft/mempool/v1.rs", "rank": 20, "score": 338388.2897899514 }, { "content": "#[test]\n\npub fn test_mempool_log_max_entries_equals_0() {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let receiver = startup::create_new_account_address();\n\n let mut sender = startup::create_new_account_address();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_funds(vec![\n\n InitialUTxO {\n\n address: sender.address(),\n\n value: 100.into(),\n\n },\n\n InitialUTxO {\n\n address: receiver.address(),\n\n value: 100.into(),\n\n },\n\n ])\n\n .with_slot_duration(1)\n\n .with_mempool(Mempool {\n\n pool_max_entries: 0.into(),\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/bft/mempool/v0.rs", "rank": 21, "score": 338388.2897899514 }, { "content": "pub fn assert_bad_request(result: Result<Vec<MemPoolCheck>, RestError>) -> Vec<MemPoolCheck> {\n\n match result.err().unwrap() {\n\n RestError::NonSuccessErrorCode { status, checks, .. 
} => {\n\n assert_eq!(status, reqwest::StatusCode::BAD_REQUEST);\n\n checks\n\n }\n\n _ => panic!(\"unexcepted error\"),\n\n }\n\n}\n", "file_path": "testing/jormungandr-testing-utils/src/testing/node/verifier/fragment_log.rs", "rank": 24, "score": 330995.1807036523 }, { "content": "pub fn read_vote_plan_shares_from_file<P: AsRef<Path>>(\n\n share_path: Option<P>,\n\n proposals: usize,\n\n threshold: Option<usize>,\n\n) -> Result<VotePlanDecryptShares, SharesError> {\n\n let vote_plan_shares: VotePlanDecryptShares =\n\n serde_json::from_reader(io::open_file_read(&share_path)?)?;\n\n if vote_plan_shares.0.len() != proposals || vote_plan_shares.0[0].len() < threshold.unwrap_or(1)\n\n {\n\n return Err(SharesError::InsufficientShares);\n\n }\n\n\n\n Ok(vote_plan_shares)\n\n}\n", "file_path": "jcli/src/jcli_lib/utils/vote.rs", "rank": 25, "score": 330155.6378884009 }, { "content": "pub fn get_jormungandr_bin(release: &Release, temp_dir: &impl PathChild) -> PathBuf {\n\n let asset = RELEASES\n\n .get_asset_for_current_os_by_version(release.version_str())\n\n .unwrap()\n\n .unwrap();\n\n let asset_name = asset.name();\n\n let output = temp_dir.child(&asset_name);\n\n asset\n\n .download_to(output.path())\n\n .expect(\"cannot download file\");\n\n let release_dir = temp_dir.child(format!(\"release-{}\", release.version()));\n\n release_dir.create_dir_all().unwrap();\n\n decompress(output.path(), release_dir.path()).unwrap();\n\n file::find_file(release_dir.path(), \"jormungandr\").unwrap()\n\n}\n", "file_path": "testing/jormungandr-testing-utils/src/testing/node/legacy/mod.rs", "rank": 26, "score": 327453.40299978206 }, { "content": "#[test]\n\npub fn test_mempool_pool_max_entries_overrides_log_max_entries() {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let receiver = startup::create_new_account_address();\n\n let mut sender = startup::create_new_account_address();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_funds(vec![\n\n InitialUTxO {\n\n address: 
sender.address(),\n\n value: 100.into(),\n\n },\n\n InitialUTxO {\n\n address: receiver.address(),\n\n value: 100.into(),\n\n },\n\n ])\n\n .with_slot_duration(1)\n\n .with_mempool(Mempool {\n\n pool_max_entries: 2.into(),\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/bft/mempool/v0.rs", "rank": 27, "score": 326142.8860065666 }, { "content": "#[test]\n\npub fn test_mempool_pool_max_entries_overrides_log_max_entries() {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let receiver = startup::create_new_account_address();\n\n let mut sender = startup::create_new_account_address();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_funds(vec![\n\n InitialUTxO {\n\n address: sender.address(),\n\n value: 100.into(),\n\n },\n\n InitialUTxO {\n\n address: receiver.address(),\n\n value: 100.into(),\n\n },\n\n ])\n\n .with_slot_duration(1)\n\n .with_mempool(Mempool {\n\n pool_max_entries: 2.into(),\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/bft/mempool/v1.rs", "rank": 28, "score": 326142.8860065666 }, { "content": "pub fn read_ed25519_secret_key_from_file<P: AsRef<Path>>(\n\n path: &Option<P>,\n\n) -> Result<EitherEd25519SecretKey, Error> {\n\n let bech32_str: String =\n\n io::read_line(path).map_err(|source| Error::SecretKeyFileReadFailed {\n\n source,\n\n path: io::path_to_path_buf(path),\n\n })?;\n\n\n\n match SecretKey::try_from_bech32_str(&bech32_str) {\n\n Ok(sk) => Ok(EitherEd25519SecretKey::Extended(sk)),\n\n Err(_) => SecretKey::try_from_bech32_str(&bech32_str)\n\n .map(EitherEd25519SecretKey::Normal)\n\n .map_err(|source| Error::SecretKeyFileMalformed {\n\n source,\n\n path: io::path_to_path_buf(path),\n\n }),\n\n }\n\n}\n\n\n", "file_path": "jcli/src/jcli_lib/utils/key_parser.rs", "rank": 29, "score": 325995.64290027897 }, { "content": "pub fn path_to_path_buf<P: AsRef<Path>>(path: &Option<P>) -> PathBuf {\n\n path.as_ref()\n\n .map(|path| path.as_ref().to_path_buf())\n\n 
.unwrap_or_default()\n\n}\n\n\n", "file_path": "jcli/src/jcli_lib/utils/io.rs", "rank": 30, "score": 322439.4005236931 }, { "content": "pub fn wait(seconds: u64) {\n\n std::thread::sleep(Duration::from_secs(seconds));\n\n}\n\n\n", "file_path": "testing/jormungandr-scenario-tests/src/test/utils/mod.rs", "rank": 31, "score": 317774.89267181465 }, { "content": "#[test]\n\npub fn test_all_fragments() {\n\n let jcli: JCli = Default::default();\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let mut faucet = startup::create_new_account_address();\n\n let mut stake_pool_owner = startup::create_new_account_address();\n\n let mut full_delegator = startup::create_new_account_address();\n\n let mut split_delegator = startup::create_new_account_address();\n\n\n\n let stake_pool_owner_stake = 1_000;\n\n\n\n let (jormungandr, stake_pools) = startup::start_stake_pool(\n\n &[faucet.clone()],\n\n &[full_delegator.clone(), split_delegator.clone()],\n\n &mut ConfigurationBuilder::new().with_storage(&temp_dir.child(\"storage\")),\n\n )\n\n .unwrap();\n\n\n\n let initial_stake_pool = stake_pools.get(0).unwrap();\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/genesis/fragments.rs", "rank": 32, "score": 315600.1179081303 }, { "content": "pub fn sleep_till_next_epoch(grace_period: u32, config: &Block0Configuration) {\n\n sleep_till_epoch(1, grace_period, config);\n\n}\n", "file_path": "testing/jormungandr-integration-tests/src/common/startup/mod.rs", "rank": 33, "score": 314659.80711810087 }, { "content": "pub fn get_command<Conf: TestConfig + Serialize>(\n\n params: &JormungandrParams<Conf>,\n\n bin_path: impl AsRef<Path>,\n\n role: Role,\n\n from_genesis: FromGenesis,\n\n) -> Command {\n\n let bin_path = bin_path.as_ref();\n\n let mut builder = CommandBuilder::new(bin_path)\n\n .config(params.node_config_path())\n\n .rewards_history(params.rewards_history());\n\n if params.node_config().log_file_path().is_none() {\n\n builder = 
builder.stderr_to_log_file(params.log_file_path());\n\n }\n\n let builder = match (role, from_genesis) {\n\n (Role::Passive, _) => builder.genesis_block_hash(params.genesis_block_hash()),\n\n (Role::Leader, FromGenesis::File) => builder\n\n .genesis_block_path(params.genesis_block_path())\n\n .leader_with_secrets(params.secret_model_paths()),\n\n (Role::Leader, FromGenesis::Hash) => builder\n\n .genesis_block_hash(params.genesis_block_hash())\n\n .leader_with_secrets(params.secret_model_paths()),\n\n };\n\n builder.command()\n\n}\n", "file_path": "testing/jormungandr-integration-tests/src/common/jormungandr/starter/commands.rs", "rank": 34, "score": 314500.7720265078 }, { "content": "#[test]\n\npub fn send_all_fragments() {\n\n let receiver = startup::create_new_account_address();\n\n let sender = startup::create_new_account_address();\n\n\n\n let (jormungandr, _) = startup::start_stake_pool(\n\n &[sender.clone()],\n\n &[receiver.clone()],\n\n ConfigurationBuilder::new()\n\n .with_block0_consensus(ConsensusType::GenesisPraos)\n\n .with_slots_per_epoch(60)\n\n .with_consensus_genesis_praos_active_slot_coeff(ActiveSlotCoefficient::MAXIMUM)\n\n .with_slot_duration(3)\n\n .with_linear_fees(LinearFee::new(1, 1, 1))\n\n .with_explorer()\n\n .with_mempool(Mempool {\n\n pool_max_entries: 1_000_000usize.into(),\n\n log_max_entries: 1_000_000usize.into(),\n\n persistent_log: None,\n\n }),\n\n )\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/fragments.rs", "rank": 35, "score": 314040.09372661187 }, { "content": "#[test]\n\npub fn test_all_adversary_fragments() {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let mut faucet = startup::create_new_account_address();\n\n let stake_pool_owner = startup::create_new_account_address();\n\n let mut full_delegator = startup::create_new_account_address();\n\n let split_delegator = startup::create_new_account_address();\n\n\n\n let stake_pool_owner_stake = 1_000;\n\n\n\n let (jormungandr, stake_pools) = 
startup::start_stake_pool(\n\n &[stake_pool_owner.clone()],\n\n &[full_delegator.clone(), split_delegator, faucet.clone()],\n\n &mut ConfigurationBuilder::new().with_storage(&temp_dir.child(\"storage\")),\n\n )\n\n .unwrap();\n\n\n\n let initial_stake_pool = stake_pools.get(0).unwrap();\n\n\n\n let transaction_sender = FragmentSender::new(\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/genesis/fragments.rs", "rank": 36, "score": 311583.2582916125 }, { "content": "pub fn _read_secret_key_from_file<A: AsymmetricKey, P: AsRef<Path>>(\n\n path: &Option<P>,\n\n) -> Result<SecretKey<A>, Error> {\n\n let bech32_str: String =\n\n io::read_line(path).map_err(|source| Error::SecretKeyFileReadFailed {\n\n source,\n\n path: io::path_to_path_buf(path),\n\n })?;\n\n SecretKey::try_from_bech32_str(&bech32_str).map_err(|source| Error::SecretKeyFileMalformed {\n\n source,\n\n path: io::path_to_path_buf(path),\n\n })\n\n}\n\n\n", "file_path": "jcli/src/jcli_lib/utils/key_parser.rs", "rank": 37, "score": 310478.8955716691 }, { "content": "#[test]\n\npub fn fragment_load_test() {\n\n let faucet = startup::create_new_account_address();\n\n let receiver = startup::create_new_account_address();\n\n\n\n let (mut jormungandr, _) = startup::start_stake_pool(\n\n &[faucet.clone()],\n\n &[receiver.clone()],\n\n ConfigurationBuilder::new()\n\n .with_slots_per_epoch(30)\n\n .with_consensus_genesis_praos_active_slot_coeff(ActiveSlotCoefficient::MAXIMUM)\n\n .with_slot_duration(4)\n\n .with_epoch_stability_depth(10)\n\n .with_kes_update_speed(KesUpdateSpeed::new(43200).unwrap()),\n\n )\n\n .unwrap();\n\n\n\n jormungandr.steal_temp_dir().unwrap().into_persistent();\n\n\n\n let configuration = Configuration::duration(\n\n 1,\n", "file_path": "testing/jormungandr-integration-tests/src/non_functional/fragment.rs", "rank": 38, "score": 301634.62438790605 }, { "content": "pub fn read_line<P: AsRef<Path>>(path: &Option<P>) -> Result<String, Error> {\n\n let mut line = 
String::new();\n\n open_file_read(path)?.read_line(&mut line)?;\n\n Ok(line.trim_end().to_string())\n\n}\n\n\n\n#[derive(Debug, Error)]\n\npub enum ReadYamlError {\n\n #[error(\"could not read input\")]\n\n Io(#[from] Error),\n\n #[error(\"input contains malformed yaml\")]\n\n Yaml(#[from] serde_yaml::Error),\n\n}\n\n\n", "file_path": "jcli/src/jcli_lib/utils/io.rs", "rank": 39, "score": 299866.5315268627 }, { "content": "pub fn sleep_till_epoch(epoch_interval: u32, grace_period: u32, config: &Block0Configuration) {\n\n let coeff = epoch_interval * 2;\n\n let slots_per_epoch: u32 = config.blockchain_configuration.slots_per_epoch.into();\n\n let slot_duration: u8 = config.blockchain_configuration.slot_duration.into();\n\n let wait_time = ((slots_per_epoch * (slot_duration as u32)) * coeff) + grace_period;\n\n process_utils::sleep(wait_time.into());\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/common/startup/mod.rs", "rank": 40, "score": 299295.0705420007 }, { "content": "#[named]\n\npub fn point_to_point_on_file_storage(mut context: Context<ChaChaRng>) -> Result<ScenarioResult> {\n\n let name = function_name!();\n\n let scenario_settings = prepare_scenario! 
{\n\n name,\n\n &mut context,\n\n topology [\n\n LEADER_4,\n\n LEADER_3 -> LEADER_4,\n\n LEADER_2 -> LEADER_3,\n\n LEADER_1 -> LEADER_2,\n\n ]\n\n blockchain {\n\n consensus = GenesisPraos,\n\n number_of_slots_per_epoch = 60,\n\n slot_duration = 1,\n\n leaders = [ LEADER_1 ],\n\n initials = [\n\n \"account\" \"unassigned1\" with 500_000_000,\n\n \"account\" \"delegated1\" with 2_000_000_000 delegates to LEADER_1,\n\n ],\n", "file_path": "testing/jormungandr-scenario-tests/src/test/network/topology/scenarios.rs", "rank": 41, "score": 298890.95440232515 }, { "content": "#[test]\n\n#[cfg(any(unix, windows))]\n\npub fn test_rest_tls_config() {\n\n let temp_dir = TempDir::new().unwrap().into_persistent();\n\n let prv_key_file = resources::tls_server_private_key();\n\n let server_crt_file = resources::tls_server_crt();\n\n let ca_crt_file = resources::tls_ca_crt();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_rest_tls_config(Tls {\n\n cert_file: server_crt_file.as_os_str().to_str().unwrap().to_owned(),\n\n priv_key_file: prv_key_file.as_os_str().to_str().unwrap().to_owned(),\n\n })\n\n .build(&temp_dir);\n\n\n\n let jormungandr = Starter::new()\n\n .temp_dir(temp_dir)\n\n .config(config)\n\n .verify_by(StartupVerificationMode::Log)\n\n .start()\n\n .unwrap();\n\n println!(\"Bootstrapped\");\n\n jormungandr.assert_no_errors_in_log();\n\n\n\n println!(\n\n \"{:?}\",\n\n jormungandr.secure_rest(&ca_crt_file).stats().unwrap()\n\n );\n\n}\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/tls.rs", "rank": 42, "score": 298499.03488677926 }, { "content": "#[test]\n\npub fn get_fragments() {\n\n let mut sender = startup::create_new_account_address();\n\n let receiver = startup::create_new_account_address();\n\n let config = ConfigurationBuilder::new()\n\n .with_slot_duration(4)\n\n .with_funds(vec![InitialUTxO {\n\n address: sender.address(),\n\n value: 100.into(),\n\n }])\n\n .to_owned();\n\n\n\n let setup = 
setup::client::bootstrap(config);\n\n let output_value = 1u64;\n\n let jcli: JCli = Default::default();\n\n let transaction = sender\n\n .transaction_to(\n\n &setup.server.genesis_block_hash(),\n\n &setup.server.fees(),\n\n receiver.address(),\n\n output_value.into(),\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/grpc/client_tests.rs", "rank": 43, "score": 298206.3105911098 }, { "content": "#[test]\n\npub fn fragment_batch_load_test() {\n\n let mut faucet = startup::create_new_account_address();\n\n\n\n let (mut jormungandr, _) = startup::start_stake_pool(\n\n &[faucet.clone()],\n\n &[],\n\n ConfigurationBuilder::new()\n\n .with_slots_per_epoch(60)\n\n .with_consensus_genesis_praos_active_slot_coeff(ActiveSlotCoefficient::MAXIMUM)\n\n .with_slot_duration(4)\n\n .with_epoch_stability_depth(10)\n\n .with_kes_update_speed(KesUpdateSpeed::new(43200).unwrap()),\n\n )\n\n .unwrap();\n\n\n\n jormungandr.steal_temp_dir().unwrap().into_persistent();\n\n\n\n let configuration = Configuration::duration(\n\n 5,\n\n std::time::Duration::from_secs(60),\n", "file_path": "testing/jormungandr-integration-tests/src/non_functional/fragment.rs", "rank": 44, "score": 297651.10192639614 }, { "content": "#[test]\n\npub fn test_mempool_pool_max_entries_equal_0() {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let receiver = startup::create_new_account_address();\n\n let mut sender = startup::create_new_account_address();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_funds(vec![\n\n InitialUTxO {\n\n address: sender.address(),\n\n value: 100.into(),\n\n },\n\n InitialUTxO {\n\n address: receiver.address(),\n\n value: 100.into(),\n\n },\n\n ])\n\n .with_slot_duration(1)\n\n .with_mempool(Mempool {\n\n pool_max_entries: 0.into(),\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/bft/mempool/v0.rs", "rank": 45, "score": 297271.13852084806 }, { "content": "#[test]\n\npub fn test_mempool_pool_max_entries_limit() {\n\n let 
temp_dir = TempDir::new().unwrap();\n\n\n\n let receiver = startup::create_new_account_address();\n\n let mut sender = startup::create_new_account_address();\n\n\n\n let leader_config = ConfigurationBuilder::new()\n\n .with_funds(vec![\n\n InitialUTxO {\n\n address: sender.address(),\n\n value: 100.into(),\n\n },\n\n InitialUTxO {\n\n address: receiver.address(),\n\n value: 100.into(),\n\n },\n\n ])\n\n .with_slot_duration(5)\n\n .with_mempool(Mempool {\n\n pool_max_entries: 1.into(),\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/bft/mempool/v0.rs", "rank": 46, "score": 297271.13852084806 }, { "content": "#[test]\n\npub fn test_mempool_pool_max_entries_equal_0() {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let receiver = startup::create_new_account_address();\n\n let mut sender = startup::create_new_account_address();\n\n\n\n let config = ConfigurationBuilder::new()\n\n .with_funds(vec![\n\n InitialUTxO {\n\n address: sender.address(),\n\n value: 100.into(),\n\n },\n\n InitialUTxO {\n\n address: receiver.address(),\n\n value: 100.into(),\n\n },\n\n ])\n\n .with_slot_duration(1)\n\n .with_mempool(Mempool {\n\n pool_max_entries: 0.into(),\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/bft/mempool/v1.rs", "rank": 47, "score": 297271.13852084806 }, { "content": "#[test]\n\npub fn test_mempool_pool_max_entries_limit() {\n\n let temp_dir = TempDir::new().unwrap();\n\n\n\n let receiver = startup::create_new_account_address();\n\n let mut sender = startup::create_new_account_address();\n\n\n\n let leader_config = ConfigurationBuilder::new()\n\n .with_funds(vec![\n\n InitialUTxO {\n\n address: sender.address(),\n\n value: 100.into(),\n\n },\n\n InitialUTxO {\n\n address: receiver.address(),\n\n value: 100.into(),\n\n },\n\n ])\n\n .with_slot_duration(2)\n\n .with_mempool(Mempool {\n\n pool_max_entries: 1.into(),\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/bft/mempool/v1.rs", "rank": 48, 
"score": 297271.13852084806 }, { "content": "pub fn transaction_to(\n\n block0_hash: &Hash,\n\n fees: &LinearFee,\n\n from: &Wallet,\n\n address: Address,\n\n value: Value,\n\n) -> Result<Fragment, FragmentBuilderError> {\n\n transaction_to_many(block0_hash, fees, from, &[address], value)\n\n}\n\n\n", "file_path": "testing/jormungandr-testing-utils/src/testing/fragments/transaction.rs", "rank": 49, "score": 297007.9206524567 }, { "content": "#[rstest]\n\npub fn fragment_already_in_log(world: (JormungandrProcess, Wallet, Wallet, Wallet)) {\n\n let (jormungandr, mut alice, bob, _) = world;\n\n\n\n let alice_fragment = alice\n\n .transaction_to(\n\n &jormungandr.genesis_block_hash(),\n\n &jormungandr.fees(),\n\n bob.address(),\n\n 100.into(),\n\n )\n\n .unwrap();\n\n\n\n let response = jormungandr\n\n .rest()\n\n .raw()\n\n .send_fragment_batch(vec![alice_fragment.clone(), alice_fragment.clone()], false)\n\n .unwrap();\n\n\n\n assert_eq!(response.status(), reqwest::StatusCode::OK);\n\n let summary: FragmentsProcessingSummary =\n\n serde_json::from_str(&response.text().unwrap()).unwrap();\n\n assert_eq!(summary.accepted, vec![alice_fragment.id()]);\n\n assert_eq!(summary.rejected, vec![]);\n\n}\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/rest/v0/errors.rs", "rank": 50, "score": 295325.25408057886 }, { "content": "#[ignore]\n\npub fn test_legacy_node_all_fragments() {\n\n let temp_dir = TempDir::new().unwrap();\n\n let jcli: JCli = Default::default();\n\n\n\n let legacy_release = download_last_n_releases(1).iter().cloned().next().unwrap();\n\n let jormungandr = get_jormungandr_bin(&legacy_release, &temp_dir);\n\n\n\n let mut first_stake_pool_owner = startup::create_new_account_address();\n\n let mut second_stake_pool_owner = startup::create_new_account_address();\n\n let mut full_delegator = startup::create_new_account_address();\n\n let mut split_delegator = startup::create_new_account_address();\n\n\n\n let config = 
ConfigurationBuilder::new()\n\n .with_funds(vec![\n\n first_stake_pool_owner.to_initial_fund(1_000_000),\n\n second_stake_pool_owner.to_initial_fund(2_000_000),\n\n full_delegator.to_initial_fund(2_000_000),\n\n split_delegator.to_initial_fund(2_000_000),\n\n ])\n\n .build(&temp_dir);\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/legacy/mod.rs", "rank": 51, "score": 294181.4618151798 }, { "content": "#[named]\n\npub fn current_node_fragment_propagation(\n\n mut context: Context<ChaChaRng>,\n\n) -> Result<ScenarioResult> {\n\n let title = \"test_legacy_current_node_fragment_propagation\";\n\n let scenario_settings = prepare_scenario! {\n\n title,\n\n &mut context,\n\n topology [\n\n LEADER,\n\n PASSIVE -> LEADER,\n\n ]\n\n blockchain {\n\n consensus = GenesisPraos,\n\n number_of_slots_per_epoch = 60,\n\n slot_duration = 1,\n\n leaders = [ LEADER],\n\n initials = [\n\n \"account\" \"alice\" with 2_000_000_000 delegates to LEADER,\n\n \"account\" \"bob\" with 500_000_000,\n\n \"account\" \"clarice\" with 500_000_000,\n", "file_path": "testing/jormungandr-scenario-tests/src/test/legacy/fragment_propagation.rs", "rank": 52, "score": 293802.36621186696 }, { "content": "pub fn transaction_to_many(\n\n block0_hash: &Hash,\n\n fees: &LinearFee,\n\n from: &Wallet,\n\n addresses: &[Address],\n\n value: Value,\n\n) -> Result<Fragment, FragmentBuilderError> {\n\n let mut iobuilder = InputOutputBuilder::empty();\n\n\n\n for address in addresses {\n\n iobuilder\n\n .add_output(address.clone().into(), value.into())\n\n .unwrap();\n\n }\n\n\n\n let value_u64: u64 = value.into();\n\n let input_without_fees: Value = (value_u64 * addresses.len() as u64).into();\n\n let input_value = fees.calculate(None, 1, addresses.len() as u8) + input_without_fees.into();\n\n let input = from.add_input_with_value(input_value.unwrap().into());\n\n iobuilder.add_input(&input).unwrap();\n", "file_path": "testing/jormungandr-testing-utils/src/testing/fragments/transaction.rs", 
"rank": 53, "score": 292540.31497917615 }, { "content": "// Read json-encoded vote plan(s) from file and returns the one\n\n// with the specified id. If there is only one vote plan in the input\n\n// the id can be\n\npub fn get_vote_plan_by_id<P: AsRef<Path>>(\n\n vote_plan_file: Option<P>,\n\n id: Option<&Hash>,\n\n) -> Result<VotePlanStatus, VotePlanError> {\n\n let value: Value = serde_json::from_reader(io::open_file_read(&vote_plan_file)?)?;\n\n match value {\n\n Value::Array(vote_plans) => {\n\n let plans = vote_plans\n\n .into_iter()\n\n .map(serde_json::from_value)\n\n .collect::<Result<Vec<VotePlanStatus>, serde_json::Error>>()?;\n\n match id {\n\n Some(id) => plans\n\n .into_iter()\n\n .find(|plan| &plan.id == id)\n\n .ok_or(VotePlanError::VotePlanIdNotFound),\n\n None if plans.len() == 1 => Ok(plans.into_iter().next().unwrap()),\n\n _ => Err(VotePlanError::UnclearVotePlan),\n\n }\n\n }\n", "file_path": "jcli/src/jcli_lib/utils/vote.rs", "rank": 54, "score": 291579.24304745387 }, { "content": "pub fn print<R: rand_core::RngCore>(context: &Context<R>, name: &str) {\n\n println!(\n\n r###\"\n\n ---_ ......._-_--.\n\n (|\\ / / /| \\ \\ _ ___ ____ __ __ _ _ _ _ ____ _ _ _ ____ ____\n\n / / .' -=-' `. | |/ _ \\| _ \\| \\/ | | | | \\ | |/ ___| / \\ | \\ | | _ \\| _ \\\n\n / / .' ) _ | | | | | |_) | |\\/| | | | | \\| | | _ / _ \\ | \\| | | | | |_) |\n\n _/ / .' _.) / | |_| | |_| | _ <| | | | |_| | |\\ | |_| |/ ___ \\| |\\ | |_| | _ <\n\n / o o _.-' / .' \\___/ \\___/|_| \\_\\_| |_|\\___/|_| \\_|\\____/_/ \\_\\_| \\_|____/|_| \\_\\\n\n \\ _.-' / .'#|\n\n \\______.-'// .'.' \\#| {}\n\n \\| \\ | // .'.' _ |#|\n\n ` \\|// .'.'_._._|#|\n\n . .// .'.' 
| _._ \\#|\n\n \\`-|\\_/ / \\ _._ \\#\\\n\n `/'\\__/ \\ _._ \\#\\\n\n /^| \\ _-_ \\#\n\n ' ` \\ _-_ \\\n\n \\_\n\n\n", "file_path": "testing/jormungandr-scenario-tests/src/introduction.rs", "rank": 55, "score": 290772.69189858215 }, { "content": "#[named]\n\npub fn leader_restart_preserves_leadership_log(\n\n mut context: Context<ChaChaRng>,\n\n) -> Result<ScenarioResult> {\n\n let name = function_name!();\n\n let scenario_settings = prepare_scenario! {\n\n name,\n\n &mut context,\n\n topology [\n\n LEADER_1,\n\n LEADER_2 -> LEADER_1,\n\n ]\n\n blockchain {\n\n consensus = Bft,\n\n number_of_slots_per_epoch = 120,\n\n slot_duration = 2,\n\n leaders = [ LEADER_1, LEADER_2 ],\n\n initials = [\n\n \"account\" \"alice\" with 500_000_000,\n\n \"account\" \"bob\" with 500_000_000,\n\n ],\n", "file_path": "testing/jormungandr-scenario-tests/src/test/features/leadership_log.rs", "rank": 56, "score": 290527.1261208089 }, { "content": "#[named]\n\npub fn current_node_legacy_fragment_propagation(\n\n mut context: Context<ChaChaRng>,\n\n) -> Result<ScenarioResult> {\n\n let title = \"test_legacy_current_node_fragment_propagation\";\n\n let scenario_settings = prepare_scenario! {\n\n title,\n\n &mut context,\n\n topology [\n\n LEADER,\n\n PASSIVE -> LEADER,\n\n ]\n\n blockchain {\n\n consensus = GenesisPraos,\n\n number_of_slots_per_epoch = 60,\n\n slot_duration = 1,\n\n leaders = [ LEADER],\n\n initials = [\n\n \"account\" \"alice\" with 2_000_000_000 delegates to LEADER,\n\n \"account\" \"bob\" with 500_000_000,\n\n \"account\" \"clarice\" with 500_000_000,\n", "file_path": "testing/jormungandr-scenario-tests/src/test/legacy/fragment_propagation.rs", "rank": 57, "score": 290092.9262674605 }, { "content": "#[named]\n\npub fn legacy_current_node_fragment_propagation(\n\n mut context: Context<ChaChaRng>,\n\n) -> Result<ScenarioResult> {\n\n let title = \"test_legacy_current_node_fragment_propagation\";\n\n let scenario_settings = prepare_scenario! 
{\n\n title,\n\n &mut context,\n\n topology [\n\n LEADER,\n\n PASSIVE -> LEADER,\n\n ]\n\n blockchain {\n\n consensus = GenesisPraos,\n\n number_of_slots_per_epoch = 60,\n\n slot_duration = 1,\n\n leaders = [ LEADER],\n\n initials = [\n\n \"account\" \"alice\" with 2_000_000_000 delegates to LEADER,\n\n \"account\" \"bob\" with 500_000_000,\n\n \"account\" \"clarice\" with 500_000_000,\n", "file_path": "testing/jormungandr-scenario-tests/src/test/legacy/fragment_propagation.rs", "rank": 58, "score": 290092.9262674605 }, { "content": "pub fn speed_benchmark_from_log(\n\n log: &JormungandrLogger,\n\n name: &str,\n\n timeout: Duration,\n\n start_measurement: &str,\n\n stop_measurement: &str,\n\n) -> SpeedBenchmarkFinish {\n\n let start_entry: Timestamp = log\n\n .get_lines()\n\n .into_iter()\n\n .find(|x| x.message().contains(start_measurement))\n\n .expect(\"cannot find start mesurement entry in log\")\n\n .into();\n\n\n\n let stop_entry: Timestamp = log\n\n .get_lines()\n\n .into_iter()\n\n .find(|x| x.message().contains(stop_measurement))\n\n .expect(\"cannot find stop mesurement entry in log\")\n\n .into();\n\n\n\n let definition = SpeedBenchmarkDef::new(name.to_string())\n\n .target(timeout)\n\n .clone();\n\n let speed = Speed::new(&start_entry, &stop_entry);\n\n\n\n SpeedBenchmarkFinish::new(definition, speed)\n\n}\n", "file_path": "testing/jormungandr-testing-utils/src/testing/node/benchmark.rs", "rank": 59, "score": 288599.17189389 }, { "content": "pub fn get_openapi_path() -> PathBuf {\n\n let mut path = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n path.pop();\n\n path.pop();\n\n path.push(\"doc\");\n\n path.push(\"api\");\n\n path.push(\"v0.yaml\");\n\n path\n\n}\n", "file_path": "testing/jormungandr-integration-tests/src/common/configuration.rs", "rank": 60, "score": 288353.091818074 }, { "content": "#[test]\n\npub fn transaction_load_test() {\n\n let mut faucet = startup::create_new_account_address();\n\n\n\n let (mut jormungandr, _) = 
startup::start_stake_pool(\n\n &[faucet.clone()],\n\n &[],\n\n ConfigurationBuilder::new()\n\n .with_slots_per_epoch(60)\n\n .with_consensus_genesis_praos_active_slot_coeff(ActiveSlotCoefficient::MAXIMUM)\n\n .with_slot_duration(4)\n\n .with_epoch_stability_depth(10)\n\n .with_kes_update_speed(KesUpdateSpeed::new(43200).unwrap()),\n\n )\n\n .unwrap();\n\n\n\n jormungandr.steal_temp_dir().unwrap().into_persistent();\n\n\n\n let configuration = Configuration::duration(\n\n 1,\n\n std::time::Duration::from_secs(60),\n", "file_path": "testing/jormungandr-integration-tests/src/non_functional/fragment.rs", "rank": 61, "score": 288257.67668740335 }, { "content": "pub fn start_bft(\n\n initial_funds: Vec<&Wallet>,\n\n config_builder: &mut ConfigurationBuilder,\n\n) -> Result<JormungandrProcess, StartupError> {\n\n let temp_dir = TempDir::new()?;\n\n\n\n let config = config_builder\n\n .with_funds(\n\n initial_funds\n\n .iter()\n\n .map(|x| InitialUTxO {\n\n address: x.address(),\n\n value: 1_000_000_000.into(),\n\n })\n\n .collect(),\n\n )\n\n .with_block0_consensus(ConsensusVersion::Bft)\n\n .with_explorer()\n\n .build(&temp_dir);\n\n\n\n Starter::new().temp_dir(temp_dir).config(config).start()\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/common/startup/mod.rs", "rank": 62, "score": 287133.2394227442 }, { "content": "#[test]\n\npub fn test_leadership_logs_parent_hash_is_correct() {\n\n let faucet = startup::create_new_account_address();\n\n let jcli: JCli = Default::default();\n\n let (jormungandr, _) =\n\n startup::start_stake_pool(&[faucet], &[], &mut ConfigurationBuilder::new()).unwrap();\n\n\n\n jormungandr\n\n .wait_for_bootstrap(&StartupVerificationMode::Rest, Duration::from_secs(10))\n\n .unwrap();\n\n\n\n // Give the node some time to produce blocks\n\n std::thread::sleep(Duration::from_secs(5));\n\n\n\n let leadership_logs = jcli.rest().v0().leadership_log(jormungandr.rest_uri());\n\n\n\n // leadership logs are fetched in reverse order 
(newest first)\n\n for leadership in leadership_logs.iter().take(10).rev() {\n\n if let LeadershipLogStatus::Block {\n\n block,\n\n parent,\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/genesis/leadership.rs", "rank": 63, "score": 286904.2699698482 }, { "content": "/// Get jormungandr executable from current environment\n\npub fn get_jormungandr_app() -> PathBuf {\n\n const JORMUNGANDR_NAME: &str = env!(\"JORMUNGANDR_NAME\");\n\n let mut path = get_working_directory();\n\n path.push(JORMUNGANDR_NAME);\n\n if cfg!(windows) {\n\n path.set_extension(\"exe\");\n\n }\n\n assert!(\n\n path.is_file(),\n\n \"File does not exist: {:?}, pwd: {:?}\",\n\n path,\n\n env::current_dir()\n\n );\n\n path\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/common/configuration.rs", "rank": 64, "score": 284767.4282169771 }, { "content": "pub fn storage_loading_benchmark_from_log(\n\n log: &JormungandrLogger,\n\n name: &str,\n\n timeout: Duration,\n\n) -> SpeedBenchmarkFinish {\n\n speed_benchmark_from_log(\n\n log,\n\n name,\n\n timeout,\n\n \"storing blockchain\",\n\n \"Loaded from storage\",\n\n )\n\n}\n\n\n", "file_path": "testing/jormungandr-testing-utils/src/testing/node/benchmark.rs", "rank": 65, "score": 284470.9614037656 }, { "content": "pub fn start_stake_pool(\n\n owners: &[Wallet],\n\n initial_funds: &[Wallet],\n\n config_builder: &mut ConfigurationBuilder,\n\n) -> Result<(JormungandrProcess, Vec<StakePool>), StartupError> {\n\n let stake_pools: Vec<StakePool> = owners.iter().map(|x| StakePool::new(x)).collect();\n\n\n\n let stake_pool_registration_certs: Vec<SignedCertificate> = stake_pools\n\n .iter()\n\n .map(|x| signed_stake_pool_cert(&x).into())\n\n .collect();\n\n let stake_pool_owner_delegation_certs: Vec<SignedCertificate> = stake_pools\n\n .iter()\n\n .map(|x| signed_delegation_cert(x.owner(), x.id()).into())\n\n .collect();\n\n\n\n let mut initial_certs = stake_pool_registration_certs;\n\n 
initial_certs.extend(stake_pool_owner_delegation_certs.iter().cloned());\n\n\n\n let leaders: Vec<ConsensusLeaderId> = stake_pools\n", "file_path": "testing/jormungandr-integration-tests/src/common/startup/mod.rs", "rank": 66, "score": 282697.4688509168 }, { "content": "pub fn build_genesis_block(\n\n block0_config: &Block0Configuration,\n\n temp_dir: &impl PathChild,\n\n) -> PathBuf {\n\n let config_file = temp_dir.child(\"genesis.yaml\");\n\n write_block0_config(&block0_config, &config_file);\n\n let output_block_file = temp_dir.child(\"block-0.bin\");\n\n let jcli: JCli = Default::default();\n\n jcli.genesis()\n\n .encode(config_file.path(), &output_block_file);\n\n\n\n output_block_file.path().into()\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/common/startup/mod.rs", "rank": 67, "score": 282697.4688509168 }, { "content": "pub fn wait_for_nodes_sync(sync_wait_params: &SyncWaitParams) {\n\n let wait_time = sync_wait_params.wait_time();\n\n std::thread::sleep(wait_time);\n\n}\n", "file_path": "testing/jormungandr-testing-utils/src/testing/sync/mod.rs", "rank": 68, "score": 280497.1023493912 }, { "content": "pub fn write_secret(node_secret: &NodeSecret, output_file: &ChildPath) {\n\n let content = serde_yaml::to_string(&node_secret).expect(\"Cannot serialize secret node model\");\n\n output_file.write_str(&content).unwrap();\n\n}\n\n\n\nimpl SecretModelFactory {\n\n pub fn empty() -> NodeSecret {\n\n NodeSecret {\n\n bft: None,\n\n genesis: None,\n\n }\n\n }\n\n\n\n pub fn bft(signing_key: SigningKey<Ed25519>) -> NodeSecret {\n\n NodeSecret {\n\n bft: Some(Bft { signing_key }),\n\n genesis: None,\n\n }\n\n }\n\n\n", "file_path": "testing/jormungandr-testing-utils/src/testing/node/configuration/secret_model_factory.rs", "rank": 69, "score": 277750.67129892466 }, { "content": "#[test]\n\npub fn test_to_bytes_for_non_existent_input_file() {\n\n let jcli: JCli = Default::default();\n\n let byte_key_file = 
NamedTempFile::new(\"byte_file\").unwrap();\n\n jcli.key().convert_from_bytes_string_expect_fail(\n\n \"ed25519Extended\",\n\n byte_key_file.path(),\n\n \"file\",\n\n );\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jcli/key/to_bytes.rs", "rank": 70, "score": 276776.3759262833 }, { "content": "#[test]\n\n#[cfg(not(target_os = \"linux\"))]\n\npub fn test_cannot_create_input_when_staging_file_is_readonly() {\n\n use jortestkit::file;\n\n let jcli: JCli = Default::default();\n\n let mut transaction_wrapper =\n\n jcli.transaction_builder(Hash::from_hex(FAKE_GENESIS_HASH).unwrap());\n\n\n\n transaction_wrapper.new_transaction();\n\n file::make_readonly(&transaction_wrapper.staging_file_path());\n\n transaction_wrapper.add_input_expect_fail(&FAKE_INPUT_TRANSACTION_ID, 0, \"100\", \"denied\");\n\n}\n", "file_path": "testing/jormungandr-integration-tests/src/jcli/transaction/input.rs", "rank": 71, "score": 276776.0484305216 }, { "content": "#[test]\n\n#[cfg(not(target_os = \"linux\"))]\n\npub fn test_cannot_create_input_when_staging_file_is_readonly() {\n\n let mut transaction_wrapper = JCLITransactionWrapper::new_transaction(FAKE_GENESIS_HASH);\n\n make_readonly(&transaction_wrapper.staging_file_path());\n\n transaction_wrapper.assert_add_input_fail(&FAKE_INPUT_TRANSACTION_ID, &0, \"100\", \"denied\");\n\n}\n", "file_path": "testing/jormungandr-integration-tests/src/jcli/transaction/new.rs", "rank": 72, "score": 276776.0484305216 }, { "content": "#[test]\n\npub fn test_correct_error_is_returned_for_incorrect_path() {\n\n let jcli: JCli = Default::default();\n\n let config = NodeConfigBuilder::new().build();\n\n let incorrect_uri = format!(\"http://{}/api/api\", config.rest.listen);\n\n\n\n jcli.rest()\n\n .v0()\n\n .tip_expect_fail(incorrect_uri, \"tcp connect error\");\n\n}\n", "file_path": "testing/jormungandr-integration-tests/src/jcli/rest/tip.rs", "rank": 73, "score": 276753.62149425736 }, { "content": "pub fn 
measure_fragment_propagation_speed<A: FragmentNode + Sized>(\n\n fragment_id: FragmentId,\n\n leaders: &[&A],\n\n sync_wait: Thresholds<Speed>,\n\n info: &str,\n\n report_node_stats_interval: MeasurementReportInterval,\n\n) -> Result<(), VerificationError> {\n\n let benchmark = benchmark_speed(info.to_owned())\n\n .with_thresholds(sync_wait)\n\n .start();\n\n\n\n let leaders_nodes_count = leaders.len() as u32;\n\n let mut report_node_stats = MeasurementReporter::new(report_node_stats_interval);\n\n let mut leaders_ids: Vec<u32> = (1..=leaders_nodes_count).collect();\n\n\n\n while !benchmark.timeout_exceeded() {\n\n leaders_ids.retain(|leader_id| {\n\n let leader_index_usize = (leader_id - 1) as usize;\n\n let leader: &A = leaders.get(leader_index_usize).unwrap();\n\n let fragment_logs = leader.fragment_logs().unwrap();\n", "file_path": "testing/jormungandr-testing-utils/src/testing/sync/measure.rs", "rank": 74, "score": 276579.25061285583 }, { "content": "#[rstest]\n\npub fn test_single_id(world: (JormungandrProcess, FragmentId, FragmentId, FragmentId)) {\n\n let (jormungandr, alice_tx_id, _, _) = world;\n\n jormungandr\n\n .correct_state_verifier()\n\n .fragment_logs()\n\n .assert_single_id(alice_tx_id.to_string(), \"alice tx\");\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/rest/v1/statuses.rs", "rank": 75, "score": 275951.11119533 }, { "content": "#[rstest]\n\npub fn test_invalid_id(world: (JormungandrProcess, FragmentId, FragmentId, FragmentId)) {\n\n let (jormungandr, _, _, clarice_tx_id) = world;\n\n jormungandr\n\n .correct_state_verifier()\n\n .fragment_logs()\n\n .assert_invalid_id(clarice_tx_id.to_string(), \"invalid clarice tx\");\n\n}\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/rest/v1/statuses.rs", "rank": 76, "score": 275951.11119533 }, { "content": "#[rstest]\n\npub fn test_empty_ids(world: (JormungandrProcess, FragmentId, FragmentId, FragmentId)) {\n\n let (jormungandr, _, _, _) = 
world;\n\n jormungandr\n\n .correct_state_verifier()\n\n .fragment_logs()\n\n .assert_empty_ids(vec![], \"no tx\");\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/rest/v1/statuses.rs", "rank": 77, "score": 275951.11119533 }, { "content": "#[rstest]\n\npub fn test_multiple_ids(world: (JormungandrProcess, FragmentId, FragmentId, FragmentId)) {\n\n let (jormungandr, alice_tx_id, bob_tx_id, _) = world;\n\n\n\n jormungandr\n\n .correct_state_verifier()\n\n .fragment_logs()\n\n .assert_multiple_ids(\n\n vec![alice_tx_id.to_string(), bob_tx_id.to_string()],\n\n \"alice or bob tx\",\n\n );\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jormungandr/rest/v1/statuses.rs", "rank": 78, "score": 275951.11119533 }, { "content": "/// Get jcli executable from current environment\n\npub fn get_jcli_app() -> PathBuf {\n\n const JOR_CLI_NAME: &str = env!(\"JOR_CLI_NAME\");\n\n let mut path = get_working_directory();\n\n path.push(JOR_CLI_NAME);\n\n if cfg!(windows) {\n\n path.set_extension(\"exe\");\n\n }\n\n assert!(\n\n path.is_file(),\n\n \"File does not exist: {:?}, pwd: {:?}\",\n\n path,\n\n env::current_dir()\n\n );\n\n path\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/common/configuration.rs", "rank": 79, "score": 274710.8706901405 }, { "content": "pub fn tls_server_crt() -> PathBuf {\n\n let mut tls_server_crt = root_dir();\n\n tls_server_crt.push(\"resources/tls/server.crt\");\n\n tls_server_crt\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/common/resources.rs", "rank": 80, "score": 274710.87069014047 }, { "content": "pub fn tls_ca_crt() -> PathBuf {\n\n let mut tls_ca_crt = root_dir();\n\n tls_ca_crt.push(\"resources/tls/ca.crt\");\n\n tls_ca_crt\n\n}\n", "file_path": "testing/jormungandr-integration-tests/src/common/resources.rs", "rank": 81, "score": 274710.87069014047 }, { "content": "pub fn stream_request<T, R>(\n\n buffer: usize,\n\n) -> (RequestStreamHandle<T, R>, 
RequestSink<T>, ReplyFuture<R>) {\n\n let (sender, receiver) = async_msg::channel(buffer);\n\n let (reply, reply_future) = unary_reply();\n\n let handle = RequestStreamHandle { receiver, reply };\n\n let sink = RequestSink { sender };\n\n (handle, sink, reply_future)\n\n}\n\n\n\n/// ...\n\n#[allow(clippy::large_enum_variant)]\n\n#[derive(Debug)]\n\npub enum TransactionMsg {\n\n SendTransactions {\n\n origin: FragmentOrigin,\n\n fragments: Vec<Fragment>,\n\n fail_fast: bool,\n\n reply_handle: ReplyHandle<FragmentsProcessingSummary>,\n\n },\n", "file_path": "jormungandr/src/intercom.rs", "rank": 82, "score": 273810.5975249347 }, { "content": "fn get_legacy_app(temp_dir: &TempDir) -> (PathBuf, Version) {\n\n let releases = download_last_n_releases(1);\n\n let last_release = releases.get(0).unwrap();\n\n let jormungandr = get_jormungandr_bin(&last_release, temp_dir);\n\n (jormungandr, last_release.version())\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/networking/testnet.rs", "rank": 83, "score": 272590.15197141713 }, { "content": "#[named]\n\npub fn star(mut context: Context<ChaChaRng>) -> Result<ScenarioResult> {\n\n let name = function_name!();\n\n let scenario_settings = prepare_scenario! {\n\n name,\n\n &mut context,\n\n topology [\n\n LEADER_5,\n\n LEADER_1 -> LEADER_5,\n\n LEADER_2 -> LEADER_5,\n\n LEADER_3 -> LEADER_5,\n\n LEADER_4 -> LEADER_5,\n\n ]\n\n blockchain {\n\n consensus = GenesisPraos,\n\n number_of_slots_per_epoch = 60,\n\n slot_duration = 1,\n\n leaders = [ LEADER_1 ],\n\n initials = [\n\n \"account\" \"unassigned1\" with 500_000_000,\n\n \"account\" \"delegated1\" with 2_000_000_000 delegates to LEADER_5,\n", "file_path": "testing/jormungandr-scenario-tests/src/test/network/topology/scenarios.rs", "rank": 84, "score": 271686.8334620167 }, { "content": "#[named]\n\npub fn relay(mut context: Context<ChaChaRng>) -> Result<ScenarioResult> {\n\n let name = function_name!();\n\n let scenario_settings = prepare_scenario! 
{\n\n name,\n\n &mut context,\n\n topology [\n\n CORE_NODE,\n\n RELAY_NODE_1 -> CORE_NODE,\n\n RELAY_NODE_2 -> CORE_NODE,\n\n LEADER_1 -> RELAY_NODE_1,\n\n LEADER_2 -> RELAY_NODE_1,\n\n LEADER_3 -> RELAY_NODE_1,\n\n LEADER_4 -> RELAY_NODE_2,\n\n LEADER_5 -> RELAY_NODE_2,\n\n LEADER_6 -> RELAY_NODE_2,\n\n LEADER_7 -> RELAY_NODE_2\n\n ]\n\n blockchain {\n\n consensus = GenesisPraos,\n\n number_of_slots_per_epoch = 60,\n", "file_path": "testing/jormungandr-scenario-tests/src/test/network/topology/scenarios.rs", "rank": 85, "score": 271686.8334620167 }, { "content": "#[named]\n\npub fn tree(mut context: Context<ChaChaRng>) -> Result<ScenarioResult> {\n\n let name = function_name!();\n\n let scenario_settings = prepare_scenario! {\n\n name,\n\n &mut context,\n\n topology [\n\n LEADER_1,\n\n LEADER_2 -> LEADER_1,\n\n LEADER_3 -> LEADER_1,\n\n LEADER_4 -> LEADER_2,\n\n LEADER_5 -> LEADER_2,\n\n LEADER_6 -> LEADER_3,\n\n LEADER_7 -> LEADER_3\n\n ]\n\n blockchain {\n\n consensus = GenesisPraos,\n\n number_of_slots_per_epoch = 60,\n\n slot_duration = 1,\n\n leaders = [ LEADER_1 ],\n\n initials = [\n", "file_path": "testing/jormungandr-scenario-tests/src/test/network/topology/scenarios.rs", "rank": 86, "score": 271686.8334620167 }, { "content": "#[named]\n\npub fn mesh(mut context: Context<ChaChaRng>) -> Result<ScenarioResult> {\n\n let name = function_name!();\n\n let scenario_settings = prepare_scenario! 
{\n\n name,\n\n &mut context,\n\n topology [\n\n LEADER_4,\n\n LEADER_1 -> LEADER_4,\n\n LEADER_2 -> LEADER_1 -> LEADER_4,\n\n LEADER_3 -> LEADER_1 -> LEADER_2,\n\n LEADER_5 -> LEADER_2 -> LEADER_1,\n\n ]\n\n blockchain {\n\n consensus = GenesisPraos,\n\n number_of_slots_per_epoch = 60,\n\n slot_duration = 1,\n\n leaders = [ LEADER_1 ],\n\n initials = [\n\n \"account\" \"unassigned1\" with 500_000_000,\n\n \"account\" \"delegated1\" with 2_000_000_000 delegates to LEADER_3,\n", "file_path": "testing/jormungandr-scenario-tests/src/test/network/topology/scenarios.rs", "rank": 87, "score": 271686.8334620167 }, { "content": "fn get_legacy_data(title: &str, context: &mut Context<ChaChaRng>) -> (PathBuf, Version) {\n\n let releases = download_last_n_releases(1);\n\n let last_release = releases.last().unwrap();\n\n let legacy_app = get_jormungandr_bin(last_release, &context.child_directory(title));\n\n (legacy_app, last_release.version())\n\n}\n\n\n", "file_path": "testing/jormungandr-scenario-tests/src/test/legacy/fragment_propagation.rs", "rank": 88, "score": 271638.907343965 }, { "content": "#[named]\n\npub fn scenario_1(mut context: Context<ChaChaRng>) -> Result<ScenarioResult> {\n\n let name = function_name!();\n\n let scenario_settings = prepare_scenario! {\n\n name,\n\n &mut context,\n\n topology [\n\n \"node1\",\n\n \"node2\" -> \"node1\",\n\n ]\n\n blockchain {\n\n consensus = Bft,\n\n number_of_slots_per_epoch = 10,\n\n slot_duration = 1,\n\n leaders = [ \"node1\" ],\n\n initials = [\n\n \"account\" \"faucet1\" with 1_000_000_000,\n\n \"account\" \"faucet2\" with 2_000_000_000 delegates to \"node2\",\n\n ],\n\n }\n\n };\n", "file_path": "testing/jormungandr-scenario-tests/src/example_scenarios.rs", "rank": 89, "score": 271518.58242552984 }, { "content": "#[named]\n\npub fn scenario_2(mut context: Context<ChaChaRng>) -> Result<ScenarioResult> {\n\n let name = function_name!();\n\n let scenario_settings = prepare_scenario! 
{\n\n name,\n\n &mut context,\n\n topology [\n\n \"Leader1\",\n\n \"Passive1\" -> \"Leader1\",\n\n \"Passive2\" -> \"Leader1\",\n\n \"Passive3\" -> \"Leader1\",\n\n \"Unknown1\",\n\n ]\n\n blockchain {\n\n consensus = GenesisPraos,\n\n number_of_slots_per_epoch = 60,\n\n slot_duration = 1,\n\n leaders = [ \"Leader2\" ],\n\n initials = [\n\n \"account\" \"unassigned1\" with 500_000_000,\n\n \"account\" \"unassigned2\" with 100_000_000,\n", "file_path": "testing/jormungandr-scenario-tests/src/example_scenarios.rs", "rank": 90, "score": 271518.58242552984 }, { "content": "#[named]\n\npub fn interactive(mut context: Context<ChaChaRng>) -> Result<ScenarioResult> {\n\n let name = function_name!();\n\n let scenario_settings = prepare_scenario! {\n\n name,\n\n &mut context,\n\n topology [\n\n \"Leader1\",\n\n \"Leader2\" -> \"Leader1\",\n\n \"Leader3\" -> \"Leader1\",\n\n \"Leader4\" -> \"Leader1\",\n\n ]\n\n blockchain {\n\n consensus = GenesisPraos,\n\n number_of_slots_per_epoch = 60,\n\n slot_duration = 1,\n\n leaders = [ \"Leader2\" ],\n\n initials = [\n\n \"account\" \"unassigned1\" with 500_000_000,\n\n \"account\" \"unassigned2\" with 100_000_000,\n\n \"account\" \"delegated1\" with 2_000_000_000 delegates to \"Leader1\",\n", "file_path": "testing/jormungandr-scenario-tests/src/interactive/mod.rs", "rank": 91, "score": 271518.58242552984 }, { "content": "pub fn create_new_utxo_address() -> Wallet {\n\n Wallet::new_utxo(&mut rand::rngs::OsRng)\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/common/startup/mod.rs", "rank": 92, "score": 270646.4982618581 }, { "content": "pub fn create_new_delegation_address() -> Wallet {\n\n let account = Wallet::new_account(&mut rand::rngs::OsRng);\n\n create_new_delegation_address_for(&account.identifier())\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/common/startup/mod.rs", "rank": 93, "score": 270646.4982618582 }, { "content": "pub fn create_new_account_address() -> Wallet {\n\n 
Wallet::new_account(&mut rand::rngs::OsRng)\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/common/startup/mod.rs", "rank": 94, "score": 270646.4982618581 }, { "content": "pub fn tls_server_private_key() -> PathBuf {\n\n let mut tls_server_private_key = root_dir();\n\n tls_server_private_key.push(\"resources/tls/server.key\");\n\n tls_server_private_key\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/common/resources.rs", "rank": 95, "score": 270618.39100171375 }, { "content": "fn read_decryption_key<P: AsRef<Path>>(path: &Option<P>) -> Result<OpeningVoteKey, Error> {\n\n let data = io::read_line(path)?;\n\n bech32::decode(&data)\n\n .map_err(Error::from)\n\n .and_then(|(hrp, raw_key)| {\n\n if hrp != crate::jcli_lib::vote::bech32_constants::MEMBER_SK_HRP {\n\n return Err(Error::InvalidSecretKey);\n\n }\n\n OpeningVoteKey::from_bytes(\n\n &Vec::<u8>::from_base32(&raw_key).map_err(|_| Error::DecryptionKeyRead)?,\n\n )\n\n .ok_or(Error::DecryptionKeyRead)\n\n })\n\n}\n\n\n\nimpl TallyGenerateVotePlanDecryptionShares {\n\n pub fn exec(&self) -> Result<(), Error> {\n\n let vote_plan =\n\n vote::get_vote_plan_by_id(self.vote_plan.as_ref(), self.vote_plan_id.as_ref())?;\n\n let decryption_key = read_decryption_key(&Some(&self.key))?;\n", "file_path": "jcli/src/jcli_lib/vote/tally/decryption_tally.rs", "rank": 96, "score": 270265.02334844635 }, { "content": "#[named]\n\npub fn p2p_stats_test(mut context: Context<ChaChaRng>) -> Result<ScenarioResult> {\n\n let name = function_name!();\n\n let scenario_settings = prepare_scenario! 
{\n\n name,\n\n &mut context,\n\n topology [\n\n LEADER1,\n\n LEADER2 -> LEADER1,\n\n LEADER3 -> LEADER1,\n\n LEADER4 -> LEADER2 -> LEADER3,\n\n ]\n\n blockchain {\n\n consensus = GenesisPraos,\n\n number_of_slots_per_epoch = 60,\n\n slot_duration = 2,\n\n leaders = [ LEADER1 ],\n\n initials = [\n\n \"account\" \"delegated1\" with 2_000_000_000 delegates to LEADER1,\n\n \"account\" \"delegated2\" with 2_000_000_000 delegates to LEADER2,\n\n \"account\" \"delegated3\" with 2_000_000_000 delegates to LEADER3,\n", "file_path": "testing/jormungandr-scenario-tests/src/test/features/p2p/stats.rs", "rank": 97, "score": 269948.58759194065 }, { "content": "#[test]\n\n#[cfg(not(target_os = \"linux\"))]\n\npub fn test_make_witness_with_readonly_private_key_file_fails() {\n\n let jcli: JCli = Default::default();\n\n let reciever = startup::create_new_utxo_address();\n\n let mut transaction_wrapper = JCLITransactionWrapper::new_transaction(FAKE_GENESIS_HASH);\n\n let private_key = jcli.key().generate_default();\n\n\n\n let witness = Witness::new(\n\n FAKE_GENESIS_HASH,\n\n FAKE_INPUT_TRANSACTION_ID,\n\n \"utxo\",\n\n &private_key,\n\n &0,\n\n );\n\n make_readonly(&witness.file);\n\n transaction_wrapper\n\n .assert_add_input(&FAKE_INPUT_TRANSACTION_ID, &0, &100)\n\n .assert_add_output(&reciever.address, &100)\n\n .assert_finalize()\n\n .assert_make_witness_fails(&witness, \"denied\");\n\n}\n\n\n", "file_path": "testing/jormungandr-integration-tests/src/jcli/transaction/witness.rs", "rank": 98, "score": 269543.69509922 }, { "content": "/// internal function to prepare an executable path name for `jormungandr` and `jcli`\n\n///\n\n/// if the program could not be found in the $PATH or the current path then this\n\n/// function will panic so the tests are not executed.\n\npub fn prepare_command(exe: impl Into<PathBuf>) -> PathBuf {\n\n let exe = exe.into();\n\n check_command_version(&exe);\n\n exe\n\n}\n\n\n", "file_path": "testing/jormungandr-scenario-tests/src/programs.rs", 
"rank": 99, "score": 269290.56044660957 } ]
Rust
src/memtable.rs
g302ge/leveldb-rs
ab3a17a51819f04fcef1b88f00a1c581bb2e6898
use crate::cmp::{Cmp, MemtableKeyCmp}; use crate::key_types::{build_memtable_key, parse_internal_key, parse_memtable_key, ValueType}; use crate::key_types::{LookupKey, UserKey}; use crate::skipmap::{SkipMap, SkipMapIter}; use crate::types::{current_key_val, LdbIterator, SequenceNumber}; use std::rc::Rc; use integer_encoding::FixedInt; pub struct MemTable { map: SkipMap, } impl MemTable { pub fn new(cmp: Rc<Box<dyn Cmp>>) -> MemTable { MemTable::new_raw(Rc::new(Box::new(MemtableKeyCmp(cmp)))) } fn new_raw(cmp: Rc<Box<dyn Cmp>>) -> MemTable { MemTable { map: SkipMap::new(cmp), } } pub fn len(&self) -> usize { self.map.len() } pub fn approx_mem_usage(&self) -> usize { self.map.approx_memory() } pub fn add<'a>(&mut self, seq: SequenceNumber, t: ValueType, key: UserKey<'a>, value: &[u8]) { self.map .insert(build_memtable_key(key, value, t, seq), Vec::new()) } #[allow(unused_variables)] pub fn get(&self, key: &LookupKey) -> (Option<Vec<u8>>, bool) { let mut iter = self.map.iter(); iter.seek(key.memtable_key()); if let Some((foundkey, _)) = current_key_val(&iter) { let (fkeylen, fkeyoff, tag, vallen, valoff) = parse_memtable_key(&foundkey); if key.user_key() == &foundkey[fkeyoff..fkeyoff + fkeylen] { if tag & 0xff == ValueType::TypeValue as u64 { return (Some(foundkey[valoff..valoff + vallen].to_vec()), false); } else { return (None, true); } } } (None, false) } pub fn iter(&self) -> MemtableIterator { MemtableIterator { skipmapiter: self.map.iter(), } } } pub struct MemtableIterator { skipmapiter: SkipMapIter, } impl LdbIterator for MemtableIterator { fn advance(&mut self) -> bool { if !self.skipmapiter.advance() { return false; } self.skipmapiter.valid() } fn reset(&mut self) { self.skipmapiter.reset(); } fn prev(&mut self) -> bool { let (mut key, mut val) = (vec![], vec![]); loop { if !self.skipmapiter.prev() { return false; } if self.skipmapiter.current(&mut key, &mut val) { let (_, _, tag, _, _) = parse_memtable_key(&key); if tag & 0xff == ValueType::TypeValue as u64 
{ return true; } else { continue; } } else { return false; } } } fn valid(&self) -> bool { self.skipmapiter.valid() } fn current(&self, key: &mut Vec<u8>, val: &mut Vec<u8>) -> bool { if !self.valid() { return false; } if self.skipmapiter.current(key, val) { let (keylen, keyoff, _, vallen, valoff) = parse_memtable_key(&key); val.clear(); val.extend_from_slice(&key[valoff..valoff + vallen]); shift_left(key, keyoff); key.truncate(keylen + u64::required_space()); true } else { panic!("should not happen"); } } fn seek(&mut self, to: &[u8]) { let (_, seq, ukey) = parse_internal_key(to); self.skipmapiter .seek(LookupKey::new(ukey, seq).memtable_key()); } } fn shift_left(s: &mut Vec<u8>, mid: usize) { for i in mid..s.len() { s.swap(i, i - mid); } let newlen = s.len() - mid; s.truncate(newlen); } #[cfg(test)] #[allow(unused_variables)] mod tests { use super::*; use crate::key_types::{parse_tag, truncate_to_userkey}; use crate::options; use crate::test_util::{test_iterator_properties, LdbIteratorIter}; #[test] fn test_shift_left() { let mut v = vec![1, 2, 3, 4, 5]; shift_left(&mut v, 1); assert_eq!(v, vec![2, 3, 4, 5]); let mut v = vec![1, 2, 3, 4, 5]; shift_left(&mut v, 4); assert_eq!(v, vec![5]); } fn get_memtable() -> MemTable { let mut mt = MemTable::new(options::for_test().cmp); let entries = vec![ (ValueType::TypeValue, 115, "abc", "122"), (ValueType::TypeValue, 120, "abc", "123"), (ValueType::TypeValue, 121, "abd", "124"), (ValueType::TypeDeletion, 122, "abe", "125"), (ValueType::TypeValue, 123, "abf", "126"), ]; for e in entries.iter() { mt.add(e.1, e.0, e.2.as_bytes(), e.3.as_bytes()); } mt } #[test] fn test_memtable_parse_tag() { let tag = (12345 << 8) | 1; assert_eq!(parse_tag(tag), (ValueType::TypeValue, 12345)); } #[test] fn test_memtable_add() { let mut mt = MemTable::new(options::for_test().cmp); mt.add( 123, ValueType::TypeValue, "abc".as_bytes(), "123".as_bytes(), ); assert_eq!( mt.map.iter().next().unwrap().0, &[11, 97, 98, 99, 1, 123, 0, 0, 0, 0, 0, 0, 3, 
49, 50, 51] ); assert_eq!( mt.iter().next().unwrap().0, &[97, 98, 99, 1, 123, 0, 0, 0, 0, 0, 0] ); } #[test] fn test_memtable_add_get() { let mt = get_memtable(); if let Some(v) = mt.get(&LookupKey::new("abc".as_bytes(), 110)).0 { eprintln!("{:?}", v); panic!("found"); } if let Some(v) = mt.get(&LookupKey::new("abf".as_bytes(), 110)).0 { eprintln!("{:?}", v); panic!("found"); } if let Some(v) = mt.get(&LookupKey::new("abc".as_bytes(), 116)).0 { assert_eq!(v, "122".as_bytes()); } else { panic!("not found"); } if let (Some(v), deleted) = mt.get(&LookupKey::new("abc".as_bytes(), 120)) { assert_eq!(v, "123".as_bytes()); assert!(!deleted); } else { panic!("not found"); } if let (None, deleted) = mt.get(&LookupKey::new("abe".as_bytes(), 122)) { assert!(deleted); } else { panic!("found deleted"); } if let Some(v) = mt.get(&LookupKey::new("abf".as_bytes(), 129)).0 { assert_eq!(v, "126".as_bytes()); } else { panic!("not found"); } } #[test] fn test_memtable_iterator_init() { let mt = get_memtable(); let mut iter = mt.iter(); assert!(!iter.valid()); iter.next(); assert!(iter.valid()); assert_eq!( current_key_val(&iter).unwrap().0, vec![97, 98, 99, 1, 120, 0, 0, 0, 0, 0, 0].as_slice() ); iter.reset(); assert!(!iter.valid()); } #[test] fn test_memtable_iterator_seek() { let mt = get_memtable(); let mut iter = mt.iter(); assert!(!iter.valid()); iter.seek(LookupKey::new("abc".as_bytes(), 400).internal_key()); let (mut gotkey, gotval) = current_key_val(&iter).unwrap(); truncate_to_userkey(&mut gotkey); assert_eq!( ("abc".as_bytes(), "123".as_bytes()), (gotkey.as_slice(), gotval.as_slice()) ); iter.seek(LookupKey::new("xxx".as_bytes(), 400).internal_key()); assert!(!iter.valid()); iter.seek(LookupKey::new("abd".as_bytes(), 400).internal_key()); let (mut gotkey, gotval) = current_key_val(&iter).unwrap(); truncate_to_userkey(&mut gotkey); assert_eq!( ("abd".as_bytes(), "124".as_bytes()), (gotkey.as_slice(), gotval.as_slice()) ); } #[test] fn test_memtable_iterator_fwd() { let mt = 
get_memtable(); let mut iter = mt.iter(); let expected = vec![ "123".as_bytes(), /* i.e., the abc entry with * higher sequence number comes first */ "122".as_bytes(), "124".as_bytes(), "125".as_bytes(), "126".as_bytes(), ]; let mut i = 0; for (k, v) in LdbIteratorIter::wrap(&mut iter) { assert_eq!(v, expected[i]); i += 1; } } #[test] fn test_memtable_iterator_reverse() { let mt = get_memtable(); let mut iter = mt.iter(); iter.next(); assert!(iter.valid()); assert_eq!( current_key_val(&iter).unwrap().0, vec![97, 98, 99, 1, 120, 0, 0, 0, 0, 0, 0].as_slice() ); iter.next(); assert!(iter.valid()); assert_eq!( current_key_val(&iter).unwrap().0, vec![97, 98, 99, 1, 115, 0, 0, 0, 0, 0, 0].as_slice() ); iter.next(); assert!(iter.valid()); assert_eq!( current_key_val(&iter).unwrap().0, vec![97, 98, 100, 1, 121, 0, 0, 0, 0, 0, 0].as_slice() ); iter.prev(); assert!(iter.valid()); assert_eq!( current_key_val(&iter).unwrap().0, vec![97, 98, 99, 1, 115, 0, 0, 0, 0, 0, 0].as_slice() ); iter.prev(); assert!(iter.valid()); assert_eq!( current_key_val(&iter).unwrap().0, vec![97, 98, 99, 1, 120, 0, 0, 0, 0, 0, 0].as_slice() ); iter.prev(); assert!(!iter.valid()); } #[test] fn test_memtable_parse_key() { let key = vec![11, 1, 2, 3, 1, 123, 0, 0, 0, 0, 0, 0, 3, 4, 5, 6]; let (keylen, keyoff, tag, vallen, valoff) = parse_memtable_key(&key); assert_eq!(keylen, 3); assert_eq!(&key[keyoff..keyoff + keylen], vec![1, 2, 3].as_slice()); assert_eq!(tag, 123 << 8 | 1); assert_eq!(vallen, 3); assert_eq!(&key[valoff..valoff + vallen], vec![4, 5, 6].as_slice()); } #[test] fn test_memtable_iterator_behavior() { let mut mt = MemTable::new(options::for_test().cmp); let entries = vec![ (115, "abc", "122"), (120, "abd", "123"), (121, "abe", "124"), (123, "abf", "126"), ]; for e in entries.iter() { mt.add(e.0, ValueType::TypeValue, e.1.as_bytes(), e.2.as_bytes()); } test_iterator_properties(mt.iter()); } }
use crate::cmp::{Cmp, MemtableKeyCmp}; use crate::key_types::{build_memtable_key, parse_internal_key, parse_memtable_key, ValueType}; use crate::key_types::{LookupKey, UserKey}; use crate::skipmap::{SkipMap, SkipMapIter}; use crate::types::{current_key_val, LdbIterator, SequenceNumber}; use std::rc::Rc; use integer_encoding::FixedInt; pub struct MemTable { map: SkipMap, } impl MemTable { pub fn new(cmp: Rc<Box<dyn Cmp>>) -> MemTable { MemTable::new_raw(Rc::new(Box::new(MemtableKeyCmp(cmp)))) } fn new_raw(cmp: Rc<Box<dyn Cmp>>) -> MemTable { MemTable { map: SkipMap::new(cmp), } } pub fn len(&self) -> usize { self.map.len() } pub fn approx_mem_usage(&self) -> usize { self.map.approx_memory() } pub fn add<'a>(&mut self, seq: SequenceNumber, t: ValueType, key: UserKey<'a>, value: &[u8]) { self.map .insert(build_memtable_key(key, value, t, seq), Vec::new()) } #[allow(unused_variables)] pub fn get(&self, key: &LookupKey) -> (Option<Vec<u8>>, bool) { let mut iter = self.map.iter(); iter.seek(key.memtable_key()); if let Some((foundkey, _)) = current_key_val(&iter) { let (fkeylen, fkeyoff, tag, vallen, valoff) = parse_memtable_key(&foundkey); if key.user_key() == &foundkey[fkeyoff..fkeyoff + fkeylen] { if tag & 0xff == ValueType::TypeValue as u64 { return (Some(foundkey[valoff..valoff + vallen].to_vec()), false); } else { return (None, true); } } } (None, false) } pub fn iter(&self) -> MemtableIterator { MemtableIterator { skipmapiter: self.map.iter(), } } } pub struct MemtableIterator { skipmapiter: SkipMapIter, } impl LdbIterator for MemtableIterator { fn advance(&mut self) -> bool { if !self.skipmapiter.advance() { return false; } self.skipmapiter.valid() } fn reset(&mut self) { self.skipmapiter.reset(); } fn prev(&mut self) -> bool { let (mut key, mut val) = (vec![], vec![]); loop { if !self.skipmapiter.prev() { return false; } if self.skipmapiter.current(&mut key, &mut val) { let (_, _, tag, _, _) = parse_memtable_key(&key); if tag & 0xff == ValueType::TypeValue as u64 
{ return true; } else { continue; } } else { return false; } } } fn valid(&self) -> bool { self.skipmapiter.valid() } fn current(&self, key: &mut Vec<u8>, val: &mut Vec<u8>) -> bool { if !self.valid() { return false; } if self.skipmapiter.current(key, val) { let (keylen, keyoff, _, vallen, valoff) = parse_memtable_key(&key); val.clear(); val.extend_from_slice(&key[valoff..valoff + vallen]); shift_left(key, keyoff); key.truncate(keylen + u64::required_space()); true } else { panic!("should not happen"); } } fn seek(&mut self, to: &[u8]) { let (_, seq, ukey) = parse_internal_key(to); self.skipmapiter .seek(LookupKey::new(ukey, seq).memtable_key()); } } fn shift_left(s: &mut Vec<u8>, mid: usize) { for i in mid..s.len() { s.swap(i, i - mid); } let newlen = s.len() - mid; s.truncate(newlen); } #[cfg(test)] #[allow(unused_variables)] mod tests { use super::*; use crate::key_types::{parse_tag, truncate_to_userkey}; use crate::options; use crate::test_util::{test_iterator_properties, LdbIteratorIter}; #[test] fn test_shift_left() { let mut v = vec![1, 2, 3, 4, 5]; shift_left(&mut v, 1); assert_eq!(v, vec![2, 3, 4, 5]); let mut v = vec![1, 2, 3, 4, 5]; shift_left(&mut v, 4); assert_eq!(v, vec![5]); } fn get_memtable() -> MemTable { let mut mt = MemTable::new(options::for_test().cmp); let entries = vec![ (ValueType::TypeValue, 115, "abc", "122"), (ValueType::TypeValue, 120, "abc", "123"), (ValueType::TypeValue, 121, "abd", "124"), (ValueType::TypeDeletion, 122, "abe", "125"), (ValueType::TypeValue, 123, "abf", "126"), ]; for e in entries.iter() { mt.add(e.1, e.0, e.2.as_bytes(), e.3.as_bytes()); } mt } #[test] fn test_memtable_parse_tag() { let tag = (12345 << 8) | 1; assert_eq!(parse_tag(tag), (ValueType::TypeValue, 12345)); } #[test] fn test_memtable_add() { let mut mt = MemTable::new(options::for_test().cmp); mt.add( 123, ValueType::TypeValue, "abc".as_bytes(), "123".as_bytes(), ); assert_eq!( mt.map.iter().next().unwrap().0, &[11, 97, 98, 99, 1, 123, 0, 0, 0, 0, 0, 0, 3, 
49, 50, 51] ); assert_eq!( mt.iter().next().unwrap().0, &[97, 98, 99, 1, 123, 0, 0, 0, 0, 0, 0] ); } #[test] fn test_memtable_add_get() { let mt = get_memtable(); if let Some(v) = mt.get(&LookupKey::new("abc".as_bytes(), 110)).0 { eprintln!("{:?}", v); panic!("found"); } if let Some(v) = mt.get(&LookupKey::new("abf".as_bytes(), 110)).0 { eprintln!("{:?}", v); panic!("found"); } if let Some(v) = mt.get(&LookupKey::new("abc".as_bytes(), 116)).0 { assert_eq!(v, "122".as_bytes()); } else { panic!("not found"); } if let (Some(v), deleted) = mt.get(&LookupKey::new("abc".as_bytes(), 120)) { assert_eq!(v, "123".as_bytes()); assert!(!deleted); } else { panic!("not found"); } if let (None, deleted) = mt.get(&LookupKey::new("abe".as_bytes(), 122)) { assert!(deleted); } else { panic!("found deleted"); } if let Some(v) = mt.get(&LookupKey::new("abf".as_bytes(), 129)).0 { assert_eq!(v, "126".as_bytes()); } else { panic!("not found"); }
&key[valoff..valoff + vallen], vec![4, 5, 6].as_slice()); } #[test] fn test_memtable_iterator_behavior() { let mut mt = MemTable::new(options::for_test().cmp); let entries = vec![ (115, "abc", "122"), (120, "abd", "123"), (121, "abe", "124"), (123, "abf", "126"), ]; for e in entries.iter() { mt.add(e.0, ValueType::TypeValue, e.1.as_bytes(), e.2.as_bytes()); } test_iterator_properties(mt.iter()); } }
} #[test] fn test_memtable_iterator_init() { let mt = get_memtable(); let mut iter = mt.iter(); assert!(!iter.valid()); iter.next(); assert!(iter.valid()); assert_eq!( current_key_val(&iter).unwrap().0, vec![97, 98, 99, 1, 120, 0, 0, 0, 0, 0, 0].as_slice() ); iter.reset(); assert!(!iter.valid()); } #[test] fn test_memtable_iterator_seek() { let mt = get_memtable(); let mut iter = mt.iter(); assert!(!iter.valid()); iter.seek(LookupKey::new("abc".as_bytes(), 400).internal_key()); let (mut gotkey, gotval) = current_key_val(&iter).unwrap(); truncate_to_userkey(&mut gotkey); assert_eq!( ("abc".as_bytes(), "123".as_bytes()), (gotkey.as_slice(), gotval.as_slice()) ); iter.seek(LookupKey::new("xxx".as_bytes(), 400).internal_key()); assert!(!iter.valid()); iter.seek(LookupKey::new("abd".as_bytes(), 400).internal_key()); let (mut gotkey, gotval) = current_key_val(&iter).unwrap(); truncate_to_userkey(&mut gotkey); assert_eq!( ("abd".as_bytes(), "124".as_bytes()), (gotkey.as_slice(), gotval.as_slice()) ); } #[test] fn test_memtable_iterator_fwd() { let mt = get_memtable(); let mut iter = mt.iter(); let expected = vec![ "123".as_bytes(), /* i.e., the abc entry with * higher sequence number comes first */ "122".as_bytes(), "124".as_bytes(), "125".as_bytes(), "126".as_bytes(), ]; let mut i = 0; for (k, v) in LdbIteratorIter::wrap(&mut iter) { assert_eq!(v, expected[i]); i += 1; } } #[test] fn test_memtable_iterator_reverse() { let mt = get_memtable(); let mut iter = mt.iter(); iter.next(); assert!(iter.valid()); assert_eq!( current_key_val(&iter).unwrap().0, vec![97, 98, 99, 1, 120, 0, 0, 0, 0, 0, 0].as_slice() ); iter.next(); assert!(iter.valid()); assert_eq!( current_key_val(&iter).unwrap().0, vec![97, 98, 99, 1, 115, 0, 0, 0, 0, 0, 0].as_slice() ); iter.next(); assert!(iter.valid()); assert_eq!( current_key_val(&iter).unwrap().0, vec![97, 98, 100, 1, 121, 0, 0, 0, 0, 0, 0].as_slice() ); iter.prev(); assert!(iter.valid()); assert_eq!( current_key_val(&iter).unwrap().0, vec![97, 
98, 99, 1, 115, 0, 0, 0, 0, 0, 0].as_slice() ); iter.prev(); assert!(iter.valid()); assert_eq!( current_key_val(&iter).unwrap().0, vec![97, 98, 99, 1, 120, 0, 0, 0, 0, 0, 0].as_slice() ); iter.prev(); assert!(!iter.valid()); } #[test] fn test_memtable_parse_key() { let key = vec![11, 1, 2, 3, 1, 123, 0, 0, 0, 0, 0, 0, 3, 4, 5, 6]; let (keylen, keyoff, tag, vallen, valoff) = parse_memtable_key(&key); assert_eq!(keylen, 3); assert_eq!(&key[keyoff..keyoff + keylen], vec![1, 2, 3].as_slice()); assert_eq!(tag, 123 << 8 | 1); assert_eq!(vallen, 3); assert_eq!(
random
[ { "content": "fn gen_key_val<R: Rng>(gen: &mut R, keylen: usize, vallen: usize) -> (Vec<u8>, Vec<u8>) {\n\n let mut key = Vec::with_capacity(keylen);\n\n let mut val = Vec::with_capacity(vallen);\n\n\n\n for _i in 0..keylen {\n\n key.push(gen.gen_range(b'a', b'z'));\n\n }\n\n for _i in 0..vallen {\n\n val.push(gen.gen_range(b'a', b'z'));\n\n }\n\n (key, val)\n\n}\n\n\n", "file_path": "src/benches/maps_bench.rs", "rank": 0, "score": 313065.2370416329 }, { "content": "/// A memtable key is a bytestring containing (keylen, key, tag, vallen, val). This function\n\n/// builds such a key. It's called key because the underlying Map implementation will only be\n\n/// concerned with keys; the value field is not used (instead, the value is encoded in the key,\n\n/// and for lookups we just search for the next bigger entry).\n\n/// keylen is the length of key + 8 (to account for the tag)\n\npub fn build_memtable_key(key: &[u8], value: &[u8], t: ValueType, seq: SequenceNumber) -> Vec<u8> {\n\n // We are using the original LevelDB approach here -- encoding key and value into the\n\n // key that is used for insertion into the SkipMap.\n\n // The format is: [key_size: varint32, key_data: [u8], flags: u64, value_size: varint32,\n\n // value_data: [u8]]\n\n\n\n let keysize = key.len() + U64_SPACE;\n\n let valsize = value.len();\n\n let mut buf = Vec::new();\n\n buf.resize(\n\n keysize + valsize + keysize.required_space() + valsize.required_space(),\n\n 0,\n\n );\n\n\n\n {\n\n let mut writer = buf.as_mut_slice();\n\n writer.write_varint(keysize).expect(\"write to slice failed\");\n\n writer.write_all(key).expect(\"write to slice failed\");\n\n writer\n\n .write_fixedint((t as u64) | (seq << 8))\n\n .expect(\"write to slice failed\");\n\n writer.write_varint(valsize).expect(\"write to slice failed\");\n\n writer.write_all(value).expect(\"write to slice failed\");\n\n assert_eq!(writer.len(), 0);\n\n }\n\n buf\n\n}\n\n\n", "file_path": "src/key_types.rs", "rank": 2, "score": 
302525.65835142345 }, { "content": "/// truncate_to_userkey performs an in-place conversion from InternalKey to UserKey format.\n\npub fn truncate_to_userkey(ikey: &mut Vec<u8>) {\n\n let len = ikey.len();\n\n assert!(len > 8);\n\n ikey.truncate(len - 8);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_memtable_lookupkey() {\n\n use integer_encoding::VarInt;\n\n\n\n let lk1 = LookupKey::new(\"abcde\".as_bytes(), 123);\n\n let lk2 = LookupKey::new(\"xyabxy\".as_bytes(), 97);\n\n\n\n // Assert correct allocation strategy\n\n assert_eq!(lk1.key.len(), 14);\n\n assert_eq!(lk1.key.capacity(), 14);\n", "file_path": "src/key_types.rs", "rank": 3, "score": 301897.4596200154 }, { "content": "/// Parses a tag into (type, sequence number)\n\npub fn parse_tag(tag: u64) -> (ValueType, u64) {\n\n let seq = tag >> 8;\n\n let typ = tag & 0xff;\n\n\n\n match typ {\n\n 0 => (ValueType::TypeDeletion, seq),\n\n 1 => (ValueType::TypeValue, seq),\n\n _ => (ValueType::TypeValue, seq),\n\n }\n\n}\n\n\n", "file_path": "src/key_types.rs", "rank": 4, "score": 260235.89275201818 }, { "content": "/// current_key_val is a helper allocating two vectors and filling them with the current key/value\n\n/// of the specified iterator.\n\npub fn current_key_val<It: LdbIterator + ?Sized>(it: &It) -> Option<(Vec<u8>, Vec<u8>)> {\n\n let (mut k, mut v) = (vec![], vec![]);\n\n if it.current(&mut k, &mut v) {\n\n Some((k, v))\n\n } else {\n\n None\n\n }\n\n}\n\n\n\nimpl LdbIterator for Box<dyn LdbIterator> {\n\n fn advance(&mut self) -> bool {\n\n self.as_mut().advance()\n\n }\n\n fn current(&self, key: &mut Vec<u8>, val: &mut Vec<u8>) -> bool {\n\n self.as_ref().current(key, val)\n\n }\n\n fn seek(&mut self, key: &[u8]) {\n\n self.as_mut().seek(key)\n\n }\n\n fn reset(&mut self) {\n", "file_path": "src/types.rs", "rank": 5, "score": 251782.39449683172 }, { "content": "/// Parses a memtable key and returns (keylen, key offset, tag, vallen, val offset).\n\n/// If the key 
only contains (keylen, key, tag), the vallen and val offset return values will be\n\n/// meaningless.\n\npub fn parse_memtable_key(mkey: MemtableKey) -> (usize, usize, u64, usize, usize) {\n\n let (keylen, mut i): (usize, usize) = VarInt::decode_var(&mkey).unwrap();\n\n let keyoff = i;\n\n i += keylen - 8;\n\n\n\n if mkey.len() > i {\n\n let tag = FixedInt::decode_fixed(&mkey[i..i + 8]);\n\n i += 8;\n\n let (vallen, j): (usize, usize) = VarInt::decode_var(&mkey[i..]).unwrap();\n\n i += j;\n\n let valoff = i;\n\n (keylen - 8, keyoff, tag, vallen, valoff)\n\n } else {\n\n (keylen - 8, keyoff, 0, 0, 0)\n\n }\n\n}\n\n\n", "file_path": "src/key_types.rs", "rank": 6, "score": 246432.53392404455 }, { "content": "/// This shared test takes an iterator with exactly four elements and tests that it fulfills the\n\n/// generic iterator properties. Every iterator defined in this code base should pass this test.\n\npub fn test_iterator_properties<It: LdbIterator>(mut it: It) {\n\n assert!(!it.valid());\n\n assert!(it.advance());\n\n assert!(it.valid());\n\n let first = current_key_val(&it);\n\n assert!(it.advance());\n\n let second = current_key_val(&it);\n\n assert!(it.advance());\n\n let third = current_key_val(&it);\n\n // fourth (last) element\n\n assert!(it.advance());\n\n assert!(it.valid());\n\n let fourth = current_key_val(&it);\n\n // past end is invalid\n\n assert!(!it.advance());\n\n assert!(!it.valid());\n\n\n\n it.reset();\n\n it.seek(&fourth.as_ref().unwrap().0);\n\n assert!(it.valid());\n", "file_path": "src/test_util.rs", "rank": 7, "score": 206341.49508246966 }, { "content": "/// cmp_memtable_key efficiently compares two memtable keys by only parsing what's actually needed.\n\npub fn cmp_memtable_key<'a, 'b>(\n\n ucmp: &dyn Cmp,\n\n a: MemtableKey<'a>,\n\n b: MemtableKey<'b>,\n\n) -> Ordering {\n\n let (alen, aoff): (usize, usize) = VarInt::decode_var(&a).unwrap();\n\n let (blen, boff): (usize, usize) = VarInt::decode_var(&b).unwrap();\n\n let userkey_a = 
&a[aoff..aoff + alen - 8];\n\n let userkey_b = &b[boff..boff + blen - 8];\n\n\n\n match ucmp.cmp(userkey_a, userkey_b) {\n\n Ordering::Less => Ordering::Less,\n\n Ordering::Greater => Ordering::Greater,\n\n Ordering::Equal => {\n\n let atag = FixedInt::decode_fixed(&a[aoff + alen - 8..aoff + alen]);\n\n let btag = FixedInt::decode_fixed(&b[boff + blen - 8..boff + blen]);\n\n let (_, aseq) = parse_tag(atag);\n\n let (_, bseq) = parse_tag(btag);\n\n\n\n // reverse!\n\n bseq.cmp(&aseq)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/key_types.rs", "rank": 8, "score": 201920.99797863167 }, { "content": "/// key_is_after_file returns true if the given user key is larger than the largest key in f.\n\nfn key_is_after_file<'a>(cmp: &InternalKeyCmp, key: UserKey<'a>, f: &FileMetaHandle) -> bool {\n\n let f = f.borrow();\n\n let ulargest = parse_internal_key(&f.largest).2;\n\n !key.is_empty() && cmp.cmp_inner(key, ulargest) == Ordering::Greater\n\n}\n\n\n", "file_path": "src/version.rs", "rank": 9, "score": 198908.33691017795 }, { "content": "/// key_is_before_file returns true if the given user key is larger than the largest key in f.\n\nfn key_is_before_file<'a>(cmp: &InternalKeyCmp, key: UserKey<'a>, f: &FileMetaHandle) -> bool {\n\n let f = f.borrow();\n\n let usmallest = parse_internal_key(&f.smallest).2;\n\n !key.is_empty() && cmp.cmp_inner(key, usmallest) == Ordering::Less\n\n}\n\n\n", "file_path": "src/version.rs", "rank": 10, "score": 198908.33691017795 }, { "content": "/// offset_data_iterate iterates over the entries in data that are indexed by the offsets given in\n\n/// offsets. This is e.g. 
the internal format of a FilterBlock.\n\nfn offset_data_iterate<F: FnMut(&[u8])>(data: &[u8], offsets: &[usize], mut f: F) {\n\n for offix in 0..offsets.len() {\n\n let upper = if offix == offsets.len() - 1 {\n\n data.len()\n\n } else {\n\n offsets[offix + 1]\n\n };\n\n let piece = &data[offsets[offix]..upper];\n\n f(piece);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::key_types::LookupKey;\n\n\n\n const _BITS_PER_KEY: u32 = 12;\n\n\n\n fn input_data() -> (Vec<u8>, Vec<usize>) {\n", "file_path": "src/filter.rs", "rank": 11, "score": 193243.9736093353 }, { "content": "/// Parse a key in InternalKey format.\n\npub fn parse_internal_key(ikey: InternalKey) -> (ValueType, SequenceNumber, UserKey) {\n\n if ikey.is_empty() {\n\n return (ValueType::TypeDeletion, 0, &ikey[0..0]);\n\n }\n\n assert!(ikey.len() >= 8);\n\n let (typ, seq) = parse_tag(FixedInt::decode_fixed(&ikey[ikey.len() - 8..]));\n\n (typ, seq, &ikey[0..ikey.len() - 8])\n\n}\n\n\n", "file_path": "src/key_types.rs", "rank": 12, "score": 179503.11741124198 }, { "content": "fn bench_gen_key_val(b: &mut Bencher) {\n\n let mut gen = rand::thread_rng();\n\n b.iter(|| {\n\n let (k, _v) = gen_key_val(&mut gen, 10, 10);\n\n k.len();\n\n });\n\n}\n\n\n", "file_path": "src/benches/maps_bench.rs", "rank": 13, "score": 174144.6121199253 }, { "content": "/// cmp_internal_key efficiently compares keys in InternalKey format by only parsing the parts that\n\n/// are actually needed for a comparison.\n\npub fn cmp_internal_key<'a, 'b>(\n\n ucmp: &dyn Cmp,\n\n a: InternalKey<'a>,\n\n b: InternalKey<'b>,\n\n) -> Ordering {\n\n match ucmp.cmp(&a[0..a.len() - 8], &b[0..b.len() - 8]) {\n\n Ordering::Less => Ordering::Less,\n\n Ordering::Greater => Ordering::Greater,\n\n Ordering::Equal => {\n\n let seqa = parse_tag(FixedInt::decode_fixed(&a[a.len() - 8..])).1;\n\n let seqb = parse_tag(FixedInt::decode_fixed(&b[b.len() - 8..])).1;\n\n // reverse comparison!\n\n seqb.cmp(&seqa)\n\n }\n\n 
}\n\n}\n\n\n", "file_path": "src/key_types.rs", "rank": 14, "score": 172134.2945193688 }, { "content": "/// sort_files_by_smallest sorts the list of files by the smallest keys of the files.\n\nfn sort_files_by_smallest<C: Cmp>(cmp: &C, files: &mut Vec<FileMetaHandle>) {\n\n files.sort_by(|a, b| cmp.cmp(&a.borrow().smallest, &b.borrow().smallest))\n\n}\n\n\n", "file_path": "src/version_set.rs", "rank": 15, "score": 169128.7053521506 }, { "content": "pub fn micros() -> u64 {\n\n loop {\n\n let now = time::SystemTime::now().duration_since(time::UNIX_EPOCH);\n\n\n\n match now {\n\n Err(_) => continue,\n\n Ok(dur) => return dur.as_secs() * 1000000 + dur.subsec_micros() as u64,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/env_common.rs", "rank": 16, "score": 164612.98494901013 }, { "content": "fn read_length_prefixed<R: Read>(reader: &mut R) -> Result<Vec<u8>> {\n\n if let Ok(klen) = reader.read_varint() {\n\n let mut keybuf = Vec::new();\n\n keybuf.resize(klen, 0);\n\n\n\n if let Ok(l) = reader.read(&mut keybuf) {\n\n if l != klen {\n\n return err(StatusCode::IOError, \"Couldn't read full key\");\n\n }\n\n Ok(keybuf)\n\n } else {\n\n err(StatusCode::IOError, \"Couldn't read key\")\n\n }\n\n } else {\n\n err(StatusCode::IOError, \"Couldn't read key length\")\n\n }\n\n}\n\n\n\n/// Manages changes to the set of managed SSTables and logfiles.\n\npub struct VersionEdit {\n", "file_path": "src/version_edit.rs", "rank": 17, "score": 155816.10616755346 }, { "content": "fn bench_skipmap_insert(b: &mut Bencher) {\n\n let mut gen = rand::thread_rng();\n\n\n\n let mut skm = SkipMap::new(Rc::new(Box::new(DefaultCmp)));\n\n\n\n b.iter(|| {\n\n let (mut k, v) = gen_key_val(&mut gen, 10, 10);\n\n skm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n skm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n skm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n skm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n skm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n skm.insert(k.clone(), 
v.clone());\n\n k[9] += 1;\n\n skm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n skm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n skm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n skm.insert(k, v);\n\n });\n\n}\n\n\n", "file_path": "src/benches/maps_bench.rs", "rank": 18, "score": 152044.08215484663 }, { "content": "fn iter(db: &mut DB) {\n\n let mut it = db.new_iter().unwrap();\n\n let (mut k, mut v) = (vec![], vec![]);\n\n let mut out = io::BufWriter::new(io::stdout());\n\n while it.advance() {\n\n it.current(&mut k, &mut v);\n\n out.write_all(&k).unwrap();\n\n out.write_all(b\" => \").unwrap();\n\n out.write_all(&v).unwrap();\n\n out.write_all(b\"\\n\").unwrap();\n\n }\n\n}\n\n\n", "file_path": "examples/leveldb-tool/src/main.rs", "rank": 19, "score": 148255.82104572095 }, { "content": "pub fn for_test() -> Options {\n\n let mut o = Options::default();\n\n o.env = Rc::new(Box::new(MemEnv::new()));\n\n o.log = Some(share(infolog::stderr()));\n\n o\n\n}\n", "file_path": "src/options.rs", "rank": 20, "score": 143597.91971331468 }, { "content": "/// new_version_iter returns an iterator over the entries in the specified ordered list of table\n\n/// files.\n\npub fn new_version_iter(\n\n files: Vec<FileMetaHandle>,\n\n cache: Shared<TableCache>,\n\n ucmp: Rc<Box<dyn Cmp>>,\n\n) -> VersionIter {\n\n VersionIter {\n\n files,\n\n cache,\n\n cmp: InternalKeyCmp(ucmp),\n\n current: None,\n\n current_ix: 0,\n\n }\n\n}\n\n\n\n/// VersionIter iterates over the entries in an ordered list of table files (specifically, for\n\n/// example, the tables in a level).\n\n///\n\n/// Note that VersionIter returns entries of type Deletion.\n\npub struct VersionIter {\n\n // NOTE: Maybe we need to change this to Rc to support modification of the file set after\n", "file_path": "src/version.rs", "rank": 21, "score": 142766.27683348724 }, { "content": "/// Verify checksum of block\n\nfn verify_table_block(data: &[u8], compression: u8, want: u32) -> bool {\n\n let mut digest = 
crc32::Digest::new(crc32::CASTAGNOLI);\n\n digest.write(data);\n\n digest.write(&[compression; 1]);\n\n digest.sum32() == want\n\n}\n", "file_path": "src/table_block.rs", "rank": 22, "score": 141159.100652383 }, { "content": "pub fn build_table<I: LdbIterator, P: AsRef<Path>>(\n\n dbname: P,\n\n opt: &Options,\n\n mut from: I,\n\n num: FileNum,\n\n) -> Result<FileMetaData> {\n\n from.reset();\n\n let filename = table_file_name(dbname.as_ref(), num);\n\n\n\n let (mut kbuf, mut vbuf) = (vec![], vec![]);\n\n let mut firstkey = None;\n\n // lastkey is what remains in kbuf.\n\n\n\n // Clean up file if write fails at any point.\n\n //\n\n // TODO: Replace with catch {} when available.\n\n let r = (|| -> Result<()> {\n\n let f = opt.env.open_writable_file(Path::new(&filename))?;\n\n let f = BufWriter::new(f);\n\n let mut builder = TableBuilder::new(opt.clone(), f);\n", "file_path": "src/db_impl.rs", "rank": 23, "score": 134547.52445825314 }, { "content": "/// Comparator trait, supporting types that can be nested (i.e., add additional functionality on\n\n/// top of an inner comparator)\n\npub trait Cmp {\n\n /// Compare to byte strings, bytewise.\n\n fn cmp(&self, a: &[u8], b: &[u8]) -> Ordering;\n\n\n\n /// Return the shortest byte string that compares \"Greater\" to the first argument and \"Less\" to\n\n /// the second one.\n\n fn find_shortest_sep(&self, from: &[u8], to: &[u8]) -> Vec<u8>;\n\n /// Return the shortest byte string that compares \"Greater\" to the argument.\n\n fn find_short_succ(&self, key: &[u8]) -> Vec<u8>;\n\n\n\n /// A unique identifier for a comparator. A comparator wrapper (like InternalKeyCmp) may\n\n /// return the id of its inner comparator.\n\n fn id(&self) -> &'static str;\n\n}\n\n\n\n/// The default byte-wise comparator.\n\n#[derive(Clone)]\n\npub struct DefaultCmp;\n\n\n\nimpl Cmp for DefaultCmp {\n", "file_path": "src/cmp.rs", "rank": 24, "score": 133486.94156058365 }, { "content": "/// Implements the backing store for a `MemTable`. 
The important methods are `insert()` and\n\n/// `contains()`; in order to get full key and value for an entry, use a `SkipMapIter` instance,\n\n/// `seek()` to the key to look up (this is as fast as any lookup in a skip map), and then call\n\n/// `current()`.\n\nstruct InnerSkipMap {\n\n head: Box<Node>,\n\n rand: StdRng,\n\n len: usize,\n\n // approximation of memory used.\n\n approx_mem: usize,\n\n cmp: Rc<Box<dyn Cmp>>,\n\n}\n\n\n\npub struct SkipMap {\n\n map: Rc<RefCell<InnerSkipMap>>,\n\n}\n\n\n\nimpl SkipMap {\n\n /// Returns a SkipMap that wraps the comparator inside a MemtableKeyCmp.\n\n pub fn new_memtable_map(cmp: Rc<Box<dyn Cmp>>) -> SkipMap {\n\n SkipMap::new(Rc::new(Box::new(MemtableKeyCmp(cmp))))\n\n }\n\n\n\n /// Returns a SkipMap that uses the specified comparator.\n", "file_path": "src/skipmap.rs", "rank": 25, "score": 128457.46086996284 }, { "content": "fn bench_hashmap_insert(b: &mut Bencher) {\n\n let mut gen = rand::thread_rng();\n\n let mut hm = HashMap::new();\n\n\n\n b.iter(|| {\n\n let (mut k, v) = gen_key_val(&mut gen, 10, 10);\n\n hm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n hm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n hm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n hm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n hm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n hm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n hm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n hm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n hm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n hm.insert(k, v);\n\n });\n\n}\n\n\n", "file_path": "src/benches/maps_bench.rs", "rank": 26, "score": 124777.72309906501 }, { "content": "fn bench_btree_insert(b: &mut Bencher) {\n\n let mut gen = rand::thread_rng();\n\n let mut btm = BTreeMap::new();\n\n\n\n b.iter(|| {\n\n let (mut k, v) = gen_key_val(&mut gen, 10, 10);\n\n btm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n btm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n btm.insert(k.clone(), 
v.clone());\n\n k[9] += 1;\n\n btm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n btm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n btm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n\n btm.insert(k.clone(), v.clone());\n\n k[9] += 1;\n", "file_path": "src/benches/maps_bench.rs", "rank": 27, "score": 124777.72309906501 }, { "content": "/// total_size returns the sum of sizes of the given files.\n\npub fn total_size<'a, I: Iterator<Item = &'a FileMetaHandle>>(files: I) -> usize {\n\n files.fold(0, |a, f| a + f.borrow().size)\n\n}\n\n\n", "file_path": "src/version.rs", "rank": 28, "score": 124502.9939181521 }, { "content": "fn aux_get_byte_slice<T: AsRef<[u8]>>(source: &'_ T) -> &'_ [u8] {\n\n source.as_ref()\n\n}\n\n\n", "file_path": "examples/gramine/src/main.rs", "rank": 29, "score": 121423.56786528416 }, { "content": "/// Reads the data for the specified block handle from a file.\n\nfn read_bytes(f: &dyn RandomAccess, location: &BlockHandle) -> Result<Vec<u8>> {\n\n let mut buf = vec![0; location.size()];\n\n f.read_at(location.offset(), &mut buf).map(|_| buf)\n\n}\n\n\n", "file_path": "src/table_block.rs", "rank": 30, "score": 116406.24344820209 }, { "content": "fn delete(db: &mut DB, k: &str) {\n\n db.delete(k.as_bytes()).unwrap();\n\n db.flush().unwrap();\n\n}\n\n\n", "file_path": "examples/leveldb-tool/src/main.rs", "rank": 31, "score": 115943.33531360736 }, { "content": "fn fill_db(db: &mut DB, entries: usize) -> Result<(), Box<dyn Error>> {\n\n for i in 0..entries {\n\n let (k, v) = (gen_string(KEY_LEN), gen_string(VAL_LEN));\n\n db.put(k.as_bytes(), v.as_bytes())?;\n\n\n\n if i % 100 == 0 {\n\n db.flush()?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/write-a-lot/src/main.rs", "rank": 32, "score": 114330.76540355413 }, { "content": "/// Returns Options that will cause a database to exist purely in-memory instead of being stored on\n\n/// disk. 
This is useful for testing or ephemeral databases.\n\npub fn in_memory() -> Options {\n\n let mut opt = Options::default();\n\n opt.env = Rc::new(Box::new(MemEnv::new()));\n\n opt\n\n}\n\n\n", "file_path": "src/options.rs", "rank": 33, "score": 110786.37257098418 }, { "content": "pub fn stderr() -> Logger {\n\n Logger(Box::new(io::stderr()))\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! log {\n\n ($l:expr) => ($l.as_ref().map(|l| l.borrow_mut().0.write(\"\\n\".as_bytes()).is_ok()));\n\n ($l:expr, $fmt:expr) => (\n\n $l.as_ref().map(|l| l.borrow_mut().0.write(concat!($fmt, \"\\n\").as_bytes()).is_ok()));\n\n ($l:expr, $fmt:expr, $($arg:tt)*) => (\n\n $l.as_ref().map(\n\n |l| l.borrow_mut().0.write_fmt(format_args!(concat!($fmt, \"\\n\"), $($arg)*)).is_ok()));\n\n}\n", "file_path": "src/infolog.rs", "rank": 34, "score": 110774.53799742227 }, { "content": "/// Reads a serialized filter block from a file and returns a FilterBlockReader.\n\npub fn read_filter_block(\n\n src: &dyn RandomAccess,\n\n location: &BlockHandle,\n\n policy: filter::BoxedFilterPolicy,\n\n) -> Result<FilterBlockReader> {\n\n if location.size() == 0 {\n\n return err(\n\n StatusCode::InvalidArgument,\n\n \"no filter block in empty location\",\n\n );\n\n }\n\n let buf = read_bytes(src, location)?;\n\n Ok(FilterBlockReader::new_owned(policy, buf))\n\n}\n\n\n", "file_path": "src/table_block.rs", "rank": 35, "score": 109009.56065552826 }, { "content": "/// Reads a table block from a random-access source.\n\n/// A table block consists of [bytes..., compress (1B), checksum (4B)]; the handle only refers to\n\n/// the location and length of [bytes...].\n\npub fn read_table_block(\n\n opt: Options,\n\n f: &dyn RandomAccess,\n\n location: &BlockHandle,\n\n) -> Result<Block> {\n\n // The block is denoted by offset and length in BlockHandle. 
A block in an encoded\n\n // table is followed by 1B compression type and 4B checksum.\n\n // The checksum refers to the compressed contents.\n\n let buf = read_bytes(f, location)?;\n\n let compress = read_bytes(\n\n f,\n\n &BlockHandle::new(\n\n location.offset() + location.size(),\n\n table_builder::TABLE_BLOCK_COMPRESS_LEN,\n\n ),\n\n )?;\n\n let cksum = read_bytes(\n\n f,\n\n &BlockHandle::new(\n\n location.offset() + location.size() + table_builder::TABLE_BLOCK_COMPRESS_LEN,\n", "file_path": "src/table_block.rs", "rank": 36, "score": 109005.21799200107 }, { "content": "/// open_info_log opens an info log file in the given database. It transparently returns a\n\n/// /dev/null logger in case the open fails.\n\nfn open_info_log<E: Env + ?Sized, P: AsRef<Path>>(env: &E, db: P) -> Logger {\n\n let db = db.as_ref();\n\n let logfilename = db.join(\"LOG\");\n\n let _ = env.mkdir(Path::new(db));\n\n if let Ok(e) = env.exists(Path::new(&logfilename)) {\n\n if e {\n\n let oldlogfilename = db.join(\"LOG.old\");\n\n // replace rename usage to reduce big LOG file overload\n\n cfg_if::cfg_if! 
{\n\n if #[cfg(feature = \"gramine\")] {\n\n let _ = env.delete(Path::new(&oldlogfilename));\n\n } else {\n\n let _ = env.rename(Path::new(&logfilename), Path::new(&oldlogfilename));\n\n }\n\n }\n\n }\n\n }\n\n if let Ok(w) = env.open_writable_file(Path::new(&logfilename)) {\n\n Logger(w)\n\n } else {\n", "file_path": "src/db_impl.rs", "rank": 37, "score": 107128.45364726035 }, { "content": "/// get_range returns the indices of the files within files that have the smallest lower bound\n\n/// respectively the largest upper bound.\n\nfn get_range<'a, C: Cmp, I: Iterator<Item = &'a FileMetaHandle>>(\n\n c: &C,\n\n files: I,\n\n) -> (Vec<u8>, Vec<u8>) {\n\n let mut smallest = None;\n\n let mut largest = None;\n\n for f in files {\n\n if smallest.is_none() {\n\n smallest = Some(f.borrow().smallest.clone());\n\n }\n\n if largest.is_none() {\n\n largest = Some(f.borrow().largest.clone());\n\n }\n\n let f = f.borrow();\n\n if c.cmp(&f.smallest, smallest.as_ref().unwrap()) == Ordering::Less {\n\n smallest = Some(f.smallest.clone());\n\n }\n\n if c.cmp(&f.largest, largest.as_ref().unwrap()) == Ordering::Greater {\n\n largest = Some(f.largest.clone());\n\n }\n", "file_path": "src/version_set.rs", "rank": 38, "score": 104097.24507421788 }, { "content": "/// A node in a skipmap contains links to the next node and others that are further away (skips);\n\n/// `skips[0]` is the immediate element after, that is, the element contained in `next`.\n\nstruct Node {\n\n skips: Vec<Option<*mut Node>>,\n\n next: Option<Box<Node>>,\n\n key: Vec<u8>,\n\n value: Vec<u8>,\n\n}\n\n\n", "file_path": "src/skipmap.rs", "rank": 39, "score": 102791.14835313248 }, { "content": "fn put(db: &mut DB, k: &str, v: &str) {\n\n db.put(k.as_bytes(), v.as_bytes()).unwrap();\n\n db.flush().unwrap();\n\n}\n\n\n", "file_path": "examples/leveldb-tool/src/main.rs", "rank": 40, "score": 101498.4977053247 }, { "content": "pub fn sleep_for(micros: u32) {\n\n thread::sleep(time::Duration::new(0, micros * 
1000));\n\n}\n", "file_path": "src/env_common.rs", "rank": 41, "score": 101475.01623804378 }, { "content": "pub fn path_to_string(p: &Path) -> String {\n\n p.to_str().map(String::from).unwrap()\n\n}\n\n\n", "file_path": "src/env.rs", "rank": 42, "score": 98502.02448981492 }, { "content": "pub fn mask_crc(c: u32) -> u32 {\n\n (c.wrapping_shr(15) | c.wrapping_shl(17)).wrapping_add(MASK_DELTA)\n\n}\n\n\n", "file_path": "src/log.rs", "rank": 43, "score": 98502.02448981492 }, { "content": "pub fn path_to_str(p: &Path) -> &str {\n\n p.to_str().unwrap()\n\n}\n", "file_path": "src/env.rs", "rank": 44, "score": 98502.02448981492 }, { "content": "pub fn unmask_crc(mc: u32) -> u32 {\n\n let rot = mc.wrapping_sub(MASK_DELTA);\n\n rot.wrapping_shr(17) | rot.wrapping_shl(15)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::io::Cursor;\n\n\n\n #[test]\n\n fn test_crc_mask_crc() {\n\n let crc = crc32::checksum_castagnoli(\"abcde\".as_bytes());\n\n assert_eq!(crc, unmask_crc(mask_crc(crc)));\n\n assert!(crc != mask_crc(crc));\n\n }\n\n\n\n #[test]\n\n fn test_crc_sanity() {\n\n assert_eq!(0x8a9136aa, crc32::checksum_castagnoli(&[0 as u8; 32]));\n", "file_path": "src/log.rs", "rank": 45, "score": 95958.4914068843 }, { "content": "#[derive(Debug, Default)]\n\nstruct CompactionStats {\n\n micros: u64,\n\n read: usize,\n\n written: usize,\n\n}\n\n\n\nimpl CompactionStats {\n\n fn add(&mut self, cs: CompactionStats) {\n\n self.micros += cs.micros;\n\n self.read += cs.read;\n\n self.written += cs.written;\n\n }\n\n}\n\n\n", "file_path": "src/db_impl.rs", "rank": 46, "score": 95158.67924384886 }, { "content": "struct CompactionState {\n\n compaction: Compaction,\n\n smallest_seq: SequenceNumber,\n\n outputs: Vec<FileMetaData>,\n\n builder: Option<TableBuilder<Box<dyn Write>>>,\n\n total_bytes: usize,\n\n}\n\n\n\nimpl CompactionState {\n\n fn new(c: Compaction, smallest: SequenceNumber) -> CompactionState {\n\n CompactionState {\n\n compaction: c,\n\n smallest_seq: 
smallest,\n\n outputs: vec![],\n\n builder: None,\n\n total_bytes: 0,\n\n }\n\n }\n\n\n\n fn current_output(&mut self) -> &mut FileMetaData {\n", "file_path": "src/db_impl.rs", "rank": 47, "score": 95158.67924384886 }, { "content": "struct MemFSEntry {\n\n f: MemFile,\n\n locked: bool,\n\n}\n\n\n\n/// MemFS implements a completely in-memory file system, both for testing and temporary in-memory\n\n/// databases. It supports full concurrency.\n\npub struct MemFS {\n\n store: Arc<Mutex<HashMap<String, MemFSEntry>>>,\n\n}\n\n\n\nimpl MemFS {\n\n fn new() -> MemFS {\n\n MemFS {\n\n store: Arc::new(Mutex::new(HashMap::new())),\n\n }\n\n }\n\n\n\n /// Open a file. The caller can use the MemFile either inside a MemFileReader or as\n\n /// RandomAccess.\n", "file_path": "src/mem_env.rs", "rank": 48, "score": 92728.11645140588 }, { "content": "/// An extension of the standard `Iterator` trait that supports some methods necessary for LevelDB.\n\n/// This works because the iterators used are stateful and keep the last returned element.\n\n///\n\n/// Note: Implementing types are expected to hold `!valid()` before the first call to `advance()`.\n\n///\n\n/// test_util::test_iterator_properties() verifies that all properties hold.\n\npub trait LdbIterator {\n\n /// Advances the position of the iterator by one element (which can be retrieved using\n\n /// current(). If no more elements are available, advance() returns false, and the iterator\n\n /// becomes invalid (i.e. as if reset() had been called).\n\n fn advance(&mut self) -> bool;\n\n /// Return the current item (i.e. the item most recently returned by `next()`).\n\n fn current(&self, key: &mut Vec<u8>, val: &mut Vec<u8>) -> bool;\n\n /// Seek the iterator to `key` or the next bigger key. If the seek is invalid (past last\n\n /// element, or before first element), the iterator is `reset()` and not valid.\n\n fn seek(&mut self, key: &[u8]);\n\n /// Resets the iterator to be `!valid()`, i.e. 
positioned before the first element.\n\n fn reset(&mut self);\n\n /// Returns true if the iterator is not positioned before the first or after the last element,\n\n /// i.e. if `current()` would succeed.\n\n fn valid(&self) -> bool;\n\n /// Go to the previous item; if the iterator is moved beyond the first element, `prev()`\n\n /// returns false and it will be `!valid()`. This is inefficient for most iterator\n\n /// implementations.\n\n fn prev(&mut self) -> bool;\n\n\n", "file_path": "src/types.rs", "rank": 49, "score": 91506.92593958596 }, { "content": "pub fn set_current_file<P: AsRef<Path>>(\n\n env: &Box<dyn Env>,\n\n dbname: P,\n\n manifest_file_num: FileNum,\n\n) -> Result<()> {\n\n let dbname = dbname.as_ref();\n\n let manifest_base = manifest_name(manifest_file_num);\n\n let tempfile = temp_file_name(dbname, manifest_file_num);\n\n {\n\n let mut f = env.open_writable_file(Path::new(&tempfile))?;\n\n f.write_all(manifest_base.display().to_string().as_bytes())?;\n\n f.write_all(b\"\\n\")?;\n\n }\n\n let currentfile = current_file_name(dbname);\n\n if let Err(e) = env.rename(Path::new(&tempfile), Path::new(&currentfile)) {\n\n // ignore error.\n\n let _ = env.delete(Path::new(&tempfile));\n\n return Err(Status::from(e));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/version_set.rs", "rank": 50, "score": 91476.35980401209 }, { "content": "pub fn int_to_compressiontype(i: u32) -> Option<CompressionType> {\n\n match i {\n\n 0 => Some(CompressionType::CompressionNone),\n\n 1 => Some(CompressionType::CompressionSnappy),\n\n _ => None,\n\n }\n\n}\n\n\n\n/// Options contains general parameters for a LevelDB instance. 
Most of the names are\n\n/// self-explanatory; the defaults are defined in the `Default` implementation.\n\n#[derive(Clone)]\n\npub struct Options {\n\n pub cmp: Rc<Box<dyn Cmp>>,\n\n pub env: Rc<Box<dyn Env>>,\n\n pub log: Option<Shared<Logger>>,\n\n pub create_if_missing: bool,\n\n pub error_if_exists: bool,\n\n pub paranoid_checks: bool,\n\n pub write_buffer_size: usize,\n\n pub max_open_files: usize,\n", "file_path": "src/options.rs", "rank": 51, "score": 91041.5849303664 }, { "content": "/// merge_iters merges and collects the items from two sorted iterators.\n\nfn merge_iters<\n\n Item,\n\n C: Fn(&Item, &Item) -> Ordering,\n\n I: Iterator<Item = Item>,\n\n J: Iterator<Item = Item>,\n\n>(\n\n mut iter_a: I,\n\n mut iter_b: J,\n\n cmp: C,\n\n) -> Vec<Item> {\n\n let mut a = iter_a.next();\n\n let mut b = iter_b.next();\n\n let mut out = vec![];\n\n while a.is_some() && b.is_some() {\n\n let ord = cmp(a.as_ref().unwrap(), b.as_ref().unwrap());\n\n if ord == Ordering::Less {\n\n out.push(a.unwrap());\n\n a = iter_a.next();\n\n } else {\n\n out.push(b.unwrap());\n", "file_path": "src/version_set.rs", "rank": 52, "score": 90646.89960956955 }, { "content": "fn get(db: &mut DB, k: &str) {\n\n match db.get(k.as_bytes()) {\n\n Some(v) => {\n\n if let Ok(s) = String::from_utf8(v.clone()) {\n\n eprintln!(\"{} => {}\", k, s);\n\n } else {\n\n eprintln!(\"{} => {:?}\", k, v);\n\n }\n\n }\n\n None => eprintln!(\"{} => <not found>\", k),\n\n }\n\n}\n\n\n", "file_path": "examples/leveldb-tool/src/main.rs", "rank": 53, "score": 89570.79837764634 }, { "content": "fn random_period() -> isize {\n\n rand::random::<isize>() % 2 * READ_BYTES_PERIOD\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::db_impl::testutil::*;\n\n use crate::db_impl::DB;\n\n use crate::test_util::LdbIteratorIter;\n\n use crate::types::{current_key_val, Direction};\n\n\n\n use std::collections::HashMap;\n\n use std::collections::HashSet;\n\n use std::iter::FromIterator;\n\n\n\n 
#[test]\n\n fn db_iter_basic_test() {\n\n let mut db = build_db().0;\n\n let mut iter = db.new_iter().unwrap();\n", "file_path": "src/db_iter.rs", "rank": 54, "score": 85293.91021353206 }, { "content": "pub fn share<T>(t: T) -> Rc<RefCell<T>> {\n\n Rc::new(RefCell::new(t))\n\n}\n\n\n\n#[derive(PartialEq)]\n\npub enum Direction {\n\n Forward,\n\n Reverse,\n\n}\n\n\n\n/// Denotes a key range\n\npub struct Range<'a> {\n\n pub start: &'a [u8],\n\n pub limit: &'a [u8],\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 55, "score": 84624.38072628705 }, { "content": "fn tag_to_enum(t: u32) -> Option<EditTag> {\n\n match t {\n\n 1 => Some(EditTag::Comparator),\n\n 2 => Some(EditTag::LogNumber),\n\n 3 => Some(EditTag::NextFileNumber),\n\n 4 => Some(EditTag::LastSequence),\n\n 5 => Some(EditTag::CompactPointer),\n\n 6 => Some(EditTag::DeletedFile),\n\n 7 => Some(EditTag::NewFile),\n\n 9 => Some(EditTag::PrevLogNumber),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/version_edit.rs", "rank": 56, "score": 84222.24349614959 }, { "content": "fn run(mut db: leveldb::DB) -> io::Result<()> {\n\n let files = std::env::args().skip(1);\n\n\n\n for f in files {\n\n let f = OpenOptions::new().read(true).open(Path::new(&f))?;\n\n for line in io::BufReader::new(f).lines() {\n\n for word in line.unwrap().split_whitespace() {\n\n let mut word = word.to_ascii_lowercase();\n\n word.retain(|c| c.is_ascii_alphanumeric());\n\n update_count(&word, &mut db);\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/word-analyze/src/main.rs", "rank": 57, "score": 83438.69998105316 }, { "content": "fn compact(db: &mut DB, from: &str, to: &str) {\n\n db.compact_range(from.as_bytes(), to.as_bytes()).unwrap();\n\n}\n\n\n", "file_path": "examples/leveldb-tool/src/main.rs", "rank": 58, "score": 83246.45563286511 }, { "content": "/// A MemFileWriter holds a reference to a MemFile and a write offset.\n\nstruct MemFileWriter(MemFile, usize);\n\n\n\nimpl MemFileWriter {\n\n fn new(f: MemFile, 
append: bool) -> MemFileWriter {\n\n let len = f.0.lock().unwrap().len();\n\n MemFileWriter(f, if append { len } else { 0 })\n\n }\n\n}\n\n\n\nimpl Write for MemFileWriter {\n\n fn write(&mut self, src: &[u8]) -> io::Result<usize> {\n\n let mut buf = (self.0).0.lock().unwrap();\n\n // Write is append.\n\n if self.1 == buf.len() {\n\n buf.extend_from_slice(src);\n\n } else {\n\n // Write in the middle, possibly appending.\n\n let remaining = buf.len() - self.1;\n\n if src.len() <= remaining {\n\n // src fits into buffer.\n", "file_path": "src/mem_env.rs", "rank": 59, "score": 80011.42167166516 }, { "content": "/// A MemFileReader holds a reference to a MemFile and a read offset.\n\nstruct MemFileReader(MemFile, usize);\n\n\n\nimpl MemFileReader {\n\n fn new(f: MemFile, from: usize) -> MemFileReader {\n\n MemFileReader(f, from)\n\n }\n\n}\n\n\n\n// We need Read/Write/Seek implementations for our MemFile in order to work well with the\n\n// concurrency requirements. It's very hard or even impossible to implement those traits just by\n\n// wrapping MemFile in other types.\n\nimpl Read for MemFileReader {\n\n fn read(&mut self, dst: &mut [u8]) -> io::Result<usize> {\n\n let buf = (self.0).0.lock().unwrap();\n\n if self.1 >= buf.len() {\n\n // EOF\n\n return Ok(0);\n\n }\n\n let remaining = buf.len() - self.1;\n\n let to_read = if dst.len() > remaining {\n", "file_path": "src/mem_env.rs", "rank": 60, "score": 80011.42167166516 }, { "content": "fn filenum_to_key(num: FileNum) -> cache::CacheKey {\n\n let mut buf = [0; 16];\n\n (&mut buf[..]).write_fixedint(num).unwrap();\n\n buf\n\n}\n\n\n\npub struct TableCache {\n\n dbname: PathBuf,\n\n cache: Cache<Table>,\n\n opts: Options,\n\n}\n\n\n\nimpl TableCache {\n\n /// Create a new TableCache for the database named `db`, caching up to `entries` tables.\n\n ///\n\n /// opt.cmp should be the user-supplied comparator.\n\n pub fn new<P: AsRef<Path>>(db: P, opt: Options, entries: usize) -> TableCache {\n\n TableCache {\n\n dbname: 
db.as_ref().to_owned(),\n\n cache: Cache::new(entries),\n", "file_path": "src/table_cache.rs", "rank": 61, "score": 79884.94130839706 }, { "content": "fn update_count(w: &str, db: &mut leveldb::DB) -> Option<()> {\n\n let mut count: usize = 0;\n\n if let Some(v) = db.get(w.as_bytes()) {\n\n let s = String::from_utf8(v).unwrap();\n\n count = usize::from_str_radix(&s, 10).unwrap();\n\n }\n\n count += 1;\n\n let s = count.to_string();\n\n db.put(w.as_bytes(), s.as_bytes()).unwrap();\n\n Some(())\n\n}\n\n\n", "file_path": "examples/word-analyze/src/main.rs", "rank": 62, "score": 79692.23804315444 }, { "content": "/// err returns a new Status wrapped in a Result.\n\npub fn err<T>(code: StatusCode, msg: &str) -> Result<T> {\n\n Err(Status::new(code, msg))\n\n}\n\n\n\nimpl From<io::Error> for Status {\n\n fn from(e: io::Error) -> Status {\n\n let c = match e.kind() {\n\n io::ErrorKind::NotFound => StatusCode::NotFound,\n\n io::ErrorKind::InvalidData => StatusCode::Corruption,\n\n io::ErrorKind::InvalidInput => StatusCode::InvalidArgument,\n\n io::ErrorKind::PermissionDenied => StatusCode::PermissionDenied,\n\n _ => StatusCode::IOError,\n\n };\n\n\n\n Status::new(c, &e.to_string())\n\n }\n\n}\n\n\n\nimpl<T> From<sync::PoisonError<T>> for Status {\n\n fn from(_: sync::PoisonError<T>) -> Status {\n", "file_path": "src/error.rs", "rank": 63, "score": 79021.57196707974 }, { "content": "/// map_err_with_name annotates an io::Error with information about the operation and the file.\n\nfn map_err_with_name(method: &'static str, f: &Path, e: io::Error) -> Status {\n\n let mut s = Status::from(e);\n\n s.err = format!(\"{}: {}: {}\", method, s.err, path_to_str(f));\n\n s\n\n}\n\n\n\n// Note: We're using Ok(f()?) 
in several locations below in order to benefit from the automatic\n\n// error conversion using std::convert::From.\n\nimpl Env for PosixDiskEnv {\n\n fn open_sequential_file(&self, p: &Path) -> Result<Box<dyn Read>> {\n\n Ok(Box::new(\n\n fs::OpenOptions::new()\n\n .read(true)\n\n .open(p)\n\n .map_err(|e| map_err_with_name(\"open (seq)\", p, e))?,\n\n ))\n\n }\n\n fn open_random_access_file(&self, p: &Path) -> Result<Box<dyn RandomAccess>> {\n\n Ok(fs::OpenOptions::new()\n\n .read(true)\n", "file_path": "src/disk_env.rs", "rank": 64, "score": 76774.40427681942 }, { "content": "fn put_key_fn(rq: &canteen::Request) -> canteen::Response {\n\n unsafe {\n\n STORAGE_SERVICE\n\n .as_ref()\n\n .unwrap()\n\n .lock()\n\n .unwrap()\n\n .handle_put(rq)\n\n }\n\n}\n\n\n", "file_path": "examples/kvserver/src/main.rs", "rank": 65, "score": 76629.94574942034 }, { "content": "fn get_key_fn(rq: &canteen::Request) -> canteen::Response {\n\n unsafe {\n\n STORAGE_SERVICE\n\n .as_ref()\n\n .unwrap()\n\n .lock()\n\n .unwrap()\n\n .handle_get(rq)\n\n }\n\n}\n\n\n", "file_path": "examples/kvserver/src/main.rs", "rank": 66, "score": 76629.94574942034 }, { "content": "pub fn read_current_file(env: &Box<dyn Env>, dbname: &Path) -> Result<String> {\n\n let mut current = String::new();\n\n let mut f = env.open_sequential_file(Path::new(&current_file_name(dbname)))?;\n\n f.read_to_string(&mut current)?;\n\n if current.is_empty() || !current.ends_with('\\n') {\n\n return err(\n\n StatusCode::Corruption,\n\n \"current file is empty or has no newline\",\n\n );\n\n }\n\n Ok(current)\n\n}\n\n\n", "file_path": "src/version_set.rs", "rank": 67, "score": 72631.82173111815 }, { "content": "fn gen_string(len: usize) -> String {\n\n let mut rng = rand::thread_rng();\n\n String::from_iter(rng.gen_ascii_chars().take(len))\n\n}\n\n\n", "file_path": "examples/write-a-lot/src/main.rs", "rank": 68, "score": 72209.01167950127 }, { "content": "fn lock_file_name(db: &Path) -> PathBuf {\n\n 
db.join(\"LOCK\")\n\n}\n\n\n", "file_path": "src/db_impl.rs", "rank": 69, "score": 71724.8381368529 }, { "content": "pub fn parse_file_name<P: AsRef<Path>>(ff: P) -> Result<(FileNum, FileType)> {\n\n let f = ff.as_ref().to_str().unwrap();\n\n if f == \"CURRENT\" {\n\n return Ok((0, FileType::Current));\n\n } else if f == \"LOCK\" {\n\n return Ok((0, FileType::DBLock));\n\n } else if f == \"LOG\" || f == \"LOG.old\" {\n\n return Ok((0, FileType::InfoLog));\n\n } else if f.starts_with(\"MANIFEST-\") {\n\n if let Some(ix) = f.find('-') {\n\n if let Ok(num) = FileNum::from_str_radix(&f[ix + 1..], 10) {\n\n return Ok((num, FileType::Descriptor));\n\n }\n\n return err(\n\n StatusCode::InvalidArgument,\n\n \"manifest file number is invalid\",\n\n );\n\n }\n\n return err(StatusCode::InvalidArgument, \"manifest file has no dash\");\n\n } else if let Some(ix) = f.find('.') {\n", "file_path": "src/types.rs", "rank": 70, "score": 71240.60587368993 }, { "content": "pub fn table_file_name<P: AsRef<Path>>(name: P, num: FileNum) -> PathBuf {\n\n assert!(num > 0);\n\n name.as_ref().join(format!(\"{:06}.ldb\", num))\n\n}\n\n\n", "file_path": "src/table_cache.rs", "rank": 71, "score": 69935.7262659047 }, { "content": "pub fn manifest_file_name<P: AsRef<Path>>(dbname: P, file_num: FileNum) -> PathBuf {\n\n dbname.as_ref().join(manifest_name(file_num))\n\n}\n\n\n", "file_path": "src/version_set.rs", "rank": 72, "score": 68709.38797827213 }, { "content": "type WrappedCmp = Rc<Box<dyn Cmp>>;\n\n\n", "file_path": "src/cmp.rs", "rank": 73, "score": 67496.24351950354 }, { "content": "#[inline]\n\nfn get_filter_index(offset: usize, base_lg2: u32) -> u32 {\n\n // divide by 2048\n\n (offset >> base_lg2 as usize) as u32\n\n}\n\n\n\n/// A Filter Block is built like this:\n\n///\n\n/// [filter0, filter1, filter2, ..., offset of filter0, offset of filter1, ..., offset of offsets\n\n/// array, log2 of FILTER_BASE]\n\n///\n\n/// where offsets are 4 bytes, offset of offsets is 4 bytes, and log2 of 
FILTER_BASE is 1 byte.\n\n/// Two consecutive filter offsets may be the same.\n\npub struct FilterBlockBuilder {\n\n policy: BoxedFilterPolicy,\n\n // filters, concatenated\n\n filters: Vec<u8>,\n\n filter_offsets: Vec<usize>,\n\n\n\n // Reset on every start_block()\n\n key_offsets: Vec<usize>,\n", "file_path": "src/filter_block.rs", "rank": 74, "score": 65752.60691430878 }, { "content": "struct Builder {\n\n // (added, deleted) files per level.\n\n deleted: [Vec<FileNum>; NUM_LEVELS],\n\n added: [Vec<FileMetaHandle>; NUM_LEVELS],\n\n}\n\n\n\nimpl Builder {\n\n fn new() -> Builder {\n\n Builder {\n\n deleted: Default::default(),\n\n added: Default::default(),\n\n }\n\n }\n\n\n\n /// apply applies the edits recorded in edit to the builder state. compaction pointers are\n\n /// copied to the supplied compaction_ptrs array.\n\n fn apply(&mut self, edit: &VersionEdit, compaction_ptrs: &mut [Vec<u8>; NUM_LEVELS]) {\n\n for c in edit.compaction_ptrs.iter() {\n\n compaction_ptrs[c.level] = c.key.clone();\n\n }\n", "file_path": "src/version_set.rs", "rank": 75, "score": 64652.479958712305 }, { "content": "#[derive(Clone)]\n\nstruct InnerSnapshot {\n\n id: SnapshotHandle,\n\n seq: SequenceNumber,\n\n sl: Shared<InnerSnapshotList>,\n\n}\n\n\n\nimpl Drop for InnerSnapshot {\n\n fn drop(&mut self) {\n\n self.sl.borrow_mut().delete(self.id);\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Snapshot {\n\n inner: Rc<InnerSnapshot>,\n\n}\n\n\n\nimpl Snapshot {\n\n pub fn sequence(&self) -> SequenceNumber {\n\n (*self.inner).seq\n", "file_path": "src/snapshot.rs", "rank": 76, "score": 64652.479958712305 }, { "content": "/// Reads the table footer.\n\nfn read_footer(f: &dyn RandomAccess, size: usize) -> Result<Footer> {\n\n let mut buf = vec![0; table_builder::FULL_FOOTER_LENGTH];\n\n f.read_at(size - table_builder::FULL_FOOTER_LENGTH, &mut buf)?;\n\n match Footer::decode(&buf) {\n\n Some(ok) => Ok(ok),\n\n None => err(\n\n error::StatusCode::Corruption,\n\n &format!(\"Couldn't 
decode damaged footer {:?}\", &buf),\n\n ),\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Table {\n\n file: Rc<Box<dyn RandomAccess>>,\n\n file_size: usize,\n\n cache_id: cache::CacheID,\n\n\n\n opt: Options,\n\n\n", "file_path": "src/table_reader.rs", "rank": 77, "score": 63738.537075872824 }, { "content": "fn log_file_name(db: &Path, num: FileNum) -> PathBuf {\n\n db.join(format!(\"{:06}.log\", num))\n\n}\n\n\n", "file_path": "src/db_impl.rs", "rank": 78, "score": 63606.28467853958 }, { "content": "/// A list of all snapshots is kept in the DB.\n\nstruct InnerSnapshotList {\n\n map: HashMap<SnapshotHandle, SequenceNumber>,\n\n newest: SnapshotHandle,\n\n oldest: SnapshotHandle,\n\n}\n\n\n\npub struct SnapshotList {\n\n inner: Shared<InnerSnapshotList>,\n\n}\n\n\n\nimpl SnapshotList {\n\n pub fn new() -> SnapshotList {\n\n SnapshotList {\n\n inner: share(InnerSnapshotList {\n\n map: HashMap::new(),\n\n newest: 0,\n\n oldest: 0,\n\n }),\n\n }\n\n }\n", "file_path": "src/snapshot.rs", "rank": 79, "score": 62946.56232902985 }, { "content": "struct KVService {\n\n db: rusty_leveldb::DB,\n\n}\n\n\n\nstatic mut STORAGE_SERVICE: Option<std::sync::Mutex<KVService>> = None;\n\n\n\nimpl KVService {\n\n fn handle_get(&mut self, req: &canteen::Request) -> canteen::Response {\n\n let key: String = req.get(\"key\");\n\n\n\n let val = self.db.get(key.as_bytes());\n\n\n\n let mut rp = canteen::Response::new();\n\n\n\n rp.set_status(200);\n\n rp.set_content_type(\"text/plain\");\n\n\n\n if let Some(val) = val {\n\n rp.append(val);\n\n } else {\n", "file_path": "examples/kvserver/src/main.rs", "rank": 80, "score": 61401.36918333413 }, { "content": "/// Opaque snapshot handle; Represents index to SnapshotList.map\n\ntype SnapshotHandle = u64;\n\n\n\n/// An InnerSnapshot is shared by several Snapshots. 
This enables cloning snapshots, and a snapshot\n\n/// is released once the last instance is dropped.\n", "file_path": "src/snapshot.rs", "rank": 81, "score": 60721.069531370274 }, { "content": "pub trait Env {\n\n fn open_sequential_file(&self, _: &Path) -> Result<Box<dyn Read>>;\n\n fn open_random_access_file(&self, _: &Path) -> Result<Box<dyn RandomAccess>>;\n\n fn open_writable_file(&self, _: &Path) -> Result<Box<dyn Write>>;\n\n fn open_appendable_file(&self, _: &Path) -> Result<Box<dyn Write>>;\n\n\n\n fn exists(&self, _: &Path) -> Result<bool>;\n\n fn children(&self, _: &Path) -> Result<Vec<PathBuf>>;\n\n fn size_of(&self, _: &Path) -> Result<usize>;\n\n\n\n fn delete(&self, _: &Path) -> Result<()>;\n\n fn mkdir(&self, _: &Path) -> Result<()>;\n\n fn rmdir(&self, _: &Path) -> Result<()>;\n\n fn rename(&self, _: &Path, _: &Path) -> Result<()>;\n\n\n\n fn lock(&self, _: &Path) -> Result<FileLock>;\n\n fn unlock(&self, l: FileLock) -> Result<()>;\n\n\n\n fn new_logger(&self, _: &Path) -> Result<Logger>;\n\n\n", "file_path": "src/env.rs", "rank": 82, "score": 60693.10184569401 }, { "content": "struct LRUNode<T> {\n\n next: Option<Box<LRUNode<T>>>, // None in the list's last node\n\n prev: Option<*mut LRUNode<T>>,\n\n data: Option<T>, // if None, then we have reached the head node\n\n}\n\n\n", "file_path": "src/cache.rs", "rank": 83, "score": 60269.295866460496 }, { "content": "struct LRUList<T> {\n\n head: LRUNode<T>,\n\n count: usize,\n\n}\n\n\n\n/// This is likely unstable; more investigation is needed into correct behavior!\n\nimpl<T> LRUList<T> {\n\n fn new() -> LRUList<T> {\n\n LRUList {\n\n head: LRUNode {\n\n data: None,\n\n next: None,\n\n prev: None,\n\n },\n\n count: 0,\n\n }\n\n }\n\n\n\n /// Inserts new element at front (least recently used element)\n\n fn insert(&mut self, elem: T) -> LRUHandle<T> {\n", "file_path": "src/cache.rs", "rank": 84, "score": 60269.295866460496 }, { "content": "/// Encapsulates a filter algorithm allowing to search for keys 
more efficiently.\n\n/// Usually, policies are used as a BoxedFilterPolicy (see below), so they\n\n/// can be easily cloned and nested.\n\npub trait FilterPolicy {\n\n /// Returns a string identifying this policy.\n\n fn name(&self) -> &'static str;\n\n /// Create a filter matching the given keys. Keys are given as a long byte array that is\n\n /// indexed by the offsets contained in key_offsets.\n\n fn create_filter(&self, keys: &[u8], key_offsets: &[usize]) -> Vec<u8>;\n\n /// Check whether the given key may match the filter.\n\n fn key_may_match(&self, key: &[u8], filter: &[u8]) -> bool;\n\n}\n\n\n\n/// A boxed and refcounted filter policy (reference-counted because a Box with unsized content\n\n/// couldn't be cloned otherwise)\n\npub type BoxedFilterPolicy = Rc<Box<dyn FilterPolicy>>;\n\n\n\nimpl FilterPolicy for BoxedFilterPolicy {\n\n fn name(&self) -> &'static str {\n\n (**self).name()\n\n }\n\n fn create_filter(&self, keys: &[u8], key_offsets: &[usize]) -> Vec<u8> {\n\n (**self).create_filter(keys, key_offsets)\n", "file_path": "src/filter.rs", "rank": 85, "score": 59003.53817090099 }, { "content": "pub trait RandomAccess {\n\n fn read_at(&self, off: usize, dst: &mut [u8]) -> Result<usize>;\n\n}\n\n\n\n#[cfg(unix)]\n\nimpl RandomAccess for File {\n\n fn read_at(&self, off: usize, dst: &mut [u8]) -> Result<usize> {\n\n Ok((self as &dyn FileExt).read_at(dst, off as u64)?)\n\n }\n\n}\n\n\n\n#[cfg(windows)]\n\nimpl RandomAccess for File {\n\n fn read_at(&self, off: usize, dst: &mut [u8]) -> Result<usize> {\n\n Ok((self as &dyn FileExt).seek_read(dst, off as u64)?)\n\n }\n\n}\n\n\n\npub struct FileLock {\n\n pub id: String,\n\n}\n\n\n", "file_path": "src/env.rs", "rank": 86, "score": 58996.20842280089 }, { "content": "fn main() {\n\n let db = rusty_leveldb::DB::open(\"httpdb\", rusty_leveldb::Options::default()).unwrap();\n\n let service = KVService { db: db };\n\n unsafe { STORAGE_SERVICE = Some(std::sync::Mutex::new(service)) };\n\n\n\n let mut ct = 
canteen::Canteen::new();\n\n ct.add_route(\"/kvs/get/<str:key>\", &[canteen::Method::Get], get_key_fn);\n\n ct.add_route(\n\n \"/kvs/put/<str:key>\",\n\n &[canteen::Method::Put, canteen::Method::Post],\n\n put_key_fn,\n\n );\n\n ct.bind(\"0.0.0.0:8080\");\n\n ct.run()\n\n}\n", "file_path": "examples/kvserver/src/main.rs", "rank": 87, "score": 58146.478014039734 }, { "content": "fn main() {\n\n let path = format!(\"{}/simple_db_workds\", DB_PATH);\n\n {\n\n let mut options = rusty_leveldb::Options::default();\n\n options.env = Rc::new(Box::new(rusty_leveldb::gramine_env::GramineEnv::new()));\n\n let mut db = rusty_leveldb::DB::open(Path::new(&path), options).unwrap();\n\n assert!(db.put(b\"k1\", b\"v1\").is_ok());\n\n let result = db.get(b\"k1\").unwrap();\n\n assert_eq!(aux_get_byte_slice(&result), b\"v1\");\n\n db.flush();\n\n }\n\n\n\n {\n\n let manifest_file = format!(\"{}/simple_db_workds/MANIFEST-000001\", DB_PATH);\n\n let mut manifest_handler = std::fs::OpenOptions::new()\n\n .read(true)\n\n .open(&manifest_file)\n\n .unwrap();\n\n let mut buffer = Vec::new();\n\n let _ = manifest_handler.read_to_end(&mut buffer);\n", "file_path": "examples/gramine/src/main.rs", "rank": 88, "score": 58146.478014039734 }, { "content": "/// find_file returns the index of the file in files that potentially contains the internal key\n\n/// key. files must not overlap and be ordered ascendingly. 
If no file can contain the key, None is\n\n/// returned.\n\nfn find_file<'a>(\n\n cmp: &InternalKeyCmp,\n\n files: &[FileMetaHandle],\n\n key: InternalKey<'a>,\n\n) -> Option<usize> {\n\n let (mut left, mut right) = (0, files.len());\n\n while left < right {\n\n let mid = (left + right) / 2;\n\n if cmp.cmp(&files[mid].borrow().largest, key) == Ordering::Less {\n\n left = mid + 1;\n\n } else {\n\n right = mid;\n\n }\n\n }\n\n if right < files.len() {\n\n Some(right)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/version.rs", "rank": 89, "score": 57543.158530213 }, { "content": "fn main() {\n\n let args = Vec::from_iter(args());\n\n\n\n if args.len() < 2 {\n\n panic!(\n\n \"Usage: {} [get|put/set|delete|iter|compact] [key|from] [val|to]\",\n\n args[0]\n\n );\n\n }\n\n\n\n let mut opt = Options::default();\n\n opt.reuse_logs = false;\n\n opt.reuse_manifest = false;\n\n opt.compression_type = rusty_leveldb::CompressionType::CompressionNone;\n\n let mut db = DB::open(\"tooldb\", opt).unwrap();\n\n\n\n match args[1].as_str() {\n\n \"get\" => {\n\n if args.len() < 3 {\n\n panic!(\"Usage: {} get key\", args[0]);\n", "file_path": "examples/leveldb-tool/src/main.rs", "rank": 90, "score": 56625.1405505912 }, { "content": "fn main() {\n\n let mut opts = leveldb::Options::default();\n\n opts.compression_type = leveldb::CompressionType::CompressionNone;\n\n let db = leveldb::DB::open(\"wordsdb\", opts).unwrap();\n\n\n\n run(db).unwrap();\n\n}\n", "file_path": "examples/word-analyze/src/main.rs", "rank": 91, "score": 56625.1405505912 }, { "content": "fn main() {\n\n let mut opt = Options::default();\n\n opt.compression_type = CompressionType::CompressionSnappy;\n\n let mut db = DB::open(\"test1\", opt).unwrap();\n\n\n\n fill_db(&mut db, 32768).unwrap();\n\n}\n", "file_path": "examples/write-a-lot/src/main.rs", "rank": 92, "score": 56625.1405505912 }, { "content": "/// some_file_overlaps_range returns true if any of the given possibly overlapping files contains\n\n/// 
keys in the range [smallest; largest].\n\nfn some_file_overlaps_range<'a, 'b>(\n\n cmp: &InternalKeyCmp,\n\n files: &[FileMetaHandle],\n\n smallest: UserKey<'a>,\n\n largest: UserKey<'b>,\n\n) -> bool {\n\n for f in files {\n\n if !(key_is_after_file(cmp, smallest, f) || key_is_before_file(cmp, largest, f)) {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n\n#[cfg(test)]\n\npub mod testutil {\n\n use super::*;\n\n use crate::cmp::DefaultCmp;\n\n use crate::env::Env;\n\n use crate::key_types::ValueType;\n", "file_path": "src/version.rs", "rank": 93, "score": 53940.96852884635 }, { "content": "type CacheEntry<T> = (T, LRUHandle<CacheKey>);\n\n\n\n/// Implementation of `ShardedLRUCache`.\n\n/// Based on a HashMap; the elements are linked in order to support the LRU ordering.\n\npub struct Cache<T> {\n\n // note: CacheKeys (Vec<u8>) are duplicated between list and map. If this turns out to be a\n\n // performance bottleneck, another layer of indirection™ can solve this by mapping the key\n\n // to a numeric handle that keys both list and map.\n\n list: LRUList<CacheKey>,\n\n map: HashMap<CacheKey, CacheEntry<T>>,\n\n cap: usize,\n\n id: u64,\n\n}\n\n\n\nimpl<T> Cache<T> {\n\n pub fn new(capacity: usize) -> Cache<T> {\n\n assert!(capacity > 0);\n\n Cache {\n\n list: LRUList::new(),\n\n map: HashMap::with_capacity(1024),\n", "file_path": "src/cache.rs", "rank": 94, "score": 52605.98495641787 }, { "content": "/// some_file_overlaps_range_disjoint returns true if any of the given disjoint files (i.e. 
level >\n\n/// 1) contain keys in the range defined by the user keys [smallest; largest].\n\nfn some_file_overlaps_range_disjoint<'a, 'b>(\n\n cmp: &InternalKeyCmp,\n\n files: &[FileMetaHandle],\n\n smallest: UserKey<'a>,\n\n largest: UserKey<'b>,\n\n) -> bool {\n\n let ikey = LookupKey::new(smallest, MAX_SEQUENCE_NUMBER);\n\n if let Some(ix) = find_file(cmp, files, ikey.internal_key()) {\n\n !key_is_before_file(cmp, largest, &files[ix])\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/version.rs", "rank": 95, "score": 52419.94205682144 }, { "content": "// No clone, no copy! That asserts that an LRUHandle exists only once.\n\ntype LRUHandle<T> = *mut LRUNode<T>;\n\n\n", "file_path": "src/cache.rs", "rank": 96, "score": 48784.44269558153 }, { "content": "fn manifest_name(file_num: FileNum) -> PathBuf {\n\n Path::new(&format!(\"MANIFEST-{:06}\", file_num)).to_owned()\n\n}\n\n\n", "file_path": "src/version_set.rs", "rank": 97, "score": 44909.68311975928 }, { "content": "fn current_file_name<P: AsRef<Path>>(dbname: P) -> PathBuf {\n\n dbname.as_ref().join(\"CURRENT\").to_owned()\n\n}\n\n\n", "file_path": "src/version_set.rs", "rank": 98, "score": 39454.37202662386 } ]
Rust
sulis_view/src/item_list_pane.rs
ThyWoof/sulis
e89eda94a1a72228224e1926d307aa4c9228bdcb
use std::any::Any; use std::cell::{Cell, RefCell}; use std::rc::Rc; use sulis_core::ui::{Callback, Widget, WidgetKind}; use sulis_core::widgets::{Button, ScrollDirection, ScrollPane}; use sulis_module::{Item, ItemState, Module}; use sulis_state::{script::ScriptItemKind, EntityState, GameState}; use crate::{item_callback_handler::*, ItemButton}; pub const NAME: &str = "item_list_pane"; enum Kind { Entity, Merchant(String), Prop(usize), } #[derive(Debug, PartialEq, Clone, Copy)] pub enum Filter { All, Weapon, Armor, Accessory, Usable, } impl Filter { fn is_allowed(self, item: &Rc<Item>) -> bool { use self::Filter::*; match self { All => true, Weapon => item.is_weapon(), Armor => item.is_armor(), Accessory => { if item.is_weapon() || item.is_armor() { return false; } item.equippable.is_some() } Usable => item.usable.is_some(), } } } use self::Filter::*; const FILTERS_LIST: [Filter; 5] = [All, Weapon, Armor, Accessory, Usable]; pub struct ItemListPane { entity: Rc<RefCell<EntityState>>, kind: Kind, cur_filter: Rc<Cell<Filter>>, } impl ItemListPane { fn new( entity: &Rc<RefCell<EntityState>>, kind: Kind, cur_filter: &Rc<Cell<Filter>>, ) -> Rc<RefCell<ItemListPane>> { Rc::new(RefCell::new(ItemListPane { entity: Rc::clone(entity), kind, cur_filter: Rc::clone(cur_filter), })) } pub fn new_entity( entity: &Rc<RefCell<EntityState>>, cur_filter: &Rc<Cell<Filter>>, ) -> Rc<RefCell<ItemListPane>> { ItemListPane::new(entity, Kind::Entity, cur_filter) } pub fn new_prop( entity: &Rc<RefCell<EntityState>>, prop_index: usize, cur_filter: &Rc<Cell<Filter>>, ) -> Rc<RefCell<ItemListPane>> { ItemListPane::new(entity, Kind::Prop(prop_index), cur_filter) } pub fn new_merchant( entity: &Rc<RefCell<EntityState>>, merchant_id: String, cur_filter: &Rc<Cell<Filter>>, ) -> Rc<RefCell<ItemListPane>> { ItemListPane::new(entity, Kind::Merchant(merchant_id), cur_filter) } fn set_filter(&mut self, filter: Filter, widget: &Rc<RefCell<Widget>>) { self.cur_filter.set(filter); 
widget.borrow_mut().invalidate_children(); } fn create_content_merchant(&self, merchant_id: &str) -> Rc<RefCell<Widget>> { let area_state = GameState::area_state(); let area_state = area_state.borrow(); let merchant = area_state.get_merchant(merchant_id); let merchant = match merchant { None => return Widget::empty("none"), Some(ref merchant) => merchant, }; let scrollpane = ScrollPane::new(ScrollDirection::Vertical); let list_content = Widget::with_theme(scrollpane.clone(), "items_list"); for (index, &(qty, ref item)) in merchant.items().iter().enumerate() { if !self.cur_filter.get().is_allowed(&item.item) { continue; } let item_button = ItemButton::merchant(item, qty, index, merchant_id); item_button .borrow_mut() .add_action("Buy", buy_item_cb(merchant_id, index), true); scrollpane .borrow() .add_to_content(Widget::with_defaults(item_button)); } list_content } fn create_content_prop(&self, prop_index: usize) -> Rc<RefCell<Widget>> { let combat_active = GameState::is_combat_active(); let area_state = GameState::area_state(); let area_state = area_state.borrow(); let prop = area_state.props().get(prop_index); let scrollpane = ScrollPane::new(ScrollDirection::Vertical); let list_content = Widget::with_theme(scrollpane.clone(), "items_list"); match prop.items() { None => (), Some(items) => { for (index, &(qty, ref item)) in items.iter().enumerate() { if !self.cur_filter.get().is_allowed(&item.item) { continue; } let item_button = ItemButton::prop(item, qty, index, prop_index); if !combat_active { item_button.borrow_mut().add_action( "Take", take_item_cb(prop_index, index), true, ); } scrollpane .borrow() .add_to_content(Widget::with_defaults(item_button)); } } } list_content } fn create_content_inventory(&self) -> Rc<RefCell<Widget>> { let combat_active = GameState::is_combat_active(); let actor = &self.entity.borrow().actor; let scrollpane = ScrollPane::new(ScrollDirection::Vertical); let list_content = Widget::with_theme(scrollpane.clone(), "items_list"); let 
stash = GameState::party_stash(); let stash = stash.borrow(); for (index, &(quantity, ref item)) in stash.items().iter().enumerate() { if !self.cur_filter.get().is_allowed(&item.item) { continue; } let item_but = ItemButton::inventory(item, quantity, index); if let Some(ref usable) = item.item.usable { if !combat_active && item.item.meets_prereqs(&actor.actor) { let mut but = item_but.borrow_mut(); if usable.use_in_slot { but.add_action( "Add to Use Slot", set_quickslot_cb(&self.entity, index), true, ); } else { let kind = ScriptItemKind::Stash(index); but.add_action("Use", use_item_cb(&self.entity, kind), true); } } } if !combat_active && actor.can_equip(item) { item_but .borrow_mut() .add_action("Equip", equip_item_cb(&self.entity, index), true); } if !combat_active && !item.item.quest { item_but .borrow_mut() .add_action("Drop", drop_item_cb(&self.entity, index), false); } scrollpane .borrow() .add_to_content(Widget::with_defaults(item_but)); } list_content } } impl WidgetKind for ItemListPane { widget_kind!(NAME); fn on_add(&mut self, _widget: &Rc<RefCell<Widget>>) -> Vec<Rc<RefCell<Widget>>> { let mut children = Vec::new(); let content = match &self.kind { Kind::Entity => self.create_content_inventory(), Kind::Prop(index) => self.create_content_prop(*index), Kind::Merchant(id) => self.create_content_merchant(id), }; children.push(content); if let Kind::Entity = &self.kind { let coins_item = match Module::item(&Module::rules().coins_item) { None => { warn!("Unable to find coins item"); return Vec::new(); } Some(item) => item, }; let coins_item_state = ItemState::new(coins_item, None); let amount = GameState::party_coins() as f32 / Module::rules().item_value_display_factor; let button = ItemButton::inventory(&coins_item_state, amount as u32, 0); let coins_button = Widget::with_theme(button, "coins_button"); coins_button.borrow_mut().state.set_enabled(false); children.push(coins_button); } for filter in FILTERS_LIST.iter() { let filter = *filter; let button = 
Widget::with_theme( Button::empty(), &format!("filter_{:?}", filter).to_lowercase(), ); button .borrow_mut() .state .add_callback(Callback::new(Rc::new(move |widget, _| { let (parent, pane) = Widget::parent_mut::<ItemListPane>(widget); pane.set_filter(filter, &parent); }))); if filter == self.cur_filter.get() { button.borrow_mut().state.set_active(true); } children.push(button); } children } }
use std::any::Any; use std::cell::{Cell, RefCell}; use std::rc::Rc; use sulis_core::ui::{Callback, Widget, WidgetKind}; use sulis_core::widgets::{Button, ScrollDirection, ScrollPane}; use sulis_module::{Item, ItemState, Module}; use sulis_state::{script::ScriptItemKind, EntityState, GameState}; use crate::{item_callback_handler::*, ItemButton}; pub const NAME: &str = "item_list_pane"; enum Kind { Entity, Merchant(String), Prop(usize), } #[derive(Debug, PartialEq, Clone, Copy)] pub enum Filter { All, Weapon, Armor, Accessory, Usable, } impl Filter {
} use self::Filter::*; const FILTERS_LIST: [Filter; 5] = [All, Weapon, Armor, Accessory, Usable]; pub struct ItemListPane { entity: Rc<RefCell<EntityState>>, kind: Kind, cur_filter: Rc<Cell<Filter>>, } impl ItemListPane { fn new( entity: &Rc<RefCell<EntityState>>, kind: Kind, cur_filter: &Rc<Cell<Filter>>, ) -> Rc<RefCell<ItemListPane>> { Rc::new(RefCell::new(ItemListPane { entity: Rc::clone(entity), kind, cur_filter: Rc::clone(cur_filter), })) } pub fn new_entity( entity: &Rc<RefCell<EntityState>>, cur_filter: &Rc<Cell<Filter>>, ) -> Rc<RefCell<ItemListPane>> { ItemListPane::new(entity, Kind::Entity, cur_filter) } pub fn new_prop( entity: &Rc<RefCell<EntityState>>, prop_index: usize, cur_filter: &Rc<Cell<Filter>>, ) -> Rc<RefCell<ItemListPane>> { ItemListPane::new(entity, Kind::Prop(prop_index), cur_filter) } pub fn new_merchant( entity: &Rc<RefCell<EntityState>>, merchant_id: String, cur_filter: &Rc<Cell<Filter>>, ) -> Rc<RefCell<ItemListPane>> { ItemListPane::new(entity, Kind::Merchant(merchant_id), cur_filter) } fn set_filter(&mut self, filter: Filter, widget: &Rc<RefCell<Widget>>) { self.cur_filter.set(filter); widget.borrow_mut().invalidate_children(); } fn create_content_merchant(&self, merchant_id: &str) -> Rc<RefCell<Widget>> { let area_state = GameState::area_state(); let area_state = area_state.borrow(); let merchant = area_state.get_merchant(merchant_id); let merchant = match merchant { None => return Widget::empty("none"), Some(ref merchant) => merchant, }; let scrollpane = ScrollPane::new(ScrollDirection::Vertical); let list_content = Widget::with_theme(scrollpane.clone(), "items_list"); for (index, &(qty, ref item)) in merchant.items().iter().enumerate() { if !self.cur_filter.get().is_allowed(&item.item) { continue; } let item_button = ItemButton::merchant(item, qty, index, merchant_id); item_button .borrow_mut() .add_action("Buy", buy_item_cb(merchant_id, index), true); scrollpane .borrow() .add_to_content(Widget::with_defaults(item_button)); } 
list_content } fn create_content_prop(&self, prop_index: usize) -> Rc<RefCell<Widget>> { let combat_active = GameState::is_combat_active(); let area_state = GameState::area_state(); let area_state = area_state.borrow(); let prop = area_state.props().get(prop_index); let scrollpane = ScrollPane::new(ScrollDirection::Vertical); let list_content = Widget::with_theme(scrollpane.clone(), "items_list"); match prop.items() { None => (), Some(items) => { for (index, &(qty, ref item)) in items.iter().enumerate() { if !self.cur_filter.get().is_allowed(&item.item) { continue; } let item_button = ItemButton::prop(item, qty, index, prop_index); if !combat_active { item_button.borrow_mut().add_action( "Take", take_item_cb(prop_index, index), true, ); } scrollpane .borrow() .add_to_content(Widget::with_defaults(item_button)); } } } list_content } fn create_content_inventory(&self) -> Rc<RefCell<Widget>> { let combat_active = GameState::is_combat_active(); let actor = &self.entity.borrow().actor; let scrollpane = ScrollPane::new(ScrollDirection::Vertical); let list_content = Widget::with_theme(scrollpane.clone(), "items_list"); let stash = GameState::party_stash(); let stash = stash.borrow(); for (index, &(quantity, ref item)) in stash.items().iter().enumerate() { if !self.cur_filter.get().is_allowed(&item.item) { continue; } let item_but = ItemButton::inventory(item, quantity, index); if let Some(ref usable) = item.item.usable { if !combat_active && item.item.meets_prereqs(&actor.actor) { let mut but = item_but.borrow_mut(); if usable.use_in_slot { but.add_action( "Add to Use Slot", set_quickslot_cb(&self.entity, index), true, ); } else { let kind = ScriptItemKind::Stash(index); but.add_action("Use", use_item_cb(&self.entity, kind), true); } } } if !combat_active && actor.can_equip(item) { item_but .borrow_mut() .add_action("Equip", equip_item_cb(&self.entity, index), true); } if !combat_active && !item.item.quest { item_but .borrow_mut() .add_action("Drop", 
drop_item_cb(&self.entity, index), false); } scrollpane .borrow() .add_to_content(Widget::with_defaults(item_but)); } list_content } } impl WidgetKind for ItemListPane { widget_kind!(NAME); fn on_add(&mut self, _widget: &Rc<RefCell<Widget>>) -> Vec<Rc<RefCell<Widget>>> { let mut children = Vec::new(); let content = match &self.kind { Kind::Entity => self.create_content_inventory(), Kind::Prop(index) => self.create_content_prop(*index), Kind::Merchant(id) => self.create_content_merchant(id), }; children.push(content); if let Kind::Entity = &self.kind { let coins_item = match Module::item(&Module::rules().coins_item) { None => { warn!("Unable to find coins item"); return Vec::new(); } Some(item) => item, }; let coins_item_state = ItemState::new(coins_item, None); let amount = GameState::party_coins() as f32 / Module::rules().item_value_display_factor; let button = ItemButton::inventory(&coins_item_state, amount as u32, 0); let coins_button = Widget::with_theme(button, "coins_button"); coins_button.borrow_mut().state.set_enabled(false); children.push(coins_button); } for filter in FILTERS_LIST.iter() { let filter = *filter; let button = Widget::with_theme( Button::empty(), &format!("filter_{:?}", filter).to_lowercase(), ); button .borrow_mut() .state .add_callback(Callback::new(Rc::new(move |widget, _| { let (parent, pane) = Widget::parent_mut::<ItemListPane>(widget); pane.set_filter(filter, &parent); }))); if filter == self.cur_filter.get() { button.borrow_mut().state.set_active(true); } children.push(button); } children } }
fn is_allowed(self, item: &Rc<Item>) -> bool { use self::Filter::*; match self { All => true, Weapon => item.is_weapon(), Armor => item.is_armor(), Accessory => { if item.is_weapon() || item.is_armor() { return false; } item.equippable.is_some() } Usable => item.usable.is_some(), } }
function_block-full_function
[]
Rust
d13/src/lib.rs
arturhoo/aoc2021
6aaed6d1207be757588ae41e25e6f3ce2ece9061
use std::cmp::max; use std::collections::HashSet; pub mod util; pub fn p1(input: &Vec<String>) -> usize { let (points, folds, edge) = parse_input(input); let (new_points, _new_edge) = perform_fold(points, &folds[0], edge); new_points.len() } pub fn p2(input: &Vec<String>) -> usize { let (mut points, folds, mut edge) = parse_input(input); for fold in folds { let (new_points, new_edge) = perform_fold(points, &fold, edge); points = new_points; edge = new_edge; } print_points(&points, &edge); points.len() } #[derive(Debug, PartialEq, Eq, Hash)] struct Point { x: usize, y: usize, } #[derive(Debug, PartialEq, Eq, Hash)] enum Direction { Vertical, Horizontal, } #[derive(Debug, PartialEq, Eq, Hash)] struct Fold { direction: Direction, coord: usize, } fn parse_input(input: &Vec<String>) -> (HashSet<Point>, Vec<Fold>, Point) { let mut points: HashSet<Point> = HashSet::new(); let mut folds: Vec<Fold> = vec![]; let (mut max_x, mut max_y) = (0usize, 0usize); for line in input { if line.is_empty() { continue; } match &line[..3] { "fol" => { let tokens: Vec<&str> = line.split(" ").collect(); let fold_tokens: Vec<&str> = tokens[2].split("=").collect(); let direction = match fold_tokens[0] { "x" => Direction::Vertical, "y" => Direction::Horizontal, _ => unreachable!(), }; let coord: usize = fold_tokens[1].parse().unwrap(); let fold = Fold { direction, coord }; folds.push(fold); } _ => { let coords: Vec<&str> = line.split(",").collect(); let point = Point { x: coords[0].parse().unwrap(), y: coords[1].parse().unwrap(), }; max_x = max(max_x, point.x); max_y = max(max_y, point.y); points.insert(point); } } } let edge = Point { x: max_x, y: max_y }; (points, folds, edge) } fn perform_fold(points: HashSet<Point>, fold: &Fold, edge: Point) -> (HashSet<Point>, Point) { let mut new_points: HashSet<Point> = HashSet::new(); let new_edge; match fold.direction { Direction::Vertical => { for point in points { if point.x > fold.coord { new_points.insert(Point { x: edge.x - point.x, y: point.y, 
}); } else { new_points.insert(point); } } new_edge = Point { x: edge.x / 2 - 1, y: edge.y, }; } Direction::Horizontal => { for point in points { if point.y > fold.coord { new_points.insert(Point { x: point.x, y: edge.y - point.y, }); } else { new_points.insert(point); } } new_edge = Point { x: edge.x, y: edge.y / 2 - 1, }; } }; (new_points, new_edge) } fn print_points(points: &HashSet<Point>, edge: &Point) -> () { for y in 0..(edge.y + 1) { for x in 0..(edge.x + 1) { if points.contains(&Point { x, y }) { print!("#"); } else { print!("."); } } println!(); } }
use std::cmp::max; use std::collections::HashSet; pub mod util; pub fn p1(input: &Vec<String>) -> usize { let (points, folds, edge) = parse_input(input); let (new_points, _new_edge) = perform_fold(points, &folds[0], edge); new_points.len() } pub fn p2(input: &Vec<String>) -> usize { let (mut points, folds, mut edge) = parse_input(input); for fold in folds { let (new_points, new_edge) = perform_fold(points, &fold, edge); points = new_points; edge = new_edge; } print_points(&points, &edge); points.len() } #[derive(Debug, PartialEq, Eq, Hash)] struct Point { x: usize, y: usize, } #[derive(Debug, PartialEq, Eq, Hash)] enum Direction { Vertical, Horizontal, } #[derive(Debug, PartialEq, Eq, Hash)] struct Fold { direction: Direction, coord: usize, } fn parse_input(input: &Vec<String>) -> (HashSet<Point>, Vec<Fold>, Point) { let mut points: HashSet<Point> = HashSet::new(); let mut folds: Vec<Fold> = vec![]; let (mut max_x, mut max_y) = (0usize, 0usize); for line in input { if line.is_empty() { continue; } match &line[..3] { "fol" => { let tokens: Vec<&str> = line.split(" ").collect(); let fold_tokens: Vec<&str> = tokens[2].split("=").collect(); let direction = match fold_tokens[0] { "x" => Direction::Vertical, "y" => Direction::Horizontal, _ => unreachable!(), }; let coord: usize = fold_tokens[1].parse().unwrap(); let fold = Fold { direction, coord }; folds.push(fold); } _ => { let coords: Vec<&st
fn perform_fold(points: HashSet<Point>, fold: &Fold, edge: Point) -> (HashSet<Point>, Point) { let mut new_points: HashSet<Point> = HashSet::new(); let new_edge; match fold.direction { Direction::Vertical => { for point in points { if point.x > fold.coord { new_points.insert(Point { x: edge.x - point.x, y: point.y, }); } else { new_points.insert(point); } } new_edge = Point { x: edge.x / 2 - 1, y: edge.y, }; } Direction::Horizontal => { for point in points { if point.y > fold.coord { new_points.insert(Point { x: point.x, y: edge.y - point.y, }); } else { new_points.insert(point); } } new_edge = Point { x: edge.x, y: edge.y / 2 - 1, }; } }; (new_points, new_edge) } fn print_points(points: &HashSet<Point>, edge: &Point) -> () { for y in 0..(edge.y + 1) { for x in 0..(edge.x + 1) { if points.contains(&Point { x, y }) { print!("#"); } else { print!("."); } } println!(); } }
r> = line.split(",").collect(); let point = Point { x: coords[0].parse().unwrap(), y: coords[1].parse().unwrap(), }; max_x = max(max_x, point.x); max_y = max(max_y, point.y); points.insert(point); } } } let edge = Point { x: max_x, y: max_y }; (points, folds, edge) }
function_block-function_prefixed
[ { "content": "fn build_lines(input: &Vec<String>) -> (Vec<Line>, usize, usize) {\n\n let (mut max_x, mut max_y) = (0usize, 0usize);\n\n let mut lines: Vec<Line> = vec![];\n\n\n\n for input_line in input {\n\n let coords: Vec<&str> = input_line.split(\"->\").map(|token| token.trim()).collect();\n\n let coord1: Vec<&str> = coords[0].split(\",\").collect();\n\n let p1 = Point {\n\n x: coord1[0].parse().unwrap(),\n\n y: coord1[1].parse().unwrap(),\n\n };\n\n let coord2: Vec<&str> = coords[1].split(\",\").collect();\n\n let p2 = Point {\n\n x: coord2[0].parse().unwrap(),\n\n y: coord2[1].parse().unwrap(),\n\n };\n\n max_x = max(max_x, max(p1.x, p2.x));\n\n max_y = max(max_y, max(p1.y, p2.y));\n\n\n\n let line = Line { p1: p1, p2: p2 };\n\n lines.push(line);\n\n }\n\n let width: usize = max_x + 1;\n\n let height: usize = max_y + 1;\n\n (lines, width, height)\n\n}\n\n\n", "file_path": "d05/src/lib.rs", "rank": 1, "score": 200399.49277992913 }, { "content": "fn mark_line(grid: &mut Vec<Vec<usize>>, line: &Line, consider_diagonal: bool) -> () {\n\n let direction = if line.p1.x == line.p2.x {\n\n Direction::Vertical\n\n } else if line.p1.y == line.p2.y {\n\n Direction::Horizontal\n\n } else {\n\n Direction::Diagonal\n\n };\n\n\n\n match (direction, consider_diagonal) {\n\n (Direction::Vertical, _) => {\n\n let mut extremes = [line.p1.y, line.p2.y];\n\n extremes.sort();\n\n for y in extremes[0]..(extremes[1] + 1) {\n\n grid[y][line.p1.x] += 1;\n\n }\n\n }\n\n (Direction::Horizontal, _) => {\n\n let mut extremes = [line.p1.x, line.p2.x];\n\n extremes.sort();\n", "file_path": "d05/src/lib.rs", "rank": 3, "score": 188500.61192361795 }, { "content": "pub fn p1(input: &Vec<String>) -> usize {\n\n let (lines, width, height) = build_lines(input);\n\n let mut grid = build_grid(width, height);\n\n\n\n for line in lines {\n\n mark_line(&mut grid, &line, false);\n\n }\n\n calculate_overlaps(&grid)\n\n}\n\n\n", "file_path": "d05/src/lib.rs", "rank": 5, "score": 186427.59060107818 }, { 
"content": "pub fn p2(input: &Vec<String>) -> usize {\n\n let (lines, width, height) = build_lines(input);\n\n let mut grid = build_grid(width, height);\n\n\n\n for line in lines {\n\n mark_line(&mut grid, &line, true);\n\n }\n\n calculate_overlaps(&grid)\n\n}\n\n\n", "file_path": "d05/src/lib.rs", "rank": 6, "score": 186427.59060107818 }, { "content": "pub fn p2(input: &Vec<String>, days: usize) -> usize {\n\n let numbers: Vec<u8> = input[0].split(\",\").map(|t| t.parse().unwrap()).collect();\n\n let mut cnt: usize = 0;\n\n let mut memory: HashMap<usize, usize> = HashMap::new();\n\n for num in &numbers {\n\n cnt += calculate_for_single_fish(*num, days, &mut memory);\n\n }\n\n cnt + numbers.len()\n\n}\n\n\n", "file_path": "d06/src/lib.rs", "rank": 8, "score": 178319.04035809755 }, { "content": "pub fn p1(input: &Vec<String>, days: i32) -> usize {\n\n let mut fishes: Vec<Lanternfish> = input[0]\n\n .split(\",\")\n\n .map(|token| Lanternfish {\n\n timer: token.parse().unwrap(),\n\n })\n\n .collect();\n\n for _ in 0..days {\n\n for i in 0..fishes.len() {\n\n let should_produce = fishes[i].process();\n\n if should_produce {\n\n let new_fish = Lanternfish { timer: 8 };\n\n fishes.push(new_fish);\n\n }\n\n }\n\n }\n\n fishes.len()\n\n}\n\n\n", "file_path": "d06/src/lib.rs", "rank": 9, "score": 169511.00380909728 }, { "content": "pub fn p1(input: &Vec<String>) -> i32 {\n\n let coord = navigate(input, false);\n\n coord.horiz * coord.depth\n\n}\n\n\n", "file_path": "d02/src/lib.rs", "rank": 10, "score": 162995.53093688883 }, { "content": "pub fn p2(input: &Vec<String>) -> i32 {\n\n let coord = navigate(input, true);\n\n coord.horiz * coord.depth\n\n}\n\n\n", "file_path": "d02/src/lib.rs", "rank": 11, "score": 162995.53093688883 }, { "content": "fn navigate(input: &Vec<String>, consider_aim: bool) -> Coord {\n\n let mut coord = Coord {\n\n horiz: 0,\n\n depth: 0,\n\n aim: 0,\n\n };\n\n for line in input {\n\n let tokens: Vec<&str> = line.split_whitespace().collect();\n\n let 
direction = tokens[0];\n\n let value: i32 = tokens[1].parse().unwrap();\n\n match (direction, consider_aim) {\n\n (\"forward\", false) => coord.horiz += value,\n\n (\"down\", false) => coord.depth += value,\n\n (\"up\", false) => coord.depth -= value,\n\n (\"forward\", true) => {\n\n coord.horiz += value;\n\n coord.depth += value * coord.aim;\n\n }\n\n (\"down\", true) => coord.aim += value,\n\n (\"up\", true) => coord.aim -= value,\n\n _ => {}\n\n }\n\n }\n\n coord\n\n}\n", "file_path": "d02/src/lib.rs", "rank": 12, "score": 152303.86255051754 }, { "content": "pub fn readlines(filename: &str) -> Vec<String> {\n\n let file = File::open(filename).unwrap();\n\n let reader = BufReader::new(file);\n\n reader\n\n .lines()\n\n .map(|l| l.unwrap().trim().to_string())\n\n .collect()\n\n}\n", "file_path": "d06/src/util.rs", "rank": 14, "score": 145822.93671798144 }, { "content": "pub fn readlines(filename: &str) -> Vec<String> {\n\n let file = File::open(filename).unwrap();\n\n let reader = BufReader::new(file);\n\n reader\n\n .lines()\n\n .map(|l| l.unwrap().trim().to_string())\n\n .collect()\n\n}\n", "file_path": "d05/src/util.rs", "rank": 15, "score": 145822.93671798144 }, { "content": "pub fn readlines(filename: &str) -> Vec<String> {\n\n let file = File::open(filename).unwrap();\n\n let reader = BufReader::new(file);\n\n reader\n\n .lines()\n\n .map(|l| l.unwrap().trim().to_string())\n\n .collect()\n\n}\n", "file_path": "d13/src/util.rs", "rank": 16, "score": 145822.93671798144 }, { "content": "pub fn readlines(filename: &str) -> Vec<String> {\n\n let file = File::open(filename).unwrap();\n\n let reader = BufReader::new(file);\n\n reader\n\n .lines()\n\n .map(|l| l.unwrap().trim().to_string())\n\n .collect()\n\n}\n", "file_path": "d02/src/util.rs", "rank": 17, "score": 145822.93671798144 }, { "content": "fn calculate_overlaps(grid: &Vec<Vec<usize>>) -> usize {\n\n let mut cnt: usize = 0;\n\n for y in 0..grid.len() {\n\n for x in 0..grid[0].len() {\n\n if grid[y][x] >= 
2 {\n\n cnt += 1;\n\n }\n\n }\n\n }\n\n cnt\n\n}\n", "file_path": "d05/src/lib.rs", "rank": 18, "score": 127403.35160916603 }, { "content": "fn build_grid(width: usize, height: usize) -> Vec<Vec<usize>> {\n\n let mut grid: Vec<Vec<usize>> = Vec::with_capacity(height);\n\n for _ in 0..height {\n\n grid.push(vec![0; width]);\n\n }\n\n grid\n\n}\n\n\n", "file_path": "d05/src/lib.rs", "rank": 19, "score": 123200.21028042794 }, { "content": "fn calculate_when_timer_at_8(days_left: usize, memory: &mut HashMap<usize, usize>) -> usize {\n\n if days_left <= 8 {\n\n return 0;\n\n } else if memory.contains_key(&days_left) {\n\n return memory[&days_left];\n\n }\n\n\n\n let directly_generated: usize = ((days_left - 2) / 7).try_into().unwrap();\n\n let mut cnt = directly_generated;\n\n for i in 0..directly_generated {\n\n cnt += calculate_when_timer_at_8(days_left - 9 - 7 * i, memory);\n\n }\n\n memory.insert(days_left, cnt);\n\n cnt\n\n}\n", "file_path": "d06/src/lib.rs", "rank": 20, "score": 122629.15042388064 }, { "content": "struct Coord {\n\n horiz: i32,\n\n depth: i32,\n\n aim: i32,\n\n}\n\n\n", "file_path": "d02/src/lib.rs", "rank": 22, "score": 115019.34113059528 }, { "content": "#[derive(Debug)]\n\nstruct Point {\n\n x: usize,\n\n y: usize,\n\n}\n\n\n", "file_path": "d05/src/lib.rs", "rank": 24, "score": 114837.54219948311 }, { "content": "#[derive(Debug)]\n\nstruct Line {\n\n p1: Point,\n\n p2: Point,\n\n}\n\n\n", "file_path": "d05/src/lib.rs", "rank": 25, "score": 114821.83837965241 }, { "content": "enum Direction {\n\n Horizontal,\n\n Vertical,\n\n Diagonal,\n\n}\n\n\n", "file_path": "d05/src/lib.rs", "rank": 27, "score": 86782.49844317426 }, { "content": " def parse_input(input)\n\n lines = input.split(\"\\n\")\n\n @entries = []\n\n lines.each do |line|\n\n patterns, value_digits = line.split('|').map(&:split)\n\n @entries << [patterns, value_digits]\n\n end\n\n end\n\n\n", "file_path": "d08/d8.rb", "rank": 28, "score": 60085.91289232545 }, { "content": " def 
parse_input(input)\n\n @grid = input.split(\"\\n\").map { |l| l.chars.map(&:to_i) }\n\n @width = @grid[0].length\n\n @height = @grid.length\n\n end\n\n\n", "file_path": "d15/d15.rb", "rank": 29, "score": 60085.91289232545 }, { "content": " def parse_input(input)\n\n lines = input.split(\"\\n\")\n\n lines.each do |line|\n\n cave1, cave2 = line.split('-')\n\n @system[cave1] << cave2\n\n @system[cave2] << cave1\n\n end\n\n end\n\n\n", "file_path": "d12/d12.rb", "rank": 30, "score": 60085.91289232545 }, { "content": " def parse_input(input)\n\n lines = input.split(\"\\n\")\n\n @drawn_numbers = lines[0].split(',').map(&:to_i)\n\n parse_boards(lines[2..])\n\n end\n\n\n", "file_path": "d04/d4.rb", "rank": 31, "score": 60085.91289232545 }, { "content": "class Line\n", "file_path": "d10/d10.rb", "rank": 32, "score": 58781.62448793658 }, { "content": "#[derive(Debug)]\n\nstruct Lanternfish {\n\n timer: u8,\n\n}\n\n\n\nimpl Lanternfish {\n\n fn process(&mut self) -> bool {\n\n self.timer = self.timer.wrapping_sub(1);\n\n if self.timer == u8::MAX {\n\n self.timer = 6;\n\n return true;\n\n }\n\n false\n\n }\n\n}\n\n\n", "file_path": "d06/src/lib.rs", "rank": 33, "score": 57410.19273915899 }, { "content": "fn main() {\n\n let input = util::readlines(\"src/input2.txt\");\n\n println!(\"{}\", d2::p1(&input));\n\n println!(\"{}\", d2::p2(&input));\n\n}\n", "file_path": "d02/src/main.rs", "rank": 34, "score": 54865.75719158695 }, { "content": "fn main() {\n\n let input = util::readlines(\"src/input6.txt\");\n\n println!(\"{}\", d6::p1(&input, 80));\n\n println!(\"{}\", d6::p2(&input, 256));\n\n}\n", "file_path": "d06/src/main.rs", "rank": 35, "score": 54865.75719158695 }, { "content": "fn main() {\n\n let input = util::readlines(\"src/input13.txt\");\n\n println!(\"{}\", d13::p1(&input));\n\n println!(\"{}\", d13::p2(&input));\n\n}\n", "file_path": "d13/src/main.rs", "rank": 36, "score": 54865.75719158695 }, { "content": "fn main() {\n\n let input = 
util::readlines(\"src/input5.txt\");\n\n println!(\"{}\", d5::p1(&input));\n\n println!(\"{}\", d5::p2(&input));\n\n}\n", "file_path": "d05/src/main.rs", "rank": 37, "score": 54865.75719158695 }, { "content": " def initialize(input)\n\n @numbers = input.split(\"\\n\").map(&:to_i)\n\n end\n\n\n", "file_path": "d01/d1.rb", "rank": 38, "score": 53114.671395591664 }, { "content": " def initialize(input)\n\n @system = Hash.new { |hash, key| hash[key] = [] }\n\n parse_input(input)\n\n end\n\n\n", "file_path": "d12/d12.rb", "rank": 39, "score": 53114.671395591664 }, { "content": " def initialize(input)\n\n lines = input.split(\"\\n\")\n\n @octopus_map = OctopusMap.new(lines)\n\n end\n\n\n", "file_path": "d11/d11.rb", "rank": 40, "score": 53114.671395591664 }, { "content": " def initialize(input)\n\n parse_input(input)\n\n end\n\n\n", "file_path": "d08/d8.rb", "rank": 41, "score": 53114.671395591664 }, { "content": " def initialize(input)\n\n lines = input.split(\"\\n\")\n\n @heightmap = Heightmap.new(lines)\n\n end\n\n\n", "file_path": "d09/d9.rb", "rank": 42, "score": 53114.671395591664 }, { "content": " def initialize(input)\n\n @boards = []\n\n parse_input(input)\n\n end\n\n\n", "file_path": "d04/d4.rb", "rank": 43, "score": 53114.671395591664 }, { "content": " def initialize(input)\n\n lines = input.split(\"\\n\")\n\n @lines = lines.map { |l| Line.new(l) }\n\n end\n\n\n", "file_path": "d10/d10.rb", "rank": 44, "score": 53114.671395591664 }, { "content": " def initialize(input)\n\n parse_input(input)\n\n end\n\n\n", "file_path": "d15/d15.rb", "rank": 45, "score": 53114.671395591664 }, { "content": "fn calculate_for_single_fish(\n\n timer: u8,\n\n days_left: usize,\n\n memory: &mut HashMap<usize, usize>,\n\n) -> usize {\n\n // Normalize the fish as if its timer was at 8\n\n let mut new_days_left = days_left;\n\n if timer <= 8 {\n\n new_days_left = (8 - timer as usize) + days_left;\n\n }\n\n\n\n let result = calculate_when_timer_at_8(new_days_left, memory);\n\n 
result\n\n}\n\n\n", "file_path": "d06/src/lib.rs", "rank": 46, "score": 52541.07802854499 }, { "content": "#[test]\n\nfn p1_solves_the_example() {\n\n let example = d13::util::readlines(\"tests/example.txt\");\n\n assert_eq!(17, d13::p1(&example));\n\n}\n\n\n", "file_path": "d13/tests/test_13.rs", "rank": 47, "score": 52541.07802854499 }, { "content": "#[test]\n\nfn p2_solves_the_example() {\n\n let example = d13::util::readlines(\"tests/example.txt\");\n\n assert_eq!(16, d13::p2(&example));\n\n}\n", "file_path": "d13/tests/test_13.rs", "rank": 48, "score": 52541.07802854499 }, { "content": " def coord_risk(x, y)\n\n @map[y][x] + 1\n\n end\n\n\n", "file_path": "d09/d9.rb", "rank": 49, "score": 52166.695434892245 }, { "content": " def neighbour_coords(x, y)\n\n coords = []\n\n [[0, -1], [1, 0], [0, 1], [-1, 0]].each do |x_offset, y_offset|\n\n next if (y_offset + y).negative? || y_offset + y >= @map.length\n\n next if (x_offset + x).negative? || x_offset + x >= @map[0].length\n\n\n\n coords << [x_offset + x, y_offset + y]\n\n end\n\n coords\n\n end\n\n\n", "file_path": "d09/d9.rb", "rank": 50, "score": 52166.695434892245 }, { "content": " def neighbour_coords(x, y)\n\n coords = []\n\n (-1..1).each do |y_offset|\n\n (-1..1).each do |x_offset|\n\n next if y_offset.zero? && x_offset.zero?\n\n next if (y_offset + y).negative? || y_offset + y >= @map.length\n\n next if (x_offset + x).negative? 
|| x_offset + x >= @map[0].length\n\n\n\n coords << [x_offset + x, y_offset + y]\n\n end\n\n end\n\n coords\n\n end\n\n\n", "file_path": "d11/d11.rb", "rank": 51, "score": 52166.695434892245 }, { "content": "#[test]\n\nfn p1_solves_the_example() {\n\n let example = d2::util::readlines(\"tests/example.txt\");\n\n assert_eq!(150, d2::p1(&example));\n\n}\n\n\n", "file_path": "d02/tests/test_d2.rs", "rank": 52, "score": 51518.03347010907 }, { "content": "#[test]\n\nfn p2_solves_the_example() {\n\n let example = d5::util::readlines(\"tests/example.txt\");\n\n assert_eq!(12, d5::p2(&example));\n\n}\n", "file_path": "d05/tests/test_d5.rs", "rank": 53, "score": 51518.03347010907 }, { "content": "#[test]\n\nfn p1_solves_the_example() {\n\n let example = d6::util::readlines(\"tests/example.txt\");\n\n assert_eq!(26, d6::p1(&example, 18));\n\n assert_eq!(5934, d6::p1(&example, 80));\n\n}\n\n\n", "file_path": "d06/tests/test_d6.rs", "rank": 54, "score": 51518.03347010907 }, { "content": "#[test]\n\nfn p1_solves_the_example() {\n\n let example = d5::util::readlines(\"tests/example.txt\");\n\n assert_eq!(5, d5::p1(&example));\n\n}\n\n\n", "file_path": "d05/tests/test_d5.rs", "rank": 55, "score": 51518.03347010907 }, { "content": "#[test]\n\nfn p2_solves_the_example() {\n\n let example = d6::util::readlines(\"tests/example.txt\");\n\n assert_eq!(26, d6::p2(&example, 18));\n\n assert_eq!(5934, d6::p2(&example, 80));\n\n assert_eq!(26984457539, d6::p2(&example, 256));\n\n}\n", "file_path": "d06/tests/test_d6.rs", "rank": 56, "score": 51518.03347010907 }, { "content": "#[test]\n\nfn p2_solves_the_example() {\n\n let example = d2::util::readlines(\"tests/example.txt\");\n\n assert_eq!(900, d2::p2(&example));\n\n}\n", "file_path": "d02/tests/test_d2.rs", "rank": 57, "score": 51518.03347010907 }, { "content": " def find_basin(x, y, points)\n\n return if points.include?([x, y]) || @map[y][x] == 9\n\n\n\n points << [x, y]\n\n neighbour_coords(x, y).each do |n_x, n_y|\n\n find_basin(n_x, 
n_y, points)\n\n end\n\n end\n\nend\n\n\n\nif $PROGRAM_NAME == __FILE__\n\n file = File.read('input9.txt')\n\n\n\n puts(D9.new(file).p1)\n\n puts(D9.new(file).p2)\n\nend\n", "file_path": "d09/d9.rb", "rank": 58, "score": 48907.537334594934 }, { "content": " def coords_to_values(coords)\n\n coords.map { |x, y| @map[y][x] }\n\n end\n\n\n", "file_path": "d09/d9.rb", "rank": 59, "score": 36025.135004341806 }, { "content": "use std::fs::File;\n\nuse std::io::BufRead;\n\nuse std::io::BufReader;\n\n\n", "file_path": "d13/src/util.rs", "rank": 60, "score": 30222.47679064645 }, { "content": "use std::fs::File;\n\nuse std::io::BufRead;\n\nuse std::io::BufReader;\n\n\n", "file_path": "d06/src/util.rs", "rank": 61, "score": 30222.47679064645 }, { "content": "use std::fs::File;\n\nuse std::io::BufRead;\n\nuse std::io::BufReader;\n\n\n", "file_path": "d02/src/util.rs", "rank": 62, "score": 30222.47679064645 }, { "content": "use std::fs::File;\n\nuse std::io::BufRead;\n\nuse std::io::BufReader;\n\n\n", "file_path": "d05/src/util.rs", "rank": 63, "score": 30222.47679064645 }, { "content": " def low_points\n\n points = []\n\n @map.each_with_index do |line, y_coord|\n\n line.each_with_index do |value, x_coord|\n\n points << [x_coord, y_coord] if neighbour_coords(x_coord, y_coord)\n\n .select { |x, y| @map[y][x] <= value }\n\n .empty?\n\n end\n\n end\n\n points\n\n end\n\n\n", "file_path": "d09/d9.rb", "rank": 64, "score": 28880.36605160151 }, { "content": " # @param lines [Array<String>] list of string of numbers\n\n def initialize(lines)\n\n @numbers = []\n\n lines.each { |l| @numbers << l.split.map(&:to_i) }\n\n\n\n @rows_cnt = @numbers.length\n\n @cols_cnt = @numbers[0].length\n\n @score = {\n\n rows: Array.new(@rows_cnt) { 0 },\n\n cols: Array.new(@cols_cnt) { 0 }\n\n }\n\n @markings = Array.new(@rows_cnt) { Array.new(@cols_cnt) { false } }\n\n @bingo = false\n\n end\n\n\n", "file_path": "d04/d4.rb", "rank": 65, "score": 28817.768261030462 }, { "content": " def 
initialize(lines)\n\n @map = lines.map { |l| l.chars.map(&:to_i) }\n\n reset_flashes\n\n @total_flashes = 0\n\n @total_steps = 0\n\n end\n\n\n", "file_path": "d11/d11.rb", "rank": 66, "score": 28815.221757887666 }, { "content": " def initialize(lines)\n\n @map = lines.map { |l| l.chars.map(&:to_i) }\n\n end\n\n\n", "file_path": "d09/d9.rb", "rank": 67, "score": 28815.221757887666 }, { "content": " def initialize(line_str)\n\n @line = line_str.chars\n\n end\n\n\n", "file_path": "d10/d10.rb", "rank": 68, "score": 27564.199105591535 }, { "content": " def parse_boards(lines)\n\n temp_lines = []\n\n lines.each do |line|\n\n if line.empty?\n\n @boards << Board.new(temp_lines)\n\n temp_lines = []\n\n else\n\n temp_lines << line\n\n end\n\n end\n\n @boards << Board.new(temp_lines) unless temp_lines.empty?\n\n end\n\nend\n\n\n", "file_path": "d04/d4.rb", "rank": 69, "score": 27564.199105591535 }, { "content": " def process_octopus(x, y)\n\n @map[y][x] += 1\n\n return unless @map[y][x] > 9 && !@flash_map[y][x]\n\n\n\n @total_flashes += 1\n\n @flash_map[y][x] = true\n\n\n\n neighbour_coords(x, y).each do |n_x, n_y|\n\n process_octopus(n_x, n_y)\n\n end\n\n end\n\n\n", "file_path": "d11/d11.rb", "rank": 70, "score": 23253.93579827601 }, { "content": "use std::collections::HashMap;\n\n\n\npub mod util;\n\n\n\n#[derive(Debug)]\n", "file_path": "d06/src/lib.rs", "rank": 72, "score": 13.626755572683136 }, { "content": "pub mod util;\n\nuse std::cmp::max;\n\n\n\n#[derive(Debug)]\n", "file_path": "d05/src/lib.rs", "rank": 75, "score": 9.32007435010414 }, { "content": "pub mod util;\n\n\n", "file_path": "d02/src/lib.rs", "rank": 76, "score": 8.697003485014104 }, { "content": " for x in extremes[0]..(extremes[1] + 1) {\n\n grid[line.p1.y][x] += 1;\n\n }\n\n }\n\n (Direction::Diagonal, true) => {\n\n let line_length = (line.p1.x as i32 - line.p2.x as i32).abs() + 1;\n\n let x_opr: i32 = if line.p2.x > line.p1.x { 1 } else { -1 };\n\n let y_opr: i32 = if line.p2.y > line.p1.y { 1 } else 
{ -1 };\n\n let (mut x, mut y) = (line.p1.x as i32, line.p1.y as i32);\n\n for _ in 0..(line_length) {\n\n grid[y as usize][x as usize] += 1;\n\n y += y_opr;\n\n x += x_opr;\n\n }\n\n }\n\n _ => {}\n\n };\n\n}\n\n\n", "file_path": "d05/src/lib.rs", "rank": 77, "score": 8.147427688279926 }, { "content": "use d13;\n\npub use d13::util;\n\n\n\n#[test]\n", "file_path": "d13/tests/test_13.rs", "rank": 78, "score": 8.111402876259405 }, { "content": "use d6;\n\npub use d6::util;\n\n\n\n#[test]\n", "file_path": "d06/tests/test_d6.rs", "rank": 79, "score": 8.111402876259405 }, { "content": "use d5;\n\npub use d5::util;\n\n\n\n#[test]\n", "file_path": "d05/tests/test_d5.rs", "rank": 80, "score": 8.111402876259405 }, { "content": "use d2;\n\npub use d2::util;\n\n\n\n#[test]\n", "file_path": "d02/tests/test_d2.rs", "rank": 81, "score": 8.111402876259405 }, { "content": "mod util;\n\n\n", "file_path": "d06/src/main.rs", "rank": 82, "score": 6.028050159248739 }, { "content": "mod util;\n\n\n", "file_path": "d13/src/main.rs", "rank": 83, "score": 6.028050159248739 }, { "content": "mod util;\n\n\n", "file_path": "d02/src/main.rs", "rank": 84, "score": 6.028050159248739 }, { "content": "mod util;\n\n\n", "file_path": "d05/src/main.rs", "rank": 85, "score": 6.028050159248739 }, { "content": "\n\n 14 21 17 24 4\n\n 10 16 15 9 19\n\n 18 8 23 26 20\n\n 22 11 13 6 5\n\n 2 0 12 3 7\n\n EXAMPLE_INPUT\n\n end\n\n\n\n it 'works for part 1' do\n\n expect(D4.new(input).p1).to eq(4512)\n\n end\n\n\n\n it 'works for part 2' do\n\n expect(D4.new(input).p2).to eq(1924)\n\n end\n\nend\n", "file_path": "d04/d4_spec.rb", "rank": 86, "score": 5.446768371187649 }, { "content": "# frozen_string_literal: true\n\n\n\nrequire './d1'\n\n\n\ndescribe D1 do\n\n let(:input) do\n\n <<~EXAMPLE_INPUT\n\n 199\n\n 200\n\n 208\n\n 210\n\n 200\n\n 207\n\n 240\n\n 269\n\n 260\n\n 263\n\n EXAMPLE_INPUT\n\n end\n\n\n\n it 'works for part 1' do\n\n expect(D1.new(input).p1).to eq(7)\n\n end\n\n\n\n it 'works for part 2' 
do\n\n expect(D1.new(input).p2).to eq(5)\n\n end\n\nend\n", "file_path": "d01/d1_spec.rb", "rank": 87, "score": 5.144947797219974 }, { "content": "# frozen_string_literal: true\n\n\n\nrequire './d15'\n\n\n\ndescribe D15 do\n\n let(:input) do\n\n <<~EXAMPLE_INPUT\n\n 1163751742\n\n 1381373672\n\n 2136511328\n\n 3694931569\n\n 7463417111\n\n 1319128137\n\n 1359912421\n\n 3125421639\n\n 1293138521\n\n 2311944581\n\n EXAMPLE_INPUT\n\n end\n\n\n\n it 'works for part 1' do\n\n expect(D15.new(input).p1).to eq(40)\n\n end\n\n\n\n it 'works for part 2' do\n\n expect(D15.new(input).p2).to eq(315)\n\n end\n\nend\n", "file_path": "d15/d15_spec.rb", "rank": 88, "score": 5.144947797219974 }, { "content": "# frozen_string_literal: true\n\n\n\nrequire './d10'\n\n\n\ndescribe D10 do\n\n let(:input) do\n\n <<~EXAMPLE_INPUT\n\n [({(<(())[]>[[{[]{<()<>>\n\n [(()[<>])]({[<{<<[]>>(\n\n {([(<{}[<>[]}>{[]{[(<()>\n\n (((({<>}<{<{<>}{[]{[]{}\n\n [[<[([]))<([[{}[[()]]]\n\n [{[{({}]{}}([{[{{{}}([]\n\n {<[[]]>}<{[{[{[]{()[[[]\n\n [<(<(<(<{}))><([]([]()\n\n <{([([[(<>()){}]>(<<{{\n\n <{([{{}}[<[[[<>{}]]]>[]]\n\n EXAMPLE_INPUT\n\n end\n\n\n\n it 'works for part 1' do\n\n expect(D10.new(input).p1).to eq(26_397)\n\n end\n\n\n\n it 'works for part 2' do\n\n expect(D10.new(input).p2).to eq(288_957)\n\n end\n\nend\n", "file_path": "d10/d10_spec.rb", "rank": 89, "score": 5.144947797219974 }, { "content": "# frozen_string_literal: true\n\n\n\nrequire './d11'\n\n\n\ndescribe D11 do\n\n let(:input) do\n\n <<~EXAMPLE_INPUT\n\n 5483143223\n\n 2745854711\n\n 5264556173\n\n 6141336146\n\n 6357385478\n\n 4167524645\n\n 2176841721\n\n 6882881134\n\n 4846848554\n\n 5283751526\n\n EXAMPLE_INPUT\n\n end\n\n\n\n it 'works for part 1' do\n\n expect(D11.new(input).p1).to eq(1656)\n\n end\n\n\n\n it 'works for part 2' do\n\n expect(D11.new(input).p2).to eq(195)\n\n end\n\nend\n", "file_path": "d11/d11_spec.rb", "rank": 90, "score": 5.144947797219974 }, { "content": "# frozen_string_literal: true\n\n\n\nrequire 
'./d9'\n\n\n\ndescribe D9 do\n\n let(:input) do\n\n <<~EXAMPLE_INPUT\n\n 2199943210\n\n 3987894921\n\n 9856789892\n\n 8767896789\n\n 9899965678\n\n EXAMPLE_INPUT\n\n end\n\n\n\n it 'works for part 1' do\n\n expect(D9.new(input).p1).to eq(15)\n\n end\n\n\n\n it 'works for part 2' do\n\n expect(D9.new(input).p2).to eq(1134)\n\n end\n\nend\n", "file_path": "d09/d9_spec.rb", "rank": 91, "score": 5.144947797219974 }, { "content": " end-zg\n\n zg-sl\n\n zg-pj\n\n pj-he\n\n RW-he\n\n fs-DX\n\n pj-RW\n\n zg-RW\n\n start-pj\n\n he-WI\n\n zg-he\n\n pj-fs\n\n start-RW\n\n EXAMPLE_INPUT\n\n end\n\n\n\n it 'works for part 1' do\n\n expect(D12.new(input1).p1).to eq(10)\n\n expect(D12.new(input2).p1).to eq(19)\n\n expect(D12.new(input3).p1).to eq(226)\n", "file_path": "d12/d12_spec.rb", "rank": 92, "score": 3.2804675740320826 }, { "content": "# frozen_string_literal: true\n\n\n\nrequire './d4'\n\n\n\ndescribe D4 do\n\n let(:input) do\n\n <<~EXAMPLE_INPUT\n\n 7,4,9,5,11,17,23,2,0,14,21,24,10,16,13,6,15,25,12,22,18,20,8,19,3,26,1\n\n\n\n 22 13 17 11 0\n\n 8 2 23 4 24\n\n 21 9 14 16 7\n\n 6 10 3 18 5\n\n 1 12 20 15 19\n\n\n\n 3 15 0 2 22\n\n 9 18 13 17 5\n\n 19 8 7 25 23\n\n 20 11 10 24 4\n\n 14 21 16 12 6\n", "file_path": "d04/d4_spec.rb", "rank": 93, "score": 2.8858555411815376 }, { "content": "# frozen_string_literal: true\n\n\n\nrequire './d12'\n\n\n\n# rubocop:disable Metrics/BlockLength\n\ndescribe D12 do\n\n let(:input1) do\n\n <<~EXAMPLE_INPUT\n\n start-A\n\n start-b\n\n A-c\n\n A-b\n\n b-d\n\n A-end\n\n b-end\n\n EXAMPLE_INPUT\n\n end\n\n\n\n let(:input2) do\n\n <<~EXAMPLE_INPUT\n", "file_path": "d12/d12_spec.rb", "rank": 94, "score": 2.739473758327858 }, { "content": " end\n\n\n\n it 'works for part 2' do\n\n expect(D12.new(input1).p2).to eq(36)\n\n expect(D12.new(input2).p2).to eq(103)\n\n expect(D12.new(input3).p2).to eq(3509)\n\n end\n\nend\n\n# rubocop:enable Metrics/BlockLength\n", "file_path": "d12/d12_spec.rb", "rank": 95, "score": 2.6624585532412395 }, { "content": 
" dc-end\n\n HN-start\n\n start-kj\n\n dc-start\n\n dc-HN\n\n LN-dc\n\n HN-end\n\n kj-sa\n\n kj-HN\n\n kj-dc\n\n EXAMPLE_INPUT\n\n end\n\n\n\n let(:input3) do\n\n <<~EXAMPLE_INPUT\n\n fs-end\n\n he-DX\n\n fs-he\n\n start-DX\n\n pj-DX\n", "file_path": "d12/d12_spec.rb", "rank": 96, "score": 2.0871346879313535 }, { "content": "# frozen_string_literal: true\n\n\n\nrequire './d8'\n\n\n\ndescribe D8 do\n\n let(:input) do\n\n <<~EXAMPLE_INPUT\n\n be cfbegad cbdgef fgaecd cgeb fdcge agebfd fecdb fabcd edb | fdgacbe cefdb cefbgd gcbe\n\n edbfga begcd cbg gc gcadebf fbgde acbgfd abcde gfcbed gfec | fcgedb cgb dgebacf gc\n\n fgaebd cg bdaec gdafb agbcfd gdcbef bgcad gfac gcb cdgabef | cg cg fdcagb cbg\n\n fbegcd cbd adcefb dageb afcb bc aefdc ecdab fgdeca fcdbega | efabcd cedba gadfec cb\n\n aecbfdg fbg gf bafeg dbefa fcge gcbea fcaegb dgceab fcbdga | gecf egdcabf bgf bfgea\n\n fgeab ca afcebg bdacfeg cfaedg gcfdb baec bfadeg bafgc acf | gebdcfa ecba ca fadegcb\n\n dbcfg fgd bdegcaf fgec aegbdf ecdfab fbedc dacgb gdcebf gf | cefg dcbef fcge gbcadfe\n\n bdfegc cbegaf gecbf dfcage bdacg ed bedf ced adcbefg gebcd | ed bcgafe cdgba cbgef\n\n egadfb cdbfeg cegd fecab cgb gbdefca cg fgcdab egfdb bfceg | gbdfcae bgc cg cgb\n\n gcafb gcf dcaebfg ecagb gf abcdeg gaef cafbge fdbac fegbdc | fgae cfgab fg bagce\n\n EXAMPLE_INPUT\n\n end\n\n\n\n it 'works for part 1' do\n\n expect(D8.new(input).p1).to eq(26)\n\n end\n\n\n\n it 'works for part 2' do\n\n expect(D8.new(input).p2).to eq(61_229)\n\n end\n\nend\n", "file_path": "d08/d8_spec.rb", "rank": 97, "score": 2.084474407834746 }, { "content": "# Advent of Code 2021\n\n\n\n[![CI](https://github.com/elixir-lang/elixir/workflows/CI/badge.svg?branch=main)](https://github.com/arturhoo/aoc2021/actions/workflows/ci.yml?query=branch%3Amain++)\n\n\n\nhttps://adventofcode.com/2021\n\n\n", "file_path": "README.md", "rank": 98, "score": 1.2343442983202406 } ]
Rust
all-is-cubes-desktop/src/aic_glfw.rs
kpreid/all-is-cubes
81e0fb8fbd5a0557f0f9002085f4160bce37174a
use cgmath::{Point2, Vector2}; use glfw::{Action, Context as _, CursorMode, Window, WindowEvent}; use luminance_glfw::GlfwSurface; use luminance_windowing::{WindowDim, WindowOpt}; use std::error::Error; use std::time::Instant; use all_is_cubes::apps::AllIsCubesAppState; use all_is_cubes::camera::Viewport; use all_is_cubes::lum::GLRenderer; pub fn glfw_main_loop( mut app: AllIsCubesAppState, window_title: &str, requested_size: Option<Vector2<u32>>, ) -> Result<(), Box<dyn Error + Send + Sync>> { let glfw_start_time = Instant::now(); let mut glfw = glfw::init::<()>(None)?; let dim = glfw.with_primary_monitor(|_, monitor| { if let Some(size) = requested_size { WindowDim::Windowed { width: size.x.max(1), height: size.y.max(1), } } else if let Some(monitor) = monitor { let (_, _, width, height) = monitor.get_workarea(); WindowDim::Windowed { width: width as u32 * 7 / 10, height: height as u32 * 7 / 10, } } else { WindowDim::Windowed { width: 800, height: 600, } } }); let GlfwSurface { context, events_rx, .. 
} = GlfwSurface::new_gl33(window_title, WindowOpt::default().set_dim(dim))?; let viewport = map_glfw_viewport(&context.window); let mut renderer = GLRenderer::new(context, app.graphics_options(), viewport)?; renderer.set_character(app.character().map(Clone::clone)); renderer.set_ui_space(Some(app.ui_space().clone())); let ready_time = Instant::now(); log::debug!( "Renderer and GLFW initialized in {:.3} s", ready_time.duration_since(glfw_start_time).as_secs_f32() ); let mut first_frame = true; 'app: loop { app.frame_clock.advance_to(Instant::now()); app.maybe_step_universe(); if app.frame_clock.should_draw() { renderer.update_world_camera(); app.update_cursor(renderer.ui_camera(), renderer.world_camera()); let render_info = renderer.render_frame(app.cursor_result()).unwrap(); renderer .add_info_text(&format!("{}", app.info_text(render_info))) .unwrap(); renderer.surface.window.swap_buffers(); app.frame_clock.did_draw(); } else { std::thread::yield_now(); } if first_frame { first_frame = false; log::debug!( "First frame completed in {:.3} s", Instant::now().duration_since(ready_time).as_secs_f32() ); } renderer .surface .window .set_cursor_mode(if app.input_processor.wants_pointer_lock() { CursorMode::Disabled } else { CursorMode::Normal }); app.input_processor .has_pointer_lock(renderer.surface.window.get_cursor_mode() == CursorMode::Disabled); glfw.poll_events(); for (_, event) in events_rx.try_iter() { match event { WindowEvent::Close => break 'app, WindowEvent::Key(key, _, Action::Press, _) => { if let Some(key) = map_glfw_key(key) { app.input_processor.key_down(key); } } WindowEvent::Key(key, _, Action::Release, _) => { if let Some(key) = map_glfw_key(key) { app.input_processor.key_up(key); } } WindowEvent::Key(_, _, Action::Repeat, _) => { } WindowEvent::Char(..) => {} WindowEvent::CharModifiers(..) => {} WindowEvent::CursorPos(..) 
=> { app.input_processor.mouse_pixel_position( renderer.viewport(), Some(Point2::from(renderer.surface.window.get_cursor_pos())), true, ); } WindowEvent::CursorEnter(true) => { app.input_processor.mouse_pixel_position( renderer.viewport(), Some(Point2::from(renderer.surface.window.get_cursor_pos())), false, ); } WindowEvent::CursorEnter(false) => { app.input_processor .mouse_pixel_position(renderer.viewport(), None, false); } WindowEvent::MouseButton(button, Action::Press, _) => { app.click(map_glfw_button(button)); } WindowEvent::MouseButton(_, Action::Release, _) => {} WindowEvent::MouseButton(_, Action::Repeat, _) => {} WindowEvent::Scroll(..) => { } WindowEvent::FramebufferSize(..) | WindowEvent::ContentScale(..) => { renderer .set_viewport(map_glfw_viewport(&renderer.surface.window)) .unwrap(); } WindowEvent::Focus(has_focus) => { app.input_processor.key_focus(has_focus); } WindowEvent::Pos(..) => {} WindowEvent::Size(..) => {} WindowEvent::Refresh => {} WindowEvent::Iconify(_) => {} WindowEvent::FileDrop(_) => {} WindowEvent::Maximize(_) => {} } } } Ok(()) } pub fn map_glfw_viewport(window: &Window) -> Viewport { Viewport { nominal_size: Vector2::from(window.get_size()).map(|s| s.into()), framebuffer_size: Vector2::from(window.get_framebuffer_size()).map(|s| s as u32), } } pub fn map_glfw_button(button: glfw::MouseButton) -> usize { use glfw::MouseButton::*; match button { Button1 => 0, Button2 => 1, Button3 => 2, Button4 => 3, Button5 => 4, Button6 => 5, Button7 => 6, Button8 => 7, } } pub fn map_glfw_key(key: glfw::Key) -> Option<all_is_cubes::apps::Key> { use all_is_cubes::apps::Key as A; use glfw::Key as G; Some(match key { G::Space => A::Character(' '), G::Apostrophe => A::Character('\''), G::Comma => A::Character(','), G::Minus => A::Character('-'), G::Period => A::Character('.'), G::Slash => A::Character('/'), G::Num0 => A::Character('0'), G::Num1 => A::Character('1'), G::Num2 => A::Character('2'), G::Num3 => A::Character('3'), G::Num4 => 
A::Character('4'), G::Num5 => A::Character('5'), G::Num6 => A::Character('6'), G::Num7 => A::Character('7'), G::Num8 => A::Character('8'), G::Num9 => A::Character('9'), G::Semicolon => A::Character(';'), G::Equal => A::Character('='), G::A => A::Character('a'), G::B => A::Character('b'), G::C => A::Character('c'), G::D => A::Character('d'), G::E => A::Character('e'), G::F => A::Character('f'), G::G => A::Character('g'), G::H => A::Character('h'), G::I => A::Character('i'), G::J => A::Character('j'), G::K => A::Character('k'), G::L => A::Character('l'), G::M => A::Character('m'), G::N => A::Character('n'), G::O => A::Character('o'), G::P => A::Character('p'), G::Q => A::Character('q'), G::R => A::Character('r'), G::S => A::Character('s'), G::T => A::Character('t'), G::U => A::Character('u'), G::V => A::Character('v'), G::W => A::Character('w'), G::X => A::Character('x'), G::Y => A::Character('y'), G::Z => A::Character('z'), G::LeftBracket => A::Character('['), G::Backslash => A::Character('\\'), G::RightBracket => A::Character(']'), G::GraveAccent => A::Character('`'), G::World1 => return None, G::World2 => return None, G::Escape => return None, G::Enter => A::Character('\r'), G::Tab => A::Character('\t'), G::Backspace => A::Character('\u{8}'), G::Insert => return None, G::Delete => return None, G::Right => A::Right, G::Left => A::Left, G::Down => A::Down, G::Up => A::Up, G::PageUp => return None, G::PageDown => return None, G::Home => return None, G::End => return None, G::CapsLock => return None, G::ScrollLock => return None, G::NumLock => return None, G::PrintScreen => return None, G::Pause => return None, G::F1 => return None, G::F2 => return None, G::F3 => return None, G::F4 => return None, G::F5 => return None, G::F6 => return None, G::F7 => return None, G::F8 => return None, G::F9 => return None, G::F10 => return None, G::F11 => return None, G::F12 => return None, G::F13 => return None, G::F14 => return None, G::F15 => return None, G::F16 => return None, 
G::F17 => return None, G::F18 => return None, G::F19 => return None, G::F20 => return None, G::F21 => return None, G::F22 => return None, G::F23 => return None, G::F24 => return None, G::F25 => return None, G::Kp0 => A::Character('0'), G::Kp1 => A::Character('1'), G::Kp2 => A::Character('2'), G::Kp3 => A::Character('3'), G::Kp4 => A::Character('4'), G::Kp5 => A::Character('5'), G::Kp6 => A::Character('6'), G::Kp7 => A::Character('7'), G::Kp8 => A::Character('8'), G::Kp9 => A::Character('9'), G::KpDecimal => A::Character('.'), G::KpDivide => A::Character('/'), G::KpMultiply => A::Character('*'), G::KpSubtract => A::Character('-'), G::KpAdd => A::Character('+'), G::KpEnter => A::Character('\r'), G::KpEqual => A::Character('='), G::LeftShift => return None, G::LeftControl => return None, G::LeftAlt => return None, G::LeftSuper => return None, G::RightShift => return None, G::RightControl => return None, G::RightAlt => return None, G::RightSuper => return None, G::Menu => return None, G::Unknown => return None, }) }
use cgmath::{Point2, Vector2}; use glfw::{Action, Context as _, CursorMode, Window, WindowEvent}; use luminance_glfw::GlfwSurface; use luminance_windowing::{WindowDim, WindowOpt}; use std::error::Error; use std::time::Instant; use all_is_cubes::apps::AllIsCubesAppState; use all_is_cubes::camera::Viewport; use all_is_cubes::lum::GLRenderer; pub fn glfw_main_loop( mut app: AllIsCubesAppState, window_title: &str, requested_size: Option<Vector2<u32>>, ) -> Result<(), Box<dyn Error + Send + Sync>> { let glfw_start_time = Instant::now(); let mut glfw = glfw::init::<()>(None)?; let dim = glfw.with_primary_monitor(|_, monitor| { if let Some(size) = requested_size { WindowDim::Windowed { width: size.x.max(1), height: size.y.max(1), } } else if let Some(monitor) = monitor { let (_, _, width, height) = monitor.get_workarea(); WindowDim::Windowed { width: width as u32 * 7 / 10, height: height as u32 * 7 / 10, } } else { WindowDim::Windowed { width: 800, height: 600, } } }); let GlfwSurface { context, events_rx, .. 
} = GlfwSurface::new_gl33(window_title, WindowOpt::default().set_dim(dim))?; let viewport = map_glfw_viewport(&context.window); let mut renderer = GLRenderer::new(context, app.graphics_options(), viewport)?; renderer.set_character(app.character().map(Clone::clone)); renderer.set_ui_space(Some(app.ui_space().clone())); let ready_time = Instant::now(); log::debug!( "Renderer and GLFW initialized in {:.3} s", ready_time.duration_since(glfw_start_time).as_secs_f32() ); let mut first_frame = true; 'app: loop { app.frame_clock.advance_to(Instant::now()); app.maybe_step_universe(); if app.frame_clock.should_draw() { renderer.update_world_camera(); app.update_cursor(renderer.ui_camera(), renderer.world_camera()); let render_info = renderer.render_frame(app.cursor_result()).unwrap(); renderer .add_info_text(&format!("{}", app.info_text(render_info))) .unwrap(); renderer.surface.window.swap_buffers(); app.frame_clock.did_draw(); } else { std::thread::yield_now(); } if first_frame { first_frame = false; log::debug!( "First frame completed in {:.3} s", Instant::now().duration_since(ready_time).as_secs_f32() ); } renderer .surface .window .set_cursor_mode(if app.input_processor.wants_pointer_lock() { CursorMode::Disabled } else { CursorMode::Normal }); app.input_processor .has_pointer_lock(renderer.surface.window.get_cursor_mode() == CursorMode::Disabled); glfw.poll_events(); for (_, event) in events_rx.try_iter() { match event { WindowEvent::Close => break 'app, WindowEvent::Key(key, _, Action::Press, _) => { if let Some(key) = map_glfw_key(key) { app.input_processor.key_down(key); } } WindowEvent::Key(key, _, Action::Release, _) => { if let Some(key) = map_glfw_key(key) { app.input_processor.key_up(key); } } WindowEvent::Key(_, _, Action::Repeat, _) => { } WindowEvent::Char(..) => {} WindowEvent::CharModifiers(..) => {} WindowEvent::CursorPos(..) 
=> { app.input_processor.mouse_pixel_position( renderer.viewport(), Some(Point2::from(renderer.surface.window.get_cursor_pos())), true, ); } WindowEvent::CursorEnter(true) => { app.input_processor.mouse_pixel_position( renderer.viewport(), Some(Point2::from(renderer.surface.window.get_cursor_pos())), false, ); } WindowEvent::CursorEnter(false) => { app.input_processor .mouse_pixel_position(renderer.viewport(), None, false); } WindowEvent::MouseButton(button, Action::Press, _) => { app.click(map_glfw_button(button)); } WindowEvent::MouseButton(_, Action::Release, _) => {} WindowEvent::MouseButton(_, Action::Repeat, _) => {} WindowEvent::Scroll(..) => { } WindowEvent::FramebufferSize(..) | WindowEvent::ContentScale(..) => { renderer .set_viewport(map_glfw_viewport(&renderer.surface.window)) .unwrap(); } WindowEvent::Focus(has_focus) => { app.input_processor.key_focus(has_focus); } WindowEvent::Pos(..) => {} WindowEvent::Size(..) => {} WindowEvent::Refresh => {} WindowEvent::Iconify(_) => {} WindowEvent::FileDrop(_) => {} WindowEvent::Maximize(_) => {} } } } Ok(()) } pub fn map_glfw_viewport(window: &Window
.get_framebuffer_size()).map(|s| s as u32), } } pub fn map_glfw_button(button: glfw::MouseButton) -> usize { use glfw::MouseButton::*; match button { Button1 => 0, Button2 => 1, Button3 => 2, Button4 => 3, Button5 => 4, Button6 => 5, Button7 => 6, Button8 => 7, } } pub fn map_glfw_key(key: glfw::Key) -> Option<all_is_cubes::apps::Key> { use all_is_cubes::apps::Key as A; use glfw::Key as G; Some(match key { G::Space => A::Character(' '), G::Apostrophe => A::Character('\''), G::Comma => A::Character(','), G::Minus => A::Character('-'), G::Period => A::Character('.'), G::Slash => A::Character('/'), G::Num0 => A::Character('0'), G::Num1 => A::Character('1'), G::Num2 => A::Character('2'), G::Num3 => A::Character('3'), G::Num4 => A::Character('4'), G::Num5 => A::Character('5'), G::Num6 => A::Character('6'), G::Num7 => A::Character('7'), G::Num8 => A::Character('8'), G::Num9 => A::Character('9'), G::Semicolon => A::Character(';'), G::Equal => A::Character('='), G::A => A::Character('a'), G::B => A::Character('b'), G::C => A::Character('c'), G::D => A::Character('d'), G::E => A::Character('e'), G::F => A::Character('f'), G::G => A::Character('g'), G::H => A::Character('h'), G::I => A::Character('i'), G::J => A::Character('j'), G::K => A::Character('k'), G::L => A::Character('l'), G::M => A::Character('m'), G::N => A::Character('n'), G::O => A::Character('o'), G::P => A::Character('p'), G::Q => A::Character('q'), G::R => A::Character('r'), G::S => A::Character('s'), G::T => A::Character('t'), G::U => A::Character('u'), G::V => A::Character('v'), G::W => A::Character('w'), G::X => A::Character('x'), G::Y => A::Character('y'), G::Z => A::Character('z'), G::LeftBracket => A::Character('['), G::Backslash => A::Character('\\'), G::RightBracket => A::Character(']'), G::GraveAccent => A::Character('`'), G::World1 => return None, G::World2 => return None, G::Escape => return None, G::Enter => A::Character('\r'), G::Tab => A::Character('\t'), G::Backspace => A::Character('\u{8}'), 
G::Insert => return None, G::Delete => return None, G::Right => A::Right, G::Left => A::Left, G::Down => A::Down, G::Up => A::Up, G::PageUp => return None, G::PageDown => return None, G::Home => return None, G::End => return None, G::CapsLock => return None, G::ScrollLock => return None, G::NumLock => return None, G::PrintScreen => return None, G::Pause => return None, G::F1 => return None, G::F2 => return None, G::F3 => return None, G::F4 => return None, G::F5 => return None, G::F6 => return None, G::F7 => return None, G::F8 => return None, G::F9 => return None, G::F10 => return None, G::F11 => return None, G::F12 => return None, G::F13 => return None, G::F14 => return None, G::F15 => return None, G::F16 => return None, G::F17 => return None, G::F18 => return None, G::F19 => return None, G::F20 => return None, G::F21 => return None, G::F22 => return None, G::F23 => return None, G::F24 => return None, G::F25 => return None, G::Kp0 => A::Character('0'), G::Kp1 => A::Character('1'), G::Kp2 => A::Character('2'), G::Kp3 => A::Character('3'), G::Kp4 => A::Character('4'), G::Kp5 => A::Character('5'), G::Kp6 => A::Character('6'), G::Kp7 => A::Character('7'), G::Kp8 => A::Character('8'), G::Kp9 => A::Character('9'), G::KpDecimal => A::Character('.'), G::KpDivide => A::Character('/'), G::KpMultiply => A::Character('*'), G::KpSubtract => A::Character('-'), G::KpAdd => A::Character('+'), G::KpEnter => A::Character('\r'), G::KpEqual => A::Character('='), G::LeftShift => return None, G::LeftControl => return None, G::LeftAlt => return None, G::LeftSuper => return None, G::RightShift => return None, G::RightControl => return None, G::RightAlt => return None, G::RightSuper => return None, G::Menu => return None, G::Unknown => return None, }) }
) -> Viewport { Viewport { nominal_size: Vector2::from(window.get_size()).map(|s| s.into()), framebuffer_size: Vector2::from(window
function_block-random_span
[]
Rust
src/async_read.rs
Marwes/rust-partial-io
a308f83766909b5868f15e04928f08cbb8d96902
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ use std::cmp; use std::fmt; use std::io::{self, Read, Write}; use futures::{task, Poll}; use tokio_io::{AsyncRead, AsyncWrite}; use crate::{make_ops, PartialOp}; pub struct PartialAsyncRead<R> { inner: R, ops: Box<dyn Iterator<Item = PartialOp> + Send>, } impl<R> PartialAsyncRead<R> where R: AsyncRead, { pub fn new<I>(inner: R, iter: I) -> Self where I: IntoIterator<Item = PartialOp> + 'static, I::IntoIter: Send, { PartialAsyncRead { inner, ops: make_ops(iter), } } pub fn set_ops<I>(&mut self, iter: I) -> &mut Self where I: IntoIterator<Item = PartialOp> + 'static, I::IntoIter: Send, { self.ops = make_ops(iter); self } pub fn get_ref(&self) -> &R { &self.inner } pub fn get_mut(&mut self) -> &mut R { &mut self.inner } pub fn into_inner(self) -> R { self.inner } } impl<R> Read for PartialAsyncRead<R> where R: AsyncRead, { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { match self.ops.next() { Some(PartialOp::Limited(n)) => { let len = cmp::min(n, buf.len()); self.inner.read(&mut buf[..len]) } Some(PartialOp::Err(err)) => { if err == io::ErrorKind::WouldBlock { task::park().unpark(); } Err(io::Error::new( err, "error during read, generated by partial-io", )) } Some(PartialOp::Unlimited) | None => self.inner.read(buf), } } } impl<R> AsyncRead for PartialAsyncRead<R> where R: AsyncRead {} impl<R> Write for PartialAsyncRead<R> where R: AsyncRead + Write, { #[inline] fn write(&mut self, buf: &[u8]) -> io::Result<usize> { self.inner.write(buf) } #[inline] fn flush(&mut self) -> io::Result<()> { self.inner.flush() } } impl<R> AsyncWrite for PartialAsyncRead<R> where R: AsyncRead + AsyncWrite, { #[inline] fn shutdown(&mut self) -> Poll<(), io::Error> { self.inner.shutdown() } } impl<R> fmt::Debug for PartialAsyncRead<R> where R: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter<'_>) 
-> fmt::Result { f.debug_struct("PartialAsyncRead") .field("inner", &self.inner) .finish() } } #[cfg(test)] mod tests { use super::*; use std::fs::File; use crate::tests::assert_send; #[test] fn test_sendable() { assert_send::<PartialAsyncRead<File>>(); } }
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ use std::cmp; use std::fmt; use std::io::{self, Read, Write}; use futures::{task, Poll}; use tokio_io::{AsyncRead, AsyncWrite}; use crate::{make_ops, PartialOp}; pub struct PartialAsyncRead<R> { inner: R, ops: Box<dyn Iterator<Item = PartialOp> + Send>, } impl<R> PartialAsyncRead<R> where R: AsyncRead, { pub fn new<I>(inner: R, iter: I) -> Self where I: IntoIterator<Item = PartialOp> + 'static, I::IntoIter: Send, { PartialAsyncRead { inner, ops: make_ops(iter), } } pub fn set_ops<I>(&mut self, iter: I) -> &mut Self where I: IntoIterator<Item = PartialOp> + 'static, I::IntoIter: Send, { self.ops = make_ops(iter); self } pub fn get_ref(&self) -> &R { &self.inner } pub fn get_mut(&mut self) -> &mut R { &mut self.inner } pub fn into_inner(self) -> R { self.inner } } impl<R> Read for PartialAsyncRead<R> where R: AsyncRead, { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { match self.ops.next() { Some(PartialOp::Limited(n)) => { let len = cmp::min(n, buf.len()); self.inner.read(&mut buf[..len]) } Some(PartialOp::Err(err)) => { if err == io::ErrorKind::WouldBlock { task::park().unpark(); } Err(io::Error::new( err, "error during read, generated by partial-io", )) } Some(PartialOp::Unlimited) | None => self.inner.read(buf), } } } impl<R> AsyncRead for PartialAsyncRead<R> where R: AsyncRead {} impl<R> Write for PartialAsyncRead<R> where R: AsyncRead + Write, { #[inline] fn write(&mut self, buf: &[u8]) -> io::Result<usize> { self.inner.write(buf) } #[inline] fn f
s::assert_send; #[test] fn test_sendable() { assert_send::<PartialAsyncRead<File>>(); } }
lush(&mut self) -> io::Result<()> { self.inner.flush() } } impl<R> AsyncWrite for PartialAsyncRead<R> where R: AsyncRead + AsyncWrite, { #[inline] fn shutdown(&mut self) -> Poll<(), io::Error> { self.inner.shutdown() } } impl<R> fmt::Debug for PartialAsyncRead<R> where R: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("PartialAsyncRead") .field("inner", &self.inner) .finish() } } #[cfg(test)] mod tests { use super::*; use std::fs::File; use crate::test
random
[ { "content": "#[inline]\n\nfn make_ops<I>(iter: I) -> Box<dyn Iterator<Item = PartialOp> + Send>\n\nwhere\n\n I: IntoIterator<Item = PartialOp> + 'static,\n\n I::IntoIter: Send,\n\n{\n\n Box::new(iter.into_iter().fuse())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n pub fn assert_send<S: Send>() {}\n\n}\n", "file_path": "src/lib.rs", "rank": 0, "score": 93074.43169699537 }, { "content": "/// Represents a way to generate `io::ErrorKind` instances.\n\n///\n\n/// See [the module level documentation](index.html) for more.\n\npub trait GenError: Clone + Default + Send {\n\n /// Optionally generate an `io::ErrorKind` instance.\n\n fn gen_error<G: Gen>(&mut self, g: &mut G) -> Option<io::ErrorKind>;\n\n}\n\n\n\n/// Generate an `ErrorKind::Interrupted` error 20% of the time.\n\n///\n\n/// See [the module level documentation](index.html) for more.\n\n#[derive(Clone, Debug, Default)]\n\npub struct GenInterrupted;\n\n\n\n/// Generate an `ErrorKind::WouldBlock` error 20% of the time.\n\n///\n\n/// See [the module level documentation](index.html) for more.\n\n#[derive(Clone, Debug, Default)]\n\npub struct GenWouldBlock;\n\n\n\n/// Generate `Interrupted` and `WouldBlock` errors 10% of the time each.\n\n///\n\n/// See [the module level documentation](index.html) for more.\n", "file_path": "src/quickcheck_types.rs", "rank": 1, "score": 64707.60619161865 }, { "content": "fn main() {\n\n test::buggy_write();\n\n // To run this test and see it fail, uncomment the next line. To fix the\n\n // bug, see `examples/buggy_write.rs`.\n\n //test::quickcheck_buggy_write();\n\n}\n\n\n\nmod test {\n\n //! 
Tests to demonstrate how to use partial-io to catch bugs in `buggy_write`.\n\n\n\n // * 'cargo test' doesn't support running tests inside examples.\n\n // * We'd like to make it possible to run this example to test it out.\n\n // * There's no way to programmatically collect and run tests in Rust.\n\n //\n\n // As a compromise, have tests marked 'pub' and private wrappers marked\n\n // 'test'.\n\n\n\n use std::io::{self, Write};\n\n\n\n use lazy_static::lazy_static;\n", "file_path": "examples/buggy_write.rs", "rank": 2, "score": 50747.35597601082 }, { "content": "/*\n\n * Copyright (c) Facebook, Inc. and its affiliates.\n\n *\n\n * This source code is licensed under the MIT license found in the\n\n * LICENSE file in the root directory of this source tree.\n\n */\n\n\n\n//! This module contains a reader wrapper that breaks its inputs up according to\n\n//! a provided iterator.\n\n\n\nuse std::cmp;\n\nuse std::fmt;\n\nuse std::io::{self, Read, Write};\n\n\n\nuse crate::{make_ops, PartialOp};\n\n\n\n/// A reader wrapper that breaks inner `Read` instances up according to the\n\n/// provided iterator.\n\n///\n\n/// # Examples\n", "file_path": "src/read.rs", "rank": 3, "score": 23328.168108046288 }, { "content": "}\n\n\n\nimpl<R> Read for PartialRead<R>\n\nwhere\n\n R: Read,\n\n{\n\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n\n match self.ops.next() {\n\n Some(PartialOp::Limited(n)) => {\n\n let len = cmp::min(n, buf.len());\n\n self.inner.read(&mut buf[..len])\n\n }\n\n Some(PartialOp::Err(err)) => Err(io::Error::new(\n\n err,\n\n \"error during read, generated by partial-io\",\n\n )),\n\n Some(PartialOp::Unlimited) | None => self.inner.read(buf),\n\n }\n\n }\n\n}\n", "file_path": "src/read.rs", "rank": 4, "score": 23323.381896539573 }, { "content": "impl<R> PartialRead<R>\n\nwhere\n\n R: Read,\n\n{\n\n /// Creates a new `PartialRead` wrapper over the reader with the specified `PartialOp`s.\n\n pub fn new<I>(inner: R, iter: I) -> Self\n\n where\n\n I: 
IntoIterator<Item = PartialOp> + 'static,\n\n I::IntoIter: Send,\n\n {\n\n PartialRead {\n\n inner,\n\n ops: make_ops(iter),\n\n }\n\n }\n\n\n\n /// Sets the `PartialOp`s for this reader.\n\n pub fn set_ops<I>(&mut self, iter: I) -> &mut Self\n\n where\n\n I: IntoIterator<Item = PartialOp> + 'static,\n", "file_path": "src/read.rs", "rank": 5, "score": 23318.959010891 }, { "content": "\n\n// Forwarding impl to support duplex structs.\n\nimpl<R> Write for PartialRead<R>\n\nwhere\n\n R: Read + Write,\n\n{\n\n #[inline]\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n self.inner.write(buf)\n\n }\n\n\n\n #[inline]\n\n fn flush(&mut self) -> io::Result<()> {\n\n self.inner.flush()\n\n }\n\n}\n\n\n\nimpl<R> fmt::Debug for PartialRead<R>\n\nwhere\n\n R: fmt::Debug,\n", "file_path": "src/read.rs", "rank": 6, "score": 23313.90208627717 }, { "content": "///\n\n/// ```rust\n\n/// use std::io::{Cursor, Read};\n\n///\n\n/// use partial_io::{PartialOp, PartialRead};\n\n///\n\n/// let reader = Cursor::new(vec![1, 2, 3, 4]);\n\n/// let iter = ::std::iter::repeat(PartialOp::Limited(1));\n\n/// let mut partial_reader = PartialRead::new(reader, iter);\n\n/// let mut out = vec![0; 256];\n\n///\n\n/// let size = partial_reader.read(&mut out).unwrap();\n\n/// assert_eq!(size, 1);\n\n/// assert_eq!(&out[..1], &[1]);\n\n/// ```\n\npub struct PartialRead<R> {\n\n inner: R,\n\n ops: Box<dyn Iterator<Item = PartialOp> + Send>,\n\n}\n\n\n", "file_path": "src/read.rs", "rank": 7, "score": 23313.483955824297 }, { "content": " I::IntoIter: Send,\n\n {\n\n self.ops = make_ops(iter);\n\n self\n\n }\n\n\n\n /// Acquires a reference to the underlying reader.\n\n pub fn get_ref(&self) -> &R {\n\n &self.inner\n\n }\n\n\n\n /// Acquires a mutable reference to the underlying reader.\n\n pub fn get_mut(&mut self) -> &mut R {\n\n &mut self.inner\n\n }\n\n\n\n /// Consumes this wrapper, returning the underlying reader.\n\n pub fn into_inner(self) -> R {\n\n self.inner\n\n }\n", "file_path": 
"src/read.rs", "rank": 8, "score": 23311.697754737856 }, { "content": "{\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"PartialRead\")\n\n .field(\"inner\", &self.inner)\n\n .finish()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use std::fs::File;\n\n\n\n use crate::tests::assert_send;\n\n\n\n #[test]\n\n fn test_sendable() {\n\n assert_send::<PartialRead<File>>();\n\n }\n\n}\n", "file_path": "src/read.rs", "rank": 9, "score": 23310.177543738537 }, { "content": "/*\n\n * Copyright (c) Facebook, Inc. and its affiliates.\n\n *\n\n * This source code is licensed under the MIT license found in the\n\n * LICENSE file in the root directory of this source tree.\n\n */\n\n\n\n//! This module contains a writer wrapper that breaks writes up according to a\n\n//! provided iterator.\n\n\n\nuse std::cmp;\n\nuse std::fmt;\n\nuse std::io::{self, Read, Write};\n\n\n\nuse crate::{make_ops, PartialOp};\n\n\n\n/// A writer wrapper that breaks inner `Write` instances up according to the\n\n/// provided iterator.\n\n///\n\n/// # Examples\n", "file_path": "src/write.rs", "rank": 19, "score": 22034.80377735808 }, { "content": " self.inner\n\n }\n\n}\n\n\n\nimpl<W> Write for PartialWrite<W>\n\nwhere\n\n W: Write,\n\n{\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n match self.ops.next() {\n\n Some(PartialOp::Limited(n)) => {\n\n let len = cmp::min(n, buf.len());\n\n self.inner.write(&buf[..len])\n\n }\n\n Some(PartialOp::Err(err)) => Err(io::Error::new(\n\n err,\n\n \"error during write, generated by partial-io\",\n\n )),\n\n Some(PartialOp::Unlimited) | None => self.inner.write(buf),\n\n }\n", "file_path": "src/write.rs", "rank": 20, "score": 22026.135563546748 }, { "content": "impl<W> PartialWrite<W>\n\nwhere\n\n W: Write,\n\n{\n\n /// Creates a new `PartialWrite` wrapper over the writer with the specified `PartialOp`s.\n\n pub fn new<I>(inner: W, iter: I) -> Self\n\n where\n\n I: IntoIterator<Item = 
PartialOp> + 'static,\n\n I::IntoIter: Send,\n\n {\n\n PartialWrite {\n\n inner,\n\n // Use fuse here so that we don't keep calling the inner iterator\n\n // once it's returned None.\n\n ops: make_ops(iter),\n\n }\n\n }\n\n\n\n /// Sets the `PartialOp`s for this writer.\n\n pub fn set_ops<I>(&mut self, iter: I) -> &mut Self\n", "file_path": "src/write.rs", "rank": 21, "score": 22024.797662121946 }, { "content": " }\n\n\n\n fn flush(&mut self) -> io::Result<()> {\n\n match self.ops.next() {\n\n Some(PartialOp::Err(err)) => Err(io::Error::new(\n\n err,\n\n \"error during flush, generated by partial-io\",\n\n )),\n\n _ => self.inner.flush(),\n\n }\n\n }\n\n}\n\n\n\n// Forwarding impl to support duplex structs.\n\nimpl<W> Read for PartialWrite<W>\n\nwhere\n\n W: Read + Write,\n\n{\n\n #[inline]\n\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n", "file_path": "src/write.rs", "rank": 22, "score": 22024.309025823743 }, { "content": " self.inner.read(buf)\n\n }\n\n}\n\n\n\nimpl<W> fmt::Debug for PartialWrite<W>\n\nwhere\n\n W: fmt::Debug,\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"PartialWrite\")\n\n .field(\"inner\", &self.inner)\n\n .finish()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use std::fs::File;\n\n\n\n use crate::tests::assert_send;\n\n\n\n #[test]\n\n fn test_sendable() {\n\n assert_send::<PartialWrite<File>>();\n\n }\n\n}\n", "file_path": "src/write.rs", "rank": 23, "score": 22019.34399011144 }, { "content": " where\n\n I: IntoIterator<Item = PartialOp> + 'static,\n\n I::IntoIter: Send,\n\n {\n\n self.ops = make_ops(iter);\n\n self\n\n }\n\n\n\n /// Acquires a reference to the underlying writer.\n\n pub fn get_ref(&self) -> &W {\n\n &self.inner\n\n }\n\n\n\n /// Acquires a mutable reference to the underlying writer.\n\n pub fn get_mut(&mut self) -> &mut W {\n\n &mut self.inner\n\n }\n\n\n\n /// Consumes this wrapper, returning the underlying writer.\n\n pub fn 
into_inner(self) -> W {\n", "file_path": "src/write.rs", "rank": 24, "score": 22017.23942014765 }, { "content": "///\n\n/// ```rust\n\n/// use std::io::Write;\n\n///\n\n/// use partial_io::{PartialOp, PartialWrite};\n\n///\n\n/// let writer = Vec::new();\n\n/// let iter = ::std::iter::repeat(PartialOp::Limited(1));\n\n/// let mut partial_writer = PartialWrite::new(writer, iter);\n\n/// let in_data = vec![1, 2, 3, 4];\n\n///\n\n/// let size = partial_writer.write(&in_data).unwrap();\n\n/// assert_eq!(size, 1);\n\n/// assert_eq!(&partial_writer.get_ref()[..], &[1]);\n\n/// ```\n\npub struct PartialWrite<W> {\n\n inner: W,\n\n ops: Box<dyn Iterator<Item = PartialOp> + Send>,\n\n}\n\n\n", "file_path": "src/write.rs", "rank": 25, "score": 22016.106951671387 }, { "content": "/*\n\n * Copyright (c) Facebook, Inc. and its affiliates.\n\n *\n\n * This source code is licensed under the MIT license found in the\n\n * LICENSE file in the root directory of this source tree.\n\n */\n\n\n\n//! An example of a buggy buffered writer that does not handle\n\n//! `io::ErrorKind::Interrupted` properly.\n\n\n\n#![deny(warnings)]\n\n#![allow(dead_code)]\n\n\n\nuse std::io::{self, Write};\n\n\n\n/// A buffered writer whose `write` method is faulty.\n\npub struct BuggyWrite<W> {\n\n inner: W,\n\n buf: Vec<u8>,\n\n offset: usize,\n", "file_path": "examples/buggy_write.rs", "rank": 26, "score": 20838.585359234 }, { "content": "/*\n\n * Copyright (c) Facebook, Inc. and its affiliates.\n\n *\n\n * This source code is licensed under the MIT license found in the\n\n * LICENSE file in the root directory of this source tree.\n\n */\n\n\n\n//! This module contains an `AsyncWrite` wrapper that breaks writes up\n\n//! according to a provided iterator.\n\n//!\n\n//! This is separate from `PartialWrite` because on `WouldBlock` errors, it\n\n//! 
causes `futures` to try writing or flushing again.\n\n\n\nuse std::cmp;\n\nuse std::fmt;\n\nuse std::io::{self, Read, Write};\n\n\n\nuse futures::{task, Poll};\n\nuse tokio_io::{AsyncRead, AsyncWrite};\n\n\n", "file_path": "src/async_write.rs", "rank": 27, "score": 20837.817231390552 }, { "content": "{\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n match self.ops.next() {\n\n Some(PartialOp::Limited(n)) => {\n\n let len = cmp::min(n, buf.len());\n\n self.inner.write(&buf[..len])\n\n }\n\n Some(PartialOp::Err(err)) => {\n\n if err == io::ErrorKind::WouldBlock {\n\n // Make sure this task is rechecked.\n\n task::park().unpark();\n\n }\n\n Err(io::Error::new(\n\n err,\n\n \"error during write, generated by partial-io\",\n\n ))\n\n }\n\n Some(PartialOp::Unlimited) | None => self.inner.write(buf),\n\n }\n\n }\n", "file_path": "src/async_write.rs", "rank": 28, "score": 20829.66825632136 }, { "content": "impl<W> PartialAsyncWrite<W>\n\nwhere\n\n W: AsyncWrite,\n\n{\n\n /// Creates a new `PartialAsyncWrite` wrapper over the writer with the specified `PartialOp`s.\n\n pub fn new<I>(inner: W, iter: I) -> Self\n\n where\n\n I: IntoIterator<Item = PartialOp> + 'static,\n\n I::IntoIter: Send,\n\n {\n\n PartialAsyncWrite {\n\n inner,\n\n ops: make_ops(iter),\n\n }\n\n }\n\n\n\n /// Sets the `PartialOp`s for this reader.\n\n pub fn set_ops<I>(&mut self, iter: I) -> &mut Self\n\n where\n\n I: IntoIterator<Item = PartialOp> + 'static,\n", "file_path": "src/async_write.rs", "rank": 29, "score": 20825.575073087115 }, { "content": " W: AsyncWrite,\n\n{\n\n #[inline]\n\n fn shutdown(&mut self) -> Poll<(), io::Error> {\n\n self.inner.shutdown()\n\n }\n\n}\n\n\n\n// Forwarding impls to support duplex structs.\n\nimpl<W> Read for PartialAsyncWrite<W>\n\nwhere\n\n W: AsyncWrite + Read,\n\n{\n\n #[inline]\n\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n\n self.inner.read(buf)\n\n }\n\n}\n\n\n\nimpl<W> AsyncRead for PartialAsyncWrite<W> where W: AsyncRead + 
AsyncWrite {}\n", "file_path": "src/async_write.rs", "rank": 30, "score": 20824.467113576382 }, { "content": "/// let writer = Cursor::new(Vec::new());\n\n/// let iter = vec![PartialOp::Err(io::ErrorKind::WouldBlock), PartialOp::Limited(2)];\n\n/// let partial_writer = PartialAsyncWrite::new(writer, iter);\n\n/// let in_data = vec![1, 2, 3, 4];\n\n///\n\n/// let mut core = Core::new().unwrap();\n\n///\n\n/// let write_fut = write_all(partial_writer, in_data);\n\n///\n\n/// let (partial_writer, _in_data) = core.run(write_fut).unwrap();\n\n/// let cursor = partial_writer.into_inner();\n\n/// let out = cursor.into_inner();\n\n/// assert_eq!(&out, &[1, 2, 3, 4]);\n\n/// }\n\n/// ```\n\npub struct PartialAsyncWrite<W> {\n\n inner: W,\n\n ops: Box<dyn Iterator<Item = PartialOp> + Send>,\n\n}\n\n\n", "file_path": "src/async_write.rs", "rank": 31, "score": 20821.168003260707 }, { "content": " I::IntoIter: Send,\n\n {\n\n self.ops = make_ops(iter);\n\n self\n\n }\n\n\n\n /// Acquires a mutable reference to the underlying writer.\n\n pub fn get_mut(&mut self) -> &mut W {\n\n &mut self.inner\n\n }\n\n\n\n /// Consumes this wrapper, returning the underlying writer.\n\n pub fn into_inner(self) -> W {\n\n self.inner\n\n }\n\n}\n\n\n\nimpl<W> Write for PartialAsyncWrite<W>\n\nwhere\n\n W: Write,\n", "file_path": "src/async_write.rs", "rank": 32, "score": 20821.037361785387 }, { "content": "}\n\n\n\nimpl<W: Write> BuggyWrite<W> {\n\n pub fn new(inner: W) -> Self {\n\n BuggyWrite {\n\n inner,\n\n buf: Vec::with_capacity(256),\n\n offset: 0,\n\n }\n\n }\n\n\n\n fn write_from_offset(&mut self) -> io::Result<()> {\n\n while self.offset < self.buf.len() {\n\n self.offset += self.inner.write(&self.buf[self.offset..])?;\n\n }\n\n Ok(())\n\n }\n\n\n\n fn reset_buffer(&mut self) {\n\n unsafe {\n", "file_path": "examples/buggy_write.rs", "rank": 33, "score": 20820.591401038677 }, { "content": " self.buf.set_len(0);\n\n }\n\n self.offset = 0;\n\n }\n\n\n\n pub fn into_inner(self) -> W {\n\n 
self.inner\n\n }\n\n}\n\n\n\nimpl<W: Write> Write for BuggyWrite<W> {\n\n fn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n // Write out anything that is currently in the internal buffer.\n\n if self.offset < self.buf.len() {\n\n self.write_from_offset()?;\n\n }\n\n\n\n // Reset the internal buffer.\n\n self.reset_buffer();\n\n\n", "file_path": "examples/buggy_write.rs", "rank": 34, "score": 20820.382139447065 }, { "content": "\n\nimpl<W> fmt::Debug for PartialAsyncWrite<W>\n\nwhere\n\n W: fmt::Debug,\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"PartialAsyncWrite\")\n\n .field(\"inner\", &self.inner)\n\n .finish()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use std::fs::File;\n\n\n\n use crate::tests::assert_send;\n\n\n\n #[test]\n\n fn test_sendable() {\n\n assert_send::<PartialAsyncWrite<File>>();\n\n }\n\n}\n", "file_path": "src/async_write.rs", "rank": 35, "score": 20819.424811174642 }, { "content": "\n\n fn flush(&mut self) -> io::Result<()> {\n\n match self.ops.next() {\n\n Some(PartialOp::Err(err)) => {\n\n if err == io::ErrorKind::WouldBlock {\n\n // Make sure this task is rechecked.\n\n task::park().unpark();\n\n }\n\n Err(io::Error::new(\n\n err,\n\n \"error during flush, generated by partial-io\",\n\n ))\n\n }\n\n _ => self.inner.flush(),\n\n }\n\n }\n\n}\n\n\n\nimpl<W> AsyncWrite for PartialAsyncWrite<W>\n\nwhere\n", "file_path": "src/async_write.rs", "rank": 36, "score": 20819.388564696004 }, { "content": " use quickcheck::{quickcheck, TestResult};\n\n\n\n use partial_io::{GenInterrupted, PartialOp, PartialWithErrors, PartialWrite};\n\n\n\n use super::*;\n\n\n\n lazy_static! {\n\n // These strings have been chosen to be around the default size for\n\n // quickcheck (100). 
With significantly smaller or larger inputs, the\n\n // results might not be as good.\n\n static ref HELLO_STR: Vec<u8> = \"Hello\".repeat(50).into_bytes();\n\n static ref WORLD_STR: Vec<u8> = \"World\".repeat(40).into_bytes();\n\n }\n\n\n\n #[test]\n\n fn test_buggy_write() {\n\n buggy_write();\n\n }\n\n\n\n pub fn buggy_write() {\n", "file_path": "examples/buggy_write.rs", "rank": 37, "score": 20818.614787225557 }, { "content": " I: IntoIterator<Item = PartialOp> + 'static,\n\n I::IntoIter: Send,\n\n {\n\n let inner = Vec::new();\n\n let partial_writer = PartialWrite::new(inner, partial_iter);\n\n\n\n let mut buggy_write = BuggyWrite::new(partial_writer);\n\n\n\n // Try writing a couple of things into it.\n\n let hello_res = buggy_write.write(&HELLO_STR);\n\n let world_res = buggy_write.write(&WORLD_STR);\n\n\n\n // Flush the contents to make sure nothing remains in the internal buffer.\n\n let flush_res = buggy_write.flush();\n\n\n\n let inner = buggy_write.into_inner().into_inner();\n\n\n\n (hello_res, world_res, flush_res, inner)\n\n }\n\n\n\n}\n", "file_path": "examples/buggy_write.rs", "rank": 38, "score": 20817.8133937859 }, { "content": " // Read from the provided buffer.\n\n self.buf.extend_from_slice(buf);\n\n\n\n // BUG: it is incorrect to call write immediately because if it fails,\n\n // we'd have read some bytes from the buffer without telling the caller\n\n // how many.\n\n // XXX: To fix the bug, comment out the next line.\n\n self.write_from_offset()?;\n\n Ok(self.buf.len())\n\n }\n\n\n\n fn flush(&mut self) -> io::Result<()> {\n\n // Flush out any data that can be flushed out.\n\n while self.offset < self.buf.len() {\n\n self.write_from_offset()?;\n\n }\n\n\n\n // If that succeeded, reset the internal buffer.\n\n self.reset_buffer();\n\n\n\n // Flush the inner writer\n\n self.inner.flush()\n\n }\n\n}\n\n\n", "file_path": "examples/buggy_write.rs", "rank": 39, "score": 20817.420553852364 }, { "content": "use crate::{make_ops, 
PartialOp};\n\n\n\n/// A wrapper that breaks inner `AsyncWrite` instances up according to the\n\n/// provided iterator.\n\n///\n\n/// Available with the `tokio` feature.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// use std::io::{self, Cursor};\n\n///\n\n/// fn main() {\n\n/// // Note that this test doesn't demonstrate a limited write because\n\n/// // tokio-io doesn't have a combinator for that, just write_all.\n\n/// use tokio_core::reactor::Core;\n\n/// use tokio_io::io::write_all;\n\n///\n\n/// use partial_io::{PartialAsyncWrite, PartialOp};\n\n///\n", "file_path": "src/async_write.rs", "rank": 40, "score": 20814.803312119973 }, { "content": " let partial = vec![\n\n PartialOp::Err(io::ErrorKind::Interrupted),\n\n PartialOp::Unlimited,\n\n ];\n\n let (hello_res, world_res, flush_res, inner) = buggy_write_internal(partial);\n\n assert_eq!(hello_res.unwrap_err().kind(), io::ErrorKind::Interrupted);\n\n assert_eq!(world_res.unwrap(), 5 * 40);\n\n assert_eq!(flush_res.unwrap(), ());\n\n\n\n // Note that inner has both \"Hello\" and \"World\" in it, even though according\n\n // to what the API returned it should only have had \"World\" in it.\n\n let mut expected = Vec::new();\n\n expected.extend_from_slice(&HELLO_STR);\n\n expected.extend_from_slice(&WORLD_STR);\n\n assert_eq!(inner, expected);\n\n }\n\n\n\n /// Test that quickcheck catches buggy writes.\n\n ///\n\n /// To run this test and see it fail, remove the #[ignore] annotation. 
To\n", "file_path": "examples/buggy_write.rs", "rank": 41, "score": 20814.750503577954 }, { "content": " /// fix the bug, see `examples/buggy_write.rs`.\n\n #[test]\n\n #[ignore]\n\n fn test_quickcheck_buggy_write() {\n\n quickcheck_buggy_write();\n\n }\n\n\n\n pub fn quickcheck_buggy_write() {\n\n quickcheck(quickcheck_buggy_write2 as fn(PartialWithErrors<GenInterrupted>) -> TestResult);\n\n }\n\n\n\n fn quickcheck_buggy_write2(partial: PartialWithErrors<GenInterrupted>) -> TestResult {\n\n let (hello_res, world_res, flush_res, inner) = buggy_write_internal(partial);\n\n // If flush_res failed then we can't really do anything since we don't know\n\n // how much was written internally. Otherwise hello_res and world_res should\n\n // work.\n\n if flush_res.is_err() {\n\n return TestResult::discard();\n\n }\n\n\n", "file_path": "examples/buggy_write.rs", "rank": 42, "score": 20812.554037632406 }, { "content": " let mut expected = Vec::new();\n\n if hello_res.is_ok() {\n\n expected.extend_from_slice(&HELLO_STR);\n\n }\n\n if world_res.is_ok() {\n\n expected.extend_from_slice(&WORLD_STR);\n\n }\n\n assert_eq!(inner, expected);\n\n TestResult::passed()\n\n }\n\n\n\n fn buggy_write_internal<I>(\n\n partial_iter: I,\n\n ) -> (\n\n io::Result<usize>,\n\n io::Result<usize>,\n\n io::Result<()>,\n\n Vec<u8>,\n\n )\n\n where\n", "file_path": "examples/buggy_write.rs", "rank": 43, "score": 20812.459856592086 }, { "content": "## Scope\n\n\n\nThis Code of Conduct applies within all project spaces, and it also applies when\n\nan individual is representing the project or its community in public spaces.\n\nExamples of representing a project or community include using an official\n\nproject e-mail address, posting via an official social media account, or acting\n\nas an appointed representative at an online or offline event. 
Representation of\n\na project may be further defined and clarified by project maintainers.\n\n\n\n## Enforcement\n\n\n\nInstances of abusive, harassing, or otherwise unacceptable behavior may be\n\nreported by contacting the project team at <[email protected]>. All\n\ncomplaints will be reviewed and investigated and will result in a response that\n\nis deemed necessary and appropriate to the circumstances. The project team is\n\nobligated to maintain confidentiality with regard to the reporter of an incident.\n\nFurther details of specific enforcement policies may be posted separately.\n\n\n\nProject maintainers who do not follow or enforce the Code of Conduct in good\n\nfaith may face temporary or permanent repercussions as determined by other\n\nmembers of the project's leadership.\n\n\n\n## Attribution\n\n\n\nThis Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,\n\navailable at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html\n\n\n\n[homepage]: https://www.contributor-covenant.org\n\n\n\nFor answers to common questions about this code of conduct, see\n\nhttps://www.contributor-covenant.org/faq\n\n\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 44, "score": 14164.602640097872 }, { "content": "# Code of Conduct\n\n\n\n## Our Pledge\n\n\n\nIn the interest of fostering an open and welcoming environment, we as\n\ncontributors and maintainers pledge to make participation in our project and\n\nour community a harassment-free experience for everyone, regardless of age, body\n\nsize, disability, ethnicity, sex characteristics, gender identity and expression,\n\nlevel of experience, education, socio-economic status, nationality, personal\n\nappearance, race, religion, or sexual identity and orientation.\n\n\n\n## Our Standards\n\n\n\nExamples of behavior that contributes to creating a positive environment\n\ninclude:\n\n\n\n* Using welcoming and inclusive language\n\n* Being respectful of differing viewpoints and 
experiences\n\n* Gracefully accepting constructive criticism\n\n* Focusing on what is best for the community\n\n* Showing empathy towards other community members\n\n\n\nExamples of unacceptable behavior by participants include:\n\n\n\n* The use of sexualized language or imagery and unwelcome sexual attention or\n\n advances\n\n* Trolling, insulting/derogatory comments, and personal or political attacks\n\n* Public or private harassment\n\n* Publishing others' private information, such as a physical or electronic\n\n address, without explicit permission\n\n* Other conduct which could reasonably be considered inappropriate in a\n\n professional setting\n\n\n\n## Our Responsibilities\n\n\n\nProject maintainers are responsible for clarifying the standards of acceptable\n\nbehavior and are expected to take appropriate and fair corrective action in\n\nresponse to any instances of unacceptable behavior.\n\n\n\nProject maintainers have the right and responsibility to remove, edit, or\n\nreject comments, commits, code, wiki edits, issues, and other contributions\n\nthat are not aligned to this Code of Conduct, or to ban temporarily or\n\npermanently any contributor for other behaviors that they deem inappropriate,\n\nthreatening, offensive, or harmful.\n\n\n", "file_path": "CODE_OF_CONDUCT.md", "rank": 45, "score": 14164.500792288667 }, { "content": "/*\n\n * Copyright (c) Facebook, Inc. and its affiliates.\n\n *\n\n * This source code is licensed under the MIT license found in the\n\n * LICENSE file in the root directory of this source tree.\n\n */\n\n\n\n//! `QuickCheck` support for partial IO operations.\n\n//!\n\n//! This module allows sequences of [`PartialOp`]s to be randomly generated. These\n\n//! sequences can then be fed into a [`PartialRead`], [`PartialWrite`],\n\n//! [`PartialAsyncRead`] or [`PartialAsyncWrite`].\n\n//!\n\n//! Once `quickcheck` has identified a failing test case, it will shrink the\n\n//! sequence of `PartialOp`s and find a minimal test case. 
This minimal case can\n\n//! then be used to reproduce the issue.\n\n//!\n\n//! To generate random sequences of operations, write a `quickcheck` test with a\n\n//! `PartialWithErrors<GE>` input, where `GE` implements [`GenError`]. Then pass\n\n//! the sequence in as the second argument to the partial wrapper.\n", "file_path": "src/quickcheck_types.rs", "rank": 46, "score": 27.402137164854405 }, { "content": "/*\n\n * Copyright (c) Facebook, Inc. and its affiliates.\n\n *\n\n * This source code is licensed under the MIT license found in the\n\n * LICENSE file in the root directory of this source tree.\n\n */\n\n\n\n#![deny(warnings)]\n\n// futures-rs 0.1.14 changed task::park() and unpark() to current() and\n\n// notify(), respectively. Keep using the old versions for compat.\n\n#![allow(deprecated)]\n\n\n\n//! Helpers for testing I/O behavior with partial, interrupted and blocking reads and writes.\n\n//!\n\n//! This library provides:\n\n//!\n\n//! * [`PartialRead`] and [`PartialWrite`], which wrap existing `Read` and\n\n//! `Write` implementations and allow specifying arbitrary behavior on the\n\n//! next `read`, `write` or `flush` call.\n\n//! * With the optional `tokio` feature, [`PartialAsyncRead`] and\n", "file_path": "src/lib.rs", "rank": 47, "score": 26.185039159345315 }, { "content": " GE: GenError + 'static,\n\n{\n\n fn arbitrary<G: Gen>(g: &mut G) -> Self {\n\n let size = g.size();\n\n // Generate a sequence of operations. 
A uniform distribution for this is\n\n // fine because the goal is to shake bugs out relatively effectively.\n\n let mut gen_error = GE::default();\n\n let items: Vec<_> = (0..size)\n\n .map(|_| {\n\n match gen_error.gen_error(g) {\n\n Some(err) => PartialOp::Err(err),\n\n // Don't generate 0 because for writers it can mean that\n\n // writes are no longer accepted.\n\n None => PartialOp::Limited(g.gen_range(1, size)),\n\n }\n\n })\n\n .collect();\n\n PartialWithErrors {\n\n items,\n\n _marker: PhantomData,\n", "file_path": "src/quickcheck_types.rs", "rank": 48, "score": 18.152757910260988 }, { "content": "# partial-io [![Build Status](https://travis-ci.org/facebookincubator/rust-partial-io.svg?branch=master)](https://travis-ci.org/facebookincubator/rust-partial-io) [![crates.io](https://img.shields.io/crates/v/partial-io.svg)](https://crates.io/crates/partial-io)\n\n\n\nA Rust utility library to test resilience of `Read` or `Write` wrappers.\n\n\n\nIf you'd like to help out, see [CONTRIBUTING.md](CONTRIBUTING.md).\n\n\n\n[Documentation (latest release)](https://docs.rs/partial-io)\n\n\n\n[Documentation (master)](https://facebookincubator.github.io/rust-partial-io)\n\n\n\n## Example\n\n\n\n```rust\n\nuse std::io::{self, Cursor, Read};\n\n\n\nuse partial_io::{PartialOp, PartialRead};\n\n\n\nlet data = b\"Hello, world!\".to_vec();\n\nlet cursor = Cursor::new(data); // Cursor<Vec<u8>> implements io::Read\n\nlet ops = vec![PartialOp::Limited(7), PartialOp::Err(io::ErrorKind::Interrupted)];\n\nlet mut partial_read = PartialRead::new(cursor, ops);\n\n\n\nlet mut out = vec![0; 256];\n\n\n\n// The first read will read 7 bytes.\n\nassert_eq!(partial_read.read(&mut out).unwrap(), 7);\n\nassert_eq!(&out[..7], b\"Hello, \");\n\n// The second read will fail with ErrorKind::Interrupted.\n\nassert_eq!(partial_read.read(&mut out[7..]).unwrap_err().kind(), io::ErrorKind::Interrupted);\n\n// The iterator has run out of operations, so it no longer truncates 
reads.\n\nassert_eq!(partial_read.read(&mut out[7..]).unwrap(), 6);\n\nassert_eq!(&out[..13], b\"Hello, world!\");\n\n```\n\n\n\n## Quick start\n\n\n\nAdd this to your `Cargo.toml`:\n\n\n\n```toml\n\n[dev-dependencies]\n\npartial-io = \"0.3\"\n\n```\n\n\n\nNow you can use `partial-io` in your tests.\n\n\n\n## Tokio integration\n\n\n\n`partial-io` can optionally integrate with the `tokio-io` library to provide\n\nwrappers for `AsyncRead` and `AsyncWrite` instances. Enable the `tokio` feature\n\nto use this:\n\n\n\n```toml\n\n[dev-dependencies]\n\npartial-io = { version = \"0.3\", features = [\"tokio\"] }\n\n```\n\n\n", "file_path": "README.md", "rank": 49, "score": 17.48445632134846 }, { "content": "//! ```rust\n\n//! use std::io::{self, Cursor, Read};\n\n//!\n\n//! use partial_io::{PartialOp, PartialRead};\n\n//!\n\n//! let data = b\"Hello, world!\".to_vec();\n\n//! let cursor = Cursor::new(data); // Cursor<Vec<u8>> implements io::Read\n\n//! let ops = vec![PartialOp::Limited(7), PartialOp::Err(io::ErrorKind::Interrupted)];\n\n//! let mut partial_read = PartialRead::new(cursor, ops);\n\n//!\n\n//! let mut out = vec![0; 256];\n\n//!\n\n//! // The first read will read 7 bytes.\n\n//! assert_eq!(partial_read.read(&mut out).unwrap(), 7);\n\n//! assert_eq!(&out[..7], b\"Hello, \");\n\n//! // The second read will fail with ErrorKind::Interrupted.\n\n//! assert_eq!(partial_read.read(&mut out[7..]).unwrap_err().kind(), io::ErrorKind::Interrupted);\n\n//! // The iterator has run out of operations, so it no longer truncates reads.\n\n//! assert_eq!(partial_read.read(&mut out[7..]).unwrap(), 6);\n\n//! 
assert_eq!(&out[..13], b\"Hello, world!\");\n", "file_path": "src/lib.rs", "rank": 50, "score": 16.866871088044793 }, { "content": " }\n\n }\n\n\n\n fn shrink(&self) -> Box<dyn Iterator<Item = Self>> {\n\n Box::new(self.items.clone().shrink().map(|items| PartialWithErrors {\n\n items,\n\n _marker: PhantomData,\n\n }))\n\n }\n\n}\n\n\n\nimpl Arbitrary for PartialOp {\n\n fn arbitrary<G: Gen>(_g: &mut G) -> Self {\n\n // We only use this for shrink, so we don't need to implement this.\n\n unimplemented!();\n\n }\n\n\n\n fn shrink(&self) -> Box<dyn Iterator<Item = Self>> {\n\n match *self {\n\n // Skip 0 because for writers it can mean that writes are no longer\n\n // accepted.\n\n PartialOp::Limited(n) => {\n\n Box::new(n.shrink().filter(|k| k != &0).map(PartialOp::Limited))\n\n }\n\n _ => empty_shrinker(),\n\n }\n\n }\n\n}\n", "file_path": "src/quickcheck_types.rs", "rank": 51, "score": 15.470563648544978 }, { "content": "/// clonable.\n\n#[derive(Clone, Debug)]\n\npub enum PartialOp {\n\n /// Limit the next IO operation to a certain number of bytes.\n\n ///\n\n /// The wrapper will call into the inner `Read` or `Write`\n\n /// instance. Depending on what the underlying operation does, this may\n\n /// return an error or a fewer number of bytes.\n\n Limited(usize),\n\n\n\n /// Do not limit the next IO operation.\n\n ///\n\n /// The wrapper will call into the inner `Read` or `Write`\n\n /// instance. Depending on what the underlying operation does, this may\n\n /// return an error or a limited number of bytes.\n\n Unlimited,\n\n\n\n /// Return an error instead of calling into the underlying operation.\n\n Err(io::ErrorKind),\n\n}\n\n\n\n#[inline]\n", "file_path": "src/lib.rs", "rank": 52, "score": 15.410645486737266 }, { "content": "## Coding Style\n\nKeep `use` statements sorted in the following order:\n\n\n\n1. `std` imports.\n\n2. Imports from external non-`std` crates.\n\n3. Imports from within this crate.\n\n4. `super` imports.\n\n5. 
`self` imports.\n\n\n\nWithin each subgroup, `use` statements should be in alphabetical order.\n\n\n\nUse [`rustfmt`](https://github.com/rust-lang-nursery/rustfmt/) to format your\n\ncode. This means:\n\n\n\n* 4 spaces for indentation rather than tabs\n\n* 80 character line length recommended, up to 100 characters if necessary.\n\n\n\nThis project uses the `rustfmt` currently based on nightly Rust\n\n(`rustfmt-nightly` as of June 2017). For instructions on how to install it, see\n\nthe\n\n[`rustfmt` README](https://github.com/rust-lang-nursery/rustfmt/#installation).\n\n\n\n## License\n\nBy contributing to rust-partial-io, you agree that your contributions will be\n\nlicensed under the LICENSE file in the root directory of this source tree.\n", "file_path": "CONTRIBUTING.md", "rank": 53, "score": 13.832886752447056 }, { "content": "mod write;\n\n\n\nuse std::io;\n\n\n\n#[cfg(feature = \"tokio\")]\n\npub use crate::async_read::PartialAsyncRead;\n\n#[cfg(feature = \"tokio\")]\n\npub use crate::async_write::PartialAsyncWrite;\n\n#[cfg(feature = \"quickcheck\")]\n\npub use crate::quickcheck_types::{\n\n GenError, GenInterrupted, GenInterruptedWouldBlock, GenNoErrors, GenWouldBlock,\n\n PartialWithErrors,\n\n};\n\npub use crate::read::PartialRead;\n\npub use crate::write::PartialWrite;\n\n\n\n/// What to do the next time an IO operation is performed.\n\n///\n\n/// This is not the same as `io::Result<Option<usize>>` because it contains\n\n/// `io::ErrorKind` instances, not `io::Error` instances. 
This allows it to be\n", "file_path": "src/lib.rs", "rank": 54, "score": 13.54246637007022 }, { "content": "/// Given a custom error generator, randomly generate a list of `PartialOp`s.\n\n#[derive(Clone, Debug)]\n\npub struct PartialWithErrors<GE> {\n\n items: Vec<PartialOp>,\n\n _marker: PhantomData<GE>,\n\n}\n\n\n\nimpl<GE> IntoIterator for PartialWithErrors<GE> {\n\n type Item = PartialOp;\n\n type IntoIter = ::std::vec::IntoIter<PartialOp>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.items.into_iter()\n\n }\n\n}\n\n\n\nimpl<GE> Deref for PartialWithErrors<GE> {\n\n type Target = [PartialOp];\n\n fn deref(&self) -> &Self::Target {\n\n self.items.deref()\n\n }\n\n}\n\n\n\n/// Represents a way to generate `io::ErrorKind` instances.\n\n///\n\n/// See [the module level documentation](index.html) for more.\n", "file_path": "src/quickcheck_types.rs", "rank": 55, "score": 12.445675945451093 }, { "content": "//!\n\n//! For a real-world example, see the [tests in `bzip2-rs`].\n\n//!\n\n//! [`PartialOp`]: ../struct.PartialOp.html\n\n//! [`PartialRead`]: ../struct.PartialRead.html\n\n//! [`PartialWrite`]: ../struct.PartialWrite.html\n\n//! [`PartialAsyncRead`]: ../struct.PartialAsyncRead.html\n\n//! [`PartialAsyncWrite`]: ../struct.PartialAsyncWrite.html\n\n//! [`GenError`]: trait.GenError.html\n\n//! [tests in `bzip2-rs`]: https://github.com/alexcrichton/bzip2-rs/blob/master/src/write.rs\n\n\n\nuse std::io;\n\nuse std::marker::PhantomData;\n\nuse std::ops::Deref;\n\n\n\nuse quickcheck::{empty_shrinker, Arbitrary, Gen};\n\nuse rand::{seq::SliceRandom, Rng};\n\n\n\nuse crate::PartialOp;\n\n\n", "file_path": "src/quickcheck_types.rs", "rank": 56, "score": 12.232745738210498 }, { "content": "impl_gen_error!(\n\n GenInterruptedWouldBlock,\n\n [io::ErrorKind::Interrupted, io::ErrorKind::WouldBlock]\n\n);\n\n\n\n/// Do not generate any errors. 
The only operations generated will be\n\n/// `PartialOp::Limited` instances.\n\n///\n\n/// See [the module level documentation](index.html) for more.\n\n#[derive(Clone, Debug, Default)]\n\npub struct GenNoErrors;\n\n\n\nimpl GenError for GenNoErrors {\n\n fn gen_error<G: Gen>(&mut self, _g: &mut G) -> Option<io::ErrorKind> {\n\n None\n\n }\n\n}\n\n\n\nimpl<GE> Arbitrary for PartialWithErrors<GE>\n\nwhere\n", "file_path": "src/quickcheck_types.rs", "rank": 57, "score": 10.377961296433483 }, { "content": "# Contributing to rust-partial-io\n\nWe want to make contributing to this project as easy and transparent as\n\npossible.\n\n\n\n## Our Development Process\n\nrust-partial-io is currently developed in Facebook's internal repositories and\n\nthen exported out to GitHub automatically. We invite you to submit pull requests\n\nas described below.\n\n\n\n## Pull Requests\n\nWe actively welcome your pull requests.\n\n\n\n1. Fork the repo and create your branch from `master`.\n\n2. If you've added code that should be tested, add tests.\n\n3. If you've changed APIs, update the documentation.\n\n4. Ensure the test suite passes (`cargo test`).\n\n5. Make sure your code is well-formatted (using `rustfmt`).\n\n6. If you haven't already, complete the Contributor License Agreement (\"CLA\").\n\n\n\n## Contributor License Agreement (\"CLA\")\n\nIn order to accept your pull request, we need you to submit a CLA. You only need\n\nto do this once to work on any of Facebook's open source projects.\n\n\n\nComplete your CLA here: <https://code.facebook.com/cla>\n\n\n\n## Issues\n\nWe use GitHub issues to track public bugs. Please ensure your description is\n\nclear and has sufficient instructions to be able to reproduce the issue.\n\n\n\nFacebook has a [bounty program](https://www.facebook.com/whitehat/) for the safe\n\ndisclosure of security bugs. 
In those cases, please go through the process\n\noutlined on that page and do not file a public issue.\n\n\n", "file_path": "CONTRIBUTING.md", "rank": 58, "score": 9.020227076248563 }, { "content": "//!\n\n//! Several implementations of `GenError` are provided. These can be used to\n\n//! customize the sorts of errors generated. For even more customization, you\n\n//! can write your own `GenError` implementation.\n\n//!\n\n//! # Examples\n\n//!\n\n//! ```rust,ignore\n\n//! use partial_io::{GenInterrupted, PartialWithErrors};\n\n//!\n\n//! quickcheck! {\n\n//! fn test_something(seq: PartialWithErrors<GenInterrupted>) {\n\n//! let reader = ...;\n\n//! let partial_reader = PartialRead::new(reader, seq);\n\n//! // ...\n\n//! }\n\n//! }\n\n//! ```\n\n//!\n\n//! For a detailed example, see `examples/buggy_write.rs` in this repository.\n", "file_path": "src/quickcheck_types.rs", "rank": 59, "score": 8.982241032165971 }, { "content": "#[derive(Clone, Debug, Default)]\n\npub struct GenInterruptedWouldBlock;\n\n\n\nmacro_rules! impl_gen_error {\n\n ($id: ident, [$($errors:expr),+]) => {\n\n impl GenError for $id {\n\n fn gen_error<G: Gen>(&mut self, g: &mut G) -> Option<io::ErrorKind> {\n\n // 20% chance to generate an error.\n\n if g.gen_ratio(1, 5) {\n\n Some([$($errors,)*].choose(g).unwrap().clone())\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl_gen_error!(GenInterrupted, [io::ErrorKind::Interrupted]);\n\nimpl_gen_error!(GenWouldBlock, [io::ErrorKind::WouldBlock]);\n", "file_path": "src/quickcheck_types.rs", "rank": 60, "score": 8.774308073955588 }, { "content": "//! [`PartialAsyncWrite`] to wrap existing `AsyncRead` and `AsyncWrite`\n\n//! implementations. These implementations are task-aware, so they will know\n\n//! how to pause and unpause tasks if they return a `WouldBlock` error.\n\n//! * With the optional `quickcheck` feature, generation of random sequences of\n\n//! operations which can be provided to one of the wrappers. See the\n\n//! 
[`quickcheck_types` documentation](quickcheck_types/index.html) for more.\n\n//!\n\n//! # Motivation\n\n//!\n\n//! A `Read` or `Write` wrapper is conceptually simple but can be difficult to\n\n//! get right, especially if the wrapper has an internal buffer. Common\n\n//! issues include:\n\n//!\n\n//! * A partial read or write, even without an error, might leave the wrapper\n\n//! in an invalid state ([example fix][1]).\n\n//!\n\n//! With `tokio`'s `AsyncRead` and `AsyncWrite`:\n\n//!\n\n//! * `read_to_end` or `write_all` within the wrapper might be partly\n\n//! successful but then error out. These functions will return the error\n", "file_path": "src/lib.rs", "rank": 61, "score": 7.7425925172536845 }, { "content": "//! without informing the caller of how much was read or written. Wrappers\n\n//! with an internal buffer will want to advance their state corresponding\n\n//! to the partial success, so they can't use `read_to_end` or `write_all`\n\n//! ([example fix][2]).\n\n//! * Instances cannot propagate `ErrorKind::Interrupted` failures up. Wrappers\n\n//! must always retry.\n\n//! * Instances must propagate `ErrorKind::WouldBlock` failures up, but that\n\n//! shouldn't leave them in an invalid state.\n\n//!\n\n//! These situations can be hard to think about and hard to test.\n\n//!\n\n//! `partial-io` can help in two ways:\n\n//!\n\n//! 1. For a known bug involving any of these situations, `partial-io` can help\n\n//! you write a test.\n\n//! 2. With the `quickcheck` feature enabled, `partial-io` can also help shake\n\n//! out bugs in your wrapper. See [`quickcheck_types`] for more.\n\n//!\n\n//! # Examples\n\n//!\n", "file_path": "src/lib.rs", "rank": 62, "score": 7.237950450788123 }, { "content": "//! ```\n\n//!\n\n//! For a real-world example, see the [tests in `zstd-rs`].\n\n//!\n\n//! [`PartialRead`]: struct.PartialRead.html\n\n//! [`PartialWrite`]: struct.PartialWrite.html\n\n//! [`PartialAsyncRead`]: struct.PartialAsyncRead.html\n\n//! 
[`PartialAsyncWrite`]: struct.PartialAsyncWrite.html\n\n//! [`quickcheck_types`]: quickcheck_types/index.html\n\n//! [1]: https://github.com/gyscos/zstd-rs/commit/3123e418595f6badd5b06db2a14c4ff4555e7705\n\n//! [2]: https://github.com/gyscos/zstd-rs/commit/02dc9d9a3419618fc729542b45c96c32b0f178bb\n\n//! [tests in `zstd-rs`]: https://github.com/gyscos/zstd-rs/blob/master/src/stream/mod.rs\n\n\n\n#[cfg(feature = \"tokio\")]\n\nmod async_read;\n\n#[cfg(feature = \"tokio\")]\n\nmod async_write;\n\n#[cfg(feature = \"quickcheck\")]\n\npub mod quickcheck_types;\n\nmod read;\n", "file_path": "src/lib.rs", "rank": 63, "score": 6.578999583696351 }, { "content": "## QuickCheck integration\n\n\n\n`partial-io` can optionally integrate with the `quickcheck` library to generate\n\nrandom test cases. Enable the `quickcheck` feature to use this:\n\n\n\n```toml\n\n[dev-dependencies]\n\npartial-io = { version = \"0.3\", features = [\"quickcheck\"] }\n\n```\n\n\n\nSee the\n\n[documentation](https://facebookincubator.github.io/rust-partial-io/partial_io/quickcheck_types/index.html)\n\nfor how to use `quickcheck` to generate tests.\n\n\n\n## License\n\n\n\n`partial-io` is MIT-licensed.\n", "file_path": "README.md", "rank": 64, "score": 4.296257149129433 } ]
Rust
fyrox-ui/src/popup.rs
jackos/Fyrox
4b293733bda8e1a0a774aaf82554ac8930afdd8b
use crate::{ border::BorderBuilder, core::{algebra::Vector2, math::Rect, pool::Handle}, define_constructor, message::{ButtonState, MessageDirection, OsEvent, UiMessage}, widget::{Widget, WidgetBuilder, WidgetMessage}, BuildContext, Control, NodeHandleMapping, RestrictionEntry, Thickness, UiNode, UserInterface, BRUSH_DARKER, BRUSH_LIGHTER, }; use std::{ any::{Any, TypeId}, ops::{Deref, DerefMut}, }; #[derive(Debug, Clone, PartialEq)] pub enum PopupMessage { Open, Close, Content(Handle<UiNode>), Placement(Placement), AdjustPosition, } impl PopupMessage { define_constructor!(PopupMessage:Open => fn open(), layout: false); define_constructor!(PopupMessage:Close => fn close(), layout: false); define_constructor!(PopupMessage:Content => fn content(Handle<UiNode>), layout: false); define_constructor!(PopupMessage:Placement => fn placement(Placement), layout: false); define_constructor!(PopupMessage:AdjustPosition => fn adjust_position(), layout: true); } #[derive(Copy, Clone, PartialEq, Debug)] pub enum Placement { LeftTop(Handle<UiNode>), RightTop(Handle<UiNode>), Center(Handle<UiNode>), LeftBottom(Handle<UiNode>), RightBottom(Handle<UiNode>), Cursor(Handle<UiNode>), Position { position: Vector2<f32>, target: Handle<UiNode>, }, } #[derive(Clone)] pub struct Popup { widget: Widget, placement: Placement, stays_open: bool, is_open: bool, content: Handle<UiNode>, body: Handle<UiNode>, smart_placement: bool, } crate::define_widget_deref!(Popup); fn adjust_placement_position( node_screen_bounds: Rect<f32>, screen_size: Vector2<f32>, ) -> Vector2<f32> { let mut new_position = node_screen_bounds.position; let right_bottom = node_screen_bounds.right_bottom_corner(); if right_bottom.x > screen_size.x { new_position.x -= right_bottom.x - screen_size.x; } if right_bottom.y > screen_size.y { new_position.y -= right_bottom.y - screen_size.y; } new_position } impl Popup { fn left_top_placement(&self, ui: &UserInterface, target: Handle<UiNode>) -> Vector2<f32> { ui.try_get_node(target) 
.map(|n| n.screen_position()) .unwrap_or_default() } fn right_top_placement(&self, ui: &UserInterface, target: Handle<UiNode>) -> Vector2<f32> { ui.try_get_node(target) .map(|n| n.screen_position() + Vector2::new(n.actual_size().x, 0.0)) .unwrap_or_else(|| Vector2::new(ui.screen_size().x - self.widget.actual_size().x, 0.0)) } fn center_placement(&self, ui: &UserInterface, target: Handle<UiNode>) -> Vector2<f32> { ui.try_get_node(target) .map(|n| n.screen_position() + n.actual_size().scale(0.5)) .unwrap_or_else(|| (ui.screen_size - self.widget.actual_size()).scale(0.5)) } fn left_bottom_placement(&self, ui: &UserInterface, target: Handle<UiNode>) -> Vector2<f32> { ui.try_get_node(target) .map(|n| n.screen_position() + Vector2::new(0.0, n.actual_size().y)) .unwrap_or_else(|| Vector2::new(0.0, ui.screen_size().y - self.widget.actual_size().y)) } fn right_bottom_placement(&self, ui: &UserInterface, target: Handle<UiNode>) -> Vector2<f32> { ui.try_get_node(target) .map(|n| n.screen_position() + n.actual_size()) .unwrap_or_else(|| ui.screen_size - self.widget.actual_size()) } } impl Control for Popup { fn query_component(&self, type_id: TypeId) -> Option<&dyn Any> { if type_id == TypeId::of::<Self>() { Some(self) } else { None } } fn resolve(&mut self, node_map: &NodeHandleMapping) { node_map.resolve(&mut self.content); node_map.resolve(&mut self.body); } fn handle_routed_message(&mut self, ui: &mut UserInterface, message: &mut UiMessage) { self.widget.handle_routed_message(ui, message); if let Some(msg) = message.data::<PopupMessage>() { if message.destination() == self.handle() { match msg { PopupMessage::Open => { if !self.is_open { self.is_open = true; ui.send_message(WidgetMessage::visibility( self.handle(), MessageDirection::ToWidget, true, )); ui.push_picking_restriction(RestrictionEntry { handle: self.handle(), stop: false, }); ui.send_message(WidgetMessage::topmost( self.handle(), MessageDirection::ToWidget, )); let position = match self.placement { 
Placement::LeftTop(target) => self.left_top_placement(ui, target), Placement::RightTop(target) => self.right_top_placement(ui, target), Placement::Center(target) => self.center_placement(ui, target), Placement::LeftBottom(target) => { self.left_bottom_placement(ui, target) } Placement::RightBottom(target) => { self.right_bottom_placement(ui, target) } Placement::Cursor(_) => ui.cursor_position(), Placement::Position { position, .. } => position, }; ui.send_message(WidgetMessage::desired_position( self.handle(), MessageDirection::ToWidget, position, )); if self.smart_placement { ui.send_message(PopupMessage::adjust_position( self.handle, MessageDirection::ToWidget, )); } } } PopupMessage::Close => { if self.is_open { self.is_open = false; ui.send_message(WidgetMessage::visibility( self.handle(), MessageDirection::ToWidget, false, )); ui.remove_picking_restriction(self.handle()); if ui.captured_node() == self.handle() { ui.release_mouse_capture(); } } } PopupMessage::Content(content) => { if self.content.is_some() { ui.send_message(WidgetMessage::remove( self.content, MessageDirection::ToWidget, )); } self.content = *content; ui.send_message(WidgetMessage::link( self.content, MessageDirection::ToWidget, self.body, )); } PopupMessage::Placement(placement) => { self.placement = *placement; self.invalidate_layout(); } PopupMessage::AdjustPosition => { let new_position = adjust_placement_position(self.screen_bounds(), ui.screen_size()); if new_position != self.screen_position() { ui.send_message(WidgetMessage::desired_position( self.handle, MessageDirection::ToWidget, new_position, )); } } } } } } fn handle_os_event( &mut self, self_handle: Handle<UiNode>, ui: &mut UserInterface, event: &OsEvent, ) { if let OsEvent::MouseInput { state, .. 
} = event { if let Some(top_restriction) = ui.top_picking_restriction() { if *state == ButtonState::Pressed && top_restriction.handle == self_handle && self.is_open { let pos = ui.cursor_position(); if !self.widget.screen_bounds().contains(pos) && !self.stays_open { ui.send_message(PopupMessage::close( self.handle(), MessageDirection::ToWidget, )); } } } } } } pub struct PopupBuilder { widget_builder: WidgetBuilder, placement: Placement, stays_open: bool, content: Handle<UiNode>, smart_placement: bool, } impl PopupBuilder { pub fn new(widget_builder: WidgetBuilder) -> Self { Self { widget_builder, placement: Placement::Cursor(Default::default()), stays_open: false, content: Default::default(), smart_placement: true, } } pub fn with_placement(mut self, placement: Placement) -> Self { self.placement = placement; self } pub fn with_smart_placement(mut self, smart_placement: bool) -> Self { self.smart_placement = smart_placement; self } pub fn stays_open(mut self, value: bool) -> Self { self.stays_open = value; self } pub fn with_content(mut self, content: Handle<UiNode>) -> Self { self.content = content; self } pub fn build(self, ctx: &mut BuildContext) -> Handle<UiNode> { let body = BorderBuilder::new( WidgetBuilder::new() .with_background(BRUSH_DARKER) .with_foreground(BRUSH_LIGHTER) .with_child(self.content), ) .with_stroke_thickness(Thickness::uniform(1.0)) .build(ctx); let popup = Popup { widget: self .widget_builder .with_child(body) .with_visibility(false) .with_handle_os_events(true) .build(), placement: self.placement, stays_open: self.stays_open, is_open: false, content: self.content, smart_placement: self.smart_placement, body, }; ctx.add_node(UiNode::new(popup)) } }
use crate::{ border::BorderBuilder, core::{algebra::Vector2, math::Rect, pool::Handle}, define_constructor, message::{ButtonState, MessageDirection, OsEvent, UiMessage}, widget::{Widget, WidgetBuilder, WidgetMessage}, BuildContext, Control, NodeHandleMapping, RestrictionEntry, Thickness, UiNode, UserInterface, BRUSH_DARKER, BRUSH_LIGHTER, }; use std::{ any::{Any, TypeId}, ops::{Deref, DerefMut}, }; #[derive(Debug, Clone, PartialEq)] pub enum PopupMessage { Open, Close, Content(Handle<UiNode>), Placement(Placement), AdjustPosition, } impl PopupMessage { define_constructor!(PopupMessage:Open => fn open(), layout: false); define_constructor!(PopupMessage:Close => fn close(), layout: false); define_constructor!(PopupMessage:Content => fn content(Handle<UiNode>), layout: false); define_constructor!(PopupMessage:Placement => fn placement(Placement), layout: false); define_constructor!(PopupMessage:AdjustPosition => fn adjust_position(), layout: true); } #[derive(Copy, Clone, PartialEq, Debug)] pub enum Placement { LeftTop(Handle<UiNode>), RightTop(Handle<UiNode>), Center(Handle<UiNode>), LeftBottom(Handle<UiNode>), RightBottom(Handle<UiNode>), Cursor(Handle<UiNode>), Position { position: Vector2<f32>, target: Handle<UiNode>, }, } #[derive(Clone)] pub struct Popup { widget: Widget, placement: Placement, stays_open: bool, is_open: bool, content: Handle<UiNode>, body: Handle<UiNode>, smart_placement: bool, } crate::define_widget_deref!(Popup); fn adjust_placement_position( node_screen_bounds: Rect<f32>, screen_size: Vector2<f32>, ) -> Vector2<f32> { let mut new_position = node_screen_bounds.position; let right_bottom = node_screen_bounds.right_bottom_corner(); if right_bottom.x > screen_size.x { new_position.x -= right_bottom.x - screen_size.x; } if right_bottom.y > screen_size.y { new_position.y -= right_bottom.y - screen_size.y; } new_position } impl Popup { fn left_top_placement(&self, ui: &UserInterface, target: Handle<UiNode>) -> Vector2<f32> { ui.try_get_node(target) 
.map(|n| n.screen_position()) .unwrap_or_default() } fn right_top_placement(&self, ui: &UserInterface, target: Handle<UiNode>) -> Vector2<f32> { ui.try_get_node(target) .map(|n| n.screen_position() + Vector2::new(n.actual_size().x, 0.0)) .unwrap_or_else(|| Vector2::new(ui.screen_size().x - self.widget.actual_size().x, 0.0)) } fn center_placement(&self, ui: &UserInterface, target: Handle<UiNode>) -> Vector2<f32> { ui.try_get_node(target) .map(|n| n.screen_position() + n.actual_size().scale(0.5)) .unwrap_or_else(|| (ui.screen_size - self.widget.actual_size()).scale(0.5)) } fn left_bottom_placement(&self, ui: &UserInterface, target: Handle<UiNode>) -> Vector2<f32> { ui.try_get_node(target) .map(|n| n.screen_position() + Vector2::new(0.0, n.actual_size().y)) .unwrap_or_else(|| Vector2::new(0.0, ui.screen_size().y - self.widget.actual_size().y)) } fn right_bottom_placement(&self, ui: &UserInterface, target: Handle<UiNode>) -> Vector2<f32> { ui.try_get_node(target) .map(|n| n.screen_position() + n.actual_size()) .unwrap_or_else(|| ui.screen_size - self.widget.actual_size()) } } impl Control for Popup { fn query_component(&self, type_id: TypeId) -> Option<&dyn Any> { if type_id == TypeId::of::<Self>() { Some(self) } else { None } } fn resolve(&mut self, node_map: &NodeHandleMapping) { node_map.resolve(&mut self.content); node_map.resolve(&mut self.body); } fn handle_routed_message(&mut self, ui: &mut UserInterface, message: &mut UiMessage) { self.widget.handle_routed_message(ui, message); if let Some(msg) = message.data::<PopupMessage>() { if message.destination() == self.handle() { match msg { PopupMessage::Open => { if !self.is_open { self.is_open = true; ui.send_message(WidgetMessage::visibility( self.handle(), MessageDirection::ToWidget, true, )); ui.push_picking_restriction(RestrictionEntry { handle: self.handle(), stop: false, }); ui.send_message(WidgetMessage::topmost( self.handle(), MessageDirection::ToWidget, ));
ui.send_message(WidgetMessage::desired_position( self.handle(), MessageDirection::ToWidget, position, )); if self.smart_placement { ui.send_message(PopupMessage::adjust_position( self.handle, MessageDirection::ToWidget, )); } } } PopupMessage::Close => { if self.is_open { self.is_open = false; ui.send_message(WidgetMessage::visibility( self.handle(), MessageDirection::ToWidget, false, )); ui.remove_picking_restriction(self.handle()); if ui.captured_node() == self.handle() { ui.release_mouse_capture(); } } } PopupMessage::Content(content) => { if self.content.is_some() { ui.send_message(WidgetMessage::remove( self.content, MessageDirection::ToWidget, )); } self.content = *content; ui.send_message(WidgetMessage::link( self.content, MessageDirection::ToWidget, self.body, )); } PopupMessage::Placement(placement) => { self.placement = *placement; self.invalidate_layout(); } PopupMessage::AdjustPosition => { let new_position = adjust_placement_position(self.screen_bounds(), ui.screen_size()); if new_position != self.screen_position() { ui.send_message(WidgetMessage::desired_position( self.handle, MessageDirection::ToWidget, new_position, )); } } } } } } fn handle_os_event( &mut self, self_handle: Handle<UiNode>, ui: &mut UserInterface, event: &OsEvent, ) { if let OsEvent::MouseInput { state, .. 
} = event { if let Some(top_restriction) = ui.top_picking_restriction() { if *state == ButtonState::Pressed && top_restriction.handle == self_handle && self.is_open { let pos = ui.cursor_position(); if !self.widget.screen_bounds().contains(pos) && !self.stays_open { ui.send_message(PopupMessage::close( self.handle(), MessageDirection::ToWidget, )); } } } } } } pub struct PopupBuilder { widget_builder: WidgetBuilder, placement: Placement, stays_open: bool, content: Handle<UiNode>, smart_placement: bool, } impl PopupBuilder { pub fn new(widget_builder: WidgetBuilder) -> Self { Self { widget_builder, placement: Placement::Cursor(Default::default()), stays_open: false, content: Default::default(), smart_placement: true, } } pub fn with_placement(mut self, placement: Placement) -> Self { self.placement = placement; self } pub fn with_smart_placement(mut self, smart_placement: bool) -> Self { self.smart_placement = smart_placement; self } pub fn stays_open(mut self, value: bool) -> Self { self.stays_open = value; self } pub fn with_content(mut self, content: Handle<UiNode>) -> Self { self.content = content; self } pub fn build(self, ctx: &mut BuildContext) -> Handle<UiNode> { let body = BorderBuilder::new( WidgetBuilder::new() .with_background(BRUSH_DARKER) .with_foreground(BRUSH_LIGHTER) .with_child(self.content), ) .with_stroke_thickness(Thickness::uniform(1.0)) .build(ctx); let popup = Popup { widget: self .widget_builder .with_child(body) .with_visibility(false) .with_handle_os_events(true) .build(), placement: self.placement, stays_open: self.stays_open, is_open: false, content: self.content, smart_placement: self.smart_placement, body, }; ctx.add_node(UiNode::new(popup)) } }
let position = match self.placement { Placement::LeftTop(target) => self.left_top_placement(ui, target), Placement::RightTop(target) => self.right_top_placement(ui, target), Placement::Center(target) => self.center_placement(ui, target), Placement::LeftBottom(target) => { self.left_bottom_placement(ui, target) } Placement::RightBottom(target) => { self.right_bottom_placement(ui, target) } Placement::Cursor(_) => ui.cursor_position(), Placement::Position { position, .. } => position, };
assignment_statement
[ { "content": "pub fn send_sync_message(ui: &UserInterface, mut msg: UiMessage) {\n\n msg.flags = MSG_SYNC_FLAG;\n\n ui.send_message(msg);\n\n}\n\n\n", "file_path": "editor/src/main.rs", "rank": 0, "score": 447076.9232536799 }, { "content": "/// Trait for all UI controls in library.\n\npub trait Control: BaseControl + Deref<Target = Widget> + DerefMut {\n\n /// Allows a widget to provide access to inner components. For example you can build your custom\n\n /// MyTree widget using engine's Tree widget as a base. The engine needs to know whether the custom\n\n /// widget is actually extends functionality of some existing widget.\n\n ///\n\n /// # Implementation\n\n ///\n\n /// It should at least return `Some(self)` for `type_id == TypeId::of::<Self>`.\n\n fn query_component(&self, type_id: TypeId) -> Option<&dyn Any>;\n\n\n\n fn resolve(&mut self, _node_map: &NodeHandleMapping) {}\n\n\n\n fn on_remove(&self, _sender: &Sender<UiMessage>) {}\n\n\n\n fn measure_override(&self, ui: &UserInterface, available_size: Vector2<f32>) -> Vector2<f32> {\n\n scope_profile!();\n\n\n\n self.deref().measure_override(ui, available_size)\n\n }\n\n\n", "file_path": "fyrox-ui/src/lib.rs", "rank": 1, "score": 445003.84609378787 }, { "content": "pub fn make_save_file_selector(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n FileSelectorBuilder::new(\n\n WindowBuilder::new(WidgetBuilder::new().with_width(300.0).with_height(400.0))\n\n .with_title(WindowTitle::Text(\"Save Scene As\".into()))\n\n .open(false),\n\n )\n\n .with_mode(FileBrowserMode::Save {\n\n default_file_name: PathBuf::from(\"unnamed.rgs\"),\n\n })\n\n .with_path(\"./\")\n\n .with_filter(make_scene_file_filter())\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/main.rs", "rank": 2, "score": 336634.18451867776 }, { "content": "pub fn make_simple_tooltip(ctx: &mut BuildContext, text: &str) -> Handle<UiNode> {\n\n BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .with_visibility(false)\n\n 
.with_foreground(Brush::Solid(Color::opaque(160, 160, 160)))\n\n .with_max_size(Vector2::new(250.0, f32::INFINITY))\n\n .with_child(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_wrap(WrapMode::Word)\n\n .with_text(text)\n\n .build(ctx),\n\n ),\n\n )\n\n .build(ctx)\n\n}\n", "file_path": "fyrox-ui/src/utils.rs", "rank": 3, "score": 332740.1082168162 }, { "content": "pub fn create_ui(ui: &mut BuildContext, screen_size: Vector2<f32>) -> Interface {\n\n let debug_text;\n\n let progress_bar;\n\n let progress_text;\n\n let root = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(screen_size.x)\n\n .with_height(screen_size.y)\n\n .with_child({\n\n debug_text = TextBuilder::new(WidgetBuilder::new().on_row(0).on_column(0))\n\n .with_wrap(WrapMode::Word)\n\n .build(ui);\n\n debug_text\n\n })\n\n .with_child({\n\n progress_bar =\n\n ProgressBarBuilder::new(WidgetBuilder::new().on_row(1).on_column(1)).build(ui);\n\n progress_bar\n\n })\n\n .with_child({\n", "file_path": "examples/shared/mod.rs", "rank": 4, "score": 330072.3247734125 }, { "content": "fn make_bool_input_field(ctx: &mut BuildContext, row: usize, value: bool) -> Handle<UiNode> {\n\n CheckBoxBuilder::new(\n\n WidgetBuilder::new()\n\n .on_row(row)\n\n .with_margin(Thickness::uniform(1.0))\n\n .on_column(1),\n\n )\n\n .checked(Some(value))\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/settings/mod.rs", "rank": 5, "score": 322546.35272745485 }, { "content": "pub fn make_dropdown_list_option(ctx: &mut BuildContext, name: &str) -> Handle<UiNode> {\n\n DecoratorBuilder::new(BorderBuilder::new(\n\n WidgetBuilder::new().with_height(26.0).with_child(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .with_horizontal_text_alignment(HorizontalAlignment::Center)\n\n .with_text(name)\n\n .build(ctx),\n\n ),\n\n ))\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/gui.rs", "rank": 6, "score": 318380.9064290382 }, { "content": "pub fn 
make_default_anchor(ctx: &mut BuildContext, row: usize, column: usize) -> Handle<UiNode> {\n\n let default_anchor_size = 30.0;\n\n BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(default_anchor_size)\n\n .with_height(default_anchor_size)\n\n .with_visibility(false)\n\n .on_row(row)\n\n .on_column(column)\n\n .with_draw_on_top(true)\n\n .with_background(Brush::Solid(DEFAULT_ANCHOR_COLOR)),\n\n )\n\n .build(ctx)\n\n}\n\n\n\nimpl TileBuilder {\n\n pub fn new(widget_builder: WidgetBuilder) -> Self {\n\n Self {\n\n widget_builder,\n\n content: TileContent::Empty,\n", "file_path": "fyrox-ui/src/dock.rs", "rank": 7, "score": 316612.0482312439 }, { "content": "pub fn make_button(ctx: &mut BuildContext, arrow: ArrowDirection, row: usize) -> Handle<UiNode> {\n\n ButtonBuilder::new(\n\n WidgetBuilder::new()\n\n .with_margin(Thickness::right(1.0))\n\n .on_row(row),\n\n )\n\n .with_back(\n\n DecoratorBuilder::new(BorderBuilder::new(\n\n WidgetBuilder::new().with_foreground(Brush::Solid(Color::opaque(90, 90, 90))),\n\n ))\n\n .with_normal_brush(Brush::Solid(Color::opaque(60, 60, 60)))\n\n .with_hover_brush(Brush::Solid(Color::opaque(80, 80, 80)))\n\n .with_pressed_brush(Brush::Solid(Color::opaque(80, 118, 178)))\n\n .build(ctx),\n\n )\n\n .with_content(make_arrow(ctx, arrow, 6.0))\n\n .build(ctx)\n\n}\n\n\n\nimpl<T: NumericType> NumericUpDownBuilder<T> {\n", "file_path": "fyrox-ui/src/numeric.rs", "rank": 8, "score": 316612.0482312439 }, { "content": "fn create_ui(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n TextBuilder::new(WidgetBuilder::new()).build(ctx)\n\n}\n\n\n", "file_path": "examples/lod.rs", "rank": 9, "score": 296128.1784157146 }, { "content": "fn create_ui(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n TextBuilder::new(WidgetBuilder::new()).build(ctx)\n\n}\n\n\n", "file_path": "examples/scene.rs", "rank": 10, "score": 296128.1784157146 }, { "content": "fn create_ui(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n 
TextBuilder::new(WidgetBuilder::new()).build(ctx)\n\n}\n\n\n", "file_path": "examples/navmesh.rs", "rank": 11, "score": 296128.1784157146 }, { "content": "fn make_history_entry_widget(ctx: &mut BuildContext, entry: &HistoryEntry) -> Handle<UiNode> {\n\n DecoratorBuilder::new(BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .with_height(18.0)\n\n .with_margin(Thickness {\n\n left: 1.0,\n\n top: 0.0,\n\n right: 1.0,\n\n bottom: 1.0,\n\n })\n\n .with_child(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_text(format!(\"{}\", entry.work_dir.display(),))\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .build(ctx),\n\n ),\n\n ))\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/configurator.rs", "rank": 12, "score": 292389.84538096364 }, { "content": "/// A set of useful methods that is possible to auto-implement.\n\npub trait BaseNodeTrait: Any + Debug + Deref<Target = Base> + DerefMut + Send {\n\n /// This method creates raw copy of a node, it should never be called in normal circumstances\n\n /// because internally nodes may (and most likely will) contain handles to other nodes. 
To\n\n /// correctly clone a node you have to use [copy_node](struct.Graph.html#method.copy_node).\n\n fn clone_box(&self) -> Node;\n\n\n\n /// Returns self as shared reference to [`Any`].\n\n fn as_any(&self) -> &dyn Any;\n\n\n\n /// Returns self as mutable reference to [`Any`].\n\n fn as_any_mut(&mut self) -> &mut dyn Any;\n\n}\n\n\n\nimpl<T> BaseNodeTrait for T\n\nwhere\n\n T: Clone + NodeTrait + 'static,\n\n{\n\n fn clone_box(&self) -> Node {\n\n Node(Box::new(self.clone()))\n\n }\n", "file_path": "src/scene/node/mod.rs", "rank": 13, "score": 291904.15249214205 }, { "content": "fn create_ui(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n TextBuilder::new(WidgetBuilder::new()).build(ctx)\n\n}\n\n\n\n#[wasm_bindgen]\n\nextern \"C\" {\n\n #[wasm_bindgen(js_namespace = console)]\n\n fn error(msg: String);\n\n\n\n type Error;\n\n\n\n #[wasm_bindgen(constructor)]\n\n fn new() -> Error;\n\n\n\n #[wasm_bindgen(structural, method, getter)]\n\n fn stack(error: &Error) -> String;\n\n}\n\n\n", "file_path": "examples/wasm/src/lib.rs", "rank": 15, "score": 288118.90643120825 }, { "content": "fn colorize(handle: Handle<UiNode>, ui: &UserInterface, index: &mut usize) {\n\n let node = ui.node(handle);\n\n\n\n if let Some(decorator) = node.cast::<Decorator>() {\n\n if node.parent().is_some() && ui.node(node.parent()).cast::<Button>().is_none() {\n\n let new_brush = Brush::Solid(if *index % 2 == 0 {\n\n Color::opaque(50, 50, 50)\n\n } else {\n\n Color::opaque(60, 60, 60)\n\n });\n\n\n\n if decorator.normal_brush() != &new_brush {\n\n ui.send_message(DecoratorMessage::normal_brush(\n\n handle,\n\n MessageDirection::ToWidget,\n\n new_brush,\n\n ));\n\n }\n\n }\n\n }\n\n\n\n *index += 1;\n\n\n\n for &item in node.children() {\n\n colorize(item, ui, index);\n\n }\n\n}\n\n\n", "file_path": "editor/src/world/mod.rs", "rank": 16, "score": 284318.6476401995 }, { "content": "fn generate_item_container(ctx: &mut BuildContext, item: Handle<UiNode>) -> Handle<UiNode> {\n\n let item = 
ListViewItem {\n\n widget: WidgetBuilder::new().with_child(item).build(),\n\n };\n\n\n\n ctx.add_node(UiNode::new(item))\n\n}\n\n\n", "file_path": "fyrox-ui/src/list_view.rs", "rank": 17, "score": 278905.7960181541 }, { "content": "pub trait MessageData: 'static + Debug + Any {\n\n fn as_any(&self) -> &dyn Any;\n\n\n\n fn compare(&self, other: &dyn MessageData) -> bool;\n\n}\n\n\n\nimpl<T> MessageData for T\n\nwhere\n\n T: 'static + Debug + PartialEq + Any,\n\n{\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn compare(&self, other: &dyn MessageData) -> bool {\n\n other\n\n .as_any()\n\n .downcast_ref::<T>()\n\n .map(|other| other == self)\n\n .unwrap_or_default()\n", "file_path": "fyrox-ui/src/message.rs", "rank": 18, "score": 278096.84612261655 }, { "content": "/// Translates window mouse button into fyrox-ui mouse button.\n\npub fn translate_button(button: crate::event::MouseButton) -> crate::gui::message::MouseButton {\n\n match button {\n\n crate::event::MouseButton::Left => crate::gui::message::MouseButton::Left,\n\n crate::event::MouseButton::Right => crate::gui::message::MouseButton::Right,\n\n crate::event::MouseButton::Middle => crate::gui::message::MouseButton::Middle,\n\n crate::event::MouseButton::Other(i) => crate::gui::message::MouseButton::Other(i),\n\n }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 19, "score": 274611.2251792444 }, { "content": "/// Translates cursor icon from fyrox-ui library to glutin format.\n\npub fn translate_cursor_icon(icon: crate::gui::message::CursorIcon) -> crate::window::CursorIcon {\n\n match icon {\n\n crate::gui::message::CursorIcon::Default => crate::window::CursorIcon::Default,\n\n crate::gui::message::CursorIcon::Crosshair => crate::window::CursorIcon::Crosshair,\n\n crate::gui::message::CursorIcon::Hand => crate::window::CursorIcon::Hand,\n\n crate::gui::message::CursorIcon::Arrow => crate::window::CursorIcon::Arrow,\n\n crate::gui::message::CursorIcon::Move => 
crate::window::CursorIcon::Move,\n\n crate::gui::message::CursorIcon::Text => crate::window::CursorIcon::Text,\n\n crate::gui::message::CursorIcon::Wait => crate::window::CursorIcon::Wait,\n\n crate::gui::message::CursorIcon::Help => crate::window::CursorIcon::Help,\n\n crate::gui::message::CursorIcon::Progress => crate::window::CursorIcon::Progress,\n\n crate::gui::message::CursorIcon::NotAllowed => crate::window::CursorIcon::NotAllowed,\n\n crate::gui::message::CursorIcon::ContextMenu => crate::window::CursorIcon::ContextMenu,\n\n crate::gui::message::CursorIcon::Cell => crate::window::CursorIcon::Cell,\n\n crate::gui::message::CursorIcon::VerticalText => crate::window::CursorIcon::VerticalText,\n\n crate::gui::message::CursorIcon::Alias => crate::window::CursorIcon::Alias,\n\n crate::gui::message::CursorIcon::Copy => crate::window::CursorIcon::Copy,\n\n crate::gui::message::CursorIcon::NoDrop => crate::window::CursorIcon::NoDrop,\n\n crate::gui::message::CursorIcon::Grab => crate::window::CursorIcon::Grab,\n\n crate::gui::message::CursorIcon::Grabbing => crate::window::CursorIcon::Grabbing,\n", "file_path": "src/utils/mod.rs", "rank": 20, "score": 271256.33560597006 }, { "content": "fn switch_window_state(window: Handle<UiNode>, ui: &UserInterface, center: bool) {\n\n let current_state = ui.node(window).visibility();\n\n ui.send_message(if current_state {\n\n WindowMessage::close(window, MessageDirection::ToWidget)\n\n } else {\n\n WindowMessage::open(window, MessageDirection::ToWidget, center)\n\n })\n\n}\n\n\n\nimpl ViewMenu {\n\n pub fn new(ctx: &mut BuildContext) -> Self {\n\n let sidebar;\n\n let asset_browser;\n\n let world_outliner;\n\n\n\n let light_panel;\n\n let log_panel;\n\n\n\n let menu = create_root_menu_item(\n\n \"View\",\n", "file_path": "editor/src/menu/view.rs", "rank": 21, "score": 270177.204083945 }, { "content": "fn make_mark(ctx: &mut BuildContext, button: HeaderButton) -> Handle<UiNode> {\n\n VectorImageBuilder::new(\n\n 
WidgetBuilder::new()\n\n .with_horizontal_alignment(HorizontalAlignment::Center)\n\n .with_vertical_alignment(match button {\n\n HeaderButton::Close => VerticalAlignment::Center,\n\n HeaderButton::Minimize => VerticalAlignment::Bottom,\n\n })\n\n .with_margin(match button {\n\n HeaderButton::Close => Thickness::uniform(0.0),\n\n HeaderButton::Minimize => Thickness::bottom(3.0),\n\n })\n\n .with_foreground(BRUSH_BRIGHT),\n\n )\n\n .with_primitives(match button {\n\n HeaderButton::Close => {\n\n vec![\n\n Primitive::Line {\n\n begin: Vector2::new(0.0, 0.0),\n\n end: Vector2::new(12.0, 12.0),\n", "file_path": "fyrox-ui/src/window.rs", "rank": 22, "score": 269534.23677592084 }, { "content": "fn make_text_title(ctx: &mut BuildContext, text: &str) -> Handle<UiNode> {\n\n TextBuilder::new(\n\n WidgetBuilder::new()\n\n .with_margin(Thickness::uniform(5.0))\n\n .on_row(0)\n\n .on_column(0),\n\n )\n\n .with_text(text)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "fyrox-ui/src/window.rs", "rank": 23, "score": 269534.23677592084 }, { "content": "fn make_tooltip(ctx: &mut BuildContext, text: &str) -> Handle<UiNode> {\n\n if text.is_empty() {\n\n Handle::NONE\n\n } else {\n\n make_simple_tooltip(ctx, text)\n\n }\n\n}\n\n\n", "file_path": "fyrox-ui/src/inspector/mod.rs", "rank": 24, "score": 269534.23677592084 }, { "content": "fn filtered_out(filter: &mut Option<Filter>, path: &Path) -> bool {\n\n match filter.as_mut() {\n\n Some(filter) => !filter.0.borrow_mut().deref_mut().lock().unwrap()(path),\n\n None => false,\n\n }\n\n}\n\n\n", "file_path": "fyrox-ui/src/file_browser.rs", "rank": 25, "score": 266591.9176003694 }, { "content": "pub fn handle_rigid_body_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n rigid_body: &RigidBody,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n RigidBody::MASS => SetBodyMassCommand,\n\n RigidBody::LIN_VEL => 
SetBodyLinVelCommand,\n\n RigidBody::ANG_VEL => SetBodyAngVelCommand,\n\n RigidBody::BODY_TYPE => SetBodyStatusCommand,\n\n RigidBody::X_ROTATION_LOCKED => SetBodyXRotationLockedCommand,\n\n RigidBody::Y_ROTATION_LOCKED => SetBodyYRotationLockedCommand,\n\n RigidBody::Z_ROTATION_LOCKED => SetBodyZRotationLockedCommand,\n\n RigidBody::TRANSLATION_LOCKED => SetBodyTranslationLockedCommand,\n\n RigidBody::CAN_SLEEP => SetBodyCanSleepCommand,\n\n RigidBody::CCD_ENABLED => SetBodyCcdEnabledCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n RigidBody::BASE => handle_base_property_changed(inner, handle, rigid_body),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n}\n", "file_path": "editor/src/inspector/handlers/node/rigid_body.rs", "rank": 26, "score": 266427.6677247454 }, { "content": "fn make_header_button(ctx: &mut BuildContext, button: HeaderButton) -> Handle<UiNode> {\n\n ButtonBuilder::new(WidgetBuilder::new().with_margin(Thickness::uniform(2.0)))\n\n .with_back(\n\n DecoratorBuilder::new(\n\n BorderBuilder::new(WidgetBuilder::new())\n\n .with_stroke_thickness(Thickness::uniform(0.0)),\n\n )\n\n .with_normal_brush(Brush::Solid(Color::TRANSPARENT))\n\n .with_hover_brush(BRUSH_LIGHT)\n\n .with_pressed_brush(BRUSH_LIGHTEST)\n\n .build(ctx),\n\n )\n\n .with_content(make_mark(ctx, button))\n\n .build(ctx)\n\n}\n\n\n\nimpl<'a> WindowBuilder {\n\n pub fn new(widget_builder: WidgetBuilder) -> Self {\n\n Self {\n\n widget_builder,\n", "file_path": "fyrox-ui/src/window.rs", "rank": 27, "score": 266200.1225167351 }, { "content": "fn is_node_enabled(nodes: &Pool<UiNode>, handle: Handle<UiNode>) -> bool {\n\n let root_node = &nodes[handle];\n\n let mut enabled = root_node.enabled();\n\n let mut parent = root_node.parent();\n\n while parent.is_some() {\n\n let node = &nodes[parent];\n\n if !node.enabled() {\n\n enabled = false;\n\n break;\n\n }\n\n parent = node.parent();\n\n }\n\n enabled\n\n}\n\n\n\nimpl UserInterface {\n\n pub fn 
new(screen_size: Vector2<f32>) -> UserInterface {\n\n let (sender, receiver) = mpsc::channel();\n\n let (layout_events_sender, layout_events_receiver) = mpsc::channel();\n\n let mut ui = UserInterface {\n", "file_path": "fyrox-ui/src/lib.rs", "rank": 28, "score": 263166.76685478864 }, { "content": "fn poll_ui_messages(editor: &mut Editor, engine: &mut GameEngine) {\n\n scope_profile!();\n\n\n\n while let Some(ui_message) = engine.user_interface.poll_message() {\n\n editor.handle_ui_message(&ui_message, engine);\n\n }\n\n}\n\n\n", "file_path": "editor/src/main.rs", "rank": 29, "score": 261647.61812033068 }, { "content": "fn close_menu_chain(from: Handle<UiNode>, ui: &UserInterface) {\n\n let mut handle = from;\n\n while handle.is_some() {\n\n if let Some((popup_handle, popup)) = ui.try_borrow_by_type_up::<Popup>(handle) {\n\n ui.send_message(PopupMessage::close(\n\n popup_handle,\n\n MessageDirection::ToWidget,\n\n ));\n\n\n\n // Continue search from parent menu item of popup.\n\n handle = popup\n\n .user_data_ref::<Handle<UiNode>>()\n\n .cloned()\n\n .unwrap_or_default();\n\n }\n\n }\n\n}\n\n\n\nimpl Control for MenuItem {\n\n fn query_component(&self, type_id: TypeId) -> Option<&dyn Any> {\n", "file_path": "fyrox-ui/src/menu.rs", "rank": 30, "score": 260789.83156928697 }, { "content": "fn create_int_view(ctx: &mut BuildContext, value: i32) -> Handle<UiNode> {\n\n NumericUpDownBuilder::new(WidgetBuilder::new().with_height(24.0))\n\n .with_value(value as f32)\n\n .with_precision(0)\n\n .with_max_value(i32::MAX as f32)\n\n .with_min_value(-i32::MAX as f32)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/material.rs", "rank": 31, "score": 258783.74770612235 }, { "content": "fn make_section(ctx: &mut BuildContext, name: &str) -> Handle<UiNode> {\n\n TreeBuilder::new(WidgetBuilder::new())\n\n .with_content(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_text(name)\n\n .build(ctx),\n\n )\n\n .build(ctx)\n\n}\n\n\n\nimpl SettingsWindow {\n\n pub fn new(engine: 
&mut GameEngine, sender: Sender<Message>, settings: &Settings) -> Self {\n\n let ok;\n\n let default;\n\n\n\n let ctx = &mut engine.user_interface.build_ctx();\n\n let text =\n\n \"Here you can select graphics settings to improve performance and/or to understand how \\\n\n you scene will look like with different graphics settings. Please note that these settings won't be saved \\\n\n with scene!\";\n", "file_path": "editor/src/settings/mod.rs", "rank": 32, "score": 258783.74770612238 }, { "content": "fn make_folder(ctx: &mut BuildContext, name: &str) -> Handle<UiNode> {\n\n TreeBuilder::new(WidgetBuilder::new())\n\n .with_content(\n\n TextBuilder::new(\n\n WidgetBuilder::new()\n\n .with_margin(Thickness::left(5.0))\n\n .with_foreground(Brush::Solid(Color::opaque(153, 217, 234))),\n\n )\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .with_text(name)\n\n .build(ctx),\n\n )\n\n .build(ctx)\n\n}\n\n\n\nimpl WorldViewer {\n\n pub fn new(ctx: &mut BuildContext, sender: Sender<Message>) -> Self {\n\n let track_selection_state = true;\n\n let tree_root;\n\n let node_path;\n", "file_path": "editor/src/world/mod.rs", "rank": 33, "score": 258783.74770612238 }, { "content": "fn create_float_view(ctx: &mut BuildContext, value: f32) -> Handle<UiNode> {\n\n NumericUpDownBuilder::new(WidgetBuilder::new().with_height(24.0))\n\n .with_value(value)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/material.rs", "rank": 34, "score": 258783.74770612235 }, { "content": "fn create_uint_view(ctx: &mut BuildContext, value: u32) -> Handle<UiNode> {\n\n NumericUpDownBuilder::new(WidgetBuilder::new().with_height(24.0))\n\n .with_value(value as f32)\n\n .with_precision(0)\n\n .with_max_value(u32::MAX as f32)\n\n .with_min_value(0.0)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/material.rs", "rank": 35, "score": 258783.74770612235 }, { "content": "fn mark_handled(message: UiMessage) -> UiMessage {\n\n message.set_handled(true);\n\n message\n\n}\n\n\n\nimpl 
ColorPicker {\n\n fn sync_fields(&self, ui: &mut UserInterface, color: Color, hsv: Hsv) {\n\n ui.send_message(mark_handled(NumericUpDownMessage::value(\n\n self.hue,\n\n MessageDirection::ToWidget,\n\n hsv.hue(),\n\n )));\n\n\n\n ui.send_message(mark_handled(NumericUpDownMessage::value(\n\n self.saturation,\n\n MessageDirection::ToWidget,\n\n hsv.saturation(),\n\n )));\n\n\n\n ui.send_message(mark_handled(NumericUpDownMessage::value(\n", "file_path": "fyrox-ui/src/color.rs", "rank": 36, "score": 257153.32518538155 }, { "content": "fn create_item_views(items: &[Item], ctx: &mut BuildContext) -> Vec<Handle<UiNode>> {\n\n items\n\n .iter()\n\n .enumerate()\n\n .map(|(n, item)| {\n\n BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .with_child(\n\n ExpanderBuilder::new(WidgetBuilder::new())\n\n .with_header(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .with_text(format!(\"Item {}\", n))\n\n .build(ctx),\n\n )\n\n .with_content(item.inspector)\n\n .build(ctx),\n\n )\n\n .with_foreground(Brush::Solid(Color::opaque(130, 130, 130))),\n\n )\n\n .build(ctx)\n\n })\n\n .collect::<Vec<_>>()\n\n}\n\n\n", "file_path": "fyrox-ui/src/inspector/editors/array.rs", "rank": 37, "score": 256464.76677425567 }, { "content": "fn create_header(ctx: &mut BuildContext, text: &str, layer_index: usize) -> Handle<UiNode> {\n\n TextBuilder::new(WidgetBuilder::new().with_margin(make_property_margin(layer_index)))\n\n .with_text(text)\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "fyrox-ui/src/inspector/mod.rs", "rank": 38, "score": 253278.55389415417 }, { "content": "fn create_vec4_view(ctx: &mut BuildContext, value: Vector4<f32>) -> Handle<UiNode> {\n\n Vec4EditorBuilder::new(WidgetBuilder::new().with_height(24.0))\n\n .with_value(value)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/material.rs", "rank": 39, "score": 251802.78199017074 }, { "content": "fn 
create_vec3_view(ctx: &mut BuildContext, value: Vector3<f32>) -> Handle<UiNode> {\n\n Vec3EditorBuilder::new(WidgetBuilder::new().with_height(24.0))\n\n .with_value(value)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/material.rs", "rank": 40, "score": 251802.78199017074 }, { "content": "fn create_vec2_view(ctx: &mut BuildContext, value: Vector2<f32>) -> Handle<UiNode> {\n\n Vec2EditorBuilder::new(WidgetBuilder::new().with_height(24.0))\n\n .with_value(value)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/material.rs", "rank": 41, "score": 251802.78199017074 }, { "content": "pub trait InspectableEnum: Debug + Inspect + 'static {}\n\n\n\nimpl<T: Debug + Inspect + 'static> InspectableEnum for T {}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum EnumPropertyEditorMessage {\n\n Variant(usize),\n\n PropertyChanged(PropertyChanged),\n\n}\n\n\n\nimpl EnumPropertyEditorMessage {\n\n define_constructor!(EnumPropertyEditorMessage:Variant => fn variant(usize), layout: false);\n\n define_constructor!(EnumPropertyEditorMessage:PropertyChanged => fn property_changed(PropertyChanged), layout: false);\n\n}\n\n\n\npub struct EnumPropertyEditor<T: InspectableEnum> {\n\n widget: Widget,\n\n variant_selector: Handle<UiNode>,\n\n inspector: Handle<UiNode>,\n\n definition: EnumPropertyEditorDefinition<T>,\n", "file_path": "fyrox-ui/src/inspector/editors/enumeration.rs", "rank": 42, "score": 250992.0966807031 }, { "content": "fn create_file_selector(ctx: &mut BuildContext, mode: FileBrowserMode) -> Handle<UiNode> {\n\n FileSelectorBuilder::new(\n\n WindowBuilder::new(WidgetBuilder::new().with_width(300.0).with_height(400.0)).open(false),\n\n )\n\n .with_filter(Filter::new(|path| {\n\n if let Some(ext) = path.extension() {\n\n ext.to_string_lossy().as_ref() == \"crv\"\n\n } else {\n\n path.is_dir()\n\n }\n\n }))\n\n .with_mode(mode)\n\n .build(ctx)\n\n}\n\n\n\nimpl CurveEditorWindow {\n\n pub fn new(ctx: &mut BuildContext) -> Self {\n\n let load_file_selector = 
create_file_selector(ctx, FileBrowserMode::Open);\n\n let save_file_selector = create_file_selector(\n\n ctx,\n", "file_path": "editor/src/curve_editor.rs", "rank": 43, "score": 248580.6436307259 }, { "content": "pub fn is_slice_equal_permutation<T: PartialEq>(a: &[T], b: &[T]) -> bool {\n\n if a.is_empty() && !b.is_empty() {\n\n false\n\n } else {\n\n // TODO: Find a way to do this faster.\n\n for source in a.iter() {\n\n let mut found = false;\n\n for other in b.iter() {\n\n if other == source {\n\n found = true;\n\n break;\n\n }\n\n }\n\n if !found {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n}\n", "file_path": "editor/src/utils/mod.rs", "rank": 44, "score": 247068.72210556275 }, { "content": "fn make_text_mark(ctx: &mut BuildContext, text: &str, row: usize, column: usize) -> Handle<UiNode> {\n\n TextBuilder::new(\n\n WidgetBuilder::new()\n\n .with_vertical_alignment(VerticalAlignment::Center)\n\n .on_row(row)\n\n .on_column(column),\n\n )\n\n .with_text(text)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "fyrox-ui/src/color.rs", "rank": 45, "score": 244575.69759887015 }, { "content": "fn make_text_mark(ctx: &mut BuildContext, text: &str, row: usize) -> Handle<UiNode> {\n\n TextBuilder::new(\n\n WidgetBuilder::new()\n\n .with_vertical_alignment(VerticalAlignment::Center)\n\n .with_margin(Thickness::left(4.0))\n\n .on_row(row)\n\n .on_column(0),\n\n )\n\n .with_text(text)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "editor/src/settings/mod.rs", "rank": 46, "score": 241958.23161410794 }, { "content": "pub fn make_property_margin(layer_index: usize) -> Thickness {\n\n let mut margin = HEADER_MARGIN;\n\n margin.left += 10.0 + layer_index as f32 * 10.0;\n\n margin\n\n}\n\n\n", "file_path": "fyrox-ui/src/inspector/mod.rs", "rank": 47, "score": 240834.07426687283 }, { "content": "fn is_vorbis_ogg(source: &mut DataSource) -> bool {\n\n let pos = source.seek(SeekFrom::Current(0)).unwrap();\n\n\n\n let is_vorbis = OggStreamReader::new(source.by_ref()).is_ok();\n\n\n\n 
source.seek(SeekFrom::Start(pos)).unwrap();\n\n\n\n is_vorbis\n\n}\n\n\n\nimpl OggDecoder {\n\n pub fn new(mut source: DataSource) -> Result<Self, DataSource> {\n\n if is_vorbis_ogg(&mut source) {\n\n let mut reader = OggStreamReader::new(source).unwrap();\n\n\n\n let samples = if let Ok(Some(samples)) =\n\n reader.read_dec_packet_generic::<InterleavedSamples<f32>>()\n\n {\n\n samples.samples.into_iter()\n\n } else {\n", "file_path": "fyrox-sound/src/decoder/vorbis.rs", "rank": 48, "score": 238744.4579825287 }, { "content": "#[inline]\n\npub fn wrapf(mut n: f32, mut min_limit: f32, mut max_limit: f32) -> f32 {\n\n if n >= min_limit && n <= max_limit {\n\n return n;\n\n }\n\n\n\n if max_limit == 0.0 && min_limit == 0.0 {\n\n return 0.0;\n\n }\n\n\n\n max_limit -= min_limit;\n\n\n\n let offset = min_limit;\n\n min_limit = 0.0;\n\n n -= offset;\n\n\n\n let num_of_max = (n / max_limit).abs().floor();\n\n\n\n if n >= max_limit {\n\n n -= num_of_max * max_limit;\n\n } else if n < min_limit {\n\n n += (num_of_max + 1.0) * max_limit;\n\n }\n\n\n\n n + offset\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 49, "score": 235631.95724495326 }, { "content": "pub fn fix_shadows_distance(mut quality: QualitySettings) -> QualitySettings {\n\n // Scale distance because game world has different scale.\n\n quality.spot_shadows_distance *= 2.0;\n\n quality.point_shadows_distance *= 2.0;\n\n quality\n\n}\n", "file_path": "examples/shared/mod.rs", "rank": 50, "score": 229451.52428068052 }, { "content": "pub fn gen_inspect_fn_body(\n\n field_prefix: FieldPrefix,\n\n field_args: &ast::Fields<args::FieldArgs>,\n\n) -> TokenStream2 {\n\n // `inspect` function body, consisting of a sequence of quotes\n\n let mut quotes = Vec::new();\n\n\n\n // 1. 
collect non-expanible field properties\n\n let props = field_args\n\n .fields\n\n .iter()\n\n .enumerate()\n\n .filter(|(_i, f)| !(f.skip || f.expand || f.expand_subtree))\n\n .map(|(i, field)| self::quote_field_prop(field_prefix, i, field, field_args.style));\n\n\n\n quotes.push(quote! {\n\n let mut props = Vec::new();\n\n #(props.push(#props);)*\n\n });\n\n\n", "file_path": "fyrox-core-derive/src/inspect/utils.rs", "rank": 51, "score": 228106.0616547129 }, { "content": "// User interface in the engine build up on graph data structure, on tree to be\n\n// more precise. Each UI element can have single parent and multiple children.\n\n// UI uses complex layout system which automatically organizes your widgets.\n\n// In this example we'll use Grid and StackPanel layout controls. Grid can be\n\n// divided in rows and columns, its child element can set their desired column\n\n// and row and grid will automatically put them in correct position. StackPanel\n\n// will \"stack\" UI elements either on top of each other or in one line. Such\n\n// complex layout system was borrowed from WPF framework. 
You can read more here:\n\n// https://docs.microsoft.com/en-us/dotnet/framework/wpf/advanced/layout\n\nfn create_ui(engine: &mut Engine) -> Interface {\n\n let window_width = engine.renderer.get_frame_size().0 as f32;\n\n\n\n // Gather all suitable video modes, we'll use them to fill combo box of\n\n // available resolutions.\n\n let video_modes = engine\n\n .get_window()\n\n .primary_monitor()\n\n .unwrap()\n\n .video_modes()\n\n .filter(|vm| {\n\n // Leave only modern video modes, we are not in 1998.\n\n vm.size().width > 800 && vm.size().height > 600 && vm.bit_depth() == 32\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n let ctx = &mut engine.user_interface.build_ctx();\n\n\n\n // First of all create debug text that will show title of example and current FPS.\n\n let debug_text = TextBuilder::new(WidgetBuilder::new()).build(ctx);\n", "file_path": "examples/ui.rs", "rank": 52, "score": 227821.50171376468 }, { "content": "pub fn make_arrow(\n\n ctx: &mut BuildContext,\n\n orientation: ArrowDirection,\n\n size: f32,\n\n) -> Handle<UiNode> {\n\n VectorImageBuilder::new(\n\n WidgetBuilder::new()\n\n .with_foreground(BRUSH_BRIGHT)\n\n .with_horizontal_alignment(HorizontalAlignment::Center)\n\n .with_vertical_alignment(VerticalAlignment::Center),\n\n )\n\n .with_primitives(make_arrow_primitives(orientation, size))\n\n .build(ctx)\n\n}\n\n\n", "file_path": "fyrox-ui/src/utils.rs", "rank": 53, "score": 227174.64824398299 }, { "content": "#[inline]\n\npub fn barycentric_is_inside(bary: (f32, f32, f32)) -> bool {\n\n (bary.0 >= 0.0) && (bary.1 >= 0.0) && (bary.0 + bary.1 < 1.0)\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 54, "score": 225965.04298229743 }, { "content": "pub fn set_mesh_diffuse_color(mesh: &mut Mesh, color: Color) {\n\n for surface in mesh.surfaces() {\n\n surface\n\n .material()\n\n .lock()\n\n .set_property(\n\n &ImmutableString::new(\"diffuseColor\"),\n\n PropertyValue::Color(color),\n\n )\n\n .unwrap();\n\n }\n\n}\n\n\n", "file_path": 
"editor/src/main.rs", "rank": 55, "score": 225480.43185715337 }, { "content": "// MenuItem uses popup to show its content, popup can be top-most only if it is\n\n// direct child of root canvas of UI. This fact adds some complications to search\n\n// of parent menu - we can't just traverse the tree because popup is not a child\n\n// of menu item, instead we trying to fetch handle to parent menu item from popup's\n\n// user data and continue up-search until we find menu.\n\nfn find_menu(from: Handle<UiNode>, ui: &UserInterface) -> Handle<UiNode> {\n\n let mut handle = from;\n\n while handle.is_some() {\n\n if let Some((_, popup)) = ui.try_borrow_by_type_up::<Popup>(handle) {\n\n // Continue search from parent menu item of popup.\n\n handle = popup\n\n .user_data_ref::<Handle<UiNode>>()\n\n .cloned()\n\n .unwrap_or_default();\n\n } else {\n\n // Maybe we have Menu as parent for MenuItem.\n\n return ui.find_by_criteria_up(handle, |n| n.cast::<Menu>().is_some());\n\n }\n\n }\n\n Default::default()\n\n}\n\n\n", "file_path": "fyrox-ui/src/menu.rs", "rank": 56, "score": 225228.0648068555 }, { "content": "pub fn make_mark(\n\n ctx: &mut BuildContext,\n\n text: &str,\n\n column: usize,\n\n color: Color,\n\n) -> Handle<UiNode> {\n\n BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .on_row(0)\n\n .on_column(column)\n\n .with_background(Brush::Solid(color))\n\n .with_foreground(Brush::Solid(Color::TRANSPARENT))\n\n .with_child(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .with_text(text)\n\n .build(ctx),\n\n ),\n\n )\n\n .build(ctx)\n\n}\n", "file_path": "fyrox-ui/src/vec/mod.rs", "rank": 57, "score": 223320.30839725258 }, { "content": "fn create_ui(ctx: &mut BuildContext, screen_size: Vector2<f32>) -> Interface {\n\n let debug_text;\n\n let progress_bar;\n\n let progress_text;\n\n let cancel;\n\n let progress_grid;\n\n let root = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(screen_size.x)\n\n 
.with_height(screen_size.y)\n\n .with_child({\n\n debug_text =\n\n TextBuilder::new(WidgetBuilder::new().on_row(0).on_column(0)).build(ctx);\n\n debug_text\n\n })\n\n .with_child({\n\n progress_grid = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_visibility(!Path::new(LIGHTMAP_SCENE_PATH).exists())\n\n .on_column(1)\n", "file_path": "examples/lightmap.rs", "rank": 58, "score": 222185.47080912878 }, { "content": "fn create_ui(ctx: &mut BuildContext, screen_size: Vector2<f32>) -> Interface {\n\n let debug_text;\n\n let progress_bar;\n\n let progress_text;\n\n let root = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(screen_size.x)\n\n .with_height(screen_size.y)\n\n .with_child({\n\n debug_text =\n\n TextBuilder::new(WidgetBuilder::new().on_row(0).on_column(0)).build(ctx);\n\n debug_text\n\n })\n\n .with_child({\n\n progress_bar =\n\n ProgressBarBuilder::new(WidgetBuilder::new().on_row(1).on_column(1)).build(ctx);\n\n progress_bar\n\n })\n\n .with_child({\n\n progress_text = TextBuilder::new(\n", "file_path": "examples/async.rs", "rank": 59, "score": 222185.47080912878 }, { "content": "pub fn create_impl(\n\n ty_args: &args::TypeArgs,\n\n field_args: impl Iterator<Item = args::FieldArgs>,\n\n impl_body: TokenStream2,\n\n) -> TokenStream2 {\n\n let ty_ident = &ty_args.ident;\n\n let generics = self::create_impl_generics(&ty_args.generics, field_args);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n quote! 
{\n\n impl #impl_generics Visit for #ty_ident #ty_generics #where_clause {\n\n fn visit(\n\n &mut self,\n\n name: &str,\n\n visitor: &mut Visitor,\n\n ) -> VisitResult {\n\n #impl_body\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/visit/utils.rs", "rank": 60, "score": 220963.75301479013 }, { "content": "pub fn make_expander_container(\n\n layer_index: usize,\n\n property_name: &str,\n\n header: Handle<UiNode>,\n\n content: Handle<UiNode>,\n\n ctx: &mut BuildContext,\n\n) -> Handle<UiNode> {\n\n ExpanderBuilder::new(WidgetBuilder::new())\n\n .with_checkbox(make_expander_check_box(layer_index, property_name, ctx))\n\n .with_expander_column(Column::strict(NAME_COLUMN_WIDTH))\n\n .with_expanded(true)\n\n .with_header(header)\n\n .with_content(content)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "fyrox-ui/src/inspector/mod.rs", "rank": 61, "score": 219657.1337289071 }, { "content": "#[inline]\n\npub fn quat_from_euler<T: SimdRealField + RealField + Copy + Clone>(\n\n euler_radians: Vector3<T>,\n\n order: RotationOrder,\n\n) -> UnitQuaternion<T> {\n\n let qx = UnitQuaternion::from_axis_angle(&Vector3::x_axis(), euler_radians.x);\n\n let qy = UnitQuaternion::from_axis_angle(&Vector3::y_axis(), euler_radians.y);\n\n let qz = UnitQuaternion::from_axis_angle(&Vector3::z_axis(), euler_radians.z);\n\n match order {\n\n RotationOrder::XYZ => qz * qy * qx,\n\n RotationOrder::XZY => qy * qz * qx,\n\n RotationOrder::YZX => qx * qz * qy,\n\n RotationOrder::YXZ => qz * qx * qy,\n\n RotationOrder::ZXY => qy * qx * qz,\n\n RotationOrder::ZYX => qx * qy * qz,\n\n }\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 62, "score": 217663.68145404154 }, { "content": "/// Saves given `data` and overwrites `data_default` with the saved data.\n\n///\n\n/// Test the equality after running this method!\n\npub fn save_load<T: Visit>(test_name: &str, data: &mut T, data_default: &mut T) {\n\n // Locate output path\n\n let (bin, txt) = {\n\n let manifest_dir = 
env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n let root = PathBuf::from(manifest_dir).join(\"test_output\");\n\n let _ = std::fs::create_dir(&root);\n\n (\n\n root.join(format!(\"{}.bin\", test_name)),\n\n root.join(format!(\"{}.txt\", test_name)),\n\n )\n\n };\n\n\n\n // Save `data`\n\n {\n\n let mut visitor = Visitor::new();\n\n data.visit(\"Data\", &mut visitor).unwrap();\n\n\n\n visitor.save_binary(&bin).unwrap();\n\n let mut file = File::create(&txt).unwrap();\n\n file.write_all(visitor.save_text().as_bytes()).unwrap();\n", "file_path": "fyrox-core-derive/tests/it/visit.rs", "rank": 63, "score": 217458.25625783327 }, { "content": "pub fn handle_prismatic_joint(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n) -> Option<SceneCommand> {\n\n handle_property_changed!(args, handle,\n\n PrismaticJoint::LOCAL_ANCHOR_1 => SetPrismaticJointAnchor1Command,\n\n PrismaticJoint::LOCAL_ANCHOR_2 => SetPrismaticJointAnchor2Command,\n\n PrismaticJoint::LOCAL_AXIS_1 => SetPrismaticJointAxis1Command,\n\n PrismaticJoint::LOCAL_AXIS_2 => SetPrismaticJointAxis2Command\n\n )\n\n}\n\n\n", "file_path": "editor/src/inspector/handlers/node/joint.rs", "rank": 64, "score": 217354.11843427707 }, { "content": "pub fn handle_prismatic_joint(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n) -> Option<SceneCommand> {\n\n handle_property_changed!(args, handle,\n\n PrismaticJoint::LOCAL_ANCHOR_1 => SetPrismaticJointAnchor1Command,\n\n PrismaticJoint::LOCAL_ANCHOR_2 => SetPrismaticJointAnchor2Command,\n\n PrismaticJoint::LOCAL_AXIS_1 => SetPrismaticJointAxis1Command,\n\n PrismaticJoint::LOCAL_AXIS_2 => SetPrismaticJointAxis2Command\n\n )\n\n}\n\n\n", "file_path": "editor/src/inspector/handlers/node/joint2d.rs", "rank": 65, "score": 217354.11843427707 }, { "content": "fn item_effect(item: Handle<UiNode>, ui: &UserInterface) -> Handle<Effect> {\n\n *ui.node(item)\n\n .user_data_ref::<Handle<Effect>>()\n\n .expect(\"Must be Handle<Effect>\")\n\n}\n\n\n\nimpl AudioPanel {\n\n pub 
fn new(engine: &mut Engine) -> Self {\n\n let ctx = &mut engine.user_interface.build_ctx();\n\n\n\n let edit_context;\n\n let add_effect;\n\n let effects;\n\n let window = WindowBuilder::new(WidgetBuilder::new())\n\n .with_content(\n\n GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_child({\n\n effects =\n\n ListViewBuilder::new(WidgetBuilder::new().on_row(0)).build(ctx);\n", "file_path": "editor/src/audio.rs", "rank": 66, "score": 216226.6305234262 }, { "content": "#[inline]\n\npub fn m4x4_approx_eq(a: &Matrix4<f32>, b: &Matrix4<f32>) -> bool {\n\n a.iter()\n\n .zip(b.iter())\n\n .all(|(a, b)| (*a - *b).abs() <= 0.001)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::algebra::Vector2;\n\n use crate::math::Rect;\n\n use crate::math::SmoothAngle;\n\n\n\n #[test]\n\n fn ray_rect_intersection() {\n\n let rect = Rect::new(0.0, 0.0, 10.0, 10.0);\n\n\n\n // Edge-case: Horizontal ray.\n\n assert!(super::ray_rect_intersection(\n\n rect,\n\n Vector2::new(-1.0, 5.0),\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 67, "score": 215971.44394380652 }, { "content": "pub trait PropertyEditorDefinition: Debug {\n\n fn value_type_id(&self) -> TypeId;\n\n\n\n fn create_instance(\n\n &self,\n\n ctx: PropertyEditorBuildContext,\n\n ) -> Result<PropertyEditorInstance, InspectorError>;\n\n\n\n fn create_message(\n\n &self,\n\n ctx: PropertyEditorMessageContext,\n\n ) -> Result<Option<UiMessage>, InspectorError>;\n\n\n\n fn translate_message(\n\n &self,\n\n name: &str,\n\n owner_type_id: TypeId,\n\n message: &UiMessage,\n\n ) -> Option<PropertyChanged>;\n\n}\n", "file_path": "fyrox-ui/src/inspector/editors/mod.rs", "rank": 68, "score": 215872.35590000526 }, { "content": "fn create_ui(engine: &mut Engine) -> Interface {\n\n let ctx = &mut engine.user_interface.build_ctx();\n\n\n\n let debug_text = TextBuilder::new(WidgetBuilder::new()).build(ctx);\n\n\n\n let definition_container = Rc::new(PropertyEditorDefinitionContainer::new());\n\n\n\n let inspector;\n\n 
WindowBuilder::new(WidgetBuilder::new().with_width(400.0))\n\n .with_title(WindowTitle::text(\"Inspector\"))\n\n .with_content({\n\n inspector = InspectorBuilder::new(\n\n WidgetBuilder::new().with_desired_position(Vector2::new(200.0, 200.0)),\n\n )\n\n .build(ctx);\n\n inspector\n\n })\n\n .build(ctx);\n\n\n\n Interface {\n\n debug_text,\n\n inspector,\n\n definition_container,\n\n }\n\n}\n\n\n", "file_path": "examples/inspector.rs", "rank": 69, "score": 214639.2353383312 }, { "content": "pub fn handle_transform_property_changed(\n\n args: &PropertyChanged,\n\n node_handle: Handle<Node>,\n\n base: &Base,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n \"local_position\" => Some(SceneCommand::new(MoveNodeCommand::new(\n\n node_handle,\n\n **base.local_transform().position(),\n\n *value.cast_value()?,\n\n ))),\n\n \"local_rotation\" => Some(SceneCommand::new(RotateNodeCommand::new(\n\n node_handle,\n\n **base.local_transform().rotation(),\n\n *value.cast_value()?,\n\n ))),\n\n \"local_scale\" => Some(SceneCommand::new(ScaleNodeCommand::new(\n\n node_handle,\n\n **base.local_transform().scale(),\n", "file_path": "editor/src/inspector/handlers/node/transform.rs", "rank": 70, "score": 214005.00475839543 }, { "content": "pub fn handle_rectangle_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n if node.is_rectangle() {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n Rectangle::TEXTURE => {\n\n make_command!(SetRectangleTextureCommand, handle, value)\n\n }\n\n Rectangle::COLOR => {\n\n make_command!(SetRectangleColorCommand, handle, value)\n\n }\n\n _ => None,\n\n },\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n Rectangle::BASE => handle_base_property_changed(inner, handle, node),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n", 
"file_path": "editor/src/inspector/handlers/node/rectangle.rs", "rank": 71, "score": 214005.00475839543 }, { "content": "pub fn handle_base_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n base: &Base,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Base::NAME => SetNameCommand,\n\n Base::TAG => SetTagCommand,\n\n Base::FRUSTUM_CULLING => SetFrustumCullingCommand,\n\n Base::VISIBILITY => SetVisibleCommand,\n\n Base::MOBILITY => SetMobilityCommand,\n\n Base::LIFETIME => SetLifetimeCommand,\n\n Base::DEPTH_OFFSET => SetDepthOffsetCommand,\n\n Base::LOD_GROUP => SetLodGroupCommand,\n\n Base::CAST_SHADOWS => SetCastShadowsCommand\n\n )\n\n }\n\n FieldKind::Collection(ref collection_changed) => match args.name.as_ref() {\n", "file_path": "editor/src/inspector/handlers/node/base.rs", "rank": 72, "score": 214005.00475839543 }, { "content": "pub fn handle_pivot_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n Sprite::BASE => handle_base_property_changed(inner, handle, node),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n}\n", "file_path": "editor/src/inspector/handlers/node/pivot.rs", "rank": 73, "score": 214005.00475839543 }, { "content": "pub fn handle_collider2d_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n collider: &Collider,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Collider::FRICTION => SetColliderFrictionCommand,\n\n Collider::RESTITUTION => SetColliderRestitutionCommand,\n\n Collider::IS_SENSOR => SetColliderIsSensorCommand,\n\n Collider::DENSITY => SetColliderDensityCommand,\n\n Collider::SHAPE => SetColliderShapeCommand\n\n )\n\n 
}\n\n FieldKind::Inspectable(ref inner_property) => match args.name.as_ref() {\n\n Collider::COLLISION_GROUPS => match inner_property.value {\n\n FieldKind::Object(ref value) => match inner_property.name.as_ref() {\n\n InteractionGroups::MEMBERSHIPS => {\n\n let mut new_value = collider.collision_groups();\n", "file_path": "editor/src/inspector/handlers/node/collider2d.rs", "rank": 74, "score": 214005.00475839543 }, { "content": "pub fn handle_sound_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => match args.name.as_ref() {\n\n Sound::GAIN => {\n\n make_command!(SetSoundSourceGainCommand, handle, value)\n\n }\n\n Sound::BUFFER => {\n\n make_command!(SetSoundSourceBufferCommand, handle, value)\n\n }\n\n Sound::PANNING => {\n\n make_command!(SetSoundSourcePanningCommand, handle, value)\n\n }\n\n Sound::PITCH => {\n\n make_command!(SetSoundSourcePitchCommand, handle, value)\n\n }\n\n Sound::LOOPING => {\n\n make_command!(SetSoundSourceLoopingCommand, handle, value)\n", "file_path": "editor/src/inspector/handlers/node/sound.rs", "rank": 75, "score": 214005.00475839543 }, { "content": "pub fn handle_decal_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n if node.is_decal() {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Decal::DIFFUSE_TEXTURE => SetDecalDiffuseTextureCommand,\n\n Decal::NORMAL_TEXTURE => SetDecalNormalTextureCommand,\n\n Decal::COLOR => SetDecalColorCommand,\n\n Decal::LAYER => SetDecalLayerIndexCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n Decal::BASE => handle_base_property_changed(inner, handle, node),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "editor/src/inspector/handlers/node/decal.rs", 
"rank": 76, "score": 214005.00475839543 }, { "content": "pub fn handle_mesh_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n if node.is_mesh() {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Mesh::RENDER_PATH => SetMeshRenderPathCommand,\n\n Mesh::DECAL_LAYER_INDEX => SetMeshDecalLayerIndexCommand\n\n )\n\n }\n\n FieldKind::Collection(ref args) => match **args {\n\n CollectionChanged::Add => {\n\n // TODO\n\n None\n\n }\n\n CollectionChanged::Remove(_) => {\n\n // TODO\n", "file_path": "editor/src/inspector/handlers/node/mesh.rs", "rank": 77, "score": 214005.00475839543 }, { "content": "pub fn handle_camera_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n if let Some(camera) = node.cast::<Camera>() {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Camera::EXPOSURE => SetExposureCommand,\n\n Camera::PROJECTION => SetProjectionCommand,\n\n Camera::VIEWPORT => SetViewportCommand,\n\n Camera::ENABLED => SetCameraPreviewCommand,\n\n Camera::SKY_BOX => SetSkyBoxCommand,\n\n Camera::ENVIRONMENT => SetEnvironmentMap,\n\n Camera::COLOR_GRADING_LUT => SetColorGradingLutCommand,\n\n Camera::COLOR_GRADING_ENABLED => SetColorGradingEnabledCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n", "file_path": "editor/src/inspector/handlers/node/camera.rs", "rank": 78, "score": 214005.00475839543 }, { "content": "pub fn handle_reverb_effect_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Effect>,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n ReverbEffect::DRY => SetReverbDryCommand,\n\n ReverbEffect::WET => 
SetReverbWetCommand,\n\n ReverbEffect::FC => SetReverbFcCommand,\n\n ReverbEffect::DECAY_TIME => SetReverbDecayTimeCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n ReverbEffect::BASE => handle_base_effect_property_changed(inner, handle),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "editor/src/inspector/handlers/effect.rs", "rank": 79, "score": 214005.00475839543 }, { "content": "pub fn handle_joint2d_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n joint: &Joint,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Joint::BODY_1 => SetJointBody1Command,\n\n Joint::BODY_2 => SetJointBody2Command\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n Joint::PARAMS => {\n\n if inner.owner_type_id == TypeId::of::<BallJoint>() {\n\n handle_ball_joint(inner, handle)\n\n } else if inner.owner_type_id == TypeId::of::<FixedJoint>() {\n\n handle_fixed_joint(inner, handle)\n\n } else if inner.owner_type_id == TypeId::of::<PrismaticJoint>() {\n\n handle_prismatic_joint(inner, handle)\n", "file_path": "editor/src/inspector/handlers/node/joint2d.rs", "rank": 80, "score": 214005.00475839543 }, { "content": "pub fn handle_sprite_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n if node.is_sprite() {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Sprite::TEXTURE => SetSpriteTextureCommand,\n\n Sprite::COLOR => SetSpriteColorCommand,\n\n Sprite::SIZE => SetSpriteSizeCommand,\n\n Sprite::ROTATION => SetSpriteRotationCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n Sprite::BASE => handle_base_property_changed(inner, handle, node),\n\n _ => None,\n\n },\n\n _ => None,\n\n 
}\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "editor/src/inspector/handlers/node/sprite.rs", "rank": 81, "score": 214005.00475839543 }, { "content": "pub fn handle_base_effect_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Effect>,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n BaseEffect::NAME => SetNameCommand,\n\n BaseEffect::GAIN => SetGainCommand\n\n )\n\n }\n\n FieldKind::Collection(ref collection_changed) => match args.name.as_ref() {\n\n BaseEffect::INPUTS => match **collection_changed {\n\n CollectionChanged::Add => Some(SceneCommand::new(AddInputCommand {\n\n handle,\n\n value: Default::default(),\n\n })),\n\n CollectionChanged::Remove(i) => Some(SceneCommand::new(RemoveInputCommand {\n\n handle,\n\n index: i,\n", "file_path": "editor/src/inspector/handlers/effect.rs", "rank": 82, "score": 214005.00475839543 }, { "content": "pub fn handle_ortho_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n) -> Option<SceneCommand> {\n\n handle_property_changed!(args, handle,\n\n OrthographicProjection::Z_NEAR => SetOrthoZNear,\n\n OrthographicProjection::Z_FAR => SetOrthoZFar,\n\n OrthographicProjection::VERTICAL_SIZE => SetOrthoVerticalSize\n\n )\n\n}\n", "file_path": "editor/src/inspector/handlers/node/camera.rs", "rank": 83, "score": 214005.00475839543 }, { "content": "pub fn handle_joint_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n joint: &Joint,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Joint::BODY_1 => SetJointBody1Command,\n\n Joint::BODY_2 => SetJointBody2Command\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n Joint::PARAMS => {\n\n if inner.owner_type_id == TypeId::of::<BallJoint>() {\n\n handle_ball_joint(inner, handle)\n\n } 
else if inner.owner_type_id == TypeId::of::<RevoluteJoint>() {\n\n handle_revolute_joint(inner, handle)\n\n } else if inner.owner_type_id == TypeId::of::<FixedJoint>() {\n\n handle_fixed_joint(inner, handle)\n", "file_path": "editor/src/inspector/handlers/node/joint.rs", "rank": 84, "score": 214005.00475839543 }, { "content": "pub fn handle_listener_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n if node.is_listener() {\n\n match args.value {\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n Listener::BASE => handle_base_property_changed(inner, handle, node),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "editor/src/inspector/handlers/node/listener.rs", "rank": 85, "score": 214005.00475839543 }, { "content": "pub fn handle_perspective_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n) -> Option<SceneCommand> {\n\n handle_property_changed!(args, handle,\n\n PerspectiveProjection::Z_NEAR => SetPerspectiveZNear,\n\n PerspectiveProjection::Z_FAR => SetPerspectiveZFar,\n\n PerspectiveProjection::FOV => SetPerspectiveFov\n\n )\n\n}\n\n\n", "file_path": "editor/src/inspector/handlers/node/camera.rs", "rank": 86, "score": 214005.00475839543 }, { "content": "pub fn handle_terrain_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n graph: &Graph,\n\n) -> Option<SceneCommand> {\n\n if node.is_terrain() {\n\n match args.value {\n\n FieldKind::Collection(ref collection_changed) => match args.name.as_ref() {\n\n Terrain::LAYERS => match &**collection_changed {\n\n CollectionChanged::Add => Some(SceneCommand::new(AddTerrainLayerCommand::new(\n\n handle, graph,\n\n ))),\n\n CollectionChanged::Remove(index) => Some(SceneCommand::new(\n\n DeleteTerrainLayerCommand::new(handle, *index),\n\n )),\n\n CollectionChanged::ItemChanged { index, property } => {\n\n 
assert_eq!(property.owner_type_id, TypeId::of::<Layer>());\n\n match property.value {\n\n FieldKind::Object(ref args) => match property.name.as_ref() {\n", "file_path": "editor/src/inspector/handlers/node/terrain.rs", "rank": 87, "score": 214005.00475839543 }, { "content": "pub fn handle_collider_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n collider: &Collider,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n Collider::FRICTION => SetColliderFrictionCommand,\n\n Collider::RESTITUTION => SetColliderRestitutionCommand,\n\n Collider::IS_SENSOR => SetColliderIsSensorCommand,\n\n Collider::DENSITY => SetColliderDensityCommand,\n\n Collider::SHAPE => SetColliderShapeCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner_property) => match args.name.as_ref() {\n\n Collider::COLLISION_GROUPS => match inner_property.value {\n\n FieldKind::Object(ref value) => match inner_property.name.as_ref() {\n\n InteractionGroups::MEMBERSHIPS => {\n\n let mut new_value = collider.collision_groups();\n", "file_path": "editor/src/inspector/handlers/node/collider.rs", "rank": 88, "score": 214005.00475839543 }, { "content": "/// Creates `Inspect` trait impl and field prop keys\n\npub fn create_inspect_impl<'f>(\n\n ty_args: &args::TypeArgs,\n\n field_args: impl Iterator<Item = &'f args::FieldArgs>,\n\n impl_body: TokenStream2,\n\n) -> TokenStream2 {\n\n let prop_keys_impl = self::prop_keys_impl(ty_args);\n\n let trait_impl = self::inspect_trait_impl(ty_args, field_args, impl_body);\n\n\n\n quote! 
{\n\n #prop_keys_impl\n\n #trait_impl\n\n }\n\n}\n\n\n", "file_path": "fyrox-core-derive/src/inspect/utils.rs", "rank": 89, "score": 213762.7517073428 }, { "content": "fn tree_node(ui: &UserInterface, tree: Handle<UiNode>) -> Handle<Node> {\n\n if let Some(item) = ui.node(tree).cast::<SceneItem<Node>>() {\n\n return item.entity_handle;\n\n }\n\n unreachable!()\n\n}\n\n\n", "file_path": "editor/src/world/mod.rs", "rank": 90, "score": 213646.3547820106 }, { "content": "///\n\n/// Triangulates specified polygon.\n\n///\n\npub fn triangulate(vertices: &[Vector3<f32>], out_triangles: &mut Vec<[usize; 3]>) {\n\n out_triangles.clear();\n\n if vertices.len() == 3 {\n\n // Triangulating a triangle?\n\n out_triangles.push([0, 1, 2]);\n\n } else if vertices.len() == 4 {\n\n // Special case for quadrilaterals (much faster than generic)\n\n let mut start_vertex = 0;\n\n for i in 0..4 {\n\n let v = vertices[i];\n\n let v0 = vertices[(i + 3) % 4];\n\n if let Some(left) = (v0 - v).try_normalize(f32::EPSILON) {\n\n let v1 = vertices[(i + 2) % 4];\n\n if let Some(diag) = (v1 - v).try_normalize(f32::EPSILON) {\n\n let v2 = vertices[(i + 1) % 4];\n\n if let Some(right) = (v2 - v).try_normalize(f32::EPSILON) {\n\n // Check for concave vertex\n\n let angle = left.dot(&diag).acos() + right.dot(&diag).acos();\n\n if angle > std::f32::consts::PI {\n\n start_vertex = i;\n", "file_path": "fyrox-core/src/math/triangulator.rs", "rank": 91, "score": 212383.93254319852 }, { "content": "pub fn handle_point_light_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n if node.is_point_light() {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n PointLight::SHADOW_BIAS => SetPointLightShadowBiasCommand,\n\n PointLight::RADIUS => SetPointLightRadiusCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n PointLight::BASE_LIGHT 
=> handle_base_light_property_changed(inner, handle, node),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "editor/src/inspector/handlers/node/light.rs", "rank": 92, "score": 210810.49633913027 }, { "content": "pub fn handle_spot_light_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n if node.is_spot_light() {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n SpotLight::HOTSPOT_CONE_ANGLE => SetSpotLightHotspotCommand,\n\n SpotLight::FALLOFF_ANGLE_DELTA => SetSpotLightFalloffAngleDeltaCommand,\n\n SpotLight::SHADOW_BIAS => SetSpotLightShadowBiasCommand,\n\n SpotLight::DISTANCE => SetSpotLightDistanceCommand,\n\n SpotLight::COOKIE_TEXTURE => SetSpotLightCookieTextureCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n SpotLight::BASE_LIGHT => handle_base_light_property_changed(inner, handle, node),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "editor/src/inspector/handlers/node/light.rs", "rank": 93, "score": 210810.49633913027 }, { "content": "pub fn handle_base_light_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n BaseLight::COLOR => SetLightColorCommand,\n\n BaseLight::CAST_SHADOWS => SetLightCastShadowsCommand,\n\n BaseLight::SCATTER => SetLightScatterCommand,\n\n BaseLight::SCATTER_ENABLED => SetLightScatterEnabledCommand,\n\n BaseLight::INTENSITY => SetLightIntensityCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n BaseLight::BASE => handle_base_property_changed(inner, handle, node),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n}\n\n\n", 
"file_path": "editor/src/inspector/handlers/node/light.rs", "rank": 94, "score": 210810.49633913027 }, { "content": "pub fn handle_directional_light_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n node: &Node,\n\n) -> Option<SceneCommand> {\n\n if node.is_directional_light() {\n\n match args.value {\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n DirectionalLight::BASE_LIGHT => {\n\n handle_base_light_property_changed(inner, handle, node)\n\n }\n\n DirectionalLight::CSM_OPTIONS => match inner.name.as_ref() {\n\n CsmOptions::SPLIT_OPTIONS => match inner.value {\n\n FieldKind::Inspectable(ref split_options_value) => {\n\n if let FieldKind::Collection(ref collection_changed) =\n\n split_options_value.value\n\n {\n\n if let CollectionChanged::ItemChanged { .. } = **collection_changed\n\n {\n\n match split_options_value.name.as_ref() {\n", "file_path": "editor/src/inspector/handlers/node/light.rs", "rank": 95, "score": 210810.49633913027 }, { "content": "#[inline]\n\npub fn is_point_inside_triangle(p: &Vector3<f32>, vertices: &[Vector3<f32>; 3]) -> bool {\n\n let ba = vertices[1] - vertices[0];\n\n let ca = vertices[2] - vertices[0];\n\n let vp = *p - vertices[0];\n\n\n\n let ba_dot_ba = ba.dot(&ba);\n\n let ca_dot_ba = ca.dot(&ba);\n\n let ca_dot_ca = ca.dot(&ca);\n\n\n\n let dot02 = ca.dot(&vp);\n\n let dot12 = ba.dot(&vp);\n\n\n\n let inv_denom = 1.0 / (ca_dot_ca * ba_dot_ba - ca_dot_ba.powi(2));\n\n\n\n // Calculate barycentric coordinates\n\n let u = (ba_dot_ba * dot02 - ca_dot_ba * dot12) * inv_denom;\n\n let v = (ca_dot_ca * dot12 - ca_dot_ba * dot02) * inv_denom;\n\n\n\n (u >= 0.0) && (v >= 0.0) && (u + v < 1.0)\n\n}\n\n\n", "file_path": "fyrox-core/src/math/mod.rs", "rank": 96, "score": 210027.8932912117 }, { "content": "pub fn handle_rigid_body2d_property_changed(\n\n args: &PropertyChanged,\n\n handle: Handle<Node>,\n\n rigid_body: &RigidBody,\n\n) -> Option<SceneCommand> {\n\n match args.value {\n\n 
FieldKind::Object(ref value) => {\n\n handle_properties!(args.name.as_ref(), handle, value,\n\n RigidBody::MASS => SetBodyMassCommand,\n\n RigidBody::LIN_VEL => SetBodyLinVelCommand,\n\n RigidBody::ANG_VEL => SetBodyAngVelCommand,\n\n RigidBody::BODY_TYPE => SetBodyStatusCommand,\n\n RigidBody::ROTATION_LOCKED => SetBodyRotationLockedCommand,\n\n RigidBody::TRANSLATION_LOCKED => SetBodyTranslationLockedCommand,\n\n RigidBody::CAN_SLEEP => SetBodyCanSleepCommand,\n\n RigidBody::CCD_ENABLED => SetBodyCcdEnabledCommand\n\n )\n\n }\n\n FieldKind::Inspectable(ref inner) => match args.name.as_ref() {\n\n RigidBody::BASE => handle_base_property_changed(inner, handle, rigid_body),\n\n _ => None,\n\n },\n\n _ => None,\n\n }\n\n}\n", "file_path": "editor/src/inspector/handlers/node/rigid_body2d.rs", "rank": 97, "score": 207760.12913041213 }, { "content": "pub fn read_ascii<R>(reader: &mut R) -> Result<FbxDocument, FbxError>\n\nwhere\n\n R: Read + Seek,\n\n{\n\n let mut nodes: Pool<FbxNode> = Pool::new();\n\n let root_handle = nodes.spawn(FbxNode {\n\n name: String::from(\"__ROOT__\"),\n\n children: Vec::new(),\n\n parent: Handle::NONE,\n\n attributes: Vec::new(),\n\n });\n\n let mut parent_handle: Handle<FbxNode> = root_handle;\n\n let mut node_handle: Handle<FbxNode> = Handle::NONE;\n\n let mut buffer: Vec<u8> = Vec::new();\n\n let mut name: Vec<u8> = Vec::new();\n\n let mut value: Vec<u8> = Vec::new();\n\n\n\n let buf_len = reader.seek(SeekFrom::End(0))?;\n\n reader.seek(SeekFrom::Start(0))?;\n\n\n", "file_path": "src/resource/fbx/document/ascii.rs", "rank": 98, "score": 207495.42822539737 }, { "content": "pub fn read_binary<R>(file: &mut R) -> Result<FbxDocument, FbxError>\n\nwhere\n\n R: Read + Seek,\n\n{\n\n let total_length = file.seek(SeekFrom::End(0))?;\n\n file.seek(SeekFrom::Start(0))?;\n\n\n\n // Ignore all stuff until version.\n\n let mut temp = [0; 23];\n\n file.read_exact(&mut temp)?;\n\n\n\n // Verify version.\n\n let version = file.read_u32::<LittleEndian>()? 
as i32;\n\n\n\n // Anything else should be supported.\n\n if version < 7100 {\n\n return Err(FbxError::UnsupportedVersion(version));\n\n }\n\n\n\n let mut nodes = Pool::new();\n", "file_path": "src/resource/fbx/document/binary.rs", "rank": 99, "score": 207495.42822539737 } ]
Rust
noodles-bam/src/async/reader/query.rs
jamestwebber/noodles
4af97d57885821edf53c82a45e3fa81a38481b59
use std::ops::{Bound, RangeBounds}; use futures::{stream, Stream}; use noodles_bgzf as bgzf; use noodles_csi::index::reference_sequence::bin::Chunk; use tokio::io::{self, AsyncRead, AsyncSeek}; use super::Reader; use crate::Record; enum State { Seek, Read(bgzf::VirtualPosition), Done, } struct Context<'a, R> where R: AsyncRead + AsyncSeek, { reader: &'a mut Reader<R>, chunks: Vec<Chunk>, i: usize, reference_sequence_id: usize, start: i32, end: i32, state: State, } pub fn query<R, B>( reader: &mut Reader<R>, chunks: Vec<Chunk>, reference_sequence_id: usize, interval: B, ) -> impl Stream<Item = io::Result<Record>> + '_ where R: AsyncRead + AsyncSeek + Unpin, B: RangeBounds<i32>, { let (start, end) = resolve_interval(interval); let ctx = Context { reader, chunks, i: 0, reference_sequence_id, start, end, state: State::Seek, }; Box::pin(stream::unfold(ctx, |mut ctx| async { loop { match ctx.state { State::Seek => { ctx.state = match next_chunk(&ctx.chunks, &mut ctx.i) { Some(chunk) => { if let Err(e) = ctx.reader.seek(chunk.start()).await { return Some((Err(e), ctx)); } State::Read(chunk.end()) } None => State::Done, }; } State::Read(chunk_end) => match next_record(&mut ctx.reader).await { Some(Ok(record)) => { if ctx.reader.virtual_position() >= chunk_end { ctx.state = State::Seek; } match intersects(&record, ctx.reference_sequence_id, ctx.start, ctx.end) { Ok(true) => return Some((Ok(record), ctx)), Ok(false) => {} Err(e) => return Some((Err(e), ctx)), } } Some(Err(e)) => return Some((Err(e), ctx)), None => ctx.state = State::Seek, }, State::Done => return None, } } })) } fn resolve_interval<B>(interval: B) -> (i32, i32) where B: RangeBounds<i32>, { match (interval.start_bound(), interval.end_bound()) { (Bound::Included(s), Bound::Included(e)) => (*s, *e), (Bound::Included(s), Bound::Unbounded) => (*s, i32::MAX), (Bound::Unbounded, Bound::Unbounded) => (1, i32::MAX), _ => todo!(), } } fn next_chunk(chunks: &[Chunk], i: &mut usize) -> Option<Chunk> { let chunk = 
chunks.get(*i).copied(); *i += 1; chunk } async fn next_record<R>(reader: &mut Reader<R>) -> Option<io::Result<Record>> where R: AsyncRead + AsyncSeek + Unpin, { let mut record = Record::default(); match reader.read_record(&mut record).await { Ok(0) => None, Ok(_) => Some(Ok(record)), Err(e) => Some(Err(e)), } } fn intersects( record: &Record, reference_sequence_id: usize, interval_start: i32, interval_end: i32, ) -> io::Result<bool> { let id = match record.reference_sequence_id() { Some(i) => i32::from(i) as usize, None => return Ok(false), }; let start = record.position().map(i32::from).expect("missing position"); let len = record.cigar().reference_len().map(|len| len as i32)?; let end = start + len - 1; Ok(id == reference_sequence_id && in_interval(start, end, interval_start, interval_end)) } fn in_interval(a_start: i32, a_end: i32, b_start: i32, b_end: i32) -> bool { a_start <= b_end && b_start <= a_end }
use std::ops::{Bound, RangeBounds}; use futures::{stream, Stream}; use noodles_bgzf as bgzf; use noodles_csi::index::reference_sequence::bin::Chunk; use tokio::io::{self, AsyncRead, AsyncSeek}; use super::Reader; use crate::Record; enum State { Seek, Read(bgzf::VirtualPosition), Done, } struct Context<'a, R> where R: AsyncRead + AsyncSeek, { reader: &'a mut Reader<R>, chunks: Vec<Chunk>, i: usize, reference_sequence_id: usize, start: i32, end: i32, state: State, } pub fn query<R, B>( reader: &mut Reader<R>, chunks: Vec<Chunk>, reference_sequence_id: usize, interval: B, ) -> impl Stream<Item = io::Result<Record>> + '_ where R: AsyncRead + AsyncSeek + Unpin, B: RangeBounds<i32>, { let (start, end) = resolve_interval(interval);
Box::pin(stream::unfold(ctx, |mut ctx| async { loop { match ctx.state { State::Seek => { ctx.state = match next_chunk(&ctx.chunks, &mut ctx.i) { Some(chunk) => { if let Err(e) = ctx.reader.seek(chunk.start()).await { return Some((Err(e), ctx)); } State::Read(chunk.end()) } None => State::Done, }; } State::Read(chunk_end) => match next_record(&mut ctx.reader).await { Some(Ok(record)) => { if ctx.reader.virtual_position() >= chunk_end { ctx.state = State::Seek; } match intersects(&record, ctx.reference_sequence_id, ctx.start, ctx.end) { Ok(true) => return Some((Ok(record), ctx)), Ok(false) => {} Err(e) => return Some((Err(e), ctx)), } } Some(Err(e)) => return Some((Err(e), ctx)), None => ctx.state = State::Seek, }, State::Done => return None, } } })) } fn resolve_interval<B>(interval: B) -> (i32, i32) where B: RangeBounds<i32>, { match (interval.start_bound(), interval.end_bound()) { (Bound::Included(s), Bound::Included(e)) => (*s, *e), (Bound::Included(s), Bound::Unbounded) => (*s, i32::MAX), (Bound::Unbounded, Bound::Unbounded) => (1, i32::MAX), _ => todo!(), } } fn next_chunk(chunks: &[Chunk], i: &mut usize) -> Option<Chunk> { let chunk = chunks.get(*i).copied(); *i += 1; chunk } async fn next_record<R>(reader: &mut Reader<R>) -> Option<io::Result<Record>> where R: AsyncRead + AsyncSeek + Unpin, { let mut record = Record::default(); match reader.read_record(&mut record).await { Ok(0) => None, Ok(_) => Some(Ok(record)), Err(e) => Some(Err(e)), } } fn intersects( record: &Record, reference_sequence_id: usize, interval_start: i32, interval_end: i32, ) -> io::Result<bool> { let id = match record.reference_sequence_id() { Some(i) => i32::from(i) as usize, None => return Ok(false), }; let start = record.position().map(i32::from).expect("missing position"); let len = record.cigar().reference_len().map(|len| len as i32)?; let end = start + len - 1; Ok(id == reference_sequence_id && in_interval(start, end, interval_start, interval_end)) } fn in_interval(a_start: i32, 
a_end: i32, b_start: i32, b_end: i32) -> bool { a_start <= b_end && b_start <= a_end }
let ctx = Context { reader, chunks, i: 0, reference_sequence_id, start, end, state: State::Seek, };
assignment_statement
[ { "content": "pub fn read_itf8<R>(reader: &mut R) -> io::Result<i32>\n\nwhere\n\n R: Read,\n\n{\n\n let b0 = read_u8_as_i32(reader)?;\n\n\n\n let value = if b0 & 0x80 == 0 {\n\n b0\n\n } else if b0 & 0x40 == 0 {\n\n let b1 = read_u8_as_i32(reader)?;\n\n (b0 & 0x7f) << 8 | b1\n\n } else if b0 & 0x20 == 0 {\n\n let b1 = read_u8_as_i32(reader)?;\n\n let b2 = read_u8_as_i32(reader)?;\n\n (b0 & 0x3f) << 16 | b1 << 8 | b2\n\n } else if b0 & 0x10 == 0 {\n\n let b1 = read_u8_as_i32(reader)?;\n\n let b2 = read_u8_as_i32(reader)?;\n\n let b3 = read_u8_as_i32(reader)?;\n\n (b0 & 0x1f) << 24 | b1 << 16 | b2 << 8 | b3\n", "file_path": "noodles-cram/src/num/itf8.rs", "rank": 0, "score": 510204.07716633147 }, { "content": "pub fn read_line<R>(reader: &mut R, buf: &mut String) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n let result = reader.read_line(buf);\n\n buf.pop();\n\n result\n\n}\n", "file_path": "noodles-fasta/src/fai/reader.rs", "rank": 1, "score": 487006.0175275222 }, { "content": "fn read_i32_array<R>(reader: &mut R, len: usize) -> io::Result<Vec<i32>>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buf = vec![0; len];\n\n reader.read_i32_into::<LittleEndian>(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "noodles-bcf/src/reader/value.rs", "rank": 2, "score": 481808.13472715963 }, { "content": "fn read_intervals<R>(reader: &mut R) -> io::Result<Vec<bgzf::VirtualPosition>>\n\nwhere\n\n R: Read,\n\n{\n\n let n_intv = reader.read_i32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut intervals = Vec::with_capacity(n_intv);\n\n\n\n for _ in 0..n_intv {\n\n let ioff = reader\n\n .read_u64::<LittleEndian>()\n\n .map(bgzf::VirtualPosition::from)?;\n\n\n\n intervals.push(ioff);\n\n }\n\n\n\n Ok(intervals)\n\n}\n\n\n", "file_path": "noodles-tabix/src/reader.rs", "rank": 3, "score": 479304.55175992916 }, { "content": "fn read_intervals<R>(reader: &mut R) -> 
io::Result<Vec<bgzf::VirtualPosition>>\n\nwhere\n\n R: Read,\n\n{\n\n let n_intv = reader.read_u32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut intervals = Vec::with_capacity(n_intv);\n\n\n\n for _ in 0..n_intv {\n\n let ioffset = reader\n\n .read_u64::<LittleEndian>()\n\n .map(bgzf::VirtualPosition::from)?;\n\n\n\n intervals.push(ioffset);\n\n }\n\n\n\n Ok(intervals)\n\n}\n\n\n", "file_path": "noodles-bam/src/bai/reader.rs", "rank": 4, "score": 475819.1949570703 }, { "content": "pub fn read_string_map_index<R>(reader: &mut R) -> io::Result<usize>\n\nwhere\n\n R: Read,\n\n{\n\n let i = match read_value(reader)? {\n\n Some(Value::Int8(Some(Int8::Value(i)))) => i32::from(i),\n\n Some(Value::Int16(Some(Int16::Value(i)))) => i32::from(i),\n\n Some(Value::Int32(Some(Int32::Value(i)))) => i,\n\n v => {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"expected {{Int8, Int16, Int32}}, got {:?}\", v),\n\n ))\n\n }\n\n };\n\n\n\n usize::try_from(i).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n}\n\n\n", "file_path": "noodles-bcf/src/reader/string_map.rs", "rank": 5, "score": 474844.8861519771 }, { "content": "pub fn read_string_map_indices<R>(reader: &mut R) -> io::Result<Vec<usize>>\n\nwhere\n\n R: Read,\n\n{\n\n let indices = match read_value(reader)? 
{\n\n Some(Value::Int8(Some(Int8::Value(i)))) => vec![i32::from(i)],\n\n Some(Value::Int8Array(indices)) => indices.into_iter().map(i32::from).collect(),\n\n Some(Value::Int16(Some(Int16::Value(i)))) => vec![i32::from(i)],\n\n Some(Value::Int16Array(indices)) => indices.into_iter().map(i32::from).collect(),\n\n Some(Value::Int32(Some(Int32::Value(i)))) => vec![i],\n\n Some(Value::Int32Array(indices)) => indices,\n\n None => Vec::new(),\n\n v => {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\n\n \"expected {{Int8, Int8Array, Int16, Int16Array, Int32, Int32Array}}, got {:?}\",\n\n v\n\n ),\n\n ))\n", "file_path": "noodles-bcf/src/reader/string_map.rs", "rank": 6, "score": 468210.51297768124 }, { "content": "fn read_i32<R>(reader: &mut R) -> io::Result<i32>\n\nwhere\n\n R: Read,\n\n{\n\n reader.read_i32::<LittleEndian>()\n\n}\n\n\n", "file_path": "noodles-bcf/src/reader/value.rs", "rank": 7, "score": 455824.0075867304 }, { "content": "fn read_chunks<R>(reader: &mut R) -> io::Result<Vec<Chunk>>\n\nwhere\n\n R: Read,\n\n{\n\n let n_chunk = reader.read_i32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut chunks = Vec::with_capacity(n_chunk);\n\n\n\n for _ in 0..n_chunk {\n\n let chunk_beg = reader\n\n .read_u64::<LittleEndian>()\n\n .map(bgzf::VirtualPosition::from)?;\n\n\n\n let chunk_end = reader\n\n .read_u64::<LittleEndian>()\n\n .map(bgzf::VirtualPosition::from)?;\n\n\n\n chunks.push(Chunk::new(chunk_beg, chunk_end));\n\n }\n\n\n\n Ok(chunks)\n\n}\n\n\n", "file_path": "noodles-csi/src/reader.rs", "rank": 8, "score": 452660.23975029093 }, { "content": "fn read_chunks<R>(reader: &mut R) -> io::Result<Vec<Chunk>>\n\nwhere\n\n R: Read,\n\n{\n\n let n_chunk = reader.read_i32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut chunks = 
Vec::with_capacity(n_chunk);\n\n\n\n for _ in 0..n_chunk {\n\n let cnk_beg = reader\n\n .read_u64::<LittleEndian>()\n\n .map(bgzf::VirtualPosition::from)?;\n\n\n\n let cnk_end = reader\n\n .read_u64::<LittleEndian>()\n\n .map(bgzf::VirtualPosition::from)?;\n\n\n\n chunks.push(Chunk::new(cnk_beg, cnk_end));\n\n }\n\n\n\n Ok(chunks)\n\n}\n\n\n", "file_path": "noodles-tabix/src/reader.rs", "rank": 9, "score": 452660.23975029093 }, { "content": "fn read_block<R>(reader: &mut R, cdata: &mut Vec<u8>, block: &mut Block) -> io::Result<usize>\n\nwhere\n\n R: Read,\n\n{\n\n let clen = match read_header(reader) {\n\n Ok(0) => return Ok(0),\n\n Ok(bs) => bs as usize,\n\n Err(e) => return Err(e),\n\n };\n\n\n\n if clen < BGZF_HEADER_SIZE + gz::TRAILER_SIZE {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\n\n \"expected clen >= {}, got {}\",\n\n BGZF_HEADER_SIZE + gz::TRAILER_SIZE,\n\n clen\n\n ),\n\n ));\n\n }\n", "file_path": "noodles-bgzf/src/reader.rs", "rank": 10, "score": 450840.6670483154 }, { "content": "fn read_chunks<R>(reader: &mut R) -> io::Result<Vec<Chunk>>\n\nwhere\n\n R: Read,\n\n{\n\n let n_chunk = reader.read_u32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut chunks = Vec::with_capacity(n_chunk);\n\n\n\n for _ in 0..n_chunk {\n\n let chunk_beg = reader\n\n .read_u64::<LittleEndian>()\n\n .map(bgzf::VirtualPosition::from)?;\n\n\n\n let chunk_end = reader\n\n .read_u64::<LittleEndian>()\n\n .map(bgzf::VirtualPosition::from)?;\n\n\n\n chunks.push(Chunk::new(chunk_beg, chunk_end));\n\n }\n\n\n\n Ok(chunks)\n\n}\n\n\n", "file_path": "noodles-bam/src/bai/reader.rs", "rank": 11, "score": 449809.46543589304 }, { "content": "#[allow(clippy::eq_op)]\n\nfn region_to_bin(start: i32, mut end: i32) -> i32 {\n\n end -= 1;\n\n\n\n if start >> 14 == end >> 14 {\n\n ((1 << 15) - 1) / 7 + (start >> 14)\n\n } else if start >> 17 == end >> 17 {\n\n ((1 << 12) - 
1) / 7 + (start >> 17)\n\n } else if start >> 20 == end >> 20 {\n\n ((1 << 9) - 1) / 7 + (start >> 20)\n\n } else if start >> 23 == end >> 23 {\n\n ((1 << 6) - 1) / 7 + (start >> 23)\n\n } else if start >> 26 == end >> 26 {\n\n ((1 << 3) - 1) / 7 + (start >> 26)\n\n } else {\n\n 0\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "noodles-tabix/src/index/reference_sequence/builder.rs", "rank": 12, "score": 446579.3067174525 }, { "content": "fn read_u8_as_i32<R>(reader: &mut R) -> io::Result<i32>\n\nwhere\n\n R: Read,\n\n{\n\n reader.read_u8().map(i32::from)\n\n}\n\n\n", "file_path": "noodles-cram/src/num/itf8.rs", "rank": 13, "score": 440856.22305527364 }, { "content": "fn inflate_data<R>(reader: R, writer: &mut Vec<u8>) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n let mut decoder = DeflateDecoder::new(reader);\n\n decoder.read_to_end(writer)\n\n}\n\n\n", "file_path": "noodles-bgzf/src/reader.rs", "rank": 14, "score": 440304.9878837697 }, { "content": "pub fn read_header<R>(reader: &mut R) -> io::Result<Header>\n\nwhere\n\n R: Read,\n\n{\n\n let length = reader.read_i32::<LittleEndian>()?;\n\n\n\n let reference_sequence_id = read_itf8(reader).and_then(|n| {\n\n ReferenceSequenceId::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let starting_position_on_the_reference = read_itf8(reader)?;\n\n let alignment_span = read_itf8(reader)?;\n\n let number_of_records = read_itf8(reader)?;\n\n let record_counter = read_ltf8(reader)?;\n\n let bases = read_ltf8(reader)?;\n\n let number_of_blocks = read_itf8(reader)?;\n\n let landmarks = read_landmarks(reader)?;\n\n let crc32 = reader.read_u32::<LittleEndian>()?;\n\n\n\n Ok(Header::builder()\n", "file_path": "noodles-cram/src/reader/container.rs", "rank": 15, "score": 436422.0457375484 }, { "content": "pub fn read_encoding<R>(reader: &mut R) -> io::Result<Encoding>\n\nwhere\n\n R: Read,\n\n{\n\n let raw_kind = read_itf8(reader)?;\n\n\n\n match raw_kind {\n\n 0 => 
Ok(Encoding::Null),\n\n 1 => read_external_encoding(reader),\n\n 2 => unimplemented!(\"GOLOMB\"),\n\n 3 => read_huffman_encoding(reader),\n\n 4 => read_byte_array_len_encoding(reader),\n\n 5 => read_byte_array_stop_encoding(reader),\n\n 6 => read_beta_encoding(reader),\n\n 7 => read_subexp_encoding(reader),\n\n 8 => unimplemented!(\"GOLOMB_RICE\"),\n\n 9 => read_gamma_encoding(reader),\n\n _ => Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"invalid encoding kind\",\n\n )),\n\n }\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/encoding.rs", "rank": 16, "score": 436422.04573754844 }, { "content": "pub fn read_block<R>(reader: &mut R) -> io::Result<Block>\n\nwhere\n\n R: Read,\n\n{\n\n let method = reader.read_u8().and_then(|b| {\n\n CompressionMethod::try_from(b).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let block_content_type_id = reader.read_u8().and_then(|b| {\n\n ContentType::try_from(b).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let block_content_id = read_itf8(reader)?;\n\n let size_in_bytes = read_itf8(reader)?;\n\n let raw_size_in_bytes = read_itf8(reader)?;\n\n\n\n let mut data = vec![0; size_in_bytes as usize];\n\n reader.read_exact(&mut data)?;\n\n\n\n let crc32 = reader.read_u32::<LittleEndian>()?;\n", "file_path": "noodles-cram/src/reader/block.rs", "rank": 17, "score": 436422.0457375484 }, { "content": "pub fn rans_renorm<R>(reader: &mut R, mut r: u32) -> io::Result<u32>\n\nwhere\n\n R: Read,\n\n{\n\n while r < (1 << 23) {\n\n r = (r << 8) + reader.read_u8().map(u32::from)?;\n\n }\n\n\n\n Ok(r)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_read_header() -> io::Result<()> {\n\n let data = [0x00, 0x25, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00];\n\n let mut reader = &data[..];\n\n assert_eq!(read_header(&mut reader)?, (Order::Zero, 37, 7));\n", "file_path": "noodles-cram/src/rans/decode.rs", "rank": 18, "score": 434711.11429140414 }, { 
"content": "pub fn decode<R>(reader: &mut R, output: &mut [u8]) -> io::Result<()>\n\nwhere\n\n R: Read,\n\n{\n\n let mut freqs = vec![vec![0; 256]; 256];\n\n let mut cumulative_freqs = vec![vec![0; 256]; 256];\n\n\n\n read_frequencies_1(reader, &mut freqs, &mut cumulative_freqs)?;\n\n\n\n let mut state = [0; 4];\n\n reader.read_u32_into::<LittleEndian>(&mut state)?;\n\n\n\n let mut i = 0;\n\n let mut last_syms = [0; 4];\n\n\n\n while i < output.len() / 4 {\n\n for j in 0..4 {\n\n let f = rans_get_cumulative_freq(state[j]);\n\n let s = rans_get_symbol_from_freq(&cumulative_freqs[last_syms[j] as usize], f);\n\n\n", "file_path": "noodles-cram/src/rans/decode/order_1.rs", "rank": 19, "score": 433238.0806375942 }, { "content": "pub fn decode<R>(reader: &mut R, output: &mut [u8]) -> io::Result<()>\n\nwhere\n\n R: Read,\n\n{\n\n let mut freqs = vec![0; 256];\n\n let mut cumulative_freqs = vec![0; 256];\n\n\n\n read_frequencies_0(reader, &mut freqs, &mut cumulative_freqs)?;\n\n\n\n let mut state = [0; 4];\n\n reader.read_u32_into::<LittleEndian>(&mut state)?;\n\n\n\n let mut i = 0;\n\n\n\n while i < output.len() {\n\n for j in 0..4 {\n\n if i + j >= output.len() {\n\n return Ok(());\n\n }\n\n\n", "file_path": "noodles-cram/src/rans/decode/order_0.rs", "rank": 20, "score": 433238.0806375942 }, { "content": "pub fn read_value<R>(reader: &mut R) -> io::Result<Option<Value>>\n\nwhere\n\n R: Read,\n\n{\n\n let ty = read_type(reader)?;\n\n\n\n match ty {\n\n Some(Type::Int8(len)) => match len {\n\n 0 => Ok(Some(Value::Int8(None))),\n\n 1 => read_i8(reader)\n\n .map(Int8::from)\n\n .map(Some)\n\n .map(Value::Int8)\n\n .map(Some),\n\n _ => read_i8_array(reader, len).map(Value::Int8Array).map(Some),\n\n },\n\n Some(Type::Int16(len)) => match len {\n\n 0 => Ok(Some(Value::Int16(None))),\n\n 1 => read_i16(reader)\n\n .map(Int16::from)\n", "file_path": "noodles-bcf/src/reader/value.rs", "rank": 21, "score": 430191.1946584475 }, { "content": "pub fn read_header<R>(reader: &mut R) -> 
io::Result<slice::Header>\n\nwhere\n\n R: Read,\n\n{\n\n let reference_sequence_id = read_itf8(reader).and_then(|n| {\n\n ReferenceSequenceId::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let alignment_start = read_itf8(reader)?;\n\n let alignment_span = read_itf8(reader)?;\n\n let record_count = read_itf8(reader)?;\n\n let record_counter = read_ltf8(reader)?;\n\n let block_count = read_itf8(reader)?;\n\n let block_content_ids = read_block_content_ids(reader)?;\n\n let embedded_reference_bases_block_content_id =\n\n read_itf8(reader).map(EmbeddedReferenceBasesBlockContentId::from)?;\n\n let reference_md5 = read_reference_md5(reader)?;\n\n let optional_tags = read_optional_tags(reader)?;\n\n\n\n Ok(slice::Header::builder()\n", "file_path": "noodles-cram/src/reader/slice.rs", "rank": 22, "score": 430191.19465844746 }, { "content": "pub fn read_compression_header<R>(reader: &mut R) -> io::Result<CompressionHeader>\n\nwhere\n\n R: Read,\n\n{\n\n let preservation_map = read_preservation_map(reader)?;\n\n let data_series_encoding_map = read_data_series_encoding_map(reader)?;\n\n let tag_encoding_map = read_tag_encoding_map(reader)?;\n\n\n\n Ok(CompressionHeader::new(\n\n preservation_map,\n\n data_series_encoding_map,\n\n tag_encoding_map,\n\n ))\n\n}\n", "file_path": "noodles-cram/src/reader/compression_header.rs", "rank": 23, "score": 427846.22724819044 }, { "content": "pub fn read_type<R>(reader: &mut R) -> io::Result<Option<Type>>\n\nwhere\n\n R: Read,\n\n{\n\n use super::{Int16, Int32, Int8};\n\n\n\n let encoding = reader.read_u8()?;\n\n\n\n let mut len = usize::from(encoding >> 4);\n\n\n\n if len == 0x0f {\n\n let value = read_value(reader)?;\n\n\n\n let next_len = match value {\n\n Some(Value::Int8(Some(Int8::Value(n)))) => i32::from(n),\n\n Some(Value::Int16(Some(Int16::Value(n)))) => i32::from(n),\n\n Some(Value::Int32(Some(Int32::Value(n)))) => n,\n\n _ => {\n\n return Err(io::Error::new(\n\n 
io::ErrorKind::InvalidData,\n", "file_path": "noodles-bcf/src/reader/value/ty.rs", "rank": 24, "score": 427333.366066364 }, { "content": "pub fn read_ltf8<R>(reader: &mut R) -> io::Result<i64>\n\nwhere\n\n R: Read,\n\n{\n\n let b0 = read_u8_as_i64(reader)?;\n\n\n\n let value = if b0 & 0x80 == 0 {\n\n b0\n\n } else if b0 & 0x40 == 0 {\n\n let b1 = read_u8_as_i64(reader)?;\n\n (b0 & 0x7f) << 8 | b1\n\n } else if b0 & 0x20 == 0 {\n\n let b1 = read_u8_as_i64(reader)?;\n\n let b2 = read_u8_as_i64(reader)?;\n\n (b0 & 0x3f) << 16 | b1 << 8 | b2\n\n } else if b0 & 0x10 == 0 {\n\n let b1 = read_u8_as_i64(reader)?;\n\n let b2 = read_u8_as_i64(reader)?;\n\n let b3 = read_u8_as_i64(reader)?;\n\n (b0 & 0x1f) << 24 | b1 << 16 | b2 << 8 | b3\n", "file_path": "noodles-cram/src/num/ltf8.rs", "rank": 25, "score": 424304.6306056697 }, { "content": "pub fn read_preservation_map<R>(reader: &mut R) -> io::Result<PreservationMap>\n\nwhere\n\n R: Read,\n\n{\n\n let data_len = read_itf8(reader)?;\n\n\n\n let mut buf = vec![0; data_len as usize];\n\n reader.read_exact(&mut buf)?;\n\n\n\n let mut buf_reader = &buf[..];\n\n let map_len = read_itf8(&mut buf_reader)?;\n\n\n\n let mut read_names_included = true;\n\n let mut ap_data_series_delta = true;\n\n let mut reference_required = true;\n\n let mut substitution_matrix = None;\n\n let mut tag_ids_dictionary = None;\n\n\n\n let mut key_buf = [0; 2];\n\n\n", "file_path": "noodles-cram/src/reader/compression_header/preservation_map.rs", "rank": 26, "score": 422442.81902651826 }, { "content": "fn read_reference_sequences<R>(reader: &mut R, depth: i32) -> io::Result<Vec<ReferenceSequence>>\n\nwhere\n\n R: Read,\n\n{\n\n let n_ref = reader.read_i32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut reference_sequences = Vec::with_capacity(n_ref);\n\n\n\n for _ in 0..n_ref {\n\n let (bins, metadata) = read_bins(reader, depth)?;\n\n let reference_sequence = 
ReferenceSequence::new(bins, metadata);\n\n reference_sequences.push(reference_sequence);\n\n }\n\n\n\n Ok(reference_sequences)\n\n}\n\n\n", "file_path": "noodles-csi/src/reader.rs", "rank": 27, "score": 421994.82539473404 }, { "content": "pub fn rans_decode<R>(reader: &mut R) -> io::Result<Vec<u8>>\n\nwhere\n\n R: Read,\n\n{\n\n let (order, _, data_len) = read_header(reader)?;\n\n\n\n let mut buf = vec![0; data_len as usize];\n\n\n\n match order {\n\n Order::Zero => order_0::decode(reader, &mut buf)?,\n\n Order::One => order_1::decode(reader, &mut buf)?,\n\n }\n\n\n\n Ok(buf)\n\n}\n\n\n", "file_path": "noodles-cram/src/rans/decode.rs", "rank": 28, "score": 417942.19197277975 }, { "content": "fn read_bins<R>(reader: &mut R, depth: i32) -> io::Result<(Vec<Bin>, Option<Metadata>)>\n\nwhere\n\n R: Read,\n\n{\n\n let n_bin = reader.read_i32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut bins = Vec::with_capacity(n_bin);\n\n\n\n let metadata_id = Bin::metadata_id(depth);\n\n let mut metadata = None;\n\n\n\n for _ in 0..n_bin {\n\n let id = reader.read_u32::<LittleEndian>()?;\n\n\n\n let loffset = reader\n\n .read_u64::<LittleEndian>()\n\n .map(bgzf::VirtualPosition::from)?;\n\n\n", "file_path": "noodles-csi/src/reader.rs", "rank": 29, "score": 415979.31830795994 }, { "content": "pub fn read_tag_encoding_map<R>(reader: &mut R) -> io::Result<TagEncodingMap>\n\nwhere\n\n R: Read,\n\n{\n\n let data_len = read_itf8(reader)?;\n\n let mut buf = vec![0; data_len as usize];\n\n reader.read_exact(&mut buf)?;\n\n\n\n let mut buf_reader = &buf[..];\n\n let map_len = read_itf8(&mut buf_reader)?;\n\n\n\n let mut map = HashMap::with_capacity(map_len as usize);\n\n\n\n for _ in 0..map_len {\n\n let key = read_itf8(&mut buf_reader)?;\n\n let encoding = read_encoding(&mut buf_reader)?;\n\n map.insert(key, encoding);\n\n }\n\n\n\n Ok(TagEncodingMap::from(map))\n\n}\n", "file_path": 
"noodles-cram/src/reader/compression_header/tag_encoding_map.rs", "rank": 30, "score": 414766.9725684061 }, { "content": "// Reads all bytes until a line feed ('\\n') or EOF is reached.\n\n//\n\n// The buffer will not include the trailing newline ('\\n' or '\\r\\n').\n\nfn read_line<R>(reader: &mut R, buf: &mut String) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n match reader.read_line(buf) {\n\n Ok(0) => Ok(0),\n\n Ok(n) => {\n\n if buf.ends_with(LINE_FEED) {\n\n buf.pop();\n\n\n\n if buf.ends_with(CARRIAGE_RETURN) {\n\n buf.pop();\n\n }\n\n }\n\n\n\n Ok(n)\n\n }\n\n Err(e) => Err(e),\n\n }\n\n}\n", "file_path": "noodles-vcf/src/reader.rs", "rank": 31, "score": 413519.97296445456 }, { "content": "fn read_line<R>(reader: &mut R, buf: &mut String) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n match reader.read_line(buf) {\n\n Ok(0) => Ok(0),\n\n Ok(n) => {\n\n if buf.ends_with(LINE_FEED) {\n\n buf.pop();\n\n\n\n if buf.ends_with(CARRIAGE_RETURN) {\n\n buf.pop();\n\n }\n\n }\n\n\n\n Ok(n)\n\n }\n\n Err(e) => Err(e),\n\n }\n\n}\n", "file_path": "noodles-gff/src/reader.rs", "rank": 32, "score": 413519.97296445456 }, { "content": "fn read_line<R>(reader: &mut R, buf: &mut String) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n match reader.read_line(buf) {\n\n Ok(0) => Ok(0),\n\n Ok(n) => {\n\n buf.pop();\n\n Ok(n)\n\n }\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::{convert::TryFrom, io::Write};\n\n\n\n use flate2::write::GzEncoder;\n\n use noodles_bam as bam;\n", "file_path": "noodles-cram/src/crai/reader.rs", "rank": 33, "score": 410787.25103166036 }, { "content": "fn read_line<R>(reader: &mut R, buf: &mut String) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n match reader.read_line(buf) {\n\n Ok(0) => Ok(0),\n\n Ok(n) => {\n\n if buf.ends_with(LINE_FEED) {\n\n buf.pop();\n\n\n\n if buf.ends_with(CARRIAGE_RETURN) {\n\n buf.pop();\n\n }\n\n }\n\n\n\n Ok(n)\n\n }\n\n Err(e) => Err(e),\n\n }\n\n}\n", 
"file_path": "noodles-fastq/src/fai/reader.rs", "rank": 34, "score": 410787.2510316603 }, { "content": "/// Reads a BGZF block trailer.\n\n///\n\n/// The position of the stream is expected to be at the start of the block trailer, i.e., 8 bytes\n\n/// from the end of the block.\n\n///\n\n/// This returns the length of the uncompressed data (`ISIZE`).\n\nfn read_trailer<R>(reader: &mut R) -> io::Result<u32>\n\nwhere\n\n R: Read,\n\n{\n\n let mut trailer = [0; gz::TRAILER_SIZE];\n\n reader.read_exact(&mut trailer)?;\n\n let r#isize = LittleEndian::read_u32(&trailer[4..]);\n\n Ok(r#isize)\n\n}\n\n\n", "file_path": "noodles-bgzf/src/reader.rs", "rank": 35, "score": 408641.16180716513 }, { "content": "/// Reads a BGZF block header.\n\n///\n\n/// The position of the stream is expected to be at the start of a block.\n\n///\n\n/// If successful, the block size (`BSIZE` + 1) is returned. If a block size of 0 is returned, the\n\n/// stream reached EOF.\n\nfn read_header<R>(reader: &mut R) -> io::Result<u32>\n\nwhere\n\n R: Read,\n\n{\n\n let mut header = [0; BGZF_HEADER_SIZE];\n\n\n\n match reader.read_exact(&mut header) {\n\n Ok(_) => {}\n\n Err(ref e) if e.kind() == io::ErrorKind::UnexpectedEof => return Ok(0),\n\n Err(e) => return Err(e),\n\n }\n\n\n\n let bsize = LittleEndian::read_u16(&header[16..]);\n\n\n\n // Add 1 because BSIZE is \"total Block SIZE minus 1\".\n\n Ok(u32::from(bsize) + 1)\n\n}\n\n\n", "file_path": "noodles-bgzf/src/reader.rs", "rank": 36, "score": 408636.4414690745 }, { "content": "fn read_line<R>(reader: &mut R, buf: &mut Vec<u8>) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n match reader.read_until(LINE_FEED, buf) {\n\n Ok(0) => Ok(0),\n\n Ok(n) => {\n\n if buf.ends_with(&[LINE_FEED]) {\n\n buf.pop();\n\n\n\n if buf.ends_with(&[CARRIAGE_RETURN]) {\n\n buf.pop();\n\n }\n\n }\n\n\n\n Ok(n)\n\n }\n\n Err(e) => Err(e),\n\n }\n\n}\n", "file_path": "noodles-fastq/src/reader.rs", "rank": 37, "score": 408580.0400341491 }, { "content": "fn 
in_interval(a_start: i32, a_end: i32, b_start: i32, b_end: i32) -> bool {\n\n a_start <= b_end && b_start <= a_end\n\n}\n", "file_path": "noodles-bcf/src/reader/query.rs", "rank": 38, "score": 408557.80712022237 }, { "content": "fn in_interval(a_start: i32, a_end: i32, b_start: i32, b_end: i32) -> bool {\n\n a_start <= b_end && b_start <= a_end\n\n}\n", "file_path": "noodles-vcf/src/reader/query.rs", "rank": 39, "score": 408557.8071202224 }, { "content": "fn in_interval(a_start: i32, a_end: i32, b_start: i32, b_end: i32) -> bool {\n\n a_start <= b_end && b_start <= a_end\n\n}\n", "file_path": "noodles-bam/src/reader/query.rs", "rank": 40, "score": 408557.80712022237 }, { "content": "pub fn read_data_series_encoding_map<R>(reader: &mut R) -> io::Result<DataSeriesEncodingMap>\n\nwhere\n\n R: Read,\n\n{\n\n let data_len = read_itf8(reader)?;\n\n let mut buf = vec![0; data_len as usize];\n\n reader.read_exact(&mut buf)?;\n\n\n\n let mut buf_reader = &buf[..];\n\n let map_len = read_itf8(&mut buf_reader)?;\n\n\n\n let mut builder = DataSeriesEncodingMap::builder();\n\n let mut key_buf = [0; 2];\n\n\n\n for _ in 0..map_len {\n\n buf_reader.read_exact(&mut key_buf)?;\n\n\n\n let key = DataSeries::try_from(&key_buf[..])\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;\n\n\n", "file_path": "noodles-cram/src/reader/compression_header/data_series_encoding_map.rs", "rank": 41, "score": 407562.49694924266 }, { "content": "fn in_interval(a_start: i32, a_end: i32, b_start: i32, b_end: i32) -> bool {\n\n a_start <= b_end && b_start <= a_end\n\n}\n", "file_path": "noodles-vcf/src/async/reader/query.rs", "rank": 43, "score": 405293.6064729162 }, { "content": "pub fn query<R, B>(\n\n reader: &mut Reader<bgzf::AsyncReader<R>>,\n\n chunks: Vec<Chunk>,\n\n reference_sequence_name: String,\n\n interval: B,\n\n) -> impl Stream<Item = io::Result<Record>> + '_\n\nwhere\n\n R: AsyncRead + AsyncSeek + Unpin,\n\n B: RangeBounds<i32>,\n\n{\n\n let (start, end) = 
resolve_interval(interval);\n\n\n\n let ctx = Context {\n\n reader,\n\n\n\n chunks,\n\n i: 0,\n\n\n\n reference_sequence_name,\n\n start,\n", "file_path": "noodles-vcf/src/async/reader/query.rs", "rank": 44, "score": 403630.22560733825 }, { "content": "fn resolve_interval<B>(interval: B) -> (i32, i32)\n\nwhere\n\n B: RangeBounds<i32>,\n\n{\n\n match (interval.start_bound(), interval.end_bound()) {\n\n (Bound::Included(s), Bound::Included(e)) => (*s, *e),\n\n (Bound::Included(s), Bound::Unbounded) => (*s, i32::MAX),\n\n (Bound::Unbounded, Bound::Unbounded) => (1, i32::MAX),\n\n _ => todo!(),\n\n }\n\n}\n\n\n", "file_path": "noodles-vcf/src/async/reader/query.rs", "rank": 46, "score": 402641.2123757784 }, { "content": "fn consume_byte<R>(reader: &mut R, value: u8) -> io::Result<usize>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buf = [0; 1];\n\n reader.read_exact(&mut buf)?;\n\n\n\n if buf[0] == value {\n\n Ok(buf.len())\n\n } else {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"read name missing @ prefix\",\n\n ))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "noodles-fastq/src/reader.rs", "rank": 48, "score": 397542.5592351965 }, { "content": "fn read_line<R>(reader: &mut R, buf: &mut Vec<u8>) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n reader.read_until(LINE_FEED, buf)\n\n}\n\n\n", "file_path": "noodles-fastq/src/indexer.rs", "rank": 49, "score": 396207.61743759667 }, { "content": "fn read_string<R>(reader: &mut R, len: usize) -> io::Result<String>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buf = vec![0; len];\n\n reader.read_exact(&mut buf)?;\n\n String::from_utf8(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_read_value() {\n\n let data = [0x00];\n\n let mut reader = &data[..];\n\n assert!(matches!(read_value(&mut reader), Ok(None)));\n\n\n\n let data = [0x01];\n", "file_path": 
"noodles-bcf/src/reader/value.rs", "rank": 50, "score": 394617.38775150315 }, { "content": "// 0-based, [start, end)\n\nfn region_to_bins(start: usize, mut end: usize) -> BitVec {\n\n end -= 1;\n\n\n\n let mut bins = BitVec::from_elem(bin::MAX_ID as usize, false);\n\n bins.set(0, true);\n\n\n\n for k in (1 + (start >> 26))..=(1 + (end >> 26)) {\n\n bins.set(k, true);\n\n }\n\n\n\n for k in (9 + (start >> 23))..=(9 + (end >> 23)) {\n\n bins.set(k, true);\n\n }\n\n\n\n for k in (73 + (start >> 20))..=(73 + (end >> 20)) {\n\n bins.set(k, true);\n\n }\n\n\n\n for k in (585 + (start >> 17))..=(585 + (end >> 17)) {\n\n bins.set(k, true);\n", "file_path": "noodles-tabix/src/index/reference_sequence.rs", "rank": 51, "score": 389399.008759983 }, { "content": "fn read_references<R>(reader: &mut R, len: usize) -> io::Result<Vec<ReferenceSequence>>\n\nwhere\n\n R: Read,\n\n{\n\n let mut references = Vec::with_capacity(len);\n\n\n\n for _ in 0..len {\n\n let (bins, metadata) = read_bins(reader)?;\n\n let intervals = read_intervals(reader)?;\n\n references.push(ReferenceSequence::new(bins, intervals, metadata));\n\n }\n\n\n\n Ok(references)\n\n}\n\n\n", "file_path": "noodles-tabix/src/reader.rs", "rank": 52, "score": 389333.76010680967 }, { "content": "fn read_float_array<R>(reader: &mut R, len: usize) -> io::Result<Vec<f32>>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buf = vec![0.0; len];\n\n reader.read_f32_into::<LittleEndian>(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "noodles-bcf/src/reader/value.rs", "rank": 53, "score": 386539.3750963793 }, { "content": "fn read_i16_array<R>(reader: &mut R, len: usize) -> io::Result<Vec<i16>>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buf = vec![0; len];\n\n reader.read_i16_into::<LittleEndian>(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "noodles-bcf/src/reader/value.rs", "rank": 54, "score": 386539.3750963793 }, { "content": "fn read_i8_array<R>(reader: &mut R, len: usize) -> io::Result<Vec<i8>>\n\nwhere\n\n R: Read,\n\n{\n\n let 
mut buf = vec![0; len];\n\n reader.read_i8_into(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "noodles-bcf/src/reader/value.rs", "rank": 55, "score": 386539.3750963793 }, { "content": "// 0-based, [start, end)\n\nfn region_to_bins(start: usize, mut end: usize) -> BitVec {\n\n end -= 1;\n\n\n\n let mut bins = BitVec::from_elem(bin::MAX_ID as usize, false);\n\n bins.set(0, true);\n\n\n\n for k in (1 + (start >> 26))..=(1 + (end >> 26)) {\n\n bins.set(k, true);\n\n }\n\n\n\n for k in (9 + (start >> 23))..=(9 + (end >> 23)) {\n\n bins.set(k, true);\n\n }\n\n\n\n for k in (73 + (start >> 20))..=(73 + (end >> 20)) {\n\n bins.set(k, true);\n\n }\n\n\n\n for k in (585 + (start >> 17))..=(585 + (end >> 17)) {\n\n bins.set(k, true);\n", "file_path": "noodles-bam/src/bai/index/reference_sequence.rs", "rank": 56, "score": 386323.1877666807 }, { "content": "fn read_ref_alt<R>(reader: &mut R, len: usize) -> io::Result<Vec<String>>\n\nwhere\n\n R: Read,\n\n{\n\n let mut alleles = Vec::with_capacity(len);\n\n\n\n for _ in 0..len {\n\n match read_value(reader)? 
{\n\n Some(Value::String(Some(s))) => alleles.push(s),\n\n Some(Value::String(None)) => alleles.push(String::from(\".\")),\n\n v => {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"expected string, got {:?}\", v),\n\n ))\n\n }\n\n }\n\n }\n\n\n\n Ok(alleles)\n\n}\n\n\n", "file_path": "noodles-bcf/src/reader/record/site.rs", "rank": 57, "score": 383806.99402051425 }, { "content": "fn next_chunk(chunks: &[Chunk], i: &mut usize) -> Option<Chunk> {\n\n let chunk = chunks.get(*i).copied();\n\n *i += 1;\n\n chunk\n\n}\n\n\n\nasync fn next_record<R>(reader: &mut Reader<bgzf::AsyncReader<R>>) -> Option<io::Result<Record>>\n\nwhere\n\n R: AsyncRead + Unpin,\n\n{\n\n let mut buf = String::new();\n\n\n\n match reader.read_record(&mut buf).await {\n\n Ok(0) => None,\n\n Ok(_) => {\n\n let result = buf\n\n .parse()\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e));\n\n\n\n Some(result)\n\n }\n\n Err(e) => Some(Err(e)),\n\n }\n\n}\n\n\n", "file_path": "noodles-vcf/src/async/reader/query.rs", "rank": 58, "score": 367873.79780640674 }, { "content": "fn read_magic<R>(reader: &mut R) -> io::Result<()>\n\nwhere\n\n R: Read,\n\n{\n\n let mut magic = [0; 4];\n\n reader.read_exact(&mut magic)?;\n\n\n\n if magic == MAGIC_NUMBER {\n\n Ok(())\n\n } else {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"invalid CSI file format\",\n\n ))\n\n }\n\n}\n\n\n", "file_path": "noodles-csi/src/reader.rs", "rank": 60, "score": 363700.19518420927 }, { "content": "fn read_magic<R>(reader: &mut R) -> io::Result<()>\n\nwhere\n\n R: Read,\n\n{\n\n let mut magic = [0; 4];\n\n reader.read_exact(&mut magic)?;\n\n\n\n if magic == MAGIC_NUMBER {\n\n Ok(())\n\n } else {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"invalid tabix header\",\n\n ))\n\n }\n\n}\n\n\n", "file_path": "noodles-tabix/src/reader.rs", "rank": 61, "score": 363700.1951842093 }, { "content": "fn read_magic<R>(reader: &mut R) -> io::Result<()>\n\nwhere\n\n R: 
Read,\n\n{\n\n let mut magic = [0; 4];\n\n reader.read_exact(&mut magic)?;\n\n\n\n if magic == MAGIC_NUMBER {\n\n Ok(())\n\n } else {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"invalid BAI header\",\n\n ))\n\n }\n\n}\n\n\n", "file_path": "noodles-bam/src/bai/reader.rs", "rank": 62, "score": 361259.4132087969 }, { "content": "fn read_magic_number<R>(reader: &mut R) -> io::Result<()>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buf = [0; 4];\n\n reader.read_exact(&mut buf)?;\n\n\n\n if buf == MAGIC_NUMBER {\n\n Ok(())\n\n } else {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"invalid CRAM header\",\n\n ))\n\n }\n\n}\n\n\n", "file_path": "noodles-cram/src/reader.rs", "rank": 63, "score": 361259.4132087969 }, { "content": "fn read_format<R>(reader: &mut R) -> io::Result<Version>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buf = [0; 2];\n\n reader.read_exact(&mut buf)?;\n\n Ok(Version::new(buf[0], buf[1]))\n\n}\n\n\n", "file_path": "noodles-cram/src/reader.rs", "rank": 64, "score": 358524.2894288243 }, { "content": "fn read_metadata<R>(reader: &mut R) -> io::Result<Metadata>\n\nwhere\n\n R: Read,\n\n{\n\n use crate::index::reference_sequence::bin::METADATA_CHUNK_COUNT;\n\n\n\n let n_chunk = reader.read_u32::<LittleEndian>()?;\n\n\n\n if n_chunk != METADATA_CHUNK_COUNT {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\n\n \"invalid metadata pseudo-bin chunk count: expected {}, got {}\",\n\n METADATA_CHUNK_COUNT, n_chunk\n\n ),\n\n ));\n\n }\n\n\n\n let ref_beg = reader\n\n .read_u64::<LittleEndian>()\n", "file_path": "noodles-csi/src/reader.rs", "rank": 65, "score": 358524.2894288243 }, { "content": "fn read_header<R>(reader: &mut R) -> io::Result<String>\n\nwhere\n\n R: Read,\n\n{\n\n let l_text = reader.read_u32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut text = vec![0; l_text];\n\n reader.read_exact(&mut text)?;\n\n\n\n // 
§ 4.2 The BAM format (2021-06-03): \"Plain header text in SAM; not necessarily\n\n // NUL-terminated\".\n\n bytes_with_nul_to_string(&text).or_else(|_| {\n\n String::from_utf8(text).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })\n\n}\n\n\n", "file_path": "noodles-bam/src/reader.rs", "rank": 66, "score": 358524.2894288243 }, { "content": "fn read_metadata<R>(reader: &mut R) -> io::Result<Metadata>\n\nwhere\n\n R: Read,\n\n{\n\n use reference_sequence::bin::METADATA_CHUNK_COUNT;\n\n\n\n let n_chunk = reader.read_u32::<LittleEndian>()?;\n\n\n\n if n_chunk != METADATA_CHUNK_COUNT {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\n\n \"invalid metadata pseudo-bin chunk count: expected {}, got {}\",\n\n METADATA_CHUNK_COUNT, n_chunk\n\n ),\n\n ));\n\n }\n\n\n\n let ref_beg = reader\n\n .read_u64::<LittleEndian>()\n", "file_path": "noodles-tabix/src/reader.rs", "rank": 67, "score": 358524.2894288243 }, { "content": "fn read_float<R>(reader: &mut R) -> io::Result<f32>\n\nwhere\n\n R: Read,\n\n{\n\n reader.read_f32::<LittleEndian>()\n\n}\n\n\n", "file_path": "noodles-bcf/src/reader/value.rs", "rank": 68, "score": 356138.86364048265 }, { "content": "fn read_i8<R>(reader: &mut R) -> io::Result<i8>\n\nwhere\n\n R: Read,\n\n{\n\n reader.read_i8()\n\n}\n\n\n", "file_path": "noodles-bcf/src/reader/value.rs", "rank": 69, "score": 356138.86364048265 }, { "content": "fn read_i16<R>(reader: &mut R) -> io::Result<i16>\n\nwhere\n\n R: Read,\n\n{\n\n reader.read_i16::<LittleEndian>()\n\n}\n\n\n", "file_path": "noodles-bcf/src/reader/value.rs", "rank": 70, "score": 356138.86364048265 }, { "content": "fn read_metadata<R>(reader: &mut R) -> io::Result<Metadata>\n\nwhere\n\n R: Read,\n\n{\n\n use reference_sequence::bin::METADATA_CHUNK_COUNT;\n\n\n\n let n_chunk = reader.read_u32::<LittleEndian>()?;\n\n\n\n if n_chunk != METADATA_CHUNK_COUNT {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\n\n \"invalid metadata 
pseudo-bin chunk count: expected {}, got {}\",\n\n METADATA_CHUNK_COUNT, n_chunk\n\n ),\n\n ));\n\n }\n\n\n\n let ref_beg = reader\n\n .read_u64::<LittleEndian>()\n", "file_path": "noodles-bam/src/bai/reader.rs", "rank": 71, "score": 356138.86364048265 }, { "content": "fn read_magic<R>(reader: &mut R) -> io::Result<[u8; 4]>\n\nwhere\n\n R: Read,\n\n{\n\n let mut magic = [0; 4];\n\n reader.read_exact(&mut magic)?;\n\n Ok(magic)\n\n}\n\n\n", "file_path": "noodles-bam/src/reader.rs", "rank": 72, "score": 355899.0334720662 }, { "content": "fn read_array<R>(reader: &mut R) -> io::Result<Value>\n\nwhere\n\n R: BufRead,\n\n{\n\n let subtype = reader.read_u8().and_then(|b| {\n\n Subtype::try_from(b).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let len = reader.read_i32::<LittleEndian>()? as usize;\n\n\n\n match subtype {\n\n Subtype::Int8 => {\n\n let mut buf = vec![0; len];\n\n reader.read_i8_into(&mut buf)?;\n\n Ok(Value::Int8Array(buf))\n\n }\n\n Subtype::UInt8 => {\n\n let mut buf = vec![0; len];\n\n reader.read_exact(&mut buf)?;\n\n Ok(Value::UInt8Array(buf))\n", "file_path": "noodles-bam/src/record/data/reader.rs", "rank": 73, "score": 353806.8417372565 }, { "content": "fn read_id<R>(reader: &mut R) -> io::Result<Ids>\n\nwhere\n\n R: Read,\n\n{\n\n match read_value(reader)? 
{\n\n Some(Value::String(Some(id))) => id\n\n .parse()\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)),\n\n Some(Value::String(None)) => Ok(Ids::default()),\n\n v => Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"expected string, got {:?}\", v),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "noodles-bcf/src/reader/record/site.rs", "rank": 74, "score": 353806.8417372565 }, { "content": "fn read_gamma_encoding<R>(reader: &mut R) -> io::Result<Encoding>\n\nwhere\n\n R: Read,\n\n{\n\n let args = read_args(reader)?;\n\n let mut args_reader = &args[..];\n\n\n\n let offset = read_itf8(&mut args_reader)?;\n\n\n\n Ok(Encoding::Gamma(offset))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_read_null_encoding() -> io::Result<()> {\n\n let data = [\n\n 0, // null encoding ID\n", "file_path": "noodles-cram/src/reader/encoding.rs", "rank": 75, "score": 353806.8417372565 }, { "content": "fn read_string<R>(reader: &mut R) -> io::Result<String>\n\nwhere\n\n R: BufRead,\n\n{\n\n let mut buf = Vec::new();\n\n reader.read_until(b'\\0', &mut buf)?;\n\n buf.pop();\n\n String::from_utf8(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n}\n\n\n", "file_path": "noodles-bam/src/record/data/reader.rs", "rank": 76, "score": 353806.8417372565 }, { "content": "fn read_external_encoding<R>(reader: &mut R) -> io::Result<Encoding>\n\nwhere\n\n R: Read,\n\n{\n\n let args = read_args(reader)?;\n\n let mut args_reader = &args[..];\n\n\n\n let block_content_id = read_itf8(&mut args_reader)?;\n\n\n\n Ok(Encoding::External(block_content_id))\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/encoding.rs", "rank": 77, "score": 353806.8417372565 }, { "content": "fn read_reference_sequence<R>(reader: &mut R) -> io::Result<ReferenceSequence>\n\nwhere\n\n R: Read,\n\n{\n\n let l_name = reader.read_u32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n 
})?;\n\n\n\n let mut c_name = vec![0; l_name];\n\n reader.read_exact(&mut c_name)?;\n\n\n\n let name = bytes_with_nul_to_string(&c_name)?;\n\n let l_ref = reader.read_u32::<LittleEndian>().and_then(|len| {\n\n i32::try_from(len).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n ReferenceSequence::new(name, l_ref).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n}\n\n\n\npub(crate) fn bytes_with_nul_to_string(buf: &[u8]) -> io::Result<String> {\n", "file_path": "noodles-bam/src/reader.rs", "rank": 78, "score": 353806.8417372565 }, { "content": "fn read_names<R>(reader: &mut R) -> io::Result<ReferenceSequenceNames>\n\nwhere\n\n R: Read,\n\n{\n\n let l_nm = reader.read_i32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut names = vec![0; l_nm];\n\n reader.read_exact(&mut names)?;\n\n\n\n parse_names(&names)\n\n}\n\n\n\npub(crate) fn parse_names(buf: &[u8]) -> io::Result<ReferenceSequenceNames> {\n\n let mut names = ReferenceSequenceNames::new();\n\n let mut start = 0;\n\n\n\n loop {\n\n let buf = &buf[start..];\n", "file_path": "noodles-tabix/src/reader.rs", "rank": 79, "score": 353806.8417372565 }, { "content": "fn read_huffman_encoding<R>(reader: &mut R) -> io::Result<Encoding>\n\nwhere\n\n R: Read,\n\n{\n\n let args = read_args(reader)?;\n\n let mut args_reader = &args[..];\n\n\n\n let alphabet_len = read_itf8(&mut args_reader)? as usize;\n\n let mut alphabet = Vec::with_capacity(alphabet_len);\n\n\n\n for _ in 0..alphabet_len {\n\n let symbol = read_itf8(&mut args_reader)?;\n\n alphabet.push(symbol);\n\n }\n\n\n\n let bit_lens_len = read_itf8(&mut args_reader)? 
as usize;\n\n let mut bit_lens = Vec::with_capacity(bit_lens_len);\n\n\n\n for _ in 0..bit_lens_len {\n\n let len = read_itf8(&mut args_reader)?;\n\n bit_lens.push(len);\n\n }\n\n\n\n Ok(Encoding::Huffman(alphabet, bit_lens))\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/encoding.rs", "rank": 80, "score": 353806.8417372565 }, { "content": "fn read_beta_encoding<R>(reader: &mut R) -> io::Result<Encoding>\n\nwhere\n\n R: Read,\n\n{\n\n let args = read_args(reader)?;\n\n let mut args_reader = &args[..];\n\n\n\n let offset = read_itf8(&mut args_reader)?;\n\n let len = read_itf8(&mut args_reader)?;\n\n\n\n Ok(Encoding::Beta(offset, len))\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/encoding.rs", "rank": 81, "score": 353806.8417372565 }, { "content": "fn read_subexp_encoding<R>(reader: &mut R) -> io::Result<Encoding>\n\nwhere\n\n R: Read,\n\n{\n\n let args = read_args(reader)?;\n\n let mut args_reader = &args[..];\n\n\n\n let offset = read_itf8(&mut args_reader)?;\n\n let k = read_itf8(&mut args_reader)?;\n\n\n\n Ok(Encoding::Subexp(offset, k))\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/encoding.rs", "rank": 82, "score": 353806.8417372565 }, { "content": "fn read_aux<R>(reader: &mut R) -> io::Result<Vec<u8>>\n\nwhere\n\n R: Read,\n\n{\n\n let l_aux = reader.read_i32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut aux = vec![0; l_aux];\n\n reader.read_exact(&mut aux)?;\n\n\n\n Ok(aux)\n\n}\n\n\n", "file_path": "noodles-csi/src/reader.rs", "rank": 83, "score": 353513.6076837245 }, { "content": "fn read_header<R>(reader: &mut R) -> io::Result<index::Header>\n\nwhere\n\n R: Read,\n\n{\n\n let format = reader.read_i32::<LittleEndian>().and_then(|n| {\n\n Format::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let col_seq = reader.read_i32::<LittleEndian>().and_then(|i| {\n\n usize::try_from(i).map_err(|e| 
io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let col_beg = reader.read_i32::<LittleEndian>().and_then(|i| {\n\n usize::try_from(i).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let col_end = reader.read_i32::<LittleEndian>().and_then(|i| {\n\n if i == 0 {\n\n Ok(None)\n\n } else {\n", "file_path": "noodles-tabix/src/reader.rs", "rank": 84, "score": 353513.6076837245 }, { "content": "fn read_file_id<R>(reader: &mut R) -> io::Result<[u8; 20]>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buf = [0; 20];\n\n reader.read_exact(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n\npub(crate) fn read_file_header_block(block: &Block) -> io::Result<String> {\n\n use crate::container::block::ContentType;\n\n\n\n if block.content_type() != ContentType::FileHeader {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\n\n \"invalid block content type: expected {:?}, got {:?}\",\n\n ContentType::FileHeader,\n\n block.content_type()\n\n ),\n", "file_path": "noodles-cram/src/reader.rs", "rank": 85, "score": 353513.6076837245 }, { "content": "fn read_args<R>(reader: &mut R) -> io::Result<Vec<u8>>\n\nwhere\n\n R: Read,\n\n{\n\n let len = read_itf8(reader)?;\n\n let mut buf = vec![0; len as usize];\n\n reader.read_exact(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/encoding.rs", "rank": 86, "score": 351181.58578049834 }, { "content": "fn read_landmarks<R>(reader: &mut R) -> io::Result<Vec<Itf8>>\n\nwhere\n\n R: Read,\n\n{\n\n let len = read_itf8(reader).map(|l| l as usize)?;\n\n let mut buf = Vec::with_capacity(len);\n\n\n\n for _ in 0..len {\n\n let pos = read_itf8(reader)?;\n\n buf.push(pos);\n\n }\n\n\n\n Ok(buf)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "noodles-cram/src/reader/container.rs", "rank": 87, "score": 351181.58578049834 }, { "content": "fn read_reference_md5<R>(reader: &mut R) -> io::Result<[u8; 16]>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buf = 
[0; 16];\n\n reader.read_exact(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/slice.rs", "rank": 88, "score": 351181.58578049834 }, { "content": "fn read_tag<R>(reader: &mut R) -> io::Result<[u8; 2]>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buf = [0; 2];\n\n reader.read_exact(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "noodles-bam/src/record/data/reader.rs", "rank": 89, "score": 351181.58578049834 }, { "content": "/// Optimizes a list of chunks into a list of non-overlapping chunks.\n\n///\n\n/// Unlike [`merge_chunks`], `min_offset` (typically from the linear index) is given to remove\n\n/// chunks that cannot be in the query.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use noodles_bgzf as bgzf;\n\n/// use noodles_csi::{\n\n/// binning_index::optimize_chunks,\n\n/// index::reference_sequence::bin::Chunk,\n\n/// };\n\n///\n\n/// let chunks = [\n\n/// Chunk::new(bgzf::VirtualPosition::from(2), bgzf::VirtualPosition::from(3)),\n\n/// Chunk::new(bgzf::VirtualPosition::from(5), bgzf::VirtualPosition::from(8)),\n\n/// Chunk::new(bgzf::VirtualPosition::from(7), bgzf::VirtualPosition::from(13)),\n\n/// Chunk::new(bgzf::VirtualPosition::from(21), bgzf::VirtualPosition::from(34)),\n\n/// ];\n\n/// let min_offset = bgzf::VirtualPosition::from(5);\n\n///\n\n/// let actual = optimize_chunks(&chunks, min_offset);\n\n///\n\n/// let expected = [\n\n/// Chunk::new(bgzf::VirtualPosition::from(5), bgzf::VirtualPosition::from(13)),\n\n/// Chunk::new(bgzf::VirtualPosition::from(21), bgzf::VirtualPosition::from(34)),\n\n/// ];\n\n///\n\n/// assert_eq!(actual, expected);\n\n/// ```\n\npub fn optimize_chunks(chunks: &[Chunk], min_offset: bgzf::VirtualPosition) -> Vec<Chunk> {\n\n let mut chunks: Vec<_> = chunks\n\n .iter()\n\n .filter(|c| c.end() > min_offset)\n\n .copied()\n\n .collect();\n\n\n\n if chunks.is_empty() {\n\n return chunks;\n\n }\n\n\n\n chunks.sort_unstable_by_key(|c| c.start());\n\n\n\n // At worst, no chunks are merged, and the 
resulting list will be the same size as the input.\n\n let mut merged_chunks = Vec::with_capacity(chunks.len());\n\n\n\n // `chunks` is guaranteed to be non-empty.\n\n let mut current_chunk = chunks[0];\n\n\n\n for next_chunk in chunks.iter().skip(1) {\n", "file_path": "noodles-csi/src/binning_index.rs", "rank": 90, "score": 350721.44073840487 }, { "content": "fn read_byte_array_len_encoding<R>(reader: &mut R) -> io::Result<Encoding>\n\nwhere\n\n R: Read,\n\n{\n\n let args = read_args(reader)?;\n\n let mut args_reader = &args[..];\n\n\n\n let len_encoding = read_encoding(&mut args_reader)?;\n\n let value_encoding = read_encoding(&mut args_reader)?;\n\n\n\n Ok(Encoding::ByteArrayLen(\n\n Box::new(len_encoding),\n\n Box::new(value_encoding),\n\n ))\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/encoding.rs", "rank": 91, "score": 349295.6657318298 }, { "content": "fn read_byte_array_stop_encoding<R>(reader: &mut R) -> io::Result<Encoding>\n\nwhere\n\n R: Read,\n\n{\n\n let args = read_args(reader)?;\n\n let mut args_reader = &args[..];\n\n\n\n let stop_byte = args_reader.read_u8()?;\n\n let block_content_id = read_itf8(&mut args_reader)?;\n\n\n\n Ok(Encoding::ByteArrayStop(stop_byte, block_content_id))\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/encoding.rs", "rank": 92, "score": 349295.6657318298 }, { "content": "fn read_bool<R>(reader: &mut R) -> io::Result<bool>\n\nwhere\n\n R: Read,\n\n{\n\n match reader.read_u8() {\n\n Ok(0) => Ok(false),\n\n Ok(1) => Ok(true),\n\n Ok(_) => Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"invalid bool value\",\n\n )),\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/compression_header/preservation_map.rs", "rank": 93, "score": 349295.66573182977 }, { "content": "fn read_references<R>(reader: &mut R) -> io::Result<Vec<ReferenceSequence>>\n\nwhere\n\n R: Read,\n\n{\n\n let n_ref = reader.read_u32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| 
io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut references = Vec::with_capacity(n_ref);\n\n\n\n for _ in 0..n_ref {\n\n let (bins, metadata) = read_bins(reader)?;\n\n let intervals = read_intervals(reader)?;\n\n references.push(ReferenceSequence::new(bins, intervals, metadata));\n\n }\n\n\n\n Ok(references)\n\n}\n\n\n", "file_path": "noodles-bam/src/bai/reader.rs", "rank": 94, "score": 348901.1096863324 }, { "content": "fn read_optional_tags<R>(reader: &mut R) -> io::Result<Vec<u8>>\n\nwhere\n\n R: Read,\n\n{\n\n let len = match read_itf8(reader) {\n\n Ok(len) => len as usize,\n\n Err(ref e) if e.kind() == io::ErrorKind::UnexpectedEof => return Ok(Vec::new()),\n\n Err(e) => return Err(e),\n\n };\n\n\n\n let mut buf = vec![0; len];\n\n reader.read_exact(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "noodles-cram/src/reader/slice.rs", "rank": 95, "score": 348901.1096863324 }, { "content": "fn read_unplaced_unmapped_record_count<R>(reader: &mut R) -> io::Result<Option<u64>>\n\nwhere\n\n R: Read,\n\n{\n\n match reader.read_u64::<LittleEndian>() {\n\n Ok(n) => Ok(Some(n)),\n\n Err(ref e) if e.kind() == io::ErrorKind::UnexpectedEof => Ok(None),\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_read_magic_with_invalid_magic_number() {\n\n let data = [];\n\n let mut reader = &data[..];\n\n assert!(matches!(\n", "file_path": "noodles-tabix/src/reader.rs", "rank": 96, "score": 346670.40977507166 }, { "content": "fn read_block_content_ids<R>(reader: &mut R) -> io::Result<Vec<Itf8>>\n\nwhere\n\n R: Read,\n\n{\n\n let len = read_itf8(reader).map(|i| i as usize)?;\n\n let mut buf = Vec::with_capacity(len);\n\n\n\n for _ in 0..len {\n\n let value = read_itf8(reader)?;\n\n buf.push(value);\n\n }\n\n\n\n Ok(buf)\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/slice.rs", "rank": 97, "score": 346670.40977507166 }, { "content": 
"fn read_unplaced_unmapped_record_count<R>(reader: &mut R) -> io::Result<Option<u64>>\n\nwhere\n\n R: Read,\n\n{\n\n match reader.read_u64::<LittleEndian>() {\n\n Ok(n) => Ok(Some(n)),\n\n Err(ref e) if e.kind() == io::ErrorKind::UnexpectedEof => Ok(None),\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_read_magic() {\n\n let data = b\"CSI\\x01\";\n\n let mut reader = &data[..];\n\n assert!(read_magic(&mut reader).is_ok());\n", "file_path": "noodles-csi/src/reader.rs", "rank": 98, "score": 346670.40977507166 }, { "content": "fn read_substitution_matrix<R>(reader: &mut R) -> io::Result<SubstitutionMatrix>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buf = [0; 5];\n\n reader.read_exact(&mut buf[..])?;\n\n SubstitutionMatrix::try_from(&buf[..])\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/compression_header/preservation_map.rs", "rank": 99, "score": 344976.92650618264 } ]
Rust
src/spawn/options.rs
storyai/rust-heph
aa6ccf79de85c0abccb6a26f920c7d9d405e80cf
use std::cmp::Ordering; use std::num::NonZeroU8; use std::ops::Mul; use std::time::Duration; #[derive(Clone, Debug)] pub struct ActorOptions { priority: Priority, ready: bool, } impl ActorOptions { pub const fn priority(&self) -> Priority { self.priority } pub const fn with_priority(mut self, priority: Priority) -> Self { self.priority = priority; self } pub const fn is_ready(&self) -> bool { self.ready } pub const fn mark_ready(mut self, ready: bool) -> Self { self.ready = ready; self } } impl Default for ActorOptions { fn default() -> ActorOptions { ActorOptions { priority: Priority::default(), ready: true, } } } #[derive(Copy, Clone, Debug, Eq, PartialEq)] #[repr(transparent)] pub struct Priority(NonZeroU8); impl Priority { pub const LOW: Priority = Priority(NonZeroU8::new(15).unwrap()); pub const NORMAL: Priority = Priority(NonZeroU8::new(10).unwrap()); pub const HIGH: Priority = Priority(NonZeroU8::new(5).unwrap()); } impl Default for Priority { fn default() -> Priority { Priority::NORMAL } } impl Ord for Priority { fn cmp(&self, other: &Self) -> Ordering { other.0.cmp(&self.0) } } impl PartialOrd for Priority { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { other.0.partial_cmp(&self.0) } fn lt(&self, other: &Self) -> bool { other.0 < self.0 } fn le(&self, other: &Self) -> bool { other.0 <= self.0 } fn gt(&self, other: &Self) -> bool { other.0 > self.0 } fn ge(&self, other: &Self) -> bool { other.0 >= self.0 } } #[doc(hidden)] impl Mul<Priority> for Duration { type Output = Duration; fn mul(self, rhs: Priority) -> Duration { self * u32::from(rhs.0.get()) } } #[test] fn priority_duration_multiplication() { let duration = Duration::from_millis(1); let high = duration * Priority::HIGH; let normal = duration * Priority::NORMAL; let low = duration * Priority::LOW; assert!(high < normal); assert!(normal < low); assert!(high < low); } #[derive(Debug, Default)] pub struct SyncActorOptions { thread_name: Option<String>, } impl SyncActorOptions { pub fn 
name(&self) -> Option<&str> { self.thread_name.as_deref() } pub(crate) fn take_name(self) -> Option<String> { self.thread_name } pub fn with_name(mut self, thread_name: String) -> Self { self.thread_name = Some(thread_name); self } } #[derive(Clone, Debug, Default)] pub struct FutureOptions { priority: Priority, } impl FutureOptions { pub const fn priority(&self) -> Priority { self.priority } pub const fn with_priority(mut self, priority: Priority) -> Self { self.priority = priority; self } }
use std::cmp::Ordering; use std::num::NonZeroU8; use std::ops::Mul; use std::time::Duration; #[derive(Clone, Debug)] pub struct ActorOptions { priority: Priority, ready: bool, } impl ActorOptions { pub const fn priority(&self) -> Priority { self.priority } pub const fn with_priority(mut self, priority: Priority) -> Self { self.priority = priority; self } pub const fn is_ready(&self) -> bool { self.ready } pub const fn mark_ready(mut self, ready: bool) -> Self { self.ready = ready; self } } impl Default for ActorOptions { fn default() -> ActorOptions { ActorOptions { priority: Priori
-> Priority { Priority::NORMAL } } impl Ord for Priority { fn cmp(&self, other: &Self) -> Ordering { other.0.cmp(&self.0) } } impl PartialOrd for Priority { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { other.0.partial_cmp(&self.0) } fn lt(&self, other: &Self) -> bool { other.0 < self.0 } fn le(&self, other: &Self) -> bool { other.0 <= self.0 } fn gt(&self, other: &Self) -> bool { other.0 > self.0 } fn ge(&self, other: &Self) -> bool { other.0 >= self.0 } } #[doc(hidden)] impl Mul<Priority> for Duration { type Output = Duration; fn mul(self, rhs: Priority) -> Duration { self * u32::from(rhs.0.get()) } } #[test] fn priority_duration_multiplication() { let duration = Duration::from_millis(1); let high = duration * Priority::HIGH; let normal = duration * Priority::NORMAL; let low = duration * Priority::LOW; assert!(high < normal); assert!(normal < low); assert!(high < low); } #[derive(Debug, Default)] pub struct SyncActorOptions { thread_name: Option<String>, } impl SyncActorOptions { pub fn name(&self) -> Option<&str> { self.thread_name.as_deref() } pub(crate) fn take_name(self) -> Option<String> { self.thread_name } pub fn with_name(mut self, thread_name: String) -> Self { self.thread_name = Some(thread_name); self } } #[derive(Clone, Debug, Default)] pub struct FutureOptions { priority: Priority, } impl FutureOptions { pub const fn priority(&self) -> Priority { self.priority } pub const fn with_priority(mut self, priority: Priority) -> Self { self.priority = priority; self } }
ty::default(), ready: true, } } } #[derive(Copy, Clone, Debug, Eq, PartialEq)] #[repr(transparent)] pub struct Priority(NonZeroU8); impl Priority { pub const LOW: Priority = Priority(NonZeroU8::new(15).unwrap()); pub const NORMAL: Priority = Priority(NonZeroU8::new(10).unwrap()); pub const HIGH: Priority = Priority(NonZeroU8::new(5).unwrap()); } impl Default for Priority { fn default()
random
[ { "content": "#[track_caller]\n\npub fn is_ready<E>(poll: Poll<Result<(), E>>) -> bool\n\nwhere\n\n E: fmt::Display,\n\n{\n\n match poll {\n\n Poll::Ready(Ok(())) => true,\n\n Poll::Ready(Err(err)) => panic!(\"unexpected error: {}\", err),\n\n Poll::Pending => false,\n\n }\n\n}\n\n\n\n/// Returns a [`Future`] that return [`Poll::Pending`] once, without waking\n\n/// itself.\n\npub const fn pending_once() -> PendingOnce {\n\n PendingOnce(false)\n\n}\n\n\n\npub struct PendingOnce(bool);\n\n\n\nimpl Future for PendingOnce {\n", "file_path": "tests/util/mod.rs", "rank": 0, "score": 205914.52887715923 }, { "content": "/// Returns `true` if the tagged pointer is a marker that the process is\n\n/// ready-to-run.\n\nfn is_ready_marker(ptr: TaggedPointer) -> bool {\n\n has_tag(ptr, READY_TO_RUN)\n\n}\n\n\n", "file_path": "src/rt/shared/scheduler/inactive.rs", "rank": 1, "score": 159606.92328849167 }, { "content": "#[track_caller]\n\npub fn expect_ready<T>(poll: Poll<T>, expected: T)\n\nwhere\n\n T: fmt::Debug + PartialEq,\n\n{\n\n match poll {\n\n Poll::Pending => panic!(\"unexpected `Poll::Pending`\"),\n\n Poll::Ready(value) => assert_eq!(value, expected),\n\n }\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 2, "score": 143712.92343889328 }, { "content": "/// Returns a reference to a fake local runtime.\n\n///\n\n/// # Notes\n\n///\n\n/// The returned runtime reference is **not** a reference to the *test* runtime\n\n/// as described in the module documentation.\n\npub fn runtime() -> RuntimeRef {\n\n thread_local! 
{\n\n /// Per thread runtime.\n\n static TEST_RT: Runtime = {\n\n let (_, receiver) = rt::channel::new()\n\n .expect(\"failed to create runtime channel for test module\");\n\n Runtime::new_test(SHARED_INTERNAL.clone(), receiver)\n\n .expect(\"failed to create local `Runtime` for test module\")\n\n };\n\n }\n\n\n\n TEST_RT.with(Runtime::create_ref)\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 3, "score": 138782.21457753584 }, { "content": "#[allow(clippy::type_complexity)]\n\npub fn init_actor<NA>(\n\n new_actor: NA,\n\n arg: NA::Argument,\n\n) -> Result<(NA::Actor, ActorRef<NA::Message>), NA::Error>\n\nwhere\n\n NA: NewActor<RuntimeAccess = ThreadSafe>,\n\n{\n\n init_actor_with_inbox(new_actor, arg).map(|(actor, _, actor_ref)| (actor, actor_ref))\n\n}\n\n\n\n/// Initialise a thread-local actor with access to it's inbox.\n\n#[allow(clippy::type_complexity)]\n\npub(crate) fn init_local_actor_with_inbox<NA>(\n\n mut new_actor: NA,\n\n arg: NA::Argument,\n\n) -> Result<(NA::Actor, Manager<NA::Message>, ActorRef<NA::Message>), NA::Error>\n\nwhere\n\n NA: NewActor<RuntimeAccess = ThreadLocal>,\n\n{\n\n let (manager, sender, receiver) = Manager::new_small_channel();\n", "file_path": "src/test.rs", "rank": 4, "score": 138782.21457753584 }, { "content": "#[allow(clippy::type_complexity)]\n\npub fn init_local_actor<NA>(\n\n new_actor: NA,\n\n arg: NA::Argument,\n\n) -> Result<(NA::Actor, ActorRef<NA::Message>), NA::Error>\n\nwhere\n\n NA: NewActor<RuntimeAccess = ThreadLocal>,\n\n{\n\n init_local_actor_with_inbox(new_actor, arg).map(|(actor, _, actor_ref)| (actor, actor_ref))\n\n}\n\n\n\n/// Initialise a thread-safe actor.\n", "file_path": "src/test.rs", "rank": 5, "score": 136050.1343704535 }, { "content": "/// Bind to any IPv4 port on localhost.\n\npub fn any_local_address() -> SocketAddr {\n\n \"127.0.0.1:0\".parse().unwrap()\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 6, "score": 133489.4505313995 }, { "content": "/// Returns an address to which the 
connection will be refused.\n\npub fn refused_address() -> SocketAddr {\n\n \"0.0.0.0:1\".parse().unwrap()\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 7, "score": 133489.4505313995 }, { "content": "/// Attempt to spawn a thread-safe actor on the *test* runtime.\n\n///\n\n/// See the [module documentation] for more information about the *test*\n\n/// runtime. And see the [`Spawn`] trait for more information about spawning\n\n/// actors.\n\n///\n\n/// [module documentation]: crate::test\n\n/// [`Spawn`]: crate::spawn::Spawn\n\n///\n\n/// # Notes\n\n///\n\n/// This requires the `Supervisor` (`S`) and `NewActor` (`NA`) to be [`Send`] as\n\n/// they are send to another thread which runs the *test* runtime (and thus the\n\n/// actor). The actor (`NA::Actor`) itself doesn't have to be `Send`.\n\npub fn try_spawn<S, NA>(\n\n supervisor: S,\n\n new_actor: NA,\n\n arg: NA::Argument,\n\n options: ActorOptions,\n\n) -> Result<ActorRef<NA::Message>, NA::Error>\n\nwhere\n\n S: Supervisor<NA> + Send + Sync + 'static,\n\n NA: NewActor<RuntimeAccess = ThreadSafe> + Sync + Send + 'static,\n\n NA::Actor: Send + Sync + 'static,\n\n NA::Message: Send,\n\n NA::Argument: Send,\n\n NA::Error: Send,\n\n{\n\n run_on_test_runtime_wait(move |mut runtime_ref| {\n\n runtime_ref.try_spawn(supervisor, new_actor, arg, options)\n\n })\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 8, "score": 131548.60851922166 }, { "content": "/// Bind to any IPv6 port on localhost.\n\npub fn any_local_ipv6_address() -> SocketAddr {\n\n \"[::1]:0\".parse().unwrap()\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 9, "score": 131084.5248282831 }, { "content": "/// Attempt to spawn a thread-local actor on the *test* runtime.\n\n///\n\n/// See the [module documentation] for more information about the *test*\n\n/// runtime. 
And see the [`Spawn`] trait for more information about spawning\n\n/// actors.\n\n///\n\n/// [module documentation]: crate::test\n\n/// [`Spawn`]: crate::spawn::Spawn\n\n///\n\n/// # Notes\n\n///\n\n/// This requires the `Supervisor` (`S`) and `NewActor` (`NA`) to be [`Send`] as\n\n/// they are send to another thread which runs the *test* runtime (and thus the\n\n/// actor). The actor (`NA::Actor`) itself doesn't have to be `Send`.\n\npub fn try_spawn_local<S, NA>(\n\n supervisor: S,\n\n new_actor: NA,\n\n arg: NA::Argument,\n\n options: ActorOptions,\n\n) -> Result<ActorRef<NA::Message>, NA::Error>\n\nwhere\n\n S: Supervisor<NA> + Send + 'static,\n\n NA: NewActor<RuntimeAccess = ThreadLocal> + Send + 'static,\n\n NA::Actor: 'static,\n\n NA::Message: Send,\n\n NA::Argument: Send,\n\n NA::Error: Send,\n\n{\n\n run_on_test_runtime_wait(move |mut runtime_ref| {\n\n runtime_ref.try_spawn_local(supervisor, new_actor, arg, options)\n\n })\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 10, "score": 128987.92468016763 }, { "content": "pub fn assert_send<T: Send>() {}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 11, "score": 128987.92468016763 }, { "content": "pub fn assert_sync<T: Sync>() {}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 12, "score": 128987.92468016763 }, { "content": "#[track_caller]\n\npub fn expect_ready_ok<T, E>(poll: Poll<Result<T, E>>, expected: T)\n\nwhere\n\n T: fmt::Debug + PartialEq,\n\n E: fmt::Display,\n\n{\n\n match poll {\n\n Poll::Pending => panic!(\"unexpected `Poll::Pending`\"),\n\n Poll::Ready(Ok(value)) => assert_eq!(value, expected),\n\n Poll::Ready(Err(err)) => panic!(\"unexpected error: {}\", err),\n\n }\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 13, "score": 127165.82449732655 }, { "content": "pub fn remove(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"Removing timer\");\n\n binary_heap::remove(&mut group);\n\n btreemap::remove(&mut group);\n\n sorted_vec::remove(&mut group);\n\n 
group.finish();\n\n}\n\n\n", "file_path": "benches/timers_container/bench.rs", "rank": 14, "score": 125174.27980310982 }, { "content": "pub fn add(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"Adding timer\");\n\n binary_heap::add_timer(&mut group);\n\n btreemap::add_timer(&mut group);\n\n sorted_vec::add_timer(&mut group);\n\n group.finish();\n\n}\n\n\n", "file_path": "benches/timers_container/bench.rs", "rank": 15, "score": 125174.27980310982 }, { "content": "struct ActorImpl;\n\n\n\nimpl Actor for ActorImpl {\n\n type Error = &'static str;\n\n\n\n fn try_poll(self: Pin<&mut Self>, _: &mut task::Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n unimplemented!()\n\n }\n\n}\n\n\n", "file_path": "tests/functional/restart_supervisor.rs", "rank": 16, "score": 125143.12167576881 }, { "content": "/// Returns `true` if the tagged pointer points to a process.\n\nfn is_process(ptr: TaggedPointer) -> bool {\n\n has_tag(ptr, PROCESS_TAG)\n\n}\n\n\n", "file_path": "src/rt/shared/scheduler/inactive.rs", "rank": 17, "score": 124757.00199968126 }, { "content": "/// Returns `true` if the tagged pointer points to a branch.\n\nfn is_branch(ptr: TaggedPointer) -> bool {\n\n has_tag(ptr, BRANCH_TAG)\n\n}\n\n\n", "file_path": "src/rt/shared/scheduler/inactive.rs", "rank": 18, "score": 124757.00199968126 }, { "content": "/// Set the percentage of messages lost on purpose.\n\n///\n\n/// This is useful to test the resilience of actors with respect to message\n\n/// loss. 
Any and all messages send, thus including remote and local messages,\n\n/// could be lost on purpose when using this function.\n\n///\n\n/// Note that the sending of the messages will not return an error if the\n\n/// message is lost using this function.\n\n///\n\n/// `percent` must be number between `0` and `100`, setting this to `0` (the\n\n/// default) will disable the message loss.\n\npub fn set_message_loss(mut percent: u8) {\n\n if percent > 100 {\n\n percent = 100;\n\n }\n\n MSG_LOSS.store(percent, Ordering::SeqCst)\n\n}\n\n\n\n/// Returns `true` if the message should be lost.\n\npub(crate) fn should_lose_msg() -> bool {\n\n // Safety: `Relaxed` is fine here as we'll get the update, sending a message\n\n // when we're not supposed to isn't too bad.\n\n let loss = MSG_LOSS.load(Ordering::Relaxed);\n\n loss != 0 || random_percentage() < loss\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 19, "score": 122778.83797026117 }, { "content": "#[track_caller]\n\npub fn assert_size<T>(expected: usize) {\n\n assert_eq!(size_of::<T>(), expected);\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 20, "score": 122769.35409999342 }, { "content": "pub fn remove_next(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"Removing next timer\");\n\n binary_heap::remove_next(&mut group);\n\n btreemap::remove_next(&mut group);\n\n sorted_vec::remove_next(&mut group);\n\n group.finish();\n\n}\n\n\n", "file_path": "benches/timers_container/bench.rs", "rank": 21, "score": 122769.35409999342 }, { "content": "struct NewActorImpl;\n\n\n\nimpl NewActor for NewActorImpl {\n\n type Message = !;\n\n type Argument = bool;\n\n type Actor = ActorImpl;\n\n type Error = &'static str;\n\n type RuntimeAccess = ThreadSafe;\n\n\n\n fn new(\n\n &mut self,\n\n _: actor::Context<Self::Message, Self::RuntimeAccess>,\n\n _: Self::Argument,\n\n ) -> Result<Self::Actor, Self::Error> {\n\n unimplemented!()\n\n }\n\n}\n\n\n", "file_path": "tests/functional/restart_supervisor.rs", "rank": 22, 
"score": 122350.1470096783 }, { "content": "pub fn remove_already_removed(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"Removing timer (already removed)\");\n\n binary_heap::remove_already_removed(&mut group);\n\n btreemap::remove_already_removed(&mut group);\n\n sorted_vec::remove_already_removed(&mut group);\n\n group.finish();\n\n}\n\n\n\nmod binary_heap {\n\n use std::cmp::Reverse;\n\n use std::collections::BinaryHeap;\n\n\n\n use criterion::measurement::Measurement;\n\n use criterion::{BatchSize, BenchmarkGroup};\n\n\n\n use crate::{new_timers, remove_timers, start_timers, Timer, START_SIZE};\n\n\n\n pub fn add_timer<M: Measurement>(group: &mut BenchmarkGroup<M>) {\n\n group.bench_function(\"BinaryHeap\", |b| {\n\n let initial_heap = create_heap();\n", "file_path": "benches/timers_container/bench.rs", "rank": 23, "score": 120506.39459506347 }, { "content": "/// Returns a path to a non-existing temporary file.\n\npub fn temp_file(name: &str) -> PathBuf {\n\n static CLEANUP: Once = Once::new();\n\n\n\n let mut dir = temp_dir();\n\n dir.push(\"heph.test/\");\n\n\n\n CLEANUP.call_once(|| {\n\n let _ = remove_dir_all(&dir);\n\n if let Err(err) = create_dir_all(&dir) {\n\n panic!(\"failed to create temporary directory: {}\", err);\n\n }\n\n });\n\n\n\n dir.push(name);\n\n dir\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 24, "score": 120506.39459506347 }, { "content": "#[test]\n\n#[allow(clippy::eq_op)] // Need to compare `Priority` to itself.\n\nfn priority() {\n\n assert!(Priority::HIGH > Priority::NORMAL);\n\n assert!(Priority::NORMAL > Priority::LOW);\n\n assert!(Priority::HIGH > Priority::LOW);\n\n\n\n assert_eq!(Priority::HIGH, Priority::HIGH);\n\n assert_ne!(Priority::HIGH, Priority::NORMAL);\n\n\n\n assert_eq!(Priority::default(), Priority::NORMAL);\n\n}\n\n\n", "file_path": "tests/functional/runtime.rs", "rank": 25, "score": 118145.38143642983 }, { "content": "#[track_caller]\n\npub fn expect_pending<T>(poll: Poll<T>)\n\nwhere\n\n T: 
fmt::Debug,\n\n{\n\n match poll {\n\n Poll::Pending => {} // Ok.\n\n Poll::Ready(value) => panic!(\"expected pending, got `Poll::Ready({:?})`\", value),\n\n }\n\n}\n\n\n", "file_path": "tests/util/mod.rs", "rank": 26, "score": 117234.14936254319 }, { "content": "/// Spawn `future` on the *test* runtime and wait for the result.\n\n///\n\n/// This is useful to test async functions and futures in synchronous tests.\n\npub fn block_on<Fut>(future: Fut) -> Fut::Output\n\nwhere\n\n Fut: Future + Send + 'static,\n\n Fut::Output: Send,\n\n{\n\n let (sender, receiver) = new_oneshot();\n\n let waker = SyncWaker::new();\n\n spawn_local_future(\n\n async move {\n\n let result = future.await;\n\n assert!(\n\n sender.try_send(result).is_ok(),\n\n \"failed to return future result\"\n\n );\n\n },\n\n FutureOptions::default(),\n\n );\n\n waker\n\n .block_on(receiver.recv_once())\n\n .expect(\"failed to receive result from future\")\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 27, "score": 116663.49725037988 }, { "content": "#[test]\n\nfn impl_for_vec() {\n\n let mut buf = Vec::<u8>::with_capacity(2 * DATA.len());\n\n assert_eq!(buf.spare_capacity(), 2 * DATA.len());\n\n assert!(buf.has_spare_capacity());\n\n let n = write_bytes(DATA, &mut buf);\n\n assert_eq!(n, DATA.len());\n\n assert_eq!(buf.len(), DATA.len());\n\n assert_eq!(&*buf, DATA);\n\n assert_eq!(buf.spare_capacity(), DATA.len());\n\n assert!(buf.has_spare_capacity());\n\n}\n\n\n", "file_path": "tests/functional/bytes.rs", "rank": 28, "score": 115001.04853764124 }, { "content": "/// Spawn a synchronous actor.\n\n///\n\n/// This returns the thread handle for the thread the synchronous actor is\n\n/// running on and an actor reference to the actor.\n\npub fn spawn_sync_actor<S, A, E, Arg, M>(\n\n supervisor: S,\n\n actor: A,\n\n arg: Arg,\n\n options: SyncActorOptions,\n\n) -> io::Result<(thread::JoinHandle<()>, ActorRef<M>)>\n\nwhere\n\n S: SyncSupervisor<A> + Send + 'static,\n\n A: SyncActor<Message = M, Argument = 
Arg, Error = E> + Send + 'static,\n\n Arg: Send + 'static,\n\n M: Send + 'static,\n\n{\n\n static SYNC_WORKER_TEST_ID: AtomicUsize = AtomicUsize::new(SYNC_WORKER_ID_START);\n\n let id = SYNC_WORKER_TEST_ID.fetch_add(1, Ordering::SeqCst);\n\n assert!(\n\n id < SYNC_WORKER_ID_END,\n\n \"spawned too many synchronous test actors\"\n\n );\n\n\n\n SyncWorker::start(id, supervisor, actor, arg, options, None).map(|(worker, actor_ref)| {\n\n let handle = worker.into_handle();\n\n (handle, actor_ref)\n\n })\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 29, "score": 114395.65606237529 }, { "content": "/// Returns `true` if the tagged pointer has a tag equal to `tag`.\n\nfn has_tag(ptr: TaggedPointer, tag: usize) -> bool {\n\n (ptr as usize & TAG_MASK) == tag\n\n}\n\n\n", "file_path": "src/rt/shared/scheduler/inactive.rs", "rank": 30, "score": 113471.34859339538 }, { "content": "/// Spawn a thread-safe [`Future`] on the *test* runtime.\n\n///\n\n/// See the [module documentation] for more information about the *test*\n\n/// runtime.\n\n///\n\n/// [module documentation]: crate::test\n\npub fn spawn_future<Fut>(future: Fut, options: FutureOptions)\n\nwhere\n\n Fut: Future<Output = ()> + Send + Sync + 'static,\n\n{\n\n run_on_test_runtime(move |mut runtime_ref| {\n\n runtime_ref.spawn_future(future, options);\n\n Ok(())\n\n });\n\n}\n\n\n\n/// Returned by [`join`] and [`join_many`].\n\n#[derive(Copy, Clone, Debug)]\n\n#[must_use = \"this `JoinResult` should be handled\"]\n\npub enum JoinResult {\n\n /// Actor(s) finished.\n\n Ok,\n\n /// Waiting for the actors timed out.\n\n TimedOut,\n\n}\n\n\n\nimpl JoinResult {\n\n /// Unwrap the `JoinResult` expecting [`JoinResult::Ok`].\n\n #[track_caller]\n\n pub fn unwrap(self) {\n\n if let JoinResult::TimedOut = self {\n\n panic!(\"joining actors timed out\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 31, "score": 112262.45245042419 }, { "content": "/// Spawn a thread-local [`Future`] on the *test* 
runtime.\n\n///\n\n/// See the [module documentation] for more information about the *test*\n\n/// runtime.\n\n///\n\n/// [module documentation]: crate::test\n\npub fn spawn_local_future<Fut>(future: Fut, options: FutureOptions)\n\nwhere\n\n Fut: Future<Output = ()> + Send + 'static,\n\n{\n\n run_on_test_runtime(move |mut runtime_ref| {\n\n runtime_ref.spawn_local_future(future, options);\n\n Ok(())\n\n });\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 33, "score": 110248.1556544991 }, { "content": "#[test]\n\nfn mark_ready() {\n\n let mut scheduler = Scheduler::new();\n\n\n\n // Incorrect (outdated) pid should be ok.\n\n scheduler.mark_ready(ProcessId(1));\n\n\n\n let actor_entry = scheduler.add_actor();\n\n let pid = actor_entry.pid();\n\n let new_actor = simple_actor as fn(_) -> _;\n\n let (actor, inbox, _) = init_local_actor_with_inbox(new_actor, ()).unwrap();\n\n actor_entry.add(\n\n Priority::NORMAL,\n\n NoSupervisor,\n\n new_actor,\n\n actor,\n\n inbox,\n\n false,\n\n );\n\n\n\n scheduler.mark_ready(pid);\n\n assert!(scheduler.has_process());\n\n assert!(scheduler.has_ready_process());\n\n}\n\n\n", "file_path": "src/rt/local/scheduler/tests.rs", "rank": 34, "score": 109325.36351099865 }, { "content": "/// Returns the name of the binary called (i.e. 
`arg[0]`) as name.\n\nfn default_app_name() -> String {\n\n match env::args().next() {\n\n Some(mut bin_path) => {\n\n if let Some(idx) = bin_path.rfind('/') {\n\n drop(bin_path.drain(..=idx));\n\n }\n\n bin_path\n\n }\n\n None => \"<unknown>\".to_string(),\n\n }\n\n}\n", "file_path": "src/rt/setup.rs", "rank": 35, "score": 106611.18398613967 }, { "content": "/// Wrapper around an [`ActorRef`] to change the message type.\n\nstruct MappedActorRefFn<M, F> {\n\n actor_ref: ActorRef<M>,\n\n map: F,\n\n}\n\n\n\nimpl<M, Msg, F, E> MappedActorRef<Msg> for MappedActorRefFn<M, F>\n\nwhere\n\n F: Fn(Msg) -> Result<M, E>,\n\n{\n\n fn try_mapped_send(&self, msg: Msg) -> Result<(), SendError> {\n\n match (self.map)(msg) {\n\n Ok(msg) => self.actor_ref.try_send(msg),\n\n Err(..) => Err(SendError),\n\n }\n\n }\n\n\n\n fn mapped_send<'r, 'fut>(\n\n &'r self,\n\n msg: Msg,\n\n ) -> Pin<Box<dyn Future<Output = Result<(), SendError>> + 'fut>>\n", "file_path": "src/actor_ref/mod.rs", "rank": 36, "score": 106305.38775321055 }, { "content": "#[test]\n\nfn marking_unknown_pid_as_ready() {\n\n let scheduler = Scheduler::new();\n\n\n\n assert!(!scheduler.has_process());\n\n assert!(!scheduler.has_ready_process());\n\n assert_eq!(scheduler.remove(), None);\n\n\n\n // Scheduling an unknown process should do nothing.\n\n scheduler.mark_ready(ProcessId(0));\n\n assert!(!scheduler.has_process());\n\n assert!(!scheduler.has_ready_process());\n\n assert_eq!(scheduler.remove(), None);\n\n}\n\n\n", "file_path": "src/rt/shared/scheduler/tests.rs", "rank": 37, "score": 104352.3488767827 }, { "content": "#[test]\n\nfn add_process_marked_ready() {\n\n let mut scheduler = Scheduler::new();\n\n\n\n let actor_entry = scheduler.add_actor();\n\n let pid = actor_entry.pid();\n\n let new_actor = simple_actor as fn(_) -> _;\n\n let (actor, inbox, _) = init_local_actor_with_inbox(new_actor, ()).unwrap();\n\n actor_entry.add(\n\n Priority::NORMAL,\n\n NoSupervisor,\n\n new_actor,\n\n actor,\n\n inbox,\n\n 
true,\n\n );\n\n\n\n let process = scheduler.next_process().unwrap();\n\n scheduler.add_process(process);\n\n assert!(scheduler.has_process());\n\n assert!(!scheduler.has_ready_process());\n\n\n\n scheduler.mark_ready(pid);\n\n assert!(scheduler.has_process());\n\n assert!(scheduler.has_ready_process());\n\n let process = scheduler.next_process().unwrap();\n\n assert_eq!(process.as_ref().id(), pid);\n\n}\n\n\n", "file_path": "src/rt/local/scheduler/tests.rs", "rank": 38, "score": 104352.3488767827 }, { "content": "/// Poll an actor.\n\n///\n\n/// This is effectively the same function as [`poll_future`], but instead polls\n\n/// an actor. The [`task::Context`] will be provided by the *test* runtime.\n\n///\n\n/// # Notes\n\n///\n\n/// Wake notifications will be ignored, if this is required run an end to end\n\n/// test with a completely functional runtime instead.\n\npub fn poll_actor<A>(actor: Pin<&mut A>) -> Poll<Result<(), A::Error>>\n\nwhere\n\n A: Actor + ?Sized,\n\n{\n\n let waker = runtime().new_local_task_waker(TEST_PID);\n\n let mut ctx = task::Context::from_waker(&waker);\n\n Actor::try_poll(actor, &mut ctx)\n\n}\n\n\n\n/// Percentage of messages lost on purpose.\n\nstatic MSG_LOSS: AtomicU8 = AtomicU8::new(0);\n\n\n", "file_path": "src/test.rs", "rank": 39, "score": 103882.57725666418 }, { "content": "/// Wait for all `actors` in the group to finish.\n\n///\n\n/// # Notes\n\n///\n\n/// If you want to wait for actors with different message types try\n\n/// [`ActorRef::map`] or [`ActorRef::try_map`].\n\npub fn join_all<M>(actors: &ActorGroup<M>, timeout: Duration) -> JoinResult {\n\n match SyncWaker::new().block_for(actors.join_all(), timeout) {\n\n Some(()) => JoinResult::Ok,\n\n None => JoinResult::TimedOut,\n\n }\n\n}\n\n\n\n/// Initialise a thread-local actor.\n", "file_path": "src/test.rs", "rank": 40, "score": 103662.76309717576 }, { "content": "/// Wait for the actor behind `actor_ref` to finish.\n\n///\n\n/// See [`join_many`] for more 
documentation.\n\npub fn join<M>(actor_ref: &ActorRef<M>, timeout: Duration) -> JoinResult {\n\n join_many(slice::from_ref(actor_ref), timeout)\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 41, "score": 101858.28411153179 }, { "content": "/// Poll a future.\n\n///\n\n/// The [`task::Context`] will be provided by the *test* runtime.\n\n///\n\n/// # Notes\n\n///\n\n/// Wake notifications will be ignored, if this is required run an end to end\n\n/// test with a completely functional runtime instead.\n\npub fn poll_future<Fut>(future: Pin<&mut Fut>) -> Poll<Fut::Output>\n\nwhere\n\n Fut: Future + ?Sized,\n\n{\n\n let waker = runtime().new_local_task_waker(TEST_PID);\n\n let mut ctx = task::Context::from_waker(&waker);\n\n Future::poll(future, &mut ctx)\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 42, "score": 101710.7259534648 }, { "content": "/// Wait for all actors behind the `actor_refs` to finish.\n\n///\n\n/// # Notes\n\n///\n\n/// If you want to wait for actors with different message types try\n\n/// [`ActorRef::map`] or [`ActorRef::try_map`].\n\npub fn join_many<M>(actor_refs: &[ActorRef<M>], timeout: Duration) -> JoinResult {\n\n let waker = SyncWaker::new();\n\n let start = Instant::now();\n\n for actor_ref in actor_refs {\n\n let elapsed = start.elapsed();\n\n if elapsed > timeout {\n\n return JoinResult::TimedOut;\n\n }\n\n match waker.clone().block_for(actor_ref.join(), timeout - elapsed) {\n\n Some(()) => {}\n\n None => return JoinResult::TimedOut,\n\n }\n\n }\n\n JoinResult::Ok\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 43, "score": 100146.62750328795 }, { "content": "/// Poll a stream.\n\n///\n\n/// The [`task::Context`] will be provided by the *test* runtime.\n\n///\n\n/// # Notes\n\n///\n\n/// Wake notifications will be ignored, if this is required run an end to end\n\n/// test with a completely functional runtime instead.\n\npub fn poll_next<S>(stream: Pin<&mut S>) -> Poll<Option<S::Item>>\n\nwhere\n\n S: Stream + ?Sized,\n\n{\n\n let 
waker = runtime().new_local_task_waker(TEST_PID);\n\n let mut ctx = task::Context::from_waker(&waker);\n\n Stream::poll_next(stream, &mut ctx)\n\n}\n\n\n", "file_path": "src/test.rs", "rank": 44, "score": 98158.73909948935 }, { "content": "/// Create a mark ready-to-run `Pointer`.\n\nfn ready_to_run(pid: ProcessId) -> TaggedPointer {\n\n debug_assert!(ok_ptr(pid.0 as *mut ()));\n\n (pid.0 | READY_TO_RUN) as *mut ()\n\n}\n\n\n\n/// Convert a tagged pointer into a pointer to a process.\n\n///\n\n/// # Safety\n\n///\n\n/// Caller must ensure unique access to `ptr` and that it's a process.\n\nunsafe fn process_from_tagged(ptr: TaggedPointer) -> Pin<Box<ProcessData>> {\n\n debug_assert!(is_process(ptr));\n\n Pin::new(Box::from_raw(as_ptr(ptr).cast()))\n\n}\n\n\n\n/// Convert a tagged pointer into a pointer to a branch.\n\n///\n\n/// # Safety\n\n///\n\n/// Caller must ensure unique access to `ptr` and that it's a branch.\n\nunsafe fn branch_from_tagged(ptr: TaggedPointer) -> Pin<Box<Branch>> {\n\n debug_assert!(is_branch(ptr));\n\n Pin::new(Box::from_raw(as_ptr(ptr).cast()))\n\n}\n\n\n", "file_path": "src/rt/shared/scheduler/inactive.rs", "rank": 45, "score": 91095.94513068195 }, { "content": "/// Remove the first timer if it's before `time`.\n\n///\n\n/// Returns `Ok(timer)` if there is a timer with a deadline before `time`.\n\n/// Returns `Err(is_empty)`, indicating if `timers` is empty. Returns\n\n/// `Err(true)` is `timers` is empty, `Err(false)` if the are more timers in\n\n/// `timers`, but none with a deadline before `time`.\n\nfn remove_if_before<T>(timers: &mut Vec<Timer<T>>, time: T) -> Result<Timer<T>, bool>\n\nwhere\n\n T: Ord + Copy,\n\n{\n\n match timers.last() {\n\n // TODO: is the `unwrap` removed here? 
Or do we need `unwrap_unchecked`?\n\n Some(timer) if timer.deadline <= time => Ok(timers.pop().unwrap()),\n\n Some(_) => Err(false),\n\n None => Err(true),\n\n }\n\n}\n\n\n\n/// To avoid having to check all slots and the overflow for timers in an\n\n/// [`Timers`] this type caches the earliest deadline. This speeds up\n\n/// [`Timers::next`].\n", "file_path": "src/rt/local/timers.rs", "rank": 46, "score": 90556.60745625173 }, { "content": "/// Remove the first timer if it's before `time`.\n\n///\n\n/// Returns `Ok(timer)` if there is a timer with a deadline before `time`.\n\n/// Returns `Err(is_empty)`, indicating if `timers` is empty. Returns\n\n/// `Err(true)` is `timers` is empty, `Err(false)` if the are more timers in\n\n/// `timers`, but none with a deadline before `time`.\n\nfn remove_if_before<T>(timers: &mut Vec<Timer<T>>, time: T) -> Result<Timer<T>, bool>\n\nwhere\n\n T: Ord + Copy,\n\n{\n\n match timers.last() {\n\n // TODO: is the `unwrap` removed here? Or do we need `unwrap_unchecked`?\n\n Some(timer) if timer.deadline <= time => Ok(timers.pop().unwrap()),\n\n Some(_) => Err(false),\n\n None => Err(true),\n\n }\n\n}\n\n\n\n/// A timer.\n\n///\n\n/// # Notes\n\n///\n\n/// The [`Ord`] implementation is in reverse order, i.e. the deadline to expire\n\n/// first will have the highest ordering value. Furthermore the ordering is only\n\n/// done base on the deadline, the process id is ignored in ordering. This\n\n/// allows `change_timer` to not worry about order when changing the process id\n\n/// of a timer.\n", "file_path": "src/rt/shared/timers.rs", "rank": 47, "score": 90556.60745625173 }, { "content": "/// Create a new Unix pipe.\n\n///\n\n/// This is a wrapper around Unix's [`pipe(2)`] system call and can be used as\n\n/// inter-process or thread communication channel.\n\n///\n\n/// This channel may be created before forking the process and then one end used\n\n/// in each process, e.g. 
the parent process has the sending end to send\n\n/// commands to the child process.\n\n///\n\n/// [`pipe(2)`]: https://pubs.opengroup.org/onlinepubs/9699919799/functions/pipe.html\n\npub fn new<M, RT>(ctx: &mut actor::Context<M, RT>) -> io::Result<(Sender, Receiver)>\n\nwhere\n\n RT: rt::Access,\n\n{\n\n let (mut sender, mut receiver) = pipe::new()?;\n\n\n\n let rt = ctx.runtime();\n\n rt.register(&mut sender, Interest::WRITABLE)?;\n\n rt.register(&mut receiver, Interest::READABLE)?;\n\n\n\n Ok((Sender { inner: sender }, Receiver { inner: receiver }))\n\n}\n\n\n\n/// Sending end of an Unix pipe.\n\n///\n\n/// Created by calling [`new`] or converted from [`ChildStdin`].\n\n#[derive(Debug)]\n\npub struct Sender {\n\n inner: pipe::Sender,\n\n}\n", "file_path": "src/pipe.rs", "rank": 48, "score": 90536.82486986669 }, { "content": "struct Ping;\n\n\n\nimpl fmt::Display for Ping {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.write_str(\"Ping\")\n\n }\n\n}\n\n\n", "file_path": "examples/3_rpc.rs", "rank": 49, "score": 86766.73180185878 }, { "content": "struct Pong;\n\n\n\nimpl fmt::Display for Pong {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.write_str(\"Pong\")\n\n }\n\n}\n", "file_path": "examples/3_rpc.rs", "rank": 50, "score": 86766.73180185878 }, { "content": "#[derive(Debug)]\n\nstruct ChildCommand {\n\n inner: Child,\n\n}\n\n\n\nimpl Deref for ChildCommand {\n\n type Target = Child;\n\n\n\n fn deref(&self) -> &Child {\n\n &self.inner\n\n }\n\n}\n\n\n\nimpl DerefMut for ChildCommand {\n\n fn deref_mut(&mut self) -> &mut Child {\n\n &mut self.inner\n\n }\n\n}\n\n\n\nimpl Drop for ChildCommand {\n\n fn drop(&mut self) {\n\n let _ = self.inner.kill();\n\n self.inner.wait().expect(\"can't wait on child process\");\n\n }\n\n}\n\n\n", "file_path": "tests/examples.rs", "rank": 51, "score": 85055.31819485649 }, { "content": "#[derive(Debug)]\n\nstruct Waker {\n\n notifications: Sender<ProcessId>,\n\n thread_waker: 
ThreadWaker,\n\n}\n\n\n\nimpl Waker {\n\n /// Wake up the process with `pid`.\n\n fn wake(&self, pid: ProcessId) {\n\n trace!(\"waking: pid={}\", pid);\n\n if let Err(err) = self.notifications.try_send(pid) {\n\n error!(\"unable to send wake up notification: {}\", err);\n\n return;\n\n }\n\n\n\n self.wake_thread()\n\n }\n\n\n\n /// Wake up the thread, without waking a specific process.\n\n fn wake_thread(&self) {\n\n if let Err(err) = self.thread_waker.wake() {\n", "file_path": "src/rt/waker.rs", "rank": 52, "score": 85055.31819485649 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nstruct ServerSupervisor;\n\n\n\nimpl<NA> Supervisor<NA> for ServerSupervisor\n\nwhere\n\n NA: NewActor<Argument = (), Error = io::Error>,\n\n NA::Actor: Actor<Error = tcp::server::Error<!>>,\n\n{\n\n fn decide(&mut self, err: tcp::server::Error<!>) -> SupervisorStrategy<()> {\n\n use tcp::server::Error::*;\n\n match err {\n\n Accept(err) => {\n\n error!(\"error accepting new connection: {}\", err);\n\n SupervisorStrategy::Restart(())\n\n }\n\n NewActor(_) => unreachable!(),\n\n }\n\n }\n\n\n\n fn decide_on_restart_error(&mut self, err: io::Error) -> SupervisorStrategy<()> {\n\n error!(\"error restarting the TCP server: {}\", err);\n\n SupervisorStrategy::Stop\n\n }\n\n\n\n fn second_restart_error(&mut self, err: io::Error) {\n\n error!(\"error restarting the actor a second time: {}\", err);\n\n }\n\n}\n\n\n", "file_path": "examples/redis.rs", "rank": 53, "score": 85055.19131596462 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nstruct ServerSupervisor;\n\n\n\nimpl<NA> Supervisor<NA> for ServerSupervisor\n\nwhere\n\n NA: NewActor<Argument = (), Error = io::Error>,\n\n NA::Actor: Actor<Error = tcp::server::Error<!>>,\n\n{\n\n fn decide(&mut self, err: tcp::server::Error<!>) -> SupervisorStrategy<()> {\n\n use tcp::server::Error::*;\n\n match err {\n\n // When we hit an error accepting a connection we'll drop the old\n\n // listener and create a new one.\n\n Accept(err) => {\n\n 
error!(\"error accepting new connection: {}\", err);\n\n SupervisorStrategy::Restart(())\n\n }\n\n // Async function never return an error creating a new actor.\n\n NewActor(_) => unreachable!(),\n\n }\n\n }\n", "file_path": "examples/2_my_ip.rs", "rank": 54, "score": 85055.19131596462 }, { "content": "#[derive(Debug)]\n\nstruct Epoch {\n\n time: Instant,\n\n index: u8,\n\n}\n\n\n\n/// Metrics for [`Timers`].\n\n#[derive(Debug)]\n\n#[allow(dead_code)] // https://github.com/rust-lang/rust/issues/88900.\n\npub(crate) struct Metrics {\n\n timers: usize,\n\n next_timer: Option<Duration>,\n\n}\n\n\n\nimpl Timers {\n\n /// Create a new collection of timers.\n\n pub(crate) fn new() -> Timers {\n\n Timers {\n\n epoch: RwLock::new(Epoch {\n\n time: Instant::now(),\n\n index: 0,\n", "file_path": "src/rt/shared/timers.rs", "rank": 55, "score": 83457.19005726356 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nstruct ServerSupervisor;\n\n\n\nimpl<NA> Supervisor<NA> for ServerSupervisor\n\nwhere\n\n NA: NewActor<Argument = (), Error = io::Error>,\n\n NA::Actor: Actor<Error = http::server::Error<!>>,\n\n{\n\n fn decide(&mut self, err: http::server::Error<!>) -> SupervisorStrategy<()> {\n\n use http::server::Error::*;\n\n match err {\n\n Accept(err) => {\n\n error!(\"error accepting new connection: {}\", err);\n\n SupervisorStrategy::Restart(())\n\n }\n\n NewActor(_) => unreachable!(),\n\n }\n\n }\n\n\n\n fn decide_on_restart_error(&mut self, err: io::Error) -> SupervisorStrategy<()> {\n\n error!(\"error restarting the TCP server: {}\", err);\n\n SupervisorStrategy::Stop\n\n }\n\n\n\n fn second_restart_error(&mut self, err: io::Error) {\n\n error!(\"error restarting the actor a second time: {}\", err);\n\n }\n\n}\n\n\n", "file_path": "http/examples/route.rs", "rank": 56, "score": 83457.0631783717 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nstruct ServerSupervisor;\n\n\n\nimpl<NA> Supervisor<NA> for ServerSupervisor\n\nwhere\n\n NA: NewActor<Argument = (), Error = 
io::Error>,\n\n NA::Actor: Actor<Error = http::server::Error<!>>,\n\n{\n\n fn decide(&mut self, err: http::server::Error<!>) -> SupervisorStrategy<()> {\n\n use http::server::Error::*;\n\n match err {\n\n Accept(err) => {\n\n error!(\"error accepting new connection: {}\", err);\n\n SupervisorStrategy::Restart(())\n\n }\n\n NewActor(_) => unreachable!(),\n\n }\n\n }\n\n\n\n fn decide_on_restart_error(&mut self, err: io::Error) -> SupervisorStrategy<()> {\n\n error!(\"error restarting the TCP server: {}\", err);\n\n SupervisorStrategy::Stop\n\n }\n\n\n\n fn second_restart_error(&mut self, err: io::Error) {\n\n error!(\"error restarting the actor a second time: {}\", err);\n\n }\n\n}\n\n\n", "file_path": "http/examples/my_ip.rs", "rank": 57, "score": 83457.0631783717 }, { "content": "#[derive(Clone, Debug, Eq, PartialEq)]\n\nstruct AlwaysPending;\n\n\n\nimpl Future for AlwaysPending {\n\n type Output = Result<(), DeadlinePassed>;\n\n\n\n fn poll(self: Pin<&mut Self>, _: &mut task::Context<'_>) -> Poll<Self::Output> {\n\n Poll::Pending\n\n }\n\n}\n\n\n", "file_path": "tests/functional/timer.rs", "rank": 58, "score": 83456.94190022396 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\nstruct Pong;\n\n\n\nasync fn ping(_: actor::Context<!, ThreadLocal>, relay_ref: ActorRef<RpcTestMessage>) {\n\n let rpc = relay_ref.rpc(Ping);\n\n let res = rpc.await;\n\n assert_eq!(res, Ok(Pong));\n\n}\n\n\n", "file_path": "tests/functional/actor_ref.rs", "rank": 59, "score": 83456.8832468048 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\nstruct Ping;\n\n\n", "file_path": "tests/functional/actor_ref.rs", "rank": 60, "score": 83456.8832468048 }, { "content": "#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]\n\nstruct Timer {\n\n pid: ProcessId,\n\n deadline: Instant,\n\n}\n\n\n", "file_path": "benches/timers_container/bench.rs", "rank": 61, "score": 83456.71472548973 }, { "content": "#[derive(Clone)] // Needed in setup function.\n\nstruct 
WaitFuture {\n\n #[allow(clippy::type_complexity)]\n\n inner: Arc<(Mutex<(Option<task::Waker>, bool)>, Condvar)>,\n\n}\n\n\n\nimpl Future for WaitFuture {\n\n type Output = Result<(), !>;\n\n\n\n fn poll(self: Pin<&mut Self>, ctx: &mut task::Context<'_>) -> Poll<Self::Output> {\n\n let mut guard = self.inner.0.lock().unwrap();\n\n match &mut *guard {\n\n (_, true) => Poll::Ready(Ok(())),\n\n (waker, false) => {\n\n *waker = Some(ctx.waker().clone());\n\n self.inner.1.notify_all();\n\n Poll::Pending\n\n }\n\n }\n\n }\n\n}\n", "file_path": "tests/functional/runtime.rs", "rank": 62, "score": 83451.56832391262 }, { "content": "struct ServerSupervisor;\n\n\n\nimpl<L, A> Supervisor<L> for ServerSupervisor\n\nwhere\n\n L: NewActor<Message = tcp::server::Message, Argument = (), Actor = A, Error = io::Error>,\n\n A: Actor<Error = tcp::server::Error<!>>,\n\n{\n\n fn decide(&mut self, _: tcp::server::Error<!>) -> SupervisorStrategy<()> {\n\n SupervisorStrategy::Stop\n\n }\n\n\n\n fn decide_on_restart_error(&mut self, _: io::Error) -> SupervisorStrategy<()> {\n\n SupervisorStrategy::Stop\n\n }\n\n\n\n fn second_restart_error(&mut self, _: io::Error) {}\n\n}\n\n\n\n#[allow(clippy::type_complexity)] // `servers` is too complex.\n\nasync fn conn_actor(\n", "file_path": "tests/regression/issue_145.rs", "rank": 63, "score": 83451.56832391262 }, { "content": "struct TestFuture {\n\n wakes: usize,\n\n}\n\n\n\nconst fn test_future() -> TestFuture {\n\n TestFuture { wakes: 0 }\n\n}\n\n\n\nimpl Future for TestFuture {\n\n type Output = ();\n\n\n\n fn poll(mut self: Pin<&mut Self>, ctx: &mut task::Context<'_>) -> Poll<Self::Output> {\n\n match self.wakes {\n\n 0 => {\n\n ctx.waker().wake_by_ref();\n\n self.wakes += 1;\n\n Poll::Pending\n\n }\n\n 1 => {\n\n ctx.waker().clone().wake();\n\n self.wakes += 1;\n\n Poll::Pending\n\n }\n\n _ => Poll::Ready(()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/functional/future.rs", "rank": 64, "score": 83451.56832391262 }, { "content": 
"#[derive(Debug)]\n\nstruct Node {\n\n process: Pin<Box<ProcessData>>,\n\n left: Branch,\n\n right: Branch,\n\n}\n\n\n\nimpl RunQueue {\n\n /// Returns an empty `RunQueue`.\n\n pub(super) fn empty() -> RunQueue {\n\n RunQueue {\n\n root: Mutex::new(None),\n\n }\n\n }\n\n\n\n /// Returns the number of processes in the queue.\n\n ///\n\n /// # Notes\n\n ///\n\n /// Don't call this often, it's terrible for performance.\n\n pub(super) fn len(&self) -> usize {\n", "file_path": "src/rt/shared/scheduler/runqueue.rs", "rank": 65, "score": 81966.03075129664 }, { "content": "#[derive(Clone, Debug)]\n\nstruct BlockFuture {\n\n data: Arc<Mutex<(bool, Option<task::Waker>)>>,\n\n}\n\n\n\nimpl BlockFuture {\n\n fn new() -> BlockFuture {\n\n BlockFuture {\n\n data: Arc::new(Mutex::new((false, None))),\n\n }\n\n }\n\n\n\n fn unblock(&self) {\n\n let mut data = self.data.lock().unwrap();\n\n data.0 = true;\n\n data.1.take().unwrap().wake();\n\n }\n\n\n\n fn wake(&self) {\n\n self.data.lock().unwrap().1.take().unwrap().wake_by_ref();\n\n }\n", "file_path": "tests/functional/sync_actor.rs", "rank": 66, "score": 81965.96658778621 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nstruct ServerSupervisor;\n\n\n\nimpl<NA> Supervisor<NA> for ServerSupervisor\n\nwhere\n\n NA: NewActor<Argument = (), Error = io::Error>,\n\n NA::Actor: Actor<Error = http::server::Error<!>>,\n\n{\n\n fn decide(&mut self, err: http::server::Error<!>) -> SupervisorStrategy<()> {\n\n use http::server::Error::*;\n\n match err {\n\n Accept(err) => panic!(\"error accepting new connection: {}\", err),\n\n NewActor(_) => unreachable!(),\n\n }\n\n }\n\n\n\n fn decide_on_restart_error(&mut self, err: io::Error) -> SupervisorStrategy<()> {\n\n panic!(\"error restarting the TCP server: {}\", err);\n\n }\n\n\n\n fn second_restart_error(&mut self, err: io::Error) {\n\n panic!(\"error restarting the actor a second time: {}\", err);\n\n }\n\n}\n\n\n", "file_path": "http/tests/functional/server.rs", "rank": 67, "score": 
81965.90387240477 }, { "content": "/// Tagged pointer to either a `Branch` or `ProcessData`.\n\nstruct Pointer {\n\n /// This is actually either a `Pin<Box<ProcessData>>` or `Pin<Box<Branch>>`.\n\n tagged_ptr: NonNull<()>,\n\n}\n\n\n\n/// Number of bits used for the tag in `Pointer`.\n\nconst POINTER_TAG_BITS: usize = 1;\n\n/// Tags used for the `Pointer`.\n\nconst PROCESS_TAG: usize = 0b1;\n\nconst BRANCH_TAG: usize = 0b0;\n\n\n\nimpl Pointer {\n\n /// Attempts to take a process pointer from `this`, or returns a mutable\n\n /// reference to the branch.\n\n ///\n\n /// Returns:\n\n /// * `None` if `this` is `None`, `this` is unchanged.\n\n /// * `Some(Ok(..))` if the pointer is `Some` and points to a process,\n\n /// `this` will be `None`.\n\n /// * `Some(Err(..))` if the pointer is `Some` and points to a branch,\n", "file_path": "src/rt/local/scheduler/inactive.rs", "rank": 68, "score": 81960.4090179457 }, { "content": "/// Timer generator.\n\nstruct GenTimers {\n\n /// Pseudo-random number generator.\n\n prng: Xoshiro128PlusPlus,\n\n epoch: Instant,\n\n}\n\n\n\nimpl Iterator for GenTimers {\n\n type Item = Timer;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let pid = self.prng.next_u64();\n\n let mut add = [0; 2];\n\n self.prng.fill_bytes(&mut add);\n\n // The containers use `Instant` as key so they have to be unique.\n\n let add = max(u16::from_ne_bytes(add), 1);\n\n let deadline = self.epoch + Duration::from_nanos(add.into());\n\n self.epoch = deadline;\n\n Some(Timer { pid, deadline })\n\n }\n\n}\n", "file_path": "benches/timers_container/bench.rs", "rank": 69, "score": 81960.4090179457 }, { "content": "struct Branch {\n\n branches: [Option<Pointer>; N_BRANCHES],\n\n}\n\n\n\nimpl fmt::Debug for Branch {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_map()\n\n .entry(&\"0000\", &self.branches[0])\n\n .entry(&\"0001\", &self.branches[1])\n\n .entry(&\"0010\", &self.branches[2])\n\n .entry(&\"0011\", &self.branches[3])\n\n 
.entry(&\"0100\", &self.branches[4])\n\n .entry(&\"0101\", &self.branches[5])\n\n .entry(&\"0110\", &self.branches[6])\n\n .entry(&\"0111\", &self.branches[7])\n\n .entry(&\"1000\", &self.branches[8])\n\n .entry(&\"1001\", &self.branches[9])\n\n .entry(&\"1010\", &self.branches[10])\n\n .entry(&\"1011\", &self.branches[11])\n\n .entry(&\"1100\", &self.branches[12])\n", "file_path": "src/rt/local/scheduler/inactive.rs", "rank": 70, "score": 81960.4090179457 }, { "content": "#[derive(Clone)]\n\nstruct HeaderPart {\n\n name: HeaderName<'static>,\n\n /// Indices into `Headers.values`.\n\n start: usize,\n\n end: usize,\n\n}\n\n\n\nimpl Headers {\n\n /// Empty list of headers.\n\n pub const EMPTY: Headers = Headers {\n\n values: Vec::new(),\n\n parts: Vec::new(),\n\n };\n\n\n\n /// Creates new `Headers` from `headers`.\n\n ///\n\n /// Calls `F` for each header.\n\n pub(crate) fn from_httparse_headers<F, E>(\n\n raw_headers: &[httparse::Header<'_>],\n\n mut f: F,\n", "file_path": "http/src/head/header.rs", "rank": 71, "score": 81960.4090179457 }, { "content": "struct Branch {\n\n /// Each pointer is a [`TaggedPointer`], see that type for valid values.\n\n ///\n\n /// Once the value of a pointer is set to point to a `Branch` they **must\n\n /// not** be changed to ensure the structure of the tree remains consistent.\n\n branches: [AtomicPtr<()>; N_BRANCHES],\n\n}\n\n\n", "file_path": "src/rt/shared/scheduler/inactive.rs", "rank": 72, "score": 81960.4090179457 }, { "content": "struct TestServer {\n\n address: SocketAddr,\n\n listener: Mutex<TcpListener>,\n\n}\n\n\n\nimpl TestServer {\n\n fn spawn() -> Arc<TestServer> {\n\n static TEST_SERVER: SyncLazy<Mutex<Weak<TestServer>>> =\n\n SyncLazy::new(|| Mutex::new(Weak::new()));\n\n\n\n let mut test_server = TEST_SERVER.lock().unwrap();\n\n if let Some(test_server) = test_server.upgrade() {\n\n // Use an existing running server.\n\n test_server\n\n } else {\n\n // Start a new server.\n\n let new_server = 
Arc::new(TestServer::new());\n\n *test_server = Arc::downgrade(&new_server);\n\n new_server\n\n }\n", "file_path": "http/tests/functional/client.rs", "rank": 73, "score": 81960.4090179457 }, { "content": "struct EmptyStream;\n\n\n\nimpl Stream for EmptyStream {\n\n type Item = io::Result<&'static [u8]>;\n\n\n\n fn poll_next(self: Pin<&mut Self>, _: &mut task::Context<'_>) -> Poll<Option<Self::Item>> {\n\n Poll::Ready(None)\n\n }\n\n}\n\n\n", "file_path": "http/tests/functional/body.rs", "rank": 74, "score": 81960.4090179457 }, { "content": "struct TestServer {\n\n address: SocketAddr,\n\n server_ref: ActorRef<Terminate>,\n\n handle: Option<thread::JoinHandle<()>>,\n\n}\n\n\n\nimpl TestServer {\n\n fn spawn() -> Arc<TestServer> {\n\n static TEST_SERVER: SyncLazy<Mutex<Weak<TestServer>>> =\n\n SyncLazy::new(|| Mutex::new(Weak::new()));\n\n\n\n let mut test_server = TEST_SERVER.lock().unwrap();\n\n if let Some(test_server) = test_server.upgrade() {\n\n // Use an existing running server.\n\n test_server\n\n } else {\n\n // Start a new server.\n\n let new_server = Arc::new(TestServer::new());\n\n *test_server = Arc::downgrade(&new_server);\n\n new_server\n", "file_path": "http/tests/functional/server.rs", "rank": 75, "score": 81960.4090179457 }, { "content": "/// Trace events.\n\n///\n\n/// See the [`trace`] module for usage.\n\n///\n\n/// [`trace`]: crate::trace\n\n///\n\n/// # Examples\n\n///\n\n/// The following example adds tracing for receiving and handling of a message.\n\n///\n\n/// ```\n\n/// use heph::actor;\n\n/// use heph::rt::ThreadLocal;\n\n/// use heph::trace::Trace;\n\n///\n\n/// async fn actor(mut ctx: actor::Context<String, ThreadLocal>) {\n\n/// // Start a trace of receiving and handling a message.\n\n/// let mut trace_timing = ctx.start_trace();\n\n/// while let Ok(msg) = ctx.receive_next().await {\n\n/// // Finish the trace for receiving the message.\n\n/// ctx.finish_trace(trace_timing.clone(), \"receiving message\", &[(\"message\", 
&msg)]);\n\n///\n\n/// // Handle the message by printing it.\n\n/// let print_timing = ctx.start_trace();\n\n/// println!(\"got a message: {}\", msg);\n\n///\n\n/// // Finish the trace for the printing and handling of the message.\n\n/// ctx.finish_trace(print_timing, \"Printing message\", &[]);\n\n/// ctx.finish_trace(trace_timing, \"Handling message\", &[]);\n\n///\n\n/// // Start tracing the next message.\n\n/// trace_timing = ctx.start_trace();\n\n/// }\n\n/// }\n\n///\n\n/// # drop(actor);\n\n/// ```\n\npub trait Trace {\n\n /// Start timing an event if tracing is enabled.\n\n ///\n\n /// To finish the trace call [`finish_trace`]. See the [`trace`] module for\n\n /// more information.\n\n ///\n\n /// [`finish_trace`]: Trace::finish_trace\n\n /// [`trace`]: crate::trace\n\n ///\n\n /// # Notes\n\n ///\n\n /// If [`finish_trace`] is not called no trace event will be written. Be\n\n /// careful with this when using the [`Try`] (`?`) operator.\n\n ///\n\n /// [`Try`]: std::ops::Try\n\n #[must_use = \"tracing events must be finished, otherwise they aren't recorded\"]\n\n fn start_trace(&self) -> Option<EventTiming>;\n\n\n\n /// Finish tracing an event, partner function to [`start_trace`].\n\n ///\n", "file_path": "src/trace.rs", "rank": 76, "score": 81111.26497534593 }, { "content": "/// Trait to make easier to work with uninitialised buffers.\n\n///\n\n/// This is implemented for common types such as `Vec<u8>`, [see below].\n\n///\n\n/// [see below]: #foreign-impls\n\npub trait Bytes {\n\n /// Returns itself as a slice of bytes that may or may not be initialised.\n\n ///\n\n /// # Notes\n\n ///\n\n /// The implementation must guarantee that two calls (without a call to\n\n /// [`update_length`] in between) returns the same slice of bytes.\n\n ///\n\n /// [`update_length`]: Bytes::update_length\n\n fn as_bytes(&mut self) -> &mut [MaybeUninit<u8>];\n\n\n\n /// Returns the length of the buffer as returned by [`as_bytes`].\n\n ///\n\n /// [`as_bytes`]: 
Bytes::as_bytes\n\n fn spare_capacity(&self) -> usize;\n\n\n\n /// Returns `true` if the buffer has spare capacity.\n\n fn has_spare_capacity(&self) -> bool {\n\n self.spare_capacity() == 0\n\n }\n", "file_path": "src/bytes.rs", "rank": 77, "score": 81109.3132708901 }, { "content": "#[derive(Debug)]\n\nstruct NopTestProcess;\n\n\n\nimpl Process for NopTestProcess {\n\n fn name(&self) -> &'static str {\n\n \"NopTestProcess\"\n\n }\n\n\n\n fn run(self: Pin<&mut Self>, _: &mut RuntimeRef, _: ProcessId) -> ProcessResult {\n\n unimplemented!();\n\n }\n\n}\n\n\n", "file_path": "src/rt/process/tests.rs", "rank": 78, "score": 80571.44831403816 }, { "content": "#[test]\n\nfn mapped_fn() {\n\n let expect_msgs = expect_msgs as fn(_, _) -> _;\n\n let expected = MSGS.iter().map(|s| (*s).to_owned()).collect();\n\n let (actor, actor_ref): (_, ActorRef<String>) =\n\n init_local_actor(expect_msgs, expected).unwrap();\n\n let mut actor = Box::pin(actor);\n\n\n\n let actor_ref: ActorRef<&str> = actor_ref.map_fn(|msg: &str| msg.to_owned());\n\n for msg in MSGS {\n\n actor_ref.try_send(*msg).unwrap();\n\n }\n\n\n\n assert_eq!(poll_actor(Pin::as_mut(&mut actor)), Poll::Ready(Ok(())));\n\n}\n\n\n", "file_path": "tests/functional/actor_ref.rs", "rank": 79, "score": 80436.508071304 }, { "content": "#[derive(Copy, Clone)]\n\nstruct Uuid(u128);\n\n\n\nimpl fmt::Display for Uuid {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n // Always force a length of 32.\n\n write!(f, \"{:032x}\", self.0)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Uuid {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt::Display::fmt(self, f)\n\n }\n\n}\n\n\n\nimpl Coordinator {\n\n /// Initialise the `Coordinator`.\n\n ///\n\n /// # Notes\n\n ///\n", "file_path": "src/rt/coordinator.rs", "rank": 80, "score": 79595.99967706419 }, { "content": "/// Trait to make easier to work with uninitialised buffers using vectored I/O.\n\n///\n\n/// This trait is implemented for arrays and tuples. 
When all of buffers are\n\n/// *homogeneous*, i.e. of the same type, the array implementation is the\n\n/// easiest to use along side with the [`Bytes`] trait. If however the buffers\n\n/// are *heterogeneous*, i.e. of different types, the tuple implementation can\n\n/// be used. See the examples below.\n\n///\n\n/// # Examples\n\n///\n\n/// Using the homogeneous array implementation.\n\n///\n\n/// ```\n\n/// # #![feature(maybe_uninit_write_slice)]\n\n/// use heph::bytes::BytesVectored;\n\n///\n\n/// let mut buf1 = Vec::with_capacity(12);\n\n/// let mut buf2 = Vec::with_capacity(1);\n\n/// let mut buf3 = Vec::with_capacity(5);\n\n/// let mut buf4 = Vec::with_capacity(10); // Has extra capacity.\n\n///\n\n/// let bufs = [&mut buf1, &mut buf2, &mut buf3, &mut buf4];\n\n/// let text = b\"Hello world. From mars!\";\n\n/// let bytes_written = write_vectored(bufs, text);\n\n/// assert_eq!(bytes_written, text.len());\n\n///\n\n/// assert_eq!(buf1, b\"Hello world.\");\n\n/// assert_eq!(buf2, b\" \");\n\n/// assert_eq!(buf3, b\"From \");\n\n/// assert_eq!(buf4, b\"mars!\");\n\n///\n\n/// /// Writes `text` to the `bufs`.\n\n/// fn write_vectored<B>(mut bufs: B, text: &[u8]) -> usize\n\n/// where B: BytesVectored,\n\n/// {\n\n/// // Implementation is not relevant to the example.\n\n/// # let mut written = 0;\n\n/// # let mut left = text;\n\n/// # for buf in bufs.as_bufs().as_mut().iter_mut() {\n\n/// # let n = std::cmp::min(buf.len(), left.len());\n\n/// # let _ = std::mem::MaybeUninit::write_slice(&mut buf[..n], &left[..n]);\n\n/// # left = &left[n..];\n\n/// # written += n;\n\n/// # if left.is_empty() {\n\n/// # break;\n\n/// # }\n\n/// # }\n\n/// # // NOTE: we could update the length of the buffers in the loop above,\n\n/// # // but this also acts as a smoke test for the implementation and this is\n\n/// # // what would happen with actual vectored I/O.\n\n/// # unsafe { bufs.update_lengths(written); }\n\n/// # written\n\n/// }\n\n/// ```\n\n///\n\n/// Using the 
heterogeneous tuple implementation.\n\n///\n\n/// ```\n\n/// # #![feature(maybe_uninit_uninit_array, maybe_uninit_slice, maybe_uninit_write_slice)]\n\n/// use std::mem::MaybeUninit;\n\n///\n\n/// use heph::bytes::{Bytes, BytesVectored};\n\n///\n\n/// // Buffers of different types.\n\n/// let mut buf1 = Vec::with_capacity(12);\n\n/// let mut buf2 = StackBuf::new(); // Has extra capacity.\n\n///\n\n/// // Using tuples we can use different kind of buffers. Here we use a `Vec` and\n\n/// // our own `StackBuf` type.\n\n/// let bufs = (&mut buf1, &mut buf2);\n\n/// let text = b\"Hello world. From mars!\";\n\n/// let bytes_written = write_vectored(bufs, text);\n\n/// assert_eq!(bytes_written, text.len());\n\n///\n\n/// assert_eq!(buf1, b\"Hello world.\");\n\n/// assert_eq!(buf2.bytes(), b\" From mars!\");\n\n///\n\n/// /// Writes `text` to the `bufs`.\n\n/// fn write_vectored<B>(mut bufs: B, text: &[u8]) -> usize\n\n/// where B: BytesVectored,\n\n/// {\n\n/// // Implementation is not relevant to the example.\n\n/// # let mut written = 0;\n\n/// # let mut left = text;\n\n/// # for buf in bufs.as_bufs().as_mut().iter_mut() {\n\n/// # let n = std::cmp::min(buf.len(), left.len());\n\n/// # let _ = MaybeUninit::write_slice(&mut buf[..n], &left[..n]);\n\n/// # left = &left[n..];\n\n/// # written += n;\n\n/// # if left.is_empty() {\n\n/// # break;\n\n/// # }\n\n/// # }\n\n/// # // NOTE: we could update the length of the buffers in the loop above,\n\n/// # // but this also acts as a smoke test for the implementation and this is\n\n/// # // what would happen with actual vectored I/O.\n\n/// # unsafe { bufs.update_lengths(written); }\n\n/// # written\n\n/// }\n\n///\n\n/// /// Custom stack buffer type that implements the `Bytes` trait.\n\n/// struct StackBuf {\n\n/// bytes: [MaybeUninit<u8>; 4096],\n\n/// initialised: usize,\n\n/// }\n\n///\n\n/// impl StackBuf {\n\n/// fn new() -> StackBuf {\n\n/// StackBuf {\n\n/// bytes: MaybeUninit::uninit_array(),\n\n/// initialised: 0,\n\n/// 
}\n\n/// }\n\n///\n\n/// fn bytes(&self) -> &[u8] {\n\n/// unsafe { MaybeUninit::slice_assume_init_ref(&self.bytes[..self.initialised]) }\n\n/// }\n\n/// }\n\n///\n\n/// impl Bytes for StackBuf {\n\n/// fn as_bytes(&mut self) -> &mut [MaybeUninit<u8>] {\n\n/// &mut self.bytes[self.initialised..]\n\n/// }\n\n///\n\n/// fn spare_capacity(&self) -> usize {\n\n/// self.bytes.len() - self.initialised\n\n/// }\n\n///\n\n/// fn has_spare_capacity(&self) -> bool {\n\n/// self.bytes.len() != self.initialised\n\n/// }\n\n///\n\n/// unsafe fn update_length(&mut self, n: usize) {\n\n/// self.initialised += n;\n\n/// }\n\n/// }\n\n/// ```\n\npub trait BytesVectored {\n\n /// Type used as slice of buffers, usually this is an array.\n\n type Bufs<'b>: AsMut<[MaybeUninitSlice<'b>]>;\n\n\n\n /// Returns itself as a slice of [`MaybeUninitSlice`].\n\n fn as_bufs<'b>(&'b mut self) -> Self::Bufs<'b>;\n\n\n\n /// Returns the total length of the buffers as returned by [`as_bufs`].\n\n ///\n\n /// [`as_bufs`]: BytesVectored::as_bufs\n\n fn spare_capacity(&self) -> usize;\n\n\n\n /// Returns `true` if (one of) the buffers has spare capacity.\n\n fn has_spare_capacity(&self) -> bool {\n\n self.spare_capacity() == 0\n\n }\n\n\n\n /// Update the length of the buffers in the slice.\n\n ///\n\n /// # Safety\n", "file_path": "src/bytes.rs", "rank": 81, "score": 79524.0629777792 }, { "content": "#[must_use = \"actor do nothing unless you poll them\"]\n\npub trait Actor {\n\n /// An error the actor can return to its [supervisor]. 
This error will be\n\n /// considered terminal for this actor and should **not** be an error of\n\n /// regular processing of a message.\n\n ///\n\n /// How to process non-terminal errors that happen during regular processing\n\n /// is up to the actor.\n\n ///\n\n /// [supervisor]: crate::supervisor\n\n type Error;\n\n\n\n /// Try to poll this actor.\n\n ///\n\n /// This is basically the same as calling [`Future::poll`].\n\n ///\n\n /// # Panics\n\n ///\n\n /// Just like with [`Future`]s polling after it returned [`Poll::Ready`] may\n\n /// cause undefined behaviour, including but not limited to panicking.\n\n fn try_poll(self: Pin<&mut Self>, ctx: &mut task::Context<'_>)\n", "file_path": "src/actor/mod.rs", "rank": 82, "score": 79512.38202594829 }, { "content": " /// Trait that defines how to write an attribute value.\n\n pub trait AttributeValue {\n\n /// The type byte for this attribute value.\n\n // NOTE: this should be a assiociated constant, however that is not\n\n // object safe.\n\n fn type_byte(&self) -> u8;\n\n\n\n /// Write the contents of the attribute, without type byte.\n\n fn write_attribute(&self, buf: &mut Vec<u8>);\n\n }\n\n\n\n impl<'a, T> AttributeValue for &'a T\n\n where\n\n T: AttributeValue + ?Sized,\n\n {\n\n fn type_byte(&self) -> u8 {\n\n (&**self).type_byte()\n\n }\n\n\n\n fn write_attribute(&self, buf: &mut Vec<u8>) {\n\n (&**self).write_attribute(buf)\n", "file_path": "src/trace.rs", "rank": 83, "score": 79507.1232364008 }, { "content": "#[test]\n\nfn try_mapped_fn() {\n\n let expect_msgs = expect_msgs as fn(_, _) -> _;\n\n let expected = vec![\n\n NonZeroUsize::new(1).unwrap(),\n\n NonZeroUsize::new(2).unwrap(),\n\n NonZeroUsize::new(3).unwrap(),\n\n ];\n\n let (actor, actor_ref): (_, ActorRef<NonZeroUsize>) =\n\n init_local_actor(expect_msgs, expected).unwrap();\n\n let mut actor = Box::pin(actor);\n\n\n\n let actor_ref: ActorRef<usize> =\n\n actor_ref.try_map_fn(|msg| NonZeroUsize::new(msg).ok_or(SendError));\n\n 
assert!(actor_ref.try_send(0usize).is_err());\n\n for msg in 1..4usize {\n\n actor_ref.try_send(msg).unwrap();\n\n }\n\n\n\n assert_eq!(poll_actor(Pin::as_mut(&mut actor)), Poll::Ready(Ok(())));\n\n}\n\n\n", "file_path": "tests/functional/actor_ref.rs", "rank": 84, "score": 79328.42195863875 }, { "content": "#[test]\n\nfn mapped_fn_cloned() {\n\n let expected: Vec<usize> = (0..INBOX_SIZE - 1).collect();\n\n let expect_msgs = expect_msgs as fn(_, _) -> _;\n\n let (actor, actor_ref) = init_local_actor(expect_msgs, expected.clone()).unwrap();\n\n let actor_ref = actor_ref.map_fn(|msg: u8| msg as usize);\n\n let mut actor = Box::pin(actor);\n\n\n\n let m: Vec<(ActorRef<u8>, u8)> = expected\n\n .into_iter()\n\n .map(|msg| (actor_ref.clone(), msg as u8))\n\n .collect();\n\n for (actor_ref, msg) in m {\n\n actor_ref.try_send(msg).unwrap();\n\n }\n\n\n\n assert_eq!(poll_actor(Pin::as_mut(&mut actor)), Poll::Ready(Ok(())));\n\n}\n\n\n", "file_path": "tests/functional/actor_ref.rs", "rank": 85, "score": 79328.42195863875 }, { "content": "#[test]\n\nfn mapped_fn_send() {\n\n let expect_msgs = expect_msgs as fn(_, _) -> _;\n\n let expected = MSGS.iter().map(|s| (*s).to_owned()).collect();\n\n let (actor, actor_ref) = init_local_actor(expect_msgs, expected).unwrap();\n\n let mut actor = Box::pin(actor);\n\n let actor_ref: ActorRef<&str> = actor_ref.map_fn(|msg: &str| msg.to_owned());\n\n\n\n let relay_msgs = relay_msgs as fn(_, _, _) -> _;\n\n let (relay_actor, _) = init_local_actor(relay_msgs, (actor_ref, MSGS.to_vec())).unwrap();\n\n let mut relay_actor = Box::pin(relay_actor);\n\n\n\n assert_eq!(poll_actor(Pin::as_mut(&mut actor)), Poll::Pending);\n\n assert_eq!(\n\n poll_actor(Pin::as_mut(&mut relay_actor)),\n\n Poll::Ready(Ok(()))\n\n );\n\n assert_eq!(poll_actor(Pin::as_mut(&mut actor)), Poll::Ready(Ok(())));\n\n}\n\n\n", "file_path": "tests/functional/actor_ref.rs", "rank": 86, "score": 79328.42195863875 }, { "content": "#[test]\n\nfn mapped_fn_is_connected() {\n\n let 
expect_msgs = expect_msgs as fn(_, Vec<usize>) -> _;\n\n let (actor, actor_ref) = init_local_actor(expect_msgs, Vec::new()).unwrap();\n\n let actor_ref: ActorRef<u8> = actor_ref.map_fn(|msg| msg as usize);\n\n assert!(actor_ref.is_connected());\n\n\n\n drop(actor);\n\n assert!(!actor_ref.is_connected());\n\n}\n\n\n", "file_path": "tests/functional/actor_ref.rs", "rank": 87, "score": 79328.42195863875 }, { "content": "#[derive(Debug)]\n\nstruct NopTestProcess;\n\n\n\nimpl Process for NopTestProcess {\n\n fn name(&self) -> &'static str {\n\n \"NopTestProcess\"\n\n }\n\n\n\n fn run(self: Pin<&mut Self>, _: &mut RuntimeRef, _: ProcessId) -> ProcessResult {\n\n unimplemented!();\n\n }\n\n}\n\n\n", "file_path": "src/rt/local/scheduler/tests.rs", "rank": 88, "score": 79264.35465695623 }, { "content": "#[test]\n\nfn try_mapped_fn_send() {\n\n let expect_msgs = expect_msgs as fn(_, _) -> _;\n\n let expected = MSGS.iter().map(|s| (*s).to_owned()).collect();\n\n let (actor, actor_ref) = init_local_actor(expect_msgs, expected).unwrap();\n\n let mut actor = Box::pin(actor);\n\n let actor_ref: ActorRef<&str> = actor_ref.try_map_fn::<_, _, !>(|msg: &str| Ok(msg.to_owned()));\n\n\n\n let relay_msgs = relay_msgs as fn(_, _, _) -> _;\n\n let (relay_actor, _) = init_local_actor(relay_msgs, (actor_ref, MSGS.to_vec())).unwrap();\n\n let mut relay_actor = Box::pin(relay_actor);\n\n\n\n assert_eq!(poll_actor(Pin::as_mut(&mut actor)), Poll::Pending);\n\n assert_eq!(\n\n poll_actor(Pin::as_mut(&mut relay_actor)),\n\n Poll::Ready(Ok(()))\n\n );\n\n assert_eq!(poll_actor(Pin::as_mut(&mut actor)), Poll::Ready(Ok(())));\n\n}\n\n\n", "file_path": "tests/functional/actor_ref.rs", "rank": 89, "score": 78265.8319755141 }, { "content": "#[test]\n\nfn try_mapped_fn_cloned() {\n\n let expected: Vec<usize> = (0..INBOX_SIZE - 1).collect();\n\n let expect_msgs = expect_msgs as fn(_, _) -> _;\n\n let (actor, actor_ref) = init_local_actor(expect_msgs, expected.clone()).unwrap();\n\n let actor_ref = 
actor_ref.try_map_fn::<_, _, !>(|msg| Ok(msg as usize));\n\n let mut actor = Box::pin(actor);\n\n\n\n let m: Vec<(ActorRef<u8>, u8)> = expected\n\n .into_iter()\n\n .map(|msg| (actor_ref.clone(), msg as u8))\n\n .collect();\n\n for (actor_ref, msg) in m {\n\n actor_ref.try_send(msg).unwrap();\n\n }\n\n\n\n assert_eq!(poll_actor(Pin::as_mut(&mut actor)), Poll::Ready(Ok(())));\n\n}\n\n\n", "file_path": "tests/functional/actor_ref.rs", "rank": 90, "score": 78265.8319755141 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\nstruct Timer<T> {\n\n pid: ProcessId,\n\n deadline: T,\n\n}\n\n\n\nimpl<T> Ord for Timer<T>\n\nwhere\n\n T: Ord,\n\n{\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n other.deadline.cmp(&self.deadline)\n\n }\n\n}\n\n\n\nimpl<T> PartialOrd for Timer<T>\n\nwhere\n\n T: Ord,\n\n{\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n", "file_path": "src/rt/shared/timers.rs", "rank": 91, "score": 78110.15529398945 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\nstruct Timer<T> {\n\n pid: ProcessId,\n\n deadline: T,\n\n}\n\n\n\nimpl<T> Ord for Timer<T>\n\nwhere\n\n T: Ord,\n\n{\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n other.deadline.cmp(&self.deadline)\n\n }\n\n}\n\n\n\nimpl<T> PartialOrd for Timer<T>\n\nwhere\n\n T: Ord,\n\n{\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n Some(self.cmp(other))\n", "file_path": "src/rt/local/timers.rs", "rank": 92, "score": 78110.15529398945 }, { "content": "#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\n#[repr(transparent)]\n\nstruct WakerData(usize);\n\n\n\nconst THREAD_BITS: usize = 8;\n\nconst THREAD_SHIFT: usize = (size_of::<*const ()>() * 8) - THREAD_BITS;\n\nconst THREAD_MASK: usize = ((1 << THREAD_BITS) - 1) << THREAD_SHIFT;\n\n\n\nimpl WakerData {\n\n /// Create new `WakerData`.\n\n fn new(thread_id: WakerId, pid: ProcessId) -> WakerData {\n\n debug_assert!(pid.0 < (1 << THREAD_SHIFT), \"pid too 
large\");\n\n WakerData((thread_id.0 as usize) << THREAD_SHIFT | pid.0)\n\n }\n\n\n\n /// Get the thread id of from the waker data.\n\n const fn waker_id(self) -> WakerId {\n\n // Safety: `WakerId` is u8, so no truncating.\n\n #[allow(clippy::cast_possible_truncation)]\n\n WakerId((self.0 >> THREAD_SHIFT) as u8)\n\n }\n\n\n", "file_path": "src/rt/waker.rs", "rank": 93, "score": 78110.04174635359 }, { "content": "#[derive(Debug)]\n\n#[allow(dead_code)] // https://github.com/rust-lang/rust/issues/88900.\n\nstruct Metrics<'c, 'l> {\n\n heph_version: &'static str,\n\n os: &'c str,\n\n architecture: &'static str,\n\n host_name: &'c str,\n\n host_id: Uuid,\n\n app_name: &'c str,\n\n process_id: u32,\n\n parent_process_id: u32,\n\n uptime: Duration,\n\n worker_threads: usize,\n\n sync_actors: usize,\n\n shared: shared::Metrics,\n\n process_signals: SignalSet,\n\n process_signal_receivers: usize,\n\n total_cpu_time: Duration,\n\n cpu_time: Duration,\n\n trace_log: Option<trace::CoordinatorMetrics<'l>>,\n\n}\n\n\n", "file_path": "src/rt/coordinator.rs", "rank": 94, "score": 78039.04195274565 }, { "content": "struct TestAssertUnmovedNewActor;\n\n\n\nimpl NewActor for TestAssertUnmovedNewActor {\n\n type Message = ();\n\n type Argument = ();\n\n type Actor = AssertUnmoved<Pending<Result<(), !>>>;\n\n type Error = !;\n\n type RuntimeAccess = ThreadLocal;\n\n\n\n fn new(\n\n &mut self,\n\n _: actor::Context<Self::Message, ThreadLocal>,\n\n _: Self::Argument,\n\n ) -> Result<Self::Actor, Self::Error> {\n\n Ok(AssertUnmoved::new(pending()))\n\n }\n\n}\n\n\n", "file_path": "src/rt/process/tests.rs", "rank": 95, "score": 78031.14555637373 }, { "content": "/// The trait that defines how to create a new [`Actor`].\n\n///\n\n/// The easiest way to implement this by using an asynchronous function, see the\n\n/// [actor module] documentation.\n\n///\n\n/// [actor module]: crate::actor\n\npub trait NewActor {\n\n /// The type of messages the actor can receive.\n\n ///\n\n /// Using an 
enum allows an actor to handle multiple types of messages.\n\n ///\n\n /// # Examples\n\n ///\n\n /// Here is an example of using an enum as message type.\n\n ///\n\n /// ```\n\n /// #![feature(never_type)]\n\n ///\n\n /// use heph::rt::{self, Runtime, ThreadLocal};\n\n /// use heph::spawn::ActorOptions;\n\n /// use heph::supervisor::NoSupervisor;\n\n /// use heph::{actor, from_message};\n\n ///\n\n /// fn main() -> Result<(), rt::Error> {\n\n /// // Create and run the runtime.\n\n /// let mut runtime = Runtime::new()?;\n", "file_path": "src/actor/mod.rs", "rank": 96, "score": 78021.3107104457 }, { "content": "/// Synchronous actor.\n\n///\n\n/// Synchronous actor run on its own thread and therefore can perform\n\n/// synchronous operations such as blocking I/O. Much like regular [actors] the\n\n/// actor will be supplied with a [context], which can be used for receiving\n\n/// messages. As with regular actors communication is done via message sending,\n\n/// using [actor references].\n\n///\n\n/// The easiest way to implement this trait by using regular functions, see the\n\n/// [module level] documentation for an example of this.\n\n///\n\n/// [module level]: crate::actor\n\n///\n\n/// Synchronous actor can only be spawned before starting the runtime, see\n\n/// [`Runtime::spawn_sync_actor`].\n\n///\n\n/// # Panics\n\n///\n\n/// Panics are not caught and will **not** be returned to the actor's\n\n/// supervisor. If a synchronous actor panics it will bring down the entire\n\n/// runtime.\n\n///\n\n/// [actors]: crate::Actor\n\n/// [context]: SyncContext\n\n/// [actor references]: crate::ActorRef\n\n/// [`Runtime::spawn_sync_actor`]: crate::Runtime::spawn_sync_actor\n\npub trait SyncActor {\n\n /// The type of messages the synchronous actor can receive.\n\n ///\n\n /// Using an enum allows an actor to handle multiple types of messages. 
See\n\n /// [`NewActor::Message`] for examples.\n\n ///\n\n /// [`NewActor::Message`]: crate::NewActor::Message\n\n type Message;\n\n\n\n /// The argument(s) passed to the actor.\n\n ///\n\n /// This works just like the [arguments in `NewActor`].\n\n ///\n\n /// [arguments in `NewActor`]: crate::NewActor::Argument\n\n type Argument;\n\n\n\n /// An error the actor can return to its [supervisor]. This error will be\n\n /// considered terminal for this actor and should **not** be an error of\n\n /// regular processing of a message.\n\n ///\n", "file_path": "src/actor/sync.rs", "rank": 97, "score": 78021.31150090051 }, { "content": " /// Trait to support [`Actor`] for `Result<(), E>` and `()`.\n\n ///\n\n /// [`Actor`]: crate::actor::Actor\n\n pub trait ActorResult {\n\n /// See [`Actor::Error`].\n\n ///\n\n /// [`Actor::Error`]: crate::actor::Actor::Error\n\n type Error;\n\n\n\n /// Convert the return type in an `Result<(), Self::Error>`.\n\n fn into(self) -> Result<(), Self::Error>;\n\n }\n\n\n\n impl<E> ActorResult for Result<(), E> {\n\n type Error = E;\n\n\n\n fn into(self) -> Result<(), E> {\n\n self\n\n }\n\n }\n\n\n\n impl ActorResult for () {\n\n type Error = !;\n", "file_path": "src/actor/mod.rs", "rank": 98, "score": 78016.71170738546 }, { "content": "/// Actual trait behind [`rt::Access`].\n\n///\n\n/// [`rt::Access`]: crate::rt::Access\n\npub trait PrivateAccess {\n\n /// Returns the process id.\n\n fn pid(&self) -> ProcessId;\n\n\n\n /// Changes the process id to `new_pid`, returning the old process id.\n\n fn change_pid(&mut self, new_pid: ProcessId) -> ProcessId;\n\n\n\n /// Registers the `source`.\n\n fn register<S>(&mut self, source: &mut S, interest: Interest) -> io::Result<()>\n\n where\n\n S: event::Source + ?Sized;\n\n\n\n /// Reregisters the `source`.\n\n fn reregister<S>(&mut self, source: &mut S, interest: Interest) -> io::Result<()>\n\n where\n\n S: event::Source + ?Sized;\n\n\n\n /// Add a deadline.\n\n fn add_deadline(&mut self, deadline: 
Instant);\n\n\n", "file_path": "src/rt/access.rs", "rank": 99, "score": 78016.71170738546 } ]
Rust
perf/sawtooth_perf/src/batch_map.rs
sTyL3/sawtooth-core
bcd260ee836e58e85f39d57c39d28dcb9a6d7a0e
/* * Copyright 2018 Intel Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ------------------------------------------------------------------------------ */ use std::collections::HashMap; use sawtooth_sdk::messages::batch::BatchList; pub struct BatchMap { batches_by_id: HashMap<String, BatchList>, } impl BatchMap { pub fn new() -> BatchMap { BatchMap { batches_by_id: HashMap::new(), } } pub fn mark_submit_success(&mut self, batch_id: &str) { self.batches_by_id.remove(batch_id); } pub fn get_batchlist_to_submit(&mut self, batch_id: &str) -> Option<BatchList> { self.batches_by_id.get(batch_id).cloned() } pub fn add(&mut self, batchlist: BatchList) { batchlist .batches .last() .map(|b| b.header_signature.clone()) .map(|batch_id| { if !self.batches_by_id.contains_key(batch_id.as_str()) { self.batches_by_id.insert(batch_id, batchlist); } }); } } #[cfg(test)] mod tests { use super::BatchMap; use rand::Rng; use rand::StdRng; use protobuf::RepeatedField; use sawtooth_sdk::signing; use sawtooth_sdk::messages::batch::Batch; use sawtooth_sdk::messages::batch::BatchList; #[test] fn test_2_cycles_of_retries() { let mut timed_batch_iterator = BatchMap::new(); let mut batchlists = generate_batchlists(3); let batchlist1 = batchlists.pop(); let batchlist2 = batchlists.pop(); let batchlist3 = batchlists.pop(); let batch_id1 = batchlist1 .clone() .unwrap() .batches .last() .unwrap() .header_signature .clone(); let batch_id2 = batchlist2 .clone() .unwrap() .batches .last() .unwrap() 
.header_signature .clone(); let batch_id3 = batchlist3 .clone() .unwrap() .batches .last() .unwrap() .header_signature .clone(); timed_batch_iterator.add(batchlist1.clone().unwrap()); timed_batch_iterator.add(batchlist2.clone().unwrap()); timed_batch_iterator.add(batchlist3.clone().unwrap()); timed_batch_iterator.add(batchlist1.clone().unwrap()); timed_batch_iterator.add(batchlist2.clone().unwrap()); timed_batch_iterator.add(batchlist3.clone().unwrap()); timed_batch_iterator.mark_submit_success(&batch_id1); timed_batch_iterator.mark_submit_success(&batch_id3); assert_eq!( timed_batch_iterator.get_batchlist_to_submit(&batch_id2), batchlist2 ); assert_eq!( timed_batch_iterator.get_batchlist_to_submit(&batch_id1), None ); assert_eq!( timed_batch_iterator.get_batchlist_to_submit(&batch_id3), None ); timed_batch_iterator.mark_submit_success(&batch_id2); assert_eq!( timed_batch_iterator.get_batchlist_to_submit(&batch_id2), None ); } fn generate_batchlists(num: u32) -> Vec<BatchList> { let context = signing::create_context("secp256k1").unwrap(); let private_key = context.new_random_private_key().unwrap(); let _signer = signing::Signer::new(context.as_ref(), private_key.as_ref()); let mut batchlists = Vec::new(); let mut rng = StdRng::new().unwrap(); for _ in 0..num { let mut batch = Batch::new(); let mut batchlist = BatchList::new(); batch.set_header_signature(rng.gen_iter::<char>().take(100).collect()); let batches = RepeatedField::from_vec(vec![batch]); batchlist.set_batches(batches); batchlists.push(batchlist); } batchlists } }
/* * Copyright 2018 Intel Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ------------------------------------------------------------------------------ */ use std::collections::HashMap; use sawtooth_sdk::messages::batch::BatchList; pub struct BatchMap { batches_by_id: HashMap<String, BatchList>, } impl BatchMap { pub fn new() -> BatchMap { BatchMap { batches_by_id: HashMap::new(), } } pub fn mark_submit_success(&mut self, batch_id: &str) { self.batches_by_id.remove(batch_id); } pub fn get_batchlist_to_submit(&mut self, batch_id: &str) -> Option<BatchList> { self.batches_by_id.get(batch_id).cloned() }
} #[cfg(test)] mod tests { use super::BatchMap; use rand::Rng; use rand::StdRng; use protobuf::RepeatedField; use sawtooth_sdk::signing; use sawtooth_sdk::messages::batch::Batch; use sawtooth_sdk::messages::batch::BatchList; #[test] fn test_2_cycles_of_retries() { let mut timed_batch_iterator = BatchMap::new(); let mut batchlists = generate_batchlists(3); let batchlist1 = batchlists.pop(); let batchlist2 = batchlists.pop(); let batchlist3 = batchlists.pop(); let batch_id1 = batchlist1 .clone() .unwrap() .batches .last() .unwrap() .header_signature .clone(); let batch_id2 = batchlist2 .clone() .unwrap() .batches .last() .unwrap() .header_signature .clone(); let batch_id3 = batchlist3 .clone() .unwrap() .batches .last() .unwrap() .header_signature .clone(); timed_batch_iterator.add(batchlist1.clone().unwrap()); timed_batch_iterator.add(batchlist2.clone().unwrap()); timed_batch_iterator.add(batchlist3.clone().unwrap()); timed_batch_iterator.add(batchlist1.clone().unwrap()); timed_batch_iterator.add(batchlist2.clone().unwrap()); timed_batch_iterator.add(batchlist3.clone().unwrap()); timed_batch_iterator.mark_submit_success(&batch_id1); timed_batch_iterator.mark_submit_success(&batch_id3); assert_eq!( timed_batch_iterator.get_batchlist_to_submit(&batch_id2), batchlist2 ); assert_eq!( timed_batch_iterator.get_batchlist_to_submit(&batch_id1), None ); assert_eq!( timed_batch_iterator.get_batchlist_to_submit(&batch_id3), None ); timed_batch_iterator.mark_submit_success(&batch_id2); assert_eq!( timed_batch_iterator.get_batchlist_to_submit(&batch_id2), None ); } fn generate_batchlists(num: u32) -> Vec<BatchList> { let context = signing::create_context("secp256k1").unwrap(); let private_key = context.new_random_private_key().unwrap(); let _signer = signing::Signer::new(context.as_ref(), private_key.as_ref()); let mut batchlists = Vec::new(); let mut rng = StdRng::new().unwrap(); for _ in 0..num { let mut batch = Batch::new(); let mut batchlist = BatchList::new(); 
batch.set_header_signature(rng.gen_iter::<char>().take(100).collect()); let batches = RepeatedField::from_vec(vec![batch]); batchlist.set_batches(batches); batchlists.push(batchlist); } batchlists } }
pub fn add(&mut self, batchlist: BatchList) { batchlist .batches .last() .map(|b| b.header_signature.clone()) .map(|batch_id| { if !self.batches_by_id.contains_key(batch_id.as_str()) { self.batches_by_id.insert(batch_id, batchlist); } }); }
function_block-full_function
[ { "content": "/// Create the request from the next Target Url and the batchlist.\n\npub fn form_request_from_batchlist(\n\n targets: &mut Cycle<IntoIter<String>>,\n\n batch_list: Result<BatchList, WorkloadError>,\n\n basic_auth: &Option<String>,\n\n) -> Result<(Request, Option<String>), WorkloadError> {\n\n let mut batch_url = targets.next().unwrap();\n\n batch_url.push_str(\"/batches\");\n\n debug!(\"Batches POST: {}\", batch_url);\n\n\n\n let batchlist_unwrapped = batch_list?;\n\n\n\n let batch_id = match batchlist_unwrapped.batches.last() {\n\n Some(batch) => Some(batch.header_signature.clone()),\n\n None => None,\n\n };\n\n let bytes = batchlist_unwrapped.write_to_bytes()?;\n\n let mut req = Request::new(Method::Post, Uri::from_str(&batch_url)?);\n\n let content_len = bytes.len() as u64;\n\n req.set_body(bytes);\n\n req.headers_mut().set(ContentType::octet_stream());\n\n req.headers_mut().set(ContentLength(content_len));\n\n\n\n if let Some(ref basic_auth) = *basic_auth {\n\n req.headers_mut()\n\n .set(Authorization(Basic::from_str(&basic_auth)?));\n\n }\n\n\n\n Ok((req, batch_id))\n\n}\n\n\n", "file_path": "perf/sawtooth_perf/src/workload.rs", "rank": 0, "score": 236804.21308166708 }, { "content": "/// Call next on the BatchList Iterator and return the batchlist if no error.\n\npub fn get_next_batchlist(\n\n batch_list_iter: &mut Iterator<Item = BatchListResult>,\n\n batch_map: &Rc<RefCell<BatchMap>>,\n\n batches: &Rc<RefCell<Vec<BatchList>>>,\n\n) -> Result<BatchList, WorkloadError> {\n\n match batches.borrow_mut().pop() {\n\n Some(batchlist) => Ok(batchlist),\n\n None => match batch_list_iter.next() {\n\n Some(Ok(batch_list)) => {\n\n batch_map.borrow_mut().add(batch_list.clone());\n\n Ok(batch_list)\n\n }\n\n Some(Err(err)) => Err(WorkloadError::from(err)),\n\n None => Err(WorkloadError::NoBatchError),\n\n },\n\n }\n\n}\n\n\n", "file_path": "perf/sawtooth_perf/src/workload.rs", "rank": 1, "score": 236803.9886163387 }, { "content": "pub fn 
load_pem_key(pemstr: &str, password: &str) -> Result<(String, String), Error> {\n\n let c_pemstr = CString::new(pemstr).unwrap();\n\n let c_password = CString::new(password).unwrap();\n\n let pemstr_len = pemstr.len();\n\n let mut c_out_priv_key =\n\n CString::new(\"-----------------------------------------------------------------\").unwrap();\n\n let mut c_out_pub_key = CString::new(\"-----------------------------------------------------------------------------------------------------------------------------------\").unwrap();\n\n\n\n let err_num = unsafe {\n\n let c_ptr_pemstr = c_pemstr.into_raw();\n\n let c_ptr_password = c_password.into_raw();\n\n let c_ptr_out_priv_key = c_out_priv_key.into_raw();\n\n let c_ptr_out_pub_key = c_out_pub_key.into_raw();\n\n\n\n let err_num = ffi::load_pem_key(\n\n c_ptr_pemstr,\n\n pemstr_len,\n\n c_ptr_password,\n\n c_ptr_out_priv_key,\n\n c_ptr_out_pub_key,\n", "file_path": "sdk/rust/src/signing/pem_loader.rs", "rank": 2, "score": 215676.2936070586 }, { "content": "pub fn bytes_to_hex_str(b: &[u8]) -> String {\n\n b.iter()\n\n .map(|b| format!(\"{:02x}\", b))\n\n .collect::<Vec<_>>()\n\n .join(\"\")\n\n}\n", "file_path": "perf/smallbank_workload/src/playlist.rs", "rank": 3, "score": 207585.43891098019 }, { "content": "pub fn create_context(algorithm_name: &str) -> Result<Box<Context>, Error> {\n\n match algorithm_name {\n\n \"secp256k1\" => Ok(Box::new(secp256k1::Secp256k1Context::new())),\n\n _ => Err(Error::NoSuchAlgorithm(format!(\n\n \"no such algorithm: {}\",\n\n algorithm_name\n\n ))),\n\n }\n\n}\n\n/// Factory for generating signers.\n\npub struct CryptoFactory<'a> {\n\n context: &'a Context,\n\n}\n\n\n\nimpl<'a> CryptoFactory<'a> {\n\n /// Constructs a CryptoFactory.\n\n /// # Arguments\n\n ///\n\n /// * `context` - a cryptographic context\n\n pub fn new(context: &'a Context) -> Self {\n", "file_path": "sdk/rust/src/signing/mod.rs", "rank": 4, "score": 204647.08927863045 }, { "content": "/// Log if time since last log 
is greater than update time.\n\npub fn log(\n\n counter: &Rc<HTTPRequestCounter>,\n\n last_log_time: &mut time::Instant,\n\n update_time: u32,\n\n) -> Result<(), WorkloadError> {\n\n let log_time = time::Instant::now() - *last_log_time;\n\n if log_time.as_secs() as u32 >= update_time {\n\n counter.log(log_time.as_secs(), log_time.subsec_nanos());\n\n *last_log_time = time::Instant::now();\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "perf/sawtooth_perf/src/workload.rs", "rank": 5, "score": 187701.18834689358 }, { "content": "/// POST the batchlist to the rest api.\n\npub fn make_request(\n\n client: &Rc<Client<HttpConnector>>,\n\n handle: &Handle,\n\n counter: Rc<HTTPRequestCounter>,\n\n batch_map: Rc<RefCell<BatchMap>>,\n\n batches: Rc<RefCell<Vec<BatchList>>>,\n\n req: Result<(Request, Option<String>), WorkloadError>,\n\n) -> Result<(), WorkloadError> {\n\n let handle_clone = handle.clone();\n\n match req {\n\n Ok((req, batch_id)) => {\n\n counter.increment_sent();\n\n let response_future = client\n\n .request(req)\n\n .then(move |response: Result<Response, HyperError>| {\n\n handle_http_error(response, batch_id, &batches, &batch_map, &counter)\n\n })\n\n .map(|_| ())\n\n .map_err(|_| ());\n\n\n\n handle_clone.spawn(response_future);\n\n\n\n Ok(())\n\n }\n\n\n\n Err(err) => Err(err),\n\n }\n\n}\n", "file_path": "perf/sawtooth_perf/src/workload.rs", "rank": 6, "score": 185422.96932928235 }, { "content": "/// Generates a playlist of Smallbank transactions.\n\n///\n\n/// This function generates a collection of smallbank transactions and writes\n\n/// the result to the given output. 
The resulting playlist will consist of\n\n/// `num_accounts` CREATE_ACCOUNT transactions, followed by `num_transactions`\n\n/// additional transactions (deposits, transfers, etc).\n\n///\n\n/// A random seed may be provided to create repeatable, random output.\n\npub fn generate_smallbank_playlist(\n\n output: &mut Write,\n\n num_accounts: usize,\n\n num_transactions: usize,\n\n seed: Option<i32>,\n\n) -> Result<(), PlaylistError> {\n\n let mut fmt_writer = FmtWriter::new(output);\n\n let mut emitter = YamlEmitter::new(&mut fmt_writer);\n\n\n\n let txn_array: Vec<Yaml> = create_smallbank_playlist(num_accounts, num_transactions, seed)\n\n .map(Yaml::from)\n\n .collect();\n\n\n\n let final_yaml = Yaml::Array(txn_array);\n\n try!(\n\n emitter\n\n .dump(&final_yaml)\n\n .map_err(PlaylistError::YamlOutputError)\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "perf/smallbank_workload/src/playlist.rs", "rank": 7, "score": 183231.4442212942 }, { "content": "pub fn http_submitter(\n\n target: &str,\n\n rate: u64,\n\n receiver: &Arc<Mutex<mpsc::Receiver<Option<BatchList>>>>,\n\n) {\n\n let mut core = Core::new().unwrap();\n\n\n\n let client = Client::configure()\n\n .connector(HttpConnector::new(1, &core.handle()))\n\n .keep_alive(true)\n\n .build(&core.handle());\n\n\n\n let timer = tokio_timer::wheel()\n\n .tick_duration(time::Duration::new(0, 1_000_000))\n\n .build();\n\n\n\n // Define a target timeslice (how often to submit batches) based\n\n // on number of nanoseconds in a second divided by rate\n\n let timeslice = time::Duration::new(0, 1_000_000_000 / rate as u32);\n\n\n", "file_path": "perf/sawtooth_perf/src/batch_submit.rs", "rank": 8, "score": 183225.0181033077 }, { "content": "pub fn create_smallbank_playlist(\n\n num_accounts: usize,\n\n num_transactions: usize,\n\n seed: Option<i32>,\n\n) -> Box<Iterator<Item = SmallbankTransactionPayload>> {\n\n let rng = match seed {\n\n Some(seed) => {\n\n let v = vec![seed as usize];\n\n let seed: &[usize] = &v;\n\n 
SeedableRng::from_seed(seed)\n\n }\n\n None => StdRng::new().unwrap(),\n\n };\n\n\n\n let iter = SmallbankGeneratingIter {\n\n num_accounts: num_accounts,\n\n current_account: 0,\n\n rng: rng,\n\n };\n\n\n\n Box::new(iter.take(num_transactions))\n\n}\n\n\n", "file_path": "perf/smallbank_workload/src/playlist.rs", "rank": 9, "score": 183225.0181033077 }, { "content": "/// Created signed Smallbank transactions from a given playlist.\n\n///\n\n/// The playlist input is expected to be the same Yaml format as generated by\n\n/// the `generate_smallbank_playlist` function. All transactions will be\n\n/// signed with the given `PrivateKey` instance.\n\npub fn process_smallbank_playlist(\n\n output: &mut Write,\n\n playlist_input: &mut Read,\n\n signing_context: &signing::Context,\n\n signing_key: &signing::PrivateKey,\n\n) -> Result<(), PlaylistError> {\n\n let payloads = try!(read_smallbank_playlist(playlist_input));\n\n\n\n let crypto_factory = signing::CryptoFactory::new(signing_context);\n\n let signer = crypto_factory.new_signer(signing_key);\n\n let pub_key = try!(\n\n signing_context\n\n .get_public_key(signing_key)\n\n .map_err(PlaylistError::SigningError)\n\n );\n\n let pub_key_hex = pub_key.as_hex();\n\n\n\n let start = Instant::now();\n\n for payload in payloads {\n\n let mut txn = Transaction::new();\n", "file_path": "perf/smallbank_workload/src/playlist.rs", "rank": 10, "score": 183225.0181033077 }, { "content": "/// Run a continuous load of the BatchLists that are generated by BatchListIter.\n\npub fn run_workload(\n\n batch_list_iter: &mut Iterator<Item = BatchListResult>,\n\n time_to_wait: u32,\n\n update_time: u32,\n\n targets: Vec<String>,\n\n basic_auth: &Option<String>,\n\n) -> Result<(), workload::WorkloadError> {\n\n let mut core = Core::new().unwrap();\n\n let handle = core.handle();\n\n let client = Rc::new(Client::configure().build(&handle));\n\n let counter = Rc::new(workload::HTTPRequestCounter::new());\n\n\n\n let mut urls: 
Cycle<IntoIter<String>> = targets.into_iter().cycle();\n\n\n\n let batch_map = Rc::new(RefCell::new(BatchMap::new()));\n\n let batch_map_clone = Rc::clone(&batch_map);\n\n\n\n let batches = Rc::new(RefCell::new(Vec::new()));\n\n let batches_clone = Rc::clone(&batches);\n\n\n", "file_path": "perf/sawtooth_perf/src/batch_submit.rs", "rank": 11, "score": 183225.0181033077 }, { "content": "pub fn read_smallbank_playlist(\n\n input: &mut Read,\n\n) -> Result<Vec<SmallbankTransactionPayload>, PlaylistError> {\n\n let mut results = Vec::new();\n\n let buf = try!(read_yaml(input));\n\n let yaml_array = try!(load_yaml_array(&buf));\n\n for yaml in yaml_array.iter() {\n\n results.push(SmallbankTransactionPayload::from(yaml));\n\n }\n\n\n\n Ok(results)\n\n}\n\n\n", "file_path": "perf/smallbank_workload/src/playlist.rs", "rank": 12, "score": 183225.0181033077 }, { "content": "/// Populates a channel from a stream of length-delimited batches.\n\n/// Starts one workload submitter of the appropriate type (http, zmq)\n\n/// per target. 
Workload submitters consume from the channel at\n\n/// the configured rate until the channel is exhausted.\n\npub fn submit_signed_batches(\n\n reader: &mut Read,\n\n target: String,\n\n rate: usize,\n\n) -> Result<(), BatchReadingError> {\n\n let (sender, receiver) = mpsc::channel();\n\n let receiver = Arc::new(Mutex::new(receiver));\n\n\n\n let submit_thread = thread::spawn(move || {\n\n http_submitter(&target, rate as u64, &receiver);\n\n });\n\n\n\n let mut feeder = BatchListFeeder::new(reader);\n\n\n\n loop {\n\n match feeder.next() {\n\n Some(Ok(batch_list)) => {\n\n sender.send(Some(batch_list)).unwrap();\n\n }\n\n None => {\n", "file_path": "perf/sawtooth_perf/src/batch_submit.rs", "rank": 13, "score": 181119.02918713886 }, { "content": "pub fn get_blockstore_filename() -> String {\n\n String::from(DEFAULT_BLOCKSTORE_FILENAME)\n\n}\n", "file_path": "adm/src/config.rs", "rank": 14, "score": 175826.1683609712 }, { "content": "pub fn get_path_config() -> PathConfig {\n\n match env::var(\"SAWTOOTH_HOME\") {\n\n Ok(prefix) => PathConfig {\n\n config_dir: Path::new(&prefix).join(\"etc\").to_path_buf(),\n\n log_dir: Path::new(&prefix).join(\"logs\").to_path_buf(),\n\n data_dir: Path::new(&prefix).join(\"data\").to_path_buf(),\n\n key_dir: Path::new(&prefix).join(\"keys\").to_path_buf(),\n\n policy_dir: Path::new(&prefix).join(\"policy\").to_path_buf(),\n\n },\n\n Err(_) => PathConfig {\n\n config_dir: Path::new(DEFAULT_CONFIG_DIR).to_path_buf(),\n\n log_dir: Path::new(DEFAULT_LOG_DIR).to_path_buf(),\n\n data_dir: Path::new(DEFAULT_DATA_DIR).to_path_buf(),\n\n key_dir: Path::new(DEFAULT_KEY_DIR).to_path_buf(),\n\n policy_dir: Path::new(DEFAULT_POLICY_DIR).to_path_buf(),\n\n },\n\n }\n\n}\n\n\n", "file_path": "adm/src/config.rs", "rank": 15, "score": 173633.9188464514 }, { "content": "/// Generates signed batches from a stream of length-delimited transactions.\n\n/// Constrains the batches to `max_batch_size` number of transactions per\n\n/// batch. 
The resulting batches are written in a length-delimited fashion to\n\n/// the given writer.\n\npub fn generate_signed_batches<'a>(\n\n reader: &'a mut Read,\n\n writer: &'a mut Write,\n\n max_batch_size: usize,\n\n signing_context: &signing::Context,\n\n signing_key: &signing::PrivateKey,\n\n) -> Result<(), BatchingError> {\n\n let crypto_factory = signing::CryptoFactory::new(signing_context);\n\n let signer = crypto_factory.new_signer(signing_key);\n\n\n\n let mut producer = SignedBatchProducer::new(reader, max_batch_size, &signer);\n\n loop {\n\n match producer.next() {\n\n Some(Ok(batch)) => {\n\n if let Err(err) = batch.write_length_delimited_to_writer(writer) {\n\n return Err(BatchingError::MessageError(err));\n\n }\n\n }\n\n None => break,\n\n Some(Err(err)) => return Err(err),\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "perf/sawtooth_perf/src/batch_gen.rs", "rank": 16, "score": 171527.92993028252 }, { "content": "fn glob_simple(pattern: &str) -> Vec<String> {\n\n glob::glob(pattern)\n\n .expect(\"glob\")\n\n .map(|g| {\n\n g.expect(\"item\")\n\n .as_path()\n\n .to_str()\n\n .expect(\"utf-8\")\n\n .to_owned()\n\n })\n\n .collect()\n\n}\n", "file_path": "sdk/rust/build.rs", "rank": 17, "score": 160237.09707495416 }, { "content": "fn create_smallbank_address(payload: &str) -> String {\n\n let mut sha = Sha512::new();\n\n sha.input(payload.as_bytes());\n\n get_smallbank_prefix() + &sha.result_str()[..64].to_string()\n\n}\n", "file_path": "families/smallbank/smallbank_rust/src/handler.rs", "rank": 18, "score": 160108.12871238525 }, { "content": "fn glob_simple(pattern: &str) -> Vec<String> {\n\n glob::glob(pattern)\n\n .expect(\"glob\")\n\n .map(|g| {\n\n g.expect(\"item\")\n\n .as_path()\n\n .to_str()\n\n .expect(\"utf-8\")\n\n .to_owned()\n\n })\n\n .collect()\n\n}\n", "file_path": "perf/smallbank_workload/build.rs", "rank": 19, "score": 158131.31047931185 }, { "content": "fn hex_str_to_bytes(s: &str) -> Result<Vec<u8>, Error> {\n\n for (i, ch) in 
s.chars().enumerate() {\n\n if !ch.is_digit(16) {\n\n return Err(Error::ParseError(format!(\n\n \"invalid character position {}\",\n\n i\n\n )));\n\n }\n\n }\n\n\n\n let input: Vec<_> = s.chars().collect();\n\n\n\n let decoded: Vec<u8> = input\n\n .chunks(2)\n\n .map(|chunk| {\n\n ((chunk[0].to_digit(16).unwrap() << 4) | (chunk[1].to_digit(16).unwrap())) as u8\n\n })\n\n .collect();\n\n\n\n Ok(decoded)\n\n}\n\n\n", "file_path": "sdk/rust/src/signing/secp256k1.rs", "rank": 20, "score": 157562.84908312693 }, { "content": "fn glob_simple(pattern: &str) -> Vec<String> {\n\n glob::glob(pattern)\n\n .expect(\"glob\")\n\n .map(|g| {\n\n g.expect(\"item\")\n\n .as_path()\n\n .to_str()\n\n .expect(\"utf-8\")\n\n .to_owned()\n\n })\n\n .collect()\n\n}\n", "file_path": "families/smallbank/smallbank_rust/build.rs", "rank": 21, "score": 156106.78363676678 }, { "content": "struct from_json_fn\n\n{\n\n private:\n\n template<typename BasicJsonType, typename T>\n\n auto call(const BasicJsonType& j, T& val, priority_tag<1>) const\n\n noexcept(noexcept(from_json(j, val)))\n\n -> decltype(from_json(j, val), void())\n\n {\n\n return from_json(j, val);\n\n }\n\n\n\n template<typename BasicJsonType, typename T>\n\n void call(const BasicJsonType&, T&, priority_tag<0>) const noexcept\n\n {\n\n static_assert(sizeof(BasicJsonType) == 0,\n\n \"could not find from_json() method in T's namespace\");\n\n }\n\n\n\n public:\n\n template<typename BasicJsonType, typename T>\n\n void operator()(const BasicJsonType& j, T& val) const\n\n noexcept(noexcept(std::declval<from_json_fn>().call(j, val, priority_tag<1> {})))\n\n {\n\n return call(j, val, priority_tag<1> {});\n\n }\n\n};\n\n\n\n// taken from ranges-v3\n\ntemplate<typename T>\n", "file_path": "sdk/examples/intkey_cxx/json.hpp", "rank": 22, "score": 155987.84570666758 }, { "content": "struct to_json_fn\n\n{\n\n private:\n\n template<typename BasicJsonType, typename T>\n\n auto call(BasicJsonType& j, T&& val, priority_tag<1>) const 
noexcept(noexcept(to_json(j, std::forward<T>(val))))\n\n -> decltype(to_json(j, std::forward<T>(val)), void())\n\n {\n\n return to_json(j, std::forward<T>(val));\n\n }\n\n\n\n template<typename BasicJsonType, typename T>\n\n void call(BasicJsonType&, T&&, priority_tag<0>) const noexcept\n\n {\n\n static_assert(sizeof(BasicJsonType) == 0,\n\n \"could not find to_json() method in T's namespace\");\n\n }\n\n\n\n public:\n\n template<typename BasicJsonType, typename T>\n\n void operator()(BasicJsonType& j, T&& val) const\n\n noexcept(noexcept(std::declval<to_json_fn>().call(j, std::forward<T>(val), priority_tag<1> {})))\n\n {\n\n return call(j, std::forward<T>(val), priority_tag<1> {});\n\n }\n\n};\n\n\n", "file_path": "sdk/examples/intkey_cxx/json.hpp", "rank": 23, "score": 155987.84570666758 }, { "content": "fn apply_write_check(\n\n write_check_data: &SmallbankTransactionPayload_WriteCheckTransactionData,\n\n context: &mut TransactionContext,\n\n) -> Result<(), ApplyError> {\n\n match load_account(write_check_data.get_customer_id(), context)? 
{\n\n None => {\n\n warn!(\"Invalid transaction: during WRITE_CHECK, Account must exist\");\n\n Err(ApplyError::InvalidTransaction(format!(\n\n \"Account must exist\"\n\n )))\n\n }\n\n Some(mut account) => {\n\n let balance = account.get_checking_balance() - write_check_data.get_amount();\n\n account.set_checking_balance(balance);\n\n save_account(&account, context)\n\n }\n\n }\n\n}\n\n\n", "file_path": "families/smallbank/smallbank_rust/src/handler.rs", "rank": 24, "score": 151523.84600461728 }, { "content": "fn make_smallbank_write_check_txn(\n\n rng: &mut StdRng,\n\n num_accounts: usize,\n\n) -> smallbank::SmallbankTransactionPayload_WriteCheckTransactionData {\n\n let mut payload = smallbank::SmallbankTransactionPayload_WriteCheckTransactionData::new();\n\n payload.set_customer_id(rng.gen_range(0, num_accounts as u32));\n\n payload.set_amount(rng.gen_range(10, 200));\n\n\n\n payload\n\n}\n\n\n", "file_path": "perf/smallbank_workload/src/playlist.rs", "rank": 25, "score": 149417.62389298595 }, { "content": "#[inline]\n\nfn arg_error(msg: &str) -> Result<(), Box<Error>> {\n\n Err(Box::new(CliError::ArgumentError(String::from(msg))))\n\n}\n\n\n", "file_path": "perf/sawtooth_workload/src/main.rs", "rank": 26, "score": 149297.0015898316 }, { "content": "#[inline]\n\nfn arg_error(msg: &str) -> Result<(), Box<Error>> {\n\n Err(Box::new(CliError::ArgumentError(String::from(msg))))\n\n}\n\n\n", "file_path": "perf/smallbank_workload/src/main.rs", "rank": 27, "score": 149297.0015898316 }, { "content": "fn load_yaml_array(yaml_str: &str) -> Result<Cow<Vec<Yaml>>, PlaylistError> {\n\n let mut yaml = try!(YamlLoader::load_from_str(yaml_str).map_err(PlaylistError::YamlInputError));\n\n let element = yaml.remove(0);\n\n let yaml_array = element.as_vec().cloned().unwrap().clone();\n\n\n\n Ok(Cow::Owned(yaml_array))\n\n}\n\n\n\npub struct SmallbankGeneratingIter {\n\n num_accounts: usize,\n\n current_account: usize,\n\n rng: StdRng,\n\n}\n\n\n\nimpl SmallbankGeneratingIter {\n\n 
pub fn new(num_accounts: usize, seed: &[usize]) -> Self {\n\n SmallbankGeneratingIter {\n\n num_accounts: num_accounts,\n\n current_account: 0,\n\n rng: SeedableRng::from_seed(seed),\n", "file_path": "perf/smallbank_workload/src/playlist.rs", "rank": 28, "score": 147807.68175518318 }, { "content": "pub fn make_addresses(payload: &SmallbankTransactionPayload) -> Vec<String> {\n\n match payload.get_payload_type() {\n\n SBPayloadType::CREATE_ACCOUNT => vec![\n\n customer_id_address(payload.get_create_account().get_customer_id()),\n\n ],\n\n SBPayloadType::DEPOSIT_CHECKING => vec![\n\n customer_id_address(payload.get_deposit_checking().get_customer_id()),\n\n ],\n\n SBPayloadType::WRITE_CHECK => vec![\n\n customer_id_address(payload.get_write_check().get_customer_id()),\n\n ],\n\n SBPayloadType::TRANSACT_SAVINGS => vec![\n\n customer_id_address(payload.get_transact_savings().get_customer_id()),\n\n ],\n\n SBPayloadType::SEND_PAYMENT => vec![\n\n customer_id_address(payload.get_send_payment().get_source_customer_id()),\n\n customer_id_address(payload.get_send_payment().get_dest_customer_id()),\n\n ],\n\n SBPayloadType::AMALGAMATE => vec![\n\n customer_id_address(payload.get_amalgamate().get_source_customer_id()),\n\n customer_id_address(payload.get_amalgamate().get_dest_customer_id()),\n\n ],\n\n SBPayloadType::PAYLOAD_TYPE_UNSET => panic!(\"Payload type was not set: {:?}\", payload),\n\n }\n\n}\n\n\n", "file_path": "perf/smallbank_workload/src/playlist.rs", "rank": 29, "score": 145616.54270940184 }, { "content": "pub fn run<'a>(args: &ArgMatches<'a>) -> Result<(), CliError> {\n\n match args.subcommand() {\n\n (\"backup\", Some(args)) => run_backup_command(args),\n\n (\"restore\", Some(args)) => run_restore_command(args),\n\n (\"list\", Some(args)) => run_list_command(args),\n\n (\"show\", Some(args)) => run_show_command(args),\n\n (\"prune\", Some(args)) => run_prune_command(args),\n\n (\"export\", Some(args)) => run_export_command(args),\n\n (\"import\", Some(args)) 
=> run_import_command(args),\n\n (\"stats\", Some(args)) => run_stats_command(args),\n\n _ => {\n\n println!(\"Invalid subcommand; Pass --help for usage.\");\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n", "file_path": "adm/src/commands/blockstore.rs", "rank": 30, "score": 141830.672688745 }, { "content": "pub fn run<'a>(args: &ArgMatches<'a>) -> Result<(), CliError> {\n\n let path_config = config::get_path_config();\n\n let key_dir = &path_config.key_dir;\n\n if !key_dir.exists() {\n\n return Err(CliError::EnvironmentError(format!(\n\n \"Key directory does not exist: {:?}\",\n\n key_dir\n\n )));\n\n }\n\n\n\n let key_name = args.value_of(\"key_name\").unwrap_or(\"validator\");\n\n let private_key_path = key_dir.join(key_name).with_extension(\"priv\");\n\n let public_key_path = key_dir.join(key_name).with_extension(\"pub\");\n\n\n\n if !args.is_present(\"force\") {\n\n if private_key_path.exists() {\n\n return Err(CliError::EnvironmentError(format!(\n\n \"file exists: {:?}\",\n\n private_key_path\n\n )));\n", "file_path": "adm/src/commands/keygen.rs", "rank": 31, "score": 141830.672688745 }, { "content": "pub fn run<'a>(args: &ArgMatches<'a>) -> Result<(), CliError> {\n\n let genesis_file_path = if args.is_present(\"output\") {\n\n args.value_of(\"output\")\n\n .ok_or_else(|| CliError::ArgumentError(format!(\"Failed to read `output` arg\")))\n\n .map(|pathstr| Path::new(pathstr).to_path_buf())\n\n } else {\n\n Ok(config::get_path_config().data_dir.join(\"genesis.batch\"))\n\n }?;\n\n\n\n if genesis_file_path.exists() {\n\n return Err(CliError::EnvironmentError(format!(\n\n \"File already exists: {:?}\",\n\n genesis_file_path\n\n )));\n\n }\n\n\n\n let input_files = args.values_of(\"input_file\")\n\n .ok_or_else(|| CliError::ArgumentError(\"No input files passed\".into()))?;\n\n\n\n let batch_lists = input_files\n", "file_path": "adm/src/commands/genesis.rs", "rank": 32, "score": 141830.672688745 }, { "content": "fn read_yaml(input: &mut Read) -> Result<Cow<str>, PlaylistError> 
{\n\n let mut buf: String = String::new();\n\n try!(\n\n input\n\n .read_to_string(&mut buf)\n\n .map_err(PlaylistError::IoError)\n\n );\n\n Ok(buf.into())\n\n}\n\n\n", "file_path": "perf/smallbank_workload/src/playlist.rs", "rank": 33, "score": 136402.1026747933 }, { "content": "fn bytes_to_hex_str(b: &[u8]) -> String {\n\n b.iter()\n\n .map(|b| format!(\"{:02x}\", b))\n\n .collect::<Vec<_>>()\n\n .join(\"\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod secp256k1_test {\n\n use super::Secp256k1PrivateKey;\n\n use super::Secp256k1PublicKey;\n\n use super::super::CryptoFactory;\n\n use super::super::PrivateKey;\n\n use super::super::PublicKey;\n\n use super::super::create_context;\n\n\n\n static KEY1_PRIV_HEX: &'static str =\n\n \"2f1e7b7a130d7ba9da0068b3bb0ba1d79e7e77110302c9f746c3c2a63fe40088\";\n\n static KEY1_PUB_HEX: &'static str =\n\n \"026a2c795a9776f75464aa3bda3534c3154a6e91b357b1181d3f515110f84b67c5\";\n", "file_path": "sdk/rust/src/signing/secp256k1.rs", "rank": 34, "score": 134478.64513056222 }, { "content": "struct is_compatible_integer_type_impl : std::false_type {};\n\n\n\ntemplate<typename RealIntegerType, typename CompatibleNumberIntegerType>\n", "file_path": "sdk/examples/intkey_cxx/json.hpp", "rank": 35, "score": 134300.54752721568 }, { "content": "struct is_compatible_object_type_impl : std::false_type {};\n\n\n\ntemplate<class RealType, class CompatibleObjectType>\n", "file_path": "sdk/examples/intkey_cxx/json.hpp", "rank": 36, "score": 134300.54752721568 }, { "content": "fn backup_block<W: Write>(block: &Block, writer: &mut W) -> Result<(), CliError> {\n\n block\n\n .write_length_delimited_to_writer(writer)\n\n .map_err(|err| CliError::EnvironmentError(format!(\"{}\", err)))\n\n}\n\n\n", "file_path": "adm/src/commands/blockstore.rs", "rank": 37, "score": 129273.14512584882 }, { "content": "struct is_compatible_object_type_impl<true, RealType, CompatibleObjectType>\n\n{\n\n static constexpr auto value =\n\n std::is_constructible<typename 
RealType::key_type,\n\n typename CompatibleObjectType::key_type>::value and\n\n std::is_constructible<typename RealType::mapped_type,\n\n typename CompatibleObjectType::mapped_type>::value;\n\n};\n\n\n\ntemplate<class BasicJsonType, class CompatibleObjectType>\n", "file_path": "sdk/examples/intkey_cxx/json.hpp", "rank": 38, "score": 125148.59241830578 }, { "content": "struct is_compatible_integer_type_impl<true, RealIntegerType, CompatibleNumberIntegerType>\n\n{\n\n // is there an assert somewhere on overflows?\n\n using RealLimits = std::numeric_limits<RealIntegerType>;\n\n using CompatibleLimits = std::numeric_limits<CompatibleNumberIntegerType>;\n\n\n\n static constexpr auto value =\n\n std::is_constructible<RealIntegerType,\n\n CompatibleNumberIntegerType>::value and\n\n CompatibleLimits::is_integer and\n\n RealLimits::is_signed == CompatibleLimits::is_signed;\n\n};\n\n\n\ntemplate<typename RealIntegerType, typename CompatibleNumberIntegerType>\n", "file_path": "sdk/examples/intkey_cxx/json.hpp", "rank": 39, "score": 121843.7280115457 }, { "content": "struct conjunction<B1, Bn...> : std::conditional<bool(B1::value), conjunction<Bn...>, B1>::type {};\n\n\n\ntemplate<class B> struct negation : std::integral_constant < bool, !B::value > {};\n\n\n\n// dispatch utility (taken from ranges-v3)\n\ntemplate<unsigned N> struct priority_tag : priority_tag < N - 1 > {};\n\ntemplate<> struct priority_tag<0> {};\n\n\n\n\n\n//////////////////\n\n// constructors //\n\n//////////////////\n\n\n\ntemplate<value_t> struct external_constructor;\n\n\n\ntemplate<>\n", "file_path": "sdk/examples/intkey_cxx/json.hpp", "rank": 40, "score": 107875.09378934828 }, { "content": " def write(self, line):\n\n if line != '\\n':\n", "file_path": "validator/sawtooth_validator/server/log.py", "rank": 41, "score": 105064.26228793885 }, { "content": "fn main() {\n\n let args = parse_args();\n\n\n\n let result = match args.subcommand() {\n\n (\"blockstore\", Some(args)) => 
commands::blockstore::run(args),\n\n (\"keygen\", Some(args)) => commands::keygen::run(args),\n\n (\"genesis\", Some(args)) => commands::genesis::run(args),\n\n _ => {\n\n println!(\"Invalid subcommand; Pass --help for usage.\");\n\n Ok(())\n\n }\n\n };\n\n\n\n std::process::exit(match result {\n\n Ok(_) => 0,\n\n Err(err) => {\n\n eprintln!(\"Error: {}\", err);\n\n 1\n\n }\n\n });\n\n}\n\n\n", "file_path": "adm/src/main.rs", "rank": 42, "score": 103925.05947424733 }, { "content": "fn main() {\n\n let gil = Python::acquire_gil();\n\n let py = &mut gil.python();\n\n\n\n let mut args: Vec<PyString> = std::env::args()\n\n .skip(1)\n\n .map(|s| PyString::new(*py, &s))\n\n .collect();\n\n\n\n args.insert(0, PyString::new(*py, env!(\"CARGO_PKG_NAME\")));\n\n\n\n let cli = py.import(\"sawtooth_validator.server.cli\")\n\n .map_err(|err| err.print(*py))\n\n .unwrap();\n\n cli.call(*py, \"main\", (args,), None)\n\n .map_err(|err| err.print(*py))\n\n .unwrap();\n\n}\n", "file_path": "validator/src/main.rs", "rank": 43, "score": 103925.05947424733 }, { "content": "fn main() {\n\n // Compile C PEM loader file\n\n if cfg!(feature = \"pem\") {\n\n println!(\"cargo:rustc-link-lib={}={}\", \"dylib\", \"crypto\");\n\n cc::Build::new()\n\n .file(\"../c/loader.c\")\n\n .file(\"../c/c11_support.c\")\n\n .include(\"../c\")\n\n .compile(\"libloader.a\");\n\n }\n\n\n\n // Generate protobuf files\n\n let proto_src_files = glob_simple(\"../../protos/*.proto\");\n\n println!(\"{:?}\", proto_src_files);\n\n\n\n fs::create_dir_all(\"src/messages\").unwrap();\n\n\n\n protoc_rust::run(protoc_rust::Args {\n\n out_dir: \"src/messages\",\n\n input: &proto_src_files\n\n .iter()\n\n .map(|a| a.as_ref())\n\n .collect::<Vec<&str>>(),\n\n includes: &[\"src\", \"../../protos\"],\n\n }).expect(\"unable to run protoc\");\n\n}\n\n\n", "file_path": "sdk/rust/build.rs", "rank": 44, "score": 103925.05947424733 }, { "content": "fn main() {\n\n let proto_src_files = 
glob_simple(\"../../families/smallbank/protos/*.proto\");\n\n println!(\"{:?}\", proto_src_files);\n\n\n\n protoc_rust::run(protoc_rust::Args {\n\n out_dir: \"./src\",\n\n input: &proto_src_files\n\n .iter()\n\n .map(|a| a.as_ref())\n\n .collect::<Vec<&str>>(),\n\n includes: &[\"../../families/smallbank/protos\"],\n\n }).expect(\"Error generating rust files from smallbank protos\");\n\n}\n\n\n", "file_path": "perf/smallbank_workload/build.rs", "rank": 45, "score": 102627.67150053487 }, { "content": "struct has_to_json\n\n{\n\n private:\n\n template<typename U, typename = decltype(uncvref_t<U>::to_json(\n\n std::declval<BasicJsonType&>(), std::declval<T>()))>\n\n static int detect(U&&);\n\n static void detect(...);\n\n\n\n public:\n\n static constexpr bool value = std::is_integral<decltype(detect(\n\n std::declval<typename BasicJsonType::template json_serializer<T, void>>()))>::value;\n\n};\n\n\n\n\n\n/////////////\n\n// to_json //\n\n/////////////\n\n\n\ntemplate<typename BasicJsonType, typename T, enable_if_t<\n\n std::is_same<T, typename BasicJsonType::boolean_t>::value, int> = 0>\n", "file_path": "sdk/examples/intkey_cxx/json.hpp", "rank": 46, "score": 101449.99101197752 }, { "content": "struct has_from_json\n\n{\n\n private:\n\n // also check the return type of from_json\n\n template<typename U, typename = enable_if_t<std::is_same<void, decltype(uncvref_t<U>::from_json(\n\n std::declval<BasicJsonType>(), std::declval<T&>()))>::value>>\n\n static int detect(U&&);\n\n static void detect(...);\n\n\n\n public:\n\n static constexpr bool value = std::is_integral<decltype(\n\n detect(std::declval<typename BasicJsonType::template json_serializer<T, void>>()))>::value;\n\n};\n\n\n\n// This trait checks if JSONSerializer<T>::from_json(json const&) exists\n\n// this overload is used for non-default-constructible user-defined-types\n\ntemplate<typename BasicJsonType, typename T>\n", "file_path": "sdk/examples/intkey_cxx/json.hpp", "rank": 47, "score": 101449.99101197752 }, 
{ "content": "fn main() {\n\n let arg_matches = App::new(APP_NAME)\n\n .version(VERSION)\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .subcommand(create_batch_subcommand_args())\n\n .subcommand(create_submit_subcommand_args())\n\n .get_matches();\n\n\n\n let result = match arg_matches.subcommand() {\n\n (\"batch\", Some(args)) => run_batch_command(args),\n\n (\"submit\", Some(args)) => run_submit_command(args),\n\n _ => panic!(\"Should have processed a subcommand or exited before here\"),\n\n };\n\n\n\n std::process::exit(match result {\n\n Ok(_) => 0,\n\n Err(err) => {\n\n eprintln!(\"Error: {}\", err);\n\n 1\n\n }\n\n });\n\n}\n\n\n", "file_path": "perf/sawtooth_workload/src/main.rs", "rank": 48, "score": 101385.69368368282 }, { "content": "fn main() {\n\n match SimpleLogger::init(LevelFilter::Warn, Config::default()) {\n\n Ok(_) => (),\n\n Err(err) => println!(\"Failed to load logger: {}\", err.description()),\n\n }\n\n\n\n let arg_matches = get_arg_matches();\n\n\n\n match run_load_command(&arg_matches) {\n\n Ok(_) => (),\n\n Err(err) => println!(\"{}\", err.description()),\n\n }\n\n}\n\n\n", "file_path": "perf/intkey_workload/src/main.rs", "rank": 49, "score": 101385.69368368282 }, { "content": "fn main() {\n\n let proto_src_files = glob_simple(\"../protos/*.proto\");\n\n println!(\"{:?}\", proto_src_files);\n\n\n\n protoc_rust::run(protoc_rust::Args {\n\n out_dir: \"./src\",\n\n input: &proto_src_files\n\n .iter()\n\n .map(|a| a.as_ref())\n\n .collect::<Vec<&str>>(),\n\n includes: &[\"../protos\"],\n\n }).expect(\"Error generating rust files from smallbank protos\");\n\n}\n\n\n", "file_path": "families/smallbank/smallbank_rust/build.rs", "rank": 50, "score": 101385.69368368282 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n let arg_matches = App::new(APP_NAME)\n\n .version(VERSION)\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .subcommand(create_batch_subcommand_args())\n\n .subcommand(create_submit_subcommand_args())\n\n 
.subcommand(create_playlist_subcommand_args())\n\n .subcommand(create_load_subcommand_args())\n\n .get_matches();\n\n\n\n let result = match arg_matches.subcommand() {\n\n (\"batch\", Some(args)) => run_batch_command(args),\n\n (\"submit\", Some(args)) => run_submit_command(args),\n\n (\"playlist\", Some(args)) => run_playlist_command(args),\n\n (\"load\", Some(args)) => run_load_command(args),\n\n _ => panic!(\"Should have processed a subcommand or exited before here\"),\n\n };\n\n\n\n std::process::exit(match result {\n\n Ok(_) => 0,\n\n Err(err) => {\n\n eprintln!(\"Error: {}\", err);\n\n 1\n\n }\n\n });\n\n}\n\n\n", "file_path": "perf/smallbank_workload/src/main.rs", "rank": 51, "score": 101385.69368368282 }, { "content": "namespace Intel\n\n{\n\n\n\n// SGX SHA256 State handle specialization\n\n// When using an SGX SHA256 state handle file handle, you must specify the\n\n// second type to the AutoHandle template because the handle is really just a\n\n// typedef for a opaque void pointer, and there are several other handles like\n\n// that in SGX. 
For example:\n\n//\n\n// AutoHandle<sgx_sha_state_handle_t, SgxSha256StateHandleTraits>\n\n\n\nstruct SgxSha256StateHandleTraits\n\n{\n\n static sgx_sha_state_handle_t InvalidHandle()\n\n {\n\n return NULL;\n\n }\n\n \n\n static void Cleanup(sgx_sha_state_handle_t handle)\n\n {\n\n sgx_sha256_close(handle);\n\n }\n", "file_path": "consensus/poet/sgx/sawtooth_poet_sgx/libpoet_shared/auto_handle_sgx.h", "rank": 52, "score": 100543.57595824052 }, { "content": "struct adl_serializer\n\n{\n\n /*!\n\n @brief convert a JSON value to any value type\n\n\n\n This function is usually called by the `get()` function of the\n\n @ref basic_json class (either explicit or via conversion operators).\n\n\n\n @param[in] j JSON value to read from\n\n @param[in,out] val value to write to\n\n */\n\n template<typename BasicJsonType, typename ValueType>\n\n static void from_json(BasicJsonType&& j, ValueType& val) noexcept(\n\n noexcept(::nlohmann::from_json(std::forward<BasicJsonType>(j), val)))\n\n {\n\n ::nlohmann::from_json(std::forward<BasicJsonType>(j), val);\n\n }\n\n\n\n /*!\n\n @brief convert any value type to a JSON value\n", "file_path": "sdk/examples/intkey_cxx/json.hpp", "rank": 53, "score": 100259.8767794004 }, { "content": "struct static_const\n\n{\n\n static constexpr T value{};\n\n};\n\n\n\ntemplate<typename T>\n\nconstexpr T static_const<T>::value;\n\n} // namespace detail\n\n\n\n\n\n/// namespace to hold default `to_json` / `from_json` functions\n\nnamespace\n\n{\n\nconstexpr const auto& to_json = detail::static_const<detail::to_json_fn>::value;\n\nconstexpr const auto& from_json = detail::static_const<detail::from_json_fn>::value;\n\n}\n\n\n\n\n\n/*!\n\n@brief default JSONSerializer template argument\n\n\n\nThis serializer ignores the template arguments and uses ADL\n\n([argument-dependent lookup](http://en.cppreference.com/w/cpp/language/adl))\n\nfor serialization.\n\n*/\n\ntemplate<typename = void, typename = void>\n", "file_path": "sdk/examples/intkey_cxx/json.hpp", 
"rank": 54, "score": 100259.8767794004 }, { "content": "fn main() {\n\n let matches = clap_app!(intkey =>\n\n (version: crate_version!())\n\n (about: \"Intkey Transaction Processor (Rust)\")\n\n (@arg connect: -C --connect +takes_value\n\n \"connection endpoint for validator\")\n\n (@arg verbose: -v --verbose +multiple\n\n \"increase output verbosity\"))\n\n .get_matches();\n\n\n\n let endpoint = matches\n\n .value_of(\"connect\")\n\n .unwrap_or(\"tcp://localhost:4004\");\n\n\n\n let console_log_level;\n\n match matches.occurrences_of(\"verbose\") {\n\n 0 => console_log_level = LogLevelFilter::Warn,\n\n 1 => console_log_level = LogLevelFilter::Info,\n\n 2 => console_log_level = LogLevelFilter::Debug,\n\n 3 | _ => console_log_level = LogLevelFilter::Trace,\n", "file_path": "sdk/examples/intkey_rust/src/main.rs", "rank": 55, "score": 100195.65047288558 }, { "content": "fn main() {\n\n let matches = clap_app!(smallbank =>\n\n (version: crate_version!())\n\n (about: \"Smallbank Transaction Processor (Rust)\")\n\n (@arg connect: -C --connect +takes_value\n\n \"connection endpoint for validator\")\n\n (@arg verbose: -v --verbose +multiple\n\n \"increase output verbosity\"))\n\n .get_matches();\n\n\n\n let endpoint = matches\n\n .value_of(\"connect\")\n\n .unwrap_or(\"tcp://localhost:4004\");\n\n\n\n let console_log_level;\n\n match matches.occurrences_of(\"verbose\") {\n\n 0 => console_log_level = LogLevelFilter::Warn,\n\n 1 => console_log_level = LogLevelFilter::Info,\n\n 2 => console_log_level = LogLevelFilter::Debug,\n\n 3 | _ => console_log_level = LogLevelFilter::Trace,\n", "file_path": "families/smallbank/smallbank_rust/src/main.rs", "rank": 56, "score": 100195.65047288558 }, { "content": "struct IntkeyPayload {\n\n verb: Verb,\n\n name: String,\n\n value: u32,\n\n}\n\n\n\nimpl IntkeyPayload {\n\n pub fn new(payload_data: &[u8]) -> Result<Option<IntkeyPayload>, ApplyError> {\n\n let input = Cursor::new(payload_data);\n\n\n\n let mut decoder = 
cbor::GenericDecoder::new(cbor::Config::default(), input);\n\n let decoder_value = decoder\n\n .value()\n\n .map_err(|err| ApplyError::InternalError(format!(\"{}\", err)))?;\n\n\n\n let c = cbor::value::Cursor::new(&decoder_value);\n\n\n\n let verb_raw: String = match c.field(\"Verb\").text_plain() {\n\n None => {\n\n return Err(ApplyError::InvalidTransaction(String::from(\n", "file_path": "sdk/examples/intkey_rust/src/handler.rs", "rank": 57, "score": 99118.50922588687 }, { "content": "struct is_compatible_object_type\n\n{\n\n static auto constexpr value = is_compatible_object_type_impl <\n\n conjunction<negation<std::is_same<void, CompatibleObjectType>>,\n\n has_mapped_type<CompatibleObjectType>,\n\n has_key_type<CompatibleObjectType>>::value,\n\n typename BasicJsonType::object_t, CompatibleObjectType >::value;\n\n};\n\n\n\ntemplate<typename BasicJsonType, typename T>\n", "file_path": "sdk/examples/intkey_cxx/json.hpp", "rank": 58, "score": 99118.50922588687 }, { "content": "struct is_compatible_integer_type\n\n{\n\n static constexpr auto value =\n\n is_compatible_integer_type_impl <\n\n std::is_integral<CompatibleNumberIntegerType>::value and\n\n not std::is_same<bool, CompatibleNumberIntegerType>::value,\n\n RealIntegerType, CompatibleNumberIntegerType > ::value;\n\n};\n\n\n\n\n\n// trait checking if JSONSerializer<T>::from_json(json const&, udt&) exists\n\ntemplate<typename BasicJsonType, typename T>\n", "file_path": "sdk/examples/intkey_cxx/json.hpp", "rank": 59, "score": 99118.50922588687 }, { "content": "struct has_non_default_from_json\n\n{\n\n private:\n\n template <\n\n typename U,\n\n typename = enable_if_t<std::is_same<\n\n T, decltype(uncvref_t<U>::from_json(std::declval<BasicJsonType>()))>::value >>\n\n static int detect(U&&);\n\n static void detect(...);\n\n\n\n public:\n\n static constexpr bool value = std::is_integral<decltype(detect(\n\n std::declval<typename BasicJsonType::template json_serializer<T, void>>()))>::value;\n\n};\n\n\n\n// This 
trait checks if BasicJsonType::json_serializer<T>::to_json exists\n\ntemplate<typename BasicJsonType, typename T>\n", "file_path": "sdk/examples/intkey_cxx/json.hpp", "rank": 60, "score": 99118.50922588687 }, { "content": "#[derive(Clone)]\n\nstruct InboundRouter {\n\n inbound_tx: SyncSender<MessageResult>,\n\n expected_replies: Arc<Mutex<HashMap<String, Sender<MessageResult>>>>,\n\n}\n\n\n\nimpl InboundRouter {\n\n fn new(inbound_tx: SyncSender<MessageResult>) -> Self {\n\n InboundRouter {\n\n inbound_tx: inbound_tx,\n\n expected_replies: Arc::new(Mutex::new(HashMap::new())),\n\n }\n\n }\n\n fn route(&mut self, message_result: MessageResult) {\n\n match message_result {\n\n Ok(message) => {\n\n let mut expected_replies = self.expected_replies.lock().unwrap();\n\n match expected_replies.remove(message.get_correlation_id()) {\n\n Some(sender) => sender.send(Ok(message)).expect(\"Unable to route reply\"),\n\n None => self.inbound_tx\n\n .send(Ok(message))\n", "file_path": "sdk/rust/src/messaging/zmq_stream.rs", "rank": 61, "score": 99118.50922588687 }, { "content": "struct is_compatible_array_type\n\n{\n\n static auto constexpr value =\n\n conjunction<negation<std::is_same<void, CompatibleArrayType>>,\n\n negation<is_compatible_object_type<\n\n BasicJsonType, CompatibleArrayType>>,\n\n negation<std::is_constructible<typename BasicJsonType::string_t,\n\n CompatibleArrayType>>,\n\n negation<is_basic_json_nested_type<BasicJsonType, CompatibleArrayType>>,\n\n has_value_type<CompatibleArrayType>,\n\n has_iterator<CompatibleArrayType>>::value;\n\n};\n\n\n\ntemplate<bool, typename, typename>\n", "file_path": "sdk/examples/intkey_cxx/json.hpp", "rank": 62, "score": 99118.50922588687 }, { "content": "/// Log if there is a HTTP Error.\n\nfn handle_http_error(\n\n response: Result<Response, HyperError>,\n\n batch_id: Option<String>,\n\n batches: &Rc<RefCell<Vec<BatchList>>>,\n\n batch_map: &Rc<RefCell<BatchMap>>,\n\n counter: &Rc<HTTPRequestCounter>,\n\n) -> Result<(), 
HyperError> {\n\n if let Some(batch_id) = batch_id {\n\n match response {\n\n Ok(response) => match response.status() {\n\n StatusCode::Accepted => batch_map.borrow_mut().mark_submit_success(&batch_id),\n\n StatusCode::TooManyRequests => counter.increment_queue_full(),\n\n\n\n _ => if let Some(batchlist) =\n\n batch_map.borrow_mut().get_batchlist_to_submit(&batch_id)\n\n {\n\n batches.borrow_mut().push(batchlist)\n\n },\n\n },\n\n Err(err) => {\n\n if let Some(batchlist) = batch_map.borrow_mut().get_batchlist_to_submit(&batch_id) {\n\n batches.borrow_mut().push(batchlist)\n\n }\n\n info!(\"{}\", err);\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "perf/sawtooth_perf/src/workload.rs", "rank": 63, "score": 99054.35103212371 }, { "content": "fn apply_amalgamate(\n\n amalgamate_data: &SmallbankTransactionPayload_AmalgamateTransactionData,\n\n context: &mut TransactionContext,\n\n) -> Result<(), ApplyError> {\n\n fn err() -> ApplyError {\n\n warn!(\"Invalid transaction: during AMALGAMATE, both source and dest accounts must exist\");\n\n ApplyError::InvalidTransaction(String::from(\"Both source and dest accounts must exist\"))\n\n }\n\n\n\n let mut source_account =\n\n load_account(amalgamate_data.get_source_customer_id(), context)?.ok_or_else(err)?;\n\n let mut dest_account =\n\n load_account(amalgamate_data.get_dest_customer_id(), context)?.ok_or_else(err)?;\n\n\n\n let balance = dest_account.get_checking_balance() + source_account.get_savings_balance();\n\n source_account.set_savings_balance(0);\n\n dest_account.set_checking_balance(balance);\n\n save_account(&source_account, context).and(save_account(&dest_account, context))\n\n}\n\n\n", "file_path": "families/smallbank/smallbank_rust/src/handler.rs", "rank": 64, "score": 99054.35103212371 }, { "content": "fn load_account(\n\n customer_id: u32,\n\n context: &mut TransactionContext,\n\n) -> Result<Option<Account>, ApplyError> {\n\n let response = context\n\n 
.get_state(&create_smallbank_address(&format!(\"{}\", customer_id)))\n\n .map_err(|err| {\n\n warn!(\"Invalid transaction: Failed to load Account: {:?}\", err);\n\n ApplyError::InvalidTransaction(format!(\"Failed to load Account: {:?}\", err))\n\n })?;\n\n match response {\n\n Some(packed) => unpack_account(&packed).map(Some),\n\n None => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "families/smallbank/smallbank_rust/src/handler.rs", "rank": 65, "score": 99054.35103212371 }, { "content": "/// Internal stream, guarding a zmq socket.\n\nstruct SendReceiveStream {\n\n address: String,\n\n socket: zmq::Socket,\n\n outbound_recv: Receiver<SocketCommand>,\n\n inbound_router: InboundRouter,\n\n monitor_socket: zmq::Socket,\n\n}\n\n\n\nconst POLL_TIMEOUT: i64 = 10;\n\n\n\nimpl SendReceiveStream {\n\n fn new(\n\n context: &zmq::Context,\n\n address: &str,\n\n outbound_recv: Receiver<SocketCommand>,\n\n inbound_router: InboundRouter,\n\n ) -> Self {\n\n let socket = context.socket(zmq::DEALER).unwrap();\n\n socket\n\n .monitor(\n", "file_path": "sdk/rust/src/messaging/zmq_stream.rs", "rank": 66, "score": 98022.95348560432 }, { "content": "struct is_basic_json_nested_type\n\n{\n\n static auto constexpr value = std::is_same<T, typename BasicJsonType::iterator>::value or\n\n std::is_same<T, typename BasicJsonType::const_iterator>::value or\n\n std::is_same<T, typename BasicJsonType::reverse_iterator>::value or\n\n std::is_same<T, typename BasicJsonType::const_reverse_iterator>::value or\n\n std::is_same<T, typename BasicJsonType::json_pointer>::value;\n\n};\n\n\n\ntemplate<class BasicJsonType, class CompatibleArrayType>\n", "file_path": "sdk/examples/intkey_cxx/json.hpp", "rank": 67, "score": 98022.95348560432 }, { "content": "#[derive(Debug)]\n\nstruct IntKeyCliError {\n\n msg: String,\n\n}\n\n\n\nimpl Error for IntKeyCliError {\n\n fn description(&self) -> &str {\n\n self.msg.as_str()\n\n }\n\n}\n\n\n\nimpl fmt::Display for IntKeyCliError {\n\n fn fmt(&self, f: &mut 
fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}\", format!(\"IntKeyCliError {}\", self.msg))\n\n }\n\n}\n\n\n\nimpl From<ParseIntError> for IntKeyCliError {\n\n fn from(error: ParseIntError) -> Self {\n\n IntKeyCliError {\n\n msg: error.description().to_string(),\n", "file_path": "perf/intkey_workload/src/main.rs", "rank": 68, "score": 98022.95348560432 }, { "content": "fn print_block_store_list_row(\n\n block_num: u64,\n\n block_id: &str,\n\n batches: usize,\n\n txns: usize,\n\n signer: &str,\n\n) {\n\n println!(\n\n \"{:<5} {:<128} {:<5} {:<5} {}...\",\n\n block_num,\n\n block_id,\n\n batches,\n\n txns,\n\n &signer[..6]\n\n );\n\n}\n\n\n", "file_path": "adm/src/commands/blockstore.rs", "rank": 69, "score": 97958.86067070691 }, { "content": "fn print_block_store_list_header() {\n\n println!(\n\n \"{:<5} {:<128} {:<5} {:<5} {}\",\n\n \"NUM\", \"BLOCK_ID\", \"BATS\", \"TXNS\", \"SIGNER\"\n\n );\n\n}\n\n\n", "file_path": "adm/src/commands/blockstore.rs", "rank": 70, "score": 97958.86067070691 }, { "content": "fn apply_deposit_checking(\n\n deposit_checking_data: &SmallbankTransactionPayload_DepositCheckingTransactionData,\n\n context: &mut TransactionContext,\n\n) -> Result<(), ApplyError> {\n\n match load_account(deposit_checking_data.get_customer_id(), context)? 
{\n\n None => {\n\n warn!(\"Invalid transaction: during DEPOSIT_CHECKING, Account must exist\");\n\n Err(ApplyError::InvalidTransaction(format!(\n\n \"Account must exist\"\n\n )))\n\n }\n\n Some(mut account) => {\n\n let balance = account.get_checking_balance() + deposit_checking_data.get_amount();\n\n account.set_checking_balance(balance);\n\n save_account(&account, context)\n\n }\n\n }\n\n}\n\n\n", "file_path": "families/smallbank/smallbank_rust/src/handler.rs", "rank": 71, "score": 97958.86067070691 }, { "content": "fn apply_create_account(\n\n mut create_account_data: SmallbankTransactionPayload_CreateAccountTransactionData,\n\n context: &mut TransactionContext,\n\n) -> Result<(), ApplyError> {\n\n match load_account(create_account_data.get_customer_id(), context)? {\n\n Some(_) => {\n\n warn!(\"Invalid transaction: during CREATE_ACCOUNT, Customer Name must be set\");\n\n Err(ApplyError::InvalidTransaction(format!(\n\n \"Customer Name must be set\"\n\n )))\n\n }\n\n None => {\n\n if create_account_data.get_customer_name().is_empty() {\n\n warn!(\"Invalid transaction: during CREATE_ACCOUNT, Customer Name must be set\");\n\n Err(ApplyError::InvalidTransaction(format!(\n\n \"Customer Name must be set\"\n\n )))\n\n } else {\n\n let mut new_account = Account::new();\n\n new_account.set_customer_id(create_account_data.get_customer_id());\n\n new_account.set_customer_name(create_account_data.take_customer_name());\n\n new_account.set_savings_balance(create_account_data.get_initial_savings_balance());\n\n new_account\n\n .set_checking_balance(create_account_data.get_initial_checking_balance());\n\n save_account(&new_account, context)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "families/smallbank/smallbank_rust/src/handler.rs", "rank": 72, "score": 97958.86067070691 }, { "content": "fn apply_send_payment(\n\n send_payment_data: &SmallbankTransactionPayload_SendPaymentTransactionData,\n\n context: &mut TransactionContext,\n\n) -> Result<(), ApplyError> {\n\n fn err() -> 
ApplyError {\n\n warn!(\"Invalid transaction: during SEND_PAYMENT, both source and dest accounts must exist\");\n\n ApplyError::InvalidTransaction(String::from(\"Both source and dest accounts must exist\"))\n\n }\n\n\n\n let mut source_account =\n\n load_account(send_payment_data.get_source_customer_id(), context)?.ok_or_else(err)?;\n\n let mut dest_account =\n\n load_account(send_payment_data.get_dest_customer_id(), context)?.ok_or_else(err)?;\n\n\n\n if source_account.get_checking_balance() < send_payment_data.get_amount() {\n\n warn!(\"Invalid transaction: during SEND_PAYMENT, Insufficient funds in source checking account\");\n\n Err(ApplyError::InvalidTransaction(String::from(\n\n \"Insufficient funds in source checking account\",\n\n )))\n\n } else {\n\n let source_balance = source_account.get_checking_balance() - send_payment_data.get_amount();\n\n source_account.set_checking_balance(source_balance);\n\n let dest_balance = dest_account.get_checking_balance() + send_payment_data.get_amount();\n\n dest_account.set_checking_balance(dest_balance);\n\n save_account(&source_account, context).and(save_account(&dest_account, context))\n\n }\n\n}\n\n\n", "file_path": "families/smallbank/smallbank_rust/src/handler.rs", "rank": 73, "score": 97958.86067070691 }, { "content": "fn apply_transact_savings(\n\n transact_savings_data: &SmallbankTransactionPayload_TransactSavingsTransactionData,\n\n context: &mut TransactionContext,\n\n) -> Result<(), ApplyError> {\n\n match load_account(transact_savings_data.get_customer_id(), context)? 
{\n\n None => {\n\n warn!(\"Invalid transaction: during TRANSACT_SAVINGS, Account must exist\");\n\n Err(ApplyError::InvalidTransaction(format!(\n\n \"Account must exist\"\n\n )))\n\n }\n\n Some(mut account) => {\n\n if transact_savings_data.get_amount() < 0\n\n && (-transact_savings_data.get_amount() as u32) > account.get_savings_balance()\n\n {\n\n warn!(\"Invalid transaction: during TRANSACT_SAVINGS, Insufficient funds in source savings account\");\n\n return Err(ApplyError::InvalidTransaction(format!(\n\n \"Insufficient funds in source savings account\"\n\n )));\n\n }\n", "file_path": "families/smallbank/smallbank_rust/src/handler.rs", "rank": 74, "score": 97958.86067070691 }, { "content": "fn make_smallbank_amalgamate_txn(\n\n rng: &mut StdRng,\n\n num_accounts: usize,\n\n) -> smallbank::SmallbankTransactionPayload_AmalgamateTransactionData {\n\n let mut payload = smallbank::SmallbankTransactionPayload_AmalgamateTransactionData::new();\n\n let source_id = rng.gen_range(0, num_accounts as u32);\n\n let dest_id = next_non_matching_in_range(rng, num_accounts as u32, source_id);\n\n payload.set_source_customer_id(source_id);\n\n payload.set_dest_customer_id(dest_id);\n\n\n\n payload\n\n}\n\n\n", "file_path": "perf/smallbank_workload/src/playlist.rs", "rank": 75, "score": 97958.86067070691 }, { "content": "fn make_smallbank_send_payment_txn(\n\n rng: &mut StdRng,\n\n num_accounts: usize,\n\n) -> smallbank::SmallbankTransactionPayload_SendPaymentTransactionData {\n\n let mut payload = smallbank::SmallbankTransactionPayload_SendPaymentTransactionData::new();\n\n let source_id = rng.gen_range(0, num_accounts as u32);\n\n let dest_id = next_non_matching_in_range(rng, num_accounts as u32, source_id);\n\n payload.set_source_customer_id(source_id);\n\n payload.set_dest_customer_id(dest_id);\n\n payload.set_amount(rng.gen_range(10, 200));\n\n\n\n payload\n\n}\n\n\n", "file_path": "perf/smallbank_workload/src/playlist.rs", "rank": 76, "score": 96906.47564626824 }, { 
"content": "fn make_smallbank_transact_savings_txn(\n\n rng: &mut StdRng,\n\n num_accounts: usize,\n\n) -> smallbank::SmallbankTransactionPayload_TransactSavingsTransactionData {\n\n let mut payload = smallbank::SmallbankTransactionPayload_TransactSavingsTransactionData::new();\n\n payload.set_customer_id(rng.gen_range(0, num_accounts as u32));\n\n payload.set_amount(rng.gen_range(10, 200));\n\n\n\n payload\n\n}\n\n\n", "file_path": "perf/smallbank_workload/src/playlist.rs", "rank": 77, "score": 96906.47564626824 }, { "content": "fn make_smallbank_deposit_checking_txn(\n\n rng: &mut StdRng,\n\n num_accounts: usize,\n\n) -> smallbank::SmallbankTransactionPayload_DepositCheckingTransactionData {\n\n let mut payload = smallbank::SmallbankTransactionPayload_DepositCheckingTransactionData::new();\n\n payload.set_customer_id(rng.gen_range(0, num_accounts as u32));\n\n payload.set_amount(rng.gen_range(10, 200));\n\n\n\n payload\n\n}\n\n\n", "file_path": "perf/smallbank_workload/src/playlist.rs", "rank": 78, "score": 96906.47564626824 }, { "content": " def test_does_not_require_genesis_with_no_file_no_network(self):\n\n \"\"\"\n\n In this case, when there is:\n\n - no genesis.batch file\n\n - no chain head\n\n - no network\n\n the the GenesisController should not require genesis.\n\n \"\"\"\n\n block_store = self.make_block_store()\n\n\n\n genesis_ctrl = GenesisController(\n\n Mock(name='context_manager'),\n\n Mock(name='txn_executor'),\n\n Mock('completer'),\n\n block_store,\n\n StateViewFactory(DictDatabase()),\n\n self._signer,\n\n data_dir=self._temp_dir,\n\n config_dir=self._temp_dir,\n\n chain_id_manager=ChainIdManager(self._temp_dir),\n\n batch_sender=Mock('batch_sender'))\n\n\n", "file_path": "validator/tests/test_genesis/tests.py", "rank": 79, "score": 95627.72206044446 }, { "content": "def write_validator_config(sawtooth_home, **kwargs):\n\n with open(os.path.join(sawtooth_home, 'etc',\n\n 'validator.toml'), mode='w') as out:\n", "file_path": 
"integration/sawtooth_integration/tests/test_network_permissioning.py", "rank": 80, "score": 95524.91910038413 }, { "content": "/// A context for a cryptographic signing algorithm.\n\npub trait Context {\n\n /// Returns the algorithm name.\n\n fn get_algorithm_name(&self) -> &str;\n\n /// Sign a message\n\n /// Given a private key for this algorithm, sign the given message bytes\n\n /// and return a hex-encoded string of the resulting signature.\n\n /// # Arguments\n\n ///\n\n /// * `message`- the message bytes\n\n /// * `private_key` the private key\n\n ///\n\n /// # Returns\n\n ///\n\n /// * `signature` - The signature in a hex-encoded string\n\n fn sign(&self, message: &[u8], key: &PrivateKey) -> Result<String, Error>;\n\n\n\n /// Verifies that the signature of a message was produced with the\n\n /// associated public key.\n\n /// # Arguments\n\n ///\n", "file_path": "sdk/rust/src/signing/mod.rs", "rank": 81, "score": 93910.35034065996 }, { "content": " def __str__(self):\n\n return \\\n", "file_path": "consensus/poet/common/sawtooth_poet_common/sgx_structs/_sgx_attributes.py", "rank": 82, "score": 93852.63490559616 }, { "content": " def __str__(self):\n", "file_path": "consensus/poet/common/sawtooth_poet_common/sgx_structs/_sgx_basename.py", "rank": 83, "score": 93852.63490559616 }, { "content": " def __str__(self):\n", "file_path": "consensus/poet/common/sawtooth_poet_common/sgx_structs/_sgx_measurement.py", "rank": 84, "score": 93852.63490559616 }, { "content": " def __str__(self):\n\n return \\\n\n 'SGX_QUOTE: version={}, sign_type={}, epid_group_id={}, '\\\n\n 'qe_svn={}, pce_svn={}, extended_epid_group_id={}, '\\\n\n 'basename={{{}}}, report_body={{{}}}, signature_len={}, '\\\n\n 'signature={}'.format(\n\n self.version,\n\n self.sign_type,\n\n self.epid_group_id.hex(),\n\n self.qe_svn,\n\n self.pce_svn,\n\n self.extended_epid_group_id,\n\n self.basename,\n\n self.report_body,\n\n self.signature_len,\n", "file_path": 
"consensus/poet/common/sawtooth_poet_common/sgx_structs/_sgx_quote.py", "rank": 85, "score": 93852.63490559616 }, { "content": " def __str__(self):\n\n return \\\n\n 'SGX_REPORT: body={{{}}}, key_id={{{}}}, mac={}'.format(\n\n self.body,\n\n self.key_id,\n", "file_path": "consensus/poet/common/sawtooth_poet_common/sgx_structs/_sgx_report.py", "rank": 86, "score": 93852.63490559616 }, { "content": "struct FmtWriter<'a> {\n\n writer: Box<&'a mut Write>,\n\n}\n\n\n\nimpl<'a> FmtWriter<'a> {\n\n pub fn new(writer: &'a mut Write) -> Self {\n\n FmtWriter {\n\n writer: Box::new(writer),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> fmt::Write for FmtWriter<'a> {\n\n fn write_str(&mut self, s: &str) -> Result<(), fmt::Error> {\n\n let w = &mut *self.writer;\n\n w.write_all(s.as_bytes()).map_err(|_| fmt::Error::default())\n\n }\n\n}\n\n\n", "file_path": "perf/smallbank_workload/src/playlist.rs", "rank": 87, "score": 93846.29263812839 }, { "content": "pub trait TransactionHandler {\n\n /// TransactionHandler that defines the business logic for a new transaction family.\n\n /// The family_name, family_versions, and namespaces functions are\n\n /// used by the processor to route processing requests to the handler.\n\n\n\n /// family_name should return the name of the transaction family that this\n\n /// handler can process, e.g. \"intkey\"\n\n fn family_name(&self) -> String;\n\n\n\n /// family_versions should return a list of versions this transaction\n\n /// family handler can process, e.g. [\"1.0\"]\n\n fn family_versions(&self) -> Vec<String>;\n\n\n\n /// namespaces should return a list containing all the handler's\n\n /// namespaces, e.g. [\"abcdef\"]\n\n fn namespaces(&self) -> Vec<String>;\n\n\n\n /// Apply is the single method where all the business logic for a\n\n /// transaction family is defined. 
The method will be called by the\n\n /// transaction processor upon receiving a TpProcessRequest that the\n\n /// handler understands and will pass in the TpProcessRequest and an\n\n /// initialized instance of the Context type.\n\n fn apply(\n\n &self,\n\n request: &TpProcessRequest,\n\n context: &mut TransactionContext,\n\n ) -> Result<(), ApplyError>;\n\n}\n", "file_path": "sdk/rust/src/processor/handler.rs", "rank": 88, "score": 92767.72905235576 }, { "content": "/// A private key instance.\n\n/// The underlying content is dependent on implementation.\n\npub trait PrivateKey {\n\n /// Returns the algorithm name used for this private key.\n\n fn get_algorithm_name(&self) -> &str;\n\n /// Return the private key encoded as a hex string.\n\n fn as_hex(&self) -> String;\n\n /// Return the private key bytes.\n\n fn as_slice(&self) -> &[u8];\n\n}\n\n\n", "file_path": "sdk/rust/src/signing/mod.rs", "rank": 89, "score": 92767.72905235576 }, { "content": "/// A Message Sender\n\n///\n\n/// A message\n\npub trait MessageSender {\n\n fn send(\n\n &mut self,\n\n destination: Message_MessageType,\n\n correlation_id: &str,\n\n contents: &[u8],\n\n ) -> Result<MessageFuture, SendError>;\n\n\n\n fn reply(\n\n &mut self,\n\n destination: Message_MessageType,\n\n correlation_id: &str,\n\n contents: &[u8],\n\n ) -> Result<(), SendError>;\n\n\n\n fn close(&mut self);\n\n}\n\n\n\n/// Result for a message received.\n\npub type MessageResult = Result<Message, ReceiveError>;\n\n\n\n/// A message Receiver\n\npub type MessageReceiver = Receiver<MessageResult>;\n\n\n", "file_path": "sdk/rust/src/messaging/stream.rs", "rank": 90, "score": 92767.72905235576 }, { "content": "/// A public key instance.\n\n/// The underlying content is dependent on implementation.\n\npub trait PublicKey {\n\n /// Returns the algorithm name used for this public key.\n\n fn get_algorithm_name(&self) -> &str;\n\n /// Return the public key encoded as a hex string.\n\n fn as_hex(&self) -> String;\n\n /// Return the 
public key bytes.\n\n fn as_slice(&self) -> &[u8];\n\n}\n\n\n", "file_path": "sdk/rust/src/signing/mod.rs", "rank": 91, "score": 92767.72905235576 }, { "content": " def test_requires_genesis_fails_if_joins_network_with_file(self):\n\n \"\"\"\n\n In this case, when there is\n\n - a genesis_batch_file\n\n - network id\n\n the validator should produce an assertion error, as it is joining\n\n a network, and not a genesis node.\n\n \"\"\"\n\n self._with_empty_batch_file()\n\n self._with_network_name('some_block_chain_id')\n\n\n\n block_store = self.make_block_store()\n\n\n\n genesis_ctrl = GenesisController(\n\n Mock('context_manager'),\n\n Mock('txn_executor'),\n\n Mock('completer'),\n\n block_store,\n\n StateViewFactory(DictDatabase()),\n\n self._signer,\n\n data_dir=self._temp_dir,\n\n config_dir=self._temp_dir,\n\n chain_id_manager=ChainIdManager(self._temp_dir),\n\n batch_sender=Mock('batch_sender'))\n\n\n\n with self.assertRaises(InvalidGenesisStateError):\n", "file_path": "validator/tests/test_genesis/tests.py", "rank": 92, "score": 92315.61033180225 }, { "content": " def __str__(self):\n", "file_path": "consensus/poet/common/sawtooth_poet_common/sgx_structs/_sgx_key_id.py", "rank": 93, "score": 92229.68785690186 }, { "content": " def __str__(self):\n", "file_path": "consensus/poet/common/sawtooth_poet_common/sgx_structs/_sgx_cpu_svn.py", "rank": 94, "score": 92229.68785690186 }, { "content": " def __str__(self):\n\n return \\\n\n 'SGX_REPORT_BODY: cpu_svn={{{}}}, misc_select={}, ' \\\n\n 'attributes={{{}}}, mr_enclave={{{}}}, mr_signer={{{}}}, ' \\\n\n 'isv_prod_id={}, isv_svn={}, report_data={{{}}}'.format(\n\n self.cpu_svn,\n\n self.misc_select,\n\n self.attributes,\n\n self.mr_enclave,\n\n self.mr_signer,\n\n self.isv_prod_id,\n\n self.isv_svn,\n", "file_path": "consensus/poet/common/sawtooth_poet_common/sgx_structs/_sgx_report_body.py", "rank": 95, "score": 92229.68785690186 }, { "content": " def __str__(self):\n", "file_path": 
"consensus/poet/common/sawtooth_poet_common/sgx_structs/_sgx_report_data.py", "rank": 96, "score": 92229.68785690186 }, { "content": "struct SignatureTracker<T>\n\nwhere\n\n T: Eq + Hash,\n\n{\n\n signature_by_id: HashMap<T, String>,\n\n}\n\n\n\nimpl<T> SignatureTracker<T>\n\nwhere\n\n T: Eq + Hash,\n\n{\n\n pub fn new() -> SignatureTracker<T> {\n\n SignatureTracker {\n\n signature_by_id: HashMap::new(),\n\n }\n\n }\n\n\n\n pub fn get_signature(&self, id: &T) -> Option<&String> {\n\n self.signature_by_id.get(id)\n\n }\n", "file_path": "perf/smallbank_workload/src/smallbank_tranformer.rs", "rank": 97, "score": 91609.36934433231 }, { "content": "/// Generates a random correlation id for use in Message\n\nfn generate_correlation_id() -> String {\n\n const LENGTH: usize = 16;\n\n rand::thread_rng().gen_ascii_chars().take(LENGTH).collect()\n\n}\n\n\n\npub struct TransactionProcessor<'a> {\n\n endpoint: String,\n\n conn: ZmqMessageConnection,\n\n handlers: Vec<&'a TransactionHandler>,\n\n}\n\n\n\nimpl<'a> TransactionProcessor<'a> {\n\n /// TransactionProcessor is for communicating with a\n\n /// validator and routing transaction processing requests to a registered\n\n /// handler. It uses ZMQ and channels to handle requests concurrently.\n\n pub fn new(endpoint: &str) -> TransactionProcessor {\n\n TransactionProcessor {\n\n endpoint: String::from(endpoint),\n\n conn: ZmqMessageConnection::new(endpoint),\n\n handlers: Vec::new(),\n", "file_path": "sdk/rust/src/processor/mod.rs", "rank": 98, "score": 91559.37645828014 }, { "content": "def write_batch_file(args, batches):\n\n batch_list = batch_pb2.BatchList(batches=batches)\n\n print(\"Writing to {}...\".format(args.output))\n\n with open(args.output, \"wb\") as fd:\n", "file_path": "sdk/examples/intkey_python/sawtooth_intkey/client_cli/create_batch.py", "rank": 99, "score": 90737.80004075855 } ]
Rust
actix-web/src/test/test_request.rs
niklasha/actix-web
2f13e5f67579238761aba34e35786026ce4c805c
use std::{borrow::Cow, net::SocketAddr, rc::Rc}; use actix_http::{test::TestRequest as HttpTestRequest, Request}; use serde::Serialize; use crate::{ app_service::AppInitServiceState, config::AppConfig, data::Data, dev::{Extensions, Path, Payload, ResourceDef, Service, Url}, http::header::ContentType, http::{header::TryIntoHeaderPair, Method, Uri, Version}, rmap::ResourceMap, service::{ServiceRequest, ServiceResponse}, test, web::Bytes, HttpRequest, HttpResponse, }; #[cfg(feature = "cookies")] use crate::cookie::{Cookie, CookieJar}; pub struct TestRequest { req: HttpTestRequest, rmap: ResourceMap, config: AppConfig, path: Path<Url>, peer_addr: Option<SocketAddr>, app_data: Extensions, #[cfg(feature = "cookies")] cookies: CookieJar, } impl Default for TestRequest { fn default() -> TestRequest { TestRequest { req: HttpTestRequest::default(), rmap: ResourceMap::new(ResourceDef::new("")), config: AppConfig::default(), path: Path::new(Url::new(Uri::default())), peer_addr: None, app_data: Extensions::new(), #[cfg(feature = "cookies")] cookies: CookieJar::new(), } } } #[allow(clippy::wrong_self_convention)] impl TestRequest { pub fn with_uri(path: &str) -> TestRequest { TestRequest::default().uri(path) } pub fn get() -> TestRequest { TestRequest::default().method(Method::GET) } pub fn post() -> TestRequest { TestRequest::default().method(Method::POST) } pub fn put() -> TestRequest { TestRequest::default().method(Method::PUT) } pub fn patch() -> TestRequest { TestRequest::default().method(Method::PATCH) } pub fn delete() -> TestRequest { TestRequest::default().method(Method::DELETE) } pub fn version(mut self, ver: Version) -> Self { self.req.version(ver); self } pub fn method(mut self, meth: Method) -> Self { self.req.method(meth); self } pub fn uri(mut self, path: &str) -> Self { self.req.uri(path); self } pub fn insert_header(mut self, header: impl TryIntoHeaderPair) -> Self { self.req.insert_header(header); self } pub fn append_header(mut self, header: impl 
TryIntoHeaderPair) -> Self { self.req.append_header(header); self } #[cfg(feature = "cookies")] pub fn cookie(mut self, cookie: Cookie<'_>) -> Self { self.cookies.add(cookie.into_owned()); self } pub fn param( mut self, name: impl Into<Cow<'static, str>>, value: impl Into<Cow<'static, str>>, ) -> Self { self.path.add_static(name, value); self } pub fn peer_addr(mut self, addr: SocketAddr) -> Self { self.peer_addr = Some(addr); self } pub fn set_payload(mut self, data: impl Into<Bytes>) -> Self { self.req.set_payload(data); self } pub fn set_form(mut self, data: impl Serialize) -> Self { let bytes = serde_urlencoded::to_string(&data) .expect("Failed to serialize test data as a urlencoded form"); self.req.set_payload(bytes); self.req.insert_header(ContentType::form_url_encoded()); self } pub fn set_json(mut self, data: impl Serialize) -> Self { let bytes = serde_json::to_string(&data).expect("Failed to serialize test data to json"); self.req.set_payload(bytes); self.req.insert_header(ContentType::json()); self } pub fn data<T: 'static>(mut self, data: T) -> Self { self.app_data.insert(Data::new(data)); self } pub fn app_data<T: 'static>(mut self, data: T) -> Self { self.app_data.insert(data); self } #[cfg(test)] pub(crate) fn rmap(mut self, rmap: ResourceMap) -> Self { self.rmap = rmap; self } fn finish(&mut self) -> Request { #[allow(unused_mut)] let mut req = self.req.finish(); #[cfg(feature = "cookies")] { use actix_http::header::{HeaderValue, COOKIE}; let cookie: String = self .cookies .delta() .map(|c| c.stripped().encoded().to_string()) .collect::<Vec<_>>() .join("; "); if !cookie.is_empty() { req.headers_mut() .insert(COOKIE, HeaderValue::from_str(&cookie).unwrap()); } } req } pub fn to_request(mut self) -> Request { let mut req = self.finish(); req.head_mut().peer_addr = self.peer_addr; req } pub fn to_srv_request(mut self) -> ServiceRequest { let (mut head, payload) = self.finish().into_parts(); head.peer_addr = self.peer_addr; 
self.path.get_mut().update(&head.uri); let app_state = AppInitServiceState::new(Rc::new(self.rmap), self.config.clone()); ServiceRequest::new( HttpRequest::new( self.path, head, app_state, Rc::new(self.app_data), None, Default::default(), ), payload, ) } pub fn to_srv_response<B>(self, res: HttpResponse<B>) -> ServiceResponse<B> { self.to_srv_request().into_response(res) } pub fn to_http_request(mut self) -> HttpRequest { let (mut head, _) = self.finish().into_parts(); head.peer_addr = self.peer_addr; self.path.get_mut().update(&head.uri); let app_state = AppInitServiceState::new(Rc::new(self.rmap), self.config.clone()); HttpRequest::new( self.path, head, app_state, Rc::new(self.app_data), None, Default::default(), ) } pub fn to_http_parts(mut self) -> (HttpRequest, Payload) { let (mut head, payload) = self.finish().into_parts(); head.peer_addr = self.peer_addr; self.path.get_mut().update(&head.uri); let app_state = AppInitServiceState::new(Rc::new(self.rmap), self.config.clone()); let req = HttpRequest::new( self.path, head, app_state, Rc::new(self.app_data), None, Default::default(), ); (req, payload) } pub async fn send_request<S, B, E>(self, app: &S) -> S::Response where S: Service<Request, Response = ServiceResponse<B>, Error = E>, E: std::fmt::Debug, { let req = self.to_request(); test::call_service(app, req).await } #[cfg(test)] pub fn set_server_hostname(&mut self, host: &str) { self.config.set_host(host) } } #[cfg(test)] mod tests { use std::time::SystemTime; use super::*; use crate::{http::header, test::init_service, web, App, Error, HttpResponse, Responder}; #[actix_rt::test] async fn test_basics() { let req = TestRequest::default() .version(Version::HTTP_2) .insert_header(header::ContentType::json()) .insert_header(header::Date(SystemTime::now().into())) .param("test", "123") .data(10u32) .app_data(20u64) .peer_addr("127.0.0.1:8081".parse().unwrap()) .to_http_request(); assert!(req.headers().contains_key(header::CONTENT_TYPE)); 
assert!(req.headers().contains_key(header::DATE)); assert_eq!( req.head().peer_addr, Some("127.0.0.1:8081".parse().unwrap()) ); assert_eq!(&req.match_info()["test"], "123"); assert_eq!(req.version(), Version::HTTP_2); let data = req.app_data::<Data<u32>>().unwrap(); assert!(req.app_data::<Data<u64>>().is_none()); assert_eq!(*data.get_ref(), 10); assert!(req.app_data::<u32>().is_none()); let data = req.app_data::<u64>().unwrap(); assert_eq!(*data, 20); } #[actix_rt::test] async fn test_send_request() { let app = init_service( App::new().service( web::resource("/index.html") .route(web::get().to(|| HttpResponse::Ok().body("welcome!"))), ), ) .await; let resp = TestRequest::get() .uri("/index.html") .send_request(&app) .await; let result = test::read_body(resp).await; assert_eq!(result, Bytes::from_static(b"welcome!")); } #[actix_rt::test] async fn test_async_with_block() { async fn async_with_block() -> Result<HttpResponse, Error> { let res = web::block(move || Some(4usize).ok_or("wrong")).await; match res { Ok(value) => Ok(HttpResponse::Ok() .content_type("text/plain") .body(format!("Async with block value: {:?}", value))), Err(_) => panic!("Unexpected"), } } let app = init_service(App::new().service(web::resource("/index.html").to(async_with_block))) .await; let req = TestRequest::post().uri("/index.html").to_request(); let res = app.call(req).await.unwrap(); assert!(res.status().is_success()); } #[allow(deprecated)] #[actix_rt::test] async fn test_server_data() { async fn handler(data: web::Data<usize>) -> impl Responder { assert_eq!(**data, 10); HttpResponse::Ok() } let app = init_service( App::new() .data(10usize) .service(web::resource("/index.html").to(handler)), ) .await; let req = TestRequest::post().uri("/index.html").to_request(); let res = app.call(req).await.unwrap(); assert!(res.status().is_success()); } }
use std::{borrow::Cow, net::SocketAddr, rc::Rc}; use actix_http::{test::TestRequest as HttpTestRequest, Request}; use serde::Serialize; use crate::{ app_service::AppInitServiceState, config::AppConfig, data::Data, dev::{Extensions, Path, Payload, ResourceDef, Service, Url}, http::header::ContentType, http::{header::TryIntoHeaderPair, Method, Uri, Version}, rmap::ResourceMap, service::{ServiceRequest, ServiceResponse}, test, web::Bytes, HttpRequest, HttpResponse, }; #[cfg(feature = "cookies")] use crate::cookie::{Cookie, CookieJar}; pub struct TestRequest { req: HttpTestRequest, rmap: ResourceMap, config: AppConfig, path: Path<Url>, peer_addr: Option<SocketAddr>, app_data: Extensions, #[cfg(feature = "cookies")] cookies: CookieJar, } impl Default for TestRequest { fn default() -> TestRequest { TestRequest { req: HttpTestRequest::default(), rmap: ResourceMap::new(ResourceDef::new("")), config: AppConfig::default(), path: Path::new(Url::new(Uri::default())), peer_addr: None, app_data: Extensions::new(), #[cfg(feature = "cookies")] cookies: CookieJar::new(), } } } #[allow(clippy::wrong_self_convention)] impl TestRequest { pub fn with_uri(path: &str) -> TestRequest { TestRequest::default().uri(path) } pub fn get() -> TestRequest { TestRequest::default().method(Method::GET) } pub fn post() -> TestRequest { TestRequest::default().method(Method::POST) } pub fn put() -> TestRequest { TestRequest::default().method(Method::PUT) } pub fn patch() -> TestRequest { TestRequest::default().method(Method::PATCH) } pub fn delete() -> TestRequest { TestRequest::default().method(Method::DELETE) } pub fn version(mut self, ver: Version) -> Self { self.req.version(ver); self } pub fn method(mut self, meth: Method) -> Self { self.req.method(meth); self } pub fn uri(mut self, path: &str) -> Self { self.req.uri(path); self } pub fn insert_header(mut self, header: impl TryIntoHeaderPair) -> Self { self.req.insert_header(header); self } pub fn append_header(mut self, header: impl 
TryIntoHeaderPair) -> Self { self.req.append_header(header); self } #[cfg(feature = "cookies")] pub fn cookie(mut self, cookie: Cookie<'_>) -> Self { self.cookies.add(cookie.into_owned()); self } pub fn param( mut self, name: impl Into<Cow<'static, str>>, value: impl Into<Cow<'static, str>>, ) -> Self { self.path.add_static(name, value); self } pub fn peer_addr(mut self, addr: SocketAddr) -> Self { self.peer_addr = Some(addr); self } pub fn set_payload(mut self, data: impl Into<Bytes>) -> Self { self.req.set_payload(data); self } pub fn set_form(mut self, data: impl Serialize) -> Self { let bytes = serde_urlencoded::to_string(&data) .expect("Failed to serialize test data as a urlencoded form"); self.req.set_payload(bytes); self.req.insert_header(ContentType::form_url_encoded()); self } pub fn set_json(mut self, data: impl Serialize) -> Self { let bytes = serde_json::to_string(&data).expect("Failed to serialize test data to json"); self.req.set_payload(bytes); self.req.insert_header(ContentType::json()); self } pub fn data<T: 'static>(mut self, data: T) -> Self { self.app_data.insert(Data::new(data)); self } pub fn app_data<T: 'static>(mut self, data: T) -> Self { self.app_data.insert(data); self } #[cfg(test)] pub(crate) fn rmap(mut self, rmap: ResourceMap) -> Self { self.rmap = rmap; self }
pub fn to_request(mut self) -> Request { let mut req = self.finish(); req.head_mut().peer_addr = self.peer_addr; req } pub fn to_srv_request(mut self) -> ServiceRequest { let (mut head, payload) = self.finish().into_parts(); head.peer_addr = self.peer_addr; self.path.get_mut().update(&head.uri); let app_state = AppInitServiceState::new(Rc::new(self.rmap), self.config.clone()); ServiceRequest::new( HttpRequest::new( self.path, head, app_state, Rc::new(self.app_data), None, Default::default(), ), payload, ) } pub fn to_srv_response<B>(self, res: HttpResponse<B>) -> ServiceResponse<B> { self.to_srv_request().into_response(res) } pub fn to_http_request(mut self) -> HttpRequest { let (mut head, _) = self.finish().into_parts(); head.peer_addr = self.peer_addr; self.path.get_mut().update(&head.uri); let app_state = AppInitServiceState::new(Rc::new(self.rmap), self.config.clone()); HttpRequest::new( self.path, head, app_state, Rc::new(self.app_data), None, Default::default(), ) } pub fn to_http_parts(mut self) -> (HttpRequest, Payload) { let (mut head, payload) = self.finish().into_parts(); head.peer_addr = self.peer_addr; self.path.get_mut().update(&head.uri); let app_state = AppInitServiceState::new(Rc::new(self.rmap), self.config.clone()); let req = HttpRequest::new( self.path, head, app_state, Rc::new(self.app_data), None, Default::default(), ); (req, payload) } pub async fn send_request<S, B, E>(self, app: &S) -> S::Response where S: Service<Request, Response = ServiceResponse<B>, Error = E>, E: std::fmt::Debug, { let req = self.to_request(); test::call_service(app, req).await } #[cfg(test)] pub fn set_server_hostname(&mut self, host: &str) { self.config.set_host(host) } } #[cfg(test)] mod tests { use std::time::SystemTime; use super::*; use crate::{http::header, test::init_service, web, App, Error, HttpResponse, Responder}; #[actix_rt::test] async fn test_basics() { let req = TestRequest::default() .version(Version::HTTP_2) .insert_header(header::ContentType::json()) 
.insert_header(header::Date(SystemTime::now().into())) .param("test", "123") .data(10u32) .app_data(20u64) .peer_addr("127.0.0.1:8081".parse().unwrap()) .to_http_request(); assert!(req.headers().contains_key(header::CONTENT_TYPE)); assert!(req.headers().contains_key(header::DATE)); assert_eq!( req.head().peer_addr, Some("127.0.0.1:8081".parse().unwrap()) ); assert_eq!(&req.match_info()["test"], "123"); assert_eq!(req.version(), Version::HTTP_2); let data = req.app_data::<Data<u32>>().unwrap(); assert!(req.app_data::<Data<u64>>().is_none()); assert_eq!(*data.get_ref(), 10); assert!(req.app_data::<u32>().is_none()); let data = req.app_data::<u64>().unwrap(); assert_eq!(*data, 20); } #[actix_rt::test] async fn test_send_request() { let app = init_service( App::new().service( web::resource("/index.html") .route(web::get().to(|| HttpResponse::Ok().body("welcome!"))), ), ) .await; let resp = TestRequest::get() .uri("/index.html") .send_request(&app) .await; let result = test::read_body(resp).await; assert_eq!(result, Bytes::from_static(b"welcome!")); } #[actix_rt::test] async fn test_async_with_block() { async fn async_with_block() -> Result<HttpResponse, Error> { let res = web::block(move || Some(4usize).ok_or("wrong")).await; match res { Ok(value) => Ok(HttpResponse::Ok() .content_type("text/plain") .body(format!("Async with block value: {:?}", value))), Err(_) => panic!("Unexpected"), } } let app = init_service(App::new().service(web::resource("/index.html").to(async_with_block))) .await; let req = TestRequest::post().uri("/index.html").to_request(); let res = app.call(req).await.unwrap(); assert!(res.status().is_success()); } #[allow(deprecated)] #[actix_rt::test] async fn test_server_data() { async fn handler(data: web::Data<usize>) -> impl Responder { assert_eq!(**data, 10); HttpResponse::Ok() } let app = init_service( App::new() .data(10usize) .service(web::resource("/index.html").to(handler)), ) .await; let req = 
TestRequest::post().uri("/index.html").to_request(); let res = app.call(req).await.unwrap(); assert!(res.status().is_success()); } }
fn finish(&mut self) -> Request { #[allow(unused_mut)] let mut req = self.req.finish(); #[cfg(feature = "cookies")] { use actix_http::header::{HeaderValue, COOKIE}; let cookie: String = self .cookies .delta() .map(|c| c.stripped().encoded().to_string()) .collect::<Vec<_>>() .join("; "); if !cookie.is_empty() { req.headers_mut() .insert(COOKIE, HeaderValue::from_str(&cookie).unwrap()); } } req }
function_block-full_function
[ { "content": "#[allow(non_snake_case)]\n\npub fn Header(name: &'static str, value: &'static str) -> impl Guard {\n\n HeaderGuard(\n\n header::HeaderName::try_from(name).unwrap(),\n\n header::HeaderValue::from_static(value),\n\n )\n\n}\n\n\n", "file_path": "actix-web/src/guard.rs", "rank": 0, "score": 403653.05893073 }, { "content": "/// Extracts and trims first value for given header name.\n\nfn first_header_value<'a>(req: &'a RequestHead, name: &'_ HeaderName) -> Option<&'a str> {\n\n let hdr = req.headers.get(name)?.to_str().ok()?;\n\n let val = hdr.split(',').next()?.trim();\n\n Some(val)\n\n}\n\n\n\n/// HTTP connection information.\n\n///\n\n/// `ConnectionInfo` implements `FromRequest` and can be extracted in handlers.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// # use actix_web::{HttpResponse, Responder};\n\n/// use actix_web::dev::ConnectionInfo;\n\n///\n\n/// async fn handler(conn: ConnectionInfo) -> impl Responder {\n\n/// match conn.host() {\n\n/// \"actix.rs\" => HttpResponse::Ok().body(\"Welcome!\"),\n\n/// \"admin.actix.rs\" => HttpResponse::Ok().body(\"Admin portal.\"),\n\n/// _ => HttpResponse::NotFound().finish()\n", "file_path": "actix-web/src/info.rs", "rank": 1, "score": 364976.8168859147 }, { "content": "/// Criterion Benchmark for async Service\n\n/// Should be used from within criterion group:\n\n/// ```ignore\n\n/// let mut criterion: ::criterion::Criterion<_> =\n\n/// ::criterion::Criterion::default().configure_from_args();\n\n/// bench_async_service(&mut criterion, ok_service(), \"async_service_direct\");\n\n/// ```\n\n///\n\n/// Usable for benching Service wrappers:\n\n/// Using minimum service code implementation we first measure\n\n/// time to run minimum service, then measure time with wrapper.\n\n///\n\n/// Sample output\n\n/// async_service_direct time: [1.0908 us 1.1656 us 1.2613 us]\n\npub fn bench_async_service<S>(c: &mut Criterion, srv: S, name: &str)\n\nwhere\n\n S: Service<ServiceRequest, Response = ServiceResponse, Error = 
Error> + 'static,\n\n{\n\n let rt = actix_rt::System::new();\n\n let srv = Rc::new(RefCell::new(srv));\n\n\n\n let req = TestRequest::default().to_srv_request();\n\n assert!(rt\n\n .block_on(srv.borrow_mut().call(req))\n\n .unwrap()\n\n .status()\n\n .is_success());\n\n\n\n // start benchmark loops\n\n c.bench_function(name, move |b| {\n\n b.iter_custom(|iters| {\n\n let srv = srv.clone();\n\n // exclude request generation, it appears it takes significant time vs call (3us vs 1us)\n\n let futs = (0..iters)\n", "file_path": "actix-web/benches/service.rs", "rank": 2, "score": 332107.96409040573 }, { "content": "fn echo_payload_service() -> impl Service<Request, Response = Response<Bytes>, Error = Error> {\n\n fn_service(|mut req: Request| {\n\n Box::pin(async move {\n\n use futures_util::stream::StreamExt as _;\n\n\n\n let mut pl = req.take_payload();\n\n let mut body = BytesMut::new();\n\n while let Some(chunk) = pl.next().await {\n\n body.extend_from_slice(chunk.unwrap().chunk())\n\n }\n\n\n\n Ok::<_, Error>(Response::ok().set_body(body.freeze()))\n\n })\n\n })\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn late_request() {\n\n let mut buf = TestBuffer::empty();\n\n\n", "file_path": "actix-http/src/h1/dispatcher_tests.rs", "rank": 3, "score": 329259.60651130695 }, { "content": "/// Returns true if `req` doesn't have an `If-None-Match` header matching `req`.\n\nfn none_match(etag: Option<&header::EntityTag>, req: &HttpRequest) -> bool {\n\n match req.get_header::<header::IfNoneMatch>() {\n\n Some(header::IfNoneMatch::Any) => false,\n\n\n\n Some(header::IfNoneMatch::Items(ref items)) => {\n\n if let Some(some_etag) = etag {\n\n for item in items {\n\n if item.weak_eq(some_etag) {\n\n return false;\n\n }\n\n }\n\n }\n\n\n\n true\n\n }\n\n\n\n None => true,\n\n }\n\n}\n\n\n", "file_path": "actix-files/src/named.rs", "rank": 4, "score": 310375.59136227955 }, { "content": "#[doc(hidden)]\n\n#[deprecated(since = \"4.0.0\", note = \"Renamed to `status_service`.\")]\n\npub fn 
default_service(\n\n status_code: StatusCode,\n\n) -> impl Service<ServiceRequest, Response = ServiceResponse<BoxBody>, Error = Error> {\n\n status_service(status_code)\n\n}\n", "file_path": "actix-web/src/test/test_services.rs", "rank": 5, "score": 310368.15212980896 }, { "content": "fn http_msg(msg: impl AsRef<str>) -> BytesMut {\n\n let mut msg = msg\n\n .as_ref()\n\n .trim()\n\n .split('\\n')\n\n .into_iter()\n\n .map(|line| [line.trim_start(), \"\\r\"].concat())\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\");\n\n\n\n // remove trailing \\r\n\n msg.pop();\n\n\n\n if !msg.is_empty() && !msg.contains(\"\\r\\n\\r\\n\") {\n\n msg.push_str(\"\\r\\n\\r\\n\");\n\n }\n\n\n\n BytesMut::from(msg.as_bytes())\n\n}\n\n\n", "file_path": "actix-http/src/h1/dispatcher_tests.rs", "rank": 6, "score": 304808.65569718205 }, { "content": "/// Parses extended header parameter values (`ext-value`), as defined\n\n/// in [RFC 5987 §3.2](https://datatracker.ietf.org/doc/html/rfc5987#section-3.2).\n\n///\n\n/// Extended values are denoted by parameter names that end with `*`.\n\n///\n\n/// ## ABNF\n\n///\n\n/// ```plain\n\n/// ext-value = charset \"'\" [ language ] \"'\" value-chars\n\n/// ; like RFC 2231's <extended-initial-value>\n\n/// ; (see [RFC 2231 §7])\n\n///\n\n/// charset = \"UTF-8\" / \"ISO-8859-1\" / mime-charset\n\n///\n\n/// mime-charset = 1*mime-charsetc\n\n/// mime-charsetc = ALPHA / DIGIT\n\n/// / \"!\" / \"#\" / \"$\" / \"%\" / \"&\"\n\n/// / \"+\" / \"-\" / \"^\" / \"_\" / \"`\"\n\n/// / \"{\" / \"}\" / \"~\"\n\n/// ; as <mime-charset> in [RFC 2978 §2.3]\n\n/// ; except that the single quote is not included\n\n/// ; SHOULD be registered in the IANA charset registry\n\n///\n\n/// language = <Language-Tag, defined in [RFC 5646 §2.1]>\n\n///\n\n/// value-chars = *( pct-encoded / attr-char )\n\n///\n\n/// pct-encoded = \"%\" HEXDIG HEXDIG\n\n/// ; see [RFC 3986 §2.1]\n\n///\n\n/// attr-char = ALPHA / DIGIT\n\n/// / \"!\" / \"#\" / \"$\" / \"&\" / \"+\" / \"-\" / \".\"\n\n/// 
/ \"^\" / \"_\" / \"`\" / \"|\" / \"~\"\n\n/// ; token except ( \"*\" / \"'\" / \"%\" )\n\n/// ```\n\n///\n\n/// [RFC 2231 §7]: https://datatracker.ietf.org/doc/html/rfc2231#section-7\n\n/// [RFC 2978 §2.3]: https://datatracker.ietf.org/doc/html/rfc2978#section-2.3\n\n/// [RFC 3986 §2.1]: https://datatracker.ietf.org/doc/html/rfc5646#section-2.1\n\npub fn parse_extended_value(val: &str) -> Result<ExtendedValue, crate::error::ParseError> {\n\n // Break into three pieces separated by the single-quote character\n\n let mut parts = val.splitn(3, '\\'');\n\n\n\n // Interpret the first piece as a Charset\n\n let charset: Charset = match parts.next() {\n\n None => return Err(crate::error::ParseError::Header),\n\n Some(n) => FromStr::from_str(n).map_err(|_| crate::error::ParseError::Header)?,\n\n };\n\n\n\n // Interpret the second piece as a language tag\n\n let language_tag: Option<LanguageTag> = match parts.next() {\n\n None => return Err(crate::error::ParseError::Header),\n\n Some(\"\") => None,\n\n Some(s) => match s.parse() {\n\n Ok(lt) => Some(lt),\n\n Err(_) => return Err(crate::error::ParseError::Header),\n\n },\n\n };\n\n\n", "file_path": "actix-http/src/header/shared/extended.rs", "rank": 7, "score": 300345.1156687584 }, { "content": "fn get_host_uri(req: &RequestHead) -> Option<Uri> {\n\n req.headers\n\n .get(header::HOST)\n\n .and_then(|host_value| host_value.to_str().ok())\n\n .or_else(|| req.uri.host())\n\n .and_then(|host| host.parse().ok())\n\n}\n\n\n\n#[doc(hidden)]\n\npub struct HostGuard {\n\n host: String,\n\n scheme: Option<String>,\n\n}\n\n\n\nimpl HostGuard {\n\n /// Set request scheme to match\n\n pub fn scheme<H: AsRef<str>>(mut self, scheme: H) -> HostGuard {\n\n self.scheme = Some(scheme.as_ref().to_string());\n\n self\n\n }\n", "file_path": "actix-web/src/guard.rs", "rank": 8, "score": 293303.2702770509 }, { "content": "/// Create default test server config.\n\npub fn config() -> TestServerConfig {\n\n 
TestServerConfig::default()\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct TestServerConfig {\n\n tp: HttpVer,\n\n stream: StreamType,\n\n client_request_timeout: Duration,\n\n}\n\n\n\nimpl Default for TestServerConfig {\n\n fn default() -> Self {\n\n TestServerConfig::new()\n\n }\n\n}\n\n\n\nimpl TestServerConfig {\n\n /// Create default server configuration\n\n pub(crate) fn new() -> TestServerConfig {\n", "file_path": "actix-test/src/lib.rs", "rank": 9, "score": 287761.3800641381 }, { "content": "#[allow(non_snake_case)]\n\npub fn Method(method: HttpMethod) -> impl Guard {\n\n MethodGuard(method)\n\n}\n\n\n", "file_path": "actix-web/src/guard.rs", "rank": 10, "score": 287712.65549728216 }, { "content": "fn stabilize_date_header(payload: &mut [u8]) {\n\n let mut from = 0;\n\n while let Some(pos) = find_slice(payload, b\"date\", from) {\n\n payload[(from + pos)..(from + pos + 35)]\n\n .copy_from_slice(b\"date: Thu, 01 Jan 1970 12:34:56 UTC\");\n\n from += 35;\n\n }\n\n}\n\n\n", "file_path": "actix-http/src/h1/dispatcher_tests.rs", "rank": 11, "score": 284337.9787349533 }, { "content": "fn remove_sensitive_headers(headers: &mut header::HeaderMap, prev_uri: &Uri, next_uri: &Uri) {\n\n if next_uri.host() != prev_uri.host()\n\n || next_uri.port() != prev_uri.port()\n\n || next_uri.scheme() != prev_uri.scheme()\n\n {\n\n headers.remove(header::COOKIE);\n\n headers.remove(header::AUTHORIZATION);\n\n headers.remove(header::PROXY_AUTHORIZATION);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::str::FromStr;\n\n\n\n use actix_web::{web, App, Error, HttpRequest, HttpResponse};\n\n\n\n use super::*;\n\n use crate::{http::header::HeaderValue, ClientBuilder};\n\n\n", "file_path": "awc/src/middleware/redirect.rs", "rank": 12, "score": 278228.7719641505 }, { "content": "/// Returns true if `req` has no `If-Match` header or one which matches `etag`.\n\nfn any_match(etag: Option<&header::EntityTag>, req: &HttpRequest) -> bool {\n\n match req.get_header::<header::IfMatch>() 
{\n\n None | Some(header::IfMatch::Any) => true,\n\n\n\n Some(header::IfMatch::Items(ref items)) => {\n\n if let Some(some_etag) = etag {\n\n for item in items {\n\n if item.strong_eq(some_etag) {\n\n return true;\n\n }\n\n }\n\n }\n\n\n\n false\n\n }\n\n }\n\n}\n\n\n", "file_path": "actix-files/src/named.rs", "rank": 13, "score": 275367.86005488766 }, { "content": "/// Creates scope for common path prefix.\n\n///\n\n/// Scopes collect multiple paths under a common path prefix. The scope's path can contain dynamic\n\n/// path segments.\n\n///\n\n/// # Avoid Trailing Slashes\n\n/// Avoid using trailing slashes in the scope prefix (e.g., `web::scope(\"/scope/\")`). It will almost\n\n/// certainly not have the expected behavior. See the [documentation on resource definitions][pat]\n\n/// to understand why this is the case and how to correctly construct scope/prefix definitions.\n\n///\n\n/// # Examples\n\n/// In this example, three routes are set up (and will handle any method):\n\n/// - `/{project_id}/path1`\n\n/// - `/{project_id}/path2`\n\n/// - `/{project_id}/path3`\n\n///\n\n/// ```\n\n/// use actix_web::{web, App, HttpResponse};\n\n///\n\n/// let app = App::new().service(\n\n/// web::scope(\"/{project_id}\")\n\n/// .service(web::resource(\"/path1\").to(|| HttpResponse::Ok()))\n\n/// .service(web::resource(\"/path2\").to(|| HttpResponse::Ok()))\n\n/// .service(web::resource(\"/path3\").to(|| HttpResponse::MethodNotAllowed()))\n\n/// );\n\n/// ```\n\n///\n\n/// [pat]: crate::dev::ResourceDef#prefix-resources\n\npub fn scope(path: &str) -> Scope {\n\n Scope::new(path)\n\n}\n\n\n", "file_path": "actix-web/src/web.rs", "rank": 14, "score": 273261.51674098964 }, { "content": "fn ok_service() -> impl Service<Request, Response = Response<impl MessageBody>, Error = Error> {\n\n status_service(StatusCode::OK)\n\n}\n\n\n", "file_path": "actix-http/src/h1/dispatcher_tests.rs", "rank": 15, "score": 270278.4310453339 }, { "content": "/// Creates service that always responds 
with `200 OK` and no body.\n\npub fn ok_service(\n\n) -> impl Service<ServiceRequest, Response = ServiceResponse<BoxBody>, Error = Error> {\n\n status_service(StatusCode::OK)\n\n}\n\n\n", "file_path": "actix-web/src/test/test_services.rs", "rank": 16, "score": 264965.80887501605 }, { "content": "#[doc(hidden)]\n\n#[deprecated(since = \"4.0.0\", note = \"Renamed to `status_service`.\")]\n\npub fn simple_service(\n\n status_code: StatusCode,\n\n) -> impl Service<ServiceRequest, Response = ServiceResponse<BoxBody>, Error = Error> {\n\n status_service(status_code)\n\n}\n\n\n", "file_path": "actix-web/src/test/test_services.rs", "rank": 17, "score": 264965.71647758846 }, { "content": "/// Creates service that always responds with given status code and no body.\n\npub fn status_service(\n\n status_code: StatusCode,\n\n) -> impl Service<ServiceRequest, Response = ServiceResponse<BoxBody>, Error = Error> {\n\n fn_service(move |req: ServiceRequest| {\n\n ok(req.into_response(HttpResponseBuilder::new(status_code).finish()))\n\n })\n\n}\n\n\n", "file_path": "actix-web/src/test/test_services.rs", "rank": 18, "score": 264965.71647758846 }, { "content": "/// Get a localhost socket address with random, unused port.\n\npub fn unused_addr() -> net::SocketAddr {\n\n let addr: net::SocketAddr = \"127.0.0.1:0\".parse().unwrap();\n\n let socket = Socket::new(Domain::IPV4, Type::STREAM, Some(Protocol::TCP)).unwrap();\n\n socket.bind(&addr.into()).unwrap();\n\n socket.set_reuse_address(true).unwrap();\n\n let tcp = net::TcpListener::from(socket);\n\n tcp.local_addr().unwrap()\n\n}\n", "file_path": "actix-http-test/src/lib.rs", "rank": 19, "score": 259624.4573219326 }, { "content": "struct HeaderGuard(header::HeaderName, header::HeaderValue);\n\n\n\nimpl Guard for HeaderGuard {\n\n fn check(&self, ctx: &GuardContext<'_>) -> bool {\n\n if let Some(val) = ctx.head().headers.get(&self.0) {\n\n return val == self.1;\n\n }\n\n\n\n false\n\n }\n\n}\n\n\n\n/// Creates a guard that matches 
requests targetting a specific host.\n\n///\n\n/// # Matching Host\n\n/// This guard will:\n\n/// - match against the `Host` header, if present;\n\n/// - fall-back to matching against the request target's host, if present;\n\n/// - return false if host cannot be determined;\n\n///\n", "file_path": "actix-web/src/guard.rs", "rank": 20, "score": 251182.29655931436 }, { "content": "pub fn get_negotiated_alpn_protocol(\n\n addr: SocketAddr,\n\n client_alpn_protocol: &[u8],\n\n) -> Option<Vec<u8>> {\n\n let mut config = rustls::ClientConfig::builder()\n\n .with_safe_defaults()\n\n .with_root_certificates(webpki_roots_cert_store())\n\n .with_no_client_auth();\n\n\n\n config.alpn_protocols.push(client_alpn_protocol.to_vec());\n\n\n\n let mut sess = rustls::ClientConnection::new(\n\n Arc::new(config),\n\n ServerName::try_from(\"localhost\").unwrap(),\n\n )\n\n .unwrap();\n\n\n\n let mut sock = StdTcpStream::connect(addr).unwrap();\n\n let mut stream = rustls::Stream::new(&mut sess, &mut sock);\n\n\n", "file_path": "actix-http/tests/test_rustls.rs", "rank": 21, "score": 248002.44039956725 }, { "content": "#[allow(non_snake_case)]\n\npub fn Host(host: impl AsRef<str>) -> HostGuard {\n\n HostGuard {\n\n host: host.as_ref().to_string(),\n\n scheme: None,\n\n }\n\n}\n\n\n", "file_path": "actix-web/src/guard.rs", "rank": 22, "score": 247757.0220159356 }, { "content": "/// Creates a raw service for a specific path.\n\n///\n\n/// ```\n\n/// use actix_web::{dev, web, guard, App, Error, HttpResponse};\n\n///\n\n/// async fn my_service(req: dev::ServiceRequest) -> Result<dev::ServiceResponse, Error> {\n\n/// Ok(req.into_response(HttpResponse::Ok().finish()))\n\n/// }\n\n///\n\n/// let app = App::new().service(\n\n/// web::service(\"/users/*\")\n\n/// .guard(guard::Header(\"content-type\", \"text/plain\"))\n\n/// .finish(my_service)\n\n/// );\n\n/// ```\n\npub fn service<T: IntoPatterns>(path: T) -> WebService {\n\n WebService::new(path)\n\n}\n\n\n", "file_path": 
"actix-web/src/web.rs", "rank": 23, "score": 240746.17530201952 }, { "content": "#[inline]\n\npub fn from_one_raw_str<T: FromStr>(val: Option<&HeaderValue>) -> Result<T, ParseError> {\n\n if let Some(line) = val {\n\n let line = line.to_str().map_err(|_| ParseError::Header)?;\n\n\n\n if !line.is_empty() {\n\n return T::from_str(line).or(Err(ParseError::Header));\n\n }\n\n }\n\n\n\n Err(ParseError::Header)\n\n}\n\n\n\n/// Format an array into a comma-delimited string.\n", "file_path": "actix-http/src/header/utils.rs", "rank": 24, "score": 240232.43700663836 }, { "content": "/// Creates a new route with specified method guard.\n\n///\n\n/// # Examples\n\n/// In this example, one `GET /{project_id}` route is set up:\n\n///\n\n/// ```\n\n/// use actix_web::{web, http, App, HttpResponse};\n\n///\n\n/// let app = App::new().service(\n\n/// web::resource(\"/{project_id}\")\n\n/// .route(web::method(http::Method::GET).to(|| HttpResponse::Ok()))\n\n/// );\n\n/// ```\n\npub fn method(method: Method) -> Route {\n\n Route::new().method(method)\n\n}\n\n\n", "file_path": "actix-web/src/web.rs", "rank": 25, "score": 239353.20664138466 }, { "content": "#[inline]\n\npub fn http_percent_encode(f: &mut fmt::Formatter<'_>, bytes: &[u8]) -> fmt::Result {\n\n let encoded = percent_encoding::percent_encode(bytes, HTTP_VALUE);\n\n fmt::Display::fmt(&encoded, f)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn comma_delimited_parsing() {\n\n let headers = vec![];\n\n let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();\n\n assert_eq!(res, vec![0; 0]);\n\n\n\n let headers = vec![\n\n HeaderValue::from_static(\"1, 2\"),\n\n HeaderValue::from_static(\"3,4\"),\n\n ];\n\n let res: Vec<usize> = from_comma_delimited(headers.iter()).unwrap();\n", "file_path": "actix-http/src/header/utils.rs", "rank": 26, "score": 238140.73136969813 }, { "content": "/// Prepare WebSocket handshake response.\n\n///\n\n/// This function returns handshake `HttpResponse`, 
ready to send to peer. It does not perform\n\n/// any IO.\n\npub fn handshake(req: &HttpRequest) -> Result<HttpResponseBuilder, HandshakeError> {\n\n handshake_with_protocols(req, &[])\n\n}\n\n\n", "file_path": "actix-web-actors/src/ws.rs", "rank": 27, "score": 233164.66872816102 }, { "content": "/// Create WebSocket handshake response.\n\n///\n\n/// This function returns handshake `Response`, ready to send to peer.\n\npub fn handshake_response(req: &RequestHead) -> ResponseBuilder {\n\n let key = {\n\n let key = req.headers().get(header::SEC_WEBSOCKET_KEY).unwrap();\n\n proto::hash_key(key.as_ref())\n\n };\n\n\n\n Response::build(StatusCode::SWITCHING_PROTOCOLS)\n\n .upgrade(\"websocket\")\n\n .insert_header((\n\n header::SEC_WEBSOCKET_ACCEPT,\n\n // key is known to be header value safe ascii\n\n HeaderValue::from_bytes(&key).unwrap(),\n\n ))\n\n .take()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{header, Method};\n\n\n", "file_path": "actix-http/src/ws/mod.rs", "rank": 28, "score": 232844.4141459845 }, { "content": "fn get_conn_info(connection: &dyn Any, data: &mut Extensions) {\n\n if let Some(sock) = connection.downcast_ref::<TcpStream>() {\n\n data.insert(ConnectionInfo {\n\n bind: sock.local_addr().unwrap(),\n\n peer: sock.peer_addr().unwrap(),\n\n ttl: sock.ttl().ok(),\n\n });\n\n } else {\n\n unreachable!(\"connection should only be plaintext since no TLS is set up\");\n\n }\n\n}\n\n\n\n#[actix_web::main]\n\nasync fn main() -> io::Result<()> {\n\n env_logger::init_from_env(env_logger::Env::new().default_filter_or(\"info\"));\n\n\n\n let bind = (\"127.0.0.1\", 8080);\n\n log::info!(\"staring server at http://{}:{}\", &bind.0, &bind.1);\n\n\n\n HttpServer::new(|| App::new().default_service(web::to(route_whoami)))\n\n .on_connect(get_conn_info)\n\n .bind(bind)?\n\n .workers(1)\n\n .run()\n\n .await\n\n}\n", "file_path": "actix-web/examples/on-connect.rs", "rank": 29, "score": 228079.32188867344 }, { "content": "/// Verify WebSocket handshake 
request.\n\npub fn verify_handshake(req: &RequestHead) -> Result<(), HandshakeError> {\n\n // WebSocket accepts only GET\n\n if req.method != Method::GET {\n\n return Err(HandshakeError::GetMethodRequired);\n\n }\n\n\n\n // Check for \"UPGRADE\" to WebSocket header\n\n let has_hdr = if let Some(hdr) = req.headers().get(header::UPGRADE) {\n\n if let Ok(s) = hdr.to_str() {\n\n s.to_ascii_lowercase().contains(\"websocket\")\n\n } else {\n\n false\n\n }\n\n } else {\n\n false\n\n };\n\n if !has_hdr {\n\n return Err(HandshakeError::NoWebsocketUpgrade);\n\n }\n\n\n", "file_path": "actix-http/src/ws/mod.rs", "rank": 30, "score": 224097.68168876774 }, { "content": "fn bench_quality_display_impls(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"quality value display impls\");\n\n\n\n for i in CODES.iter() {\n\n group.bench_with_input(BenchmarkId::new(\"New (fast?)\", i), i, |b, &i| {\n\n b.iter(|| _new::Quality(i).to_string())\n\n });\n\n\n\n group.bench_with_input(BenchmarkId::new(\"Naive\", i), i, |b, &i| {\n\n b.iter(|| _naive::Quality(i).to_string())\n\n });\n\n }\n\n\n\n group.finish();\n\n}\n\n\n\ncriterion_group!(benches, bench_quality_display_impls);\n\ncriterion_main!(benches);\n\n\n\nmod _new {\n", "file_path": "actix-http/benches/quality-value.rs", "rank": 31, "score": 221915.66560013962 }, { "content": "/// Verify WebSocket handshake request and create handshake response.\n\npub fn handshake(req: &RequestHead) -> Result<ResponseBuilder, HandshakeError> {\n\n verify_handshake(req)?;\n\n Ok(handshake_response(req))\n\n}\n\n\n", "file_path": "actix-http/src/ws/mod.rs", "rank": 32, "score": 216202.97456779354 }, { "content": "pub fn service_benches() {\n\n let mut criterion: ::criterion::Criterion<_> =\n\n ::criterion::Criterion::default().configure_from_args();\n\n bench_async_service(&mut criterion, ok_service(), \"async_service_direct\");\n\n async_web_service(&mut criterion);\n\n}\n\ncriterion_main!(service_benches);\n", "file_path": 
"actix-web/benches/service.rs", "rank": 33, "score": 214387.21222823035 }, { "content": "fn echo_path_service(\n\n) -> impl Service<Request, Response = Response<impl MessageBody>, Error = Error> {\n\n fn_service(|req: Request| {\n\n let path = req.path().as_bytes();\n\n ready(Ok::<_, Error>(\n\n Response::ok().set_body(Bytes::copy_from_slice(path)),\n\n ))\n\n })\n\n}\n\n\n", "file_path": "actix-http/src/h1/dispatcher_tests.rs", "rank": 34, "score": 212638.2842302105 }, { "content": "fn drop_payload_service(\n\n) -> impl Service<Request, Response = Response<&'static str>, Error = Error> {\n\n fn_service(|mut req: Request| async move {\n\n let _ = req.take_payload();\n\n Ok::<_, Error>(Response::with_body(StatusCode::OK, \"payload dropped\"))\n\n })\n\n}\n\n\n", "file_path": "actix-http/src/h1/dispatcher_tests.rs", "rank": 35, "score": 212638.2842302105 }, { "content": "fn payload_from_bytes(bytes: Bytes) -> dev::Payload {\n\n let (_, mut h1_payload) = actix_http::h1::Payload::create(true);\n\n h1_payload.unread_data(bytes);\n\n dev::Payload::from(h1_payload)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use serde::{Deserialize, Serialize};\n\n\n\n use super::*;\n\n use crate::{\n\n test::TestRequest,\n\n web::{Form, Json},\n\n };\n\n\n\n #[derive(Debug, Clone, Serialize, Deserialize)]\n\n struct TestForm {\n\n hello: String,\n\n }\n", "file_path": "actix-web/src/types/either.rs", "rank": 36, "score": 208253.528780652 }, { "content": "/// Write integer to a `fmt::Write`.\n\npub fn itoa_fmt<W: fmt::Write, V: itoa::Integer>(mut wr: W, value: V) -> fmt::Result {\n\n let mut buf = itoa::Buffer::new();\n\n wr.write_str(buf.format(value))\n\n}\n\n\n\n#[derive(Debug, Clone, Display, Error)]\n\n#[display(fmt = \"quality out of bounds\")]\n\n#[non_exhaustive]\n\npub struct QualityOutOfBounds;\n\n\n\nimpl TryFrom<f32> for Quality {\n\n type Error = QualityOutOfBounds;\n\n\n\n #[inline]\n\n fn try_from(value: f32) -> Result<Self, Self::Error> {\n\n if 
(0.0..=MAX_QUALITY_FLOAT).contains(&value) {\n\n Ok(Quality::from_f32(value))\n\n } else {\n\n Err(QualityOutOfBounds)\n\n }\n", "file_path": "actix-http/src/header/shared/quality.rs", "rank": 37, "score": 207963.771028437 }, { "content": "/// An interface for types that already represent a valid header.\n\npub trait Header: TryIntoHeaderValue {\n\n /// Returns the name of the header field.\n\n fn name() -> HeaderName;\n\n\n\n /// Parse the header from a HTTP message.\n\n fn parse<M: HttpMessage>(msg: &M) -> Result<Self, ParseError>;\n\n}\n\n\n\n/// This encode set is used for HTTP header values and is defined at\n\n/// <https://datatracker.ietf.org/doc/html/rfc5987#section-3.2>.\n\npub(crate) const HTTP_VALUE: &AsciiSet = &CONTROLS\n\n .add(b' ')\n\n .add(b'\"')\n\n .add(b'%')\n\n .add(b'\\'')\n\n .add(b'(')\n\n .add(b')')\n\n .add(b'*')\n\n .add(b',')\n\n .add(b'/')\n", "file_path": "actix-http/src/header/mod.rs", "rank": 38, "score": 206631.15105222072 }, { "content": "/// Perform WebSocket handshake and start actor.\n\n///\n\n/// To customize options, see [`WsResponseBuilder`].\n\npub fn start<A, T>(actor: A, req: &HttpRequest, stream: T) -> Result<HttpResponse, Error>\n\nwhere\n\n A: Actor<Context = WebsocketContext<A>> + StreamHandler<Result<Message, ProtocolError>>,\n\n T: Stream<Item = Result<Bytes, PayloadError>> + 'static,\n\n{\n\n let mut res = handshake(req)?;\n\n Ok(res.streaming(WebsocketContext::create(actor, stream)))\n\n}\n\n\n\n/// Perform WebSocket handshake and start actor.\n\n///\n\n/// `req` is an HTTP Request that should be requesting a websocket protocol change. 
`stream` should\n\n/// be a `Bytes` stream (such as `actix_web::web::Payload`) that contains a stream of the\n\n/// body request.\n\n///\n\n/// If there is a problem with the handshake, an error is returned.\n\n///\n\n/// If successful, returns a pair where the first item is an address for the created actor and the\n\n/// second item is the response that should be returned from the WebSocket request.\n", "file_path": "actix-web-actors/src/ws.rs", "rank": 39, "score": 203637.24860963822 }, { "content": "/// Creates a guard using the given function.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use actix_web::{guard, web, HttpResponse};\n\n///\n\n/// web::route()\n\n/// .guard(guard::fn_guard(|ctx| {\n\n/// ctx.head().headers().contains_key(\"content-type\")\n\n/// }))\n\n/// .to(|| HttpResponse::Ok());\n\n/// ```\n\npub fn fn_guard<F>(f: F) -> impl Guard\n\nwhere\n\n F: Fn(&GuardContext<'_>) -> bool,\n\n{\n\n FnGuard(f)\n\n}\n\n\n", "file_path": "actix-web/src/guard.rs", "rank": 40, "score": 201677.69106729858 }, { "content": "type FormErrHandler = Option<Rc<dyn Fn(UrlencodedError, &HttpRequest) -> Error>>;\n\n\n\npub struct FormExtractFut<T> {\n\n fut: UrlEncoded<T>,\n\n err_handler: FormErrHandler,\n\n req: HttpRequest,\n\n}\n\n\n\nimpl<T> Future for FormExtractFut<T>\n\nwhere\n\n T: DeserializeOwned + 'static,\n\n{\n\n type Output = Result<Form<T>, Error>;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let this = self.get_mut();\n\n\n\n let res = ready!(Pin::new(&mut this.fut).poll(cx));\n\n\n\n let res = match res {\n", "file_path": "actix-web/src/types/form.rs", "rank": 41, "score": 201367.2167565227 }, { "content": "/// Start test server with custom configuration\n\n///\n\n/// Check [`TestServerConfig`] docs for configuration options.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use actix_web::{get, web, test, App, HttpResponse, Error, Responder};\n\n///\n\n/// #[get(\"/\")]\n\n/// async fn my_handler() -> Result<impl 
Responder, Error> {\n\n/// Ok(HttpResponse::Ok())\n\n/// }\n\n///\n\n/// #[actix_web::test]\n\n/// async fn test_example() {\n\n/// let srv = actix_test::start_with(actix_test::config().h1(), ||\n\n/// App::new().service(my_handler)\n\n/// );\n\n///\n\n/// let req = srv.get(\"/\");\n\n/// let res = req.send().await.unwrap();\n\n///\n\n/// assert!(res.status().is_success());\n\n/// }\n\n/// ```\n\npub fn start_with<F, I, S, B>(cfg: TestServerConfig, factory: F) -> TestServer\n\nwhere\n\n F: Fn() -> I + Send + Clone + 'static,\n\n I: IntoServiceFactory<S, Request>,\n\n S: ServiceFactory<Request, Config = AppConfig> + 'static,\n\n S::Error: Into<Error> + 'static,\n\n S::InitError: fmt::Debug,\n\n S::Response: Into<Response<B>> + 'static,\n\n <S::Service as Service<Request>>::Future: 'static,\n\n B: MessageBody + 'static,\n\n{\n\n // for sending handles and server info back from the spawned thread\n\n let (started_tx, started_rx) = std::sync::mpsc::channel();\n\n\n\n // for signaling the shutdown of spawned server and system\n\n let (thread_stop_tx, thread_stop_rx) = mpsc::channel(1);\n\n\n\n let tls = match cfg.stream {\n\n StreamType::Tcp => false,\n\n #[cfg(feature = \"openssl\")]\n", "file_path": "actix-test/src/lib.rs", "rank": 42, "score": 201222.58014780786 }, { "content": "// Benchmark basic WebService directly\n\n// this approach is usable for benching WebService, though it adds some time to direct service call:\n\n// Sample results on MacBook Pro '14\n\n// time: [2.0724 us 2.1345 us 2.2074 us]\n\nfn async_web_service(c: &mut Criterion) {\n\n let rt = actix_rt::System::new();\n\n let srv = Rc::new(RefCell::new(rt.block_on(init_service(\n\n App::new().service(web::service(\"/\").finish(index)),\n\n ))));\n\n\n\n let req = TestRequest::get().uri(\"/\").to_request();\n\n assert!(rt\n\n .block_on(srv.borrow_mut().call(req))\n\n .unwrap()\n\n .status()\n\n .is_success());\n\n\n\n // start benchmark loops\n\n c.bench_function(\"async_web_service_direct\", move |b| 
{\n\n b.iter_custom(|iters| {\n\n let srv = srv.clone();\n\n let futs = (0..iters)\n\n .map(|_| TestRequest::get().uri(\"/\").to_request())\n\n .map(|req| srv.borrow_mut().call(req));\n", "file_path": "actix-web/benches/service.rs", "rank": 43, "score": 197976.70102329698 }, { "content": "fn call() -> impl Iterator<Item = &'static str> {\n\n let arr = [\n\n \"/authorizations\",\n\n \"/user/repos\",\n\n \"/repos/rust-lang/rust/stargazers\",\n\n \"/orgs/rust-lang/public_members/nikomatsakis\",\n\n \"/repos/rust-lang/rust/releases/1.51.0\",\n\n ];\n\n\n\n IntoIterator::into_iter(arr)\n\n}\n\n\n", "file_path": "actix-router/benches/router.rs", "rank": 44, "score": 196593.03781993256 }, { "content": "/// Split at the index of the first `needle` if it exists or at the end.\n\nfn split_once(haystack: &str, needle: char) -> (&str, &str) {\n\n haystack.find(needle).map_or_else(\n\n || (haystack, \"\"),\n\n |sc| {\n\n let (first, last) = haystack.split_at(sc);\n\n (first, last.split_at(1).1)\n\n },\n\n )\n\n}\n\n\n", "file_path": "actix-web/src/http/header/content_disposition.rs", "rank": 45, "score": 195673.46873596607 }, { "content": "fn bench_header_parsing(c: &mut Criterion) {\n\n c.bench_function(\"Original (Unsound) [short]\", |b| {\n\n b.iter(|| {\n\n let mut buf = BytesMut::from(REQ_SHORT);\n\n _original::parse_headers(&mut buf);\n\n })\n\n });\n\n\n\n c.bench_function(\"New (safe) [short]\", |b| {\n\n b.iter(|| {\n\n let mut buf = BytesMut::from(REQ_SHORT);\n\n _new::parse_headers(&mut buf);\n\n })\n\n });\n\n\n\n c.bench_function(\"Original (Unsound) [realistic]\", |b| {\n\n b.iter(|| {\n\n let mut buf = BytesMut::from(REQ);\n\n _original::parse_headers(&mut buf);\n\n })\n", "file_path": "actix-http/benches/uninit-headers.rs", "rank": 46, "score": 194303.6318623433 }, { "content": "#[inline]\n\npub fn fmt_comma_delimited<T>(f: &mut fmt::Formatter<'_>, parts: &[T]) -> fmt::Result\n\nwhere\n\n T: fmt::Display,\n\n{\n\n let mut iter = parts.iter();\n\n\n\n if let 
Some(part) = iter.next() {\n\n fmt::Display::fmt(part, f)?;\n\n }\n\n\n\n for part in iter {\n\n f.write_str(\", \")?;\n\n fmt::Display::fmt(part, f)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n/// Percent encode a sequence of bytes with a character set defined in [RFC 5987 §3.2].\n\n///\n\n/// [RFC 5987 §3.2]: https://datatracker.ietf.org/doc/html/rfc5987#section-3.2\n", "file_path": "actix-http/src/header/utils.rs", "rank": 47, "score": 193974.3677025815 }, { "content": "/// Split at the index of the first `needle` if it exists or at the end, trim the right of the\n\n/// first part and the left of the last part.\n\nfn split_once_and_trim(haystack: &str, needle: char) -> (&str, &str) {\n\n let (first, last) = split_once(haystack, needle);\n\n (first.trim_end(), last.trim_start())\n\n}\n\n\n\n/// The implied disposition of the content of the HTTP body.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum DispositionType {\n\n /// Inline implies default processing.\n\n Inline,\n\n\n\n /// Attachment implies that the recipient should prompt the user to save the response locally,\n\n /// rather than process it normally (as per its media type).\n\n Attachment,\n\n\n\n /// Used in *multipart/form-data* as defined in\n\n /// [RFC 7578](https://datatracker.ietf.org/doc/html/rfc7578) to carry the field name and\n\n /// optional filename.\n\n FormData,\n\n\n", "file_path": "actix-web/src/http/header/content_disposition.rs", "rank": 48, "score": 193232.47833051637 }, { "content": "#[inline]\n\nfn parts(parts: &mut Option<Inner>) -> &mut Inner {\n\n parts.as_mut().expect(\"cannot reuse test request builder\")\n\n}\n\n\n\n/// Async I/O test buffer.\n\n#[derive(Debug)]\n\npub struct TestBuffer {\n\n pub read_buf: Rc<RefCell<BytesMut>>,\n\n pub write_buf: Rc<RefCell<BytesMut>>,\n\n pub err: Option<Rc<io::Error>>,\n\n}\n\n\n\nimpl TestBuffer {\n\n /// Create new `TestBuffer` instance with initial read buffer.\n\n pub fn new<T>(data: T) -> Self\n\n where\n\n T: Into<BytesMut>,\n\n {\n\n Self 
{\n\n read_buf: Rc::new(RefCell::new(data.into())),\n", "file_path": "actix-http/src/test.rs", "rank": 49, "score": 190902.99837833233 }, { "content": "fn split_in_two(s: &str, separator: char) -> Option<(&str, &str)> {\n\n let mut iter = s.splitn(2, separator);\n\n match (iter.next(), iter.next()) {\n\n (Some(a), Some(b)) => Some((a, b)),\n\n _ => None,\n\n }\n\n}\n\n\n\nimpl FromStr for ContentRangeSpec {\n\n type Err = ParseError;\n\n\n\n fn from_str(s: &str) -> Result<Self, ParseError> {\n\n let res = match split_in_two(s, ' ') {\n\n Some((\"bytes\", resp)) => {\n\n let (range, instance_length) =\n\n split_in_two(resp, '/').ok_or(ParseError::Header)?;\n\n\n\n let instance_length = if instance_length == \"*\" {\n\n None\n\n } else {\n", "file_path": "actix-web/src/http/header/content_range.rs", "rank": 50, "score": 190608.16750376663 }, { "content": "fn tls_config() -> ServerConfig {\n\n let cert = rcgen::generate_simple_self_signed(vec![\"localhost\".to_owned()]).unwrap();\n\n let cert_file = cert.serialize_pem().unwrap();\n\n let key_file = cert.serialize_private_key_pem();\n\n\n\n let cert_file = &mut BufReader::new(cert_file.as_bytes());\n\n let key_file = &mut BufReader::new(key_file.as_bytes());\n\n\n\n let cert_chain = certs(cert_file)\n\n .unwrap()\n\n .into_iter()\n\n .map(Certificate)\n\n .collect();\n\n let mut keys = pkcs8_private_keys(key_file).unwrap();\n\n\n\n ServerConfig::builder()\n\n .with_safe_defaults()\n\n .with_no_client_auth()\n\n .with_single_cert(cert_chain, PrivateKey(keys.remove(0)))\n\n .unwrap()\n", "file_path": "awc/tests/test_rustls_client.rs", "rank": 51, "score": 189873.7422440053 }, { "content": "#[inline]\n\npub fn file_extension_to_mime(ext: &str) -> mime::Mime {\n\n from_ext(ext).first_or_octet_stream()\n\n}\n\n\n", "file_path": "actix-files/src/lib.rs", "rank": 52, "score": 189619.77985507058 }, { "content": "#[derive(Debug, Display, Error)]\n\n#[display(fmt = \"error\")]\n\nstruct BadRequest;\n\n\n\nimpl From<BadRequest> 
for Response<BoxBody> {\n\n fn from(_: BadRequest) -> Self {\n\n Response::bad_request().set_body(BoxBody::new(\"error\"))\n\n }\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn h2_service_error() {\n\n let mut srv = test_server(move || {\n\n HttpService::build()\n\n .h2(|_| err::<Response<BoxBody>, _>(BadRequest))\n\n .rustls(tls_config())\n\n })\n\n .await;\n\n\n\n let response = srv.sget(\"/\").send().await.unwrap();\n\n assert_eq!(response.status(), http::StatusCode::BAD_REQUEST);\n\n\n", "file_path": "actix-http/tests/test_rustls.rs", "rank": 53, "score": 188577.8898533108 }, { "content": "#[derive(Debug, Display, Error)]\n\n#[display(fmt = \"error\")]\n\nstruct BadRequest;\n\n\n\nimpl From<BadRequest> for Response<BoxBody> {\n\n fn from(_: BadRequest) -> Self {\n\n Response::bad_request().set_body(BoxBody::new(\"error\"))\n\n }\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn h1_service_error() {\n\n let mut srv = test_server(|| {\n\n HttpService::build()\n\n .h1(|_| err::<Response<()>, _>(BadRequest))\n\n .tcp()\n\n })\n\n .await;\n\n\n\n let response = srv.get(\"/\").send().await.unwrap();\n\n assert_eq!(response.status(), http::StatusCode::BAD_REQUEST);\n\n\n", "file_path": "actix-http/tests/test_server.rs", "rank": 54, "score": 188577.8898533108 }, { "content": "#[derive(Debug, Display, Error)]\n\n#[display(fmt = \"error\")]\n\nstruct BadRequest;\n\n\n\nimpl From<BadRequest> for Response<BoxBody> {\n\n fn from(err: BadRequest) -> Self {\n\n Response::build(StatusCode::BAD_REQUEST)\n\n .body(err.to_string())\n\n .map_into_boxed_body()\n\n }\n\n}\n\n\n\n#[actix_rt::test]\n\nasync fn h2_service_error() {\n\n let mut srv = test_server(move || {\n\n HttpService::build()\n\n .h2(|_| err::<Response<BoxBody>, _>(BadRequest))\n\n .openssl(tls_config())\n\n .map_err(|_| ())\n\n })\n\n .await;\n\n\n", "file_path": "actix-http/tests/test_openssl.rs", "rank": 55, "score": 188577.8898533108 }, { "content": "/// Sealed trait implemented for types that can be effectively borrowed as a 
[`HeaderValue`].\n\n///\n\n/// [`HeaderValue`]: super::HeaderValue\n\npub trait AsHeaderName: Sealed {}\n\n\n\npub struct Seal;\n\n\n", "file_path": "actix-http/src/header/as_name.rs", "rank": 56, "score": 188458.37497451092 }, { "content": "/// Creates a new resource for a specific path.\n\n///\n\n/// Resources may have dynamic path segments. For example, a resource with the path `/a/{name}/c`\n\n/// would match all incoming requests with paths such as `/a/b/c`, `/a/1/c`, or `/a/etc/c`.\n\n///\n\n/// A dynamic segment is specified in the form `{identifier}`, where the identifier can be used\n\n/// later in a request handler to access the matched value for that segment. This is done by looking\n\n/// up the identifier in the `Path` object returned by [`HttpRequest.match_info()`] method.\n\n///\n\n/// By default, each segment matches the regular expression `[^{}/]+`.\n\n///\n\n/// You can also specify a custom regex in the form `{identifier:regex}`:\n\n///\n\n/// For instance, to route `GET`-requests on any route matching `/users/{userid}/{friend}` and store\n\n/// `userid` and `friend` in the exposed `Path` object:\n\n///\n\n/// ```\n\n/// use actix_web::{web, App, HttpResponse};\n\n///\n\n/// let app = App::new().service(\n\n/// web::resource(\"/users/{userid}/{friend}\")\n\n/// .route(web::get().to(|| HttpResponse::Ok()))\n\n/// .route(web::head().to(|| HttpResponse::MethodNotAllowed()))\n\n/// );\n\n/// ```\n\npub fn resource<T: IntoPatterns>(path: T) -> Resource {\n\n Resource::new(path)\n\n}\n\n\n", "file_path": "actix-web/src/web.rs", "rank": 57, "score": 188418.10036586077 }, { "content": "fn tls_config() -> RustlsServerConfig {\n\n let cert = rcgen::generate_simple_self_signed(vec![\"localhost\".to_owned()]).unwrap();\n\n let cert_file = cert.serialize_pem().unwrap();\n\n let key_file = cert.serialize_private_key_pem();\n\n\n\n let cert_file = &mut BufReader::new(cert_file.as_bytes());\n\n let key_file = &mut BufReader::new(key_file.as_bytes());\n\n\n\n let 
cert_chain = certs(cert_file)\n\n .unwrap()\n\n .into_iter()\n\n .map(Certificate)\n\n .collect();\n\n let mut keys = pkcs8_private_keys(key_file).unwrap();\n\n\n\n let mut config = RustlsServerConfig::builder()\n\n .with_safe_defaults()\n\n .with_no_client_auth()\n\n .with_single_cert(cert_chain, PrivateKey(keys.remove(0)))\n\n .unwrap();\n\n\n\n config.alpn_protocols.push(HTTP1_1_ALPN_PROTOCOL.to_vec());\n\n config.alpn_protocols.push(H2_ALPN_PROTOCOL.to_vec());\n\n\n\n config\n\n}\n\n\n", "file_path": "actix-http/tests/test_rustls.rs", "rank": 58, "score": 186897.8394902964 }, { "content": "/// An interface for types that can be converted into a [`HeaderValue`].\n\npub trait TryIntoHeaderValue: Sized {\n\n /// The type returned in the event of a conversion error.\n\n type Error: Into<HttpError>;\n\n\n\n /// Try to convert value to a HeaderValue.\n\n fn try_into_value(self) -> Result<HeaderValue, Self::Error>;\n\n}\n\n\n\nimpl TryIntoHeaderValue for HeaderValue {\n\n type Error = InvalidHeaderValue;\n\n\n\n #[inline]\n\n fn try_into_value(self) -> Result<HeaderValue, Self::Error> {\n\n Ok(self)\n\n }\n\n}\n\n\n\nimpl TryIntoHeaderValue for &HeaderValue {\n\n type Error = InvalidHeaderValue;\n\n\n", "file_path": "actix-http/src/header/into_value.rs", "rank": 59, "score": 185511.7931059641 }, { "content": "#[deprecated(since = \"4.0.0\", note = \"Prefer `WsResponseBuilder::start_with_addr`.\")]\n\npub fn start_with_addr<A, T>(\n\n actor: A,\n\n req: &HttpRequest,\n\n stream: T,\n\n) -> Result<(Addr<A>, HttpResponse), Error>\n\nwhere\n\n A: Actor<Context = WebsocketContext<A>> + StreamHandler<Result<Message, ProtocolError>>,\n\n T: Stream<Item = Result<Bytes, PayloadError>> + 'static,\n\n{\n\n let mut res = handshake(req)?;\n\n let (addr, out_stream) = WebsocketContext::create_with_addr(actor, stream);\n\n Ok((addr, res.streaming(out_stream)))\n\n}\n\n\n\n/// Do WebSocket handshake and start ws actor.\n\n///\n\n/// `protocols` is a sequence of known 
protocols.\n\n#[deprecated(\n\n since = \"4.0.0\",\n\n note = \"Prefer `WsResponseBuilder` for setting protocols.\"\n\n)]\n", "file_path": "actix-web-actors/src/ws.rs", "rank": 60, "score": 184704.646532312 }, { "content": "fn build_next_uri(res: &ClientResponse, prev_uri: &Uri) -> Result<Uri, SendRequestError> {\n\n let uri = res\n\n .headers()\n\n .get(header::LOCATION)\n\n .map(|value| {\n\n // try to parse the location to a full uri\n\n let uri = Uri::try_from(value.as_bytes())\n\n .map_err(|e| SendRequestError::Url(InvalidUrl::HttpError(e.into())))?;\n\n if uri.scheme().is_none() || uri.authority().is_none() {\n\n let uri = Uri::builder()\n\n .scheme(prev_uri.scheme().cloned().unwrap())\n\n .authority(prev_uri.authority().cloned().unwrap())\n\n .path_and_query(value.as_bytes())\n\n .build()?;\n\n Ok::<_, SendRequestError>(uri)\n\n } else {\n\n Ok(uri)\n\n }\n\n })\n\n // TODO: this error type is wrong.\n\n .ok_or(SendRequestError::Url(InvalidUrl::MissingScheme))??;\n\n\n\n Ok(uri)\n\n}\n\n\n", "file_path": "awc/src/middleware/redirect.rs", "rank": 61, "score": 182985.22254491062 }, { "content": "struct WebServiceImpl<T> {\n\n srv: T,\n\n rdef: Patterns,\n\n name: Option<String>,\n\n guards: Vec<Box<dyn Guard>>,\n\n}\n\n\n\nimpl<T> HttpServiceFactory for WebServiceImpl<T>\n\nwhere\n\n T: ServiceFactory<\n\n ServiceRequest,\n\n Config = (),\n\n Response = ServiceResponse,\n\n Error = Error,\n\n InitError = (),\n\n > + 'static,\n\n{\n\n fn register(mut self, config: &mut AppService) {\n\n let guards = if self.guards.is_empty() {\n\n None\n", "file_path": "actix-web/src/service.rs", "rank": 62, "score": 182525.50869537162 }, { "content": "type PathFilter = dyn Fn(&Path, &RequestHead) -> bool;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::{\n\n fs::{self},\n\n ops::Add,\n\n time::{Duration, SystemTime},\n\n };\n\n\n\n use actix_web::{\n\n dev::ServiceFactory,\n\n guard,\n\n http::{\n\n header::{self, ContentDisposition, DispositionParam, DispositionType},\n\n 
Method, StatusCode,\n\n },\n\n middleware::Compress,\n\n test::{self, TestRequest},\n\n web::{self, Bytes},\n", "file_path": "actix-files/src/lib.rs", "rank": 63, "score": 181876.2711491542 }, { "content": "fn tls_config() -> SslAcceptor {\n\n let cert = rcgen::generate_simple_self_signed(vec![\"localhost\".to_owned()]).unwrap();\n\n let cert_file = cert.serialize_pem().unwrap();\n\n let key_file = cert.serialize_private_key_pem();\n\n let cert = X509::from_pem(cert_file.as_bytes()).unwrap();\n\n let key = PKey::private_key_from_pem(key_file.as_bytes()).unwrap();\n\n\n\n let mut builder = SslAcceptor::mozilla_intermediate(SslMethod::tls()).unwrap();\n\n builder.set_certificate(&cert).unwrap();\n\n builder.set_private_key(&key).unwrap();\n\n\n\n builder.set_alpn_select_callback(|_, protos| {\n\n const H2: &[u8] = b\"\\x02h2\";\n\n if protos.windows(3).any(|window| window == H2) {\n\n Ok(b\"h2\")\n\n } else {\n\n Err(openssl::ssl::AlpnError::NOACK)\n\n }\n\n });\n\n builder.set_alpn_protos(b\"\\x02h2\").unwrap();\n", "file_path": "awc/tests/test_connector.rs", "rank": 64, "score": 180188.71787136167 }, { "content": "#[proc_macro_attribute]\n\npub fn test(_: TokenStream, item: TokenStream) -> TokenStream {\n\n let mut output: TokenStream = (quote! 
{\n\n #[::actix_web::rt::test(system = \"::actix_web::rt::System\")]\n\n })\n\n .into();\n\n\n\n output.extend(item);\n\n output\n\n}\n", "file_path": "actix-web-codegen/src/lib.rs", "rank": 65, "score": 179004.5106992686 }, { "content": "#[inline]\n\npub fn apply_mask(buf: &mut [u8], mask: [u8; 4]) {\n\n apply_mask_fast32(buf, mask)\n\n}\n\n\n\n/// A safe unoptimized mask application.\n", "file_path": "actix-http/src/ws/mask.rs", "rank": 66, "score": 178424.7095985478 }, { "content": "#[cfg(feature = \"cookies\")]\n\nstruct Cookies(Vec<Cookie<'static>>);\n\n\n\n/// An incoming request.\n\n#[derive(Clone)]\n\npub struct HttpRequest {\n\n /// # Invariant\n\n /// `Rc<HttpRequestInner>` is used exclusively and NO `Weak<HttpRequestInner>`\n\n /// is allowed anywhere in the code. Weak pointer is purposely ignored when\n\n /// doing `Rc`'s ref counter check. Expect panics if this invariant is violated.\n\n pub(crate) inner: Rc<HttpRequestInner>,\n\n}\n\n\n\npub(crate) struct HttpRequestInner {\n\n pub(crate) head: Message<RequestHead>,\n\n pub(crate) path: Path<Url>,\n\n pub(crate) app_data: SmallVec<[Rc<Extensions>; 4]>,\n\n pub(crate) conn_data: Option<Rc<Extensions>>,\n\n pub(crate) extensions: Rc<RefCell<Extensions>>,\n\n app_state: Rc<AppInitServiceState>,\n\n}\n", "file_path": "actix-web/src/request.rs", "rank": 67, "score": 177388.1852240747 }, { "content": "#[cfg(feature = \"openssl\")]\n\nfn openssl_config() -> SslAcceptor {\n\n let cert = rcgen::generate_simple_self_signed(vec![\"localhost\".to_owned()]).unwrap();\n\n let cert_file = cert.serialize_pem().unwrap();\n\n let key_file = cert.serialize_private_key_pem();\n\n let cert = X509::from_pem(cert_file.as_bytes()).unwrap();\n\n let key = PKey::private_key_from_pem(key_file.as_bytes()).unwrap();\n\n\n\n let mut builder = SslAcceptor::mozilla_intermediate(SslMethod::tls()).unwrap();\n\n builder.set_certificate(&cert).unwrap();\n\n builder.set_private_key(&key).unwrap();\n\n\n\n 
builder.set_alpn_select_callback(|_, protos| {\n\n const H2: &[u8] = b\"\\x02h2\";\n\n if protos.windows(3).any(|window| window == H2) {\n\n Ok(b\"h2\")\n\n } else {\n\n Err(openssl::ssl::AlpnError::NOACK)\n\n }\n\n });\n\n builder.set_alpn_protos(b\"\\x02h2\").unwrap();\n\n\n\n builder.build()\n\n}\n\n\n", "file_path": "actix-web/tests/test_server.rs", "rank": 68, "score": 176880.43396680447 }, { "content": "fn tls_config() -> SslAcceptor {\n\n let cert = rcgen::generate_simple_self_signed(vec![\"localhost\".to_owned()]).unwrap();\n\n let cert_file = cert.serialize_pem().unwrap();\n\n let key_file = cert.serialize_private_key_pem();\n\n let cert = X509::from_pem(cert_file.as_bytes()).unwrap();\n\n let key = PKey::private_key_from_pem(key_file.as_bytes()).unwrap();\n\n\n\n let mut builder = SslAcceptor::mozilla_intermediate(SslMethod::tls()).unwrap();\n\n builder.set_certificate(&cert).unwrap();\n\n builder.set_private_key(&key).unwrap();\n\n\n\n builder.set_alpn_select_callback(|_, protos| {\n\n const H2: &[u8] = b\"\\x02h2\";\n\n if protos.windows(3).any(|window| window == H2) {\n\n Ok(b\"h2\")\n\n } else {\n\n Err(openssl::ssl::AlpnError::NOACK)\n\n }\n\n });\n\n builder.set_alpn_protos(b\"\\x02h2\").unwrap();\n", "file_path": "actix-http/tests/test_openssl.rs", "rank": 69, "score": 176880.43396680447 }, { "content": "fn tls_config() -> SslAcceptor {\n\n let cert = rcgen::generate_simple_self_signed(vec![\"localhost\".to_owned()]).unwrap();\n\n let cert_file = cert.serialize_pem().unwrap();\n\n let key_file = cert.serialize_private_key_pem();\n\n let cert = X509::from_pem(cert_file.as_bytes()).unwrap();\n\n let key = PKey::private_key_from_pem(key_file.as_bytes()).unwrap();\n\n\n\n let mut builder = SslAcceptor::mozilla_intermediate(SslMethod::tls()).unwrap();\n\n builder.set_certificate(&cert).unwrap();\n\n builder.set_private_key(&key).unwrap();\n\n\n\n builder.set_alpn_select_callback(|_, protos| {\n\n const H2: &[u8] = b\"\\x02h2\";\n\n if 
protos.windows(3).any(|window| window == H2) {\n\n Ok(b\"h2\")\n\n } else {\n\n Err(openssl::ssl::AlpnError::NOACK)\n\n }\n\n });\n\n builder.set_alpn_protos(b\"\\x02h2\").unwrap();\n", "file_path": "awc/tests/test_ssl_client.rs", "rank": 70, "score": 176880.43396680447 }, { "content": "#[derive(Debug, Default)]\n\nstruct Inner {\n\n headers: HeaderMap,\n\n}\n\n\n\nimpl DefaultHeaders {\n\n /// Constructs an empty `DefaultHeaders` middleware.\n\n #[inline]\n\n pub fn new() -> DefaultHeaders {\n\n DefaultHeaders::default()\n\n }\n\n\n\n /// Adds a header to the default set.\n\n ///\n\n /// # Panics\n\n /// Panics when resolved header name or value is invalid.\n\n #[allow(clippy::should_implement_trait)]\n\n pub fn add(mut self, header: impl TryIntoHeaderPair) -> Self {\n\n // standard header terminology `insert` or `append` for this method would make the behavior\n\n // of this middleware less obvious since it only adds the headers if they are not present\n\n\n", "file_path": "actix-web/src/middleware/default_headers.rs", "rank": 71, "score": 176673.34937327087 }, { "content": "/// Parses 0 or more items out of a comma delimited string, ignoring invalid items.\n\nfn from_comma_delimited<T: FromStr>(s: &str) -> Vec<T> {\n\n s.split(',')\n\n .filter_map(|x| match x.trim() {\n\n \"\" => None,\n\n y => Some(y),\n\n })\n\n .filter_map(|x| x.parse().ok())\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use actix_http::{test::TestRequest, Request};\n\n\n\n use super::*;\n\n\n\n fn req(s: &str) -> Request {\n\n TestRequest::default()\n\n .insert_header((header::RANGE, s))\n\n .finish()\n", "file_path": "actix-web/src/http/header/range.rs", "rank": 72, "score": 175769.88745869522 }, { "content": "#[inline]\n\npub fn apply_mask_fast32(buf: &mut [u8], mask: [u8; 4]) {\n\n let mask_u32 = u32::from_ne_bytes(mask);\n\n\n\n // SAFETY:\n\n //\n\n // buf is a valid slice borrowed mutably from bytes::BytesMut.\n\n //\n\n // un aligned prefix and suffix would be mask/unmask 
per byte.\n\n // proper aligned middle slice goes into fast path and operates on 4-byte blocks.\n\n let (prefix, words, suffix) = unsafe { buf.align_to_mut::<u32>() };\n\n apply_mask_fallback(prefix, mask);\n\n let head = prefix.len() & 3;\n\n let mask_u32 = if head > 0 {\n\n if cfg!(target_endian = \"big\") {\n\n mask_u32.rotate_left(8 * head as u32)\n\n } else {\n\n mask_u32.rotate_right(8 * head as u32)\n\n }\n\n } else {\n\n mask_u32\n", "file_path": "actix-http/src/ws/mask.rs", "rank": 73, "score": 175647.25951973436 }, { "content": "#[derive(Clone)]\n\nstruct WsService(Cell<bool>);\n\n\n\nimpl WsService {\n\n fn new() -> Self {\n\n WsService(Cell::new(false))\n\n }\n\n\n\n fn set_polled(&self) {\n\n self.0.set(true);\n\n }\n\n\n\n fn was_polled(&self) -> bool {\n\n self.0.get()\n\n }\n\n}\n\n\n", "file_path": "actix-http/tests/test_ws.rs", "rank": 74, "score": 175349.1567338134 }, { "content": "fn check_slice_validity(slice: &str) -> bool {\n\n slice.bytes().all(entity_validate_char)\n\n}\n\n\n\n/// An entity tag, defined in [RFC 7232 §2.3].\n\n///\n\n/// An entity tag consists of a string enclosed by two literal double quotes.\n\n/// Preceding the first double quote is an optional weakness indicator,\n\n/// which always looks like `W/`. 
Examples for valid tags are `\"xyzzy\"` and\n\n/// `W/\"xyzzy\"`.\n\n///\n\n/// # ABNF\n\n/// ```plain\n\n/// entity-tag = [ weak ] opaque-tag\n\n/// weak = %x57.2F ; \"W/\", case-sensitive\n\n/// opaque-tag = DQUOTE *etagc DQUOTE\n\n/// etagc = %x21 / %x23-7E / obs-text\n\n/// ; VCHAR except double quotes, plus obs-text\n\n/// ```\n\n///\n", "file_path": "actix-web/src/http/header/entity.rs", "rank": 75, "score": 175257.77563410456 }, { "content": "#[derive(Clone)]\n\nstruct CustomRequestFn {\n\n inner_fn: Rc<dyn Fn(&ServiceRequest) -> String>,\n\n}\n\n\n\nimpl CustomRequestFn {\n\n fn call(&self, req: &ServiceRequest) -> String {\n\n (self.inner_fn)(req)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for CustomRequestFn {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.write_str(\"custom_request_fn\")\n\n }\n\n}\n\n\n\nimpl FormatText {\n\n fn render(\n\n &self,\n\n fmt: &mut fmt::Formatter<'_>,\n", "file_path": "actix-web/src/middleware/logger.rs", "rank": 76, "score": 173200.15149203537 }, { "content": "pub trait Sealed {\n\n fn try_as_name(&self, seal: Seal) -> Result<Cow<'_, HeaderName>, InvalidHeaderName>;\n\n}\n\n\n\nimpl Sealed for HeaderName {\n\n #[inline]\n\n fn try_as_name(&self, _: Seal) -> Result<Cow<'_, HeaderName>, InvalidHeaderName> {\n\n Ok(Cow::Borrowed(self))\n\n }\n\n}\n\nimpl AsHeaderName for HeaderName {}\n\n\n\nimpl Sealed for &HeaderName {\n\n #[inline]\n\n fn try_as_name(&self, _: Seal) -> Result<Cow<'_, HeaderName>, InvalidHeaderName> {\n\n Ok(Cow::Borrowed(*self))\n\n }\n\n}\n\nimpl AsHeaderName for &HeaderName {}\n\n\n", "file_path": "actix-http/src/header/as_name.rs", "rank": 77, "score": 172711.78668897826 }, { "content": "fn status_service(\n\n status: StatusCode,\n\n) -> impl Service<Request, Response = Response<impl MessageBody>, Error = Error> {\n\n fn_service(move |_req: Request| ready(Ok::<_, Error>(Response::new(status))))\n\n}\n\n\n", "file_path": "actix-http/src/h1/dispatcher_tests.rs", "rank": 78, "score": 
170805.44608913423 }, { "content": "/// Write out content length header.\n\n///\n\n/// Buffer must to contain enough space or be implicitly extendable.\n\npub fn write_content_length<B: BufMut>(n: u64, buf: &mut B, camel_case: bool) {\n\n if n == 0 {\n\n if camel_case {\n\n buf.put_slice(b\"\\r\\nContent-Length: 0\\r\\n\");\n\n } else {\n\n buf.put_slice(b\"\\r\\ncontent-length: 0\\r\\n\");\n\n }\n\n\n\n return;\n\n }\n\n\n\n let mut buffer = itoa::Buffer::new();\n\n\n\n if camel_case {\n\n buf.put_slice(b\"\\r\\nContent-Length: \");\n\n } else {\n\n buf.put_slice(b\"\\r\\ncontent-length: \");\n\n }\n\n\n\n buf.put_slice(buffer.format(n).as_bytes());\n", "file_path": "actix-http/src/helpers.rs", "rank": 79, "score": 170746.38599241752 }, { "content": "#[inline]\n\npub fn q<T>(quality: T) -> Quality\n\nwhere\n\n T: TryInto<Quality>,\n\n T::Error: fmt::Debug,\n\n{\n\n quality.try_into().expect(\"quality value was out of bounds\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn q_helper() {\n\n assert_eq!(q(0.5), Quality(500));\n\n }\n\n\n\n #[test]\n\n fn display_output() {\n\n assert_eq!(Quality::ZERO.to_string(), \"0\");\n", "file_path": "actix-http/src/header/shared/quality.rs", "rank": 80, "score": 170317.08321629133 }, { "content": "/// Start default [`TestServer`].\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use actix_web::{get, web, test, App, HttpResponse, Error, Responder};\n\n///\n\n/// #[get(\"/\")]\n\n/// async fn my_handler() -> Result<impl Responder, Error> {\n\n/// Ok(HttpResponse::Ok())\n\n/// }\n\n///\n\n/// #[actix_web::test]\n\n/// async fn test_example() {\n\n/// let srv = actix_test::start(||\n\n/// App::new().service(my_handler)\n\n/// );\n\n///\n\n/// let req = srv.get(\"/\");\n\n/// let res = req.send().await.unwrap();\n\n///\n\n/// assert!(res.status().is_success());\n\n/// }\n\n/// ```\n\npub fn start<F, I, S, B>(factory: F) -> TestServer\n\nwhere\n\n F: Fn() -> I + Send + Clone + 'static,\n\n I: 
IntoServiceFactory<S, Request>,\n\n S: ServiceFactory<Request, Config = AppConfig> + 'static,\n\n S::Error: Into<Error> + 'static,\n\n S::InitError: fmt::Debug,\n\n S::Response: Into<Response<B>> + 'static,\n\n <S::Service as Service<Request>>::Future: 'static,\n\n B: MessageBody + 'static,\n\n{\n\n start_with(TestServerConfig::default(), factory)\n\n}\n\n\n", "file_path": "actix-test/src/lib.rs", "rank": 81, "score": 170109.0447998338 }, { "content": "struct Config {\n\n host: Option<String>,\n\n keep_alive: KeepAlive,\n\n client_request_timeout: Duration,\n\n client_disconnect_timeout: Duration,\n\n}\n\n\n\n/// An HTTP Server.\n\n///\n\n/// Create new HTTP server with application factory.\n\n///\n\n/// ```no_run\n\n/// use actix_web::{web, App, HttpResponse, HttpServer};\n\n///\n\n/// #[actix_rt::main]\n\n/// async fn main() -> std::io::Result<()> {\n\n/// HttpServer::new(\n\n/// || App::new()\n\n/// .service(web::resource(\"/\").to(|| HttpResponse::Ok())))\n\n/// .bind(\"127.0.0.1:59090\")?\n", "file_path": "actix-web/src/server.rs", "rank": 82, "score": 169560.65518535068 }, { "content": "/// Trim whitespace then any quote marks.\n\nfn unquote(val: &str) -> &str {\n\n val.trim().trim_start_matches('\"').trim_end_matches('\"')\n\n}\n\n\n", "file_path": "actix-web/src/info.rs", "rank": 83, "score": 165238.09104117315 }, { "content": "fn main() {}\n", "file_path": "actix-web-codegen/tests/trybuild/route-malformed-path-fail.rs", "rank": 84, "score": 164136.1864507207 }, { "content": "#[get(\"/test\")]\n\nfn auto_sync() -> impl Future<Output = Result<HttpResponse, actix_web::Error>> {\n\n ok(HttpResponse::Ok().finish())\n\n}\n\n\n\n#[put(\"/test/{param}\")]\n\nasync fn put_param_test(_: web::Path<String>) -> impl Responder {\n\n HttpResponse::Created()\n\n}\n\n\n\n#[delete(\"/test/{param}\")]\n\nasync fn delete_param_test(_: web::Path<String>) -> impl Responder {\n\n HttpResponse::NoContent()\n\n}\n\n\n\n#[get(\"/test/{param}\")]\n\nasync fn get_param_test(_: 
web::Path<String>) -> impl Responder {\n\n HttpResponse::Ok()\n\n}\n\n\n\n#[route(\"/multi\", method = \"GET\", method = \"POST\", method = \"HEAD\")]\n", "file_path": "actix-web-codegen/tests/test_macro.rs", "rank": 85, "score": 162851.15489895438 }, { "content": "#[get(\"/test\")]\n\nfn auto_async() -> impl Future<Output = Result<HttpResponse, actix_web::Error>> {\n\n ok(HttpResponse::Ok().finish())\n\n}\n\n\n", "file_path": "actix-web-codegen/tests/test_macro.rs", "rank": 86, "score": 162851.15489895438 }, { "content": "/// Executes blocking function on a thread pool, returns future that resolves to result of the\n\n/// function execution.\n\npub fn block<F, R>(f: F) -> impl Future<Output = Result<R, BlockingError>>\n\nwhere\n\n F: FnOnce() -> R + Send + 'static,\n\n R: Send + 'static,\n\n{\n\n let fut = actix_rt::task::spawn_blocking(f);\n\n async { fut.await.map_err(|_| BlockingError) }\n\n}\n", "file_path": "actix-web/src/web.rs", "rank": 87, "score": 159337.33849851275 }, { "content": "struct ParamsDeserializer<'de, T: ResourcePath> {\n\n params: PathIter<'de, T>,\n\n current: Option<(&'de str, &'de str)>,\n\n}\n\n\n\nimpl<'de, T: ResourcePath> de::MapAccess<'de> for ParamsDeserializer<'de, T> {\n\n type Error = de::value::Error;\n\n\n\n fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error>\n\n where\n\n K: de::DeserializeSeed<'de>,\n\n {\n\n self.current = self.params.next().map(|ref item| (item.0, item.1));\n\n match self.current {\n\n Some((key, _)) => Ok(Some(seed.deserialize(Key { key })?)),\n\n None => Ok(None),\n\n }\n\n }\n\n\n\n fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Self::Error>\n", "file_path": "actix-router/src/de.rs", "rank": 88, "score": 157840.62634149828 }, { "content": "struct ParamsSeq<'de, T: ResourcePath> {\n\n params: PathIter<'de, T>,\n\n}\n\n\n\nimpl<'de, T: ResourcePath> de::SeqAccess<'de> for ParamsSeq<'de, T> {\n\n type Error = de::value::Error;\n\n\n\n fn next_element_seed<U>(&mut 
self, seed: U) -> Result<Option<U::Value>, Self::Error>\n\n where\n\n U: de::DeserializeSeed<'de>,\n\n {\n\n match self.params.next() {\n\n Some(item) => Ok(Some(seed.deserialize(Value { value: item.1 })?)),\n\n None => Ok(None),\n\n }\n\n }\n\n}\n\n\n", "file_path": "actix-router/src/de.rs", "rank": 89, "score": 157840.62634149828 }, { "content": "/// An interface for types that can be converted into a [`HeaderName`] + [`HeaderValue`] pair for\n\n/// insertion into a [`HeaderMap`].\n\n///\n\n/// [`HeaderMap`]: super::HeaderMap\n\npub trait TryIntoHeaderPair: Sized {\n\n type Error: Into<HttpError>;\n\n\n\n fn try_into_pair(self) -> Result<(HeaderName, HeaderValue), Self::Error>;\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum InvalidHeaderPart {\n\n Name(InvalidHeaderName),\n\n Value(InvalidHeaderValue),\n\n}\n\n\n\nimpl From<InvalidHeaderPart> for HttpError {\n\n fn from(part_err: InvalidHeaderPart) -> Self {\n\n match part_err {\n\n InvalidHeaderPart::Name(err) => err.into(),\n\n InvalidHeaderPart::Value(err) => err.into(),\n\n }\n\n }\n\n}\n", "file_path": "actix-http/src/header/into_pair.rs", "rank": 90, "score": 156195.96646572597 }, { "content": "/// Prepare WebSocket handshake response.\n\n///\n\n/// This function returns handshake `HttpResponse`, ready to send to peer. It does not perform\n\n/// any IO.\n\n///\n\n/// `protocols` is a sequence of known protocols. 
On successful handshake, the returned response\n\n/// headers contain the first protocol in this list which the server also knows.\n\npub fn handshake_with_protocols(\n\n req: &HttpRequest,\n\n protocols: &[&str],\n\n) -> Result<HttpResponseBuilder, HandshakeError> {\n\n // WebSocket accepts only GET\n\n if *req.method() != Method::GET {\n\n return Err(HandshakeError::GetMethodRequired);\n\n }\n\n\n\n // check for \"UPGRADE\" to WebSocket header\n\n let has_hdr = if let Some(hdr) = req.headers().get(&header::UPGRADE) {\n\n if let Ok(s) = hdr.to_str() {\n\n s.to_ascii_lowercase().contains(\"websocket\")\n\n } else {\n\n false\n\n }\n\n } else {\n\n false\n\n };\n\n if !has_hdr {\n", "file_path": "actix-web-actors/src/ws.rs", "rank": 91, "score": 151855.72514547667 }, { "content": "#[inline]\n\npub fn from_comma_delimited<'a, I, T>(all: I) -> Result<Vec<T>, ParseError>\n\nwhere\n\n I: Iterator<Item = &'a HeaderValue> + 'a,\n\n T: FromStr,\n\n{\n\n let size_guess = all.size_hint().1.unwrap_or(2);\n\n let mut result = Vec::with_capacity(size_guess);\n\n\n\n for h in all {\n\n let s = h.to_str().map_err(|_| ParseError::Header)?;\n\n\n\n result.extend(\n\n s.split(',')\n\n .filter_map(|x| match x.trim() {\n\n \"\" => None,\n\n y => Some(y),\n\n })\n\n .filter_map(|x| x.trim().parse().ok()),\n\n )\n\n }\n\n\n\n Ok(result)\n\n}\n\n\n\n/// Reads a single string when parsing a header.\n", "file_path": "actix-http/src/header/utils.rs", "rank": 92, "score": 150421.29108613962 }, { "content": "/// Creates a new un-configured route.\n\npub fn route() -> Route {\n\n Route::new()\n\n}\n\n\n\nmacro_rules! 
method_route {\n\n ($method_fn:ident, $method_const:ident) => {\n\n #[doc = concat!(\" Creates a new route with `\", stringify!($method_const), \"` method guard.\")]\n\n ///\n\n /// # Examples\n\n #[doc = concat!(\" In this example, one `\", stringify!($method_const), \" /{project_id}` route is set up:\")]\n\n /// ```\n\n /// use actix_web::{web, App, HttpResponse};\n\n ///\n\n /// let app = App::new().service(\n\n /// web::resource(\"/{project_id}\")\n\n #[doc = concat!(\" .route(web::\", stringify!($method_fn), \"().to(|| HttpResponse::Ok()))\")]\n\n ///\n\n /// );\n\n /// ```\n\n pub fn $method_fn() -> Route {\n", "file_path": "actix-web/src/web.rs", "rank": 93, "score": 148739.485429909 }, { "content": "fn bytes_to_string(body: Bytes, encoding: &'static Encoding) -> Result<String, Error> {\n\n if encoding == UTF_8 {\n\n Ok(str::from_utf8(body.as_ref())\n\n .map_err(|_| ErrorBadRequest(\"Can not decode body\"))?\n\n .to_owned())\n\n } else {\n\n Ok(encoding\n\n .decode_without_bom_handling_and_without_replacement(&body)\n\n .map(Cow::into_owned)\n\n .ok_or_else(|| ErrorBadRequest(\"Can not decode body\"))?)\n\n }\n\n}\n\n\n\n/// Configuration for request payloads.\n\n///\n\n/// Applies to the built-in [`Bytes`] and [`String`] extractors.\n\n/// Note that the [`Payload`] extractor does not automatically check\n\n/// conformance with this configuration to allow more flexibility when\n\n/// building extractors on top of [`Payload`].\n\n///\n", "file_path": "actix-web/src/types/payload.rs", "rank": 94, "score": 148513.7488730558 }, { "content": "struct TestBody {\n\n data: Bytes,\n\n chunk_size: usize,\n\n delay: Pin<Box<actix_rt::time::Sleep>>,\n\n}\n\n\n\nimpl TestBody {\n\n fn new(data: Bytes, chunk_size: usize) -> Self {\n\n TestBody {\n\n data,\n\n chunk_size,\n\n delay: Box::pin(actix_rt::time::sleep(std::time::Duration::from_millis(10))),\n\n }\n\n }\n\n}\n\n\n\nimpl futures_core::stream::Stream for TestBody {\n\n type Item = Result<Bytes, Error>;\n\n\n\n fn 
poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n", "file_path": "actix-web/tests/test_server.rs", "rank": 95, "score": 146666.48673285556 }, { "content": "fn responder(c: &mut Criterion) {\n\n let rt = actix_rt::System::new();\n\n let req = TestRequest::default().to_http_request();\n\n c.bench_function(\"responder\", move |b| {\n\n b.iter_custom(|_| {\n\n let responders =\n\n (0..100_000).map(|_| StringResponder(String::from(\"Hello World!!\")));\n\n\n\n let start = Instant::now();\n\n let _res = rt.block_on(async {\n\n // don't need runtime block on but to be fair.\n\n responders.map(|r| r.respond_to(&req)).collect::<Vec<_>>()\n\n });\n\n\n\n start.elapsed()\n\n })\n\n });\n\n}\n\n\n\ncriterion_group!(responder_bench, future_responder, responder);\n\ncriterion_main!(responder_bench);\n", "file_path": "actix-web/benches/responder.rs", "rank": 96, "score": 144716.01440577384 }, { "content": "#[doc(hidden)]\n\npub trait Head: Default + 'static {\n\n fn clear(&mut self);\n\n\n\n fn with_pool<F, R>(f: F) -> R\n\n where\n\n F: FnOnce(&MessagePool<Self>) -> R;\n\n}\n\n\n\npub struct Message<T: Head> {\n\n /// Rc here should not be cloned by anyone.\n\n /// It's used to reuse allocation of T and no shared ownership is allowed.\n\n head: Rc<T>,\n\n}\n\n\n\nimpl<T: Head> Message<T> {\n\n /// Get new message from the pool of objects\n\n #[allow(clippy::new_without_default)]\n\n pub fn new() -> Self {\n\n T::with_pool(MessagePool::get_message)\n\n }\n", "file_path": "actix-http/src/message.rs", "rank": 97, "score": 143598.6376127679 }, { "content": "/// Trait for transform a type to another one.\n\n/// Both the input and output type should impl [actix_service::Service] trait.\n\npub trait Transform<S, Req> {\n\n type Transform: Service<Req>;\n\n\n\n /// Creates and returns a new Transform component.\n\n fn new_transform(self, service: S) -> Self::Transform;\n\n}\n\n\n\n#[doc(hidden)]\n\n/// Helper struct for constructing Nested types 
that would call `Transform::new_transform`\n\n/// in a chain.\n\n///\n\n/// The child field would be called first and the output `Service` type is\n\n/// passed to parent as input type.\n\npub struct NestTransform<T1, T2, S, Req>\n\nwhere\n\n T1: Transform<S, Req>,\n\n T2: Transform<T1::Transform, Req>,\n\n{\n\n child: T1,\n\n parent: T2,\n", "file_path": "awc/src/middleware/mod.rs", "rank": 98, "score": 143505.5996881777 }, { "content": "fn future_responder(c: &mut Criterion) {\n\n let rt = actix_rt::System::new();\n\n let req = TestRequest::default().to_http_request();\n\n\n\n c.bench_function(\"future_responder\", move |b| {\n\n b.iter_custom(|_| {\n\n let futs = (0..100_000).map(|_| async {\n\n StringResponder(String::from(\"Hello World!!\"))\n\n .future_respond_to(&req)\n\n .await\n\n });\n\n\n\n let futs = join_all(futs);\n\n\n\n let start = Instant::now();\n\n\n\n let _res = rt.block_on(async { futs.await });\n\n\n\n start.elapsed()\n\n })\n\n });\n\n}\n\n\n", "file_path": "actix-web/benches/responder.rs", "rank": 99, "score": 142232.49777775598 } ]
Rust
src/low/v7400/fbx_footer.rs
lo48576/fbxcel
542389fb81582d03def7322dc866b971c0ff1c58
use byteorder::{ByteOrder, LittleEndian}; use log::debug; use crate::{ low::FbxVersion, pull_parser::{ error::DataError, v7400::{FromParser, Parser}, Error as ParserError, ParserSource, SyntacticPosition, Warning, }, }; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct FbxFooter { pub unknown1: [u8; 16], pub padding_len: u8, pub unknown2: [u8; 4], pub fbx_version: FbxVersion, pub unknown3: [u8; 16], } impl FromParser for FbxFooter { fn read_from_parser<R>(parser: &mut Parser<R>) -> Result<Self, ParserError> where R: ParserSource, { let start_pos = parser.reader().position(); let unknown1 = { const EXPECTED: [u8; 16] = [ 0xf0, 0xb0, 0xa0, 0x00, 0xd0, 0xc0, 0xd0, 0x60, 0xb0, 0x70, 0xf0, 0x80, 0x10, 0xf0, 0x20, 0x70, ]; let mut buf = [0u8; 16]; parser.reader().read_exact(&mut buf)?; for (byte, expected) in buf.iter().zip(&EXPECTED) { if (byte & 0xf0) != *expected { let pos = SyntacticPosition { byte_pos: parser.reader().position() - 16, component_byte_pos: start_pos, node_path: Vec::new(), attribute_index: None, }; parser.warn(Warning::UnexpectedFooterFieldValue, pos)?; break; } } buf }; let (padding_len, unknown2, version, unknown3) = { let buf_start_pos = parser.reader().position(); let expected_padding_len = (buf_start_pos.wrapping_neg() & 0x0f) as usize; debug!( "Current position = {}, expected padding length = {}", buf_start_pos, expected_padding_len ); const BUF_LEN: usize = 144; let mut buf = [0u8; BUF_LEN]; parser.reader().read_exact(&mut buf)?; let unknown3_pos = { const SEARCH_OFFSET: usize = BUF_LEN - 16; let pos = (&buf[SEARCH_OFFSET..]) .iter() .position(|&v| v != 0) .ok_or(DataError::BrokenFbxFooter)?; SEARCH_OFFSET + pos }; let padding_len = unknown3_pos & 0x0f; assert!(padding_len < 16); assert_eq!(unknown3_pos, padding_len + 128); let padding = &buf[..padding_len]; let mut unknown2 = [0u8; 4]; unknown2.copy_from_slice(&buf[padding_len..(padding_len + 4)]); let version_buf = &buf[(padding_len + 4)..(padding_len + 8)]; let zeroes_120 = 
&buf[(padding_len + 8)..(padding_len + 128)]; let unknown3_part = &buf[(padding_len + 128)..]; if !padding.iter().all(|&v| v == 0) { return Err(DataError::BrokenFbxFooter.into()); } if unknown2 != [0u8; 4] { return Err(DataError::BrokenFbxFooter.into()); } let version = FbxVersion::new(LittleEndian::read_u32(version_buf)); if version != parser.fbx_version() { return Err(DataError::BrokenFbxFooter.into()); } if !zeroes_120.iter().all(|&v| v == 0) { return Err(DataError::BrokenFbxFooter.into()); } const UNKNOWN3_EXPECTED: [u8; 16] = [ 0xf8, 0x5a, 0x8c, 0x6a, 0xde, 0xf5, 0xd9, 0x7e, 0xec, 0xe9, 0x0c, 0xe3, 0x75, 0x8f, 0x29, 0x0b, ]; let mut unknown3 = [0u8; 16]; unknown3[0..unknown3_part.len()].copy_from_slice(unknown3_part); parser .reader() .read_exact(&mut unknown3[unknown3_part.len()..])?; if unknown3 != UNKNOWN3_EXPECTED { return Err(DataError::BrokenFbxFooter.into()); } if padding_len != expected_padding_len { let pos = SyntacticPosition { byte_pos: buf_start_pos, component_byte_pos: start_pos, node_path: Vec::new(), attribute_index: None, }; parser.warn( Warning::InvalidFooterPaddingLength(expected_padding_len, padding_len), pos, )?; } (padding_len, unknown2, version, unknown3) }; Ok(Self { unknown1, padding_len: padding_len as u8, unknown2, fbx_version: version, unknown3, }) } }
use byteorder::{ByteOrder, LittleEndian}; use log::debug; use crate::{ low::FbxVersion, pull_parser::{ error::DataError, v7400::{FromParser, Parser}, Error as ParserError, ParserSource, SyntacticPosition, Warning, }, }; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct FbxFooter { pub unknown1: [u8; 16], pub padding_len: u8, pub unknown2: [u8; 4], pub fbx_version: FbxVersion, pub unknown3: [u8; 16], } impl FromParser for FbxFooter { fn read_from_parser<R>(parser: &mut Parser<R>) -> Result<Self, ParserError> where R: ParserSource, { let start_pos = parser.reader().position(); let unknown1 = { const EXPECTED: [u8; 16] = [ 0xf0, 0xb0, 0xa0, 0x00, 0xd0, 0xc0, 0xd0, 0x60, 0xb0, 0x70, 0xf0, 0x80, 0x10, 0xf0, 0x20, 0x7
}
0, ]; let mut buf = [0u8; 16]; parser.reader().read_exact(&mut buf)?; for (byte, expected) in buf.iter().zip(&EXPECTED) { if (byte & 0xf0) != *expected { let pos = SyntacticPosition { byte_pos: parser.reader().position() - 16, component_byte_pos: start_pos, node_path: Vec::new(), attribute_index: None, }; parser.warn(Warning::UnexpectedFooterFieldValue, pos)?; break; } } buf }; let (padding_len, unknown2, version, unknown3) = { let buf_start_pos = parser.reader().position(); let expected_padding_len = (buf_start_pos.wrapping_neg() & 0x0f) as usize; debug!( "Current position = {}, expected padding length = {}", buf_start_pos, expected_padding_len ); const BUF_LEN: usize = 144; let mut buf = [0u8; BUF_LEN]; parser.reader().read_exact(&mut buf)?; let unknown3_pos = { const SEARCH_OFFSET: usize = BUF_LEN - 16; let pos = (&buf[SEARCH_OFFSET..]) .iter() .position(|&v| v != 0) .ok_or(DataError::BrokenFbxFooter)?; SEARCH_OFFSET + pos }; let padding_len = unknown3_pos & 0x0f; assert!(padding_len < 16); assert_eq!(unknown3_pos, padding_len + 128); let padding = &buf[..padding_len]; let mut unknown2 = [0u8; 4]; unknown2.copy_from_slice(&buf[padding_len..(padding_len + 4)]); let version_buf = &buf[(padding_len + 4)..(padding_len + 8)]; let zeroes_120 = &buf[(padding_len + 8)..(padding_len + 128)]; let unknown3_part = &buf[(padding_len + 128)..]; if !padding.iter().all(|&v| v == 0) { return Err(DataError::BrokenFbxFooter.into()); } if unknown2 != [0u8; 4] { return Err(DataError::BrokenFbxFooter.into()); } let version = FbxVersion::new(LittleEndian::read_u32(version_buf)); if version != parser.fbx_version() { return Err(DataError::BrokenFbxFooter.into()); } if !zeroes_120.iter().all(|&v| v == 0) { return Err(DataError::BrokenFbxFooter.into()); } const UNKNOWN3_EXPECTED: [u8; 16] = [ 0xf8, 0x5a, 0x8c, 0x6a, 0xde, 0xf5, 0xd9, 0x7e, 0xec, 0xe9, 0x0c, 0xe3, 0x75, 0x8f, 0x29, 0x0b, ]; let mut unknown3 = [0u8; 16]; unknown3[0..unknown3_part.len()].copy_from_slice(unknown3_part); 
parser .reader() .read_exact(&mut unknown3[unknown3_part.len()..])?; if unknown3 != UNKNOWN3_EXPECTED { return Err(DataError::BrokenFbxFooter.into()); } if padding_len != expected_padding_len { let pos = SyntacticPosition { byte_pos: buf_start_pos, component_byte_pos: start_pos, node_path: Vec::new(), attribute_index: None, }; parser.warn( Warning::InvalidFooterPaddingLength(expected_padding_len, padding_len), pos, )?; } (padding_len, unknown2, version, unknown3) }; Ok(Self { unknown1, padding_len: padding_len as u8, unknown2, fbx_version: version, unknown3, }) }
function_block-function_prefixed
[ { "content": "/// Loads a tree from the given reader.\n\n///\n\n/// This works for seekable readers (which implement [`std::io::Seek`]), but\n\n/// [`from_seekable_reader`] should be used for them, because it is more\n\n/// efficent.\n\npub fn from_reader<R: Read>(mut reader: R) -> Result<AnyParser<PlainSource<R>>> {\n\n let header = FbxHeader::load(&mut reader)?;\n\n match parser_version(header)? {\n\n ParserVersion::V7400 => {\n\n let parser = pull_parser::v7400::from_reader(header, reader).unwrap_or_else(|e| {\n\n panic!(\n\n \"Should never fail: FBX version {:?} should be supported by v7400 parser: {}\",\n\n header.version(),\n\n e\n\n )\n\n });\n\n Ok(AnyParser::V7400(parser))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/pull_parser/any.rs", "rank": 0, "score": 217869.5944579377 }, { "content": "/// Loads a tree from the given seekable reader.\n\npub fn from_seekable_reader<R: Read + Seek>(mut reader: R) -> Result<AnyParser<SeekableSource<R>>> {\n\n let header = FbxHeader::load(&mut reader)?;\n\n match parser_version(header)? 
{\n\n ParserVersion::V7400 => {\n\n let parser =\n\n pull_parser::v7400::from_seekable_reader(header, reader).unwrap_or_else(|e| {\n\n panic!(\n\n \"Should never fail: FBX version {:?} should be supported by v7400 parser: {}\",\n\n header.version(),\n\n e\n\n )\n\n });\n\n Ok(AnyParser::V7400(parser))\n\n }\n\n }\n\n}\n", "file_path": "src/pull_parser/any.rs", "rank": 1, "score": 210065.9081273725 }, { "content": "/// Creates a new [`Parser`] from the given reader.\n\n///\n\n/// Returns an error if the given FBX version in unsupported.\n\npub fn from_reader<R>(header: FbxHeader, reader: R) -> Result<Parser<PlainSource<R>>>\n\nwhere\n\n R: io::Read,\n\n{\n\n Parser::create(\n\n header.version(),\n\n PlainSource::with_offset(reader, header.len()),\n\n )\n\n}\n\n\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 2, "score": 187390.97917642148 }, { "content": "pub fn expect_node_end<R: ParserSource + std::fmt::Debug>(\n\n parser: &mut Parser<R>,\n\n) -> Result<(), Box<dyn std::error::Error>> {\n\n match parser.next_event()? {\n\n Event::EndNode => Ok(()),\n\n ev => panic!(\"Unexpected event: {:?}\", ev),\n\n }\n\n}\n\n\n", "file_path": "tests/v7400/writer.rs", "rank": 3, "score": 187048.52975247448 }, { "content": "pub fn expect_fbx_end<R: ParserSource + std::fmt::Debug>(\n\n parser: &mut Parser<R>,\n\n) -> Result<Result<Box<fbxcel::low::v7400::FbxFooter>, ParseError>, Box<dyn std::error::Error>> {\n\n match parser.next_event()? 
{\n\n Event::EndFbx(footer_res) => Ok(footer_res),\n\n ev => panic!(\"Unexpected event: {:?}\", ev),\n\n }\n\n}\n", "file_path": "tests/v7400/writer.rs", "rank": 4, "score": 187048.52975247448 }, { "content": "/// Creates a new [`Parser`] from the given seekable reader.\n\n///\n\n/// Returns an error if the given FBX version in unsupported.\n\npub fn from_seekable_reader<R>(header: FbxHeader, reader: R) -> Result<Parser<SeekableSource<R>>>\n\nwhere\n\n R: io::Read + io::Seek,\n\n{\n\n Parser::create(\n\n header.version(),\n\n SeekableSource::with_offset(reader, header.len()),\n\n )\n\n}\n\n\n\n/// Pull parser for FBX 7.4 binary or compatible later versions.\n\npub struct Parser<R> {\n\n /// Parser state.\n\n state: State,\n\n /// Reader.\n\n reader: R,\n\n /// Warning handler.\n\n warning_handler: Option<WarningHandler>,\n\n}\n\n\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 5, "score": 185432.77436950803 }, { "content": "pub fn expect_node_start<'a, R: ParserSource + std::fmt::Debug>(\n\n parser: &'a mut Parser<R>,\n\n name: &str,\n\n) -> Result<Attributes<'a, R>, Box<dyn std::error::Error>> {\n\n match parser.next_event()? 
{\n\n Event::StartNode(node) => {\n\n assert_eq!(node.name(), name);\n\n Ok(node.attributes())\n\n }\n\n ev => panic!(\"Unexpected event: {:?}\", ev),\n\n }\n\n}\n\n\n", "file_path": "tests/v7400/writer.rs", "rank": 6, "score": 184678.3519803745 }, { "content": "fn dump_fbx_7400<R: pull_parser::ParserSource>(\n\n mut parser: pull_parser::v7400::Parser<R>,\n\n) -> pull_parser::Result<()> {\n\n let mut depth = 0;\n\n\n\n /// Dump format of node attributes.\n\n enum AttrsDumpFormat {\n\n /// Type only.\n\n Type,\n\n /// Value for primitive types, length for array, binary, and string.\n\n Length,\n\n /// Values for all types.\n\n ///\n\n /// Not recommended because the output might be quite large.\n\n Full,\n\n }\n\n\n\n let attrs_dump_format = match std::env::var(\"DUMP_ATTRIBUTES\").as_ref().map(AsRef::as_ref) {\n\n Ok(\"length\") => AttrsDumpFormat::Length,\n\n Ok(\"full\") => AttrsDumpFormat::Full,\n", "file_path": "examples/dump-pull-parser-events.rs", "rank": 7, "score": 134335.16207733745 }, { "content": "/// Warning handler type.\n\ntype WarningHandler = Box<dyn FnMut(Warning, &SyntacticPosition) -> Result<()>>;\n\n\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 8, "score": 131665.30255877902 }, { "content": "fn dump_v7400_attributes_length<R>(\n\n depth: usize,\n\n mut attrs: pull_parser::v7400::Attributes<'_, R>,\n\n) -> pull_parser::Result<()>\n\nwhere\n\n R: pull_parser::ParserSource,\n\n{\n\n use fbxcel::{\n\n low::v7400::AttributeValue, pull_parser::v7400::attribute::loaders::DirectLoader,\n\n };\n\n\n\n while let Some(attr) = attrs.load_next(DirectLoader)? 
{\n\n let type_ = attr.type_();\n\n indent(depth);\n\n match attr {\n\n AttributeValue::Bool(_) => println!(\"Attribute: {:?}\", attr),\n\n AttributeValue::I16(_) => println!(\"Attribute: {:?}\", attr),\n\n AttributeValue::I32(_) => println!(\"Attribute: {:?}\", attr),\n\n AttributeValue::I64(_) => println!(\"Attribute: {:?}\", attr),\n\n AttributeValue::F32(_) => println!(\"Attribute: {:?}\", attr),\n", "file_path": "examples/dump-pull-parser-events.rs", "rank": 9, "score": 131349.87920671643 }, { "content": "fn dump_v7400_attributes_type<R>(\n\n depth: usize,\n\n mut attrs: pull_parser::v7400::Attributes<'_, R>,\n\n) -> pull_parser::Result<()>\n\nwhere\n\n R: pull_parser::ParserSource,\n\n{\n\n use self::pull_parser::v7400::attribute::loaders::TypeLoader;\n\n\n\n while let Some(type_) = attrs.load_next(TypeLoader).unwrap() {\n\n indent(depth);\n\n println!(\"Attribute: {:?}\", type_);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/dump-pull-parser-events.rs", "rank": 10, "score": 131349.87920671643 }, { "content": "fn dump_v7400_attributes_full<R>(\n\n depth: usize,\n\n mut attrs: pull_parser::v7400::Attributes<'_, R>,\n\n) -> pull_parser::Result<()>\n\nwhere\n\n R: pull_parser::ParserSource,\n\n{\n\n use fbxcel::{\n\n low::v7400::AttributeValue, pull_parser::v7400::attribute::loaders::DirectLoader,\n\n };\n\n\n\n while let Some(attr) = attrs.load_next(DirectLoader)? 
{\n\n let type_ = attr.type_();\n\n indent(depth);\n\n match attr {\n\n AttributeValue::Bool(_) => println!(\"Attribute: {:?}\", attr),\n\n AttributeValue::I16(_) => println!(\"Attribute: {:?}\", attr),\n\n AttributeValue::I32(_) => println!(\"Attribute: {:?}\", attr),\n\n AttributeValue::I64(_) => println!(\"Attribute: {:?}\", attr),\n\n AttributeValue::F32(_) => println!(\"Attribute: {:?}\", attr),\n", "file_path": "examples/dump-pull-parser-events.rs", "rank": 11, "score": 131349.87920671643 }, { "content": "#[derive(Debug)]\n\nstruct Repr {\n\n /// Error.\n\n error: ErrorContainer,\n\n /// Syntactic position.\n\n position: Option<SyntacticPosition>,\n\n}\n\n\n\nimpl Repr {\n\n /// Creates a new `Repr`.\n\n pub(crate) fn new(error: ErrorContainer) -> Self {\n\n Self {\n\n error,\n\n position: None,\n\n }\n\n }\n\n\n\n /// Creates a new `Repr` with the given syntactic position info.\n\n pub(crate) fn with_position(error: ErrorContainer, position: SyntacticPosition) -> Self {\n\n Self {\n\n error,\n", "file_path": "src/pull_parser/error.rs", "rank": 12, "score": 129822.0780414129 }, { "content": "/// Loads the next attrbute.\n\nfn load_next<R, V>(\n\n attributes: &mut Attributes<'_, R>,\n\n loaders: &mut impl Iterator<Item = V>,\n\n) -> Option<Result<V::Output>>\n\nwhere\n\n R: ParserSource,\n\n V: LoadAttribute,\n\n{\n\n let loader = loaders.next()?;\n\n attributes.load_next(loader).transpose()\n\n}\n\n\n", "file_path": "src/pull_parser/v7400/attribute/iter.rs", "rank": 13, "score": 128125.03309600256 }, { "content": "/// Loads the next attrbute with buffered I/O.\n\nfn load_next_buffered<R, V>(\n\n attributes: &mut Attributes<'_, R>,\n\n loaders: &mut impl Iterator<Item = V>,\n\n) -> Option<Result<V::Output>>\n\nwhere\n\n R: ParserSource + io::BufRead,\n\n V: LoadAttribute,\n\n{\n\n let loader = loaders.next()?;\n\n attributes.load_next(loader).transpose()\n\n}\n\n\n\n/// Node attributes iterator.\n\n#[derive(Debug)]\n\npub struct BorrowedIter<'a, 'r, R, I> 
{\n\n /// Attributes.\n\n attributes: &'a mut Attributes<'r, R>,\n\n /// Loaders.\n\n loaders: I,\n\n}\n", "file_path": "src/pull_parser/v7400/attribute/iter.rs", "rank": 14, "score": 125863.35512168238 }, { "content": "/// Creates size hint from the given attributes and loaders.\n\nfn make_size_hint_for_attrs<R, V>(\n\n attributes: &Attributes<'_, R>,\n\n loaders: &impl Iterator<Item = V>,\n\n) -> (usize, Option<usize>)\n\nwhere\n\n R: ParserSource,\n\n V: LoadAttribute,\n\n{\n\n let (loaders_min, loaders_max) = loaders.size_hint();\n\n let attrs_rest = attributes.rest_count() as usize;\n\n let min = std::cmp::min(attrs_rest, loaders_min);\n\n let max = loaders_max.map_or_else(usize::max_value, |v| std::cmp::min(attrs_rest, v));\n\n\n\n (min, Some(max))\n\n}\n\n\n", "file_path": "src/pull_parser/v7400/attribute/iter.rs", "rank": 15, "score": 123708.24836081185 }, { "content": "pub fn main() {\n\n env_logger::init();\n\n\n\n let path = match std::env::args_os().nth(1) {\n\n Some(v) => PathBuf::from(v),\n\n None => {\n\n eprintln!(\"Usage: load-tree <FBX_FILE>\");\n\n std::process::exit(1);\n\n }\n\n };\n\n let file = File::open(path).expect(\"Failed to open file\");\n\n let reader = BufReader::new(file);\n\n\n\n match AnyTree::from_seekable_reader(reader).expect(\"Failed to load tree\") {\n\n AnyTree::V7400(fbx_version, tree, footer) => {\n\n println!(\"FBX version = {:#?}\", fbx_version);\n\n println!(\"tree = {:#?}\", tree);\n\n println!(\"footer = {:#?}\", footer);\n\n }\n\n _ => panic!(\"FBX version unsupported by this example\"),\n\n }\n\n}\n", "file_path": "examples/load-tree.rs", "rank": 16, "score": 111730.45482113774 }, { "content": " /// Footer padding length is invalid.\n\n InvalidFooterPaddingLength(usize, usize),\n\n /// Missing a node end marker where the marker is expected.\n\n MissingNodeEndMarker,\n\n /// Unexpected value for footer fields (mainly for unknown fields).\n\n UnexpectedFooterFieldValue,\n\n}\n\n\n\nimpl error::Error for Warning 
{}\n\n\n\nimpl fmt::Display for Warning {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Warning::EmptyNodeName => write!(f, \"Node name is empty\"),\n\n Warning::ExtraNodeEndMarker => write!(f, \"Extra (unexpected) node end marker found\"),\n\n Warning::IncorrectBooleanRepresentation => {\n\n write!(f, \"Incorrect boolean representation\")\n\n }\n\n Warning::InvalidFooterPaddingLength(expected, got) => write!(\n\n f,\n\n \"Invalid footer padding length: expected {} bytes, got {} bytes\",\n\n expected, got\n\n ),\n\n Warning::MissingNodeEndMarker => write!(f, \"Missing node end marker\"),\n\n Warning::UnexpectedFooterFieldValue => write!(f, \"Unexpected footer field value\"),\n\n }\n\n }\n\n}\n", "file_path": "src/pull_parser/error/warning.rs", "rank": 17, "score": 106946.97545177699 }, { "content": "//! Invalid operation.\n\n\n\nuse std::{error, fmt};\n\n\n\n/// Warning.\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub enum Warning {\n\n /// Node name is empty.\n\n EmptyNodeName,\n\n /// Extra (unexpected) node end marker found.\n\n ExtraNodeEndMarker,\n\n /// Incorrect boolean representation.\n\n ///\n\n /// Boolean value in node attributes should be some prescribed value\n\n /// (for example `b'T'` and `b'Y'` for FBX 7.4).\n\n /// Official SDK and tools may emit those values correctly, but some\n\n /// third-party exporters emits them wrongly with `0x00` and `0x01`, and\n\n /// those will be ignored by official SDK and tools.\n\n IncorrectBooleanRepresentation,\n", "file_path": "src/pull_parser/error/warning.rs", "rank": 18, "score": 106941.86511262774 }, { "content": "#[test]\n\nfn tree_write_v7500() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut dest = Vec::new();\n\n let cursor = Cursor::new(&mut dest);\n\n let mut writer = Writer::new(cursor, FbxVersion::V7_5)?;\n\n write_v7400_binary!(\n\n writer=writer,\n\n tree={\n\n Node0: {\n\n Node0_0: {},\n\n Node0_1: {},\n\n },\n\n Node1: [true] {\n\n Node1_0: 
(vec![42i32.into(), 1.234f64.into()]) {}\n\n Node1_1: [&[1u8, 2, 4, 8, 16][..], \"Hello, world\"] {}\n\n },\n\n },\n\n )?;\n\n writer.finalize_and_flush(&Default::default())?;\n\n\n\n let mut parser = match from_seekable_reader(Cursor::new(dest))? {\n", "file_path": "tests/write-and-parse-v7400-binary.rs", "rank": 19, "score": 100987.21559385872 }, { "content": "#[test]\n\nfn macro_v7400_idempotence() -> Result<(), Box<dyn std::error::Error>> {\n\n let version = FbxVersion::V7_4;\n\n let mut writer = Writer::new(std::io::Cursor::new(Vec::new()), version)?;\n\n\n\n write_v7400_binary!(\n\n writer=writer,\n\n tree={\n\n Node0: {\n\n Node0_0: {},\n\n Node0_1: {},\n\n },\n\n Node1: [true] {\n\n Node1_0: (vec![42i32.into(), 1.234f64.into()]) {}\n\n Node1_1: [&[1u8, 2, 4, 8, 16][..], \"Hello, world\"] {}\n\n },\n\n },\n\n )?;\n\n let bin = writer.finalize_and_flush(&Default::default())?.into_inner();\n\n\n\n let mut parser = match from_seekable_reader(Cursor::new(bin))? {\n", "file_path": "tests/write-and-parse-v7400-binary.rs", "rank": 20, "score": 100987.21559385872 }, { "content": "#[test]\n\nfn empty_write_v7400() -> Result<(), Box<dyn std::error::Error>> {\n\n let mut dest = Vec::new();\n\n let cursor = Cursor::new(&mut dest);\n\n let writer = Writer::new(cursor, FbxVersion::V7_4)?;\n\n let footer = FbxFooter {\n\n unknown1: Some(&CUSTOM_UNKNOWN1),\n\n padding_len: Default::default(),\n\n unknown2: None,\n\n unknown3: None,\n\n };\n\n writer.finalize_and_flush(&footer)?;\n\n\n\n let expected = {\n\n let raw_ver = 7400u32;\n\n let mut vec = Vec::new();\n\n // Header.\n\n {\n\n // Magic.\n\n vec.extend(MAGIC);\n\n // Version.\n", "file_path": "tests/write-and-parse-v7400-binary.rs", "rank": 21, "score": 100987.21559385872 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\nstruct State {\n\n /// Target FBX version.\n\n fbx_version: FbxVersion,\n\n /// Health of the parser.\n\n health: Health,\n\n /// Started nodes stack.\n\n ///\n\n /// This stack should 
not have an entry for implicit root node.\n\n started_nodes: Vec<StartedNode>,\n\n /// Last event kind.\n\n last_event_kind: Option<EventKind>,\n\n /// Number of known top-level nodes.\n\n ///\n\n /// This is here because [`StartedNode`] is not used for implicit root node.\n\n known_toplevel_nodes_count: usize,\n\n}\n\n\n\nimpl State {\n\n /// Creates a new `State` for the given FBX version.\n\n fn new(fbx_version: FbxVersion) -> Self {\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 22, "score": 99597.72699750832 }, { "content": "#[test]\n\nfn extra_node_end_marker() -> Result<(), Box<dyn std::error::Error>> {\n\n let data = {\n\n let raw_ver = 7400_u32;\n\n let mut vec = Vec::new();\n\n // Header.\n\n {\n\n // Magic.\n\n vec.extend(MAGIC);\n\n // Version.\n\n vec.extend(&raw_ver.to_le_bytes());\n\n }\n\n // Nodes.\n\n {\n\n // Container node.\n\n {\n\n const CONTAINER: &[u8] = b\"Container\";\n\n let container_start = vec.len();\n\n // End offset.\n\n vec.extend(&[0; 4]);\n\n // Number of node properties.\n", "file_path": "tests/missing-and-extra-node-end-marker.rs", "rank": 23, "score": 98589.00694389759 }, { "content": "#[test]\n\nfn missing_node_end_marker() -> Result<(), Box<dyn std::error::Error>> {\n\n let data = {\n\n let raw_ver = 7400_u32;\n\n let mut vec = Vec::new();\n\n // Header.\n\n {\n\n // Magic.\n\n vec.extend(MAGIC);\n\n // Version.\n\n vec.extend(&raw_ver.to_le_bytes());\n\n }\n\n // Nodes.\n\n {\n\n // Container node.\n\n {\n\n const CONTAINER: &[u8] = b\"Container\";\n\n let container_start = vec.len();\n\n // End offset.\n\n vec.extend(&[0; 4]);\n\n // Number of node properties.\n", "file_path": "tests/missing-and-extra-node-end-marker.rs", "rank": 24, "score": 98589.00694389759 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\nstruct StartedNode {\n\n /// Start offset of the node attribute.\n\n node_start_offset: u64,\n\n /// End offset of the node.\n\n ///\n\n /// \"End offset\" means a next byte of the last byte 
of the last node.\n\n node_end_offset: u64,\n\n /// Number of node attributes.\n\n attributes_count: u64,\n\n /// End offset of the previous attribute.\n\n ///\n\n /// \"End offset\" means a next byte of the last byte of the last attribute.\n\n attributes_end_offset: u64,\n\n /// Node name.\n\n name: String,\n\n /// Number of known children.\n\n known_children_count: usize,\n\n}\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 25, "score": 97952.14365633222 }, { "content": "#[test]\n\nfn tree_write_parse_idempotence_v7500() -> Result<(), Box<dyn std::error::Error>> {\n\n // Construct tree.\n\n let tree1 = tree_v7400! {\n\n Node0: {\n\n Node0_0: {},\n\n Node0_1: {},\n\n },\n\n Node1: [true] {\n\n Node1_0: (vec![42i32.into(), 1.234f64.into()]) {}\n\n Node1_1: [&[1u8, 2, 4, 8, 16][..], \"Hello, world\"] {}\n\n },\n\n };\n\n\n\n let mut writer = Writer::new(Cursor::new(Vec::new()), FbxVersion::V7_5)?;\n\n writer.write_tree(&tree1)?;\n\n let bin = writer.finalize_and_flush(&Default::default())?.into_inner();\n\n\n\n let mut parser = match from_seekable_reader(Cursor::new(bin))? 
{\n\n AnyParser::V7400(parser) => parser,\n\n _ => panic!(\"Generated data should be parsable with v7400 parser\"),\n", "file_path": "tests/write-tree-and-parse-v7400-binary.rs", "rank": 26, "score": 97454.34845941793 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n let path = match std::env::args_os().nth(1) {\n\n Some(v) => PathBuf::from(v),\n\n None => {\n\n eprintln!(\"Usage: dump-pull-parser-events <FBX_FILE>\");\n\n std::process::exit(1);\n\n }\n\n };\n\n let file = File::open(path).expect(\"Failed to open file\");\n\n let reader = BufReader::new(file);\n\n\n\n match from_seekable_reader(reader).expect(\"Failed to create parser\") {\n\n AnyParser::V7400(mut parser) => {\n\n let version = parser.fbx_version();\n\n println!(\"FBX version: {}.{}\", version.major(), version.minor());\n\n parser.set_warning_handler(|w, pos| {\n\n eprintln!(\"WARNING: {} (pos={:?})\", w, pos);\n\n Ok(())\n\n });\n\n dump_fbx_7400(parser).expect(\"Failed to parse FBX file\");\n\n }\n\n parser => panic!(\n\n \"Unsupported by this example: fbx_version={:?}\",\n\n parser.fbx_version()\n\n ),\n\n }\n\n}\n\n\n", "file_path": "examples/dump-pull-parser-events.rs", "rank": 27, "score": 87843.35025947406 }, { "content": "/// Returns the parser version for the FBX data.\n\nfn parser_version(header: FbxHeader) -> Result<ParserVersion> {\n\n header\n\n .parser_version()\n\n .ok_or_else(|| Error::UnsupportedVersion(header.version()))\n\n}\n\n\n", "file_path": "src/pull_parser/any.rs", "rank": 28, "score": 87022.1285969497 }, { "content": "/// A trait for types which can be data sources.\n\n///\n\n/// Users can implement this manually, but usually it is enough to use wrappers\n\n/// in the [`reader`][`self`] module.\n\npub trait ParserSource: Sized + io::Read {\n\n /// Returns the offset of a byte which would be read next.\n\n ///\n\n /// This is called many times during parsing, so it is desirable to be fast\n\n /// as possible.\n\n ///\n\n /// Reader types with [`std::io::Seek`] 
can implement this as\n\n /// `self.stream_position().unwrap()`, but this is fallible and\n\n /// can be inefficient.\n\n /// Use of [`PositionCacheReader`] is reccomended.\n\n fn position(&self) -> u64;\n\n\n\n /// Skips (seeks formward) the given size.\n\n ///\n\n /// Reader types can make this more efficient using [`std::io::Seek::seek`]\n\n /// if possible.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n", "file_path": "src/pull_parser/reader.rs", "rank": 29, "score": 86348.31283550733 }, { "content": "fn indent(depth: usize) {\n\n print!(\"{:depth$}\", \"\", depth = depth * 4);\n\n}\n\n\n", "file_path": "examples/dump-pull-parser-events.rs", "rank": 30, "score": 80927.5339766905 }, { "content": "/// A trait for attribute loader types.\n\n///\n\n/// This is a lot like a \"visitor\", but node attributes do not have recursive\n\n/// structures, so this loader is not \"visitor\".\n\n///\n\n/// The `load_*` method corresponding to the node attribute type are called with\n\n/// its value.\n\n///\n\n/// All of `load_*` has default implementation to return error as \"unexpected\n\n/// attribute\".\n\n/// Users should implement them manually for types they want to interpret.\n\n///\n\n/// For simple types, [`pull_parser::v7400::attribute::loaders`][`super::loaders`] module contains\n\n/// useful loaders.\n\npub trait LoadAttribute: Sized + fmt::Debug {\n\n /// Result type on successful read.\n\n type Output;\n\n\n\n /// Describes the expecting value.\n\n fn expecting(&self) -> String;\n\n\n\n /// Loads boolean value.\n\n fn load_bool(self, _: bool) -> Result<Self::Output> {\n\n Err(DataError::UnexpectedAttribute(self.expecting(), \"boolean\".into()).into())\n\n }\n\n\n\n /// Loads `i16` value.\n\n fn load_i16(self, _: i16) -> Result<Self::Output> {\n\n Err(DataError::UnexpectedAttribute(self.expecting(), \"i16\".into()).into())\n\n }\n\n\n\n /// Loads `i32` value.\n\n fn load_i32(self, _: i32) -> Result<Self::Output> {\n\n 
Err(DataError::UnexpectedAttribute(self.expecting(), \"i32\".into()).into())\n", "file_path": "src/pull_parser/v7400/attribute/loader.rs", "rank": 31, "score": 74704.71337249318 }, { "content": "//! Errors and result types.\n\n//!\n\n//! Types in this module will be used among multiple versions of parsers.\n\n\n\nuse std::{error, fmt, io};\n\n\n\nuse crate::pull_parser::SyntacticPosition;\n\n\n\npub use self::{\n\n data::{Compression, DataError},\n\n operation::OperationError,\n\n warning::Warning,\n\n};\n\n\n\nmod data;\n\nmod operation;\n\nmod warning;\n\n\n\n/// Parsing result.\n\npub type Result<T> = std::result::Result<T, Error>;\n", "file_path": "src/pull_parser/error.rs", "rank": 32, "score": 71980.90243642719 }, { "content": "//! Error and result types for `pull_parser::any` module.\n\n\n\nuse std::{error, fmt};\n\n\n\nuse crate::low::{FbxVersion, HeaderError};\n\n\n\n/// AnyTree load result.\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n/// Error.\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub enum Error {\n\n /// Header error.\n\n Header(HeaderError),\n\n /// Unsupported version.\n\n UnsupportedVersion(FbxVersion),\n\n}\n\n\n\nimpl error::Error for Error {\n", "file_path": "src/pull_parser/any/error.rs", "rank": 33, "score": 71977.97230509153 }, { "content": " position: Some(position),\n\n }\n\n }\n\n}\n\n\n\n/// Error kind for parsing errors.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum ErrorKind {\n\n /// Invalid data.\n\n ///\n\n /// With this error kind, the inner error must be [`DataError`].\n\n ///\n\n /// [`DataError`]: enum.DataError.html\n\n Data,\n\n /// I/O error.\n\n ///\n\n /// With this error kind, the inner error must be [`std::io::Error`].\n\n ///\n\n /// [`std::io::Error`]:\n\n /// https://doc.rust-lang.org/stable/std/io/struct.Error.html\n", "file_path": "src/pull_parser/error.rs", "rank": 34, "score": 71977.08090100574 }, { "content": " pub fn as_error(&self) -> &(dyn 'static + error::Error) {\n\n 
match self {\n\n ErrorContainer::Data(e) => e,\n\n ErrorContainer::Io(e) => e,\n\n ErrorContainer::Operation(e) => e,\n\n ErrorContainer::Warning(e) => e,\n\n }\n\n }\n\n}\n\n\n\nimpl error::Error for ErrorContainer {\n\n fn source(&self) -> Option<&(dyn error::Error + 'static)> {\n\n Some(self.as_error())\n\n }\n\n}\n\n\n\nimpl fmt::Display for ErrorContainer {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n ErrorContainer::Data(e) => write!(f, \"Data error: {}\", e),\n", "file_path": "src/pull_parser/error.rs", "rank": 35, "score": 71974.35545955313 }, { "content": " /// I/O error.\n\n Io(io::Error),\n\n /// Invalid operation.\n\n Operation(OperationError),\n\n /// Critical warning.\n\n Warning(Warning),\n\n}\n\n\n\nimpl ErrorContainer {\n\n /// Returns the error kind of the error.\n\n pub fn kind(&self) -> ErrorKind {\n\n match self {\n\n ErrorContainer::Data(_) => ErrorKind::Data,\n\n ErrorContainer::Io(_) => ErrorKind::Io,\n\n ErrorContainer::Operation(_) => ErrorKind::Operation,\n\n ErrorContainer::Warning(_) => ErrorKind::Warning,\n\n }\n\n }\n\n\n\n /// Returns `&dyn std::error::Error`.\n", "file_path": "src/pull_parser/error.rs", "rank": 36, "score": 71972.38797764717 }, { "content": " pub fn downcast_ref<T: 'static + error::Error>(&self) -> Option<&T> {\n\n self.repr.error.as_error().downcast_ref::<T>()\n\n }\n\n\n\n /// Returns the syntactic position if available.\n\n pub fn position(&self) -> Option<&SyntacticPosition> {\n\n self.repr.position.as_ref()\n\n }\n\n\n\n /// Creates a new `Error` with the given syntactic position info.\n\n pub(crate) fn with_position(error: ErrorContainer, position: SyntacticPosition) -> Self {\n\n Self {\n\n repr: Box::new(Repr::with_position(error, position)),\n\n }\n\n }\n\n\n\n /// Sets the syntactic position and returns the new error.\n\n pub(crate) fn and_position(mut self, position: SyntacticPosition) -> Self {\n\n self.repr.position = Some(position);\n\n self\n", "file_path": 
"src/pull_parser/error.rs", "rank": 37, "score": 71971.97525643469 }, { "content": " fn from(e: OperationError) -> Self {\n\n ErrorContainer::Operation(e)\n\n }\n\n}\n\n\n\nimpl From<Warning> for ErrorContainer {\n\n fn from(e: Warning) -> Self {\n\n ErrorContainer::Warning(e)\n\n }\n\n}\n", "file_path": "src/pull_parser/error.rs", "rank": 38, "score": 71970.71280424132 }, { "content": " ErrorContainer::Io(e) => write!(f, \"I/O error: {}\", e),\n\n ErrorContainer::Operation(e) => write!(f, \"Invalid operation: {}\", e),\n\n ErrorContainer::Warning(e) => write!(f, \"Warning considered critical: {}\", e),\n\n }\n\n }\n\n}\n\n\n\nimpl From<io::Error> for ErrorContainer {\n\n fn from(e: io::Error) -> Self {\n\n ErrorContainer::Io(e)\n\n }\n\n}\n\n\n\nimpl From<DataError> for ErrorContainer {\n\n fn from(e: DataError) -> Self {\n\n ErrorContainer::Data(e)\n\n }\n\n}\n\n\n\nimpl From<OperationError> for ErrorContainer {\n", "file_path": "src/pull_parser/error.rs", "rank": 39, "score": 71970.47264409218 }, { "content": " Io,\n\n /// Invalid operation.\n\n ///\n\n /// With this error kind, the inner error must be [`OperationError`].\n\n ///\n\n /// [`OperationError`]: enum.OperationError.html\n\n Operation,\n\n /// Critical warning.\n\n ///\n\n /// With this error kind, the inner error must be [`Warning`].\n\n ///\n\n /// [`Warning`]: enum.Warning.html\n\n Warning,\n\n}\n\n\n\n/// Parsing error container.\n\n#[derive(Debug)]\n\npub enum ErrorContainer {\n\n /// Invalid data.\n\n Data(DataError),\n", "file_path": "src/pull_parser/error.rs", "rank": 40, "score": 71970.09579905054 }, { "content": "\n\n/// Parsing error.\n\n#[derive(Debug)]\n\npub struct Error {\n\n /// The real error.\n\n repr: Box<Repr>,\n\n}\n\n\n\nimpl Error {\n\n /// Returns the error kind.\n\n pub fn kind(&self) -> ErrorKind {\n\n self.repr.error.kind()\n\n }\n\n\n\n /// Returns a reference to the inner error container.\n\n pub fn get_ref(&self) -> &ErrorContainer {\n\n &self.repr.error\n\n }\n\n\n\n /// 
Returns a reference to the inner error if the type matches.\n", "file_path": "src/pull_parser/error.rs", "rank": 41, "score": 71969.44769635114 }, { "content": " }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n self.repr.error.fmt(f)\n\n }\n\n}\n\n\n\nimpl error::Error for Error {\n\n fn source(&self) -> Option<&(dyn error::Error + 'static)> {\n\n self.repr.error.source()\n\n }\n\n}\n\n\n\nimpl<T> From<T> for Error\n\nwhere\n\n T: Into<ErrorContainer>,\n\n{\n\n fn from(e: T) -> Self {\n\n Error {\n\n repr: Box::new(Repr::new(e.into())),\n\n }\n\n }\n\n}\n\n\n\n/// Internal representation of parsing error.\n\n#[derive(Debug)]\n", "file_path": "src/pull_parser/error.rs", "rank": 42, "score": 71969.20703857054 }, { "content": " fn source(&self) -> Option<&(dyn error::Error + 'static)> {\n\n match self {\n\n Error::Header(e) => Some(e),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Error::Header(e) => write!(f, \"FBX header error: {}\", e),\n\n Error::UnsupportedVersion(ver) => write!(f, \"Unsupported FBX version: {:?}\", ver),\n\n }\n\n }\n\n}\n\n\n\nimpl From<HeaderError> for Error {\n\n fn from(e: HeaderError) -> Self {\n\n Error::Header(e)\n\n }\n\n}\n", "file_path": "src/pull_parser/any/error.rs", "rank": 43, "score": 71968.80392756607 }, { "content": " f,\n\n \"Node ends with unexpected position: expected {}, got {:?}\",\n\n expected, got\n\n ),\n\n DataError::UnexpectedAttribute(expected, got) => write!(\n\n f,\n\n \"Unexpected attribute value or type: expected {}, got {}\",\n\n expected, got\n\n ),\n\n }\n\n }\n\n}\n\n\n\n/// Compression format or algorithm.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum Compression {\n\n /// ZLIB compression.\n\n Zlib,\n\n}\n", "file_path": "src/pull_parser/error/data.rs", "rank": 44, "score": 69951.5479124988 }, { "content": "//! 
Invalid operation.\n\n\n\nuse std::{error, fmt};\n\n\n\nuse crate::{low::FbxVersion, pull_parser::ParserVersion};\n\n\n\n/// Invalid operation.\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub enum OperationError {\n\n /// Attempt to parse more data while the parsing is aborted.\n\n AlreadyAborted,\n\n /// Attempt to parse more data while the parsing is (successfully) finished.\n\n AlreadyFinished,\n\n /// Attempt to create a parser with unsupported FBX version.\n\n UnsupportedFbxVersion(ParserVersion, FbxVersion),\n\n}\n\n\n\nimpl error::Error for OperationError {}\n\n\n", "file_path": "src/pull_parser/error/operation.rs", "rank": 45, "score": 69947.23280610067 }, { "content": "impl fmt::Display for OperationError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n OperationError::AlreadyAborted => {\n\n write!(f, \"Attempt to parse more data while the parsing is aborted\")\n\n }\n\n OperationError::AlreadyFinished => write!(\n\n f,\n\n \"Attempt to parse more data while the parsing is successfully finished\"\n\n ),\n\n OperationError::UnsupportedFbxVersion(parser, fbx) => write!(\n\n f,\n\n \"Unsupported FBX version: parser={:?}, fbx={:?}\",\n\n parser, fbx\n\n ),\n\n }\n\n }\n\n}\n", "file_path": "src/pull_parser/error/operation.rs", "rank": 46, "score": 69941.09233189413 }, { "content": " /// position will be `None`.\n\n NodeLengthMismatch(u64, Option<u64>),\n\n /// Unexpected attribute value or type.\n\n ///\n\n /// The former is the expected, the latter is a description of the actual value.\n\n UnexpectedAttribute(String, String),\n\n}\n\n\n\nimpl error::Error for DataError {\n\n fn source(&self) -> Option<&(dyn error::Error + 'static)> {\n\n match self {\n\n DataError::BrokenCompression(_, e) => Some(e.as_ref()),\n\n DataError::InvalidNodeNameEncoding(e) => Some(e),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for DataError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": 
"src/pull_parser/error/data.rs", "rank": 47, "score": 69940.67860173703 }, { "content": " ///\n\n /// The `u8` is the code the parser got.\n\n InvalidAttributeTypeCode(u8),\n\n /// Invalid node name encoding.\n\n ///\n\n /// This error indicates that the node name is non-valid UTF-8.\n\n InvalidNodeNameEncoding(FromUtf8Error),\n\n /// Node attribute error.\n\n ///\n\n /// This error indicates that some error happened while reading node\n\n /// attributes.\n\n NodeAttributeError,\n\n /// Node length mismatch.\n\n ///\n\n /// This error indicates that a node ends at the position which differs from\n\n /// the offset declared at the header.\n\n ///\n\n /// The former `u64` is expected position, the latter `Option<u64>` is the\n\n /// actual position the node ends.\n\n /// If the error is detected before the node actually ends, the actual\n", "file_path": "src/pull_parser/error/data.rs", "rank": 48, "score": 69940.43829462759 }, { "content": "//! Data error.\n\n//!\n\n//! This is mainly syntax and low-level structure error.\n\n\n\nuse std::{error, fmt, string::FromUtf8Error};\n\n\n\n/// Data error.\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub enum DataError {\n\n /// Data with broken compression.\n\n BrokenCompression(Compression, Box<dyn std::error::Error + Send + Sync>),\n\n /// FBX footer is broken.\n\n ///\n\n /// Detail is not available because the footer may contain variable length\n\n /// field, and it is hard to identify what is actually broken.\n\n BrokenFbxFooter,\n\n /// Got an unknown array attribute encoding.\n\n InvalidArrayAttributeEncoding(u32),\n\n /// Invalid node attribute type code.\n", "file_path": "src/pull_parser/error/data.rs", "rank": 49, "score": 69936.58909764048 }, { "content": " match self {\n\n DataError::BrokenFbxFooter => write!(f, \"FBX footer is broken\"),\n\n DataError::BrokenCompression(codec, e) => write!(\n\n f,\n\n \"Data with broken compression (codec={:?}): {:?}\",\n\n codec, e\n\n ),\n\n 
DataError::InvalidArrayAttributeEncoding(encoding) => {\n\n write!(f, \"Unknown array attribute encoding: got {:?}\", encoding)\n\n }\n\n DataError::InvalidAttributeTypeCode(code) => {\n\n write!(f, \"Invalid node attribute type code: {:?}\", code)\n\n }\n\n DataError::InvalidNodeNameEncoding(e) => {\n\n write!(f, \"Invalid node name encoding: {:?}\", e)\n\n }\n\n DataError::NodeAttributeError => {\n\n write!(f, \"Some error occured while reading node attributes\")\n\n }\n\n DataError::NodeLengthMismatch(expected, got) => write!(\n", "file_path": "src/pull_parser/error/data.rs", "rank": 50, "score": 69934.18822791579 }, { "content": "/// Compares nodes strictly.\n\nfn nodes_strict_eq(left: NodeHandle<'_>, right: NodeHandle<'_>) -> bool {\n\n // Compare name.\n\n if left.name() != right.name() {\n\n return false;\n\n }\n\n // Compare attributes.\n\n {\n\n let left = left.attributes();\n\n let right = right.attributes();\n\n if left.len() != right.len() {\n\n return false;\n\n }\n\n if !left.iter().zip(right).all(|(l, r)| l.strict_eq(r)) {\n\n return false;\n\n }\n\n }\n\n // Compare children.\n\n {\n\n let mut left = left.children();\n\n let mut right = right.children();\n", "file_path": "src/tree/v7400/node/handle.rs", "rank": 51, "score": 68853.397585064 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct OpenNode {\n\n /// Header position.\n\n header_pos: u64,\n\n /// Position of beginning of attributes part.\n\n body_pos: u64,\n\n /// Header.\n\n header: NodeHeader,\n\n /// Whether the node has child.\n\n has_child: bool,\n\n /// Whether the attributes are finalized.\n\n is_attrs_finalized: bool,\n\n}\n", "file_path": "src/writer/v7400/binary.rs", "rank": 52, "score": 58890.05335688383 }, { "content": "/// A simple wrapper for pretty-printing tree.\n\nstruct DebugTree<'a> {\n\n /// Tree.\n\n tree: &'a Tree,\n\n}\n\n\n\nimpl fmt::Debug for DebugTree<'_> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let v = DebugNodeHandle {\n\n node: 
self.tree.root(),\n\n };\n\n v.fmt(f)\n\n }\n\n}\n\n\n", "file_path": "src/tree/v7400.rs", "rank": 53, "score": 57742.627326337155 }, { "content": "/// A simple wrapper for pretty-printing node.\n\nstruct DebugNodeHandle<'a> {\n\n /// Node.\n\n node: NodeHandle<'a>,\n\n}\n\n\n\nimpl fmt::Debug for DebugNodeHandle<'_> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"Node\")\n\n .field(\"name\", &self.node.name())\n\n .field(\"attributes\", &self.node.attributes())\n\n .field(\"children\", &DebugNodeHandleChildren { node: self.node })\n\n .finish()\n\n }\n\n}\n\n\n", "file_path": "src/tree/v7400.rs", "rank": 54, "score": 56729.010661043954 }, { "content": "/// A simple wrapper for pretty-printing children.\n\nstruct DebugNodeHandleChildren<'a> {\n\n /// Parent node.\n\n node: NodeHandle<'a>,\n\n}\n\n\n\nimpl fmt::Debug for DebugNodeHandleChildren<'_> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_list()\n\n .entries(\n\n self.node\n\n .children()\n\n .map(|child| DebugNodeHandle { node: child }),\n\n )\n\n .finish()\n\n }\n\n}\n", "file_path": "src/tree/v7400.rs", "rank": 55, "score": 55769.53375828866 }, { "content": " /// // To treat the given warning as a critical error, return\n\n /// // `Err(warning.into())`.\n\n /// Ok(())\n\n /// });\n\n /// assert!(!parser.is_used(), \"Parser emitted no events yet\");\n\n /// let _ = parser.next_event();\n\n /// assert!(parser.is_used(), \"Parser emitted an event\");\n\n /// ```\n\n pub fn is_used(&self) -> bool {\n\n self.state.last_event_kind.is_some()\n\n }\n\n}\n\n\n\nimpl<R: fmt::Debug> fmt::Debug for Parser<R> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"Parser\")\n\n .field(\"state\", &self.state)\n\n .field(\"reader\", &self.reader)\n\n .field(\n\n \"warning_handler\",\n\n &self.warning_handler.as_ref().map(|v| v as *const _),\n\n )\n\n .finish()\n\n }\n\n}\n\n\n\n/// Health of a parser.\n", "file_path": 
"src/pull_parser/v7400/parser.rs", "rank": 56, "score": 41442.44688117113 }, { "content": " Health::Aborted(err_pos) => Err(Error::with_position(\n\n OperationError::AlreadyAborted.into(),\n\n err_pos.clone(),\n\n )),\n\n }\n\n }\n\n\n\n /// Reads the given type from the underlying reader.\n\n pub(crate) fn parse<T: FromParser>(&mut self) -> Result<T> {\n\n T::read_from_parser(self)\n\n }\n\n\n\n /// Passes the given warning to the warning handler.\n\n pub(crate) fn warn(&mut self, warning: Warning, pos: SyntacticPosition) -> Result<()> {\n\n match self.warning_handler {\n\n Some(ref mut handler) => match handler(warning, &pos) {\n\n Ok(()) => Ok(()),\n\n Err(e) => Err(e.and_position(pos)),\n\n },\n\n None => Ok(()),\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 57, "score": 41438.531784350074 }, { "content": " /// // Print warning.\n\n /// eprintln!(\"WARNING: {} (pos={:?})\", warning, pos);\n\n /// // To ignore the warning and continue processing, return `Ok(())`.\n\n /// // To treat the given warning as a critical error, return\n\n /// // `Err(warning.into())`.\n\n /// Ok(())\n\n /// });\n\n /// ```\n\n ///\n\n /// [syntactic position]: `SyntacticPosition`\n\n pub fn set_warning_handler<F>(&mut self, warning_handler: F)\n\n where\n\n F: 'static + FnMut(Warning, &SyntacticPosition) -> Result<()>,\n\n {\n\n self.warning_handler = Some(Box::new(warning_handler));\n\n }\n\n\n\n /// Returns a mutable reference to the inner reader.\n\n pub(crate) fn reader(&mut self) -> &mut R {\n\n &mut self.reader\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 58, "score": 41436.84831451478 }, { "content": "impl<R: ParserSource> Parser<R> {\n\n /// Parser version.\n\n pub const PARSER_VERSION: ParserVersion = ParserVersion::V7400;\n\n\n\n /// Creates a new `Parser`.\n\n ///\n\n /// Returns an error if the given FBX version in unsupported.\n\n pub(crate) fn create(fbx_version: FbxVersion, reader: R) -> Result<Self> {\n\n if 
ParserVersion::from_fbx_version(fbx_version) != Some(Self::PARSER_VERSION) {\n\n return Err(\n\n OperationError::UnsupportedFbxVersion(Self::PARSER_VERSION, fbx_version).into(),\n\n );\n\n }\n\n\n\n Ok(Self {\n\n state: State::new(fbx_version),\n\n reader,\n\n warning_handler: None,\n\n })\n\n }\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 59, "score": 41436.31047123659 }, { "content": " /// .expect(\"Failed to create parser\");\n\n /// // Do something here.\n\n /// // Something done.\n\n /// let depth = parser.current_depth();\n\n /// if depth > 0 {\n\n /// parser.skip_current_node().expect(\"Failed to skip current node\");\n\n /// assert_eq!(parser.current_depth(), depth - 1);\n\n /// }\n\n /// ```\n\n ///\n\n /// `parser.skip_current_node()` is almost same as the code below, except\n\n /// for error handling.\n\n ///\n\n /// ```no_run\n\n /// # use fbxcel::pull_parser::{v7400::{Parser, Event}, ParserSource, Result};\n\n /// fn skip_current_node<R: ParserSource>(parser: &mut Parser<R>) -> Result<()> {\n\n /// loop {\n\n /// match parser.next_event()? {\n\n /// Event::StartNode(_) => skip_current_node(parser)?,\n\n /// Event::EndNode => return Ok(()),\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 60, "score": 41434.37876815207 }, { "content": "//! 
Parser for FBX 7.4 or later.\n\n\n\nuse std::{fmt, io};\n\n\n\nuse crate::{\n\n low::{\n\n v7400::{FbxFooter, NodeHeader},\n\n FbxHeader, FbxVersion,\n\n },\n\n pull_parser::{\n\n error::{DataError, OperationError},\n\n reader::{PlainSource, SeekableSource},\n\n v7400::{Event, FromParser, StartNode},\n\n Error, ParserSource, ParserVersion, Result, SyntacticPosition, Warning,\n\n },\n\n};\n\n\n\n/// Warning handler type.\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 61, "score": 41432.76479048545 }, { "content": " }\n\n\n\n /// Returns whether the parser is already used or brand-new.\n\n ///\n\n /// Returns `true` if the parser emitted some events in the past, returns\n\n /// `false` if the parser have not emitted any events.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # use fbxcel::low::FbxHeader;\n\n /// # let reader = std::io::empty();\n\n /// # let header: FbxHeader = unimplemented!();\n\n /// let mut parser = fbxcel::pull_parser::v7400::from_reader(header, reader)\n\n /// .expect(\"Failed to create parser\");\n\n /// assert!(!parser.is_used());\n\n /// parser.set_warning_handler(|warning, pos| {\n\n /// // Print warning.\n\n /// eprintln!(\"WARNING: {} (pos={:?})\", warning, pos);\n\n /// // To ignore the warning and continue processing, return `Ok(())`.\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 62, "score": 41431.639588330225 }, { "content": " }\n\n }\n\n\n\n /// Returns next event if successfully read.\n\n ///\n\n /// You should not call `next_event()` if a parser functionality has been\n\n /// already failed and returned error.\n\n /// If you call `next_event()` with failed parser, error created from\n\n /// [`OperationError::AlreadyAborted`] will be returned.\n\n pub fn next_event(&mut self) -> Result<Event<'_, R>> {\n\n let previous_depth = self.current_depth();\n\n\n\n // Precondition: Health should be `Health::Running`.\n\n self.ensure_continuable()?;\n\n\n\n // Update health.\n\n let event_kind = match 
self.next_event_impl() {\n\n Ok(v) => v,\n\n Err(e) => {\n\n let err_pos = self.position();\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 63, "score": 41431.13165142738 }, { "content": "\n\n /// Sets the warning handler.\n\n ///\n\n /// The warning handler will receive warnings and their [syntactic\n\n /// position]s each time the warnings happen.\n\n ///\n\n /// If the handler returned `Ok(())`, the warning is considered non-critical\n\n /// and parsing can be continued.\n\n /// If the handler returned `Err(_)`, the warning is considered critical,\n\n /// and the parsing cannot be continued.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # use fbxcel::low::FbxHeader;\n\n /// # let reader = std::io::empty();\n\n /// # let header: FbxHeader = unimplemented!();\n\n /// let mut parser = fbxcel::pull_parser::v7400::from_reader(header, reader)\n\n /// .expect(\"Failed to create parser\");\n\n /// parser.set_warning_handler(|warning, pos| {\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 64, "score": 41429.88613870841 }, { "content": " /// Returns the number of attributes of the current node.\n\n pub(crate) fn current_attributes_count(&self) -> u64 {\n\n self.state\n\n .current_node()\n\n .expect(\"Implicit top-level node has no name\")\n\n .attributes_count\n\n }\n\n\n\n /// Returns current node depth.\n\n ///\n\n /// Implicit root node is considered to be depth 0.\n\n pub fn current_depth(&self) -> usize {\n\n self.state.started_nodes.len()\n\n }\n\n\n\n /// Returns `true` if the parser can continue parsing, `false` otherwise.\n\n pub(crate) fn ensure_continuable(&self) -> Result<()> {\n\n match self.state.health() {\n\n Health::Running => Ok(()),\n\n Health::Finished => Err(OperationError::AlreadyFinished.into()),\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 65, "score": 41427.982633976084 }, { "content": " /// This method seeks to the node end position without any additional\n\n /// parsing, since the parser 
already knows the node end position.\n\n /// Because of this, some errors can be overlooked, or detected at the\n\n /// different position from the true error position.\n\n ///\n\n /// To detect errors correctly, you should use [`next_event`][`Self::next_event`] manually.\n\n /// See an example to how to do this.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if there are no open nodes, i.e. when [`current_depth()`][`Self::current_depth`]\n\n /// returns 0.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # use fbxcel::low::FbxHeader;\n\n /// # let reader = std::io::empty();\n\n /// # let header: FbxHeader = unimplemented!();\n\n /// let mut parser = fbxcel::pull_parser::v7400::from_reader(header, reader)\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 66, "score": 41426.73803503751 }, { "content": " };\n\n if attributes_end_offset > self.reader().position() {\n\n // Skip if attributes remains (partially or entirely) unread.\n\n self.reader().skip_to(attributes_end_offset)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n /// Sets the parser to aborted state.\n\n pub(crate) fn set_aborted(&mut self, pos: SyntacticPosition) {\n\n self.state.health = Health::Aborted(pos);\n\n }\n\n\n\n /// Ignores events until the current node closes.\n\n ///\n\n /// This discards parser events until the [`EndNode`] event for the current\n\n /// node is read.\n\n /// The last [`EndNode`] (for the current node) is also discarded.\n\n ///\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 67, "score": 41424.5905264619 }, { "content": " .map_or(self.state.known_toplevel_nodes_count, |v| {\n\n v.known_children_count\n\n });\n\n pos.node_path.push((local_node_index, String::new()));\n\n\n\n self.warn(Warning::EmptyNodeName, pos)?;\n\n }\n\n\n\n // Read the node name.\n\n let name = {\n\n let mut vec = vec![0; node_header.bytelen_name as usize];\n\n self.reader.read_exact(&mut vec[..])?;\n\n String::from_utf8(vec).map_err(DataError::InvalidNodeNameEncoding)?\n\n };\n\n 
let current_offset = self.reader().position();\n\n let starting = StartedNode {\n\n node_start_offset: event_start_offset,\n\n node_end_offset: node_header.end_offset,\n\n attributes_count: node_header.num_attributes,\n\n attributes_end_offset: current_offset + node_header.bytelen_attributes,\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 68, "score": 41421.68682795781 }, { "content": " if closing.attributes_count != 0 && closing.known_children_count == 0 {\n\n // It's odd, the current node should not have a node end\n\n // marker at the ending, but found.\n\n self.warn(Warning::ExtraNodeEndMarker, self.position())?;\n\n }\n\n Ok(EventKind::EndNode)\n\n }\n\n None => Ok(EventKind::EndFbx),\n\n };\n\n }\n\n\n\n if node_header.bytelen_name == 0 {\n\n let mut pos = self.position();\n\n // Need to modify position, because the currently reading node is\n\n // not reflected to the parser.\n\n pos.byte_pos = self.reader().position();\n\n pos.component_byte_pos = event_start_offset;\n\n let local_node_index = self\n\n .state\n\n .current_node()\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 69, "score": 41421.62685471624 }, { "content": " /// Event::EndFbx(_) => panic!(\"Attempt to skip implicit top-level node\"),\n\n /// }\n\n /// }\n\n /// }\n\n /// ```\n\n ///\n\n /// [`EndNode`]: `Event::EndNode`\n\n pub fn skip_current_node(&mut self) -> Result<()> {\n\n let end_pos = self\n\n .state\n\n .started_nodes\n\n .pop()\n\n .expect(\"Attempt to skip implicit top-level node\")\n\n .node_end_offset;\n\n self.state.last_event_kind = Some(EventKind::EndNode);\n\n self.reader.skip_to(end_pos)?;\n\n\n\n Ok(())\n\n }\n\n /// Returns the syntactic position of the current node.\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 70, "score": 41421.18272136906 }, { "content": " ///\n\n /// Note that this allocates memory.\n\n pub fn position(&self) -> SyntacticPosition {\n\n let byte_pos = self.reader.position();\n\n if 
self.state.current_node().is_none() {\n\n // Reading implicit root node.\n\n return SyntacticPosition {\n\n byte_pos,\n\n component_byte_pos: 0,\n\n node_path: Vec::new(),\n\n attribute_index: None,\n\n };\n\n }\n\n\n\n let toplevel_index = self\n\n .state\n\n .known_toplevel_nodes_count\n\n .checked_sub(1)\n\n .expect(\"Should never fail: implicit root node should have some children here\");\n\n // For now, use 0 for start offset of implicit root node.\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 71, "score": 41421.083375940354 }, { "content": " }\n\n\n\n /// Returns FBX version.\n\n pub fn fbx_version(&self) -> FbxVersion {\n\n self.state.fbx_version\n\n }\n\n\n\n /// Returns the name of the current node.\n\n ///\n\n /// # Panics\n\n ///\n\n /// This panics if there are no open nodes.\n\n pub fn current_node_name(&self) -> &str {\n\n self.state\n\n .current_node()\n\n .expect(\"Implicit top-level node has no name\")\n\n .name\n\n .as_str()\n\n }\n\n\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 72, "score": 41420.209777734206 }, { "content": " fn next_event_impl(&mut self) -> Result<EventKind> {\n\n assert_eq!(self.state.health(), &Health::Running);\n\n assert_ne!(self.state.last_event_kind(), Some(EventKind::EndFbx));\n\n\n\n // Skip unread attribute of previous node, if exists.\n\n self.skip_unread_attributes()?;\n\n\n\n let event_start_offset = self.reader().position();\n\n\n\n // Check if the current node ends here (without any marker).\n\n // A node end marker (all-zero node header, which indicates end of the\n\n // current node) is omitted if and only if:\n\n //\n\n // * the node has no children nodes, and\n\n // * the node has one or more attributes.\n\n //\n\n // Note that the check can be skipped for the implicit root node,\n\n // It has always a node end marker at the ending (because it has no\n\n // attributes).\n\n if let Some(current_node) = self.state.current_node() {\n", "file_path": "src/pull_parser/v7400/parser.rs", 
"rank": 73, "score": 41420.17642100125 }, { "content": " // Postcondition: The last event kind should be memorized correctly.\n\n assert_eq!(\n\n self.state.last_event_kind(),\n\n Some(event_kind),\n\n \"The last event kind should be memorized correctly\"\n\n );\n\n\n\n // Create the real result.\n\n Ok(match event_kind {\n\n EventKind::StartNode => Event::StartNode(StartNode::new(self)),\n\n EventKind::EndNode => Event::EndNode,\n\n EventKind::EndFbx => {\n\n let footer_res = FbxFooter::read_from_parser(self).map(Box::new);\n\n Event::EndFbx(footer_res)\n\n }\n\n })\n\n }\n\n\n\n /// Reads the next node header and changes the parser state (except for\n\n /// parser health and the last event kind).\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 74, "score": 41419.353287983766 }, { "content": " name,\n\n known_children_count: 0,\n\n };\n\n\n\n // Update parser status.\n\n match self.state.started_nodes.last_mut() {\n\n Some(parent) => parent.known_children_count += 1,\n\n None => self.state.known_toplevel_nodes_count += 1,\n\n }\n\n self.state.started_nodes.push(starting);\n\n Ok(EventKind::StartNode)\n\n }\n\n\n\n /// Skips unread attribute of the current node, if remains.\n\n ///\n\n /// If there are no unread attributes, this method simply do nothing.\n\n fn skip_unread_attributes(&mut self) -> Result<()> {\n\n let attributes_end_offset = match self.state.current_node() {\n\n Some(v) => v.attributes_end_offset,\n\n None => return Ok(()),\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 75, "score": 41419.06065357993 }, { "content": " // This behaviour may change in future.\n\n let node_start_pos = self.state.current_node().map_or(0, |v| v.node_start_offset);\n\n // Use not `checked_sub` but `saturating_sub` here, because\n\n // `Iterator::zip` might read extra elements which can be used as\n\n // result.\n\n let trailing_indices = self\n\n .state\n\n .started_nodes\n\n .iter()\n\n .map(|v| v.known_children_count.saturating_sub(1));\n\n let 
node_indices = std::iter::once(toplevel_index).chain(trailing_indices);\n\n let node_names = self.state.started_nodes.iter().map(|v| v.name.clone());\n\n let node_path = node_indices.zip(node_names).collect();\n\n\n\n SyntacticPosition {\n\n byte_pos,\n\n component_byte_pos: node_start_pos,\n\n node_path,\n\n attribute_index: None,\n\n }\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 76, "score": 41418.71512485325 }, { "content": " self.set_aborted(err_pos.clone());\n\n return Err(e.and_position(err_pos));\n\n }\n\n };\n\n if event_kind == EventKind::EndFbx {\n\n self.state.health = Health::Finished;\n\n }\n\n\n\n // Update the last event kind.\n\n self.state.last_event_kind = Some(event_kind);\n\n\n\n // Postcondition: Depth should be updated correctly.\n\n let current_depth = self.current_depth();\n\n match event_kind {\n\n EventKind::StartNode => {\n\n assert_eq!(\n\n current_depth.wrapping_sub(previous_depth),\n\n 1,\n\n \"The depth should be incremented on `StartNode`\"\n\n );\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 77, "score": 41418.62590702075 }, { "content": " }\n\n }\n\n\n\n // Read node header.\n\n let node_header = NodeHeader::read_from_parser(self)?;\n\n\n\n let header_end_offset = self.reader().position();\n\n\n\n // Check if a node or a document ends here (with explicit marker).\n\n if node_header.is_node_end() {\n\n // The current node explicitly ends here.\n\n return match self.state.started_nodes.pop() {\n\n Some(closing) => {\n\n if closing.node_end_offset != header_end_offset {\n\n return Err(DataError::NodeLengthMismatch(\n\n closing.node_end_offset,\n\n Some(header_end_offset),\n\n )\n\n .into());\n\n }\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 78, "score": 41418.54114696285 }, { "content": " if current_node.node_end_offset < event_start_offset {\n\n // The current node has already been ended.\n\n return Err(\n\n DataError::NodeLengthMismatch(current_node.node_end_offset, None).into(),\n\n 
);\n\n }\n\n if current_node.node_end_offset == event_start_offset {\n\n // `last_event_kind() == Some(EventKind::EndNode)` means that\n\n // some node ends right before the event currently reading.\n\n let has_children = self.state.last_event_kind() == Some(EventKind::EndNode);\n\n let has_attributes = current_node.attributes_count != 0;\n\n\n\n if has_children || !has_attributes {\n\n // It's odd, the current node should have a node end marker\n\n // at the ending, but `node_end_offset` data tells that the\n\n // node ends without node end marker.\n\n self.warn(Warning::MissingNodeEndMarker, self.position())?;\n\n }\n\n self.state.started_nodes.pop();\n\n return Ok(EventKind::EndNode);\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 79, "score": 41418.470623327084 }, { "content": " }\n\n EventKind::EndNode => {\n\n assert_eq!(\n\n previous_depth.wrapping_sub(current_depth),\n\n 1,\n\n \"The depth should be decremented on `EndNode`\"\n\n );\n\n }\n\n EventKind::EndFbx => {\n\n assert_eq!(\n\n previous_depth, 0,\n\n \"Depth should be 0 before parsing finishes\"\n\n );\n\n assert_eq!(\n\n current_depth, 0,\n\n \"Depth should be 0 after parsing is finished\"\n\n );\n\n }\n\n }\n\n\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 80, "score": 41417.71409668414 }, { "content": " Self {\n\n fbx_version,\n\n health: Health::Running,\n\n started_nodes: Vec::new(),\n\n last_event_kind: None,\n\n known_toplevel_nodes_count: 0,\n\n }\n\n }\n\n\n\n /// Returns health of the parser.\n\n fn health(&self) -> &Health {\n\n &self.health\n\n }\n\n\n\n /// Returns info about current node (except for implicit root node).\n\n fn current_node(&self) -> Option<&StartedNode> {\n\n self.started_nodes.last()\n\n }\n\n\n\n /// Returns the last event kind.\n\n fn last_event_kind(&self) -> Option<EventKind> {\n\n self.last_event_kind\n\n }\n\n}\n\n\n\n/// Event kind.\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 81, "score": 41415.781432038595 }, { 
"content": "#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\nenum Health {\n\n /// Ready or already started, but not yet finished, and no critical errors.\n\n Running,\n\n /// Successfully finished.\n\n Finished,\n\n /// Aborted due to critical error.\n\n Aborted(SyntacticPosition),\n\n}\n\n\n\n/// Parser state.\n\n///\n\n/// This type contains parser state especially which are independent of parser\n\n/// source type.\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 82, "score": 40716.28752221702 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\nenum EventKind {\n\n /// Node start.\n\n StartNode,\n\n /// Node end.\n\n EndNode,\n\n /// FBX document end.\n\n EndFbx,\n\n}\n\n\n\n/// Information about started node.\n", "file_path": "src/pull_parser/v7400/parser.rs", "rank": 83, "score": 40034.207848956314 }, { "content": "//! Error and result types for `tree::any` module.\n\n\n\nuse std::{error, fmt};\n\n\n\nuse crate::{pull_parser, tree};\n\n\n\n/// AnyTree load result.\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n/// Error.\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub enum Error {\n\n /// Parser creation error.\n\n ParserCreation(pull_parser::any::Error),\n\n /// Parser error.\n\n Parser(pull_parser::Error),\n\n /// Tree load error.\n\n Tree(Box<dyn error::Error + Send + Sync + 'static>),\n\n}\n", "file_path": "src/tree/any/error.rs", "rank": 84, "score": 38636.686214234236 }, { "content": "\n\nimpl error::Error for Error {\n\n fn source(&self) -> Option<&(dyn error::Error + 'static)> {\n\n match self {\n\n Error::ParserCreation(e) => Some(e),\n\n Error::Parser(e) => Some(e),\n\n Error::Tree(e) => Some(&**e),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Error::ParserCreation(e) => write!(f, \"Failed to create a parser: {}\", e),\n\n Error::Parser(e) => write!(f, \"Parser error: {}\", e),\n\n Error::Tree(e) => write!(f, 
\"Tree load error: {}\", e),\n\n }\n\n }\n\n}\n", "file_path": "src/tree/any/error.rs", "rank": 85, "score": 38633.38411068222 }, { "content": "\n\nimpl From<pull_parser::any::Error> for Error {\n\n fn from(e: pull_parser::any::Error) -> Self {\n\n Error::ParserCreation(e)\n\n }\n\n}\n\n\n\nimpl From<pull_parser::Error> for Error {\n\n fn from(e: pull_parser::Error) -> Self {\n\n Error::Parser(e)\n\n }\n\n}\n\n\n\nimpl From<tree::v7400::LoadError> for Error {\n\n fn from(e: tree::v7400::LoadError) -> Self {\n\n Error::Tree(e.into())\n\n }\n\n}\n", "file_path": "src/tree/any/error.rs", "rank": 86, "score": 38631.394144226215 }, { "content": "//! Error types.\n\n\n\nuse std::{error, fmt};\n\n\n\nuse crate::pull_parser::Error as ParserError;\n\n\n\n/// FBX data tree load error.\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub enum LoadError {\n\n /// Bad parser.\n\n ///\n\n /// This error will be mainly caused by user logic error.\n\n BadParser,\n\n /// Parser error.\n\n Parser(ParserError),\n\n}\n\n\n\nimpl fmt::Display for LoadError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "src/tree/v7400/error.rs", "rank": 87, "score": 37520.225964931524 }, { "content": " match self {\n\n LoadError::BadParser => f.write_str(\"Attempt to use a bad parser\"),\n\n LoadError::Parser(e) => write!(f, \"Parser error: {}\", e),\n\n }\n\n }\n\n}\n\n\n\nimpl error::Error for LoadError {\n\n fn source(&self) -> Option<&(dyn error::Error + 'static)> {\n\n match self {\n\n LoadError::Parser(e) => Some(e),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl From<ParserError> for LoadError {\n\n fn from(e: ParserError) -> Self {\n\n LoadError::Parser(e)\n\n }\n\n}\n", "file_path": "src/tree/v7400/error.rs", "rank": 88, "score": 37512.62415689067 }, { "content": "//! 
Binary writer error.\n\n\n\nuse std::{error, fmt, io};\n\n\n\nuse crate::low::FbxVersion;\n\n\n\n/// Write result.\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n/// Write error.\n\n#[derive(Debug)]\n\npub enum Error {\n\n /// Node attribute is too long.\n\n AttributeTooLong(usize),\n\n /// Compression error.\n\n Compression(CompressionError),\n\n /// File is too large.\n\n FileTooLarge(u64),\n\n /// I/O error.\n\n Io(io::Error),\n", "file_path": "src/writer/v7400/binary/error.rs", "rank": 89, "score": 36453.069414987775 }, { "content": "\n\n/// Compression error.\n\n#[derive(Debug)]\n\npub enum CompressionError {\n\n /// Zlib error.\n\n Zlib(io::Error),\n\n}\n\n\n\nimpl error::Error for CompressionError {\n\n fn source(&self) -> Option<&(dyn error::Error + 'static)> {\n\n match self {\n\n CompressionError::Zlib(e) => Some(e),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for CompressionError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n CompressionError::Zlib(e) => write!(f, \"Zlib compression error: {}\", e),\n\n }\n\n }\n\n}\n", "file_path": "src/writer/v7400/binary/error.rs", "rank": 90, "score": 36452.40171800935 }, { "content": " Error::Io(e) => Some(e),\n\n Error::UserDefined(e) => Some(&**e),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Error::AttributeTooLong(v) => write!(f, \"Node attribute is too long: {} bytes\", v),\n\n Error::Compression(e) => write!(f, \"Compression error: {}\", e),\n\n Error::FileTooLarge(v) => write!(f, \"File is too large: {} bytes\", v),\n\n Error::Io(e) => write!(f, \"I/O error: {}\", e),\n\n Error::NoNodesToClose => write!(f, \"There are no nodes to close\"),\n\n Error::NodeNameTooLong(v) => write!(f, \"Node name is too long: {} bytes\", v),\n\n Error::TooManyArrayAttributeElements(v) => write!(\n\n f,\n\n \"Too many array elements for a single node attribute: 
count={}\",\n\n v\n", "file_path": "src/writer/v7400/binary/error.rs", "rank": 91, "score": 36447.72344403637 }, { "content": " ),\n\n Error::TooManyAttributes(v) => write!(f, \"Too many attributes: count={}\", v),\n\n Error::UnclosedNode(v) => write!(f, \"There remains unclosed nodes: depth={}\", v),\n\n Error::UnsupportedFbxVersion(v) => write!(f, \"Unsupported FBX version: {:?}\", v),\n\n Error::UserDefined(e) => write!(f, \"User-defined error: {}\", e),\n\n }\n\n }\n\n}\n\n\n\nimpl From<io::Error> for Error {\n\n fn from(e: io::Error) -> Self {\n\n Error::Io(e)\n\n }\n\n}\n\n\n\nimpl From<CompressionError> for Error {\n\n fn from(e: CompressionError) -> Self {\n\n Error::Compression(e)\n\n }\n\n}\n", "file_path": "src/writer/v7400/binary/error.rs", "rank": 92, "score": 36446.83613421281 }, { "content": " /// There are no nodes to close.\n\n NoNodesToClose,\n\n /// Node name is too long.\n\n NodeNameTooLong(usize),\n\n /// Too many array attribute elements.\n\n TooManyArrayAttributeElements(usize),\n\n /// Too many attributes.\n\n TooManyAttributes(usize),\n\n /// There remains unclosed nodes.\n\n UnclosedNode(usize),\n\n /// Unsupported FBX version.\n\n UnsupportedFbxVersion(FbxVersion),\n\n /// User-defined error.\n\n UserDefined(Box<dyn std::error::Error + 'static>),\n\n}\n\n\n\nimpl error::Error for Error {\n\n fn source(&self) -> Option<&(dyn error::Error + 'static)> {\n\n match self {\n\n Error::Compression(e) => Some(e),\n", "file_path": "src/writer/v7400/binary/error.rs", "rank": 93, "score": 36445.37516408169 }, { "content": "//! 
Types and functions for all supported versions.\n\n\n\nuse std::io::{Read, Seek};\n\n\n\nuse crate::{\n\n low::{FbxHeader, FbxVersion},\n\n pull_parser::{\n\n self,\n\n reader::{PlainSource, SeekableSource},\n\n ParserSource, ParserVersion,\n\n },\n\n};\n\n\n\npub use self::error::{Error, Result};\n\n\n\nmod error;\n\n\n\n/// FBX tree type with any supported version.\n\n#[non_exhaustive]\n\npub enum AnyParser<R> {\n", "file_path": "src/pull_parser/any.rs", "rank": 94, "score": 35514.387965833135 }, { "content": "//! ),\n\n//! }\n\n//! ```\n\n\n\npub use self::{\n\n error::{Error, Result, Warning},\n\n position::SyntacticPosition,\n\n reader::ParserSource,\n\n version::ParserVersion,\n\n};\n\n\n\npub mod any;\n\npub mod error;\n\nmod position;\n\npub mod reader;\n\npub mod v7400;\n\nmod version;\n", "file_path": "src/pull_parser.rs", "rank": 95, "score": 35511.527967511436 }, { "content": " /// FBX 7.4 or later.\n\n V7400(pull_parser::v7400::Parser<R>),\n\n}\n\n\n\nimpl<R: ParserSource> AnyParser<R> {\n\n /// Returns the parser version.\n\n pub fn parser_version(&self) -> ParserVersion {\n\n match self {\n\n AnyParser::V7400(_) => pull_parser::v7400::Parser::<R>::PARSER_VERSION,\n\n }\n\n }\n\n\n\n /// Returns the FBX version.\n\n pub fn fbx_version(&self) -> FbxVersion {\n\n match self {\n\n AnyParser::V7400(parser) => parser.fbx_version(),\n\n }\n\n }\n\n}\n\n\n\n/// Returns the parser version for the FBX data.\n", "file_path": "src/pull_parser/any.rs", "rank": 96, "score": 35507.679275145405 }, { "content": "//! ```\n\n//!\n\n//! ## Manual setup\n\n//!\n\n//! In this way you have full control, but usual users don't need this.\n\n//!\n\n//! 1. Get FBX header.\n\n//! 2. Decide which version of parser to use.\n\n//! 3. Create parser with source reader.\n\n//!\n\n//! ```no_run\n\n//! use fbxcel::{low::FbxHeader, pull_parser::ParserVersion};\n\n//!\n\n//! let file = std::fs::File::open(\"sample.fbx\").expect(\"Failed to open file\");\n\n//! 
// You can also use raw `file`, but do buffering for better efficiency.\n\n//! let mut reader = std::io::BufReader::new(file);\n\n//!\n\n//! // 1. Get FBX header.\n\n//! let header = FbxHeader::load(&mut reader)\n\n//! .expect(\"Failed to load FBX header\");\n", "file_path": "src/pull_parser.rs", "rank": 97, "score": 35506.47998657388 }, { "content": "//!\n\n//! ```no_run\n\n//! use fbxcel::pull_parser::any::{from_seekable_reader, AnyParser};\n\n//!\n\n//! let file = std::fs::File::open(\"sample.fbx\").expect(\"Failed to open file\");\n\n//! // You can also use raw `file`, but do buffering for better efficiency.\n\n//! let reader = std::io::BufReader::new(file);\n\n//!\n\n//! // Use `from_seekable_reader` for readers implementing `std::io::Seek`.\n\n//! // To use readers without `std::io::Seek` implementation, use `from_reader`\n\n//! // instead.\n\n//! match from_seekable_reader(reader).expect(\"Failed to setup FBX parser\") {\n\n//! // Use v7400 parser (implemented in `v7400` module).\n\n//! AnyParser::V7400(mut parser) => {\n\n//! // You got a parser! Do what you want!\n\n//! },\n\n//! // `AnyParser` is nonexhaustive.\n\n//! // You should handle new unknown parser version case.\n\n//! _ => panic!(\"Unsupported FBX parser is required\"),\n\n//! }\n", "file_path": "src/pull_parser.rs", "rank": 98, "score": 35506.291652197484 }, { "content": "//! // 2. Decide which version of parser to use.\n\n//! match header.parser_version() {\n\n//! // Use v7400 parser (implemented in `v7400` module).\n\n//! Some(ParserVersion::V7400) => {\n\n//! // 3. Create parser with source reader.\n\n//! // Pass both header and reader.\n\n//! // Use `from_seekable_reader` for readers implementing `std::io::Seek`.\n\n//! // To use readers without `std::io::Seek` implementation, use\n\n//! // `from_reader` instead.\n\n//! let mut parser = fbxcel::pull_parser::v7400::from_seekable_reader(header, reader)\n\n//! .expect(\"Failed to setup parser\");\n\n//! // You got a parser! 
Do what you want!\n\n//! },\n\n//! // `ParserVersion` is nonexhaustive.\n\n//! // You should handle new unknown parser version case.\n\n//! Some(v) => panic!(\"Parser version {:?} is not yet supported\", v),\n\n//! // No appropriate parser found\n\n//! None => panic!(\n\n//! \"FBX version {:?} is not supported by backend library\",\n\n//! header.version()\n", "file_path": "src/pull_parser.rs", "rank": 99, "score": 35505.361636982605 } ]
Rust
cargo-wharf-frontend/src/config/builder.rs
RoeeJ/cargo-wharf
48ab920be2e396be534ec12238fa0ca79da6316f
use std::collections::BTreeMap; use std::path::{Path, PathBuf}; use failure::{format_err, Error, ResultExt}; use log::*; use serde::Serialize; use buildkit_frontend::oci; use buildkit_frontend::Bridge; use buildkit_llb::ops::source::ImageSource; use buildkit_llb::prelude::*; use super::base::{BaseBuilderConfig, CustomCommand}; use super::{merge_spec_and_overriden_env, BaseImageConfig}; use crate::shared::TARGET_PATH; #[derive(Debug, Serialize)] pub struct BuilderConfig { #[serde(skip_serializing)] source: ImageSource, overrides: BaseBuilderConfig, defaults: BuilderConfigDefaults, merged_env: BTreeMap<String, String>, cargo_home: PathBuf, } #[derive(Debug, Serialize, Default)] struct BuilderConfigDefaults { env: Option<BTreeMap<String, String>>, user: Option<String>, } impl BuilderConfig { pub async fn analyse(bridge: &mut Bridge, config: BaseBuilderConfig) -> Result<Self, Error> { let source = config.source(); let (digest, spec) = { bridge .resolve_image_config(&source, Some("Resolving builder image")) .await .context("Unable to resolve image config")? }; debug!("resolved builder image config: {:#?}", spec.config); let spec = { spec.config .ok_or_else(|| format_err!("Missing source image config"))? 
}; let source = if !digest.is_empty() { source.with_digest(digest) } else { source }; let merged_env = merge_spec_and_overriden_env(&spec.env, &config.env); let user = { config .user .as_ref() .or_else(|| spec.user.as_ref()) .map(String::as_str) }; let cargo_home = PathBuf::from( merged_env .get("CARGO_HOME") .cloned() .or_else(|| guess_cargo_home(user)) .ok_or_else(|| format_err!("Unable to find or guess CARGO_HOME env variable"))?, ); Ok(Self { source, overrides: config, defaults: spec.into(), cargo_home, merged_env, }) } #[cfg(test)] pub fn mocked_new(source: ImageSource, cargo_home: PathBuf) -> Self { BuilderConfig { source, defaults: Default::default(), overrides: Default::default(), cargo_home, merged_env: Default::default(), } } pub fn cargo_home(&self) -> &Path { &self.cargo_home } pub fn source(&self) -> &ImageSource { &self.source } pub fn target(&self) -> Option<&str> { self.overrides.target.as_ref().map(String::as_str) } pub fn user(&self) -> Option<&str> { self.overrides .user .as_ref() .or_else(|| self.defaults.user.as_ref()) .map(String::as_str) } pub fn env(&self) -> impl Iterator<Item = (&str, &str)> { self.merged_env .iter() .map(|(key, value)| (key.as_str(), value.as_str())) } pub fn setup_commands(&self) -> Option<&Vec<CustomCommand>> { self.overrides.setup_commands.as_ref() } } impl BaseImageConfig for BuilderConfig { fn populate_env<'a>(&self, mut command: Command<'a>) -> Command<'a> { command = command.env("CARGO_TARGET_DIR", TARGET_PATH); if let Some(user) = self.user() { command = command.user(user); } for (name, value) in self.env() { command = command.env(name, value); } command .env("CARGO_HOME", self.cargo_home().display().to_string()) .mount(Mount::SharedCache(self.cargo_home().join("git"))) .mount(Mount::SharedCache(self.cargo_home().join("registry"))) } fn image_source(&self) -> Option<&ImageSource> { Some(&self.source) } } fn guess_cargo_home(user: Option<&str>) -> Option<String> { match user { Some("root") => 
Some("/root/.cargo".into()), Some(user) => Some(format!("/home/{}/.cargo", user)), None => None, } } impl From<oci::ImageConfig> for BuilderConfigDefaults { fn from(config: oci::ImageConfig) -> Self { Self { env: config.env, user: config.user, } } } #[test] fn cargo_home_guessing() { assert_eq!(guess_cargo_home(None), None); assert_eq!(guess_cargo_home(Some("root")), Some("/root/.cargo".into())); assert_eq!( guess_cargo_home(Some("den")), Some("/home/den/.cargo".into()) ); }
use std::collections::BTreeMap; use std::path::{Path, PathBuf}; use failure::{format_err, Error, ResultExt}; use log::*; use serde::Serialize; use buildkit_frontend::oci; use buildkit_frontend::Bridge; use buildkit_llb::ops::source::ImageSource; use buildkit_llb::prelude::*; use super::base::{BaseBuilderConfig, CustomCommand}; use super::{merge_spec_and_overriden_env, BaseImageConfig}; use crate::shared::TARGET_PATH; #[derive(Debug, Serialize)] pub struct BuilderConfig { #[serde(skip_serializing)] source: ImageSource, overrides: BaseBuilderConfig, defaults: BuilderConfigDefaults, merged_env: BTreeMap<String, String>, cargo_home: PathBuf, } #[derive(Debug, Serialize, Default)] struct BuilderConfigDefaults { env: Option<BTreeMap<String, String>>, user: Option<String>, } impl BuilderConfig { pub async fn analyse(bridge: &mut Bridge, config: BaseBuilderConfig) -> Result<Self, Error> { let source = config.source(); let (digest, spec) = { bridge .resolve_image_config(&source, Some("Resolving builder image")) .await .context("Unable to resolve image config")? }; debug!("resolved builder image config: {:#?}", spec.config); let spec = { spec.config .ok_or_else(|| format_err!("Missing source image config"))? 
}; let source = if !digest.is_empty() { source.with_digest(digest) } else { source }; let merged_env = merge_spec_and_overriden_env(&spec.env, &config.env); let user = { config .user .as_ref() .or_else(|| spec.user.as_ref()) .map(String::as_str) }; let cargo_home = PathBuf::from( merged_env .get("CARGO_HOME") .cloned() .or_else(|| guess_cargo_home(user)) .ok_or_else(|| format_err!("Unable to find or guess CARGO_HOME env variable"))?, ); Ok(Self { source, overrides: config, defaults: spec.into(), cargo_home, merged_env, }) } #[cfg(test)] pub fn mocked_new(source: ImageSource, cargo_home: PathBuf) -> Self { BuilderConfig { source, defaults: Default::default(), overrides: Default::default(), cargo_home, merged_env: Default::default(), } } pub fn cargo_home(&self) -> &Path { &self.cargo_home } pub fn source(&self) -> &ImageSource { &self.source } pub fn target(&self) -> Option<&str> { self.overrides.target.as_ref().map(String::as_str) } pub fn user(&self) -> Option<&str> { self.overrides .user .as_ref() .or_else(|| self.defaults.user.as_ref()) .map(String::as_str) } pub fn env(&self) -> impl Iterator<Item = (&str, &str)> { self.merged_env .iter() .map(|(key, value)| (key.as_str(), value.as_str())) } pub fn setup_commands(&self) -> Option<&Vec<CustomCommand>> { self.overrides.setup_commands.as_ref() } } impl BaseImageConfig for BuilderConfig {
fn image_source(&self) -> Option<&ImageSource> { Some(&self.source) } } fn guess_cargo_home(user: Option<&str>) -> Option<String> { match user { Some("root") => Some("/root/.cargo".into()), Some(user) => Some(format!("/home/{}/.cargo", user)), None => None, } } impl From<oci::ImageConfig> for BuilderConfigDefaults { fn from(config: oci::ImageConfig) -> Self { Self { env: config.env, user: config.user, } } } #[test] fn cargo_home_guessing() { assert_eq!(guess_cargo_home(None), None); assert_eq!(guess_cargo_home(Some("root")), Some("/root/.cargo".into())); assert_eq!( guess_cargo_home(Some("den")), Some("/home/den/.cargo".into()) ); }
fn populate_env<'a>(&self, mut command: Command<'a>) -> Command<'a> { command = command.env("CARGO_TARGET_DIR", TARGET_PATH); if let Some(user) = self.user() { command = command.user(user); } for (name, value) in self.env() { command = command.env(name, value); } command .env("CARGO_HOME", self.cargo_home().display().to_string()) .mount(Mount::SharedCache(self.cargo_home().join("git"))) .mount(Mount::SharedCache(self.cargo_home().join("registry"))) }
function_block-full_function
[]
Rust
examples/reshaping.rs
Clomance/CatEngine
6f14694b6e7f493216b3dc4f01f9458385d55b07
use cat_engine::{ app::{ App, AppAttributes, Window, WindowInner, WindowEvent, WindowProcedure, VirtualKeyCode, quit, }, graphics::{ BlendingFunction, PrimitiveType, TexturedVertex2D, ShapeObject, }, texture::{ ImageBase, ImageObject, Texture }, }; struct WindowHandle; impl WindowProcedure<WindowInner<Option<Texture>>> for WindowHandle{ fn render(window:&Window,window_inner:&mut WindowInner<Option<Texture>>){ window_inner.context().make_current(true).unwrap_or_else(|_|{quit()}); let [width,height]=window.client_size(); unsafe{ window_inner.graphics().core().viewport.set([0,0,width as i32,height as i32]); } window_inner.graphics().draw_parameters().set_viewport([0f32,0f32,width as f32,height as f32]); if let Some(texture)=window_inner.storage_ref().as_ref(){ window_inner.graphics_ref().clear_colour([1f32;4]); window_inner.graphics_ref().draw_stack_textured_object(0,texture.texture_2d()); window_inner.graphics_ref().draw_stack_textured_object(1,texture.texture_2d()); window_inner.graphics_ref().draw_stack_textured_object(2,texture.texture_2d()); window_inner.graphics_ref().core().finish(); window_inner.context().swap_buffers().unwrap_or_else(|_|{quit()}); } } fn handle(event:WindowEvent,_window:&Window,window_inner:&mut WindowInner<Option<Texture>>){ match event{ WindowEvent::KeyPress(VirtualKeyCode::A)=>{ let image_base=ImageBase::new( [400f32,100f32,100f32,100f32], [0.5,0.5,0.5,1.0] ); window_inner.graphics().write_stack_textured_object_vertices(1,&image_base.vertices()); } WindowEvent::CloseRequest=>quit(), _=>{} } } } fn main(){ let app_attributes=AppAttributes::new(); let texture:Option<Texture>=None; let app=App::new::<WindowHandle>(app_attributes,texture); let graphics=app.graphics(); graphics.core().blending.enable(); graphics.core().blending.set_function( BlendingFunction::SourceAlpha, BlendingFunction::OneMinusSourceAlpha ); let vertices=[ TexturedVertex2D::new( [400f32,0f32], [1f32,1f32], [1.0,0.5,0.5,0.0] ), 
TexturedVertex2D::new([400f32,400f32],[1f32,0f32],[0.5,0.5,0.5,0.6]), TexturedVertex2D::new([0f32,400f32],[0f32,0f32],[0.5,0.5,0.5,1.0]), TexturedVertex2D::new([0f32,0f32],[0f32,1f32],[0.5,0.5,0.5,1.0]), ]; let _image1=graphics.push_textured_object_raw( &vertices, &[0,1,3,1,2,3], PrimitiveType::Triangles ).unwrap(); let image_base=ImageBase::new( [400f32,0f32,400f32,400f32], [0.5,0.5,0.5,1.0] ); let _image2=graphics.push_textured_object(&image_base).unwrap(); let image_base=ImageObject::new( [800f32,0f32,200f32,400f32], [0f32,0f32,0.5f32,1f32], [1.0;4] ); let _image3=graphics.push_textured_object(&image_base).unwrap(); *app.storage()=Some(Texture::from_path("logo_400x400.png").unwrap()); app.event_loop.run(|event,_app_control|{ match event{ _=>{} } }); }
use cat_engine::{ app::{ App, AppAttributes, Window, WindowInner, WindowEvent, WindowProcedure, VirtualKeyCode, quit, }, graphics::{ BlendingFunction, PrimitiveType, TexturedVertex2D, ShapeObject, }, texture::{ ImageBase, ImageObject, Texture }, }; struct
core().blending.set_function( BlendingFunction::SourceAlpha, BlendingFunction::OneMinusSourceAlpha ); let vertices=[ TexturedVertex2D::new( [400f32,0f32], [1f32,1f32], [1.0,0.5,0.5,0.0] ), TexturedVertex2D::new([400f32,400f32],[1f32,0f32],[0.5,0.5,0.5,0.6]), TexturedVertex2D::new([0f32,400f32],[0f32,0f32],[0.5,0.5,0.5,1.0]), TexturedVertex2D::new([0f32,0f32],[0f32,1f32],[0.5,0.5,0.5,1.0]), ]; let _image1=graphics.push_textured_object_raw( &vertices, &[0,1,3,1,2,3], PrimitiveType::Triangles ).unwrap(); let image_base=ImageBase::new( [400f32,0f32,400f32,400f32], [0.5,0.5,0.5,1.0] ); let _image2=graphics.push_textured_object(&image_base).unwrap(); let image_base=ImageObject::new( [800f32,0f32,200f32,400f32], [0f32,0f32,0.5f32,1f32], [1.0;4] ); let _image3=graphics.push_textured_object(&image_base).unwrap(); *app.storage()=Some(Texture::from_path("logo_400x400.png").unwrap()); app.event_loop.run(|event,_app_control|{ match event{ _=>{} } }); }
WindowHandle; impl WindowProcedure<WindowInner<Option<Texture>>> for WindowHandle{ fn render(window:&Window,window_inner:&mut WindowInner<Option<Texture>>){ window_inner.context().make_current(true).unwrap_or_else(|_|{quit()}); let [width,height]=window.client_size(); unsafe{ window_inner.graphics().core().viewport.set([0,0,width as i32,height as i32]); } window_inner.graphics().draw_parameters().set_viewport([0f32,0f32,width as f32,height as f32]); if let Some(texture)=window_inner.storage_ref().as_ref(){ window_inner.graphics_ref().clear_colour([1f32;4]); window_inner.graphics_ref().draw_stack_textured_object(0,texture.texture_2d()); window_inner.graphics_ref().draw_stack_textured_object(1,texture.texture_2d()); window_inner.graphics_ref().draw_stack_textured_object(2,texture.texture_2d()); window_inner.graphics_ref().core().finish(); window_inner.context().swap_buffers().unwrap_or_else(|_|{quit()}); } } fn handle(event:WindowEvent,_window:&Window,window_inner:&mut WindowInner<Option<Texture>>){ match event{ WindowEvent::KeyPress(VirtualKeyCode::A)=>{ let image_base=ImageBase::new( [400f32,100f32,100f32,100f32], [0.5,0.5,0.5,1.0] ); window_inner.graphics().write_stack_textured_object_vertices(1,&image_base.vertices()); } WindowEvent::CloseRequest=>quit(), _=>{} } } } fn main(){ let app_attributes=AppAttributes::new(); let texture:Option<Texture>=None; let app=App::new::<WindowHandle>(app_attributes,texture); let graphics=app.graphics(); graphics.core().blending.enable(); graphics.
random
[ { "content": "struct WindowStorage{\n\n free_ids:Vec<usize>,\n\n windows:Vec<Option<Window>>,\n\n window_graphics:Vec<Option<WindowGraphics>>,\n\n}\n\n\n\nimpl WindowStorage{\n\n pub fn empty(capacity:usize)->WindowStorage{\n\n let mut free_ids=Vec::with_capacity(capacity);\n\n let mut windows=Vec::with_capacity(capacity);\n\n let mut window_graphics=Vec::with_capacity(capacity);\n\n\n\n for id in (0..capacity).rev(){\n\n free_ids.push(id);\n\n windows.push(None);\n\n window_graphics.push(None);\n\n }\n\n\n\n Self{\n\n free_ids,\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 0, "score": 152275.7384070721 }, { "content": "struct WindowGraphics{\n\n context:OpenGLRenderContext,\n\n graphics:Graphics,\n\n texture:Texture,\n\n}\n\n\n", "file_path": "examples/raw.rs", "rank": 1, "score": 147505.70798290713 }, { "content": "/// Indicates to the event loop that it's thread has made a request to close.\n\npub fn quit(){\n\n unsafe{\n\n winapi::um::winuser::PostQuitMessage(0);\n\n }\n\n}", "file_path": "basement/src/windows/window/mod.rs", "rank": 2, "score": 108564.12205052291 }, { "content": "struct WindowHandle;\n\n\n\nimpl WindowProcedure<WindowInner<Option<(Texture,f32)>>> for WindowHandle{\n\n fn render(window:&Window,window_inner:&mut WindowInner<Option<(Texture,f32)>>){\n\n window_inner.draw(window,|_,graphics,texture|{\n\n graphics.clear_colour([1f32;4]);\n\n\n\n // read here (line 83)\n\n if let Some((texture,shift))=texture.as_ref(){\n\n graphics.draw_parameters().switch(DrawMode::Shift);\n\n graphics.draw_parameters().set_shift([*shift;2]);\n\n\n\n graphics.draw_stack_textured_object(0,texture.texture_2d());\n\n\n\n graphics.draw_parameters().switch(DrawMode::Shift);\n\n }\n\n }).unwrap_or_else(|_|{quit()});\n\n }\n\n\n\n fn handle(event:WindowEvent,_window:&Window,_window_inner:&mut WindowInner<Option<(Texture,f32)>>){\n\n match event{\n\n WindowEvent::CloseRequest=>quit(),\n\n _=>{}\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"examples/shifting.rs", "rank": 3, "score": 106275.46591186835 }, { "content": "struct Handler;\n\n\n\nimpl WindowProcedure<()> for Handler{\n\n fn render(_:&Window,_:&mut ()){}\n\n\n\n fn handle(event:WindowEvent,window:&Window,_args:&mut ()){\n\n match event{\n\n WindowEvent::CloseRequest=>window.destroy().unwrap(),\n\n\n\n WindowEvent::Destroy=>quit(),\n\n _=>{}\n\n }\n\n }\n\n}\n\n\n", "file_path": "basement/examples/window.rs", "rank": 4, "score": 106275.46591186835 }, { "content": "struct WindowHandle;\n\n\n\nimpl WindowProcedure<WindowInner<Option<(Texture,f32)>>> for WindowHandle{\n\n fn render(window:&Window,window_inner:&mut WindowInner<Option<(Texture,f32)>>){\n\n if let Some((_,angle))=window_inner.storage(){\n\n *angle+=0.01\n\n }\n\n window_inner.draw(window,|window,graphics,texture|{\n\n graphics.clear_colour([1f32;4]);\n\n\n\n // read here (line 83)\n\n if let Some((texture,angle))=texture.as_ref(){\n\n let [width,height]=window.client_size();\n\n\n\n graphics.draw_parameters().switch(DrawMode::Rotation);\n\n graphics.draw_parameters().set_rotation(\n\n [angle.cos(),angle.sin(),width as f32/2f32,height as f32/2f32]\n\n );\n\n\n\n graphics.draw_stack_textured_object(0,texture.texture_2d());\n", "file_path": "examples/rotating.rs", "rank": 5, "score": 106275.46591186835 }, { "content": "struct WindowHandle;\n\n\n\nimpl WindowProcedure<WindowInner<Option<Texture>>> for WindowHandle{\n\n fn render(window:&Window,window_inner:&mut WindowInner<Option<Texture>>){\n\n window_inner.draw(window,|window,graphics,texture|{\n\n graphics.clear_colour([1f32;4]);\n\n\n\n if let Some(texture)=texture.as_ref(){\n\n let [width,height]=window.client_size();\n\n\n\n graphics.draw_parameters().set_shift([(width/2) as f32-200f32,(height/2) as f32-200f32]);\n\n graphics.draw_stack_textured_object(0,texture.texture_2d());\n\n }\n\n }).unwrap_or_else(|_|{quit()});\n\n }\n\n\n\n fn handle(event:WindowEvent,window:&Window,_window_inner:&mut WindowInner<Option<Texture>>){\n\n 
match event{\n\n\n\n WindowEvent::KeyPress(VirtualKeyCode::A)=>{\n", "file_path": "examples/fullscreen.rs", "rank": 6, "score": 106275.46591186835 }, { "content": "struct WindowHandle;\n\n\n\nimpl WindowProcedure<WindowInner<Option<CachedFont>>> for WindowHandle{\n\n fn render(window:&Window,window_inner:&mut WindowInner<Option<CachedFont>>){\n\n window_inner.draw(window,|_,graphics,font|{\n\n graphics.clear_colour([0f32,0f32,0f32,1f32]);\n\n\n\n if let Some(font)=font.as_ref(){\n\n let mut position=[120f32,240f32];\n\n let mut horizontal_advance=0f32;\n\n for character in \"Hello, world!!!\".chars(){\n\n graphics.draw_char(\n\n character,\n\n [1f32;4],\n\n position,\n\n Some(&mut horizontal_advance),\n\n Scale::new(0.1f32,0.1f32),\n\n font,\n\n );\n\n\n", "file_path": "examples/text.rs", "rank": 7, "score": 106275.46591186835 }, { "content": "struct WindowHandle;\n\n\n\nimpl WindowProcedure<WindowInner<Option<Texture>>> for WindowHandle{\n\n fn render(window:&Window,window_inner:&mut WindowInner<Option<Texture>>){\n\n window_inner.draw(window,|_window,graphics,texture|{\n\n graphics.clear_colour([1f32;4]);\n\n\n\n if let Some(texture)=texture.as_ref(){\n\n graphics.draw_stack_textured_object(0,texture.texture_2d());\n\n graphics.draw_stack_textured_object(1,texture.texture_2d());\n\n graphics.draw_stack_textured_object(2,texture.texture_2d());\n\n }\n\n }).unwrap_or_else(|_|{quit()});\n\n }\n\n\n\n fn handle(event:WindowEvent,_window:&Window,_window_inner:&mut WindowInner<Option<Texture>>){\n\n match event{\n\n WindowEvent::CloseRequest=>quit(),\n\n _=>{}\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/images.rs", "rank": 8, "score": 106275.46591186835 }, { "content": "struct Handler;\n\n\n\nimpl WindowProcedure<RenderData> for Handler{\n\n fn render(window:&Window,args:&mut RenderData){\n\n // use it when you have more than one window\n\n args.context.make_current(true).unwrap_or_else(|_|{quit()});\n\n\n\n // set viewport if a window may change it's size\n\n // 
or if you have more than one window\n\n // otherwise set it after creating the window\n\n let [width,height]=window.client_size();\n\n\n\n unsafe{\n\n GCore.viewport.set([0,0,width as i32,height as i32]);\n\n\n\n args.colour[0]+=0.01;\n\n if args.colour[0]>=1f32{\n\n args.colour[0]=0f32;\n\n }\n\n\n", "file_path": "basement/examples/texture_test.rs", "rank": 10, "score": 105523.22598286545 }, { "content": "/// An empty struct for an empty window procedure.\n\nstruct EmptyHandler;\n\n\n\nimpl WindowProcedure<()> for EmptyHandler{\n\n fn render(_:&Window,_:&mut ()){\n\n\n\n }\n\n fn handle(_:WindowEvent,_:&Window,_:&mut ()){}\n\n}\n\n\n", "file_path": "basement/examples/window.rs", "rank": 11, "score": 103847.46318660238 }, { "content": "struct WindowHandle;\n\n\n\nimpl WindowProcedure<WindowInner<RenderData>> for WindowHandle{\n\n fn render(window:&Window,window_inner:&mut WindowInner<RenderData>){\n\n let draw_start=Instant::now();\n\n let redraw_event_period=draw_start.duration_since(window_inner.storage().redraw.last_redraw);\n\n window_inner.storage().redraw.last_redraw=draw_start;\n\n\n\n window_inner.storage().redraw.current_fps=\n\n (Duration::from_secs(1).as_nanos()/redraw_event_period.as_nanos()) as u32;\n\n\n\n window_inner.draw(window,|_window,graphics,render_data|{\n\n graphics.clear_colour([1f32;4]);\n\n\n\n if let Some(texture)=render_data.texture.as_ref(){\n\n graphics.draw_stack_textured_object(0,texture.texture_2d());\n\n graphics.draw_stack_textured_object(1,texture.texture_2d());\n\n graphics.draw_stack_textured_object(2,texture.texture_2d());\n\n }\n\n }).unwrap_or_else(|_|{quit()});\n\n }\n\n\n\n fn handle(event:WindowEvent,_window:&Window,_window_inner:&mut WindowInner<RenderData>){\n\n match event{\n\n WindowEvent::CloseRequest=>quit(),\n\n _=>{}\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/fps_counter.rs", "rank": 12, "score": 103837.47428901815 }, { "content": "/// An empty struct for an empty window procedure.\n\nstruct 
EmptyHandler;\n\n\n\nimpl WindowProcedure<RenderData> for EmptyHandler{\n\n fn render(_:&Window,_:&mut RenderData){}\n\n fn handle(_:WindowEvent,_:&Window,_:&mut RenderData){}\n\n}\n\n\n", "file_path": "basement/examples/texture_test.rs", "rank": 13, "score": 103182.49522226996 }, { "content": "struct RenderData{\n\n context:OpenGLRenderContext,\n\n colour:Colour,\n\n}\n\n\n", "file_path": "basement/examples/texture_test.rs", "rank": 14, "score": 103172.50632468573 }, { "content": "use super::{\n\n // types\n\n FrameIDType,\n\n ObjectIDType,\n\n ElementIndexType,\n\n // consts\n\n frame_size,\n\n // structs\n\n TexturedVertex2D,\n\n HeapObject,\n\n StackObject,\n\n ObjectAllocation,\n\n DrawParameters,\n\n // enums\n\n HeapDrawType,\n\n StackDrawType,\n\n};\n\n\n\nuse cat_engine_basement::graphics::{\n\n GCore,\n", "file_path": "src/graphics/texture_graphics.rs", "rank": 15, "score": 94368.14893180702 }, { "content": " object_allocation:ObjectAllocation<TexturedVertex2D>,\n\n\n\n draw:Program,\n\n\n\n}\n\n\n\nimpl TextureGraphics{\n\n pub fn new(\n\n stack_vertices:ElementIndexType,\n\n stack_indices:i32,\n\n stack_objects:ObjectIDType,\n\n heap_vertex_frames:FrameIDType,\n\n heap_index_frames:FrameIDType,\n\n heap_objects:ObjectIDType\n\n )->TextureGraphics{\n\n let vertex_shader=VertexShader::new(&include_str!(\"shaders/texture/vertex_shader.glsl\")).unwrap();\n\n let fragment_shader=FragmentShader::new(&include_str!(\"shaders/texture/fragment_shader.glsl\")).unwrap();\n\n\n\n let program=Program::new(&vertex_shader,&fragment_shader).unwrap();\n\n\n", "file_path": "src/graphics/texture_graphics.rs", "rank": 16, "score": 94365.19556649684 }, { "content": " &indices,\n\n &object.count,\n\n object.primitive_type\n\n )\n\n }\n\n }\n\n\n\n self.vertex_array.unbind();\n\n }\n\n }\n\n}\n\n\n\n/// Stack.\n\nimpl TextureGraphics{\n\n pub fn push_object_raw(\n\n &mut self,\n\n vertices:&[TexturedVertex2D],\n\n indices:&[ElementIndexType],\n\n 
primitive_type:PrimitiveType\n\n )->Option<ObjectIDType>{\n", "file_path": "src/graphics/texture_graphics.rs", "rank": 17, "score": 94364.74618149346 }, { "content": " core::{\n\n drawing::PrimitiveType,\n\n buffer::BufferUsage,\n\n },\n\n level0::VertexArray,\n\n level1::{\n\n VertexBuffer,\n\n IndexBuffer,\n\n VertexShader,\n\n FragmentShader,\n\n Texture2D,\n\n },\n\n level2::Program,\n\n};\n\n\n\npub struct TextureGraphics{\n\n vertex_buffer:VertexBuffer<TexturedVertex2D>,\n\n index_buffer:IndexBuffer<ElementIndexType>,\n\n vertex_array:VertexArray<TexturedVertex2D>,\n\n\n", "file_path": "src/graphics/texture_graphics.rs", "rank": 18, "score": 94364.5259058482 }, { "content": " heap_vertex_frames,\n\n heap_index_frames,\n\n heap_objects,\n\n ),\n\n\n\n draw:program,\n\n }\n\n }\n\n}\n\n\n\n/// Heap.\n\nimpl TextureGraphics{\n\n pub fn add_object_raw(\n\n &mut self,\n\n vertices:&[TexturedVertex2D],\n\n indices:&[ElementIndexType],\n\n primitive_type:PrimitiveType\n\n )->Option<ObjectIDType>{\n\n self.object_allocation.heap_system.add_object(\n\n &self.vertex_buffer,\n", "file_path": "src/graphics/texture_graphics.rs", "rank": 19, "score": 94364.20813600374 }, { "content": "\n\n pub fn write_stack_object_vertices(&mut self,id:ObjectIDType,vertices:&[TexturedVertex2D]){\n\n self.object_allocation.stack_system.write_object_vertices(&self.vertex_buffer,id,vertices)\n\n }\n\n\n\n pub fn write_stack_object_indices(&mut self,id:ObjectIDType,indices:&[ElementIndexType]){\n\n self.object_allocation.stack_system.write_object_indices(&self.index_buffer,id,indices)\n\n }\n\n\n\n pub fn draw_stack_object(\n\n &self,\n\n index:ObjectIDType,\n\n texture:&Texture2D,\n\n draw_parameters:&DrawParameters\n\n ){\n\n if let Some(object)=&self.object_allocation.stack_system.get_object(index){\n\n self.draw.bind();\n\n\n\n self.vertex_array.bind();\n\n self.vertex_buffer.bind();\n", "file_path": "src/graphics/texture_graphics.rs", "rank": 20, "score": 94359.93400269294 }, { 
"content": " let _=self.draw.set_uniform_value(\"vertex_shift\",shift);\n\n }\n\n\n\n if let Some(rotation)=draw_parameters.rotation(){\n\n let _=self.draw.set_uniform_value(\"vertex_rotation\",rotation);\n\n }\n\n\n\n self.vertex_array.bind();\n\n self.index_buffer.bind();\n\n self.vertex_buffer.bind();\n\n\n\n texture.bind();\n\n\n\n match object.draw_type{\n\n HeapDrawType::Vertices(first)=>unsafe{\n\n GCore.drawing.multi_draw_arrays(&first,&object.count,object.primitive_type)\n\n }\n\n\n\n HeapDrawType::Indices(indices)=>unsafe{\n\n GCore.drawing.multi_draw_elements_typed::<ElementIndexType>(\n", "file_path": "src/graphics/texture_graphics.rs", "rank": 21, "score": 94359.00461993605 }, { "content": " self.index_buffer.bind();\n\n\n\n let _=self.draw.set_uniform_value(\"viewport\",draw_parameters.viewport());\n\n\n\n let _=self.draw.set_uniform_value(\"draw_mode\",draw_parameters.flag());\n\n\n\n if let Some(shift)=draw_parameters.shift(){\n\n let _=self.draw.set_uniform_value(\"vertex_shift\",shift);\n\n }\n\n\n\n if let Some(rotation)=draw_parameters.rotation(){\n\n let _=self.draw.set_uniform_value(\"vertex_rotation\",rotation);\n\n }\n\n\n\n texture.bind();\n\n\n\n let drawable=object.drawable();\n\n match drawable.draw_type{\n\n StackDrawType::Vertices(first)=>unsafe{\n\n GCore.drawing.draw_arrays(first,drawable.count,object.primitive_type)\n", "file_path": "src/graphics/texture_graphics.rs", "rank": 22, "score": 94358.9654067702 }, { "content": " &self.index_buffer,\n\n vertices,\n\n indices,\n\n primitive_type\n\n )\n\n }\n\n\n\n /// Removes an object.\n\n /// \n\n /// It's not actually removes it, just clears it's data.\n\n pub fn remove_object(&mut self,index:ObjectIDType){\n\n self.object_allocation.heap_system.remove_object(index)\n\n }\n\n\n\n pub fn get_heap_object(&self,id:ObjectIDType)->Option<&HeapObject>{\n\n self.object_allocation.heap_system.get_object(id)\n\n }\n\n\n\n pub fn write_heap_object_vertices(&mut 
self,id:ObjectIDType,vertices:&[TexturedVertex2D]){\n\n self.object_allocation.heap_system.write_object_vertices(&self.vertex_buffer,id,vertices)\n", "file_path": "src/graphics/texture_graphics.rs", "rank": 23, "score": 94358.94617466499 }, { "content": " }\n\n\n\n pub fn write_heap_object_indices(&mut self,id:ObjectIDType,indices:&[ElementIndexType]){\n\n self.object_allocation.heap_system.write_object_indices(&self.index_buffer,id,indices)\n\n }\n\n\n\n pub fn draw_heap_object(\n\n &self,\n\n index:ObjectIDType,\n\n texture:&Texture2D,\n\n draw_parameters:&DrawParameters\n\n ){\n\n if let Some(object)=self.object_allocation.heap_system.get_drawable_object(index){\n\n self.draw.bind();\n\n\n\n let _=self.draw.set_uniform_value(\"viewport\",draw_parameters.viewport());\n\n\n\n let _=self.draw.set_uniform_value(\"draw_mode\",draw_parameters.flag());\n\n\n\n if let Some(shift)=draw_parameters.shift(){\n", "file_path": "src/graphics/texture_graphics.rs", "rank": 24, "score": 94358.90843586852 }, { "content": " let heap_vertex_buffer_size=heap_vertex_frames as ElementIndexType*frame_size as ElementIndexType;\n\n\n\n let heap_index_buffer_size=heap_index_frames as ElementIndexType*frame_size as ElementIndexType;\n\n\n\n let vertex_buffer_size=heap_vertex_buffer_size+stack_vertices;\n\n let index_buffer_size=heap_index_buffer_size+stack_indices as ElementIndexType;\n\n\n\n let vertex_buffer=VertexBuffer::empty(vertex_buffer_size as isize,BufferUsage::DynamicDraw).unwrap();\n\n let index_buffer=IndexBuffer::empty(index_buffer_size as isize,BufferUsage::DynamicDraw).unwrap();\n\n let vertex_array=VertexArray::new(vertex_buffer.as_raw());\n\n\n\n Self{\n\n vertex_buffer,\n\n index_buffer,\n\n vertex_array,\n\n\n\n object_allocation:ObjectAllocation::new(\n\n stack_vertices,\n\n stack_indices,\n\n stack_objects,\n", "file_path": "src/graphics/texture_graphics.rs", "rank": 25, "score": 94355.94459253052 }, { "content": " 
self.object_allocation.stack_system.push_object_raw(\n\n &self.vertex_buffer,\n\n &self.index_buffer,\n\n vertices,\n\n indices,\n\n primitive_type\n\n )\n\n }\n\n\n\n pub fn get_stack_object(&self,id:ObjectIDType)->Option<&StackObject>{\n\n self.object_allocation.stack_system.get_object(id)\n\n }\n\n\n\n pub fn pop_object(&mut self){\n\n self.object_allocation.stack_system.pop_object()\n\n }\n\n\n\n pub fn clear_stack(&mut self){\n\n self.object_allocation.stack_system.clear()\n\n }\n", "file_path": "src/graphics/texture_graphics.rs", "rank": 26, "score": 94355.94459253052 }, { "content": " }\n\n\n\n StackDrawType::Indices(first)=>unsafe{\n\n GCore.drawing.draw_elements_typed::<ElementIndexType>(\n\n first,\n\n drawable.count,\n\n object.primitive_type\n\n )\n\n }\n\n }\n\n\n\n self.vertex_array.unbind();\n\n }\n\n }\n\n}", "file_path": "src/graphics/texture_graphics.rs", "rank": 27, "score": 94355.94459253052 }, { "content": "\n\n // pub fn get_render_context(&self,id:usize)->Option<&OpenGLRenderContext>{\n\n // self.app.window_storage.get_render_context(id)\n\n // }\n\n\n\n // pub fn get_render_context_unchecked(&self,id:usize)->&OpenGLRenderContext{\n\n // self.app.window_storage.get_render_context_unchecked(id)\n\n // }\n\n\n\n pub fn get_window_graphics(&self,id:usize)->Option<&WindowGraphics>{\n\n self.app.window_storage.get_window_graphics(id)\n\n }\n\n\n\n pub fn get_window_graphics_unchecked(&self,id:usize)->&WindowGraphics{\n\n self.app.window_storage.get_window_graphics_unchecked(id)\n\n }\n\n\n\n pub fn get_window_graphics_mut(&mut self,id:usize)->Option<&mut WindowGraphics>{\n\n self.app.window_storage.get_window_graphics_mut(id)\n\n }\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 28, "score": 91815.67869457812 }, { "content": " let graphics=Graphics::new(attributes.graphics);\n\n\n\n let mut app_storage=Box::new(storage);\n\n\n\n unsafe{\n\n (window_inner.as_mut() as *mut WindowInner<S>).write(WindowInner{\n\n graphics,\n\n 
context,\n\n storage:app_storage.as_mut()\n\n });\n\n }\n\n\n\n unsafe{window.set_window_handle::<W,WindowInner<S>>()}\n\n\n\n Self{\n\n event_loop,\n\n window_class:class,\n\n window,\n\n window_inner:UnsafeCell::new(window_inner),\n\n storage:UnsafeCell::new(app_storage),\n", "file_path": "src/app/mono_windowing/windows/mod.rs", "rank": 29, "score": 91815.05528798574 }, { "content": "\n\n pub fn get_window_graphics_unchecked_mut(&mut self,id:usize)->&mut WindowGraphics{\n\n self.app.window_storage.get_window_graphics_unchecked_mut(id)\n\n }\n\n\n\n // pub fn draw<F:FnMut(&Window,&mut Graphics)>(&mut self,id:usize,mut f:F)->Result<(),WinError>{\n\n // if let Some(window)=self.app.window_storage.get_window(id){\n\n // let window:&'static Window=unsafe{std::mem::transmute(window)};\n\n // // Указатель на графические функции (чтобы не ругался)\n\n // let graphics:&'static mut Graphics=unsafe{std::mem::transmute(\n\n // self.app.window_storage.get_graphics_unchecked_mut(id) as *mut Graphics\n\n // )};\n\n\n\n // let render_context=self.app.window_storage.get_render_context_unchecked(id);\n\n // render_context.make_current(true)?;\n\n\n\n // let [width,height]=window.client_size();\n\n // graphics.core().viewport().set([0,0,width as i32,height as i32]);\n\n // graphics.draw_parameters().change_viewport([0f32,0f32,width as f32,height as f32]);\n\n\n\n // f(window,graphics);\n\n\n\n // render_context.swap_buffers()?;\n\n // }\n\n // Ok(())\n\n // }\n\n}", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 30, "score": 91814.98741569919 }, { "content": "use crate::graphics::{\n\n Graphics,\n\n Graphics2DAttributes,\n\n};\n\n\n\nuse cat_engine_basement::windows::{\n\n WindowClass,\n\n EventLoop,\n\n OpenGraphicsLibrary,\n\n};\n\n\n\npub use cat_engine_basement::{\n\n windows::{\n\n Window,\n\n WinError,\n\n CursorIcon,\n\n Background,\n\n Fullscreen,\n\n Monitor,\n\n OpenGLRenderContext,\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 31, 
"score": 91814.7812811062 }, { "content": "use crate::graphics::{\n\n Graphics,\n\n Graphics2DAttributes,\n\n};\n\n\n\nuse cat_engine_basement::windows::{\n\n WindowClass,\n\n OpenGraphicsLibrary,\n\n};\n\n\n\npub use cat_engine_basement::{\n\n windows::{\n\n EventLoop,\n\n Window,\n\n CursorIcon,\n\n Background,\n\n Fullscreen,\n\n Monitor,\n\n MouseButton,\n\n OpenGLRenderContext,\n", "file_path": "src/app/mono_windowing/windows/mod.rs", "rank": 32, "score": 91814.7812811062 }, { "content": "\n\n pub fn get_window_graphics(&self,id:usize)->Option<&WindowGraphics>{\n\n self.window_storage.get_window_graphics(id)\n\n }\n\n\n\n pub fn ge_windowt_graphics_unchecked(&self,id:usize)->&WindowGraphics{\n\n self.window_storage.get_window_graphics_unchecked(id)\n\n }\n\n\n\n pub fn get_window_graphics_mut(&mut self,id:usize)->Option<&mut WindowGraphics>{\n\n self.window_storage.get_window_graphics_mut(id)\n\n }\n\n\n\n pub fn get_window_graphics_unchecked_mut(&mut self,id:usize)->&mut WindowGraphics{\n\n self.window_storage.get_window_graphics_unchecked_mut(id)\n\n }\n\n}\n\n\n\nimpl App{\n\n pub fn run<F:FnMut(Event,&mut AppControl)>(&mut self,mut event_handler:F){\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 33, "score": 91814.65425459876 }, { "content": "impl App{\n\n pub fn new(attributes:AppAttributes)->App{\n\n let mut graphics_library=None;\n\n\n\n let mut event_loop=EventLoop::new(attributes.event_loop);\n\n\n\n let class=WindowClass::new(attributes.class).unwrap();\n\n\n\n let mut window_storage=WindowStorage::empty(attributes.windows_limit as usize);\n\n\n\n Self{\n\n window_class:class,\n\n window_storage,\n\n graphics_library,\n\n event_loop,\n\n }\n\n }\n\n}\n\n\n\nimpl App{\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 34, "score": 91814.53080076723 }, { "content": " else{\n\n None\n\n }\n\n }\n\n\n\n pub fn get_window_graphics_unchecked_mut(&mut self,id:usize)->&mut WindowGraphics{\n\n unsafe{\n\n let 
maybe_graphics=self.window_graphics.get_unchecked_mut(id).as_mut();\n\n std::mem::transmute(maybe_graphics)\n\n }\n\n }\n\n}\n\n\n\npub struct App{\n\n graphics_library:Option<OpenGraphicsLibrary>,\n\n event_loop:EventLoop,\n\n window_class:WindowClass,\n\n window_storage:WindowStorage,\n\n}\n\n\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 35, "score": 91813.93723150487 }, { "content": " WindowAttributes,\n\n WindowClassAttributes,\n\n VirtualKeyCode,\n\n LoopControl,\n\n EventLoopAttributes,\n\n OpenGLRenderContextAttributes,\n\n UpdateInterval,\n\n\n\n WindowProcedure,\n\n quit,\n\n },\n\n event::{\n\n Event,\n\n WindowEvent,\n\n },\n\n};\n\n\n\npub struct WindowGraphics{\n\n pub window_id:usize,\n\n pub graphics:Graphics,\n\n pub context:OpenGLRenderContext,\n\n}\n\n\n\nimpl WindowGraphics{\n\n\n\n}\n\n\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 36, "score": 91813.53853828636 }, { "content": "\n\npub struct AppAttributes{\n\n pub windows_limit:u8,\n\n pub event_loop:EventLoopAttributes,\n\n pub class:WindowClassAttributes,\n\n // pub window:WindowAttributes,\n\n // pub render_context:OpenGLRenderContextAttributes,\n\n // pub graphics:Graphics2DAttributes,\n\n}\n\n\n\nimpl AppAttributes{\n\n pub fn new()->AppAttributes{\n\n Self{\n\n windows_limit:1u8,\n\n event_loop:EventLoopAttributes::new(),\n\n class:WindowClassAttributes::new(\"NewWindowClass\"),\n\n // window:WindowAttributes::new(\"NewWindow\"),\n\n // render_context:OpenGLRenderContextAttributes::new(),\n\n // graphics:Graphics2DAttributes::new(),\n\n }\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 37, "score": 91813.20292594787 }, { "content": " }\n\n }\n\n\n\n /// Replaces the window procedure with functions defined by `W`.\n\n pub fn set_window_handle<W:WindowProcedure<WindowInner<S>>>(&self){\n\n unsafe{\n\n self.window.set_window_handle::<W,WindowInner<S>>()\n\n }\n\n }\n\n}\n\n\n\nimpl<S:Sized+'static> App<S>{\n\n pub fn 
window_inner(&self)->&mut WindowInner<S>{\n\n unsafe{\n\n (&mut*self.window_inner.get()).as_mut()\n\n }\n\n }\n\n\n\n pub fn graphics(&self)->&mut Graphics{\n\n &mut self.window_inner().graphics\n", "file_path": "src/app/mono_windowing/windows/mod.rs", "rank": 38, "score": 91813.16577739135 }, { "content": " }\n\n}\n\n\n\npub struct AppControl{\n\n app:&'static mut App,\n\n loop_control:&'static mut LoopControl,\n\n}\n\n\n\nimpl AppControl{\n\n pub fn new(app:&mut App,loop_control:&mut LoopControl)->AppControl{\n\n unsafe{\n\n Self{\n\n app:std::mem::transmute(app),\n\n loop_control:std::mem::transmute(loop_control)\n\n }\n\n }\n\n }\n\n\n\n pub fn create_window<W:WindowProcedure<Option<WindowGraphics>>>(\n\n &mut self,\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 39, "score": 91813.11362165168 }, { "content": "\n\nimpl AppAttributes{\n\n pub fn new()->AppAttributes{\n\n Self{\n\n event_loop:EventLoopAttributes::new(),\n\n class:WindowClassAttributes::new(\"CatEngineWindowClass\"),\n\n window:WindowAttributes::new(\"CatEngineWindow\"),\n\n render_context:OpenGLRenderContextAttributes::new(),\n\n graphics:Graphics2DAttributes::new(),\n\n }\n\n }\n\n}", "file_path": "src/app/mono_windowing/windows/mod.rs", "rank": 40, "score": 91812.6382875876 }, { "content": " window_attributes:WindowAttributes,\n\n render_context_attributes:OpenGLRenderContextAttributes,\n\n graphics_attributes:Graphics2DAttributes\n\n )->Result<bool,WinError>{\n\n self.app.create_window::<W>(\n\n window_attributes,\n\n render_context_attributes,\n\n graphics_attributes\n\n )\n\n }\n\n\n\n /// Break app's event loop.\n\n pub fn break_loop(&mut self){\n\n *self.loop_control=LoopControl::Break;\n\n }\n\n\n\n /// Sets the 'lazy' mode flag.\n\n pub fn lazy(&mut self,lazy:bool){\n\n if lazy{\n\n *self.loop_control=LoopControl::Lazy;\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 41, "score": 91812.49097082014 }, { "content": " 
self.context.make_current(true)?;\n\n\n\n let [width,height]=window.client_size();\n\n unsafe{\n\n self.graphics.core().viewport.set([0,0,width as i32,height as i32]);\n\n }\n\n self.graphics.draw_parameters().set_viewport([0f32,0f32,width as f32,height as f32]);\n\n\n\n f(window,&mut self.graphics,unsafe{&*self.storage});\n\n\n\n self.graphics.core().finish();\n\n self.context.swap_buffers()?;\n\n Ok(())\n\n }\n\n}\n\n\n\n/// A structure to easily create a windowed application.\n\n/// \n\n/// Loads everything needed for drawing.\n\npub struct App<S:Sized+'static>{\n", "file_path": "src/app/mono_windowing/windows/mod.rs", "rank": 42, "score": 91812.24517437402 }, { "content": " pub event_loop:EventLoop,\n\n window_class:WindowClass,\n\n pub window:Window,\n\n window_inner:UnsafeCell<Box<WindowInner<S>>>,\n\n storage:UnsafeCell<Box<S>>,\n\n}\n\n\n\nimpl<S:Sized+'static> App<S>{\n\n /// Creates an application with the given attributes.\n\n /// \n\n /// `W` is the type\n\n /// that implements the `WindowProcedure` trait\n\n /// that defines window's behavior.\n\n /// \n\n /// `WindowInner` stores graphics and context structures and `S`.\n\n /// \n\n /// `S` is user defined type for anything (e.g. 
for storing objects for rendering).\n\n pub fn new<W:WindowProcedure<WindowInner<S>>>(attributes:AppAttributes,storage:S)->App<S>{\n\n let event_loop=EventLoop::new(attributes.event_loop);\n\n\n", "file_path": "src/app/mono_windowing/windows/mod.rs", "rank": 43, "score": 91812.02086387493 }, { "content": " WindowAttributes,\n\n WindowClassAttributes,\n\n VirtualKeyCode,\n\n LoopControl,\n\n EventLoopAttributes,\n\n OpenGLRenderContextAttributes,\n\n EventInterval,\n\n WinError,\n\n WindowProcedure,\n\n ProcessEvent,\n\n Event,\n\n WindowEvent,\n\n quit,\n\n },\n\n};\n\n\n\nuse std::{\n\n cell::UnsafeCell,\n\n mem::MaybeUninit\n\n};\n", "file_path": "src/app/mono_windowing/windows/mod.rs", "rank": 44, "score": 91811.55544762973 }, { "content": " }\n\n\n\n pub fn context(&self)->&OpenGLRenderContext{\n\n &self.window_inner().context\n\n }\n\n\n\n pub fn storage(&self)->&mut S{\n\n unsafe{\n\n (&mut*self.storage.get()).as_mut()\n\n }\n\n }\n\n}\n\n\n\npub struct AppAttributes{\n\n pub event_loop:EventLoopAttributes,\n\n pub class:WindowClassAttributes,\n\n pub window:WindowAttributes,\n\n pub render_context:OpenGLRenderContextAttributes,\n\n pub graphics:Graphics2DAttributes,\n\n}\n", "file_path": "src/app/mono_windowing/windows/mod.rs", "rank": 45, "score": 91810.8826451229 }, { "content": " pub fn get_window_graphics(&self,id:usize)->Option<&WindowGraphics>{\n\n if let Some(graphics)=self.window_graphics.get(id){\n\n graphics.as_ref()\n\n }\n\n else{\n\n None\n\n }\n\n }\n\n\n\n pub fn get_window_graphics_unchecked(&self,id:usize)->&WindowGraphics{\n\n unsafe{\n\n let maybe_graphics=self.window_graphics.get_unchecked(id).as_ref();\n\n std::mem::transmute(maybe_graphics)\n\n }\n\n }\n\n\n\n pub fn get_window_graphics_mut(&mut self,id:usize)->Option<&mut WindowGraphics>{\n\n if let Some(graphics)=self.window_graphics.get_mut(id){\n\n graphics.as_mut()\n\n }\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 46, "score": 91810.87114748542 }, { 
"content": " window_attributes,\n\n &mut self.window_graphics[id]\n\n ){\n\n Ok(window)=>{\n\n let context=OpenGLRenderContext::new(\n\n window.get_context(),\n\n context_attributes\n\n ).unwrap();\n\n\n\n if graphics_library.is_none(){\n\n let library=OpenGraphicsLibrary::new();\n\n library.load_functions();\n\n *graphics_library=Some(library)\n\n }\n\n\n\n let graphics=Graphics::new(graphics_attributes);\n\n\n\n self.windows[id]=Some(window);\n\n\n\n let window_graphics=WindowGraphics{\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 47, "score": 91810.75130437093 }, { "content": " }\n\n else{\n\n *self.loop_control=LoopControl::Run;\n\n }\n\n }\n\n}\n\n\n\nimpl AppControl{\n\n /// Checks whether an app has any windows.\n\n pub fn is_any_window(&self)->bool{\n\n self.app.window_storage.is_any_window()\n\n }\n\n\n\n pub fn get_window(&self,id:usize)->Option<&Window>{\n\n self.app.window_storage.get_window(id)\n\n }\n\n\n\n pub fn get_window_unchecked(&self,id:usize)->&Window{\n\n self.app.window_storage.get_window_unchecked(id)\n\n }\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 48, "score": 91810.63268806098 }, { "content": "\n\npub struct EmptyHandler;\n\n\n\nimpl<S> WindowProcedure<WindowInner<S>> for EmptyHandler{\n\n fn render(_:&Window,_:&mut WindowInner<S>){}\n\n fn handle(_:WindowEvent,_:&Window,_:&mut WindowInner<S>){}\n\n}\n\n\n\npub struct WindowInner<S>{\n\n graphics:Graphics,\n\n context:OpenGLRenderContext,\n\n storage:*mut S,\n\n}\n\n\n\nimpl<S> WindowInner<S>{\n\n pub fn graphics_ref(&self)->&Graphics{\n\n &self.graphics\n\n }\n\n\n\n pub fn graphics(&mut self)->&mut Graphics{\n", "file_path": "src/app/mono_windowing/windows/mod.rs", "rank": 49, "score": 91810.48360375175 }, { "content": " windows,\n\n window_graphics,\n\n }\n\n }\n\n\n\n pub fn add_window<W:WindowProcedure<Option<WindowGraphics>>>(\n\n &mut self,\n\n window_class:&WindowClass,\n\n window_attributes:WindowAttributes,\n\n 
context_attributes:OpenGLRenderContextAttributes,\n\n graphics_library:&mut Option<OpenGraphicsLibrary>,\n\n graphics_attributes:Graphics2DAttributes,\n\n )->Result<bool,WinError>{\n\n if let Some(id)=self.free_ids.pop(){\n\n // let window_subclass_args=WindowSubclassArguments::new(main_thread_id as usize,id);\n\n // self.window_subclass_args[id]=Some(window_subclass_args);\n\n //.as_ref().unwrap();\n\n\n\n match Window::new::<W,Option<WindowGraphics>>(\n\n window_class,\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 50, "score": 91810.27404707472 }, { "content": " let event_loop:&'static mut EventLoop=unsafe{std::mem::transmute(&mut self.event_loop)};\n\n\n\n event_loop.run(|event,loop_control|{\n\n let mut app_control=AppControl::new(self,loop_control);\n\n\n\n match &event{\n\n // Event::WindowEvent{window_event,window_id}=>match window_event{\n\n // WindowEvent::Destroy=>{\n\n // let _window=app_control.app.window_storage.remove_window(*window_id);\n\n // }\n\n\n\n // _=>{}\n\n // }\n\n _=>{},\n\n }\n\n\n\n event_handler(event,&mut app_control);\n\n });\n\n }\n\n}\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 51, "score": 91810.21396389106 }, { "content": " window_id:id,\n\n graphics,\n\n context,\n\n };\n\n\n\n self.window_graphics[id]=Some(window_graphics);\n\n Ok(true)\n\n }\n\n Err(e)=>Err(e),\n\n }\n\n }\n\n else{\n\n Ok(false)\n\n }\n\n }\n\n\n\n pub fn remove_window(&mut self,id:usize)->Option<Window>{\n\n if let Some(maybe_window)=self.windows.get_mut(id){\n\n if let Some(window)=maybe_window.take(){\n\n unsafe{\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 52, "score": 91810.1383309693 }, { "content": " pub fn create_window<W:WindowProcedure<Option<WindowGraphics>>>(\n\n &mut self,\n\n window_attributes:WindowAttributes,\n\n render_context_attributes:OpenGLRenderContextAttributes,\n\n graphics_attributes:Graphics2DAttributes,\n\n )->Result<bool,WinError>{\n\n 
self.window_storage.add_window::<W>(\n\n &self.window_class,\n\n window_attributes,\n\n render_context_attributes,\n\n &mut self.graphics_library,\n\n graphics_attributes,\n\n )\n\n }\n\n\n\n pub fn remove_window(&mut self,id:usize)->Option<Window>{\n\n self.window_storage.remove_window(id)\n\n }\n\n}\n\n\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 53, "score": 91810.01973478297 }, { "content": " let _window_graphics=self.window_graphics.get_unchecked_mut(id).take().unwrap();\n\n }\n\n self.free_ids.push(id);\n\n Some(window)\n\n }\n\n else{\n\n None\n\n }\n\n }\n\n else{\n\n None\n\n }\n\n }\n\n\n\n pub fn is_any_window(&self)->bool{\n\n self.free_ids.len()<self.free_ids.capacity()\n\n }\n\n\n\n pub fn get_any_window(&self)->Option<&Window>{\n\n for maybe_window in &self.windows{\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 54, "score": 91809.5959526687 }, { "content": " &mut self.graphics\n\n }\n\n\n\n pub fn context(&self)->&OpenGLRenderContext{\n\n &self.context\n\n }\n\n\n\n pub fn storage(&mut self)->&mut S{\n\n unsafe{\n\n &mut *self.storage\n\n }\n\n }\n\n\n\n pub fn storage_ref(&self)->&S{\n\n unsafe{\n\n &*self.storage\n\n }\n\n }\n\n\n\n pub fn draw<F:FnMut(&Window,&mut Graphics,&S)>(&mut self,window:&Window,mut f:F)->Result<(),WinError>{\n", "file_path": "src/app/mono_windowing/windows/mod.rs", "rank": 55, "score": 91808.53181253922 }, { "content": " let class=WindowClass::new(attributes.class).unwrap();\n\n\n\n let inner=MaybeUninit::<WindowInner<S>>::zeroed();\n\n let inner=unsafe{inner.assume_init()};\n\n let mut window_inner:Box<WindowInner<S>>=Box::new(inner);\n\n\n\n let window=Window::new::<EmptyHandler,WindowInner<S>>(\n\n &class,\n\n attributes.window,\n\n window_inner.as_mut()\n\n ).unwrap();\n\n\n\n let context=OpenGLRenderContext::new(\n\n &window,\n\n attributes.render_context\n\n ).unwrap();\n\n\n\n let library=OpenGraphicsLibrary::new();\n\n library.load_functions();\n\n\n", "file_path": 
"src/app/mono_windowing/windows/mod.rs", "rank": 56, "score": 91807.75827777518 }, { "content": "impl App{\n\n pub fn is_any_window(&self)->bool{\n\n self.window_storage.is_any_window()\n\n }\n\n\n\n pub fn get_window(&self,id:usize)->Option<&Window>{\n\n self.window_storage.get_window(id)\n\n }\n\n\n\n pub fn get_window_unchecked(&self,id:usize)->&Window{\n\n self.window_storage.get_window_unchecked(id)\n\n }\n\n\n\n // pub fn get_render_context(&self,id:usize)->Option<&OpenGLRenderContext>{\n\n // self.window_storage.get_render_context(id)\n\n // }\n\n\n\n // pub fn get_render_context_unchecked(&self,id:usize)->&OpenGLRenderContext{\n\n // self.window_storage.get_render_context_unchecked(id)\n\n // }\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 57, "score": 91807.7504791345 }, { "content": " if let Some(window)=maybe_window{\n\n return Some(window)\n\n }\n\n }\n\n\n\n None\n\n }\n\n\n\n pub fn get_window(&self,id:usize)->Option<&Window>{\n\n if let Some(maybe_window)=self.windows.get(id){\n\n maybe_window.as_ref()\n\n }\n\n else{\n\n None\n\n }\n\n }\n\n\n\n pub fn get_window_unchecked(&self,id:usize)->&Window{\n\n unsafe{\n\n let maybe_window=self.windows.get_unchecked(id).as_ref();\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 58, "score": 91804.87940985162 }, { "content": " std::mem::transmute(maybe_window)\n\n }\n\n }\n\n\n\n // pub fn get_render_context(&self,id:usize)->Option<&OpenGLRenderContext>{\n\n // if let Some(render_context)=self.render_contexts.get(id){\n\n // render_context.as_ref()\n\n // }\n\n // else{\n\n // None\n\n // }\n\n // }\n\n\n\n // pub fn get_render_context_unchecked(&self,id:usize)->&OpenGLRenderContext{\n\n // unsafe{\n\n // let maybe_render_context=self.render_contexts.get_unchecked(id).as_ref();\n\n // std::mem::transmute(maybe_render_context)\n\n // }\n\n // }\n\n\n", "file_path": "src/app/multi_windowing/windows/mod.rs", "rank": 59, "score": 91801.84053711248 }, { "content": "use 
crate::graphics::{\n\n core::GLError,\n\n core::texture::{\n\n TextureBindTarget,\n\n Texture2DRewriteTarget,\n\n Texture2DWriteTarget,\n\n TextureMagFilter,\n\n TextureMinFilter,\n\n Texture2DInternalFormat,\n\n ImageDataFormat,\n\n TextureParameterTarget,\n\n },\n\n level0::Texture,\n\n};\n\n\n\npub struct Texture2D{\n\n texture:Texture,\n\n}\n\n\n\nimpl Texture2D{\n", "file_path": "basement/src/graphics/level1/texture/texture_2d.rs", "rank": 60, "score": 90880.94369500743 }, { "content": " texture_internal_format,\n\n mag,\n\n min,\n\n size,\n\n ImageDataFormat::R_U8,\n\n &[]\n\n )\n\n }\n\n\n\n pub fn raw(texture:Texture)->Texture2D{\n\n Self{\n\n texture,\n\n }\n\n }\n\n\n\n pub fn as_raw(&self)->&Texture{\n\n &self.texture\n\n }\n\n\n\n pub fn into_raw(self)->Texture{\n", "file_path": "basement/src/graphics/level1/texture/texture_2d.rs", "rank": 61, "score": 90874.0968144575 }, { "content": " pub fn create()->Result<Texture2D,GLError>{\n\n match Texture::create(TextureBindTarget::Texture2D){\n\n Result::Ok(texture)=>Ok(Self{texture}),\n\n Result::Err(e)=>Err(e),\n\n }\n\n }\n\n\n\n /// Creates a texture.\n\n pub fn new(\n\n texture_internal_format:Texture2DInternalFormat,\n\n mag_filter:TextureMagFilter,\n\n min_filter:TextureMinFilter,\n\n size:[u32;2],\n\n image_data_format:ImageDataFormat,\n\n data:&[u8]\n\n )->Result<Texture2D,GLError>{\n\n let texture=Texture2D::create()?;\n\n\n\n Texture::set_min_filter(TextureParameterTarget::Texture2D,min_filter);\n\n Texture::set_mag_filter(TextureParameterTarget::Texture2D,mag_filter);\n", "file_path": "basement/src/graphics/level1/texture/texture_2d.rs", "rank": 62, "score": 90873.9742478177 }, { "content": "\n\n Texture::rewrite_image_2d(\n\n Texture2DRewriteTarget::Texture2D,\n\n 0,\n\n texture_internal_format,\n\n [size[0] as i32,size[1] as i32],\n\n image_data_format,\n\n data\n\n );\n\n\n\n Ok(texture)\n\n }\n\n\n\n pub fn empty(\n\n texture_internal_format:Texture2DInternalFormat,\n\n 
mag:TextureMagFilter,\n\n min:TextureMinFilter,\n\n size:[u32;2]\n\n )->Result<Texture2D,GLError>{\n\n Texture2D::new(\n", "file_path": "basement/src/graphics/level1/texture/texture_2d.rs", "rank": 63, "score": 90873.71737961336 }, { "content": " self.texture\n\n }\n\n\n\n pub fn bind(&self)->GLError{\n\n self.texture.bind(TextureBindTarget::Texture2D)\n\n }\n\n}\n\n\n\nimpl Texture2D{\n\n pub fn rewrite_image(\n\n &self,\n\n texture_internal_format:Texture2DInternalFormat,\n\n size:[u32;2],\n\n image_data_format:ImageDataFormat,\n\n data:&[u8]\n\n )->GLError{\n\n let result=self.bind();\n\n if result.is_error(){\n\n result\n\n }\n", "file_path": "basement/src/graphics/level1/texture/texture_2d.rs", "rank": 64, "score": 90873.43864353484 }, { "content": " else{\n\n Texture::rewrite_image_2d(\n\n Texture2DRewriteTarget::Texture2D,\n\n 0,\n\n texture_internal_format,\n\n [size[0] as i32,size[1] as i32],\n\n image_data_format,\n\n data\n\n )\n\n }\n\n }\n\n\n\n pub fn write_image(\n\n &self,\n\n [x,y,width,height]:[u32;4],\n\n image_data_format:ImageDataFormat,\n\n data:&[u8]\n\n )->GLError{\n\n let result=self.bind();\n\n if result.is_error(){\n", "file_path": "basement/src/graphics/level1/texture/texture_2d.rs", "rank": 65, "score": 90872.62068789596 }, { "content": " result\n\n }\n\n else{\n\n Texture::write_image_2d(\n\n Texture2DWriteTarget::Texture2D,\n\n 0,\n\n [x as i32,y as i32,width as i32,height as i32],\n\n image_data_format,\n\n data\n\n )\n\n }\n\n }\n\n}", "file_path": "basement/src/graphics/level1/texture/texture_2d.rs", "rank": 66, "score": 90872.17410100068 }, { "content": "#[cfg(any(target_os=\"windows\"))]\n\npub mod windows;", "file_path": "src/app/mono_windowing/mod.rs", "rank": 67, "score": 84159.37667279055 }, { "content": "", "file_path": "src/app/multi_windowing/mod.rs", "rank": 68, "score": 84153.77388295022 }, { "content": "#[cfg(target_os=\"windows\")]\n\nuse crate::windows::OpenGraphicsLibrary;\n\n\n\nuse core::mem::transmute;\n\n\n\n// 
Texture targets\n\nconst TEXTURE_1D:u32=0x0DE0;\n\nconst TEXTURE_2D:u32=0x0DE1;\n\nconst PROXY_TEXTURE_1D:u32=0x8063;\n\nconst PROXY_TEXTURE_2D:u32=0x8064;\n\nconst TEXTURE_3D:u32=0x806F;\n\nconst TEXTURE_RECTANGLE:u32=0x84F5;\n\nconst TEXTURE_CUBE_MAP:u32=0x8513;\n\nconst TEXTURE_1D_ARRAY:u32=0x8C18;\n\nconst TEXTURE_2D_ARRAY:u32=0x8C1A;\n\nconst TEXTURE_BUFFER:u32=0x8C2A;\n\nconst TEXTURE_2D_MULTISAMPLE:u32=0x9100;\n\nconst TEXTURE_2D_MULTISAMPLE_ARRAY:u32=0x9102;\n\n\n\n// Texture internal formats\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 69, "score": 82793.2794175909 }, { "content": "use crate::graphics::{\n\n GCore,\n\n core::GLError,\n\n core::texture::{\n\n TextureBindTarget,\n\n Texture2DRewriteTarget,\n\n Texture2DWriteTarget,\n\n Texture2DInternalFormat,\n\n ImageDataFormat,\n\n TextureParameterTarget,\n\n TextureMinFilter,\n\n TextureMagFilter,\n\n TextureCompareFunction,\n\n TextureCompareMode,\n\n TextureWrap,\n\n },\n\n};\n\n\n\nuse core::mem::MaybeUninit;\n\n\n", "file_path": "basement/src/graphics/level0/texture.rs", "rank": 70, "score": 82790.52037044645 }, { "content": " Self{\n\n glGenTextures:0,\n\n glDeleteTextures:0,\n\n\n\n glBindTexture:0,\n\n\n\n glTexImage2D:0,\n\n glTexSubImage2D:0,\n\n glCopyTexSubImage2D:0,\n\n\n\n glTexParameteri:0,\n\n }\n\n }\n\n\n\n #[cfg(target_os=\"windows\")]\n\n pub fn load(&mut self,library:&OpenGraphicsLibrary){\n\n unsafe{\n\n self.glGenTextures=transmute(library.get_proc_address(\"glGenTextures\\0\"));\n\n self.glDeleteTextures=transmute(library.get_proc_address(\"glDeleteTextures\\0\"));\n\n\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 71, "score": 82788.62076876259 }, { "content": " /// \n\n /// The texture magnification function is used whenever the level-of-detail function used\n\n /// when sampling from the texture determines that the texture should be magified.\n\n /// \n\n /// Initially, it is set to `TextureMagFilter::Linear`.\n\n #[inline(always)]\n\n pub 
fn set_mag_filter(target:TextureParameterTarget,filter:TextureMagFilter){\n\n unsafe{\n\n GCore.texture.set_mag_filter(target,filter)\n\n }\n\n }\n\n\n\n /// Specifies the texture minifying function.\n\n /// \n\n /// The texture minifying function is used whenever the level-of-detail function used\n\n /// when sampling from the texture determines that the texture should be minified.\n\n /// \n\n /// Initially, it is set to `TextureMinFilter::LinearMipmapLinear`.\n\n #[inline(always)]\n\n pub fn set_min_filter(target:TextureParameterTarget,filter:TextureMinFilter){\n", "file_path": "basement/src/graphics/level0/texture.rs", "rank": 72, "score": 82787.70108426137 }, { "content": " }\n\n }\n\n\n\n /// Specifies the texture minifying function.\n\n /// \n\n /// The texture minifying function is used whenever the level-of-detail function used\n\n /// when sampling from the texture determines that the texture should be minified.\n\n /// \n\n /// Initially, it is set to `TextureMinFilter::LinearMipmapLinear`.\n\n #[inline(always)]\n\n pub fn set_min_filter(&self,target:TextureParameterTarget,filter:TextureMinFilter){\n\n unsafe{\n\n transmute::<usize,fn(TextureParameterTarget,u32,TextureMinFilter)>(self.glTexParameteri)(target,TEXTURE_MIN_FILTER,filter)\n\n }\n\n }\n\n\n\n /// Sets the wrap parameter for texture coordinate `s`.\n\n /// \n\n /// Initially, it is set to `TextureWrap::Repeat`.\n\n #[inline(always)]\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 73, "score": 82786.85457577078 }, { "content": " }\n\n\n\n /// Specifies the texture comparison mode for currently bound depth textures (the iternal format = `DEPTH_COMPONENT`).\n\n #[inline(always)]\n\n pub fn set_compare_mode(&self,target:TextureParameterTarget,mode:TextureCompareMode){\n\n unsafe{\n\n transmute::<usize,fn(TextureParameterTarget,u32,TextureCompareMode)>(self.glTexParameteri)(target,TEXTURE_COMPARE_MODE,mode)\n\n }\n\n }\n\n\n\n /// Specifies the texture magnification function.\n\n 
/// \n\n /// The texture magnification function is used whenever the level-of-detail function used\n\n /// when sampling from the texture determines that the texture should be magified.\n\n /// \n\n /// Initially, it is set to `TextureMagFilter::Linear`.\n\n #[inline(always)]\n\n pub fn set_mag_filter(&self,target:TextureParameterTarget,filter:TextureMagFilter){\n\n unsafe{\n\n transmute::<usize,fn(TextureParameterTarget,u32,TextureMagFilter)>(self.glTexParameteri)(target,TEXTURE_MAG_FILTER,filter)\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 74, "score": 82786.41893800045 }, { "content": "#[repr(u32)]\n\n#[derive(Clone,Copy,Debug)]\n\npub enum TextureMinFilter{\n\n /// Returns the value of the texture element\n\n /// that is nearest (in Manhattan distance) to the specified texture coordinates.\n\n Nearest=NEAREST,\n\n\n\n /// Returns the weighted average of the four texture elements that are closest to the specified texture coordinates.\n\n /// These can include items wrapped or repeated from other parts of a texture,\n\n /// depending on the values of `GL_TEXTURE_WRAP_S` and `GL_TEXTURE_WRAP_T`,and on the exact mapping.\n\n Linear=LINEAR,\n\n\n\n /// Chooses the mipmap that most closely matches the size of the pixel being textured\n\n /// and uses the `GL_NEAREST` criterion (the texture element closest to the specified texture coordinates)\n\n /// to produce a texture value.\n\n NearestMipmapNearest=NEAREST_MIPMAP_NEAREST,\n\n\n\n /// Chooses the mipmap that most closely matches the size of the pixel being textured\n\n /// and uses the `GL_LINEAR` criterion (a weighted average of the four texture elements that are closest to the specified texture coordinates)\n\n /// to produce a texture value.\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 75, "score": 82786.34565243675 }, { "content": " LinearMipmapNearest=LINEAR_MIPMAP_NEAREST,\n\n\n\n /// Chooses the two mipmaps that most closely match the size of the pixel being 
textured\n\n /// and uses the GL_NEAREST criterion (the texture element closest to the specified texture coordinates )\n\n /// to produce a texture value from each mipmap.\n\n /// The final texture value is a weighted average of those two values.\n\n NearestMipmapLinear=NEAREST_MIPMAP_LINEAR,\n\n\n\n /// Chooses the two mipmaps that most closely match the size of the pixel being textured\n\n /// and uses the GL_LINEAR criterion (a weighted average of the texture elements that are closest to the specified texture coordinates)\n\n /// to produce a texture value from each mipmap.\n\n /// The final texture value is a weighted average of those two values.\n\n LinearMipmapLinear=LINEAR_MIPMAP_LINEAR,\n\n}\n\n\n\n#[repr(u32)]\n\n#[derive(Clone,Copy,Debug)]\n\npub enum TextureWrap{\n\n Repeat=REPEAT,\n\n MirroredRepeat=MIRRORED_REPEAT,\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 76, "score": 82786.26811590351 }, { "content": " /// but does not generate an error.\n\n /// To query for an entire mipmap array,\n\n /// use an image array level greater than or equal to 1.\n\n ProxyTexture1D=PROXY_TEXTURE_1D\n\n}\n\n\n\n#[repr(u32)]\n\n#[derive(Clone,Copy,Debug)]\n\npub enum Texture2DRewriteTarget{\n\n Texture2D=TEXTURE_2D,\n\n ProxyTexture2D=PROXY_TEXTURE_2D,\n\n // TEXTURE_1D_ARRAY,\n\n // PROXY_TEXTURE_1D_ARRAY,\n\n // TEXTURE_RECTANGLE,\n\n // PROXY_TEXTURE_RECTANGLE,\n\n // TEXTURE_CUBE_MAP_POSITIVE_X,\n\n // TEXTURE_CUBE_MAP_NEGATIVE_X,\n\n // TEXTURE_CUBE_MAP_POSITIVE_Y,\n\n // TEXTURE_CUBE_MAP_NEGATIVE_Y,\n\n // TEXTURE_CUBE_MAP_POSITIVE_Z,\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 77, "score": 82785.52705327256 }, { "content": "\n\n /// Specifies the comparison operator.\n\n /// \n\n /// The comparison operator is used when `TEXTURE_COMPARE_MODE` is set to `COMPARE_REF_TO_TEXTURE`.\n\n #[inline(always)]\n\n pub fn set_compare_function(&self,target:TextureParameterTarget,function:TextureCompareFunction){\n\n unsafe{\n\n 
GCore.texture.set_compare_function(target,function)\n\n }\n\n }\n\n\n\n /// Specifies the texture comparison mode for currently bound depth textures (the iternal format = `DEPTH_COMPONENT`).\n\n #[inline(always)]\n\n pub fn set_compare_mode(&self,target:TextureParameterTarget,mode:TextureCompareMode){\n\n unsafe{\n\n GCore.texture.set_compare_mode(target,mode)\n\n }\n\n }\n\n\n\n /// Specifies the texture magnification function.\n", "file_path": "basement/src/graphics/level0/texture.rs", "rank": 78, "score": 82785.51739636785 }, { "content": "/// Texture parameters.\n\nimpl Texture{\n\n /// Specifies the index of the lowest defined mipmap level.\n\n /// \n\n /// The initial value is 0.\n\n #[inline(always)]\n\n pub fn set_base_level(&self,target:TextureParameterTarget,level:i32){\n\n unsafe{\n\n transmute::<usize,fn(TextureParameterTarget,u32,i32)>(self.glTexParameteri)(target,TEXTURE_BASE_LEVEL,level)\n\n }\n\n }\n\n\n\n /// Specifies the comparison operator.\n\n /// \n\n /// The comparison operator is used when `TextureCompareMode::CompareRefToTexture` is set (see `Texture::set_compare_mode`).\n\n #[inline(always)]\n\n pub fn set_compare_function(&self,target:TextureParameterTarget,function:TextureCompareFunction){\n\n unsafe{\n\n transmute::<usize,fn(TextureParameterTarget,u32,TextureCompareFunction)>(self.glTexParameteri)(target,TEXTURE_COMPARE_FUNC,function)\n\n }\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 79, "score": 82785.29538171183 }, { "content": " }\n\n\n\n /// Copies a two-dimensional texture subimage.\n\n /// \n\n /// Replaces a rectangular portion of a two-dimensional texture image or cube-map texture image with pixels from the current `READ_BUFFER`.\n\n /// \n\n /// `target` - Specifies the target texture.\n\n /// \n\n /// `mipmap_level` - Specifies the level-of-detail number.\n\n /// Level 0 is the base image level.\n\n /// Level n is the nth mipmap reduction image.\n\n /// \n\n /// `read_x`, `read_y` - Specify the window 
coordinates of the lower left corner\n\n /// of the rectangular region of pixels to be copied.\n\n /// \n\n /// `write_x` - Specifies a texel offset in the x direction within the texture array.\n\n /// \n\n /// `write_y` - Specifies a texel offset in the y direction within the texture array.\n\n /// \n\n /// `width` - Specifies the width of the texture subimage.\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 80, "score": 82785.22131298613 }, { "content": " Texture2D=TEXTURE_2D,\n\n Texture3D=TEXTURE_3D,\n\n TextureRectable=TEXTURE_RECTANGLE,\n\n TextureCubeMap=TEXTURE_CUBE_MAP,\n\n Texture1DArray=TEXTURE_1D_ARRAY,\n\n Texture2DArray=TEXTURE_2D_ARRAY,\n\n TextureBuffer=TEXTURE_BUFFER,\n\n Texture2DMultisample=TEXTURE_2D_MULTISAMPLE,\n\n Texture2DMultisampleArray=TEXTURE_2D_MULTISAMPLE_ARRAY,\n\n}\n\n\n\n#[repr(u32)]\n\n#[derive(Clone,Copy,Debug)]\n\npub enum TextureParameterTarget{\n\n Texture1D=TEXTURE_1D,\n\n Texture2D=TEXTURE_2D,\n\n Texture3D=TEXTURE_3D,\n\n TextureRectable=TEXTURE_RECTANGLE,\n\n TextureCubeMap=TEXTURE_CUBE_MAP,\n\n Texture1DArray=TEXTURE_1D_ARRAY,\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 81, "score": 82782.97332307365 }, { "content": " /// \n\n /// If a texture that is currently bound is deleted,\n\n /// the binding reverts to 0 (the default texture).\n\n #[inline(always)]\n\n pub fn delete(&self,textures:&[u32]){\n\n unsafe{\n\n transmute::<usize,fn(i32,&u32)>(self.glDeleteTextures)(textures.len() as i32,&textures[0])\n\n }\n\n }\n\n\n\n /// Binds a texture to a texturing target.\n\n /// \n\n /// When a texture is bound to a target,\n\n /// the previous binding for that target is automatically broken.\n\n /// \n\n /// When a texture is first bound, it assumes the specified target:\n\n /// A texture first bound to `GL_TEXTURE_1D` becomes one-dimensional texture,\n\n /// a texture first bound to `GL_TEXTURE_2D` becomes two-dimensional texture,\n\n /// and so on.\n\n /// \n", "file_path": 
"basement/src/graphics/core/texture.rs", "rank": 82, "score": 82782.95512786762 }, { "content": " /// `GLError::InvalidValue` is generated\n\n /// if target is not a name returned from a previous call to glGenTextures.\n\n /// \n\n /// `GLError::InvalidOperation` is generated\n\n /// if texture was previously created with a target that doesn't match that of target.\n\n #[inline(always)]\n\n pub unsafe fn bind(&self,target:TextureBindTarget,texture_id:u32){\n\n transmute::<usize,fn(TextureBindTarget,u32)>(self.glBindTexture)(target,texture_id)\n\n }\n\n}\n\n\n\n // TEXTURE_LOD_BIAS,\n\n // TEXTURE_MIN_LOD,\n\n // TEXTURE_MAX_LOD,\n\n // TEXTURE_MAX_LEVEL,\n\n // TEXTURE_SWIZZLE_R,\n\n // TEXTURE_SWIZZLE_G,\n\n // TEXTURE_SWIZZLE_B,\n\n // TEXTURE_SWIZZLE_A,\n\n\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 83, "score": 82782.88086946099 }, { "content": " /// Deletes a named texture.\n\n /// \n\n /// Silently ignores 0's and names that do not correspond to existing textures.\n\n /// \n\n /// If a texture that is currently bound is deleted,\n\n /// the binding reverts to 0 (the default texture).\n\n #[inline(always)]\n\n pub unsafe fn delete_one(&self,texture:&u32){\n\n transmute::<usize,fn(i32,&u32)>(self.glDeleteTextures)(1,texture)\n\n }\n\n\n\n /// Generates texture names.\n\n #[inline(always)]\n\n pub unsafe fn generate(&self,textures:&mut [u32]){\n\n transmute::<usize,fn(i32,&mut u32)>(self.glGenTextures)(textures.len() as i32,&mut textures[0])\n\n }\n\n\n\n /// Deletes named textures.\n\n /// \n\n /// Silently ignores 0's and names that do not correspond to existing textures.\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 84, "score": 82782.87977166097 }, { "content": " pub fn set_wrap_s(&self,target:TextureParameterTarget,value:TextureWrap){\n\n unsafe{\n\n transmute::<usize,fn(TextureParameterTarget,u32,TextureWrap)>(self.glTexParameteri)(target,TEXTURE_WRAP_S,value)\n\n }\n\n }\n\n\n\n /// Sets the wrap parameter for 
texture coordinate `t`.\n\n /// \n\n /// Initially, it is set to `TextureWrap::Repeat`.\n\n #[inline(always)]\n\n pub fn set_wrap_t(&self,target:TextureParameterTarget,value:TextureWrap){\n\n unsafe{\n\n transmute::<usize,fn(TextureParameterTarget,u32,TextureWrap)>(self.glTexParameteri)(target,TEXTURE_WRAP_T,value)\n\n }\n\n }\n\n\n\n /// Sets the wrap parameter for texture coordinate `r`.\n\n /// \n\n /// Initially, it is set to `TextureWrap::Repeat`.\n\n #[inline(always)]\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 85, "score": 82782.80964565036 }, { "content": " pub fn create(target:TextureBindTarget)->Result<Texture,GLError>{\n\n let texture=Texture::generate();\n\n let error=texture.bind(target);\n\n if error.is_error(){\n\n Err(error)\n\n }\n\n else{\n\n Ok(texture)\n\n }\n\n }\n\n\n\n /// Binds a texture to a texturing target.\n\n /// \n\n /// When a texture is bound to a target,\n\n /// the previous binding for that target is automatically broken.\n\n /// \n\n /// Returns `GLError::NoError` if no error has accured.\n\n /// \n\n /// Returns `GLError::InvalidValue` if there's no current context.\n\n /// \n", "file_path": "basement/src/graphics/level0/texture.rs", "rank": 86, "score": 82782.76254369655 }, { "content": " unsafe{\n\n GCore.texture.set_min_filter(target,filter);\n\n }\n\n }\n\n\n\n /// Sets the wrap parameter for texture coordinate `s`.\n\n /// \n\n /// Initially, it is set to `TextureWrap::Repeat`.\n\n #[inline(always)]\n\n pub fn set_wrap_s(target:TextureParameterTarget,value:TextureWrap){\n\n unsafe{\n\n GCore.texture.set_wrap_s(target,value);\n\n }\n\n }\n\n\n\n /// Sets the wrap parameter for texture coordinate `t`.\n\n /// \n\n /// Initially, it is set to `TextureWrap::Repeat`.\n\n #[inline(always)]\n\n pub fn set_wrap_t(target:TextureParameterTarget,value:TextureWrap){\n", "file_path": "basement/src/graphics/level0/texture.rs", "rank": 87, "score": 82782.72016071892 }, { "content": " 
CompareRefToTexture=COMPARE_REF_TO_TEXTURE,\n\n\n\n /// Specifies that the red channelshould be assigned\n\n /// the appropriate value from the currently bound depth texture.\n\n None=NONE,\n\n}\n\n\n\n#[repr(u32)]\n\n#[derive(Clone,Copy,Debug)]\n\npub enum TextureMagFilter{\n\n /// Returns the value of the texture element\n\n /// that is nearest (in Manhattan distance) to the specified texture coordinates.\n\n Nearest=NEAREST,\n\n\n\n /// Returns the weighted average of the four texture elements that are closest to the specified texture coordinates.\n\n /// These can include items wrapped or repeated from other parts of a texture,\n\n /// depending on the values of `GL_TEXTURE_WRAP_S` and `GL_TEXTURE_WRAP_T`,and on the exact mapping.\n\n Linear=LINEAR,\n\n}\n\n\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 88, "score": 82782.70717887138 }, { "content": " pub fn set_wrap_r(&self,target:TextureParameterTarget,value:TextureWrap){\n\n unsafe{\n\n transmute::<usize,fn(TextureParameterTarget,u32,TextureWrap)>(self.glTexParameteri)(target,TEXTURE_WRAP_R,value)\n\n }\n\n }\n\n}\n\n\n\n/// Texture 2D data.\n\nimpl Texture{\n\n /// Specify a two-dimensional texture image.\n\n /// \n\n /// `target` - Specifies the target texture.\n\n /// \n\n /// `mipmap_level` - Specifies the level-of-detail number.\n\n /// Level 0 is the base image level.\n\n /// Level n is the nth mipmap reduction image.\n\n /// If target is `Texture2DRewriteTarget::TextureRectangle`\n\n /// or `Texture2DRewriteTarget::ProxyTextureRectangle`,\n\n /// level must be 0.\n\n /// \n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 89, "score": 82782.70007510394 }, { "content": " // TEXTURE_CUBE_MAP_NEGATIVE_Z,\n\n // PROXY_TEXTURE_CUBE_MAP\n\n}\n\n\n\n#[repr(u32)]\n\n#[derive(Clone,Copy,Debug)]\n\npub enum Texture2DWriteTarget{\n\n Texture2D=TEXTURE_2D,\n\n // TEXTURE_CUBE_MAP_POSITIVE_X,\n\n // TEXTURE_CUBE_MAP_NEGATIVE_X,\n\n // TEXTURE_CUBE_MAP_POSITIVE_Y,\n\n // 
TEXTURE_CUBE_MAP_NEGATIVE_Y,\n\n // TEXTURE_CUBE_MAP_POSITIVE_Z,\n\n // TEXTURE_CUBE_MAP_NEGATIVE_Z,\n\n // TEXTURE_1D_ARRAY\n\n}\n\n\n\n#[repr(u32)]\n\n#[derive(Clone,Copy,Debug)]\n\npub enum Texture2DCopyTarget{\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 90, "score": 82782.66565581465 }, { "content": "pub struct Texture{\n\n id:u32,\n\n}\n\n\n\nimpl Texture{\n\n /// Generates a texture.\n\n pub fn generate()->Texture{\n\n unsafe{\n\n let mut id=MaybeUninit::uninit().assume_init();\n\n GCore.texture.generate_one(&mut id);\n\n\n\n Self{\n\n id,\n\n }\n\n }\n\n }\n\n\n\n /// Generates a texture with the given target.\n\n /// \n\n /// Returns `GLError::InvalidValue` if there's no current context.\n", "file_path": "basement/src/graphics/level0/texture.rs", "rank": 91, "score": 82782.58302246776 }, { "content": " unsafe{\n\n GCore.texture.set_wrap_t(target,value);\n\n }\n\n }\n\n\n\n /// Sets the wrap parameter for texture coordinate `r`.\n\n /// \n\n /// Initially, it is set to `TextureWrap::Repeat`.\n\n #[inline(always)]\n\n pub fn set_wrap_r(target:TextureParameterTarget,value:TextureWrap){\n\n unsafe{\n\n GCore.texture.set_wrap_r(target,value);\n\n }\n\n }\n\n}\n\n\n\nimpl Texture{\n\n pub fn rewrite_image_2d(\n\n target:Texture2DRewriteTarget,\n\n mipmap_level:i32,\n", "file_path": "basement/src/graphics/level0/texture.rs", "rank": 92, "score": 82782.56127158462 }, { "content": " /// `GLError::InvalidValue` is generated\n\n /// if `width` is less than `0` or greater than `GL_MAX_TEXTURE_SIZE`,\n\n /// if `target` is not `GL_TEXTURE_1D_ARRAY` or `GL_PROXY_TEXTURE_1D_ARRAY`\n\n /// and `height` is less than `0` or greater than `GL_MAX_TEXTURE_SIZE`,\n\n /// if `target` is `GL_TEXTURE_1D_ARRAY` or `GL_PROXY_TEXTURE_1D_ARRAY`\n\n /// and `height` is less than `0` or greater than `GL_MAX_ARRAY_TEXTURE_LAYERS`,\n\n /// if `mipmap_level` is less than 0,\n\n /// if `mipmap_level` is greater than log2(max),\n\n /// where `max` is the returned value 
of `GL_MAX_TEXTURE_SIZE`,\n\n /// if `width` or `height` is less than 0 or greater than `GL_MAX_TEXTURE_SIZE,\n\n /// if non-power-of-two textures are not supported\n\n /// and the width or height cannot be represented\n\n /// as `2^k` for some integer value of `k`,\n\n /// if target is `GL_TEXTURE_RECTANGLE` or `GL_PROXY_TEXTURE_RECTANGLE` and `mipmap_level` is not `0`.\n\n #[inline(always)]\n\n pub unsafe fn rewrite_image_2d<I:Sized>(\n\n &self,\n\n target:Texture2DRewriteTarget,\n\n mipmap_level:i32,\n\n internal_format:Texture2DInternalFormat,\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 93, "score": 82782.5075839209 }, { "content": " /// Returns `GLError::InvalidOperation`\n\n /// if texture was previously created with a target that doesn't match that of target.\n\n pub fn bind(&self,target:TextureBindTarget)->GLError{\n\n unsafe{\n\n GCore.texture.bind(target,self.id);\n\n GCore.get_error()\n\n }\n\n }\n\n\n\n /// Binds the zero-named texture to a texturing target.\n\n /// \n\n /// When a texture is bound to a target,\n\n /// the previous binding for that target is automatically broken.\n\n /// \n\n /// Returns `GLError::NoError` if no error has accured.\n\n /// \n\n /// Returns `GLError::InvalidValue` if there's no current context.\n\n pub fn unbind(target:TextureBindTarget)->GLError{\n\n unsafe{\n\n GCore.texture.bind(target,0);\n", "file_path": "basement/src/graphics/level0/texture.rs", "rank": 94, "score": 82782.46284612463 }, { "content": " ClampToEdge=CLAMP_TO_EDGE,\n\n ClampToBorder=CLAMP_TO_BORDER,\n\n}\n\n\n\n\n\npub struct Texture{\n\n glGenTextures:usize,\n\n glDeleteTextures:usize,\n\n\n\n glBindTexture:usize,\n\n\n\n glTexImage2D:usize,\n\n glTexSubImage2D:usize,\n\n glCopyTexSubImage2D:usize,\n\n\n\n glTexParameteri:usize,\n\n}\n\n\n\nimpl Texture{\n\n pub const fn new()->Texture{\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 95, "score": 82782.46048781615 }, { "content": "const 
TEXTURE_MIN_FILTER:u32=0x2801;\n\nconst TEXTURE_BASE_LEVEL:u32=0x813C;\n\nconst TEXTURE_COMPARE_MODE:u32=0x884C;\n\nconst TEXTURE_COMPARE_FUNC:u32=0x884D;\n\nconst TEXTURE_WRAP_R:u32=0x8072;\n\nconst TEXTURE_WRAP_S:u32=0x2802;\n\nconst TEXTURE_WRAP_T:u32=0x2803;\n\n\n\n// Compare functions\n\nconst NEVER:u32=0x0200;\n\nconst LESS:u32=0x0201;\n\nconst EQUAL:u32=0x0202;\n\nconst LEQUAL:u32=0x0203;\n\nconst GREATER:u32=0x0204;\n\nconst NOTEQUAL:u32=0x0205;\n\nconst GEQUAL:u32=0x0206;\n\nconst ALWAYS:u32=0x0207;\n\n\n\n// Compare modes\n\nconst NONE:u32=0;\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 96, "score": 82782.38565862113 }, { "content": " Texture2D=TEXTURE_2D,\n\n // TEXTURE_CUBE_MAP_POSITIVE_X,\n\n // TEXTURE_CUBE_MAP_NEGATIVE_X,\n\n // TEXTURE_CUBE_MAP_POSITIVE_Y,\n\n // TEXTURE_CUBE_MAP_NEGATIVE_Y,\n\n // TEXTURE_CUBE_MAP_POSITIVE_Z,\n\n // TEXTURE_CUBE_MAP_NEGATIVE_Z,\n\n}\n\n\n\n#[repr(u32)]\n\n#[derive(Clone,Copy,Debug)]\n\npub enum Texture2DInternalFormat{\n\n R8=R8,\n\n // RedI8=R8I,\n\n // RedU8=R8UI,\n\n // R8_SNorm=R8_SNORM,\n\n R16=R16,\n\n // R16_SNorm=R16_SNORM,\n\n // RedI16=R16I,\n\n // RedU16=R16UI,\n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 97, "score": 82782.33597103084 }, { "content": " /// `internal_format` - Specifies the number of color components in the texture.\n\n /// \n\n /// `width` - Specifies the width of the texture image.\n\n /// All implementations support texture images that are at least 1024 texels wide.\n\n /// \n\n /// `height` - Specifies the height of the texture image, or the number of layers in a texture array,\n\n /// in the case of the `Texture1DArray` and `ProxyTexture1DArray` targets.\n\n /// All implementations support 2D texture images that are at least 1024 texels high,\n\n /// and texture arrays that are at least 256 layers deep.\n\n /// \n\n /// `image_format` - Specifies the format of the pixel data.\n\n /// \n\n /// `image_data_type` - Specifies the data type of 
the pixel data.\n\n /// \n\n /// `data` - Specifies a pointer to the image data in memory.\n\n /// \n\n /// `GLError::InvalidEnum` is generated\n\n /// if `target` is one of the six cube map 2D image targets\n\n /// and the width and height parameters are not equal.\n\n /// \n", "file_path": "basement/src/graphics/core/texture.rs", "rank": 98, "score": 82782.21071272383 }, { "content": " /// \n\n /// `height` - Specifies the height of the texture subimage.\n\n /// \n\n /// `GLError::InvalidOperation` is generated\n\n /// if the texture array has not been defined\n\n /// by a previous `Texture::rewrite_image_2d()` or `Texture::copy_image_2d()` operation.\n\n /// \n\n /// `GLError::InvalidValue` is generated if `mipmap_level` is less than 0,\n\n /// if `mipmap_level>log2(max)`, where `max` is the returned value of `MAX_TEXTURE_SIZE`,\n\n /// if `write_x<0`, `(write_x+width)>w`, `write_y<0`, or `(write_y+height)>h`,\n\n /// where `w` is the `TEXTURE_WIDTH`, and `h` is the `TEXTURE_HEIGHT` of the texture image being modified.\n\n #[inline(always)]\n\n pub unsafe fn copy_image_2d(\n\n &self,\n\n target:Texture2DCopyTarget,\n\n mipmap_level:i32,\n\n [read_x,read_y]:[i32;2],\n\n [write_x,write_y]:[i32;2],\n\n [width,height]:[i32;2]\n\n ){\n\n transmute::<usize,fn(Texture2DCopyTarget,i32,i32,i32,i32,i32,i32,i32)>(self.glCopyTexSubImage2D)(\n\n target,\n\n mipmap_level,\n\n write_x,write_y,\n\n read_x,read_y,width,height\n\n )\n\n }\n\n}", "file_path": "basement/src/graphics/core/texture.rs", "rank": 99, "score": 82782.19636194147 } ]
Rust
src/der.rs
str4d/x509-rs
8b0b485254182465c2e089c4766fa85bbe34957f
enum DerType { Explicit(u8), Boolean, Integer, BitString, OctetString, Null, Oid, Utf8String, Sequence, Set, UtcTime, GeneralizedTime, } impl DerType { pub(super) fn parts(&self) -> (u8, u8, u8) { match self { DerType::Explicit(typ) => (2, 1, *typ), DerType::Boolean => (0, 0, 1), DerType::Integer => (0, 0, 2), DerType::BitString => (0, 0, 3), DerType::OctetString => (0, 0, 4), DerType::Null => (0, 0, 5), DerType::Oid => (0, 0, 6), DerType::Utf8String => (0, 0, 12), DerType::Sequence => (0, 1, 16), DerType::Set => (0, 1, 17), DerType::UtcTime => (0, 0, 23), DerType::GeneralizedTime => (0, 0, 24), } } } pub trait Oid: AsRef<[u64]> {} impl Oid for &'static [u64] {} impl<T> Oid for &T where T: Oid {} pub mod write { use chrono::{DateTime, Utc}; use cookie_factory::{ bytes::be_u8, combinator::{cond, slice, string}, gen_simple, multi::all, sequence::{pair, tuple, Tuple}, SerializeFn, WriteContext, }; use std::io::Write; use super::{DerType, Oid}; fn der_type<W: Write>(typ: DerType) -> impl SerializeFn<W> { let (class, pc, num) = typ.parts(); be_u8((class << 6) | (pc << 5) | num) } fn der_length<W: Write>(len: usize) -> impl SerializeFn<W> { pair( cond(len < 128, be_u8(len as u8)), cond(len >= 128, move |w: WriteContext<W>| { let len_bytes = len.to_be_bytes(); let mut len_slice = &len_bytes[..]; while !len_slice.is_empty() && len_slice[0] == 0 { len_slice = &len_slice[1..]; } assert!(len_slice.len() < 127); let res = pair(be_u8((1 << 7) | (len_slice.len() as u8)), slice(len_slice))(w)?; Ok(res) }), ) } fn der_tlv<W: Write, Gen>(typ: DerType, ser_content: Gen) -> impl SerializeFn<W> where Gen: SerializeFn<Vec<u8>>, { let content = gen_simple(ser_content, vec![]).expect("can serialize into Vec"); tuple((der_type(typ), der_length(content.len()), slice(content))) } pub fn der_default<W: Write, Gen, F, T>(inner: F, val: T, default: T) -> impl SerializeFn<W> where Gen: SerializeFn<W>, F: FnOnce(T) -> Gen, T: PartialEq, { cond(val != default, inner(val)) } pub fn der_explicit<W: 
Write, Gen>(typ: u8, inner: Gen) -> impl SerializeFn<W> where Gen: SerializeFn<Vec<u8>>, { der_tlv(DerType::Explicit(typ), inner) } pub fn der_boolean<W: Write>(val: bool) -> impl SerializeFn<W> { der_tlv(DerType::Boolean, slice(if val { &[0xff] } else { &[0x00] })) } pub fn der_integer<'a, W: Write + 'a>(mut num: &'a [u8]) -> impl SerializeFn<W> + 'a { while !num.is_empty() && num[0] == 0 { num = &num[1..]; } der_tlv( DerType::Integer, pair( cond(num.is_empty() || num[0] >= 0x80, slice(&[0])), slice(num), ), ) } pub fn der_integer_usize<W: Write>(num: usize) -> impl SerializeFn<W> { move |w: WriteContext<W>| der_integer(&num.to_be_bytes())(w) } pub fn der_bit_string<'a, W: Write + 'a>(bytes: &'a [u8]) -> impl SerializeFn<W> + 'a { der_tlv(DerType::BitString, pair(be_u8(0), slice(bytes))) } pub fn der_octet_string<'a, W: Write + 'a>(bytes: &'a [u8]) -> impl SerializeFn<W> + 'a { der_tlv(DerType::OctetString, slice(bytes)) } pub fn der_null<'a, W: Write + 'a>() -> impl SerializeFn<W> + 'a { der_tlv(DerType::Null, Ok) } pub fn der_oid<W: Write, OID: Oid>(oid: OID) -> impl SerializeFn<W> { fn subidentifier<W: Write>(id: u64) -> impl SerializeFn<W> { let id_bytes = [ 0x80 | ((id >> (9 * 7)) as u8 & 0x7f), 0x80 | ((id >> (8 * 7)) as u8 & 0x7f), 0x80 | ((id >> (7 * 7)) as u8 & 0x7f), 0x80 | ((id >> (6 * 7)) as u8 & 0x7f), 0x80 | ((id >> (5 * 7)) as u8 & 0x7f), 0x80 | ((id >> (4 * 7)) as u8 & 0x7f), 0x80 | ((id >> (3 * 7)) as u8 & 0x7f), 0x80 | ((id >> (2 * 7)) as u8 & 0x7f), 0x80 | ((id >> 7) as u8 & 0x7f), id as u8 & 0x7f, ]; move |w: WriteContext<W>| { let mut id_slice = &id_bytes[..]; while !id_slice.is_empty() && id_slice[0] == 0x80 { id_slice = &id_slice[1..]; } slice(id_slice)(w) } } move |w: WriteContext<W>| { let oid_slice = oid.as_ref(); assert!(oid_slice.len() >= 2); der_tlv( DerType::Oid, pair( subidentifier(oid_slice[0] * 40 + oid_slice[1]), all(oid_slice[2..].iter().map(|id| subidentifier(*id))), ), )(w) } } pub fn der_utf8_string<'a, W: Write + 'a>(s: &'a 
str) -> impl SerializeFn<W> + 'a { der_tlv(DerType::Utf8String, string(s)) } pub fn der_sequence<W: Write, List: Tuple<Vec<u8>>>(l: List) -> impl SerializeFn<W> { der_tlv(DerType::Sequence, move |w: WriteContext<Vec<u8>>| { l.serialize(w) }) } pub fn der_set<W: Write, List: Tuple<Vec<u8>>>(l: List) -> impl SerializeFn<W> { der_tlv(DerType::Set, move |w: WriteContext<Vec<u8>>| l.serialize(w)) } pub fn der_utc_time<W: Write>(t: DateTime<Utc>) -> impl SerializeFn<W> { der_tlv( DerType::UtcTime, string(t.format("%y%m%d%H%M%SZ").to_string()), ) } pub fn der_generalized_time<W: Write>(t: DateTime<Utc>) -> impl SerializeFn<W> { der_tlv( DerType::GeneralizedTime, string(t.format("%Y%m%d%H%M%SZ").to_string()), ) } #[cfg(test)] mod tests { use cookie_factory::gen_simple; use super::*; #[test] fn der_types() { assert_eq!( gen_simple(der_type(DerType::Integer), vec![]).unwrap(), &[0x02] ); assert_eq!( gen_simple(der_type(DerType::Sequence), vec![]).unwrap(), &[0x30] ); } #[test] fn der_lengths() { assert_eq!(gen_simple(der_length(1), vec![]).unwrap(), &[1]); assert_eq!(gen_simple(der_length(127), vec![]).unwrap(), &[127]); assert_eq!( gen_simple(der_length(128), vec![]).unwrap(), &[0x80 | 1, 128] ); assert_eq!( gen_simple(der_length(255), vec![]).unwrap(), &[0x80 | 1, 255] ); assert_eq!( gen_simple(der_length(256), vec![]).unwrap(), &[0x80 | 2, 1, 0] ); } #[test] fn der_tlvs() { assert_eq!( gen_simple(der_tlv(DerType::Integer, slice(&[0x07; 4])), vec![]).unwrap(), &[0x02, 0x04, 0x07, 0x07, 0x07, 0x07] ); } #[test] fn der_usize_integers() { assert_eq!( gen_simple(der_integer_usize(0), vec![]).unwrap(), vec![2, 1, 0] ); assert_eq!( gen_simple(der_integer_usize(127), vec![]).unwrap(), vec![2, 1, 127] ); assert_eq!( gen_simple(der_integer_usize(128), vec![]).unwrap(), vec![2, 2, 0, 128] ); assert_eq!( gen_simple(der_integer_usize(255), vec![]).unwrap(), vec![2, 2, 0, 255] ); assert_eq!( gen_simple(der_integer_usize(256), vec![]).unwrap(), vec![2, 2, 1, 0] ); assert_eq!( 
gen_simple(der_integer_usize(32767), vec![]).unwrap(), vec![2, 2, 127, 255] ); assert_eq!( gen_simple(der_integer_usize(32768), vec![]).unwrap(), vec![2, 3, 0, 128, 0] ); assert_eq!( gen_simple(der_integer_usize(65535), vec![]).unwrap(), vec![2, 3, 0, 255, 255] ); assert_eq!( gen_simple(der_integer_usize(65536), vec![]).unwrap(), vec![2, 3, 1, 0, 0] ); } } }
enum DerType { Explicit(u8), Boolean, Integer, BitString, OctetString, Null, Oid, Utf8String, Sequence, Set, UtcTime, GeneralizedTime, } impl DerType { pub(super) fn parts(&self) -> (u8, u8, u8) { match self { DerType::Explicit(typ) => (2, 1, *typ), DerType::Boolean => (0, 0, 1), DerType::Integer => (0, 0, 2), DerType::BitString => (0, 0, 3), DerType::OctetString => (0, 0, 4), DerType::Null => (0, 0, 5), DerType::Oid => (0, 0, 6), DerType::Utf8String => (0, 0, 12), DerType::Sequence => (0, 1, 16), DerType::Set => (0, 1, 17), DerType::UtcTime => (0, 0, 23), DerType::GeneralizedTime => (0, 0, 24), } } } pub trait Oid: AsRef<[u64]> {} impl Oid for &'static [u64] {} impl<T> Oid for &T where T: Oid {} pub mod write { use chrono::{DateTime, Utc}; use cookie_factory::{ bytes::be_u8, combinator::{cond, slice, string}, gen_simple, multi::all, sequence::{pair, tuple, Tuple}, SerializeFn, WriteContext, }; use std::io::Write; use super::{DerType, Oid}; fn der_type<W: Write>(typ: DerType) -> impl SerializeFn<W> { let (class, pc, num) = typ.parts(); be_u8((class << 6) | (pc << 5) | num) } fn der_length<W: Write>(len: usize) -> impl SerializeFn<W> { pair( cond(len < 128, be_u8(len as u8)), cond(len >= 128, move |w: WriteContext<W>| { let len_bytes = len.to_be_bytes(); let mut len_slice = &len_bytes[..]; while !len_slice.is_empty() && len_slice[0] == 0 { len_slice = &len_slice[1..]; } assert!(len_slice.len() < 127); let res = pair(be_u8((1 << 7) | (len_slice.len() as u8)), slice(len_slice))(w)?; Ok(res) }), ) } fn der_tlv<W: Write, Gen>(typ: DerType, ser_content: Gen) -> impl SerializeFn<W> where Gen: SerializeFn<Vec<u8>>, { let content = gen_simple(ser_content, vec![]).expect("can serialize into Vec"); tuple((der_type(typ), der_length(content.len()), slice(content))) } pub fn der_default<W: Write, Gen, F, T>(inner: F, val: T, default: T) -> impl SerializeFn<W> where Gen: SerializeFn<W>, F: FnOnce(T) -> Gen, T: PartialEq, { cond(val != default, inner(val)) } pub fn der_explicit<W: 
Write, Gen>(typ: u8, inner: Gen) -> impl SerializeFn<W> where Gen: SerializeFn<Vec<u8>>, { der_tlv(DerType::Explicit(typ), inner) } pub fn der_boolean<W: Write>(val: bool) -> impl SerializeFn<W> { der_tlv(DerType::Boolean, slice(if val { &[0xff] } else { &[0x00] })) } pub
(), &[0x30] ); } #[test] fn der_lengths() { assert_eq!(gen_simple(der_length(1), vec![]).unwrap(), &[1]); assert_eq!(gen_simple(der_length(127), vec![]).unwrap(), &[127]); assert_eq!( gen_simple(der_length(128), vec![]).unwrap(), &[0x80 | 1, 128] ); assert_eq!( gen_simple(der_length(255), vec![]).unwrap(), &[0x80 | 1, 255] ); assert_eq!( gen_simple(der_length(256), vec![]).unwrap(), &[0x80 | 2, 1, 0] ); } #[test] fn der_tlvs() { assert_eq!( gen_simple(der_tlv(DerType::Integer, slice(&[0x07; 4])), vec![]).unwrap(), &[0x02, 0x04, 0x07, 0x07, 0x07, 0x07] ); } #[test] fn der_usize_integers() { assert_eq!( gen_simple(der_integer_usize(0), vec![]).unwrap(), vec![2, 1, 0] ); assert_eq!( gen_simple(der_integer_usize(127), vec![]).unwrap(), vec![2, 1, 127] ); assert_eq!( gen_simple(der_integer_usize(128), vec![]).unwrap(), vec![2, 2, 0, 128] ); assert_eq!( gen_simple(der_integer_usize(255), vec![]).unwrap(), vec![2, 2, 0, 255] ); assert_eq!( gen_simple(der_integer_usize(256), vec![]).unwrap(), vec![2, 2, 1, 0] ); assert_eq!( gen_simple(der_integer_usize(32767), vec![]).unwrap(), vec![2, 2, 127, 255] ); assert_eq!( gen_simple(der_integer_usize(32768), vec![]).unwrap(), vec![2, 3, 0, 128, 0] ); assert_eq!( gen_simple(der_integer_usize(65535), vec![]).unwrap(), vec![2, 3, 0, 255, 255] ); assert_eq!( gen_simple(der_integer_usize(65536), vec![]).unwrap(), vec![2, 3, 1, 0, 0] ); } } }
fn der_integer<'a, W: Write + 'a>(mut num: &'a [u8]) -> impl SerializeFn<W> + 'a { while !num.is_empty() && num[0] == 0 { num = &num[1..]; } der_tlv( DerType::Integer, pair( cond(num.is_empty() || num[0] >= 0x80, slice(&[0])), slice(num), ), ) } pub fn der_integer_usize<W: Write>(num: usize) -> impl SerializeFn<W> { move |w: WriteContext<W>| der_integer(&num.to_be_bytes())(w) } pub fn der_bit_string<'a, W: Write + 'a>(bytes: &'a [u8]) -> impl SerializeFn<W> + 'a { der_tlv(DerType::BitString, pair(be_u8(0), slice(bytes))) } pub fn der_octet_string<'a, W: Write + 'a>(bytes: &'a [u8]) -> impl SerializeFn<W> + 'a { der_tlv(DerType::OctetString, slice(bytes)) } pub fn der_null<'a, W: Write + 'a>() -> impl SerializeFn<W> + 'a { der_tlv(DerType::Null, Ok) } pub fn der_oid<W: Write, OID: Oid>(oid: OID) -> impl SerializeFn<W> { fn subidentifier<W: Write>(id: u64) -> impl SerializeFn<W> { let id_bytes = [ 0x80 | ((id >> (9 * 7)) as u8 & 0x7f), 0x80 | ((id >> (8 * 7)) as u8 & 0x7f), 0x80 | ((id >> (7 * 7)) as u8 & 0x7f), 0x80 | ((id >> (6 * 7)) as u8 & 0x7f), 0x80 | ((id >> (5 * 7)) as u8 & 0x7f), 0x80 | ((id >> (4 * 7)) as u8 & 0x7f), 0x80 | ((id >> (3 * 7)) as u8 & 0x7f), 0x80 | ((id >> (2 * 7)) as u8 & 0x7f), 0x80 | ((id >> 7) as u8 & 0x7f), id as u8 & 0x7f, ]; move |w: WriteContext<W>| { let mut id_slice = &id_bytes[..]; while !id_slice.is_empty() && id_slice[0] == 0x80 { id_slice = &id_slice[1..]; } slice(id_slice)(w) } } move |w: WriteContext<W>| { let oid_slice = oid.as_ref(); assert!(oid_slice.len() >= 2); der_tlv( DerType::Oid, pair( subidentifier(oid_slice[0] * 40 + oid_slice[1]), all(oid_slice[2..].iter().map(|id| subidentifier(*id))), ), )(w) } } pub fn der_utf8_string<'a, W: Write + 'a>(s: &'a str) -> impl SerializeFn<W> + 'a { der_tlv(DerType::Utf8String, string(s)) } pub fn der_sequence<W: Write, List: Tuple<Vec<u8>>>(l: List) -> impl SerializeFn<W> { der_tlv(DerType::Sequence, move |w: WriteContext<Vec<u8>>| { l.serialize(w) }) } pub fn der_set<W: Write, List: 
Tuple<Vec<u8>>>(l: List) -> impl SerializeFn<W> { der_tlv(DerType::Set, move |w: WriteContext<Vec<u8>>| l.serialize(w)) } pub fn der_utc_time<W: Write>(t: DateTime<Utc>) -> impl SerializeFn<W> { der_tlv( DerType::UtcTime, string(t.format("%y%m%d%H%M%SZ").to_string()), ) } pub fn der_generalized_time<W: Write>(t: DateTime<Utc>) -> impl SerializeFn<W> { der_tlv( DerType::GeneralizedTime, string(t.format("%Y%m%d%H%M%SZ").to_string()), ) } #[cfg(test)] mod tests { use cookie_factory::gen_simple; use super::*; #[test] fn der_types() { assert_eq!( gen_simple(der_type(DerType::Integer), vec![]).unwrap(), &[0x02] ); assert_eq!( gen_simple(der_type(DerType::Sequence), vec![]).unwrap
random
[ { "content": "/// A trait for objects which represent ASN.1 `AlgorithmIdentifier`s.\n\npub trait AlgorithmIdentifier {\n\n type AlgorithmOid: der::Oid;\n\n\n\n /// Returns the object identifier for this `AlgorithmIdentifier`.\n\n fn algorithm(&self) -> Self::AlgorithmOid;\n\n\n\n /// Writes the parameters for this `AlgorithmIdentifier`, if any.\n\n fn parameters<W: Write>(&self, w: WriteContext<W>) -> GenResult<W>;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 1, "score": 46156.229940565914 }, { "content": "/// A trait for objects which represent ASN.1 `SubjectPublicKeyInfo`s.\n\npub trait SubjectPublicKeyInfo {\n\n type AlgorithmId: AlgorithmIdentifier;\n\n type SubjectPublicKey: AsRef<[u8]>;\n\n\n\n /// Returns the [`AlgorithmIdentifier`] for this public key.\n\n fn algorithm_id(&self) -> Self::AlgorithmId;\n\n\n\n /// Returns the encoded public key.\n\n fn public_key(&self) -> Self::SubjectPublicKey;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 2, "score": 42305.22405170312 }, { "content": "#[derive(Clone)]\n\nenum RdnType {\n\n Country,\n\n Organization,\n\n OrganizationalUnit,\n\n CommonName,\n\n}\n\n\n\n/// An X.509 RelativeDistinguishedName.\n\n#[derive(Clone)]\n\npub struct RelativeDistinguishedName<'a> {\n\n typ: RdnType,\n\n value: &'a str,\n\n}\n\n\n\nimpl<'a> RelativeDistinguishedName<'a> {\n\n /// Constructs a Country RDN.\n\n ///\n\n /// # Panics\n\n /// Panics if `value.len() > 64`.\n\n pub fn country(value: &'a str) -> Self {\n", "file_path": "src/lib.rs", "rank": 4, "score": 26493.016312982865 }, { "content": "}\n\n\n\n/// X.509 serialization APIs.\n\npub mod write {\n\n use chrono::{DateTime, Datelike, TimeZone, Utc};\n\n use cookie_factory::{\n\n combinator::{cond, slice},\n\n multi::all,\n\n sequence::pair,\n\n SerializeFn, WriteContext,\n\n };\n\n use std::io::Write;\n\n\n\n use crate::{Extension, RdnType, RelativeDistinguishedName};\n\n\n\n use super::{\n\n der::{write::*, Oid},\n\n AlgorithmIdentifier, SubjectPublicKeyInfo,\n\n 
};\n\n\n", "file_path": "src/lib.rs", "rank": 14, "score": 18.179151254203124 }, { "content": " /// From [RFC 5280](https://tools.ietf.org/html/rfc5280#section-4.1):\n\n /// ```text\n\n /// Certificate ::= SEQUENCE {\n\n /// tbsCertificate TBSCertificate,\n\n /// signatureAlgorithm AlgorithmIdentifier,\n\n /// signatureValue BIT STRING }\n\n /// ```\n\n ///\n\n /// Use [`tbs_certificate`] to serialize the certificate itself, then sign it and call\n\n /// this function with the serialized `TBSCertificate` and signature.\n\n pub fn certificate<'a, W: Write + 'a, Alg: AlgorithmIdentifier>(\n\n cert: &'a [u8],\n\n signature_algorithm: &'a Alg,\n\n signature: &'a [u8],\n\n ) -> impl SerializeFn<W> + 'a {\n\n der_sequence((\n\n slice(cert),\n\n algorithm_identifier(signature_algorithm),\n\n der_bit_string(signature),\n\n ))\n", "file_path": "src/lib.rs", "rank": 16, "score": 16.219459953191862 }, { "content": " }\n\n\n\n /// From [RFC 5280](https://tools.ietf.org/html/rfc5280#section-4.1):\n\n /// ```text\n\n /// Extension ::= SEQUENCE {\n\n /// extnID OBJECT IDENTIFIER,\n\n /// critical BOOLEAN DEFAULT FALSE,\n\n /// extnValue OCTET STRING\n\n /// -- contains the DER encoding of an ASN.1 value\n\n /// -- corresponding to the extension type identified\n\n /// -- by extnID\n\n /// }\n\n /// ```\n\n fn extension<'a, W: Write + 'a, O: Oid + 'a>(\n\n extension: &'a Extension<'a, O>,\n\n ) -> impl SerializeFn<W> + 'a {\n\n der_sequence((\n\n der_oid(&extension.oid),\n\n der_default(der_boolean, extension.critical, false),\n\n der_octet_string(extension.value),\n", "file_path": "src/lib.rs", "rank": 17, "score": 16.16217112322606 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use chrono::Utc;\n\n\n\n use crate::{\n\n write, AlgorithmIdentifier, Extension, RelativeDistinguishedName, SubjectPublicKeyInfo,\n\n };\n\n\n\n struct MockAlgorithmId;\n\n\n\n impl AlgorithmIdentifier for MockAlgorithmId {\n\n type AlgorithmOid = &'static [u64];\n\n\n\n fn 
algorithm(&self) -> Self::AlgorithmOid {\n\n &[1, 1, 1, 1]\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 19, "score": 15.813093705349825 }, { "content": " /// CertificateSerialNumber ::= INTEGER\n\n ///\n\n /// Certificate users MUST be able to handle serialNumber values up to 20 octets.\n\n /// Conforming CAs MUST NOT use serialNumber values longer than 20 octets.\n\n /// ```\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if:\n\n /// - `serial_number.len() > 20`\n\n pub fn tbs_certificate<'a, W: Write + 'a, Alg, PKI, O: Oid + 'a>(\n\n serial_number: &'a [u8],\n\n signature: &'a Alg,\n\n issuer: &'a [RelativeDistinguishedName<'a>],\n\n not_before: DateTime<Utc>,\n\n not_after: Option<DateTime<Utc>>,\n\n subject: &'a [RelativeDistinguishedName<'a>],\n\n subject_pki: &'a PKI,\n\n exts: &'a [Extension<'a, O>],\n\n ) -> impl SerializeFn<W> + 'a\n", "file_path": "src/lib.rs", "rank": 20, "score": 15.40197466751216 }, { "content": " /// ub-common-name INTEGER ::= 64\n\n /// ```\n\n fn relative_distinguished_name<'a, W: Write + 'a>(\n\n rdn: &'a RelativeDistinguishedName<'a>,\n\n ) -> impl SerializeFn<W> + 'a {\n\n der_set((der_sequence((\n\n der_oid(rdn.oid()),\n\n der_utf8_string(&rdn.value),\n\n )),))\n\n }\n\n\n\n /// Encodes an X.509 Name.\n\n ///\n\n /// From [RFC 5280 section 4.1.2.4](https://tools.ietf.org/html/rfc5280#section-4.1.2.4):\n\n /// ```text\n\n /// Name ::= CHOICE { -- only one possibility for now --\n\n /// rdnSequence RDNSequence }\n\n ///\n\n /// RDNSequence ::= SEQUENCE OF RelativeDistinguishedName\n\n /// ```\n", "file_path": "src/lib.rs", "rank": 21, "score": 15.132672895979837 }, { "content": " /// From [RFC 5280](https://tools.ietf.org/html/rfc5280#section-4.1.1.2):\n\n /// ```text\n\n /// AlgorithmIdentifier ::= SEQUENCE {\n\n /// algorithm OBJECT IDENTIFIER,\n\n /// parameters ANY DEFINED BY algorithm OPTIONAL }\n\n /// ```\n\n pub fn algorithm_identifier<'a, W: Write + 'a, Alg: AlgorithmIdentifier>(\n\n algorithm_id: &'a Alg,\n\n ) -> 
impl SerializeFn<W> + 'a {\n\n der_sequence((\n\n der_oid(algorithm_id.algorithm()),\n\n move |w: WriteContext<Vec<u8>>| algorithm_id.parameters(w),\n\n ))\n\n }\n\n\n\n /// Encodes an X.509 RelativeDistinguishedName.\n\n ///\n\n /// From [RFC 5280 section 4.1.2.4](https://tools.ietf.org/html/rfc5280#section-4.1.2.4):\n\n /// ```text\n\n /// RelativeDistinguishedName ::=\n", "file_path": "src/lib.rs", "rank": 22, "score": 14.387824149177414 }, { "content": " RdnType::Organization => InternalOid::Organization,\n\n RdnType::OrganizationalUnit => InternalOid::OrganizationalUnit,\n\n }\n\n }\n\n }\n\n\n\n /// From [RFC 5280](https://tools.ietf.org/html/rfc5280#section-4.1):\n\n /// ```text\n\n /// TBSCertificate ::= SEQUENCE {\n\n /// version [0] EXPLICIT Version DEFAULT v1,\n\n /// ...\n\n /// }\n\n ///\n\n /// Version ::= INTEGER { v1(0), v2(1), v3(2) }\n\n /// ```\n\n fn version<W: Write>(version: Version) -> impl SerializeFn<W> {\n\n // TODO: Omit version if V1, once x509-parser correctly handles this.\n\n der_explicit(0, der_integer_usize(version.into()))\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 23, "score": 14.288216147810378 }, { "content": " fn name<'a, W: Write + 'a>(\n\n name: &'a [RelativeDistinguishedName<'a>],\n\n ) -> impl SerializeFn<W> + 'a {\n\n der_sequence((all(name.iter().map(relative_distinguished_name)),))\n\n }\n\n\n\n /// From [RFC 5280](https://tools.ietf.org/html/rfc5280#section-4.1):\n\n /// ```text\n\n /// Time ::= CHOICE {\n\n /// utcTime UTCTime,\n\n /// generalTime GeneralizedTime }\n\n ///\n\n /// CAs conforming to this profile MUST always encode certificate\n\n /// validity dates through the year 2049 as UTCTime; certificate validity\n\n /// dates in 2050 or later MUST be encoded as GeneralizedTime.\n\n /// ```\n\n fn time<W: Write>(t: DateTime<Utc>) -> impl SerializeFn<W> {\n\n pair(\n\n cond(t.year() < 2050, der_utc_time(t)),\n\n cond(t.year() >= 2050, der_generalized_time(t)),\n", "file_path": "src/lib.rs", "rank": 25, 
"score": 13.148820238637956 }, { "content": " ))\n\n }\n\n\n\n /// Encodes a `PublicKeyInfo` as an ASN.1 `SubjectPublicKeyInfo` using DER.\n\n ///\n\n /// From [RFC 5280](https://tools.ietf.org/html/rfc5280#section-4.1):\n\n /// ```text\n\n /// SubjectPublicKeyInfo ::= SEQUENCE {\n\n /// algorithm AlgorithmIdentifier,\n\n /// subjectPublicKey BIT STRING }\n\n /// ```\n\n fn subject_public_key_info<'a, W: Write + 'a, PKI: SubjectPublicKeyInfo>(\n\n subject_pki: &'a PKI,\n\n ) -> impl SerializeFn<W> + 'a {\n\n move |w: WriteContext<W>| {\n\n der_sequence((\n\n algorithm_identifier(&subject_pki.algorithm_id()),\n\n der_bit_string(subject_pki.public_key().as_ref()),\n\n ))(w)\n\n }\n", "file_path": "src/lib.rs", "rank": 26, "score": 12.747521035218714 }, { "content": "//! *Pure-Rust X.509 certificate serialization*\n\n//!\n\n//! `x509` is a crate providing serialization APIs for X.509 v3 ([RFC 5280]) certificates,\n\n//! implemented using the `cookie-factory` combinatorial serializer framework.\n\n//!\n\n//! 
[RFC 5280]: https://tools.ietf.org/html/rfc5280\n\n\n\nuse cookie_factory::{GenResult, WriteContext};\n\nuse std::io::Write;\n\n\n\npub mod der;\n\n\n\n/// A trait for objects which represent ASN.1 `AlgorithmIdentifier`s.\n", "file_path": "src/lib.rs", "rank": 27, "score": 12.378075151544664 }, { "content": " fn parameters<W: std::io::Write>(\n\n &self,\n\n w: cookie_factory::WriteContext<W>,\n\n ) -> cookie_factory::GenResult<W> {\n\n Ok(w)\n\n }\n\n }\n\n\n\n struct MockPublicKeyInfo;\n\n\n\n impl SubjectPublicKeyInfo for MockPublicKeyInfo {\n\n type AlgorithmId = MockAlgorithmId;\n\n type SubjectPublicKey = Vec<u8>;\n\n\n\n fn algorithm_id(&self) -> Self::AlgorithmId {\n\n MockAlgorithmId\n\n }\n\n\n\n fn public_key(&self) -> Self::SubjectPublicKey {\n\n vec![]\n", "file_path": "src/lib.rs", "rank": 29, "score": 12.0841060664129 }, { "content": " )\n\n }\n\n\n\n /// From [RFC 5280](https://tools.ietf.org/html/rfc5280#section-4.1):\n\n /// ```text\n\n /// Validity ::= SEQUENCE {\n\n /// notBefore Time,\n\n /// notAfter Time }\n\n ///\n\n /// To indicate that a certificate has no well-defined expiration date,\n\n /// the notAfter SHOULD be assigned the GeneralizedTime value of\n\n /// 99991231235959Z.\n\n /// ```\n\n fn validity<W: Write>(\n\n not_before: DateTime<Utc>,\n\n not_after: Option<DateTime<Utc>>,\n\n ) -> impl SerializeFn<W> {\n\n der_sequence((\n\n time(not_before),\n\n time(not_after.unwrap_or_else(|| Utc.ymd(9999, 12, 31).and_hms(23, 59, 59))),\n", "file_path": "src/lib.rs", "rank": 30, "score": 11.877658843860386 }, { "content": " critical: bool,\n\n /// The DER encoding of an ASN.1 value corresponding to the extension type identified\n\n /// by `oid`.\n\n value: &'a [u8],\n\n}\n\n\n\nimpl<'a, O: der::Oid + 'a> Extension<'a, O> {\n\n /// Constructs an extension.\n\n ///\n\n /// If this extension is not recognized by a certificate-using system, it will be\n\n /// ignored.\n\n ///\n\n /// `oid` is an OID that specifies the format and definitions of 
the extension.\n\n ///\n\n /// `value` is the DER encoding of an ASN.1 value corresponding to the extension type\n\n /// identified by `oid`.\n\n pub fn regular(oid: O, value: &'a [u8]) -> Self {\n\n Extension {\n\n oid,\n\n critical: false,\n", "file_path": "src/lib.rs", "rank": 32, "score": 11.685081459863131 }, { "content": " }\n\n\n\n impl AsRef<[u64]> for InternalOid {\n\n fn as_ref(&self) -> &[u64] {\n\n match self {\n\n InternalOid::Country => &[2, 5, 4, 6],\n\n InternalOid::Organization => &[2, 5, 4, 10],\n\n InternalOid::OrganizationalUnit => &[2, 5, 4, 11],\n\n InternalOid::CommonName => &[2, 5, 4, 3],\n\n }\n\n }\n\n }\n\n\n\n impl Oid for InternalOid {}\n\n\n\n impl<'a> RelativeDistinguishedName<'a> {\n\n fn oid(&self) -> InternalOid {\n\n match self.typ {\n\n RdnType::Country => InternalOid::Country,\n\n RdnType::CommonName => InternalOid::CommonName,\n", "file_path": "src/lib.rs", "rank": 33, "score": 11.2173441240434 }, { "content": " ))\n\n }\n\n\n\n /// From [RFC 5280](https://tools.ietf.org/html/rfc5280#section-4.1):\n\n /// ```text\n\n /// TBSCertificate ::= SEQUENCE {\n\n /// ...\n\n /// extensions [3] EXPLICIT Extensions OPTIONAL\n\n /// -- If present, version MUST be v3\n\n /// }\n\n ///\n\n /// Extensions ::= SEQUENCE SIZE (1..MAX) OF Extension\n\n /// ```\n\n fn extensions<'a, W: Write + 'a, O: Oid + 'a>(\n\n exts: &'a [Extension<'a, O>],\n\n ) -> impl SerializeFn<W> + 'a {\n\n cond(\n\n !exts.is_empty(),\n\n der_explicit(3, der_sequence((all(exts.iter().map(extension)),))),\n\n )\n", "file_path": "src/lib.rs", "rank": 35, "score": 11.00521698470873 }, { "content": " Extension::critical(&[1u64, 4, 5, 6][..], &[7, 7, 7]),\n\n ];\n\n\n\n let mut tbs_cert = vec![];\n\n cookie_factory::gen(\n\n write::tbs_certificate(\n\n &[],\n\n &signature,\n\n &[],\n\n not_before,\n\n None,\n\n &[],\n\n &subject_pki,\n\n exts,\n\n ),\n\n &mut tbs_cert,\n\n )\n\n .unwrap();\n\n\n\n let mut data = vec![];\n", "file_path": "src/lib.rs", "rank": 36, "score": 
10.988054589086556 }, { "content": " write::tbs_certificate(\n\n &[],\n\n &MockAlgorithmId,\n\n &[],\n\n Utc::now(),\n\n None,\n\n subject,\n\n &MockPublicKeyInfo,\n\n exts,\n\n ),\n\n &mut tbs_cert,\n\n )\n\n .unwrap();\n\n\n\n let mut data = vec![];\n\n cookie_factory::gen(\n\n write::certificate(&tbs_cert, &MockAlgorithmId, &[]),\n\n &mut data,\n\n )\n\n .unwrap();\n", "file_path": "src/lib.rs", "rank": 37, "score": 9.831336630556017 }, { "content": " /// X.509 versions that we care about.\n\n #[derive(Clone, Copy)]\n\n enum Version {\n\n V3,\n\n }\n\n\n\n impl From<Version> for usize {\n\n fn from(version: Version) -> usize {\n\n match version {\n\n Version::V3 => 2,\n\n }\n\n }\n\n }\n\n\n\n /// Object identifiers used internally by X.509.\n\n enum InternalOid {\n\n Country,\n\n Organization,\n\n OrganizationalUnit,\n\n CommonName,\n", "file_path": "src/lib.rs", "rank": 38, "score": 9.306795780820446 }, { "content": " value,\n\n }\n\n }\n\n\n\n /// Constructs a critical extension.\n\n ///\n\n /// If this extension is not recognized by a certificate-using system, the certificate\n\n /// will be rejected.\n\n ///\n\n /// `oid` is an OID that specifies the format and definitions of the extension.\n\n ///\n\n /// `value` is the DER encoding of an ASN.1 value corresponding to the extension type\n\n /// identified by `oid`.\n\n pub fn critical(oid: O, value: &'a [u8]) -> Self {\n\n Extension {\n\n oid,\n\n critical: true,\n\n value,\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 39, "score": 9.161890817075449 }, { "content": " }\n\n\n\n /// Encodes a version 1 X.509 `TBSCertificate` using DER.\n\n ///\n\n /// `extensions` is optional; if empty, no extensions section will be serialized. 
Due\n\n /// to the need for an `O: Oid` type parameter, users who do not have any extensions\n\n /// should use the following workaround:\n\n ///\n\n /// ```ignore\n\n /// let exts: &[Extension<'_, &[u64]>] = &[];\n\n /// x509::write::tbs_certificate(\n\n /// serial_number,\n\n /// signature,\n\n /// issuer,\n\n /// not_before,\n\n /// not_after,\n\n /// subject,\n\n /// subject_pki,\n\n /// exts,\n\n /// );\n", "file_path": "src/lib.rs", "rank": 40, "score": 8.65250490279068 }, { "content": " cookie_factory::gen(\n\n write::certificate(&tbs_cert, &MockAlgorithmId, &[]),\n\n &mut data,\n\n )\n\n .unwrap();\n\n\n\n let (_, cert) = x509_parser::parse_x509_certificate(&data).unwrap();\n\n\n\n assert_eq!(\n\n cert.validity().not_before.timestamp(),\n\n not_before.timestamp()\n\n );\n\n\n\n for ext in exts {\n\n let oid = x509_parser::der_parser::oid::Oid::from(ext.oid).unwrap();\n\n if let Some(extension) = cert.extensions().get(&oid) {\n\n assert_eq!(extension.critical, ext.critical);\n\n assert_eq!(extension.value, ext.value);\n\n } else {\n\n panic!();\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 43, "score": 6.502175346246994 }, { "content": " where\n\n Alg: AlgorithmIdentifier,\n\n PKI: SubjectPublicKeyInfo,\n\n {\n\n assert!(serial_number.len() <= 20);\n\n\n\n der_sequence((\n\n version(Version::V3),\n\n der_integer(serial_number),\n\n algorithm_identifier(signature),\n\n name(issuer),\n\n validity(not_before, not_after),\n\n name(subject),\n\n subject_public_key_info(subject_pki),\n\n extensions(exts),\n\n ))\n\n }\n\n\n\n /// Encodes an X.509 certificate using DER.\n\n ///\n", "file_path": "src/lib.rs", "rank": 44, "score": 6.479121852295281 }, { "content": "\n\n RelativeDistinguishedName {\n\n typ: RdnType::CommonName,\n\n value,\n\n }\n\n }\n\n}\n\n\n\n/// A certificate extension.\n\npub struct Extension<'a, O: der::Oid + 'a> {\n\n /// An OID that specifies the format and definitions of the extension.\n\n oid: O,\n\n /// Whether the 
information in the extension is important.\n\n ///\n\n /// ```text\n\n /// Each extension in a certificate may be designated as critical or non-critical. A\n\n /// certificate using system MUST reject the certificate if it encounters a critical\n\n /// extension it does not recognize; however, a non-critical extension may be ignored\n\n /// if it is not recognized.\n\n /// ```\n", "file_path": "src/lib.rs", "rank": 45, "score": 6.342924840633346 }, { "content": "\n\n /// Constructs an OrganizationalUnit RDN.\n\n ///\n\n /// # Panics\n\n /// Panics if `value.len() > 64`.\n\n pub fn organizational_unit(value: &'a str) -> Self {\n\n assert!(value.len() <= 64);\n\n\n\n RelativeDistinguishedName {\n\n typ: RdnType::OrganizationalUnit,\n\n value,\n\n }\n\n }\n\n\n\n /// Constructs a CommonName RDN.\n\n ///\n\n /// # Panics\n\n /// Panics if `value.len() > 64`.\n\n pub fn common_name(value: &'a str) -> Self {\n\n assert!(value.len() <= 64);\n", "file_path": "src/lib.rs", "rank": 46, "score": 5.942875090323062 }, { "content": " .map(|c| c.as_str())\n\n .collect::<Result<Vec<_>, _>>(),\n\n Ok(vec![ORGANIZATIONAL_UNIT])\n\n );\n\n assert_eq!(\n\n cert.subject()\n\n .iter_common_name()\n\n .map(|c| c.as_str())\n\n .collect::<Result<Vec<_>, _>>(),\n\n Ok(vec![COMMON_NAME])\n\n );\n\n }\n\n\n\n #[test]\n\n fn extensions() {\n\n let signature = MockAlgorithmId;\n\n let not_before = Utc::now();\n\n let subject_pki = MockPublicKeyInfo;\n\n let exts = &[\n\n Extension::regular(&[1u64, 2, 3, 4][..], &[1, 2, 3]),\n", "file_path": "src/lib.rs", "rank": 47, "score": 5.850992692868871 }, { "content": " assert!(value.len() <= 64);\n\n\n\n RelativeDistinguishedName {\n\n typ: RdnType::Country,\n\n value,\n\n }\n\n }\n\n\n\n /// Constructs an Organization RDN.\n\n ///\n\n /// # Panics\n\n /// Panics if `value.len() > 64`.\n\n pub fn organization(value: &'a str) -> Self {\n\n assert!(value.len() <= 64);\n\n\n\n RelativeDistinguishedName {\n\n typ: RdnType::Organization,\n\n value,\n\n }\n\n 
}\n", "file_path": "src/lib.rs", "rank": 48, "score": 5.809638884666424 }, { "content": " }\n\n }\n\n\n\n #[test]\n\n fn names() {\n\n const COUNTRY: &str = \"NZ\";\n\n const ORGANIZATION: &str = \"ACME\";\n\n const ORGANIZATIONAL_UNIT: &str = \"Road Runner\";\n\n const COMMON_NAME: &str = \"Test-in-a-Box\";\n\n\n\n let subject = &[\n\n RelativeDistinguishedName::country(COUNTRY),\n\n RelativeDistinguishedName::organization(ORGANIZATION),\n\n RelativeDistinguishedName::organizational_unit(ORGANIZATIONAL_UNIT),\n\n RelativeDistinguishedName::common_name(COMMON_NAME),\n\n ];\n\n let exts: &[Extension<'_, &[u64]>] = &[];\n\n\n\n let mut tbs_cert = vec![];\n\n cookie_factory::gen(\n", "file_path": "src/lib.rs", "rank": 49, "score": 5.56479335831389 }, { "content": " /// SET SIZE (1..MAX) OF AttributeTypeAndValue\n\n ///\n\n /// AttributeTypeAndValue ::= SEQUENCE {\n\n /// type AttributeType,\n\n /// value AttributeValue }\n\n ///\n\n /// AttributeType ::= OBJECT IDENTIFIER\n\n ///\n\n /// AttributeValue ::= ANY -- DEFINED BY AttributeType\n\n /// ```\n\n ///\n\n /// From [RFC 5280 appendix A.1](https://tools.ietf.org/html/rfc5280#appendix-A.1):\n\n /// ```text\n\n /// X520CommonName ::= CHOICE {\n\n /// teletexString TeletexString (SIZE (1..ub-common-name)),\n\n /// printableString PrintableString (SIZE (1..ub-common-name)),\n\n /// universalString UniversalString (SIZE (1..ub-common-name)),\n\n /// utf8String UTF8String (SIZE (1..ub-common-name)),\n\n /// bmpString BMPString (SIZE (1..ub-common-name)) }\n\n ///\n", "file_path": "src/lib.rs", "rank": 50, "score": 5.292751103127642 }, { "content": " /// ```\n\n ///\n\n /// From [RFC 5280](https://tools.ietf.org/html/rfc5280#section-4.1):\n\n /// ```text\n\n /// TBSCertificate ::= SEQUENCE {\n\n /// version [0] EXPLICIT Version DEFAULT v1,\n\n /// serialNumber CertificateSerialNumber,\n\n /// signature AlgorithmIdentifier,\n\n /// issuer Name,\n\n /// validity Validity,\n\n /// subject Name,\n\n /// subjectPublicKeyInfo 
SubjectPublicKeyInfo,\n\n /// issuerUniqueID [1] IMPLICIT UniqueIdentifier OPTIONAL,\n\n /// -- If present, version MUST be v2 or v3\n\n /// subjectUniqueID [2] IMPLICIT UniqueIdentifier OPTIONAL,\n\n /// -- If present, version MUST be v2 or v3\n\n /// extensions [3] EXPLICIT Extensions OPTIONAL\n\n /// -- If present, version MUST be v3\n\n /// }\n\n ///\n", "file_path": "src/lib.rs", "rank": 51, "score": 3.952830273761037 }, { "content": "\n\n let (_, cert) = x509_parser::parse_x509_certificate(&data).unwrap();\n\n\n\n assert_eq!(\n\n cert.subject()\n\n .iter_country()\n\n .map(|c| c.as_str())\n\n .collect::<Result<Vec<_>, _>>(),\n\n Ok(vec![COUNTRY])\n\n );\n\n assert_eq!(\n\n cert.subject()\n\n .iter_organization()\n\n .map(|c| c.as_str())\n\n .collect::<Result<Vec<_>, _>>(),\n\n Ok(vec![ORGANIZATION])\n\n );\n\n assert_eq!(\n\n cert.subject()\n\n .iter_organizational_unit()\n", "file_path": "src/lib.rs", "rank": 52, "score": 2.7112997262607097 }, { "content": "# x509: Pure-Rust X.509 certificate serialization\n\n\n\nWork in progress.\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or\n\n http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally\n\nsubmitted for inclusion in the work by you, as defined in the Apache-2.0\n\nlicense, shall be dual licensed as above, without any additional terms or\n\nconditions.\n\n\n", "file_path": "README.md", "rank": 54, "score": 1.1993306949830584 } ]
Rust
sdk/src/commitment_config.rs
ZhengYuTay/solana
446c7a53f0d9c9bcd5027de8440363823b1dc1de
#![allow(deprecated)] #![cfg(feature = "full")] use std::str::FromStr; use thiserror::Error; #[derive(Serialize, Deserialize, Default, Clone, Copy, Debug, PartialEq, Eq, Hash)] #[serde(rename_all = "camelCase")] pub struct CommitmentConfig { pub commitment: CommitmentLevel, } impl CommitmentConfig { #[deprecated( since = "1.5.5", note = "Please use CommitmentConfig::processed() instead" )] pub fn recent() -> Self { Self { commitment: CommitmentLevel::Recent, } } #[deprecated( since = "1.5.5", note = "Please use CommitmentConfig::finalized() instead" )] pub fn max() -> Self { Self { commitment: CommitmentLevel::Max, } } #[deprecated( since = "1.5.5", note = "Please use CommitmentConfig::finalized() instead" )] pub fn root() -> Self { Self { commitment: CommitmentLevel::Root, } } #[deprecated( since = "1.5.5", note = "Please use CommitmentConfig::confirmed() instead" )] pub fn single() -> Self { Self { commitment: CommitmentLevel::Single, } } #[deprecated( since = "1.5.5", note = "Please use CommitmentConfig::confirmed() instead" )] pub fn single_gossip() -> Self { Self { commitment: CommitmentLevel::SingleGossip, } } pub fn finalized() -> Self { Self { commitment: CommitmentLevel::Finalized, } } pub fn confirmed() -> Self { Self { commitment: CommitmentLevel::Confirmed, } } pub fn processed() -> Self { Self { commitment: CommitmentLevel::Processed, } } pub fn ok(self) -> Option<Self> { if self == Self::default() { None } else { Some(self) } } pub fn is_finalized(&self) -> bool { matches!( &self.commitment, CommitmentLevel::Finalized | CommitmentLevel::Max | CommitmentLevel::Root ) } pub fn is_confirmed(&self) -> bool { matches!( &self.commitment, CommitmentLevel::Confirmed | CommitmentLevel::SingleGossip | CommitmentLevel::Single ) } pub fn is_processed(&self) -> bool { matches!( &self.commitment, CommitmentLevel::Processed | CommitmentLevel::Recent ) } pub fn is_at_least_confirmed(&self) -> bool { self.is_confirmed() || self.is_finalized() } pub fn 
use_deprecated_commitment(commitment: CommitmentConfig) -> Self { match commitment.commitment { CommitmentLevel::Finalized => CommitmentConfig::max(), CommitmentLevel::Confirmed => CommitmentConfig::single_gossip(), CommitmentLevel::Processed => CommitmentConfig::recent(), _ => commitment, } } } impl FromStr for CommitmentConfig { type Err = ParseCommitmentLevelError; fn from_str(s: &str) -> Result<Self, Self::Err> { CommitmentLevel::from_str(s).map(|commitment| Self { commitment }) } } #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq, Eq, Hash)] #[serde(rename_all = "camelCase")] pub enum CommitmentLevel { #[deprecated( since = "1.5.5", note = "Please use CommitmentLevel::Finalized instead" )] Max, #[deprecated( since = "1.5.5", note = "Please use CommitmentLevel::Processed instead" )] Recent, #[deprecated( since = "1.5.5", note = "Please use CommitmentLevel::Finalized instead" )] Root, #[deprecated( since = "1.5.5", note = "Please use CommitmentLevel::Confirmed instead" )] Single, #[deprecated( since = "1.5.5", note = "Please use CommitmentLevel::Confirmed instead" )] SingleGossip, Processed, Confirmed, Finalized, } impl Default for CommitmentLevel { fn default() -> Self { Self::Finalized } } impl FromStr for CommitmentLevel { type Err = ParseCommitmentLevelError; fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "max" => Ok(CommitmentLevel::Max), "recent" => Ok(CommitmentLevel::Recent), "root" => Ok(CommitmentLevel::Root), "single" => Ok(CommitmentLevel::Single), "singleGossip" => Ok(CommitmentLevel::SingleGossip), "processed" => Ok(CommitmentLevel::Processed), "confirmed" => Ok(CommitmentLevel::Confirmed), "finalized" => Ok(CommitmentLevel::Finalized), _ => Err(ParseCommitmentLevelError::Invalid), } } } impl std::fmt::Display for CommitmentLevel { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let s = match self { CommitmentLevel::Max => "max", CommitmentLevel::Recent => "recent", CommitmentLevel::Root => "root", 
CommitmentLevel::Single => "single", CommitmentLevel::SingleGossip => "singleGossip", CommitmentLevel::Processed => "processed", CommitmentLevel::Confirmed => "confirmed", CommitmentLevel::Finalized => "finalized", }; write!(f, "{}", s) } } #[derive(Error, Debug)] pub enum ParseCommitmentLevelError { #[error("invalid variant")] Invalid, }
#![allow(deprecated)] #![cfg(feature = "full")] use std::str::FromStr; use thiserror::Error; #[derive(Serialize, Deserialize, Default, Clone, Copy, Debug, PartialEq, Eq, Hash)] #[serde(rename_all = "camelCase")] pub struct CommitmentConfig { pub commitment: CommitmentLevel, } impl CommitmentConfig { #[deprecated( since = "1.5.5", note = "Please use CommitmentConfig::processed() instead" )] pub fn recent() -> Self { Self { commitment: CommitmentLevel::Recent, } } #[deprecated( since = "1.5.5", note = "Please use CommitmentConfig::finalized() instead" )] pub fn max() -> Self { Self { commitment: CommitmentLevel::Max, } } #[deprecated( since = "1.5.5", note = "Please use CommitmentConfig::finalized() instead" )] pub fn root() -> Self { Self { commitment: CommitmentLevel::Root, } } #[deprecated( since = "1.5.5", note = "Please use CommitmentConfig::confirmed() instead" )] pub fn single() -> Self { Self { commitment: CommitmentLevel::Single, } } #[deprecated( since = "1.5.5", note = "Please use CommitmentConfig::confirmed() instead" )] pub fn single_gossip() -> Self { Self { commitment: CommitmentLevel::SingleGossip, } } pub fn finalized() -> Self { Self { commitment: CommitmentLevel::Finalized, } } pub fn confirmed() -> Self { Self { commitment: CommitmentLevel::Confirmed, } } pub fn processed() -> Self { Self { commitment: CommitmentLevel::Processed, } } pub fn ok(self) -> Option<Self> { if self == Self::default() { None } else { Some(self) } } pub fn is_finalized(&self) -> bool { matches!( &self.commitment, CommitmentLevel::Finalized | CommitmentLevel::Max | CommitmentLevel::Root ) } pub fn is_confirmed(&self) -> bool { matches!( &self.commitment, CommitmentLevel::Confirmed | CommitmentLevel::SingleGossip | CommitmentLevel::Single ) } pub fn is_processed(&self) -> bool { matches!( &self.commitment, CommitmentLevel::Processed | CommitmentLevel::Recent ) } pub fn is_at_least_confirmed(&self) -> bool { self.is_confirmed() || self.is_finalized() } pub fn 
use_deprecated_commitment(commitment: CommitmentConfig) -> Self {
} } impl FromStr for CommitmentConfig { type Err = ParseCommitmentLevelError; fn from_str(s: &str) -> Result<Self, Self::Err> { CommitmentLevel::from_str(s).map(|commitment| Self { commitment }) } } #[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq, Eq, Hash)] #[serde(rename_all = "camelCase")] pub enum CommitmentLevel { #[deprecated( since = "1.5.5", note = "Please use CommitmentLevel::Finalized instead" )] Max, #[deprecated( since = "1.5.5", note = "Please use CommitmentLevel::Processed instead" )] Recent, #[deprecated( since = "1.5.5", note = "Please use CommitmentLevel::Finalized instead" )] Root, #[deprecated( since = "1.5.5", note = "Please use CommitmentLevel::Confirmed instead" )] Single, #[deprecated( since = "1.5.5", note = "Please use CommitmentLevel::Confirmed instead" )] SingleGossip, Processed, Confirmed, Finalized, } impl Default for CommitmentLevel { fn default() -> Self { Self::Finalized } } impl FromStr for CommitmentLevel { type Err = ParseCommitmentLevelError; fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "max" => Ok(CommitmentLevel::Max), "recent" => Ok(CommitmentLevel::Recent), "root" => Ok(CommitmentLevel::Root), "single" => Ok(CommitmentLevel::Single), "singleGossip" => Ok(CommitmentLevel::SingleGossip), "processed" => Ok(CommitmentLevel::Processed), "confirmed" => Ok(CommitmentLevel::Confirmed), "finalized" => Ok(CommitmentLevel::Finalized), _ => Err(ParseCommitmentLevelError::Invalid), } } } impl std::fmt::Display for CommitmentLevel { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let s = match self { CommitmentLevel::Max => "max", CommitmentLevel::Recent => "recent", CommitmentLevel::Root => "root", CommitmentLevel::Single => "single", CommitmentLevel::SingleGossip => "singleGossip", CommitmentLevel::Processed => "processed", CommitmentLevel::Confirmed => "confirmed", CommitmentLevel::Finalized => "finalized", }; write!(f, "{}", s) } } #[derive(Error, Debug)] pub enum ParseCommitmentLevelError 
{ #[error("invalid variant")] Invalid, }
match commitment.commitment { CommitmentLevel::Finalized => CommitmentConfig::max(), CommitmentLevel::Confirmed => CommitmentConfig::single_gossip(), CommitmentLevel::Processed => CommitmentConfig::recent(), _ => commitment, }
if_condition
[ { "content": "/// calculate maximum possible prioritization fee, if `use-randomized-compute-unit-price` is\n\n/// enabled, round to nearest lamports.\n\npub fn max_lamports_for_prioritization(use_randomized_compute_unit_price: bool) -> u64 {\n\n if use_randomized_compute_unit_price {\n\n const MICRO_LAMPORTS_PER_LAMPORT: u64 = 1_000_000;\n\n let micro_lamport_fee: u128 = (MAX_COMPUTE_UNIT_PRICE as u128)\n\n .saturating_mul(COMPUTE_UNIT_PRICE_MULTIPLIER as u128)\n\n .saturating_mul(TRANSFER_TRANSACTION_COMPUTE_UNIT as u128);\n\n let fee = micro_lamport_fee\n\n .saturating_add(MICRO_LAMPORTS_PER_LAMPORT.saturating_sub(1) as u128)\n\n .saturating_div(MICRO_LAMPORTS_PER_LAMPORT as u128);\n\n u64::try_from(fee).unwrap_or(u64::MAX)\n\n } else {\n\n 0u64\n\n }\n\n}\n\n\n\n// set transfer transaction's loaded account data size to 30K - large enough yet smaller than\n\n// 32K page size, so it'd cost 0 extra CU.\n\nconst TRANSFER_TRANSACTION_LOADED_ACCOUNTS_DATA_SIZE: u32 = 30 * 1024;\n\n\n\npub type TimestampedTransaction = (Transaction, Option<u64>);\n\npub type SharedTransactions = Arc<RwLock<VecDeque<Vec<TimestampedTransaction>>>>;\n\n\n", "file_path": "bench-tps/src/bench.rs", "rank": 0, "score": 317701.765198541 }, { "content": "pub fn process_confirm(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n signature: &Signature,\n\n) -> ProcessResult {\n\n match rpc_client.get_signature_statuses_with_history(&[*signature]) {\n\n Ok(status) => {\n\n let cli_transaction = if let Some(transaction_status) = &status.value[0] {\n\n let mut transaction = None;\n\n let mut get_transaction_error = None;\n\n if config.verbose {\n\n match rpc_client.get_transaction_with_config(\n\n signature,\n\n RpcTransactionConfig {\n\n encoding: Some(UiTransactionEncoding::Base64),\n\n commitment: Some(CommitmentConfig::confirmed()),\n\n max_supported_transaction_version: Some(0),\n\n },\n\n ) {\n\n Ok(confirmed_transaction) => {\n", "file_path": "cli/src/wallet.rs", "rank": 1, "score": 
309509.059078379 }, { "content": "/// Return a Sha256 hash for the given data.\n\npub fn hash(val: &[u8]) -> Hash {\n\n hashv(&[val])\n\n}\n\n\n", "file_path": "sdk/program/src/hash.rs", "rank": 2, "score": 295747.2463940159 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn process_blockstore_from_root(\n\n blockstore: &Blockstore,\n\n bank_forks: &RwLock<BankForks>,\n\n leader_schedule_cache: &LeaderScheduleCache,\n\n opts: &ProcessOptions,\n\n transaction_status_sender: Option<&TransactionStatusSender>,\n\n cache_block_meta_sender: Option<&CacheBlockMetaSender>,\n\n entry_notification_sender: Option<&EntryNotifierSender>,\n\n accounts_background_request_sender: &AbsRequestSender,\n\n) -> result::Result<(), BlockstoreProcessorError> {\n\n let (start_slot, start_slot_hash) = {\n\n // Starting slot must be a root, and thus has no parents\n\n assert_eq!(bank_forks.read().unwrap().banks().len(), 1);\n\n let bank = bank_forks.read().unwrap().root_bank();\n\n assert!(bank.parent().is_none());\n\n (bank.slot(), bank.hash())\n\n };\n\n\n\n info!(\"Processing ledger from slot {}...\", start_slot);\n\n let now = Instant::now();\n", "file_path": "ledger/src/blockstore_processor.rs", "rank": 3, "score": 294801.52728650067 }, { "content": "pub fn version_from_hash(hash: &Hash) -> u16 {\n\n let hash = hash.as_ref();\n\n let mut accum = [0u8; 2];\n\n hash.chunks(2).for_each(|seed| {\n\n accum\n\n .iter_mut()\n\n .zip(seed)\n\n .for_each(|(accum, seed)| *accum ^= *seed)\n\n });\n\n // convert accum into a u16\n\n // Because accum[0] is a u8, 8bit left shift of the u16 can never overflow\n\n #[allow(clippy::integer_arithmetic)]\n\n let version = ((accum[0] as u16) << 8) | accum[1] as u16;\n\n\n\n // ensure version is never zero, to avoid looking like an uninitialized version\n\n version.saturating_add(1)\n\n}\n\n\n", "file_path": "sdk/src/shred_version.rs", "rank": 4, "score": 291434.3160398191 }, { "content": "pub fn process_wait_for_max_stake(\n\n rpc_client: 
&RpcClient,\n\n config: &CliConfig,\n\n max_stake_percent: f32,\n\n) -> ProcessResult {\n\n let now = std::time::Instant::now();\n\n rpc_client.wait_for_max_stake(config.commitment, max_stake_percent)?;\n\n Ok(format!(\"Done waiting, took: {}s\", now.elapsed().as_secs()))\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 5, "score": 288145.1863967491 }, { "content": "pub fn commitment_of(matches: &ArgMatches, name: &str) -> Option<CommitmentConfig> {\n\n matches\n\n .value_of(name)\n\n .map(|value| CommitmentConfig::from_str(value).unwrap_or_default())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use {\n\n super::*,\n\n clap::{Arg, Command},\n\n solana_sdk::signature::write_keypair_file,\n\n std::fs,\n\n };\n\n\n\n fn app<'ab>() -> Command<'ab> {\n\n Command::new(\"test\")\n\n .arg(\n\n Arg::new(\"multiple\")\n\n .long(\"multiple\")\n", "file_path": "clap-v3-utils/src/input_parsers.rs", "rank": 6, "score": 287365.88983374427 }, { "content": "/// Return a Keccak256 hash for the given data.\n\npub fn hash(val: &[u8]) -> Hash {\n\n hashv(&[val])\n\n}\n\n\n", "file_path": "sdk/program/src/keccak.rs", "rank": 7, "score": 287280.3433426217 }, { "content": "/// Return a Blake3 hash for the given data.\n\npub fn hash(val: &[u8]) -> Hash {\n\n hashv(&[val])\n\n}\n\n\n", "file_path": "sdk/program/src/blake3.rs", "rank": 8, "score": 287280.3433426217 }, { "content": "pub fn commitment_of(matches: &ArgMatches<'_>, name: &str) -> Option<CommitmentConfig> {\n\n matches\n\n .value_of(name)\n\n .map(|value| CommitmentConfig::from_str(value).unwrap_or_default())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use {\n\n super::*,\n\n clap::{App, Arg},\n\n solana_sdk::signature::write_keypair_file,\n\n std::fs,\n\n };\n\n\n\n fn app<'ab, 'v>() -> App<'ab, 'v> {\n\n App::new(\"test\")\n\n .arg(\n\n Arg::with_name(\"multiple\")\n\n .long(\"multiple\")\n", "file_path": "clap-utils/src/input_parsers.rs", "rank": 9, "score": 287102.81558409304 }, { "content": "pub fn 
warn_for_deprecated_arguments(matches: &ArgMatches) {\n\n for DeprecatedArg {\n\n arg,\n\n replaced_by,\n\n usage_warning,\n\n } in deprecated_arguments().into_iter()\n\n {\n\n if matches.is_present(arg.b.name) {\n\n let mut msg = format!(\"--{} is deprecated\", arg.b.name.replace('_', \"-\"));\n\n if let Some(replaced_by) = replaced_by {\n\n msg.push_str(&format!(\", please use --{replaced_by}\"));\n\n }\n\n msg.push('.');\n\n if let Some(usage_warning) = usage_warning {\n\n msg.push_str(&format!(\" {usage_warning}\"));\n\n if !msg.ends_with('.') {\n\n msg.push('.');\n\n }\n\n }\n\n warn!(\"{}\", msg);\n", "file_path": "validator/src/cli.rs", "rank": 10, "score": 286320.6427676368 }, { "content": "/// Clones and maps snapshot hashes into what CRDS expects\n\nfn clone_hashes_for_crds(hashes: &[impl AsSnapshotHash]) -> Vec<(Slot, Hash)> {\n\n hashes.iter().map(AsSnapshotHash::clone_for_crds).collect()\n\n}\n", "file_path": "core/src/snapshot_packager_service/snapshot_gossip_manager.rs", "rank": 11, "score": 284497.2035304144 }, { "content": "#[must_use]\n\npub fn is_in_calculation_window(bank: &Bank) -> bool {\n\n let info = calculation_info(bank);\n\n let range = info.calculation_start..info.calculation_stop;\n\n range.contains(&bank.slot())\n\n}\n\n\n", "file_path": "runtime/src/bank/epoch_accounts_hash_utils.rs", "rank": 12, "score": 284304.5925143184 }, { "content": "#[must_use]\n\npub fn is_enabled_this_epoch(bank: &Bank) -> bool {\n\n // The EAH calculation \"start\" is based on when a bank is *rooted*, and \"stop\" is based on when a\n\n // bank is *frozen*. Banks are rooted after exceeding the maximum lockout, so there is a delay\n\n // of at least `maximum lockout` number of slots the EAH calculation must take into\n\n // consideration. 
To ensure an EAH calculation has started by the time that calculation is\n\n // needed, the calculation interval must be at least `maximum lockout` plus some buffer to\n\n // handle when banks are not rooted every single slot.\n\n const MINIMUM_CALCULATION_INTERVAL: u64 =\n\n (MAX_LOCKOUT_HISTORY as u64).saturating_add(CALCULATION_INTERVAL_BUFFER);\n\n // The calculation buffer is a best-attempt at median worst-case for how many bank ancestors can\n\n // accumulate before the bank is rooted.\n\n // [brooks] On Wed Oct 26 12:15:21 2022, over the previous 6 hour period against mainnet-beta,\n\n // I saw multiple validators reporting metrics in the 120s for `total_parent_banks`. The mean\n\n // is 2 to 3, but a number of nodes also reported values in the low 20s. A value of 150 should\n\n // capture the majority of validators, and will not be an issue for clusters running with\n\n // normal slots-per-epoch; this really will only affect tests and epoch schedule warmup.\n\n const CALCULATION_INTERVAL_BUFFER: u64 = 150;\n\n\n\n let calculation_interval = calculation_interval(bank);\n\n calculation_interval >= MINIMUM_CALCULATION_INTERVAL\n\n}\n\n\n\n/// Calculation of the EAH occurs once per epoch. All nodes in the cluster must agree on which\n\n/// slot the EAH is based on. 
This slot will be at an offset into the epoch, and referred to as\n\n/// the \"start\" slot for the EAH calculation.\n", "file_path": "runtime/src/bank/epoch_accounts_hash_utils.rs", "rank": 13, "score": 284304.5925143184 }, { "content": "/// Return the hash of the given hash extended with the given value.\n\npub fn extend_and_hash(id: &Hash, val: &[u8]) -> Hash {\n\n let mut hash_data = id.as_ref().to_vec();\n\n hash_data.extend_from_slice(val);\n\n hash(&hash_data)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_new_unique() {\n\n assert!(Hash::new_unique() != Hash::new_unique());\n\n }\n\n\n\n #[test]\n\n fn test_hash_fromstr() {\n\n let hash = hash(&[1u8]);\n\n\n\n let mut hash_base58_str = bs58::encode(hash).into_string();\n", "file_path": "sdk/program/src/hash.rs", "rank": 14, "score": 276309.1460203474 }, { "content": "/// Return a Sha256 hash for the given data.\n\npub fn hashv(vals: &[&[u8]]) -> Hash {\n\n // Perform the calculation inline, calling this from within a program is\n\n // not supported\n\n #[cfg(not(target_os = \"solana\"))]\n\n {\n\n let mut hasher = Hasher::default();\n\n hasher.hashv(vals);\n\n hasher.result()\n\n }\n\n // Call via a system call to perform the calculation\n\n #[cfg(target_os = \"solana\")]\n\n {\n\n let mut hash_result = [0; HASH_BYTES];\n\n unsafe {\n\n crate::syscalls::sol_sha256(\n\n vals as *const _ as *const u8,\n\n vals.len() as u64,\n\n &mut hash_result as *mut _ as *mut u8,\n\n );\n\n }\n\n Hash::new_from_array(hash_result)\n\n }\n\n}\n\n\n", "file_path": "sdk/program/src/hash.rs", "rank": 15, "score": 273118.8239133241 }, { "content": "pub fn hidden_unless_forced() -> bool {\n\n std::env::var(\"SOLANA_NO_HIDDEN_CLI_ARGS\").is_err()\n\n}\n\n\n\npub mod compute_unit_price;\n\npub mod fee_payer;\n\npub mod input_parsers;\n\npub mod input_validators;\n\npub mod keypair;\n\npub mod memo;\n\npub mod nonce;\n\npub mod offline;\n", "file_path": "clap-utils/src/lib.rs", "rank": 16, 
"score": 273098.2826278695 }, { "content": "pub fn build_balance_message(lamports: u64, use_lamports_unit: bool, show_unit: bool) -> String {\n\n build_balance_message_with_config(\n\n lamports,\n\n &BuildBalanceMessageConfig {\n\n use_lamports_unit,\n\n show_unit,\n\n ..BuildBalanceMessageConfig::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "cli-output/src/display.rs", "rank": 17, "score": 270781.5904193753 }, { "content": "/// Return the hash of the given hash extended with the given value.\n\npub fn extend_and_hash(id: &Hash, val: &[u8]) -> Hash {\n\n let mut hash_data = id.as_ref().to_vec();\n\n hash_data.extend_from_slice(val);\n\n hash(&hash_data)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_new_unique() {\n\n assert!(Hash::new_unique() != Hash::new_unique());\n\n }\n\n\n\n #[test]\n\n fn test_hash_fromstr() {\n\n let hash = hash(&[1u8]);\n\n\n\n let mut hash_base58_str = bs58::encode(hash).into_string();\n", "file_path": "sdk/program/src/blake3.rs", "rank": 18, "score": 270749.53622442874 }, { "content": "/// Return the hash of the given hash extended with the given value.\n\npub fn extend_and_hash(id: &Hash, val: &[u8]) -> Hash {\n\n let mut hash_data = id.as_ref().to_vec();\n\n hash_data.extend_from_slice(val);\n\n hash(&hash_data)\n\n}\n", "file_path": "sdk/program/src/keccak.rs", "rank": 19, "score": 270749.53622442874 }, { "content": "pub fn hash_transactions(transactions: &[VersionedTransaction]) -> Hash {\n\n // a hash of a slice of transactions only needs to hash the signatures\n\n let signatures: Vec<_> = transactions\n\n .iter()\n\n .flat_map(|tx| tx.signatures.iter())\n\n .collect();\n\n let merkle_tree = MerkleTree::new(&signatures);\n\n if let Some(root_hash) = merkle_tree.get_root() {\n\n *root_hash\n\n } else {\n\n Hash::default()\n\n }\n\n}\n\n\n", "file_path": "entry/src/entry.rs", "rank": 20, "score": 268591.6879051163 }, { "content": "pub fn process_vote_with_account<S: 
std::hash::BuildHasher>(\n\n vote_account: &mut BorrowedAccount,\n\n slot_hashes: &[SlotHash],\n\n clock: &Clock,\n\n vote: &Vote,\n\n signers: &HashSet<Pubkey, S>,\n\n feature_set: &FeatureSet,\n\n) -> Result<(), InstructionError> {\n\n let mut vote_state = verify_and_get_vote_state(vote_account, clock, signers)?;\n\n\n\n process_vote(&mut vote_state, vote, slot_hashes, clock.epoch)?;\n\n if let Some(timestamp) = vote.timestamp {\n\n vote.slots\n\n .iter()\n\n .max()\n\n .ok_or(VoteError::EmptySlots)\n\n .and_then(|slot| vote_state.process_timestamp(*slot, timestamp))?;\n\n }\n\n set_vote_account_state(vote_account, vote_state, feature_set)\n\n}\n\n\n", "file_path": "programs/vote/src/vote_state/mod.rs", "rank": 21, "score": 267903.973014974 }, { "content": "pub fn create_random_ticks(num_ticks: u64, max_hashes_per_tick: u64, mut hash: Hash) -> Vec<Entry> {\n\n repeat_with(|| {\n\n let hashes_per_tick = thread_rng().gen_range(1, max_hashes_per_tick);\n\n next_entry_mut(&mut hash, hashes_per_tick, vec![])\n\n })\n\n .take(num_ticks as usize)\n\n .collect()\n\n}\n\n\n", "file_path": "entry/src/entry.rs", "rank": 22, "score": 264788.8735090853 }, { "content": "fn compare_hashes(computed_hash: Hash, ref_entry: &Entry) -> bool {\n\n let actual = if !ref_entry.transactions.is_empty() {\n\n let tx_hash = hash_transactions(&ref_entry.transactions);\n\n let mut poh = Poh::new(computed_hash, None);\n\n poh.record(tx_hash).unwrap().hash\n\n } else if ref_entry.num_hashes > 0 {\n\n let mut poh = Poh::new(computed_hash, None);\n\n poh.tick().unwrap().hash\n\n } else {\n\n computed_hash\n\n };\n\n actual == ref_entry.hash\n\n}\n\n\n", "file_path": "entry/src/entry.rs", "rank": 23, "score": 264227.1029191837 }, { "content": "pub fn process_vote_state_update<S: std::hash::BuildHasher>(\n\n vote_account: &mut BorrowedAccount,\n\n slot_hashes: &[SlotHash],\n\n clock: &Clock,\n\n vote_state_update: VoteStateUpdate,\n\n signers: &HashSet<Pubkey, S>,\n\n feature_set: &FeatureSet,\n\n) 
-> Result<(), InstructionError> {\n\n let mut vote_state = verify_and_get_vote_state(vote_account, clock, signers)?;\n\n do_process_vote_state_update(\n\n &mut vote_state,\n\n slot_hashes,\n\n clock.epoch,\n\n vote_state_update,\n\n Some(feature_set),\n\n )?;\n\n set_vote_account_state(vote_account, vote_state, feature_set)\n\n}\n\n\n", "file_path": "programs/vote/src/vote_state/mod.rs", "rank": 24, "score": 263470.1770278166 }, { "content": "pub trait Contains<'a, T: Eq + Hash> {\n\n type Item: Borrow<T>;\n\n type Iter: Iterator<Item = Self::Item>;\n\n fn contains(&self, key: &T) -> bool;\n\n fn contains_iter(&'a self) -> Self::Iter;\n\n}\n\n\n\nimpl<'a, T: 'a + Eq + Hash, U: 'a> Contains<'a, T> for HashMap<T, U> {\n\n type Item = &'a T;\n\n type Iter = std::collections::hash_map::Keys<'a, T, U>;\n\n\n\n fn contains(&self, key: &T) -> bool {\n\n <HashMap<T, U>>::contains_key(self, key)\n\n }\n\n fn contains_iter(&'a self) -> Self::Iter {\n\n self.keys()\n\n }\n\n}\n\n\n\nimpl<'a, T: 'a + Eq + Hash> Contains<'a, T> for HashSet<T> {\n", "file_path": "runtime/src/contains.rs", "rank": 25, "score": 260034.39023267443 }, { "content": "#[cfg(not(target_os = \"linux\"))]\n\npub fn is_renice_allowed(adjustment: i8) -> bool {\n\n adjustment == 0\n\n}\n\n\n", "file_path": "perf/src/thread.rs", "rank": 26, "score": 256192.63954426057 }, { "content": "/// Check if given bytes contain valid UTF8 string\n\npub fn is_utf8(data: &[u8]) -> bool {\n\n std::str::from_utf8(data).is_ok()\n\n}\n\n\n\n#[repr(u8)]\n\n#[derive(Debug, PartialEq, Eq, Copy, Clone, TryFromPrimitive, IntoPrimitive)]\n\npub enum MessageFormat {\n\n RestrictedAscii,\n\n LimitedUtf8,\n\n ExtendedUtf8,\n\n}\n\n\n\n#[allow(clippy::integer_arithmetic)]\n\npub mod v0 {\n\n use {\n\n super::{is_printable_ascii, is_utf8, MessageFormat, OffchainMessage as Base},\n\n crate::{\n\n hash::{Hash, Hasher},\n\n packet::PACKET_DATA_SIZE,\n\n sanitize::SanitizeError,\n", "file_path": "sdk/src/offchain_message.rs", "rank": 27, 
"score": 256192.63954426057 }, { "content": "/// Return a Blake3 hash for the given data.\n\npub fn hashv(vals: &[&[u8]]) -> Hash {\n\n // Perform the calculation inline, calling this from within a program is\n\n // not supported\n\n #[cfg(not(target_os = \"solana\"))]\n\n {\n\n let mut hasher = Hasher::default();\n\n hasher.hashv(vals);\n\n hasher.result()\n\n }\n\n // Call via a system call to perform the calculation\n\n #[cfg(target_os = \"solana\")]\n\n {\n\n let mut hash_result = [0; HASH_BYTES];\n\n unsafe {\n\n crate::syscalls::sol_blake3(\n\n vals as *const _ as *const u8,\n\n vals.len() as u64,\n\n &mut hash_result as *mut _ as *mut u8,\n\n );\n\n }\n\n Hash::new_from_array(hash_result)\n\n }\n\n}\n\n\n", "file_path": "sdk/program/src/blake3.rs", "rank": 28, "score": 256167.2314465699 }, { "content": "/// Return a Keccak256 hash for the given data.\n\npub fn hashv(vals: &[&[u8]]) -> Hash {\n\n // Perform the calculation inline, calling this from within a program is\n\n // not supported\n\n #[cfg(not(target_os = \"solana\"))]\n\n {\n\n let mut hasher = Hasher::default();\n\n hasher.hashv(vals);\n\n hasher.result()\n\n }\n\n // Call via a system call to perform the calculation\n\n #[cfg(target_os = \"solana\")]\n\n {\n\n let mut hash_result = [0; HASH_BYTES];\n\n unsafe {\n\n crate::syscalls::sol_keccak256(\n\n vals as *const _ as *const u8,\n\n vals.len() as u64,\n\n &mut hash_result as *mut _ as *mut u8,\n\n );\n\n }\n\n Hash::new_from_array(hash_result)\n\n }\n\n}\n\n\n", "file_path": "sdk/program/src/keccak.rs", "rank": 29, "score": 256167.2314465699 }, { "content": "pub fn process_get_genesis_hash(rpc_client: &RpcClient) -> ProcessResult {\n\n let genesis_hash = rpc_client.get_genesis_hash()?;\n\n Ok(genesis_hash.to_string())\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 30, "score": 255230.05161397916 }, { "content": "fn get_sysvar<T: Default + Sysvar + Sized + serde::de::DeserializeOwned + Clone>(\n\n sysvar: Result<Arc<T>, 
InstructionError>,\n\n var_addr: *mut u8,\n\n) -> u64 {\n\n let invoke_context = get_invoke_context();\n\n if invoke_context\n\n .consume_checked(invoke_context.get_compute_budget().sysvar_base_cost + T::size_of() as u64)\n\n .is_err()\n\n {\n\n panic!(\"Exceeded compute budget\");\n\n }\n\n\n\n match sysvar {\n\n Ok(sysvar_data) => unsafe {\n\n *(var_addr as *mut _ as *mut T) = T::clone(&sysvar_data);\n\n SUCCESS\n\n },\n\n Err(_) => UNSUPPORTED_SYSVAR,\n\n }\n\n}\n\n\n", "file_path": "program-test/src/lib.rs", "rank": 31, "score": 253687.16735462926 }, { "content": "pub fn process_allocations(\n\n client: &RpcClient,\n\n args: &DistributeTokensArgs,\n\n exit: Arc<AtomicBool>,\n\n) -> Result<Option<usize>, Error> {\n\n let require_lockup_heading = args.stake_args.is_some();\n\n let mut allocations: Vec<Allocation> = read_allocations(\n\n &args.input_csv,\n\n args.transfer_amount,\n\n require_lockup_heading,\n\n args.spl_token_args.is_some(),\n\n )?;\n\n\n\n let starting_total_tokens = allocations.iter().map(|x| x.amount).sum();\n\n let starting_total_tokens = if let Some(spl_token_args) = &args.spl_token_args {\n\n Token::spl_token(starting_total_tokens, spl_token_args.decimals)\n\n } else {\n\n Token::sol(starting_total_tokens)\n\n };\n\n println!(\n", "file_path": "tokens/src/commands.rs", "rank": 32, "score": 253066.47499427566 }, { "content": "pub fn process_airdrop(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n pubkey: &Option<Pubkey>,\n\n lamports: u64,\n\n) -> ProcessResult {\n\n let pubkey = if let Some(pubkey) = pubkey {\n\n *pubkey\n\n } else {\n\n config.pubkey()?\n\n };\n\n println!(\n\n \"Requesting airdrop of {}\",\n\n build_balance_message(lamports, false, true),\n\n );\n\n\n\n let pre_balance = rpc_client.get_balance(&pubkey)?;\n\n\n\n let result = request_and_confirm_airdrop(rpc_client, config, &pubkey, lamports);\n\n if let Ok(signature) = result {\n", "file_path": "cli/src/wallet.rs", "rank": 33, "score": 253066.47499427566 }, { 
"content": "#[allow(clippy::too_many_arguments)]\n\npub fn process_transfer(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n amount: SpendAmount,\n\n to: &Pubkey,\n\n from: SignerIndex,\n\n sign_only: bool,\n\n dump_transaction_message: bool,\n\n allow_unfunded_recipient: bool,\n\n no_wait: bool,\n\n blockhash_query: &BlockhashQuery,\n\n nonce_account: Option<&Pubkey>,\n\n nonce_authority: SignerIndex,\n\n memo: Option<&String>,\n\n fee_payer: SignerIndex,\n\n derived_address_seed: Option<String>,\n\n derived_address_program_id: Option<&Pubkey>,\n\n compute_unit_price: Option<&u64>,\n\n) -> ProcessResult {\n\n let from = config.signers[from];\n", "file_path": "cli/src/wallet.rs", "rank": 34, "score": 253066.47499427566 }, { "content": "pub fn process_balances(\n\n client: &RpcClient,\n\n args: &BalancesArgs,\n\n exit: Arc<AtomicBool>,\n\n) -> Result<(), Error> {\n\n let allocations: Vec<Allocation> =\n\n read_allocations(&args.input_csv, None, false, args.spl_token_args.is_some())?;\n\n let allocations = merge_allocations(&allocations);\n\n\n\n let token = if let Some(spl_token_args) = &args.spl_token_args {\n\n spl_token_args.mint.to_string()\n\n } else {\n\n \"◎\".to_string()\n\n };\n\n println!(\"{} {}\", style(\"Token:\").bold(), token);\n\n\n\n println!(\n\n \"{}\",\n\n style(format!(\n\n \"{:<44} {:>24} {:>24} {:>24}\",\n", "file_path": "tokens/src/commands.rs", "rank": 35, "score": 253066.47499427566 }, { "content": "pub fn process_balance(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n pubkey: &Option<Pubkey>,\n\n use_lamports_unit: bool,\n\n) -> ProcessResult {\n\n let pubkey = if let Some(pubkey) = pubkey {\n\n *pubkey\n\n } else {\n\n config.pubkey()?\n\n };\n\n let balance = rpc_client.get_balance(&pubkey)?;\n\n let balance_output = CliBalance {\n\n lamports: balance,\n\n config: BuildBalanceMessageConfig {\n\n use_lamports_unit,\n\n show_unit: true,\n\n trim_trailing_zeros: true,\n\n },\n\n };\n\n\n\n 
Ok(config.output_format.formatted_string(&balance_output))\n\n}\n\n\n", "file_path": "cli/src/wallet.rs", "rank": 36, "score": 253066.47499427566 }, { "content": "/// Creates the hash `num_hashes` after `start_hash`. If the transaction contains\n\n/// a signature, the final hash will be a hash of both the previous ID and\n\n/// the signature. If num_hashes is zero and there's no transaction data,\n\n/// start_hash is returned.\n\npub fn next_hash(\n\n start_hash: &Hash,\n\n num_hashes: u64,\n\n transactions: &[VersionedTransaction],\n\n) -> Hash {\n\n if num_hashes == 0 && transactions.is_empty() {\n\n return *start_hash;\n\n }\n\n\n\n let mut poh = Poh::new(*start_hash, None);\n\n poh.hash(num_hashes.saturating_sub(1));\n\n if transactions.is_empty() {\n\n poh.tick().unwrap().hash\n\n } else {\n\n poh.record(hash_transactions(transactions)).unwrap().hash\n\n }\n\n}\n\n\n", "file_path": "entry/src/entry.rs", "rank": 37, "score": 253030.0899258473 }, { "content": "/// Create and sign new system_instruction::Assign transaction\n\npub fn assign(from_keypair: &Keypair, recent_blockhash: Hash, program_id: &Pubkey) -> Transaction {\n\n let from_pubkey = from_keypair.pubkey();\n\n let instruction = system_instruction::assign(&from_pubkey, program_id);\n\n let message = Message::new(&[instruction], Some(&from_pubkey));\n\n Transaction::new(&[from_keypair], message, recent_blockhash)\n\n}\n\n\n", "file_path": "sdk/src/system_transaction.rs", "rank": 38, "score": 252024.12967308227 }, { "content": "pub fn bigtable_process_command(ledger_path: &Path, matches: &ArgMatches<'_>) {\n\n let runtime = tokio::runtime::Runtime::new().unwrap();\n\n\n\n let verbose = matches.is_present(\"verbose\");\n\n let force_update_to_open = matches.is_present(\"force_update_to_open\");\n\n let output_format = OutputFormat::from_matches(matches, \"output_format\", verbose);\n\n let enforce_ulimit_nofile = !matches.is_present(\"ignore_ulimit_nofile_error\");\n\n\n\n let (subcommand, sub_matches) = 
matches.subcommand();\n\n let instance_name = get_global_subcommand_arg(\n\n matches,\n\n sub_matches,\n\n \"rpc_bigtable_instance_name\",\n\n solana_storage_bigtable::DEFAULT_INSTANCE_NAME,\n\n );\n\n let app_profile_id = get_global_subcommand_arg(\n\n matches,\n\n sub_matches,\n\n \"rpc_bigtable_app_profile_id\",\n\n solana_storage_bigtable::DEFAULT_APP_PROFILE_ID,\n", "file_path": "ledger-tool/src/bigtable.rs", "rank": 39, "score": 251762.09294110333 }, { "content": "/// Check if given bytes contain only printable ASCII characters\n\npub fn is_printable_ascii(data: &[u8]) -> bool {\n\n for &char in data {\n\n if !(0x20..=0x7e).contains(&char) {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "sdk/src/offchain_message.rs", "rank": 40, "score": 251508.5729725223 }, { "content": "#[cfg(not(windows))]\n\npub fn stop_process(process: &mut Child) -> Result<(), io::Error> {\n\n use {\n\n nix::{\n\n errno::Errno::{EINVAL, EPERM, ESRCH},\n\n sys::signal::{kill, Signal},\n\n unistd::Pid,\n\n },\n\n std::{\n\n io::ErrorKind,\n\n thread,\n\n time::{Duration, Instant},\n\n },\n\n };\n\n\n\n let nice_wait = Duration::from_secs(5);\n\n let pid = Pid::from_raw(process.id() as i32);\n\n match kill(pid, Signal::SIGINT) {\n\n Ok(()) => {\n\n let expire = Instant::now() + nice_wait;\n\n while let Ok(None) = process.try_wait() {\n", "file_path": "install/src/stop_process.rs", "rank": 41, "score": 247704.53789780778 }, { "content": "pub fn update(config_file: &str, check_only: bool) -> Result<bool, String> {\n\n init_or_update(config_file, false, check_only)\n\n}\n\n\n", "file_path": "install/src/command.rs", "rank": 42, "score": 247687.51394070807 }, { "content": "// Set the finalized bit in the database if the transaction is rooted.\n\n// Remove the TransactionInfo from the database if the transaction failed.\n\n// Return the number of confirmations on the transaction or None if either\n\n// finalized or discarded.\n\npub fn update_finalized_transaction(\n\n db: 
&mut PickleDb,\n\n signature: &Signature,\n\n opt_transaction_status: Option<TransactionStatus>,\n\n last_valid_block_height: u64,\n\n finalized_block_height: u64,\n\n) -> Result<Option<usize>, Error> {\n\n if opt_transaction_status.is_none() {\n\n if finalized_block_height > last_valid_block_height {\n\n eprintln!(\n\n \"Signature not found {signature} and blockhash expired. Transaction either dropped or the validator purged the transaction status.\"\n\n );\n\n eprintln!();\n\n\n\n // Don't discard the transaction, because we are not certain the\n\n // blockhash is expired. Instead, return None to signal that\n\n // we don't need to wait for confirmations.\n\n return Ok(None);\n\n }\n\n\n", "file_path": "tokens/src/db.rs", "rank": 43, "score": 247450.44699006918 }, { "content": "pub fn request_and_confirm_airdrop(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n to_pubkey: &Pubkey,\n\n lamports: u64,\n\n) -> ClientResult<Signature> {\n\n let recent_blockhash = rpc_client.get_latest_blockhash()?;\n\n let signature =\n\n rpc_client.request_airdrop_with_blockhash(to_pubkey, lamports, &recent_blockhash)?;\n\n rpc_client.confirm_transaction_with_spinner(\n\n &signature,\n\n &recent_blockhash,\n\n config.commitment,\n\n )?;\n\n Ok(signature)\n\n}\n\n\n", "file_path": "cli/src/cli.rs", "rank": 44, "score": 247399.85635415395 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn confirm_slot(\n\n blockstore: &Blockstore,\n\n bank: &Arc<Bank>,\n\n timing: &mut ConfirmationTiming,\n\n progress: &mut ConfirmationProgress,\n\n skip_verification: bool,\n\n transaction_status_sender: Option<&TransactionStatusSender>,\n\n entry_notification_sender: Option<&EntryNotifierSender>,\n\n replay_vote_sender: Option<&ReplayVoteSender>,\n\n recyclers: &VerifyRecyclers,\n\n allow_dead_slots: bool,\n\n log_messages_bytes_limit: Option<usize>,\n\n prioritization_fee_cache: &PrioritizationFeeCache,\n\n) -> result::Result<(), BlockstoreProcessorError> {\n\n let slot = 
bank.slot();\n\n\n\n let slot_entries_load_result = {\n\n let mut load_elapsed = Measure::start(\"load_elapsed\");\n\n let load_result = blockstore\n\n .get_slot_entries_with_shred_info(slot, progress.num_shreds, allow_dead_slots)\n", "file_path": "ledger/src/blockstore_processor.rs", "rank": 45, "score": 247399.85635415395 }, { "content": "pub fn process_get_nonce(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n nonce_account_pubkey: &Pubkey,\n\n) -> ProcessResult {\n\n #[allow(clippy::redundant_closure)]\n\n match get_account_with_commitment(rpc_client, nonce_account_pubkey, config.commitment)\n\n .and_then(|ref a| state_from_account(a))?\n\n {\n\n State::Uninitialized => Ok(\"Nonce account is uninitialized\".to_string()),\n\n State::Initialized(ref data) => Ok(format!(\"{:?}\", data.blockhash())),\n\n }\n\n}\n\n\n", "file_path": "cli/src/nonce.rs", "rank": 46, "score": 247366.13475188697 }, { "content": "pub fn process_show_account(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n account_pubkey: &Pubkey,\n\n output_file: &Option<String>,\n\n use_lamports_unit: bool,\n\n) -> ProcessResult {\n\n let account = rpc_client.get_account(account_pubkey)?;\n\n let data = &account.data;\n\n let cli_account = CliAccount::new(account_pubkey, &account, use_lamports_unit);\n\n\n\n let mut account_string = config.output_format.formatted_string(&cli_account);\n\n\n\n match config.output_format {\n\n OutputFormat::Json | OutputFormat::JsonCompact => {\n\n if let Some(output_file) = output_file {\n\n let mut f = File::create(output_file)?;\n\n f.write_all(account_string.as_bytes())?;\n\n writeln!(&mut account_string)?;\n\n writeln!(&mut account_string, \"Wrote account to {output_file}\")?;\n", "file_path": "cli/src/wallet.rs", "rank": 47, "score": 247366.13475188697 }, { "content": "pub fn process_program_subcommand(\n\n rpc_client: Arc<RpcClient>,\n\n config: &CliConfig,\n\n program_subcommand: &ProgramCliCommand,\n\n) -> ProcessResult {\n\n match 
program_subcommand {\n\n ProgramCliCommand::Deploy {\n\n program_location,\n\n program_signer_index,\n\n program_pubkey,\n\n buffer_signer_index,\n\n buffer_pubkey,\n\n upgrade_authority_signer_index,\n\n is_final,\n\n max_len,\n\n allow_excessive_balance,\n\n skip_fee_check,\n\n } => process_program_deploy(\n\n rpc_client,\n\n config,\n", "file_path": "cli/src/program.rs", "rank": 48, "score": 247366.13475188697 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn process_split_stake(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n stake_account_pubkey: &Pubkey,\n\n stake_authority: SignerIndex,\n\n sign_only: bool,\n\n dump_transaction_message: bool,\n\n blockhash_query: &BlockhashQuery,\n\n nonce_account: Option<Pubkey>,\n\n nonce_authority: SignerIndex,\n\n memo: Option<&String>,\n\n split_stake_account: SignerIndex,\n\n split_stake_account_seed: &Option<String>,\n\n lamports: u64,\n\n fee_payer: SignerIndex,\n\n compute_unit_price: Option<&u64>,\n\n) -> ProcessResult {\n\n let split_stake_account = config.signers[split_stake_account];\n\n let fee_payer = config.signers[fee_payer];\n\n\n", "file_path": "cli/src/stake.rs", "rank": 49, "score": 247366.13475188697 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn process_stake_authorize(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n stake_account_pubkey: &Pubkey,\n\n new_authorizations: &[StakeAuthorizationIndexed],\n\n custodian: Option<SignerIndex>,\n\n sign_only: bool,\n\n dump_transaction_message: bool,\n\n blockhash_query: &BlockhashQuery,\n\n nonce_account: Option<Pubkey>,\n\n nonce_authority: SignerIndex,\n\n memo: Option<&String>,\n\n fee_payer: SignerIndex,\n\n no_wait: bool,\n\n compute_unit_price: Option<&u64>,\n\n) -> ProcessResult {\n\n let mut ixs = Vec::new();\n\n let custodian = custodian.map(|index| config.signers[index]);\n\n let current_stake_account = if !sign_only {\n\n Some(get_stake_account_state(\n", "file_path": "cli/src/stake.rs", "rank": 50, 
"score": 247366.13475188697 }, { "content": "pub fn process_feature_subcommand(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n feature_subcommand: &FeatureCliCommand,\n\n) -> ProcessResult {\n\n match feature_subcommand {\n\n FeatureCliCommand::Status {\n\n features,\n\n display_all,\n\n } => process_status(rpc_client, config, features, *display_all),\n\n FeatureCliCommand::Activate {\n\n feature,\n\n cluster,\n\n force,\n\n fee_payer,\n\n } => process_activate(rpc_client, config, *feature, *cluster, *force, *fee_payer),\n\n }\n\n}\n\n\n", "file_path": "cli/src/feature.rs", "rank": 51, "score": 247366.13475188697 }, { "content": "pub fn process_new_nonce(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n nonce_account: &Pubkey,\n\n nonce_authority: SignerIndex,\n\n memo: Option<&String>,\n\n compute_unit_price: Option<&u64>,\n\n) -> ProcessResult {\n\n check_unique_pubkeys(\n\n (&config.signers[0].pubkey(), \"cli keypair\".to_string()),\n\n (nonce_account, \"nonce_account_pubkey\".to_string()),\n\n )?;\n\n\n\n if let Err(err) = rpc_client.get_account(nonce_account) {\n\n return Err(CliError::BadParameter(format!(\n\n \"Unable to advance nonce account {nonce_account}. 
error: {err}\"\n\n ))\n\n .into());\n\n }\n\n\n", "file_path": "cli/src/nonce.rs", "rank": 52, "score": 247366.13475188697 }, { "content": "pub fn process_fees(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n blockhash: Option<&Hash>,\n\n) -> ProcessResult {\n\n let fees = if let Some(recent_blockhash) = blockhash {\n\n #[allow(deprecated)]\n\n let result = rpc_client.get_fee_calculator_for_blockhash_with_commitment(\n\n recent_blockhash,\n\n config.commitment,\n\n )?;\n\n if let Some(fee_calculator) = result.value {\n\n CliFees::some(\n\n result.context.slot,\n\n *recent_blockhash,\n\n fee_calculator.lamports_per_signature,\n\n None,\n\n None,\n\n )\n\n } else {\n", "file_path": "cli/src/cluster_query.rs", "rank": 53, "score": 247366.13475188697 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\npub fn process_decode_transaction(\n\n config: &CliConfig,\n\n transaction: &VersionedTransaction,\n\n) -> ProcessResult {\n\n let sigverify_status = CliSignatureVerificationStatus::verify_transaction(transaction);\n\n let decode_transaction = CliTransaction {\n\n decoded_transaction: transaction.clone(),\n\n transaction: transaction.json_encode(),\n\n meta: None,\n\n block_time: None,\n\n slot: None,\n\n prefix: \"\".to_string(),\n\n sigverify_status,\n\n };\n\n Ok(config.output_format.formatted_string(&decode_transaction))\n\n}\n\n\n", "file_path": "cli/src/wallet.rs", "rank": 54, "score": 247366.13475188697 }, { "content": "pub fn process_supply(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n print_accounts: bool,\n\n) -> ProcessResult {\n\n let supply_response = rpc_client.supply()?;\n\n let mut supply: CliSupply = supply_response.value.into();\n\n supply.print_accounts = print_accounts;\n\n Ok(config.output_format.formatted_string(&supply))\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 55, "score": 247366.13475188697 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn process_merge_stake(\n\n rpc_client: 
&RpcClient,\n\n config: &CliConfig,\n\n stake_account_pubkey: &Pubkey,\n\n source_stake_account_pubkey: &Pubkey,\n\n stake_authority: SignerIndex,\n\n sign_only: bool,\n\n dump_transaction_message: bool,\n\n blockhash_query: &BlockhashQuery,\n\n nonce_account: Option<Pubkey>,\n\n nonce_authority: SignerIndex,\n\n memo: Option<&String>,\n\n fee_payer: SignerIndex,\n\n compute_unit_price: Option<&u64>,\n\n) -> ProcessResult {\n\n let fee_payer = config.signers[fee_payer];\n\n\n\n check_unique_pubkeys(\n\n (&fee_payer.pubkey(), \"fee-payer keypair\".to_string()),\n\n (stake_account_pubkey, \"stake_account\".to_string()),\n", "file_path": "cli/src/stake.rs", "rank": 56, "score": 247366.13475188697 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn process_vote_authorize(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n vote_account_pubkey: &Pubkey,\n\n new_authorized_pubkey: &Pubkey,\n\n vote_authorize: VoteAuthorize,\n\n authorized: SignerIndex,\n\n new_authorized: Option<SignerIndex>,\n\n sign_only: bool,\n\n dump_transaction_message: bool,\n\n blockhash_query: &BlockhashQuery,\n\n nonce_account: Option<Pubkey>,\n\n nonce_authority: SignerIndex,\n\n memo: Option<&String>,\n\n fee_payer: SignerIndex,\n\n compute_unit_price: Option<&u64>,\n\n) -> ProcessResult {\n\n let authorized = config.signers[authorized];\n\n let new_authorized_signer = new_authorized.map(|index| config.signers[index]);\n\n\n", "file_path": "cli/src/vote.rs", "rank": 57, "score": 247366.13475188697 }, { "content": "pub fn process_inflation_subcommand(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n inflation_subcommand: &InflationCliCommand,\n\n) -> ProcessResult {\n\n match inflation_subcommand {\n\n InflationCliCommand::Show => process_show(rpc_client, config),\n\n InflationCliCommand::Rewards(ref addresses, rewards_epoch) => {\n\n process_rewards(rpc_client, config, addresses, *rewards_epoch)\n\n }\n\n }\n\n}\n\n\n", "file_path": "cli/src/inflation.rs", "rank": 58, 
"score": 247366.13475188697 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn process_delegate_stake(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n stake_account_pubkey: &Pubkey,\n\n vote_account_pubkey: &Pubkey,\n\n stake_authority: SignerIndex,\n\n force: bool,\n\n sign_only: bool,\n\n dump_transaction_message: bool,\n\n blockhash_query: &BlockhashQuery,\n\n nonce_account: Option<Pubkey>,\n\n nonce_authority: SignerIndex,\n\n memo: Option<&String>,\n\n fee_payer: SignerIndex,\n\n redelegation_stake_account: Option<SignerIndex>,\n\n compute_unit_price: Option<&u64>,\n\n) -> ProcessResult {\n\n check_unique_pubkeys(\n\n (&config.signers[0].pubkey(), \"cli keypair\".to_string()),\n\n (stake_account_pubkey, \"stake_account_pubkey\".to_string()),\n", "file_path": "cli/src/stake.rs", "rank": 59, "score": 247366.13475188697 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn process_withdraw_stake(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n stake_account_pubkey: &Pubkey,\n\n destination_account_pubkey: &Pubkey,\n\n amount: SpendAmount,\n\n withdraw_authority: SignerIndex,\n\n custodian: Option<SignerIndex>,\n\n sign_only: bool,\n\n dump_transaction_message: bool,\n\n blockhash_query: &BlockhashQuery,\n\n nonce_account: Option<&Pubkey>,\n\n nonce_authority: SignerIndex,\n\n memo: Option<&String>,\n\n seed: Option<&String>,\n\n fee_payer: SignerIndex,\n\n compute_unit_price: Option<&u64>,\n\n) -> ProcessResult {\n\n let withdraw_authority = config.signers[withdraw_authority];\n\n let custodian = custodian.map(|index| config.signers[index]);\n", "file_path": "cli/src/stake.rs", "rank": 60, "score": 247366.13475188697 }, { "content": "pub fn process_ping(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n interval: &Duration,\n\n count: &Option<u64>,\n\n timeout: &Duration,\n\n fixed_blockhash: &Option<Hash>,\n\n print_timestamp: bool,\n\n compute_unit_price: Option<&u64>,\n\n) -> ProcessResult {\n\n let (signal_sender, 
signal_receiver) = unbounded();\n\n ctrlc::set_handler(move || {\n\n let _ = signal_sender.send(());\n\n })\n\n .expect(\"Error setting Ctrl-C handler\");\n\n\n\n let mut cli_pings = vec![];\n\n\n\n let mut submit_count = 0;\n\n let mut confirmed_count = 0;\n", "file_path": "cli/src/cluster_query.rs", "rank": 61, "score": 247366.13475188697 }, { "content": "pub fn process_catchup(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n node_pubkey: Option<Pubkey>,\n\n mut node_json_rpc_url: Option<String>,\n\n follow: bool,\n\n our_localhost_port: Option<u16>,\n\n log: bool,\n\n) -> ProcessResult {\n\n let sleep_interval = 5;\n\n\n\n let progress_bar = new_spinner_progress_bar();\n\n progress_bar.set_message(\"Connecting...\");\n\n\n\n if let Some(our_localhost_port) = our_localhost_port {\n\n let gussed_default = Some(format!(\"http://localhost:{our_localhost_port}\"));\n\n if node_json_rpc_url.is_some() && node_json_rpc_url != gussed_default {\n\n // go to new line to leave this message on console\n\n println!(\n\n \"Prefering explicitly given rpc ({}) as us, \\\n", "file_path": "cli/src/cluster_query.rs", "rank": 62, "score": 247366.13475188697 }, { "content": "/// is this a max-size append vec designed to be used as an ancient append vec?\n\npub fn is_ancient(storage: &AccountsFile) -> bool {\n\n match storage {\n\n AccountsFile::AppendVec(storage) => storage.capacity() >= get_ancient_append_vec_capacity(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use {\n\n super::*,\n\n crate::{\n\n account_info::AccountInfo,\n\n account_storage::meta::{AccountMeta, StoredAccountMeta, StoredMeta},\n\n accounts_db::{\n\n get_temp_accounts_paths,\n\n tests::{\n\n append_single_account_with_default_hash, compare_all_accounts,\n\n create_db_with_storages_and_index, create_storages_and_update_index,\n\n get_all_accounts, remove_account_for_tests, CAN_RANDOMLY_SHRINK_FALSE,\n\n },\n", "file_path": "runtime/src/ancient_append_vecs.rs", "rank": 63, "score": 
247117.76760567585 }, { "content": "/// Returns `true` of the given `Pubkey` is a sysvar account.\n\npub fn is_sysvar_id(id: &Pubkey) -> bool {\n\n ALL_IDS.iter().any(|key| key == id)\n\n}\n\n\n\n/// Declares an ID that implements [`SysvarId`].\n\n#[macro_export]\n\nmacro_rules! declare_sysvar_id(\n\n ($name:expr, $type:ty) => (\n\n $crate::declare_id!($name);\n\n\n\n impl $crate::sysvar::SysvarId for $type {\n\n fn id() -> $crate::pubkey::Pubkey {\n\n id()\n\n }\n\n\n\n fn check_id(pubkey: &$crate::pubkey::Pubkey) -> bool {\n\n check_id(pubkey)\n\n }\n\n }\n\n\n", "file_path": "sdk/program/src/sysvar/mod.rs", "rank": 64, "score": 247103.72156211222 }, { "content": "pub fn init_or_update(config_file: &str, is_init: bool, check_only: bool) -> Result<bool, String> {\n\n let mut config = Config::load(config_file)?;\n\n\n\n let semver_update_type = if is_init {\n\n SemverUpdateType::Fixed\n\n } else {\n\n SemverUpdateType::Patch\n\n };\n\n\n\n let (updated_version, download_url_and_sha256, release_dir) = if let Some(explicit_release) =\n\n &config.explicit_release\n\n {\n\n match explicit_release {\n\n ExplicitRelease::Semver(current_release_semver) => {\n\n let progress_bar = new_spinner_progress_bar();\n\n progress_bar.set_message(format!(\"{LOOKING_GLASS}Checking for updates...\"));\n\n\n\n let github_release = check_for_newer_github_release(\n\n current_release_semver,\n\n semver_update_type,\n", "file_path": "install/src/command.rs", "rank": 65, "score": 246804.35189658782 }, { "content": "#[must_use]\n\npub fn check_feature_activation(feature: &Pubkey, shred_slot: Slot, root_bank: &Bank) -> bool {\n\n match root_bank.feature_set.activated_slot(feature) {\n\n None => false,\n\n Some(feature_slot) => {\n\n let epoch_schedule = root_bank.epoch_schedule();\n\n let feature_epoch = epoch_schedule.get_epoch(feature_slot);\n\n let shred_epoch = epoch_schedule.get_epoch(shred_slot);\n\n feature_epoch < shred_epoch\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n 
use super::*;\n\n\n\n #[test]\n\n fn test_cluster_nodes_retransmit() {\n\n let mut rng = rand::thread_rng();\n\n let (nodes, stakes, cluster_info) = make_test_cluster(&mut rng, 1_000, None);\n", "file_path": "turbine/src/cluster_nodes.rs", "rank": 66, "score": 243408.47787337186 }, { "content": "/// Format data as hex.\n\n///\n\n/// If `data`'s length is greater than 0, add a field called \"data\" to `f`. The\n\n/// first 64 bytes of `data` is displayed; bytes after that are ignored.\n\npub fn debug_account_data(data: &[u8], f: &mut fmt::DebugStruct<'_, '_>) {\n\n let data_len = cmp::min(MAX_DEBUG_ACCOUNT_DATA, data.len());\n\n if data_len > 0 {\n\n f.field(\"data\", &Hex(&data[..data_len]));\n\n }\n\n}\n\n\n\npub(crate) struct Hex<'a>(pub(crate) &'a [u8]);\n\nimpl fmt::Debug for Hex<'_> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n for &byte in self.0 {\n\n write!(f, \"{byte:02x}\")?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "sdk/program/src/debug_account_data.rs", "rank": 67, "score": 243172.65331519395 }, { "content": "pub fn check_loader_id(id: &Pubkey) -> bool {\n\n bpf_loader::check_id(id)\n\n || bpf_loader_deprecated::check_id(id)\n\n || bpf_loader_upgradeable::check_id(id)\n\n}\n\n\n", "file_path": "programs/bpf_loader/src/lib.rs", "rank": 68, "score": 242953.84203792954 }, { "content": "pub fn is_builtin_key_or_sysvar(key: &Pubkey) -> bool {\n\n if MAYBE_BUILTIN_KEY_OR_SYSVAR[key.0[0] as usize] {\n\n return sysvar::is_sysvar_id(key) || BUILTIN_PROGRAMS_KEYS.contains(key);\n\n }\n\n false\n\n}\n\n\n", "file_path": "sdk/program/src/message/legacy.rs", "rank": 69, "score": 242953.84203792954 }, { "content": "#[cfg(feature = \"full\")]\n\npub fn new_rand<R: ?Sized>(rng: &mut R) -> Hash\n\nwhere\n\n R: rand::Rng,\n\n{\n\n let mut buf = [0u8; HASH_BYTES];\n\n rng.fill(&mut buf);\n\n Hash::new(&buf)\n\n}\n", "file_path": "sdk/src/hash.rs", "rank": 70, "score": 242242.09819221086 }, { "content": "pub fn should_take_full_snapshot(\n\n 
block_height: Slot,\n\n full_snapshot_archive_interval_slots: Slot,\n\n) -> bool {\n\n block_height % full_snapshot_archive_interval_slots == 0\n\n}\n\n\n", "file_path": "runtime/src/snapshot_utils.rs", "rank": 71, "score": 242093.20910895735 }, { "content": "pub fn max_entries_per_n_shred(\n\n entry: &Entry,\n\n num_shreds: u64,\n\n shred_data_size: Option<usize>,\n\n) -> u64 {\n\n // Default 32:32 erasure batches yields 64 shreds; log2(64) = 6.\n\n let merkle_proof_size = Some(6);\n\n let data_buffer_size = ShredData::capacity(merkle_proof_size).unwrap();\n\n let shred_data_size = shred_data_size.unwrap_or(data_buffer_size) as u64;\n\n let vec_size = bincode::serialized_size(&vec![entry]).unwrap();\n\n let entry_size = bincode::serialized_size(entry).unwrap();\n\n let count_size = vec_size - entry_size;\n\n\n\n (shred_data_size * num_shreds - count_size) / entry_size\n\n}\n\n\n", "file_path": "ledger/src/shred.rs", "rank": 72, "score": 242092.05117469686 }, { "content": "pub fn deserialize_parameters(\n\n transaction_context: &TransactionContext,\n\n instruction_context: &InstructionContext,\n\n copy_account_data: bool,\n\n buffer: &[u8],\n\n accounts_metadata: &[SerializedAccountMetadata],\n\n) -> Result<(), InstructionError> {\n\n let is_loader_deprecated = *instruction_context\n\n .try_borrow_last_program_account(transaction_context)?\n\n .get_owner()\n\n == bpf_loader_deprecated::id();\n\n let account_lengths = accounts_metadata.iter().map(|a| a.original_data_len);\n\n if is_loader_deprecated {\n\n deserialize_parameters_unaligned(\n\n transaction_context,\n\n instruction_context,\n\n copy_account_data,\n\n buffer,\n\n account_lengths,\n\n )\n", "file_path": "programs/bpf_loader/src/serialization.rs", "rank": 73, "score": 242086.06932135674 }, { "content": "pub fn check_account_for_fee_with_commitment(\n\n rpc_client: &RpcClient,\n\n account_pubkey: &Pubkey,\n\n message: &Message,\n\n commitment: CommitmentConfig,\n\n) -> Result<(), CliError> {\n\n 
check_account_for_multiple_fees_with_commitment(\n\n rpc_client,\n\n account_pubkey,\n\n &[message],\n\n commitment,\n\n )\n\n}\n\n\n", "file_path": "cli/src/checks.rs", "rank": 74, "score": 242075.0557468478 }, { "content": "pub fn check_account_for_balance_with_commitment(\n\n rpc_client: &RpcClient,\n\n account_pubkey: &Pubkey,\n\n balance: u64,\n\n commitment: CommitmentConfig,\n\n) -> ClientResult<bool> {\n\n let lamports = rpc_client\n\n .get_balance_with_commitment(account_pubkey, commitment)?\n\n .value;\n\n if lamports != 0 && lamports >= balance {\n\n return Ok(true);\n\n }\n\n Ok(false)\n\n}\n\n\n", "file_path": "cli/src/checks.rs", "rank": 75, "score": 242075.0557468478 }, { "content": "/// Process an ordered list of entries in parallel\n\n/// 1. In order lock accounts for each entry while the lock succeeds, up to a Tick entry\n\n/// 2. Process the locked group in parallel\n\n/// 3. Register the `Tick` if it's available\n\n/// 4. Update the leader scheduler, goto 1\n\n///\n\n/// This method is for use testing against a single Bank, and assumes `Bank::transaction_count()`\n\n/// represents the number of transactions executed in this Bank\n\npub fn process_entries_for_tests(\n\n bank: &Arc<Bank>,\n\n entries: Vec<Entry>,\n\n randomize: bool,\n\n transaction_status_sender: Option<&TransactionStatusSender>,\n\n replay_vote_sender: Option<&ReplayVoteSender>,\n\n) -> Result<()> {\n\n let verify_transaction = {\n\n let bank = bank.clone();\n\n move |versioned_tx: VersionedTransaction| -> Result<SanitizedTransaction> {\n\n bank.verify_transaction(versioned_tx, TransactionVerificationMode::FullVerification)\n\n }\n\n };\n\n\n\n let mut entry_starting_index: usize = bank.transaction_count().try_into().unwrap();\n\n let mut batch_timing = BatchExecutionTiming::default();\n\n let mut replay_entries: Vec<_> =\n\n entry::verify_transactions(entries, Arc::new(verify_transaction))?\n\n .into_iter()\n\n .map(|entry| {\n", "file_path": 
"ledger/src/blockstore_processor.rs", "rank": 76, "score": 242057.15663953844 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn process_stake_set_lockup(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n stake_account_pubkey: &Pubkey,\n\n lockup: &LockupArgs,\n\n new_custodian_signer: Option<SignerIndex>,\n\n custodian: SignerIndex,\n\n sign_only: bool,\n\n dump_transaction_message: bool,\n\n blockhash_query: &BlockhashQuery,\n\n nonce_account: Option<Pubkey>,\n\n nonce_authority: SignerIndex,\n\n memo: Option<&String>,\n\n fee_payer: SignerIndex,\n\n compute_unit_price: Option<&u64>,\n\n) -> ProcessResult {\n\n let recent_blockhash = blockhash_query.get_blockhash(rpc_client, config.commitment)?;\n\n let custodian = config.signers[custodian];\n\n\n\n let ixs = vec![if new_custodian_signer.is_some() {\n", "file_path": "cli/src/stake.rs", "rank": 77, "score": 242038.95522057047 }, { "content": "pub fn builtin_process_instruction(\n\n process_instruction: solana_sdk::entrypoint::ProcessInstruction,\n\n invoke_context: &mut InvokeContext,\n\n) -> Result<(), Box<dyn std::error::Error>> {\n\n set_invoke_context(invoke_context);\n\n\n\n let transaction_context = &invoke_context.transaction_context;\n\n let instruction_context = transaction_context.get_current_instruction_context()?;\n\n let instruction_data = instruction_context.get_instruction_data();\n\n let instruction_account_indices = 0..instruction_context.get_number_of_instruction_accounts();\n\n\n\n let log_collector = invoke_context.get_log_collector();\n\n let program_id = instruction_context.get_last_program_key(transaction_context)?;\n\n stable_log::program_invoke(\n\n &log_collector,\n\n program_id,\n\n invoke_context.get_stack_height(),\n\n );\n\n\n\n // Copy indices_in_instruction into a HashSet to ensure there are no duplicates\n", "file_path": "program-test/src/lib.rs", "rank": 78, "score": 242038.95522057047 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn 
process_create_stake_account(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n stake_account: SignerIndex,\n\n seed: &Option<String>,\n\n staker: &Option<Pubkey>,\n\n withdrawer: &Option<Pubkey>,\n\n withdrawer_signer: Option<SignerIndex>,\n\n lockup: &Lockup,\n\n amount: SpendAmount,\n\n sign_only: bool,\n\n dump_transaction_message: bool,\n\n blockhash_query: &BlockhashQuery,\n\n nonce_account: Option<&Pubkey>,\n\n nonce_authority: SignerIndex,\n\n memo: Option<&String>,\n\n fee_payer: SignerIndex,\n\n from: SignerIndex,\n\n compute_unit_price: Option<&u64>,\n\n) -> ProcessResult {\n", "file_path": "cli/src/stake.rs", "rank": 79, "score": 242038.95522057047 }, { "content": "pub fn process_withdraw_from_nonce_account(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n nonce_account: &Pubkey,\n\n nonce_authority: SignerIndex,\n\n memo: Option<&String>,\n\n destination_account_pubkey: &Pubkey,\n\n lamports: u64,\n\n compute_unit_price: Option<&u64>,\n\n) -> ProcessResult {\n\n let latest_blockhash = rpc_client.get_latest_blockhash()?;\n\n\n\n let nonce_authority = config.signers[nonce_authority];\n\n let ixs = vec![withdraw_nonce_account(\n\n nonce_account,\n\n &nonce_authority.pubkey(),\n\n destination_account_pubkey,\n\n lamports,\n\n )]\n\n .with_memo(memo)\n", "file_path": "cli/src/nonce.rs", "rank": 80, "score": 242038.95522057047 }, { "content": "pub fn process_largest_accounts(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n filter: Option<RpcLargestAccountsFilter>,\n\n) -> ProcessResult {\n\n let accounts = rpc_client\n\n .get_largest_accounts_with_config(RpcLargestAccountsConfig {\n\n commitment: Some(config.commitment),\n\n filter,\n\n })?\n\n .value;\n\n let largest_accounts = CliAccountBalances { accounts };\n\n Ok(config.output_format.formatted_string(&largest_accounts))\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 81, "score": 242038.95522057047 }, { "content": "pub fn process_get_block(\n\n rpc_client: 
&RpcClient,\n\n config: &CliConfig,\n\n slot: Option<Slot>,\n\n) -> ProcessResult {\n\n let slot = if let Some(slot) = slot {\n\n slot\n\n } else {\n\n rpc_client.get_slot_with_commitment(CommitmentConfig::finalized())?\n\n };\n\n\n\n let encoded_confirmed_block = rpc_client\n\n .get_block_with_config(\n\n slot,\n\n RpcBlockConfig {\n\n encoding: Some(UiTransactionEncoding::Base64),\n\n commitment: Some(CommitmentConfig::confirmed()),\n\n max_supported_transaction_version: Some(0),\n\n ..RpcBlockConfig::default()\n\n },\n\n )?\n\n .into();\n\n let cli_block = CliBlock {\n\n encoded_confirmed_block,\n\n slot,\n\n };\n\n Ok(config.output_format.formatted_string(&cli_block))\n\n}\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 82, "score": 242038.95522057047 }, { "content": "pub fn process_transaction_history(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n address: &Pubkey,\n\n before: Option<Signature>,\n\n until: Option<Signature>,\n\n limit: usize,\n\n show_transactions: bool,\n\n) -> ProcessResult {\n\n let results = rpc_client.get_signatures_for_address_with_config(\n\n address,\n\n GetConfirmedSignaturesForAddress2Config {\n\n before,\n\n until,\n\n limit: Some(limit),\n\n commitment: Some(CommitmentConfig::confirmed()),\n\n },\n\n )?;\n\n\n\n if !show_transactions {\n", "file_path": "cli/src/cluster_query.rs", "rank": 83, "score": 242038.95522057047 }, { "content": "pub fn test_process_blockstore(\n\n genesis_config: &GenesisConfig,\n\n blockstore: &Blockstore,\n\n opts: &ProcessOptions,\n\n exit: Arc<AtomicBool>,\n\n) -> (Arc<RwLock<BankForks>>, LeaderScheduleCache) {\n\n // Spin up a thread to be a fake Accounts Background Service. 
Need to intercept and handle all\n\n // EpochAccountsHash requests so future rooted banks do not hang in Bank::freeze() waiting for\n\n // an in-flight EAH calculation to complete.\n\n let (snapshot_request_sender, snapshot_request_receiver) = crossbeam_channel::unbounded();\n\n let abs_request_sender = AbsRequestSender::new(snapshot_request_sender);\n\n let bg_exit = Arc::new(AtomicBool::new(false));\n\n let bg_thread = {\n\n let exit = Arc::clone(&bg_exit);\n\n std::thread::spawn(move || {\n\n while !exit.load(Relaxed) {\n\n snapshot_request_receiver\n\n .try_iter()\n\n .filter(|snapshot_request| {\n\n snapshot_request.request_type == SnapshotRequestType::EpochAccountsHash\n", "file_path": "ledger/src/blockstore_processor.rs", "rank": 84, "score": 242038.95522057047 }, { "content": "pub fn process_create_nonce_account(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n nonce_account: SignerIndex,\n\n seed: Option<String>,\n\n nonce_authority: Option<Pubkey>,\n\n memo: Option<&String>,\n\n amount: SpendAmount,\n\n compute_unit_price: Option<&u64>,\n\n) -> ProcessResult {\n\n let nonce_account_pubkey = config.signers[nonce_account].pubkey();\n\n let nonce_account_address = if let Some(ref seed) = seed {\n\n Pubkey::create_with_seed(&nonce_account_pubkey, seed, &system_program::id())?\n\n } else {\n\n nonce_account_pubkey\n\n };\n\n\n\n check_unique_pubkeys(\n\n (&config.signers[0].pubkey(), \"cli keypair\".to_string()),\n\n (&nonce_account_address, \"nonce_account\".to_string()),\n", "file_path": "cli/src/nonce.rs", "rank": 85, "score": 242038.95522057047 }, { "content": "pub fn process_calculate_rent(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n data_length: usize,\n\n use_lamports_unit: bool,\n\n) -> ProcessResult {\n\n let rent_account = rpc_client.get_account(&sysvar::rent::id())?;\n\n let rent: Rent = rent_account.deserialize_data()?;\n\n let rent_exempt_minimum_lamports = rent.minimum_balance(data_length);\n\n let cli_rent_calculation = 
CliRentCalculation {\n\n lamports_per_byte_year: 0,\n\n lamports_per_epoch: 0,\n\n rent_exempt_minimum_lamports,\n\n use_lamports_unit,\n\n };\n\n\n\n Ok(config.output_format.formatted_string(&cli_rent_calculation))\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "cli/src/cluster_query.rs", "rank": 86, "score": 242038.95522057047 }, { "content": "pub fn process_show_validators(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n use_lamports_unit: bool,\n\n validators_sort_order: CliValidatorsSortOrder,\n\n validators_reverse_sort: bool,\n\n number_validators: bool,\n\n keep_unstaked_delinquents: bool,\n\n delinquent_slot_distance: Option<Slot>,\n\n) -> ProcessResult {\n\n let progress_bar = new_spinner_progress_bar();\n\n progress_bar.set_message(\"Fetching vote accounts...\");\n\n let epoch_info = rpc_client.get_epoch_info()?;\n\n let vote_accounts = rpc_client.get_vote_accounts_with_config(RpcGetVoteAccountsConfig {\n\n keep_unstaked_delinquents: Some(keep_unstaked_delinquents),\n\n delinquent_slot_distance,\n\n ..RpcGetVoteAccountsConfig::default()\n\n })?;\n\n\n\n progress_bar.set_message(\"Fetching block production...\");\n", "file_path": "cli/src/cluster_query.rs", "rank": 87, "score": 242038.95522057047 }, { "content": "pub fn process_show_nonce_account(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n nonce_account_pubkey: &Pubkey,\n\n use_lamports_unit: bool,\n\n) -> ProcessResult {\n\n let nonce_account =\n\n get_account_with_commitment(rpc_client, nonce_account_pubkey, config.commitment)?;\n\n let print_account = |data: Option<&nonce::state::Data>| {\n\n let mut nonce_account = CliNonceAccount {\n\n balance: nonce_account.lamports,\n\n minimum_balance_for_rent_exemption: rpc_client\n\n .get_minimum_balance_for_rent_exemption(State::size())?,\n\n use_lamports_unit,\n\n ..CliNonceAccount::default()\n\n };\n\n if let Some(data) = data {\n\n nonce_account.nonce = Some(data.blockhash().to_string());\n\n nonce_account.lamports_per_signature = 
Some(data.fee_calculator.lamports_per_signature);\n\n nonce_account.authority = Some(data.authority.to_string());\n\n }\n\n\n\n Ok(config.output_format.formatted_string(&nonce_account))\n\n };\n\n match state_from_account(&nonce_account)? {\n\n State::Uninitialized => print_account(None),\n\n State::Initialized(ref data) => print_account(Some(data)),\n\n }\n\n}\n\n\n", "file_path": "cli/src/nonce.rs", "rank": 88, "score": 242038.95522057047 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn process_create_vote_account(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n vote_account: SignerIndex,\n\n seed: &Option<String>,\n\n identity_account: SignerIndex,\n\n authorized_voter: &Option<Pubkey>,\n\n authorized_withdrawer: Pubkey,\n\n commission: u8,\n\n sign_only: bool,\n\n dump_transaction_message: bool,\n\n blockhash_query: &BlockhashQuery,\n\n nonce_account: Option<&Pubkey>,\n\n nonce_authority: SignerIndex,\n\n memo: Option<&String>,\n\n fee_payer: SignerIndex,\n\n compute_unit_price: Option<&u64>,\n\n) -> ProcessResult {\n\n let vote_account = config.signers[vote_account];\n\n let vote_account_pubkey = vote_account.pubkey();\n", "file_path": "cli/src/vote.rs", "rank": 89, "score": 242038.95522057047 }, { "content": "pub fn process_show_stakes(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n use_lamports_unit: bool,\n\n vote_account_pubkeys: Option<&[Pubkey]>,\n\n withdraw_authority_pubkey: Option<&Pubkey>,\n\n) -> ProcessResult {\n\n use crate::stake::build_stake_state;\n\n\n\n let progress_bar = new_spinner_progress_bar();\n\n progress_bar.set_message(\"Fetching stake accounts...\");\n\n\n\n let mut program_accounts_config = RpcProgramAccountsConfig {\n\n account_config: RpcAccountInfoConfig {\n\n encoding: Some(solana_account_decoder::UiAccountEncoding::Base64),\n\n ..RpcAccountInfoConfig::default()\n\n },\n\n ..RpcProgramAccountsConfig::default()\n\n };\n\n\n", "file_path": "cli/src/cluster_query.rs", "rank": 90, "score": 
242038.95522057047 }, { "content": "pub fn process_instruction(\n\n invoke_context: &mut InvokeContext,\n\n _arg0: u64,\n\n _arg1: u64,\n\n _arg2: u64,\n\n _arg3: u64,\n\n _arg4: u64,\n\n _memory_mapping: &mut MemoryMapping,\n\n result: &mut ProgramResult,\n\n) {\n\n *result = process_instruction_inner(invoke_context).into();\n\n}\n\n\n", "file_path": "programs/bpf_loader/src/lib.rs", "rank": 91, "score": 242038.95522057047 }, { "content": "pub fn process_show_stake_history(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n use_lamports_unit: bool,\n\n limit_results: usize,\n\n) -> ProcessResult {\n\n let stake_history_account = rpc_client.get_account(&stake_history::id())?;\n\n let stake_history =\n\n from_account::<StakeHistory, _>(&stake_history_account).ok_or_else(|| {\n\n CliError::RpcRequestError(\"Failed to deserialize stake history\".to_string())\n\n })?;\n\n\n\n let limit_results = match config.output_format {\n\n OutputFormat::Json | OutputFormat::JsonCompact => std::usize::MAX,\n\n _ => {\n\n if limit_results == 0 {\n\n std::usize::MAX\n\n } else {\n\n limit_results\n\n }\n", "file_path": "cli/src/stake.rs", "rank": 92, "score": 242038.95522057047 }, { "content": "pub fn process_leader_schedule(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n epoch: Option<Epoch>,\n\n) -> ProcessResult {\n\n let epoch_info = rpc_client.get_epoch_info()?;\n\n let epoch = epoch.unwrap_or(epoch_info.epoch);\n\n if epoch > (epoch_info.epoch + 1) {\n\n return Err(format!(\"Epoch {epoch} is more than one epoch in the future\").into());\n\n }\n\n\n\n let epoch_schedule = rpc_client.get_epoch_schedule()?;\n\n let first_slot_in_epoch = epoch_schedule.get_first_slot_in_epoch(epoch);\n\n\n\n let leader_schedule = rpc_client.get_leader_schedule(Some(first_slot_in_epoch))?;\n\n if leader_schedule.is_none() {\n\n return Err(\n\n format!(\"Unable to fetch leader schedule for slot {first_slot_in_epoch}\").into(),\n\n );\n\n }\n", "file_path": "cli/src/cluster_query.rs", 
"rank": 93, "score": 242038.95522057047 }, { "content": "pub fn process_stake_minimum_delegation(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n use_lamports_unit: bool,\n\n) -> ProcessResult {\n\n let stake_minimum_delegation =\n\n rpc_client.get_stake_minimum_delegation_with_commitment(config.commitment)?;\n\n\n\n let stake_minimum_delegation_output = CliBalance {\n\n lamports: stake_minimum_delegation,\n\n config: BuildBalanceMessageConfig {\n\n use_lamports_unit,\n\n show_unit: true,\n\n trim_trailing_zeros: true,\n\n },\n\n };\n\n\n\n Ok(config\n\n .output_format\n\n .formatted_string(&stake_minimum_delegation_output))\n", "file_path": "cli/src/stake.rs", "rank": 94, "score": 242038.95522057047 }, { "content": "pub fn process_authorize_nonce_account(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n nonce_account: &Pubkey,\n\n nonce_authority: SignerIndex,\n\n memo: Option<&String>,\n\n new_authority: &Pubkey,\n\n compute_unit_price: Option<&u64>,\n\n) -> ProcessResult {\n\n let latest_blockhash = rpc_client.get_latest_blockhash()?;\n\n\n\n let nonce_authority = config.signers[nonce_authority];\n\n let ixs = vec![authorize_nonce_account(\n\n nonce_account,\n\n &nonce_authority.pubkey(),\n\n new_authority,\n\n )]\n\n .with_memo(memo)\n\n .with_compute_unit_price(compute_unit_price);\n\n let message = Message::new(&ixs, Some(&config.signers[0].pubkey()));\n", "file_path": "cli/src/nonce.rs", "rank": 95, "score": 242038.95522057047 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn process_deactivate_stake_account(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n stake_account_pubkey: &Pubkey,\n\n stake_authority: SignerIndex,\n\n sign_only: bool,\n\n deactivate_delinquent: bool,\n\n dump_transaction_message: bool,\n\n blockhash_query: &BlockhashQuery,\n\n nonce_account: Option<Pubkey>,\n\n nonce_authority: SignerIndex,\n\n memo: Option<&String>,\n\n seed: Option<&String>,\n\n fee_payer: SignerIndex,\n\n compute_unit_price: 
Option<&u64>,\n\n) -> ProcessResult {\n\n let recent_blockhash = blockhash_query.get_blockhash(rpc_client, config.commitment)?;\n\n\n\n let stake_account_address = if let Some(seed) = seed {\n\n Pubkey::create_with_seed(stake_account_pubkey, seed, &stake::program::id())?\n", "file_path": "cli/src/stake.rs", "rank": 96, "score": 242038.95522057047 }, { "content": "pub fn process_show_stake_account(\n\n rpc_client: &RpcClient,\n\n config: &CliConfig,\n\n stake_account_address: &Pubkey,\n\n use_lamports_unit: bool,\n\n with_rewards: Option<usize>,\n\n) -> ProcessResult {\n\n let stake_account = rpc_client.get_account(stake_account_address)?;\n\n if stake_account.owner != stake::program::id() {\n\n return Err(CliError::RpcRequestError(format!(\n\n \"{stake_account_address:?} is not a stake account\",\n\n ))\n\n .into());\n\n }\n\n match stake_account.state() {\n\n Ok(stake_state) => {\n\n let stake_history_account = rpc_client.get_account(&stake_history::id())?;\n\n let stake_history = from_account(&stake_history_account).ok_or_else(|| {\n\n CliError::RpcRequestError(\"Failed to deserialize stake history\".to_string())\n\n })?;\n", "file_path": "cli/src/stake.rs", "rank": 97, "score": 242038.95522057047 }, { "content": " pub commitment: Option<CommitmentConfig>,\n\n}\n\n\n\n#[deprecated(since = \"1.7.0\", note = \"Please use RpcBlockConfig instead\")]\n\n#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct RpcConfirmedBlockConfig {\n\n pub encoding: Option<UiTransactionEncoding>,\n\n pub transaction_details: Option<TransactionDetails>,\n\n pub rewards: Option<bool>,\n\n #[serde(flatten)]\n\n pub commitment: Option<CommitmentConfig>,\n\n}\n\n\n\nimpl EncodingConfig for RpcConfirmedBlockConfig {\n\n fn new_with_encoding(encoding: &Option<UiTransactionEncoding>) -> Self {\n\n Self {\n\n encoding: *encoding,\n\n ..Self::default()\n\n }\n", "file_path": "rpc-client-api/src/deprecated_config.rs", 
"rank": 99, "score": 86.65535174461493 } ]
Rust
src/process.rs
zyc801208/rexpect
13d91fff7196ed0bdccf5a03f369cd04153ea0c9
use crate::errors::*; use nix; use nix::fcntl::{open, OFlag}; use nix::libc::{STDERR_FILENO, STDIN_FILENO, STDOUT_FILENO}; use nix::pty::{grantpt, posix_openpt, unlockpt, PtyMaster}; pub use nix::sys::{signal, wait}; use nix::sys::{stat, termios}; use nix::unistd::{dup, dup2, fork, setsid, ForkResult, Pid}; use std; use std::fs::File; use std::os::unix::io::{AsRawFd, FromRawFd}; use std::os::unix::process::CommandExt; use std::process::Command; use std::{thread, time}; pub struct PtyProcess { pub pty: PtyMaster, pub child_pid: Pid, kill_timeout: Option<time::Duration>, } #[cfg(target_os = "linux")] use nix::pty::ptsname_r; #[cfg(target_os = "macos")] fn ptsname_r(fd: &PtyMaster) -> nix::Result<String> { use nix::libc::{ioctl, TIOCPTYGNAME}; use std::ffi::CStr; let mut buf: [i8; 128] = [0; 128]; unsafe { match ioctl(fd.as_raw_fd(), TIOCPTYGNAME as u64, &mut buf) { 0 => { let res = CStr::from_ptr(buf.as_ptr()).to_string_lossy().into_owned(); Ok(res) } _ => Err(nix::Error::last()), } } } impl PtyProcess { pub fn new(mut command: Command) -> Result<Self> { || -> nix::Result<Self> { let master_fd = posix_openpt(OFlag::O_RDWR)?; grantpt(&master_fd)?; unlockpt(&master_fd)?; let slave_name = ptsname_r(&master_fd)?; match fork()? 
{ ForkResult::Child => { setsid()?; let slave_fd = open( std::path::Path::new(&slave_name), OFlag::O_RDWR, stat::Mode::empty(), )?; dup2(slave_fd, STDIN_FILENO)?; dup2(slave_fd, STDOUT_FILENO)?; dup2(slave_fd, STDERR_FILENO)?; let mut flags = termios::tcgetattr(STDIN_FILENO)?; flags.local_flags &= !termios::LocalFlags::ECHO; termios::tcsetattr(STDIN_FILENO, termios::SetArg::TCSANOW, &flags)?; command.exec(); Err(nix::Error::last()) } ForkResult::Parent { child: child_pid } => Ok(PtyProcess { pty: master_fd, child_pid: child_pid, kill_timeout: None, }), } }() .chain_err(|| format!("could not execute {:?}", command)) } pub fn get_file_handle(&self) -> File { let fd = dup(self.pty.as_raw_fd()).unwrap(); unsafe { File::from_raw_fd(fd) } } pub fn set_kill_timeout(&mut self, timeout_ms: Option<u64>) { self.kill_timeout = timeout_ms.and_then(|millis| Some(time::Duration::from_millis(millis))); } pub fn status(&self) -> Option<wait::WaitStatus> { if let Ok(status) = wait::waitpid(self.child_pid, Some(wait::WaitPidFlag::WNOHANG)) { Some(status) } else { None } } pub fn wait(&self) -> Result<wait::WaitStatus> { wait::waitpid(self.child_pid, None).chain_err(|| "wait: cannot read status") } pub fn exit(&mut self) -> Result<wait::WaitStatus> { self.kill(signal::SIGTERM) } pub fn signal(&mut self, sig: signal::Signal) -> Result<()> { signal::kill(self.child_pid, sig).chain_err(|| "failed to send signal to process")?; Ok(()) } pub fn kill(&mut self, sig: signal::Signal) -> Result<wait::WaitStatus> { let start = time::Instant::now(); loop { match signal::kill(self.child_pid, sig) { Ok(_) => {} Err(nix::Error::Sys(nix::errno::Errno::ESRCH)) => { return Ok(wait::WaitStatus::Exited(Pid::from_raw(0), 0)) } Err(e) => return Err(format!("kill resulted in error: {:?}", e).into()), } match self.status() { Some(status) if status != wait::WaitStatus::StillAlive => return Ok(status), Some(_) | None => thread::sleep(time::Duration::from_millis(100)), } if let Some(timeout) = self.kill_timeout 
{ if start.elapsed() > timeout { signal::kill(self.child_pid, signal::Signal::SIGKILL).chain_err(|| "")? } } } } } impl Drop for PtyProcess { fn drop(&mut self) { match self.status() { Some(wait::WaitStatus::StillAlive) => { self.exit().expect("cannot exit"); } _ => {} } } } #[cfg(test)] mod tests { use super::*; use nix::sys::{signal, wait}; use std::io::prelude::*; use std::io::{BufReader, LineWriter}; #[test] fn test_cat() { || -> std::io::Result<()> { let process = PtyProcess::new(Command::new("cat")).expect("could not execute cat"); let f = process.get_file_handle(); let mut writer = LineWriter::new(&f); let mut reader = BufReader::new(&f); writer.write(b"hello cat\n")?; let mut buf = String::new(); reader.read_line(&mut buf)?; assert_eq!(buf, "hello cat\r\n"); thread::sleep(time::Duration::from_millis(100)); writer.write_all(&[3])?; writer.flush()?; let should = wait::WaitStatus::Signaled(process.child_pid, signal::Signal::SIGINT, false); assert_eq!(should, wait::waitpid(process.child_pid, None).unwrap()); Ok(()) }() .unwrap_or_else(|e| panic!("test_cat failed: {}", e)); } }
use crate::errors::*; use nix; use nix::fcntl::{open, OFlag}; use nix::libc::{STDERR_FILENO, STDIN_FILENO, STDOUT_FILENO}; use nix::pty::{grantpt, posix_openpt, unlockpt, PtyMaster}; pub use nix::sys::{signal, wait}; use nix::sys::{stat, termios}; use nix::unistd::{dup, dup2, fork, setsid, ForkResult, Pid}; use std; use std::fs::File; use std::os::unix::io::{AsRawFd, FromRawFd}; use std::os::unix::process::CommandExt; use std::process::Command; use std::{thread, time}; pub struct PtyProcess { pub pty: PtyMaster, pub child_pid: Pid, kill_timeout: Option<time::Duration>, } #[cfg(target_os = "linux")] use nix::pty::ptsname_r; #[cfg(target_os = "macos")] fn ptsname_r(fd: &PtyMaster) -> nix::Result<String> { use nix::libc::{ioctl, TIOCPTYGNAME}; use std::ffi::CStr; let mut buf: [i8; 128] = [0; 128]; unsafe { match ioctl(fd.as_raw_fd(), TIOCPTYGNAME as u64, &mut buf) { 0 => { let res = CStr::from_ptr(buf.as_ptr()).to_string_lossy().into_owned(); Ok(res) } _ => Err(nix::Error::last()), } } } impl PtyProcess { pub fn new(mut command: Command) -> Result<Self> { || -> nix::Result<Self> { let master_fd = posix_openpt(OFlag::O_RDWR)?; grantpt(&master_fd)?; unlockpt(&master_fd)?; let slave_name = ptsname_r(&master_fd)?; match fork()? 
{ ForkResult::Child => { setsid()?; let slave_fd = open( std::path::Path::new(&slave_name), OFlag::O_RDWR, stat::Mode::empty(), )?; dup2(slave_fd, STDIN_FILENO)?; dup2(slave_fd, STDOUT_FILENO)?; dup2(slave_fd, STDERR_FILENO)?; let mut flags = termios::tcgetattr(STDIN_FILENO)?; flags.local_flags &= !termios::LocalFlags::ECHO; termios::tcsetattr(STDIN_FILENO, termios::SetArg::TCSANOW, &flags)?; command.exec(); Err(nix::Error::last()) } ForkResult::Parent { child: child_pid } => Ok(PtyProcess { pty: master_fd, child_pid: child_pid, kill_timeout: None, }), } }() .chain_err(|| format!("could not execute {:?}", command)) } pub fn get_file_handle(&self) -> File { let fd = dup(self.pty.as_raw_fd()).unwrap(); unsafe { File::from_raw_fd(fd) } } pub fn set_kill_timeout(&mut self, timeout_ms: Option<u64>) { self.kill_timeout = timeout_ms.and_then(|millis| Some(time::Duration::from_millis(millis))); } pub fn status(&self) -> Option<wait::WaitStatus> { if let Ok(status) = wait::waitpid(self.child_pid, Some(wait::WaitPidFlag::WNOHANG)) { Some(status) } else { None } } pub fn wait(&self) -> Result<wait::WaitStatus> { wait::waitpid(self.child_pid, None).chain_err(|| "wait: cannot read status") } pub fn exit(&mut self) -> Result<wait::WaitStatus> { self.kill(signal::SIGTERM) } pub fn signal(&mut self, sig: signal::Signal) -> Result<()> { signal::kill(self.child_pid, sig).chain_err(|| "failed to send signal to process")?; Ok(()) }
} impl Drop for PtyProcess { fn drop(&mut self) { match self.status() { Some(wait::WaitStatus::StillAlive) => { self.exit().expect("cannot exit"); } _ => {} } } } #[cfg(test)] mod tests { use super::*; use nix::sys::{signal, wait}; use std::io::prelude::*; use std::io::{BufReader, LineWriter}; #[test] fn test_cat() { || -> std::io::Result<()> { let process = PtyProcess::new(Command::new("cat")).expect("could not execute cat"); let f = process.get_file_handle(); let mut writer = LineWriter::new(&f); let mut reader = BufReader::new(&f); writer.write(b"hello cat\n")?; let mut buf = String::new(); reader.read_line(&mut buf)?; assert_eq!(buf, "hello cat\r\n"); thread::sleep(time::Duration::from_millis(100)); writer.write_all(&[3])?; writer.flush()?; let should = wait::WaitStatus::Signaled(process.child_pid, signal::Signal::SIGINT, false); assert_eq!(should, wait::waitpid(process.child_pid, None).unwrap()); Ok(()) }() .unwrap_or_else(|e| panic!("test_cat failed: {}", e)); } }
pub fn kill(&mut self, sig: signal::Signal) -> Result<wait::WaitStatus> { let start = time::Instant::now(); loop { match signal::kill(self.child_pid, sig) { Ok(_) => {} Err(nix::Error::Sys(nix::errno::Errno::ESRCH)) => { return Ok(wait::WaitStatus::Exited(Pid::from_raw(0), 0)) } Err(e) => return Err(format!("kill resulted in error: {:?}", e).into()), } match self.status() { Some(status) if status != wait::WaitStatus::StillAlive => return Ok(status), Some(_) | None => thread::sleep(time::Duration::from_millis(100)), } if let Some(timeout) = self.kill_timeout { if start.elapsed() > timeout { signal::kill(self.child_pid, signal::Signal::SIGKILL).chain_err(|| "")? } } } }
function_block-full_function
[ { "content": "/// See `spawn`\n\npub fn spawn_command(command: Command, timeout_ms: Option<u64>) -> Result<PtySession> {\n\n let commandname = format!(\"{:?}\", &command);\n\n let mut process = PtyProcess::new(command).chain_err(|| \"couldn't start process\")?;\n\n process.set_kill_timeout(timeout_ms);\n\n\n\n PtySession::new(process, timeout_ms, commandname)\n\n}\n\n\n\n/// A repl session: e.g. bash or the python shell:\n\n/// You have a prompt where a user inputs commands and the shell\n\n/// executes it and writes some output\n\npub struct PtyReplSession {\n\n /// the prompt, used for `wait_for_prompt`, e.g. \">>> \" for python\n\n pub prompt: String,\n\n\n\n /// the pty_session you prepared before (initiating the shell, maybe set a custom prompt, etc.)\n\n /// see `spawn_bash` for an example\n\n pub pty_session: PtySession,\n\n\n\n /// if set, then the quit_command is called when this object is dropped\n", "file_path": "src/session.rs", "rank": 0, "score": 152662.9058476135 }, { "content": "/// Spawn bash in a pty session, run programs and expect output\n\n///\n\n///\n\n/// The difference to `spawn` and `spawn_command` is:\n\n///\n\n/// - spawn_bash starts bash with a custom rcfile which guarantees\n\n/// a certain prompt\n\n/// - the PtyBashSession also provides `wait_for_prompt` and `execute`\n\n///\n\n/// timeout: the duration until which `exp_*` returns a timeout error, or None\n\n/// additionally, when dropping the bash prompt while bash is still blocked by a program\n\n/// (e.g. `sleep 9999`) then the timeout is used as a timeout before a `kill -9` is issued\n\n/// at the bash command. Use a timeout whenever possible because it makes\n\n/// debugging a lot easier (otherwise the program just hangs and you\n\n/// don't know where)\n\n///\n\n/// bash is started with echo off. That means you don't need to \"read back\"\n\n/// what you wrote to bash. 
But what you need to do is a `wait_for_prompt`\n\n/// after a process finished.\n\n///\n\n/// Also: if you start a program you should use `execute` and not `send_line`.\n\n///\n\n/// For an example see the README\n\npub fn spawn_bash(timeout: Option<u64>) -> Result<PtyReplSession> {\n\n // unfortunately working with a temporary tmpfile is the only\n\n // way to guarantee that we are \"in step\" with the prompt\n\n // all other attempts were futile, especially since we cannot\n\n // wait for the first prompt since we don't know what .bashrc\n\n // would set as PS1 and we cannot know when is the right time\n\n // to set the new PS1\n\n let mut rcfile = tempfile::NamedTempFile::new().unwrap();\n\n rcfile\n\n .write(\n\n b\"include () { [[ -f \\\"$1\\\" ]] && source \\\"$1\\\"; }\\n\\\n\n include /etc/bash.bashrc\\n\\\n\n include ~/.bashrc\\n\\\n\n PS1=\\\"~~~~\\\"\\n\\\n\n unset PROMPT_COMMAND\\n\",\n\n )\n\n .expect(\"cannot write to tmpfile\");\n\n let mut c = Command::new(\"bash\");\n\n c.args(&[\n\n \"--rcfile\",\n", "file_path": "src/session.rs", "rank": 2, "score": 118576.65490599597 }, { "content": "/// Spawn the python shell\n\n///\n\n/// This is just a proof of concept implementation (and serves for documentation purposes)\n\npub fn spawn_python(timeout: Option<u64>) -> Result<PtyReplSession> {\n\n spawn_command(Command::new(\"python\"), timeout).and_then(|p| {\n\n Ok(PtyReplSession {\n\n prompt: \">>> \".to_string(),\n\n pty_session: p,\n\n quit_command: Some(\"exit()\".to_string()),\n\n echo_on: true,\n\n })\n\n })\n\n}\n\n\n", "file_path": "src/session.rs", "rank": 3, "score": 118561.8213985308 }, { "content": "/// Start command in background in a pty session (pty fork) and return a struct\n\n/// with writer and buffered reader (for unblocking reads).\n\n///\n\n/// #Arguments:\n\n///\n\n/// - `program`: This is split at spaces and turned into a `process::Command`\n\n/// if you wish more control over this, use `spawn_command`\n\n/// - `timeout`: If Some: 
all `exp_*` commands time out after x milliseconds, if None: never times\n\n/// out.\n\n/// It's highly recommended to put a timeout there, as otherwise in case of\n\n/// a problem the program just hangs instead of exiting with an\n\n/// error message indicating where it stopped.\n\n/// For automation 30'000 (30s, the default in pexpect) is a good value.\n\npub fn spawn(program: &str, timeout_ms: Option<u64>) -> Result<PtySession> {\n\n if program.is_empty() {\n\n return Err(ErrorKind::EmptyProgramName.into());\n\n }\n\n\n\n let mut parts = tokenize_command(program);\n\n let prog = parts.remove(0);\n\n let mut command = Command::new(prog);\n\n command.args(parts);\n\n spawn_command(command, timeout_ms)\n\n}\n\n\n", "file_path": "src/session.rs", "rank": 4, "score": 111467.76583250417 }, { "content": "/// Spawn a REPL from a stream\n\npub fn spawn_stream<R: Read + Send + 'static, W: Write>(\n\n reader: R,\n\n writer: W,\n\n timeout_ms: Option<u64>,\n\n) -> StreamSession<W> {\n\n StreamSession::new(reader, writer, timeout_ms)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_read_line() {\n\n || -> Result<()> {\n\n let mut s = spawn(\"cat\", Some(1000))?;\n\n s.send_line(\"hans\")?;\n\n assert_eq!(\"hans\", s.read_line()?);\n\n let should = crate::process::wait::WaitStatus::Signaled(\n\n s.process.child_pid,\n", "file_path": "src/session.rs", "rank": 5, "score": 106674.37119223525 }, { "content": "fn run() -> Result<()> {\n\n let mut p = spawn_bash(Some(2000))?;\n\n\n\n // case 1: wait until program is done\n\n p.send_line(\"hostname\")?;\n\n let hostname = p.read_line()?;\n\n p.wait_for_prompt()?; // go sure `hostname` is really done\n\n println!(\"Current hostname: {}\", hostname);\n\n\n\n // case 2: wait until done, only extract a few infos\n\n p.send_line(\"wc /etc/passwd\")?;\n\n // `exp_regex` returns both string-before-match and match itself, discard first\n\n let (_, lines) = p.exp_regex(\"[0-9]+\")?;\n\n let (_, words) = 
p.exp_regex(\"[0-9]+\")?;\n\n let (_, bytes) = p.exp_regex(\"[0-9]+\")?;\n\n p.wait_for_prompt()?; // go sure `wc` is really done\n\n println!(\n\n \"/etc/passwd has {} lines, {} words, {} chars\",\n\n lines, words, bytes\n\n );\n", "file_path": "examples/bash_read.rs", "rank": 6, "score": 86421.15110277136 }, { "content": "fn ed_session() -> Result<PtyReplSession> {\n\n let mut ed = PtyReplSession {\n\n // for `echo_on` you need to figure that out by trial and error.\n\n // For bash and python repl it is false\n\n echo_on: false,\n\n\n\n // used for `wait_for_prompt()`\n\n prompt: \"> \".to_string(),\n\n pty_session: spawn(\"/bin/ed -p '> '\", Some(2000))?,\n\n // command which is sent when the instance of this struct is dropped\n\n // in the below example this is not needed, but if you don't explicitly\n\n // exit a REPL then rexpect tries to send a SIGTERM and depending on the repl\n\n // this does not end the repl and would end up in an error\n\n quit_command: Some(\"Q\".to_string()),\n\n };\n\n ed.wait_for_prompt()?;\n\n Ok(ed)\n\n}\n\n\n", "file_path": "examples/repl.rs", "rank": 7, "score": 75903.17775100083 }, { "content": "fn do_ftp() -> Result<()> {\n\n let mut p = spawn(\"ftp speedtest.tele2.net\", Some(2000))?;\n\n p.exp_regex(\"Name \\\\(.*\\\\):\")?;\n\n p.send_line(\"anonymous\")?;\n\n p.exp_string(\"Password\")?;\n\n p.send_line(\"test\")?;\n\n p.exp_string(\"ftp>\")?;\n\n p.send_line(\"cd upload\")?;\n\n p.exp_string(\"successfully changed.\\r\\nftp>\")?;\n\n p.send_line(\"pwd\")?;\n\n p.exp_regex(\"[0-9]+ \\\"/upload\\\"\")?;\n\n p.send_line(\"exit\")?;\n\n p.exp_eof()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/ftp.rs", "rank": 8, "score": 67148.49914211265 }, { "content": "fn run() -> Result<()> {\n\n let mut p = spawn_bash(Some(1000))?;\n\n p.execute(\"ping 8.8.8.8\", \"bytes\")?;\n\n p.send_control('z')?;\n\n p.wait_for_prompt()?;\n\n // bash writes 'ping 8.8.8.8' to stdout again to state which job was put into background\n\n 
p.execute(\"bg\", \"ping 8.8.8.8\")?;\n\n p.wait_for_prompt()?;\n\n p.send_line(\"sleep 0.5\")?;\n\n p.wait_for_prompt()?;\n\n // bash writes 'ping 8.8.8.8' to stdout again to state which job was put into foreground\n\n p.execute(\"fg\", \"ping 8.8.8.8\")?;\n\n p.send_control('c')?;\n\n p.exp_string(\"packet loss\")?;\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/bash.rs", "rank": 9, "score": 67148.49914211265 }, { "content": "fn do_ed_repl() -> Result<()> {\n\n let mut ed = ed_session()?;\n\n ed.send_line(\"a\")?;\n\n ed.send_line(\"ed is the best editor evar\")?;\n\n ed.send_line(\".\")?;\n\n ed.wait_for_prompt()?;\n\n ed.send_line(\",l\")?;\n\n ed.exp_string(\"ed is the best editor evar$\")?;\n\n ed.send_line(\"Q\")?;\n\n ed.exp_eof()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/repl.rs", "rank": 10, "score": 64765.472084383655 }, { "content": "/// The following code emits:\n\n/// cat exited with code 0, all good!\n\n/// cat exited with code 1\n\n/// Output (stdout and stderr): cat: /this/does/not/exist: No such file or directory\n\nfn exit_code_fun() -> Result<()> {\n\n let p = spawn(\"cat /etc/passwd\", Some(2000))?;\n\n match p.process.wait() {\n\n Ok(wait::WaitStatus::Exited(_, 0)) => println!(\"cat exited with code 0, all good!\"),\n\n _ => println!(\"cat exited with code >0, or it was killed\"),\n\n }\n\n\n\n let mut p = spawn(\"cat /this/does/not/exist\", Some(2000))?;\n\n match p.process.wait() {\n\n Ok(wait::WaitStatus::Exited(_, 0)) => println!(\"cat succeeded\"),\n\n Ok(wait::WaitStatus::Exited(_, c)) => {\n\n println!(\"Cat failed with exit code {}\", c);\n\n println!(\"Output (stdout and stderr): {}\", p.exp_eof()?);\n\n }\n\n // for other possible return types of wait()\n\n // see here: https://tailhook.github.io/rotor/nix/sys/wait/enum.WaitStatus.html\n\n _ => println!(\"cat was probably killed\"),\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/exit_code.rs", "rank": 11, "score": 60692.96352304581 }, { "content": "/// find first 
occurrence of needle within buffer\n\n///\n\n/// # Arguments:\n\n///\n\n/// - buffer: the currently read buffer from a process which will still grow in the future\n\n/// - eof: if the process already sent an EOF or a HUP\n\n///\n\n/// # Return\n\n///\n\n/// Tuple with match positions:\n\n/// 1. position before match (0 in case of EOF and Nbytes)\n\n/// 2. position after match\n\npub fn find(needle: &ReadUntil, buffer: &str, eof: bool) -> Option<(usize, usize)> {\n\n match needle {\n\n &ReadUntil::String(ref s) => buffer.find(s).and_then(|pos| Some((pos, pos + s.len()))),\n\n &ReadUntil::Regex(ref pattern) => {\n\n if let Some(mat) = pattern.find(buffer) {\n\n Some((mat.start(), mat.end()))\n\n } else {\n\n None\n\n }\n\n }\n\n &ReadUntil::EOF => {\n\n if eof {\n\n Some((0, buffer.len()))\n\n } else {\n\n None\n\n }\n\n }\n\n &ReadUntil::NBytes(n) => {\n\n if n <= buffer.len() {\n\n Some((0, n))\n", "file_path": "src/reader.rs", "rank": 12, "score": 59380.14655120055 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let tcp = TcpStream::connect(\"www.google.com:80\")?;\n\n let tcp_w = tcp.try_clone()?;\n\n let mut session = spawn_stream(tcp, tcp_w, Some(2000));\n\n session.send_line(\"GET / HTTP/1.1\")?;\n\n session.send_line(\"Host: www.google.com\")?;\n\n session.send_line(\"Accept-Language: fr\")?;\n\n session.send_line(\"\")?;\n\n session.exp_string(\"HTTP/1.1 200 OK\")?;\n\n Ok(())\n\n}\n", "file_path": "examples/tcp.rs", "rank": 13, "score": 56274.83641856711 }, { "content": "fn main() {\n\n run().unwrap_or_else(|e| panic!(\"bash process failed with {}\", e));\n\n}\n", "file_path": "examples/bash_read.rs", "rank": 14, "score": 53721.631033358826 }, { "content": "/// Turn e.g. 
\"prog arg1 arg2\" into [\"prog\", \"arg1\", \"arg2\"]\n\n/// Also takes care of single and double quotes\n\nfn tokenize_command(program: &str) -> Vec<String> {\n\n let re = Regex::new(r#\"\"[^\"]+\"|'[^']+'|[^'\" ]+\"#).unwrap();\n\n let mut res = vec![];\n\n for cap in re.captures_iter(program) {\n\n res.push(cap[0].to_string());\n\n }\n\n res\n\n}\n\n\n", "file_path": "src/session.rs", "rank": 15, "score": 43329.90924555606 }, { "content": "fn main() {\n\n run().unwrap_or_else(|e| panic!(\"bash process failed with {}\", e));\n\n}\n", "file_path": "examples/bash.rs", "rank": 16, "score": 32160.952639074058 }, { "content": "fn main() {\n\n do_ftp().unwrap_or_else(|e| panic!(\"ftp job failed with {}\", e));\n\n}\n", "file_path": "examples/ftp.rs", "rank": 17, "score": 32160.952639074058 }, { "content": "fn main() {\n\n do_ed_repl().unwrap_or_else(|e| panic!(\"ed session failed with {}\", e));\n\n}\n", "file_path": "examples/repl.rs", "rank": 18, "score": 32160.952639074058 }, { "content": "fn main() {\n\n exit_code_fun().unwrap_or_else(|e| panic!(\"cat function failed with {}\", e));\n\n}\n", "file_path": "examples/exit_code.rs", "rank": 19, "score": 30841.94159911064 }, { "content": "\n\n // case 3: read while program is still executing\n\n p.execute(\"ping 8.8.8.8\", \"bytes of data\")?; // returns when it sees \"bytes of data\" in output\n\n for _ in 0..5 {\n\n // times out if one ping takes longer than 2s\n\n let (_, duration) = p.exp_regex(\"[0-9. 
]+ ms\")?;\n\n println!(\"Roundtrip time: {}\", duration);\n\n }\n\n p.send_control('c')?;\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/bash_read.rs", "rank": 32, "score": 24262.236356814235 }, { "content": "extern crate rexpect;\n\nuse rexpect::errors::*;\n\nuse rexpect::spawn_bash;\n\n\n", "file_path": "examples/bash_read.rs", "rank": 33, "score": 24253.50797972631 }, { "content": " /// s.exp_any(vec![ReadUntil::String(\"hello\".into()),\n\n /// ReadUntil::EOF])?;\n\n /// # Ok(())\n\n /// # }().expect(\"test failed\");\n\n /// # }\n\n /// ```\n\n pub fn exp_any(&mut self, needles: Vec<ReadUntil>) -> Result<(String, String)> {\n\n self.exp(&ReadUntil::Any(needles))\n\n }\n\n}\n\n/// Interact with a process with read/write/signals, etc.\n\n#[allow(dead_code)]\n\npub struct PtySession {\n\n pub process: PtyProcess,\n\n pub stream: StreamSession<File>,\n\n pub commandname: String, // only for debugging purposes now\n\n}\n\n\n\nunsafe impl Send for PtySession {}\n\nunsafe impl Sync for PtySession {}\n", "file_path": "src/session.rs", "rank": 34, "score": 27.204899567113806 }, { "content": "//! 
Main module of rexpect: start new process and interact with it\n\n\n\nuse crate::errors::*; // load error-chain\n\nuse crate::process::PtyProcess;\n\npub use crate::reader::ReadUntil;\n\nuse crate::reader::{NBReader, Regex};\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\nuse std::io::LineWriter;\n\nuse std::ops::{Deref, DerefMut};\n\nuse std::process::Command;\n\nuse tempfile;\n\n\n\npub struct StreamSession<W: Write> {\n\n pub writer: LineWriter<W>,\n\n pub reader: NBReader,\n\n}\n\n\n\nimpl<W: Write> StreamSession<W> {\n\n pub fn new<R: Read + Send + 'static>(reader: R, writer: W, timeout_ms: Option<u64>) -> Self {\n", "file_path": "src/session.rs", "rank": 35, "score": 23.767986624783507 }, { "content": "///\n\n/// use rexpect::spawn;\n\n/// # use rexpect::errors::*;\n\n///\n\n/// # fn main() {\n\n/// # || -> Result<()> {\n\n/// let mut s = spawn(\"cat\", Some(1000))?;\n\n/// s.send_line(\"hello, polly!\")?;\n\n/// let line = s.read_line()?;\n\n/// assert_eq!(\"hello, polly!\", line);\n\n/// # Ok(())\n\n/// # }().expect(\"test failed\");\n\n/// # }\n\n/// ```\n\nimpl PtySession {\n\n fn new(process: PtyProcess, timeout_ms: Option<u64>, commandname: String) -> Result<Self> {\n\n let f = process.get_file_handle();\n\n let reader = f.try_clone().chain_err(|| \"couldn't open write stream\")?;\n\n let stream = StreamSession::new(reader, f, timeout_ms);\n\n Ok(Self {\n\n process,\n\n stream,\n\n commandname,\n\n })\n\n }\n\n}\n\n\n\n/// Turn e.g. 
\"prog arg1 arg2\" into [\"prog\", \"arg1\", \"arg2\"]\n\n/// Also takes care of single and double quotes\n", "file_path": "src/session.rs", "rank": 36, "score": 23.645797051118798 }, { "content": " /// For matching the start of the line use `exp_regex(\"\\nfoo\")`\n\n pub fn exp_regex(&mut self, regex: &str) -> Result<(String, String)> {\n\n let res = self\n\n .exp(&ReadUntil::Regex(\n\n Regex::new(regex).chain_err(|| \"invalid regex\")?,\n\n ))\n\n .and_then(|s| Ok(s));\n\n res\n\n }\n\n\n\n /// Wait until provided string is seen on stdout of child process.\n\n /// Return the yet unread output (without the matched string)\n\n pub fn exp_string(&mut self, needle: &str) -> Result<String> {\n\n self.exp(&ReadUntil::String(needle.to_string()))\n\n .and_then(|(s, _)| Ok(s))\n\n }\n\n\n\n /// Wait until provided char is seen on stdout of child process.\n\n /// Return the yet unread output (without the matched char)\n\n pub fn exp_char(&mut self, needle: char) -> Result<String> {\n", "file_path": "src/session.rs", "rank": 37, "score": 20.722734322217722 }, { "content": " /// you need to provide this if the shell you're testing is not killed by just sending\n\n /// SIGTERM\n\n pub quit_command: Option<String>,\n\n\n\n /// set this to true if the repl has echo on (i.e. sends user input to stdout)\n\n /// although echo is set off at pty fork (see `PtyProcess::new`) a few repls still\n\n /// seem to be able to send output. You may need to try with true first, and if\n\n /// tests fail set this to false.\n\n pub echo_on: bool,\n\n}\n\n\n\nimpl PtyReplSession {\n\n pub fn wait_for_prompt(&mut self) -> Result<String> {\n\n self.pty_session.exp_string(&self.prompt)\n\n }\n\n\n\n /// Send cmd to repl and:\n\n /// 1. wait for the cmd to be echoed (if `echo_on == true`)\n\n /// 2. 
wait for the ready string being present\n\n ///\n", "file_path": "src/session.rs", "rank": 38, "score": 20.281758532728716 }, { "content": "### Basic usage\n\n\n\nAdd this to your `Cargo.toml`\n\n\n\n```toml\n\n[dependencies]\n\nrexpect = \"0.4\"\n\n```\n\n\n\nSimple example for interacting via ftp:\n\n\n\n```rust\n\nextern crate rexpect;\n\n\n\nuse rexpect::spawn;\n\nuse rexpect::errors::*;\n\n\n\nfn do_ftp() -> Result<()> {\n\n let mut p = spawn(\"ftp speedtest.tele2.net\", Some(30_000))?;\n\n p.exp_regex(\"Name \\\\(.*\\\\):\")?;\n\n p.send_line(\"anonymous\")?;\n\n p.exp_string(\"Password\")?;\n\n p.send_line(\"test\")?;\n\n p.exp_string(\"ftp>\")?;\n\n p.send_line(\"cd upload\")?;\n\n p.exp_string(\"successfully changed.\\r\\nftp>\")?;\n\n p.send_line(\"pwd\")?;\n\n p.exp_regex(\"[0-9]+ \\\"/upload\\\"\")?;\n\n p.send_line(\"exit\")?;\n\n p.exp_eof()?;\n\n Ok(())\n\n}\n\n\n\n\n\nfn main() {\n\n do_ftp().unwrap_or_else(|e| panic!(\"ftp job failed with {}\", e));\n\n}\n\n```\n\n\n\n### Example with bash and reading from programs\n\n\n\n\n\n```rust\n\nextern crate rexpect;\n\nuse rexpect::spawn_bash;\n\nuse rexpect::errors::*;\n\n\n\n\n\nfn do_bash() -> Result<()> {\n\n let mut p = spawn_bash(Some(2000))?;\n\n \n\n // case 1: wait until program is done\n\n p.send_line(\"hostname\")?;\n\n let hostname = p.read_line()?;\n\n p.wait_for_prompt()?; // go sure `hostname` is really done\n\n println!(\"Current hostname: {}\", hostname);\n\n\n\n // case 2: wait until done, only extract a few infos\n\n p.send_line(\"wc /etc/passwd\")?;\n\n // `exp_regex` returns both string-before-match and match itself, discard first\n\n let (_, lines) = p.exp_regex(\"[0-9]+\")?;\n\n let (_, words) = p.exp_regex(\"[0-9]+\")?;\n\n let (_, bytes) = p.exp_regex(\"[0-9]+\")?;\n\n p.wait_for_prompt()?; // go sure `wc` is really done\n\n println!(\"/etc/passwd has {} lines, {} words, {} chars\", lines, words, bytes);\n\n\n\n // case 3: read while program is still executing\n\n p.execute(\"ping 
8.8.8.8\", \"bytes of data\")?; // returns when it sees \"bytes of data\" in output\n\n for _ in 0..5 {\n\n // times out if one ping takes longer than 2s\n\n let (_, duration) = p.exp_regex(\"[0-9. ]+ ms\")?;\n\n println!(\"Roundtrip time: {}\", duration);\n\n }\n\n p.send_control('c')?;\n\n Ok(())\n", "file_path": "README.md", "rank": 39, "score": 20.139723444147947 }, { "content": " /// (waits until \\n is in the output fetches the line and removes \\r at the end if present)\n\n pub fn read_line(&mut self) -> Result<String> {\n\n match self.exp(&ReadUntil::String('\\n'.to_string())) {\n\n Ok((mut line, _)) => {\n\n if line.ends_with('\\r') {\n\n line.pop().expect(\"this never happens\");\n\n }\n\n Ok(line)\n\n }\n\n Err(e) => Err(e),\n\n }\n\n }\n\n\n\n /// Return `Some(c)` if a char is ready in the stdout stream of the process, return `None`\n\n /// otherwise. This is non-blocking.\n\n pub fn try_read(&mut self) -> Option<char> {\n\n self.reader.try_read()\n\n }\n\n\n\n pub fn try_read_all(&mut self) -> Option<String> {\n", "file_path": "src/session.rs", "rank": 40, "score": 19.065475296902285 }, { "content": " self.reader.try_read_all()\n\n }\n\n\n\n // wrapper around reader::read_until to give more context for errors\n\n fn exp(&mut self, needle: &ReadUntil) -> Result<(String, String)> {\n\n self.reader.read_until(needle)\n\n }\n\n\n\n /// Wait until we see EOF (i.e. child process has terminated)\n\n /// Return all the yet unread output\n\n pub fn exp_eof(&mut self) -> Result<String> {\n\n self.exp(&ReadUntil::EOF).and_then(|(_, s)| Ok(s))\n\n }\n\n\n\n /// Wait until provided regex is seen on stdout of child process.\n\n /// Return a tuple:\n\n /// 1. the yet unread output\n\n /// 2. 
the matched regex\n\n ///\n\n /// Note that `exp_regex(\"^foo\")` matches the start of the yet consumed output.\n", "file_path": "src/session.rs", "rank": 41, "score": 19.033346057302445 }, { "content": " /// p.exp_string(\"hans\")?;\n\n /// # Ok(())\n\n /// # }().expect(\"test failed\");\n\n /// # }\n\n /// ```\n\n pub fn execute(&mut self, cmd: &str, ready_regex: &str) -> Result<()> {\n\n self.send_line(cmd)?;\n\n if self.echo_on {\n\n self.exp_string(cmd)?;\n\n }\n\n self.exp_regex(ready_regex)?;\n\n Ok(())\n\n }\n\n\n\n /// send line to repl (and flush output) and then, if echo_on=true wait for the\n\n /// input to appear.\n\n /// Return: number of bytes written\n\n pub fn send_line(&mut self, line: &str) -> Result<usize> {\n\n let bytes_written = self.pty_session.send_line(line)?;\n\n if self.echo_on {\n", "file_path": "src/session.rs", "rank": 42, "score": 18.154173896962224 }, { "content": " pub fn new<R: Read + Send + 'static>(f: R, timeout: Option<u64>) -> NBReader {\n\n let (tx, rx) = channel();\n\n\n\n // spawn a thread which reads one char and sends it to tx\n\n thread::spawn(move || {\n\n let _ = || -> Result<()> {\n\n let mut reader = BufReader::new(f);\n\n let mut byte = [0u8];\n\n loop {\n\n match reader.read(&mut byte) {\n\n Ok(0) => {\n\n let _ = tx.send(Ok(PipedChar::EOF)).chain_err(|| \"cannot send\")?;\n\n break;\n\n }\n\n Ok(_) => {\n\n tx.send(Ok(PipedChar::Char(byte[0])))\n\n .chain_err(|| \"cannot send\")?;\n\n }\n\n Err(error) => {\n\n tx.send(Err(PipeError::IO(error)))\n", "file_path": "src/reader.rs", "rank": 43, "score": 17.863755658055076 }, { "content": " crate::process::signal::Signal::SIGTERM,\n\n false,\n\n );\n\n assert_eq!(should, s.process.exit()?);\n\n Ok(())\n\n }()\n\n .unwrap_or_else(|e| panic!(\"test_read_line failed: {}\", e));\n\n }\n\n\n\n #[test]\n\n fn test_expect_eof_timeout() {\n\n || -> Result<()> {\n\n let mut p = spawn(\"sleep 3\", Some(1000)).expect(\"cannot run sleep 3\");\n\n match p.exp_eof() {\n\n Ok(_) => 
assert!(false, \"should raise Timeout\"),\n\n Err(Error(ErrorKind::Timeout(_, _, _), _)) => {}\n\n Err(_) => assert!(false, \"should raise TimeOut\"),\n\n }\n\n Ok(())\n\n }()\n", "file_path": "src/session.rs", "rank": 44, "score": 17.71711036803238 }, { "content": "//! fn run() -> Result<()> {\n\n//! let mut p = spawn_bash(Some(30_000))?;\n\n//! p.execute(\"ping 8.8.8.8\", \"bytes of data\")?;\n\n//! p.send_control('z')?;\n\n//! p.wait_for_prompt()?;\n\n//! p.execute(\"bg\", \"suspended\")?;\n\n//! p.send_line(\"sleep 1\")?;\n\n//! p.wait_for_prompt()?;\n\n//! p.execute(\"fg\", \"continued\")?;\n\n//! p.send_control('c')?;\n\n//! p.exp_string(\"packet loss\")?;\n\n//! Ok(())\n\n//! }\n\n//!\n\n//! fn main() {\n\n//! run().unwrap_or_else(|e| panic!(\"bash process failed with {}\", e));\n\n//! }\n\n//!\n\n//! ```\n\n\n", "file_path": "src/lib.rs", "rank": 45, "score": 17.67104125619098 }, { "content": "### Example with bash and job control\n\n\n\nOne frequent bitfall with sending ctrl-c and friends is that you need\n\nto somehow ensure that the program has fully loaded, otherwise the ctrl-*\n\ngoes into nirvana. 
There are two functions to ensure that:\n\n\n\n- `execute` where you need to provide a match string which is present\n\n on stdout/stderr when the program is ready\n\n- `wait_for_prompt` which waits until the prompt is shown again\n\n\n\n\n\n\n\n```rust\n\nextern crate rexpect;\n\nuse rexpect::spawn_bash;\n\nuse rexpect::errors::*;\n\n\n\n\n\nfn do_bash_jobcontrol() -> Result<()> {\n\n let mut p = spawn_bash(Some(1000))?;\n\n p.execute(\"ping 8.8.8.8\", \"bytes of data\")?;\n\n p.send_control('z')?;\n\n p.wait_for_prompt()?;\n\n // bash writes 'ping 8.8.8.8' to stdout again to state which job was put into background\n\n p.execute(\"bg\", \"ping 8.8.8.8\")?;\n\n p.wait_for_prompt()?;\n\n p.send_line(\"sleep 0.5\")?;\n\n p.wait_for_prompt()?;\n\n // bash writes 'ping 8.8.8.8' to stdout again to state which job was put into foreground\n\n p.execute(\"fg\", \"ping 8.8.8.8\")?;\n\n p.send_control('c')?;\n\n p.exp_string(\"packet loss\")?;\n\n Ok(())\n\n}\n\n\n\nfn main() {\n\n do_bash_jobcontrol().unwrap_or_else(|e| panic!(\"bash with job control failed with {}\", e));\n\n}\n\n\n\n```\n\n\n\n## Project Status\n\n\n\nRexpect covers more or less the features of pexpect. 
If you miss anything\n\nI'm happy to receive PRs or also Issue requests of course.\n\n\n\nThe tests cover most of the aspects and it should run out of the box for\n\nrust stable, beta and nightly on both Linux or Mac.\n\n\n\n## Design decisions\n\n\n\n- use error handling of [error-chain](https://github.com/brson/error-chain)\n\n- use [nix](https://github.com/nix-rust/nix) (and avoid libc wherever possible) to keep the code safe and clean\n\n- sadly, `expect` is used in rust too prominently to unwrap `Option`s and `Result`s, use `exp_*` instead\n\n\n\nLicensed under [MIT License](LICENSE)\n", "file_path": "README.md", "rank": 46, "score": 17.519751719337084 }, { "content": " self.exp(&ReadUntil::String(needle.to_string()))\n\n .and_then(|(s, _)| Ok(s))\n\n }\n\n\n\n /// Wait until any of the provided needles is found.\n\n ///\n\n /// Return a tuple with:\n\n /// 1. the yet unread string, without the matching needle (empty in case of EOF and NBytes)\n\n /// 2. the matched string\n\n ///\n\n /// # Example:\n\n ///\n\n /// ```\n\n /// use rexpect::{spawn, ReadUntil};\n\n /// # use rexpect::errors::*;\n\n ///\n\n /// # fn main() {\n\n /// # || -> Result<()> {\n\n /// let mut s = spawn(\"cat\", Some(1000))?;\n\n /// s.send_line(\"hello, polly!\")?;\n", "file_path": "src/session.rs", "rank": 47, "score": 16.24225879070758 }, { "content": "//! 
Unblocking reader which supports waiting for strings/regexes and EOF to be present\n\n\n\nuse crate::errors::*; // load error-chain\n\npub use regex::Regex;\n\nuse std::io::prelude::*;\n\nuse std::io::{self, BufReader};\n\nuse std::sync::mpsc::{channel, Receiver};\n\nuse std::{fmt, time};\n\nuse std::{result, thread};\n\n\n\n#[derive(Debug)]\n", "file_path": "src/reader.rs", "rank": 48, "score": 16.072651493145177 }, { "content": "pub mod process;\n\npub mod reader;\n\npub mod session;\n\n\n\npub use reader::ReadUntil;\n\npub use session::{spawn, spawn_bash, spawn_python, spawn_stream};\n\n\n\npub mod errors {\n\n use std::time;\n\n // Create the Error, ErrorKind, ResultExt, and Result types\n\n error_chain::error_chain! {\n\n errors {\n\n EOF(expected:String, got:String, exit_code:Option<String>) {\n\n description(\"End of filestream (usually stdout) occurred, most probably\\\n\n because the process terminated\")\n\n display(\"EOF (End of File): Expected {} but got EOF after reading \\\"{}\\\", \\\n\n process terminated with {:?}\", expected, got,\n\n exit_code.as_ref()\n\n .unwrap_or(& \"unknown\".to_string()))\n\n }\n", "file_path": "src/lib.rs", "rank": 49, "score": 16.042389376107536 }, { "content": " /// Q: Why can't I just do `send_line` and immediately continue?\n\n /// A: Executing a command in e.g. bash causes a fork. If the Unix kernel chooses the\n\n /// parent process (bash) to go first and the bash process sends e.g. 
Ctrl-C then the\n\n /// Ctrl-C goes to nirvana.\n\n /// The only way to prevent this situation is to wait for a ready string being present\n\n /// in the output.\n\n ///\n\n /// Another safe way to tackle this problem is to use `send_line()` and `wait_for_prompt()`\n\n ///\n\n /// # Example:\n\n ///\n\n /// ```\n\n /// use rexpect::spawn_bash;\n\n /// # use rexpect::errors::*;\n\n ///\n\n /// # fn main() {\n\n /// # || -> Result<()> {\n\n /// let mut p = spawn_bash(Some(1000))?;\n\n /// p.execute(\"cat <(echo ready) -\", \"ready\")?;\n\n /// p.send_line(\"hans\")?;\n", "file_path": "src/session.rs", "rank": 50, "score": 16.02371168560538 }, { "content": "\n\n// make StreamSession's methods available directly\n\nimpl Deref for PtySession {\n\n type Target = StreamSession<File>;\n\n fn deref(&self) -> &StreamSession<File> {\n\n &self.stream\n\n }\n\n}\n\n\n\nimpl DerefMut for PtySession {\n\n fn deref_mut(&mut self) -> &mut StreamSession<File> {\n\n &mut self.stream\n\n }\n\n}\n\n\n\n/// Start a process in a tty session, write and read from it\n\n///\n\n/// # Example\n\n///\n\n/// ```\n", "file_path": "src/session.rs", "rank": 51, "score": 15.004174130587888 }, { "content": " Self {\n\n writer: LineWriter::new(writer),\n\n reader: NBReader::new(reader, timeout_ms),\n\n }\n\n }\n\n\n\n /// sends string and a newline to process\n\n ///\n\n /// this is guaranteed to be flushed to the process\n\n /// returns number of written bytes\n\n pub fn send_line(&mut self, line: &str) -> Result<usize> {\n\n let mut len = self.send(line)?;\n\n len += self\n\n .writer\n\n .write(&['\\n' as u8])\n\n .chain_err(|| \"cannot write newline\")?;\n\n Ok(len)\n\n }\n\n\n\n /// Send string to process. 
As stdin of the process is most likely buffered, you'd\n", "file_path": "src/session.rs", "rank": 52, "score": 14.697210059836404 }, { "content": " // nothing matched: wait a little\n\n thread::sleep(time::Duration::from_millis(100));\n\n }\n\n }\n\n\n\n /// Try to read one char from internal buffer. Returns None if\n\n /// no char is ready, Some(char) otherwise. This is non-blocking\n\n pub fn try_read(&mut self) -> Option<char> {\n\n // discard eventual errors, EOF will be handled in read_until correctly\n\n let _ = self.read_into_buffer();\n\n if self.buffer.len() > 0 {\n\n self.buffer.drain(..1).last()\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn try_read_all(&mut self) -> Option<String> {\n\n // discard eventual errors, EOF will be handled in read_until correctly\n\n let _ = self.read_into_buffer();\n", "file_path": "src/reader.rs", "rank": 53, "score": 14.590319450291933 }, { "content": " Ok(())\n\n }()\n\n .unwrap_or_else(|e| panic!(\"test_bash failed: {}\", e));\n\n }\n\n\n\n #[test]\n\n fn test_bash_control_chars() {\n\n || -> Result<()> {\n\n let mut p = spawn_bash(Some(1000))?;\n\n p.execute(\"cat <(echo ready) -\", \"ready\")?;\n\n p.send_control('c')?; // abort: SIGINT\n\n p.wait_for_prompt()?;\n\n p.execute(\"cat <(echo ready) -\", \"ready\")?;\n\n p.send_control('z')?; // suspend:SIGTSTPcon\n\n p.exp_regex(r\"(Stopped|suspended)\\s+cat .*\")?;\n\n p.send_line(\"fg\")?;\n\n p.execute(\"cat <(echo ready) -\", \"ready\")?;\n\n p.send_control('c')?;\n\n Ok(())\n\n }()\n", "file_path": "src/session.rs", "rank": 54, "score": 14.541743405322318 }, { "content": " }\n\n\n\n #[test]\n\n fn test_read_string_before() {\n\n || -> Result<()> {\n\n let mut p = spawn(\"cat\", Some(1000)).expect(\"cannot run cat\");\n\n p.send_line(\"lorem ipsum dolor sit amet\")?;\n\n assert_eq!(\"lorem ipsum dolor sit \", p.exp_string(\"amet\")?);\n\n Ok(())\n\n }()\n\n .unwrap_or_else(|e| panic!(\"test_read_string_before failed: {}\", e));\n\n }\n\n\n\n #[test]\n\n fn 
test_expect_any() {\n\n || -> Result<()> {\n\n let mut p = spawn(\"cat\", Some(1000)).expect(\"cannot run cat\");\n\n p.send_line(\"Hi\")?;\n\n match p.exp_any(vec![\n\n ReadUntil::NBytes(3),\n", "file_path": "src/session.rs", "rank": 55, "score": 14.184026733676198 }, { "content": " /// need to call `flush()` after `send()` to make the process actually see your input.\n\n ///\n\n /// Returns number of written bytes\n\n pub fn send(&mut self, s: &str) -> Result<usize> {\n\n self.writer\n\n .write(s.as_bytes())\n\n .chain_err(|| \"cannot write line to process\")\n\n }\n\n\n\n /// Send a control code to the running process and consume resulting output line\n\n /// (which is empty because echo is off)\n\n ///\n\n /// E.g. `send_control('c')` sends ctrl-c. Upper/smaller case does not matter.\n\n pub fn send_control(&mut self, c: char) -> Result<()> {\n\n let code = match c {\n\n 'a'..='z' => c as u8 + 1 - 'a' as u8,\n\n 'A'..='Z' => c as u8 + 1 - 'A' as u8,\n\n '[' => 27,\n\n '\\\\' => 28,\n\n ']' => 29,\n", "file_path": "src/session.rs", "rank": 56, "score": 14.144917818158735 }, { "content": " #[test]\n\n fn test_kill_timeout() {\n\n || -> Result<()> {\n\n let mut p = spawn_bash(Some(1000))?;\n\n p.execute(\"cat <(echo ready) -\", \"ready\")?;\n\n Ok(())\n\n }()\n\n .unwrap_or_else(|e| panic!(\"test_kill_timeout failed: {}\", e));\n\n // p is dropped here and kill is sent immediately to bash\n\n // Since that is not enough to make bash exit, a kill -9 is sent within 1s (timeout)\n\n }\n\n\n\n #[test]\n\n fn test_bash() {\n\n || -> Result<()> {\n\n let mut p = spawn_bash(Some(1000))?;\n\n p.send_line(\"cd /tmp/\")?;\n\n p.wait_for_prompt()?;\n\n p.send_line(\"pwd\")?;\n\n assert_eq!(\"/tmp\\r\\n\", p.wait_for_prompt()?);\n", "file_path": "src/session.rs", "rank": 57, "score": 13.906238673300173 }, { "content": " /// Returns error if EOF is reached before the needle could be found.\n\n ///\n\n /// # Example with line reading, byte reading, regex and EOF reading.\n\n 
///\n\n /// ```\n\n /// # use std::io::Cursor;\n\n /// use rexpect::reader::{NBReader, ReadUntil, Regex};\n\n /// // instead of a Cursor you would put your process output or file here\n\n /// let f = Cursor::new(\"Hello, miss!\\n\\\n\n /// What do you mean: 'miss'?\");\n\n /// let mut e = NBReader::new(f, None);\n\n ///\n\n /// let (first_line, _) = e.read_until(&ReadUntil::String('\\n'.to_string())).unwrap();\n\n /// assert_eq!(\"Hello, miss!\", &first_line);\n\n ///\n\n /// let (_, two_bytes) = e.read_until(&ReadUntil::NBytes(2)).unwrap();\n\n /// assert_eq!(\"Wh\", &two_bytes);\n\n ///\n\n /// let re = Regex::new(r\"'[a-z]+'\").unwrap(); // will find 'miss'\n\n /// let (before, reg_match) = e.read_until(&ReadUntil::Regex(re)).unwrap();\n", "file_path": "src/reader.rs", "rank": 58, "score": 13.848264343708527 }, { "content": "impl Drop for PtyReplSession {\n\n /// for e.g. bash we *need* to run `quit` at the end.\n\n /// if we leave that out, PtyProcess would try to kill the bash\n\n /// which would not work, as a SIGTERM is not enough to kill bash\n\n fn drop(&mut self) {\n\n if let Some(ref cmd) = self.quit_command {\n\n self.pty_session\n\n .send_line(&cmd)\n\n .expect(\"could not run `exit` on bash process\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/session.rs", "rank": 59, "score": 13.406989967906448 }, { "content": " // check for EOF\n\n match r.read_until(&ReadUntil::NBytes(10)) {\n\n Ok(_) => assert!(false),\n\n Err(Error(ErrorKind::EOF(_, _, _), _)) => {}\n\n Err(Error(_, _)) => assert!(false),\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_regex() {\n\n let f = io::Cursor::new(\"2014-03-15\");\n\n let mut r = NBReader::new(f, None);\n\n let re = Regex::new(r\"^\\d{4}-\\d{2}-\\d{2}$\").unwrap();\n\n r.read_until(&ReadUntil::Regex(re))\n\n .expect(\"regex doesn't match\");\n\n }\n\n\n\n #[test]\n\n fn test_regex2() {\n\n let f = io::Cursor::new(\"2014-03-15\");\n", "file_path": "src/reader.rs", "rank": 60, "score": 12.969855534617603 }, { "content": 
"///\n\n/// Typically you'd need that to check for output of a process without blocking your thread.\n\n/// Internally a thread is spawned and the output is read ahead so when\n\n/// calling `read_line` or `read_until` it reads from an internal buffer\n\npub struct NBReader {\n\n reader: Receiver<result::Result<PipedChar, PipeError>>,\n\n buffer: String,\n\n eof: bool,\n\n timeout: Option<time::Duration>,\n\n}\n\n\n\nimpl NBReader {\n\n /// Create a new reader instance\n\n ///\n\n /// # Arguments:\n\n ///\n\n /// - f: file like object\n\n /// - timeout:\n\n /// + `None`: read_until is blocking forever. This is probably not what you want\n\n /// + `Some(millis)`: after millis milliseconds a timeout error is raised\n", "file_path": "src/reader.rs", "rank": 61, "score": 12.777864985738157 }, { "content": " '^' => 30,\n\n '_' => 31,\n\n _ => return Err(format!(\"I don't understand Ctrl-{}\", c).into()),\n\n };\n\n self.writer\n\n .write_all(&[code])\n\n .chain_err(|| \"cannot send control\")?;\n\n // stdout is line buffered, so needs a flush\n\n self.writer\n\n .flush()\n\n .chain_err(|| \"cannot flush after sending ctrl keycode\")?;\n\n Ok(())\n\n }\n\n\n\n /// Make sure all bytes written via `send()` are sent to the process\n\n pub fn flush(&mut self) -> Result<()> {\n\n self.writer.flush().chain_err(|| \"could not flush\")\n\n }\n\n\n\n /// Read one line (blocking!) and return line without the newline\n", "file_path": "src/session.rs", "rank": 62, "score": 12.637662837506351 }, { "content": " fn read_into_buffer(&mut self) -> Result<()> {\n\n if self.eof {\n\n return Ok(());\n\n }\n\n while let Ok(from_channel) = self.reader.try_recv() {\n\n match from_channel {\n\n Ok(PipedChar::Char(c)) => self.buffer.push(c as char),\n\n Ok(PipedChar::EOF) => self.eof = true,\n\n // this is just from experience, e.g. 
\"sleep 5\" returns the other error which\n\n // most probably means that there is no stdout stream at all -> send EOF\n\n // this only happens on Linux, not on OSX\n\n Err(PipeError::IO(ref err)) if err.kind() == io::ErrorKind::Other => {\n\n self.eof = true\n\n }\n\n // discard other errors\n\n Err(_) => {}\n\n }\n\n }\n\n Ok(())\n\n }\n", "file_path": "src/reader.rs", "rank": 63, "score": 12.608865927501355 }, { "content": " /// assert_eq!(\"at do you mean: \", &before);\n\n /// assert_eq!(\"'miss'\", &reg_match);\n\n ///\n\n /// let (_, until_end) = e.read_until(&ReadUntil::EOF).unwrap();\n\n /// assert_eq!(\"?\", &until_end);\n\n /// ```\n\n ///\n\n pub fn read_until(&mut self, needle: &ReadUntil) -> Result<(String, String)> {\n\n let start = time::Instant::now();\n\n\n\n loop {\n\n self.read_into_buffer()?;\n\n if let Some(tuple_pos) = find(needle, &self.buffer, self.eof) {\n\n let first = self.buffer.drain(..tuple_pos.0).collect();\n\n let second = self.buffer.drain(..tuple_pos.1 - tuple_pos.0).collect();\n\n return Ok((first, second));\n\n }\n\n\n\n // reached end of stream and didn't match -> error\n\n // we don't know the reason of eof yet, so we provide an empty string\n", "file_path": "src/reader.rs", "rank": 64, "score": 12.29924153406184 }, { "content": "//! fn main() {\n\n//! do_ftp().unwrap_or_else(|e| panic!(\"ftp job failed with {}\", e));\n\n//! }\n\n//! ```\n\n//!\n\n//! # Example with bash\n\n//!\n\n//! Tip: try the chain of commands first in a bash session.\n\n//! The tricky thing is to get the wait_for_prompt right.\n\n//! What `wait_for_prompt` actually does is seeking to the next\n\n//! visible prompt. If you forgot to call this once your next call to\n\n//! `wait_for_prompt` comes out of sync and you're seeking to a prompt\n\n//! printed \"above\" the last `execute()`.\n\n//!\n\n//! ```no_run\n\n//! extern crate rexpect;\n\n//! use rexpect::spawn_bash;\n\n//! 
use rexpect::errors::*;\n\n//!\n\n//!\n", "file_path": "src/lib.rs", "rank": 65, "score": 11.771251292150364 }, { "content": " let mut r = NBReader::new(f, None);\n\n let re = Regex::new(r\"-\\d{2}-\").unwrap();\n\n assert_eq!(\n\n (\"2014\".to_string(), \"-03-\".to_string()),\n\n r.read_until(&ReadUntil::Regex(re))\n\n .expect(\"regex doesn't match\")\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_nbytes() {\n\n let f = io::Cursor::new(\"abcdef\");\n\n let mut r = NBReader::new(f, None);\n\n assert_eq!(\n\n (\"\".to_string(), \"ab\".to_string()),\n\n r.read_until(&ReadUntil::NBytes(2)).expect(\"2 bytes\")\n\n );\n\n assert_eq!(\n\n (\"\".to_string(), \"cde\".to_string()),\n\n r.read_until(&ReadUntil::NBytes(3)).expect(\"3 bytes\")\n", "file_path": "src/reader.rs", "rank": 66, "score": 11.699014158049394 }, { "content": " rcfile\n\n .path()\n\n .to_str()\n\n .unwrap_or_else(|| return \"temp file does not exist\".into()),\n\n ]);\n\n spawn_command(c, timeout).and_then(|p| {\n\n let new_prompt = \"[REXPECT_PROMPT>\";\n\n let mut pb = PtyReplSession {\n\n prompt: new_prompt.to_string(),\n\n pty_session: p,\n\n quit_command: Some(\"quit\".to_string()),\n\n echo_on: false,\n\n };\n\n pb.exp_string(\"~~~~\")?;\n\n rcfile\n\n .close()\n\n .chain_err(|| \"cannot delete temporary rcfile\")?;\n\n pb.send_line(&(\"PS1='\".to_string() + new_prompt + \"'\"))?;\n\n // wait until the new prompt appears\n\n pb.wait_for_prompt()?;\n\n Ok(pb)\n\n })\n\n}\n\n\n", "file_path": "src/session.rs", "rank": 67, "score": 11.368891679016983 }, { "content": "# Change Log\n\n\n\nAll notable changes to this project will be documented in this file.\n\nThis project adheres to [Semantic Versioning](http://semver.org/).\n\n\n\n## [0.4.0] 2020-05-25\n\n\n\n### Changed\n\n\n\n- PtySession now works with any stream type, e.g. 
also tcp streams are supported now (thanks, thomasantony)\n\n- breaking: PtyBashSession was renamed and generalized into\n\n PtyReplSession to allow an interface for other REPLs \n\n- better error messages in case of timeout to help debug when you expect\n\n strings which you *think* are there, but are e.g. intermixed with newlines\n\n or ctrl characters: newlines are printed as `\\n`, carriage returns as `\\r`\n\n and control characters as `^`\n\n- new: `session::spawn_python`, just as a proof of concept and documentation really,\n\n I don't think this will be used..\n\n\n\n### Fixed\n\n\n\n- `spawn()` now parses single/doublequotes better. E.g. `ed -p '> '` is\n\n now tokenized into `[\"ed\", \"-p\" \"'> '\"]`\n\n\n\n## [0.3.0] 2017-10-05\n\n\n\n### Changed\n\n\n\n- breaking: `execute` takes string to wait for as second argument \n\n (before it waited 10ms which was way too fragile)\n\n- if process doesn't end on SIGTERM a `kill -9` is sent after timeout is elapsed\n\n\n\n### Fixed\n\n\n\n- ctrl-* used to consume one line. As it could be that the reader did not consume all\n\n output data yet this could have been a not-yet-read line. Therefore `send_control`\n\n no longer consumes a line.\n\n\n\n## [0.2.0] 2017-09-20\n\n\n\n### Changed\n\n\n\nAll `exp_*` methods now also return the yet unread string and/or the matched string:\n\n\n\n- `exp_string`: return the yet unread string\n\n- `exp_regex`: return a tuple of (yet unread string, matched string)\n\n- `exp_eof` and `exp_nbytes`: return the yet unread string\n\n\n\n### Fixed\n\n\n\n- each execution of rexpect left a temporary file in /tmp/ this is now no longer the case\n\n- try_read was blocking when there was no char ready (!) 
-> fixed\n", "file_path": "CHANGELOG.md", "rank": 68, "score": 11.17992733065667 }, { "content": " self.exp_string(line)?;\n\n }\n\n Ok(bytes_written)\n\n }\n\n}\n\n\n\n// make PtySession's methods available directly\n\nimpl Deref for PtyReplSession {\n\n type Target = PtySession;\n\n fn deref(&self) -> &PtySession {\n\n &self.pty_session\n\n }\n\n}\n\n\n\nimpl DerefMut for PtyReplSession {\n\n fn deref_mut(&mut self) -> &mut PtySession {\n\n &mut self.pty_session\n\n }\n\n}\n\n\n", "file_path": "src/session.rs", "rank": 69, "score": 11.151372754742077 }, { "content": " .unwrap_or_else(|e| panic!(\"test_timeout failed: {}\", e));\n\n }\n\n\n\n #[test]\n\n fn test_expect_eof_timeout2() {\n\n let mut p = spawn(\"sleep 1\", Some(1100)).expect(\"cannot run sleep 1\");\n\n assert!(p.exp_eof().is_ok(), \"expected eof\");\n\n }\n\n\n\n #[test]\n\n fn test_expect_string() {\n\n || -> Result<()> {\n\n let mut p = spawn(\"cat\", Some(1000)).expect(\"cannot run cat\");\n\n p.send_line(\"hello world!\")?;\n\n p.exp_string(\"hello world!\")?;\n\n p.send_line(\"hello heaven!\")?;\n\n p.exp_string(\"hello heaven!\")?;\n\n Ok(())\n\n }()\n\n .unwrap_or_else(|e| panic!(\"test_expect_string failed: {}\", e));\n", "file_path": "src/session.rs", "rank": 70, "score": 10.41859808539761 }, { "content": "//! use rexpect::spawn;\n\n//! use rexpect::errors::*;\n\n//!\n\n//! fn do_ftp() -> Result<()> {\n\n//! let mut p = spawn(\"ftp speedtest.tele2.net\", Some(2000))?;\n\n//! p.exp_regex(\"Name \\\\(.*\\\\):\")?;\n\n//! p.send_line(\"anonymous\")?;\n\n//! p.exp_string(\"Password\")?;\n\n//! p.send_line(\"test\")?;\n\n//! p.exp_string(\"ftp>\")?;\n\n//! p.send_line(\"cd upload\")?;\n\n//! p.exp_string(\"successfully changed.\\r\\nftp>\")?;\n\n//! p.send_line(\"pwd\")?;\n\n//! p.exp_regex(\"[0-9]+ \\\"/upload\\\"\")?;\n\n//! p.send_line(\"exit\")?;\n\n//! p.exp_eof()?;\n\n//! Ok(())\n\n//! 
}\n\n//!\n\n//!\n", "file_path": "src/lib.rs", "rank": 71, "score": 10.3119973400085 }, { "content": " ReadUntil::String(\"Hi\".to_string()),\n\n ]) {\n\n Ok(s) => assert_eq!((\"\".to_string(), \"Hi\\r\".to_string()), s),\n\n Err(e) => assert!(false, format!(\"got error: {}\", e)),\n\n }\n\n Ok(())\n\n }()\n\n .unwrap_or_else(|e| panic!(\"test_expect_any failed: {}\", e));\n\n }\n\n\n\n #[test]\n\n fn test_expect_empty_command_error() {\n\n let p = spawn(\"\", Some(1000));\n\n match p {\n\n Ok(_) => assert!(false, \"should raise an error\"),\n\n Err(Error(ErrorKind::EmptyProgramName, _)) => {}\n\n Err(_) => assert!(false, \"should raise EmptyProgramName\"),\n\n }\n\n }\n\n\n", "file_path": "src/session.rs", "rank": 72, "score": 9.534861670169562 }, { "content": " if self.buffer.len() > 0 {\n\n return Some(self.buffer.drain(..).collect());\n\n }\n\n None\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_expect_melon() {\n\n let f = io::Cursor::new(\"a melon\\r\\n\");\n\n let mut r = NBReader::new(f, None);\n\n assert_eq!(\n\n (\"a melon\".to_string(), \"\\r\\n\".to_string()),\n\n r.read_until(&ReadUntil::String(\"\\r\\n\".to_string()))\n\n .expect(\"cannot read line\")\n\n );\n", "file_path": "src/reader.rs", "rank": 73, "score": 9.123903972500017 }, { "content": " let f = io::Cursor::new(\"lorem\");\n\n let mut r = NBReader::new(f, None);\n\n r.read_until(&ReadUntil::NBytes(4)).expect(\"4 bytes\");\n\n assert_eq!(Some('m'), r.try_read());\n\n assert_eq!(None, r.try_read());\n\n assert_eq!(None, r.try_read());\n\n assert_eq!(None, r.try_read());\n\n assert_eq!(None, r.try_read());\n\n }\n\n}\n", "file_path": "src/reader.rs", "rank": 74, "score": 8.606070250397561 }, { "content": " .chain_err(|| \"cannot send\")?;\n\n }\n\n }\n\n }\n\n Ok(())\n\n }();\n\n // don't do error handling as on an error it was most probably\n\n // the main thread which exited (remote hangup)\n\n });\n\n // allocate string with a initial capacity of 
1024, so when appending chars\n\n // we don't need to reallocate memory often\n\n NBReader {\n\n reader: rx,\n\n buffer: String::with_capacity(1024),\n\n eof: false,\n\n timeout: timeout.and_then(|millis| Some(time::Duration::from_millis(millis))),\n\n }\n\n }\n\n\n\n /// reads all available chars from the read channel and stores them in self.buffer\n", "file_path": "src/reader.rs", "rank": 75, "score": 7.610560601980724 }, { "content": " &ReadUntil::EOF => \"EOF (End of File)\".to_string(),\n\n &ReadUntil::NBytes(n) => format!(\"reading {} bytes\", n),\n\n &ReadUntil::Any(ref v) => {\n\n let mut res = Vec::new();\n\n for r in v {\n\n res.push(r.to_string());\n\n }\n\n res.join(\", \")\n\n }\n\n };\n\n write!(f, \"{}\", printable)\n\n }\n\n}\n\n\n", "file_path": "src/reader.rs", "rank": 76, "score": 7.462436061362855 }, { "content": "//! The main crate of Rexpect\n\n//!\n\n//! # Overview\n\n//!\n\n//! Rexpect is a loose port of [pexpect](pexpect.readthedocs.io/en/stable/)\n\n//! which itself is inspired by Don Libe's expect.\n\n//!\n\n//! It's main components (depending on your need you can use either of those)\n\n//!\n\n//! - [session](session/index.html): automate stuff in Rust\n\n//! - [reader](reader/index.html): a non-blocking reader with buffering, matching on\n\n//! strings/regex/...\n\n//! - [process](process/index.html): spawn a process in a pty\n\n//!\n\n//! # Basic example\n\n//!\n\n//! ```no_run\n\n//!\n\n//! extern crate rexpect;\n\n//!\n", "file_path": "src/lib.rs", "rank": 77, "score": 7.456081365362024 }, { "content": "\n\n /// Read until needle is found (blocking!) and return tuple with:\n\n /// 1. yet unread string until and without needle\n\n /// 2. 
matched needle\n\n ///\n\n /// This methods loops (while reading from the Cursor) until the needle is found.\n\n ///\n\n /// There are different modes:\n\n ///\n\n /// - `ReadUntil::String` searches for string (use '\\n'.to_string() to search for newline).\n\n /// Returns not yet read data in first String, and needle in second String\n\n /// - `ReadUntil::Regex` searches for regex\n\n /// Returns not yet read data in first String and matched regex in second String\n\n /// - `ReadUntil::NBytes` reads maximum n bytes\n\n /// Returns n bytes in second String, first String is left empty\n\n /// - `ReadUntil::EOF` reads until end of file is reached\n\n /// Returns all bytes in second String, first is left empty\n\n ///\n\n /// Note that when used with a tty the lines end with \\r\\n\n\n ///\n", "file_path": "src/reader.rs", "rank": 78, "score": 7.137007595243794 }, { "content": "extern crate rexpect;\n\n\n\nuse rexpect::errors::*;\n\nuse rexpect::process::wait;\n\nuse rexpect::spawn;\n\n\n\n/// The following code emits:\n\n/// cat exited with code 0, all good!\n\n/// cat exited with code 1\n\n/// Output (stdout and stderr): cat: /this/does/not/exist: No such file or directory\n", "file_path": "examples/exit_code.rs", "rank": 79, "score": 7.0091878109764645 }, { "content": "use rexpect::spawn_stream;\n\nuse std::error::Error;\n\nuse std::net::TcpStream;\n\n\n", "file_path": "examples/tcp.rs", "rank": 80, "score": 6.655625574417714 }, { "content": " );\n\n assert_eq!(\n\n (\"\".to_string(), \"f\".to_string()),\n\n r.read_until(&ReadUntil::NBytes(4)).expect(\"4 bytes\")\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_eof() {\n\n let f = io::Cursor::new(\"lorem ipsum dolor sit amet\");\n\n let mut r = NBReader::new(f, None);\n\n r.read_until(&ReadUntil::NBytes(2)).expect(\"2 bytes\");\n\n assert_eq!(\n\n (\"\".to_string(), \"rem ipsum dolor sit amet\".to_string()),\n\n r.read_until(&ReadUntil::EOF).expect(\"reading until EOF\")\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_try_read() 
{\n", "file_path": "src/reader.rs", "rank": 81, "score": 6.514036754325989 }, { "content": "# rexpect\n\n\n\n[![Build Status](https://api.travis-ci.org/philippkeller/rexpect.svg?branch=master)](https://travis-ci.org/philippkeller/rexpect)\n\n[![crates.io](https://img.shields.io/crates/v/rexpect.svg)](https://crates.io/crates/rexpect)\n\n[![Released API docs](https://docs.rs/rexpect/badge.svg)](https://docs.rs/rexpect)\n\n[![Master API docs](https://img.shields.io/badge/docs-master-2f343b.svg)](http://philippkeller.github.io/rexpect)\n\n\n\n\n\nSpawn, control, and respond to expected patterns of child applications and processes, enabling the automation of interactions and testing. Components include:\n\n- **session**: start a new process and interact with it; primary module of rexpect.\n\n- **reader**: non-blocking reader, which supports waiting for strings, regex, and EOF.\n\n- **process**: spawn a process in a pty.\n\n\n\nThe goal is to offer a similar set of functionality as [pexpect](https://pexpect.readthedocs.io/en/stable/overview.html).\n\n\n\n## Maintainers wanted\n\n\n\nI have created rexpect as a project to learn rust and linux. But now due to some reasons I haven't used Rust in the past 2 years, so I can't keep up with the latest features/crate dependencies\n\n\n\nIt has become hard now to judge pull requests. If you would be willing to either take over this repo entirely or join in as a maintainer to help evaluate PR please contact me.\n\n\n\n## Examples\n\n\n\n[For more examples, check the examples directory.](https://github.com/philippkeller/rexpect/tree/master/examples)\n\n\n", "file_path": "README.md", "rank": 82, "score": 6.208775468958931 }, { "content": " BrokenPipe {\n\n description(\"The pipe to the process is broken. 
Most probably because\\\n\n the process died.\")\n\n display(\"PipeError\")\n\n }\n\n Timeout(expected:String, got:String, timeout:time::Duration) {\n\n description(\"The process didn't end within the given timeout\")\n\n display(\"Timeout Error: Expected {} but got \\\"{}\\\" (after waiting {} ms)\",\n\n expected, got, (timeout.as_secs() * 1000) as u32\n\n + timeout.subsec_nanos() / 1_000_000)\n\n }\n\n EmptyProgramName {\n\n description(\"The provided program name is empty.\")\n\n display(\"EmptyProgramName\")\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 83, "score": 5.982212223244007 }, { "content": "}\n\n\n\nfn main() {\n\n do_bash().unwrap_or_else(|e| panic!(\"bash job failed with {}\", e));\n\n}\n\n\n\n```\n\n\n", "file_path": "README.md", "rank": 84, "score": 5.755670766802776 }, { "content": " } else if eof && buffer.len() > 0 {\n\n // reached almost end of buffer, return string, even though it will be\n\n // smaller than the wished n bytes\n\n Some((0, buffer.len()))\n\n } else {\n\n None\n\n }\n\n }\n\n &ReadUntil::Any(ref any) => {\n\n for read_until in any {\n\n if let Some(pos_tuple) = find(&read_until, buffer, eof) {\n\n return Some(pos_tuple);\n\n }\n\n }\n\n None\n\n }\n\n }\n\n}\n\n\n\n/// Non blocking reader\n", "file_path": "src/reader.rs", "rank": 85, "score": 5.713435344607669 }, { "content": "//! 
An example how you would test your own repl\n\n\n\nextern crate rexpect;\n\n\n\nuse rexpect::errors::*;\n\nuse rexpect::session::PtyReplSession;\n\nuse rexpect::spawn;\n\n\n", "file_path": "examples/repl.rs", "rank": 86, "score": 5.643926702156056 }, { "content": " .unwrap_or_else(|e| panic!(\"test_bash_control_chars failed: {}\", e));\n\n }\n\n\n\n #[test]\n\n fn test_tokenize_command() {\n\n let res = tokenize_command(\"prog arg1 arg2\");\n\n assert_eq!(vec![\"prog\", \"arg1\", \"arg2\"], res);\n\n\n\n let res = tokenize_command(\"prog -k=v\");\n\n assert_eq!(vec![\"prog\", \"-k=v\"], res);\n\n\n\n let res = tokenize_command(\"prog 'my text'\");\n\n assert_eq!(vec![\"prog\", \"'my text'\"], res);\n\n\n\n let res = tokenize_command(r#\"prog \"my text\"\"#);\n\n assert_eq!(vec![\"prog\", r#\"\"my text\"\"#], res);\n\n }\n\n\n\n #[test]\n\n fn test_tokenize_command() {\n", "file_path": "src/session.rs", "rank": 87, "score": 4.9144929912381095 }, { "content": " let res = tokenize_command(\"prog arg1 arg2\");\n\n assert_eq!(vec![\"prog\", \"arg1\", \"arg2\"], res);\n\n\n\n let res = tokenize_command(\"prog -k=v\");\n\n assert_eq!(vec![\"prog\", \"-k=v\"], res);\n\n\n\n let res = tokenize_command(\"prog 'my text'\");\n\n assert_eq!(vec![\"prog\", \"'my text'\"], res);\n\n\n\n let res = tokenize_command(r#\"prog \"my text\"\"#);\n\n assert_eq!(vec![\"prog\", r#\"\"my text\"\"#], res);\n\n }\n\n}\n", "file_path": "src/session.rs", "rank": 88, "score": 3.185840760784906 }, { "content": "extern crate rexpect;\n\n\n\nuse rexpect::errors::*;\n\nuse rexpect::spawn;\n\n\n", "file_path": "examples/ftp.rs", "rank": 89, "score": 3.18087594436643 }, { "content": "extern crate rexpect;\n\nuse rexpect::errors::*;\n\nuse rexpect::spawn_bash;\n\n\n", "file_path": "examples/bash.rs", "rank": 90, "score": 3.1532941971370465 }, { "content": " // this will be filled out in session::exp()\n\n if self.eof {\n\n return Err(ErrorKind::EOF(needle.to_string(), self.buffer.clone(), 
None).into());\n\n }\n\n\n\n // ran into timeout\n\n if let Some(timeout) = self.timeout {\n\n if start.elapsed() > timeout {\n\n return Err(ErrorKind::Timeout(\n\n needle.to_string(),\n\n self.buffer\n\n .clone()\n\n .replace(\"\\n\", \"`\\\\n`\\n\")\n\n .replace(\"\\r\", \"`\\\\r`\")\n\n .replace('\\u{1b}', \"`^`\"),\n\n timeout,\n\n )\n\n .into());\n\n }\n\n }\n", "file_path": "src/reader.rs", "rank": 91, "score": 2.081087871686287 } ]
Rust
src/lib.rs
ostwilkens/bevy_networking_turbulence
99bd744389ec9cfff5b9027bc8e29b40fa6cc957
use bevy::{ app::{AppBuilder, Events, Plugin}, ecs::prelude::*, tasks::{IoTaskPool, TaskPool}, }; #[cfg(not(target_arch = "wasm32"))] use crossbeam_channel::{unbounded, Receiver, Sender}; #[cfg(not(target_arch = "wasm32"))] use std::sync::RwLock; use std::{ collections::HashMap, error::Error, fmt::Debug, net::SocketAddr, sync::{atomic, Arc, Mutex}, }; use naia_client_socket::{ClientSocket, LinkConditionerConfig as ClientLinkConditionerConfig}; #[cfg(not(target_arch = "wasm32"))] use naia_server_socket::{ LinkConditionerConfig as ServerLinkConditionerConfig, MessageSender as ServerSender, ServerSocket, }; #[cfg(not(target_arch = "wasm32"))] pub use naia_server_socket::find_my_ip_address; use turbulence::{ buffer::BufferPacketPool, message_channels::ChannelMessage, packet::{Packet as PoolPacket, PacketPool, MAX_PACKET_LEN}, packet_multiplexer::MuxPacketPool, }; pub use turbulence::{ message_channels::{MessageChannelMode, MessageChannelSettings}, reliable_channel::Settings as ReliableChannelSettings, }; mod channels; mod transport; use self::channels::{SimpleBufferPool, TaskPoolRuntime}; pub use transport::{Connection, ConnectionChannelsBuilder, Packet}; pub type ConnectionHandle = u32; pub struct NetworkingPlugin; impl Plugin for NetworkingPlugin { fn build(&self, app: &mut AppBuilder) { let task_pool = app .resources() .get::<IoTaskPool>() .expect("IoTaskPool resource not found") .0 .clone(); app.add_resource(NetworkResource::new(task_pool)) .add_event::<NetworkEvent>() .add_system(receive_packets.system()); } } pub struct NetworkResource { task_pool: TaskPool, pending_connections: Arc<Mutex<Vec<Box<dyn Connection>>>>, connection_sequence: atomic::AtomicU32, pub connections: HashMap<ConnectionHandle, Box<dyn Connection>>, #[cfg(not(target_arch = "wasm32"))] listeners: Vec<ServerListener>, #[cfg(not(target_arch = "wasm32"))] server_channels: Arc<RwLock<HashMap<SocketAddr, Sender<Packet>>>>, runtime: TaskPoolRuntime, packet_pool: 
MuxPacketPool<BufferPacketPool<SimpleBufferPool>>, channels_builder_fn: Option<Box<dyn Fn(&mut ConnectionChannelsBuilder) + Send + Sync>>, } #[cfg(not(target_arch = "wasm32"))] #[allow(dead_code)] struct ServerListener { receiver_task: bevy::tasks::Task<()>, sender: ServerSender, socket_address: SocketAddr, } #[derive(Debug)] pub enum NetworkEvent { Connected(ConnectionHandle), Disconnected(ConnectionHandle), Packet(ConnectionHandle, Packet), } #[cfg(target_arch = "wasm32")] unsafe impl Send for NetworkResource {} #[cfg(target_arch = "wasm32")] unsafe impl Sync for NetworkResource {} impl NetworkResource { fn new(task_pool: TaskPool) -> Self { let runtime = TaskPoolRuntime::new(task_pool.clone()); let packet_pool = MuxPacketPool::new(BufferPacketPool::new(SimpleBufferPool(MAX_PACKET_LEN))); NetworkResource { task_pool, connections: HashMap::new(), connection_sequence: atomic::AtomicU32::new(0), pending_connections: Arc::new(Mutex::new(Vec::new())), #[cfg(not(target_arch = "wasm32"))] listeners: Vec::new(), #[cfg(not(target_arch = "wasm32"))] server_channels: Arc::new(RwLock::new(HashMap::new())), runtime, packet_pool, channels_builder_fn: None, } } #[cfg(not(target_arch = "wasm32"))] pub fn listen(&mut self, socket_address: SocketAddr) { let mut server_socket = futures_lite::future::block_on(ServerSocket::listen(socket_address)) .with_link_conditioner(&ServerLinkConditionerConfig::good_condition()); let sender = server_socket.get_sender(); let server_channels = self.server_channels.clone(); let pending_connections = self.pending_connections.clone(); let task_pool = self.task_pool.clone(); let receiver_task = self.task_pool.spawn(async move { loop { match server_socket.receive().await { Ok(packet) => { let address = packet.address(); let message = String::from_utf8_lossy(packet.payload()); log::debug!( "Server recv <- {}:{}: {}", address, packet.payload().len(), message ); match server_channels.write() { Ok(mut server_channels) => { if 
!server_channels.contains_key(&address) { let (packet_tx, packet_rx): (Sender<Packet>, Receiver<Packet>) = unbounded(); pending_connections.lock().unwrap().push(Box::new( transport::ServerConnection::new( task_pool.clone(), packet_rx, server_socket.get_sender(), address, ), )); server_channels.insert(address, packet_tx); } } Err(err) => { log::error!("Error locking server channels: {}", err); } } match server_channels .read() .unwrap() .get(&address) .unwrap() .send(Packet::copy_from_slice(packet.payload())) { Ok(()) => {} Err(error) => { log::error!("Server Send Error: {}", error); } } } Err(error) => { log::error!("Server Receive Error: {}", error); } } } }); self.listeners.push(ServerListener { receiver_task, sender, socket_address, }); } pub fn connect(&mut self, socket_address: SocketAddr) { let mut client_socket = ClientSocket::connect(socket_address) .with_link_conditioner(&ClientLinkConditionerConfig::good_condition()); let sender = client_socket.get_sender(); self.pending_connections .lock() .unwrap() .push(Box::new(transport::ClientConnection::new( self.task_pool.clone(), client_socket, sender, ))); } pub fn send( &mut self, handle: ConnectionHandle, payload: Packet, ) -> Result<(), Box<dyn Error + Send>> { match self.connections.get_mut(&handle) { Some(connection) => connection.send(payload), None => Err(Box::new(std::io::Error::new( std::io::ErrorKind::NotFound, "No such connection", ))), } } pub fn broadcast(&mut self, payload: Packet) { for (_handle, connection) in self.connections.iter_mut() { connection.send(payload.clone()).unwrap(); } } pub fn set_channels_builder<F>(&mut self, builder: F) where F: Fn(&mut ConnectionChannelsBuilder) + Send + Sync + 'static, { self.channels_builder_fn = Some(Box::new(builder)); } pub fn send_message<M: ChannelMessage + Debug + Clone>( &mut self, handle: ConnectionHandle, message: M, ) -> Result<Option<M>, Box<dyn Error + Send>> { match self.connections.get_mut(&handle) { Some(connection) => { let channels = 
connection.channels().unwrap(); let unsent = channels.send(message); channels.flush::<M>(); Ok(unsent) } None => Err(Box::new(std::io::Error::new( std::io::ErrorKind::NotFound, "No such connection", ))), } } pub fn broadcast_message<M: ChannelMessage + Debug + Clone>(&mut self, message: M) { for (handle, connection) in self.connections.iter_mut() { let channels = connection.channels().unwrap(); let result = channels.send(message.clone()); channels.flush::<M>(); if let Some(msg) = result { log::error!("Failed broadcast to [{}]: {:?}", handle, msg); } } } pub fn recv_message<M: ChannelMessage + Debug + Clone>( &mut self, handle: ConnectionHandle, ) -> Option<M> { match self.connections.get_mut(&handle) { Some(connection) => { let channels = connection.channels().unwrap(); channels.recv() } None => None, } } } pub fn receive_packets( mut net: ResMut<NetworkResource>, mut network_events: ResMut<Events<NetworkEvent>>, ) { let pending_connections: Vec<Box<dyn Connection>> = net.pending_connections.lock().unwrap().drain(..).collect(); for mut conn in pending_connections { let handle: ConnectionHandle = net .connection_sequence .fetch_add(1, atomic::Ordering::Relaxed); if let Some(channels_builder_fn) = net.channels_builder_fn.as_ref() { conn.build_channels( channels_builder_fn, net.runtime.clone(), net.packet_pool.clone(), ); } net.connections.insert(handle, conn); network_events.send(NetworkEvent::Connected(handle)); } let packet_pool = net.packet_pool.clone(); for (handle, connection) in net.connections.iter_mut() { while let Some(result) = connection.receive() { match result { Ok(packet) => { let message = String::from_utf8_lossy(&packet); log::debug!("Received on [{}] {} RAW: {}", handle, packet.len(), message); if let Some(channels_rx) = connection.channels_rx() { log::debug!("Processing as message"); let mut pool_packet = packet_pool.acquire(); pool_packet.resize(packet.len(), 0); pool_packet[..].copy_from_slice(&*packet); match channels_rx.try_send(pool_packet) { 
Ok(()) => { } Err(err) => { log::error!("Channel Incoming Error: {}", err); } } } else { log::debug!("Processing as packet"); network_events.send(NetworkEvent::Packet(*handle, packet)); } } Err(err) => { log::error!("Receive Error: {}", err); } } } } }
use bevy::{ app::{AppBuilder, Events, Plugin}, ecs::prelude::*, tasks::{IoTaskPool, TaskPool}, }; #[cfg(not(target_arch = "wasm32"))] use crossbeam_channel::{unbounded, Receiver, Sender}; #[cfg(not(target_arch = "wasm32"))] use std::sync::RwLock; use std::{ collections::HashMap, error::Error, fmt::Debug, net::SocketAddr, sync::{atomic, Arc, Mutex}, }; use naia_client_socket::{ClientSocket, LinkConditionerConfig as ClientLinkConditionerConfig}; #[cfg(not(target_arch = "wasm32"))] use naia_server_socket::{ LinkConditionerConfig as ServerLinkConditionerConfig, MessageSender as ServerSender, ServerSocket, }; #[cfg(not(target_arch = "wasm32"))] pub use naia_server_socket::find_my_ip_address; use turbulence::{ buffer::BufferPacketPool, message_channels::ChannelMessage, packet::{Packet as PoolPacket, PacketPool, MAX_PACKET_LEN}, packet_multiplexer::MuxPacketPool, }; pub use turbulence::{ message_channels::{MessageChannelMode, MessageChannelSettings}, reliable_channel::Settings as ReliableChannelSettings, }; mod channels; mod transport; use self::channels::{SimpleBufferPool, TaskPoolRuntime}; pub use transport::{Connection, ConnectionChannelsBuilder, Packet}; pub type ConnectionHandle = u32; pub struct NetworkingPlugin; impl Plugin for NetworkingPlugin { fn build(&self, app: &mut AppBuilder) { let task_pool = app .resources() .get::<IoTaskPool>() .expect("IoTaskPool resource not found") .0 .clone(); app.add_resource(NetworkResource::new(task_pool)) .add_event::<NetworkEvent>() .add_system(receive_packets.system()); } } pub struct NetworkResource { task_pool: TaskPool, pending_connections: Arc<Mutex<Vec<Box<dyn Connection>>>>, connection_sequence: atomic::AtomicU32, pub connections: HashMap<ConnectionHandle, Box<dyn Connection>>, #[cfg(not(target_arch = "wasm32"))] listeners: Vec<ServerListener>, #[cfg(not(target_arch = "wasm32"))] server_channels: Arc<RwLock<HashMap<SocketAddr, Sender<Packet>>>>, runtime: TaskPoolRuntime, packet_pool: 
MuxPacketPool<BufferPacketPool<SimpleBufferPool>>, channels_builder_fn: Option<Box<dyn Fn(&mut ConnectionChannelsBuilder) + Send + Sync>>, } #[cfg(not(target_arch = "wasm32"))] #[allow(dead_code)] struct ServerListener { receiver_task: bevy::tasks::Task<()>, sender: ServerSender, socket_address: SocketAddr, } #[derive(Debug)] pub enum NetworkEvent { Connected(ConnectionHandle), Disconnected(ConnectionHandle), Packet(ConnectionHandle, Packet), } #[cfg(target_arch = "wasm32")] unsafe impl Send for NetworkResource {} #[cfg(target_arch = "wasm32")] unsafe impl Sync for NetworkResource {} impl NetworkResource { fn new(task_pool: TaskPool) -> Self { let runtime = TaskPoolRuntime::new(task_pool.clone()); let packet_pool = MuxPacketPool::new(BufferPacketPool::new(SimpleBufferPool(MAX_PACKET_LEN))); NetworkResource { task_pool, connections: HashMap::new(), connection_sequence: atomic::AtomicU32::new(0), pending_connections: Arc::new(Mutex::new(Vec::new())), #[cfg(not(target_arch = "wasm32"))] listeners: Vec::new(), #[cfg(not(target_arch = "wasm32"))] server_channels: Arc::new(RwLock::new(HashMap::new())), runtime, packet_pool, channels_builder_fn: None, } } #[cfg(not(target_arch = "wasm32"))] pub fn listen(&mut self, socket_address: SocketAddr) { let mut server_socket = futures_lite::future::block_on(ServerSocket::listen(socket_address)) .with_link_conditioner(&ServerLinkConditionerConfig::good_condition()); let sender = server_socket.get_sender(); let server_channels = self.server_channels.clone(); let pending_connections = self.pending_connections.clone(); let task_pool = self.task_pool.clone(); let receiver_task = self.task_pool.spawn(async move { loop { match server_socket.receive().await { Ok(packet) => { let address = packet.address(); let message = String::from_utf8_lossy(packet.payload()); log::debug!( "Server recv <- {}:{}: {}", address, packet.payload().len(), message ); match server_channels.write() { Ok(mut server_channels) => { if 
!server_channels.contains_key(&address) { let (packet_tx, packet_rx): (Sender<Packet>, Receiver<Packet>) = unbounded(); pending_connections.lock().unwrap().push(Box::new( transport::ServerConnection::new( task_pool.clone(), packet_rx, server_socket.get_sender(), address, ), )); server_channels.insert(address, packet_tx); } } Err(err) => { log::error!("Error locking server channels: {}", err); } } match server_channels .read() .unwrap() .get(&address) .unwrap() .send(Packet::copy_from_slice(packet.payload())) { Ok(()) => {} Err(error) => { log::error!("Server Send Error: {}", error); } } } Err(error) => { log::error!("Server Receive Error: {}", error); } } } }); self.listeners.push(ServerListener { receiver_task, sender, socket_address, }); } pub fn connect(&mut self, socket_address: SocketAddr) { let mut client_socket = ClientSocket::connect(socket_address) .with_link_conditioner(&ClientLinkConditionerConfig::good_condition()); let sender = client_socket.get_sender(); self.pending_connections .lock() .unwrap() .push(Box::new(transport::ClientConnection::new( self.task_pool.clone(), client_socket, sender, ))); } pub fn send( &mut self, handle: ConnectionHandle, payload: Packet, ) -> Result<(), Box<dyn Error + Send>> { match self.connections.get_mut(&handle) { Some(connection) => connection.send(payload), None => Err(Box::new(std::io::Error::new( std::io::ErrorKind::NotFound, "No such connection", ))), } } pub fn broadcast(&mut self, payload: Packet) { for (_handle, connection) in self.connections.iter_mut() { connection.send(payload.clone()).unwrap(); } } pub fn set_channels_builder<F>(&mut self, builder: F) where F: Fn(&mut ConnectionChannelsBuilder) + Send + Sync + 'static, { self.channels_builder_fn = Some(Box::new(builder)); } pub fn send_message<M: ChannelMessage + Debug + Clone>( &
pub fn broadcast_message<M: ChannelMessage + Debug + Clone>(&mut self, message: M) { for (handle, connection) in self.connections.iter_mut() { let channels = connection.channels().unwrap(); let result = channels.send(message.clone()); channels.flush::<M>(); if let Some(msg) = result { log::error!("Failed broadcast to [{}]: {:?}", handle, msg); } } } pub fn recv_message<M: ChannelMessage + Debug + Clone>( &mut self, handle: ConnectionHandle, ) -> Option<M> { match self.connections.get_mut(&handle) { Some(connection) => { let channels = connection.channels().unwrap(); channels.recv() } None => None, } } } pub fn receive_packets( mut net: ResMut<NetworkResource>, mut network_events: ResMut<Events<NetworkEvent>>, ) { let pending_connections: Vec<Box<dyn Connection>> = net.pending_connections.lock().unwrap().drain(..).collect(); for mut conn in pending_connections { let handle: ConnectionHandle = net .connection_sequence .fetch_add(1, atomic::Ordering::Relaxed); if let Some(channels_builder_fn) = net.channels_builder_fn.as_ref() { conn.build_channels( channels_builder_fn, net.runtime.clone(), net.packet_pool.clone(), ); } net.connections.insert(handle, conn); network_events.send(NetworkEvent::Connected(handle)); } let packet_pool = net.packet_pool.clone(); for (handle, connection) in net.connections.iter_mut() { while let Some(result) = connection.receive() { match result { Ok(packet) => { let message = String::from_utf8_lossy(&packet); log::debug!("Received on [{}] {} RAW: {}", handle, packet.len(), message); if let Some(channels_rx) = connection.channels_rx() { log::debug!("Processing as message"); let mut pool_packet = packet_pool.acquire(); pool_packet.resize(packet.len(), 0); pool_packet[..].copy_from_slice(&*packet); match channels_rx.try_send(pool_packet) { Ok(()) => { } Err(err) => { log::error!("Channel Incoming Error: {}", err); } } } else { log::debug!("Processing as packet"); network_events.send(NetworkEvent::Packet(*handle, packet)); } } Err(err) => { 
log::error!("Receive Error: {}", err); } } } } }
mut self, handle: ConnectionHandle, message: M, ) -> Result<Option<M>, Box<dyn Error + Send>> { match self.connections.get_mut(&handle) { Some(connection) => { let channels = connection.channels().unwrap(); let unsent = channels.send(message); channels.flush::<M>(); Ok(unsent) } None => Err(Box::new(std::io::Error::new( std::io::ErrorKind::NotFound, "No such connection", ))), } }
function_block-function_prefix_line
[ { "content": "pub trait Connection: Send + Sync {\n\n fn remote_address(&self) -> Option<SocketAddr>;\n\n\n\n fn send(&mut self, payload: Packet) -> Result<(), Box<dyn Error + Send>>;\n\n\n\n fn receive(&mut self) -> Option<Result<Packet, Box<dyn Error + Send>>>;\n\n\n\n fn build_channels(\n\n &mut self,\n\n builder_fn: &(dyn Fn(&mut ConnectionChannelsBuilder) + Send + Sync),\n\n runtime: TaskPoolRuntime,\n\n pool: MuxPacketPool<BufferPacketPool<SimpleBufferPool>>,\n\n );\n\n\n\n fn channels(&mut self) -> Option<&mut MessageChannels>;\n\n\n\n fn channels_rx(&mut self) -> Option<&mut IncomingMultiplexedPackets<MultiplexedPacket>>;\n\n}\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n", "file_path": "src/transport.rs", "rank": 0, "score": 136622.586853315 }, { "content": "fn handle_messages_server(mut net: ResMut<NetworkResource>, mut balls: Query<(&mut Ball, &Pawn)>) {\n\n for (handle, connection) in net.connections.iter_mut() {\n\n let channels = connection.channels().unwrap();\n\n while let Some(client_message) = channels.recv::<ClientMessage>() {\n\n log::debug!(\n\n \"ClientMessage received on [{}]: {:?}\",\n\n handle,\n\n client_message\n\n );\n\n match client_message {\n\n ClientMessage::Hello(id) => {\n\n log::info!(\"Client [{}] connected on [{}]\", id, handle);\n\n // TODO: store client id?\n\n }\n\n ClientMessage::Direction(dir) => {\n\n let mut angle: f32 = 0.03;\n\n if dir == Direction::Right {\n\n angle *= -1.0;\n\n }\n\n for (mut ball, pawn) in balls.iter_mut() {\n", "file_path": "examples/balls.rs", "rank": 2, "score": 106045.05716137444 }, { "content": "type ServerIds = HashMap<u32, (u32, u32)>;\n\n\n", "file_path": "examples/balls.rs", "rank": 3, "score": 95752.47115375975 }, { "content": "fn server_setup(mut net: ResMut<NetworkResource>) {\n\n let ip_address =\n\n bevy_networking_turbulence::find_my_ip_address().expect(\"can't find ip address\");\n\n let socket_address = SocketAddr::new(ip_address, SERVER_PORT);\n\n log::info!(\"Starting 
server\");\n\n net.listen(socket_address);\n\n}\n\n\n", "file_path": "examples/balls.rs", "rank": 4, "score": 90093.59949925251 }, { "content": "type MultiplexedPacket = MuxPacket<<BufferPacketPool<SimpleBufferPool> as PacketPool>::Packet>;\n\npub type ConnectionChannelsBuilder =\n\n MessageChannelsBuilder<TaskPoolRuntime, MuxPacketPool<BufferPacketPool<SimpleBufferPool>>>;\n\n\n", "file_path": "src/transport.rs", "rank": 5, "score": 88460.0056575327 }, { "content": "fn send_packets(mut net: ResMut<NetworkResource>, time: Res<Time>, args: Res<Args>) {\n\n if !args.is_server {\n\n if (time.seconds_since_startup * 60.) as i64 % 60 == 0 {\n\n log::info!(\"PING\");\n\n net.broadcast(Packet::from(\"PING\"));\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/simple.rs", "rank": 6, "score": 81453.49994485984 }, { "content": "pub fn parse_args() -> Args {\n\n cfg_if::cfg_if! {\n\n if #[cfg(target_arch = \"wasm32\")] {\n\n let is_server = false;\n\n } else {\n\n let args: Vec<String> = std::env::args().collect();\n\n\n\n if args.len() < 2 {\n\n panic!(\"Need to select to run as either a server (--server) or a client (--client).\");\n\n }\n\n\n\n let connection_type = &args[1];\n\n\n\n let is_server = match connection_type.as_str() {\n\n \"--server\" | \"-s\" => true,\n\n \"--client\" | \"-c\" => false,\n\n _ => panic!(\"Need to select to run as either a server (--server) or a client (--client).\"),\n\n };\n\n }\n\n }\n\n\n\n Args { is_server }\n\n}\n", "file_path": "examples/utils/mod.rs", "rank": 7, "score": 80801.99968898919 }, { "content": "fn handle_packets(\n\n mut net: ResMut<NetworkResource>,\n\n time: Res<Time>,\n\n mut state: ResMut<NetworkReader>,\n\n network_events: Res<Events<NetworkEvent>>,\n\n) {\n\n for event in state.network_events.iter(&network_events) {\n\n match event {\n\n NetworkEvent::Packet(handle, packet) => {\n\n let message = String::from_utf8_lossy(packet);\n\n log::info!(\"Got packet on [{}]: {}\", handle, message);\n\n if message == \"PING\" {\n\n 
let message = format!(\"PONG @ {}\", time.seconds_since_startup);\n\n match net.send(*handle, Packet::from(message)) {\n\n Ok(()) => {\n\n log::info!(\"Sent PONG\");\n\n }\n\n Err(error) => {\n\n log::info!(\"PONG send error: {}\", error);\n\n }\n\n }\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n}\n", "file_path": "examples/simple.rs", "rank": 9, "score": 76875.07582169892 }, { "content": "fn handle_packets(\n\n mut commands: Commands,\n\n mut net: ResMut<NetworkResource>,\n\n mut state: ResMut<NetworkReader>,\n\n args: Res<Args>,\n\n network_events: Res<Events<NetworkEvent>>,\n\n) {\n\n for event in state.network_events.iter(&network_events) {\n\n match event {\n\n NetworkEvent::Connected(handle) => match net.connections.get_mut(handle) {\n\n Some(connection) => {\n\n match connection.remote_address() {\n\n Some(remote_address) => {\n\n log::debug!(\n\n \"Incoming connection on [{}] from [{}]\",\n\n handle,\n\n remote_address\n\n );\n\n\n\n // New client connected - spawn a ball\n", "file_path": "examples/balls.rs", "rank": 10, "score": 76875.07582169892 }, { "content": "fn network_setup(mut net: ResMut<NetworkResource>) {\n\n net.set_channels_builder(|builder: &mut ConnectionChannelsBuilder| {\n\n builder\n\n .register::<ClientMessage>(CLIENT_STATE_MESSAGE_SETTINGS)\n\n .unwrap();\n\n builder\n\n .register::<GameStateMessage>(GAME_STATE_MESSAGE_SETTINGS)\n\n .unwrap();\n\n });\n\n}\n\n\n", "file_path": "examples/balls.rs", "rank": 11, "score": 74989.7074439983 }, { "content": "fn client_setup(mut commands: Commands, mut net: ResMut<NetworkResource>) {\n\n let mut camera = Camera2dComponents::default();\n\n camera.orthographic_projection.window_origin = WindowOrigin::BottomLeft;\n\n commands.spawn(camera);\n\n\n\n let ip_address =\n\n bevy_networking_turbulence::find_my_ip_address().expect(\"can't find ip address\");\n\n let socket_address = SocketAddr::new(ip_address, SERVER_PORT);\n\n log::info!(\"Starting client\");\n\n net.connect(socket_address);\n\n}\n\n\n", 
"file_path": "examples/balls.rs", "rank": 12, "score": 74424.14007324765 }, { "content": "fn handle_messages_client(\n\n mut commands: Commands,\n\n mut net: ResMut<NetworkResource>,\n\n mut server_ids: ResMut<ServerIds>,\n\n mut materials: ResMut<Assets<ColorMaterial>>,\n\n mut balls: Query<(Entity, &mut Ball, &mut Transform)>,\n\n) {\n\n for (handle, connection) in net.connections.iter_mut() {\n\n let channels = connection.channels().unwrap();\n\n while let Some(_client_message) = channels.recv::<ClientMessage>() {\n\n log::error!(\"ClientMessage received on [{}]\", handle);\n\n }\n\n\n\n // it is possible that many state updates came at the same time - spawn once\n\n let mut to_spawn: HashMap<u32, (u32, Vec3, Vec3)> = HashMap::new();\n\n\n\n while let Some(mut state_message) = channels.recv::<GameStateMessage>() {\n\n let message_frame = state_message.frame;\n\n log::info!(\n\n \"GameStateMessage received on [{}]: {:?}\",\n", "file_path": "examples/balls.rs", "rank": 13, "score": 73456.22370103707 }, { "content": "fn startup(mut net: ResMut<NetworkResource>, args: Res<Args>) {\n\n cfg_if::cfg_if! 
{\n\n if #[cfg(target_arch = \"wasm32\")] {\n\n // FIXME: set this address to your local machine\n\n let mut socket_address: SocketAddr = \"192.168.1.20:0\".parse().unwrap();\n\n socket_address.set_port(SERVER_PORT);\n\n } else {\n\n let ip_address =\n\n bevy_networking_turbulence::find_my_ip_address().expect(\"can't find ip address\");\n\n let socket_address = SocketAddr::new(ip_address, SERVER_PORT);\n\n }\n\n }\n\n\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n if args.is_server {\n\n log::info!(\"Starting server\");\n\n net.listen(socket_address);\n\n }\n\n if !args.is_server {\n\n log::info!(\"Starting client\");\n\n net.connect(socket_address);\n\n }\n\n}\n\n\n", "file_path": "examples/simple.rs", "rank": 14, "score": 66702.65904653419 }, { "content": "#[derive(Serialize, Deserialize, Debug, Clone)]\n\nenum ClientMessage {\n\n Hello(String),\n\n Direction(Direction),\n\n}\n\n\n\nconst CLIENT_STATE_MESSAGE_SETTINGS: MessageChannelSettings = MessageChannelSettings {\n\n channel: 0,\n\n channel_mode: MessageChannelMode::Reliable {\n\n reliability_settings: ReliableChannelSettings {\n\n bandwidth: 4096,\n\n recv_window_size: 1024,\n\n send_window_size: 1024,\n\n burst_bandwidth: 1024,\n\n init_send: 512,\n\n wakeup_time: Duration::from_millis(100),\n\n initial_rtt: Duration::from_millis(200),\n\n max_rtt: Duration::from_secs(2),\n\n rtt_update_factor: 0.1,\n\n rtt_resend_factor: 1.5,\n\n },\n\n max_message_len: 1024,\n\n },\n\n message_buffer_size: 8,\n\n packet_buffer_size: 8,\n\n};\n\n\n", "file_path": "examples/balls.rs", "rank": 15, "score": 61370.544173091395 }, { "content": "fn ball_control_system(mut net: ResMut<NetworkResource>, keyboard_input: Res<Input<KeyCode>>) {\n\n if keyboard_input.pressed(KeyCode::Left) {\n\n net.broadcast_message(ClientMessage::Direction(Direction::Left));\n\n }\n\n\n\n if keyboard_input.pressed(KeyCode::Right) {\n\n net.broadcast_message(ClientMessage::Direction(Direction::Right));\n\n }\n\n}\n\n\n", "file_path": 
"examples/balls.rs", "rank": 16, "score": 58231.044509944506 }, { "content": "#[derive(Serialize, Deserialize, Debug, Clone)]\n\nstruct GameStateMessage {\n\n frame: u32,\n\n balls: Vec<(u32, Vec3, Vec3)>,\n\n}\n\n\n\nconst GAME_STATE_MESSAGE_SETTINGS: MessageChannelSettings = MessageChannelSettings {\n\n channel: 1,\n\n channel_mode: MessageChannelMode::Unreliable,\n\n message_buffer_size: 8,\n\n packet_buffer_size: 8,\n\n};\n\n\n", "file_path": "examples/balls.rs", "rank": 17, "score": 57177.3341163477 }, { "content": "fn ball_movement_system(time: Res<Time>, mut ball_query: Query<(&Ball, &mut Transform)>) {\n\n for (ball, mut transform) in ball_query.iter_mut() {\n\n let mut translation = transform.translation + (ball.velocity * time.delta_seconds);\n\n let mut x = translation.x() as i32 % BOARD_WIDTH as i32;\n\n let mut y = translation.y() as i32 % BOARD_HEIGHT as i32;\n\n if x < 0 {\n\n x += BOARD_WIDTH as i32;\n\n }\n\n if y < 0 {\n\n y += BOARD_HEIGHT as i32;\n\n }\n\n translation.set_x(x as f32);\n\n translation.set_y(y as f32);\n\n transform.translation = translation;\n\n }\n\n}\n\n\n", "file_path": "examples/balls.rs", "rank": 18, "score": 52644.76303399658 }, { "content": "#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]\n\nenum Direction {\n\n Left,\n\n Right,\n\n}\n\n\n", "file_path": "examples/balls.rs", "rank": 19, "score": 42097.99577656861 }, { "content": "struct Ball {\n\n velocity: Vec3,\n\n}\n\n\n", "file_path": "examples/balls.rs", "rank": 20, "score": 40218.954085293306 }, { "content": "struct Pawn {\n\n controller: u32,\n\n}\n", "file_path": "examples/balls.rs", "rank": 21, "score": 40218.954085293306 }, { "content": "#[derive(Default)]\n\nstruct NetworkReader {\n\n network_events: EventReader<NetworkEvent>,\n\n}\n\n\n", "file_path": "examples/simple.rs", "rank": 22, "score": 38902.74637515672 }, { "content": "#[derive(Default)]\n\nstruct NetworkBroadcast {\n\n frame: u32,\n\n}\n\n\n", "file_path": "examples/balls.rs", "rank": 
23, "score": 38902.74637515672 }, { "content": "struct BallsExample;\n\n\n\nimpl Plugin for BallsExample {\n\n fn build(&self, app: &mut AppBuilder) {\n\n let args = parse_args();\n\n if args.is_server {\n\n // Server\n\n app.add_resource(ScheduleRunnerSettings::run_loop(Duration::from_secs_f64(\n\n 1.0 / 60.0,\n\n )))\n\n .add_plugins(MinimalPlugins)\n\n .add_startup_system(server_setup.system())\n\n .add_system(ball_movement_system.system())\n\n .add_resource(NetworkBroadcast { frame: 0 })\n\n .add_system_to_stage(stage::PRE_UPDATE, handle_messages_server.system())\n\n .add_system_to_stage(stage::POST_UPDATE, network_broadcast_system.system())\n\n } else {\n\n // Client\n\n app.add_resource(WindowDescriptor {\n\n width: BOARD_WIDTH,\n", "file_path": "examples/balls.rs", "rank": 24, "score": 38902.74637515672 }, { "content": "#[derive(Default)]\n\nstruct NetworkReader {\n\n network_events: EventReader<NetworkEvent>,\n\n}\n\n\n", "file_path": "examples/balls.rs", "rank": 25, "score": 38902.74637515672 }, { "content": "fn main() {\n\n cfg_if::cfg_if! 
{\n\n if #[cfg(target_arch = \"wasm32\")] {\n\n std::panic::set_hook(Box::new(console_error_panic_hook::hook));\n\n console_log::init_with_level(log::Level::Debug).expect(\"cannot initialize console_log\");\n\n }\n\n else {\n\n simple_logger::SimpleLogger::from_env()\n\n .init()\n\n .expect(\"A logger was already initialized\");\n\n }\n\n }\n\n\n\n App::build()\n\n // minimal plugins necessary for timers + headless loop\n\n .add_resource(ScheduleRunnerSettings::run_loop(Duration::from_secs_f64(\n\n 1.0 / 60.0,\n\n )))\n\n .add_plugins(MinimalPlugins)\n\n // The NetworkingPlugin\n\n .add_plugin(NetworkingPlugin)\n\n // Our networking\n\n .add_resource(parse_args())\n\n .add_startup_system(startup.system())\n\n .add_system(send_packets.system())\n\n .init_resource::<NetworkReader>()\n\n .add_system(handle_packets.system())\n\n .run();\n\n}\n\n\n", "file_path": "examples/simple.rs", "rank": 26, "score": 36987.83625494291 }, { "content": "fn main() {\n\n simple_logger::SimpleLogger::from_env()\n\n .init()\n\n .expect(\"A logger was already initialized\");\n\n\n\n App::build().add_plugin(BallsExample).run();\n\n}\n\n\n", "file_path": "examples/balls.rs", "rank": 27, "score": 36987.83625494291 }, { "content": "fn network_broadcast_system(\n\n mut state: ResMut<NetworkBroadcast>,\n\n mut net: ResMut<NetworkResource>,\n\n ball_query: Query<(Entity, &Ball, &Transform)>,\n\n) {\n\n let mut message = GameStateMessage {\n\n frame: state.frame,\n\n balls: Vec::new(),\n\n };\n\n state.frame += 1;\n\n\n\n for (entity, ball, transform) in ball_query.iter() {\n\n message\n\n .balls\n\n .push((entity.id(), ball.velocity, transform.translation));\n\n }\n\n\n\n net.broadcast_message(message);\n\n}\n\n\n", "file_path": "examples/balls.rs", "rank": 28, "score": 34586.272095189 }, { "content": "# bevy_networking_turbulence\n\n\n\nNetworking plugin for [Bevy engine][1] running on [naia-socket][2] and [turbulence][3] libraries.\n\n\n\nCombination of naia-socket and turbulence allows for 
exchange of raw messages over UDP or UDP-like connection (over WebRTC),\n\nor building unreliable/reliable channels of structured messages over such UDP/UDP-like messages.\n\n\n\nThis plugin works both in native (Linux, Windows) over UDP packets and in browser/WASM over UDP-like messages in WebRTC channel.\n\n\n\nStill unfinished, but main features are working.\n\n\n\n[1]: https://github.com/bevyengine/bevy\n\n[2]: https://github.com/naia-rs/naia-socket\n\n[3]: https://github.com/kyren/turbulence\n\n\n\n## Testing\n\n\n\n### Native\n\n\n\nOn one terminal run:\n\n\n\n $ env RUST_LOG=debug cargo run --example simple --features use-udp -- --server\n\n\n\nOn other terminal run:\n\n\n\n $ env RUST_LOG=debug cargo run --example simple --features use-udp -- --client\n\n\n\n### WASM\n\n\n\nOn one terminal run:\n\n\n\n $ env RUST_LOG=debug cargo run --example simple --no-default-features --features use-webrtc -- --server\n\n\n\nChange IP address in `examples/simple.rs` / `startup()` function to point to your local machine, and run:\n\n\n\n $ cargo build --example simple --target wasm32-unknown-unknown\n\n $ wasm-bindgen --out-dir target --target web target/wasm32-unknown-unknown/debug/examples/simple.wasm\n\n $ basic-http-server .\n\n\n\nOpen <http://127.0.0.1:4000>.\n", "file_path": "README.md", "rank": 29, "score": 25929.906350483932 }, { "content": "#![allow(unused)]\n\n\n\nuse std::{\n\n future::Future,\n\n ops::Deref,\n\n pin::Pin,\n\n sync::{Arc, Mutex},\n\n task::{Context, Poll, Waker},\n\n time::Duration,\n\n};\n\n\n\nuse bevy::tasks::{Task, TaskPool};\n\nuse futures::{stream, Stream};\n\nuse futures_timer::Delay;\n\n\n\nuse turbulence::{\n\n buffer::BufferPool,\n\n packet::{Packet, PacketPool},\n\n packet_multiplexer::{MuxPacket, MuxPacketPool},\n\n runtime::Runtime,\n", "file_path": "src/channels.rs", "rank": 30, "score": 23487.66462281276 }, { "content": " type Sleep = Pin<Box<dyn Future<Output = ()> + Send>>;\n\n\n\n fn spawn<F: Future<Output = ()> + Send + 
'static>(&self, f: F) {\n\n let task = self.pool.spawn(Box::pin(f));\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n self.tasks.lock().unwrap().push(task);\n\n }\n\n\n\n fn now(&self) -> Self::Instant {\n\n Self::Instant::now()\n\n }\n\n\n\n fn elapsed(&self, instant: Self::Instant) -> Duration {\n\n instant.elapsed()\n\n }\n\n\n\n fn duration_between(&self, earlier: Self::Instant, later: Self::Instant) -> Duration {\n\n later.duration_since(earlier)\n\n }\n\n\n\n fn sleep(&self, duration: Duration) -> Self::Sleep {\n\n let state = Arc::clone(&self.0);\n\n Box::pin(async move {\n\n Delay::new(duration).await;\n\n })\n\n }\n\n}\n", "file_path": "src/channels.rs", "rank": 31, "score": 23486.243204786817 }, { "content": "};\n\n\n\n#[derive(Clone, Debug)]\n\npub struct SimpleBufferPool(pub usize);\n\n\n\nimpl BufferPool for SimpleBufferPool {\n\n type Buffer = Box<[u8]>;\n\n\n\n fn acquire(&self) -> Self::Buffer {\n\n vec![0; self.0].into_boxed_slice()\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct TaskPoolRuntime(Arc<TaskPoolRuntimeInner>);\n\n\n\npub struct TaskPoolRuntimeInner {\n\n pool: TaskPool,\n\n tasks: Mutex<Vec<Task<()>>>, // FIXME: cleanup finished\n\n}\n", "file_path": "src/channels.rs", "rank": 32, "score": 23485.975126876514 }, { "content": "\n\nimpl TaskPoolRuntime {\n\n pub fn new(pool: TaskPool) -> Self {\n\n TaskPoolRuntime(Arc::new(TaskPoolRuntimeInner {\n\n pool,\n\n tasks: Mutex::new(Vec::new()),\n\n }))\n\n }\n\n}\n\n\n\nimpl Deref for TaskPoolRuntime {\n\n type Target = TaskPoolRuntimeInner;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl Runtime for TaskPoolRuntime {\n\n type Instant = instant::Instant;\n", "file_path": "src/channels.rs", "rank": 33, "score": 23484.523890442808 }, { "content": " self.sender\n\n .as_mut()\n\n .unwrap()\n\n .send(ServerPacket::new(self.client_address, payload.to_vec())),\n\n )\n\n }\n\n\n\n fn receive(&mut self) -> Option<Result<Packet, Box<dyn Error + Send>>> {\n\n match 
self.packet_rx.try_recv() {\n\n Ok(payload) => Some(Ok(payload)),\n\n Err(error) => match error {\n\n crossbeam_channel::TryRecvError::Empty => None,\n\n err => Some(Err(Box::new(err))),\n\n },\n\n }\n\n }\n\n\n\n fn build_channels(\n\n &mut self,\n\n builder_fn: &(dyn Fn(&mut ConnectionChannelsBuilder) + Send + Sync),\n", "file_path": "src/transport.rs", "rank": 34, "score": 22711.304593991284 }, { "content": " }\n\n}\n\n\n\nimpl Connection for ClientConnection {\n\n fn remote_address(&self) -> Option<SocketAddr> {\n\n None\n\n }\n\n\n\n fn send(&mut self, payload: Packet) -> Result<(), Box<dyn Error + Send>> {\n\n self.sender\n\n .as_mut()\n\n .unwrap()\n\n .send(ClientPacket::new(payload.to_vec()))\n\n }\n\n\n\n fn receive(&mut self) -> Option<Result<Packet, Box<dyn Error + Send>>> {\n\n match self.socket.receive() {\n\n Ok(event) => match event {\n\n Some(packet) => Some(Ok(Packet::copy_from_slice(packet.payload()))),\n\n None => None,\n", "file_path": "src/transport.rs", "rank": 35, "score": 22706.069837496572 }, { "content": " ServerConnection {\n\n task_pool,\n\n packet_rx,\n\n sender: Some(sender),\n\n client_address,\n\n channels: None,\n\n channels_rx: None,\n\n channels_task: None,\n\n }\n\n }\n\n}\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nimpl Connection for ServerConnection {\n\n fn remote_address(&self) -> Option<SocketAddr> {\n\n Some(self.client_address)\n\n }\n\n\n\n fn send(&mut self, payload: Packet) -> Result<(), Box<dyn Error + Send>> {\n\n block_on(\n", "file_path": "src/transport.rs", "rank": 36, "score": 22701.92490419274 }, { "content": " },\n\n Err(err) => Some(Err(Box::new(err))),\n\n }\n\n }\n\n\n\n fn build_channels(\n\n &mut self,\n\n builder_fn: &(dyn Fn(&mut ConnectionChannelsBuilder) + Send + Sync),\n\n runtime: TaskPoolRuntime,\n\n pool: MuxPacketPool<BufferPacketPool<SimpleBufferPool>>,\n\n ) {\n\n let mut builder = MessageChannelsBuilder::new(runtime, pool);\n\n builder_fn(&mut builder);\n\n\n\n let mut multiplexer = 
PacketMultiplexer::new();\n\n self.channels = Some(builder.build(&mut multiplexer));\n\n let (channels_rx, mut channels_tx) = multiplexer.start();\n\n self.channels_rx = Some(channels_rx);\n\n\n\n let mut sender = self.sender.take().unwrap();\n", "file_path": "src/transport.rs", "rank": 37, "score": 22699.78597299838 }, { "content": " runtime: TaskPoolRuntime,\n\n pool: MuxPacketPool<BufferPacketPool<SimpleBufferPool>>,\n\n ) {\n\n let mut builder = MessageChannelsBuilder::new(runtime, pool);\n\n builder_fn(&mut builder);\n\n\n\n let mut multiplexer = PacketMultiplexer::new();\n\n self.channels = Some(builder.build(&mut multiplexer));\n\n let (channels_rx, mut channels_tx) = multiplexer.start();\n\n self.channels_rx = Some(channels_rx);\n\n\n\n let mut sender = self.sender.take().unwrap();\n\n let client_address = self.client_address;\n\n self.channels_task = Some(self.task_pool.spawn(async move {\n\n loop {\n\n let packet = channels_tx.next().await.unwrap();\n\n sender\n\n .send(ServerPacket::new(client_address, (*packet).into()))\n\n .await\n\n .unwrap();\n", "file_path": "src/transport.rs", "rank": 38, "score": 22699.733826401392 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\nuse bevy::tasks::Task;\n\nuse bevy::tasks::TaskPool;\n\nuse bytes::Bytes;\n\nuse std::{error::Error, net::SocketAddr};\n\n\n\nuse naia_client_socket::{\n\n ClientSocketTrait, MessageSender as ClientSender, Packet as ClientPacket,\n\n};\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nuse naia_server_socket::{MessageSender as ServerSender, Packet as ServerPacket};\n\n\n\nuse turbulence::{\n\n buffer::BufferPacketPool,\n\n message_channels::{MessageChannels, MessageChannelsBuilder},\n\n packet::PacketPool,\n\n packet_multiplexer::{IncomingMultiplexedPackets, MuxPacket, MuxPacketPool, PacketMultiplexer},\n\n};\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nuse futures_lite::future::block_on;\n\n\n\nuse futures_lite::StreamExt;\n\n\n\nuse super::channels::{SimpleBufferPool, 
TaskPoolRuntime};\n\n\n\npub type Packet = Bytes;\n", "file_path": "src/transport.rs", "rank": 39, "score": 22697.825857970587 }, { "content": "pub struct ServerConnection {\n\n task_pool: TaskPool,\n\n\n\n packet_rx: crossbeam_channel::Receiver<Packet>,\n\n sender: Option<ServerSender>,\n\n client_address: SocketAddr,\n\n\n\n channels: Option<MessageChannels>,\n\n channels_rx: Option<IncomingMultiplexedPackets<MultiplexedPacket>>,\n\n channels_task: Option<Task<()>>,\n\n}\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nimpl ServerConnection {\n\n pub fn new(\n\n task_pool: TaskPool,\n\n packet_rx: crossbeam_channel::Receiver<Packet>,\n\n sender: ServerSender,\n\n client_address: SocketAddr,\n\n ) -> Self {\n", "file_path": "src/transport.rs", "rank": 40, "score": 22697.424804342714 }, { "content": "\n\n fn channels(&mut self) -> Option<&mut MessageChannels> {\n\n self.channels.as_mut()\n\n }\n\n\n\n fn channels_rx(&mut self) -> Option<&mut IncomingMultiplexedPackets<MultiplexedPacket>> {\n\n self.channels_rx.as_mut()\n\n }\n\n}\n\n\n\n#[cfg(target_arch = \"wasm32\")]\n\nunsafe impl Send for ClientConnection {}\n\n#[cfg(target_arch = \"wasm32\")]\n\nunsafe impl Sync for ClientConnection {}\n", "file_path": "src/transport.rs", "rank": 41, "score": 22697.18791291884 }, { "content": " #[allow(unused_variables)]\n\n let channels_task = self.task_pool.spawn(async move {\n\n loop {\n\n match channels_tx.next().await {\n\n Some(packet) => {\n\n sender.send(ClientPacket::new((*packet).into())).unwrap();\n\n }\n\n None => {\n\n log::error!(\"Channel stream Disconnected\");\n\n return; // exit task\n\n }\n\n }\n\n }\n\n });\n\n\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n {\n\n self.channels_task = Some(channels_task);\n\n }\n\n }\n", "file_path": "src/transport.rs", "rank": 42, "score": 22694.945602560158 }, { "content": " channels_rx: Option<IncomingMultiplexedPackets<MultiplexedPacket>>,\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n channels_task: 
Option<Task<()>>,\n\n}\n\n\n\nimpl ClientConnection {\n\n pub fn new(\n\n task_pool: TaskPool,\n\n socket: Box<dyn ClientSocketTrait>,\n\n sender: ClientSender,\n\n ) -> Self {\n\n ClientConnection {\n\n task_pool,\n\n socket,\n\n sender: Some(sender),\n\n channels: None,\n\n channels_rx: None,\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n channels_task: None,\n\n }\n", "file_path": "src/transport.rs", "rank": 43, "score": 22693.319109161686 }, { "content": " }\n\n }));\n\n }\n\n\n\n fn channels(&mut self) -> Option<&mut MessageChannels> {\n\n self.channels.as_mut()\n\n }\n\n\n\n fn channels_rx(&mut self) -> Option<&mut IncomingMultiplexedPackets<MultiplexedPacket>> {\n\n self.channels_rx.as_mut()\n\n }\n\n}\n\n\n\npub struct ClientConnection {\n\n task_pool: TaskPool,\n\n\n\n socket: Box<dyn ClientSocketTrait>,\n\n sender: Option<ClientSender>,\n\n\n\n channels: Option<MessageChannels>,\n", "file_path": "src/transport.rs", "rank": 44, "score": 22691.502557569016 }, { "content": "pub struct Args {\n\n pub is_server: bool,\n\n}\n\n\n", "file_path": "examples/utils/mod.rs", "rank": 45, "score": 22115.26193369361 }, { "content": "\n\n if !args.is_server {\n\n log::debug!(\"Sending Hello on [{}]\", handle);\n\n match net.send_message(*handle, ClientMessage::Hello(\"test\".to_string())) {\n\n Ok(msg) => match msg {\n\n Some(msg) => {\n\n log::error!(\"Unable to send Hello: {:?}\", msg);\n\n }\n\n None => {}\n\n },\n\n Err(err) => {\n\n log::error!(\"Unable to send Hello: {:?}\", err);\n\n }\n\n };\n\n }\n\n }\n\n None => panic!(\"Got packet for non-existing connection [{}]\", handle),\n\n },\n\n _ => {}\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/balls.rs", "rank": 57, "score": 20.07139514576631 }, { "content": "/*\n\n- every frame server broadcasts position of all balls\n\n via unreliable channel server->client\n\n- client sends movement commands to server\n\n via reliable channel client->server\n\n*/\n\n\n\nuse bevy::{app::ScheduleRunnerSettings, prelude::*, 
render::camera::WindowOrigin};\n\nuse bevy_networking_turbulence::{\n\n ConnectionChannelsBuilder, MessageChannelMode, MessageChannelSettings, NetworkEvent,\n\n NetworkResource, NetworkingPlugin, ReliableChannelSettings,\n\n};\n\nuse rand::Rng;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{collections::HashMap, net::SocketAddr, time::Duration};\n\n\n\nmod utils;\n\nuse utils::*;\n\n\n\nconst SERVER_PORT: u16 = 14192;\n\nconst BOARD_WIDTH: u32 = 1000;\n\nconst BOARD_HEIGHT: u32 = 1000;\n\n\n", "file_path": "examples/balls.rs", "rank": 58, "score": 19.31877637407128 }, { "content": "use bevy::{\n\n app::{App, EventReader, Events, ScheduleRunnerSettings},\n\n core::Time,\n\n ecs::prelude::*,\n\n MinimalPlugins,\n\n};\n\nuse bevy_networking_turbulence::{NetworkEvent, NetworkResource, NetworkingPlugin, Packet};\n\n\n\nuse std::{net::SocketAddr, time::Duration};\n\n\n\nmod utils;\n\nuse utils::*;\n\n\n\nconst SERVER_PORT: u16 = 14191;\n\n\n", "file_path": "examples/simple.rs", "rank": 60, "score": 18.881715428776918 }, { "content": " if pawn.controller == *handle {\n\n ball.velocity = Quat::from_rotation_z(angle) * ball.velocity;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n while let Some(_state_message) = channels.recv::<GameStateMessage>() {\n\n log::error!(\"GameStateMessage received on [{}]\", handle);\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/balls.rs", "rank": 64, "score": 11.350725777699598 }, { "content": " handle,\n\n state_message\n\n );\n\n\n\n // update all balls\n\n for (entity, mut ball, mut transform) in balls.iter_mut() {\n\n let server_id_entry = server_ids.get_mut(&entity.id()).unwrap();\n\n let (server_id, update_frame) = *server_id_entry;\n\n\n\n if let Some(index) = state_message\n\n .balls\n\n .iter()\n\n .position(|&update| update.0 == server_id)\n\n {\n\n let (_id, velocity, translation) = state_message.balls.remove(index);\n\n\n\n if update_frame > message_frame {\n\n continue;\n\n }\n\n server_id_entry.1 = message_frame;\n", "file_path": 
"examples/balls.rs", "rank": 65, "score": 9.039945540756143 }, { "content": " height: BOARD_HEIGHT,\n\n ..Default::default()\n\n })\n\n .add_plugins(DefaultPlugins)\n\n .add_resource(ClearColor(Color::rgb(0.3, 0.3, 0.3)))\n\n .add_startup_system(client_setup.system())\n\n .add_system_to_stage(stage::PRE_UPDATE, handle_messages_client.system())\n\n .add_resource(ServerIds::default())\n\n .add_system(ball_control_system.system())\n\n }\n\n .add_resource(args)\n\n .add_plugin(NetworkingPlugin)\n\n .add_startup_system(network_setup.system())\n\n .add_resource(NetworkReader::default())\n\n .add_system(handle_packets.system());\n\n }\n\n}\n\n\n", "file_path": "examples/balls.rs", "rank": 66, "score": 8.886167082616925 }, { "content": " let mut rng = rand::thread_rng();\n\n let vel_x = rng.gen_range(-0.5, 0.5);\n\n let vel_y = rng.gen_range(-0.5, 0.5);\n\n let pos_x = rng.gen_range(0.0, BOARD_WIDTH as f32);\n\n let pos_y = rng.gen_range(0.0, BOARD_HEIGHT as f32);\n\n log::info!(\"Spawning {}x{} {}/{}\", pos_x, pos_y, vel_x, vel_y);\n\n commands.spawn((\n\n Ball {\n\n velocity: 400.0 * Vec3::new(vel_x, vel_y, 0.0).normalize(),\n\n },\n\n Pawn {\n\n controller: *handle,\n\n },\n\n Transform::from_translation(Vec3::new(pos_x, pos_y, 1.0)),\n\n ));\n\n }\n\n None => {\n\n log::debug!(\"Connected on [{}]\", handle);\n\n }\n\n }\n", "file_path": "examples/balls.rs", "rank": 67, "score": 5.8605519452942 }, { "content": " let entity = commands\n\n .spawn(SpriteComponents {\n\n material: materials.add(\n\n Color::rgb(0.8 - (*id as f32 / 5.0), 0.2, 0.2 + (*id as f32 / 5.0)).into(),\n\n ),\n\n transform: Transform::from_translation(*translation),\n\n sprite: Sprite::new(Vec2::new(30.0, 30.0)),\n\n ..Default::default()\n\n })\n\n .with(Ball {\n\n velocity: *velocity,\n\n })\n\n .with(Pawn { controller: *id })\n\n .current_entity()\n\n .unwrap();\n\n server_ids.insert(entity.id(), (*id, *frame));\n\n }\n\n }\n\n}\n", "file_path": "examples/balls.rs", "rank": 68, "score": 
2.5287115824332287 }, { "content": "\n\n ball.velocity = velocity;\n\n transform.translation = translation;\n\n } else {\n\n // TODO: despawn disconnected balls\n\n }\n\n }\n\n // create new balls\n\n for (id, velocity, translation) in state_message.balls.drain(..) {\n\n if let Some((frame, _velocity, _translation)) = to_spawn.get(&id) {\n\n if *frame > message_frame {\n\n continue;\n\n }\n\n };\n\n to_spawn.insert(id, (message_frame, velocity, translation));\n\n }\n\n }\n\n\n\n for (id, (frame, velocity, translation)) in to_spawn.iter() {\n\n log::info!(\"Spawning {} @{}\", id, frame);\n", "file_path": "examples/balls.rs", "rank": 69, "score": 2.0360664504733004 } ]
Rust
share/elf.rs
RichVillage/SnowFlake
be1c1ce4742de732528382e010b9044542944b10
#![allow(dead_code)] #![allow(non_camel_case_types)] pub type Elf64_Half = u16; pub type Elf64_Addr = u64; pub type Elf64_Off = u64; pub type Elf64_Sword = i32; pub type Elf64_Word = u32; #[repr(C)] #[derive(Copy, Clone, Default)] pub struct ElfHeader { pub e_ident: [u8; 16], pub e_object_type: Elf64_Half, pub e_machine_type: Elf64_Half, pub e_version: Elf64_Word, pub e_entry: Elf64_Addr, pub e_phoff: Elf64_Off, pub e_shoff: Elf64_Off, pub e_flags: Elf64_Word, pub e_ehsize: Elf64_Half, pub e_phentsize: Elf64_Half, pub e_phnum: Elf64_Half, pub e_shentsize: Elf64_Half, pub e_shnum: Elf64_Half, pub e_shstrndx: Elf64_Half, } impl ElfHeader { pub fn check_header(&self) { assert_eq!(&self.e_ident[..8], b"\x7FELF\x02\x01\x01\x00"); assert_eq!(self.e_version, 1); } } #[repr(C)] #[derive(Copy, Clone, Debug, Default)] pub struct PhEnt { pub p_type: Elf64_Word, pub p_flags: Elf64_Word, pub p_offset: Elf64_Off, pub p_vaddr: Elf64_Addr, pub p_paddr: Elf64_Addr, pub p_filesz: Elf64_Addr, pub p_memsz: Elf64_Addr, pub p_align: Elf64_Addr, } impl PhEnt { pub fn start_address(&self) -> usize { self.p_paddr as usize } pub fn end_address(&self) -> usize { (self.p_paddr + self.p_memsz) as usize } pub fn flags(&self) -> ElfSectionFlags { ElfSectionFlags::from_bits_truncate(self.p_flags.into()) } pub fn is_allocated(&self) -> bool { self.flags().contains(ElfSectionFlags::ELF_SECTION_ALLOCATED) } } #[repr(C)] #[derive(Copy,Clone)] pub struct ShEnt { sh_name: Elf64_Word, sh_type: Elf64_Word, sh_flags: Elf64_Word, sh_addr: Elf64_Addr, sh_offset: Elf64_Off, sh_size: Elf64_Word, sh_link: Elf64_Word, sh_info: Elf64_Word, sh_addralign: Elf64_Word, sh_entsize: Elf64_Word, } #[derive(Copy,Clone)] pub struct ElfFile(pub ElfHeader); impl ElfFile { pub fn check_header(&self) { self.0.check_header(); } pub fn phents(&self) -> PhEntIter { assert_eq!( self.0.e_phentsize as usize, ::core::mem::size_of::<PhEnt>() ); let slice: &[PhEnt] = unsafe { let ptr = (&self.0 as *const _ as usize + self.0.e_phoff 
as usize) as *const PhEnt; ::core::slice::from_raw_parts( ptr, self.0.e_phnum as usize ) }; println!("phents() - slice = {:p}+{}", slice.as_ptr(), slice.len()); PhEntIter( slice ) } fn shents(&self) -> &[ShEnt] { assert_eq!( self.0.e_shentsize as usize, ::core::mem::size_of::<ShEnt>() ); unsafe { let ptr = (&self.0 as *const _ as usize + self.0.e_shoff as usize) as *const ShEnt; ::core::slice::from_raw_parts( ptr, self.0.e_shnum as usize ) } } pub fn entrypoint(&self) -> usize { self.0.e_entry as usize } } #[repr(C)] #[derive(Copy, Clone, Debug, Default)] pub struct PhEntIter<'a>(pub &'a [PhEnt]); impl<'a> Iterator for PhEntIter<'a> { type Item = PhEnt; fn next(&mut self) -> Option<PhEnt> { if self.0.len() == 0 { None } else { let rv = self.0[0].clone(); self.0 = &self.0[1..]; Some(rv) } } } struct ShEntIter<'a>(&'a [ShEnt]); impl<'a> Iterator for ShEntIter<'a> { type Item = ShEnt; fn next(&mut self) -> Option<ShEnt> { if self.0.len() == 0 { None } else { let rv = self.0[0].clone(); self.0 = &self.0[1..]; Some(rv) } } } pub fn elf_get_size(file_base: &ElfFile) -> u32 { println!("elf_get_size(file_base={:p})", file_base); file_base.check_header(); let mut max_end = 0; for phent in file_base.phents() { if phent.p_type == 1 { println!("- {:#x}+{:#x} loads +{:#x}+{:#x}", phent.p_paddr, phent.p_memsz, phent.p_offset, phent.p_filesz ); let end = (phent.p_paddr + phent.p_memsz) as usize; if max_end < end { max_end = end; } } } let max_end = (max_end + 0xFFF) & !0xFFF; println!("return load_size={:#x}", max_end); max_end as u32 } pub fn elf_load_segments(file_base: &ElfFile, output_base: *mut u8) -> u32 { println!("elf_load_segments(file_base={:p}, output_base={:p})", file_base, output_base); for phent in file_base.phents() { if phent.p_type == 1 { println!("- {:#x}+{:#x} loads +{:#x}+{:#x}", phent.p_paddr, phent.p_memsz, phent.p_offset, phent.p_filesz ); let (dst,src) = unsafe { let dst = ::core::slice::from_raw_parts_mut( (output_base as usize + phent.p_paddr as usize) 
as *mut u8, phent.p_memsz as usize ); let src = ::core::slice::from_raw_parts( (file_base as *const _ as usize + phent.p_offset as usize) as *const u8, phent.p_filesz as usize ); (dst, src) }; for (d, v) in Iterator::zip( dst.iter_mut(), src.iter().cloned().chain(::core::iter::repeat(0)) ) { *d = v; } } } let rv = (file_base.entrypoint() - 0x80000000 + output_base as usize) as u32; println!("return entrypoint={:#x}", rv); rv } #[derive(Copy,Clone,Debug)] pub struct SymEnt { st_name: u32, st_value: u32, st_size: u32, st_info: u8, st_other: u8, st_shndx: u16, } #[repr(C)] #[derive(Debug)] pub struct SymbolInfo { base: *const SymEnt, count: usize, string_table: *const u8, strtab_len: usize, } pub extern "C" fn elf_load_symbols(file_base: &ElfFile, output: &mut SymbolInfo) -> u32 { println!("elf_load_symbols(file_base={:p}, output={:p})", file_base, output); *output = SymbolInfo {base: 0 as *const _, count: 0, string_table: 0 as *const _, strtab_len: 0}; let mut pos = ::core::mem::size_of::<SymbolInfo>(); for ent in file_base.shents() { if ent.sh_type == 2 { println!("Symbol table at +{:#x}+{:#x}, string table {}", ent.sh_offset, ent.sh_size, ent.sh_link); let strtab = file_base.shents()[ent.sh_link as usize]; output.base = (output as *const _ as usize + pos) as *const _; output.count = ent.sh_size as usize / ::core::mem::size_of::<SymEnt>(); unsafe { let bytes = ent.sh_size as usize; let src = ::core::slice::from_raw_parts( (file_base as *const _ as usize + ent.sh_offset as usize) as *const SymEnt, output.count ); let dst = ::core::slice::from_raw_parts_mut( output.base as *mut SymEnt, output.count ); for (d,s) in Iterator::zip( dst.iter_mut(), src.iter() ) { *d = *s; } pos += bytes; } output.string_table = (output as *const _ as usize + pos) as *const _; output.strtab_len = strtab.sh_size as usize; unsafe { let bytes = strtab.sh_size as usize; let src = ::core::slice::from_raw_parts( (file_base as *const _ as usize + strtab.sh_offset as usize) as *const u8, bytes ); 
let dst = ::core::slice::from_raw_parts_mut( output.string_table as *mut u8, bytes ); for (d,s) in Iterator::zip( dst.iter_mut(), src.iter() ) { *d = *s; } pos += bytes; } break ; } } println!("- output = {:?}", output); pos as u32 } bitflags! { pub struct ElfSectionFlags: u64 { const ELF_SECTION_WRITABLE = 0x1; const ELF_SECTION_ALLOCATED = 0x2; const ELF_SECTION_EXECUTABLE = 0x4; } }
#![allow(dead_code)] #![allow(non_camel_case_types)] pub type Elf64_Half = u16; pub type Elf64_Addr = u64; pub type Elf64_Off = u64; pub type Elf64_Sword = i32; pub type Elf64_Word = u32; #[repr(C)] #[derive(Copy, Clone, Default)] pub struct ElfHeader { pub e_ident: [u8; 16], pub e_object_type: Elf64_Half, pub e_machine_type: Elf64_Half, pub e_version: Elf64_Word, pub e_entry: Elf64_Addr, pub e_phoff: Elf64_Off, pub e_shoff: Elf64_Off, pub e_flags: Elf64_Word, pub e_ehsize: Elf64_Half, pub e_phentsize: Elf64_Half, pub e_phnum: Elf64_Half, pub e_shentsize: Elf64_Half, pub e_shnum: Elf64_Half, pub e_shstrndx: Elf64_Half, } impl ElfHeader { pub fn check_header(&self) { assert_eq!(&self.e_ident[..8], b"\x7FELF\x02\x01\x01\x00"); assert_eq!(self.e_version, 1); } } #[repr(C)] #[derive(Copy, Clone, Debug, Default)] pub struct PhEnt { pub p_type: Elf64_Word, pub p_flags: Elf64_Word, pub p_offset: Elf64_Off, pub p_vaddr: Elf64_Addr, pub p_paddr: Elf64_Addr, pub p_filesz: Elf64_Addr, pub p_memsz: Elf64_Addr, pub p_align: Elf64_Addr, } impl PhEnt { pub fn start_address(&self) -> usize { self.p_paddr as usize } pub fn end_address(&self) -> usize { (self.p_paddr + self.p_memsz) as usize } pub fn flags(&self) -> ElfSectionFlags { ElfSectionFlags::from_bits_truncate(self.p_flags.into()) } pub fn is_allocated(&self) -> bool { self.flags().contains(ElfSectionFlags::ELF_SECTION_ALLOCATED) } } #[repr(C)] #[derive(Copy,Clone)] pub struct ShEnt { sh_name: Elf64_Word, sh_type: Elf64_Word, sh_flags: Elf64_Word, sh_addr: Elf64_Addr, sh_offset: Elf64_Off, sh_size: Elf64_Word, sh_link: Elf64_Word, sh_info: Elf64_Word, sh_addralign: Elf64_Word, sh_entsize: Elf64_Word, } #[derive(Copy,Clone)] pub struct ElfFile(pub ElfHeader); impl ElfFile { pub fn check_header(&self) { self.0.check_header(); } pub fn phents(&self) -> PhEntIter { assert_eq!( self.0.e_phentsize as usize, ::core::mem::size_of::<PhEnt>() ); let slice: &[PhEnt] = unsafe { let ptr = (&self.0 as *const _ as usize + self.0.e_phoff 
as usize) as *const PhEnt; ::core::slice::from_raw_parts( ptr, self.0.e_phnum as usize ) }; println!("phents() - slice = {:p}+{}", slice.as_ptr(), slice.len()); PhEntIter( slice ) } fn shents(&self) -> &[ShEnt] { assert_eq!( self.0.e_shentsize as usize, ::core::mem::size_of::<ShEnt>() ); unsafe { let ptr = (&self.0 as *const _ as usize + self.0.e_shoff as usize) as *const ShEnt; ::core::slice::from_raw_parts( ptr, self.0.e_shnum as usize ) } } pub fn entrypoint(&self) -> usize { self.0.e_entry as usize } } #[repr(C)] #[derive(Copy, Clone, Debug, Default)] pub struct PhEntIter<'a>(pub &'a [PhEnt]); impl<'a> Iterator for PhEntIter<'a> { type Item = PhEnt; fn next(&mut self) -> Option<PhEnt> { if self.0.len() == 0 { None } else { let rv = self.0[0].clone(); self.0 = &self.0[1..]; Some(rv) } } } struct ShEntIter<'a>(&'a [ShEnt]); impl<'a> Iterator for ShEntIter<'a> { type Item = ShEnt;
} pub fn elf_get_size(file_base: &ElfFile) -> u32 { println!("elf_get_size(file_base={:p})", file_base); file_base.check_header(); let mut max_end = 0; for phent in file_base.phents() { if phent.p_type == 1 { println!("- {:#x}+{:#x} loads +{:#x}+{:#x}", phent.p_paddr, phent.p_memsz, phent.p_offset, phent.p_filesz ); let end = (phent.p_paddr + phent.p_memsz) as usize; if max_end < end { max_end = end; } } } let max_end = (max_end + 0xFFF) & !0xFFF; println!("return load_size={:#x}", max_end); max_end as u32 } pub fn elf_load_segments(file_base: &ElfFile, output_base: *mut u8) -> u32 { println!("elf_load_segments(file_base={:p}, output_base={:p})", file_base, output_base); for phent in file_base.phents() { if phent.p_type == 1 { println!("- {:#x}+{:#x} loads +{:#x}+{:#x}", phent.p_paddr, phent.p_memsz, phent.p_offset, phent.p_filesz ); let (dst,src) = unsafe { let dst = ::core::slice::from_raw_parts_mut( (output_base as usize + phent.p_paddr as usize) as *mut u8, phent.p_memsz as usize ); let src = ::core::slice::from_raw_parts( (file_base as *const _ as usize + phent.p_offset as usize) as *const u8, phent.p_filesz as usize ); (dst, src) }; for (d, v) in Iterator::zip( dst.iter_mut(), src.iter().cloned().chain(::core::iter::repeat(0)) ) { *d = v; } } } let rv = (file_base.entrypoint() - 0x80000000 + output_base as usize) as u32; println!("return entrypoint={:#x}", rv); rv } #[derive(Copy,Clone,Debug)] pub struct SymEnt { st_name: u32, st_value: u32, st_size: u32, st_info: u8, st_other: u8, st_shndx: u16, } #[repr(C)] #[derive(Debug)] pub struct SymbolInfo { base: *const SymEnt, count: usize, string_table: *const u8, strtab_len: usize, } pub extern "C" fn elf_load_symbols(file_base: &ElfFile, output: &mut SymbolInfo) -> u32 { println!("elf_load_symbols(file_base={:p}, output={:p})", file_base, output); *output = SymbolInfo {base: 0 as *const _, count: 0, string_table: 0 as *const _, strtab_len: 0}; let mut pos = ::core::mem::size_of::<SymbolInfo>(); for ent in 
file_base.shents() { if ent.sh_type == 2 { println!("Symbol table at +{:#x}+{:#x}, string table {}", ent.sh_offset, ent.sh_size, ent.sh_link); let strtab = file_base.shents()[ent.sh_link as usize]; output.base = (output as *const _ as usize + pos) as *const _; output.count = ent.sh_size as usize / ::core::mem::size_of::<SymEnt>(); unsafe { let bytes = ent.sh_size as usize; let src = ::core::slice::from_raw_parts( (file_base as *const _ as usize + ent.sh_offset as usize) as *const SymEnt, output.count ); let dst = ::core::slice::from_raw_parts_mut( output.base as *mut SymEnt, output.count ); for (d,s) in Iterator::zip( dst.iter_mut(), src.iter() ) { *d = *s; } pos += bytes; } output.string_table = (output as *const _ as usize + pos) as *const _; output.strtab_len = strtab.sh_size as usize; unsafe { let bytes = strtab.sh_size as usize; let src = ::core::slice::from_raw_parts( (file_base as *const _ as usize + strtab.sh_offset as usize) as *const u8, bytes ); let dst = ::core::slice::from_raw_parts_mut( output.string_table as *mut u8, bytes ); for (d,s) in Iterator::zip( dst.iter_mut(), src.iter() ) { *d = *s; } pos += bytes; } break ; } } println!("- output = {:?}", output); pos as u32 } bitflags! { pub struct ElfSectionFlags: u64 { const ELF_SECTION_WRITABLE = 0x1; const ELF_SECTION_ALLOCATED = 0x2; const ELF_SECTION_EXECUTABLE = 0x4; } }
fn next(&mut self) -> Option<ShEnt> { if self.0.len() == 0 { None } else { let rv = self.0[0].clone(); self.0 = &self.0[1..]; Some(rv) } }
function_block-full_function
[ { "content": "pub fn nstr(wstring: *const u16) -> String {\n\n let mut string = String::new();\n\n\n\n let mut i = 0;\n\n loop {\n\n let w = unsafe { *wstring.offset(i) };\n\n i += 1;\n\n if w == 0 {\n\n break;\n\n }\n\n let c = unsafe { char::from_u32_unchecked(w as u32) };\n\n string.push(c);\n\n }\n\n\n\n string\n\n}\n\n\n", "file_path": "boot2snow/src/string.rs", "rank": 0, "score": 169695.92577429718 }, { "content": "fn progress_bar(display: &mut Display, x: i32, y: i32, resolution_w: u32, progress: u32) {\n\n let (w, h) = { ((resolution_w / 3) / 100 as u32, 8) };\n\n let progress = (progress as f32 * 1.35) as u32;\n\n let (p1, p2) = { (progress - 1, progress - 2) };\n\n \n\n for i in 0..progress {\n\n if i == 0 {\n\n display.rounded_rect(x, y, w + 4, h, 2, true, Color::rgb(255, 255, 255));\n\n } else if i == p2 { } else if i == p1 {\n\n display.rounded_rect(x + (p2 * w) as i32, y, w * 2, h, 2, true, Color::rgb(255, 255, 255));\n\n } else {\n\n display.rect(x + (i * w) as i32, y, (w as f32 * 1.5) as u32, h, Color::rgb(255, 255, 255));\n\n }\n\n }\n\n}", "file_path": "kernel/src/kmain.rs", "rank": 2, "score": 139218.5966783032 }, { "content": "pub fn wait_timeout(timeout: u64) {\n\n let boot_services = get_boot_services();\n\n let conin = get_conin();\n\n\n\n let mut event: *mut Void = unsafe { ptr::null_mut() };\n\n unsafe { (boot_services.create_event)(0x80000000, 0, None, ptr::null_mut(), &mut event);\n\n (boot_services.set_timer)(event, TimerDelay::Periodic, 10000) };\n\n\n\n let mut index = 0;\n\n let mut input = InputKey {\n\n scan_code: 0,\n\n unicode_char: 0\n\n };\n\n\n\n for num in 0..(timeout * 100) {\n\n unsafe { (boot_services.wait_for_event)(2, &[conin.wait_for_key, event] as *const *mut Void, &mut index) };\n\n \n\n if index == 0 {\n\n input = conin.read_key_stroke().unwrap();\n\n match unsafe { char::from_u32_unchecked(input.unicode_char as u32) } {\n\n 'r' => break,\n\n _ => continue\n\n }\n\n }\n\n }\n\n}", "file_path": 
"boot2snow/src/io.rs", "rank": 3, "score": 131079.18022420292 }, { "content": "pub fn wstr(string: &str) -> Vec<u16> {\n\n let mut wstring = vec![];\n\n\n\n for c in string.chars() {\n\n wstring.push(c as u16);\n\n }\n\n wstring.push(0);\n\n\n\n wstring\n\n}\n\n\n", "file_path": "boot2snow/src/string.rs", "rank": 4, "score": 123648.30923462714 }, { "content": "pub fn utf8_to_string(vector: Vec<u8>) -> String {\n\n String::from_utf8(vector).unwrap()\n\n}", "file_path": "boot2snow/src/string.rs", "rank": 5, "score": 120840.14518467285 }, { "content": "pub fn load(path: &str) -> Result<Vec<u8>, Status> {\n\n let (_, mut file) = find(path)?;\n\n\n\n let mut data = vec![];\n\n let _ = file.read_to_end(&mut data)?;\n\n\n\n Ok(data)\n\n}", "file_path": "boot2snow/src/fs.rs", "rank": 7, "score": 114776.99924266145 }, { "content": "pub fn parse(file_data: &[u8]) -> Result<Image, String> {\n\n use orbclient::Color;\n\n\n\n let get = |i: usize| -> u8 {\n\n match file_data.get(i) {\n\n Some(byte) => *byte,\n\n None => 0,\n\n }\n\n };\n\n\n\n let gets = |start: usize, len: usize| -> String {\n\n (start..start + len).map(|i| get(i) as char).collect::<String>()\n\n };\n\n\n\n if gets(0, 2) != \"BM\" {\n\n return Err(\"BMP: invalid signature\".to_string())\n\n }\n\n\n\n let getw = |i: usize| -> u16 { (get(i) as u16) + ((get(i + 1) as u16) << 8) };\n\n\n", "file_path": "boot2snow/src/image/bmp.rs", "rank": 8, "score": 114348.24363722137 }, { "content": "/// Get the number of frames available\n\npub fn free_frames() -> usize {\n\n if let Some(ref allocator) = *ALLOCATOR.lock() {\n\n allocator.free_frames()\n\n } else {\n\n panic!(\"frame allocator not initialized\");\n\n }\n\n}\n\n\n", "file_path": "kernel/src/memory/mod.rs", "rank": 9, "score": 112385.7669198096 }, { "content": "/// Get the number of frames used\n\npub fn used_frames() -> usize {\n\n if let Some(ref allocator) = *ALLOCATOR.lock() {\n\n allocator.used_frames()\n\n } else {\n\n panic!(\"frame allocator not 
initialized\");\n\n }\n\n}\n\n\n", "file_path": "kernel/src/memory/mod.rs", "rank": 10, "score": 112385.7669198096 }, { "content": "/// Deallocate a range of frames frame\n\npub fn deallocate_frames(frame: Frame, count: usize) {\n\n if let Some(ref mut allocator) = *ALLOCATOR.lock() {\n\n allocator.deallocate_frames(frame, count)\n\n } else {\n\n panic!(\"frame allocator not initialized\");\n\n }\n\n}\n\n\n\n/// A frame, allocated by the frame allocator.\n\n/// Do not add more derives, or make anything `pub`!\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct Frame {\n\n pub number: usize\n\n}\n\n\n\nimpl Frame {\n\n /// Get the address of this frame\n\n pub fn start_address(&self) -> PhysicalAddress {\n\n PhysicalAddress::new(self.number * PAGE_SIZE)\n\n }\n", "file_path": "kernel/src/memory/mod.rs", "rank": 11, "score": 100840.90281373363 }, { "content": "/// Allocate a range of frames\n\npub fn allocate_frames(count: usize) -> Option<Frame> {\n\n if let Some(ref mut allocator) = *ALLOCATOR.lock() {\n\n allocator.allocate_frames(count)\n\n } else {\n\n panic!(\"frame allocator not initialized\");\n\n }\n\n}\n\n\n", "file_path": "kernel/src/memory/mod.rs", "rank": 12, "score": 100840.90281373363 }, { "content": "pub fn find(path: &str) -> Result<(usize, File), Status> {\n\n let wpath = wstr(path);\n\n\n\n for (i, mut fs) in FileSystem::all().iter_mut().enumerate() {\n\n let mut root = fs.root()?;\n\n match root.open(&wpath) {\n\n Ok(file) => {\n\n return Ok((i, file));\n\n },\n\n Err(err) => if err != NOT_FOUND {\n\n return Err(err);\n\n }\n\n }\n\n }\n\n\n\n Err(NOT_FOUND)\n\n}\n\n\n", "file_path": "boot2snow/src/fs.rs", "rank": 14, "score": 98738.98407625197 }, { "content": "pub fn init() {\n\n use x86_64::instructions::segmentation::set_cs;\n\n use x86_64::instructions::tables::load_tss;\n\n\n\n GDT.0.load();\n\n unsafe {\n\n set_cs(GDT.1.code_selector);\n\n load_tss(GDT.1.tss_selector);\n\n }\n\n}", "file_path": "kernel/arch/x86_64/gdt.rs", 
"rank": 15, "score": 89090.0479978586 }, { "content": "pub fn init() {\n\n IDT.load();\n\n}\n\n\n", "file_path": "kernel/arch/x86_64/idt.rs", "rank": 16, "score": 89090.0479978586 }, { "content": "pub fn load_conf() -> Conf {\n\n let mut conf: Conf = unsafe { mem::zeroed() };\n\n \n\n if let Ok(file) = load(\"\\\\boot2snow\\\\boot2snow.conf\") {\n\n let line: Vec<String> = utf8_to_string(file).replace(\" \", \"\").split(\"\\n\")\n\n .map(|s: &str| s.to_string())\n\n .collect();\n\n\n\n for data in &line {\n\n let s = data.split(\"=\").nth(0).unwrap().to_string();\n\n if s == \"kernel\" {\n\n conf.kernel = data.split(\"=\").nth(1).unwrap().to_string();\n\n } else if s == \"kernel_option\" {\n\n conf.kernel_option = data.split(\"=\").nth(1).unwrap().to_string();\n\n } else if s == \"boot_timeout\" {\n\n conf.boot_timeout = data.split(\"=\").nth(1).unwrap().to_string().parse::<u64>().unwrap();\n\n }\n\n }\n\n }\n\n\n\n conf\n\n} ", "file_path": "boot2snow/src/conf.rs", "rank": 17, "score": 85389.17946662403 }, { "content": "pub fn get_image_handle() -> Handle {\n\n\tunsafe { S_IMAGE_HANDLE }\n\n}\n\n\n", "file_path": "boot2snow/src/lib.rs", "rank": 18, "score": 83528.47415074297 }, { "content": "pub fn execute(display: &mut Display) {\n\n println!(\"Starting Snowflake Minimal Shell... (Debug)\");\n\n print!(\"///// Welcome to Snowflake Minimal Shell /////\");\n\n\n\n loop {\n\n print!(\"\\n> \");\n\n let gets = unsafe { keyboard::gets() };\n\n let mut command = gets.split_whitespace();\n\n\n\n print!(\"\\n\");\n\n\n\n match command.nth(0).unwrap() {\n\n \"help\" => {\n\n print!(\"Snowflake OS Kernel {}\", env!(\"CARGO_PKG_VERSION\"));\n\n },\n\n \"exit\" => {\n\n println!(\"Exiting Shell... 
(Debug)\");\n\n break\n\n },\n\n \"start\" => {\n\n testui::uidraw(display);\n\n }\n\n _ => {\n\n print!(\"Unknown command :(\");\n\n }\n\n }\n\n }\n\n}", "file_path": "kernel/src/shell.rs", "rank": 19, "score": 80650.98850860148 }, { "content": "pub fn _print(args: fmt::Arguments) {\n\n unsafe { serial().write_fmt(args).unwrap() }\n\n}\n\n\n", "file_path": "kernel/src/console.rs", "rank": 20, "score": 80650.98850860148 }, { "content": "pub fn _print(args: fmt::Arguments) {\n\n EfiLogger(get_conout()).write_fmt(args).unwrap();\n\n}\n\n\n\n/*pub fn wait_key() -> Result<char, status::Error> {\n\n let uefi = unsafe { &mut *::UEFI };\n\n\n\n let mut index = 0;\n\n (uefi.BootServices.WaitForEvent)(1, &uefi.ConsoleIn.WaitForKey, &mut index)?;\n\n\n\n let mut input = TextInputKey {\n\n ScanCode: 0,\n\n UnicodeChar: 0\n\n };\n\n\n\n (uefi.ConsoleIn.ReadKeyStroke)(uefi.ConsoleIn, &mut input)?;\n\n\n\n Ok(unsafe {\n\n char::from_u32_unchecked(input.UnicodeChar as u32)\n\n })\n\n}*/\n\n\n", "file_path": "boot2snow/src/io.rs", "rank": 21, "score": 80650.98850860148 }, { "content": "pub fn uidraw(display: &mut Display) {\n\n let (width, height) = { (display.width(), display.height()) };\n\n let (width2, height2) = { (20, 20) };\n\n\n\n println! 
(\"SnowFlake UI Test.\");\n\n display.rect(0, 0, width, height, Color::rgb(50, 45, 55));\n\n display.rect(0, 0, width2, height2, Color::rgb(255, 255, 255));\n\n}", "file_path": "kernel/src/testui.rs", "rank": 22, "score": 80650.98850860148 }, { "content": "pub fn get_runtime_services() -> &'static RuntimeServices {\n\n\tunsafe { &*S_RUNTIME_SERVICES }\n\n}\n\n\n", "file_path": "boot2snow/src/lib.rs", "rank": 23, "score": 78620.9292462121 }, { "content": "pub fn get_boot_services() -> &'static BootServices {\n\n\tunsafe { &*S_BOOT_SERVICES }\n\n}\n\n\n", "file_path": "boot2snow/src/lib.rs", "rank": 24, "score": 78620.9292462121 }, { "content": "pub fn get_conout() -> &'static SimpleTextOutputInterface {\n\n\tunsafe { &*S_CONOUT }\n\n}\n\n\n", "file_path": "boot2snow/src/lib.rs", "rank": 25, "score": 77074.74220616413 }, { "content": "pub fn get_conin() -> &'static mut SimpleInputInterface {\n\n\tunsafe { &mut *S_CONIN }\n\n}\n\n\n", "file_path": "boot2snow/src/lib.rs", "rank": 26, "score": 75717.30309375271 }, { "content": "fn status_msg(display: &mut Display, splash_height: u32, msg: &str) {\n\n let prompt = msg.clone();\n\n let mut x = (display.width() as i32 - prompt.len() as i32 * 8) / 2;\n\n let y = ((display.height() as i32 - splash_height as i32) / 2) as i32 + 256;\n\n\n\n let rect_x = 0;\n\n let rect_y = (y - 16);\n\n let rect_width = display.width();\n\n let rect_height = (y + 16) as u32;\n\n\n\n display.rect(rect_x, rect_y, rect_width, rect_height, InnerColor::rgb(0x00, 0x00, 0x00));\n\n\n\n for c in prompt.chars() {\n\n display.char(x, y, c, InnerColor::rgb(0xff, 0xff, 0xff));\n\n x += 8;\n\n }\n\n\n\n display.sync();\n\n}\n\n\n", "file_path": "boot2snow/src/boot2snow.rs", "rank": 27, "score": 75241.46441124025 }, { "content": "fn load_kernel_sections(filename: &str) -> Result<elf::PhEntIter<'static>, Status> {\n\n\tlet mut kernel_file = match find(filename) {\n\n\t\tOk(k) => { k.1 },\n\n\t\tErr(e) => panic!(\"Failed to open kernel '{}' - {:?}\", 
filename, e),\n\n\t};\n\n\n\n // Load kernel from this file (ELF).\n\n\tlet elf_hdr = {\n\n\t\tlet mut hdr = elf::ElfHeader::default();\n\n\t\t// SAFE: Converts to POD for read\n\n\t\tkernel_file.read( unsafe { slice::from_raw_parts_mut( &mut hdr as *mut _ as *mut u8, mem::size_of::<elf::ElfHeader>() ) } ).expect(\"ElfHeader read\");\n\n\t\thdr\n\n\t};\n\n\n\n let slice: &[elf::PhEnt] = unsafe {\n\n\t\tlet ptr = (&elf_hdr as *const _ as usize + elf_hdr.e_phoff as usize) as *const elf::PhEnt;\n\n\t slice::from_raw_parts(ptr, elf_hdr.e_phnum as usize)\n\n\t};\n\n\n\n Ok(elf::PhEntIter(slice))\n\n}\n\n\n", "file_path": "boot2snow/src/boot2snow.rs", "rank": 28, "score": 56692.91766085804 }, { "content": "struct Selectors {\n\n code_selector: SegmentSelector,\n\n tss_selector: SegmentSelector,\n\n}\n\n\n", "file_path": "kernel/arch/x86_64/gdt.rs", "rank": 29, "score": 56006.95797397988 }, { "content": "pub trait FrameAllocator {\n\n fn set_noncore(&mut self, noncore: bool);\n\n fn free_frames(&self) -> usize;\n\n fn used_frames(&self) -> usize;\n\n fn allocate_frames(&mut self, size: usize) -> Option<Frame>;\n\n fn deallocate_frames(&mut self, frame: Frame, size: usize);\n\n}", "file_path": "kernel/src/memory/mod.rs", "rank": 30, "score": 44199.571246556676 }, { "content": "pub trait TableLevel {}\n\n\n\npub enum Level4 {}\n\npub enum Level3 {}\n\npub enum Level2 {}\n\npub enum Level1 {}\n\n\n\nimpl TableLevel for Level4 {}\n\nimpl TableLevel for Level3 {}\n\nimpl TableLevel for Level2 {}\n\nimpl TableLevel for Level1 {}\n\n\n", "file_path": "kernel/arch/x86_64/paging/table.rs", "rank": 31, "score": 43268.438357318955 }, { "content": "pub trait HierarchicalLevel: TableLevel {\n\n type NextLevel: TableLevel;\n\n}\n\n\n\nimpl HierarchicalLevel for Level4 {\n\n type NextLevel = Level3;\n\n}\n\n\n\nimpl HierarchicalLevel for Level3 {\n\n type NextLevel = Level2;\n\n}\n\n\n\nimpl HierarchicalLevel for Level2 {\n\n type NextLevel = Level1;\n\n}\n\n\n\npub struct Table<L: 
TableLevel> {\n\n entries: [Entry; ENTRY_COUNT],\n\n level: PhantomData<L>,\n\n}\n", "file_path": "kernel/arch/x86_64/paging/table.rs", "rank": 32, "score": 40679.052175541015 }, { "content": "fn set_graphics_mode(output: &GraphicsOutput) -> Result<(), ()> {\n\n let mut max_i = None;\n\n let mut max_w = 0;\n\n let mut max_h = 0;\n\n\n\n for i in 0..output.mode.max_mode as usize {\n\n let mut mode_ptr: *mut ModeInformation = ::core::ptr::null_mut();\n\n\t\tlet mut mode_size = 0;\n\n if (output.query_mode)(output, i as u32, &mut mode_size, &mut (mode_ptr as *const ModeInformation)).into_result().is_ok() {\n\n\t\t\tlet mode = unsafe { &mut *mode_ptr };\n\n\n\n\t\t\tlet w = mode.horizontal_resolution;\n\n\t\t\tlet h = mode.vertical_resolution;\n\n\t\t\tlet pixel_format = mode.pixel_format;\n\n if w >= max_w && h >= max_h && pixel_format == PixelFormat::BGRX {\n\n max_i = Some(i as u32);\n\n max_w = w;\n\n max_h = h;\n\n }\n\n }\n", "file_path": "boot2snow/src/lib.rs", "rank": 33, "score": 38735.02882699986 }, { "content": "fn load_kernel_file(filename: &str) -> Result<EntryPoint, Status> {\n\n let boot_services = get_boot_services();\n\n\n\n\tlet mut kernel_file = match find(filename) {\n\n\t\tOk(k) => { k.1 },\n\n\t\tErr(e) => panic!(\"Failed to open kernel '{}' - {:?}\", filename, e),\n\n\t};\n\n\n\n\t// Load kernel from this file (ELF).\n\n\tlet elf_hdr = {\n\n\t\tlet mut hdr = elf::ElfHeader::default();\n\n\t\t// SAFE: Converts to POD for read\n\n\t\tkernel_file.read( unsafe { slice::from_raw_parts_mut( &mut hdr as *mut _ as *mut u8, mem::size_of::<elf::ElfHeader>() ) } ).expect(\"ElfHeader read\");\n\n\t\thdr\n\n\t};\n\n\n\n\telf_hdr.check_header();\n\n\tfor i in 0 .. 
elf_hdr.e_phnum {\n\n\t\tlet mut ent = elf::PhEnt::default();\n\n\t\tkernel_file.set_position(elf_hdr.e_phoff + (i as usize * mem::size_of::<elf::PhEnt>()) as u64);\n", "file_path": "boot2snow/src/boot2snow.rs", "rank": 34, "score": 36001.38254272517 }, { "content": "\n\n/// Offset to user temporary heap (used when cloning)\n\npub const USER_TMP_HEAP_OFFSET: usize = USER_TMP_OFFSET + PML4_SIZE;\n\npub const USER_TMP_HEAP_PML4: usize = (USER_TMP_HEAP_OFFSET & PML4_MASK)/PML4_SIZE;\n\n\n\n/// Offset to user temporary page for grants\n\npub const USER_TMP_GRANT_OFFSET: usize = USER_TMP_HEAP_OFFSET + PML4_SIZE;\n\npub const USER_TMP_GRANT_PML4: usize = (USER_TMP_GRANT_OFFSET & PML4_MASK)/PML4_SIZE;\n\n\n\n/// Offset to user temporary stack (used when cloning)\n\npub const USER_TMP_STACK_OFFSET: usize = USER_TMP_GRANT_OFFSET + PML4_SIZE;\n\npub const USER_TMP_STACK_PML4: usize = (USER_TMP_STACK_OFFSET & PML4_MASK)/PML4_SIZE;\n\n\n\n/// Offset to user temporary sigstack (used when cloning)\n\npub const USER_TMP_SIGSTACK_OFFSET: usize = USER_TMP_STACK_OFFSET + PML4_SIZE;\n\npub const USER_TMP_SIGSTACK_PML4: usize = (USER_TMP_SIGSTACK_OFFSET & PML4_MASK)/PML4_SIZE;\n\n\n\n/// Offset to user temporary tls (used when cloning)\n\npub const USER_TMP_TLS_OFFSET: usize = USER_TMP_SIGSTACK_OFFSET + PML4_SIZE;\n\npub const USER_TMP_TLS_PML4: usize = (USER_TMP_TLS_OFFSET & PML4_MASK)/PML4_SIZE;\n\n\n\n/// Offset for usage in other temporary pages\n\npub const USER_TMP_MISC_OFFSET: usize = USER_TMP_TLS_OFFSET + PML4_SIZE;\n\npub const USER_TMP_MISC_PML4: usize = (USER_TMP_MISC_OFFSET & PML4_MASK)/PML4_SIZE;", "file_path": "kernel/src/consts.rs", "rank": 35, "score": 34325.74852029261 }, { "content": "\n\n/// Offset to user stack\n\npub const USER_STACK_OFFSET: usize = USER_GRANT_OFFSET + PML4_SIZE;\n\npub const USER_STACK_PML4: usize = (USER_STACK_OFFSET & PML4_MASK)/PML4_SIZE;\n\n/// Size of user stack\n\npub const USER_STACK_SIZE: usize = 1024 * 1024; // 1 MB\n\n\n\n/// Offset to 
user sigstack\n\npub const USER_SIGSTACK_OFFSET: usize = USER_STACK_OFFSET + PML4_SIZE;\n\npub const USER_SIGSTACK_PML4: usize = (USER_SIGSTACK_OFFSET & PML4_MASK)/PML4_SIZE;\n\n/// Size of user sigstack\n\npub const USER_SIGSTACK_SIZE: usize = 256 * 1024; // 256 KB\n\n\n\n/// Offset to user TLS\n\npub const USER_TLS_OFFSET: usize = USER_SIGSTACK_OFFSET + PML4_SIZE;\n\npub const USER_TLS_PML4: usize = (USER_TLS_OFFSET & PML4_MASK)/PML4_SIZE;\n\n\n\n/// Offset to user temporary image (used when cloning)\n\npub const USER_TMP_OFFSET: usize = USER_TLS_OFFSET + PML4_SIZE;\n\npub const USER_TMP_PML4: usize = (USER_TMP_OFFSET & PML4_MASK)/PML4_SIZE;\n", "file_path": "kernel/src/consts.rs", "rank": 36, "score": 34324.82199552132 }, { "content": "pub const PML4_SIZE: usize = 0x0000_0080_0000_0000;\n\npub const PML4_MASK: usize = 0x0000_ff80_0000_0000;\n\n\n\n/// Offset of recursive paging\n\npub const RECURSIVE_PAGE_OFFSET: usize = (-(PML4_SIZE as isize)) as usize;\n\npub const RECURSIVE_PAGE_PML4: usize = (RECURSIVE_PAGE_OFFSET & PML4_MASK)/PML4_SIZE;\n\n\n\n/// Offset of kernel\n\npub const KERNEL_OFFSET: usize = RECURSIVE_PAGE_OFFSET - PML4_SIZE;\n\npub const KERNEL_PML4: usize = (KERNEL_OFFSET & PML4_MASK)/PML4_SIZE;\n\n\n\n/// Offset to kernel heap\n\npub const KERNEL_HEAP_OFFSET: usize = KERNEL_OFFSET - PML4_SIZE;\n\npub const KERNEL_HEAP_PML4: usize = (KERNEL_HEAP_OFFSET & PML4_MASK)/PML4_SIZE;\n\n/// Size of kernel heap\n\npub const KERNEL_HEAP_SIZE: usize = 1 * 1024 * 1024; // 1 MB\n\n\n\n/// Offset to kernel percpu variables\n\n//TODO: Use 64-bit fs offset to enable this pub const KERNEL_PERCPU_OFFSET: usize = KERNEL_HEAP_OFFSET - PML4_SIZE;\n\npub const KERNEL_PERCPU_OFFSET: usize = 0xC000_0000;\n", "file_path": "kernel/src/consts.rs", "rank": 37, "score": 34323.4650005802 }, { "content": "/// Size of kernel percpu variables\n\npub const KERNEL_PERCPU_SIZE: usize = 64 * 1024; // 64 KB\n\n\n\n/// Offset to user image\n\npub const USER_OFFSET: usize = 0;\n\npub 
const USER_PML4: usize = (USER_OFFSET & PML4_MASK)/PML4_SIZE;\n\n\n\n/// Offset to user TCB\n\npub const USER_TCB_OFFSET: usize = 0xB000_0000;\n\n\n\n/// Offset to user arguments\n\npub const USER_ARG_OFFSET: usize = USER_OFFSET + PML4_SIZE/2;\n\n\n\n/// Offset to user heap\n\npub const USER_HEAP_OFFSET: usize = USER_OFFSET + PML4_SIZE;\n\npub const USER_HEAP_PML4: usize = (USER_HEAP_OFFSET & PML4_MASK)/PML4_SIZE;\n\n\n\n/// Offset to user grants\n\npub const USER_GRANT_OFFSET: usize = USER_HEAP_OFFSET + PML4_SIZE;\n\npub const USER_GRANT_PML4: usize = (USER_GRANT_OFFSET & PML4_MASK)/PML4_SIZE;\n", "file_path": "kernel/src/consts.rs", "rank": 38, "score": 34323.33407372602 }, { "content": "#[repr(packed)]\n\npub struct MemoryArea {\n\n pub base_addr: u64,\n\n pub length: u64,\n\n pub _type: u32,\n\n pub acpi: u32\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct MemoryAreaIter {\n\n _type: u32,\n\n i: usize\n\n}\n\n\n\nimpl MemoryAreaIter {\n\n fn new(_type: u32) -> Self {\n\n MemoryAreaIter {\n\n _type: _type,\n\n i: 0\n\n }\n", "file_path": "kernel/src/memory/mod.rs", "rank": 40, "score": 30.72180754617419 }, { "content": "pub const MEMORY_AREA_FREE: u32 = 1;\n\n\n\n/// Memory is reserved\n\npub const MEMORY_AREA_RESERVED: u32 = 2;\n\n\n\n/// Memory is used by ACPI, and can be reclaimed\n\npub const MEMORY_AREA_ACPI: u32 = 3;\n\n\n\n/// A memory map area\n\n#[derive(Copy, Clone, Debug, Default)]\n\n#[repr(packed)]\n\npub struct MemoryArea {\n\n pub base_addr: u64,\n\n pub length: u64,\n\n pub _type: u32,\n\n pub acpi: u32\n\n}\n\n\n\npub unsafe fn memory_map() -> (usize, usize, usize, u32, *mut MemoryDescriptor) {\n\n let boot_services = get_boot_services();\n", "file_path": "boot2snow/src/memory_map.rs", "rank": 41, "score": 25.928400026257115 }, { "content": " scale: u32,\n\n w: u32,\n\n h: u32,\n\n data: Box<[Color]>,\n\n font: &'static [u8],\n\n}\n\n\n\nimpl Display {\n\n pub fn new(output: Output) -> Self {\n\n let w = unsafe { 
(*output.0.mode.info).horizontal_resolution };\n\n let h = unsafe { (*output.0.mode.info).vertical_resolution };\n\n let scale = if h > 1440 { 2 } else { 1 };\n\n Self {\n\n output,\n\n scale,\n\n w,\n\n h,\n\n data: vec![Color::rgb(0, 0, 0); w as usize * h as usize].into_boxed_slice(),\n\n font: include_bytes!(\"../../res/unifont.font\"),\n\n }\n", "file_path": "boot2snow/src/display.rs", "rank": 45, "score": 23.51004377015295 }, { "content": " }\n\n}\n\n\n\nimpl Iterator for MemoryAreaIter {\n\n type Item = &'static MemoryArea;\n\n fn next(&mut self) -> Option<Self::Item> {\n\n while self.i < unsafe { MEMORY_MAP.len() } {\n\n let entry = unsafe { &MEMORY_MAP[self.i] };\n\n self.i += 1;\n\n if entry._type == self._type {\n\n return Some(entry);\n\n }\n\n }\n\n None\n\n }\n\n}\n\n\n\nstatic ALLOCATOR: Mutex<Option<RecycleAllocator<BumpAllocator>>> = Mutex::new(None);\n\n\n\n/// Init memory module\n", "file_path": "kernel/src/memory/mod.rs", "rank": 46, "score": 23.197508314390298 }, { "content": "#[derive(Clone)]\n\npub struct Image {\n\n w: u32,\n\n h: u32,\n\n data: Box<[Color]>\n\n}\n\n\n\nimpl Image {\n\n /// Create a new image\n\n pub fn new(width: u32, height: u32) -> Self {\n\n Self::from_color(width, height, Color::rgb(0, 0, 0))\n\n }\n\n\n\n /// Create a new image filled whole with color\n\n pub fn from_color(width: u32, height: u32, color: Color) -> Self {\n\n Self::from_data(width, height, vec![color; width as usize * height as usize].into_boxed_slice()).unwrap()\n\n }\n\n\n\n /// Create a new image from a boxed slice of colors\n\n pub fn from_data(width: u32, height: u32, data: Box<[Color]>) -> Result<Self, String> {\n", "file_path": "boot2snow/src/image/mod.rs", "rank": 47, "score": 21.797242608018784 }, { "content": "/// A color\n\n#[derive(Copy, Clone)]\n\npub struct Color(pub u32);\n\n\n\nimpl Color {\n\n /// Create a new color from RGB\n\n pub const fn rgb(r: u8, g: u8, b: u8) -> Self {\n\n Color(0xFF000000 | ((r as u32) << 16) | ((g as u32) << 8) 
| (b as u32))\n\n }\n\n\n\n /// Set the alpha\n\n pub const fn rgba(r: u8, g: u8, b: u8, a: u8) -> Self {\n\n Color(((a as u32) << 24) | ((r as u32) << 16) | ((g as u32) << 8) | (b as u32))\n\n }\n\n\n\n /// Get the r value\n\n pub fn r(&self) -> u8 {\n\n ((self.0 & 0x00FF0000) >> 16) as u8\n\n }\n\n\n", "file_path": "share/color.rs", "rank": 49, "score": 21.47408356971884 }, { "content": "mod color;\n\nmod elf;\n\nuse color::*;\n\nuse elf::PhEntIter;\n\n\n\n#[repr(C)]\n\npub struct Info {\n\n\tpub runtime_services: *const (),\n\n\n\n\tpub cmdline_ptr: *const u8,\n\n\tpub cmdline_len: usize,\n\n\n\n\tpub elf_sections: Option<PhEntIter<'static>>,\n\n\tpub kernel_base: usize,\n\n\tpub kernel_size: usize,\n\n\tpub stack_base: usize,\n\n\tpub stack_size: usize,\n\n\n\n\tpub map_addr: u64,\n\n\tpub map_len: u32,\n", "file_path": "share/uefi_proto.rs", "rank": 50, "score": 20.568575939805882 }, { "content": " instances.push(instance);\n\n }\n\n }\n\n\n\n Ok(instances)\n\n }\n\n\n\n pub fn all() -> Vec<Self> where Self: Sized {\n\n Self::locate_handle().unwrap_or(Vec::new())\n\n }\n\n}\n\n\n\npub struct File(pub &'static mut InnerFile);\n\n\n\nimpl File {\n\n pub fn read(&mut self, buf: &mut [u8]) -> Result<usize, Status> {\n\n let mut len = buf.len();\n\n unsafe { (self.0.read)(self.0, &mut len, buf.as_mut_ptr() as *mut Void)? 
};\n\n Ok(len)\n\n }\n", "file_path": "boot2snow/src/fs.rs", "rank": 52, "score": 19.58440348571581 }, { "content": " }\n\n }\n\n}\n\n\n\npub struct FrameIter {\n\n start: Frame,\n\n end: Frame,\n\n}\n\n\n\nimpl Iterator for FrameIter {\n\n type Item = Frame;\n\n\n\n fn next(&mut self) -> Option<Frame> {\n\n if self.start <= self.end {\n\n let frame = self.start.clone();\n\n self.start.number += 1;\n\n Some(frame)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "kernel/src/memory/mod.rs", "rank": 53, "score": 19.45931196202307 }, { "content": "impl Dir {\n\n pub fn open(&mut self, filename: &[u16]) -> Result<File, Status> {\n\n let mut interface = 0 as *mut InnerFile;\n\n unsafe { ((self.0).0.open)((self.0).0, &mut interface, filename.as_ptr(), FILE_MODE_READ, 0)? };\n\n\n\n Ok(File(unsafe { &mut *interface }))\n\n }\n\n\n\n pub fn open_dir(&mut self, filename: &[u16]) -> Result<Dir, Status> {\n\n let file = self.open(filename)?;\n\n Ok(Dir(file))\n\n }\n\n\n\n pub fn read(&mut self) -> Result<Option<FileInfo>, Status> {\n\n let mut info = FileInfo::default();\n\n let buf = unsafe {\n\n slice::from_raw_parts_mut(\n\n &mut info as *mut _ as *mut u8,\n\n mem::size_of_val(&info)\n\n )\n\n };\n\n match self.0.read(buf) {\n\n Ok(0) => Ok(None),\n\n Ok(_len) => Ok(Some(info)),\n\n Err(err) => Err(err)\n\n }\n\n }\n\n}\n\n\n", "file_path": "boot2snow/src/fs.rs", "rank": 55, "score": 18.96746084439335 }, { "content": " if rows > 0 && rows < height {\n\n let off1 = rows * width;\n\n let off2 = height * width - off1;\n\n unsafe {\n\n let data_ptr = self.data.as_mut_ptr() as *mut u32;\n\n fast_copy(data_ptr as *mut u8, data_ptr.offset(off1 as isize) as *const u8, off2 as usize * 4);\n\n fast_set32(data_ptr.offset(off2 as isize), color.0, off1 as usize);\n\n }\n\n }\n\n }\n\n\n\n fn inner_pixel(&mut self, x: i32, y: i32, color: Color) {\n\n let w = self.w;\n\n let h = self.h;\n\n\n\n let not_inner_pixel = x >= 0 && y >= 0 && x < w as i32 && y < h as i32;\n\n if 
not_inner_pixel { return };\n\n\n\n let new = color.0;\n\n let alpha = (new >> 24) & 0xFF;\n", "file_path": "boot2snow/src/display.rs", "rank": 56, "score": 18.81520635940405 }, { "content": "/// from 0x500 to 0x5000 (800 is the absolute total)\n\nstatic mut MEMORY_MAP: [MemoryArea; 512] = [MemoryArea { base_addr: 0, length: 0, _type: 0, acpi: 0 }; 512];\n\n\n\n/// Memory does not exist\n\npub const MEMORY_AREA_NULL: u32 = 0;\n\n\n\n/// Memory is free to use\n\npub const MEMORY_AREA_FREE: u32 = 1;\n\n\n\n/// Memory is reserved\n\npub const MEMORY_AREA_RESERVED: u32 = 2;\n\n\n\n/// Memory is used by ACPI, and can be reclaimed\n\npub const MEMORY_AREA_ACPI: u32 = 3;\n\n\n\n/// Size of pages\n\npub const PAGE_SIZE: usize = 4096;\n\n\n\n/// A memory map area\n\n#[derive(Copy, Clone, Debug, Default)]\n", "file_path": "kernel/src/memory/mod.rs", "rank": 57, "score": 18.617183335342947 }, { "content": " pub fn get(&self) -> usize {\n\n self.0\n\n }\n\n}\n\n\n\n/// A virtual address.\n\n#[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]\n\npub struct VirtualAddress(usize);\n\n\n\nimpl VirtualAddress {\n\n pub fn new(address: usize) -> Self {\n\n VirtualAddress(address)\n\n }\n\n\n\n pub fn get(&self) -> usize {\n\n self.0\n\n }\n\n}\n\n\n\n/// The current memory map. 
It's size is maxed out to 512 entries, due to it being\n", "file_path": "kernel/src/memory/mod.rs", "rank": 58, "score": 18.41243872971427 }, { "content": "\n\nimpl Pic {\n\n pub const fn new(port: u16) -> Self {\n\n Self {\n\n cmd: Port::new(port),\n\n data: Port::new(port + 1),\n\n }\n\n }\n\n\n\n pub unsafe fn ack(&mut self) {\n\n self.cmd.write(0x20);\n\n }\n\n\n\n pub unsafe fn mask_set(&mut self, irq: u8) {\n\n assert!(irq < 8);\n\n\n\n let mut mask = self.data.read();\n\n mask |= 1 << irq;\n\n self.data.write(mask);\n\n }\n", "file_path": "kernel/arch/x86_64/pic.rs", "rank": 59, "score": 18.325507857091587 }, { "content": "\tpub descriptor_size: u32,\n\n\n\n\tpub video_info: *const VideoInfo,\n\n}\n\n\n\n#[repr(C)]\n\npub struct VideoInfo {\n\n\tpub physbaseptr: u64,\n\n\tpub xresolution: u32,\n\n\tpub yresolution: u32,\n\n\tpub splashx: i32,\n\n\tpub splashy: i32\n\n}\n\n\n\n// TODO: Grab this from libuefi\n\n#[repr(C)]\n\n#[derive(Copy,Clone)]\n\npub struct MemoryDescriptor\n\n{\n\n\tpub ty: u32,\n\n\t_pad: u32,\n\n\tpub physical_start: u64,\n\n\tpub virtual_start: u64,\n\n\tpub number_of_pages: u64,\n\n\tpub attribute: u64,\n\n\t_pad2: u64,\n\n}", "file_path": "share/uefi_proto.rs", "rank": 60, "score": 18.018663535077696 }, { "content": " }\n\n\n\n pub fn scale(&self) -> u32 {\n\n self.scale\n\n }\n\n\n\n pub fn scroll(&mut self, rows: usize, color: Color) {\n\n let scale = self.scale as usize;\n\n self.inner_scroll(rows * scale, color);\n\n }\n\n\n\n pub fn blit(&mut self, x: i32, y: i32, w: u32, h: u32) -> bool {\n\n let scale = self.scale;\n\n self.inner_blit(\n\n x * scale as i32,\n\n y * scale as i32,\n\n w * scale,\n\n h * scale\n\n )\n\n }\n", "file_path": "boot2snow/src/display.rs", "rank": 61, "score": 17.92512136629475 }, { "content": "\n\n#[no_mangle]\n\npub extern \"C\" fn memcmp(dst: *mut u8, src: *const u8, count: usize) -> isize {\n\n\tunsafe {\n\n\t\tlet rv: isize;\n\n\t\tasm!(\"repnz cmpsb ; movq $$0, $0 ; ja 1f; jb 2f; jmp 3f; 1: inc $0 
; jmp 3f; 2: dec $0; 3:\" : \"=r\" (rv) : \"{rcx}\" (count), \"{rdi}\" (dst), \"{rsi}\" (src) : \"rcx\", \"rsi\", \"rdi\" : \"volatile\");\n\n\t\trv\n\n\t}\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern fn memmove(dest: *mut u8, src: *const u8,\n\n n: usize) -> *mut u8 {\n\n if src < dest as *const u8 {\n\n let n_usize: usize = n/WORD_SIZE; // Number of word sized groups\n\n let mut i: usize = n_usize*WORD_SIZE;\n\n\n\n // Copy `WORD_SIZE` bytes at a time\n\n while i != 0 {\n\n i -= WORD_SIZE;\n", "file_path": "kernel/src/lib.rs", "rank": 62, "score": 17.056381023443496 }, { "content": "\n\n fn inner_blit(&mut self, x: i32, y: i32, w: u32, h: u32) -> bool {\n\n let status = (self.output.0.blt)(\n\n self.output.0,\n\n self.data.as_mut_ptr() as *mut BltPixel,\n\n BltOperation::BufferToVideo,\n\n x as usize,\n\n y as usize,\n\n x as usize,\n\n y as usize,\n\n w as usize,\n\n h as usize,\n\n 0\n\n );\n\n status.into_result().is_ok()\n\n }\n\n\n\n fn inner_scroll(&mut self, rows: usize, color: Color) {\n\n let width = self.w as usize;\n\n let height = self.h as usize;\n", "file_path": "boot2snow/src/display.rs", "rank": 63, "score": 16.76361791586077 }, { "content": " InactivePageTable { p4_frame: Frame::containing_address(PhysicalAddress::new(cr3)) }\n\n }\n\n\n\n pub unsafe fn address(&self) -> usize {\n\n self.p4_frame.start_address().get()\n\n }\n\n}\n\n\n\n/// Page\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct Page {\n\n number: usize\n\n}\n\n\n\nimpl Page {\n\n pub fn start_address(&self) -> VirtualAddress {\n\n VirtualAddress::new(self.number * PAGE_SIZE)\n\n }\n\n\n\n pub fn p4_index(&self) -> usize {\n", "file_path": "kernel/arch/x86_64/paging/mod.rs", "rank": 64, "score": 16.63275821593428 }, { "content": "\t\t// SAFE: Converts to POD for read\n\n\t\tkernel_file.read( unsafe { slice::from_raw_parts_mut( &mut ent as *mut _ as *mut u8, mem::size_of::<elf::PhEnt>() ) } ).expect(\"PhEnt read\");\n\n\t\t\n\n\t\tif ent.p_type == 1 
{\n\n\t\t\tunsafe {\n\n\t\t\t\tKERNEL_BASE = ent.p_vaddr as usize;\n\n\t\t\t\tKERNEL_SIZE = ent.p_memsz as usize;\n\n\t\t\t}\n\n\t\t\t\n\n\t\t\tlet mut addr = ent.p_paddr;\n\n\t\t\t// SAFE: Correct call to FFI\n\n\t\t\tunsafe { (boot_services.allocate_pages)(\n\n\t\t\t\tAllocateType::Address,\n\n\t\t\t\tMemoryType::LoaderData,\n\n\t\t\t\t(ent.p_memsz + 0xFFF) as usize / 0x1000,\n\n\t\t\t\t&mut addr\n\n\t\t\t).expect(\"Allocating pages for program segment\") };\n\n\t\t\t\n\n\t\t\t// SAFE: This memory has just been allocated by the above\n\n\t\t\tlet data_slice = unsafe { slice::from_raw_parts_mut(addr as usize as *mut u8, ent.p_memsz as usize) };\n", "file_path": "boot2snow/src/boot2snow.rs", "rank": 65, "score": 16.58937449189676 }, { "content": "\n\n SUCCESS\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn memcpy(dst: *mut u8, src: *const u8, count: usize) {\n\n\tunsafe {\n\n\t\tasm!(\"rep movsb\" : : \"{rcx}\" (count), \"{rdi}\" (dst), \"{rsi}\" (src) : \"rcx\", \"rsi\", \"rdi\" : \"volatile\");\n\n\t}\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn memset(dst: *mut u8, val: u8, count: usize) {\n\n\tunsafe {\n\n\t\tasm!(\"rep stosb\" : : \"{rcx}\" (count), \"{rdi}\" (dst), \"{al}\" (val) : \"rcx\", \"rdi\" : \"volatile\");\n\n\t}\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn memcmp(dst: *mut u8, src: *const u8, count: usize) -> isize {\n\n\tunsafe {\n\n\t\tlet rv: isize;\n\n\t\tasm!(\"repnz cmpsb ; movq $$0, $0 ; ja 1f; jb 2f; jmp 3f; 1: inc $0 ; jmp 3f; 2: dec $0; 3:\" : \"=r\" (rv) : \"{rcx}\" (count), \"{rdi}\" (dst), \"{rsi}\" (src) : \"rcx\", \"rsi\", \"rdi\" : \"volatile\");\n\n\t\trv\n\n\t}\n\n}", "file_path": "boot2snow/src/lib.rs", "rank": 66, "score": 16.447358962852967 }, { "content": "\n\nimpl Protocol for FileSystem {\n\n fn guid() -> Guid {\n\n FILE_SYSTEM_GUID\n\n }\n\n\n\n\tunsafe fn from_ptr(v: *const Void) -> *const Self {\n\n\t\tv as *const _\n\n }\n\n}\n\n\n\nimpl FileSystem {\n\n pub fn root(&mut self) -> Result<Dir, Status> {\n\n let mut 
interface = 0 as *mut InnerFile;\n\n unsafe { (self.0.open_volume)(self.0, &mut interface)? };\n\n\n\n Ok(Dir(File(unsafe { &mut *interface })))\n\n }\n\n\n\n\tpub fn handle_protocol(handle: &Handle) -> Result<Self, Status> {\n", "file_path": "boot2snow/src/fs.rs", "rank": 67, "score": 16.367547634524797 }, { "content": "\n\n pub fn range_inclusive(start: Page, end: Page) -> PageIter {\n\n PageIter {\n\n start: start,\n\n end: end,\n\n }\n\n }\n\n}\n\n\n\npub struct PageIter {\n\n start: Page,\n\n end: Page,\n\n}\n\n\n\nimpl Iterator for PageIter {\n\n type Item = Page;\n\n\n\n fn next(&mut self) -> Option<Page> {\n\n if self.start <= self.end {\n\n let page = self.start;\n\n self.start.number += 1;\n\n Some(page)\n\n } else {\n\n None\n\n }\n\n }\n\n}", "file_path": "kernel/arch/x86_64/paging/mod.rs", "rank": 68, "score": 16.14647935537106 }, { "content": "\n\nimpl<L> Table<L> where L: TableLevel {\n\n pub fn is_unused(&self) -> bool {\n\n if self.entry_count() > 0 {\n\n return false;\n\n }\n\n\n\n true\n\n }\n\n\n\n pub fn zero(&mut self) {\n\n for entry in self.entries.iter_mut() {\n\n entry.set_zero();\n\n }\n\n }\n\n\n\n /// Set number of entries in first table entry\n\n fn set_entry_count(&mut self, count: u64) {\n\n debug_assert!(count <= ENTRY_COUNT as u64, \"count can't be greater than ENTRY_COUNT\");\n\n self.entries[0].set_counter_bits(count);\n", "file_path": "kernel/arch/x86_64/paging/table.rs", "rank": 69, "score": 16.05206908529462 }, { "content": "\n\n pub fn set_icr(&mut self, value: u64) {\n\n if self.x2 {\n\n unsafe { Msr::new(IA32_X2APIC_ICR).write(value); }\n\n } else {\n\n unsafe {\n\n while self.read(0x300) & 1 << 12 == 1 << 12 {}\n\n self.write(0x310, (value >> 32) as u32);\n\n self.write(0x300, value as u32);\n\n while self.read(0x300) & 1 << 12 == 1 << 12 {}\n\n }\n\n }\n\n }\n\n\n\n pub fn ipi(&mut self, apic_id: usize) {\n\n let mut icr = 0x4040;\n\n if self.x2 {\n\n icr |= (apic_id as u64) << 32;\n\n } else {\n\n icr |= (apic_id as u64) 
<< 56;\n", "file_path": "kernel/arch/x86_64/apic.rs", "rank": 70, "score": 15.855282459976937 }, { "content": " for x in start_x..start_x + len {\n\n self.inner_pixel(x, y, color);\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn inner_scroll(&mut self, rows: usize, color: Color) {\n\n let width = self.w as usize;\n\n let height = self.h as usize;\n\n if rows > 0 && rows < height {\n\n let off1 = rows * width;\n\n let off2 = height * width - off1;\n\n unsafe {\n\n let output_ptr = self.output as *mut u32;\n\n fast_copy(output_ptr as *mut u8, output_ptr.offset(off1 as isize) as *const u8, off2 as usize * 4);\n\n fast_set32(output_ptr.offset(off2 as isize), color.0, off1 as usize);\n\n }\n\n }\n\n }\n", "file_path": "kernel/src/display.rs", "rank": 71, "score": 15.734969414581409 }, { "content": "pub unsafe fn fast_copy(dst: *mut u8, src: *const u8, len: usize) {\n\n asm!(\"cld\n\n rep movsb\"\n\n :\n\n : \"{rdi}\"(dst as usize), \"{rsi}\"(src as usize), \"{rcx}\"(len)\n\n : \"cc\", \"memory\", \"rdi\", \"rsi\", \"rcx\"\n\n : \"intel\", \"volatile\");\n\n}\n\n\n\n#[cfg(target_arch = \"x86_64\")]\n\n#[inline(always)]\n\n#[cold]\n\npub unsafe fn fast_set32(dst: *mut u32, src: u32, len: usize) {\n\n asm!(\"cld\n\n rep stosd\"\n\n :\n\n : \"{rdi}\"(dst as usize), \"{eax}\"(src), \"{rcx}\"(len)\n\n : \"cc\", \"memory\", \"rdi\", \"rcx\"\n\n : \"intel\", \"volatile\");\n\n}", "file_path": "kernel/src/display.rs", "rank": 72, "score": 15.68149886103158 }, { "content": "#[cold]\n\npub unsafe fn fast_copy(dst: *mut u8, src: *const u8, len: usize) {\n\n asm!(\"cld\n\n rep movsb\"\n\n :\n\n : \"{rdi}\"(dst as usize), \"{rsi}\"(src as usize), \"{rcx}\"(len)\n\n : \"cc\", \"memory\", \"rdi\", \"rsi\", \"rcx\"\n\n : \"intel\", \"volatile\");\n\n}\n\n\n\n#[cfg(target_arch = \"x86_64\")]\n\n#[inline(always)]\n\n#[cold]\n\npub unsafe fn fast_set32(dst: *mut u32, src: u32, len: usize) {\n\n asm!(\"cld\n\n rep stosd\"\n\n :\n\n : \"{rdi}\"(dst as usize), \"{eax}\"(src), \"{rcx}\"(len)\n\n : \"cc\", 
\"memory\", \"rdi\", \"rcx\"\n\n : \"intel\", \"volatile\");\n\n}", "file_path": "boot2snow/src/display.rs", "rank": 73, "score": 15.614727108574003 }, { "content": " }\n\n \n\n pub fn pixel(&mut self, x: i32, y: i32, color: Color) {\n\n self.inner_pixel(x, y, color);\n\n }\n\n\n\n pub fn rect(&mut self, x: i32, y: i32, w: u32, h: u32, color: Color) {\n\n self.inner_rect(x, y, w, h, color);\n\n }\n\n\n\n pub fn line(&mut self, argx1: i32, argy1: i32, argx2: i32, argy2: i32, color: Color) {\n\n self.line(argx1, argy1, argx2, argy2, color);\n\n }\n\n\n\n pub fn circle(&mut self, x0: i32, y0: i32, radius: i32, filled: bool, color: Color) {\n\n self.inner_circle(x0, y0, radius, filled, color);\n\n }\n\n\n\n pub fn string(&mut self, x: i32, y: i32, s: &str, color: Color) {\n\n let prompt = s.clone();\n", "file_path": "kernel/src/display.rs", "rank": 74, "score": 15.526772986195995 }, { "content": "}\n\n\n\npub unsafe fn init_ap() {\n\n APIC.init_ap();\n\n}\n\n\n\npub struct Apic {\n\n pub address: usize,\n\n pub x2: bool\n\n}\n\n\n\nimpl Apic {\n\n unsafe fn init(&mut self) {\n\n self.address = (Msr::new(IA32_APIC_BASE).read() as usize & 0xFFFF_0000) + ::KERNEL_BASE;\n\n self.x2 = CpuId::new().get_feature_info().unwrap().has_x2apic();\n\n self.init_ap();\n\n }\n\n\n\n unsafe fn init_ap(&mut self) {\n\n if self.x2 {\n", "file_path": "kernel/arch/x86_64/apic.rs", "rank": 75, "score": 15.51736035902047 }, { "content": "//! 
Some code was borrowed from [System76 Firmware Update](https://github.com/system76/firmware-update) and [Redox OS Orbital Client Library](https://github.com/redox-os/orbclient)\n\n\n\nuse core::cmp;\n\nuse color::*;\n\n\n\npub struct Display {\n\n output: *mut Color,\n\n scale: u32,\n\n w: u32,\n\n h: u32,\n\n font: &'static [u8]\n\n}\n\n\n\nimpl Display {\n\n pub fn new(output: *mut Color, w: u32, h: u32) -> Self {\n\n let scale = if h > 1440 { 2 } else { 1 };\n\n Self {\n\n output,\n\n scale,\n\n w: w * scale,\n", "file_path": "kernel/src/display.rs", "rank": 76, "score": 15.099522636186327 }, { "content": " fn inner_rect(&mut self, x: i32, y: i32, w: u32, h: u32, color: Color) {\n\n let self_w = self.w;\n\n let self_h = self.h;\n\n\n\n let start_y = cmp::max(0, cmp::min(self_h as i32 - 1, y));\n\n let end_y = cmp::max(start_y, cmp::min(self_h as i32, y + h as i32));\n\n\n\n let start_x = cmp::max(0, cmp::min(self_w as i32 - 1, x));\n\n let len = cmp::max(start_x, cmp::min(self_w as i32, x + w as i32)) - start_x;\n\n\n\n let alpha = (color.0 >> 24) & 0xFF;\n\n if alpha <= 0 { return };\n\n\n\n if alpha >= 255 {\n\n for y in start_y..end_y {\n\n unsafe {\n\n fast_set32(self.data.as_mut_ptr().offset((y * self_w as i32 + start_x) as isize) as *mut u32, color.0, len as usize);\n\n }\n\n }\n\n } else {\n", "file_path": "boot2snow/src/display.rs", "rank": 77, "score": 15.065626287284124 }, { "content": "//! # Memory management\n\n//! 
Some code was borrowed from [Redox OS Kernel](https://github.com/redox-os/kernel) and [Phil Opp's Blog](http://os.phil-opp.com/allocating-frames.html)\n\n\n\nuse self::bump::BumpAllocator;\n\nuse self::recycle::RecycleAllocator;\n\n\n\nuse spin::Mutex;\n\n\n\npub mod bump;\n\npub mod recycle;\n\n\n\n/// A physical address.\n\n#[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]\n\npub struct PhysicalAddress(usize);\n\n\n\nimpl PhysicalAddress {\n\n pub fn new(address: usize) -> Self {\n\n PhysicalAddress(address)\n\n }\n\n\n", "file_path": "kernel/src/memory/mod.rs", "rank": 78, "score": 14.978396349531458 }, { "content": " }\n\n }\n\n\n\n pub fn version(&self) -> u32 {\n\n if self.x2 {\n\n unsafe { Msr::new(IA32_X2APIC_VERSION).read() as u32 }\n\n } else {\n\n unsafe { self.read(0x30) }\n\n }\n\n }\n\n\n\n pub fn icr(&self) -> u64 {\n\n if self.x2 {\n\n unsafe { Msr::new(IA32_X2APIC_ICR).read() }\n\n } else {\n\n unsafe {\n\n (self.read(0x310) as u64) << 32 | self.read(0x300) as u64\n\n }\n\n }\n\n }\n", "file_path": "kernel/arch/x86_64/apic.rs", "rank": 80, "score": 14.939034436853362 }, { "content": "\n\n pub fn flush(&mut self, page: Page) {\n\n unsafe { tlb::flush(VirtAddr::new(page.start_address().get() as u64)); }\n\n }\n\n\n\n pub fn flush_all(&mut self) {\n\n unsafe { tlb::flush_all(); }\n\n }\n\n\n\n pub fn with<F>(&mut self, table: &mut InactivePageTable, temporary_page: &mut TemporaryPage, f: F)\n\n where F: FnOnce(&mut Mapper)\n\n {\n\n use x86_64::registers::control::Cr3;\n\n\n\n {\n\n let backup = Frame::containing_address(PhysicalAddress::new(unsafe { Cr3::read().0.start_address().as_u64() as usize }));\n\n\n\n // map temporary_page to current p4 table\n\n let p4_table = temporary_page.map_table_frame(backup.clone(), EntryFlags::PRESENT | EntryFlags::WRITABLE | EntryFlags::NO_EXECUTE, self);\n\n\n", "file_path": "kernel/arch/x86_64/paging/mod.rs", "rank": 81, "score": 14.885251368183095 }, { "content": "\n\nimpl Protocol for Output {\n\n fn 
guid() -> Guid {\n\n GRAPHICS_OUTPUT_PROTOCOL_GUID\n\n }\n\n\n\n unsafe fn from_ptr(v: *const Void) -> *const Self {\n\n\t\tv as *const _\n\n }\n\n}\n\n\n\nimpl Output {\n\n pub fn one() -> Result<Self, Status> {\n\n let boot_services = get_boot_services();\n\n Ok(Output(unsafe { mem::transmute::<*mut GraphicsOutput, &'static mut GraphicsOutput>(mem::transmute::<&'static GraphicsOutput, *mut GraphicsOutput>(boot_services.locate_protocol::<GraphicsOutput>().unwrap())) }))\n\n }\n\n}\n\n\n\npub struct Display {\n\n output: Output,\n", "file_path": "boot2snow/src/display.rs", "rank": 82, "score": 14.823844095115039 }, { "content": " w: u32,\n\n h: u32,\n\n image: &'a Image\n\n}\n\n\n\nimpl<'a> ImageRoi<'a> {\n\n /// Draw the ROI on a window\n\n pub fn draw<R: Renderer>(&self, renderer: &mut R, x: i32, mut y: i32) {\n\n let stride = self.image.w;\n\n let mut offset = (self.y * stride + self.x) as usize;\n\n let last_offset = cmp::min(((self.y + self.h) * stride + self.x) as usize, self.image.data.len());\n\n while offset < last_offset {\n\n let next_offset = offset + stride as usize;\n\n renderer.image(x, y, self.w, 1, &self.image.data[offset..]);\n\n offset = next_offset;\n\n y += 1;\n\n }\n\n }\n\n}\n\n\n", "file_path": "boot2snow/src/image/mod.rs", "rank": 83, "score": 14.800746166070919 }, { "content": "//! # Bump frame allocator\n\n//! 
Some code was borrowed from [Redox OS Kernel](https://github.com/redox-os/kernel) and [Phil Opp's Blog](http://os.phil-opp.com/allocating-frames.html)\n\n\n\nuse super::PhysicalAddress;\n\n\n\nuse super::{Frame, FrameAllocator, MemoryArea, MemoryAreaIter};\n\n\n\npub struct BumpAllocator {\n\n next_free_frame: Frame,\n\n current_area: Option<&'static MemoryArea>,\n\n areas: MemoryAreaIter,\n\n kernel_start: Frame,\n\n kernel_end: Frame\n\n}\n\n\n\nimpl BumpAllocator {\n\n pub fn new(kernel_start: usize, kernel_end: usize, memory_areas: MemoryAreaIter) -> Self {\n\n let mut allocator = Self {\n\n next_free_frame: Frame::containing_address(PhysicalAddress::new(0)),\n\n current_area: None,\n", "file_path": "kernel/src/memory/bump.rs", "rank": 84, "score": 14.70902318442361 }, { "content": " }\n\n}\n\n\n\npub const ADDRESS_MASK: usize = 0x000f_ffff_ffff_f000;\n\npub const COUNTER_MASK: u64 = 0x3ff0_0000_0000_0000;\n\n\n\nimpl Entry {\n\n /// Clear entry\n\n pub fn set_zero(&mut self) {\n\n self.0 = 0;\n\n }\n\n\n\n /// Is the entry unused?\n\n pub fn is_unused(&self) -> bool {\n\n self.0 == (self.0 & COUNTER_MASK)\n\n }\n\n\n\n /// Make the entry unused\n\n pub fn set_unused(&mut self) {\n\n self.0 &= COUNTER_MASK;\n", "file_path": "kernel/arch/x86_64/paging/entry.rs", "rank": 85, "score": 14.673674141921282 }, { "content": " let boot_services = get_boot_services();\n\n\n\n\t\tlet mut ptr = 0 as *mut Void;\n\n\t\t// SAFE: Pointer cannot cause unsafety\n\n\t\tunsafe { (boot_services.handle_protocol)(*handle, &Self::guid(), &mut ptr) }\n\n\t\t\t.err_or_else( || unsafe { FileSystem(&mut *(ptr as *mut SimpleFileSystem)) } )\n\n\t}\n\n\n\n pub fn locate_handle() -> Result<Vec<Self>, Status> where Self: Sized {\n\n let boot_services = get_boot_services();\n\n\n\n let guid = Self::guid();\n\n let mut handles = Vec::with_capacity(256);\n\n let mut len = handles.capacity() * mem::size_of::<Handle>();\n\n unsafe { (boot_services.locate_handle)(LocateSearchType::ByProtocol, 
Some(&guid), 0 as *mut Void, &mut len, handles.as_mut_ptr())? };\n\n unsafe { handles.set_len(len / mem::size_of::<Handle>()); }\n\n\n\n let mut instances = Vec::new();\n\n for handle in handles {\n\n if let Ok(instance) = Self::handle_protocol(&handle) {\n", "file_path": "boot2snow/src/fs.rs", "rank": 86, "score": 14.161189392112934 }, { "content": "\n\n pub fn read_to_end(&mut self, vec: &mut Vec<u8>) -> Result<usize, Status> {\n\n let mut total = 0;\n\n let mut buf = [0; 8192];\n\n\n\n while let Some(count) = Some(self.read(&mut buf)?) {\n\n if count == 0 { break; }\n\n\n\n vec.extend(&buf[.. count]);\n\n total += count;\n\n }\n\n\n\n Ok(total)\n\n }\n\n\n\n pub fn write(&mut self, buf: &[u8]) -> Result<usize, Status> {\n\n let mut len = buf.len();\n\n unsafe { (self.0.write)(self.0, &mut len, buf.as_ptr() as *mut Void)? };\n\n Ok(len)\n\n }\n", "file_path": "boot2snow/src/fs.rs", "rank": 87, "score": 14.136388296808857 }, { "content": "\n\n pub fn get_position(&mut self) -> Result<u64, Status> {\n\n let mut pos = 0;\n\n\t\tunsafe { (self.0.get_position)(self.0, &mut pos)? 
};\n\n Ok(pos)\n\n }\n\n\n\n pub fn set_position(&mut self, pos: u64) {\n\n unsafe { (self.0.set_position)(self.0, pos) };\n\n }\n\n}\n\n\n\nimpl Drop for File {\n\n fn drop(&mut self) {\n\n let _ = unsafe { (self.0.close)(self.0) };\n\n }\n\n}\n\n\n\npub struct Dir(pub File);\n\n\n", "file_path": "boot2snow/src/fs.rs", "rank": 88, "score": 13.868100945268 }, { "content": "\t\t\tsplashx: display.width() as i32 / 2,\n\n\t\t\tsplashy: (display.height() as i32 + splash.height() as i32) / 2\n\n\t\t};\n\n\n\n\t\tlet boot_info = kernel_proto::Info {\n\n\t\t\truntime_services: runtime_services as *const _ as *const (),\n\n\t\t\t\n\n\t\t\t// TODO: Get from the configuration\n\n\t\t\tcmdline_ptr: 1 as *const u8,\n\n\t\t\tcmdline_len: 0,\n\n\n\n\t\t\telf_sections: Some(sections),\n\n\t\t\tkernel_base: unsafe { KERNEL_BASE },\n\n\t\t\tkernel_size: unsafe { KERNEL_SIZE },\n\n\t\t\tstack_base: STACK_BASE,\n\n\t\t\tstack_size: STACK_BASE + STACK_SIZE,\n\n\t\t\t\n\n\t\t\tmap_addr: MM_BASE,\n\n\t\t\tmap_len: (map_size / ent_size) as u32,\n\n\t\t\tdescriptor_size: mem::size_of::<MemoryDescriptor>() as u32,\n", "file_path": "boot2snow/src/boot2snow.rs", "rank": 89, "score": 13.82533724790702 }, { "content": " x -= 1;\n\n err += 1 - 2*x;\n\n }\n\n }\n\n }\n\n\n\n pub fn rounded_rect(&mut self, x: i32, y: i32, w: u32, h: u32, radius: u32, filled: bool, color: Color) {\n\n let w = w as i32;\n\n let h = h as i32;\n\n let r = radius as i32;\n\n\n\n\n\n if filled {\n\n //Draw inside corners\n\n self.arc(x + r, y + r, -r, 1 << 4 | 1 << 6, color);\n\n self.arc(x + w - 1 - r, y + r, -r, 1 << 5 | 1 << 7, color);\n\n self.arc(x + r, y + h - 1 - r,- r, 1 << 0 | 1 << 2, color);\n\n self.arc(x + w - 1 - r, y + h - 1 - r, -r, 1 << 1 | 1 << 3, color);\n\n\n\n // Draw inside rectangles\n", "file_path": "kernel/src/display.rs", "rank": 90, "score": 13.8248494881281 }, { "content": " }\n\n}\n\n\n\npub struct Keyboard {\n\n control: Port<u8>,\n\n input: Port<u8>,\n\n shift_down: bool,\n\n caps_lock: 
bool,\n\n num_lock: bool,\n\n scroll_lock: bool,\n\n}\n\n\n\npub unsafe fn init() -> Result<(), ()> {\n\n let _ = KEYBOARD.control.write(0xAE);\n\n\n\n KEYBOARD.input_ack();\n\n\n\n let _ = KEYBOARD.input.write(0xF4);\n\n\n\n KEYBOARD.ack();\n", "file_path": "kernel/arch/x86_64/keyboard.rs", "rank": 91, "score": 13.665572550990214 }, { "content": "//! Recycle allocator\n\n//! Uses freed frames if possible, then uses inner allocator\n\n//! Some code was borrowed from [Redox OS Kernel](https://github.com/redox-os/kernel)\n\n\n\nuse alloc::Vec;\n\n\n\nuse super::PhysicalAddress;\n\n\n\nuse super::{Frame, FrameAllocator};\n\n\n\npub struct RecycleAllocator<T: FrameAllocator> {\n\n inner: T,\n\n noncore: bool,\n\n free: Vec<(usize, usize)>,\n\n}\n\n\n\nimpl<T: FrameAllocator> RecycleAllocator<T> {\n\n pub fn new(inner: T) -> Self {\n\n Self {\n\n inner: inner,\n", "file_path": "kernel/src/memory/recycle.rs", "rank": 92, "score": 13.512817294326284 }, { "content": " input: Port::new(0x60),\n\n shift_down: false,\n\n caps_lock: false,\n\n num_lock: false,\n\n scroll_lock: false\n\n }\n\n }\n\n\n\n pub unsafe fn is_output(&self) -> bool {\n\n if (self.control.read() & 0x01) != 0 { return true } else { return false }\n\n }\n\n\n\n pub unsafe fn is_input(&self) -> bool {\n\n if (self.control.read() & 0x02) != 0 { return true } else { return false }\n\n }\n\n\n\n pub unsafe fn scan_code(&self) -> u8 {\n\n while !self.is_output() { }\n\n self.input.read()\n\n }\n", "file_path": "kernel/arch/x86_64/keyboard.rs", "rank": 93, "score": 13.363998415511203 }, { "content": "\n\nuse core::mem;\n\nuse slab_allocator::LockedHeap;\n\nuse consts::*;\n\n\n\nconst WORD_SIZE: usize = mem::size_of::<usize>();\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn memcpy(dst: *mut u8, src: *const u8, count: usize) {\n\n\tunsafe {\n\n\t\tasm!(\"rep movsb\" : : \"{rcx}\" (count), \"{rdi}\" (dst), \"{rsi}\" (src) : \"rcx\", \"rsi\", \"rdi\" : \"volatile\");\n\n\t}\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" 
fn memset(dst: *mut u8, val: u8, count: usize) {\n\n\tunsafe {\n\n\t\tasm!(\"rep stosb\" : : \"{rcx}\" (count), \"{rdi}\" (dst), \"{al}\" (val) : \"rcx\", \"rdi\" : \"volatile\");\n\n\t}\n\n}\n", "file_path": "kernel/src/lib.rs", "rank": 94, "score": 13.130848280098574 }, { "content": "/// Must be called once, and only once,\n\npub unsafe fn init(kernel_start: usize, kernel_end: usize) {\n\n // Copy memory map from bootloader location\n\n for (i, entry) in MEMORY_MAP.iter_mut().enumerate() {\n\n *entry = *(0x500 as *const MemoryArea).offset(i as isize);\n\n if entry._type != MEMORY_AREA_NULL {\n\n println!(\"{:?}\", entry);\n\n }\n\n }\n\n\n\n *ALLOCATOR.lock() = Some(RecycleAllocator::new(BumpAllocator::new(kernel_start, kernel_end, MemoryAreaIter::new(MEMORY_AREA_FREE))));\n\n}\n\n\n\n/// Init memory module after core\n\n/// Must be called once, and only once,\n\npub unsafe fn init_noncore() {\n\n if let Some(ref mut allocator) = *ALLOCATOR.lock() {\n\n allocator.set_noncore(true)\n\n } else {\n\n panic!(\"frame allocator not initialized\");\n\n }\n\n}\n\n\n\n/// Get the number of frames available\n", "file_path": "kernel/src/memory/mod.rs", "rank": 95, "score": 13.005495364040854 }, { "content": " Msr::new(IA32_APIC_BASE).write(Msr::new(IA32_APIC_BASE).read() | 1 << 10);\n\n Msr::new(IA32_X2APIC_SIVR).write(0x100);\n\n } else {\n\n self.write(0xF0, 0x100);\n\n }\n\n }\n\n\n\n unsafe fn read(&self, reg: u32) -> u32 {\n\n volatile_load((self.address + reg as usize) as *const u32)\n\n }\n\n\n\n unsafe fn write(&mut self, reg: u32, value: u32) {\n\n volatile_store((self.address + reg as usize) as *mut u32, value);\n\n }\n\n\n\n pub fn id(&self) -> u32 {\n\n if self.x2 {\n\n unsafe { Msr::new(IA32_X2APIC_APICID).read() as u32 }\n\n } else {\n\n unsafe { self.read(0x20) }\n", "file_path": "kernel/arch/x86_64/apic.rs", "rank": 96, "score": 12.994727605090477 }, { "content": " if (width * height) as usize == data.len() {\n\n Ok(Image {\n\n w: width,\n\n h: height,\n\n 
data,\n\n })\n\n } else {\n\n Err(\"not enough or too much data given compared to width and height\".to_string())\n\n }\n\n }\n\n\n\n /// Create a new empty image\n\n pub fn default() -> Self {\n\n Self::new(0, 0)\n\n }\n\n\n\n /// Get a piece of the image\n\n pub fn roi<'a>(&'a self, x: u32, y: u32, w: u32, h: u32) -> ImageRoi<'a> {\n\n let x1 = cmp::min(x, self.width());\n\n let y1 = cmp::min(y, self.height());\n", "file_path": "boot2snow/src/image/mod.rs", "rank": 97, "score": 12.87893314682529 }, { "content": "\t\t\tkernel_file.set_position(ent.p_offset as u64);\n\n\t\t\tkernel_file.read( &mut data_slice[.. ent.p_filesz as usize] );\n\n\t\t\tfor b in &mut data_slice[ent.p_filesz as usize .. ent.p_memsz as usize] {\n\n\t\t\t\t*b = 0;\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\t// SAFE: Assuming that the executable is sane, and that it follows the correct calling convention\n\n\tOk(unsafe { mem::transmute(elf_hdr.e_entry as usize) })\n\n}\n\n\n", "file_path": "boot2snow/src/boot2snow.rs", "rank": 98, "score": 12.769059729400283 }, { "content": " ['m', 'M'],\n\n [',', '<'],\n\n ['.', '>'],\n\n ['/', '?'],\n\n ['\\0', '\\0'],\n\n ['\\0', '\\0'],\n\n ['\\0', '\\0'],\n\n [' ', ' ']\n\n ];\n\n\n\n pub fn get_char(scan_code: u8, shift: bool) -> char {\n\n if let Some(c) = US.get(scan_code as usize) {\n\n if shift {\n\n c[1]\n\n } else {\n\n c[0]\n\n }\n\n } else {\n\n '\\0'\n\n }\n", "file_path": "kernel/arch/x86_64/keyboard.rs", "rank": 99, "score": 12.729036277136547 } ]
Rust
src/color.rs
shuhei/colortty
dbb51e29a1f49c9f97174d869a97e63ed9fc0db4
use crate::error::{ErrorKind, Result}; use failure::ResultExt; use regex::Regex; use xml::{Element, Xml}; pub enum ColorSchemeFormat { ITerm, Mintty, Gogh, } impl ColorSchemeFormat { pub fn from_string(s: &str) -> Option<Self> { match s { "iterm" => Some(ColorSchemeFormat::ITerm), "mintty" => Some(ColorSchemeFormat::Mintty), "gogh" => Some(ColorSchemeFormat::Gogh), _ => None, } } pub fn from_filename(s: &str) -> Option<Self> { if s.ends_with(".itermcolors") { Some(ColorSchemeFormat::ITerm) } else if s.ends_with(".minttyrc") { Some(ColorSchemeFormat::Mintty) } else if s.ends_with(".sh") { Some(ColorSchemeFormat::Gogh) } else { None } } } #[derive(Debug, Default, PartialEq)] pub struct Color { pub red: u8, pub green: u8, pub blue: u8, } impl Color { pub fn from_mintty_color(s: &str) -> Result<Self> { let rgb: Vec<_> = s.split(',').collect(); if rgb.len() != 3 { return Err(ErrorKind::InvalidColorFormat(s.to_owned()).into()); } let red = parse_int(rgb[0])?; let green = parse_int(rgb[1])?; let blue = parse_int(rgb[2])?; Ok(Color { red, green, blue }) } pub fn from_gogh_color(s: &str) -> Result<Self> { let red = parse_hex(&s[1..3])?; let green = parse_hex(&s[3..5])?; let blue = parse_hex(&s[5..7])?; Ok(Color { red, green, blue }) } pub fn to_hex(&self) -> String { format!("0x{:>02x}{:>02x}{:>02x}", self.red, self.green, self.blue) } pub fn to_24bit_be(&self) -> String { format!("\x1b[48;2;{};{};{}m", self.red, self.green, self.blue) } pub fn to_24bit_preview(&self) -> String { format!("\x1b[38;2;{};{};{}m●", self.red, self.green, self.blue) } } fn parse_int(s: &str) -> Result<u8> { Ok(s.parse::<u8>().context(ErrorKind::ParseInt)?) } fn parse_hex(s: &str) -> Result<u8> { Ok(u8::from_str_radix(s, 16).context(ErrorKind::ParseInt)?) 
} fn extract_text(element: &Element) -> Result<&str> { let first = &element.children[0]; match first { Xml::CharacterNode(ref text) => Ok(text), _ => Err(ErrorKind::NotCharacterNode(Box::new(first.to_owned())).into()), } } fn extract_real_color(element: &Element) -> Result<u8> { let real_value = extract_text(element)? .parse::<f32>() .context(ErrorKind::ParseFloat)?; let int_value = (real_value * 255.0) as u8; Ok(int_value) } #[derive(Default)] pub struct ColorScheme { foreground: Color, background: Color, cursor_text: Option<Color>, cursor: Option<Color>, black: Color, red: Color, green: Color, yellow: Color, blue: Color, magenta: Color, cyan: Color, white: Color, bright_black: Color, bright_red: Color, bright_green: Color, bright_yellow: Color, bright_blue: Color, bright_magenta: Color, bright_cyan: Color, bright_white: Color, } impl ColorScheme { pub fn from_minttyrc(content: &str) -> Result<Self> { let mut scheme = ColorScheme::default(); for line in content.lines() { let components: Vec<&str> = line.split('=').collect(); if components.len() != 2 { return Err(ErrorKind::InvalidLineFormat(line.to_owned()).into()); } let name = components[0]; let color = Color::from_mintty_color(components[1])?; match name { "ForegroundColour" => scheme.foreground = color, "BackgroundColour" => scheme.background = color, "Black" => scheme.black = color, "Red" => scheme.red = color, "Green" => scheme.green = color, "Yellow" => scheme.yellow = color, "Blue" => scheme.blue = color, "Magenta" => scheme.magenta = color, "Cyan" => scheme.cyan = color, "White" => scheme.white = color, "BoldRed" => scheme.bright_red = color, "BoldBlack" => scheme.bright_black = color, "BoldGreen" => scheme.bright_green = color, "BoldYellow" => scheme.bright_yellow = color, "BoldBlue" => scheme.bright_blue = color, "BoldMagenta" => scheme.bright_magenta = color, "BoldCyan" => scheme.bright_cyan = color, "BoldWhite" => scheme.bright_white = color, _ => return 
Err(ErrorKind::UnknownColorName(name.to_owned()).into()), } } Ok(scheme) } pub fn from_iterm(content: &str) -> Result<Self> { let mut scheme = ColorScheme::default(); let root = content.parse::<Element>().context(ErrorKind::XMLParse)?; let root_dict: &Element = root .get_children("dict", None) .nth(0) .ok_or(ErrorKind::NoRootDict)?; let keys = root_dict.get_children("key", None); let values = root_dict.get_children("dict", None); for (key, value) in keys.zip(values) { let color_name = extract_text(key)?; let mut color = Color::default(); let element_nodes = value .children .iter() .flat_map(|child| match child { Xml::ElementNode(elem) => Some(elem), _ => None, }) .collect::<Vec<_>>(); for pair in element_nodes.chunks(2) { if let [color_key, color_value] = pair { let component_name = extract_text(color_key)?; match component_name { "Red Component" => color.red = extract_real_color(color_value)?, "Green Component" => color.green = extract_real_color(color_value)?, "Blue Component" => color.blue = extract_real_color(color_value)?, "Alpha Component" => {} "Color Space" => {} _ => { return Err(ErrorKind::UnknownColorComponent( component_name.to_owned(), ) .into()); } }; } } match color_name { "Ansi 0 Color" => scheme.black = color, "Ansi 1 Color" => scheme.red = color, "Ansi 2 Color" => scheme.green = color, "Ansi 3 Color" => scheme.yellow = color, "Ansi 4 Color" => scheme.blue = color, "Ansi 5 Color" => scheme.magenta = color, "Ansi 6 Color" => scheme.cyan = color, "Ansi 7 Color" => scheme.white = color, "Ansi 8 Color" => scheme.bright_black = color, "Ansi 9 Color" => scheme.bright_red = color, "Ansi 10 Color" => scheme.bright_green = color, "Ansi 11 Color" => scheme.bright_yellow = color, "Ansi 12 Color" => scheme.bright_blue = color, "Ansi 13 Color" => scheme.bright_magenta = color, "Ansi 14 Color" => scheme.bright_cyan = color, "Ansi 15 Color" => scheme.bright_white = color, "Background Color" => scheme.background = color, "Foreground Color" => scheme.foreground = 
color, "Cursor Color" => scheme.cursor = Some(color), "Cursor Text Color" => scheme.cursor_text = Some(color), _ => (), } } Ok(scheme) } pub fn from_gogh(content: &str) -> Result<Self> { let pattern = Regex::new(r#"export ([A-Z0-9_]+)="(#[0-9a-fA-F]{6})""#).unwrap(); let mut scheme = ColorScheme::default(); for line in content.lines() { if let Some(caps) = pattern.captures(line) { let name = caps.get(1).unwrap().as_str(); let color = Color::from_gogh_color(caps.get(2).unwrap().as_str())?; match name { "FOREGROUND_COLOR" => scheme.foreground = color, "BACKGROUND_COLOR" => scheme.background = color, "COLOR_01" => scheme.black = color, "COLOR_02" => scheme.red = color, "COLOR_03" => scheme.green = color, "COLOR_04" => scheme.yellow = color, "COLOR_05" => scheme.blue = color, "COLOR_06" => scheme.magenta = color, "COLOR_07" => scheme.cyan = color, "COLOR_08" => scheme.white = color, "COLOR_09" => scheme.bright_black = color, "COLOR_10" => scheme.bright_red = color, "COLOR_11" => scheme.bright_green = color, "COLOR_12" => scheme.bright_yellow = color, "COLOR_13" => scheme.bright_blue = color, "COLOR_14" => scheme.bright_magenta = color, "COLOR_15" => scheme.bright_cyan = color, "COLOR_16" => scheme.bright_white = color, _ => {} } } } Ok(scheme) } pub fn to_yaml(&self) -> String { let cursor_colors = match (&self.cursor_text, &self.cursor) { (Some(cursor_text), Some(cursor)) => format!( " # Cursor colors cursor: text: '{}' cursor: '{}' ", cursor_text.to_hex(), cursor.to_hex() ), _ => String::new(), }; format!( "colors: # Default colors primary: background: '{}' foreground: '{}' {} # Normal colors normal: black: '{}' red: '{}' green: '{}' yellow: '{}' blue: '{}' magenta: '{}' cyan: '{}' white: '{}' # Bright colors bright: black: '{}' red: '{}' green: '{}' yellow: '{}' blue: '{}' magenta: '{}' cyan: '{}' white: '{}' ", self.background.to_hex(), self.foreground.to_hex(), cursor_colors, self.black.to_hex(), self.red.to_hex(), self.green.to_hex(), self.yellow.to_hex(), 
self.blue.to_hex(), self.magenta.to_hex(), self.cyan.to_hex(), self.white.to_hex(), self.bright_black.to_hex(), self.bright_red.to_hex(), self.bright_green.to_hex(), self.bright_yellow.to_hex(), self.bright_blue.to_hex(), self.bright_magenta.to_hex(), self.bright_cyan.to_hex(), self.bright_white.to_hex(), ) } pub fn to_preview(&self) -> String { let colors = vec![ self.background.to_24bit_be(), " ".to_string(), self.foreground.to_24bit_preview(), " ".to_string(), self.black.to_24bit_preview(), self.red.to_24bit_preview(), self.green.to_24bit_preview(), self.yellow.to_24bit_preview(), self.blue.to_24bit_preview(), self.magenta.to_24bit_preview(), self.cyan.to_24bit_preview(), self.white.to_24bit_preview(), " ".to_string(), self.bright_black.to_24bit_preview(), self.bright_red.to_24bit_preview(), self.bright_green.to_24bit_preview(), self.bright_yellow.to_24bit_preview(), self.bright_blue.to_24bit_preview(), self.bright_magenta.to_24bit_preview(), self.bright_cyan.to_24bit_preview(), self.bright_white.to_24bit_preview(), " ".to_string(), "\x1b[0m".to_string(), ]; colors.join("") } }
use crate::error::{ErrorKind, Result}; use failure::ResultExt; use regex::Regex; use xml::{Element, Xml}; pub enum ColorSchemeFormat { ITerm, Mintty, Gogh, } impl ColorSchemeFormat { pub fn from_string(s: &str) -> Option<Self> { match s { "iterm" => Some(ColorSchemeFormat::ITerm), "mintty" => Some(ColorSchemeFormat::Mintty), "gogh" => Some(ColorSchemeFormat::Gogh), _ => None, } } pub fn from_filename(s: &str) -> Option<Self> { if s.ends_with(".itermcolors") { Some(ColorSchemeFormat::ITerm) } else if s.ends_with(".minttyrc") { Some(ColorSchemeFormat::Mintty) } else if s.ends_with(".sh") { Some(ColorSchemeFormat::Gogh) } else { None } } } #[derive(Debug, Default, PartialEq)] pub struct Color { pub red: u8, pub green: u8, pub blue: u8, } impl Color { pub fn from_mintty_color(s: &str) -> Result<Self> { let rgb: Vec<_> = s.split(',').collect(); if rgb.len() != 3 { return Err(ErrorKind::InvalidColorFormat(s.to_owned()).into()); } let red = parse_int(rgb[0])?; let green = parse_int(rgb[1])?; let blue = parse_int(rgb[2])?; Ok(Color { red, green, blue }) } pub fn from_gogh_color(s: &str) -> Result<Self> { let red = parse_hex(&s[1..3])?; let green = parse_hex(&s[3..5])?; let blue = parse_hex(&s[5..7])?; Ok(Color { red, green, blue }) } pub fn to_hex(&self) -> String { format!("0x{:>02x}{:>02x}{:>02x}", self.red, self.green, self.blue) } pub fn to_24bit_be(&self) -> String { format!("\x1b[48;2;{};{};{}m", self.red, self.green, self.blue) } pub fn to_24bit_preview(&self) -> String { format!("\x1b[38;2;{};{};{}m●", self.red, self.green, self.blue) } } fn parse_int(s: &str) -> Result<u8> { Ok(s.parse::<u8>().context(ErrorKind::ParseInt)?) } fn parse_hex(s: &str) -> Result<u8> { Ok(u8::from_str_radix(s, 16).context(ErrorKind::ParseInt)?) 
} fn extract_text(element: &Element) -> Result<&str> { let first = &element.children[0]; match first { Xml::CharacterNode(ref text) => Ok(text), _ => Err(ErrorKind::NotCharacterNode(Box::new(first.to_owned())).into()), } } fn extract_real_color(element: &Element) -> Result<u8> { let real_value = extract_text(element)? .parse::<f32>() .context(ErrorKind::ParseFloat)?; let int_value = (real_value * 255.0) as u8; Ok(int_value) } #[derive(Default)] pub struct ColorScheme { foreground: Color, background: Color, cursor_text: Option<Color>, cursor: Option<Color>, black: Color, red: Color, green: Color, yellow: Color, blue: Color, magenta: Color, cyan: Color, white: Color, bright_black: Color, bright_red: Color, bright_green: Color, bright_yellow: Color, bright_blue: Color, bright_magenta: Color, bright_cyan: Color, bright_white: Color, } impl ColorScheme { pub fn from_minttyrc(content: &str) -> Result<Self> { let mut scheme = ColorScheme::default(); for line in content.lines() { let components: Vec<&str> = line.split('=').collect(); if components.len() != 2 { return Err(ErrorKind::InvalidLineFormat(line.to_owned()).into()); } let name = components[0]; let color = Color::from_mintty_color(components[1])?; match name { "ForegroundColour" => scheme.foreground = color, "BackgroundColour" => scheme.background = color, "Black" => scheme.black = color, "Red" => scheme.red = color, "Green" => scheme.green = color, "Yellow" => scheme.yellow = color, "Blue" => scheme.blue = color, "Magenta"
t_white = color, _ => return Err(ErrorKind::UnknownColorName(name.to_owned()).into()), } } Ok(scheme) } pub fn from_iterm(content: &str) -> Result<Self> { let mut scheme = ColorScheme::default(); let root = content.parse::<Element>().context(ErrorKind::XMLParse)?; let root_dict: &Element = root .get_children("dict", None) .nth(0) .ok_or(ErrorKind::NoRootDict)?; let keys = root_dict.get_children("key", None); let values = root_dict.get_children("dict", None); for (key, value) in keys.zip(values) { let color_name = extract_text(key)?; let mut color = Color::default(); let element_nodes = value .children .iter() .flat_map(|child| match child { Xml::ElementNode(elem) => Some(elem), _ => None, }) .collect::<Vec<_>>(); for pair in element_nodes.chunks(2) { if let [color_key, color_value] = pair { let component_name = extract_text(color_key)?; match component_name { "Red Component" => color.red = extract_real_color(color_value)?, "Green Component" => color.green = extract_real_color(color_value)?, "Blue Component" => color.blue = extract_real_color(color_value)?, "Alpha Component" => {} "Color Space" => {} _ => { return Err(ErrorKind::UnknownColorComponent( component_name.to_owned(), ) .into()); } }; } } match color_name { "Ansi 0 Color" => scheme.black = color, "Ansi 1 Color" => scheme.red = color, "Ansi 2 Color" => scheme.green = color, "Ansi 3 Color" => scheme.yellow = color, "Ansi 4 Color" => scheme.blue = color, "Ansi 5 Color" => scheme.magenta = color, "Ansi 6 Color" => scheme.cyan = color, "Ansi 7 Color" => scheme.white = color, "Ansi 8 Color" => scheme.bright_black = color, "Ansi 9 Color" => scheme.bright_red = color, "Ansi 10 Color" => scheme.bright_green = color, "Ansi 11 Color" => scheme.bright_yellow = color, "Ansi 12 Color" => scheme.bright_blue = color, "Ansi 13 Color" => scheme.bright_magenta = color, "Ansi 14 Color" => scheme.bright_cyan = color, "Ansi 15 Color" => scheme.bright_white = color, "Background Color" => scheme.background = color, "Foreground 
Color" => scheme.foreground = color, "Cursor Color" => scheme.cursor = Some(color), "Cursor Text Color" => scheme.cursor_text = Some(color), _ => (), } } Ok(scheme) } pub fn from_gogh(content: &str) -> Result<Self> { let pattern = Regex::new(r#"export ([A-Z0-9_]+)="(#[0-9a-fA-F]{6})""#).unwrap(); let mut scheme = ColorScheme::default(); for line in content.lines() { if let Some(caps) = pattern.captures(line) { let name = caps.get(1).unwrap().as_str(); let color = Color::from_gogh_color(caps.get(2).unwrap().as_str())?; match name { "FOREGROUND_COLOR" => scheme.foreground = color, "BACKGROUND_COLOR" => scheme.background = color, "COLOR_01" => scheme.black = color, "COLOR_02" => scheme.red = color, "COLOR_03" => scheme.green = color, "COLOR_04" => scheme.yellow = color, "COLOR_05" => scheme.blue = color, "COLOR_06" => scheme.magenta = color, "COLOR_07" => scheme.cyan = color, "COLOR_08" => scheme.white = color, "COLOR_09" => scheme.bright_black = color, "COLOR_10" => scheme.bright_red = color, "COLOR_11" => scheme.bright_green = color, "COLOR_12" => scheme.bright_yellow = color, "COLOR_13" => scheme.bright_blue = color, "COLOR_14" => scheme.bright_magenta = color, "COLOR_15" => scheme.bright_cyan = color, "COLOR_16" => scheme.bright_white = color, _ => {} } } } Ok(scheme) } pub fn to_yaml(&self) -> String { let cursor_colors = match (&self.cursor_text, &self.cursor) { (Some(cursor_text), Some(cursor)) => format!( " # Cursor colors cursor: text: '{}' cursor: '{}' ", cursor_text.to_hex(), cursor.to_hex() ), _ => String::new(), }; format!( "colors: # Default colors primary: background: '{}' foreground: '{}' {} # Normal colors normal: black: '{}' red: '{}' green: '{}' yellow: '{}' blue: '{}' magenta: '{}' cyan: '{}' white: '{}' # Bright colors bright: black: '{}' red: '{}' green: '{}' yellow: '{}' blue: '{}' magenta: '{}' cyan: '{}' white: '{}' ", self.background.to_hex(), self.foreground.to_hex(), cursor_colors, self.black.to_hex(), self.red.to_hex(), 
self.green.to_hex(), self.yellow.to_hex(), self.blue.to_hex(), self.magenta.to_hex(), self.cyan.to_hex(), self.white.to_hex(), self.bright_black.to_hex(), self.bright_red.to_hex(), self.bright_green.to_hex(), self.bright_yellow.to_hex(), self.bright_blue.to_hex(), self.bright_magenta.to_hex(), self.bright_cyan.to_hex(), self.bright_white.to_hex(), ) } pub fn to_preview(&self) -> String { let colors = vec![ self.background.to_24bit_be(), " ".to_string(), self.foreground.to_24bit_preview(), " ".to_string(), self.black.to_24bit_preview(), self.red.to_24bit_preview(), self.green.to_24bit_preview(), self.yellow.to_24bit_preview(), self.blue.to_24bit_preview(), self.magenta.to_24bit_preview(), self.cyan.to_24bit_preview(), self.white.to_24bit_preview(), " ".to_string(), self.bright_black.to_24bit_preview(), self.bright_red.to_24bit_preview(), self.bright_green.to_24bit_preview(), self.bright_yellow.to_24bit_preview(), self.bright_blue.to_24bit_preview(), self.bright_magenta.to_24bit_preview(), self.bright_cyan.to_24bit_preview(), self.bright_white.to_24bit_preview(), " ".to_string(), "\x1b[0m".to_string(), ]; colors.join("") } }
=> scheme.magenta = color, "Cyan" => scheme.cyan = color, "White" => scheme.white = color, "BoldRed" => scheme.bright_red = color, "BoldBlack" => scheme.bright_black = color, "BoldGreen" => scheme.bright_green = color, "BoldYellow" => scheme.bright_yellow = color, "BoldBlue" => scheme.bright_blue = color, "BoldMagenta" => scheme.bright_magenta = color, "BoldCyan" => scheme.bright_cyan = color, "BoldWhite" => scheme.brigh
random
[ { "content": "fn convert(args: Vec<String>) -> Result<()> {\n\n let mut opts = Options::new();\n\n opts.optopt(\n\n \"i\",\n\n \"input-format\",\n\n \"input format: 'iterm'|'mintty'|'gogh'\",\n\n \"INPUT_FORMAT\",\n\n );\n\n let matches = opts.parse(&args[2..]).context(ErrorKind::InvalidArgument)?;\n\n\n\n if matches.free.is_empty() {\n\n return Err(ErrorKind::MissingSource.into());\n\n }\n\n\n\n let source = &matches.free[0];\n\n let input_format = matches\n\n .opt_str(\"i\")\n\n .and_then(|s| ColorSchemeFormat::from_string(&s))\n\n .or_else(|| ColorSchemeFormat::from_filename(&source))\n\n .ok_or(ErrorKind::MissingInputFormat)?;\n", "file_path": "src/main.rs", "rank": 4, "score": 65612.95453383628 }, { "content": "fn get_provider(matches: &getopts::Matches) -> Result<Provider> {\n\n let provider_name = matches.opt_str(\"p\").unwrap_or_else(|| \"iterm\".to_owned());\n\n let provider = match provider_name.as_ref() {\n\n \"iterm\" => Provider::iterm(),\n\n \"gogh\" => Provider::gogh(),\n\n _ => return Err(ErrorKind::UnknownProvider(provider_name).into()),\n\n };\n\n Ok(provider)\n\n}\n", "file_path": "src/main.rs", "rank": 5, "score": 57686.35496792478 }, { "content": "fn handle_error(result: Result<()>) {\n\n if let Err(e) = result {\n\n eprintln!(\"error: {}\", e);\n\n process::exit(1);\n\n }\n\n}\n\n\n\n// -- commands\n\n\n", "file_path": "src/main.rs", "rank": 6, "score": 54373.34997208601 }, { "content": "fn set_provider_option(opts: &mut getopts::Options) {\n\n opts.optopt(\n\n \"p\",\n\n \"provider\",\n\n \"color scheme provider: 'iterm'|'gogh'\",\n\n \"PROVIDER\",\n\n );\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 7, "score": 41071.75755462535 }, { "content": "fn help() {\n\n println!(\n\n \"colortty - color scheme converter for alacritty\n\n\n\nUSAGE:\n\n # List color schemes at https://github.com/mbadolato/iTerm2-Color-Schemes\n\n colortty list\n\n colortty list -p iterm\n\n colortty list -u # update cached color schemes\n\n\n\n # List color schemes 
at https://github.com/Mayccoll/Gogh\n\n colortty list -p gogh\n\n colortty list -p gogh -u # update cached color schemes\n\n\n\n # Get color scheme from https://github.com/mbadolato/iTerm2-Color-Schemes\n\n colortty get <color scheme name>\n\n colortty get -p iterm <color scheme name>\n\n\n\n # Get color scheme from https://github.com/Mayccoll/Gogh\n\n colortty get -p gogh <color scheme name>\n", "file_path": "src/main.rs", "rank": 8, "score": 25918.762269743944 }, { "content": " black: '0x4f525e'\n\n red: '0xe06c75'\n\n green: '0x98c379'\n\n yellow: '0xe5c07b'\n\n blue: '0x61afef'\n\n magenta: '0xc678dd'\n\n cyan: '0x56b6c2'\n\n white: '0xffffff'\n\n\"\n\n .to_string();\n\n assert_eq!(scheme.to_yaml(), firewatch_alacritty);\n\n }\n\n\n\n #[test]\n\n fn convert_gogh() {\n\n let dracula_gogh = read_fixture(\"tests/fixtures/dracula.sh\");\n\n let dracula_alacritty: String = \"colors:\n\n # Default colors\n\n primary:\n\n background: '0x282a36'\n", "file_path": "tests/color.rs", "rank": 18, "score": 18287.7042496198 }, { "content": " # Cursor colors\n\n cursor:\n\n text: '0xffffff'\n\n cursor: '0xbbbbbb'\n\n\n\n # Normal colors\n\n normal:\n\n black: '0x000000'\n\n red: '0xff5555'\n\n green: '0x50fa7b'\n\n yellow: '0xf1fa8c'\n\n blue: '0xbd93f9'\n\n magenta: '0xff79c6'\n\n cyan: '0x8be9fd'\n\n white: '0xbbbbbb'\n\n\n\n # Bright colors\n\n bright:\n\n black: '0x555555'\n\n red: '0xff5555'\n", "file_path": "tests/color.rs", "rank": 20, "score": 18286.83559556425 }, { "content": " foreground: '0x75541b'\n\n\n\n # Cursor colors\n\n cursor:\n\n text: '0xd5deff'\n\n cursor: '0xda4181'\n\n\n\n # Normal colors\n\n normal:\n\n black: '0x383a42'\n\n red: '0xe45649'\n\n green: '0x50a14f'\n\n yellow: '0xc18401'\n\n blue: '0x0184bc'\n\n magenta: '0xa626a4'\n\n cyan: '0x0997b3'\n\n white: '0xfafafa'\n\n\n\n # Bright colors\n\n bright:\n", "file_path": "tests/color.rs", "rank": 22, "score": 18286.02939991657 }, { "content": " green: '0x50fa7b'\n\n yellow: '0xf1fa8c'\n\n blue: 
'0xbd93f9'\n\n magenta: '0xff79c6'\n\n cyan: '0x8be9fd'\n\n white: '0xffffff'\n\n\"\n\n .to_string();\n\n let scheme = ColorScheme::from_iterm(&dracula_iterm).unwrap();\n\n assert_eq!(scheme.to_yaml(), dracula_alacritty);\n\n }\n\n\n\n #[test]\n\n fn convert_iterm_complicated() {\n\n let firewatch_iterm = read_fixture(\"tests/fixtures/two-firewatch-light.itermcolors\");\n\n let scheme = ColorScheme::from_iterm(&firewatch_iterm).unwrap();\n\n let firewatch_alacritty: String = \"colors:\n\n # Default colors\n\n primary:\n\n background: '0xf8f6f2'\n", "file_path": "tests/color.rs", "rank": 24, "score": 18285.158344862233 }, { "content": " primary:\n\n background: '0x282a36'\n\n foreground: '0xf8f8f2'\n\n\n\n # Normal colors\n\n normal:\n\n black: '0x000000'\n\n red: '0xff5555'\n\n green: '0x50fa7b'\n\n yellow: '0xf1fa8c'\n\n blue: '0xcaa9fa'\n\n magenta: '0xff79c6'\n\n cyan: '0x8be9fd'\n\n white: '0xbfbfbf'\n\n\n\n # Bright colors\n\n bright:\n\n black: '0x282a35'\n\n red: '0xff6e67'\n\n green: '0x5af78e'\n", "file_path": "tests/color.rs", "rank": 25, "score": 18284.453974398806 }, { "content": " yellow: '0xf4f99d'\n\n blue: '0xcaa9fa'\n\n magenta: '0xff92d0'\n\n cyan: '0x9aedfe'\n\n white: '0xe6e6e6'\n\n\"\n\n .to_string();\n\n let scheme = ColorScheme::from_minttyrc(&dracula_minttyrc).unwrap();\n\n assert_eq!(scheme.to_yaml(), dracula_alacritty);\n\n }\n\n\n\n #[test]\n\n fn convert_iterm() {\n\n let dracula_iterm = read_fixture(\"tests/fixtures/Dracula.itermcolors\");\n\n let dracula_alacritty: String = \"colors:\n\n # Default colors\n\n primary:\n\n background: '0x1e1f28'\n\n foreground: '0xf8f8f2'\n\n\n", "file_path": "tests/color.rs", "rank": 26, "score": 18283.963515453925 }, { "content": " foreground: '0x94a3a5'\n\n\n\n # Normal colors\n\n normal:\n\n black: '0x44475a'\n\n red: '0xff5555'\n\n green: '0x50fa7b'\n\n yellow: '0xffb86c'\n\n blue: '0x8be9fd'\n\n magenta: '0xbd93f9'\n\n cyan: '0xff79c6'\n\n white: '0x94a3a5'\n\n\n\n # Bright colors\n\n bright:\n\n 
black: '0x000000'\n\n red: '0xff5555'\n\n green: '0x50fa7b'\n\n yellow: '0xffb86c'\n\n blue: '0x8be9fd'\n", "file_path": "tests/color.rs", "rank": 27, "score": 18283.809901155935 }, { "content": " magenta: '0xbd93f9'\n\n cyan: '0xff79c6'\n\n white: '0xffffff'\n\n\"\n\n .to_string();\n\n let scheme = ColorScheme::from_gogh(&dracula_gogh).unwrap();\n\n assert_eq!(scheme.to_yaml(), dracula_alacritty);\n\n }\n\n }\n\n}\n", "file_path": "tests/color.rs", "rank": 31, "score": 18280.48454395024 }, { "content": "\n\n mod color_scheme {\n\n use colortty::ColorScheme;\n\n use std::fs::File;\n\n use std::io::Read;\n\n\n\n fn read_fixture(filename: &str) -> String {\n\n let mut fixture = String::new();\n\n File::open(filename)\n\n .unwrap()\n\n .read_to_string(&mut fixture)\n\n .unwrap();\n\n return fixture;\n\n }\n\n\n\n #[test]\n\n fn convert_minttyrc() {\n\n let dracula_minttyrc = read_fixture(\"tests/fixtures/Dracula.minttyrc\");\n\n let dracula_alacritty: String = \"colors:\n\n # Default colors\n", "file_path": "tests/color.rs", "rank": 33, "score": 18279.292352790686 }, { "content": "#[cfg(test)]\n\nmod color_tests {\n\n mod color {\n\n use colortty::Color;\n\n\n\n #[test]\n\n fn from_mintty_color_works() {\n\n assert_eq!(\n\n Color::from_mintty_color(\"12,3,255\").unwrap(),\n\n Color {\n\n red: 12,\n\n green: 3,\n\n blue: 255\n\n }\n\n );\n\n }\n\n\n\n #[test]\n\n fn from_mintty_color_invalid_format() {\n\n assert!(Color::from_mintty_color(\"123\").is_err());\n", "file_path": "tests/color.rs", "rank": 35, "score": 18277.720958999394 }, { "content": " }\n\n\n\n #[test]\n\n fn from_mintty_color_parse_int_error() {\n\n assert!(Color::from_mintty_color(\"abc,3,fo\").is_err());\n\n }\n\n\n\n #[test]\n\n fn to_hex() {\n\n assert_eq!(\n\n Color {\n\n red: 123,\n\n green: 4,\n\n blue: 255\n\n }\n\n .to_hex(),\n\n \"0x7b04ff\"\n\n );\n\n }\n\n }\n", "file_path": "tests/color.rs", "rank": 36, "score": 18276.29498572665 }, { "content": "use crate::color::ColorScheme;\n\nuse 
crate::error::{ErrorKind, Result};\n\nuse async_std::{fs, prelude::*};\n\nuse dirs;\n\nuse failure::ResultExt;\n\nuse futures::future;\n\nuse std::path::PathBuf;\n\nuse surf::{middleware::HttpClient, Request};\n\n\n\n/// A GitHub repository that provides color schemes.\n\npub struct Provider {\n\n user_name: String,\n\n repo_name: String,\n\n list_path: String,\n\n extension: String,\n\n}\n\n\n\nimpl Provider {\n\n /// Returns a provider for `mbadolato/iTerm2-Color-Schemes`.\n\n pub fn iterm() -> Self {\n", "file_path": "src/provider.rs", "rank": 37, "score": 17.214090612536122 }, { "content": " Provider::new(\n\n \"mbadolato\",\n\n \"iTerm2-Color-Schemes\",\n\n \"schemes\",\n\n \".itermcolors\",\n\n )\n\n }\n\n\n\n /// Returns a provider for `Mayccoll/Gogh`.\n\n pub fn gogh() -> Self {\n\n Provider::new(\"Mayccoll\", \"Gogh\", \"themes\", \".sh\")\n\n }\n\n\n\n /// Returns a provider instance.\n\n fn new(user_name: &str, repo_name: &str, list_path: &str, extension: &str) -> Self {\n\n Provider {\n\n user_name: user_name.to_string(),\n\n repo_name: repo_name.to_string(),\n\n list_path: list_path.to_string(),\n\n extension: extension.to_string(),\n", "file_path": "src/provider.rs", "rank": 38, "score": 15.468572059067583 }, { "content": "\n\n let mut buffer = String::new();\n\n if source == \"-\" {\n\n io::stdin()\n\n .read_to_string(&mut buffer)\n\n .context(ErrorKind::ReadStdin)?;\n\n } else {\n\n File::open(source)\n\n .unwrap()\n\n .read_to_string(&mut buffer)\n\n .context(ErrorKind::ReadSource)?;\n\n }\n\n\n\n let scheme_result = match input_format {\n\n ColorSchemeFormat::ITerm => ColorScheme::from_iterm(&buffer),\n\n ColorSchemeFormat::Mintty => ColorScheme::from_minttyrc(&buffer),\n\n ColorSchemeFormat::Gogh => ColorScheme::from_gogh(&buffer),\n\n };\n\n\n\n scheme_result.map(|schema| println!(\"{}\", schema.to_yaml()))\n", "file_path": "src/main.rs", "rank": 39, "score": 15.269662047519962 }, { "content": " }\n\n }\n\n\n\n /// Fetches the raw content of the 
color scheme for the given name.\n\n pub async fn get(&self, name: &str) -> Result<ColorScheme> {\n\n let req = surf::get(&self.individual_url(name));\n\n let body = http_get(req).await?;\n\n self.parse_color_scheme(&body)\n\n }\n\n\n\n /// Returns all color schemes in the provider.\n\n ///\n\n /// This function caches color schemes in the file system.\n\n pub async fn list(self) -> Result<Vec<(String, ColorScheme)>> {\n\n match self.read_color_schemes().await {\n\n Ok(color_schemes) => {\n\n if color_schemes.len() > 0 {\n\n return Ok(color_schemes);\n\n }\n\n }\n", "file_path": "src/provider.rs", "rank": 40, "score": 14.788920441869498 }, { "content": "\n\n for (name, color_scheme) in &color_schemes {\n\n println!(\n\n \"{:width$} {}\",\n\n name,\n\n color_scheme.to_preview(),\n\n width = max_name_length\n\n );\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\nasync fn get(args: Vec<String>) -> Result<()> {\n\n let mut opts = Options::new();\n\n set_provider_option(&mut opts);\n\n let matches = opts.parse(&args[2..]).context(ErrorKind::InvalidArgument)?;\n\n\n\n if matches.free.is_empty() {\n\n return Err(ErrorKind::MissingName.into());\n", "file_path": "src/main.rs", "rank": 41, "score": 12.562691187480013 }, { "content": "}\n\n\n\nasync fn list(args: Vec<String>) -> Result<()> {\n\n let mut opts = Options::new();\n\n set_provider_option(&mut opts);\n\n opts.optflag(\"u\", \"update-cache\", \"update color scheme cache\");\n\n\n\n let matches = opts.parse(&args[2..]).context(ErrorKind::InvalidArgument)?;\n\n let provider = get_provider(&matches)?;\n\n\n\n if matches.opt_present(\"u\") {\n\n provider.download_all().await?;\n\n }\n\n\n\n let color_schemes = provider.list().await?;\n\n\n\n let mut max_name_length = 0;\n\n for (name, _) in &color_schemes {\n\n max_name_length = max_name_length.max(name.len());\n\n }\n", "file_path": "src/main.rs", "rank": 42, "score": 12.252270640370185 }, { "content": " /// Parses a color scheme data.\n\n fn parse_color_scheme(&self, body: &str) -> 
Result<ColorScheme> {\n\n // TODO: Think about better abstraction.\n\n if self.extension == \".itermcolors\" {\n\n ColorScheme::from_iterm(&body)\n\n } else {\n\n ColorScheme::from_gogh(&body)\n\n }\n\n }\n\n}\n\n\n\n/// Returns the body of the given request.\n\n///\n\n/// Fails when the URL responds with non-200 status code. Sends `colortty` as `User-Agent` header\n\nasync fn http_get<C: HttpClient>(req: Request<C>) -> Result<String> {\n\n let mut res = req\n\n .set_header(\"User-Agent\", \"colortty\")\n\n .await\n\n .map_err(|e| {\n\n println!(\"HTTP request error: {}\", e);\n", "file_path": "src/provider.rs", "rank": 43, "score": 12.24726879645769 }, { "content": "pub mod color;\n\npub mod error;\n\npub mod provider;\n\n\n\npub use crate::color::{Color, ColorScheme, ColorSchemeFormat};\n\npub use crate::error::{Error, ErrorKind, Result};\n\npub use crate::provider::Provider;\n", "file_path": "src/lib.rs", "rank": 44, "score": 10.989272155373104 }, { "content": " // -- Provider errors\n\n #[fail(display = \"unknown color scheme provider: {}\", _0)]\n\n UnknownProvider(String),\n\n\n\n #[fail(display = \"missing color scheme name\")]\n\n MissingName,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Error {\n\n inner: Context<ErrorKind>,\n\n}\n\n\n\nimpl Error {\n\n pub fn kind(&self) -> &ErrorKind {\n\n &*self.inner.get_context()\n\n }\n\n}\n\n\n\nimpl Fail for Error {\n", "file_path": "src/error.rs", "rank": 45, "score": 10.964816708338262 }, { "content": " }\n\n let name = &matches.free[0].to_string();\n\n\n\n let provider = get_provider(&matches)?;\n\n let color_scheme = provider.get(name).await?;\n\n print!(\"# {}\\n{}\", name, color_scheme.to_yaml());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 46, "score": 10.788669386803992 }, { "content": "# colortty\n\n\n\ncolortty is a utility to generate color schemes for [alacritty](https://github.com/jwilm/alacritty). 
It also supports the following conversions:\n\n\n\n- iTerm 2 -> alacritty\n\n- [mintty](https://github.com/mintty/mintty) -> alacritty\n\n- [Gogh](https://github.com/Mayccoll/Gogh) -> alacritty\n\n\n\n![screenshot of colortty list](img/list.png)\n\n\n\n## Installation\n\n\n\n```sh\n\ncargo install colortty\n\n```\n\n\n\n## Usage\n\n\n\n```sh\n\ncolortty - color scheme converter for alacritty\n\n\n\nUSAGE:\n\n # List color schemes at https://github.com/mbadolato/iTerm2-Color-Schemes\n\n colortty list\n\n colortty list -p iterm\n\n colortty list -u # update cached color schemes\n\n\n\n # List color schemes at https://github.com/Mayccoll/Gogh\n\n colortty list -p gogh\n\n colortty list -p gogh -u # update cached color schemes\n\n\n\n # Get color scheme from https://github.com/mbadolato/iTerm2-Color-Schemes\n\n colortty get <color scheme name>\n\n colortty get -p iterm <color scheme name>\n\n\n\n # Get color scheme from https://github.com/Mayccoll/Gogh\n\n colortty get -p gogh <color scheme name>\n\n\n\n # Convert with implicit input type\n\n colortty convert some-color.itermcolors\n\n colortty convert some-color.minttyrc\n\n colortty convert some-color.sh\n\n\n\n # Convert with explicit input type\n\n colortty convert -i iterm some-color-theme\n\n colortty convert -i mintty some-color-theme\n\n colortty convert -i gogh some-color-theme\n\n\n\n # Convert stdin (explicit input type is necessary)\n\n cat some-color-theme | colortty convert -i iterm -\n\n cat some-color-theme | colortty convert -i mintty -\n\n cat some-color-theme | colortty convert -i gogh -\"\n\n```\n\n\n\n## Development\n\n\n\nInstall:\n\n\n\n```sh\n\ncargo install --path .\n\n```\n\n\n\nBuild:\n\n\n\n```sh\n\ncargo build\n\n```\n\n\n\nTest:\n\n\n\n```sh\n\ncargo test\n\n```\n\n\n\n## License\n\n\n\nMIT\n", "file_path": "README.md", "rank": 47, "score": 10.437433815838762 }, { "content": " file_path.set_extension(&self.extension[1..]);\n\n Ok(file_path)\n\n }\n\n\n\n /// Returns the URL for a color 
scheme on GitHub.\n\n fn individual_url(&self, name: &str) -> String {\n\n format!(\n\n \"https://raw.githubusercontent.com/{}/{}/master/{}/{}{}\",\n\n self.user_name, self.repo_name, self.list_path, name, self.extension\n\n )\n\n }\n\n\n\n /// Returns the URL for the color scheme list on GitHub API.\n\n fn list_url(&self) -> String {\n\n format!(\n\n \"https://api.github.com/repos/{}/{}/contents/{}\",\n\n self.user_name, self.repo_name, self.list_path\n\n )\n\n }\n\n\n", "file_path": "src/provider.rs", "rank": 48, "score": 10.32800257308233 }, { "content": " async fn read_color_schemes(&self) -> Result<Vec<(String, ColorScheme)>> {\n\n let mut entries = fs::read_dir(self.repo_dir()?)\n\n .await\n\n .context(ErrorKind::ReadDir)?;\n\n\n\n // Collect futures and run them in parallel.\n\n let mut futures = Vec::new();\n\n while let Some(entry) = entries.next().await {\n\n let dir_entry = entry.context(ErrorKind::ReadDirEntry)?;\n\n let filename = dir_entry.file_name().into_string().unwrap();\n\n\n\n let name = filename.replace(&self.extension, \"\").to_string();\n\n futures.push(self.read_color_scheme(name));\n\n }\n\n\n\n let color_schemes = future::try_join_all(futures).await?;\n\n\n\n Ok(color_schemes)\n\n }\n\n\n", "file_path": "src/provider.rs", "rank": 49, "score": 10.30043273955982 }, { "content": " /// Reads a color scheme from the repository cache.\n\n async fn read_color_scheme(&self, name: String) -> Result<(String, ColorScheme)> {\n\n let file_path = self.individual_path(&name)?;\n\n\n\n let body = fs::read_to_string(file_path)\n\n .await\n\n .context(ErrorKind::ReadFile)?;\n\n let color_scheme = self.parse_color_scheme(&body)?;\n\n\n\n Ok((name, color_scheme))\n\n }\n\n\n\n // TODO: Pass `Client` instead of `Request`. 
However, the ownership rule blocks it...\n\n /// Downloads a color scheme file and save it in the cache directory.\n\n async fn download_color_scheme<C: HttpClient>(\n\n &self,\n\n req: Request<C>,\n\n name: String,\n\n ) -> Result<()> {\n\n let body = http_get(req).await?;\n", "file_path": "src/provider.rs", "rank": 50, "score": 9.831519267056322 }, { "content": " fs::write(self.individual_path(&name)?, body)\n\n .await\n\n .context(ErrorKind::WriteFile)?;\n\n Ok(())\n\n }\n\n\n\n /// The repository cache directory.\n\n fn repo_dir(&self) -> Result<PathBuf> {\n\n let mut repo_dir = dirs::cache_dir().ok_or(ErrorKind::NoCacheDir)?;\n\n repo_dir.push(\"colortty\");\n\n repo_dir.push(\"repositories\");\n\n repo_dir.push(&self.user_name);\n\n repo_dir.push(&self.repo_name);\n\n Ok(repo_dir)\n\n }\n\n\n\n /// Returns the path for the given color scheme name.\n\n fn individual_path(&self, name: &str) -> Result<PathBuf> {\n\n let mut file_path = self.repo_dir()?;\n\n file_path.push(name);\n", "file_path": "src/provider.rs", "rank": 51, "score": 9.612610354091528 }, { "content": "use failure::{Backtrace, Context, Fail};\n\nuse std::convert::From;\n\nuse std::fmt::{self, Display};\n\nuse std::result;\n\nuse xml::Xml;\n\n\n\npub type Result<T> = result::Result<T, Error>;\n\n\n\n#[derive(Debug, Fail, PartialEq)]\n\npub enum ErrorKind {\n\n // -- CLI errors\n\n #[fail(display = \"failed on HTTP GET\")]\n\n HttpGet,\n\n\n\n #[fail(display = \"failed to parse JSON\")]\n\n ParseJson,\n\n\n\n #[fail(display = \"source is not specified\")]\n\n MissingSource,\n\n\n", "file_path": "src/error.rs", "rank": 52, "score": 9.233958968877296 }, { "content": " CreateDirAll,\n\n\n\n #[fail(display = \"failed to read a file\")]\n\n ReadFile,\n\n\n\n #[fail(display = \"failed to write a file\")]\n\n WriteFile,\n\n\n\n #[fail(display = \"there is no cache directory\")]\n\n NoCacheDir,\n\n\n\n // -- Mintty errors\n\n #[fail(display = \"invalid color representation: {}\", _0)]\n\n 
InvalidColorFormat(String),\n\n\n\n #[fail(display = \"invalid line: {}\", _0)]\n\n InvalidLineFormat(String),\n\n\n\n #[fail(display = \"unknown color name: {}\", _0)]\n\n UnknownColorName(String),\n", "file_path": "src/error.rs", "rank": 53, "score": 8.958924246673805 }, { "content": "use colortty::{ColorScheme, ColorSchemeFormat, ErrorKind, Provider, Result};\n\nuse failure::ResultExt;\n\nuse getopts::Options;\n\nuse std::env;\n\nuse std::fs::File;\n\nuse std::io::{self, Read};\n\nuse std::process;\n\n\n\n#[async_std::main]\n\nasync fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() < 2 {\n\n return help();\n\n }\n\n\n\n match args[1].as_ref() {\n\n \"convert\" => handle_error(convert(args)),\n\n \"list\" => handle_error(list(args).await),\n\n \"get\" => handle_error(get(args).await),\n\n \"help\" => help(),\n\n _ => {\n\n eprintln!(\"error: no such subcommand: `{}`\", args[1]);\n\n process::exit(1);\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 54, "score": 8.937047592840349 }, { "content": "\n\n #[fail(display = \"failed to parse int\")]\n\n ParseInt,\n\n\n\n // -- iTerm errors\n\n #[fail(display = \"invalid XML\")]\n\n XMLParse,\n\n\n\n #[fail(display = \"root dict was not found\")]\n\n NoRootDict,\n\n\n\n #[fail(display = \"cannot extract text from: {}\", _0)]\n\n NotCharacterNode(Box<Xml>),\n\n\n\n #[fail(display = \"unknown color component: {}\", _0)]\n\n UnknownColorComponent(String),\n\n\n\n #[fail(display = \"failed to parse float\")]\n\n ParseFloat,\n\n\n", "file_path": "src/error.rs", "rank": 55, "score": 8.620151666504945 }, { "content": " .context(ErrorKind::CreateDirAll)?;\n\n\n\n let list_req = surf::get(&self.list_url());\n\n let list_body = http_get(list_req).await?;\n\n let items = json::parse(&list_body).context(ErrorKind::ParseJson)?;\n\n\n\n // Download and save color scheme files.\n\n let mut futures = Vec::new();\n\n let client = surf::Client::new();\n\n for item in items.members() {\n\n 
let filename = item[\"name\"].as_str().unwrap();\n\n\n\n // Ignoring files starting with `_` for Gogh.\n\n if filename.starts_with('_') || !filename.ends_with(&self.extension) {\n\n continue;\n\n }\n\n\n\n let name = filename.replace(&self.extension, \"\");\n\n let req = client.get(&self.individual_url(&name));\n\n futures.push(self.download_color_scheme(req, name));\n", "file_path": "src/provider.rs", "rank": 56, "score": 7.832127970502652 }, { "content": " _ => {}\n\n }\n\n\n\n // If there are no cached files, download them.\n\n self.download_all().await?;\n\n self.read_color_schemes().await\n\n }\n\n\n\n /// Download color scheme files into the cache directory.\n\n pub async fn download_all(&self) -> Result<()> {\n\n let repo_dir = self.repo_dir()?;\n\n\n\n eprintln!(\n\n \"Downloading color schemes into {}\",\n\n repo_dir.to_str().unwrap()\n\n );\n\n\n\n // Create the cache directory if it doesn't exist.\n\n fs::create_dir_all(&repo_dir)\n\n .await\n", "file_path": "src/provider.rs", "rank": 57, "score": 7.823150596534 }, { "content": "\n\n # Convert with implicit input type\n\n colortty convert some-color.itermcolors\n\n colortty convert some-color.minttyrc\n\n colortty convert some-color.sh\n\n\n\n # Convert with explicit input type\n\n colortty convert -i iterm some-color-theme\n\n colortty convert -i mintty some-color-theme\n\n colortty convert -i gogh some-color-theme\n\n\n\n # Convert stdin (explicit input type is necessary)\n\n cat some-color-theme | colortty convert -i iterm -\n\n cat some-color-theme | colortty convert -i mintty -\n\n cat some-color-theme | colortty convert -i gogh -\"\n\n );\n\n}\n\n\n\n// -- Utility functions\n\n\n", "file_path": "src/main.rs", "rank": 58, "score": 7.399574610177209 }, { "content": " fn cause(&self) -> Option<&dyn Fail> {\n\n self.inner.cause()\n\n }\n\n\n\n fn backtrace(&self) -> Option<&Backtrace> {\n\n self.inner.backtrace()\n\n }\n\n}\n\n\n\nimpl Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> 
fmt::Result {\n\n Display::fmt(&self.inner, f)\n\n }\n\n}\n\n\n\nimpl From<ErrorKind> for Error {\n\n fn from(kind: ErrorKind) -> Error {\n\n let inner = Context::new(kind);\n\n Error { inner }\n\n }\n\n}\n\n\n\nimpl From<Context<ErrorKind>> for Error {\n\n fn from(inner: Context<ErrorKind>) -> Error {\n\n Error { inner }\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 59, "score": 5.242054530781383 }, { "content": "\n\n // Download files in batches.\n\n //\n\n // If this requests all files in parallel, the HTTP client (isahc) throws the\n\n // following error:\n\n //\n\n // HTTP request error: ConnectFailed: failed to connect to the server\n\n //\n\n // isahc doesn't limit the number of connections per client by default, but\n\n // it exposes an API to limit it. However, surf doesn't expose the API.\n\n if futures.len() > 10 {\n\n future::try_join_all(futures).await?;\n\n futures = Vec::new();\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n /// Read color schemes from the cache directory.\n", "file_path": "src/provider.rs", "rank": 60, "score": 1.8689556547527235 }, { "content": " ErrorKind::HttpGet\n\n })?;\n\n\n\n if !res.status().is_success() {\n\n println!(\"HTTP status code: {}\", res.status());\n\n return Err(ErrorKind::HttpGet.into());\n\n }\n\n\n\n // TODO: Propagate information from the original error.\n\n let body = res.body_string().await.map_err(|_| ErrorKind::HttpGet)?;\n\n Ok(body)\n\n}\n", "file_path": "src/provider.rs", "rank": 61, "score": 1.4502060523260962 } ]
Rust
experimental/benchmark/src/application/native.rs
DavidKorczynski/oak
fa53908358dd9e9b37f21e4aca17fb41485f5f33
use crate::{ application::ApplicationClient, database::Database, proto::{ trusted_database_client::TrustedDatabaseClient, trusted_database_server::{TrustedDatabase, TrustedDatabaseServer}, GetPointOfInterestRequest, GetPointOfInterestResponse, ListPointsOfInterestRequest, ListPointsOfInterestResponse, PointOfInterestMap, }, }; use anyhow::Context; use async_trait::async_trait; use futures_util::FutureExt; use log::{debug, info, warn}; use oak_abi::label::Label; use oak_client::interceptors::label::LabelInterceptor; use tokio::sync::oneshot; use tonic::{ service::interceptor::InterceptedService, transport::{Channel, Server}, Request, Response, Status, }; pub struct TrustedDatabaseService { points_of_interest: PointOfInterestMap, } #[tonic::async_trait] impl TrustedDatabase for TrustedDatabaseService { async fn get_point_of_interest( &self, request: Request<GetPointOfInterestRequest>, ) -> Result<Response<GetPointOfInterestResponse>, Status> { debug!("Received request: {:?}", request); match self.points_of_interest.entries.get(&request.get_ref().id) { Some(point) => { debug!("Found Point Of Interest: {:?}", point); Ok(Response::new(GetPointOfInterestResponse { point_of_interest: Some(point.clone()), })) } None => { let err = tonic::Status::new(tonic::Code::NotFound, "ID not found"); warn!("{:?}", err); Err(err) } } } async fn list_points_of_interest( &self, _request: Request<ListPointsOfInterestRequest>, ) -> Result<Response<ListPointsOfInterestResponse>, Status> { unimplemented!(); } } pub struct NativeApplication { notification_sender: oneshot::Sender<()>, client: TrustedDatabaseClient<InterceptedService<Channel, LabelInterceptor>>, } impl NativeApplication { pub async fn start(database: &Database, port: u16) -> Self { info!("Running native application"); let (notification_sender, notification_receiver) = oneshot::channel::<()>(); tokio::spawn(NativeApplication::create_server( database.points_of_interest.clone(), port, notification_receiver, )); let client = 
NativeApplication::create_client(port).await; NativeApplication { notification_sender, client, } } pub fn stop(self) -> anyhow::Result<()> { self.notification_sender .send(()) .ok() .context("Couldn't stop native application") } async fn create_server( database: PointOfInterestMap, port: u16, termination_notification_receiver: oneshot::Receiver<()>, ) { let address = format!("[::]:{}", port) .parse() .expect("Couldn't parse address"); let handler = TrustedDatabaseService { points_of_interest: database, }; Server::builder() .add_service(TrustedDatabaseServer::new(handler)) .serve_with_shutdown(address, termination_notification_receiver.map(drop)) .await .expect("Couldn't start server"); } async fn create_client( port: u16, ) -> TrustedDatabaseClient<InterceptedService<Channel, LabelInterceptor>> { let address = format!("https://localhost:{}", port) .parse() .expect("Couldn't parse address"); let channel = Channel::builder(address) .connect() .await .expect("Couldn't connect to Oak Application"); let label = Label::public_untrusted(); let interceptor = LabelInterceptor::create(&label).expect("Couldn't create gRPC interceptor"); TrustedDatabaseClient::with_interceptor(channel, interceptor) } } #[async_trait] impl ApplicationClient for NativeApplication { async fn send_request(&mut self, id: &str) -> Result<(), tonic::Status> { let request = Request::new(GetPointOfInterestRequest { id: id.to_string() }); self.client.get_point_of_interest(request).await?; Ok(()) } }
use crate::{ application::ApplicationClient, database::Database, proto::{ trusted_database_client::TrustedDatabaseClient, trusted_database_server::{TrustedDatabase, TrustedDatabaseServer}, GetPointOfInterestRequest, GetPointOfInterestResponse, ListPointsOfInterestRequest, ListPointsOfInterestResponse, PointOfInterestMap, }, }; use anyhow::Context; use async_trait::async_trait; use futures_util::FutureExt; use log::{debug, info, warn}; use oak_abi::label::Label; use oak_client::interceptors::label::LabelInterceptor; use tokio::sync::oneshot; use tonic::{ service::interceptor::InterceptedService, transport::{Channel, Server}, Request, Response, Status, }; pub struct TrustedDatabaseService { points_of_interest: PointOfInterestMap, } #[tonic::async_trait] impl TrustedDatabase for TrustedDatabaseService { async fn get_point_of_interest( &self, request: Request<GetPointOfInterestRequest>, ) -> Result<Response<GetPointOfInterestResponse>, Status> { debug!("Received request: {:?}", request); match self.points_of_interest.entries.get(&request.get_ref().id) { Some(point) => { debug!("Found Point Of Interest: {:?}", point);
} None => { let err = tonic::Status::new(tonic::Code::NotFound, "ID not found"); warn!("{:?}", err); Err(err) } } } async fn list_points_of_interest( &self, _request: Request<ListPointsOfInterestRequest>, ) -> Result<Response<ListPointsOfInterestResponse>, Status> { unimplemented!(); } } pub struct NativeApplication { notification_sender: oneshot::Sender<()>, client: TrustedDatabaseClient<InterceptedService<Channel, LabelInterceptor>>, } impl NativeApplication { pub async fn start(database: &Database, port: u16) -> Self { info!("Running native application"); let (notification_sender, notification_receiver) = oneshot::channel::<()>(); tokio::spawn(NativeApplication::create_server( database.points_of_interest.clone(), port, notification_receiver, )); let client = NativeApplication::create_client(port).await; NativeApplication { notification_sender, client, } } pub fn stop(self) -> anyhow::Result<()> { self.notification_sender .send(()) .ok() .context("Couldn't stop native application") } async fn create_server( database: PointOfInterestMap, port: u16, termination_notification_receiver: oneshot::Receiver<()>, ) { let address = format!("[::]:{}", port) .parse() .expect("Couldn't parse address"); let handler = TrustedDatabaseService { points_of_interest: database, }; Server::builder() .add_service(TrustedDatabaseServer::new(handler)) .serve_with_shutdown(address, termination_notification_receiver.map(drop)) .await .expect("Couldn't start server"); } async fn create_client( port: u16, ) -> TrustedDatabaseClient<InterceptedService<Channel, LabelInterceptor>> { let address = format!("https://localhost:{}", port) .parse() .expect("Couldn't parse address"); let channel = Channel::builder(address) .connect() .await .expect("Couldn't connect to Oak Application"); let label = Label::public_untrusted(); let interceptor = LabelInterceptor::create(&label).expect("Couldn't create gRPC interceptor"); TrustedDatabaseClient::with_interceptor(channel, interceptor) } } #[async_trait] 
impl ApplicationClient for NativeApplication { async fn send_request(&mut self, id: &str) -> Result<(), tonic::Status> { let request = Request::new(GetPointOfInterestRequest { id: id.to_string() }); self.client.get_point_of_interest(request).await?; Ok(()) } }
Ok(Response::new(GetPointOfInterestResponse { point_of_interest: Some(point.clone()), }))
call_expression
[ { "content": "/// Return an iterator of all known Cargo Manifest files that define crates.\n\npub fn crate_manifest_files() -> impl Iterator<Item = PathBuf> {\n\n source_files()\n\n .filter(|p| is_cargo_toml_file(p))\n\n .filter(|p| is_cargo_package_file(p))\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 0, "score": 255605.70720351127 }, { "content": "/// Convenience function for creating labels with a single confidentiality tag.\n\npub fn confidentiality_label(tag: Tag) -> crate::proto::oak::label::Label {\n\n Label {\n\n confidentiality_tags: vec![tag],\n\n integrity_tags: vec![],\n\n }\n\n}\n\n\n", "file_path": "oak_abi/src/label/mod.rs", "rank": 1, "score": 255544.1740027585 }, { "content": "/// Compile `.proto` files into Rust files during a Cargo build.\n\n///\n\n/// The generated `.rs` files are written to the Cargo `OUT_DIR` directory, suitable for use with\n\n/// the [include!][1] macro. See the [Cargo `build.rs` code generation][2] example for more info.\n\n///\n\n/// This function should be called in a project's `build.rs`.\n\n///\n\n/// # Arguments\n\n///\n\n/// **`protos`** - Paths to `.proto` files to compile. Any transitively [imported][3] `.proto`\n\n/// files are automatically be included.\n\n///\n\n/// **`includes`** - Paths to directories in which to search for imports. Directories are searched\n\n/// in order. 
The `.proto` files passed in **`protos`** must be found in one of the provided\n\n/// include directories.\n\n///\n\n/// # Errors\n\n///\n\n/// This function can fail for a number of reasons:\n\n///\n\n/// - Failure to locate or download `protoc`.\n\n/// - Failure to parse the `.proto`s.\n\n/// - Failure to locate an imported `.proto`.\n\n/// - Failure to compile a `.proto` without a [package specifier][4].\n\n///\n\n/// It's expected that this function call be `unwrap`ed in a `build.rs`; there is typically no\n\n/// reason to gracefully recover from errors during a build.\n\n///\n\n/// # Example `build.rs`\n\n///\n\n/// ```rust,no_run\n\n/// # use std::io::Result;\n\n/// fn main() -> Result<()> {\n\n/// prost_build::compile_protos(&[\"src/frontend.proto\", \"src/backend.proto\"], &[\"src\"])?;\n\n/// Ok(())\n\n/// }\n\n/// ```\n\n///\n\n/// [1]: https://doc.rust-lang.org/std/macro.include.html\n\n/// [2]: http://doc.crates.io/build-script.html#case-study-code-generation\n\n/// [3]: https://developers.google.com/protocol-buffers/docs/proto3#importing-definitions\n\n/// [4]: https://developers.google.com/protocol-buffers/docs/proto#packages\n\npub fn compile_protos(protos: &[impl AsRef<Path>], includes: &[impl AsRef<Path>]) -> Result<()> {\n\n Config::new().compile_protos(protos, includes)\n\n}\n\n\n", "file_path": "third_party/prost/prost-build/src/lib.rs", "rank": 2, "score": 250486.96268572853 }, { "content": "/// Parse an XML database into a [`PointOfInterestMap`].\n\npub fn parse_database(xml_database: &[u8]) -> Result<PointOfInterestMap, OakError> {\n\n let database: Database = quick_xml::de::from_str(\n\n String::from_utf8(xml_database.to_vec())\n\n .map_err(|error| {\n\n error!(\"Couldn't convert vector to string: {:?}\", error);\n\n OakError::OakStatus(oak_abi::OakStatus::ErrInvalidArgs)\n\n })?\n\n .as_ref(),\n\n )\n\n .map_err(|error| {\n\n error!(\"Couldn't parse XML data: {:?}\", error);\n\n OakError::OakStatus(oak_abi::OakStatus::ErrInvalidArgs)\n\n 
})?;\n\n\n\n let points_of_interest = PointOfInterestMap {\n\n entries: database\n\n .stations\n\n .iter()\n\n .map(|station| {\n\n let point_of_interest = PointOfInterest {\n", "file_path": "examples/trusted_database/module/rust/src/database.rs", "rank": 3, "score": 226392.1789396068 }, { "content": "// Start running a metrics server on the given port, running until the\n\n// `termination_notificiation_receiver` is triggered.\n\npub fn start_metrics_server(\n\n port: u16,\n\n runtime: Arc<Runtime>,\n\n termination_notificiation_receiver: tokio::sync::oneshot::Receiver<()>,\n\n) {\n\n let tokio_runtime = tokio::runtime::Runtime::new().expect(\"Couldn't create Tokio runtime\");\n\n tokio_runtime.block_on(make_server(\n\n port,\n\n runtime,\n\n termination_notificiation_receiver,\n\n ));\n\n}\n", "file_path": "oak_runtime/src/metrics/server.rs", "rank": 4, "score": 224801.24445066557 }, { "content": "/// Load an XML database from [`ConfigMap`] and parse it.\n\npub fn load_database(config_map: ConfigMap) -> Result<PointOfInterestMap, OakError> {\n\n debug!(\"Loading database\");\n\n match config_map.items.get(\"database\") {\n\n Some(xml_database) => {\n\n debug!(\"Parsing database - size: {} bytes\", xml_database.len());\n\n let points_of_interest = parse_database(xml_database).map_err(|error| {\n\n error!(\"Couldn't parse database: {:?}\", error);\n\n OakError::OakStatus(oak_abi::OakStatus::ErrInvalidArgs)\n\n })?;\n\n debug!(\n\n \"Database loaded - size: {} entries ({} bytes)\",\n\n points_of_interest.entries.len(),\n\n std::mem::size_of_val(&points_of_interest),\n\n );\n\n Ok(points_of_interest)\n\n }\n\n None => {\n\n error!(\"`database` configuration argument is not specified\");\n\n Err(OakError::OakStatus(oak_abi::OakStatus::ErrInvalidArgs))\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/trusted_database/module/rust/src/database.rs", "rank": 5, "score": 223494.17160201742 }, { "content": "pub fn build_functions_server(\n\n opt: &BuildFunctionsServer,\n\n 
additional_features: Vec<String>,\n\n) -> Step {\n\n Step::Multiple {\n\n name: \"server\".to_string(),\n\n steps: vec![\n\n vec![Step::Single {\n\n name: \"create bin folder\".to_string(),\n\n command: Cmd::new(\n\n \"mkdir\",\n\n vec![\"-p\".to_string(), \"oak_functions/loader/bin\".to_string()],\n\n ),\n\n }],\n\n vec![build_rust_binary(\n\n \"oak_functions/loader\",\n\n opt,\n\n additional_features,\n\n &hashmap! {},\n\n )],\n\n ]\n\n .into_iter()\n\n .flatten()\n\n .collect::<Vec<_>>(),\n\n }\n\n}\n\n\n", "file_path": "runner/src/examples.rs", "rank": 6, "score": 219029.23982866274 }, { "content": "/// Return an iterator of all the first-party and non-ignored files in the repository, which can be\n\n/// then additionally filtered by the caller.\n\npub fn source_files() -> impl Iterator<Item = PathBuf> {\n\n let walker = walkdir::WalkDir::new(\".\").into_iter();\n\n walker\n\n .filter_entry(|e| !is_ignored_entry(e))\n\n .filter_map(Result::ok)\n\n .map(|e| e.into_path())\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 7, "score": 216061.0927385555 }, { "content": "pub fn init() {\n\n WAIT_ON_CHANNELS_HANDLER.with(|handler| handler.replace(None));\n\n READY_DATA.with(|ready_data| ready_data.borrow_mut().clear());\n\n}\n\n\n\n#[repr(packed)]\n\npub struct HandleWithStatus {\n\n handle: Handle,\n\n status: u8,\n\n}\n\n\n\nimpl HandleWithStatus {\n\n pub fn handle(&self) -> Handle {\n\n self.handle\n\n }\n\n\n\n pub fn set_status(&mut self, status: ChannelReadStatus) {\n\n self.status = status as i32 as u8;\n\n }\n\n}\n", "file_path": "experimental/oak_async/tests/fake_runtime.rs", "rank": 8, "score": 215595.62065840996 }, { "content": "/// Return an iterator of all known Cargo Manifest files that define workspaces.\n\npub fn workspace_manifest_files() -> impl Iterator<Item = PathBuf> {\n\n source_files()\n\n .filter(|p| is_cargo_toml_file(p))\n\n .filter(|p| is_cargo_workspace_file(p))\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 9, "score": 
212996.32297032935 }, { "content": "/// Verifies the validity of the attestation info:\n\n/// - Checks that the TEE report is signed by TEE Provider’s root key.\n\n/// - Checks that the public key hash from the TEE report is equal to the hash of the public key\n\n/// presented in the server response.\n\n/// - Extracts the TEE measurement from the TEE report and compares it to the\n\n/// `expected_tee_measurement`.\n\npub fn verify_attestation_info(\n\n attestation_info_bytes: &[u8],\n\n expected_tee_measurement: &[u8],\n\n) -> anyhow::Result<()> {\n\n let attestation_info = AttestationInfo::decode(attestation_info_bytes)\n\n .context(\"Couldn't decode attestation info Protobuf message\")?;\n\n\n\n // TODO(#1867): Add remote attestation support, use real TEE reports and check that\n\n // `AttestationInfo::certificate` is signed by one of the root certificates.\n\n\n\n let report = attestation_info\n\n .report\n\n .as_ref()\n\n .context(\"Couldn't find report in peer attestation info\")?;\n\n\n\n // Check that the report contains non-empty data. 
This should be a hash of the public key\n\n // and the additional info field.\n\n if report.data.is_empty() {\n\n anyhow::bail!(\"Hash of the public key and additional info is not provided.\")\n\n }\n\n\n\n // Verify TEE measurement.\n\n if expected_tee_measurement == report.measurement {\n\n Ok(())\n\n } else {\n\n Err(anyhow!(\"Incorrect TEE measurement\"))\n\n }\n\n}\n\n\n", "file_path": "remote_attestation/rust/src/handshaker.rs", "rank": 10, "score": 212132.48952179737 }, { "content": "/// Generates attestation info with a TEE report.\n\n/// TEE report contains a hash of the signer's public key, and additional info if provided as a\n\n/// non-empty string by the caller.\n\npub fn create_attestation_info(\n\n signer: &Signer,\n\n additional_info: &[u8],\n\n tee_certificate: &[u8],\n\n) -> anyhow::Result<Vec<u8>> {\n\n let signing_public_key = signer\n\n .public_key()\n\n .context(\"Couldn't get singing public key\")?;\n\n let mut data = get_sha256(signing_public_key.as_ref()).to_vec();\n\n if !additional_info.is_empty() {\n\n data.extend(get_sha256(additional_info));\n\n data = get_sha256(&data).to_vec();\n\n }\n\n let report = AttestationReport::new(&data);\n\n let attestation_info = AttestationInfo {\n\n report: Some(report),\n\n certificate: tee_certificate.to_vec(),\n\n };\n\n serialize_protobuf(&attestation_info)\n\n .context(\"Couldn't encode attestation info Protobuf message\")\n\n}\n\n\n", "file_path": "remote_attestation/rust/src/handshaker.rs", "rank": 11, "score": 212129.8266508085 }, { "content": "/// Initializes a gRPC server pseudo-Node listening on the provided address.\n\n///\n\n/// Accepts a [`Sender`] of [`Invocation`] messages on which to send incoming gRPC invocations.\n\npub fn init_with_sender(\n\n address: &str,\n\n invocation_sender: Sender<Invocation>,\n\n) -> Result<(), OakStatus> {\n\n let config = crate::node_config::grpc_server(address);\n\n // TODO(#1631): When we have a separate top for each sub-lattice, this should be changed 
to\n\n // the top of the identity sub-lattice.\n\n let top_label = oak_abi::label::confidentiality_label(oak_abi::label::top());\n\n // Create a channel and pass the read half to a new gRPC server pseudo-Node.\n\n let init_sender =\n\n match crate::io::node_create::<GrpcInvocationSender>(\"grpc_server\", &top_label, &config) {\n\n Ok(s) => s,\n\n Err(e) => {\n\n let _ = invocation_sender.close();\n\n return Err(e);\n\n }\n\n };\n\n\n\n let grpc_server_init = GrpcInvocationSender {\n\n sender: Some(invocation_sender),\n", "file_path": "sdk/rust/oak/src/grpc/server.rs", "rank": 12, "score": 211750.80740887314 }, { "content": "/// Initializes an HTTP server pseudo-Node listening on the provided address.\n\n///\n\n/// Accepts a [`Sender`] of [`Invocation`] messages on which to send incoming HTTP invocations.\n\npub fn init_with_sender(\n\n address: &str,\n\n invocation_sender: Sender<Invocation>,\n\n) -> Result<(), OakStatus> {\n\n let config = crate::node_config::http_server(address);\n\n // TODO(#1631): When we have a separate top for each sub-lattice, this should be changed to\n\n // the top of the identity sub-lattice.\n\n let top_label = oak_abi::label::confidentiality_label(oak_abi::label::top());\n\n // Create a channel and pass the read half to a new HTTP server pseudo-Node.\n\n let init_sender =\n\n match crate::io::node_create::<HttpInvocationSender>(\"http_server\", &top_label, &config) {\n\n Ok(s) => s,\n\n Err(e) => {\n\n let _ = invocation_sender.close();\n\n return Err(e);\n\n }\n\n };\n\n\n\n let http_server_init = HttpInvocationSender {\n\n sender: Some(invocation_sender),\n", "file_path": "sdk/rust/oak/src/http/server.rs", "rank": 13, "score": 211750.80740887314 }, { "content": "pub fn fuzz_config_toml_files() -> impl Iterator<Item = PathBuf> {\n\n source_files().filter(|p| is_fuzz_config_toml_file(p))\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 14, "score": 210073.08960330748 }, { "content": "pub fn get_config_info(\n\n 
wasm_module_bytes: &[u8],\n\n policy: ServerPolicy,\n\n ml_inference: bool,\n\n metrics: Option<PrivateMetricsConfig>,\n\n) -> ConfigurationInfo {\n\n ConfigurationInfo {\n\n wasm_hash: get_sha256(wasm_module_bytes).to_vec(),\n\n policy: Some(policy),\n\n ml_inference,\n\n metrics,\n\n }\n\n}\n", "file_path": "oak_functions/sdk/test_utils/src/lib.rs", "rank": 15, "score": 205527.0095708792 }, { "content": "/// Gets the default Roughtime servers in the ecosystem.\n\n///\n\n/// Based on\n\n/// https://github.com/cloudflare/roughtime/blob/569dc6f5119970035fe0a008b83398d59363ed45/ecosystem.json\n\npub fn get_default_servers() -> Vec<RoughtimeServer> {\n\n vec![\n\n RoughtimeServer {\n\n name: \"Caesium\".to_owned(),\n\n host: \"caesium.tannerryan.ca\".to_owned(),\n\n port: 2002,\n\n public_key_base64: \"iBVjxg/1j7y1+kQUTBYdTabxCppesU/07D4PMDJk2WA=\".to_owned(),\n\n },\n\n RoughtimeServer {\n\n name: \"Chainpoint-Roughtime\".to_owned(),\n\n host: \"roughtime.chainpoint.org\".to_owned(),\n\n port: 2002,\n\n public_key_base64: \"bbT+RPS7zKX6w71ssPibzmwWqU9ffRV5oj2OresSmhE=\".to_owned(),\n\n },\n\n RoughtimeServer {\n\n name: \"Cloudflare-Roughtime\".to_owned(),\n\n host: \"roughtime.cloudflare.com\".to_owned(),\n\n port: 2002,\n\n public_key_base64: \"gD63hSj3ScS+wuOeGrubXlq35N1c5Lby/S+T7MNTjxo=\".to_owned(),\n\n },\n", "file_path": "oak_runtime/src/time/mod.rs", "rank": 16, "score": 204406.98122774914 }, { "content": "pub fn build_service(\n\n client_id: &str,\n\n client_secret: &str,\n\n) -> AuthenticationServer<AuthenticationHandler> {\n\n AuthenticationServer::new(AuthenticationHandler::new(client_id, client_secret))\n\n}\n\n\n\n/// Service implementation to handle authentication requests.\n\n#[derive(Default)]\n\npub struct AuthenticationHandler {\n\n client_id: String,\n\n client_secret: String,\n\n /* TODO(#922): Add a state storage mechanism to be able to match a client request to the\n\n * nonce and code challenge. 
*/\n\n}\n\n\n\nimpl AuthenticationHandler {\n\n pub fn new(client_id: &str, client_secret: &str) -> AuthenticationHandler {\n\n AuthenticationHandler {\n\n client_id: client_id.to_owned(),\n", "file_path": "oak_runtime/src/node/grpc/server/auth/oidc_service.rs", "rank": 17, "score": 202111.39877324138 }, { "content": "/// Path to the `Cargo.toml` files for all crates that are either directly modified or have a\n\n/// dependency to a modified crate.\n\npub fn all_affected_crates(commits: &Commits) -> ModifiedContent {\n\n let files = directly_modified_crates(commits)\n\n .files\n\n .map(|modified_files| {\n\n let crate_manifest_files = crate_manifest_files();\n\n // A map of `Cargo.toml` files visited by the algorithm. If the value associated with a\n\n // key is `true`, the crate is affected by the changes and should be included in the\n\n // result.\n\n let mut affected_crates: HashMap<String, bool> = modified_files\n\n .into_iter()\n\n .map(|path| (path, true))\n\n .collect();\n\n\n\n crates_affected_by_protos(&affected_protos(commits))\n\n .iter()\n\n .fold(&mut affected_crates, |affected_crates, toml_path| {\n\n affected_crates.insert(toml_path.clone(), true);\n\n affected_crates\n\n });\n\n\n", "file_path": "runner/src/diffs.rs", "rank": 18, "score": 193057.9431585697 }, { "content": "/// Converts a binary sequence to a string if it is a valid UTF-8 string, or formats it as a numeric\n\n/// vector of bytes otherwise.\n\npub fn format_bytes(v: &[u8]) -> String {\n\n std::str::from_utf8(v)\n\n .map(|s| s.to_string())\n\n .unwrap_or_else(|_| format!(\"{:?}\", v))\n\n}\n", "file_path": "oak_functions/loader/src/server.rs", "rank": 19, "score": 192671.78358555582 }, { "content": "/// Returns the list of paths to `Cargo.toml` files for all crates in which at least one file is\n\n/// modified.\n\npub fn directly_modified_crates(commits: &Commits) -> ModifiedContent {\n\n let files = modified_files(commits).files.map(|modified_files| {\n\n let mut crates = 
hashset![];\n\n for str_path in modified_files {\n\n if let Some(crate_path) = find_crate_toml_file(str_path) {\n\n crates.insert(crate_path);\n\n }\n\n }\n\n crates.iter().cloned().collect()\n\n });\n\n ModifiedContent { files }\n\n}\n\n\n", "file_path": "runner/src/diffs.rs", "rank": 20, "score": 189840.17094531457 }, { "content": "pub fn build_server(opt: &BuildServer, additional_features: Vec<String>) -> Step {\n\n Step::Multiple {\n\n name: \"server\".to_string(),\n\n steps: vec![\n\n vec![Step::Single {\n\n name: \"create bin folder\".to_string(),\n\n command: Cmd::new(\n\n \"mkdir\",\n\n vec![\"-p\".to_string(), \"oak_loader/bin\".to_string()],\n\n ),\n\n }],\n\n match opt.server_variant {\n\n ServerVariant::Unsafe | ServerVariant::Coverage | ServerVariant::Experimental => vec![Step::Single {\n\n name: \"build introspection browser client\".to_string(),\n\n command: Cmd::new(\"npm\",\n\n vec![\n\n \"--prefix\",\n\n \"oak_runtime/introspection_browser_client\",\n\n \"run\",\n\n \"build\",\n", "file_path": "runner/src/examples.rs", "rank": 21, "score": 184145.60405579253 }, { "content": "pub fn grpc_server(address: &str) -> NodeConfiguration {\n\n NodeConfiguration {\n\n config_type: Some(ConfigType::GrpcServerConfig(GrpcServerConfiguration {\n\n address: address.to_string(),\n\n })),\n\n }\n\n}\n\n\n", "file_path": "sdk/rust/oak/src/node_config.rs", "rank": 22, "score": 183493.5237281194 }, { "content": "pub fn http_server(address: &str) -> NodeConfiguration {\n\n NodeConfiguration {\n\n config_type: Some(ConfigType::HttpServerConfig(HttpServerConfiguration {\n\n address: address.to_string(),\n\n })),\n\n }\n\n}\n\n\n", "file_path": "sdk/rust/oak/src/node_config.rs", "rank": 23, "score": 183493.5237281194 }, { "content": "#[derive(Clone)]\n\nstruct HttpRequestHandler {\n\n /// Reference to the Runtime in the context of this HTTP server pseudo-Node.\n\n runtime: RuntimeProxy,\n\n /// Channel handle used for writing HTTP invocations.\n\n invocation_channel: 
WriteHandle,\n\n}\n\n\n\nimpl HttpRequestHandler {\n\n async fn handle(&self, req: Request<Body>) -> anyhow::Result<Response<Body>> {\n\n let request = to_oak_http_request(req).await?;\n\n match get_oak_label(&request) {\n\n Ok(oak_label) => {\n\n info!(\n\n \"Handling HTTP request; request body size: {} bytes, label: {:?}\",\n\n request.body.len(),\n\n oak_label\n\n );\n\n\n\n debug!(\"Injecting the request into the Oak Node\");\n\n let response = self.inject_http_request(request, oak_label)?;\n", "file_path": "oak_runtime/src/node/http/server.rs", "rank": 24, "score": 181039.74744089026 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct Request {\n\n #[serde(rename = \"lat\")]\n\n latitude_degrees: f64,\n\n #[serde(rename = \"lng\")]\n\n longitude_degrees: f64,\n\n}\n\n\n\n#[cfg_attr(not(test), no_mangle)]\n\npub extern \"C\" fn main() {\n\n // Produce a result which is either a successful response (as raw bytes), or an error message to\n\n // return to the client (as a human-readable string).\n\n let result: Result<Vec<u8>, String> = try {\n\n // Read the request.\n\n let request_body = oak_functions::read_request()\n\n .map_err(|err| format!(\"could not read request body: {:?}\", err))?;\n\n\n\n // Parse the request as JSON.\n\n let request: Request = serde_json::from_slice(&request_body)\n\n .map_err(|err| format!(\"could not deserialize request as JSON: {:?}\", err))?;\n\n log!(\"parsed request: {:?}\\n\", request);\n", "file_path": "oak_functions/examples/weather_lookup/module/src/lib.rs", "rank": 25, "score": 180045.46745579207 }, { "content": "#[derive(Clone)]\n\nstruct HttpRequestHandler {\n\n /// Reference to the Runtime in the context of this gRPC server pseudo-Node.\n\n runtime: RuntimeProxy,\n\n /// Channel handle used for writing gRPC invocations.\n\n invocation_channel: WriteHandle,\n\n}\n\n\n\n/// Set a mandatory prefix for all gRPC requests processed by a gRPC pseudo-Node.\n\nimpl NamedService for HttpRequestHandler {\n\n const NAME: 
&'static str = \"\";\n\n}\n\n\n\nimpl Service<http::Request<hyper::Body>> for HttpRequestHandler {\n\n type Response = http::Response<tonic::body::BoxBody>;\n\n type Error = http::Error;\n\n type Future = BoxFuture<Self::Response, Self::Error>;\n\n\n\n fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n Poll::Ready(Ok(()))\n\n }\n", "file_path": "oak_runtime/src/node/grpc/server/mod.rs", "rank": 26, "score": 177640.13097345573 }, { "content": "// Note: this status is added into the queue of ready data, so any previously add ready data must\n\n// first be read before this status will be returned. The channel status is never removed, any\n\n// future reads will always return this error status.\n\npub fn set_error(handle: Handle, status: OakStatus) {\n\n assert_ne!(status, OakStatus::Ok);\n\n READY_DATA.with(|ready_data| {\n\n ready_data\n\n .borrow_mut()\n\n .entry(handle)\n\n .or_default()\n\n .push_back(Err(status))\n\n });\n\n}\n", "file_path": "experimental/oak_async/tests/fake_runtime.rs", "rank": 27, "score": 177089.24028673244 }, { "content": "/// Build Rust code corresponding to a set of protocol buffer message and service definitions,\n\n/// emitting generated code to crate's `OUT_DIR`. For gRPC service definitions, this\n\n/// function generates Oak-specific code that is suitable for use inside an Oak Node (i.e. *not*\n\n/// code that is suitable for use in a normal application running on the host platform).\n\n///\n\n/// `root_repo` is the path to the root repository. All paths to `.proto` files must be specified\n\n/// relative to `repo_root`. 
Likewise, all imported paths in `.proto` files must be specified\n\n/// relative to this path.\n\npub fn compile_protos<P>(inputs: &[P], repo_root: P)\n\nwhere\n\n P: AsRef<std::path::Path>,\n\n{\n\n compile_protos_with_options(inputs, repo_root, ProtoOptions::default());\n\n}\n\n\n", "file_path": "oak_utils/src/lib.rs", "rank": 28, "score": 176477.1972928285 }, { "content": "/// Reads and returns the user request.\n\n///\n\n/// This function is idempotent. Multiple calls to this function all return the same value.\n\n///\n\n/// See [`read_request`](https://github.com/project-oak/oak/blob/main/docs/oak_functions_abi.md#read_request).\n\npub fn read_request() -> Result<Vec<u8>, OakStatus> {\n\n // TODO(#1989): Share this logic with other similar methods.\n\n\n\n let mut buf_ptr: *mut u8 = std::ptr::null_mut();\n\n let mut buf_len: usize = 0;\n\n let status_code = unsafe { oak_functions_abi::read_request(&mut buf_ptr, &mut buf_len) };\n\n let status = OakStatus::from_i32(status_code as i32).ok_or(OakStatus::ErrInternal)?;\n\n match status {\n\n OakStatus::Ok => {\n\n let buf = from_alloc_buffer(buf_ptr, buf_len);\n\n Ok(buf)\n\n }\n\n status => Err(status),\n\n }\n\n}\n\n\n", "file_path": "oak_functions/sdk/oak_functions/src/lib.rs", "rank": 29, "score": 174482.62151330482 }, { "content": "/// Like `compile_protos`, but allows for configuring options through `ProtoOptions`.\n\npub fn compile_protos_with_options<P>(inputs: &[P], repo_root: P, options: ProtoOptions)\n\nwhere\n\n P: AsRef<std::path::Path>,\n\n{\n\n set_protoc_env_if_unset();\n\n\n\n for input in inputs {\n\n // Tell cargo to rerun this build script if the proto file has changed.\n\n // https://doc.rust-lang.org/cargo/reference/build-scripts.html#cargorerun-if-changedpath\n\n println!(\n\n \"cargo:rerun-if-changed={}/{}\",\n\n repo_root.as_ref().display(),\n\n input.as_ref().display()\n\n );\n\n }\n\n\n\n let mut prost_config = prost_build::Config::new();\n\n if options.generate_services {\n\n if 
options.experimental_async {\n\n // AsyncServiceGenerator calls OakServiceGenerator, so the sync code is either way.\n", "file_path": "oak_utils/src/lib.rs", "rank": 30, "score": 174355.02838785292 }, { "content": "/// Runs the given closure, providing it with a handle to the current executor\n\npub fn with_executor<F: FnOnce(&mut Executor) -> R, R>(f: F) -> R {\n\n EXECUTOR.with(|executor| f(&mut executor.borrow_mut()))\n\n}\n\n\n", "file_path": "experimental/oak_async/src/executor.rs", "rank": 31, "score": 174343.04380430508 }, { "content": "// Start running an introspection server on the given port, running until the\n\n// `termination_notificiation_receiver` is triggered.\n\npub fn serve(\n\n port: u16,\n\n runtime: Arc<Runtime>,\n\n termination_notificiation_receiver: tokio::sync::oneshot::Receiver<()>,\n\n) {\n\n let tokio_runtime = tokio::runtime::Runtime::new().expect(\"Couldn't create Tokio runtime\");\n\n tokio_runtime.block_on(make_server(\n\n port,\n\n runtime,\n\n termination_notificiation_receiver,\n\n ));\n\n}\n", "file_path": "oak_runtime/src/introspect.rs", "rank": 32, "score": 173754.6025512529 }, { "content": "pub fn translate(\n\n client: &TranslatorClient,\n\n text: &str,\n\n from_lang: &str,\n\n to_lang: &str,\n\n) -> Option<String> {\n\n info!(\n\n \"attempt to translate '{}' from {} to {}\",\n\n text, from_lang, to_lang\n\n );\n\n let req = TranslateRequest {\n\n text: text.to_string(),\n\n from_lang: from_lang.to_string(),\n\n to_lang: to_lang.to_string(),\n\n };\n\n\n\n match client.translate(req) {\n\n Ok(rsp) => {\n\n info!(\"translation '{}'\", rsp.translated_text);\n\n Some(rsp.translated_text)\n", "file_path": "examples/translator/common/src/lib.rs", "rank": 33, "score": 171125.8243652939 }, { "content": "pub fn init_logging() {\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n}\n\n\n", "file_path": "oak_runtime/src/tests.rs", "rank": 34, "score": 171125.8243652939 }, { "content": "pub fn run_fuzz_targets_in_crate(path: 
&Path, opt: &RunCargoFuzz) -> Step {\n\n // `cargo-fuzz` can only run in the crate that contains the `fuzz` crate. So we need to use\n\n // `Cmd::new_in_dir` to execute the command inside the crate's directory. Pop the two components\n\n // (i.e., `fuzz/Cargo.toml`) to get to the crate path.\n\n let mut crate_path = path.to_path_buf();\n\n crate_path.pop();\n\n crate_path.pop();\n\n\n\n let cargo_manifest: CargoManifest = toml::from_str(&read_file(path))\n\n .unwrap_or_else(|err| panic!(\"could not parse cargo manifest file {:?}: {}\", path, err));\n\n\n\n Step::Multiple {\n\n name: format!(\"fuzzing {:?}\", &crate_path.file_name().unwrap()),\n\n steps: cargo_manifest\n\n .bin\n\n .iter()\n\n .filter(|binary| match &opt.target_name {\n\n Some(target_name) => &binary.name == target_name,\n\n None => true,\n\n })\n", "file_path": "runner/src/main.rs", "rank": 35, "score": 171091.96241496177 }, { "content": "/// Converts a nonce to a Roughtime request.\n\npub fn make_request(nonce: &[u8]) -> Result<Vec<u8>, Error> {\n\n let mut msg = RtMessage::new(1);\n\n msg.add_field(Tag::NONC, nonce)?;\n\n msg.pad_to_kilobyte();\n\n\n\n msg.encode()\n\n}\n\n\n\n/// The parsed data extracted from a Roughtime response.\n\npub struct ParsedResponse {\n\n pub verified: bool,\n\n pub midpoint: u64,\n\n pub radius: u32,\n\n}\n\n\n\n/// Decodes, parses and validates Roughtime responses.\n\n///\n\n/// See https://roughtime.googlesource.com/roughtime/+/HEAD/PROTOCOL.md#processing-a-response\n\npub struct ResponseHandler {\n\n /// The 256-bit Ed25519 public key for validating the signature.\n", "file_path": "third_party/roughenough/src/client.rs", "rank": 36, "score": 170714.5709476734 }, { "content": "pub fn set_wait_on_channels_handler<F: 'static + FnMut(&mut [HandleWithStatus]) -> OakStatus>(\n\n handler: F,\n\n) {\n\n WAIT_ON_CHANNELS_HANDLER.with(|cell| cell.replace(Some(Box::new(handler))));\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn channel_read(\n\n handle: Handle,\n\n buf: 
*mut u8,\n\n size: usize,\n\n actual_size: *mut u32,\n\n _handle_buf: *mut u8,\n\n _handle_count: u32,\n\n actual_handle_count: *mut u32,\n\n) -> u32 {\n\n READY_DATA.with(|ready_data| {\n\n let mut ready_data = ready_data.borrow_mut();\n\n let data_queue = ready_data.entry(handle).or_default();\n\n let status = match data_queue.front() {\n", "file_path": "experimental/oak_async/tests/fake_runtime.rs", "rank": 37, "score": 170121.34972786272 }, { "content": "/// Returns the paths to `Cargo.toml` files of crates affected by the changed proto files.\n\nfn crates_affected_by_protos(affected_protos: &[String]) -> HashSet<String> {\n\n source_files()\n\n .filter(|path| to_string(path.clone()).ends_with(\"build.rs\"))\n\n .filter(|path| {\n\n for proto in affected_protos {\n\n if file_contains(path, proto) {\n\n return true;\n\n }\n\n }\n\n false\n\n })\n\n .map(to_string)\n\n .map(|build_path| find_crate_toml_file(build_path).unwrap())\n\n .collect()\n\n}\n\n\n", "file_path": "runner/src/diffs.rs", "rank": 38, "score": 169306.3046901874 }, { "content": "/// Parses the content of the downloaded OpenID Connect client secret file.\n\npub fn parse_client_info_json(client_info_json: &str) -> Result<ClientInfo, Box<dyn error::Error>> {\n\n let wrapper: ClientInfoWrapper = serde_json::from_str(client_info_json)?;\n\n Ok(wrapper.installed)\n\n}\n\n\n\n/// Exchanges an authorisation code for an ID token and validates the token.\n\npub async fn exchange_code_for_token(\n\n code: &str,\n\n client_id: &str,\n\n client_secret: &str,\n\n) -> Result<String, Box<dyn error::Error>> {\n\n let body = TokenRequest {\n\n code: code.to_owned(),\n\n client_id: client_id.to_owned(),\n\n client_secret: client_secret.to_owned(),\n\n // TODO(#922): Pass the actual redirect URI from the client app.\n\n redirect_uri: \"http://127.0.0.1:8089\".to_owned(),\n\n grant_type: \"authorization_code\".to_owned(),\n\n };\n\n // TODO(#923): Get the token service URL from the discovery document.\n", 
"file_path": "oak_runtime/src/auth/oidc_utils.rs", "rank": 39, "score": 169073.2655768037 }, { "content": "/// Generate gRPC code from Protobuf using `tonic` library.\n\n///\n\n/// The path to the root repository must be passed as `proto_path`. All paths to `.proto` files\n\n/// must be specified relative to this path. Likewise, all imported paths in `.proto` files must\n\n/// be specified relative to this path.\n\npub fn generate_grpc_code(\n\n proto_path: &str,\n\n file_paths: &[&str],\n\n options: CodegenOptions,\n\n) -> std::io::Result<()> {\n\n set_protoc_env_if_unset();\n\n\n\n // TODO(#1093): Move all proto generation to a common crate.\n\n let proto_path = std::path::Path::new(proto_path);\n\n let file_paths: Vec<std::path::PathBuf> = file_paths\n\n .iter()\n\n .map(|file_path| proto_path.join(file_path))\n\n .collect();\n\n\n\n // Tell cargo to rerun this build script if the proto file has changed.\n\n // https://doc.rust-lang.org/cargo/reference/build-scripts.html#cargorerun-if-changedpath\n\n for file_path in file_paths.iter() {\n\n println!(\"cargo:rerun-if-changed={}\", file_path.display());\n\n }\n\n\n", "file_path": "oak_utils/src/lib.rs", "rank": 40, "score": 168654.99141763058 }, { "content": "/// Creates a new Node running the configuration identified by `config_name`, running the entrypoint\n\n/// identified by `entrypoint_name` (for a Web Assembly Node; this parameter is ignored when\n\n/// creating a pseudo-Node), with the provided `label`, and passing it the given handle.\n\n///\n\n/// The provided label must be equal or more restrictive than the label of the calling node, i.e.\n\n/// the label of the calling node must \"flow to\" the provided label.\n\n///\n\n/// See https://github.com/project-oak/oak/blob/main/docs/concepts.md#labels\n\npub fn node_create(\n\n name: &str,\n\n config: &NodeConfiguration,\n\n label: &Label,\n\n half: ReadHandle,\n\n) -> Result<(), OakStatus> {\n\n let name_bytes = name.as_bytes();\n\n let label_bytes = 
label.serialize();\n\n let mut config_bytes = Vec::new();\n\n config.encode(&mut config_bytes).map_err(|err| {\n\n warn!(\"Could not encode node configuration: {:?}\", err);\n\n OakStatus::ErrInvalidArgs\n\n })?;\n\n let status = unsafe {\n\n oak_abi::node_create(\n\n name_bytes.as_ptr(),\n\n name_bytes.len(),\n\n config_bytes.as_ptr(),\n\n config_bytes.len(),\n\n label_bytes.as_ptr(),\n\n label_bytes.len(),\n\n half.handle,\n\n )\n\n };\n\n result_from_status(status as i32, ())\n\n}\n\n\n", "file_path": "sdk/rust/oak/src/lib.rs", "rank": 41, "score": 168639.45508123309 }, { "content": "/// Initializes an HTTP server pseudo-Node listening on the provided address.\n\n///\n\n/// Returns a [`Receiver`] to read HTTP [`Invocation`]s from.\n\npub fn init(address: &str) -> Result<Receiver<Invocation>, OakStatus> {\n\n // Create a separate channel for receiving invocations and pass it to a newly created HTTP\n\n // pseudo-Node.\n\n let (invocation_sender, invocation_receiver) =\n\n crate::io::channel_create::<Invocation>(\"HTTP invocation\", &Label::public_untrusted())\n\n .expect(\"Couldn't create HTTP invocation channel\");\n\n match init_with_sender(address, invocation_sender) {\n\n Ok(_) => {}\n\n Err(e) => {\n\n let _ = invocation_receiver.close();\n\n return Err(e);\n\n }\n\n };\n\n Ok(invocation_receiver)\n\n}\n\n\n", "file_path": "sdk/rust/oak/src/http/server.rs", "rank": 42, "score": 167737.18235736655 }, { "content": "/// Initializes a gRPC server pseudo-Node listening on the provided address, additionally creating a\n\n/// local channel to collect [`Invocation`] messages.\n\n///\n\n/// Returns a [`Receiver`] to read gRPC [`Invocation`]s from.\n\npub fn init(address: &str) -> Result<Receiver<Invocation>, OakStatus> {\n\n // Create a separate channel for receiving invocations and pass it to a newly created gRPC\n\n // pseudo-Node.\n\n let (invocation_sender, invocation_receiver) =\n\n crate::io::channel_create::<Invocation>(\"gRPC invocation\", 
&Label::public_untrusted())\n\n .expect(\"Couldn't create gRPC invocation channel\");\n\n match init_with_sender(address, invocation_sender) {\n\n Ok(_) => {}\n\n Err(e) => {\n\n let _ = invocation_receiver.close();\n\n return Err(e);\n\n }\n\n };\n\n Ok(invocation_receiver)\n\n}\n\n\n", "file_path": "sdk/rust/oak/src/grpc/server.rs", "rank": 43, "score": 167736.83781345037 }, { "content": "/// Sends [`crate::grpc::Invocation`] (containing [`oak_io::Receiver`] and [`oak_io::Sender`])\n\n/// through invocation sender if [`oak_io::Receiver`] label flows to invocation sender's label.\n\n/// If failed - sends error back through [`oak_io::Sender`].\n\n///\n\n/// Useful for sending invocations from router nodes and checking label correctness without actually\n\n/// reading the contents of invocation.\n\npub fn forward_invocation(\n\n invocation: crate::grpc::Invocation,\n\n invocation_sender: &Sender<crate::grpc::Invocation>,\n\n) -> anyhow::Result<()> {\n\n match (&invocation.receiver, &invocation.sender) {\n\n (Some(request_receiver), Some(response_sender)) => {\n\n let request_label = request_receiver\n\n .label()\n\n .context(\"Couldn't read request label\")?;\n\n let sender_label = invocation_sender\n\n .label()\n\n .context(\"Couldn't read invocation sender label\")?;\n\n // Check if request label is valid in the context of invocation sender.\n\n if request_label.flows_to(&sender_label) {\n\n // Forward invocation through invocation sender.\n\n let result = invocation_sender\n\n .send(&invocation)\n\n .context(\"Couldn't forward invocation\");\n\n // Close the channels.\n\n invocation.close()?;\n", "file_path": "sdk/rust/oak/src/io/mod.rs", "rank": 44, "score": 166287.90585261397 }, { "content": "/// Install a panic hook that logs [panic information].\n\n///\n\n/// Logs panic infomation to the logging channel, if one is set.\n\n///\n\n/// [panic information]: std::panic::PanicInfo\n\npub fn set_panic_hook() {\n\n std::panic::set_hook(Box::new(|panic_info| {\n\n let 
payload = panic_info.payload();\n\n // The payload can be a static string slice or a string, depending on how panic was called.\n\n // Code for extracting the message is inspired by the rust default panic hook:\n\n // https://github.com/rust-lang/rust/blob/master/src/libstd/panicking.rs#L188-L194\n\n let msg = match payload.downcast_ref::<&'static str>() {\n\n Some(content) => *content,\n\n None => match payload.downcast_ref::<String>() {\n\n Some(content) => content.as_ref(),\n\n None => \"<UNKNOWN MESSAGE>\",\n\n },\n\n };\n\n let (file, line) = match panic_info.location() {\n\n Some(location) => (location.file(), location.line()),\n\n None => (\"<UNKNOWN FILE>\", 0),\n\n };\n\n error!(\n\n \"panic occurred in file '{}' at line {}: {}\",\n\n file, line, msg\n\n );\n\n }));\n\n}\n\n\n", "file_path": "sdk/rust/oak/src/lib.rs", "rank": 45, "score": 166284.66726418896 }, { "content": "/// The same as [`channel_create`](#method.channel_create), but also applies the current Node's\n\n/// downgrade privilege when checking IFC restrictions.\n\npub fn channel_create_with_downgrade(\n\n name: &str,\n\n label: &Label,\n\n) -> Result<(WriteHandle, ReadHandle), OakStatus> {\n\n let mut write = WriteHandle {\n\n handle: crate::handle::invalid(),\n\n };\n\n let mut read = ReadHandle {\n\n handle: crate::handle::invalid(),\n\n };\n\n let label_bytes = label.serialize();\n\n let name_bytes = name.as_bytes();\n\n let status = unsafe {\n\n oak_abi::channel_create_with_downgrade(\n\n &mut write.handle as *mut u64,\n\n &mut read.handle as *mut u64,\n\n name_bytes.as_ptr(),\n\n name_bytes.len(),\n\n label_bytes.as_ptr(),\n\n label_bytes.len(),\n\n )\n\n };\n\n result_from_status(status as i32, (write, read))\n\n}\n\n\n", "file_path": "sdk/rust/oak/src/lib.rs", "rank": 46, "score": 166279.59553739757 }, { "content": "/// Creates client TLS configuration with a custom X.509 certificate verifier.\n\n/// The verifier also checks that the `tee_measurement` is the same as the TEE 
measurement included\n\n/// in the certificate.\n\n/// [`root_tls_certificate`] should contain only a single PEM encoded certificate.\n\n/// https://tools.ietf.org/html/rfc1421\n\npub fn create_tls_attestation_config(\n\n root_tls_certificate: &[u8],\n\n tee_measurement: &[u8],\n\n) -> anyhow::Result<rustls::ClientConfig> {\n\n let mut config = rustls::ClientConfig::new();\n\n\n\n // Configure ALPN to accept HTTP/1.1 and HTTP/2.\n\n // https://tools.ietf.org/html/rfc7639\n\n config.set_protocols(&[b\"http/1.1\".to_vec(), b\"h2\".to_vec()]);\n\n\n\n // Add root TLS certificate.\n\n let mut cc_reader = std::io::BufReader::new(&root_tls_certificate[..]);\n\n let certs = rustls::internal::pemfile::certs(&mut cc_reader)\n\n .map_err(|error| anyhow!(\"Couldn't parse TLS certificate: {:?}\", error))?;\n\n for certificate in certs.iter() {\n\n config\n\n .root_store\n\n .add(certificate)\n\n .context(\"Couldn't add root certificate\")?;\n\n }\n", "file_path": "oak_client/src/attestation.rs", "rank": 47, "score": 166279.59553739757 }, { "content": "/// Build the configuration needed to launch a test Runtime instance that runs a single-Node\n\n/// application with the provided entrypoint name.\n\n///\n\n/// The Wasm module is compiled with [`Profile::Release`] in order to maintain its SHA256 hash\n\n/// consistent.\n\npub fn runtime_config(\n\n entrypoint_name: &str,\n\n config_map: ConfigMap,\n\n permissions: oak_runtime::permissions::PermissionsConfiguration,\n\n) -> oak_runtime::RuntimeConfiguration {\n\n let wasm: HashMap<String, Vec<u8>> = [(\n\n DEFAULT_MODULE_NAME.to_string(),\n\n compile_rust_wasm(DEFAULT_MODULE_MANIFEST, Profile::Release)\n\n .expect(\"failed to build wasm module\"),\n\n )]\n\n .iter()\n\n .cloned()\n\n .collect();\n\n\n\n runtime_config_wasm(\n\n wasm,\n\n DEFAULT_MODULE_NAME,\n\n entrypoint_name,\n\n config_map,\n\n permissions,\n\n oak_runtime::SignatureTable::default(),\n\n )\n\n}\n\n\n", "file_path": "sdk/rust/oak_tests/src/lib.rs", "rank": 48, 
"score": 166279.59553739757 }, { "content": "/// Creates a [`WasmHandler`] with the given Wasm module, lookup data, metrics aggregator, and\n\n/// extensions.\n\npub fn create_wasm_handler(\n\n wasm_module_bytes: &[u8],\n\n lookup_data: Arc<LookupData>,\n\n extensions: Vec<BoxedExtensionFactory>,\n\n logger: Logger,\n\n) -> anyhow::Result<WasmHandler> {\n\n let wasm_handler = WasmHandler::create(wasm_module_bytes, lookup_data, extensions, logger)?;\n\n\n\n Ok(wasm_handler)\n\n}\n\n\n\n/// Starts a gRPC server on the given address, serving the `main` function from the given\n\n/// [`WasmHandler`].\n\n#[allow(clippy::too_many_arguments)]\n\npub async fn create_and_start_grpc_server<F: Future<Output = ()>>(\n\n address: &SocketAddr,\n\n wasm_handler: WasmHandler,\n\n tee_certificate: Vec<u8>,\n\n policy: ServerPolicy,\n\n config_info: ConfigurationInfo,\n", "file_path": "oak_functions/loader/src/grpc.rs", "rank": 49, "score": 166279.59553739757 }, { "content": "/// The same as [`node_create`](#method.node_create), but also applies the current Node's downgrade\n\n/// privilege when checking IFC restrictions.\n\npub fn node_create_with_downgrade(\n\n name: &str,\n\n config: &NodeConfiguration,\n\n label: &Label,\n\n half: ReadHandle,\n\n) -> Result<(), OakStatus> {\n\n let name_bytes = name.as_bytes();\n\n let label_bytes = label.serialize();\n\n let mut config_bytes = Vec::new();\n\n config.encode(&mut config_bytes).map_err(|err| {\n\n warn!(\"Could not encode node configuration: {:?}\", err);\n\n OakStatus::ErrInvalidArgs\n\n })?;\n\n let status = unsafe {\n\n oak_abi::node_create_with_downgrade(\n\n name_bytes.as_ptr(),\n\n name_bytes.len(),\n\n config_bytes.as_ptr(),\n\n config_bytes.len(),\n\n label_bytes.as_ptr(),\n\n label_bytes.len(),\n\n half.handle,\n\n )\n\n };\n\n result_from_status(status as i32, ())\n\n}\n\n\n", "file_path": "sdk/rust/oak/src/lib.rs", "rank": 50, "score": 166279.59553739757 }, { "content": "pub fn add_ready_data<T: Encodable>(handle: 
Handle, data: &T) {\n\n let msg = data.encode().expect(\"Failed to encode ready data\");\n\n READY_DATA.with(|ready_data| {\n\n ready_data\n\n .borrow_mut()\n\n .entry(handle)\n\n .or_default()\n\n .push_back(Ok(msg.bytes))\n\n });\n\n}\n\n\n", "file_path": "experimental/oak_async/tests/fake_runtime.rs", "rank": 51, "score": 165437.25291910802 }, { "content": "/// Encapsulate a protocol buffer message in a GrpcRequest wrapper using the\n\n/// given method name.\n\npub fn encap_request<T: prost::Message>(req: &T, method_name: &str) -> Option<GrpcRequest> {\n\n // Put the request in a GrpcRequest wrapper and serialize it.\n\n let mut bytes = Vec::new();\n\n if let Err(e) = req.encode(&mut bytes) {\n\n warn!(\"failed to serialize gRPC request: {}\", e);\n\n return None;\n\n };\n\n let grpc_req = GrpcRequest {\n\n method_name: method_name.to_string(),\n\n req_msg: bytes,\n\n last: true,\n\n };\n\n Some(grpc_req)\n\n}\n", "file_path": "oak_services/src/grpc/mod.rs", "rank": 52, "score": 165316.11981289505 }, { "content": "/// Creates a node and corresponding inbound channel of the same type, sends an init message to it,\n\n/// and returns a [`Sender`] of command messages; for nodes that are instantiated via the\n\n/// [`crate::entrypoint_command_handler_init`] macro.\n\npub fn entrypoint_node_create<\n\n T: crate::WasmEntrypoint<Message = oak_io::InitWrapper<Init, Command>>,\n\n Init: Encodable + Decodable,\n\n Command: Encodable + Decodable,\n\n>(\n\n name: &str,\n\n label: &Label,\n\n wasm_module_name: &str,\n\n init: Init,\n\n) -> Result<Sender<Command>, oak_io::OakError> {\n\n let node_config = &crate::node_config::wasm(wasm_module_name, T::ENTRYPOINT_NAME);\n\n let init_sender = node_create(name, label, node_config)?;\n\n let result = send_init(&init_sender, init, label);\n\n init_sender.close()?;\n\n result\n\n}\n\n\n", "file_path": "sdk/rust/oak/src/io/mod.rs", "rank": 53, "score": 164041.78313964297 }, { "content": "/// Creates an `HttpInvocation` and a 
corresponding `HttpInvocationSource`.\n\npub fn create_http_invocation(\n\n label: &Label,\n\n) -> Result<(HttpInvocation, HttpInvocationSource), OakStatus> {\n\n // Create a channel for passing HTTP requests.\n\n let (request_sender, request_receiver) = crate::io::channel_create(\"HTTP request\", label)?;\n\n // Create a channel for collecting HTTP responses.\n\n let (response_sender, response_receiver) = crate::io::channel_create(\"HTTP response\", label)?;\n\n\n\n Ok((\n\n HttpInvocation {\n\n receiver: Some(request_receiver),\n\n sender: Some(response_sender),\n\n },\n\n HttpInvocationSource {\n\n sender: Some(request_sender),\n\n receiver: Some(response_receiver),\n\n },\n\n ))\n\n}\n", "file_path": "sdk/rust/oak/src/http/mod.rs", "rank": 54, "score": 164036.82980810327 }, { "content": "/// Build the configuration needed to launch a test Runtime instance that runs the given collection\n\n/// of Wasm modules, starting with the given module name and entrypoint.\n\npub fn runtime_config_wasm(\n\n wasm_modules: HashMap<String, Vec<u8>>,\n\n module_config_name: &str,\n\n entrypoint_name: &str,\n\n config_map: ConfigMap,\n\n permissions: oak_runtime::permissions::PermissionsConfiguration,\n\n sign_table: oak_runtime::SignatureTable,\n\n) -> oak_runtime::RuntimeConfiguration {\n\n oak_runtime::RuntimeConfiguration {\n\n metrics_port: Some(9090),\n\n introspect_port: Some(1909),\n\n kms_credentials: None,\n\n secure_server_configuration: oak_runtime::SecureServerConfiguration {\n\n grpc_config: Some(oak_runtime::GrpcConfiguration {\n\n grpc_server_tls_identity: Some(Identity::from_pem(\n\n include_str!(\"../certs/local.pem\"),\n\n include_str!(\"../certs/local.key\"),\n\n )),\n\n grpc_client_root_tls_certificate: oak_runtime::tls::Certificate::parse(\n\n include_bytes!(\"../certs/ca.pem\").to_vec(),\n", "file_path": "sdk/rust/oak_tests/src/lib.rs", "rank": 55, "score": 164036.82980810327 }, { "content": "/// Convenience helper to build and run a single-Node 
application with the provided entrypoint name.\n\npub fn run_single_module(\n\n entrypoint_name: &str,\n\n permissions: oak_runtime::permissions::PermissionsConfiguration,\n\n) -> Result<Arc<oak_runtime::Runtime>, oak::OakError> {\n\n run_single_module_with_config(entrypoint_name, ConfigMap::default(), permissions)\n\n}\n\n\n", "file_path": "sdk/rust/oak_tests/src/lib.rs", "rank": 56, "score": 164036.82980810327 }, { "content": "#[test]\n\nfn test_warns_when_using_fields_with_deprecated_field() {\n\n #[allow(deprecated)]\n\n let message = deprecated_field::Test {\n\n not_outdated: \".ogg\".to_string(),\n\n outdated: \".wav\".to_string(),\n\n };\n\n // This test relies on the `#[allow(deprecated)]` attribute to ignore the warning that should\n\n // be raised by the compiler.\n\n // This test has a shortcoming since it doesn't explicitly check for the presence of the\n\n // `deprecated` attribute since it doesn't exist at runtime. If complied without the `allow`\n\n // attribute the following warning would be raised:\n\n //\n\n // warning: use of deprecated item 'deprecated_field::deprecated_field::Test::outdated'\n\n // --> tests/src/deprecated_field.rs:11:9\n\n // |\n\n // 11 | outdated: \".wav\".to_string(),\n\n // | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n // |\n\n // = note: `#[warn(deprecated)]` on by default\n\n drop(message);\n\n}\n", "file_path": "third_party/prost/tests/src/deprecated_field.rs", "rank": 57, "score": 163867.87739757012 }, { "content": "/// Convenience function for creating the top tag.\n\npub fn top() -> Tag {\n\n let tag = Some(tag::Tag::TopTag(Top {}));\n\n Tag { tag }\n\n}\n", "file_path": "oak_abi/src/label/mod.rs", "rank": 58, "score": 162567.36340086372 }, { "content": "/// Returns an intentionally invalid handle.\n\npub fn invalid() -> Handle {\n\n oak_abi::INVALID_HANDLE\n\n}\n\n\n\n/// Pack a slice of `Handles` into the Wasm host ABI format.\n\npub(crate) fn pack(handles: &[Handle]) -> Vec<u8> {\n\n let mut packed = 
Vec::with_capacity(handles.len() * 8);\n\n for handle in handles {\n\n packed\n\n .write_u64::<byteorder::LittleEndian>(handle.to_owned())\n\n .unwrap();\n\n }\n\n packed\n\n}\n\n\n\n/// Unpack a slice of Handles from the Wasm host ABI format.\n\npub(crate) fn unpack(bytes: &[u8], handle_count: u32, handles: &mut Vec<Handle>) {\n\n handles.clear();\n\n let mut reader = std::io::Cursor::new(bytes);\n\n for _ in 0..handle_count {\n\n handles.push(reader.read_u64::<byteorder::LittleEndian>().unwrap());\n\n }\n\n}\n", "file_path": "sdk/rust/oak/src/handle.rs", "rank": 59, "score": 162567.36340086372 }, { "content": "fn handle_request(request: ConformanceRequest) -> conformance_response::Result {\n\n match request.requested_output_format() {\n\n WireFormat::Unspecified => {\n\n return conformance_response::Result::ParseError(\n\n \"output format unspecified\".to_string(),\n\n );\n\n }\n\n WireFormat::Json => {\n\n return conformance_response::Result::Skipped(\n\n \"JSON output is not supported\".to_string(),\n\n );\n\n }\n\n WireFormat::Jspb => {\n\n return conformance_response::Result::Skipped(\n\n \"JSPB output is not supported\".to_string(),\n\n );\n\n }\n\n WireFormat::TextFormat => {\n\n return conformance_response::Result::Skipped(\n\n \"TEXT_FORMAT output is not supported\".to_string(),\n", "file_path": "third_party/prost/conformance/src/main.rs", "rank": 60, "score": 162378.66151938142 }, { "content": "/// Convenience helper to build and run a single-Node Application using the default name \"oak_main\"\n\n/// for its entrypoint.\n\npub fn run_single_module_default(\n\n permissions: oak_runtime::permissions::PermissionsConfiguration,\n\n) -> Result<Arc<oak_runtime::Runtime>, oak::OakError> {\n\n run_single_module(DEFAULT_ENTRYPOINT_NAME, permissions)\n\n}\n\n\n", "file_path": "sdk/rust/oak_tests/src/lib.rs", "rank": 61, "score": 161907.86474392563 }, { "content": "/// Serializes an application configuration from `app_config` and writes it into `filename`.\n\npub 
fn write_config_to_file(\n\n app_config: &ApplicationConfiguration,\n\n filename: &Path,\n\n) -> anyhow::Result<()> {\n\n let mut bytes = Vec::new();\n\n app_config\n\n .encode(&mut bytes)\n\n .context(\"Couldn't encode application configuration\")?;\n\n fs::write(filename, &bytes)\n\n .with_context(|| format!(\"Couldn't write file {}\", filename.display()))?;\n\n Ok(())\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> anyhow::Result<()> {\n\n env_logger::init();\n\n\n\n let opt = Opt::from_args();\n\n debug!(\"Parsed opts: {:?}\", opt);\n\n\n", "file_path": "sdk/rust/oak_app_build/src/main.rs", "rank": 62, "score": 161902.6537636593 }, { "content": "/// Convenience helper to build and run a single-Node application with the provided entrypoint name,\n\n/// passing in the provided `ConfigMap` at start-of-day.\n\npub fn run_single_module_with_config(\n\n entrypoint_name: &str,\n\n config_map: ConfigMap,\n\n permissions: oak_runtime::permissions::PermissionsConfiguration,\n\n) -> Result<Arc<oak_runtime::Runtime>, oak::OakError> {\n\n let combined_config = runtime_config(entrypoint_name, config_map, permissions);\n\n oak_runtime::configure_and_run(combined_config)\n\n}\n\n\n", "file_path": "sdk/rust/oak_tests/src/lib.rs", "rank": 63, "score": 161902.6537636593 }, { "content": "fn struct_impls(name: &Ident, data: &syn::DataStruct) -> TokenStream {\n\n let accessors: Vec<TokenStream> = match &data.fields {\n\n Fields::Named(named) => named\n\n .named\n\n .iter()\n\n .flat_map(|f| f.ident.clone())\n\n .map(|i| quote!(self.#i))\n\n .collect(),\n\n Fields::Unnamed(unnamed) => unnamed\n\n .unnamed\n\n .iter()\n\n .enumerate()\n\n .map(|(i, _)| {\n\n let index = syn::Index::from(i);\n\n quote!(self.#index)\n\n })\n\n .collect(),\n\n Fields::Unit => Vec::new(),\n\n };\n\n let body = accessors_fold(&accessors);\n\n\n\n quote! 
{\n\n impl ::oak_io::handle::HandleVisit for #name {\n\n fn fold<B>(&mut self, init: B, f: fn(B, &mut ::oak_io::Handle) -> B) -> B {\n\n #body\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "oak_derive/src/lib.rs", "rank": 64, "score": 160272.27890021802 }, { "content": "pub fn skip_field<B>(\n\n wire_type: WireType,\n\n tag: u32,\n\n buf: &mut B,\n\n ctx: DecodeContext,\n\n) -> Result<(), DecodeError>\n\nwhere\n\n B: Buf,\n\n{\n\n ctx.limit_reached()?;\n\n let len = match wire_type {\n\n WireType::Varint => decode_varint(buf).map(|_| 0)?,\n\n WireType::ThirtyTwoBit => 4,\n\n WireType::SixtyFourBit => 8,\n\n WireType::LengthDelimited => decode_varint(buf)?,\n\n WireType::StartGroup => loop {\n\n let (inner_tag, inner_wire_type) = decode_key(buf)?;\n\n match inner_wire_type {\n\n WireType::EndGroup => {\n\n if inner_tag != tag {\n", "file_path": "third_party/prost/src/encoding.rs", "rank": 65, "score": 160207.5038570282 }, { "content": "/// Block the current thread until the provided `Future` has been `poll`ed to completion.\n\n///\n\n/// Returns `Err(_)` if the call to `wait_on_channels` fails.\n\npub fn block_on<F: Future + 'static>(f: F) -> Result<F::Output, OakStatus> {\n\n let mut pool = LocalPool::new();\n\n let spawner = pool.spawner();\n\n let main_join_handle = spawner\n\n .spawn_local_with_handle(f)\n\n .expect(\"Failed to spawn main future\");\n\n loop {\n\n // Poll futures in the pool until none of them can make any progress.\n\n pool.run_until_stalled();\n\n\n\n // We could not make more progress but no handles are waiting: we should be done!\n\n if with_executor(|e| e.none_waiting()) {\n\n break;\n\n }\n\n\n\n // There are pending futures but none of them could make progress, which means they are all\n\n // waiting for channels to become ready.\n\n\n\n // O(n) where n = number of pending readers. Dominated by `executor.pending_handles()` which\n\n // needs to loop over all readers. 
All subsequent operations are on the order of the number\n", "file_path": "experimental/oak_async/src/executor.rs", "rank": 66, "score": 158139.59446613604 }, { "content": "pub fn log() -> NodeConfiguration {\n\n NodeConfiguration {\n\n config_type: Some(ConfigType::LogConfig(LogConfiguration {})),\n\n }\n\n}\n", "file_path": "sdk/rust/oak/src/node_config.rs", "rank": 67, "score": 157964.7381277339 }, { "content": "/// Process the stream of messages coming in on `receiver` using the provided handler.\n\n///\n\n/// If the runtime signals that the node is being terminated while waiting for new commands, the\n\n/// loop will terminate and this function will return without completing the `handler` future.\n\n///\n\n/// `panic!`s if any other error occurs while reading commands.\n\npub fn run_command_loop<T, F, R>(receiver: Receiver<T>, handler: F)\n\nwhere\n\n T: Decodable + Send,\n\n F: FnOnce(ChannelReadStream<T>) -> R,\n\n R: Future<Output = ()> + 'static,\n\n{\n\n match crate::block_on(handler(receiver.receive_stream())) {\n\n Ok(()) => {}\n\n Err(OakStatus::ErrTerminated) => {\n\n info!(\"Received termination status, terminating command loop\");\n\n }\n\n Err(e) => error!(\"Command loop received non-termination error: {:?}\", e),\n\n }\n\n}\n", "file_path": "experimental/oak_async/src/io.rs", "rank": 68, "score": 157883.92615284302 }, { "content": "/// Returns the path to the `protoc` binary.\n\npub fn protoc() -> PathBuf {\n\n match env::var_os(\"PROTOC\") {\n\n Some(protoc) => PathBuf::from(protoc),\n\n None => PathBuf::from(env!(\"PROTOC\")),\n\n }\n\n}\n\n\n", "file_path": "third_party/prost/prost-build/src/lib.rs", "rank": 69, "score": 155830.5620832899 }, { "content": "pub fn free_port() -> u16 {\n\n port_check::free_local_port().expect(\"could not pick free local port\")\n\n}\n\n\n\n/// Wrapper around a termination signal [`oneshot::Sender`] and the [`JoinHandle`] of the associated\n\n/// background task, created by [`background`].\n\npub struct 
Background<T> {\n\n term_tx: oneshot::Sender<()>,\n\n join_handle: JoinHandle<T>,\n\n}\n\n\n\nimpl<T> Background<T> {\n\n /// Sends the termination signal to the background task and awaits for it to gracefully\n\n /// terminate.\n\n ///\n\n /// This does not guarantee that the background task terminates (e.g. if it ignores the\n\n /// termination signal), it requires the cooperation of the task in order to work correctly.\n\n pub async fn terminate_and_join(self) -> T {\n\n self.term_tx\n\n .send(())\n\n .expect(\"could not send signal on termination channel\");\n\n self.join_handle\n\n .await\n\n .expect(\"could not wait for background task to terminate\")\n\n }\n\n}\n\n\n", "file_path": "oak_functions/sdk/test_utils/src/lib.rs", "rank": 70, "score": 155830.5620832899 }, { "content": "pub fn kill_process(raw_pid: i32) {\n\n let pid = nix::unistd::Pid::from_raw(raw_pid);\n\n for i in 0..5 {\n\n if process_gone(raw_pid) {\n\n return;\n\n }\n\n\n\n // Ignore errors.\n\n let _ = nix::sys::signal::kill(pid, Signal::SIGINT);\n\n\n\n std::thread::sleep(std::time::Duration::from_millis(200 * i));\n\n }\n\n let _ = nix::sys::signal::kill(pid, Signal::SIGKILL);\n\n}\n\n\n", "file_path": "runner/src/internal.rs", "rank": 71, "score": 155035.2125616828 }, { "content": "/// Helper to inject a (single) gRPC request message via a notification channel,\n\n/// in the same manner as the gRPC pseudo-Node does, and return a channel for\n\n/// reading responses from.\n\npub fn invoke_grpc_method_stream<R>(\n\n method_name: &str,\n\n req: &R,\n\n invocation_channel: &crate::io::Sender<Invocation>,\n\n) -> Result<crate::io::Receiver<GrpcResponse>>\n\nwhere\n\n R: prost::Message,\n\n{\n\n // Create a new channel for request message delivery.\n\n // TODO(#1739): Don't use privilege automatically.\n\n let (req_sender, req_receiver) = crate::io::channel_create_with_downgrade::<GrpcRequest>(\n\n \"gRPC request\",\n\n &Label::public_untrusted(),\n\n )\n\n .expect(\"failed to create 
channel\");\n\n\n\n // Put the request in a GrpcRequest wrapper and send it into the request\n\n // message channel.\n\n let req = oak_services::grpc::encap_request(req, method_name)\n\n .expect(\"failed to serialize GrpcRequest\");\n", "file_path": "sdk/rust/oak/src/grpc/mod.rs", "rank": 72, "score": 153802.40396657807 }, { "content": "/// Returns the path to the Protobuf include directory.\n\npub fn protoc_include() -> PathBuf {\n\n match env::var_os(\"PROTOC_INCLUDE\") {\n\n Some(include) => PathBuf::from(include),\n\n None => PathBuf::from(env!(\"PROTOC_INCLUDE\")),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::cell::RefCell;\n\n use std::rc::Rc;\n\n\n\n /// An example service generator that generates a trait with methods corresponding to the\n\n /// service methods.\n\n struct ServiceTraitGenerator;\n\n impl ServiceGenerator for ServiceTraitGenerator {\n\n fn generate(&mut self, service: Service, buf: &mut String) {\n\n // Generate a trait for the service.\n\n service.comments.append_with_indent(0, buf);\n", "file_path": "third_party/prost/prost-build/src/lib.rs", "rank": 73, "score": 153797.2756309335 }, { "content": "/// Converts [`rpc::Status`] to [`tonic::Status`].\n\nfn to_tonic_status(status: rpc::Status) -> tonic::Status {\n\n tonic::Status::new(tonic::Code::from_i32(status.code), status.message)\n\n}\n", "file_path": "oak_runtime/src/node/grpc/mod.rs", "rank": 74, "score": 153263.8085828547 }, { "content": "pub fn is_dockerfile(path: &Path) -> bool {\n\n let filename = path.file_name().and_then(|s| s.to_str()).unwrap_or(\"\");\n\n filename.ends_with(\"Dockerfile\")\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 75, "score": 152936.37104299112 }, { "content": "fn tonic_code_to_grpc(code: tonic::Code) -> rpc::Code {\n\n match code {\n\n tonic::Code::Ok => rpc::Code::Ok,\n\n tonic::Code::Cancelled => rpc::Code::Cancelled,\n\n tonic::Code::Unknown => rpc::Code::Unknown,\n\n tonic::Code::InvalidArgument => 
rpc::Code::InvalidArgument,\n\n tonic::Code::DeadlineExceeded => rpc::Code::DeadlineExceeded,\n\n tonic::Code::NotFound => rpc::Code::NotFound,\n\n tonic::Code::AlreadyExists => rpc::Code::AlreadyExists,\n\n tonic::Code::PermissionDenied => rpc::Code::PermissionDenied,\n\n tonic::Code::ResourceExhausted => rpc::Code::ResourceExhausted,\n\n tonic::Code::FailedPrecondition => rpc::Code::FailedPrecondition,\n\n tonic::Code::Aborted => rpc::Code::Aborted,\n\n tonic::Code::OutOfRange => rpc::Code::OutOfRange,\n\n tonic::Code::Unimplemented => rpc::Code::Unimplemented,\n\n tonic::Code::Internal => rpc::Code::Internal,\n\n tonic::Code::Unavailable => rpc::Code::Unavailable,\n\n tonic::Code::DataLoss => rpc::Code::DataLoss,\n\n tonic::Code::Unauthenticated => rpc::Code::Unauthenticated,\n\n }\n\n}\n", "file_path": "oak_runtime/src/node/grpc/client.rs", "rank": 76, "score": 151500.4908563848 }, { "content": "/// Return whether the provided path refers to a Bazel file (`BUILD`, `WORKSPACE`, or `*.bzl`)\n\npub fn is_bazel_file(path: &Path) -> bool {\n\n let filename = path.file_name().and_then(|s| s.to_str()).unwrap_or(\"\");\n\n filename == \"BUILD\" || filename == \"WORKSPACE\" || filename.ends_with(\".bzl\")\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 77, "score": 150576.5114991556 }, { "content": "pub fn is_shell_script(path: &Path) -> bool {\n\n if path.is_file() {\n\n let mut file = std::fs::File::open(path).expect(\"could not open file\");\n\n let mut contents = String::new();\n\n match file.read_to_string(&mut contents) {\n\n Ok(_size) => contents.starts_with(\"#!\"),\n\n Err(_err) => false,\n\n }\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 78, "score": 150576.5114991556 }, { "content": "pub fn is_html_file(path: &Path) -> bool {\n\n let filename = path.file_name().and_then(|s| s.to_str()).unwrap_or(\"\");\n\n filename.ends_with(\".htm\") || filename.ends_with(\".html\")\n\n}\n\n\n", "file_path": 
"runner/src/files.rs", "rank": 79, "score": 150576.5114991556 }, { "content": "pub fn to_string(path: PathBuf) -> String {\n\n path.to_str().unwrap().to_string()\n\n}\n", "file_path": "runner/src/files.rs", "rank": 80, "score": 150576.5114991556 }, { "content": "pub fn is_build_file(path: &Path) -> bool {\n\n let filename = path.file_name().and_then(|s| s.to_str()).unwrap_or(\"\");\n\n filename == \"BUILD\"\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 81, "score": 150576.5114991556 }, { "content": "pub fn is_javascript_file(path: &Path) -> bool {\n\n let filename = path.file_name().and_then(|s| s.to_str()).unwrap_or(\"\");\n\n filename.ends_with(\".js\") || filename.ends_with(\".mjs\")\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 82, "score": 150576.5114991556 }, { "content": "pub fn is_yaml_file(path: &Path) -> bool {\n\n let filename = path.file_name().and_then(|s| s.to_str()).unwrap_or(\"\");\n\n filename.ends_with(\".yaml\") || filename.ends_with(\".yml\")\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 83, "score": 150576.5114991556 }, { "content": "pub fn is_typescript_file(path: &Path) -> bool {\n\n let filename = path.file_name().and_then(|s| s.to_str()).unwrap_or(\"\");\n\n filename.ends_with(\".ts\") || filename.ends_with(\".tsx\")\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 84, "score": 150576.5114991556 }, { "content": "pub fn read_file(path: &Path) -> String {\n\n let mut file = std::fs::File::open(path).expect(\"could not open file\");\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)\n\n .expect(\"could not read file contents\");\n\n contents\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 85, "score": 150576.5114991556 }, { "content": "/// Return whether the provided path refers to a markdown file (`*.md`)\n\npub fn is_markdown_file(path: &Path) -> bool {\n\n let filename = path.file_name().and_then(|s| s.to_str()).unwrap_or(\"\");\n\n 
filename.ends_with(\".md\")\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 86, "score": 150576.5114991556 }, { "content": "pub fn is_toml_file(path: &Path) -> bool {\n\n let filename = path.file_name().and_then(|s| s.to_str()).unwrap_or(\"\");\n\n filename.ends_with(\".toml\")\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 87, "score": 150576.5114991556 }, { "content": "/// Helper to inject a (single) gRPC request message via a notification channel,\n\n/// in the same manner as the gRPC pseudo-Node does, and collect a (single)\n\n/// response.\n\npub fn invoke_grpc_method<R, Q>(\n\n method_name: &str,\n\n req: &R,\n\n invocation_channel: &crate::io::Sender<Invocation>,\n\n) -> Result<Q>\n\nwhere\n\n R: prost::Message,\n\n Q: prost::Message + Default,\n\n{\n\n let rsp_receiver = invoke_grpc_method_stream(method_name, req, invocation_channel)?;\n\n // Read a single encapsulated response.\n\n let result = rsp_receiver.receive();\n\n rsp_receiver.close().expect(\"failed to close channel\");\n\n let grpc_rsp = result.map_err(|status| {\n\n error!(\"failed to receive response: {:?}\", status);\n\n build_status(\n\n rpc::Code::Internal,\n\n &format!(\"failed to receive gRPC response: {:?}\", status),\n\n )\n\n })?;\n\n let (rsp, _last) = decap_response(grpc_rsp)?;\n\n\n\n Ok(rsp)\n\n}\n\n\n", "file_path": "sdk/rust/oak/src/grpc/mod.rs", "rank": 88, "score": 148630.16176441155 }, { "content": "/// Return whether the provided path refers to a `fuzz` crate for fuzz testing with `cargo-fuzz`.\n\npub fn is_fuzzing_toml_file(path: &Path) -> bool {\n\n format!(\"{:?}\", path).contains(\"/fuzz/\")\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 89, "score": 148339.24691914773 }, { "content": "/// Return whether the provided path refers to a source file that can be formatted by clang-tidy.\n\npub fn is_clang_format_file(path: &Path) -> bool {\n\n let filename = path.file_name().and_then(|s| s.to_str()).unwrap_or(\"\");\n\n 
filename.ends_with(\".cc\")\n\n || filename.ends_with(\".h\")\n\n || filename.ends_with(\".proto\")\n\n || filename.ends_with(\".java\")\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 90, "score": 148333.7457698613 }, { "content": "#[allow(dead_code)]\n\npub fn channel_create<T: Encodable + Decodable>(\n\n runtime: &RuntimeProxy,\n\n name: &str,\n\n label: &Label,\n\n) -> Result<(Sender<T>, Receiver<T>), OakStatus> {\n\n let (wh, rh) = runtime.channel_create(name, label)?;\n\n Ok((Sender::<T>::new(wh.into()), Receiver::<T>::new(rh.into())))\n\n}\n\n\n", "file_path": "oak_runtime/src/io.rs", "rank": 91, "score": 148333.7457698613 }, { "content": "// Get all the files that have been modified in the commits specified by `commits`. Does not include\n\n// new files, unless they are added to git. If present, `commits.commits` must be a positive number.\n\n// If it is zero or negative, only the last commit will be considered for finding the modified\n\n// files. If `commits.commits` is not present, all files will be considered.\n\npub fn modified_files(commits: &Commits) -> ModifiedContent {\n\n let files = commits.commits.map(|commits| {\n\n let vec = Command::new(\"git\")\n\n .args(&[\n\n \"diff\",\n\n \"--name-only\",\n\n &format!(\"HEAD~{}\", std::cmp::max(1, commits)),\n\n ])\n\n .output()\n\n .expect(\"could not get modified files\")\n\n .stdout;\n\n\n\n // Extract the file names from the git output\n\n String::from_utf8(vec)\n\n .expect(\"could not convert to string\")\n\n .split('\\n')\n\n .map(|s| format!(\"./{}\", s))\n\n .collect()\n\n });\n\n ModifiedContent { files }\n\n}\n\n\n", "file_path": "runner/src/diffs.rs", "rank": 92, "score": 148333.7457698613 }, { "content": "/// Return whether the provided path refers to a source file in a programming language.\n\npub fn is_source_code_file(path: &Path) -> bool {\n\n let filename = path.file_name().and_then(|s| s.to_str()).unwrap_or(\"\");\n\n filename.ends_with(\".cc\")\n\n || 
filename.ends_with(\".h\")\n\n || filename.ends_with(\".rs\")\n\n || filename.ends_with(\".proto\")\n\n || filename.ends_with(\".js\")\n\n || filename.ends_with(\".go\")\n\n || filename.ends_with(\".java\")\n\n}\n\n\n", "file_path": "runner/src/files.rs", "rank": 93, "score": 148333.7457698613 }, { "content": "/// Check this handle is valid.\n\npub fn is_valid(handle: Handle) -> bool {\n\n handle != oak_abi::INVALID_HANDLE\n\n}\n\n\n", "file_path": "sdk/rust/oak/src/handle.rs", "rank": 94, "score": 148333.7457698613 }, { "content": "pub fn process_gone(raw_pid: i32) -> bool {\n\n // Shell out to `ps` as there's no portable Rust equivalent.\n\n let mut cmd = std::process::Command::new(\"ps\");\n\n cmd.args(&[\"-p\", &format!(\"{}\", raw_pid)]);\n\n let mut child = cmd\n\n .stdin(std::process::Stdio::null())\n\n .stdout(std::process::Stdio::null())\n\n .stderr(std::process::Stdio::null())\n\n .spawn()\n\n .unwrap_or_else(|err| panic!(\"could not spawn command: {:?}: {}\", cmd, err));\n\n let output = child.wait().expect(\"could not get exit status\");\n\n // ps -p has success exit code if the pid exists.\n\n !output.success()\n\n}\n\n\n", "file_path": "runner/src/internal.rs", "rank": 95, "score": 148333.7457698613 }, { "content": "pub fn run_examples(opt: &RunExamples) -> Step {\n\n let examples: Vec<Example> = example_toml_files(&opt.commits)\n\n .map(|path| {\n\n toml::from_str(&read_file(&path)).unwrap_or_else(|err| {\n\n panic!(\"could not parse example manifest file {:?}: {}\", path, err)\n\n })\n\n })\n\n .filter(|example: &Example| !example.has_functions_app())\n\n .collect();\n\n Step::Multiple {\n\n name: \"examples\".to_string(),\n\n /// TODO(#396): Check that all the example folders are covered by an entry here, or\n\n /// explicitly ignored. 
This will probably require pulling out the `Vec<Example>` to a\n\n /// top-level method first.\n\n steps: examples\n\n .iter()\n\n .filter(|example| match &opt.example_name {\n\n Some(example_name) => &example.name == example_name,\n\n None => true,\n\n })\n\n .filter(|example| {\n\n example.applications.is_empty()\n\n || example.applications.get(&opt.application_variant).is_some()\n\n })\n\n .map(|example| ClassicExample::new(example, opt.clone()))\n\n .map(|example| run_example(&example))\n\n .collect(),\n\n }\n\n}\n\n\n", "file_path": "runner/src/examples.rs", "rank": 96, "score": 148333.7457698613 }, { "content": "/// Uses the current node's label-downgrading privilege to create a new channel for transmission of\n\n/// [`Encodable`] and [`Decodable`] types.\n\npub fn channel_create_with_downgrade<T: Encodable + Decodable>(\n\n runtime: &RuntimeProxy,\n\n name: &str,\n\n label: &Label,\n\n) -> Result<(Sender<T>, Receiver<T>), OakStatus> {\n\n let (wh, rh) = runtime.channel_create_with_downgrade(name, label)?;\n\n Ok((Sender::<T>::new(wh.into()), Receiver::<T>::new(rh.into())))\n\n}\n", "file_path": "oak_runtime/src/io.rs", "rank": 97, "score": 146204.78070568366 }, { "content": "/// Converts a `camelCase` or `SCREAMING_SNAKE_CASE` identifier to a `lower_snake` case Rust field\n\n/// identifier.\n\npub fn to_snake(s: &str) -> String {\n\n let mut ident = s.to_snake_case();\n\n\n\n // Use a raw identifier if the identifier matches a Rust keyword:\n\n // https://doc.rust-lang.org/reference/keywords.html.\n\n match ident.as_str() {\n\n // 2015 strict keywords.\n\n | \"as\" | \"break\" | \"const\" | \"continue\" | \"else\" | \"enum\" | \"false\"\n\n | \"fn\" | \"for\" | \"if\" | \"impl\" | \"in\" | \"let\" | \"loop\" | \"match\" | \"mod\" | \"move\" | \"mut\"\n\n | \"pub\" | \"ref\" | \"return\" | \"static\" | \"struct\" | \"trait\" | \"true\"\n\n | \"type\" | \"unsafe\" | \"use\" | \"where\" | \"while\"\n\n // 2018 strict keywords.\n\n | \"dyn\"\n\n // 2015 reserved 
keywords.\n\n | \"abstract\" | \"become\" | \"box\" | \"do\" | \"final\" | \"macro\" | \"override\" | \"priv\" | \"typeof\"\n\n | \"unsized\" | \"virtual\" | \"yield\"\n\n // 2018 reserved keywords.\n\n | \"async\" | \"await\" | \"try\" => ident.insert_str(0, \"r#\"),\n\n // the following keywords are not supported as raw identifiers and are therefore suffixed with an underscore.\n\n \"self\" | \"super\" | \"extern\" | \"crate\" => ident += \"_\",\n\n _ => (),\n\n }\n\n ident\n\n}\n\n\n", "file_path": "third_party/prost/prost-build/src/ident.rs", "rank": 98, "score": 146199.5697254173 }, { "content": "/// Computes a SHA-256 digest of `bytes` and returns it in a hex encoded string.\n\npub fn get_sha256_hex(bytes: &[u8]) -> String {\n\n let hash_value = get_sha256(bytes);\n\n hex::encode(hash_value)\n\n}\n", "file_path": "oak_sign/src/lib.rs", "rank": 99, "score": 146199.5697254173 } ]
Rust
types/src/proof/proptest_proof.rs
PragmaTwice/diem
a290b0859a6152a5ffd6f85773a875f17334adac
use crate::proof::{ definition::MAX_ACCUMULATOR_PROOF_DEPTH, AccumulatorConsistencyProof, AccumulatorProof, AccumulatorRangeProof, SparseMerkleLeafNode, SparseMerkleProof, SparseMerkleRangeProof, TransactionAccumulatorSummary, }; use diem_crypto::{ hash::{ CryptoHash, CryptoHasher, ACCUMULATOR_PLACEHOLDER_HASH, SPARSE_MERKLE_PLACEHOLDER_HASH, }, HashValue, }; use proptest::{collection::vec, prelude::*}; fn arb_non_placeholder_accumulator_sibling() -> impl Strategy<Value = HashValue> { any::<HashValue>().prop_filter("Filter out placeholder sibling.", |x| { *x != *ACCUMULATOR_PLACEHOLDER_HASH }) } fn arb_accumulator_sibling() -> impl Strategy<Value = HashValue> { prop_oneof![ arb_non_placeholder_accumulator_sibling(), Just(*ACCUMULATOR_PLACEHOLDER_HASH), ] } fn arb_non_placeholder_sparse_merkle_sibling() -> impl Strategy<Value = HashValue> { any::<HashValue>().prop_filter("Filter out placeholder sibling.", |x| { *x != *SPARSE_MERKLE_PLACEHOLDER_HASH }) } fn arb_sparse_merkle_sibling() -> impl Strategy<Value = HashValue> { prop_oneof![ arb_non_placeholder_sparse_merkle_sibling(), Just(*SPARSE_MERKLE_PLACEHOLDER_HASH), ] } impl<H> Arbitrary for AccumulatorProof<H> where H: CryptoHasher + 'static, { type Parameters = (); type Strategy = BoxedStrategy<Self>; fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { (0..=MAX_ACCUMULATOR_PROOF_DEPTH) .prop_flat_map(|len| { if len == 0 { Just(vec![]).boxed() } else { ( vec(arb_accumulator_sibling(), len - 1), arb_non_placeholder_accumulator_sibling(), ) .prop_map(|(mut siblings, last_sibling)| { siblings.push(last_sibling); siblings }) .boxed() } }) .prop_map(AccumulatorProof::<H>::new) .boxed() } } impl<V> Arbitrary for SparseMerkleProof<V> where V: std::fmt::Debug + CryptoHash, { type Parameters = (); type Strategy = BoxedStrategy<Self>; fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { ( any::<Option<SparseMerkleLeafNode>>(), (0..=256usize).prop_flat_map(|len| { if len == 0 { Just(vec![]).boxed() } else 
{ ( arb_non_placeholder_sparse_merkle_sibling(), vec(arb_sparse_merkle_sibling(), len), ) .prop_map(|(first_sibling, mut siblings)| { siblings[0] = first_sibling; siblings }) .boxed() } }), ) .prop_map(|(leaf, siblings)| SparseMerkleProof::new(leaf, siblings)) .boxed() } } impl Arbitrary for AccumulatorConsistencyProof { type Parameters = (); type Strategy = BoxedStrategy<Self>; fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { vec( arb_non_placeholder_accumulator_sibling(), 0..=MAX_ACCUMULATOR_PROOF_DEPTH, ) .prop_map(AccumulatorConsistencyProof::new) .boxed() } } impl<H> Arbitrary for AccumulatorRangeProof<H> where H: CryptoHasher, { type Parameters = (); type Strategy = BoxedStrategy<Self>; fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { ( vec( arb_non_placeholder_accumulator_sibling(), 0..MAX_ACCUMULATOR_PROOF_DEPTH, ), vec(arb_accumulator_sibling(), 0..MAX_ACCUMULATOR_PROOF_DEPTH), ) .prop_map(|(left_siblings, right_siblings)| { AccumulatorRangeProof::new(left_siblings, right_siblings) }) .boxed() } } impl Arbitrary for SparseMerkleRangeProof { type Parameters = (); type Strategy = BoxedStrategy<Self>; fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { vec(arb_sparse_merkle_sibling(), 0..=256) .prop_map(Self::new) .boxed() } } impl Arbitrary for TransactionAccumulatorSummary { type Parameters = (); type Strategy = BoxedStrategy<Self>; fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { let arb_version = 0u64..=256; arb_version .prop_map(|version| { let num_leaves = version + 1; let num_subtrees = num_leaves.count_ones() as u64; let mock_subtrees = (0..num_subtrees) .map(HashValue::from_u64) .collect::<Vec<_>>(); let consistency_proof = AccumulatorConsistencyProof::new(mock_subtrees); Self::try_from_genesis_proof(consistency_proof, version).unwrap() }) .boxed() } }
use crate::proof::{ definition::MAX_ACCUMULATOR_PROOF_DEPTH, AccumulatorConsistencyProof, AccumulatorProof, AccumulatorRangeProof, SparseMerkleLeafNode, SparseMerkleProof, SparseMerkleRangeProof, TransactionAccumulatorSummary, }; use diem_crypto::{ hash::{ CryptoHash, CryptoHasher, ACCUMULATOR_PLACEHOLDER_HASH, SPARSE_MERKLE_PLACEHOLDER_HASH, }, HashValue, }; use proptest::{collection::vec, prelude::*}; fn arb_non_placeholder_accumulator_sibling() -> impl Strategy<Value = HashValue> { any::<HashValue>().prop_filter("Filter out placeholder sibling.", |x| { *x != *ACCUMULATOR_PLACEHOLDER_HASH }) } fn arb_accumulator_sibling() -> impl Strategy<Value = HashValue> { prop_oneof![ arb_non_placeholder_accumulator_sibling(), Just(*ACCUMULATOR_PLACEHOLDER_HASH), ] } fn arb_non_placeholder_sparse_merkle_sibling() -> impl Strategy<Value = HashValue> { any::<HashValue>().prop_filter("Filter out placeholder sibling.", |x| { *x != *SPARSE_MERKLE_PLACEHOLDER_HASH }) } fn arb_sparse_merkle_sibling() -> impl Strategy<Value = HashValue> { prop_oneof![ arb_non_placeholder_sparse_merkle_sibling(), Just(*SPARSE_MERKLE_PLACEHOLDER_HASH), ] } impl<H> Arbitrary for AccumulatorProof<H> where H: CryptoHasher + 'static, { type Parameters = (); type Strategy = BoxedStrategy<Self>; fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { (0..=MAX_ACCUMULATOR_PROOF_DEPTH) .prop_flat_map(|len| {
}) .prop_map(AccumulatorProof::<H>::new) .boxed() } } impl<V> Arbitrary for SparseMerkleProof<V> where V: std::fmt::Debug + CryptoHash, { type Parameters = (); type Strategy = BoxedStrategy<Self>; fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { ( any::<Option<SparseMerkleLeafNode>>(), (0..=256usize).prop_flat_map(|len| { if len == 0 { Just(vec![]).boxed() } else { ( arb_non_placeholder_sparse_merkle_sibling(), vec(arb_sparse_merkle_sibling(), len), ) .prop_map(|(first_sibling, mut siblings)| { siblings[0] = first_sibling; siblings }) .boxed() } }), ) .prop_map(|(leaf, siblings)| SparseMerkleProof::new(leaf, siblings)) .boxed() } } impl Arbitrary for AccumulatorConsistencyProof { type Parameters = (); type Strategy = BoxedStrategy<Self>; fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { vec( arb_non_placeholder_accumulator_sibling(), 0..=MAX_ACCUMULATOR_PROOF_DEPTH, ) .prop_map(AccumulatorConsistencyProof::new) .boxed() } } impl<H> Arbitrary for AccumulatorRangeProof<H> where H: CryptoHasher, { type Parameters = (); type Strategy = BoxedStrategy<Self>; fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { ( vec( arb_non_placeholder_accumulator_sibling(), 0..MAX_ACCUMULATOR_PROOF_DEPTH, ), vec(arb_accumulator_sibling(), 0..MAX_ACCUMULATOR_PROOF_DEPTH), ) .prop_map(|(left_siblings, right_siblings)| { AccumulatorRangeProof::new(left_siblings, right_siblings) }) .boxed() } } impl Arbitrary for SparseMerkleRangeProof { type Parameters = (); type Strategy = BoxedStrategy<Self>; fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { vec(arb_sparse_merkle_sibling(), 0..=256) .prop_map(Self::new) .boxed() } } impl Arbitrary for TransactionAccumulatorSummary { type Parameters = (); type Strategy = BoxedStrategy<Self>; fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { let arb_version = 0u64..=256; arb_version .prop_map(|version| { let num_leaves = version + 1; let num_subtrees = num_leaves.count_ones() as u64; let 
mock_subtrees = (0..num_subtrees) .map(HashValue::from_u64) .collect::<Vec<_>>(); let consistency_proof = AccumulatorConsistencyProof::new(mock_subtrees); Self::try_from_genesis_proof(consistency_proof, version).unwrap() }) .boxed() } }
if len == 0 { Just(vec![]).boxed() } else { ( vec(arb_accumulator_sibling(), len - 1), arb_non_placeholder_accumulator_sibling(), ) .prop_map(|(mut siblings, last_sibling)| { siblings.push(last_sibling); siblings }) .boxed() }
if_condition
[ { "content": "fn value_strategy<V: Arbitrary + Clone + 'static>(\n\n keep_rate: f64,\n\n) -> impl Strategy<Value = Option<V>> {\n\n let value_strategy = any::<V>();\n\n proptest::option::weighted(keep_rate, value_strategy)\n\n}\n\n\n\nimpl<V: Arbitrary + Debug + Clone> TransactionGen<V> {\n\n pub fn materialize<K: Clone + Eq + Ord>(self, universe: &[K]) -> Transaction<K, V> {\n\n let mut keys_modified = BTreeSet::new();\n\n let mut actual_writes = vec![];\n\n let mut skipped_writes = vec![];\n\n for (idx, value) in self.keys_modified.into_iter() {\n\n let key = universe[idx.index(universe.len())].clone();\n\n if !keys_modified.contains(&key) {\n\n keys_modified.insert(key.clone());\n\n match value {\n\n None => skipped_writes.push(key),\n\n Some(v) => actual_writes.push((key, v)),\n\n };\n", "file_path": "diem-move/parallel-executor/src/proptest_types/types.rs", "rank": 4, "score": 349470.5959896882 }, { "content": "fn freeze_ty(sp!(tloc, t): H::Type) -> H::Type {\n\n use H::Type_ as T;\n\n match t {\n\n T::Single(s) => sp(tloc, T::Single(freeze_single(s))),\n\n t => sp(tloc, t),\n\n }\n\n}\n\n\n", "file_path": "language/move-compiler/src/hlir/translate.rs", "rank": 5, "score": 315791.9008853048 }, { "content": "fn operator_strategy<V: Arbitrary + Clone>() -> impl Strategy<Value = Operator<V>> {\n\n prop_oneof![\n\n 2 => any::<V>().prop_map(Operator::Insert),\n\n 1 => Just(Operator::Remove),\n\n 1 => Just(Operator::Skip),\n\n 4 => Just(Operator::Read),\n\n ]\n\n}\n\n\n", "file_path": "diem-move/mvhashmap/src/unit_tests/proptest_types.rs", "rank": 6, "score": 315079.92980718974 }, { "content": "// This function generates an arbitrary serde_json::Value.\n\npub fn arb_json_value() -> impl Strategy<Value = Value> {\n\n let leaf = prop_oneof![\n\n Just(Value::Null),\n\n any::<bool>().prop_map(Value::Bool),\n\n any::<f64>().prop_map(|n| serde_json::json!(n)),\n\n any::<String>().prop_map(Value::String),\n\n ];\n\n\n\n leaf.prop_recursive(\n\n 10, // 10 levels deep\n\n 
256, // Maximum size of 256 nodes\n\n 10, // Up to 10 items per collection\n\n |inner| {\n\n prop_oneof![\n\n prop::collection::vec(inner.clone(), 0..10).prop_map(Value::Array),\n\n prop::collection::hash_map(any::<String>(), inner, 0..10)\n\n .prop_map(|map| serde_json::json!(map)),\n\n ]\n\n },\n\n )\n\n}\n", "file_path": "types/src/proptest_types.rs", "rank": 7, "score": 301999.7031366088 }, { "content": "// This generates an arbitrary BlockType enum.\n\nfn arb_block_type() -> impl Strategy<Value = BlockType> {\n\n prop_oneof![\n\n arb_block_type_proposal(),\n\n Just(BlockType::NilBlock),\n\n Just(BlockType::Genesis),\n\n ]\n\n}\n\n\n", "file_path": "consensus/safety-rules/src/fuzzing_utils.rs", "rank": 8, "score": 297808.0574909957 }, { "content": "fn arb_transaction_list_with_proof() -> impl Strategy<Value = TransactionListWithProof> {\n\n (\n\n vec(\n\n (\n\n any::<SignedTransaction>(),\n\n vec(any::<ContractEvent>(), 0..10),\n\n ),\n\n 0..10,\n\n ),\n\n any::<TransactionInfoListWithProof>(),\n\n )\n\n .prop_flat_map(|(transaction_and_events, proof)| {\n\n let transactions: Vec<_> = transaction_and_events\n\n .clone()\n\n .into_iter()\n\n .map(|(transaction, _event)| Transaction::UserTransaction(transaction))\n\n .collect();\n\n let events: Vec<_> = transaction_and_events\n\n .into_iter()\n\n .map(|(_transaction, event)| event)\n", "file_path": "types/src/proptest_types.rs", "rank": 9, "score": 297793.66322708107 }, { "content": "/// This produces the genesis block\n\npub fn genesis_strategy() -> impl Strategy<Value = Block> {\n\n Just(Block::make_genesis_block())\n\n}\n\n\n\nprop_compose! 
{\n\n /// This produces an unmoored block, with arbitrary parent & QC ancestor\n\n pub fn unmoored_block(ancestor_id_strategy: impl Strategy<Value = HashValue>)(\n\n ancestor_id in ancestor_id_strategy,\n\n )(\n\n block in new_proposal(\n\n ancestor_id,\n\n Round::arbitrary(),\n\n proptests::arb_signer(),\n\n certificate_for_genesis(),\n\n )\n\n ) -> Block {\n\n block\n\n }\n\n}\n\n\n", "file_path": "consensus/consensus-types/src/block_test_utils.rs", "rank": 10, "score": 293584.6657486459 }, { "content": "/// Offers the genesis block.\n\npub fn leaf_strategy() -> impl Strategy<Value = Block> {\n\n genesis_strategy().boxed()\n\n}\n\n\n\nprop_compose! {\n\n /// This produces a block with an invalid id (and therefore signature)\n\n /// given a valid block\n\n pub fn fake_id(block_strategy: impl Strategy<Value = Block>)\n\n (fake_id in HashValue::arbitrary(),\n\n block in block_strategy) -> Block {\n\n Block {\n\n id: fake_id,\n\n block_data: BlockData::new_proposal(\n\n block.payload().unwrap().clone(),\n\n block.author().unwrap(),\n\n block.round(),\n\n diem_infallible::duration_since_epoch().as_micros() as u64,\n\n block.quorum_cert().clone(),\n\n ),\n\n signature: Some(block.signature().unwrap().clone()),\n", "file_path": "consensus/consensus-types/src/block_test_utils.rs", "rank": 11, "score": 293584.66574864584 }, { "content": "fn test_data_strategy() -> impl Strategy<Value = TestData> {\n\n let db = test_execution_with_storage_impl();\n\n let latest_ver = db.get_latest_version().unwrap();\n\n\n\n (0..=latest_ver)\n\n .prop_flat_map(move |txn_start_ver| (Just(txn_start_ver), txn_start_ver..=latest_ver))\n\n .prop_flat_map(move |(txn_start_ver, state_snapshot_ver)| {\n\n (\n\n Just(txn_start_ver),\n\n prop_oneof![Just(Some(state_snapshot_ver)), Just(None)],\n\n state_snapshot_ver..=latest_ver,\n\n )\n\n })\n\n .prop_map(\n\n move |(txn_start_ver, state_snapshot_ver, target_ver)| TestData {\n\n db: Arc::clone(&db),\n\n txn_start_ver,\n\n state_snapshot_ver,\n\n 
target_ver,\n\n latest_ver,\n\n },\n\n )\n\n}\n\n\n", "file_path": "storage/backup/backup-cli/src/backup_types/tests.rs", "rank": 12, "score": 290033.76521788875 }, { "content": "fn invalid_identifier_strategy() -> impl Strategy<Value = String> {\n\n static ALLOWED_IDENTIFIERS_REGEX: Lazy<Regex> = Lazy::new(|| {\n\n // Need to add anchors to ensure the entire string is matched.\n\n Regex::new(&format!(\"^(?:{})$\", ALLOWED_IDENTIFIERS)).unwrap()\n\n });\n\n\n\n \".*\".prop_filter(\"Valid identifiers should not be generated\", |s| {\n\n // Most strings won't match the regex above, so local rejects are OK.\n\n !ALLOWED_IDENTIFIERS_REGEX.is_match(s)\n\n })\n\n}\n\n\n\n/// Ensure that Identifier instances serialize into strings directly, with no wrapper.\n", "file_path": "language/move-core/types/src/unit_tests/identifier_test.rs", "rank": 13, "score": 290033.7652178887 }, { "content": "pub fn bad_txn_strategy() -> impl Strategy<Value = Arc<dyn AUTransactionGen + 'static>> {\n\n prop_oneof![\n\n 1 => any_with::<SequenceNumberMismatchGen>((0, 10_000)).prop_map(SequenceNumberMismatchGen::arced),\n\n 1 => any_with::<InvalidAuthkeyGen>(()).prop_map(InvalidAuthkeyGen::arced),\n\n 1 => any_with::<InsufficientBalanceGen>((1, 20_000)).prop_map(InsufficientBalanceGen::arced),\n\n ]\n\n}\n", "file_path": "diem-move/e2e-tests/src/account_universe/bad_transaction.rs", "rank": 14, "score": 288297.7525262744 }, { "content": "// hack strategy to generate a length from `impl Into<SizeRange>`\n\nfn arb_length(size_range: impl Into<SizeRange>) -> impl Strategy<Value = usize> {\n\n vec(Just(()), size_range).prop_map(|vec| vec.len())\n\n}\n\n\n", "file_path": "types/src/unit_tests/trusted_state_test.rs", "rank": 15, "score": 282203.5517140558 }, { "content": "fn type_(context: &Context, sp!(loc, ty_): N::Type) -> H::Type {\n\n use H::Type_ as HT;\n\n use N::{TypeName_ as TN, Type_ as NT};\n\n let t_ = match ty_ {\n\n NT::Unit => HT::Unit,\n\n NT::Apply(None, n, tys) => {\n\n 
crate::shared::ast_debug::print_verbose(&NT::Apply(None, n, tys));\n\n panic!(\"ICE kind not expanded: {:#?}\", loc)\n\n }\n\n NT::Apply(Some(_), sp!(_, TN::Multiple(_)), ss) => HT::Multiple(single_types(context, ss)),\n\n _ => HT::Single(single_type(context, sp(loc, ty_))),\n\n };\n\n sp(loc, t_)\n\n}\n\n\n\n//**************************************************************************************************\n\n// Statements\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-compiler/src/hlir/translate.rs", "rank": 16, "score": 277269.92418196413 }, { "content": "fn needs_freeze(context: &Context, sp!(_, actual): &H::Type, sp!(_, expected): &H::Type) -> Freeze {\n\n use H::Type_ as T;\n\n match (actual, expected) {\n\n (T::Unit, T::Unit) => Freeze::NotNeeded,\n\n (T::Single(actaul_s), T::Single(actual_e)) => {\n\n let needs = needs_freeze_single(actaul_s, actual_e);\n\n if needs {\n\n Freeze::Point\n\n } else {\n\n Freeze::NotNeeded\n\n }\n\n }\n\n (T::Multiple(actaul_ss), T::Multiple(actual_es)) => {\n\n assert!(actaul_ss.len() == actual_es.len());\n\n let points = actaul_ss\n\n .iter()\n\n .zip(actual_es)\n\n .map(|(a, e)| needs_freeze_single(a, e))\n\n .collect::<Vec<_>>();\n\n if points.iter().any(|needs| *needs) {\n", "file_path": "language/move-compiler/src/hlir/translate.rs", "rank": 17, "score": 277080.0856437202 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn arb_diemnet_addr() -> impl Strategy<Value = NetworkAddress> {\n\n let arb_transport_protos = prop_oneof![\n\n any::<u16>().prop_map(|port| vec![Protocol::Memory(port)]),\n\n any::<(Ipv4Addr, u16)>()\n\n .prop_map(|(addr, port)| vec![Protocol::Ip4(addr), Protocol::Tcp(port)]),\n\n any::<(Ipv6Addr, u16)>()\n\n .prop_map(|(addr, port)| vec![Protocol::Ip6(addr), Protocol::Tcp(port)]),\n\n any::<(DnsName, u16)>()\n\n .prop_map(|(name, port)| vec![Protocol::Dns(name), Protocol::Tcp(port)]),\n\n 
any::<(DnsName, u16)>()\n\n .prop_map(|(name, port)| vec![Protocol::Dns4(name), Protocol::Tcp(port)]),\n\n any::<(DnsName, u16)>()\n\n .prop_map(|(name, port)| vec![Protocol::Dns6(name), Protocol::Tcp(port)]),\n\n ];\n\n let arb_diemnet_protos = any::<(x25519::PublicKey, u8)>()\n\n .prop_map(|(pubkey, hs)| vec![Protocol::NoiseIK(pubkey), Protocol::Handshake(hs)]);\n\n\n\n (arb_transport_protos, arb_diemnet_protos).prop_map(\n\n |(mut transport_protos, mut diemnet_protos)| {\n\n transport_protos.append(&mut diemnet_protos);\n", "file_path": "types/src/network_address/mod.rs", "rank": 18, "score": 276900.618004167 }, { "content": "// Record metrics by method, operation_id and status.\n\n// The operation_id is the id for the request handler.\n\n// Should use same `operationId` defined in `openapi.yaml` whenever possible.\n\npub fn metrics(operation_id: &'static str) -> Log<impl Fn(Info) + Copy> {\n\n let func = move |info: Info| {\n\n HISTOGRAM\n\n .with_label_values(&[\n\n info.method().to_string().as_str(),\n\n operation_id,\n\n info.status().as_u16().to_string().as_str(),\n\n ])\n\n .observe(info.elapsed().as_secs_f64());\n\n };\n\n custom(func)\n\n}\n\n\n", "file_path": "api/src/metrics.rs", "rank": 19, "score": 273037.4856820188 }, { "content": "fn freeze(context: &mut Context, result: &mut Block, expected_type: &H::Type, e: H::Exp) -> H::Exp {\n\n use H::{Type_ as T, UnannotatedExp_ as E};\n\n\n\n match needs_freeze(context, &e.ty, expected_type) {\n\n Freeze::NotNeeded => e,\n\n Freeze::Point => freeze_point(e),\n\n\n\n Freeze::Sub(points) => {\n\n let loc = e.exp.loc;\n\n let actual_tys = match &e.ty.value {\n\n T::Multiple(v) => v.clone(),\n\n _ => unreachable!(\"ICE needs_freeze failed\"),\n\n };\n\n assert!(actual_tys.len() == points.len());\n\n let new_temps = actual_tys\n\n .into_iter()\n\n .map(|ty| (context.new_temp(loc, ty.clone()), ty))\n\n .collect::<Vec<_>>();\n\n\n\n let lvalues = new_temps\n", "file_path": 
"language/move-compiler/src/hlir/translate.rs", "rank": 20, "score": 272871.11866817554 }, { "content": "fn types(context: &mut Context, sp!(_, t_): H::Type) -> Vec<IR::Type> {\n\n use H::Type_ as T;\n\n match t_ {\n\n T::Unit => vec![],\n\n T::Single(st) => vec![single_type(context, st)],\n\n T::Multiple(ss) => ss.into_iter().map(|st| single_type(context, st)).collect(),\n\n }\n\n}\n\n\n\n//**************************************************************************************************\n\n// Commands\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-compiler/src/to_bytecode/translate.rs", "rank": 21, "score": 269232.6030097888 }, { "content": "fn encode_metrics(encoder: impl Encoder, whitelist: &'static [&'static str]) -> Vec<u8> {\n\n let mut metric_families = gather_metrics();\n\n if !whitelist.is_empty() {\n\n metric_families = whitelist_metrics(metric_families, whitelist);\n\n }\n\n let mut buffer = vec![];\n\n encoder.encode(&metric_families, &mut buffer).unwrap();\n\n\n\n NUM_METRICS\n\n .with_label_values(&[\"total_bytes\"])\n\n .inc_by(buffer.len() as u64);\n\n buffer\n\n}\n\n\n", "file_path": "crates/diem-metrics/src/metric_server.rs", "rank": 22, "score": 267017.35742570536 }, { "content": "fn basic_ops_impl(repeat_vec: impl RepeatVecMethods<&'static str>) {\n\n let mut repeat_vec = repeat_vec;\n\n\n\n repeat_vec.extend(\"foo\", 3);\n\n repeat_vec.extend(\"bar\", 4);\n\n repeat_vec.extend(\"baz\", 0);\n\n assert_eq!(repeat_vec.len(), 7);\n\n\n\n // Basic queries work.\n\n assert_eq!(repeat_vec.get(0), Some((&\"foo\", 0)));\n\n assert_eq!(repeat_vec.get(1), Some((&\"foo\", 1)));\n\n assert_eq!(repeat_vec.get(2), Some((&\"foo\", 2)));\n\n assert_eq!(repeat_vec.get(3), Some((&\"bar\", 0)));\n\n assert_eq!(repeat_vec.get(4), Some((&\"bar\", 1)));\n\n assert_eq!(repeat_vec.get(5), Some((&\"bar\", 2)));\n\n assert_eq!(repeat_vec.get(6), Some((&\"bar\", 3)));\n\n 
assert_eq!(repeat_vec.get(7), None);\n\n\n\n // Removing an element shifts all further elements to the left.\n\n repeat_vec.remove(1);\n", "file_path": "crates/diem-proptest-helpers/src/unit_tests/repeat_vec_tests.rs", "rank": 23, "score": 263003.494323566 }, { "content": "fn expected_types(context: &Context, loc: Loc, nss: Vec<Option<N::Type>>) -> H::Type {\n\n let any = || {\n\n sp(\n\n loc,\n\n H::SingleType_::Base(sp(loc, H::BaseType_::UnresolvedError)),\n\n )\n\n };\n\n let ss = nss\n\n .into_iter()\n\n .map(|sopt| sopt.map(|s| single_type(context, s)).unwrap_or_else(any))\n\n .collect::<Vec<_>>();\n\n H::Type_::from_vec(loc, ss)\n\n}\n\n\n", "file_path": "language/move-compiler/src/hlir/translate.rs", "rank": 24, "score": 261466.3859714217 }, { "content": "fn make_temps(context: &mut Context, loc: Loc, ty: H::Type) -> Vec<(Var, H::SingleType)> {\n\n use H::Type_ as T;\n\n match ty.value {\n\n T::Unit => vec![],\n\n T::Single(s) => vec![(context.new_temp(loc, s.clone()), s)],\n\n T::Multiple(ss) => ss\n\n .into_iter()\n\n .map(|s| (context.new_temp(loc, s.clone()), s))\n\n .collect(),\n\n }\n\n}\n\n\n", "file_path": "language/move-compiler/src/hlir/translate.rs", "rank": 25, "score": 261438.81314667093 }, { "content": "/// This produces a round that is often higher than the parent, but not\n\n/// too high\n\npub fn some_round(initial_round: Round) -> impl Strategy<Value = Round> {\n\n prop_oneof![\n\n 9 => Just(1 + initial_round),\n\n 1 => bigger_round(initial_round),\n\n ]\n\n}\n\n\n\nprop_compose! {\n\n /// This creates a child with a parent on its left, and a QC on the left\n\n /// of the parent. 
This, depending on branching, does not require the\n\n /// QC to always be an ancestor or the parent to always be the highest QC\n\n fn child(\n\n signer_strategy: impl Strategy<Value = ValidatorSigner>,\n\n block_forest_strategy: impl Strategy<Value = LinearizedBlockForest>,\n\n )(\n\n signer in signer_strategy,\n\n (forest_vec, parent_idx, qc_idx) in block_forest_strategy\n\n .prop_flat_map(|forest_vec| {\n\n let len = forest_vec.len();\n\n (Just(forest_vec), 0..len)\n", "file_path": "consensus/consensus-types/src/block_test_utils.rs", "rank": 26, "score": 261163.17428805045 }, { "content": "fn freeze_single(sp!(sloc, s): H::SingleType) -> H::SingleType {\n\n use H::SingleType_ as S;\n\n match s {\n\n S::Ref(true, inner) => sp(sloc, S::Ref(false, inner)),\n\n s => sp(sloc, s),\n\n }\n\n}\n\n\n", "file_path": "language/move-compiler/src/hlir/translate.rs", "rank": 27, "score": 260120.07271262724 }, { "content": "fn base_type(context: &Context, sp!(loc, nb_): N::Type) -> H::BaseType {\n\n use H::BaseType_ as HB;\n\n use N::Type_ as NT;\n\n let b_ = match nb_ {\n\n NT::Var(_) => panic!(\n\n \"ICE tvar not expanded: {}:{}-{}\",\n\n loc.file_hash(),\n\n loc.start(),\n\n loc.end()\n\n ),\n\n NT::Apply(None, n, tys) => {\n\n crate::shared::ast_debug::print_verbose(&NT::Apply(None, n, tys));\n\n panic!(\"ICE kind not expanded: {:#?}\", loc)\n\n }\n\n NT::Apply(Some(k), n, nbs) => HB::Apply(k, type_name(context, n), base_types(context, nbs)),\n\n NT::Param(tp) => HB::Param(tp),\n\n NT::UnresolvedError => HB::UnresolvedError,\n\n NT::Anything => HB::Unreachable,\n\n NT::Ref(_, _) | NT::Unit => {\n\n panic!(\n\n \"ICE type constraints failed {}:{}-{}\",\n\n loc.file_hash(),\n\n loc.start(),\n\n loc.end()\n\n )\n\n }\n\n };\n\n sp(loc, b_)\n\n}\n\n\n", "file_path": "language/move-compiler/src/hlir/translate.rs", "rank": 28, "score": 257370.6087002224 }, { "content": "fn single_types(context: &Context, ss: Vec<N::Type>) -> Vec<H::SingleType> {\n\n ss.into_iter().map(|s| 
single_type(context, s)).collect()\n\n}\n\n\n", "file_path": "language/move-compiler/src/hlir/translate.rs", "rank": 29, "score": 257370.6087002224 }, { "content": "fn single_type(context: &Context, sp!(loc, ty_): N::Type) -> H::SingleType {\n\n use H::SingleType_ as HS;\n\n use N::Type_ as NT;\n\n let s_ = match ty_ {\n\n NT::Ref(mut_, nb) => HS::Ref(mut_, base_type(context, *nb)),\n\n _ => HS::Base(base_type(context, sp(loc, ty_))),\n\n };\n\n sp(loc, s_)\n\n}\n\n\n", "file_path": "language/move-compiler/src/hlir/translate.rs", "rank": 30, "score": 257370.6087002224 }, { "content": "/// Helper to generate random bytes that can be used with proptest\n\n/// to generate a value following the passed strategy.\n\nfn corpus_from_strategy(strategy: impl Strategy) -> Vec<u8> {\n\n // randomly-seeded recording RNG\n\n let mut seed = [0u8; 32];\n\n let mut rng = rand::thread_rng();\n\n rng.fill_bytes(&mut seed);\n\n let recorder_rng = test_runner::TestRng::from_seed(RngAlgorithm::Recorder, &seed);\n\n let mut runner = TestRunner::new_with_rng(test_runner::Config::default(), recorder_rng);\n\n\n\n // generate the value\n\n strategy\n\n .new_tree(&mut runner)\n\n .expect(\"creating a new value should succeed\")\n\n .current();\n\n\n\n // dump the bytes\n\n runner.bytes_used()\n\n}\n\n\n", "file_path": "testsuite/diem-fuzzer/src/lib.rs", "rank": 31, "score": 257067.98024698353 }, { "content": "fn base_type(context: &mut Context, sp!(_, bt_): H::BaseType) -> IR::Type {\n\n use BuiltinTypeName_ as BT;\n\n use H::{BaseType_ as B, TypeName_ as TN};\n\n use IR::Type as IRT;\n\n match bt_ {\n\n B::Unreachable | B::UnresolvedError => {\n\n panic!(\"ICE should not have reached compilation if there are errors\")\n\n }\n\n B::Apply(_, sp!(_, TN::Builtin(sp!(_, BT::Address))), _) => IRT::Address,\n\n B::Apply(_, sp!(_, TN::Builtin(sp!(_, BT::Signer))), _) => IRT::Signer,\n\n B::Apply(_, sp!(_, TN::Builtin(sp!(_, BT::U8))), _) => IRT::U8,\n\n B::Apply(_, sp!(_, TN::Builtin(sp!(_, 
BT::U64))), _) => IRT::U64,\n\n B::Apply(_, sp!(_, TN::Builtin(sp!(_, BT::U128))), _) => IRT::U128,\n\n\n\n B::Apply(_, sp!(_, TN::Builtin(sp!(_, BT::Bool))), _) => IRT::Bool,\n\n B::Apply(_, sp!(_, TN::Builtin(sp!(_, BT::Vector))), mut args) => {\n\n assert!(\n\n args.len() == 1,\n\n \"ICE vector must have exactly 1 type argument\"\n\n );\n", "file_path": "language/move-compiler/src/to_bytecode/translate.rs", "rank": 32, "score": 253299.1772716171 }, { "content": "fn single_type(context: &mut Context, sp!(_, st_): H::SingleType) -> IR::Type {\n\n use H::SingleType_ as S;\n\n use IR::Type as IRT;\n\n match st_ {\n\n S::Base(bt) => base_type(context, bt),\n\n S::Ref(mut_, bt) => IRT::Reference(mut_, Box::new(base_type(context, bt))),\n\n }\n\n}\n\n\n", "file_path": "language/move-compiler/src/to_bytecode/translate.rs", "rank": 33, "score": 253299.1772716171 }, { "content": "fn base_types(context: &mut Context, bs: Vec<H::BaseType>) -> Vec<IR::Type> {\n\n bs.into_iter().map(|b| base_type(context, b)).collect()\n\n}\n\n\n", "file_path": "language/move-compiler/src/to_bytecode/translate.rs", "rank": 34, "score": 250796.87140032707 }, { "content": "fn types<'a>(context: &mut Context, tys: impl IntoIterator<Item = &'a E::Type>) {\n\n tys.into_iter().for_each(|ty| type_(context, ty))\n\n}\n\n\n", "file_path": "language/move-compiler/src/expansion/dependency_ordering.rs", "rank": 35, "score": 243537.93328443237 }, { "content": "fn needs_freeze_single(sp!(_, actual): &H::SingleType, sp!(_, expected): &H::SingleType) -> bool {\n\n use H::SingleType_ as T;\n\n matches!((actual, expected), (T::Ref(true, _), T::Ref(false, _)))\n\n}\n\n\n", "file_path": "language/move-compiler/src/hlir/translate.rs", "rank": 36, "score": 238241.77258843996 }, { "content": "fn seen_structs_type(seen: &mut BTreeSet<(ModuleIdent, StructName)>, sp!(_, t_): &H::Type) {\n\n use H::Type_ as T;\n\n match t_ {\n\n T::Unit => (),\n\n T::Single(st) => seen_structs_single_type(seen, st),\n\n T::Multiple(ss) 
=> ss.iter().for_each(|st| seen_structs_single_type(seen, st)),\n\n }\n\n}\n\n\n", "file_path": "language/move-compiler/src/to_bytecode/translate.rs", "rank": 37, "score": 237044.66073706478 }, { "content": "fn base_types<R: std::iter::FromIterator<H::BaseType>>(\n\n context: &Context,\n\n tys: impl IntoIterator<Item = N::Type>,\n\n) -> R {\n\n tys.into_iter().map(|t| base_type(context, t)).collect()\n\n}\n\n\n", "file_path": "language/move-compiler/src/hlir/translate.rs", "rank": 38, "score": 234861.52845180128 }, { "content": "fn use_tmp(var: Var) -> H::UnannotatedExp_ {\n\n use H::UnannotatedExp_ as E;\n\n E::Move {\n\n from_user: false,\n\n var,\n\n }\n\n}\n\n\n", "file_path": "language/move-compiler/src/hlir/translate.rs", "rank": 39, "score": 233875.0333153603 }, { "content": "fn check_elem_layout(ty: &Type, v: &Container) -> PartialVMResult<()> {\n\n match (ty, v) {\n\n (Type::U8, Container::VecU8(_))\n\n | (Type::U64, Container::VecU64(_))\n\n | (Type::U128, Container::VecU128(_))\n\n | (Type::Bool, Container::VecBool(_))\n\n | (Type::Address, Container::VecAddress(_))\n\n | (Type::Signer, Container::Struct(_)) => Ok(()),\n\n\n\n (Type::Vector(_), Container::Vec(_)) => Ok(()),\n\n\n\n (Type::Struct(_), Container::Vec(_))\n\n | (Type::Signer, Container::Vec(_))\n\n | (Type::StructInstantiation(_, _), Container::Vec(_)) => Ok(()),\n\n\n\n (Type::Reference(_), _) | (Type::MutableReference(_), _) | (Type::TyParam(_), _) => Err(\n\n PartialVMError::new(StatusCode::UNKNOWN_INVARIANT_VIOLATION_ERROR)\n\n .with_message(format!(\"invalid type param for vector: {:?}\", ty)),\n\n ),\n\n\n", "file_path": "language/move-vm/types/src/values/values_impl.rs", "rank": 40, "score": 232917.54270063006 }, { "content": "fn join_impl_types(\n\n mut subst: Subst,\n\n case: TypingCase,\n\n tys1: &[Type],\n\n tys2: &[Type],\n\n) -> Result<(Subst, Vec<Type>), TypingError> {\n\n // if tys1.len() != tys2.len(), we will get an error when instantiating the type elsewhere\n\n // as all 
types are instantiated as a sanity check\n\n let mut tys = vec![];\n\n for (ty1, ty2) in tys1.iter().zip(tys2) {\n\n let (nsubst, t) = join_impl(subst, case, ty1, ty2)?;\n\n subst = nsubst;\n\n tys.push(t)\n\n }\n\n Ok((subst, tys))\n\n}\n\n\n", "file_path": "language/move-compiler/src/typing/core.rs", "rank": 41, "score": 232716.3997886681 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn keypair_strategy() -> impl Strategy<Value = KeyPair<PrivateKey, PublicKey>> {\n\n test_utils::uniform_keypair_strategy::<PrivateKey, PublicKey>()\n\n}\n", "file_path": "crates/diem-crypto/src/x25519.rs", "rank": 42, "score": 226442.79182384972 }, { "content": "/// Spawns a future on a specified runtime. If no runtime is specified, uses\n\n/// the current runtime.\n\nfn spawn(runtime: Option<&Runtime>, future: impl Future<Output = ()> + Send + 'static) {\n\n if let Some(runtime) = runtime {\n\n runtime.spawn(future);\n\n } else {\n\n tokio::spawn(future);\n\n }\n\n}\n", "file_path": "state-sync/state-sync-v2/state-sync-driver/src/storage_synchronizer.rs", "rank": 43, "score": 224767.81265296432 }, { "content": "/// Returns a [`Strategy`] that provides a variety of balances (or transfer amounts) over a roughly\n\n/// logarithmic distribution.\n\npub fn log_balance_strategy(max_balance: u64) -> impl Strategy<Value = u64> {\n\n // The logarithmic distribution is modeled by uniformly picking from ranges of powers of 2.\n\n let minimum = gas_costs::TXN_RESERVED.next_power_of_two();\n\n assert!(max_balance >= minimum, \"minimum to make sense\");\n\n let mut strategies = vec![];\n\n // Balances below and around the minimum are interesting but don't cover *every* power of 2,\n\n // just those starting from the minimum.\n\n let mut lower_bound: u64 = 0;\n\n let mut upper_bound: u64 = minimum;\n\n loop {\n\n strategies.push(lower_bound..upper_bound);\n\n if upper_bound >= max_balance {\n\n break;\n\n }\n\n lower_bound = upper_bound;\n\n upper_bound = (upper_bound * 
2).min(max_balance);\n\n }\n\n Union::new(strategies)\n\n}\n\n\n", "file_path": "diem-move/e2e-tests/src/account_universe.rs", "rank": 44, "score": 223730.40087746613 }, { "content": "fn write_type_parameter(idx: TypeParameterIndex) -> String {\n\n format!(\"T{}\", idx)\n\n}\n", "file_path": "language/move-compiler/src/interface_generator.rs", "rank": 45, "score": 223397.1825849421 }, { "content": "fn type_name(_context: &Context, sp!(loc, ntn_): N::TypeName) -> H::TypeName {\n\n use H::TypeName_ as HT;\n\n use N::TypeName_ as NT;\n\n let tn_ = match ntn_ {\n\n NT::Multiple(_) => panic!(\n\n \"ICE type constraints failed {}:{}-{}\",\n\n loc.file_hash(),\n\n loc.start(),\n\n loc.end()\n\n ),\n\n NT::Builtin(bt) => HT::Builtin(bt),\n\n NT::ModuleType(m, s) => HT::ModuleType(m, s),\n\n };\n\n sp(loc, tn_)\n\n}\n\n\n", "file_path": "language/move-compiler/src/hlir/translate.rs", "rank": 46, "score": 222955.6751489355 }, { "content": "#[test]\n\nfn test_leaf_hash() {\n\n {\n\n let address = HashValue::random();\n\n let blob = ValueBlob::from(vec![0x02]);\n\n let value_hash = blob.hash();\n\n let hash = hash_leaf(address, value_hash);\n\n let leaf_node = Node::new_leaf(address, blob);\n\n assert_eq!(leaf_node.hash(), hash);\n\n }\n\n}\n\n\n\nproptest! 
{\n\n #[test]\n\n fn two_leaves_test1(index1 in (0..8u8).prop_map(Nibble::from), index2 in (8..16u8).prop_map(Nibble::from)) {\n\n let internal_node_key = random_63nibbles_node_key();\n\n let mut children = Children::default();\n\n\n\n let leaf1_node_key = gen_leaf_keys(0 /* version */, internal_node_key.nibble_path(), index1).0;\n\n let leaf2_node_key = gen_leaf_keys(1 /* version */, internal_node_key.nibble_path(), index2).0;\n\n let hash1 = HashValue::random();\n", "file_path": "storage/jellyfish-merkle/src/node_type/node_type_test.rs", "rank": 47, "score": 222176.47039492894 }, { "content": "fn error_format_impl(sp!(_, b_): &Type, subst: &Subst, nested: bool) -> String {\n\n error_format_impl_(b_, subst, nested)\n\n}\n\n\n", "file_path": "language/move-compiler/src/typing/core.rs", "rank": 48, "score": 222139.418453047 }, { "content": "#[test]\n\nfn test_is_placeholder() {\n\n assert_eq!(Position::from_inorder_index(5).is_placeholder(0), true);\n\n assert_eq!(Position::from_inorder_index(5).is_placeholder(1), true);\n\n assert_eq!(Position::from_inorder_index(5).is_placeholder(2), false);\n\n assert_eq!(Position::from_inorder_index(5).is_placeholder(3), false);\n\n assert_eq!(Position::from_inorder_index(13).is_placeholder(5), true);\n\n assert_eq!(Position::from_inorder_index(13).is_placeholder(6), false);\n\n}\n\n\n", "file_path": "types/src/proof/position/position_test.rs", "rank": 49, "score": 221388.61558321514 }, { "content": "fn struct_type_parameters(tps: Vec<StructTypeParameter>) -> Vec<IR::StructTypeParameter> {\n\n tps.into_iter()\n\n .map(|StructTypeParameter { is_phantom, param }| {\n\n let name = type_var(param.user_specified_name);\n\n let constraints = abilities(&param.abilities);\n\n (is_phantom, name, constraints)\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "language/move-compiler/src/to_bytecode/translate.rs", "rank": 50, "score": 221383.25983220307 }, { "content": "fn type_parameters(\n\n context: &mut Context,\n\n pty_params: Vec<(Name, 
Vec<Ability>)>,\n\n) -> Vec<(Name, E::AbilitySet)> {\n\n pty_params\n\n .into_iter()\n\n .map(|(name, constraints_vec)| {\n\n let constraints = ability_set(context, \"constraint\", constraints_vec);\n\n (name, constraints)\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "language/move-compiler/src/expansion/translate.rs", "rank": 51, "score": 221379.65200686175 }, { "content": "fn type_parameter(\n\n context: &mut Context,\n\n unique_tparams: &mut UniqueMap<Name, ()>,\n\n name: Name,\n\n abilities: AbilitySet,\n\n) -> N::TParam {\n\n let id = N::TParamID::next();\n\n let user_specified_name = name;\n\n let tp = N::TParam {\n\n id,\n\n user_specified_name,\n\n abilities,\n\n };\n\n let loc = name.loc;\n\n context.bind_type(name.value, ResolvedType::TParam(loc, tp.clone()));\n\n if let Err((name, old_loc)) = unique_tparams.add(name, ()) {\n\n let msg = format!(\"Duplicate type parameter declared with name '{}'\", name);\n\n context.env.add_diag(diag!(\n\n Declarations::DuplicateItem,\n\n (loc, msg),\n\n (old_loc, \"Type parameter previously defined here\"),\n\n ))\n\n }\n\n tp\n\n}\n\n\n", "file_path": "language/move-compiler/src/naming/translate.rs", "rank": 52, "score": 221379.65200686175 }, { "content": "/// Verifies a list of events against an expected event root hash. This is done\n\n/// by calculating the hash of the events using an event accumulator hasher.\n\nfn verify_events_against_root_hash(\n\n events: &[ContractEvent],\n\n transaction_info: &TransactionInfo,\n\n) -> Result<()> {\n\n let event_hashes: Vec<_> = events.iter().map(CryptoHash::hash).collect();\n\n let event_root_hash =\n\n InMemoryAccumulator::<EventAccumulatorHasher>::from_leaves(&event_hashes).root_hash();\n\n ensure!(\n\n event_root_hash == transaction_info.event_root_hash(),\n\n \"The event root hash calculated doesn't match that carried on the \\\n\n transaction info! 
Calculated hash {:?}, transaction info hash {:?}\",\n\n event_root_hash,\n\n transaction_info.event_root_hash()\n\n );\n\n Ok(())\n\n}\n\n\n\n/// A list of transactions under an account that are contiguous by sequence number\n\n/// and include proofs.\n\n#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]\n", "file_path": "types/src/transaction/mod.rs", "rank": 53, "score": 221364.63036269695 }, { "content": "fn join_impl(\n\n mut subst: Subst,\n\n case: TypingCase,\n\n lhs: &Type,\n\n rhs: &Type,\n\n) -> Result<(Subst, Type), TypingError> {\n\n use TypeName_::*;\n\n use Type_::*;\n\n use TypingCase::*;\n\n match (lhs, rhs) {\n\n (sp!(_, Anything), other) | (other, sp!(_, Anything)) => Ok((subst, other.clone())),\n\n\n\n (sp!(_, Unit), sp!(loc, Unit)) => Ok((subst, sp(*loc, Unit))),\n\n\n\n (sp!(loc1, Ref(mut1, t1)), sp!(loc2, Ref(mut2, t2))) => {\n\n let (loc, mut_) = match (case, mut1, mut2) {\n\n (Join, _, _) => {\n\n // if 1 is imm and 2 is mut, use loc1. Else, loc2\n\n let loc = if !*mut1 && *mut2 { *loc1 } else { *loc2 };\n\n (loc, *mut1 && *mut2)\n", "file_path": "language/move-compiler/src/typing/core.rs", "rank": 54, "score": 221305.3173692627 }, { "content": "fn hash_leaf(key: HashValue, value_hash: HashValue) -> HashValue {\n\n SparseMerkleLeafNode::new(key, value_hash).hash()\n\n}\n\n\n", "file_path": "storage/jellyfish-merkle/src/node_type/node_type_test.rs", "rank": 55, "score": 221303.1148892886 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn keypair_strategy() -> impl Strategy<Value = KeyPair<Ed25519PrivateKey, Ed25519PublicKey>> {\n\n test_utils::uniform_keypair_strategy::<Ed25519PrivateKey, Ed25519PublicKey>()\n\n}\n\n\n\n#[cfg(any(test, feature = \"fuzzing\"))]\n\nuse proptest::prelude::*;\n\n\n\n#[cfg(any(test, feature = \"fuzzing\"))]\n\nimpl proptest::arbitrary::Arbitrary for Ed25519PublicKey {\n\n type Parameters = ();\n\n type Strategy = BoxedStrategy<Self>;\n\n\n\n fn arbitrary_with(_args: Self::Parameters) -> 
Self::Strategy {\n\n crate::test_utils::uniform_keypair_strategy::<Ed25519PrivateKey, Ed25519PublicKey>()\n\n .prop_map(|v| v.public_key)\n\n .boxed()\n\n }\n\n}\n", "file_path": "crates/diem-crypto/src/ed25519.rs", "rank": 56, "score": 221096.95072189416 }, { "content": "fn write_struct_type_parameters(tps: &[StructTypeParameter]) -> String {\n\n if tps.is_empty() {\n\n return \"\".to_string();\n\n }\n\n\n\n let tp_and_constraints = tps\n\n .iter()\n\n .enumerate()\n\n .map(|(idx, ty_param)| {\n\n format!(\n\n \"{}{}{}\",\n\n if ty_param.is_phantom { \"phantom \" } else { \"\" },\n\n write_type_parameter(idx as TypeParameterIndex),\n\n write_ability_constraint(ty_param.constraints),\n\n )\n\n })\n\n .collect::<Vec<_>>()\n\n .join(\", \");\n\n format!(\"<{}>\", tp_and_constraints)\n\n}\n\n\n", "file_path": "language/move-compiler/src/interface_generator.rs", "rank": 57, "score": 220504.03218418724 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn random_serializable_struct() -> impl Strategy<Value = TestDiemCrypto> {\n\n (String::arbitrary()).prop_map(TestDiemCrypto).no_shrink()\n\n}\n", "file_path": "crates/diem-crypto/src/test_utils.rs", "rank": 58, "score": 219850.12031286367 }, { "content": "fn hash_internal(left: HashValue, right: HashValue) -> HashValue {\n\n SparseMerkleInternalNode::new(left, right).hash()\n\n}\n\n\n", "file_path": "storage/jellyfish-merkle/src/node_type/node_type_test.rs", "rank": 59, "score": 219820.31182583788 }, { "content": "fn extract_internal_type(ty: &Type) -> Option<&Type> {\n\n if let Type::Path(TypePath {\n\n qself: None,\n\n path: Path { segments, .. 
},\n\n }) = ty\n\n {\n\n if let Some(PathSegment { ident, arguments }) = segments.first() {\n\n // Extract the inner type if it is \"Option\"\n\n if ident == \"Option\" {\n\n if let PathArguments::AngleBracketed(AngleBracketedGenericArguments {\n\n args, ..\n\n }) = arguments\n\n {\n\n if let Some(GenericArgument::Type(ty)) = args.first() {\n\n return Some(ty);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "crates/diem-log-derive/src/lib.rs", "rank": 60, "score": 219011.18777546525 }, { "content": "#[test]\n\nfn test_internal_hash_and_proof() {\n\n // non-leaf case 1\n\n {\n\n let internal_node_key = random_63nibbles_node_key();\n\n let mut children = Children::default();\n\n\n\n let index1 = Nibble::from(4);\n\n let index2 = Nibble::from(15);\n\n let hash1 = HashValue::random();\n\n let hash2 = HashValue::random();\n\n let child1_node_key = gen_leaf_keys(\n\n 0, /* version */\n\n internal_node_key.nibble_path(),\n\n index1,\n\n )\n\n .0;\n\n let child2_node_key = gen_leaf_keys(\n\n 1, /* version */\n\n internal_node_key.nibble_path(),\n\n index2,\n", "file_path": "storage/jellyfish-merkle/src/node_type/node_type_test.rs", "rank": 61, "score": 218920.2604454115 }, { "content": "#[test]\n\nfn test_is_placeholder_out_of_boundary() {\n\n // Testing out of boundary\n\n assert_eq!(Position::from_inorder_index(7).is_placeholder(2), false);\n\n assert_eq!(Position::from_inorder_index(11).is_placeholder(2), true);\n\n assert_eq!(Position::from_inorder_index(14).is_placeholder(2), true);\n\n}\n\n\n", "file_path": "types/src/proof/position/position_test.rs", "rank": 62, "score": 217290.00808856235 }, { "content": "fn struct_type_parameters(\n\n context: &mut Context,\n\n pty_params: Vec<P::StructTypeParameter>,\n\n) -> Vec<E::StructTypeParameter> {\n\n pty_params\n\n .into_iter()\n\n .map(|param| E::StructTypeParameter {\n\n is_phantom: param.is_phantom,\n\n name: param.name,\n\n constraints: ability_set(context, \"constraint\", param.constraints),\n\n 
})\n\n .collect()\n\n}\n\n\n", "file_path": "language/move-compiler/src/expansion/translate.rs", "rank": 63, "score": 217281.25431738136 }, { "content": "fn serialize_type_parameter(\n\n binary: &mut BinaryData,\n\n type_param: &StructTypeParameter,\n\n) -> Result<()> {\n\n serialize_ability_set(binary, type_param.constraints)?;\n\n write_as_uleb128(binary, type_param.is_phantom as u8, 1u64)\n\n}\n\n\n", "file_path": "language/move-binary-format/src/serializer.rs", "rank": 64, "score": 217281.25431738136 }, { "content": "fn serialize_type_parameters(\n\n binary: &mut BinaryData,\n\n type_parameters: &[StructTypeParameter],\n\n) -> Result<()> {\n\n serialize_type_parameter_count(binary, type_parameters.len())?;\n\n for type_param in type_parameters {\n\n serialize_type_parameter(binary, type_param)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "language/move-binary-format/src/serializer.rs", "rank": 65, "score": 217281.25431738136 }, { "content": "fn fun_type_parameters(\n\n context: &mut Context,\n\n type_parameters: Vec<(Name, AbilitySet)>,\n\n) -> Vec<N::TParam> {\n\n let mut unique_tparams = UniqueMap::new();\n\n type_parameters\n\n .into_iter()\n\n .map(|(name, abilities)| type_parameter(context, &mut unique_tparams, name, abilities))\n\n .collect()\n\n}\n\n\n", "file_path": "language/move-compiler/src/naming/translate.rs", "rank": 66, "score": 217281.25431738136 }, { "content": "fn struct_type_parameters(\n\n context: &mut Context,\n\n type_parameters: Vec<E::StructTypeParameter>,\n\n) -> Vec<N::StructTypeParameter> {\n\n let mut unique_tparams = UniqueMap::new();\n\n type_parameters\n\n .into_iter()\n\n .map(|param| {\n\n let is_phantom = param.is_phantom;\n\n let param = type_parameter(context, &mut unique_tparams, param.name, param.constraints);\n\n N::StructTypeParameter { param, is_phantom }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "language/move-compiler/src/naming/translate.rs", "rank": 67, "score": 217281.25431738136 }, { "content": "// This 
generates an arbitrary SafetyRulesInput enum.\n\npub fn arb_safety_rules_input() -> impl Strategy<Value = SafetyRulesInput> {\n\n prop_oneof![\n\n Just(SafetyRulesInput::ConsensusState),\n\n arb_epoch_change_proof().prop_map(|input| SafetyRulesInput::Initialize(Box::new(input))),\n\n arb_maybe_signed_vote_proposal()\n\n .prop_map(|input| { SafetyRulesInput::ConstructAndSignVote(Box::new(input)) }),\n\n arb_block_data().prop_map(|input| { SafetyRulesInput::SignProposal(Box::new(input)) }),\n\n arb_timeout().prop_map(|input| { SafetyRulesInput::SignTimeout(Box::new(input)) }),\n\n ]\n\n}\n\n\n\n#[cfg(any(test, feature = \"fuzzing\"))]\n\npub mod fuzzing {\n\n use crate::{error::Error, serializer::SafetyRulesInput, test_utils, TSafetyRules};\n\n use consensus_types::{\n\n block_data::BlockData, timeout::Timeout, vote::Vote, vote_proposal::MaybeSignedVoteProposal,\n\n };\n\n use diem_crypto::ed25519::Ed25519Signature;\n\n use diem_types::epoch_change::EpochChangeProof;\n\n\n", "file_path": "consensus/safety-rules/src/fuzzing_utils.rs", "rank": 68, "score": 216948.06362698606 }, { "content": "fn struct_type_parameters(ast_tys: &[ast::StructTypeParameter]) -> Vec<StructTypeParameter> {\n\n ast_tys\n\n .iter()\n\n .map(|(is_phantom, _, abs)| StructTypeParameter {\n\n constraints: abilities(abs),\n\n is_phantom: *is_phantom,\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "language/move-ir-compiler/move-ir-to-bytecode/src/compiler.rs", "rank": 69, "score": 216570.22298147454 }, { "content": "fn print_write_set_by_type(storage: &impl MoveResolver, ws: &WriteSet) {\n\n println!(\"* Modules:\");\n\n print_modules(ws);\n\n println!(\"* Resources:\");\n\n print_resources(storage, ws);\n\n}\n\n\n", "file_path": "diem-move/genesis-viewer/src/main.rs", "rank": 70, "score": 216250.44293543644 }, { "content": "// Create an error for an integer literal that is too big to fit in its type.\n\n// This assumes that the literal is the current token.\n\nfn num_too_big_error(loc: Loc, 
type_description: &'static str) -> Diagnostic {\n\n diag!(\n\n Syntax::InvalidNumber,\n\n (\n\n loc,\n\n format!(\n\n \"Invalid number literal. The given literal is too large to fit into {}\",\n\n type_description\n\n )\n\n ),\n\n )\n\n}\n\n\n\n//**************************************************************************************************\n\n// Fields\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-compiler/src/expansion/translate.rs", "rank": 71, "score": 215253.96198254963 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn uniform_keypair_strategy<Priv, Pub>() -> impl Strategy<Value = KeyPair<Priv, Pub>>\n\nwhere\n\n Pub: Serialize + for<'a> From<&'a Priv>,\n\n Priv: Serialize + Uniform,\n\n{\n\n // The no_shrink is because keypairs should be fixed -- shrinking would cause a different\n\n // keypair to be generated, which appears to not be very useful.\n\n any::<[u8; 32]>()\n\n .prop_map(|seed| {\n\n let mut rng = StdRng::from_seed(seed);\n\n KeyPair::<Priv, Pub>::generate(&mut rng)\n\n })\n\n .no_shrink()\n\n}\n\n\n\n/// Produces a uniformly random keypair from a seed and the user can alter this sleed slightly.\n\n/// Useful for circumstances where you want two disjoint keypair generations that may interact with\n\n/// each other.\n", "file_path": "crates/diem-crypto/src/test_utils.rs", "rank": 72, "score": 215002.43284922699 }, { "content": "// Parse a function parameter:\n\n// Parameter = <Var> \":\" <Type>\n\nfn parse_parameter(context: &mut Context) -> Result<(Var, Type), Diagnostic> {\n\n let v = parse_var(context)?;\n\n consume_token(context.tokens, Tok::Colon)?;\n\n let t = parse_type(context)?;\n\n Ok((v, t))\n\n}\n\n\n\n//**************************************************************************************************\n\n// 
Structs\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-compiler/src/parser/syntax.rs", "rank": 73, "score": 214642.17739701434 }, { "content": "pub fn arb_state_sync_msg() -> impl Strategy<Value = StateSyncMessage> {\n\n prop_oneof![\n\n (any::<GetChunkRequest>()).prop_map(|chunk_request| {\n\n StateSyncMessage::GetChunkRequest(Box::new(chunk_request))\n\n }),\n\n (any::<GetChunkResponse>()).prop_map(|chunk_response| {\n\n StateSyncMessage::GetChunkResponse(Box::new(chunk_response))\n\n })\n\n ]\n\n}\n\n\n\nimpl Arbitrary for GetChunkRequest {\n\n type Parameters = ();\n\n fn arbitrary_with(_args: ()) -> Self::Strategy {\n\n (\n\n any::<u64>(),\n\n any::<u64>(),\n\n any::<u64>(),\n\n any::<TargetType>(),\n\n )\n", "file_path": "state-sync/state-sync-v1/src/fuzzing.rs", "rank": 74, "score": 214144.08063400915 }, { "content": "// Parse optional type parameter list.\n\n// OptionalTypeParameters = \"<\" Comma<TypeParameter> \">\" | <empty>\n\nfn parse_optional_type_parameters(\n\n context: &mut Context,\n\n) -> Result<Vec<(Name, Vec<Ability>)>, Diagnostic> {\n\n if context.tokens.peek() == Tok::Less {\n\n parse_comma_list(\n\n context,\n\n Tok::Less,\n\n Tok::Greater,\n\n parse_type_parameter,\n\n \"a type parameter\",\n\n )\n\n } else {\n\n Ok(vec![])\n\n }\n\n}\n\n\n", "file_path": "language/move-compiler/src/parser/syntax.rs", "rank": 75, "score": 213387.34209506883 }, { "content": "// - The number of type parameters must be the same\n\n// - Each pair of parameters must satisfy [`compatible_type_parameter_constraints`] and [`compatible_type_parameter_phantom_decl`]\n\nfn compatible_struct_type_parameters(\n\n local_type_parameters_declaration: &[StructTypeParameter],\n\n defined_type_parameters: &[StructTypeParameter],\n\n) -> bool {\n\n local_type_parameters_declaration.len() == defined_type_parameters.len()\n\n && local_type_parameters_declaration\n\n .iter()\n\n 
.zip(defined_type_parameters)\n\n .all(\n\n |(local_type_parameter_declaration, defined_type_parameter)| {\n\n compatible_type_parameter_phantom_decl(\n\n local_type_parameter_declaration,\n\n defined_type_parameter,\n\n ) && compatible_type_parameter_constraints(\n\n local_type_parameter_declaration.constraints,\n\n defined_type_parameter.constraints,\n\n )\n\n },\n\n )\n\n}\n\n\n", "file_path": "language/move-bytecode-verifier/src/dependencies.rs", "rank": 76, "score": 213387.3449010445 }, { "content": "// Parse optional struct type parameters:\n\n// StructTypeParameter = \"<\" Comma<TypeParameterWithPhantomDecl> \">\" | <empty>\n\nfn parse_struct_type_parameters(\n\n context: &mut Context,\n\n) -> Result<Vec<StructTypeParameter>, Diagnostic> {\n\n if context.tokens.peek() == Tok::Less {\n\n parse_comma_list(\n\n context,\n\n Tok::Less,\n\n Tok::Greater,\n\n parse_type_parameter_with_phantom_decl,\n\n \"a type parameter\",\n\n )\n\n } else {\n\n Ok(vec![])\n\n }\n\n}\n\n\n\n//**************************************************************************************************\n\n// Functions\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-compiler/src/parser/syntax.rs", "rank": 77, "score": 213387.2104379356 }, { "content": "// - The number of type parameters must be the same\n\n// - Each pair of parameters must satisfy [`compatible_type_parameter_constraints`]\n\nfn compatible_fun_type_parameters(\n\n local_type_parameters_declaration: &[AbilitySet],\n\n defined_type_parameters: &[AbilitySet],\n\n) -> bool {\n\n local_type_parameters_declaration.len() == defined_type_parameters.len()\n\n && local_type_parameters_declaration\n\n .iter()\n\n .zip(defined_type_parameters)\n\n .all(\n\n |(\n\n local_type_parameter_constraints_declaration,\n\n defined_type_parameter_constraints,\n\n )| {\n\n compatible_type_parameter_constraints(\n\n 
*local_type_parameter_constraints_declaration,\n\n *defined_type_parameter_constraints,\n\n )\n\n },\n\n )\n\n}\n\n\n", "file_path": "language/move-bytecode-verifier/src/dependencies.rs", "rank": 78, "score": 213386.8148648534 }, { "content": "// When upgrading, the new type parameters must be the same length, and the new type parameter\n\n// constraints must be compatible\n\nfn fun_type_parameters_compatibile(\n\n old_type_parameters: &[AbilitySet],\n\n new_type_parameters: &[AbilitySet],\n\n) -> bool {\n\n old_type_parameters.len() == new_type_parameters.len()\n\n && old_type_parameters.iter().zip(new_type_parameters).all(\n\n |(old_type_parameter_constraint, new_type_parameter_constraint)| {\n\n type_parameter_constraints_compatibile(\n\n *old_type_parameter_constraint,\n\n *new_type_parameter_constraint,\n\n )\n\n },\n\n )\n\n}\n\n\n", "file_path": "language/move-binary-format/src/compatibility.rs", "rank": 79, "score": 213386.27779247682 }, { "content": "// The local view of a type parameter must be a superset of (or equal to) the defined\n\n// constraints. 
Conceptually, the local view can be more constrained than the defined one as the\n\n// local context is only limiting usage, and cannot take advantage of the additional constraints.\n\nfn compatible_type_parameter_constraints(\n\n local_type_parameter_constraints_declaration: AbilitySet,\n\n defined_type_parameter_constraints: AbilitySet,\n\n) -> bool {\n\n defined_type_parameter_constraints.is_subset(local_type_parameter_constraints_declaration)\n\n}\n\n\n", "file_path": "language/move-bytecode-verifier/src/dependencies.rs", "rank": 80, "score": 213382.41787619778 }, { "content": "#[test]\n\nfn test_position_sibling_left() {\n\n let position = Position::from_inorder_index(4);\n\n let target = position.sibling();\n\n assert_eq!(target, Position::from_inorder_index(6));\n\n}\n\n\n", "file_path": "types/src/proof/position/position_test.rs", "rank": 81, "score": 213378.18460483942 }, { "content": "#[test]\n\nfn test_position_sibling_right() {\n\n let position = Position::from_inorder_index(5);\n\n let target = position.sibling();\n\n assert_eq!(target, Position::from_inorder_index(1));\n\n}\n\n\n", "file_path": "types/src/proof/position/position_test.rs", "rank": 82, "score": 213378.18460483942 }, { "content": "fn invalid_phantom_use_error(\n\n context: &mut Context,\n\n non_phantom_pos: NonPhantomPos,\n\n param: &TParam,\n\n ty_loc: Loc,\n\n) {\n\n let msg = match non_phantom_pos {\n\n NonPhantomPos::FieldType => \"Phantom type parameter cannot be used as a field type\",\n\n NonPhantomPos::TypeArg => {\n\n \"Phantom type parameter cannot be used as an argument to a non-phantom parameter\"\n\n }\n\n };\n\n let decl_msg = format!(\"'{}' declared here as phantom\", &param.user_specified_name);\n\n context.env.add_diag(diag!(\n\n Declarations::InvalidPhantomUse,\n\n (ty_loc, msg),\n\n (param.user_specified_name.loc, decl_msg),\n\n ));\n\n}\n\n\n", "file_path": "language/move-compiler/src/typing/translate.rs", "rank": 83, "score": 213373.33058241016 }, { "content": "fn 
load_struct_type_parameter(\n\n cursor: &mut VersionedCursor,\n\n) -> BinaryLoaderResult<StructTypeParameter> {\n\n let constraints = load_ability_set(cursor, AbilitySetPosition::StructTypeParameters)?;\n\n let is_phantom = if cursor.version() < VERSION_3 {\n\n false\n\n } else {\n\n let byte: u8 = read_uleb_internal(cursor, 1)?;\n\n byte != 0\n\n };\n\n Ok(StructTypeParameter {\n\n constraints,\n\n is_phantom,\n\n })\n\n}\n\n\n", "file_path": "language/move-binary-format/src/deserializer.rs", "rank": 84, "score": 213370.32621723978 }, { "content": "fn struct_type_parameters_compatibile(\n\n old_type_parameters: &[StructTypeParameter],\n\n new_type_parameters: &[StructTypeParameter],\n\n) -> bool {\n\n old_type_parameters.len() == new_type_parameters.len()\n\n && old_type_parameters.iter().zip(new_type_parameters).all(\n\n |(old_type_parameter, new_type_parameter)| {\n\n type_parameter_phantom_decl_compatibile(old_type_parameter, new_type_parameter)\n\n && type_parameter_constraints_compatibile(\n\n old_type_parameter.constraints,\n\n new_type_parameter.constraints,\n\n )\n\n },\n\n )\n\n}\n\n\n", "file_path": "language/move-binary-format/src/compatibility.rs", "rank": 85, "score": 213370.32621723978 }, { "content": "// When upgrading, the new constraints must be a subset of (or equal to) the old constraints.\n\n// Removing an ability is fine, but adding an ability could cause existing callsites to fail\n\nfn type_parameter_constraints_compatibile(\n\n old_type_constraints: AbilitySet,\n\n new_type_constraints: AbilitySet,\n\n) -> bool {\n\n new_type_constraints.is_subset(old_type_constraints)\n\n}\n\n\n", "file_path": "language/move-binary-format/src/compatibility.rs", "rank": 86, "score": 213370.32621723978 }, { "content": "fn load_struct_type_parameters(\n\n cursor: &mut VersionedCursor,\n\n) -> BinaryLoaderResult<Vec<StructTypeParameter>> {\n\n let len = load_type_parameter_count(cursor)?;\n\n let mut type_params = Vec::with_capacity(len);\n\n for _ in 0..len 
{\n\n type_params.push(load_struct_type_parameter(cursor)?);\n\n }\n\n Ok(type_params)\n\n}\n\n\n", "file_path": "language/move-binary-format/src/deserializer.rs", "rank": 87, "score": 213370.32621723978 }, { "content": "pub fn arb_mock_genesis() -> impl Strategy<Value = (TransactionToCommit, LedgerInfoWithSignatures)>\n\n{\n\n arb_blocks_to_commit_impl(\n\n 1, /* num_accounts */\n\n 1, /* max_txn_per_block */\n\n 1, /* max_blocks */\n\n )\n\n .prop_map(|blocks| {\n\n let (block, ledger_info_with_sigs) = &blocks[0];\n\n\n\n (block[0].clone(), ledger_info_with_sigs.clone())\n\n })\n\n}\n", "file_path": "storage/diemdb/src/test_helper.rs", "rank": 88, "score": 212981.66779368403 }, { "content": "// Computes the root hash of an accumulator with given leaves.\n\nfn compute_root_hash_naive(leaves: &[HashValue]) -> HashValue {\n\n let position_to_hash = compute_hashes_for_all_positions(leaves);\n\n if position_to_hash.is_empty() {\n\n return *ACCUMULATOR_PLACEHOLDER_HASH;\n\n }\n\n\n\n let rightmost_leaf_index = leaves.len() as u64 - 1;\n\n *position_to_hash\n\n .get(&Position::root_from_leaf_index(rightmost_leaf_index))\n\n .expect(\"Root position should exist in the map.\")\n\n}\n\n\n", "file_path": "types/src/proof/accumulator/accumulator_test.rs", "rank": 89, "score": 212301.50835063445 }, { "content": "fn format_struct_type_formals(formals: &[StructTypeParameter]) -> String {\n\n if formals.is_empty() {\n\n \"\".to_string()\n\n } else {\n\n let formatted = formals\n\n .iter()\n\n .map(|(is_phantom, tv, abilities)| {\n\n format!(\n\n \"{}{}: {}\",\n\n if *is_phantom { \"phantom \" } else { \"\" },\n\n tv.value,\n\n format_constraints(abilities)\n\n )\n\n })\n\n .collect::<Vec<_>>();\n\n format!(\"<{}>\", intersperse(&formatted, \", \"))\n\n }\n\n}\n\n\n\nimpl fmt::Display for Type {\n", "file_path": "language/move-ir/types/src/ast.rs", "rank": 90, "score": 211971.52899300383 }, { "content": "fn has_unresolved_error_type(ty: &Type) -> bool {\n\n match &ty.value {\n\n 
Type_::UnresolvedError => true,\n\n Type_::Ref(_, ty) => has_unresolved_error_type(ty),\n\n Type_::Apply(_, _, ty_args) => ty_args.iter().any(has_unresolved_error_type),\n\n Type_::Param(_) | Type_::Var(_) | Type_::Anything | Type_::Unit => false,\n\n }\n\n}\n\n\n\n//**************************************************************************************************\n\n// Types\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-compiler/src/typing/translate.rs", "rank": 91, "score": 211511.58546038857 }, { "content": "fn format_type_actuals(tys: &[Type]) -> String {\n\n if tys.is_empty() {\n\n \"\".to_string()\n\n } else {\n\n format!(\"<{}>\", intersperse(tys, \", \"))\n\n }\n\n}\n\n\n", "file_path": "language/move-ir/types/src/ast.rs", "rank": 92, "score": 211511.58546038857 }, { "content": "/// If an iterator contains exactly one item, then return it. Otherwise return\n\n/// `None` if there are no items or more than one items.\n\nfn collect_exactly_one<T>(iter: impl Iterator<Item = T>) -> Option<T> {\n\n let mut iter = iter.fuse();\n\n match (iter.next(), iter.next()) {\n\n (Some(item), None) => Some(item),\n\n _ => None,\n\n }\n\n}\n", "file_path": "types/src/account_state.rs", "rank": 93, "score": 211455.1909749312 }, { "content": "fn arb_ledger_infos_with_sigs() -> impl Strategy<Value = Vec<LedgerInfoWithSignatures>> {\n\n (\n\n any_with::<AccountInfoUniverse>(3),\n\n vec((any::<LedgerInfoWithSignaturesGen>(), 1..10usize), 1..10),\n\n )\n\n .prop_map(|(mut universe, gens)| {\n\n let ledger_infos_with_sigs: Vec<_> = gens\n\n .into_iter()\n\n .map(|(ledger_info_gen, block_size)| {\n\n ledger_info_gen.materialize(&mut universe, block_size)\n\n })\n\n .collect();\n\n assert_eq!(get_first_epoch(&ledger_infos_with_sigs), 0);\n\n ledger_infos_with_sigs\n\n })\n\n}\n\n\n", "file_path": "storage/diemdb/src/ledger_store/ledger_info_test.rs", "rank": 94, "score": 211453.39991366508 }, 
{ "content": "fn freeze_point(e: H::Exp) -> H::Exp {\n\n let frozen_ty = freeze_ty(e.ty.clone());\n\n let eloc = e.exp.loc;\n\n let e_ = H::UnannotatedExp_::Freeze(Box::new(e));\n\n H::exp(frozen_ty, sp(eloc, e_))\n\n}\n\n\n", "file_path": "language/move-compiler/src/hlir/translate.rs", "rank": 95, "score": 210065.0659850831 }, { "content": "#[test]\n\npub fn test_sibling_sequence() {\n\n let sibling_sequence1 = Position::from_inorder_index(0)\n\n .iter_ancestor_sibling()\n\n .take(20)\n\n .map(Position::to_inorder_index)\n\n .collect::<Vec<u64>>();\n\n assert_eq!(\n\n sibling_sequence1,\n\n vec![\n\n 2, 5, 11, 23, 47, 95, 191, 383, 767, 1535, 3071, 6143, 12287, 24575, 49151, 98303,\n\n 196_607, 393_215, 786_431, 1_572_863\n\n ]\n\n );\n\n\n\n let sibling_sequence2 = Position::from_inorder_index(6)\n\n .iter_ancestor_sibling()\n\n .take(20)\n\n .map(Position::to_inorder_index)\n\n .collect::<Vec<u64>>();\n\n assert_eq!(\n", "file_path": "types/src/proof/position/position_test.rs", "rank": 96, "score": 209886.98568098087 }, { "content": "// Parse type parameter with optional phantom declaration:\n\n// TypeParameterWithPhantomDecl = \"phantom\"? 
<TypeParameter>\n\nfn parse_type_parameter_with_phantom_decl(\n\n context: &mut Context,\n\n) -> Result<StructTypeParameter, Diagnostic> {\n\n let is_phantom =\n\n if context.tokens.peek() == Tok::Identifier && context.tokens.content() == \"phantom\" {\n\n context.tokens.advance()?;\n\n true\n\n } else {\n\n false\n\n };\n\n let (name, constraints) = parse_type_parameter(context)?;\n\n Ok(StructTypeParameter {\n\n is_phantom,\n\n name,\n\n constraints,\n\n })\n\n}\n\n\n", "file_path": "language/move-compiler/src/parser/syntax.rs", "rank": 97, "score": 209651.24221274187 }, { "content": "#[test]\n\nfn test_frozen_subtree_sibling_iterator() {\n\n assert!(collect_all_positions(0, 0).is_empty());\n\n assert_eq!(collect_all_positions(0, 1), vec![0]);\n\n assert_eq!(collect_all_positions(0, 2), vec![1]);\n\n assert_eq!(collect_all_positions(0, 7), vec![3, 9, 12]);\n\n assert_eq!(collect_all_positions(0, 1 << 63), vec![(1 << 63) - 1]);\n\n\n\n assert!(collect_all_positions(1, 1).is_empty());\n\n assert_eq!(collect_all_positions(1, 2), vec![2]);\n\n assert_eq!(collect_all_positions(1, 3), vec![2, 4]);\n\n assert_eq!(collect_all_positions(1, 4), vec![2, 5]);\n\n assert_eq!(collect_all_positions(1, 5), vec![2, 5, 8]);\n\n assert_eq!(collect_all_positions(1, 1 << 63).len(), 63);\n\n\n\n assert!(collect_all_positions(2, 2).is_empty());\n\n assert_eq!(collect_all_positions(2, 3), vec![4]);\n\n assert_eq!(collect_all_positions(2, 4), vec![5]);\n\n assert_eq!(collect_all_positions(2, 5), vec![5, 8]);\n\n assert_eq!(collect_all_positions(2, 6), vec![5, 9]);\n\n assert_eq!(collect_all_positions(2, 7), vec![5, 9, 12]);\n", "file_path": "types/src/proof/position/position_test.rs", "rank": 98, "score": 209641.97509560327 }, { "content": "// Adding a phantom annotation to a parameter won't break clients because that can only increase the\n\n// the set of abilities in struct instantiations. 
Put it differently, adding phantom declarations\n\n// relaxes the requirements for clients.\n\nfn type_parameter_phantom_decl_compatibile(\n\n old_type_parameter: &StructTypeParameter,\n\n new_type_parameter: &StructTypeParameter,\n\n) -> bool {\n\n // old_type_paramter.is_phantom => new_type_parameter.is_phantom\n\n !old_type_parameter.is_phantom || new_type_parameter.is_phantom\n\n}\n", "file_path": "language/move-binary-format/src/compatibility.rs", "rank": 99, "score": 209640.55019447993 } ]
Rust
src/query/insert.rs
rex-remind101/sea-query
67a8ffba1351c754aae2559051930d4fe2fe70ae
use crate::{ backend::QueryBuilder, error::*, prepare::*, types::*, value::*, Expr, Query, QueryStatementBuilder, SelectExpr, SelectStatement, SimpleExpr, }; #[derive(Debug, Default, Clone)] pub struct InsertStatement { pub(crate) table: Option<Box<TableRef>>, pub(crate) columns: Vec<DynIden>, pub(crate) values: Vec<Vec<SimpleExpr>>, pub(crate) returning: Vec<SelectExpr>, } impl InsertStatement { pub fn new() -> Self { Self::default() } #[allow(clippy::wrong_self_convention)] pub fn into_table<T>(&mut self, tbl_ref: T) -> &mut Self where T: IntoTableRef, { self.table = Some(Box::new(tbl_ref.into_table_ref())); self } pub fn columns<C, I>(&mut self, columns: I) -> &mut Self where C: IntoIden, I: IntoIterator<Item = C>, { self.columns = columns.into_iter().map(|c| c.into_iden()).collect(); self } pub fn values<I>(&mut self, values: I) -> Result<&mut Self> where I: IntoIterator<Item = Value>, { let values = values .into_iter() .map(|v| Expr::val(v).into()) .collect::<Vec<SimpleExpr>>(); if self.columns.len() != values.len() { return Err(Error::ColValNumMismatch { col_len: self.columns.len(), val_len: values.len(), }); } self.values.push(values); Ok(self) } pub fn exprs<I>(&mut self, values: I) -> Result<&mut Self> where I: IntoIterator<Item = SimpleExpr>, { let values = values.into_iter().collect::<Vec<SimpleExpr>>(); if self.columns.len() != values.len() { return Err(Error::ColValNumMismatch { col_len: self.columns.len(), val_len: values.len(), }); } self.values.push(values); Ok(self) } pub fn values_panic<I>(&mut self, values: I) -> &mut Self where I: IntoIterator<Item = Value>, { self.values(values).unwrap() } pub fn exprs_panic<I>(&mut self, values: I) -> &mut Self where I: IntoIterator<Item = SimpleExpr>, { self.exprs(values).unwrap() } pub fn returning(&mut self, select: SelectStatement) -> &mut Self { self.returning = select.selects; self } pub fn returning_col<C>(&mut self, col: C) -> &mut Self where C: IntoIden, { 
self.returning(Query::select().column(col.into_iden()).take()) } } impl QueryStatementBuilder for InsertStatement { fn build_collect<T: QueryBuilder>( &self, query_builder: T, collector: &mut dyn FnMut(Value), ) -> String { let mut sql = SqlWriter::new(); query_builder.prepare_insert_statement(self, &mut sql, collector); sql.result() } fn build_collect_any( &self, query_builder: &dyn QueryBuilder, collector: &mut dyn FnMut(Value), ) -> String { let mut sql = SqlWriter::new(); query_builder.prepare_insert_statement(self, &mut sql, collector); sql.result() } }
use crate::{ backend::QueryBuilder, error::*, prepare::*, types::*, value::*, Expr, Query, QueryStatementBuilder, SelectExpr, SelectStatement, SimpleExpr, }; #[derive(Debug, Default, Clone)] pub struct InsertStatement { pub(crate) table: Option<Box<TableRef>>, pub(crate) columns: Vec<DynIden>, pub(crate) values: Vec<Vec<SimpleExpr>>, pub(crate) returning: Vec<SelectExpr>, } impl InsertStatement { pub fn new() -> Self { Self::default() } #[allow(clippy::wrong_self_convention)] pub fn into_table<T>(&mut self, tbl_ref: T) -> &mut Self where T: IntoTableRef, { self.table = Some(Box::new(tbl_ref.into_table_ref())); self } pub fn columns<C, I>(&mut self, columns: I) -> &mut Self where C: IntoIden, I: IntoIterator<Item = C>, { self.columns = columns.into_iter().map(|c| c.into_iden()).collect(); self } pub fn values<I>(&mut self, values: I) -> Result<&mut Self> where I: IntoIterator<Item = Value>, { let values = values .into_iter() .map(|v| Expr::val(v).into()) .collect::<Vec<SimpleExpr>>(); if self.columns.len() != values.len() { return Err(Error::ColValNumMismatch { col_len: self.columns.len(), val_len: values.len(), }); } self.values.push(values); Ok(self) } pub fn exprs<I>(&mut self, values: I) -> Result<&mut Self> where I: IntoIterator<Item = SimpleExpr>, { let values = values.into_iter().collect::<Vec<SimpleExpr>>(); if self.columns.len() != values.len() { return Err(Error::ColValNumMismatch { col_len: self.columns.len(), val_len: values.len(), }); } self.values.push(values); Ok(self) } pub fn values_panic<I>(&mut self, values: I) -> &mut Self where I: IntoIterator<Item = Value>, { self.values(values).unwrap() } pub fn exprs_panic<I>(&mut self, values: I) -> &mut Self where I: IntoIterator<Item = SimpleExpr>, { self.exprs(values).unwrap() } pub fn returning(&mut self, select: SelectStatement) -> &mut Self { self.returning = select.selects; self }
} impl QueryStatementBuilder for InsertStatement { fn build_collect<T: QueryBuilder>( &self, query_builder: T, collector: &mut dyn FnMut(Value), ) -> String { let mut sql = SqlWriter::new(); query_builder.prepare_insert_statement(self, &mut sql, collector); sql.result() } fn build_collect_any( &self, query_builder: &dyn QueryBuilder, collector: &mut dyn FnMut(Value), ) -> String { let mut sql = SqlWriter::new(); query_builder.prepare_insert_statement(self, &mut sql, collector); sql.result() } }
pub fn returning_col<C>(&mut self, col: C) -> &mut Self where C: IntoIden, { self.returning(Query::select().column(col.into_iden()).take()) }
function_block-full_function
[ { "content": "pub trait IntoTableRef {\n\n fn into_table_ref(self) -> TableRef;\n\n}\n\n\n\n/// Unary operator\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum UnOper {\n\n Not,\n\n}\n\n\n\n/// Binary operator\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum BinOper {\n\n And,\n\n Or,\n\n Like,\n\n NotLike,\n\n Is,\n\n IsNot,\n\n In,\n", "file_path": "src/types.rs", "rank": 0, "score": 171404.5614091992 }, { "content": "#[allow(clippy::many_single_char_names)]\n\n#[cfg(feature = \"with-json\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"with-json\")))]\n\npub fn sea_value_to_json_value(value: &Value) -> Json {\n\n use crate::{CommonSqlQueryBuilder, QueryBuilder};\n\n\n\n match value {\n\n Value::Bool(None)\n\n | Value::TinyInt(None)\n\n | Value::SmallInt(None)\n\n | Value::Int(None)\n\n | Value::BigInt(None)\n\n | Value::TinyUnsigned(None)\n\n | Value::SmallUnsigned(None)\n\n | Value::Unsigned(None)\n\n | Value::BigUnsigned(None)\n\n | Value::Float(None)\n\n | Value::Double(None)\n\n | Value::String(None)\n\n | Value::Bytes(None)\n\n | Value::Json(None) => Json::Null,\n\n #[cfg(feature = \"with-rust_decimal\")]\n\n Value::Decimal(None) => Json::Null,\n", "file_path": "src/value.rs", "rank": 1, "score": 157035.6533741798 }, { "content": " /// Identifier\n\n pub trait Iden where $(Self: $bounds),* {\n\n fn prepare(&self, s: &mut dyn fmt::Write, q: char) {\n\n write!(s, \"{}{}{}\", q, self.quoted(q), q).unwrap();\n\n }\n\n\n\n fn quoted(&self, q: char) -> String {\n\n let mut b = [0; 4];\n\n let qq: &str = q.encode_utf8(&mut b);\n\n self.to_string().replace(qq, qq.repeat(2).as_str())\n\n }\n\n\n\n fn to_string(&self) -> String {\n\n let s = &mut String::new();\n\n self.unquoted(s);\n\n s.to_owned()\n\n }\n\n\n\n fn unquoted(&self, s: &mut dyn fmt::Write);\n\n }\n\n };\n\n}\n\n\n\n#[cfg(feature = \"thread-safe\")]\n\niden_trait!(Send, Sync);\n\n#[cfg(not(feature = \"thread-safe\"))]\n\niden_trait!();\n\n\n\npub type DynIden = SeaRc<dyn 
Iden>;\n\n\n", "file_path": "src/types.rs", "rank": 2, "score": 152537.52249885883 }, { "content": "pub fn inject_parameters<I>(sql: &str, params: I, query_builder: &dyn QueryBuilder) -> String\n\nwhere\n\n I: IntoIterator<Item = Value>,\n\n{\n\n let params: Vec<Value> = params.into_iter().collect();\n\n let tokenizer = Tokenizer::new(sql);\n\n let tokens: Vec<Token> = tokenizer.iter().collect();\n\n let mut counter = 0;\n\n let mut output = Vec::new();\n\n let mut i = 0;\n\n while i < tokens.len() {\n\n let token = &tokens[i];\n\n match token {\n\n Token::Punctuation(mark) => {\n\n if (mark.as_ref(), false) == query_builder.placeholder() {\n\n output.push(query_builder.value_to_string(&params[counter]));\n\n counter += 1;\n\n i += 1;\n\n continue;\n\n } else if (mark.as_ref(), true) == query_builder.placeholder()\n", "file_path": "src/prepare.rs", "rank": 3, "score": 151159.20587330378 }, { "content": "pub trait ValueType: Sized {\n\n fn try_from(v: Value) -> Result<Self, ValueTypeErr>;\n\n\n\n fn unwrap(v: Value) -> Self {\n\n Self::try_from(v).unwrap()\n\n }\n\n\n\n fn type_name() -> String;\n\n\n\n fn column_type() -> ColumnType;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ValueTypeErr;\n\n\n\nimpl std::error::Error for ValueTypeErr {}\n\n\n\nimpl std::fmt::Display for ValueTypeErr {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"Value type mismatch\")\n", "file_path": "src/value.rs", "rank": 4, "score": 151091.39654615914 }, { "content": "pub trait IntoColumnRef {\n\n fn into_column_ref(self) -> ColumnRef;\n\n}\n\n\n\n/// Table references\n\n#[allow(clippy::large_enum_variant)]\n\n#[derive(Debug, Clone)]\n\npub enum TableRef {\n\n /// Table identifier without any schema / database prefix\n\n Table(DynIden),\n\n /// Table identifier with schema prefix\n\n SchemaTable(DynIden, DynIden),\n\n /// Table identifier with database and schema prefix\n\n DatabaseSchemaTable(DynIden, DynIden, DynIden),\n\n /// Table identifier with 
alias\n\n TableAlias(DynIden, DynIden),\n\n /// Table identifier with schema prefix and alias\n\n SchemaTableAlias(DynIden, DynIden, DynIden),\n\n /// Table identifier with database and schema prefix and alias\n\n DatabaseSchemaTableAlias(DynIden, DynIden, DynIden, DynIden),\n\n /// Subquery with alias\n\n SubQuery(SelectStatement, DynIden),\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 5, "score": 147540.44615226323 }, { "content": "#[test]\n\nfn select_30() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Char::Character, Char::SizeW, Char::SizeH\n\n ])\n\n .from(Char::Table)\n\n .and_where(\n\n Expr::col(Char::SizeW).mul(2)\n\n .add(Expr::col(Char::SizeH).div(3))\n\n .equals(Expr::value(4))\n\n )\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character`, `size_w`, `size_h` FROM `character` WHERE (`size_w` * 2) + (`size_h` / 3) = 4\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 6, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_41() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Glyph::Aspect])\n\n .exprs(vec![Expr::col(Glyph::Image).max()])\n\n .from(Glyph::Table)\n\n .group_by_columns(vec![Glyph::Aspect])\n\n .cond_having(any![Expr::col(Glyph::Aspect).gt(2)])\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"aspect\", MAX(\"image\") FROM \"glyph\" GROUP BY \"aspect\" HAVING \"aspect\" > 2\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 7, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_24() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Char::Character)\n\n .from(Char::Table)\n\n .conditions(\n\n true,\n\n |x| {\n\n x.and_where(Expr::col(Char::FontId).eq(5));\n\n },\n\n |_| ()\n\n )\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character` FROM `character` WHERE `font_id` = 5\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 8, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_40() {\n\n let statement = 
sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(any![\n\n Expr::col(Glyph::Aspect).is_null(),\n\n all![\n\n Expr::col(Glyph::Aspect).is_not_null(),\n\n Expr::col(Glyph::Aspect).lt(8)\n\n ]\n\n ])\n\n .to_string(sea_query::MysqlQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT `id` FROM `glyph` WHERE `aspect` IS NULL OR (`aspect` IS NOT NULL AND `aspect` < 8)\"#\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 9, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_37() {\n\n let (statement, values) = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(Cond::any().add(Cond::all()).add(Cond::any()))\n\n .build(sea_query::PostgresQueryBuilder);\n\n\n\n assert_eq!(statement, r#\"SELECT \"id\" FROM \"glyph\"\"#);\n\n assert_eq!(values.0, vec![]);\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 10, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_47() {\n\n let statement = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(\n\n Cond::all()\n\n .not()\n\n .add_option(Some(Expr::col(Glyph::Aspect).lt(8)))\n\n .add(Expr::col(Glyph::Aspect).is_not_null()),\n\n )\n\n .to_string(MysqlQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT `id` FROM `glyph` WHERE NOT (`aspect` < 8 AND `aspect` IS NOT NULL)\"#\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 11, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_32() {\n\n assert_eq!(\n\n Query::select()\n\n .expr_as(Expr::col(Char::Character), Alias::new(\"C\"))\n\n .from(Char::Table)\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character` AS `C` FROM `character`\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 12, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_26() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Char::Character)\n\n .from(Char::Table)\n\n 
.and_where(\n\n Expr::expr(Expr::col(Char::SizeW).add(1))\n\n .mul(2)\n\n .equals(Expr::expr(Expr::col(Char::SizeH).div(2)).sub(1))\n\n )\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character` FROM `character` WHERE (`size_w` + 1) * 2 = (`size_h` / 2) - 1\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 13, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_23() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Char::Character)\n\n .from(Char::Table)\n\n .and_where_option(None)\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character` FROM `character`\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 14, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_4() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Glyph::Aspect])\n\n .from_subquery(\n\n Query::select()\n\n .columns(vec![Glyph::Image, Glyph::Aspect])\n\n .from(Glyph::Table)\n\n .take(),\n\n Alias::new(\"subglyph\")\n\n )\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"aspect\" FROM (SELECT \"image\", \"aspect\" FROM \"glyph\") AS \"subglyph\"\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 15, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_11() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Glyph::Aspect,\n\n ])\n\n .from(Glyph::Table)\n\n .and_where(Expr::expr(Expr::col(Glyph::Aspect).if_null(0)).gt(2))\n\n .order_by(Glyph::Image, Order::Desc)\n\n .order_by((Glyph::Table, Glyph::Aspect), Order::Asc)\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `aspect` FROM `glyph` WHERE IFNULL(`aspect`, 0) > 2 ORDER BY `image` DESC, `glyph`.`aspect` ASC\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 16, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_17() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![(Glyph::Table, Glyph::Image),])\n\n .from(Glyph::Table)\n\n .and_where(Expr::tbl(Glyph::Table, Glyph::Aspect).between(3, 5))\n\n 
.to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"glyph\".\"image\" FROM \"glyph\" WHERE \"glyph\".\"aspect\" BETWEEN 3 AND 5\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 17, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_36() {\n\n let (statement, values) = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(Cond::any().add(Expr::col(Glyph::Aspect).is_null()))\n\n .build(sea_query::PostgresQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT \"id\" FROM \"glyph\" WHERE \"aspect\" IS NULL\"#\n\n );\n\n assert_eq!(values.0, vec![]);\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 18, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_38() {\n\n let (statement, values) = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(\n\n Cond::any()\n\n .add(Expr::col(Glyph::Aspect).is_null())\n\n .add(Expr::col(Glyph::Aspect).is_not_null()),\n\n )\n\n .build(sea_query::MysqlQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT `id` FROM `glyph` WHERE `aspect` IS NULL OR `aspect` IS NOT NULL\"#\n\n );\n\n assert_eq!(values.0, vec![]);\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 19, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_7() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Glyph::Aspect,])\n\n .from(Glyph::Table)\n\n .and_where(Expr::expr(Expr::col(Glyph::Aspect).if_null(0)).gt(2))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"aspect\" FROM \"glyph\" WHERE COALESCE(\"aspect\", 0) > 2\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 20, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_27() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Char::Character, Char::SizeW, Char::SizeH\n\n ])\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::SizeW).eq(3))\n\n .and_where(Expr::col(Char::SizeH).eq(4))\n\n 
.and_where(Expr::col(Char::SizeH).eq(5))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character`, `size_w`, `size_h` FROM `character` WHERE `size_w` = 3 AND `size_h` = 4 AND `size_h` = 5\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 21, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_27() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character, Char::SizeW, Char::SizeH])\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::SizeW).eq(3))\n\n .and_where(Expr::col(Char::SizeH).eq(4))\n\n .and_where(Expr::col(Char::SizeH).eq(5))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\", \"size_w\", \"size_h\" FROM \"character\" WHERE \"size_w\" = 3 AND \"size_h\" = 4 AND \"size_h\" = 5\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 22, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_30() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character, Char::SizeW, Char::SizeH])\n\n .from(Char::Table)\n\n .and_where(\n\n Expr::col(Char::SizeW)\n\n .mul(2)\n\n .add(Expr::col(Char::SizeH).div(3))\n\n .equals(Expr::value(4))\n\n )\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\", \"size_w\", \"size_h\" FROM \"character\" WHERE (\"size_w\" * 2) + (\"size_h\" / 3) = 4\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 23, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_13() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Glyph::Aspect,\n\n ])\n\n .from(Glyph::Table)\n\n .and_where(Expr::expr(Expr::col(Glyph::Aspect).if_null(0)).gt(2))\n\n .order_by_columns(vec![\n\n ((Glyph::Table, Glyph::Id), Order::Asc),\n\n ((Glyph::Table, Glyph::Aspect), Order::Desc),\n\n ])\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `aspect` FROM `glyph` WHERE IFNULL(`aspect`, 0) > 2 ORDER BY `glyph`.`id` ASC, `glyph`.`aspect` DESC\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 24, "score": 
145059.41611518612 }, { "content": "#[test]\n\nfn select_36() {\n\n let (statement, values) = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(Cond::any().add(Expr::col(Glyph::Aspect).is_null()))\n\n .build(sea_query::MysqlQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT `id` FROM `glyph` WHERE `aspect` IS NULL\"#\n\n );\n\n assert_eq!(values.0, vec![]);\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 25, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_32() {\n\n assert_eq!(\n\n Query::select()\n\n .expr_as(Expr::col(Char::Character), Alias::new(\"C\"))\n\n .from(Char::Table)\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\" AS \"C\" FROM \"character\"\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 26, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_6() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Glyph::Aspect,])\n\n .exprs(vec![Expr::col(Glyph::Image).max(),])\n\n .from(Glyph::Table)\n\n .group_by_columns(vec![Glyph::Aspect,])\n\n .and_having(Expr::col(Glyph::Aspect).gt(2))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `aspect`, MAX(`image`) FROM `glyph` GROUP BY `aspect` HAVING `aspect` > 2\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 27, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_49() {\n\n let statement = sea_query::Query::select()\n\n .expr(Expr::asterisk())\n\n .from(Char::Table)\n\n .to_string(MysqlQueryBuilder);\n\n\n\n assert_eq!(statement, r#\"SELECT * FROM `character`\"#);\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 28, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_43() {\n\n let statement = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(Cond::all().add_option::<SimpleExpr>(None))\n\n .to_string(MysqlQueryBuilder);\n\n\n\n assert_eq!(statement, \"SELECT `id` FROM 
`glyph`\");\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 29, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_16() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character])\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::FontId).is_null())\n\n .and_where(Expr::col(Char::Character).is_not_null())\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character` FROM `character` WHERE `font_id` IS NULL AND `character` IS NOT NULL\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 30, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_12() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Glyph::Aspect,])\n\n .from(Glyph::Table)\n\n .and_where(Expr::expr(Expr::col(Glyph::Aspect).if_null(0)).gt(2))\n\n .order_by_columns(vec![(Glyph::Id, Order::Asc), (Glyph::Aspect, Order::Desc),])\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"aspect\" FROM \"glyph\" WHERE COALESCE(\"aspect\", 0) > 2 ORDER BY \"id\" ASC, \"aspect\" DESC\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 31, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_28() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Char::Character, Char::SizeW, Char::SizeH\n\n ])\n\n .from(Char::Table)\n\n .or_where(Expr::col(Char::SizeW).eq(3))\n\n .or_where(Expr::col(Char::SizeH).eq(4))\n\n .or_where(Expr::col(Char::SizeH).eq(5))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character`, `size_w`, `size_h` FROM `character` WHERE `size_w` = 3 OR `size_h` = 4 OR `size_h` = 5\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 32, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_4() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Glyph::Image])\n\n .from_subquery(\n\n Query::select()\n\n .columns(vec![Glyph::Image, Glyph::Aspect])\n\n .from(Glyph::Table)\n\n .take(),\n\n Alias::new(\"subglyph\")\n\n )\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT 
`image` FROM (SELECT `image`, `aspect` FROM `glyph`) AS `subglyph`\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 33, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_26() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Char::Character)\n\n .from(Char::Table)\n\n .and_where(\n\n Expr::expr(Expr::col(Char::SizeW).add(1))\n\n .mul(2)\n\n .equals(Expr::expr(Expr::col(Char::SizeH).div(2)).sub(1))\n\n )\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\" FROM \"character\" WHERE (\"size_w\" + 1) * 2 = (\"size_h\" / 2) - 1\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 34, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_1() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character, Char::SizeW, Char::SizeH])\n\n .from(Char::Table)\n\n .limit(10)\n\n .offset(100)\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character`, `size_w`, `size_h` FROM `character` LIMIT 10 OFFSET 100\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 35, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_2() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character, Char::SizeW, Char::SizeH])\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::SizeW).eq(3))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character`, `size_w`, `size_h` FROM `character` WHERE `size_w` = 3\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 36, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_14() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Glyph::Id,\n\n Glyph::Aspect,\n\n ])\n\n .expr(Expr::col(Glyph::Image).max())\n\n .from(Glyph::Table)\n\n .group_by_columns(vec![\n\n (Glyph::Table, Glyph::Id),\n\n (Glyph::Table, Glyph::Aspect),\n\n ])\n\n .and_having(Expr::col(Glyph::Aspect).gt(2))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `id`, `aspect`, MAX(`image`) FROM `glyph` GROUP BY `glyph`.`id`, 
`glyph`.`aspect` HAVING `aspect` > 2\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 37, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_2() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character, Char::SizeW, Char::SizeH])\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::SizeW).eq(3))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\", \"size_w\", \"size_h\" FROM \"character\" WHERE \"size_w\" = 3\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 38, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_16() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character])\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::FontId).is_null())\n\n .and_where(Expr::col(Char::Character).is_not_null())\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\" FROM \"character\" WHERE \"font_id\" IS NULL AND \"character\" IS NOT NULL\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 39, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_18() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Glyph::Aspect,\n\n ])\n\n .from(Glyph::Table)\n\n .and_where(Expr::col(Glyph::Aspect).between(3, 5))\n\n .and_where(Expr::col(Glyph::Aspect).not_between(8, 10))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `aspect` FROM `glyph` WHERE (`aspect` BETWEEN 3 AND 5) AND (`aspect` NOT BETWEEN 8 AND 10)\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 40, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_25() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Char::Character)\n\n .from(Char::Table)\n\n .and_where(\n\n Expr::col(Char::SizeW)\n\n .mul(2)\n\n .equals(Expr::col(Char::SizeH).div(2))\n\n )\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character` FROM `character` WHERE `size_w` * 2 = `size_h` / 2\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 41, 
"score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_7() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Glyph::Aspect,])\n\n .from(Glyph::Table)\n\n .and_where(Expr::expr(Expr::col(Glyph::Aspect).if_null(0)).gt(2))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `aspect` FROM `glyph` WHERE IFNULL(`aspect`, 0) > 2\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 42, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_22() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Char::Character)\n\n .from(Char::Table)\n\n .cond_where(\n\n Cond::all()\n\n .add(\n\n Cond::any().add(Expr::col(Char::Character).like(\"C\")).add(\n\n Expr::col(Char::Character)\n\n .like(\"D\")\n\n .and(Expr::col(Char::Character).like(\"E\"))\n\n )\n\n )\n\n .add(\n\n Expr::col(Char::Character)\n\n .like(\"F\")\n\n .or(Expr::col(Char::Character).like(\"G\"))\n\n )\n\n )\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\" FROM \"character\" WHERE (\"character\" LIKE 'C' OR ((\"character\" LIKE 'D') AND (\"character\" LIKE 'E'))) AND ((\"character\" LIKE 'F') OR (\"character\" LIKE 'G'))\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 43, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_22() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Char::Character)\n\n .from(Char::Table)\n\n .cond_where(\n\n Cond::all()\n\n .add(\n\n Cond::any()\n\n .add(Expr::col(Char::Character).like(\"C\"))\n\n .add(Expr::col(Char::Character).like(\"D\").and(Expr::col(Char::Character).like(\"E\")))\n\n )\n\n .add(\n\n Expr::col(Char::Character).like(\"F\").or(Expr::col(Char::Character).like(\"G\"))\n\n )\n\n )\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character` FROM `character` WHERE (`character` LIKE 'C' OR ((`character` LIKE 'D') AND (`character` LIKE 'E'))) AND ((`character` LIKE 'F') OR (`character` LIKE 'G'))\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 44, "score": 
145059.41611518612 }, { "content": "#[test]\n\nfn select_5() {\n\n assert_eq!(\n\n Query::select()\n\n .column((Glyph::Table, Glyph::Image))\n\n .from(Glyph::Table)\n\n .and_where(Expr::tbl(Glyph::Table, Glyph::Aspect).is_in(vec![3, 4]))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `glyph`.`image` FROM `glyph` WHERE `glyph`.`aspect` IN (3, 4)\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 45, "score": 145059.41611518612 }, { "content": "#[test]\n\n#[should_panic]\n\nfn select_34b() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Glyph::Aspect)\n\n .expr(Expr::col(Glyph::Image).max())\n\n .from(Glyph::Table)\n\n .group_by_columns(vec![Glyph::Aspect,])\n\n .or_having(\n\n Expr::col(Glyph::Aspect)\n\n .gt(2)\n\n .or(Expr::col(Glyph::Aspect).lt(8))\n\n )\n\n .and_having(\n\n Expr::col(Glyph::Aspect)\n\n .gt(22)\n\n .or(Expr::col(Glyph::Aspect).lt(28))\n\n )\n\n .to_string(MysqlQueryBuilder),\n\n vec![\n\n \"SELECT `aspect`, MAX(`image`) FROM `glyph` GROUP BY `aspect`\",\n\n \"HAVING ((`aspect` > 2) OR (`aspect` < 8))\",\n\n \"AND ((`aspect` > 22) OR (`aspect` < 28))\",\n\n ]\n\n .join(\" \")\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 46, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_33() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Glyph::Image)\n\n .from(Glyph::Table)\n\n .and_where(\n\n Expr::col(Glyph::Aspect)\n\n .in_subquery(Query::select().expr(Expr::cust(\"3 + 2 * 2\")).take())\n\n )\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `image` FROM `glyph` WHERE `aspect` IN (SELECT 3 + 2 * 2)\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 47, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_19() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character])\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::Character).eq(\"A\"))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character` FROM `character` WHERE `character` = 'A'\"\n\n 
);\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 48, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_35() {\n\n let (statement, values) = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .and_where(Expr::col(Glyph::Aspect).is_null())\n\n .build(sea_query::PostgresQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT \"id\" FROM \"glyph\" WHERE \"aspect\" IS NULL\"#\n\n );\n\n assert_eq!(values.0, vec![]);\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 49, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_24() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Char::Character)\n\n .from(Char::Table)\n\n .conditions(\n\n true,\n\n |x| {\n\n x.and_where(Expr::col(Char::FontId).eq(5));\n\n },\n\n |_| ()\n\n )\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\" FROM \"character\" WHERE \"font_id\" = 5\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 50, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_9() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Char::Character,\n\n ])\n\n .from(Char::Table)\n\n .left_join(Font::Table, Expr::tbl(Char::Table, Char::FontId).equals(Font::Table, Font::Id))\n\n .inner_join(Glyph::Table, Expr::tbl(Char::Table, Char::Character).equals(Glyph::Table, Glyph::Image))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character` FROM `character` LEFT JOIN `font` ON `character`.`font_id` = `font`.`id` INNER JOIN `glyph` ON `character`.`character` = `glyph`.`image`\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 51, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_8() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Char::Character,\n\n ])\n\n .from(Char::Table)\n\n .left_join(Font::Table, Expr::tbl(Char::Table, Char::FontId).equals(Font::Table, Font::Id))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character` FROM `character` 
LEFT JOIN `font` ON `character`.`font_id` = `font`.`id`\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 52, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_13() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Glyph::Aspect,])\n\n .from(Glyph::Table)\n\n .and_where(Expr::expr(Expr::col(Glyph::Aspect).if_null(0)).gt(2))\n\n .order_by_columns(vec![\n\n ((Glyph::Table, Glyph::Id), Order::Asc),\n\n ((Glyph::Table, Glyph::Aspect), Order::Desc),\n\n ])\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"aspect\" FROM \"glyph\" WHERE COALESCE(\"aspect\", 0) > 2 ORDER BY \"glyph\".\"id\" ASC, \"glyph\".\"aspect\" DESC\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 53, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_14() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Glyph::Id, Glyph::Aspect,])\n\n .expr(Expr::col(Glyph::Image).max())\n\n .from(Glyph::Table)\n\n .group_by_columns(vec![\n\n (Glyph::Table, Glyph::Id),\n\n (Glyph::Table, Glyph::Aspect),\n\n ])\n\n .and_having(Expr::col(Glyph::Aspect).gt(2))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"id\", \"aspect\", MAX(\"image\") FROM \"glyph\" GROUP BY \"glyph\".\"id\", \"glyph\".\"aspect\" HAVING \"aspect\" > 2\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 54, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_33() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Glyph::Image)\n\n .from(Glyph::Table)\n\n .and_where(\n\n Expr::col(Glyph::Aspect)\n\n .in_subquery(Query::select().expr(Expr::cust(\"3 + 2 * 2\")).take())\n\n )\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"image\" FROM \"glyph\" WHERE \"aspect\" IN (SELECT 3 + 2 * 2)\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 55, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_3() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character, Char::SizeW, 
Char::SizeH])\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::SizeW).eq(3))\n\n .and_where(Expr::col(Char::SizeH).eq(4))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\", \"size_w\", \"size_h\" FROM \"character\" WHERE \"size_w\" = 3 AND \"size_h\" = 4\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 56, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_38() {\n\n let (statement, values) = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(\n\n Cond::any()\n\n .add(Expr::col(Glyph::Aspect).is_null())\n\n .add(Expr::col(Glyph::Aspect).is_not_null()),\n\n )\n\n .build(sea_query::PostgresQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT \"id\" FROM \"glyph\" WHERE \"aspect\" IS NULL OR \"aspect\" IS NOT NULL\"#\n\n );\n\n assert_eq!(values.0, vec![]);\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 57, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_8() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character,])\n\n .from(Char::Table)\n\n .left_join(\n\n Font::Table,\n\n Expr::tbl(Char::Table, Char::FontId).equals(Font::Table, Font::Id)\n\n )\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\" FROM \"character\" LEFT JOIN \"font\" ON \"character\".\"font_id\" = \"font\".\"id\"\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 58, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_39() {\n\n let (statement, values) = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(\n\n Cond::all()\n\n .add(Expr::col(Glyph::Aspect).is_null())\n\n .add(Expr::col(Glyph::Aspect).is_not_null()),\n\n )\n\n .build(sea_query::PostgresQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT \"id\" FROM \"glyph\" WHERE \"aspect\" IS NULL AND \"aspect\" IS NOT NULL\"#\n\n );\n\n assert_eq!(values.0, vec![]);\n\n}\n\n\n", "file_path": 
"tests/postgres/query.rs", "rank": 59, "score": 145059.41611518612 }, { "content": "#[test]\n\n#[should_panic]\n\nfn select_29() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character, Char::SizeW, Char::SizeH])\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::SizeW).eq(3))\n\n .or_where(Expr::col(Char::SizeH).eq(4))\n\n .and_where(Expr::col(Char::SizeH).eq(5))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\", \"size_w\", \"size_h\" FROM \"character\" WHERE \"size_w\" = 3 OR \"size_h\" = 4 AND \"size_h\" = 5\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 60, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_10() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character,])\n\n .from(Char::Table)\n\n .left_join(\n\n Font::Table,\n\n Expr::tbl(Char::Table, Char::FontId)\n\n .equals(Font::Table, Font::Id)\n\n .and(Expr::tbl(Char::Table, Char::FontId).equals(Font::Table, Font::Id))\n\n )\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\" FROM \"character\" LEFT JOIN \"font\" ON (\"character\".\"font_id\" = \"font\".\"id\") AND (\"character\".\"font_id\" = \"font\".\"id\")\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 61, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_46() {\n\n let statement = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(\n\n Cond::all()\n\n .not()\n\n .add_option(Some(Expr::col(Glyph::Aspect).lt(8))),\n\n )\n\n .to_string(MysqlQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT `id` FROM `glyph` WHERE NOT (`aspect` < 8)\"#\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 62, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_34a() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Glyph::Aspect)\n\n .expr(Expr::col(Glyph::Image).max())\n\n .from(Glyph::Table)\n\n .group_by_columns(vec![Glyph::Aspect,])\n\n 
.or_having(\n\n Expr::col(Glyph::Aspect)\n\n .gt(2)\n\n .or(Expr::col(Glyph::Aspect).lt(8))\n\n )\n\n .or_having(\n\n Expr::col(Glyph::Aspect)\n\n .gt(12)\n\n .and(Expr::col(Glyph::Aspect).lt(18))\n\n )\n\n .or_having(Expr::col(Glyph::Aspect).gt(32))\n\n .to_string(MysqlQueryBuilder),\n\n vec![\n\n \"SELECT `aspect`, MAX(`image`) FROM `glyph` GROUP BY `aspect`\",\n\n \"HAVING ((`aspect` > 2) OR (`aspect` < 8))\",\n\n \"OR ((`aspect` > 12) AND (`aspect` < 18))\",\n\n \"OR `aspect` > 32\",\n\n ]\n\n .join(\" \")\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 63, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_45() {\n\n let statement = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(\n\n Cond::any()\n\n .not()\n\n .add_option(Some(Expr::col(Glyph::Aspect).lt(8)))\n\n .add(Expr::col(Glyph::Aspect).is_not_null()),\n\n )\n\n .to_string(MysqlQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT `id` FROM `glyph` WHERE NOT (`aspect` < 8 OR `aspect` IS NOT NULL)\"#\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 64, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_20() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Char::Character)\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::Character).like(\"A\"))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\" FROM \"character\" WHERE \"character\" LIKE 'A'\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 65, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_23() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Char::Character)\n\n .from(Char::Table)\n\n .and_where_option(None)\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\" FROM \"character\"\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 66, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_15() {\n\n assert_eq!(\n\n Query::select()\n\n 
.columns(vec![Char::Character])\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::FontId).is_null())\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character` FROM `character` WHERE `font_id` IS NULL\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 67, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_42() {\n\n let statement = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(\n\n Cond::all()\n\n .add_option(Some(Expr::col(Glyph::Aspect).lt(8)))\n\n .add(Expr::col(Glyph::Aspect).is_not_null()),\n\n )\n\n .to_string(PostgresQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT \"id\" FROM \"glyph\" WHERE \"aspect\" < 8 AND \"aspect\" IS NOT NULL\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 68, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_12() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Glyph::Aspect,\n\n ])\n\n .from(Glyph::Table)\n\n .and_where(Expr::expr(Expr::col(Glyph::Aspect).if_null(0)).gt(2))\n\n .order_by_columns(vec![\n\n (Glyph::Id, Order::Asc),\n\n (Glyph::Aspect, Order::Desc),\n\n ])\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `aspect` FROM `glyph` WHERE IFNULL(`aspect`, 0) > 2 ORDER BY `id` ASC, `aspect` DESC\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 69, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_6() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Glyph::Aspect,])\n\n .exprs(vec![Expr::col(Glyph::Image).max(),])\n\n .from(Glyph::Table)\n\n .group_by_columns(vec![Glyph::Aspect,])\n\n .and_having(Expr::col(Glyph::Aspect).gt(2))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"aspect\", MAX(\"image\") FROM \"glyph\" GROUP BY \"aspect\" HAVING \"aspect\" > 2\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 70, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_21() {\n\n assert_eq!(\n\n 
Query::select()\n\n .columns(vec![Char::Character])\n\n .from(Char::Table)\n\n .or_where(Expr::col(Char::Character).like(\"A%\"))\n\n .or_where(Expr::col(Char::Character).like(\"%B\"))\n\n .or_where(Expr::col(Char::Character).like(\"%C%\"))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\" FROM \"character\" WHERE \"character\" LIKE 'A%' OR \"character\" LIKE '%B' OR \"character\" LIKE '%C%'\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 71, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_31() {\n\n assert_eq!(\n\n Query::select()\n\n .expr((1..10_i32).fold(Expr::value(0), |expr, i| { expr.add(Expr::value(i)) }))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT 0 + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 72, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_44() {\n\n let statement = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(\n\n Cond::any()\n\n .not()\n\n .add_option(Some(Expr::col(Glyph::Aspect).lt(8))),\n\n )\n\n .to_string(MysqlQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT `id` FROM `glyph` WHERE NOT (`aspect` < 8)\"#\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 73, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_21() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Char::Character\n\n ])\n\n .from(Char::Table)\n\n .or_where(Expr::col(Char::Character).like(\"A%\"))\n\n .or_where(Expr::col(Char::Character).like(\"%B\"))\n\n .or_where(Expr::col(Char::Character).like(\"%C%\"))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character` FROM `character` WHERE `character` LIKE 'A%' OR `character` LIKE '%B' OR `character` LIKE '%C%'\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 74, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_11() {\n\n assert_eq!(\n\n 
Query::select()\n\n .columns(vec![Glyph::Aspect,])\n\n .from(Glyph::Table)\n\n .and_where(Expr::expr(Expr::col(Glyph::Aspect).if_null(0)).gt(2))\n\n .order_by(Glyph::Image, Order::Desc)\n\n .order_by((Glyph::Table, Glyph::Aspect), Order::Asc)\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"aspect\" FROM \"glyph\" WHERE COALESCE(\"aspect\", 0) > 2 ORDER BY \"image\" DESC, \"glyph\".\"aspect\" ASC\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 75, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_17() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![(Glyph::Table, Glyph::Image),])\n\n .from(Glyph::Table)\n\n .and_where(Expr::tbl(Glyph::Table, Glyph::Aspect).between(3, 5))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `glyph`.`image` FROM `glyph` WHERE `glyph`.`aspect` BETWEEN 3 AND 5\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 76, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_28() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character, Char::SizeW, Char::SizeH])\n\n .from(Char::Table)\n\n .or_where(Expr::col(Char::SizeW).eq(3))\n\n .or_where(Expr::col(Char::SizeH).eq(4))\n\n .or_where(Expr::col(Char::SizeH).eq(5))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\", \"size_w\", \"size_h\" FROM \"character\" WHERE \"size_w\" = 3 OR \"size_h\" = 4 OR \"size_h\" = 5\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 77, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_9() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character,])\n\n .from(Char::Table)\n\n .left_join(\n\n Font::Table,\n\n Expr::tbl(Char::Table, Char::FontId).equals(Font::Table, Font::Id)\n\n )\n\n .inner_join(\n\n Glyph::Table,\n\n Expr::tbl(Char::Table, Char::Character).equals(Glyph::Table, Glyph::Image)\n\n )\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\" FROM \"character\" LEFT JOIN \"font\" ON 
\"character\".\"font_id\" = \"font\".\"id\" INNER JOIN \"glyph\" ON \"character\".\"character\" = \"glyph\".\"image\"\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 78, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_31() {\n\n assert_eq!(\n\n Query::select()\n\n .expr((1..10_i32).fold(Expr::value(0), |expr, i| { expr.add(Expr::value(i)) }))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT 0 + 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8 + 9\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 79, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_41() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Glyph::Aspect])\n\n .exprs(vec![Expr::col(Glyph::Image).max()])\n\n .from(Glyph::Table)\n\n .group_by_columns(vec![Glyph::Aspect])\n\n .cond_having(any![Expr::col(Glyph::Aspect).gt(2)])\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `aspect`, MAX(`image`) FROM `glyph` GROUP BY `aspect` HAVING `aspect` > 2\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 80, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_15() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character])\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::FontId).is_null())\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\" FROM \"character\" WHERE \"font_id\" IS NULL\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 81, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_18() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Glyph::Aspect,])\n\n .from(Glyph::Table)\n\n .and_where(Expr::col(Glyph::Aspect).between(3, 5))\n\n .and_where(Expr::col(Glyph::Aspect).not_between(8, 10))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"aspect\" FROM \"glyph\" WHERE (\"aspect\" BETWEEN 3 AND 5) AND (\"aspect\" NOT BETWEEN 8 AND 10)\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 82, "score": 145059.41611518612 }, { 
"content": "#[test]\n\nfn select_1() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character, Char::SizeW, Char::SizeH])\n\n .from(Char::Table)\n\n .limit(10)\n\n .offset(100)\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\", \"size_w\", \"size_h\" FROM \"character\" LIMIT 10 OFFSET 100\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 83, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_10() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Char::Character,\n\n ])\n\n .from(Char::Table)\n\n .left_join(Font::Table,\n\n Expr::tbl(Char::Table, Char::FontId).equals(Font::Table, Font::Id)\n\n .and(Expr::tbl(Char::Table, Char::FontId).equals(Font::Table, Font::Id))\n\n )\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character` FROM `character` LEFT JOIN `font` ON (`character`.`font_id` = `font`.`id`) AND (`character`.`font_id` = `font`.`id`)\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 84, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_35() {\n\n let (statement, values) = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .and_where(Expr::col(Glyph::Aspect).is_null())\n\n .build(sea_query::MysqlQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT `id` FROM `glyph` WHERE `aspect` IS NULL\"#\n\n );\n\n assert_eq!(values.0, vec![]);\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 85, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_20() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Char::Character)\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::Character).like(\"A\"))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character` FROM `character` WHERE `character` LIKE 'A'\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 86, "score": 145059.41611518612 }, { "content": "#[test]\n\n#[should_panic]\n\nfn select_34b() {\n\n assert_eq!(\n\n Query::select()\n\n 
.column(Glyph::Aspect)\n\n .expr(Expr::col(Glyph::Image).max())\n\n .from(Glyph::Table)\n\n .group_by_columns(vec![Glyph::Aspect,])\n\n .or_having(\n\n Expr::col(Glyph::Aspect)\n\n .gt(2)\n\n .or(Expr::col(Glyph::Aspect).lt(8))\n\n )\n\n .and_having(\n\n Expr::col(Glyph::Aspect)\n\n .gt(22)\n\n .or(Expr::col(Glyph::Aspect).lt(28))\n\n )\n\n .to_string(PostgresQueryBuilder),\n\n vec![\n\n r#\"SELECT \"aspect\", MAX(\"image\") FROM \"glyph\" GROUP BY \"aspect\"\"#,\n\n r#\"HAVING ((\"aspect\" > 2) OR (\"aspect\" < 8))\"#,\n\n r#\"AND ((\"aspect\" > 22) OR (\"aspect\" < 28))\"#,\n\n ]\n\n .join(\" \")\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 87, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_42() {\n\n let statement = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(\n\n Cond::all()\n\n .add_option(Some(Expr::col(Glyph::Aspect).lt(8)))\n\n .add(Expr::col(Glyph::Aspect).is_not_null()),\n\n )\n\n .to_string(MysqlQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT `id` FROM `glyph` WHERE `aspect` < 8 AND `aspect` IS NOT NULL\"#\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 88, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_25() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Char::Character)\n\n .from(Char::Table)\n\n .and_where(\n\n Expr::col(Char::SizeW)\n\n .mul(2)\n\n .equals(Expr::col(Char::SizeH).div(2))\n\n )\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\" FROM \"character\" WHERE \"size_w\" * 2 = \"size_h\" / 2\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 89, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_34a() {\n\n assert_eq!(\n\n Query::select()\n\n .column(Glyph::Aspect)\n\n .expr(Expr::col(Glyph::Image).max())\n\n .from(Glyph::Table)\n\n .group_by_columns(vec![Glyph::Aspect,])\n\n .or_having(\n\n Expr::col(Glyph::Aspect)\n\n .gt(2)\n\n 
.or(Expr::col(Glyph::Aspect).lt(8))\n\n )\n\n .or_having(\n\n Expr::col(Glyph::Aspect)\n\n .gt(12)\n\n .and(Expr::col(Glyph::Aspect).lt(18))\n\n )\n\n .or_having(Expr::col(Glyph::Aspect).gt(32))\n\n .to_string(PostgresQueryBuilder),\n\n vec![\n\n r#\"SELECT \"aspect\", MAX(\"image\") FROM \"glyph\" GROUP BY \"aspect\"\"#,\n\n r#\"HAVING ((\"aspect\" > 2) OR (\"aspect\" < 8))\"#,\n\n r#\"OR ((\"aspect\" > 12) AND (\"aspect\" < 18))\"#,\n\n r#\"OR \"aspect\" > 32\"#,\n\n ]\n\n .join(\" \")\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 90, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_40() {\n\n let statement = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(any![\n\n Expr::col(Glyph::Aspect).is_null(),\n\n all![\n\n Expr::col(Glyph::Aspect).is_not_null(),\n\n Expr::col(Glyph::Aspect).lt(8)\n\n ]\n\n ])\n\n .to_string(sea_query::PostgresQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT \"id\" FROM \"glyph\" WHERE \"aspect\" IS NULL OR (\"aspect\" IS NOT NULL AND \"aspect\" < 8)\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 91, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_50() {\n\n let statement = sea_query::Query::select()\n\n .expr(Expr::table_asterisk(Char::Table))\n\n .column((Font::Table, Font::Name))\n\n .from(Char::Table)\n\n .inner_join(\n\n Font::Table,\n\n Expr::tbl(Char::Table, Char::FontId).equals(Font::Table, Font::Id),\n\n )\n\n .to_string(MysqlQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT `character`.*, `font`.`name` FROM `character` INNER JOIN `font` ON `character`.`font_id` = `font`.`id`\"#\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 92, "score": 145059.41611518612 }, { "content": "#[test]\n\n#[should_panic]\n\nfn select_29() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Char::Character, Char::SizeW, Char::SizeH\n\n ])\n\n .from(Char::Table)\n\n 
.and_where(Expr::col(Char::SizeW).eq(3))\n\n .or_where(Expr::col(Char::SizeH).eq(4))\n\n .and_where(Expr::col(Char::SizeH).eq(5))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character`, `size_w`, `size_h` FROM `character` WHERE `size_w` = 3 OR `size_h` = 4 AND `size_h` = 5\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 93, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_5() {\n\n assert_eq!(\n\n Query::select()\n\n .column((Glyph::Table, Glyph::Image))\n\n .from(Glyph::Table)\n\n .and_where(Expr::tbl(Glyph::Table, Glyph::Aspect).is_in(vec![3, 4]))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"glyph\".\"image\" FROM \"glyph\" WHERE \"glyph\".\"aspect\" IN (3, 4)\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 94, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_37() {\n\n let (statement, values) = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(Cond::any().add(Cond::all()).add(Cond::any()))\n\n .build(sea_query::MysqlQueryBuilder);\n\n\n\n assert_eq!(statement, r#\"SELECT `id` FROM `glyph`\"#);\n\n assert_eq!(values.0, vec![]);\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 95, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_3() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![\n\n Char::Character, Char::SizeW, Char::SizeH\n\n ])\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::SizeW).eq(3))\n\n .and_where(Expr::col(Char::SizeH).eq(4))\n\n .to_string(MysqlQueryBuilder),\n\n \"SELECT `character`, `size_w`, `size_h` FROM `character` WHERE `size_w` = 3 AND `size_h` = 4\"\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 96, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_39() {\n\n let (statement, values) = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(\n\n Cond::all()\n\n .add(Expr::col(Glyph::Aspect).is_null())\n\n 
.add(Expr::col(Glyph::Aspect).is_not_null()),\n\n )\n\n .build(sea_query::MysqlQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT `id` FROM `glyph` WHERE `aspect` IS NULL AND `aspect` IS NOT NULL\"#\n\n );\n\n assert_eq!(values.0, vec![]);\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 97, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_19() {\n\n assert_eq!(\n\n Query::select()\n\n .columns(vec![Char::Character])\n\n .from(Char::Table)\n\n .and_where(Expr::col(Char::Character).eq(\"A\"))\n\n .to_string(PostgresQueryBuilder),\n\n r#\"SELECT \"character\" FROM \"character\" WHERE \"character\" = 'A'\"#\n\n );\n\n}\n\n\n", "file_path": "tests/postgres/query.rs", "rank": 98, "score": 145059.41611518612 }, { "content": "#[test]\n\nfn select_48() {\n\n let statement = sea_query::Query::select()\n\n .column(Glyph::Id)\n\n .from(Glyph::Table)\n\n .cond_where(\n\n Cond::all().add_option(Some(ConditionExpression::SimpleExpr(\n\n Expr::tuple([\n\n Expr::col(Glyph::Aspect).into_simple_expr(),\n\n Expr::value(100),\n\n ])\n\n .less_than(Expr::tuple([Expr::value(8), Expr::value(100)])),\n\n ))),\n\n )\n\n .to_string(MysqlQueryBuilder);\n\n\n\n assert_eq!(\n\n statement,\n\n r#\"SELECT `id` FROM `glyph` WHERE (`aspect`, 100) < (8, 100)\"#\n\n );\n\n}\n\n\n", "file_path": "tests/mysql/query.rs", "rank": 99, "score": 145059.41611518612 } ]
Rust
07-rust/stm32f446/stm32f446_pac/src/otg_hs_device/otg_hs_dctl.rs
aaronhktan/stm32-exploration
dcd7674424cd17b02b85c6b3ce533456d5037d65
#[doc = "Reader of register OTG_HS_DCTL"] pub type R = crate::R<u32, super::OTG_HS_DCTL>; #[doc = "Writer for register OTG_HS_DCTL"] pub type W = crate::W<u32, super::OTG_HS_DCTL>; #[doc = "Register OTG_HS_DCTL `reset()`'s with value 0"] impl crate::ResetValue for super::OTG_HS_DCTL { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `RWUSIG`"] pub type RWUSIG_R = crate::R<bool, bool>; #[doc = "Write proxy for field `RWUSIG`"] pub struct RWUSIG_W<'a> { w: &'a mut W, } impl<'a> RWUSIG_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `SDIS`"] pub type SDIS_R = crate::R<bool, bool>; #[doc = "Write proxy for field `SDIS`"] pub struct SDIS_W<'a> { w: &'a mut W, } impl<'a> SDIS_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `GINSTS`"] pub type GINSTS_R = crate::R<bool, bool>; #[doc = "Reader of field `GONSTS`"] pub type GONSTS_R = crate::R<bool, bool>; #[doc = "Reader of field `TCTL`"] pub type TCTL_R = crate::R<u8, u8>; #[doc = "Write proxy for field `TCTL`"] pub struct TCTL_W<'a> { w: &'a mut W, } impl<'a> TCTL_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 4)) | 
(((value as u32) & 0x07) << 4); self.w } } #[doc = "Write proxy for field `SGINAK`"] pub struct SGINAK_W<'a> { w: &'a mut W, } impl<'a> SGINAK_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "Write proxy for field `CGINAK`"] pub struct CGINAK_W<'a> { w: &'a mut W, } impl<'a> CGINAK_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8); self.w } } #[doc = "Write proxy for field `SGONAK`"] pub struct SGONAK_W<'a> { w: &'a mut W, } impl<'a> SGONAK_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9); self.w } } #[doc = "Write proxy for field `CGONAK`"] pub struct CGONAK_W<'a> { w: &'a mut W, } impl<'a> CGONAK_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { 
self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10); self.w } } #[doc = "Reader of field `POPRGDNE`"] pub type POPRGDNE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `POPRGDNE`"] pub struct POPRGDNE_W<'a> { w: &'a mut W, } impl<'a> POPRGDNE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11); self.w } } impl R { #[doc = "Bit 0 - Remote wakeup signaling"] #[inline(always)] pub fn rwusig(&self) -> RWUSIG_R { RWUSIG_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Soft disconnect"] #[inline(always)] pub fn sdis(&self) -> SDIS_R { SDIS_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - Global IN NAK status"] #[inline(always)] pub fn ginsts(&self) -> GINSTS_R { GINSTS_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - Global OUT NAK status"] #[inline(always)] pub fn gonsts(&self) -> GONSTS_R { GONSTS_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bits 4:6 - Test control"] #[inline(always)] pub fn tctl(&self) -> TCTL_R { TCTL_R::new(((self.bits >> 4) & 0x07) as u8) } #[doc = "Bit 11 - Power-on programming done"] #[inline(always)] pub fn poprgdne(&self) -> POPRGDNE_R { POPRGDNE_R::new(((self.bits >> 11) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - Remote wakeup signaling"] #[inline(always)] pub fn rwusig(&mut self) -> RWUSIG_W { RWUSIG_W { w: self } } #[doc = "Bit 1 - Soft disconnect"] #[inline(always)] pub fn sdis(&mut self) -> SDIS_W { SDIS_W { w: self } } #[doc = "Bits 4:6 - Test control"] #[inline(always)] pub fn tctl(&mut self) -> TCTL_W { TCTL_W { w: self } } #[doc = "Bit 7 - Set global IN NAK"] #[inline(always)] pub fn sginak(&mut self) -> SGINAK_W { SGINAK_W { w: self } 
} #[doc = "Bit 8 - Clear global IN NAK"] #[inline(always)] pub fn cginak(&mut self) -> CGINAK_W { CGINAK_W { w: self } } #[doc = "Bit 9 - Set global OUT NAK"] #[inline(always)] pub fn sgonak(&mut self) -> SGONAK_W { SGONAK_W { w: self } } #[doc = "Bit 10 - Clear global OUT NAK"] #[inline(always)] pub fn cgonak(&mut self) -> CGONAK_W { CGONAK_W { w: self } } #[doc = "Bit 11 - Power-on programming done"] #[inline(always)] pub fn poprgdne(&mut self) -> POPRGDNE_W { POPRGDNE_W { w: self } } }
#[doc = "Reader of register OTG_HS_DCTL"] pub type R = crate::R<u32, super::OTG_HS_DCTL>; #[doc = "Writer for register OTG_HS_DCTL"] pub type W = crate::W<u32, super::OTG_HS_DCTL>; #[doc = "Register OTG_HS_DCTL `reset()`'s with value 0"] impl crate::ResetValue for super::OTG_HS_DCTL { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `RWUSIG`"] pub type RWUSIG_R = crate::R<bool, bool>; #[doc = "Write proxy for field `RWUSIG`"] pub struct RWUSIG_W<'a> { w: &'a mut W, } impl<'a> RWUSIG_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Reader of field `SDIS`"] pub type SDIS_R = crate::R<bool, bool>; #[doc = "Write proxy for field `SDIS`"] pub struct SDIS_W<'a> { w: &'a mut W, } impl<'a> SDIS_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Reader of field `GINSTS`"] pub type GINSTS_R = crate::R<bool, bool>; #[doc = "Reader of field `GONSTS`"] pub type GONSTS_R = crate::R<bool, bool>; #[doc = "Reader of field `TCTL`"] pub type TCTL_R = crate::R<u8, u8>; #[doc = "Write proxy for field `TCTL`"] pub struct TCTL_W<'a> { w: &'a mut W, } impl<'a> TCTL_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x07 << 4)) | 
(((value as u32) & 0x07) << 4); self.w } } #[doc = "Write proxy for field `SGINAK`"] pub struct SGINAK_W<'a> { w: &'a mut W, } impl<'a> SGINAK_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "Write proxy for field `CGINAK`"] pub struct CGINAK_W<'a> { w: &'a mut W, } impl<'a> CGINAK_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bi
} #[doc = "Bit 3 - Global OUT NAK status"] #[inline(always)] pub fn gonsts(&self) -> GONSTS_R { GONSTS_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bits 4:6 - Test control"] #[inline(always)] pub fn tctl(&self) -> TCTL_R { TCTL_R::new(((self.bits >> 4) & 0x07) as u8) } #[doc = "Bit 11 - Power-on programming done"] #[inline(always)] pub fn poprgdne(&self) -> POPRGDNE_R { POPRGDNE_R::new(((self.bits >> 11) & 0x01) != 0) } } impl W { #[doc = "Bit 0 - Remote wakeup signaling"] #[inline(always)] pub fn rwusig(&mut self) -> RWUSIG_W { RWUSIG_W { w: self } } #[doc = "Bit 1 - Soft disconnect"] #[inline(always)] pub fn sdis(&mut self) -> SDIS_W { SDIS_W { w: self } } #[doc = "Bits 4:6 - Test control"] #[inline(always)] pub fn tctl(&mut self) -> TCTL_W { TCTL_W { w: self } } #[doc = "Bit 7 - Set global IN NAK"] #[inline(always)] pub fn sginak(&mut self) -> SGINAK_W { SGINAK_W { w: self } } #[doc = "Bit 8 - Clear global IN NAK"] #[inline(always)] pub fn cginak(&mut self) -> CGINAK_W { CGINAK_W { w: self } } #[doc = "Bit 9 - Set global OUT NAK"] #[inline(always)] pub fn sgonak(&mut self) -> SGONAK_W { SGONAK_W { w: self } } #[doc = "Bit 10 - Clear global OUT NAK"] #[inline(always)] pub fn cgonak(&mut self) -> CGONAK_W { CGONAK_W { w: self } } #[doc = "Bit 11 - Power-on programming done"] #[inline(always)] pub fn poprgdne(&mut self) -> POPRGDNE_W { POPRGDNE_W { w: self } } }
t(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8); self.w } } #[doc = "Write proxy for field `SGONAK`"] pub struct SGONAK_W<'a> { w: &'a mut W, } impl<'a> SGONAK_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9); self.w } } #[doc = "Write proxy for field `CGONAK`"] pub struct CGONAK_W<'a> { w: &'a mut W, } impl<'a> CGONAK_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10); self.w } } #[doc = "Reader of field `POPRGDNE`"] pub type POPRGDNE_R = crate::R<bool, bool>; #[doc = "Write proxy for field `POPRGDNE`"] pub struct POPRGDNE_W<'a> { w: &'a mut W, } impl<'a> POPRGDNE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11); self.w } } impl R { #[doc = "Bit 0 - Remote wakeup signaling"] #[inline(always)] pub fn rwusig(&self) -> RWUSIG_R { RWUSIG_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Soft 
disconnect"] #[inline(always)] pub fn sdis(&self) -> SDIS_R { SDIS_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - Global IN NAK status"] #[inline(always)] pub fn ginsts(&self) -> GINSTS_R { GINSTS_R::new(((self.bits >> 2) & 0x01) != 0)
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/generic.rs", "rank": 0, "score": 192988.70578231275 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/generic.rs", "rank": 1, "score": 192988.70578231278 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` 
method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/generic.rs", "rank": 2, "score": 192988.70578231278 }, { "content": "#[entry]\n\nfn main() -> ! { // ! means no return type\n\n // Check out the 'Cortex-M Peripherals' singleton\n\n let cm_p = cortex_m::Peripherals::take().unwrap();\n\n // Set up the SysTick peripheral\n\n // Rust variables are immutable by default; use mut to make mutable\n\n let mut syst = cm_p.SYST;\n\n syst.set_clock_source(SystClkSource::Core);\n\n // ~2s period; STM32F0 by default uses the 8MHz HSI on boot\n\n // (See section 6.2 of the reference manual)\n\n syst.set_reload(16_000_000);\n\n syst.enable_counter();\n\n\n\n // Set up GPIO pin B3 as push-pull output\n\n let p = stm32f0x1::Peripherals::take().unwrap();\n\n let rcc = p.RCC;\n\n // rcc.iopenr is the GPIO clock enable register\n\n // |x| is closure notation in Rust\n\n rcc.ahbenr.write(|w| w.iopben().set_bit());\n\n\n\n // Set moder on third pin of GPIOB to 0b01, output\n", "file_path": "07-rust/stm32f0x1/rust-blink-f031k6/src/main.rs", "rank": 3, "score": 134178.0685062561 }, { "content": "#[entry]\n\nfn main() -> ! { // ! 
means no return type\n\n // Check out the 'Cortex-M Peripherals' singleton\n\n let cm_p = cortex_m::Peripherals::take().unwrap();\n\n // Set up the SysTick peripheral\n\n // Rust variables are immutable by default; use mut to make mutable\n\n let mut syst = cm_p.SYST;\n\n syst.set_clock_source(SystClkSource::Core);\n\n // ~1s period; STM32F4 by default uses the 16MHz HSI on boot\n\n // (See section 6.2.2 in the reference manual)\n\n syst.set_reload(16_000_000);\n\n syst.enable_counter();\n\n\n\n // Set up GPIO pin A5 as push-pull output\n\n let p = stm32f446::Peripherals::take().unwrap();\n\n let rcc = p.RCC;\n\n // rcc.iopenr is the GPIO clock enable register\n\n // |x| is closure notation in Rust\n\n rcc.ahb1enr.write(|w| w.gpioaen().set_bit());\n\n\n\n // Set moder on fifth pin of GPIOB to 0b01, output\n", "file_path": "07-rust/stm32f446/rust-blink-f446re/src/main.rs", "rank": 4, "score": 134178.0685062561 }, { "content": "#[entry]\n\nfn main() -> ! { // ! means no return type\n\n // Check out the 'Cortex-M Peripherals' singleton\n\n let cm_p = cortex_m::Peripherals::take().unwrap();\n\n // Set up the SysTick peripheral\n\n // Rust variables are immutable by default; use mut to make mutable\n\n let mut syst = cm_p.SYST;\n\n syst.set_clock_source(SystClkSource::Core);\n\n // ~2s period; STM32L0 boots to a ~2.1MHz internal oscillator\n\n // (See Section 7.2 of the STM32L0x1 reference manual)\n\n syst.set_reload(4_200_000);\n\n syst.enable_counter();\n\n\n\n // Set up GPIO pin B3 as push-pull output\n\n let p = stm32l0x1::Peripherals::take().unwrap();\n\n let rcc = p.RCC;\n\n // rcc.iopenr is the GPIO clock enable register\n\n // |x| is closure notation in Rust\n\n rcc.iopenr.write(|w| w.iopben().set_bit());\n\n\n\n // Set moder on third pin of GPIOB to 0b01, output\n", "file_path": "07-rust/stm32l0x1/rust-blink-l031k6/src/main.rs", "rank": 5, "score": 134178.0685062561 }, { "content": "TickType_t uxTaskResetEventItemValue( void )\n\n{\n\nTickType_t 
uxReturn;\n\n\n\n\tuxReturn = listGET_LIST_ITEM_VALUE( &( pxCurrentTCB->xEventListItem ) );\n\n\n\n\t/* Reset the event list item to its normal value - so it can be used with\n\n\tqueues and semaphores. */\n\n\tlistSET_LIST_ITEM_VALUE( &( pxCurrentTCB->xEventListItem ), ( ( TickType_t ) configMAX_PRIORITIES - ( TickType_t ) pxCurrentTCB->uxPriority ) ); /*lint !e961 MISRA exception as the casts are only redundant for some ports. */\n\n\n\n\treturn uxReturn;\n", "file_path": "06-freertos/freertos/Source/tasks.c", "rank": 6, "score": 104903.10307163426 }, { "content": "#define portMAX_8_BIT_VALUE\t\t\t\t\t( ( uint8_t ) 0xff )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM4F/port.c", "rank": 7, "score": 100509.13876308527 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/build.rs", "rank": 8, "score": 88441.66588380146 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/build.rs", "rank": 9, "score": 88441.66588380146 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n 
File::create(out.join(\"device.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"device.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/build.rs", "rank": 10, "score": 88441.66588380146 }, { "content": "fn main() {\n\n // Put `memory.x` in our output directory and ensure it's\n\n // on the linker search path.\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n\n\n // By default, Cargo will re-run a build script whenever\n\n // any file in the project changes. By specifying `memory.x`\n\n // here, we ensure the build script is only re-run when\n\n // `memory.x` is changed.\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n}\n", "file_path": "07-rust/stm32f446/rust-blink-f446re/build.rs", "rank": 11, "score": 86885.72247686045 }, { "content": "fn main() {\n\n // Put `memory.x` in our output directory and ensure it's\n\n // on the linker search path.\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n\n\n // By default, Cargo will re-run a build script whenever\n\n // any file in the project changes. 
By specifying `memory.x`\n\n // here, we ensure the build script is only re-run when\n\n // `memory.x` is changed.\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n}\n", "file_path": "07-rust/stm32l0x1/rust-blink-l031k6/build.rs", "rank": 12, "score": 86885.72247686045 }, { "content": "fn main() {\n\n // Put `memory.x` in our output directory and ensure it's\n\n // on the linker search path.\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n\n\n // By default, Cargo will re-run a build script whenever\n\n // any file in the project changes. By specifying `memory.x`\n\n // here, we ensure the build script is only re-run when\n\n // `memory.x` is changed.\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n}\n", "file_path": "07-rust/stm32f0x1/rust-blink-f031k6/build.rs", "rank": 13, "score": 86885.72247686045 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/generic.rs", "rank": 14, "score": 79445.02919668888 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/generic.rs", "rank": 15, "score": 79445.02919668888 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/generic.rs", "rank": 16, "score": 
79445.02919668888 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/generic.rs", "rank": 17, "score": 79431.945204443 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/generic.rs", "rank": 18, "score": 79431.945204443 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/generic.rs", "rank": 19, "score": 79431.945204443 }, { "content": " CAN_FilterRegister_TypeDef sFilterRegister[28]; /*!< CAN Filter Register, Address offset: 0x240-0x31C */\n", "file_path": "03-gpio/include_f446re/stm32f446xx.h", "rank": 20, "score": 57472.86116302295 }, { "content": " CAN_FilterRegister_TypeDef sFilterRegister[28]; /*!< CAN Filter Register, Address offset: 0x240-0x31C */\n", "file_path": "05-timer/include_f446re/stm32f446xx.h", "rank": 21, "score": 57472.86116302295 }, { "content": " CAN_FilterRegister_TypeDef sFilterRegister[28]; /*!< CAN Filter Register, Address offset: 0x240-0x31C */\n", "file_path": "04-interrupt/include_f446re/stm32f446xx.h", "rank": 22, "score": 57472.86116302295 }, { "content": " CAN_FilterRegister_TypeDef sFilterRegister[28]; /*!< CAN Filter Register, Address offset: 0x240-0x31C */\n", "file_path": "06-freertos/include_f446re/stm32f446xx.h", "rank": 23, "score": 57472.86116302295 }, { "content": "\tint8_t *pcWriteTo;\t\t\t\t/*< Points to the free next place in the storage area. 
*/\n", "file_path": "06-freertos/freertos/Source/queue.c", "rank": 24, "score": 57472.86116302295 }, { "content": "\tTickType_t\t\t\txMessageValue;\t\t/*<< An optional value used by a subset of commands, for example, when changing the period of a timer. */\n", "file_path": "06-freertos/freertos/Source/timers.c", "rank": 25, "score": 57471.50877440298 }, { "content": "PRIVILEGED_DATA static volatile BaseType_t xNumOfOverflows \t\t\t= ( BaseType_t ) 0;\n", "file_path": "06-freertos/freertos/Source/tasks.c", "rank": 26, "score": 57470.61156608577 }, { "content": "PRIVILEGED_DATA static volatile TickType_t xNextTaskUnblockTime\t\t= ( TickType_t ) 0U; /* Initialised to portMAX_DELAY before the scheduler starts. */\n", "file_path": "06-freertos/freertos/Source/tasks.c", "rank": 27, "score": 57470.61156608577 }, { "content": "#define uxQueueType\t\t\t\t\t\tpcHead\n", "file_path": "06-freertos/freertos/Source/queue.c", "rank": 28, "score": 56131.91337094036 }, { "content": "BaseType_t xQueueGenericReset( QueueHandle_t xQueue, BaseType_t xNewQueue )\n\n{\n\nQueue_t * const pxQueue = xQueue;\n\n\n\n\tconfigASSERT( pxQueue );\n\n\n\n\ttaskENTER_CRITICAL();\n\n\t{\n\n\t\tpxQueue->u.xQueue.pcTail = pxQueue->pcHead + ( pxQueue->uxLength * pxQueue->uxItemSize ); /*lint !e9016 Pointer arithmetic allowed on char types, especially when it assists conveying intent. */\n\n\t\tpxQueue->uxMessagesWaiting = ( UBaseType_t ) 0U;\n\n\t\tpxQueue->pcWriteTo = pxQueue->pcHead;\n\n\t\tpxQueue->u.xQueue.pcReadFrom = pxQueue->pcHead + ( ( pxQueue->uxLength - 1U ) * pxQueue->uxItemSize ); /*lint !e9016 Pointer arithmetic allowed on char types, especially when it assists conveying intent. */\n\n\t\tpxQueue->cRxLock = queueUNLOCKED;\n\n\t\tpxQueue->cTxLock = queueUNLOCKED;\n\n\n\n\t\tif( xNewQueue == pdFALSE )\n\n\t\t{\n\n\t\t\t/* If there are tasks blocked waiting to read from the queue, then\n\n\t\t\tthe tasks will remain blocked as after this function exits the queue\n\n\t\t\twill still be empty. 
If there are tasks blocked waiting to write to\n\n\t\t\tthe queue, then one should be unblocked as after this function exits\n\n\t\t\tit will be possible to write to it. */\n\n\t\t\tif( listLIST_IS_EMPTY( &( pxQueue->xTasksWaitingToSend ) ) == pdFALSE )\n\n\t\t\t{\n\n\t\t\t\tif( xTaskRemoveFromEventList( &( pxQueue->xTasksWaitingToSend ) ) != pdFALSE )\n\n\t\t\t\t{\n\n\t\t\t\t\tqueueYIELD_IF_USING_PREEMPTION();\n\n\t\t\t\t}\n\n\t\t\t\telse\n\n\t\t\t\t{\n\n\t\t\t\t\tmtCOVERAGE_TEST_MARKER();\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\telse\n\n\t\t\t{\n\n\t\t\t\tmtCOVERAGE_TEST_MARKER();\n\n\t\t\t}\n\n\t\t}\n\n\t\telse\n\n\t\t{\n\n\t\t\t/* Ensure the event queues start in the correct state. */\n\n\t\t\tvListInitialise( &( pxQueue->xTasksWaitingToSend ) );\n\n\t\t\tvListInitialise( &( pxQueue->xTasksWaitingToReceive ) );\n\n\t\t}\n\n\t}\n\n\ttaskEXIT_CRITICAL();\n\n\n\n\t/* A value is returned for calling semantic consistency with previous\n\n\tversions. */\n\n\treturn pdPASS;\n", "file_path": "06-freertos/freertos/Source/queue.c", "rank": 29, "score": 56128.73144872263 }, { "content": "\tconfigLIST_VOLATILE TickType_t xItemValue;\n", "file_path": "06-freertos/freertos/Source/include/list.h", "rank": 30, "score": 56128.51211806137 }, { "content": "PRIVILEGED_DATA static volatile UBaseType_t uxSchedulerSuspended\t= ( UBaseType_t ) pdFALSE;\n", "file_path": "06-freertos/freertos/Source/tasks.c", "rank": 31, "score": 56127.635875746186 }, { "content": "BaseType_t xStreamBufferReset( StreamBufferHandle_t xStreamBuffer )\n\n{\n\nStreamBuffer_t * const pxStreamBuffer = xStreamBuffer;\n\nBaseType_t xReturn = pdFAIL;\n\n\n\n#if( configUSE_TRACE_FACILITY == 1 )\n\n\tUBaseType_t uxStreamBufferNumber;\n\n#endif\n\n\n\n\tconfigASSERT( pxStreamBuffer );\n\n\n\n\t#if( configUSE_TRACE_FACILITY == 1 )\n\n\t{\n\n\t\t/* Store the stream buffer number so it can be restored after the\n\n\t\treset. 
*/\n\n\t\tuxStreamBufferNumber = pxStreamBuffer->uxStreamBufferNumber;\n\n\t}\n\n\t#endif\n\n\n\n\t/* Can only reset a message buffer if there are no tasks blocked on it. */\n\n\ttaskENTER_CRITICAL();\n\n\t{\n\n\t\tif( pxStreamBuffer->xTaskWaitingToReceive == NULL )\n\n\t\t{\n\n\t\t\tif( pxStreamBuffer->xTaskWaitingToSend == NULL )\n\n\t\t\t{\n\n\t\t\t\tprvInitialiseNewStreamBuffer( pxStreamBuffer,\n\n\t\t\t\t\t\t\t\t\t\t\t pxStreamBuffer->pucBuffer,\n\n\t\t\t\t\t\t\t\t\t\t\t pxStreamBuffer->xLength,\n\n\t\t\t\t\t\t\t\t\t\t\t pxStreamBuffer->xTriggerLevelBytes,\n\n\t\t\t\t\t\t\t\t\t\t\t pxStreamBuffer->ucFlags );\n\n\t\t\t\txReturn = pdPASS;\n\n\n\n\t\t\t\t#if( configUSE_TRACE_FACILITY == 1 )\n\n\t\t\t\t{\n\n\t\t\t\t\tpxStreamBuffer->uxStreamBufferNumber = uxStreamBufferNumber;\n\n\t\t\t\t}\n\n\t\t\t\t#endif\n\n\n\n\t\t\t\ttraceSTREAM_BUFFER_RESET( xStreamBuffer );\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\ttaskEXIT_CRITICAL();\n\n\n\n\treturn xReturn;\n", "file_path": "06-freertos/freertos/Source/stream_buffer.c", "rank": 32, "score": 54847.06295371009 }, { "content": "\tEventBits_t uxEventBits;\n", "file_path": "06-freertos/freertos/Source/event_groups.c", "rank": 33, "score": 54842.57219325134 }, { "content": "\tList_t xTasksWaitingForBits;\t\t/*< List of tasks waiting for a bit to be set. 
*/\n", "file_path": "06-freertos/freertos/Source/event_groups.c", "rank": 34, "score": 54842.57219325134 }, { "content": "static size_t prvWriteMessageToBuffer( StreamBuffer_t * const pxStreamBuffer,\n\n\t\t\t\t\t\t\t\t\t\tconst void * pvTxData,\n\n\t\t\t\t\t\t\t\t\t\tsize_t xDataLengthBytes,\n\n\t\t\t\t\t\t\t\t\t\tsize_t xSpace,\n", "file_path": "06-freertos/freertos/Source/stream_buffer.c", "rank": 35, "score": 53630.907899285085 }, { "content": "static size_t prvWriteBytesToBuffer( StreamBuffer_t * const pxStreamBuffer, const uint8_t *pucData, size_t xCount ) PRIVILEGED_FUNCTION;\n", "file_path": "06-freertos/freertos/Source/stream_buffer.c", "rank": 36, "score": 53623.67239880589 }, { "content": "EventBits_t xEventGroupWaitBits( EventGroupHandle_t xEventGroup, const EventBits_t uxBitsToWaitFor, const BaseType_t xClearOnExit, const BaseType_t xWaitForAllBits, TickType_t xTicksToWait )\n\n{\n\nEventGroup_t *pxEventBits = xEventGroup;\n\nEventBits_t uxReturn, uxControlBits = 0;\n\nBaseType_t xWaitConditionMet, xAlreadyYielded;\n\nBaseType_t xTimeoutOccurred = pdFALSE;\n\n\n\n\t/* Check the user is not attempting to wait on the bits used by the kernel\n\n\titself, and that at least one bit is being requested. */\n\n\tconfigASSERT( xEventGroup );\n\n\tconfigASSERT( ( uxBitsToWaitFor & eventEVENT_BITS_CONTROL_BYTES ) == 0 );\n\n\tconfigASSERT( uxBitsToWaitFor != 0 );\n\n\t#if ( ( INCLUDE_xTaskGetSchedulerState == 1 ) || ( configUSE_TIMERS == 1 ) )\n\n\t{\n\n\t\tconfigASSERT( !( ( xTaskGetSchedulerState() == taskSCHEDULER_SUSPENDED ) && ( xTicksToWait != 0 ) ) );\n\n\t}\n\n\t#endif\n\n\n\n\tvTaskSuspendAll();\n\n\t{\n\n\t\tconst EventBits_t uxCurrentEventBits = pxEventBits->uxEventBits;\n\n\n\n\t\t/* Check to see if the wait condition is already met or not. 
*/\n\n\t\txWaitConditionMet = prvTestWaitCondition( uxCurrentEventBits, uxBitsToWaitFor, xWaitForAllBits );\n\n\n\n\t\tif( xWaitConditionMet != pdFALSE )\n\n\t\t{\n\n\t\t\t/* The wait condition has already been met so there is no need to\n\n\t\t\tblock. */\n\n\t\t\tuxReturn = uxCurrentEventBits;\n\n\t\t\txTicksToWait = ( TickType_t ) 0;\n\n\n\n\t\t\t/* Clear the wait bits if requested to do so. */\n\n\t\t\tif( xClearOnExit != pdFALSE )\n\n\t\t\t{\n\n\t\t\t\tpxEventBits->uxEventBits &= ~uxBitsToWaitFor;\n\n\t\t\t}\n\n\t\t\telse\n\n\t\t\t{\n\n\t\t\t\tmtCOVERAGE_TEST_MARKER();\n\n\t\t\t}\n\n\t\t}\n\n\t\telse if( xTicksToWait == ( TickType_t ) 0 )\n\n\t\t{\n\n\t\t\t/* The wait condition has not been met, but no block time was\n\n\t\t\tspecified, so just return the current value. */\n\n\t\t\tuxReturn = uxCurrentEventBits;\n\n\t\t\txTimeoutOccurred = pdTRUE;\n\n\t\t}\n\n\t\telse\n\n\t\t{\n\n\t\t\t/* The task is going to block to wait for its required bits to be\n\n\t\t\tset. uxControlBits are used to remember the specified behaviour of\n\n\t\t\tthis call to xEventGroupWaitBits() - for use when the event bits\n\n\t\t\tunblock the task. */\n\n\t\t\tif( xClearOnExit != pdFALSE )\n\n\t\t\t{\n\n\t\t\t\tuxControlBits |= eventCLEAR_EVENTS_ON_EXIT_BIT;\n\n\t\t\t}\n\n\t\t\telse\n\n\t\t\t{\n\n\t\t\t\tmtCOVERAGE_TEST_MARKER();\n\n\t\t\t}\n\n\n\n\t\t\tif( xWaitForAllBits != pdFALSE )\n\n\t\t\t{\n\n\t\t\t\tuxControlBits |= eventWAIT_FOR_ALL_BITS;\n\n\t\t\t}\n\n\t\t\telse\n\n\t\t\t{\n\n\t\t\t\tmtCOVERAGE_TEST_MARKER();\n\n\t\t\t}\n\n\n\n\t\t\t/* Store the bits that the calling task is waiting for in the\n\n\t\t\ttask's event list item so the kernel knows when a match is\n\n\t\t\tfound. Then enter the blocked state. 
*/\n\n\t\t\tvTaskPlaceOnUnorderedEventList( &( pxEventBits->xTasksWaitingForBits ), ( uxBitsToWaitFor | uxControlBits ), xTicksToWait );\n\n\n\n\t\t\t/* This is obsolete as it will get set after the task unblocks, but\n\n\t\t\tsome compilers mistakenly generate a warning about the variable\n\n\t\t\tbeing returned without being set if it is not done. */\n\n\t\t\tuxReturn = 0;\n\n\n\n\t\t\ttraceEVENT_GROUP_WAIT_BITS_BLOCK( xEventGroup, uxBitsToWaitFor );\n\n\t\t}\n\n\t}\n\n\txAlreadyYielded = xTaskResumeAll();\n\n\n\n\tif( xTicksToWait != ( TickType_t ) 0 )\n\n\t{\n\n\t\tif( xAlreadyYielded == pdFALSE )\n\n\t\t{\n\n\t\t\tportYIELD_WITHIN_API();\n\n\t\t}\n\n\t\telse\n\n\t\t{\n\n\t\t\tmtCOVERAGE_TEST_MARKER();\n\n\t\t}\n\n\n\n\t\t/* The task blocked to wait for its required bits to be set - at this\n\n\t\tpoint either the required bits were set or the block time expired. If\n\n\t\tthe required bits were set they will have been stored in the task's\n\n\t\tevent list item, and they should now be retrieved then cleared. */\n\n\t\tuxReturn = uxTaskResetEventItemValue();\n\n\n\n\t\tif( ( uxReturn & eventUNBLOCKED_DUE_TO_BIT_SET ) == ( EventBits_t ) 0 )\n\n\t\t{\n\n\t\t\ttaskENTER_CRITICAL();\n\n\t\t\t{\n\n\t\t\t\t/* The task timed out, just return the current event bit value. */\n\n\t\t\t\tuxReturn = pxEventBits->uxEventBits;\n\n\n\n\t\t\t\t/* It is possible that the event bits were updated between this\n\n\t\t\t\ttask leaving the Blocked state and running again. 
*/\n\n\t\t\t\tif( prvTestWaitCondition( uxReturn, uxBitsToWaitFor, xWaitForAllBits ) != pdFALSE )\n\n\t\t\t\t{\n\n\t\t\t\t\tif( xClearOnExit != pdFALSE )\n\n\t\t\t\t\t{\n\n\t\t\t\t\t\tpxEventBits->uxEventBits &= ~uxBitsToWaitFor;\n\n\t\t\t\t\t}\n\n\t\t\t\t\telse\n\n\t\t\t\t\t{\n\n\t\t\t\t\t\tmtCOVERAGE_TEST_MARKER();\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t\telse\n\n\t\t\t\t{\n\n\t\t\t\t\tmtCOVERAGE_TEST_MARKER();\n\n\t\t\t\t}\n\n\t\t\t\txTimeoutOccurred = pdTRUE;\n\n\t\t\t}\n\n\t\t\ttaskEXIT_CRITICAL();\n\n\t\t}\n\n\t\telse\n\n\t\t{\n\n\t\t\t/* The task unblocked because the bits were set. */\n\n\t\t}\n\n\n\n\t\t/* The task blocked so control bits may have been set. */\n\n\t\tuxReturn &= ~eventEVENT_BITS_CONTROL_BYTES;\n\n\t}\n\n\ttraceEVENT_GROUP_WAIT_BITS_END( xEventGroup, uxBitsToWaitFor, xTimeoutOccurred );\n\n\n\n\t/* Prevent compiler warnings when trace macros are not used. */\n\n\t( void ) xTimeoutOccurred;\n\n\n\n\treturn uxReturn;\n", "file_path": "06-freertos/freertos/Source/event_groups.c", "rank": 37, "score": 53618.229617090416 }, { "content": "EventBits_t xEventGroupClearBits( EventGroupHandle_t xEventGroup, const EventBits_t uxBitsToClear )\n\n{\n\nEventGroup_t *pxEventBits = xEventGroup;\n\nEventBits_t uxReturn;\n\n\n\n\t/* Check the user is not attempting to clear the bits used by the kernel\n\n\titself. */\n\n\tconfigASSERT( xEventGroup );\n\n\tconfigASSERT( ( uxBitsToClear & eventEVENT_BITS_CONTROL_BYTES ) == 0 );\n\n\n\n\ttaskENTER_CRITICAL();\n\n\t{\n\n\t\ttraceEVENT_GROUP_CLEAR_BITS( xEventGroup, uxBitsToClear );\n\n\n\n\t\t/* The value returned is the event group value prior to the bits being\n\n\t\tcleared. */\n\n\t\tuxReturn = pxEventBits->uxEventBits;\n\n\n\n\t\t/* Clear the bits. 
*/\n\n\t\tpxEventBits->uxEventBits &= ~uxBitsToClear;\n\n\t}\n\n\ttaskEXIT_CRITICAL();\n\n\n\n\treturn uxReturn;\n", "file_path": "06-freertos/freertos/Source/event_groups.c", "rank": 38, "score": 53618.229617090416 }, { "content": "EventBits_t xEventGroupSetBits( EventGroupHandle_t xEventGroup, const EventBits_t uxBitsToSet )\n\n{\n\nListItem_t *pxListItem, *pxNext;\n\nListItem_t const *pxListEnd;\n\nList_t const * pxList;\n\nEventBits_t uxBitsToClear = 0, uxBitsWaitedFor, uxControlBits;\n\nEventGroup_t *pxEventBits = xEventGroup;\n\nBaseType_t xMatchFound = pdFALSE;\n\n\n\n\t/* Check the user is not attempting to set the bits used by the kernel\n\n\titself. */\n\n\tconfigASSERT( xEventGroup );\n\n\tconfigASSERT( ( uxBitsToSet & eventEVENT_BITS_CONTROL_BYTES ) == 0 );\n\n\n\n\tpxList = &( pxEventBits->xTasksWaitingForBits );\n\n\tpxListEnd = listGET_END_MARKER( pxList ); /*lint !e826 !e740 !e9087 The mini list structure is used as the list end to save RAM. This is checked and valid. */\n\n\tvTaskSuspendAll();\n\n\t{\n\n\t\ttraceEVENT_GROUP_SET_BITS( xEventGroup, uxBitsToSet );\n\n\n\n\t\tpxListItem = listGET_HEAD_ENTRY( pxList );\n\n\n\n\t\t/* Set the bits. */\n\n\t\tpxEventBits->uxEventBits |= uxBitsToSet;\n\n\n\n\t\t/* See if the new bit value should unblock any tasks. */\n\n\t\twhile( pxListItem != pxListEnd )\n\n\t\t{\n\n\t\t\tpxNext = listGET_NEXT( pxListItem );\n\n\t\t\tuxBitsWaitedFor = listGET_LIST_ITEM_VALUE( pxListItem );\n\n\t\t\txMatchFound = pdFALSE;\n\n\n\n\t\t\t/* Split the bits waited for from the control bits. */\n\n\t\t\tuxControlBits = uxBitsWaitedFor & eventEVENT_BITS_CONTROL_BYTES;\n\n\t\t\tuxBitsWaitedFor &= ~eventEVENT_BITS_CONTROL_BYTES;\n\n\n\n\t\t\tif( ( uxControlBits & eventWAIT_FOR_ALL_BITS ) == ( EventBits_t ) 0 )\n\n\t\t\t{\n\n\t\t\t\t/* Just looking for single bit being set. 
*/\n\n\t\t\t\tif( ( uxBitsWaitedFor & pxEventBits->uxEventBits ) != ( EventBits_t ) 0 )\n\n\t\t\t\t{\n\n\t\t\t\t\txMatchFound = pdTRUE;\n\n\t\t\t\t}\n\n\t\t\t\telse\n\n\t\t\t\t{\n\n\t\t\t\t\tmtCOVERAGE_TEST_MARKER();\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\telse if( ( uxBitsWaitedFor & pxEventBits->uxEventBits ) == uxBitsWaitedFor )\n\n\t\t\t{\n\n\t\t\t\t/* All bits are set. */\n\n\t\t\t\txMatchFound = pdTRUE;\n\n\t\t\t}\n\n\t\t\telse\n\n\t\t\t{\n\n\t\t\t\t/* Need all bits to be set, but not all the bits were set. */\n\n\t\t\t}\n\n\n\n\t\t\tif( xMatchFound != pdFALSE )\n\n\t\t\t{\n\n\t\t\t\t/* The bits match. Should the bits be cleared on exit? */\n\n\t\t\t\tif( ( uxControlBits & eventCLEAR_EVENTS_ON_EXIT_BIT ) != ( EventBits_t ) 0 )\n\n\t\t\t\t{\n\n\t\t\t\t\tuxBitsToClear |= uxBitsWaitedFor;\n\n\t\t\t\t}\n\n\t\t\t\telse\n\n\t\t\t\t{\n\n\t\t\t\t\tmtCOVERAGE_TEST_MARKER();\n\n\t\t\t\t}\n\n\n\n\t\t\t\t/* Store the actual event flag value in the task's event list\n\n\t\t\t\titem before removing the task from the event list. The\n\n\t\t\t\teventUNBLOCKED_DUE_TO_BIT_SET bit is set so the task knows\n\n\t\t\t\tthat is was unblocked due to its required bits matching, rather\n\n\t\t\t\tthan because it timed out. */\n\n\t\t\t\tvTaskRemoveFromUnorderedEventList( pxListItem, pxEventBits->uxEventBits | eventUNBLOCKED_DUE_TO_BIT_SET );\n\n\t\t\t}\n\n\n\n\t\t\t/* Move onto the next list item. Note pxListItem->pxNext is not\n\n\t\t\tused here as the list item may have been removed from the event list\n\n\t\t\tand inserted into the ready/pending reading list. */\n\n\t\t\tpxListItem = pxNext;\n\n\t\t}\n\n\n\n\t\t/* Clear any bits that matched when the eventCLEAR_EVENTS_ON_EXIT_BIT\n\n\t\tbit was set in the control word. 
*/\n\n\t\tpxEventBits->uxEventBits &= ~uxBitsToClear;\n\n\t}\n\n\t( void ) xTaskResumeAll();\n\n\n\n\treturn pxEventBits->uxEventBits;\n", "file_path": "06-freertos/freertos/Source/event_groups.c", "rank": 39, "score": 53618.229617090416 }, { "content": "static size_t xBlockAllocatedBit = 0;\n", "file_path": "06-freertos/freertos/Source/portable/MemMang/heap_5.c", "rank": 40, "score": 52459.95583543297 }, { "content": "static size_t xBlockAllocatedBit = 0;\n", "file_path": "06-freertos/freertos/Source/portable/MemMang/heap_4.c", "rank": 41, "score": 52459.95583543297 }, { "content": "void vEventGroupClearBitsCallback( void *pvEventGroup, const uint32_t ulBitsToClear )\n\n{\n\n\t( void ) xEventGroupClearBits( pvEventGroup, ( EventBits_t ) ulBitsToClear ); /*lint !e9079 Can't avoid cast to void* as a generic timer callback prototype. Callback casts back to original type so safe. */\n", "file_path": "06-freertos/freertos/Source/event_groups.c", "rank": 42, "score": 52453.166317740535 }, { "content": "void vEventGroupSetBitsCallback( void *pvEventGroup, const uint32_t ulBitsToSet )\n\n{\n\n\t( void ) xEventGroupSetBits( pvEventGroup, ( EventBits_t ) ulBitsToSet ); /*lint !e9079 Can't avoid cast to void* as a generic timer callback prototype. Callback casts back to original type so safe. 
*/\n", "file_path": "06-freertos/freertos/Source/event_groups.c", "rank": 43, "score": 52453.166317740535 }, { "content": "static const uint16_t heapSTRUCT_SIZE\t= ( ( sizeof ( BlockLink_t ) + ( portBYTE_ALIGNMENT - 1 ) ) & ~portBYTE_ALIGNMENT_MASK );\n", "file_path": "06-freertos/freertos/Source/portable/MemMang/heap_2.c", "rank": 44, "score": 52453.10204254395 }, { "content": "static const size_t xHeapStructSize\t= ( sizeof( BlockLink_t ) + ( ( size_t ) ( portBYTE_ALIGNMENT - 1 ) ) ) & ~( ( size_t ) portBYTE_ALIGNMENT_MASK );\n", "file_path": "06-freertos/freertos/Source/portable/MemMang/heap_5.c", "rank": 45, "score": 52453.10204254395 }, { "content": "static const size_t xHeapStructSize\t= ( sizeof( BlockLink_t ) + ( ( size_t ) ( portBYTE_ALIGNMENT - 1 ) ) ) & ~( ( size_t ) portBYTE_ALIGNMENT_MASK );\n", "file_path": "06-freertos/freertos/Source/portable/MemMang/heap_4.c", "rank": 46, "score": 52453.10204254395 }, { "content": "static void prvResetNextTaskUnblockTime( void );\n", "file_path": "06-freertos/freertos/Source/tasks.c", "rank": 47, "score": 52451.65401680738 }, { "content": "BaseType_t MPU_xStreamBufferReset( StreamBufferHandle_t xStreamBuffer ) FREERTOS_SYSTEM_CALL;\n", "file_path": "06-freertos/freertos/Source/include/mpu_prototypes.h", "rank": 48, "score": 52451.65401680738 }, { "content": "BaseType_t MPU_xQueueGenericReset( QueueHandle_t xQueue, BaseType_t xNewQueue ) FREERTOS_SYSTEM_CALL;\n", "file_path": "06-freertos/freertos/Source/include/mpu_prototypes.h", "rank": 49, "score": 52451.65401680738 }, { "content": "EventBits_t xEventGroupGetBitsFromISR( EventGroupHandle_t xEventGroup )\n\n{\n\nUBaseType_t uxSavedInterruptStatus;\n\nEventGroup_t const * const pxEventBits = xEventGroup;\n\nEventBits_t uxReturn;\n\n\n\n\tuxSavedInterruptStatus = portSET_INTERRUPT_MASK_FROM_ISR();\n\n\t{\n\n\t\tuxReturn = pxEventBits->uxEventBits;\n\n\t}\n\n\tportCLEAR_INTERRUPT_MASK_FROM_ISR( uxSavedInterruptStatus );\n\n\n\n\treturn uxReturn;\n", "file_path": 
"06-freertos/freertos/Source/event_groups.c", "rank": 50, "score": 52447.35938732006 }, { "content": "#define heapBITS_PER_BYTE\t\t( ( size_t ) 8 )\n", "file_path": "06-freertos/freertos/Source/portable/MemMang/heap_5.c", "rank": 51, "score": 51332.692742444764 }, { "content": "#define heapBITS_PER_BYTE\t\t( ( size_t ) 8 )\n", "file_path": "06-freertos/freertos/Source/portable/MemMang/heap_4.c", "rank": 52, "score": 51332.692742444764 }, { "content": "EventBits_t MPU_xEventGroupSetBits( EventGroupHandle_t xEventGroup, const EventBits_t uxBitsToSet ) FREERTOS_SYSTEM_CALL;\n", "file_path": "06-freertos/freertos/Source/include/mpu_prototypes.h", "rank": 53, "score": 51326.53331227369 }, { "content": "EventBits_t MPU_xEventGroupWaitBits( EventGroupHandle_t xEventGroup, const EventBits_t uxBitsToWaitFor, const BaseType_t xClearOnExit, const BaseType_t xWaitForAllBits, TickType_t xTicksToWait ) FREERTOS_SYSTEM_CALL;\n", "file_path": "06-freertos/freertos/Source/include/mpu_prototypes.h", "rank": 54, "score": 51326.53331227369 }, { "content": "EventBits_t MPU_xEventGroupClearBits( EventGroupHandle_t xEventGroup, const EventBits_t uxBitsToClear ) FREERTOS_SYSTEM_CALL;\n", "file_path": "06-freertos/freertos/Source/include/mpu_prototypes.h", "rank": 55, "score": 51326.53331227369 }, { "content": "#define portMAX_24_BIT_NUMBER\t\t\t\t( 0xffffffUL )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM0/port.c", "rank": 56, "score": 50258.769547518794 }, { "content": "#define portMAX_24_BIT_NUMBER\t\t\t\t( 0xffffffUL )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM4F/port.c", "rank": 57, "score": 50258.769547518794 }, { "content": "uint32_t MPU_ulTaskNotifyValueClear( TaskHandle_t xTask, uint32_t ulBitsToClear ) FREERTOS_SYSTEM_CALL;\n", "file_path": "06-freertos/freertos/Source/include/mpu_prototypes.h", "rank": 58, "score": 50256.52864573755 }, { "content": "uint8_t MPU_ucQueueGetQueueType( QueueHandle_t xQueue ) FREERTOS_SYSTEM_CALL;\n", 
"file_path": "06-freertos/freertos/Source/include/mpu_prototypes.h", "rank": 59, "score": 50255.74407306045 }, { "content": "#define portMAX_PRIGROUP_BITS\t\t\t\t( ( uint8_t ) 7 )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM4F/port.c", "rank": 60, "score": 50252.61011734772 }, { "content": "#define portTOP_BIT_OF_BYTE\t\t\t\t\t( ( uint8_t ) 0x80 )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM4F/port.c", "rank": 61, "score": 50252.61011734772 }, { "content": "#define portASPEN_AND_LSPEN_BITS\t\t\t( 0x3UL << 30UL )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM4F/port.c", "rank": 62, "score": 50252.61011734772 }, { "content": "#define portNVIC_PENDSVCLEAR_BIT \t\t\t( 1UL << 27UL )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM4F/port.c", "rank": 63, "score": 50252.61011734772 }, { "content": "#define portNVIC_PENDSVSET_BIT\t\t\t\t( 1UL << 28UL )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM0/port.c", "rank": 64, "score": 50252.61011734772 }, { "content": "#define portNVIC_SYSTICK_INT_BIT\t\t\t( 1UL << 1UL )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM4F/port.c", "rank": 65, "score": 49236.35037227041 }, { "content": "#define portNVIC_IP_REGISTERS_OFFSET_16 \t( 0xE000E3F0 )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM4F/port.c", "rank": 66, "score": 49227.70263634573 }, { "content": "#define portNVIC_SYSTICK_ENABLE_BIT\t\t\t( 1UL << 0UL )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM0/port.c", "rank": 67, "score": 49222.7060438369 }, { "content": "#define portNVIC_SYSTICK_INT_BIT\t\t\t( 1UL << 1UL )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM0/port.c", "rank": 68, "score": 49222.7060438369 }, { "content": "#define portNVIC_SYSTICK_CLK_BIT\t\t\t( 1UL << 2UL )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM0/port.c", "rank": 69, "score": 49222.7060438369 }, { "content": 
"#define portNVIC_SYSTICK_ENABLE_BIT\t\t\t( 1UL << 0UL )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM4F/port.c", "rank": 70, "score": 49222.7060438369 }, { "content": "#define portNVIC_SYSTICK_CURRENT_VALUE_REG\t( * ( ( volatile uint32_t * ) 0xe000e018 ) )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM4F/port.c", "rank": 71, "score": 48237.930127727086 }, { "content": "#define portNVIC_SYSTICK_CURRENT_VALUE_REG\t( * ( ( volatile uint32_t * ) 0xe000e018 ) )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM0/port.c", "rank": 72, "score": 48237.930127727086 }, { "content": "#define portNVIC_SYSTICK_COUNT_FLAG_BIT\t\t( 1UL << 16UL )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM0/port.c", "rank": 73, "score": 48234.16899054223 }, { "content": "#define portNVIC_PEND_SYSTICK_CLEAR_BIT\t\t( 1UL << 25UL )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM4F/port.c", "rank": 74, "score": 48234.16899054223 }, { "content": "#define portNVIC_SYSTICK_COUNT_FLAG_BIT\t\t( 1UL << 16UL )\n", "file_path": "06-freertos/freertos/Source/portable/GCC/ARM_CM4F/port.c", "rank": 75, "score": 48234.16899054223 }, { "content": "#[doc = \"Reader of field `GINSTS`\"]\n\npub type GINSTS_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `GONSTS`\"]\n\npub type GONSTS_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TCTL`\"]\n\npub type TCTL_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TCTL`\"]\n\npub struct TCTL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TCTL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x07 << 4)) | (((value as u32) & 0x07) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SGINAK`\"]\n\npub type SGINAK_R = crate::R<bool, bool>;\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/otg_fs_device/fs_dctl.rs", "rank": 77, "score": 
108.06750741753791 }, { "content": "#[doc = \"Reader of register SDCMR\"]\n\npub type R = crate::R<u32, super::SDCMR>;\n\n#[doc = \"Writer for register SDCMR\"]\n\npub type W = crate::W<u32, super::SDCMR>;\n\n#[doc = \"Register SDCMR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SDCMR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `MODE`\"]\n\npub struct MODE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MODE_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/fmc/sdcmr.rs", "rank": 78, "score": 98.06184003549437 }, { "content": "#[doc = \"Reader of register OR\"]\n\npub type R = crate::R<u32, super::OR>;\n\n#[doc = \"Writer for register OR\"]\n\npub type W = crate::W<u32, super::OR>;\n\n#[doc = \"Register OR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::OR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RMP`\"]\n\npub type RMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RMP`\"]\n\npub struct RMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/tim11/or.rs", "rank": 79, "score": 96.96972511099108 }, { "content": "#[doc = \"Reader of register OR\"]\n\npub type R = crate::R<u32, super::OR>;\n\n#[doc = \"Writer for register OR\"]\n\npub type W = crate::W<u32, super::OR>;\n\n#[doc = \"Register OR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::OR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RMP`\"]\n\npub type RMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `RMP`\"]\n\npub struct RMP_W<'a> {\n\n w: &'a mut 
W,\n\n}\n\nimpl<'a> RMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/tim14/or.rs", "rank": 80, "score": 96.96972511099106 }, { "content": "#[doc = \"Reader of register OR\"]\n\npub type R = crate::R<u32, super::OR>;\n\n#[doc = \"Writer for register OR\"]\n\npub type W = crate::W<u32, super::OR>;\n\n#[doc = \"Register OR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::OR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ETR_RMP`\"]\n\npub type ETR_RMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `ETR_RMP`\"]\n\npub struct ETR_RMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ETR_RMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/tim2/or.rs", "rank": 81, "score": 94.72445113417837 }, { "content": "#[doc = \"Reader of register OR\"]\n\npub type R = crate::R<u32, super::OR>;\n\n#[doc = \"Writer for register OR\"]\n\npub type W = crate::W<u32, super::OR>;\n\n#[doc = \"Register OR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::OR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ETR_RMP`\"]\n\npub type ETR_RMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `ETR_RMP`\"]\n\npub struct ETR_RMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ETR_RMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/tim22/or.rs", "rank": 82, "score": 94.72445113417837 }, { "content": "#[doc = \"Reader of register OR\"]\n\npub type R = crate::R<u32, super::OR>;\n\n#[doc = \"Writer for register OR\"]\n\npub type W = crate::W<u32, super::OR>;\n\n#[doc = \"Register OR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::OR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> 
Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ITR1_RMP`\"]\n\npub type ITR1_RMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `ITR1_RMP`\"]\n\npub struct ITR1_RMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ITR1_RMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/tim2/or.rs", "rank": 83, "score": 94.72445113417837 }, { "content": "#[doc = \"Reader of register OR\"]\n\npub type R = crate::R<u32, super::OR>;\n\n#[doc = \"Writer for register OR\"]\n\npub type W = crate::W<u32, super::OR>;\n\n#[doc = \"Register OR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::OR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ETR_RMP`\"]\n\npub type ETR_RMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `ETR_RMP`\"]\n\npub struct ETR_RMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ETR_RMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/tim21/or.rs", "rank": 84, "score": 94.72445113417837 }, { "content": "#[doc = \"Reader of register OR\"]\n\npub type R = crate::R<u32, super::OR>;\n\n#[doc = \"Writer for register OR\"]\n\npub type W = crate::W<u32, super::OR>;\n\n#[doc = \"Register OR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::OR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `IT4_RMP`\"]\n\npub type IT4_RMP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `IT4_RMP`\"]\n\npub struct IT4_RMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> IT4_RMP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/tim5/or.rs", "rank": 85, "score": 94.72445113417835 }, { "content": "#[doc = \"Reader of register DCR\"]\n\npub type R = crate::R<u32, super::DCR>;\n\n#[doc = \"Writer for register 
DCR\"]\n\npub type W = crate::W<u32, super::DCR>;\n\n#[doc = \"Register DCR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DCR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DBL`\"]\n\npub type DBL_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DBL`\"]\n\npub struct DBL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DBL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/tim5/dcr.rs", "rank": 86, "score": 94.28899823669879 }, { "content": "#[doc = \"Reader of register JSQR\"]\n\npub type R = crate::R<u32, super::JSQR>;\n\n#[doc = \"Writer for register JSQR\"]\n\npub type W = crate::W<u32, super::JSQR>;\n\n#[doc = \"Register JSQR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::JSQR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `JL`\"]\n\npub type JL_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `JL`\"]\n\npub struct JL_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> JL_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/adc1/jsqr.rs", "rank": 87, "score": 94.28899823669877 }, { "content": "#[doc = \"Reader of register RTOR\"]\n\npub type R = crate::R<u32, super::RTOR>;\n\n#[doc = \"Writer for register RTOR\"]\n\npub type W = crate::W<u32, super::RTOR>;\n\n#[doc = \"Register RTOR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RTOR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `BLEN`\"]\n\npub type BLEN_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `BLEN`\"]\n\npub struct BLEN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BLEN_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": 
"07-rust/stm32f0x1/stm32f0x1_pac/src/usart1/rtor.rs", "rank": 88, "score": 94.28899823669877 }, { "content": "#[doc = \"Reader of register FLTR\"]\n\npub type R = crate::R<u32, super::FLTR>;\n\n#[doc = \"Writer for register FLTR\"]\n\npub type W = crate::W<u32, super::FLTR>;\n\n#[doc = \"Register FLTR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::FLTR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DNF`\"]\n\npub type DNF_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DNF`\"]\n\npub struct DNF_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DNF_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/i2c3/fltr.rs", "rank": 89, "score": 94.28899823669879 }, { "content": "#[doc = \"Reader of register EP6R\"]\n\npub type R = crate::R<u32, super::EP6R>;\n\n#[doc = \"Writer for register EP6R\"]\n\npub type W = crate::W<u32, super::EP6R>;\n\n#[doc = \"Register EP6R `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::EP6R {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `EA`\"]\n\npub type EA_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `EA`\"]\n\npub struct EA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> EA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usb/ep6r.rs", "rank": 90, "score": 94.28899823669879 }, { "content": "#[doc = \"Reader of register DHR8RD\"]\n\npub type R = crate::R<u32, super::DHR8RD>;\n\n#[doc = \"Writer for register DHR8RD\"]\n\npub type W = crate::W<u32, super::DHR8RD>;\n\n#[doc = \"Register DHR8RD `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DHR8RD {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DACC2DHR`\"]\n\npub type DACC2DHR_R = 
crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DACC2DHR`\"]\n\npub struct DACC2DHR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DACC2DHR_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/dac/dhr8rd.rs", "rank": 91, "score": 94.28899823669879 }, { "content": "#[doc = \"Reader of register CR1\"]\n\npub type R = crate::R<u32, super::CR1>;\n\n#[doc = \"Writer for register CR1\"]\n\npub type W = crate::W<u32, super::CR1>;\n\n#[doc = \"Register CR1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `CKD`\"]\n\npub type CKD_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `CKD`\"]\n\npub struct CKD_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CKD_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/tim11/cr1.rs", "rank": 92, "score": 94.28899823669879 }, { "content": "#[doc = \"Reader of register CFGR2\"]\n\npub type R = crate::R<u32, super::CFGR2>;\n\n#[doc = \"Writer for register CFGR2\"]\n\npub type W = crate::W<u32, super::CFGR2>;\n\n#[doc = \"Register CFGR2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CFGR2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `PREDIV`\"]\n\npub type PREDIV_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `PREDIV`\"]\n\npub struct PREDIV_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PREDIV_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/rcc/cfgr2.rs", "rank": 93, "score": 94.28899823669877 }, { "content": "#[doc = \"Reader of register CR2\"]\n\npub type R = crate::R<u32, super::CR2>;\n\n#[doc = \"Writer for register CR2\"]\n\npub type W = crate::W<u32, super::CR2>;\n\n#[doc = \"Register CR2 `reset()`'s with 
value 0\"]\n\nimpl crate::ResetValue for super::CR2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ADD4`\"]\n\npub type ADD4_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `ADD4`\"]\n\npub struct ADD4_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADD4_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usart1/cr2.rs", "rank": 94, "score": 94.28899823669877 }, { "content": "#[doc = \"Reader of register RTOR\"]\n\npub type R = crate::R<u32, super::RTOR>;\n\n#[doc = \"Writer for register RTOR\"]\n\npub type W = crate::W<u32, super::RTOR>;\n\n#[doc = \"Register RTOR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RTOR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `BLEN`\"]\n\npub type BLEN_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `BLEN`\"]\n\npub struct BLEN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BLEN_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/usart1/rtor.rs", "rank": 95, "score": 94.28899823669879 }, { "content": "#[doc = \"Reader of register CR1\"]\n\npub type R = crate::R<u32, super::CR1>;\n\n#[doc = \"Writer for register CR1\"]\n\npub type W = crate::W<u32, super::CR1>;\n\n#[doc = \"Register CR1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CR1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `CKD`\"]\n\npub type CKD_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `CKD`\"]\n\npub struct CKD_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CKD_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f446/stm32f446_pac/src/tim9/cr1.rs", "rank": 96, "score": 94.28899823669879 }, { "content": "#[doc = \"Reader of 
register DHR8R1\"]\n\npub type R = crate::R<u32, super::DHR8R1>;\n\n#[doc = \"Writer for register DHR8R1\"]\n\npub type W = crate::W<u32, super::DHR8R1>;\n\n#[doc = \"Register DHR8R1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DHR8R1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DACC1DHR`\"]\n\npub type DACC1DHR_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `DACC1DHR`\"]\n\npub struct DACC1DHR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DACC1DHR_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/dac/dhr8r1.rs", "rank": 97, "score": 94.28899823669879 }, { "content": "#[doc = \"Reader of register EP4R\"]\n\npub type R = crate::R<u32, super::EP4R>;\n\n#[doc = \"Writer for register EP4R\"]\n\npub type W = crate::W<u32, super::EP4R>;\n\n#[doc = \"Register EP4R `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::EP4R {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `EA`\"]\n\npub type EA_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `EA`\"]\n\npub struct EA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> EA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32f0x1/stm32f0x1_pac/src/usb/ep4r.rs", "rank": 98, "score": 94.28899823669879 }, { "content": "#[doc = \"Reader of register CFGR1\"]\n\npub type R = crate::R<u32, super::CFGR1>;\n\n#[doc = \"Writer for register CFGR1\"]\n\npub type W = crate::W<u32, super::CFGR1>;\n\n#[doc = \"Register CFGR1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::CFGR1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `AWDCH`\"]\n\npub type AWDCH_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `AWDCH`\"]\n\npub struct AWDCH_W<'a> {\n\n w: &'a 
mut W,\n\n}\n\nimpl<'a> AWDCH_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "07-rust/stm32l0x1/stm32l0x1_pac/src/adc/cfgr1.rs", "rank": 99, "score": 94.28899823669879 } ]
Rust
src/frac.rs
Kate-Painter/BunnyFrac
e7ff387054dc9348adc5b70cb8a0cb94f0979997
use crate::color::*; use std::{process::exit, u8}; use std::fs::create_dir; use std::process::Command; use substring::Substring; pub struct Details { pub frac_type: String, pub imgx: u32, pub imgy: u32, pub scalex: f64, pub scaley: f64, pub centerx: f64, pub centery: f64, pub imax: u32, pub filename: String, } /** / \ / \ / * | O| | O| -- * \ / \ / \ */ pub fn animate_zoom(mut fractal: Details, frames: u32, rate: f64) { if frames > 99999 { println!("Exceeded frame limit."); exit(1); } let dirname: String = fractal.filename .to_string() .substring(0, fractal.filename .to_string() .find(".") .unwrap()) .to_string(); let file_ext: String = fractal.filename .to_string() .substring(fractal.filename .to_string() .find(".") .unwrap() + 1, fractal.filename.len()) .to_string(); create_dir(&dirname).expect("Unable to create animation directory"); for n in 0..frames { fractal.filename = format!("./{}/{:#05}.png", &dirname, n); fractal.scalex = fractal.scalex * rate; fractal.scaley = fractal.scalex * (fractal.imgy as f64 / fractal.imgx as f64); create_fractal(&fractal); } Command::new("ffmpeg") .args(&["-r", "30", "-f", "image2", "-s", &format!("{}x{}", fractal.imgx, fractal.imgy), "-i", &format!("./{}/%05d.{}", &dirname, &file_ext), "-c:v", "libvpx", "-b:v", "1M", &format!("{}.webm", dirname)]) .output() .expect("Failed to spawn ffmpeg procress"); } /** * Draws and saves a fractal when provided with a &Details struct. 
*/ pub fn create_fractal(fractal: &Details) { let scalefx = fractal.scalex / fractal.imgx as f64; let scalefy = fractal.scaley / fractal.imgy as f64; let mut imgbuf = image::ImageBuffer::new(fractal.imgx, fractal.imgy); let mut time: f64 = 0.0; let iterate: fn(&Details, f64, f64) -> u32; match &fractal.frac_type as &str { "m" => { iterate = self::mandelbrot_iter; }, "j" => { iterate = self::julia_iter; }, "b" => { iterate = self::burning_iter; }, _ => { println!("Fractal type not found."); exit(1); }, }; let pick_color: fn(u32) -> image::Rgb<u8>; match "x" { "x" => { pick_color = test_transition_color; }, "b" => { pick_color = test_color; }, _ => { exit(1); }, }; for x in 0..fractal.imgx { for y in 0..fractal.imgy { let cx = x as f64 * scalefx - (fractal.scalex / 2.0) + fractal.centerx; let cy = y as f64 * scalefy - (fractal.scaley / 2.0) - fractal.centery; let i = iterate(fractal, cx, cy); let pixel = imgbuf.get_pixel_mut(x, y); let image::Rgb(_data) = *pixel; if i == fractal.imax { *pixel = image::Rgb([0, 0, 0]); } else { *pixel = pick_color(i); } } time += (1.0 / fractal.imgx as f64) * 100 as f64; print!("\r >>>> {:.2}% done", time); } imgbuf.save(&fractal.filename).unwrap(); } /** * Finds out whether a provided C value is part of the mandelbrot set and returns the escape time as a u32. */ pub fn mandelbrot_iter(fractal: &Details, cx: f64, cy: f64) -> u32 { let c = num_complex::Complex::new(cx, cy); let mut z = num_complex::Complex::new(0.0, 0.0); let mut i: u32 = 0; while i < fractal.imax && z.norm() <= 2.0 { z = z * z + c; i += 1; } return i; } /** * Finds out whether a provided C value is part of the julia set and returns the escape time as a u32. 
*/ pub fn julia_iter(fractal: &Details, zx: f64, zy: f64) -> u32 { let c = num_complex::Complex::new(-0.8, 0.156); let mut z = num_complex::Complex::new(zx, zy); let mut i: u32 = 0; while i < fractal.imax && z.norm() <= 2.0 { z = z * z + c; i += 1; } return i; } /** * Finds out whether a provided C value is part of the burning-ship set and returns the escape time as a u32. */ pub fn burning_iter(fractal: &Details, cx: f64, cy: f64) -> u32 { let c = num_complex::Complex::new(cx, cy); let mut z = num_complex::Complex::new(0.0, 0.0); let mut i: u32 = 0; while i < fractal.imax && z.norm() <= 2.0 { z.re = f64::abs(z.re); z.im = f64::abs(z.im); z = z * z + c; i += 1; } return i; }
use crate::color::*; use std::{process::exit, u8}; use std::fs::create_dir; use std::process::Command; use substring::Substring; pub struct Details { pub frac_type: String, pub imgx: u32, pub imgy: u32, pub scalex: f64, pub scaley: f64, pub centerx: f64, pub centery: f64, pub imax: u32, pub filename: String, } /** / \ / \ / * | O| | O| -- * \ / \ / \ */ pub fn animate_zoom(mut fractal: Details, frames: u32, rate: f64) { if frames > 99999 { println!("Exceeded frame limit."); exit(1); } let dirname: String = fractal.filename .to_string() .substring(0, fractal.filename .to_string() .find(".") .unwrap()) .to_string(); let file_ext: String = fractal.filename .to_string() .substring(fractal.filename .to_string() .find(".") .unwrap() + 1, fractal.filename.len()) .to_string(); create_dir(&dirname).expect("Unable to create animation directory"); for n in 0..frames { fractal.filename = format!("./{}/{:#05}.png", &dirname, n); fractal.scalex = fractal.scalex * rate; fractal.scaley = fractal.scalex * (fractal.imgy as f64 / fractal.imgx as f64); create_fractal(&fractal); } Command::new("ffmpeg") .args(&["-r", "30", "-f", "image2", "-s", &format!("{}x{}", fractal.imgx, fractal.imgy), "-i", &format!("./{}/%05d.{}", &dirname, &file_ext), "-c:v", "libvpx", "-b:v", "1M", &format!("{}.webm", dirname)]) .output() .expect("Failed to spawn ffmpeg procress"); } /** * Draws and saves a fractal when provided with a &Details struct. 
*/ pub fn create_fractal(fractal: &Details) { let scalefx = fractal.scalex / fractal.imgx as f64; let scalefy = fractal.scaley / fractal.imgy as f64; let mut imgbuf = image::ImageBuffer::new(fractal.imgx, fractal.imgy); let mut time: f64 = 0.0; let iterate: fn(&Details, f64, f64) -> u32; match &fractal.frac_type as &str { "m" => { iterate = self::mandelbrot_iter; }, "j" => { iterate = self::julia_iter; }, "b" => { iterate = self::burning_iter; }, _ => { println!("Fractal type not found."); exit(1); }, }; let pick_color: fn(u32) -> image::Rgb<u8>; match "x" { "x" => { pick_color = test_transition_color; }, "b" => { pick_color = test_color; }, _ => { exit(1); }, }; for x in 0..fractal.imgx { for y in 0..fractal.imgy { let cx = x as f64 * scalefx - (fractal.scalex / 2.0) + fractal.centerx; let cy = y as f64 * scalefy - (fractal.scaley / 2.0) - fractal.centery; let i = iterate(fractal, cx, cy); let pixel = imgbuf.get_pixel_mut(x, y); let image::Rgb(_data) = *pixel; if i == fractal.imax { *pixel = image::Rgb([0, 0, 0]); } else { *pixel = pick_color(i); } } time += (1.0 / fractal.imgx as f64) * 100 as f64; print!("\r >>>> {:.2}% done", time); } imgbuf.save(&fractal.filename).unwrap(); } /** * Finds out whether a provided C value is part of the mandelbrot set and returns the escape time as a u32. */ pub fn mandelbrot_iter(fractal: &Details, cx: f64, cy: f64) -> u32 { let c = num_complex::Complex::new(cx, cy); let mut z = num_complex::Complex::new(0.0, 0.0);
/** * Finds out whether a provided C value is part of the julia set and returns the escape time as a u32. */ pub fn julia_iter(fractal: &Details, zx: f64, zy: f64) -> u32 { let c = num_complex::Complex::new(-0.8, 0.156); let mut z = num_complex::Complex::new(zx, zy); let mut i: u32 = 0; while i < fractal.imax && z.norm() <= 2.0 { z = z * z + c; i += 1; } return i; } /** * Finds out whether a provided C value is part of the burning-ship set and returns the escape time as a u32. */ pub fn burning_iter(fractal: &Details, cx: f64, cy: f64) -> u32 { let c = num_complex::Complex::new(cx, cy); let mut z = num_complex::Complex::new(0.0, 0.0); let mut i: u32 = 0; while i < fractal.imax && z.norm() <= 2.0 { z.re = f64::abs(z.re); z.im = f64::abs(z.im); z = z * z + c; i += 1; } return i; }
let mut i: u32 = 0; while i < fractal.imax && z.norm() <= 2.0 { z = z * z + c; i += 1; } return i; }
function_block-function_prefix_line
[]
Rust
demos/in-game/src/ui/components/app.rs
zicklag/raui
bdabe92953c80ea52e67ffd9f8006807d79eb292
use crate::ui::components::{ inventory::inventory, item_cell::{ItemCellProps, ItemCellsProps}, minimap::minimap, new_theme, popup::{popup, PopupProps}, }; use raui_core::prelude::*; use serde::{Deserialize, Serialize}; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct AppProps { #[serde(default)] #[serde(skip_serializing_if = "Vec::is_empty")] pub texts: Vec<String>, } implement_props_data!(AppProps); #[derive(Debug, Default, Clone, Serialize, Deserialize)] pub struct AppSharedProps(pub WidgetId); implement_props_data!(AppSharedProps); #[derive(Debug, Default, Copy, Clone, Serialize, Deserialize)] pub struct AppState { pub popup_index: Option<usize>, } implement_props_data!(AppState); #[derive(Debug, Clone)] pub enum AppMessage { ShowPopup(usize), ClosePopup, } implement_message_data!(AppMessage); fn use_app(context: &mut WidgetContext) { context.life_cycle.change(|context| { for msg in context.messenger.messages { if let Some(msg) = msg.as_any().downcast_ref::<AppMessage>() { match msg { AppMessage::ShowPopup(index) => { drop(context.state.write(AppState { popup_index: Some(*index), })); } AppMessage::ClosePopup => { drop(context.state.write(AppState { popup_index: None })); } } } } }); } #[pre_hooks(use_nav_container_active, use_app)] pub fn app(mut context: WidgetContext) -> WidgetNode { let WidgetContext { id, key, props, state, .. 
} = context; let shared_props = Props::new(AppSharedProps(id.to_owned())).with(new_theme()); let minimap_props = ContentBoxItemLayout { anchors: Rect { left: 1.0, right: 1.0, top: 0.0, bottom: 0.0, }, align: Vec2 { x: 1.0, y: 0.0 }, offset: Vec2 { x: -6.0, y: 6.0 }, ..Default::default() }; let inventory_props = Props::new(ContentBoxItemLayout { anchors: Rect { left: 0.5, right: 0.5, top: 1.0, bottom: 1.0, }, align: Vec2 { x: 0.5, y: 1.0 }, offset: Vec2 { x: 0.0, y: -6.0 }, ..Default::default() }) .with(ItemCellsProps { items: (0..=18) .map(|i| ItemCellProps { image: format!("icon-{}", i), thin: false, }) .collect::<Vec<_>>(), }); let popup = match state.read::<AppState>() { Ok(data) => { if let Some(index) = data.popup_index { let text = match props.read::<AppProps>() { Ok(props) => props.texts.get(index).cloned().unwrap_or_default(), Err(_) => String::new(), }; let popup_props = Props::new(ContentBoxItemLayout { margin: Rect { left: 20.0, right: 20.0, top: 20.0, bottom: 46.0, }, ..Default::default() }) .with(PopupProps { index, text }); widget! { (#{"popup"} popup: {popup_props}) } } else { widget! {()} } } Err(_) => { widget! {()} } }; widget! {(#{key} content_box: {props.clone()} | {shared_props} [ (#{"minimap"} minimap: {minimap_props}) (#{"inventory"} inventory: {inventory_props}) {popup} ])} }
use crate::ui::components::{ inventory::inventory, item_cell::{ItemCellProps, ItemCellsProps}, minimap::minimap, new_theme, popup::{popup, PopupProps}, }; use raui_core::prelude::*; use serde::{Deserialize, Serialize}; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct AppProps { #[serde(default)] #[serde(skip_serializing_if = "Vec::is_empty")] pub texts: Vec<String>, } implement_props_data!(AppProps); #[derive(Debug, Default, Clone, Serialize, Deserialize)] pub struct AppSharedProps(pub WidgetId); implement_props_data!(AppSharedProps); #[derive(Debug, Default, Copy, Clone, Serialize, Deserialize)] pub struct AppState { pub popup_index: Option<usize>, } implement_props_data!(AppState); #[derive(Debug, Clone)] pub enum AppMessage { ShowPopup(usize), ClosePopup, } implement_message_data!(AppMessage); fn use_app(context: &mut WidgetContext) { context.life_cycle.change(|context| { for msg in context.messenger.messages { if let Some(msg) = msg.as_any().downcast_ref::<AppMessage>() { match msg { AppMessage::ShowPopup(index) => { drop(context.state.write(AppState { popup_index: Some(*index), })); } AppMessage::ClosePopup => { drop(context.state.write(AppState { popup_index: None })); } } } } }); } #[pre_hooks(use_nav_container_active, use_app)] pub fn app(mut context: WidgetContext) -> WidgetNode {
let WidgetContext { id, key, props, state, .. } = context; let shared_props = Props::new(AppSharedProps(id.to_owned())).with(new_theme()); let minimap_props = ContentBoxItemLayout { anchors: Rect { left: 1.0, right: 1.0, top: 0.0, bottom: 0.0, }, align: Vec2 { x: 1.0, y: 0.0 }, offset: Vec2 { x: -6.0, y: 6.0 }, ..Default::default() }; let inventory_props = Props::new(ContentBoxItemLayout { anchors: Rect { left: 0.5, right: 0.5, top: 1.0, bottom: 1.0, }, align: Vec2 { x: 0.5, y: 1.0 }, offset: Vec2 { x: 0.0, y: -6.0 }, ..Default::default() }) .with(ItemCellsProps { items: (0..=18) .map(|i| ItemCellProps { image: format!("icon-{}", i), thin: false, }) .collect::<Vec<_>>(), }); let popup = match state.read::<AppState>() { Ok(data) => { if let Some(index) = data.popup_index { let text = match props.read::<AppProps>() { Ok(props) => props.texts.get(index).cloned().unwrap_or_default(), Err(_) => String::new(), }; let popup_props = Props::new(ContentBoxItemLayout { margin: Rect { left: 20.0, right: 20.0, top: 20.0, bottom: 46.0, }, ..Default::default() }) .with(PopupProps { index, text }); widget! { (#{"popup"} popup: {popup_props}) } } else { widget! {()} } } Err(_) => { widget! {()} } }; widget! {(#{key} content_box: {props.clone()} | {shared_props} [ (#{"minimap"} minimap: {minimap_props}) (#{"inventory"} inventory: {inventory_props}) {popup} ])} }
function_block-function_prefix_line
[ { "content": "#[pre_hooks(use_nav_text_input)]\n\npub fn use_text_input(context: &mut WidgetContext) {\n\n fn notify<T>(context: &WidgetMountOrChangeContext, data: T)\n\n where\n\n T: 'static + MessageData,\n\n {\n\n if let Ok(notify) = context.props.read::<TextInputNotifyProps>() {\n\n if let Some(to) = notify.0.read() {\n\n context.messenger.write(to, data);\n\n }\n\n }\n\n }\n\n\n\n context.life_cycle.mount(|context| {\n\n let mut data = context.props.read_cloned_or_default::<TextInputProps>();\n\n data.focused = false;\n\n notify(\n\n &context,\n\n TextInputNotifyMessage {\n\n sender: context.id.to_owned(),\n\n state: data.to_owned(),\n", "file_path": "raui-core/src/widget/component/interactive/input_field.rs", "rank": 0, "score": 358700.9039367027 }, { "content": "pub fn use_nav_text_input(context: &mut WidgetContext) {\n\n context.life_cycle.mount(|context| {\n\n context\n\n .signals\n\n .write(NavSignal::Register(NavType::TextInput));\n\n });\n\n\n\n context.life_cycle.unmount(|context| {\n\n context\n\n .signals\n\n .write(NavSignal::Unregister(NavType::TextInput));\n\n });\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/interactive/navigation.rs", "rank": 1, "score": 358691.4707350582 }, { "content": "pub fn use_text_input_notified_state(context: &mut WidgetContext) {\n\n context.life_cycle.change(|context| {\n\n for msg in context.messenger.messages {\n\n if let Some(msg) = msg.as_any().downcast_ref::<TextInputNotifyMessage>() {\n\n drop(context.state.write_with(msg.state.to_owned()));\n\n }\n\n }\n\n });\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/interactive/input_field.rs", "rank": 2, "score": 351771.1353354726 }, { "content": "#[pre_hooks(use_nav_button)]\n\npub fn use_button(context: &mut WidgetContext) {\n\n fn notify<T>(context: &WidgetMountOrChangeContext, data: T)\n\n where\n\n T: 'static + MessageData,\n\n {\n\n if let Ok(notify) = context.props.read::<ButtonNotifyProps>() {\n\n if let Some(to) = notify.0.read() {\n\n 
context.messenger.write(to, data);\n\n }\n\n }\n\n }\n\n\n\n context.life_cycle.mount(|context| {\n\n notify(\n\n &context,\n\n ButtonNotifyMessage {\n\n sender: context.id.to_owned(),\n\n state: ButtonProps::default(),\n\n prev: ButtonProps::default(),\n\n },\n", "file_path": "raui-core/src/widget/component/interactive/button.rs", "rank": 3, "score": 328368.413919389 }, { "content": "pub fn use_message_forward(context: &mut WidgetContext) {\n\n context.life_cycle.change(|context| {\n\n let (id, no_wrap, types) = match context.props.read::<MessageForwardProps>() {\n\n Ok(forward) => match forward.to.read() {\n\n Some(id) => (id, forward.no_wrap, &forward.types),\n\n _ => return,\n\n },\n\n _ => match context.shared_props.read::<MessageForwardProps>() {\n\n Ok(forward) => match forward.to.read() {\n\n Some(id) => (id, forward.no_wrap, &forward.types),\n\n _ => return,\n\n },\n\n _ => return,\n\n },\n\n };\n\n for msg in context.messenger.messages {\n\n let t = msg.as_any().type_id();\n\n if types.contains(&t) {\n\n if no_wrap {\n\n context\n", "file_path": "raui-core/src/widget/component/mod.rs", "rank": 4, "score": 328363.66961660265 }, { "content": "pub fn use_resize_listener(context: &mut WidgetContext) {\n\n context.life_cycle.mount(|context| {\n\n context.signals.write(ResizeListenerSignal::Register);\n\n });\n\n\n\n context.life_cycle.unmount(|context| {\n\n context.signals.write(ResizeListenerSignal::Unregister);\n\n });\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Serialize, Deserialize)]\n\npub struct WidgetAlpha(pub Scalar);\n\nimplement_props_data!(WidgetAlpha);\n\n\n\nimpl Default for WidgetAlpha {\n\n fn default() -> Self {\n\n Self(1.0)\n\n }\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/mod.rs", "rank": 5, "score": 328363.66961660265 }, { "content": "pub fn use_nav_button(context: &mut WidgetContext) {\n\n context.life_cycle.mount(|context| {\n\n let tracked = context.props.has::<NavButtonTrackingActive>();\n\n context\n\n .signals\n\n 
.write(NavSignal::Register(NavType::Button(tracked)));\n\n });\n\n\n\n context.life_cycle.unmount(|context| {\n\n context\n\n .signals\n\n .write(NavSignal::Unregister(NavType::Button(false)));\n\n });\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/interactive/navigation.rs", "rank": 6, "score": 325261.81393164344 }, { "content": "pub fn use_nav_jump(context: &mut WidgetContext) {\n\n context.life_cycle.change(|context| {\n\n let mode = match context.props.read::<NavJumpActive>() {\n\n Ok(data) => data.0,\n\n Err(_) => return,\n\n };\n\n let looped = context.props.has::<NavJumpLooped>();\n\n let jump = context.props.read_cloned_or_default::<NavJumpMapProps>();\n\n for msg in context.messenger.messages {\n\n if let Some(msg) = msg.as_any().downcast_ref::<NavSignal>() {\n\n match (mode, msg) {\n\n (NavJumpMode::Direction, NavSignal::Up) => {\n\n if looped {\n\n context\n\n .signals\n\n .write(NavSignal::Jump(NavJump::Loop(NavDirection::Up)));\n\n } else {\n\n context.signals.write(NavSignal::Jump(NavJump::Escape(\n\n NavDirection::Up,\n\n jump.up.to_owned(),\n", "file_path": "raui-core/src/widget/component/interactive/navigation.rs", "rank": 7, "score": 325261.81393164344 }, { "content": "pub fn use_nav_item(context: &mut WidgetContext) {\n\n context.life_cycle.mount(|context| {\n\n if context.props.has::<NavItemActive>() {\n\n context.signals.write(NavSignal::Register(NavType::Item));\n\n }\n\n });\n\n\n\n context.life_cycle.unmount(|context| {\n\n context.signals.write(NavSignal::Unregister(NavType::Item));\n\n });\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/interactive/navigation.rs", "rank": 8, "score": 325261.81393164344 }, { "content": "pub fn use_nav_container(context: &mut WidgetContext) {\n\n context.life_cycle.mount(|context| {\n\n if context.props.has::<NavContainerActive>() {\n\n context\n\n .signals\n\n .write(NavSignal::Register(NavType::Container));\n\n }\n\n });\n\n\n\n context.life_cycle.unmount(|context| {\n\n context\n\n 
.signals\n\n .write(NavSignal::Unregister(NavType::Container));\n\n });\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/interactive/navigation.rs", "rank": 9, "score": 325261.81393164344 }, { "content": "#[pre_hooks(use_button, use_text_input)]\n\npub fn use_input_field(context: &mut WidgetContext) {\n\n context.life_cycle.change(|context| {\n\n let focused = context\n\n .state\n\n .map_or_default::<TextInputProps, _, _>(|s| s.focused);\n\n for msg in context.messenger.messages {\n\n if let Some(msg) = msg.as_any().downcast_ref::<NavSignal>() {\n\n match msg {\n\n NavSignal::Accept(true) => {\n\n if !focused {\n\n context\n\n .signals\n\n .write(NavSignal::FocusTextInput(context.id.to_owned().into()));\n\n }\n\n }\n\n NavSignal::Cancel(true) => {\n\n if focused {\n\n context.signals.write(NavSignal::FocusTextInput(().into()));\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n }\n\n });\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/interactive/input_field.rs", "rank": 10, "score": 322268.5866736771 }, { "content": "#[post_hooks(use_nav_item)]\n\npub fn use_nav_item_active(context: &mut WidgetContext) {\n\n context.props.write(NavItemActive);\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/interactive/navigation.rs", "rank": 11, "score": 322263.2241713105 }, { "content": "#[post_hooks(use_nav_container)]\n\npub fn use_nav_container_active(context: &mut WidgetContext) {\n\n context.props.write(NavContainerActive);\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/interactive/navigation.rs", "rank": 12, "score": 322263.2241713105 }, { "content": "#[pre_hooks(use_nav_scroll_view)]\n\npub fn use_scroll_view(context: &mut WidgetContext) {\n\n fn notify<T>(context: &WidgetMountOrChangeContext, data: T)\n\n where\n\n T: 'static + MessageData,\n\n {\n\n if let Ok(notify) = context.props.read::<ScrollViewNotifyProps>() {\n\n if let Some(to) = notify.0.read() {\n\n context.messenger.write(to, data);\n\n }\n\n }\n\n }\n\n\n\n 
context.life_cycle.mount(|context| {\n\n notify(\n\n &context,\n\n ScrollViewNotifyMessage {\n\n sender: context.id.to_owned(),\n\n state: ScrollViewState::default(),\n\n },\n\n );\n", "file_path": "raui-core/src/widget/component/interactive/scroll_view.rs", "rank": 13, "score": 322263.17906602484 }, { "content": "pub fn use_nav_jump_map(context: &mut WidgetContext) {\n\n if !context.props.has::<NavJumpActive>() {\n\n return;\n\n }\n\n\n\n context.life_cycle.change(|context| {\n\n let jump = match context.props.read::<NavJumpMapProps>() {\n\n Ok(jump) => jump,\n\n _ => return,\n\n };\n\n for msg in context.messenger.messages {\n\n if let Some(msg) = msg.as_any().downcast_ref::<NavSignal>() {\n\n match msg {\n\n NavSignal::Up => {\n\n if jump.up.is_some() {\n\n context.signals.write(NavSignal::Select(jump.up.to_owned()));\n\n }\n\n }\n\n NavSignal::Down => {\n\n if jump.down.is_some() {\n", "file_path": "raui-core/src/widget/component/interactive/navigation.rs", "rank": 14, "score": 322258.4798685241 }, { "content": "pub fn use_button_notified_state(context: &mut WidgetContext) {\n\n context.life_cycle.change(|context| {\n\n for msg in context.messenger.messages {\n\n if let Some(msg) = msg.as_any().downcast_ref::<ButtonNotifyMessage>() {\n\n drop(context.state.write_with(msg.state));\n\n }\n\n }\n\n });\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/interactive/button.rs", "rank": 15, "score": 322258.4798685241 }, { "content": "pub fn use_nav_scroll_view(context: &mut WidgetContext) {\n\n context.life_cycle.mount(|context| {\n\n context\n\n .signals\n\n .write(NavSignal::Register(NavType::ScrollView));\n\n });\n\n\n\n context.life_cycle.unmount(|context| {\n\n context\n\n .signals\n\n .write(NavSignal::Unregister(NavType::ScrollView));\n\n });\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/interactive/navigation.rs", "rank": 16, "score": 322258.4798685241 }, { "content": "#[post_hooks(use_nav_jump)]\n\npub fn 
use_nav_jump_direction_active(context: &mut WidgetContext) {\n\n context.props.write(NavJumpActive(NavJumpMode::Direction));\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/interactive/navigation.rs", "rank": 17, "score": 319353.70325364044 }, { "content": "#[post_hooks(use_nav_button)]\n\npub fn use_nav_button_tracking_active(context: &mut WidgetContext) {\n\n context.props.write(NavButtonTrackingActive);\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/interactive/navigation.rs", "rank": 18, "score": 319353.70325364044 }, { "content": "pub fn use_nav_scroll_box(context: &mut WidgetContext) {\n\n context.life_cycle.change(|context| {\n\n for msg in context.messenger.messages {\n\n if let Some(ResizeListenerSignal::Change) = msg.as_any().downcast_ref() {\n\n if let Ok(data) = context.state.read::<ScrollViewState>() {\n\n context\n\n .signals\n\n .write(NavSignal::Jump(NavJump::Scroll(NavScroll::Factor(\n\n data.value, false,\n\n ))));\n\n }\n\n }\n\n }\n\n });\n\n}\n\n\n\n#[pre_hooks(\n\n use_resize_listener,\n\n use_nav_item,\n\n use_nav_container_active,\n\n use_scroll_view,\n\n use_nav_scroll_box\n\n)]\n", "file_path": "raui-core/src/widget/component/containers/scroll_box.rs", "rank": 19, "score": 319348.9589508541 }, { "content": "pub fn use_nav_scroll_view_content(context: &mut WidgetContext) {\n\n context.life_cycle.mount(|context| {\n\n context\n\n .signals\n\n .write(NavSignal::Register(NavType::ScrollViewContent));\n\n });\n\n\n\n context.life_cycle.unmount(|context| {\n\n context\n\n .signals\n\n .write(NavSignal::Unregister(NavType::ScrollViewContent));\n\n });\n\n}\n", "file_path": "raui-core/src/widget/component/interactive/navigation.rs", "rank": 20, "score": 319348.9589508541 }, { "content": "#[post_hooks(use_nav_jump)]\n\npub fn use_nav_jump_vertical_step_active(context: &mut WidgetContext) {\n\n context\n\n .props\n\n .write(NavJumpActive(NavJumpMode::StepVertical));\n\n}\n\n\n", "file_path": 
"raui-core/src/widget/component/interactive/navigation.rs", "rank": 21, "score": 316533.5862302859 }, { "content": "#[post_hooks(use_nav_jump)]\n\npub fn use_nav_jump_step_pages_active(context: &mut WidgetContext) {\n\n context.props.write(NavJumpActive(NavJumpMode::StepPages));\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/interactive/navigation.rs", "rank": 22, "score": 316533.5862302858 }, { "content": "#[post_hooks(use_nav_jump)]\n\npub fn use_nav_jump_horizontal_step_active(context: &mut WidgetContext) {\n\n context\n\n .props\n\n .write(NavJumpActive(NavJumpMode::StepHorizontal));\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/interactive/navigation.rs", "rank": 23, "score": 316533.5862302858 }, { "content": "pub fn use_nav_scroll_box_content(context: &mut WidgetContext) {\n\n context.life_cycle.change(|context| {\n\n for msg in context.messenger.messages {\n\n if let Some(ResizeListenerSignal::Change) = msg.as_any().downcast_ref() {\n\n if let Ok(data) = context.props.read::<ScrollBoxOwner>() {\n\n context\n\n .messenger\n\n .write(data.0.to_owned(), ResizeListenerSignal::Change);\n\n }\n\n }\n\n }\n\n });\n\n}\n\n\n\n#[pre_hooks(\n\n use_resize_listener,\n\n use_nav_item_active,\n\n use_nav_container_active,\n\n use_nav_scroll_view_content,\n\n use_nav_scroll_box_content\n\n)]\n", "file_path": "raui-core/src/widget/component/containers/scroll_box.rs", "rank": 24, "score": 316528.84192749945 }, { "content": "pub fn use_scroll_view_notified_state(context: &mut WidgetContext) {\n\n context.life_cycle.change(|context| {\n\n for msg in context.messenger.messages {\n\n if let Some(msg) = msg.as_any().downcast_ref::<ScrollViewNotifyMessage>() {\n\n drop(context.state.write_with(msg.state.clone()));\n\n }\n\n }\n\n });\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/interactive/scroll_view.rs", "rank": 25, "score": 316528.84192749945 }, { "content": "#[pre_hooks(use_nav_item, use_text_input)]\n\npub fn text_input(mut context: 
WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n state,\n\n named_slots,\n\n ..\n\n } = context;\n\n unpack_named_slots!(named_slots => content);\n\n\n\n if let Some(p) = content.props_mut() {\n\n p.write(state.read_cloned_or_default::<TextInputProps>());\n\n }\n\n\n\n widget! {{{\n\n AreaBoxNode {\n\n id: id.to_owned(),\n\n slot: Box::new(content),\n\n ..Default::default()\n\n }\n\n }}}\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/interactive/input_field.rs", "rank": 26, "score": 314196.1862532964 }, { "content": "pub fn use_nav_scroll_box_side_scrollbars(context: &mut WidgetContext) {\n\n context.life_cycle.mount(|context| {\n\n drop(context.state.write_with(SideScrollbarsState::default()));\n\n });\n\n\n\n context.life_cycle.change(|context| {\n\n for msg in context.messenger.messages {\n\n if let Some(msg) = msg.as_any().downcast_ref::<ButtonNotifyMessage>() {\n\n if msg.state.selected && (msg.state.trigger || msg.state.context) {\n\n let mut dirty = false;\n\n let mut data = context\n\n .state\n\n .read_cloned_or_default::<SideScrollbarsState>();\n\n if msg.sender.key() == \"hbar\" {\n\n data.horizontal = msg.state.pointer.x;\n\n dirty = true;\n\n } else if msg.sender.key() == \"vbar\" {\n\n data.vertical = msg.state.pointer.y;\n\n dirty = true;\n\n }\n", "file_path": "raui-core/src/widget/component/containers/scroll_box.rs", "rank": 27, "score": 313793.99516498006 }, { "content": "fn use_app(context: &mut WidgetContext) {\n\n context.life_cycle.mount(|context| {\n\n drop(context.state.write(AppState::default()));\n\n context\n\n .signals\n\n .write(AppSignal::Ready(context.id.to_owned()));\n\n });\n\n\n\n context.life_cycle.change(|context| {\n\n for msg in context.messenger.messages {\n\n if let Some(msg) = msg.as_any().downcast_ref::<AppMessage>() {\n\n match msg {\n\n AppMessage::ToggleTheme => {\n\n let mut data = match context.state.read::<AppState>() {\n\n Ok(state) => state.clone(),\n\n Err(_) => AppState::default(),\n\n 
};\n\n data.theme = match data.theme {\n\n ThemeMode::Light => ThemeMode::Dark,\n\n ThemeMode::Dark => ThemeMode::Light,\n", "file_path": "demos/todo-app/src/ui/components/app.rs", "rank": 29, "score": 304164.86151509034 }, { "content": "#[pre_hooks(use_popup)]\n\npub fn popup(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext { id, key, props, .. } = context;\n\n\n\n let PopupProps { index, text } = props.read_cloned_or_default::<PopupProps>();\n\n let button_props = Props::new(NavItemActive).with(ButtonNotifyProps(id.to_owned().into()));\n\n let panel_props = props\n\n .clone()\n\n .with(PaperProps {\n\n frame: None,\n\n ..Default::default()\n\n })\n\n .with(VerticalBoxProps {\n\n separation: 10.0,\n\n ..Default::default()\n\n });\n\n let image_props = Props::new(ImageBoxProps {\n\n width: ImageBoxSizeValue::Exact(48.0),\n\n height: ImageBoxSizeValue::Exact(48.0),\n\n material: ImageBoxMaterial::Image(ImageBoxImage {\n\n id: format!(\"icon-{}\", index),\n", "file_path": "demos/in-game/src/ui/components/popup.rs", "rank": 31, "score": 285499.30654745473 }, { "content": "#[pre_hooks(use_inventory)]\n\npub fn inventory(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n key,\n\n props,\n\n state,\n\n ..\n\n } = context;\n\n\n\n let ItemCellsProps { items } = props.read_cloned_or_default();\n\n let data = match state.read::<InventoryState>() {\n\n Ok(data) => *data,\n\n Err(_) => InventoryState::default(),\n\n };\n\n let list_props = Props::new(PaperProps {\n\n frame: None,\n\n ..Default::default()\n\n })\n\n .with(ContentBoxItemLayout {\n\n margin: Rect {\n", "file_path": "demos/in-game/src/ui/components/inventory.rs", "rank": 32, "score": 285499.30654745473 }, { "content": "#[pre_hooks(use_nav_item, use_button)]\n\npub fn button(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n state,\n\n named_slots,\n\n ..\n\n } = context;\n\n unpack_named_slots!(named_slots => content);\n\n\n\n if let 
Some(p) = content.props_mut() {\n\n p.write(state.read_cloned_or_default::<ButtonProps>());\n\n }\n\n\n\n widget! {{{\n\n AreaBoxNode {\n\n id: id.to_owned(),\n\n slot: Box::new(content),\n\n ..Default::default()\n\n }\n\n }}}\n\n}\n", "file_path": "raui-core/src/widget/component/interactive/button.rs", "rank": 33, "score": 283045.7966538604 }, { "content": "#[pre_hooks(use_nav_container_active, use_app)]\n\npub fn app(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n key,\n\n props,\n\n state,\n\n ..\n\n } = context;\n\n\n\n let (theme_mode, tasks) =\n\n state.map_or_default::<AppState, _, _>(|s| (s.theme, s.tasks.clone()));\n\n let theme = new_theme(theme_mode);\n\n let shared_props = Props::new(AppSharedProps { id: id.to_owned() })\n\n .with(theme)\n\n .with(theme_mode);\n\n let bar_props = FlexBoxItemLayout {\n\n grow: 0.0,\n\n shrink: 0.0,\n\n ..Default::default()\n\n };\n", "file_path": "demos/todo-app/src/ui/components/app.rs", "rank": 34, "score": 283045.7671276863 }, { "content": "#[pre_hooks(use_task)]\n\npub fn task(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext { id, key, props, .. 
} = context;\n\n\n\n let data = props.read_cloned_or_default::<TaskProps>();\n\n let checkbox_props = Props::new(FlexBoxItemLayout {\n\n fill: 0.0,\n\n grow: 0.0,\n\n shrink: 0.0,\n\n align: 0.5,\n\n ..Default::default()\n\n })\n\n .with(SwitchPaperProps {\n\n on: data.done,\n\n variant: \"checkbox\".to_owned(),\n\n size_level: 2,\n\n })\n\n .with(NavItemActive)\n\n .with(ButtonNotifyProps(id.to_owned().into()))\n\n .with(ThemedWidgetProps {\n\n color: ThemeColor::Primary,\n", "file_path": "demos/todo-app/src/ui/components/tasks_list.rs", "rank": 35, "score": 280667.6703296333 }, { "content": "#[pre_hooks(use_item_cell)]\n\npub fn item_cell(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n key,\n\n props,\n\n animator,\n\n ..\n\n } = context;\n\n\n\n let ItemCellProps { image, thin } = props.read_cloned_or_default();\n\n let button_props = props\n\n .clone()\n\n .with(NavItemActive)\n\n .with(ButtonNotifyProps(id.to_owned().into()));\n\n let size_props = SizeBoxProps {\n\n width: SizeBoxSizeValue::Exact(if thin { 18.0 } else { 24.0 }),\n\n height: SizeBoxSizeValue::Exact(24.0),\n\n margin: Rect {\n\n left: if thin { -4.0 } else { 1.0 },\n\n right: if thin { -4.0 } else { 1.0 },\n", "file_path": "demos/in-game/src/ui/components/item_cell.rs", "rank": 36, "score": 280667.624350069 }, { "content": "#[pre_hooks(use_button_notified_state, use_text_input_notified_state)]\n\npub fn title_bar(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext { id, key, state, .. 
} = context;\n\n\n\n let ButtonProps {\n\n selected, trigger, ..\n\n } = state.read_cloned_or_default();\n\n let TextInputProps {\n\n text,\n\n cursor_position,\n\n focused,\n\n ..\n\n } = state.read_cloned_or_default();\n\n let text = if text.trim().is_empty() {\n\n \"> Focus here and start typing...\".to_owned()\n\n } else if focused {\n\n if cursor_position < text.len() {\n\n format!(\"{}|{}\", &text[..cursor_position], &text[cursor_position..])\n\n } else {\n\n format!(\"{}|\", text)\n\n }\n", "file_path": "demos/hello-world/src/ui/components/title_bar.rs", "rank": 37, "score": 278368.22111040395 }, { "content": "#[pre_hooks(use_nav_item, use_input_field)]\n\npub fn input_field(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n state,\n\n named_slots,\n\n ..\n\n } = context;\n\n unpack_named_slots!(named_slots => content);\n\n\n\n if let Some(p) = content.props_mut() {\n\n p.write(state.read_cloned_or_default::<ButtonProps>());\n\n p.write(state.read_cloned_or_default::<TextInputProps>());\n\n }\n\n\n\n widget! 
{{{\n\n AreaBoxNode {\n\n id: id.to_owned(),\n\n slot: Box::new(content),\n\n ..Default::default()\n\n }\n\n }}}\n\n}\n", "file_path": "raui-core/src/widget/component/interactive/input_field.rs", "rank": 38, "score": 278363.78860964096 }, { "content": "#[pre_hooks(use_app_bar)]\n\npub fn app_bar(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n props,\n\n shared_props,\n\n state,\n\n ..\n\n } = context;\n\n\n\n let theme_mode = shared_props.read_cloned_or_default::<ThemeModeProps>();\n\n let props = props.clone().with(VerticalBoxProps {\n\n separation: 10.0,\n\n ..Default::default()\n\n });\n\n let line_props = props.clone().with(HorizontalBoxProps {\n\n separation: 10.0,\n\n ..Default::default()\n\n });\n\n let title_props = TextPaperProps {\n\n text: \"TODO Demo App\".to_owned(),\n", "file_path": "demos/todo-app/src/ui/components/app_bar.rs", "rank": 39, "score": 278363.14505408506 }, { "content": "#[pre_hooks(use_button_notified_state)]\n\npub fn image_button(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n key,\n\n props,\n\n state,\n\n ..\n\n } = context;\n\n\n\n let ImageButtonProps {\n\n image,\n\n horizontal_alignment,\n\n } = props.read_cloned_or_default();\n\n let ButtonProps {\n\n selected,\n\n trigger,\n\n context,\n\n ..\n\n } = state.read_cloned_or_default();\n\n let scale = if trigger || context {\n", "file_path": "demos/hello-world/src/ui/components/image_button.rs", "rank": 40, "score": 278363.09994879935 }, { "content": "#[pre_hooks(use_nav_container_active, use_nav_jump, use_nav_item)]\n\npub fn nav_flex_box(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n let props = props\n\n .clone()\n\n .without::<NavContainerActive>()\n\n .without::<NavJumpActive>()\n\n .without::<NavItemActive>();\n\n\n\n widget! 
{\n\n (#{key} flex_box: {props} |[listed_slots]|)\n\n }\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/containers/flex_box.rs", "rank": 41, "score": 276129.17309092893 }, { "content": "#[pre_hooks(use_nav_container_active, use_nav_jump_step_pages_active, use_nav_item)]\n\npub fn nav_switch_box(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n let props = props\n\n .clone()\n\n .without::<NavContainerActive>()\n\n .without::<NavJumpActive>()\n\n .without::<NavItemActive>();\n\n\n\n widget! {\n\n (#{key} switch_box: {props} |[listed_slots]|)\n\n }\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/containers/switch_box.rs", "rank": 42, "score": 276129.10987264983 }, { "content": "#[pre_hooks(use_nav_container_active, use_nav_jump_direction_active, use_nav_item)]\n\npub fn nav_content_box(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n let props = props\n\n .clone()\n\n .without::<NavContainerActive>()\n\n .without::<NavJumpActive>()\n\n .without::<NavItemActive>();\n\n\n\n widget! {\n\n (#{key} content_box: {props} |[listed_slots]|)\n\n }\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/containers/content_box.rs", "rank": 43, "score": 276129.1307868317 }, { "content": "#[pre_hooks(use_nav_container_active, use_nav_jump_direction_active, use_nav_item)]\n\npub fn nav_grid_box(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n let props = props\n\n .clone()\n\n .without::<NavContainerActive>()\n\n .without::<NavJumpActive>()\n\n .without::<NavItemActive>();\n\n\n\n widget! 
{\n\n (#{key} grid_box: {props} |[listed_slots]|)\n\n }\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/containers/grid_box.rs", "rank": 44, "score": 276129.1307868317 }, { "content": "pub fn nav_vertical_box(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n let props = props\n\n .clone()\n\n .without::<NavContainerActive>()\n\n .without::<NavJumpActive>()\n\n .without::<NavItemActive>();\n\n\n\n widget! {\n\n (#{key} vertical_box: {props} |[listed_slots]|)\n\n }\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/containers/vertical_box.rs", "rank": 45, "score": 276123.5514715013 }, { "content": "pub fn nav_horizontal_box(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n let props = props\n\n .clone()\n\n .without::<NavContainerActive>()\n\n .without::<NavJumpActive>()\n\n .without::<NavItemActive>();\n\n\n\n widget! 
{\n\n (#{key} horizontal_box: {props} |[listed_slots]|)\n\n }\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/containers/horizontal_box.rs", "rank": 46, "score": 276123.55147150124 }, { "content": "pub fn nav_scroll_box(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n key,\n\n props,\n\n state,\n\n named_slots,\n\n ..\n\n } = context;\n\n unpack_named_slots!(named_slots => {content, scrollbars});\n\n\n\n let scroll_props = state.read_cloned_or_default::<ScrollViewState>();\n\n\n\n let content_props = Props::new(ContentBoxItemLayout {\n\n align: scroll_props.value,\n\n ..Default::default()\n\n })\n\n .with(ScrollBoxOwner(id.to_owned()));\n\n\n\n if let Some(props) = scrollbars.props_mut() {\n", "file_path": "raui-core/src/widget/component/containers/scroll_box.rs", "rank": 47, "score": 276123.5514715013 }, { "content": "fn use_popup(context: &mut WidgetContext) {\n\n context.life_cycle.change(|context| {\n\n for msg in context.messenger.messages {\n\n if let Some(msg) = msg.as_any().downcast_ref::<ButtonNotifyMessage>() {\n\n if msg.trigger_start() {\n\n let id = context\n\n .shared_props\n\n .read_cloned_or_default::<AppSharedProps>()\n\n .0;\n\n context.messenger.write(id, AppMessage::ClosePopup);\n\n }\n\n }\n\n }\n\n });\n\n}\n\n\n", "file_path": "demos/in-game/src/ui/components/popup.rs", "rank": 48, "score": 275082.699578283 }, { "content": "fn use_inventory(context: &mut WidgetContext) {\n\n context.life_cycle.change(|context| {\n\n for msg in context.messenger.messages {\n\n if let Some(msg) = msg.as_any().downcast_ref::<InventoryMessage>() {\n\n match msg {\n\n InventoryMessage::Prev => {\n\n let mut data = match context.state.read::<InventoryState>() {\n\n Ok(state) => *state,\n\n Err(_) => InventoryState::default(),\n\n };\n\n data.index = data.index.saturating_sub(1);\n\n drop(context.state.write(data));\n\n }\n\n InventoryMessage::Next => {\n\n let mut data = match context.state.read::<InventoryState>() {\n\n 
Ok(state) => *state,\n\n Err(_) => InventoryState::default(),\n\n };\n\n let count = context\n\n .props\n\n .map_or_default::<ItemCellsProps, _, _>(|p| p.items.len());\n\n data.index = (data.index + 1).min(count.saturating_sub(data.count));\n\n drop(context.state.write(data));\n\n }\n\n }\n\n }\n\n }\n\n });\n\n}\n\n\n", "file_path": "demos/in-game/src/ui/components/inventory.rs", "rank": 49, "score": 275082.699578283 }, { "content": "pub fn nav_scroll_box_content(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id, named_slots, ..\n\n } = context;\n\n unpack_named_slots!(named_slots => content);\n\n\n\n widget! {{{\n\n AreaBoxNode {\n\n id: id.to_owned(),\n\n slot: Box::new(content),\n\n ..Default::default()\n\n }\n\n }}}\n\n}\n\n\n", "file_path": "raui-core/src/widget/component/containers/scroll_box.rs", "rank": 50, "score": 273955.15235899895 }, { "content": "pub fn text_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n shared_props,\n\n ..\n\n } = context;\n\n\n\n let TextPaperProps {\n\n text,\n\n width,\n\n height,\n\n variant,\n\n use_main_color,\n\n alignment_override,\n\n transform,\n\n } = props.read_cloned_or_default();\n\n let themed_props = props.read_cloned_or_default::<ThemedWidgetProps>();\n\n let ThemedTextMaterial {\n\n mut alignment,\n", "file_path": "raui-material/src/component/text_paper.rs", "rank": 51, "score": 273324.3004627506 }, { "content": "pub fn nav_scroll_box_side_scrollbars(mut context: WidgetContext) -> WidgetNode {\n\n let WidgetContext { id, key, props, .. 
} = context;\n\n\n\n let view_props = props.read_cloned_or_default::<ScrollViewState>();\n\n\n\n let SideScrollbarsProps {\n\n size,\n\n back_material,\n\n front_material,\n\n } = props.read_cloned_or_default();\n\n\n\n let hbar = if view_props.size_factor.x > 1.0 {\n\n let props = Props::new(ButtonNotifyProps(id.to_owned().into()))\n\n .with(NavItemActive)\n\n .with(NavButtonTrackingActive)\n\n .with(ContentBoxItemLayout {\n\n anchors: Rect {\n\n left: 0.0,\n\n right: 1.0,\n\n top: 1.0,\n", "file_path": "raui-core/src/widget/component/containers/scroll_box.rs", "rank": 52, "score": 271850.2174185889 }, { "content": "pub fn text_box(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n props,\n\n shared_props,\n\n ..\n\n } = context;\n\n\n\n let TextBoxProps {\n\n width,\n\n height,\n\n text,\n\n alignment,\n\n direction,\n\n font,\n\n mut color,\n\n transform,\n\n } = props.read_cloned_or_default();\n\n\n\n let alpha = shared_props.read_cloned_or_default::<WidgetAlpha>().0;\n", "file_path": "raui-core/src/widget/component/text_box.rs", "rank": 53, "score": 270975.320723169 }, { "content": "fn use_task(context: &mut WidgetContext) {\n\n context.life_cycle.change(|context| {\n\n for msg in context.messenger.messages {\n\n if let Some(msg) = msg.as_any().downcast_ref::<ButtonNotifyMessage>() {\n\n if msg.trigger_start() {\n\n match msg.sender.key() {\n\n \"checkbox\" => {\n\n // TODO: figure out better to pass index to the message.\n\n // maybe using props? anything would be better than parsing string.\n\n if let Ok(index) = context.id.key().parse::<usize>() {\n\n let id = context\n\n .shared_props\n\n .read_cloned_or_default::<AppSharedProps>()\n\n .id;\n\n context.messenger.write(id, AppMessage::ToggleTask(index));\n\n }\n\n }\n\n \"delete\" => {\n\n // TODO: figure out better to pass index to the message.\n\n // maybe using props? 
anything would be better than parsing string.\n", "file_path": "demos/todo-app/src/ui/components/tasks_list.rs", "rank": 54, "score": 269911.816182654 }, { "content": "fn use_item_cell(context: &mut WidgetContext) {\n\n context.life_cycle.change(|context| {\n\n for msg in context.messenger.messages {\n\n if let Some(msg) = msg.as_any().downcast_ref::<ButtonNotifyMessage>() {\n\n if msg.trigger_start() {\n\n drop(context.animator.change(\n\n \"\",\n\n Some(Animation::Value(AnimatedValue {\n\n name: \"click\".to_owned(),\n\n duration: 0.15,\n\n })),\n\n ));\n\n match msg.sender.key() {\n\n \"prev\" => {\n\n let id = context\n\n .shared_props\n\n .read_cloned_or_default::<OwningInventoryProps>()\n\n .0;\n\n context.messenger.write(id, InventoryMessage::Prev);\n\n }\n", "file_path": "demos/in-game/src/ui/components/item_cell.rs", "rank": 55, "score": 269911.816182654 }, { "content": "fn use_app_bar(context: &mut WidgetContext) {\n\n context.life_cycle.mount(|context| {\n\n drop(context.state.write(AppBarState::default()));\n\n });\n\n\n\n context.life_cycle.change(|context| {\n\n for msg in context.messenger.messages {\n\n if let Some(msg) = msg.as_any().downcast_ref::<ButtonNotifyMessage>() {\n\n if msg.trigger_start() {\n\n match msg.sender.key() {\n\n \"theme\" => {\n\n let id = context\n\n .shared_props\n\n .read_cloned_or_default::<AppSharedProps>()\n\n .id;\n\n context.messenger.write(id, AppMessage::ToggleTheme);\n\n }\n\n \"save\" => {\n\n let id = context\n\n .shared_props\n", "file_path": "demos/todo-app/src/ui/components/app_bar.rs", "rank": 56, "score": 267449.6342507891 }, { "content": "pub fn text_button_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext { key, props, .. } = context;\n\n\n\n widget! 
{\n\n (#{key} button_paper: {props.clone()} {\n\n content = (#{\"text\"} text_paper: {props.clone()})\n\n })\n\n }\n\n}\n", "file_path": "raui-material/src/component/interactive/text_button_paper.rs", "rank": 57, "score": 266474.6165170446 }, { "content": "pub fn text_field_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext { key, props, .. } = context;\n\n\n\n widget! {\n\n (#{key} input_field: {props.clone()} {\n\n content = (#{\"content\"} text_field_paper_content: {props.clone()})\n\n })\n\n }\n\n}\n", "file_path": "raui-material/src/component/interactive/text_field_paper.rs", "rank": 58, "score": 266474.6165170446 }, { "content": "pub fn minimap(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext { key, props, .. } = context;\n\n\n\n let size_props = props.clone().with(SizeBoxProps {\n\n width: SizeBoxSizeValue::Exact(64.0),\n\n height: SizeBoxSizeValue::Exact(64.0),\n\n ..Default::default()\n\n });\n\n let panel_props = props.clone().with(PaperProps {\n\n frame: None,\n\n ..Default::default()\n\n });\n\n let image_props = Props::new(ImageBoxProps {\n\n material: ImageBoxMaterial::Image(ImageBoxImage {\n\n id: \"in-game-minimap\".to_owned(),\n\n tint: Color {\n\n r: 0.75,\n\n g: 0.75,\n\n b: 0.75,\n\n a: 1.0,\n", "file_path": "demos/in-game/src/ui/components/minimap.rs", "rank": 59, "score": 226663.49031630502 }, { "content": "pub fn paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n shared_props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n let paper_props = props.read_cloned_or_default::<PaperProps>();\n\n let themed_props = props.read_cloned_or_default::<ThemedWidgetProps>();\n\n\n\n let items = match themed_props.variant {\n\n ThemeVariant::ContentOnly => listed_slots,\n\n ThemeVariant::Filled => {\n\n let content_background = shared_props.map_or_default::<ThemeProps, _, _>(|props| {\n\n props\n\n .content_backgrounds\n\n .get(&paper_props.variant)\n\n .cloned()\n", "file_path": 
"raui-material/src/component/containers/paper.rs", "rank": 60, "score": 226663.49031630502 }, { "content": "pub fn content(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext { key, props, .. } = context;\n\n\n\n let props0 = Props::new(ImageButtonProps {\n\n image: \"cat\".to_owned(),\n\n horizontal_alignment: 1.0,\n\n })\n\n .with(GridBoxItemLayout {\n\n space_occupancy: IntRect {\n\n left: 0,\n\n right: 1,\n\n top: 0,\n\n bottom: 1,\n\n },\n\n margin: Rect {\n\n left: 8.0,\n\n right: 8.0,\n\n top: 8.0,\n\n bottom: 8.0,\n\n },\n", "file_path": "demos/hello-world/src/ui/components/content.rs", "rank": 61, "score": 224796.8458776654 }, { "content": "pub fn icon_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n shared_props,\n\n ..\n\n } = context;\n\n\n\n let themed_props = props.read_cloned_or_default::<ThemedWidgetProps>();\n\n let tint = match shared_props.read::<ThemeProps>() {\n\n Ok(props) => match themed_props.color {\n\n ThemeColor::Default => props.active_colors.contrast.default.main,\n\n ThemeColor::Primary => props.active_colors.contrast.primary.main,\n\n ThemeColor::Secondary => props.active_colors.contrast.secondary.main,\n\n },\n\n Err(_) => Default::default(),\n\n };\n\n let icon_props = props.read_cloned_or_default::<IconPaperProps>();\n\n let size = match shared_props.read::<ThemeProps>() {\n\n Ok(props) => props\n", "file_path": "raui-material/src/component/icon_paper.rs", "rank": 62, "score": 224796.8458776654 }, { "content": "pub fn app(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key, named_slots, ..\n\n } = context;\n\n unpack_named_slots!(named_slots => { title, content });\n\n\n\n title.remap_props(|props| {\n\n props.with(FlexBoxItemLayout {\n\n grow: 0.0,\n\n shrink: 0.0,\n\n ..Default::default()\n\n })\n\n });\n\n let props = Props::new(VerticalBoxProps {\n\n separation: 16.0,\n\n ..Default::default()\n\n })\n\n .with(NavJumpLooped);\n\n\n\n widget! 
{\n\n (#{key} nav_vertical_box: {props} [\n\n {title}\n\n {content}\n\n ])\n\n }\n\n}\n", "file_path": "demos/hello-world/src/ui/components/app.rs", "rank": 63, "score": 224796.8458776654 }, { "content": "pub fn switch_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n shared_props,\n\n ..\n\n } = context;\n\n\n\n let SwitchPaperProps {\n\n on,\n\n variant,\n\n size_level,\n\n } = props.read_cloned_or_default();\n\n let themed_props = props.read_cloned_or_default::<ThemedWidgetProps>();\n\n let color = match shared_props.read::<ThemeProps>() {\n\n Ok(props) => match themed_props.color {\n\n ThemeColor::Default => props.active_colors.main.default.main,\n\n ThemeColor::Primary => props.active_colors.main.primary.main,\n\n ThemeColor::Secondary => props.active_colors.main.secondary.main,\n\n },\n", "file_path": "raui-material/src/component/switch_paper.rs", "rank": 64, "score": 224796.8458776654 }, { "content": "pub fn vertical_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n widget! {\n\n (#{key} paper: {props.clone()} [\n\n (#{\"vertical\"} vertical_box: {props.clone()} |[ listed_slots ]|)\n\n ])\n\n }\n\n}\n", "file_path": "raui-material/src/component/containers/vertical_paper.rs", "rank": 65, "score": 222987.7480073361 }, { "content": "pub fn horizontal_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n widget! {\n\n (#{key} paper: {props.clone()} [\n\n (#{\"horizontal\"} horizontal_box: {props.clone()} |[ listed_slots ]|)\n\n ])\n\n }\n\n}\n", "file_path": "raui-material/src/component/containers/horizontal_paper.rs", "rank": 66, "score": 222987.7480073361 }, { "content": "pub fn space_box(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext { id, props, .. 
} = context;\n\n\n\n let SpaceBoxProps { width, height } = props.read_cloned_or_default();\n\n\n\n widget! {{{\n\n SizeBoxNode {\n\n id: id.to_owned(),\n\n props: props.clone(),\n\n width: SizeBoxSizeValue::Exact(width),\n\n height: SizeBoxSizeValue::Exact(height),\n\n ..Default::default()\n\n }\n\n }}}\n\n}\n", "file_path": "raui-core/src/widget/component/space_box.rs", "rank": 67, "score": 222987.7480073361 }, { "content": "pub fn button_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n named_slots,\n\n ..\n\n } = context;\n\n unpack_named_slots!(named_slots => content);\n\n\n\n widget! {\n\n (#{key} button: {props.clone()} {\n\n content = (#{\"content\"} button_paper_content: {props.clone()} {\n\n content = {content}\n\n })\n\n })\n\n }\n\n}\n", "file_path": "raui-material/src/component/interactive/button_paper.rs", "rank": 68, "score": 222987.7480073361 }, { "content": "pub fn wrap_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n named_slots,\n\n ..\n\n } = context;\n\n unpack_named_slots!(named_slots => content);\n\n\n\n widget! {\n\n (#{key} paper: {props.clone()} [\n\n (#{\"wrap\"} wrap_box: {props.clone()} {\n\n content = {content}\n\n })\n\n ])\n\n }\n\n}\n", "file_path": "raui-material/src/component/containers/wrap_paper.rs", "rank": 69, "score": 222987.7480073361 }, { "content": "pub fn grid_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n widget! 
{\n\n (#{key} paper: {props.clone()} [\n\n (#{\"grid\"} grid_box: {props.clone()} |[ listed_slots ]|)\n\n ])\n\n }\n\n}\n", "file_path": "raui-material/src/component/containers/grid_paper.rs", "rank": 70, "score": 222987.7480073361 }, { "content": "pub fn image_box(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n props,\n\n shared_props,\n\n ..\n\n } = context;\n\n\n\n let ImageBoxProps {\n\n width,\n\n height,\n\n content_keep_aspect_ratio,\n\n mut material,\n\n transform,\n\n } = props.read_cloned_or_default();\n\n\n\n let alpha = shared_props.read_cloned_or_default::<WidgetAlpha>().0;\n\n match &mut material {\n\n ImageBoxMaterial::Color(image) => {\n\n image.color.a *= alpha;\n", "file_path": "raui-core/src/widget/component/image_box.rs", "rank": 71, "score": 222987.7480073361 }, { "content": "pub fn flex_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n widget! {\n\n (#{key} paper: {props.clone()} [\n\n (#{\"flex\"} flex_box: {props.clone()} |[ listed_slots ]|)\n\n ])\n\n }\n\n}\n", "file_path": "raui-material/src/component/containers/flex_paper.rs", "rank": 72, "score": 222987.7480073361 }, { "content": "pub fn nav_horizontal_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n widget! {\n\n (#{key} paper: {props.clone()} [\n\n (#{\"horizontal\"} nav_horizontal_box: {props.clone()} |[ listed_slots ]|)\n\n ])\n\n }\n\n}\n\n\n", "file_path": "raui-material/src/component/containers/horizontal_paper.rs", "rank": 73, "score": 221233.49459440698 }, { "content": "pub fn nav_flex_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n widget! 
{\n\n (#{key} paper: {props.clone()} [\n\n (#{\"flex\"} nav_flex_box: {props.clone()} |[ listed_slots ]|)\n\n ])\n\n }\n\n}\n\n\n", "file_path": "raui-material/src/component/containers/flex_paper.rs", "rank": 74, "score": 221233.49459440698 }, { "content": "pub fn color_rect(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext { key, props, .. } = context;\n\n\n\n let color = props.read_cloned_or_default::<ColorRectProps>().color;\n\n let props = props.clone().with(ImageBoxProps {\n\n material: ImageBoxMaterial::Color(ImageBoxColor {\n\n color,\n\n scaling: ImageBoxImageScaling::Frame((10.0, true).into()),\n\n }),\n\n ..Default::default()\n\n });\n\n\n\n widget! {\n\n (#{key} image_box: {props})\n\n }\n\n}\n", "file_path": "demos/hello-world/src/ui/components/color_rect.rs", "rank": 75, "score": 221233.49459440698 }, { "content": "pub fn nav_grid_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n widget! 
{\n\n (#{key} paper: {props.clone()} [\n\n (#{\"grid\"} nav_grid_box: {props.clone()} |[ listed_slots ]|)\n\n ])\n\n }\n\n}\n\n\n", "file_path": "raui-material/src/component/containers/grid_paper.rs", "rank": 76, "score": 221233.49459440698 }, { "content": "pub fn content_box(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n let ContentBoxProps {\n\n clipping,\n\n transform,\n\n } = props.read_cloned_or_default();\n\n\n\n let items = listed_slots\n\n .into_iter()\n\n .filter_map(|slot| {\n\n if let Some(props) = slot.props() {\n\n let layout = props.read_cloned_or_default::<ContentBoxItemLayout>();\n\n Some(ContentBoxItemNode { slot, layout })\n\n } else {\n", "file_path": "raui-core/src/widget/component/containers/content_box.rs", "rank": 77, "score": 221233.49459440698 }, { "content": "pub fn variant_box(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n props,\n\n mut named_slots,\n\n ..\n\n } = context;\n\n\n\n let VariantBoxProps { variant_name } = props.read_cloned_or_default();\n\n\n\n if let Some(variant_name) = variant_name {\n\n named_slots.remove(&variant_name).unwrap_or_default()\n\n } else {\n\n Default::default()\n\n }\n\n}\n", "file_path": "raui-core/src/widget/component/containers/variant_box.rs", "rank": 78, "score": 221233.49459440698 }, { "content": "pub fn size_box(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n props,\n\n named_slots,\n\n ..\n\n } = context;\n\n unpack_named_slots!(named_slots => content);\n\n\n\n let SizeBoxProps {\n\n width,\n\n height,\n\n margin,\n\n transform,\n\n } = props.read_cloned_or_default();\n\n\n\n widget! 
{{{\n\n SizeBoxNode {\n\n id: id.to_owned(),\n\n props: props.clone(),\n\n slot: Box::new(content),\n\n width,\n\n height,\n\n margin,\n\n transform,\n\n }\n\n }}}\n\n}\n", "file_path": "raui-core/src/widget/component/containers/size_box.rs", "rank": 79, "score": 221233.49459440698 }, { "content": "pub fn switch_box(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n let SwitchBoxProps {\n\n active_index,\n\n clipping,\n\n transform,\n\n } = props.read_cloned_or_default();\n\n\n\n let items = if let Some(index) = active_index {\n\n if let Some(slot) = listed_slots.into_iter().nth(index) {\n\n vec![ContentBoxItemNode {\n\n slot,\n\n ..Default::default()\n\n }]\n", "file_path": "raui-core/src/widget/component/containers/switch_box.rs", "rank": 80, "score": 221233.49459440698 }, { "content": "pub fn vertical_box(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n let VerticalBoxProps {\n\n separation,\n\n reversed,\n\n transform,\n\n } = props.read_cloned_or_default();\n\n\n\n let props = props.clone().with(FlexBoxProps {\n\n direction: if reversed {\n\n FlexBoxDirection::VerticalBottomToTop\n\n } else {\n\n FlexBoxDirection::VerticalTopToBottom\n\n },\n\n separation,\n\n wrap: false,\n\n transform,\n\n });\n\n\n\n widget! {\n\n (#{key} flex_box: {props} |[ listed_slots ]|)\n\n }\n\n}\n", "file_path": "raui-core/src/widget/component/containers/vertical_box.rs", "rank": 81, "score": 221233.49459440698 }, { "content": "pub fn nav_vertical_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n widget! 
{\n\n (#{key} paper: {props.clone()} [\n\n (#{\"vertical\"} nav_vertical_box: {props.clone()} |[ listed_slots ]|)\n\n ])\n\n }\n\n}\n\n\n", "file_path": "raui-material/src/component/containers/vertical_paper.rs", "rank": 82, "score": 221233.49459440698 }, { "content": "pub fn grid_box(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n let GridBoxProps {\n\n cols,\n\n rows,\n\n transform,\n\n } = props.read_cloned_or_default();\n\n\n\n let items = listed_slots\n\n .into_iter()\n\n .filter_map(|slot| {\n\n if let Some(props) = slot.props() {\n\n let layout = props.read_cloned_or_default::<GridBoxItemLayout>();\n\n Some(GridBoxItemNode { slot, layout })\n", "file_path": "raui-core/src/widget/component/containers/grid_box.rs", "rank": 83, "score": 221233.49459440698 }, { "content": "pub fn tasks_list(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext { key, props, .. } = context;\n\n\n\n let TasksProps { tasks } = props.read_cloned_or_default();\n\n let tasks = tasks\n\n .into_iter()\n\n .enumerate()\n\n .map(|(i, item)| {\n\n widget! { (#{i} task: {item}) }\n\n })\n\n .collect::<Vec<_>>();\n\n let props = props.clone().with(VerticalBoxProps {\n\n separation: 10.0,\n\n ..Default::default()\n\n });\n\n\n\n widget! 
{\n\n (#{key} vertical_box: {props} |[ tasks ]|)\n\n }\n\n}\n", "file_path": "demos/todo-app/src/ui/components/tasks_list.rs", "rank": 84, "score": 221233.49459440698 }, { "content": "pub fn flex_box(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n let FlexBoxProps {\n\n direction,\n\n separation,\n\n wrap,\n\n transform,\n\n } = props.read_cloned_or_default();\n\n\n\n let items = listed_slots\n\n .into_iter()\n\n .filter_map(|slot| {\n\n if let Some(props) = slot.props() {\n\n let layout = props.read_cloned_or_default::<FlexBoxItemLayout>();\n", "file_path": "raui-core/src/widget/component/containers/flex_box.rs", "rank": 85, "score": 221233.49459440698 }, { "content": "pub fn horizontal_box(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n listed_slots,\n\n ..\n\n } = context;\n\n\n\n let HorizontalBoxProps {\n\n separation,\n\n reversed,\n\n transform,\n\n } = props.read_cloned_or_default();\n\n\n\n let props = props.clone().with(FlexBoxProps {\n\n direction: if reversed {\n\n FlexBoxDirection::HorizontalRightToLeft\n\n } else {\n\n FlexBoxDirection::HorizontalLeftToRight\n\n },\n\n separation,\n\n wrap: false,\n\n transform,\n\n });\n\n\n\n widget! {\n\n (#{key} flex_box: {props} |[ listed_slots ]|)\n\n }\n\n}\n", "file_path": "raui-core/src/widget/component/containers/horizontal_box.rs", "rank": 86, "score": 221233.49459440698 }, { "content": "pub fn wrap_box(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n id,\n\n props,\n\n named_slots,\n\n ..\n\n } = context;\n\n unpack_named_slots!(named_slots => content);\n\n\n\n let WrapBoxProps { margin } = props.read_cloned_or_default();\n\n\n\n widget! 
{{{\n\n SizeBoxNode {\n\n id: id.to_owned(),\n\n props: props.clone(),\n\n slot: Box::new(content),\n\n margin,\n\n ..Default::default()\n\n }\n\n }}}\n\n}\n", "file_path": "raui-core/src/widget/component/containers/wrap_box.rs", "rank": 87, "score": 221233.49459440698 }, { "content": "pub fn switch_button_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext { key, props, .. } = context;\n\n\n\n widget! {\n\n (#{key} button_paper: {props.clone()} {\n\n content = (#{\"switch\"} switch_paper: {props.clone()})\n\n })\n\n }\n\n}\n", "file_path": "raui-material/src/component/interactive/switch_button_paper.rs", "rank": 88, "score": 219531.55390817684 }, { "content": "pub fn icon_button_paper(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext { key, props, .. } = context;\n\n\n\n widget! {\n\n (#{key} button_paper: {props.clone()} {\n\n content = (#{\"icon\"} icon_paper: {props.clone()})\n\n })\n\n }\n\n}\n", "file_path": "raui-material/src/component/interactive/icon_button_paper.rs", "rank": 89, "score": 219531.55390817684 }, { "content": "fn text_field_paper_content(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext { key, props, .. 
} = context;\n\n\n\n let TextFieldPaperProps {\n\n hint,\n\n width,\n\n height,\n\n variant,\n\n use_main_color,\n\n inactive_alpha,\n\n alignment_override,\n\n transform,\n\n paper_theme,\n\n padding,\n\n } = props.read_cloned_or_default();\n\n let TextInputProps {\n\n text,\n\n cursor_position,\n\n focused,\n\n ..\n", "file_path": "raui-material/src/component/interactive/text_field_paper.rs", "rank": 90, "score": 208662.46282770147 }, { "content": "pub fn setup(app: &mut Application) {\n\n app.register_props::<component::containers::paper::PaperProps>(\"PaperProps\");\n\n app.register_props::<component::icon_paper::IconPaperProps>(\"IconPaperProps\");\n\n app.register_props::<component::interactive::text_field_paper::TextFieldPaperProps>(\n\n \"TextFieldPaperProps\",\n\n );\n\n app.register_props::<component::switch_paper::SwitchPaperProps>(\"SwitchPaperProps\");\n\n app.register_props::<component::text_paper::TextPaperProps>(\"TextPaperProps\");\n\n app.register_props::<theme::ThemedWidgetProps>(\"ThemedWidgetProps\");\n\n app.register_props::<theme::ThemeProps>(\"ThemeProps\");\n\n\n\n app.register_component(\n\n \"nav_flex_paper\",\n\n component::containers::flex_paper::nav_flex_paper,\n\n );\n\n app.register_component(\"flex_paper\", component::containers::flex_paper::flex_paper);\n\n app.register_component(\n\n \"nav_grid_paper\",\n\n component::containers::grid_paper::nav_grid_paper,\n\n );\n", "file_path": "raui-material/src/lib.rs", "rank": 91, "score": 200116.62531866244 }, { "content": "pub fn setup(app: &mut Application) {\n\n app.register_props::<()>(\"()\");\n\n app.register_props::<i8>(\"i8\");\n\n app.register_props::<i16>(\"i16\");\n\n app.register_props::<i32>(\"i32\");\n\n app.register_props::<i64>(\"i64\");\n\n app.register_props::<i128>(\"i128\");\n\n app.register_props::<u8>(\"u8\");\n\n app.register_props::<u16>(\"u16\");\n\n app.register_props::<u32>(\"u32\");\n\n app.register_props::<u64>(\"u64\");\n\n 
app.register_props::<u128>(\"u128\");\n\n app.register_props::<f32>(\"f32\");\n\n app.register_props::<f64>(\"f64\");\n\n app.register_props::<bool>(\"bool\");\n\n app.register_props::<String>(\"String\");\n\n app.register_props::<component::containers::content_box::ContentBoxProps>(\"ContentBoxProps\");\n\n app.register_props::<component::containers::flex_box::FlexBoxProps>(\"FlexBoxProps\");\n\n app.register_props::<component::containers::grid_box::GridBoxProps>(\"GridBoxProps\");\n\n app.register_props::<component::containers::horizontal_box::HorizontalBoxProps>(\n", "file_path": "raui-core/src/widget/mod.rs", "rank": 92, "score": 197813.4767388514 }, { "content": "pub fn make_default_theme(\n\n default: Color,\n\n primary: Color,\n\n secondary: Color,\n\n background: Color,\n\n) -> ThemeProps {\n\n let background_primary = color_lerp(background, primary, 0.05);\n\n let background_secondary = color_lerp(background, secondary, 0.05);\n\n let mut content_backgrounds = HashMap::with_capacity(1);\n\n content_backgrounds.insert(String::new(), Default::default());\n\n let mut button_backgrounds = HashMap::with_capacity(1);\n\n button_backgrounds.insert(String::new(), Default::default());\n\n let mut text_variants = HashMap::with_capacity(1);\n\n text_variants.insert(\n\n String::new(),\n\n ThemedTextMaterial {\n\n font: TextBoxFont {\n\n size: 18.0,\n\n ..Default::default()\n\n },\n", "file_path": "raui-material/src/theme.rs", "rank": 93, "score": 180778.22416374827 }, { "content": "fn button_paper_content(context: WidgetContext) -> WidgetNode {\n\n let WidgetContext {\n\n key,\n\n props,\n\n shared_props,\n\n named_slots,\n\n ..\n\n } = context;\n\n unpack_named_slots!(named_slots => content);\n\n\n\n let button_props = props.read_cloned_or_default::<ButtonProps>();\n\n let paper_props = props.read_cloned_or_default::<PaperProps>();\n\n let themed_props = props.read_cloned_or_default::<ThemedWidgetProps>();\n\n\n\n let items = match themed_props.variant {\n\n 
ThemeVariant::ContentOnly => vec![content],\n\n ThemeVariant::Filled => {\n\n let button_background = shared_props.map_or_default::<ThemeProps, _, _>(|props| {\n\n if button_props.trigger || button_props.context {\n\n props\n", "file_path": "raui-material/src/component/interactive/button_paper.rs", "rank": 94, "score": 163960.00585397566 }, { "content": "pub trait Prefab: Serialize + DeserializeOwned {\n\n fn from_prefab(data: PrefabValue) -> Result<Self, PrefabError> {\n\n match serde_yaml::from_value(data) {\n\n Ok(result) => Ok(result),\n\n Err(error) => Err(PrefabError::CouldNotDeserialize(error.to_string())),\n\n }\n\n }\n\n\n\n fn to_prefab(&self) -> Result<PrefabValue, PrefabError> {\n\n match serde_yaml::to_value(self) {\n\n Ok(result) => Ok(result),\n\n Err(error) => Err(PrefabError::CouldNotSerialize(error.to_string())),\n\n }\n\n }\n\n}\n\n\n\npub mod prelude {\n\n pub use crate::{\n\n animator::*,\n\n application::*,\n", "file_path": "raui-core/src/lib.rs", "rank": 95, "score": 150673.6174363891 }, { "content": "pub fn make_colors_bundle(\n\n default: ThemeColorSet,\n\n primary: ThemeColorSet,\n\n secondary: ThemeColorSet,\n\n) -> ThemeColorsBundle {\n\n let contrast = ThemeColors {\n\n default: ThemeColorSet {\n\n main: contrast_color(default.main),\n\n light: contrast_color(default.light),\n\n dark: contrast_color(default.dark),\n\n },\n\n primary: ThemeColorSet {\n\n main: contrast_color(primary.main),\n\n light: contrast_color(primary.light),\n\n dark: contrast_color(primary.dark),\n\n },\n\n secondary: ThemeColorSet {\n\n main: contrast_color(secondary.main),\n\n light: contrast_color(secondary.light),\n\n dark: contrast_color(secondary.dark),\n\n },\n\n };\n\n let main = ThemeColors {\n\n default,\n\n primary,\n\n secondary,\n\n };\n\n ThemeColorsBundle { main, contrast }\n\n}\n\n\n", "file_path": "raui-material/src/theme.rs", "rank": 96, "score": 139173.9890639518 }, { "content": "fn setup(app: &mut Application) {\n\n app.setup(setup_core);\n\n 
app.setup(setup_material);\n\n}\n\n\n\npub struct AppState {\n\n ui: TetraSimpleHost,\n\n mockup_image: Texture,\n\n}\n\n\n\nimpl AppState {\n\n pub fn new(context: &mut Context) -> tetra::Result<Self> {\n\n let mockup_image = Texture::new(context, \"./resources/images/in-game-mockup.png\")?;\n\n let assets = serde_json::from_str::<AssetsManifest>(\n\n &read_to_string(\"./resources/assets.json\").expect(\"Could not load assets manifest!\"),\n\n )\n\n .expect(\"Could not parse assets manifest\");\n\n let fonts = assets\n\n .fonts\n\n .iter()\n", "file_path": "demos/in-game/src/app.rs", "rank": 97, "score": 137405.7267054973 }, { "content": "fn setup(app: &mut Application) {\n\n app.setup(setup_core);\n\n app.setup(setup_material);\n\n}\n\n\n\npub struct TodoState {\n\n ui: TetraSimpleHost,\n\n}\n\n\n\nimpl TodoState {\n\n pub fn new(context: &mut Context) -> tetra::Result<Self> {\n\n let assets = serde_json::from_str::<AssetsManifest>(\n\n &read_to_string(\"./resources/assets.json\").expect(\"Could not load assets manifest\"),\n\n )\n\n .expect(\"Could not parse assets manifest\");\n\n let fonts = assets\n\n .fonts\n\n .iter()\n\n .map(|(k, (s, f, p))| (k.as_str(), *s, *f, p.as_str()))\n\n .collect::<Vec<_>>();\n", "file_path": "demos/todo-app/src/app.rs", "rank": 98, "score": 135809.39464192998 }, { "content": "pub fn new_light_theme() -> ThemeProps {\n\n make_default_theme(\n\n color_from_rgba(241, 250, 238, 1.0),\n\n color_from_rgba(29, 53, 87, 1.0),\n\n color_from_rgba(230, 57, 70, 1.0),\n\n color_from_rgba(255, 255, 255, 1.0),\n\n )\n\n}\n\n\n", "file_path": "raui-material/src/theme.rs", "rank": 99, "score": 133535.2563498422 } ]